How to use the plaso.containers.time_events module in plaso

To help you get started, we’ve selected a few plaso.containers.time_events examples, based on popular ways the module is used in public projects.

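All of the snippets below share one pattern: parse a raw timestamp into a dfdatetime date and time object, wrap that object in a time_events event together with a description of what the timestamp means, and hand the event (plus its event data) to the parser mediator. A minimal sketch of the event construction, using an arbitrary POSIX timestamp:

from dfdatetime import posix_time as dfdatetime_posix_time

from plaso.containers import time_events
from plaso.lib import definitions

# Build a dfdatetime object for the raw timestamp (the value is made up).
date_time = dfdatetime_posix_time.PosixTime(timestamp=1281643591)

# Pair the timestamp with a description of what it represents.
event = time_events.DateTimeValuesEvent(
    date_time, definitions.TIME_DESCRIPTION_WRITTEN)

Inside a parser or plugin the event is then produced via parser_mediator.ProduceEventWithEventData(event, event_data), as every snippet below does.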

log2timeline/plaso: plaso/parsers/winreg_plugins/msie_zones.py (view on GitHub)
def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
    """Extracts events from a Windows Registry key.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
    values_dict = self._GetValuesFromKey(registry_key)

    # Generate an event for the key.
    event_data = windows_events.WindowsRegistryEventData()
    event_data.key_path = registry_key.path
    event_data.values = ' '.join([
        '{0:s}: {1!s}'.format(name, value)
        for name, value in sorted(values_dict.items())]) or None

    event = time_events.DateTimeValuesEvent(
        registry_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)
    parser_mediator.ProduceEventWithEventData(event, event_data)

    if registry_key.number_of_subkeys == 0:
      error_string = 'Key: {0:s} missing subkeys.'.format(registry_key.path)
      parser_mediator.ProduceExtractionWarning(error_string)
      return

    for zone_key in registry_key.GetSubkeys():
      # TODO: these values are stored in the Description value of the
      # zone key. This solution will break on zone values that are larger
      # than 5.
      path = '{0:s}\\{1:s}'.format(
          registry_key.path, self._ZONE_NAMES[zone_key.name])

      settings = []
log2timeline/plaso: plaso/parsers/sqlite_plugins/imessage.py (view on GitHub)
    query_hash = hash(query)

    event_data = IMessageEventData()
    event_data.attachment_location = self._GetRowValue(
        query_hash, row, 'attachment_location')
    event_data.imessage_id = self._GetRowValue(query_hash, row, 'imessage_id')
    event_data.message_type = self._GetRowValue(query_hash, row, 'message_type')
    event_data.offset = self._GetRowValue(query_hash, row, 'ROWID')
    event_data.query = query
    event_data.read_receipt = self._GetRowValue(query_hash, row, 'read_receipt')
    event_data.service = self._GetRowValue(query_hash, row, 'service')
    event_data.text = self._GetRowValue(query_hash, row, 'text')

    timestamp = self._GetRowValue(query_hash, row, 'date')
    date_time = dfdatetime_cocoa_time.CocoaTime(timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_CREATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)
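The only iMessage-specific part of the timestamp handling is the choice of dfdatetime class: the query's date column is treated as a Cocoa timestamp, that is seconds since 2001-01-01 00:00:00 UTC, so the row value is passed to CocoaTime unchanged. A sketch with a made-up value:

from dfdatetime import cocoa_time as dfdatetime_cocoa_time

from plaso.containers import time_events
from plaso.lib import definitions

# 500000000 seconds after the Cocoa epoch (2001-01-01); the value is made up.
date_time = dfdatetime_cocoa_time.CocoaTime(timestamp=500000000)
event = time_events.DateTimeValuesEvent(
    date_time, definitions.TIME_DESCRIPTION_CREATION)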
log2timeline/plaso: plaso/parsers/fseventsd.py (view on GitHub)
    if page_header.signature not in self._DLS_SIGNATURES:
      raise errors.UnableToParseFile('Invalid file signature')

    current_page_end = page_header.page_size

    file_entry = parser_mediator.GetFileEntry()
    date_time = self._GetParentModificationTime(file_entry)
    # TODO: Change this to use a more representative time definition (time span)
    # when https://github.com/log2timeline/dfdatetime/issues/65 is resolved.
    if date_time:
      timestamp_description = definitions.TIME_DESCRIPTION_RECORDED
    else:
      date_time = dfdatetime_semantic_time.SemanticTime('Not set')
      timestamp_description = definitions.TIME_DESCRIPTION_NOT_A_TIME
    event = time_events.DateTimeValuesEvent(date_time, timestamp_description)

    file_size = file_object.get_size()
    while file_offset < file_size:
      if file_offset >= current_page_end:
        try:
          page_header, header_size = self._ParseDLSPageHeader(
              file_object, file_offset)
        except errors.ParseError as exception:
          parser_mediator.ProduceExtractionWarning(
              'Unable to parse page header with error: {0!s}'.format(
                  exception))
          break

        current_page_end += page_header.page_size
        file_offset += header_size
        continue
log2timeline/plaso: plaso/parsers/winlnk.py (view on GitHub)
    event_data.network_path = lnk_file.network_path
    event_data.relative_path = lnk_file.relative_path
    event_data.volume_label = lnk_file.volume_label
    event_data.working_directory = lnk_file.working_directory

    access_time = lnk_file.get_file_access_time_as_integer()
    if access_time != 0:
      date_time = dfdatetime_filetime.Filetime(timestamp=access_time)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_LAST_ACCESS)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    creation_time = lnk_file.get_file_creation_time_as_integer()
    if creation_time != 0:
      date_time = dfdatetime_filetime.Filetime(timestamp=creation_time)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_CREATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    modification_time = lnk_file.get_file_modification_time_as_integer()
    if modification_time != 0:
      date_time = dfdatetime_filetime.Filetime(timestamp=modification_time)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    if access_time == 0 and creation_time == 0 and modification_time == 0:
      date_time = dfdatetime_semantic_time.SemanticTime('Not set')
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_NOT_A_TIME)
      parser_mediator.ProduceEventWithEventData(event, event_data)
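The shortcut timestamps above share one fallback rule: a FILETIME value of zero means the time was never set. Below is a hypothetical helper (not part of plaso) that folds the zero check into one place; note that the parser above instead produces a single 'Not set' placeholder event only when all three timestamps are zero, so the shortcut still appears on the timeline:

from dfdatetime import filetime as dfdatetime_filetime
from dfdatetime import semantic_time as dfdatetime_semantic_time

from plaso.containers import time_events
from plaso.lib import definitions


def MakeFiletimeEvent(timestamp, description):
  # A FILETIME value of 0 means the timestamp was never set.
  if not timestamp:
    date_time = dfdatetime_semantic_time.SemanticTime('Not set')
    return time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_NOT_A_TIME)

  date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
  return time_events.DateTimeValuesEvent(date_time, description)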
log2timeline/plaso: plaso/parsers/winreg_plugins/appcompatcache.py (view on GitHub)
      event_data = AppCompatCacheEventData()
      event_data.entry_index = cached_entry_index + 1
      event_data.key_path = registry_key.path
      event_data.offset = cached_entry_offset
      event_data.path = cached_entry_object.path

      if cached_entry_object.last_modification_time is not None:
        if not cached_entry_object.last_modification_time:
          date_time = dfdatetime_semantic_time.SemanticTime('Not set')
        else:
          date_time = dfdatetime_filetime.Filetime(
              timestamp=cached_entry_object.last_modification_time)

        # TODO: refactor to file modification event.
        event = time_events.DateTimeValuesEvent(
            date_time, 'File Last Modification Time')
        parser_mediator.ProduceEventWithEventData(event, event_data)

      if cached_entry_object.last_update_time is not None:
        if not cached_entry_object.last_update_time:
          date_time = dfdatetime_semantic_time.SemanticTime('Not set')
        else:
          date_time = dfdatetime_filetime.Filetime(
              timestamp=cached_entry_object.last_update_time)

        # TODO: refactor to process run event.
        event = time_events.DateTimeValuesEvent(
            date_time, definitions.TIME_DESCRIPTION_LAST_RUN)
        parser_mediator.ProduceEventWithEventData(event, event_data)

      cached_entry_offset += cached_entry_object.cached_entry_size
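Note that the first event above is created with a plain string, 'File Last Modification Time', as its description: the argument is an arbitrary string, although the shared constants in plaso.lib.definitions (as used for TIME_DESCRIPTION_LAST_RUN) keep descriptions consistent across parsers, which appears to be what the two TODO comments are aiming for.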
log2timeline/plaso: plaso/parsers/plist_plugins/install_history.py (view on GitHub)
        continue

      display_name = entry.get('displayName', '')
      display_version = entry.get('displayVersion', '')
      process_name = entry.get('processName', '')
      package_identifiers = ', '.join(package_identifiers)

      event_data = plist_event.PlistTimeEventData()
      event_data.desc = (
          'Installation of [{0:s} {1:s}] using [{2:s}]. Packages: '
          '{3:s}.').format(
              display_name, display_version, process_name, package_identifiers)
      event_data.key = ''
      event_data.root = '/item'

      event = time_events.PythonDatetimeEvent(
          datetime_value, definitions.TIME_DESCRIPTION_WRITTEN)
      parser_mediator.ProduceEventWithEventData(event, event_data)
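Unlike the other snippets, this plist plugin receives its timestamps as ordinary Python datetime values, so it uses PythonDatetimeEvent rather than DateTimeValuesEvent. A minimal sketch with a made-up value:

import datetime

from plaso.containers import time_events
from plaso.lib import definitions

# Plist parsing typically yields datetime.datetime values directly.
datetime_value = datetime.datetime(2017, 11, 5, 12, 30, 45)
event = time_events.PythonDatetimeEvent(
    datetime_value, definitions.TIME_DESCRIPTION_WRITTEN)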
log2timeline/plaso: plaso/parsers/winlnk.py (view on GitHub)
    if creation_time != 0:
      date_time = dfdatetime_filetime.Filetime(timestamp=creation_time)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_CREATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    modification_time = lnk_file.get_file_modification_time_as_integer()
    if modification_time != 0:
      date_time = dfdatetime_filetime.Filetime(timestamp=modification_time)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    if access_time == 0 and creation_time == 0 and modification_time == 0:
      date_time = dfdatetime_semantic_time.SemanticTime('Not set')
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_NOT_A_TIME)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    if lnk_file.droid_file_identifier:
      try:
        self._ParseDistributedTrackingIdentifier(
            parser_mediator, lnk_file.droid_file_identifier, display_name)
      except (TypeError, ValueError) as exception:
        parser_mediator.ProduceExtractionWarning(
            'unable to read droid file identifier with error: {0!s}.'.format(
                exception))

    if lnk_file.birth_droid_file_identifier:
      try:
        self._ParseDistributedTrackingIdentifier(
            parser_mediator, lnk_file.birth_droid_file_identifier, display_name)
log2timeline/plaso: plaso/parsers/bencode_plugins/utorrent.py (view on GitHub)
      # Convert seconds to minutes.
      seedtime = value.get('seedtime', None)
      event_data.seedtime, _ = divmod(seedtime, 60)

      # Create timeline events based on extracted values.
      for event_key, event_value in iter(value.items()):
        if event_key == 'added_on':
          date_time = dfdatetime_posix_time.PosixTime(timestamp=event_value)
          event = time_events.DateTimeValuesEvent(
              date_time, definitions.TIME_DESCRIPTION_ADDED)
          parser_mediator.ProduceEventWithEventData(event, event_data)

        elif event_key == 'completed_on':
          date_time = dfdatetime_posix_time.PosixTime(timestamp=event_value)
          event = time_events.DateTimeValuesEvent(
              date_time, definitions.TIME_DESCRIPTION_FILE_DOWNLOADED)
          parser_mediator.ProduceEventWithEventData(event, event_data)

        elif event_key == 'modtimes':
          for modtime in event_value:
            # Some values are stored as 0, skip those.
            if not modtime:
              continue

            date_time = dfdatetime_posix_time.PosixTime(timestamp=modtime)
            event = time_events.DateTimeValuesEvent(
                date_time, definitions.TIME_DESCRIPTION_MODIFICATION)
            parser_mediator.ProduceEventWithEventData(event, event_data)
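The if/elif chain above dispatches on the bencoded key one case at a time; the same mapping can be written as a lookup table. A hypothetical rewrite of the first two cases, with a made-up values dictionary standing in for the decoded torrent data:

from dfdatetime import posix_time as dfdatetime_posix_time

from plaso.containers import time_events
from plaso.lib import definitions

_TIME_VALUE_DESCRIPTIONS = {
    'added_on': definitions.TIME_DESCRIPTION_ADDED,
    'completed_on': definitions.TIME_DESCRIPTION_FILE_DOWNLOADED}

value = {'added_on': 1437061301, 'completed_on': 1437064901}

for key, description in _TIME_VALUE_DESCRIPTIONS.items():
  timestamp = value.get(key, None)
  # Some values are stored as 0, skip those.
  if not timestamp:
    continue

  date_time = dfdatetime_posix_time.PosixTime(timestamp=timestamp)
  event = time_events.DateTimeValuesEvent(date_time, description)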
log2timeline/plaso: plaso/parsers/sqlite_plugins/android_webview.py (view on GitHub)
    event_data.cookie_name = cookie_name
    event_data.data = cookie_value
    event_data.host = hostname
    event_data.offset = self._GetRowValue(query_hash, row, '_id')
    event_data.path = path
    event_data.query = query
    event_data.secure = secure
    event_data.url = url

    timestamp = self._GetRowValue(query_hash, row, 'expires')
    if timestamp:
      date_time = dfdatetime_java_time.JavaTime(timestamp=timestamp)
    else:
      date_time = dfdatetime_semantic_time.SemanticTime('Infinity')

    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
    parser_mediator.ProduceEventWithEventData(event, event_data)

    # Go through all cookie plugins to see if any specific parsing is
    # needed.
    for cookie_plugin in self._cookie_plugins:
      try:
        cookie_plugin.UpdateChainAndProcess(
            parser_mediator, cookie_name=cookie_name,
            cookie_data=cookie_value, url=url)
      except errors.WrongPlugin:
        pass
log2timeline/plaso: plaso/parsers/plist_plugins/timemachine.py (view on GitHub)
        parser_mediator.ProduceExtractionWarning(
            'unable to parse backup alias value with error: {0!s}'.format(
                exception))
        alias = 'Unknown alias'

      destination_identifier = (
          destination.get('DestinationID', None) or 'Unknown device')

      event_data = plist_event.PlistTimeEventData()
      event_data.desc = 'TimeMachine Backup in {0:s} ({1:s})'.format(
          alias, destination_identifier)
      event_data.key = 'item/SnapshotDates'
      event_data.root = '/Destinations'

      snapshot_dates = destination.get('SnapshotDates', [])
      for datetime_value in snapshot_dates:
        event = time_events.PythonDatetimeEvent(
            datetime_value, definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)
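Across all of these examples only the dfdatetime class changes with the source timestamp format; time_events.DateTimeValuesEvent accepts any of them. The classes used above, each shown with a made-up value:

from dfdatetime import cocoa_time as dfdatetime_cocoa_time
from dfdatetime import filetime as dfdatetime_filetime
from dfdatetime import java_time as dfdatetime_java_time
from dfdatetime import posix_time as dfdatetime_posix_time
from dfdatetime import semantic_time as dfdatetime_semantic_time

dfdatetime_cocoa_time.CocoaTime(timestamp=500000000)        # seconds since 2001-01-01
dfdatetime_filetime.Filetime(timestamp=131281248000000000)  # 100ns intervals since 1601-01-01
dfdatetime_java_time.JavaTime(timestamp=1500000000000)      # milliseconds since 1970-01-01
dfdatetime_posix_time.PosixTime(timestamp=1500000000)       # seconds since 1970-01-01
dfdatetime_semantic_time.SemanticTime('Not set')            # placeholder for a missing time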