How to use the plaso.containers.time_events.DateTimeValuesEvent class in plaso

To help you get started, we’ve selected a few plaso examples, based on popular ways this class is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

Example from log2timeline/plaso — plaso/containers/time_events.py (view on GitHub)
date_time (dfdatetime.DateTimeValues): date and time values.
      date_time_description (str): description of the meaning of the date and
          time values.
      data_type (Optional[str]): event data type. If the data type is not set
          it is derived from the DATA_TYPE class attribute.
      time_zone (Optional[datetime.tzinfo]): time zone.
    """
    timestamp = date_time.GetPlasoTimestamp()
    if date_time.is_local_time and time_zone:
      timestamp = timelib.Timestamp.LocaltimeToUTC(timestamp, time_zone)

    super(DateTimeValuesEvent, self).__init__(
        timestamp, date_time_description, data_type=data_type)


class PythonDatetimeEvent(DateTimeValuesEvent):
  """Python datetime-based event attribute container."""

  def __init__(
      self, datetime_value, date_time_description, data_type=None,
      time_zone=None):
    """Initializes an event.

    Args:
      datetime_value (datetime.datetime): date and time values.
      date_time_description (str): description of the meaning of the date and
          time values.
      data_type (Optional[str]): event data type. If the data type is not set
          it is derived from the DATA_TYPE class attribute.
      time_zone (Optional[datetime.tzinfo]): time zone.
    """
    year, month, day_of_month, hours, minutes, seconds, _, _, _ = (
Example from log2timeline/plaso — plaso/parsers/winreg_plugins/userassist.py (view on GitHub)
elif format_version == 5:
        userassist_entry_index += 1

        event_data.application_focus_count = (
            user_assist_entry.application_focus_count)
        event_data.application_focus_duration = (
            user_assist_entry.application_focus_duration)
        event_data.entry_index = userassist_entry_index

      timestamp = user_assist_entry.last_execution_time
      if not timestamp:
        date_time = dfdatetime_semantic_time.SemanticTime('Not set')
      else:
        date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)

      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_LAST_RUN)
      parser_mediator.ProduceEventWithEventData(event, event_data)
Example from log2timeline/plaso — plaso/parsers/sqlite_plugins/tango_android.py (view on GitHub)
and other components, such as storage and dfvfs.
      query (str): query that created the row.
      row (sqlite3.Row): row resulting from query.
    """
    query_hash = hash(query)

    event_data = TangoAndroidConversationEventData()
    event_data.conversation_identifier = self._GetRowValue(
        query_hash, row, 'conv_id')

    # TODO: payload is a base64 encoded binary blob, we need to find the
    # structure to extract the relevant bits.
    # event_data.payload = self._GetRowValue(query_hash, row, 'payload')

    date_time = dfdatetime_semantic_time.NotSet()
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_NOT_A_TIME)
    parser_mediator.ProduceEventWithEventData(event, event_data)
Example from log2timeline/plaso — plaso/parsers/winreg_plugins/officemru.py (view on GitHub)
if not timestamp:
        date_time = dfdatetime_semantic_time.SemanticTime('Not set')
      else:
        date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)

      # TODO: determine if this should be last written time.
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_WRITTEN)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    event_data = OfficeMRUListWindowsRegistryEventData()
    event_data.entries = ' '.join([value for value in entries]) or None
    event_data.key_path = registry_key.path

    event = time_events.DateTimeValuesEvent(
        registry_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)
    parser_mediator.ProduceEventWithEventData(event, event_data)
Example from log2timeline/plaso — plaso/parsers/winlnk.py (view on GitHub)
self, parser_mediator, uuid_string, origin):
    """Extracts data from a Distributed Tracking identifier.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      uuid_string (str): UUID string of the Distributed Tracking identifier.
      origin (str): origin of the event (event source).
    """
    uuid_object = uuid.UUID(uuid_string)

    if uuid_object.version == 1:
      event_data = windows_events.WindowsDistributedLinkTrackingEventData(
          uuid_object, origin)
      date_time = dfdatetime_uuid_time.UUIDTime(timestamp=uuid_object.time)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_CREATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)
Example from log2timeline/plaso — plaso/parsers/sqlite_plugins/chrome_extension_activity.py (view on GitHub)
event_data = ChromeExtensionActivityEventData()
    event_data.action_type = self._GetRowValue(query_hash, row, 'action_type')
    event_data.activity_id = self._GetRowValue(query_hash, row, 'activity_id')
    event_data.api_name = self._GetRowValue(query_hash, row, 'api_name')
    event_data.arg_url = self._GetRowValue(query_hash, row, 'arg_url')
    event_data.args = self._GetRowValue(query_hash, row, 'args')
    event_data.extension_id = self._GetRowValue(query_hash, row, 'extension_id')
    event_data.other = self._GetRowValue(query_hash, row, 'other')
    event_data.page_title = self._GetRowValue(query_hash, row, 'page_title')
    event_data.page_url = self._GetRowValue(query_hash, row, 'page_url')
    event_data.query = query

    timestamp = self._GetRowValue(query_hash, row, 'time')
    date_time = dfdatetime_webkit_time.WebKitTime(timestamp=timestamp)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_UNKNOWN)
    parser_mediator.ProduceEventWithEventData(event, event_data)
Example from log2timeline/plaso — plaso/parsers/esedb_plugins/msie_webcache.py (view on GitHub)
event_data.cached_filename = cached_filename
        event_data.cached_file_size = cached_file_size
        event_data.cache_identifier = cache_identifier
        event_data.container_identifier = container_identifier
        event_data.entry_identifier = entry_identifier
        event_data.file_extension = file_extension
        event_data.redirect_url = redirect_url
        event_data.request_headers = request_headers
        event_data.response_headers = response_headers
        event_data.sync_count = sync_count
        event_data.url = url

        timestamp = record_values.get('SyncTime', None)
        if timestamp:
          date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
          event = time_events.DateTimeValuesEvent(
              date_time, 'Synchronization time')
          parser_mediator.ProduceEventWithEventData(event, event_data)

        timestamp = record_values.get('CreationTime', None)
        if timestamp:
          date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
          event = time_events.DateTimeValuesEvent(
              date_time, definitions.TIME_DESCRIPTION_CREATION)
          parser_mediator.ProduceEventWithEventData(event, event_data)

        timestamp = record_values.get('ExpiryTime', None)
        if timestamp:
          date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)
          event = time_events.DateTimeValuesEvent(
              date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
          parser_mediator.ProduceEventWithEventData(event, event_data)
Example from log2timeline/plaso — plaso/parsers/msiecf.py (view on GitHub)
event_data.number_of_hits = msiecf_item.number_of_hits
    event_data.offset = msiecf_item.offset
    event_data.recovered = recovered
    event_data.url = msiecf_item.location

    if (event_data.cache_directory_index >= 0 and
        event_data.cache_directory_index < len(cache_directories)):
      event_data.cache_directory_name = (
          cache_directories[event_data.cache_directory_index])

    event = time_events.DateTimeValuesEvent(
        primary_date_time, primary_date_time_description)
    parser_mediator.ProduceEventWithEventData(event, event_data)

    if secondary_date_time.timestamp != 0:
      event = time_events.DateTimeValuesEvent(
          secondary_date_time, secondary_date_time_description,
          time_zone=parser_mediator.timezone)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    expiration_timestamp = msiecf_item.get_expiration_time_as_integer()
    if expiration_timestamp != 0:
      # The expiration time in MSIECF version 4.7 is stored as a FILETIME value
      # in version 5.2 it is stored as a FAT date time value.
      # Since the as_integer function returns the raw integer value we need to
      # apply the right conversion here.
      if format_version == '4.7':
        if expiration_timestamp == 0x7fffffffffffffff:
          expiration_date_time = dfdatetime_semantic_time.SemanticTime('Never')
        else:
          expiration_date_time = dfdatetime_filetime.Filetime(
              timestamp=expiration_timestamp)
Example from log2timeline/plaso — plaso/parsers/firefox_cache.py (view on GitHub)
timestamp=file_metadata_header.last_fetched_time)
    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_LAST_VISITED)
    parser_mediator.ProduceEventWithEventData(event, event_data)

    if file_metadata_header.last_modified_time:
      date_time = dfdatetime_posix_time.PosixTime(
          timestamp=file_metadata_header.last_modified_time)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_WRITTEN)
      parser_mediator.ProduceEventWithEventData(event, event_data)

    if file_metadata_header.expiration_time:
      date_time = dfdatetime_posix_time.PosixTime(
          timestamp=file_metadata_header.expiration_time)
      event = time_events.DateTimeValuesEvent(
          date_time, definitions.TIME_DESCRIPTION_EXPIRATION)
      parser_mediator.ProduceEventWithEventData(event, event_data)