def testParseFile(self):
  """Tests the Parse function on a stand-alone $MFT file."""
  parser = ntfs.NTFSMFTParser()

  test_file_path = self._GetTestFilePath(['MFT'])
  self._SkipIfPathNotExists(test_file_path)

  os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file_path)

  storage_writer = self._ParseFileByPathSpec(os_path_spec, parser)

  self.assertEqual(storage_writer.number_of_warnings, 0)
  self.assertEqual(storage_writer.number_of_events, 126352)

  events = list(storage_writer.GetEvents())

  # A distributed link tracking event.
  event = events[3684]

  self.CheckTimestamp(event.timestamp, '2007-06-30 12:58:40.500004')
  self.assertEqual(
      event.timestamp_desc, definitions.TIME_DESCRIPTION_CREATION)
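In the test above a single OS path specification is enough to address the stand-alone $MFT file, and the test harness takes care of resolving it. Outside of the harness the same kind of path specification can be resolved directly with dfvfs. The sketch below is illustrative only; the path '/cases/MFT' is a placeholder.

from dfvfs.lib import definitions as dfvfs_definitions
from dfvfs.path import factory as path_spec_factory
from dfvfs.resolver import resolver

# Address a file on the operating system and resolve it to a file-like
# object; '/cases/MFT' is a placeholder path.
mft_path_spec = path_spec_factory.Factory.NewPathSpec(
    dfvfs_definitions.TYPE_INDICATOR_OS, location='/cases/MFT')

file_object = resolver.Resolver.OpenFileObject(mft_path_spec)
data = file_object.read(1024)
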
import os

from dfvfs.lib import definitions as dfvfs_definitions
from dfvfs.path import factory as path_spec_factory

from plaso.lib import definitions
from plaso.lib import timelib
from plaso.output import json_line

from tests.cli import test_lib as cli_test_lib
from tests.output import test_lib


class JSONLinesOutputTest(test_lib.OutputModuleTestCase):
  """Tests for the JSON lines output module."""

  _OS_PATH_SPEC = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location='{0:s}{1:s}'.format(
          os.path.sep, os.path.join('cases', 'image.dd')))

  _TEST_EVENTS = [
      {'data_type': 'test:output',
       'display_name': 'OS: /var/log/syslog.1',
       'hostname': 'ubuntu',
       'inode': 12345678,
       'pathspec': path_spec_factory.Factory.NewPathSpec(
           dfvfs_definitions.TYPE_INDICATOR_TSK, inode=15,
           location='/var/log/syslog.1', parent=_OS_PATH_SPEC),
       'text': (
           'Reporter PID: |8442| (pam_unix(cron:session): session\n '
           'closed for user root)'),
       'timestamp': timelib.Timestamp.CopyFromString('2012-06-27 18:17:01'),
       'timestamp_desc': definitions.TIME_DESCRIPTION_UNKNOWN,
       'username': 'root'}]

  def setUp(self):
    """Makes preparations before running an individual test."""
    output_mediator = self._CreateOutputMediator()
    self._output_writer = cli_test_lib.TestOutputWriter()
    self._output_module = json_line.JSONLineOutputModule(output_mediator)
    self._output_module.SetOutputWriter(self._output_writer)
def testPrintAPFSVolumeIdentifiersOverview(self):
  """Tests the _PrintAPFSVolumeIdentifiersOverview function."""
  test_file_path = self._GetTestFilePath(['apfs.dmg'])
  self._SkipIfPathNotExists(test_file_path)

  test_os_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file_path)
  test_raw_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_RAW, parent=test_os_path_spec)
  test_tsk_partition_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK_PARTITION, location='/p1',
      parent=test_raw_path_spec)
  test_apfs_container_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_APFS_CONTAINER, location='/',
      parent=test_tsk_partition_path_spec)

  volume_system = apfs_volume_system.APFSVolumeSystem()
  volume_system.Open(test_apfs_container_path_spec)

  file_object = io.BytesIO()
  test_output_writer = tools.FileObjectOutputWriter(file_object)

  test_tool = storage_media_tool.StorageMediaTool(
      output_writer=test_output_writer)

  test_tool._PrintAPFSVolumeIdentifiersOverview(volume_system, ['apfs1'])

  file_object.seek(0, os.SEEK_SET)
  output_data = file_object.read()
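The test above layers path specifications from the outside in: the OS location of the disk image, a RAW storage media layer, the first TSK partition ('/p1') and the APFS container inside it. A minimal sketch of the same chain outside the test harness is shown below; it goes one step further and addresses the root of the first APFS volume. The image path and the volume location '/apfs1' are placeholders.

from dfvfs.lib import definitions as dfvfs_definitions
from dfvfs.path import factory as path_spec_factory
from dfvfs.resolver import resolver

# Each layer wraps the previous one through its parent argument:
# OS file -> RAW image -> TSK partition -> APFS container -> APFS volume.
os_path_spec = path_spec_factory.Factory.NewPathSpec(
    dfvfs_definitions.TYPE_INDICATOR_OS, location='/cases/apfs.dmg')
raw_path_spec = path_spec_factory.Factory.NewPathSpec(
    dfvfs_definitions.TYPE_INDICATOR_RAW, parent=os_path_spec)
partition_path_spec = path_spec_factory.Factory.NewPathSpec(
    dfvfs_definitions.TYPE_INDICATOR_TSK_PARTITION, location='/p1',
    parent=raw_path_spec)
container_path_spec = path_spec_factory.Factory.NewPathSpec(
    dfvfs_definitions.TYPE_INDICATOR_APFS_CONTAINER, location='/apfs1',
    parent=partition_path_spec)
apfs_path_spec = path_spec_factory.Factory.NewPathSpec(
    dfvfs_definitions.TYPE_INDICATOR_APFS, location='/',
    parent=container_path_spec)

# Resolve the innermost layer to the root file entry of the volume.
file_entry = resolver.Resolver.OpenFileEntry(apfs_path_spec)
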
import os

from dfvfs.lib import definitions as dfvfs_definitions
from dfvfs.path import factory as path_spec_factory

from plaso.lib import definitions
from plaso.lib import timelib
from plaso.output import null

from tests.cli import test_lib as cli_test_lib
from tests.containers import test_lib as containers_test_lib
from tests.output import test_lib


class DynamicOutputModuleTest(test_lib.OutputModuleTestCase):
  """Test the null output module."""

  _OS_PATH_SPEC = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location='{0:s}{1:s}'.format(
          os.path.sep, os.path.join('cases', 'image.dd')))

  _TEST_EVENTS = [
      {'data_type': 'test:output',
       'display_name': 'OS: /var/log/syslog.1',
       'hostname': 'ubuntu',
       'inode': 12345678,
       'pathspec': path_spec_factory.Factory.NewPathSpec(
           dfvfs_definitions.TYPE_INDICATOR_TSK, inode=15,
           location='/var/log/syslog.1', parent=_OS_PATH_SPEC),
       'text': (
           'Reporter PID: |8442| (pam_unix(cron:session): session\n '
           'closed for user root)'),
       'timestamp': timelib.Timestamp.CopyFromString('2012-06-27 18:17:01'),
       'timestamp_desc': definitions.TIME_DESCRIPTION_UNKNOWN,
       'username': 'root'}]
  same for sys.tgz.

  The end results should therefore be:
    * logs/hidden.zip (unchanged)
    * logs/hidden.zip:syslog (the text file extracted out)
    * logs/sys.tgz (unchanged)
    * logs/sys.tgz (read as a GZIP file, so not compressed)
    * logs/sys.tgz:syslog.gz (a GZIP file from the TAR container)
    * logs/sys.tgz:syslog.gz:syslog (the extracted syslog file)

  This means that the collection script should collect 6 files in total.
  """
  test_file_path = self._GetTestFilePath(['syslog_image.dd'])
  self._SkipIfPathNotExists(test_file_path)

  volume_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file_path)
  source_path_spec = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_TSK, location='/',
      parent=volume_path_spec)

  resolver_context = context.Context()
  test_extractor = extractors.PathSpecExtractor()
  path_specs = list(test_extractor.ExtractPathSpecs(
      [source_path_spec], resolver_context=resolver_context))

  self.assertEqual(len(path_specs), 3)
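The docstring above describes how archive members are addressed by chaining path specifications: each layer (file system, GZIP, TAR) wraps the one below it through its parent. The extractor builds these chains automatically; a hand-built sketch for the deepest entry, logs/sys.tgz:syslog.gz:syslog, is shown below. The image path is a placeholder and, as in the test, a plain TSK file system without partitions is assumed.

from dfvfs.lib import definitions as dfvfs_definitions
from dfvfs.path import factory as path_spec_factory
from dfvfs.resolver import resolver

# The disk image on the host and the compressed TAR file inside its
# file system; '/cases/syslog_image.dd' is a placeholder path.
image_path_spec = path_spec_factory.Factory.NewPathSpec(
    dfvfs_definitions.TYPE_INDICATOR_OS, location='/cases/syslog_image.dd')
tgz_path_spec = path_spec_factory.Factory.NewPathSpec(
    dfvfs_definitions.TYPE_INDICATOR_TSK, location='/logs/sys.tgz',
    parent=image_path_spec)

# Peel off the GZIP layer of sys.tgz, address syslog.gz inside the TAR,
# then peel off its GZIP layer to reach the plain-text syslog.
gzip_path_spec = path_spec_factory.Factory.NewPathSpec(
    dfvfs_definitions.TYPE_INDICATOR_GZIP, parent=tgz_path_spec)
tar_member_path_spec = path_spec_factory.Factory.NewPathSpec(
    dfvfs_definitions.TYPE_INDICATOR_TAR, location='/syslog.gz',
    parent=gzip_path_spec)
syslog_path_spec = path_spec_factory.Factory.NewPathSpec(
    dfvfs_definitions.TYPE_INDICATOR_GZIP, parent=tar_member_path_spec)

syslog_file_object = resolver.Resolver.OpenFileObject(syslog_path_spec)
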
def testScanForVolumeSystemOnBodyFile(self):
  """Test the ScanForVolumeSystem function on a body file."""
  test_path = self._GetTestFilePath(['mactime.body'])
  self._SkipIfPathNotExists(test_path)

  test_os_path_spec = path_spec_factory.Factory.NewPathSpec(
      definitions.TYPE_INDICATOR_OS, location=test_path)
  path_spec = self._source_scanner.ScanForVolumeSystem(test_os_path_spec)
  self.assertIsNone(path_spec)
      'Reporter PID: 8442 (pam_unix(cron:session): '
      'session closed for user root)\n')

  event_body = self._output_writer.ReadOutput()
  self.assertEqual(event_body, expected_event_body)
  self.assertEqual(event_body.count('|'), 4)

  formatters_manager.FormattersManager.DeregisterFormatter(
      test_lib.TestEventFormatter)
class L2TTLNOutputModuleTest(test_lib.OutputModuleTestCase):
  """Tests for the log2timeline TLN output module."""

  _OS_PATH_SPEC = path_spec_factory.Factory.NewPathSpec(
      dfvfs_definitions.TYPE_INDICATOR_OS, location='{0:s}{1:s}'.format(
          os.path.sep, os.path.join('cases', 'image.dd')))

  _TEST_EVENTS = [
      {'data_type': 'test:output',
       'display_name': 'OS: /var/log/syslog.1',
       'hostname': 'ubuntu',
       'inode': 12345678,
       'pathspec': path_spec_factory.Factory.NewPathSpec(
           dfvfs_definitions.TYPE_INDICATOR_TSK, inode=15,
           location='/var/log/syslog.1', parent=_OS_PATH_SPEC),
       'text': (
           'Reporter PID: |8442| (pam_unix(cron:session): session\n '
           'closed for user root)'),
       'timestamp': timelib.Timestamp.CopyFromString('2012-06-27 18:17:01'),
       'timestamp_desc': definitions.TIME_DESCRIPTION_UNKNOWN,
       'username': 'root'}]
  _, _, parent_filename = parent_filename.rpartition('\\')

  location_path_segments.pop()
  location_path_segments.append(parent_filename)

  parent_file_location = file_system.JoinPath(location_path_segments)

  # Note that we don't want to set the keyword arguments when not used
  # because the path specification base class will check for unused
  # keyword arguments and raise.
  kwargs = path_spec_factory.Factory.GetProperties(path_spec)

  kwargs['location'] = parent_file_location
  if path_spec.parent is not None:
    kwargs['parent'] = path_spec.parent

  parent_file_path_spec = path_spec_factory.Factory.NewPathSpec(
      path_spec.type_indicator, **kwargs)

  if not file_system.FileEntryExistsByPathSpec(parent_file_path_spec):
    return

  file_object = resolver.Resolver.OpenFileObject(
      parent_file_path_spec, resolver_context=self._resolver_context)

  vhdi_parent_file = pyvhdi.file()
  vhdi_parent_file.open_file_object(file_object)

  if vhdi_parent_file.parent_identifier:  # pylint: disable=using-constant-test
    self._OpenParentFile(
        file_system, parent_file_path_spec, vhdi_parent_file)

  vhdi_file.set_parent(vhdi_parent_file)
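The comment in the snippet above points out that the dfvfs path specification base class raises when it receives keyword arguments it does not use, which is why only properties that are actually set are copied into the new specification. A minimal sketch of that pattern follows; the VHD image locations are placeholders.

from dfvfs.lib import definitions as dfvfs_definitions
from dfvfs.path import factory as path_spec_factory

# An existing path spec; '/images/child.vhd' is a placeholder location.
child_path_spec = path_spec_factory.Factory.NewPathSpec(
    dfvfs_definitions.TYPE_INDICATOR_OS, location='/images/child.vhd')

# Build the keyword arguments for a sibling path spec of the same type,
# only setting properties that actually have a value so the path
# specification base class does not raise on unused keyword arguments.
kwargs = path_spec_factory.Factory.GetProperties(child_path_spec)
kwargs['location'] = '/images/parent.vhd'
if child_path_spec.parent is not None:
  kwargs['parent'] = child_path_spec.parent

parent_path_spec = path_spec_factory.Factory.NewPathSpec(
    child_path_spec.type_indicator, **kwargs)
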
  location_segments = path_segments[:-1]
  location_segments.append('index')
  location = file_system.JoinPath(location_segments)
  index_path_spec = path_spec_factory.Factory.NewPathSpec(
      file_entry.type_indicator, location=location,
      parent=file_entry.path_spec.parent)
  if file_system.FileEntryExistsByPathSpec(index_path_spec):
    # TODO: improve this check if "index" is a Chrome Cache index file.
    return True

elif self._FIREFOX_CACHE_DATA_FILE_RE.match(path_segments[-1]):
  location_segments = path_segments[:-4]
  location_segments.append('_CACHE_MAP_')
  location = file_system.JoinPath(location_segments)
  cache_map_path_spec = path_spec_factory.Factory.NewPathSpec(
      file_entry.type_indicator, location=location,
      parent=file_entry.path_spec.parent)
  if file_system.FileEntryExistsByPathSpec(cache_map_path_spec):
    # TODO: improve this check if "_CACHE_MAP_" is a Firefox Cache
    # version 1 cache map file.
    return True

elif self._FIREFOX_CACHE2_DATA_FILE_RE.match(path_segments[-1]):
  location_segments = path_segments[:-2]
  location_segments.append('index')
  location = file_system.JoinPath(location_segments)
  index_path_spec = path_spec_factory.Factory.NewPathSpec(
      file_entry.type_indicator, location=location,
      parent=file_entry.path_spec.parent)