How to use the turbinia.workers.TurbiniaTask class in turbinia

To help you get started, we’ve selected a few turbinia examples based on popular ways TurbiniaTask is used in public projects.

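Every example below follows the same pattern: subclass TurbiniaTask, implement run(evidence, result), write any output under self.output_dir, attach new evidence to the result and return it. A minimal sketch of that pattern (MyTask and the report file name are hypothetical; the add_evidence/close calls mirror the worker_stat example below):

import os

from turbinia.evidence import ReportText
from turbinia.workers import TurbiniaTask


class MyTask(TurbiniaTask):
  """Minimal example task (hypothetical, not part of turbinia itself)."""

  def run(self, evidence, result):
    # Write any output under self.output_dir so Turbinia collects it.
    report_path = os.path.join(self.output_dir, 'my_report.txt')
    report = ReportText(source_path=report_path)
    report.text_data = 'Processed {0:s}'.format(evidence.source_path)
    with open(report_path, 'w') as report_file:
      report_file.write(report.text_data)

    # Attach the new evidence and close the result before returning it.
    result.add_evidence(report, evidence.config)
    result.close(self, success=True)
    return result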

From github.com/google/turbinia: turbinia/workers/worker_stat.py
"""Task for running a test evidence stat call on the supplied evidence.

TODO(aarontp): In the future we can use this for doing a count and healthcheck
               of all PSQ workers since there is currently no mechanism for that
               in PSQ.
"""

from __future__ import unicode_literals

import os

from turbinia.workers import TurbiniaTask
from turbinia.evidence import ReportText


class StatTask(TurbiniaTask):
  """Task to run Stat."""

  def run(self, evidence, result):
    """Test Stat task.

    Args:
        evidence (Evidence object): The evidence to process.
        result: TurbiniaTaskResult to populate with results.

    Returns:
        TurbiniaTaskResult: object.
    """
    result.log('Running stat on evidence {0:s}'.format(evidence.source_path))
    report_path = os.path.join(self.output_dir, 'report.txt')
    report = ReportText(source_path=report_path)
    report.text_data = str(os.stat(evidence.source_path))
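The excerpt above stops before the task finishes. A plausible completion, following the add_evidence/close pattern used throughout turbinia (a sketch, not the verbatim upstream code):

    with open(report_path, 'w') as report_file:
      report_file.write(report.text_data)

    result.add_evidence(report, evidence.config)
    result.close(self, success=True)
    return result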
From github.com/google/turbinia: turbinia/workers/strings.py
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Task for gathering ascii strings."""

from __future__ import unicode_literals

import os

from turbinia.evidence import TextFile
from turbinia.workers import TurbiniaTask


class StringsAsciiTask(TurbiniaTask):
  """Task to generate ascii strings."""

  def run(self, evidence, result):
    """Run strings binary.

    Args:
        evidence (Evidence object):  The evidence we will process.
        result (TurbiniaTaskResult): The object to place task results into.

    Returns:
        TurbiniaTaskResult object.
    """
    # Create a path that we can write the new file to.
    base_name = os.path.basename(evidence.device_path)
    output_file_path = os.path.join(
        self.output_dir, '{0:s}.ascii'.format(base_name))
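The snippet is cut off before the command is run. A sketch of the rest, assuming the task shells out to strings(1) through TurbiniaTask.execute and returns the output as TextFile evidence (the strings flags shown are illustrative):

    output_evidence = TextFile(source_path=output_file_path)

    # Illustrative command; the upstream task may use different flags.
    cmd = 'strings -a -t d {0:s} > {1:s}'.format(
        evidence.device_path, output_file_path)
    result.log('Running [{0:s}]'.format(cmd))

    # execute() runs the command, attaches the new evidence, and closes the
    # result when close=True is passed.
    self.execute(
        cmd, result, new_evidence=[output_evidence], close=True, shell=True)
    return result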
From github.com/google/turbinia: turbinia/workers/be.py
Returns:
        job_id: The job_id provided.
    """
    # TODO(aarontp): Fix all these methods to take evidence
    # TODO(aarontp): Standardize output path format
    out_path = '{0:s}/{1:s}/{2}_{3}'.format(
        out_path, job_id, offsets[0], offsets[1])
    if not os.path.exists(out_path):
      os.makedirs(out_path)
    cmd_output = subprocess.check_output([
        '/usr/local/bin/be_wrapper.sh', src_path, out_path, '{0}-{1}'.format(
            offsets[0], offsets[1]), job_id])
    return job_id


class BulkExtractorCalcOffsetsTask(TurbiniaTask):
  """Task to calculate offsets for Bulk extractor."""

  def run(self, evidence, num_workers, page_size=PAGE_SIZE):
    """Reads data and calculates offsets based on page_size.

    Args:
      evidence: Evidence to process (the image path is derived from it).
      num_workers: Number of workers that will be used in processing.
      page_size: Page size used in bulk_extractor.

    Returns:
      List of offsets.
    """
    # The body still expects a bare path (see the TODOs above); deriving it
    # from the evidence here is an assumption to keep the excerpt consistent.
    src_path = evidence.source_path
    disk_size = os.path.getsize(src_path)
    offset1 = 0
    offset2 = page_size
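The excerpt stops right after the first pair of offsets is initialised; the next excerpt, from the same file, picks up where contiguous parts are grouped into per-worker (start, end) tuples. A hedged sketch of the missing middle, assuming the intent is simply to step through the image in page_size increments:

    offsets = []
    while offset1 < disk_size:
      # Raw (start, end) page boundaries; a later step (see the next
      # excerpt) groups these into one contiguous range per worker.
      offsets.append((offset1, min(offset2, disk_size)))
      offset1 += page_size
      offset2 += page_size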
From github.com/google/turbinia: turbinia/workers/be.py
o1 = instance_parts[0][0]
      o2 = instance_parts[-1][1]
      offsets.append(
          (o1, o2),)

    if extra:
      last_instance_parts = parts[index_stop:]
      o1 = last_instance_parts[0][0]
      o2 = last_instance_parts[-1][1]
      offsets.append(
          (o1, o2),)

    return offsets


class BulkExtractorReducerTask(TurbiniaTask):
  """Reduce bulk extractor outputs."""

  def run(self, evidence, results):
    """Task that reduces the results into one SQLite database.

    Args:
        evidence: The evidence being processed.
        results: List of returned values from tasks.

    Returns:
        Task result object (instance of TurbiniaTaskResult) as JSON.
    """
    job_id = results[0]
    cmd_output = subprocess.check_output([
        '/usr/local/bin/be_reducer.sh', job_id])
    result = TurbiniaTaskResult()
    result.add_result(result_type='PATH', result=cmd_output)
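The excerpt ends before the method returns; assuming it follows the same pattern as the other tasks, it would finish with:

    # (Elided above: the file's header imports subprocess and
    # TurbiniaTaskResult.)
    return result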
From github.com/google/turbinia: turbinia/workers/artifact.py
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Task for running Plaso."""

from __future__ import unicode_literals

import os

from turbinia import config
from turbinia.evidence import ExportedFileArtifact
from turbinia.workers import TurbiniaTask


class FileArtifactExtractionTask(TurbiniaTask):
  """Task to run image_export (log2timeline)."""

  def __init__(self, artifact_name='FileArtifact'):
    super(FileArtifactExtractionTask, self).__init__()
    self.artifact_name = artifact_name

  def run(self, evidence, result):
    """Extracts artifacts using Plaso image_export.py.

    Args:
        evidence (Evidence object):  The evidence we will process.
        result (TurbiniaTaskResult): The object to place task results into.

    Returns:
        TurbiniaTaskResult object.
    """
From github.com/google/turbinia: turbinia/workers/bulk_extractor.py
"""Task for running Bulk Extractor."""

import os
import logging
import xml.etree.ElementTree as xml_tree

from turbinia import TurbiniaException

from turbinia.evidence import BulkExtractorOutput
from turbinia.workers import TurbiniaTask
from turbinia.lib import text_formatter as fmt

log = logging.getLogger('turbinia')


class BulkExtractorTask(TurbiniaTask):
  """Task to generate Bulk Extractor output."""

  def run(self, evidence, result):
    """Run Bulk Extractor binary.

    Args:
        evidence (Evidence object): The evidence we will process.
        result (TurbiniaTaskResult): The object to place task results into.

    Returns:
        TurbiniaTaskResult object.
    """
    # TODO(wyassine): Research whether bulk extractor has an option to
    # generate a summary report to stdout so that it could be used for
    # a report in this task.
    # Create the new Evidence object that will be generated by this Task.
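Below the cut, the task presumably creates the BulkExtractorOutput evidence and shells out to bulk_extractor. A sketch under that assumption (the command line is illustrative; the xml.etree import above suggests the real task also parses the report.xml bulk_extractor produces):

    output_evidence = BulkExtractorOutput()
    bulk_extractor_output = os.path.join(self.output_dir, 'bulk_extractor')
    output_evidence.source_path = bulk_extractor_output

    # Illustrative invocation; the upstream task builds its own argument list.
    cmd = ['bulk_extractor', '-o', bulk_extractor_output, evidence.local_path]
    result.log('Running [{0:s}]'.format(' '.join(cmd)))
    self.execute(cmd, result, new_evidence=[output_evidence], close=True)
    return result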
From github.com/google/turbinia: turbinia/state_manager.py
for attr in task.result.STORED_ATTRIBUTES:
        if not hasattr(task.result, attr):
          raise TurbiniaException(
              'Task {0:s} result does not have attribute {1:s}'.format(
                  task.name, attr))
        task_dict[attr] = getattr(task.result, attr)
        if isinstance(task_dict[attr], six.binary_type):
          task_dict[attr] = six.u(task_dict[attr])

    # We'll store the run_time as seconds instead of a timedelta()
    if task_dict.get('run_time'):
      task_dict['run_time'] = task_dict['run_time'].total_seconds()

    # Set all non-existent keys to None
    all_attrs = set(
        TurbiniaTask.STORED_ATTRIBUTES + TurbiniaTaskResult.STORED_ATTRIBUTES)
    task_dict.update({k: None for k in all_attrs if k not in task_dict})
    task_dict = self._validate_data(task_dict)

    # Using the pubsub topic as an instance attribute in order to have a unique
    # namespace per Turbinia installation.
    # TODO(aarontp): Migrate this to actual Datastore namespaces
    config.LoadConfig()
    task_dict.update({'instance': config.INSTANCE_ID})
    return task_dict
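The resulting dictionary is flat and JSON-friendly. Based only on the keys visible in this excerpt, it would look roughly like the following (the key set and values are illustrative):

task_dict = {
    'instance': 'my-turbinia-instance',  # from config.INSTANCE_ID
    'name': 'StatTask',                  # one of the STORED_ATTRIBUTES
    'run_time': 12.5,                    # timedelta converted to seconds
    'successful': True,                  # remaining stored attributes, or None
}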
From github.com/google/turbinia: turbinia/workers/volatility.py
# See the License for the specific language governing permissions and
# limitations under the License.
"""Task for executing volatility."""

from __future__ import unicode_literals

import os

from turbinia import config
from turbinia.evidence import VolatilityReport
from turbinia.workers import TurbiniaTask

MAX_REPORT_SIZE = 2**30  # 1 GiB


class VolatilityTask(TurbiniaTask):
  """Task to execute volatility.

  Attributes:
    module(str): The name of the volatility module to run.
  """

  def __init__(self, module='test', *args, **kwargs):
    super(VolatilityTask, self).__init__(*args, **kwargs)
    self.module = module

  def run(self, evidence, result):
    """Run volatility against evidence.

    Args:
        evidence (Evidence object):  The evidence we will process.
        result (TurbiniaTaskResult): The object to place task results into.
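The docstring and body are cut off here; the docstring presumably ends with the usual 'Returns: TurbiniaTaskResult object.' A sketch of the body, assuming the module output is written into a VolatilityReport and run through TurbiniaTask.execute (the vol.py flags are illustrative):

    config.LoadConfig()
    output_file_path = os.path.join(
        self.output_dir, '{0:s}.txt'.format(self.module))
    output_evidence = VolatilityReport(source_path=output_file_path)

    # Illustrative command line; the upstream task builds it from config
    # values, and the MAX_REPORT_SIZE constant above presumably caps the
    # size of the report that gets stored.
    cmd = [
        'vol.py', '--output=text',
        '--output-file={0:s}'.format(output_file_path),
        '--filename={0:s}'.format(evidence.local_path), self.module
    ]
    result.log('Running [{0:s}]'.format(' '.join(cmd)))
    self.execute(cmd, result, new_evidence=[output_evidence], close=True)
    return result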
From github.com/google/turbinia: turbinia/workers/sshd.py
# See the License for the specific language governing permissions and
# limitations under the License.
"""Task for analysing sshd_config files."""

from __future__ import unicode_literals

import os
import re

from turbinia.evidence import ReportText
from turbinia.lib import text_formatter as fmt
from turbinia.workers import TurbiniaTask
from turbinia.workers import Priority


class SSHDAnalysisTask(TurbiniaTask):
  """Task to analyze a sshd_config file."""

  def run(self, evidence, result):
    """Run the sshd_config analysis worker.

    Args:
        evidence (Evidence object):  The evidence we will process.
        result (TurbiniaTaskResult): The object to place task results into.

    Returns:
        TurbiniaTaskResult object.
    """
    # Where to store the resulting output file.
    output_file_name = 'sshd_config_analysis.txt'
    output_file_path = os.path.join(self.output_dir, output_file_name)
    # Set the output file as the data source for the output evidence.
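The remainder of the method is cut off. A sketch of how such an analysis task typically proceeds; the analyse_sshd_config helper is a hypothetical name, and the report and priority handling is an assumption based on the Priority import above:

    output_evidence = ReportText(source_path=output_file_path)

    # Read the collected sshd_config.
    with open(evidence.local_path, 'r') as input_file:
      sshd_config = input_file.read()

    # Hypothetical helper on this task; assumed to return the report text,
    # a Priority value and a one-line summary.
    (report, priority, summary) = self.analyse_sshd_config(sshd_config)
    output_evidence.text_data = report
    result.report_priority = priority
    result.report_data = report

    # Persist the report and attach it to the result.
    with open(output_file_path, 'w') as output_file:
      output_file.write(output_evidence.text_data)
    result.add_evidence(output_evidence, evidence.config)
    result.close(self, success=True, status=summary)
    return result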
From github.com/google/turbinia: turbinia/task_manager.py
def task_runner(obj, *args, **kwargs):
  """Wrapper function to run specified TurbiniaTask object.

  Args:
    obj: A serialized TurbiniaTask (as produced by TurbiniaTask.serialize()).
    *args: Any Args to pass to obj.
    **kwargs: Any keyword args to pass to obj.

  Returns:
    Output from TurbiniaTask (should be TurbiniaTaskResult).
  """
  obj = workers.TurbiniaTask.deserialize(obj)
  return obj.run_wrapper(*args, **kwargs)
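This wrapper is what actually runs on a worker: the task manager serializes a TurbiniaTask, the task queue hands that serialized form to task_runner, and run_wrapper() takes care of setup, calling run() and returning the result. A hedged usage sketch (the exact arguments run_wrapper expects, typically the serialized evidence, are an assumption here):

# Illustrative only: serialize a task the way the manager would, then run
# it the way the queue would on a worker.
task = StatTask()
result = task_runner(task.serialize(), evidence.serialize())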