How to use the turbinia.TurbiniaException class in turbinia

To help you get started, we’ve selected a few turbinia examples based on popular ways TurbiniaException is used in public projects.

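Every snippet below raises or documents turbinia.TurbiniaException, a plain Exception subclass exported from the top-level turbinia package. A minimal raise-and-catch sketch (the helper function and message text are illustrative, not part of turbinia):

from turbinia import TurbiniaException


def require_value(value, name):
  """Raises TurbiniaException when a required value is missing (illustrative helper)."""
  if not value:
    raise TurbiniaException('Required value "{0:s}" is not set'.format(name))
  return value


try:
  require_value(None, 'OUTPUT_DIR')
except TurbiniaException as exception:
  print('Turbinia error: {0!s}'.format(exception))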

github google / turbinia / turbinia / workers / __init__.py
TurbiniaException: If the evidence can not be found.
    """
    self.output_manager.setup(self)
    self.tmp_dir, self.output_dir = self.output_manager.get_local_output_dirs()
    if not self.result:
      self.result = TurbiniaTaskResult(
          input_evidence=evidence, base_output_dir=self.base_output_dir,
          request_id=self.request_id, job_id=self.job_id)
      self.result.setup(self)

    if not self.run_local:
      if evidence.copyable and not config.SHARED_FILESYSTEM:
        self.output_manager.retrieve_evidence(evidence)

    if evidence.source_path and not os.path.exists(evidence.source_path):
      raise TurbiniaException(
          'Evidence source path {0:s} does not exist'.format(
              evidence.source_path))
    evidence.preprocess(self.tmp_dir)
    return self.result
github google / turbinia / turbinia / jobs / manager.py
jobs_blacklist (Optional[list[str]]): Job names to exclude.
      jobs_whitelist (Optional[list[str]]): Job names to include.

    Returns:
     list[str]: Job names

    Raises:
      TurbiniaException: If both jobs_blacklist and jobs_whitelist are specified.
    """
    jobs_blacklist = jobs_blacklist if jobs_blacklist else []
    jobs_blacklist = [job.lower() for job in jobs_blacklist]
    jobs_whitelist = jobs_whitelist if jobs_whitelist else []
    jobs_whitelist = [job.lower() for job in jobs_whitelist]

    if jobs_whitelist and jobs_blacklist:
      raise TurbiniaException(
          'jobs_whitelist and jobs_blacklist cannot be specified at the same '
          'time.')
    elif jobs_blacklist:
      return [job for job in job_names if job.lower() not in jobs_blacklist]
    elif jobs_whitelist:
      return [job for job in job_names if job.lower() in jobs_whitelist]
    else:
      return job_names
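Callers pick at most one of the two filter lists. A minimal calling sketch, assuming this excerpt is the JobsManager.FilterJobNames classmethod (its signature is not shown above, and the job names here are illustrative):

from turbinia import TurbiniaException
from turbinia.jobs import manager as jobs_manager

job_names = ['PlasoJob', 'GrepJob', 'StringsJob']
try:
  # Passing both lists is expected to raise TurbiniaException.
  filtered = jobs_manager.JobsManager.FilterJobNames(
      job_names, jobs_blacklist=['grepjob'], jobs_whitelist=['plasojob'])
except TurbiniaException as exception:
  print('Invalid job filter: {0!s}'.format(exception))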
github google / turbinia / turbinia / state_manager.py
from turbinia import config
from turbinia.config import DATETIME_FORMAT
from turbinia import TurbiniaException
from turbinia.workers import TurbiniaTask
from turbinia.workers import TurbiniaTaskResult

config.LoadConfig()
if config.STATE_MANAGER.lower() == 'datastore':
  from google.cloud import datastore
  from google.cloud import exceptions
elif config.STATE_MANAGER.lower() == 'redis':
  import redis
else:
  msg = 'State Manager type "{0:s}" not implemented'.format(
      config.STATE_MANAGER)
  raise TurbiniaException(msg)

MAX_DATASTORE_STRLEN = 1500
log = logging.getLogger('turbinia')


def get_state_manager():
  """Return state manager object based on config.

  Returns:
    Initialized StateManager object.

  Raises:
    TurbiniaException: When an unknown State Manager is specified.
  """
  config.LoadConfig()
  # pylint: disable=no-else-return
github google / turbinia / turbinia / output_manager.py
def get_local_output_dirs(self):
    """Gets the local output directories from the local output writer.

    Returns:
      Tuple(string): (Path to temp directory, path to local output directory)

    Raises:
      TurbiniaException: If no local output writer with output_dir is found.
    """
    if not self._output_writers:
      raise TurbiniaException('No output writers found.')

    # Get the local writer
    writer = [w for w in self._output_writers if w.name == 'LocalWriter'][0]
    if not hasattr(writer, 'local_output_dir'):
      raise TurbiniaException(
          'Local output writer does not have local_output_dir attribute.')

    if not writer.local_output_dir:
      raise TurbiniaException(
          'Local output writer attribute local_output_dir is not set')

    if not hasattr(writer, 'tmp_dir'):
      raise TurbiniaException(
          'Local output writer does not have tmp_dir attribute.')

    if not writer.tmp_dir:
      raise TurbiniaException(
          'Local output writer attribute tmp_dir is not set')

    return (writer.tmp_dir, writer.local_output_dir)
github google / turbinia / turbinia / lib / utils.py
Args:
    file_name: Name of file (without path) to be extracted.
    output_dir: Path to directory to store the extracted files.

  Returns:
    list: Paths to the extracted files.

  Raises:
    TurbiniaException: If an error occurs when running image_export.
  """
  # TODO: Consider using the exec helper to gather stdout/stderr.
  log.debug('Running image_export as [{0:s}]'.format(' '.join(command)))
  try:
    subprocess.check_call(command)
  except subprocess.CalledProcessError:
    raise TurbiniaException('image_export.py failed.')

  collected_file_paths = []
  file_count = 0
  for dirpath, _, filenames in os.walk(output_dir):
    for filename in filenames:
      collected_file_paths.append(os.path.join(dirpath, filename))
      file_count += 1

  log.debug('Collected {0:d} files with image_export'.format(file_count))
  return collected_file_paths
github google / turbinia / turbinia / task_manager.py
Returns:
    Initialized TaskManager object.

  Raises:
    TurbiniaException: When an unknown task manager type is specified.
  """
  config.LoadConfig()
  # pylint: disable=no-else-return
  if config.TASK_MANAGER.lower() == 'psq':
    return PSQTaskManager()
  elif config.TASK_MANAGER.lower() == 'celery':
    return CeleryTaskManager()
  else:
    msg = 'Task Manager type "{0:s}" not implemented'.format(
        config.TASK_MANAGER)
    raise turbinia.TurbiniaException(msg)
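Because the manager type comes from the loaded config, callers can guard against unsupported TASK_MANAGER values. A minimal sketch, assuming get_task_manager() is the module-level function whose docstring and body are excerpted above:

from turbinia import config
from turbinia import task_manager
from turbinia import TurbiniaException

config.LoadConfig()
try:
  manager = task_manager.get_task_manager()
except TurbiniaException as exception:
  print('Unsupported TASK_MANAGER value: {0!s}'.format(exception))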
github google / turbinia / turbinia / processors / mount_local.py
Args:
    device_path(str): the path to the block device to remove
      (ie: /dev/loopX).

  Raises:
    TurbiniaException: if the losetup command failed to run.
  """
  # TODO(aarontp): Remove hard-coded sudo in commands:
  # https://github.com/google/turbinia/issues/73
  losetup_cmd = ['sudo', 'losetup', '-d', device_path]
  log.info('Running: {0:s}'.format(' '.join(losetup_cmd)))
  try:
    subprocess.check_call(losetup_cmd)
  except subprocess.CalledProcessError as e:
    raise TurbiniaException('Could not delete losetup device {0!s}'.format(e))
github google / turbinia / turbinia / client.py
response = cloud_function.ExecuteFunction(function_name, func_args)
    if 'result' not in response:
      log.error('No results found')
      if response.get('error', '{}') != '{}':
        msg = 'Error executing Cloud Function: [{0!s}].'.format(
            response.get('error'))
        log.error(msg)
      log.debug('GCF response: {0!s}'.format(response))
      raise TurbiniaException(
          'Cloud Function {0:s} returned no results.'.format(function_name))

    try:
      results = json.loads(response['result'])
    except (TypeError, ValueError) as e:
      raise TurbiniaException(
          'Could not deserialize result [{0!s}] from GCF: [{1!s}]'.format(
              response.get('result'), e))

    # Convert run_time/last_update back into datetime objects
    task_data = results[0]
    for task in task_data:
      if task.get('run_time'):
        task['run_time'] = timedelta(seconds=task['run_time'])
      if task.get('last_update'):
        task['last_update'] = datetime.strptime(
            task['last_update'], DATETIME_FORMAT)

    return task_data
github google / turbinia / turbinia / task_manager.py
def add_evidence(self, evidence_):
    """Adds new evidence and creates tasks to process it.

    This creates all tasks configured to process the given type of evidence.

    Args:
      evidence_: evidence object to add.

    Raises:
      TurbiniaException: When no Jobs are found.
    """
    if not self.jobs:
      raise turbinia.TurbiniaException(
          'Jobs must be registered before evidence can be added')
    log.info('Adding new evidence: {0:s}'.format(str(evidence_)))
    job_count = 0
    jobs_whitelist = evidence_.config.get('jobs_whitelist', [])
    jobs_blacklist = evidence_.config.get('jobs_blacklist', [])
    if jobs_blacklist or jobs_whitelist:
      log.info(
          'Filtering Jobs with whitelist {0!s} and blacklist {1!s}'.format(
              jobs_whitelist, jobs_blacklist))
      jobs_list = jobs_manager.JobsManager.FilterJobObjects(
          self.jobs, jobs_blacklist, jobs_whitelist)
    else:
      jobs_list = self.jobs

    # TODO(aarontp): Add some kind of loop detection in here so that jobs can
    # register for Evidence(), or other evidence types that may be a super
github google / turbinia / turbinia / evidence.py
def evidence_decode(evidence_dict):
  """Decode JSON into appropriate Evidence object.

  Args:
    evidence_dict: JSON serializable evidence object (i.e. a dict post JSON
                   decoding).

  Returns:
    An instantiated Evidence object (or a sub-class of it).

  Raises:
    TurbiniaException: If input is not a dict, does not have a type attribute,
                       or does not deserialize to an evidence object.
  """
  if not isinstance(evidence_dict, dict):
    raise TurbiniaException(
        'Evidence_dict is not a dictionary, type is {0:s}'.format(
            str(type(evidence_dict))))

  type_ = evidence_dict.pop('type', None)
  if not type_:
    raise TurbiniaException(
        'No Type attribute for evidence object [{0:s}]'.format(
            str(evidence_dict)))

  try:
    evidence_class = getattr(sys.modules[__name__], type_)
    evidence = evidence_class.from_dict(evidence_dict)
  except AttributeError:
    raise TurbiniaException(
        'No Evidence object of type {0:s} in evidence module'.format(type_))
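A minimal decoding sketch, assuming the evidence module defines a RawDisk evidence class whose attributes can be populated from the serialized dict (the type name and dict keys here are illustrative):

from turbinia import evidence
from turbinia import TurbiniaException

serialized = {'type': 'RawDisk', 'source_path': '/tmp/disk.raw'}
try:
  evidence_object = evidence.evidence_decode(serialized)
except TurbiniaException as exception:
  print('Could not decode evidence: {0!s}'.format(exception))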