def run(self, evidence, result):
  """Task that process data with Plaso.

  Args:
      evidence: Path to data to process.
      result: A TurbiniaTaskResult object to place task results into.

  Returns:
      TurbiniaTaskResult object.
  """
  # Both the plaso storage file and its log live in the task output directory.
  output_path = os.path.join(self.output_dir, '{0:s}.plaso'.format(self.id))
  log_path = os.path.join(self.output_dir, '{0:s}.log'.format(self.id))

  plaso_evidence = PlasoFile()
  plaso_evidence.local_path = output_path

  # TODO(aarontp): Move these flags into a recipe
  cmd = [
      'log2timeline.py', '--status_view', 'none', '--hashers', 'all',
      '--partition', 'all', '--vss_stores', 'all', '--logfile', log_path,
      output_path, evidence.local_path
  ]

  result.log('Running plaso as [{0:s}]'.format(' '.join(cmd)))
  self.execute(
      cmd, result, save_files=[log_path], new_evidence=[plaso_evidence],
      close=True)
  return result
def run(self, evidence, result):
  """Task that processes data with Plaso.

  Args:
      evidence: Path to data to process.
      result: A TurbiniaTaskResult object to place task results into.

  Returns:
      TurbiniaTaskResult object.
  """
  config.LoadConfig()
  plaso_evidence = PlasoFile()

  # Write plaso file into tmp_dir because sqlite has issues with some shared
  # filesystems (e.g NFS).
  plaso_file = os.path.join(self.tmp_dir, '{0:s}.plaso'.format(self.id))
  plaso_evidence.local_path = plaso_file
  plaso_log = os.path.join(self.output_dir, '{0:s}.log'.format(self.id))

  # TODO(aarontp): Move these flags into a recipe
  cmd = (
      'log2timeline.py --status_view none --hashers all '
      '--partition all --vss_stores all').split()
  if config.DEBUG_TASKS:
    cmd.append('-d')

  # Encrypted disks need a credential passed through to log2timeline.
  if isinstance(evidence, (APFSEncryptedDisk, BitlockerDisk)):
    if evidence.recovery_key:
      cmd.extend([
          '--credential', 'recovery_password:{0:s}'.format(
              evidence.recovery_key)
      ])
    elif evidence.password:
      cmd.extend(['--credential', 'password:{0:s}'.format(evidence.password)])
    else:
      # Fix: this branch also covers APFS encrypted disks, so the error
      # message should not claim the disk is specifically bitlocker.
      result.close(
          self, False, 'No credentials were provided '
          'for an encrypted disk.')
      return result

  cmd.extend(['--logfile', plaso_log])
  cmd.extend([plaso_file, evidence.local_path])

  result.log('Running plaso as [{0:s}]'.format(' '.join(cmd)))
  self.execute(
      cmd, result, log_files=[plaso_log], new_evidence=[plaso_evidence],
      close=True)
  return result
def run(self, evidence, result):
  """Task that processes data with Plaso.

  Args:
      evidence: Path to data to process.
      result: A TurbiniaTaskResult object to place task results into.

  Returns:
      TurbiniaTaskResult object.
  """
  plaso_result = PlasoFile()
  plaso_file = os.path.join(self.output_dir, u'{0:s}.plaso'.format(self.id))
  plaso_log = os.path.join(self.output_dir, u'{0:s}.log'.format(self.id))

  # TODO(aarontp): Move these flags into a recipe
  cmd = (u'log2timeline.py --status_view none --hashers all '
         u'--partition all --vss_stores all').split()
  cmd.extend([u'--logfile', plaso_log])
  cmd.extend([plaso_file, evidence.local_path])

  result.log(u'Running plaso as [{0:s}]'.format(' '.join(cmd)))

  # TODO(aarontp): Create helper function to do all this
  # Fix: without explicit stdout/stderr pipes, communicate() returns
  # (None, None) and the output stored in result.error below was always
  # empty. PIPE makes the captured output actually available.
  plaso_proc = subprocess.Popen(
      cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  stdout, stderr = plaso_proc.communicate()
  result.error['stdout'] = stdout
  result.error['stderr'] = stderr

  ret = plaso_proc.returncode
  if ret:
    msg = u'Plaso execution failed with status {0:d}'.format(ret)
    result.log(msg)
    result.close(success=False, status=msg)
  else:
    # TODO(aarontp): Get and set plaso version here
    result.log('Plaso output file in {0:s}'.format(plaso_file))
    plaso_result.local_path = plaso_file
    result.add_evidence(plaso_result)
    result.close(success=True)
  return result
def run(self, evidence, result):
  """Task that process data with Plaso.

  Args:
      evidence: Path to data to process.
      result: A TurbiniaTaskResult object to place task results into.

  Returns:
      TurbiniaTaskResult object.
  """
  config.LoadConfig()

  # Write plaso file into tmp_dir because sqlite has issues with some shared
  # filesystems (e.g NFS).
  output_file = os.path.join(self.tmp_dir, '{0:s}.plaso'.format(self.id))
  log_file = os.path.join(self.output_dir, '{0:s}.log'.format(self.id))

  plaso_evidence = PlasoFile()
  plaso_evidence.local_path = output_file

  # TODO(aarontp): Move these flags into a recipe
  cmd = [
      'log2timeline.py', '--status_view', 'none', '--hashers', 'all',
      '--partition', 'all', '--vss_stores', 'all'
  ]
  if config.DEBUG_TASKS:
    cmd.append('-d')
  cmd += ['--logfile', log_file, output_file, evidence.local_path]

  result.log('Running plaso as [{0:s}]'.format(' '.join(cmd)))
  self.execute(
      cmd, result, save_files=[log_file], new_evidence=[plaso_evidence],
      close=True)
  return result
def run(self, evidence, result):
  """Task that process data with Plaso.

  Args:
      evidence (Evidence object): The evidence we will process.
      result (TurbiniaTaskResult): The object to place task results into.

  Returns:
      TurbiniaTaskResult object.
  """
  config.LoadConfig()

  # Write plaso file into tmp_dir because sqlite has issues with some shared
  # filesystems (e.g NFS).
  storage_path = os.path.join(self.tmp_dir, '{0:s}.plaso'.format(self.id))
  plaso_evidence = PlasoFile(source_path=storage_path)
  log_path = os.path.join(self.output_dir, '{0:s}.log'.format(self.id))

  cmd = self.build_plaso_command('log2timeline.py', self.task_config)

  if config.DEBUG_TASKS or self.task_config.get('debug_tasks'):
    cmd.append('-d')

  # Pass through any decryption credentials attached to the evidence.
  for credential_type, credential_data in (evidence.credentials or []):
    cmd += [
        '--credential',
        '{0:s}:{1:s}'.format(credential_type, credential_data)
    ]

  cmd += ['--temporary_directory', self.tmp_dir]
  cmd += ['--logfile', log_path]
  cmd += ['--unattended']
  cmd += ['--storage_file', storage_path]
  cmd += [evidence.local_path]

  result.log('Running plaso as [{0:s}]'.format(' '.join(cmd)))
  self.execute(
      cmd, result, log_files=[log_path], new_evidence=[plaso_evidence],
      close=True)
  return result
class PsortJob(TurbiniaJob):
  """Run psort on PlasoFile to generate a CSV file."""

  # The types of evidence that this Job will process.
  # Note: listing the classes directly is equivalent to the previous
  # `type(PlasoFile())` form but avoids instantiating evidence objects at
  # class-definition time.
  evidence_input = [PlasoFile]
  evidence_output = [PlasoCsvFile]

  def __init__(self):
    super(PsortJob, self).__init__(name='PsortJob')

  def create_tasks(self, evidence):
    """Create task for Psort.

    Args:
      evidence: List of evidence objects to process

    Returns:
      A list of PsortTasks.
    """
    # One task per evidence object; the loop variable itself is unused.
    return [PsortTask() for _ in evidence]
class PlasoJob(TurbiniaJob):
  """Run log2timeline on the supplied evidence to generate a PlasoFile."""

  # The types of evidence that this Job will process.
  # Note: listing the classes directly is equivalent to the previous
  # `type(Directory())` style but avoids instantiating evidence objects at
  # class-definition time.
  evidence_input = [Directory, RawDisk, GoogleCloudDisk]
  evidence_output = [PlasoFile]

  def __init__(self):
    super(PlasoJob, self).__init__(name='PlasoJob')

  def create_tasks(self, evidence):
    """Create task for Plaso.

    Args:
      evidence: List of evidence objects to process

    Returns:
      A list of PlasoTasks.
    """
    # One task per evidence object; the loop variable itself is unused.
    return [PlasoTask() for _ in evidence]
def run(self, evidence, result):
  """Task that processes data with Plaso.

  Args:
      evidence (Evidence object): The evidence we will process.
      result (TurbiniaTaskResult): The object to place task results into.

  Returns:
      TurbiniaTaskResult object.
  """
  config.LoadConfig()

  # TODO: Convert to using real recipes after
  # https://github.com/google/turbinia/pull/486 is in. For now we're just
  # using the --recipe_config flag, and this can be used with colon separated
  # values like:
  # --recipe_config='artifact_filters=BrowserFoo:BrowserBar,parsers=foo:bar'
  recipe_config = evidence.config or {}

  artifact_filters = recipe_config.get('artifact_filters')
  if artifact_filters:
    # log2timeline expects comma-separated values; the recipe_config value
    # uses colons because commas separate the top-level key=value pairs.
    artifact_filters = artifact_filters.replace(':', ',')

  parsers = recipe_config.get('parsers')
  if parsers:
    parsers = parsers.replace(':', ',')

  file_filters = recipe_config.get('file_filters')
  file_filter_file = None
  if file_filters:
    file_filter_file = os.path.join(self.tmp_dir, 'file_filter.txt')
    try:
      with open(file_filter_file, 'wb') as file_filter_fh:
        for filter_ in file_filters.split(':'):
          file_filter_fh.write(filter_.encode('utf-8') + b'\n')
    except IOError as exception:
      message = 'Cannot write to filter file {0:s}: {1!s}'.format(
          file_filter_file, exception)
      result.close(self, success=False, status=message)
      return result

  vss = recipe_config.get('vss')

  # Write plaso file into tmp_dir because sqlite has issues with some shared
  # filesystems (e.g NFS).
  plaso_file = os.path.join(self.tmp_dir, '{0:s}.plaso'.format(self.id))
  plaso_evidence = PlasoFile(source_path=plaso_file)
  plaso_log = os.path.join(self.output_dir, '{0:s}.log'.format(self.id))

  # TODO(aarontp): Move these flags into a recipe
  cmd = ('log2timeline.py --status_view none --hashers all '
         '--partition all').split()
  if config.DEBUG_TASKS:
    cmd.append('-d')
  if artifact_filters:
    cmd.extend(['--artifact_filters', artifact_filters])
  if parsers:
    cmd.extend(['--parsers', parsers])
  if file_filters:
    cmd.extend(['--file_filter', file_filter_file])
  if vss:
    cmd.extend(['--vss_stores', vss])

  # Encrypted disks need a credential passed through to log2timeline.
  if isinstance(evidence, (APFSEncryptedDisk, BitlockerDisk)):
    if evidence.recovery_key:
      cmd.extend([
          '--credential',
          'recovery_password:{0:s}'.format(evidence.recovery_key)
      ])
    elif evidence.password:
      cmd.extend(['--credential', 'password:{0:s}'.format(evidence.password)])
    else:
      # Fix: this branch also covers APFS encrypted disks, so the error
      # message should not claim the disk is specifically bitlocker.
      result.close(
          self, False, 'No credentials were provided '
          'for an encrypted disk.')
      return result

  cmd.extend(['--logfile', plaso_log])
  cmd.extend([plaso_file, evidence.device_path])

  result.log('Running plaso as [{0:s}]'.format(' '.join(cmd)))
  self.execute(
      cmd, result, log_files=[plaso_log], new_evidence=[plaso_evidence],
      close=True)
  return result