def add_processing_module(self, module):
    """Register a Processing module and wire it into the dispatcher's indexes."""
    klass = get_class(module['path'], module['class'])
    if not klass:
        return

    klass.info = module
    self._modules['Processing'][module['name']] = klass
    self._add_module_options(module)
    self._add_module_permissions(module)

    # Index the module as a transform when it declares generated types
    if module['generates']:
        self._add_transforms(module)

    # Modules with explicit triggers are indexed by trigger ...
    if module['triggered_by']:
        self._add_module_triggers(module)
    # ... everything else counts as a general purpose module
    else:
        self._general.append(module['name'])

    # Modules acting on specific file types also react to generated files
    if module['acts_on']:
        for source_type in iterify(module['acts_on']):
            self._add_trigger(self._triggers, "_generated_file(%s)" % source_type, module)
def static_info(cls):
    """Build the ModuleInfo document describing this Processing module class."""
    description = {
        "name": cls.name,
        "description": cls.description,
        "type": "Processing",
        "config": cls.vm_config + cls.config,
        "diffs": {},
        "acts_on": iterify(cls.acts_on),
        "generates": iterify(cls.generates),
        "triggered_by": iterify(cls.triggered_by),
        "queue": cls.queue,
    }
    init_config_values(description)
    return ModuleInfo(description)
def _local_field(self, names):
    """Walk nested fields following *names* (a single key or list of keys)."""
    node = self
    for key in iterify(names):
        node = node[key]
    return node
def run(self):
    """To implement when :func:`fame.core.module.ProcessingModule.each`
    cannot be used.

    This method will be called and should perform the actual analysis. It
    should have the same output as
    :func:`fame.core.module.ProcessingModule.each`.

    By default, it will call :func:`fame.core.module.ProcessingModule.each`
    on every eligible file in the analysis. You should only define this
    method when the module does not work on files, but on the analysis
    itself. The analysis can be accessed using ``self._analysis``.

    Returns:
        boolean indicating if module was successful.

    Raises:
        ModuleExecutionError: if any error occurs during the analysis.
    """
    # When 'acts_on' is defined, run on every file of a matching type
    if self.info['acts_on']:
        result = False

        for source_type in iterify(self.info['acts_on']):
            for target in self._analysis.get_files(source_type):
                if self._try_each(target, source_type):
                    result = True

        return result

    # Otherwise, only run on the analysis' main target
    return self._try_each(self._analysis.get_main_file(),
                          self._analysis._file['type'])
def _wait_for_completion(self, state):
    """Block until the virtual machine reaches one of the expected states.

    Args:
        state: a state name or list of acceptable state names.

    Raises:
        ModuleExecutionError: if the machine does not reach one of the
            expected states within 120 seconds.
    """
    states = iterify(state)

    def correct_state():
        return self._state() in states

    # Poll every 0.5s, for up to 2 minutes.
    if with_timeout(correct_state, timedelta(seconds=120), 0.5) is None:
        # BUG FIX: the message previously formatted `self.state`, which is
        # not the expected-state argument (the class exposes `self._state()`
        # as a method, and no `state` attribute is visible); use the local
        # list of expected states instead.
        raise ModuleExecutionError('Timeout while waiting for machine "{}" to be "{}"'.format(self.vm_label, states))
def _can_execute_module(self, module):
    """Tell whether *module* can run given the currently available types."""
    # Modules without 'acts_on' can always run.
    if not module.info['acts_on']:
        return True

    available = self._types_available()
    return any(source_type in available
               for source_type in iterify(module.info['acts_on']))
def _add_transforms(self, module):
    """Index *module* by the file types it consumes and produces."""
    name = module['name']

    if not module['acts_on']:
        # No source type: each generated type is a direct transform.
        for generated_type in iterify(module['generates']):
            self._direct_transforms.setdefault(generated_type, []).append(name)
    else:
        # Record a (source type -> generated type) edge for every combination.
        for source_type in iterify(module['acts_on']):
            entries = self._transforms.setdefault(source_type, [])
            for generated_type in iterify(module['generates']):
                entries.append({'type': generated_type, 'module': name})
def add_ioc(self, value, tags=None):
    """Add IOCs to the analysis.

    Args:
        value: string or list of strings containing the IOC's value.
        tags (optional): string or list of strings containing tags that
            describe these IOCs.
    """
    # FIX: use a None sentinel instead of a mutable default argument
    # (`tags=[]` is shared across calls). Downstream still receives a
    # list by default, exactly as before.
    if tags is None:
        tags = []

    for ioc in iterify(value):
        self._analysis.add_ioc(ioc, self.name, tags)
def add_ioc(self, value, source, tags=None):
    """Add an IOC to this document, merging *tags* into any existing entry.

    Args:
        value: the IOC's value.
        source: accepted for interface compatibility; not stored by this
            implementation.
        tags (optional): string or list of strings to attach to the IOC.
    """
    # FIX: use a None sentinel instead of a mutable default argument
    # (`tags=[]` is shared across calls). Behavior is unchanged.
    if tags is None:
        tags = []

    # Find an existing record for this value; otherwise create one.
    for ioc in self['iocs']:
        if ioc['value'] == value:
            break
    else:
        ioc = {'value': value, 'tags': set()}
        self['iocs'].append(ioc)

    for tag in iterify(tags):
        ioc['tags'].add(tag)
def _local_field(self, names):
    """Return the nested field at *names*, creating intermediate dicts on demand."""
    node = self
    for key in iterify(names):
        # Create missing levels so the full path always exists.
        if key not in node:
            node[key] = {}
        node = node[key]
    return node
def queue_modules(self, modules, fallback_waiting=True):
    """Queue each named module for execution when it is able to run."""
    for module_name in iterify(modules):
        self.log("debug", "Trying to queue module '{0}'".format(module_name))

        # Skip modules that already ran or are already queued.
        if module_name in self['executed_modules'] or module_name in self['pending_modules']:
            continue

        module = self._get_module(module_name)
        if self._can_execute_module(module):
            if self.append_to('pending_modules', module_name):
                run_module.apply_async((self['_id'], module_name),
                                       queue=module.info['queue'])
        elif fallback_waiting:
            # Not executable yet: remember it for when new types appear.
            self.append_to('waiting_modules', module_name)
def next_module(self, types_available, module_name, excluded_modules):
    """Return the next module to run on the path toward *module_name*."""
    module = self.get_processing_module(module_name)

    # Modules without 'acts_on' can run right away.
    if not module.info['acts_on']:
        return module_name

    # Run directly when one of the accepted types is already available.
    for acts_on in iterify(module.info['acts_on']):
        if acts_on in types_available:
            return module_name

    # Otherwise, chain through modules that produce a usable type.
    return self._shortest_path_to_module(types_available, module, excluded_modules)
def static_info(cls):
    """Build the ModuleInfo document describing this Filetype module class."""
    description = {
        "name": cls.name,
        "description": cls.description,
        "acts_on": iterify(cls.acts_on),
        "type": "Filetype",
        "config": cls.config,
        "diffs": {},
    }
    init_config_values(description)
    return ModuleInfo(description)
def _can_execute_module(self, module):
    """Tell whether *module* can run given the current analysis state."""
    # A hash-only analysis can only run Preloading modules.
    if self._needs_preloading():
        return module.info['type'] == "Preloading"

    # With a file available, modules without 'acts_on' always qualify.
    if 'acts_on' not in module.info or not module.info['acts_on']:
        return True

    available = self._types_available()
    return any(source_type in available
               for source_type in iterify(module.info['acts_on']))
def needs_plugin(self, plugins):
    """Indicate that this module needs specific volatility plugins.

    Only useful when outside of the default scenario.

    Args:
        plugins: a string or list of strings containing volatility plugin
            names.

    Raises:
        ModuleInitializationError: when one of the plugins is not available.
    """
    missing = [name for name in iterify(plugins) if name not in self.plugins]
    if missing:
        # Report the first missing plugin, as the original loop did.
        raise ModuleInitializationError(self, "volatility plugin '{}' needed".format(missing[0]))
def next_module(self, types_available, module_name, excluded_modules):
    """Return the next module to run on the path toward *module_name*."""
    module = self.get_processing_module(module_name)
    if module is None:
        raise DispatchingException("Could not find execution path")

    # Modules without 'acts_on' can run right away.
    if not module.info['acts_on']:
        return module_name

    # Run directly when one of the accepted types is already available.
    for acts_on in iterify(module.info['acts_on']):
        if acts_on in types_available:
            return module_name

    # Otherwise, chain through modules that produce a usable type.
    return self._shortest_path_to_module(types_available, module, excluded_modules)
def add_generated_files(self, file_type, locations):
    """Save generated files and queue modules triggered by this file type."""
    # First, save the files to db / storage
    if file_type not in self['generated_files']:
        self['generated_files'][file_type] = []

    for raw_location in iterify(locations):
        stored_path = self._get_generated_file_path(raw_location)
        self.log(
            'debug',
            u"Adding generated file '{0}' of type '{1}'".format(
                stored_path, file_type))
        self.append_to(['generated_files', file_type], stored_path)

    # Then, trigger registered modules if magic is enabled
    if self.magic_enabled():
        self.queue_modules(
            dispatcher.triggered_by("_generated_file(%s)" % file_type))
def add_generated_files(self, file_type, locations):
    """Save generated files (remotely when configured) and trigger modules."""
    # First, save the files to db / storage
    if file_type not in self['generated_files']:
        self['generated_files'][file_type] = []

    for location in iterify(locations):
        # Upload to the remote endpoint when one is configured, keeping
        # the remote path instead of the local one.
        if fame_config.remote:
            endpoint = '/analyses/{}/generated_file'.format(self['_id'])
            filepath = send_file_to_remote(location, endpoint).json()['path']
        else:
            filepath = location

        self.log('debug', "Adding generated file '{0}' of type '{1}'".format(filepath, file_type))
        self.append_to(['generated_files', file_type], filepath)

    # Then, trigger registered modules
    self.queue_modules(dispatcher.triggered_by("_generated_file(%s)" % file_type))
def _shortest_path_to_module(self, types_available, target_module, excluded_modules):
    """Pick the next module on the shortest chain producing a type *target_module* accepts."""
    best_module = None
    best_length = None

    # Consider every type the target accepts and keep the cheapest path.
    for destination_type in iterify(target_module.info['acts_on']):
        module, length = self._shortest_path_to_type(
            types_available, destination_type,
            excluded_modules + [target_module.info['name']])
        if best_length is None or length < best_length:
            best_length = length
            best_module = module

    if best_length is None:
        raise DispatchingException("Could not find execution path")

    return best_module
def needs_variable(self, variables):
    """Indicate that the module needs a specific attribute to work properly.

    This function is only useful in abstract modules, in order to make
    sure that modules that inherit from this class correctly define
    needed class attributes.

    Args:
        variables: a string or an array of strings containing the name of
            needed class attributes.

    Raises:
        ModuleInitializationError: One of the needed attributes is not
            correctly defined.
    """
    for attribute_name in iterify(variables):
        # An attribute left at None means the subclass never defined it.
        if getattr(self, attribute_name) is None:
            raise ModuleInitializationError(self, "no '%s' defined" % attribute_name)
def queue_modules(self, modules, fallback_waiting=True):
    """Queue each named module for execution when it is able to run."""
    for module_name in iterify(modules):
        self.log("debug", "Trying to queue module '{0}'".format(module_name))

        # Skip modules that already ran or are already queued.
        if module_name in self['executed_modules'] or module_name in self['pending_modules']:
            continue

        module = dispatcher.get_module(module_name)
        if module is None:
            self._error_with_module(
                module_name, "module has been removed or disabled.")
        elif self._can_execute_module(module):
            if self.append_to('pending_modules', module_name):
                celery.send_task('run_module',
                                 args=(self['_id'], module_name),
                                 queue=module.info['queue'])
        elif fallback_waiting:
            # Not executable yet: remember it for when new types appear.
            self.append_to('waiting_modules', module_name)
def add_ioc(self, value, source, tags=None):
    """Add an IOC to the analysis, enriched with Threat Intelligence data.

    Args:
        value: the IOC's value.
        source: name of the source that produced the IOC.
        tags (optional): string or list of strings to attach to the IOC.
    """
    # FIX: use a None sentinel instead of a mutable default argument
    # (`tags=[]` is shared across calls). Behavior is unchanged.
    if tags is None:
        tags = []

    # First, we need to make sure there is a record for this IOC
    r = self.collection.update_one(
        {'_id': self['_id'], 'iocs.value': {'$ne': value}},
        {'$push': {'iocs': {'value': value, 'tags': [], 'ti_tags': [], 'ti_indicators': [], 'sources': []}}})

    # If this is the first time we are adding this IOC, lookup Threat Intelligence data
    if r.modified_count == 1:
        ti_tags, ti_indicators = self._lookup_ioc(value, tags)

        # If we have Threat Intelligence data, enrich analysis
        if ti_tags:
            self.collection.update_one({'_id': self['_id'], 'iocs.value': value},
                                       {'$addToSet': {'iocs.$.ti_tags': {'$each': ti_tags}}})
        if ti_indicators:
            self.collection.update_one({'_id': self['_id'], 'iocs.value': value},
                                       {'$addToSet': {'iocs.$.ti_indicators': {'$each': ti_indicators}}})

    # Then add tags to the list
    self.collection.update_one({'_id': self['_id'], 'iocs.value': value},
                               {'$addToSet': {'iocs.$.tags': {'$each': iterify(tags)}}})

    # Finally, add the source
    self.collection.update_one({'_id': self['_id'], 'iocs.value': value},
                               {'$addToSet': {'iocs.$.sources': source}})
def _add_module_triggers(self, module):
    """Register *module* under each of its triggers, wildcards indexed separately."""
    for trigger in iterify(module['triggered_by']):
        # Triggers containing glob metacharacters are matched dynamically.
        if any(char in trigger for char in '*?['):
            self._add_trigger(self._dynamic_triggers, trigger, module)
        else:
            self._add_trigger(self._triggers, trigger, module)