def _store_preloaded_file(self, filepath=None, fd=None):
    """Store a preloaded file, remotely or locally, and return its File record.

    Exactly one of *filepath* or *fd* should be provided. If both are
    given, *filepath* takes precedence and a debug message is logged.

    Args:
        filepath: path to the file on disk.
        fd: file-like object containing the file's data.

    Returns:
        File: the stored file record (built from the remote response when
        ``fame_config.remote`` is set, from local data otherwise).

    Raises:
        ValueError: if neither *filepath* nor *fd* is provided.
    """
    if not filepath and not fd:
        raise ValueError(
            "Please provide either the path to the file or a file-like "
            "object containing the data.")

    if filepath and fd:
        # Fix: the original adjacent literals were missing a separating
        # space, producing "not both.Choosing" in the log output.
        self.log(
            "debug",
            "Please provide either the path to the file or a "
            "file-like object containing the data, not both. "
            "Choosing the filepath for now.")

    if fame_config.remote:
        # Remote mode: upload either the path or the stream; the server
        # responds with the serialized file record.
        if filepath:
            response = send_file_to_remote(filepath, '/files/')
        else:
            response = send_file_to_remote(fd, '/files/')
        return File(response.json()['file'])
    else:
        if filepath:
            with open(filepath, 'rb') as f:
                return File(filename=os.path.basename(filepath), stream=f)
        else:
            # presumably self._file['names'][0] is the original filename
            # recorded for this analysis — confirm against callers.
            return File(filename=self._file['names'][0], stream=fd)
def add_extracted_file(self, filepath, automatic_analysis=True):
    """Register a file extracted during this analysis.

    Looks the file up (or creates it, locally or remotely), optionally
    queues an automatic analysis of it, records it in this analysis'
    ``extracted_files`` list and links it back to this analysis.

    Args:
        filepath: path to the extracted file on disk.
        automatic_analysis: when True (and magic is enabled), the new
            file is automatically analyzed with the modules configured
            for extracted files.
    """
    self.log('debug', u"Adding extracted file '{}'".format(filepath))

    filename = os.path.basename(filepath)

    # Fix: the original used a bare open()/close() pair, leaking the
    # descriptor if File() or analyze() raised. 'with' guarantees cleanup.
    with open(filepath, 'rb') as fd:
        f = File(filename=filename, stream=fd, create=False)

        if not f.existing:
            if fame_config.remote:
                response = send_file_to_remote(filepath, '/files/')
                f = File(response.json()['file'])
            else:
                # NOTE(review): fd was already consumed by the first
                # File() call above; File presumably rewinds the stream
                # itself — confirm.
                f = File(filename=filename, stream=fd)

            # Automatically analyze extracted file if magic is enabled
            # and module did not disable it.
            if self.magic_enabled() and automatic_analysis:
                modules = None
                config = Config.get(name="extracted").get_values()
                if config is not None and "modules" in config:
                    modules = config["modules"].split()
                f.analyze(self['groups'], self['analyst'], modules, self['options'])

    self.append_to('extracted_files', f['_id'])
    f.add_parent_analysis(self)
def add_support_file(self, module_name, name, filepath):
    """Attach a support file produced by a module to this analysis.

    The file is either uploaded to the remote FAME instance or copied
    into local storage under
    ``<storage_path>/support_files/<module_name>/<analysis_id>/``, then
    recorded in ``self['support_files'][module_name]`` as a
    ``(name, basename)`` tuple.

    Args:
        module_name: name of the module providing the file.
        name: display name for the support file.
        filepath: path to the file on disk.
    """
    self.log('debug', "Adding support file '{}' at '{}'".format(name, filepath))

    if fame_config.remote:
        response = send_file_to_remote(
            filepath,
            '/analyses/{}/support_file/{}'.format(self['_id'], module_name))
        dstfilepath = response.json()['path']
    else:
        dirpath = os.path.join(fame_config.storage_path, 'support_files',
                               module_name, str(self['_id']))
        dstfilepath = os.path.join(dirpath, os.path.basename(filepath))

        # Create parent dirs if they don't exist.
        # Fix: the original bare 'except: pass' swallowed every error
        # (permissions, bad path, ...). Only the "directory already
        # exists" case is expected and safe to ignore.
        try:
            os.makedirs(dirpath)
        except OSError:
            if not os.path.isdir(dirpath):
                raise

        copy(filepath, dstfilepath)

    if module_name not in self['support_files']:
        self['support_files'][module_name] = []

    self.append_to(['support_files', module_name],
                   (name, os.path.basename(dstfilepath)))
def add_generated_files(self, file_type, locations):
    """Record one or more generated files of *file_type* on this analysis.

    Each location is stored (uploaded first when running against a
    remote FAME instance), appended to
    ``self['generated_files'][file_type]``, and finally any modules
    triggered by this file type are queued.

    Args:
        file_type: category of the generated files.
        locations: a single path or an iterable of paths.
    """
    # Make sure the bucket for this file type exists before appending.
    if file_type not in self['generated_files']:
        self['generated_files'][file_type] = []

    for src in iterify(locations):
        if fame_config.remote:
            # Remote mode: upload the file and keep the server-side path.
            response = send_file_to_remote(src, '/analyses/{}/generated_file'.format(self['_id']))
            stored_path = response.json()['path']
        else:
            stored_path = src

        self.log('debug', "Adding generated file '{0}' of type '{1}'".format(stored_path, file_type))
        self.append_to(['generated_files', file_type], stored_path)

    # Queue every module registered for this generated-file trigger.
    trigger = "_generated_file(%s)" % file_type
    self.queue_modules(dispatcher.triggered_by(trigger))
def add_extracted_file(self, filepath):
    """Register a file extracted during this analysis and analyze it.

    Looks the file up (or creates it, locally or remotely), always
    launches an analysis of it, records it in this analysis'
    ``extracted_files`` list and links it back to this analysis.

    Args:
        filepath: path to the extracted file on disk.
    """
    self.log('debug', "Adding extracted file '{}'".format(filepath))

    filename = os.path.basename(filepath)

    # Fix: the original used a bare open()/close() pair, leaking the
    # descriptor if File() or analyze() raised. 'with' guarantees cleanup.
    with open(filepath, 'rb') as fd:
        f = File(filename=filename, stream=fd, create=False)

        if not f.existing:
            if fame_config.remote:
                response = send_file_to_remote(filepath, '/files/')
                f = File(response.json()['file'])
            else:
                # NOTE(review): fd was already consumed by the first
                # File() call above; File presumably rewinds the stream
                # itself — confirm.
                f = File(filename=filename, stream=fd)

            f.analyze(self['groups'], self['analyst'], None, self['options'])

    self.append_to('extracted_files', f['_id'])
    f.add_parent_analysis(self)