def test_remove_fully_from_current_analyses(self):
    """Removing the last running file of a parent drops the parent entry entirely."""
    self.scheduler.currently_running = {'parent_uid': ['foo']}
    file_object = FileObject(binary=b'foo')
    file_object.uid = 'foo'
    file_object.parent_firmware_uids = {'parent_uid'}

    self.scheduler._remove_from_current_analyses(file_object)

    assert self.scheduler.currently_running == {}
def test_add_file_to_current_analyses(self):
    """Included files are merged into the parent's running list without duplicates."""
    self.scheduler.currently_running = {'parent_uid': ['foo', 'bar']}
    file_object = FileObject(binary=b'foo')
    file_object.files_included = ['bar', 'new']
    file_object.parent_firmware_uids = {'parent_uid'}

    self.scheduler._add_to_current_analyses(file_object)

    assert sorted(self.scheduler.currently_running['parent_uid']) == ['bar', 'foo', 'new']
def test_remove_but_not_found(self, caplog):
    """A warning is logged when the removed file is missing from the parent's file list."""
    self.scheduler.currently_running = {
        'parent_uid': {'file_list': ['bar'], 'analyzed_files_count': 1}
    }
    file_object = FileObject(binary=b'foo')
    file_object.uid = 'foo'
    file_object.parent_firmware_uids = {'parent_uid'}

    with caplog.at_level(logging.WARNING):
        self.scheduler._remove_from_current_analyses(file_object)
        assert any('but it is not included' in message for message in caplog.messages)
def test_remove_partial_from_current_analyses(self):
    """Removing one of several files keeps the parent and bumps the analyzed counter."""
    self.scheduler.currently_running = {
        'parent_uid': {'file_list': ['foo', 'bar'], 'analyzed_files_count': 0}
    }
    file_object = FileObject(binary=b'foo')
    file_object.uid = 'foo'
    file_object.parent_firmware_uids = {'parent_uid'}

    self.scheduler._remove_from_current_analyses(file_object)

    result = self.scheduler.currently_running
    assert 'parent_uid' in result
    assert result['parent_uid']['file_list'] == ['bar']
    assert result['parent_uid']['analyzed_files_count'] == 1
def test_remove_but_not_found(self, caplog):
    """Removing a file that is not queued for analysis logs a debug message."""
    self.status.currently_running = {
        'parent_uid': {'files_to_analyze': ['bar'], 'analyzed_files_count': 1}
    }
    file_object = FileObject(binary=b'foo')
    file_object.uid = 'foo'
    file_object.parent_firmware_uids = {'parent_uid'}

    with caplog.at_level(logging.DEBUG):
        self.status.remove_from_current_analyses(file_object)
        assert any('Failed to remove' in message for message in caplog.messages)
def _convert_to_file_object(self, entry, analysis_filter=None):
    """Build a FileObject from a database entry.

    :param entry: the database document describing the file
    :param analysis_filter: optional list restricting which analysis results are loaded
    :return: the populated FileObject
    """
    file_object = FileObject()
    file_object.uid = entry['_id']
    file_object.size = entry['size']
    file_object.set_name(entry['file_name'])
    file_object.virtual_file_path = entry['virtual_file_path']
    file_object.parents = entry['parents']
    file_object.files_included = set(entry['files_included'])
    file_object.parent_firmware_uids = set(entry['parent_firmware_uids'])
    file_object.processed_analysis = self.retrieve_analysis(entry['processed_analysis'], analysis_filter=analysis_filter)
    # optional attributes kept for backwards compatibility with old entries
    for attribute in ('comments',):
        if attribute in entry:
            setattr(file_object, attribute, entry[attribute])
    return file_object
def _convert_to_file_object(self, entry: dict, analysis_filter: List[str] = None) -> FileObject:
    """Build a FileObject from a database entry.

    :param entry: the database document describing the file
    :param analysis_filter: optional list restricting which analysis results are loaded
    :return: the populated FileObject
    """
    file_object = FileObject()
    file_object.uid = entry['_id']
    file_object.size = entry['size']
    file_object.file_name = entry['file_name']
    file_object.virtual_file_path = entry['virtual_file_path']
    file_object.parents = entry['parents']
    file_object.processed_analysis = self.retrieve_analysis(entry['processed_analysis'], analysis_filter=analysis_filter)
    file_object.files_included = set(entry['files_included'])
    file_object.parent_firmware_uids = set(entry['parent_firmware_uids'])
    # dict.get replaces the `x if key in entry else dict()` conditional: same
    # result (empty dict when the key is absent), single lookup, idiomatic
    file_object.analysis_tags = entry.get('analysis_tags', {})
    for attribute in ['comments']:  # for backwards compatibility with old entries
        if attribute in entry:
            setattr(file_object, attribute, entry[attribute])
    return file_object
def test_remove_but_still_unpacking(self):
    """A parent stays in currently_running while files still wait to be unpacked."""
    self.status.currently_running = {
        'parent_uid': {
            'files_to_unpack': ['bar'],
            'files_to_analyze': ['foo'],
            'analyzed_files_count': 1,
        }
    }
    file_object = FileObject(binary=b'foo')
    file_object.uid = 'foo'
    file_object.parent_firmware_uids = {'parent_uid'}

    self.status.remove_from_current_analyses(file_object)

    parent_entry = self.status.currently_running.get('parent_uid')
    assert parent_entry is not None
    assert parent_entry['files_to_analyze'] == []
    assert parent_entry['files_to_unpack'] == ['bar']
    assert parent_entry['analyzed_files_count'] == 2
def test_remove_fully_from_current_analyses(self):
    """Removing the last outstanding file moves the parent to recently_finished."""
    self.scheduler.currently_running = {
        'parent_uid': {
            'files_to_unpack': [],
            'files_to_analyze': ['foo'],
            'analyzed_files_count': 1,
            'start_time': 0,
            'total_files_count': 2,
        }
    }
    self.scheduler.recently_finished = {}
    file_object = FileObject(binary=b'foo')
    file_object.uid = 'foo'
    file_object.parent_firmware_uids = {'parent_uid'}

    self.scheduler._remove_from_current_analyses(file_object)

    assert self.scheduler.currently_running == {}
    assert 'parent_uid' in self.scheduler.recently_finished
    assert self.scheduler.recently_finished['parent_uid']['total_files_count'] == 2
def test_add_duplicate_file_to_current_analyses(self):
    """Adding an already-tracked included file must not inflate the total file count."""
    self.status.currently_running = {
        'parent_uid': {
            'files_to_unpack': ['foo'],
            'files_to_analyze': ['duplicate'],
            'total_files_count': 2,
            'unpacked_files_count': 3,
        }
    }
    file_object = FileObject(binary=b'foo')
    file_object.uid = 'foo'
    file_object.files_included = ['duplicate']
    file_object.parent_firmware_uids = {'parent_uid'}

    self.status.add_to_current_analyses(file_object)

    parent_entry = self.status.currently_running['parent_uid']
    assert sorted(parent_entry['files_to_unpack']) == []
    assert sorted(parent_entry['files_to_analyze']) == ['duplicate', 'foo']
    assert parent_entry['total_files_count'] == 2
def test_add_file_to_current_analyses(self):
    """New included files queue for unpacking; the unpacked file moves on to analysis."""
    self.status.currently_running = {
        'parent_uid': {
            'files_to_unpack': ['foo'],
            'files_to_analyze': ['bar'],
            'total_files_count': 2,
            'unpacked_files_count': 1,
        }
    }
    file_object = FileObject(binary=b'foo')
    file_object.uid = 'foo'
    file_object.files_included = ['bar', 'new']
    file_object.parent_firmware_uids = {'parent_uid'}

    self.status.add_to_current_analyses(file_object)

    parent_entry = self.status.currently_running['parent_uid']
    assert sorted(parent_entry['files_to_unpack']) == ['new']
    assert sorted(parent_entry['files_to_analyze']) == ['bar', 'foo']
    assert parent_entry['unpacked_files_count'] == 2
    assert parent_entry['total_files_count'] == 3