def execute_task(mqueue):
    """
        Simple worker which will execute the analysis tasks.
        It ends by setting the analysis status to finished.
    """
    while True:
        m_analysis = mqueue.get(True)
        if m_analysis is None:
            return False
        m_analysis.tasks.sort()
        for level, mtask in m_analysis.tasks:
            # TRY/CATCH block to avoid blocking tasks
            try:
                result = mtask.execute()
                if result:
                    mtask.apply_result()
                if not result:
                    app.logger.error("Error executing task %s" % (mtask))
            except Exception as e:
                app.logger.error("Error executing task %s (%s)" % (mtask, e))
                app.logger.exception(e)
                with app.app_context():
                    db.session.rollback()
                continue
            del mtask
        m_analysis.set_finished()
    return True
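# --- Illustrative sketch (not part of the original file) ---
# A minimal example of how such a worker could be wired to a shared
# multiprocessing queue. The start_workers() helper and the max_instances
# parameter are assumptions used only for illustration, not the project's
# actual job pool code.
from multiprocessing import Process, Queue


def start_workers(max_instances=4):
    """
        Spawn worker processes consuming analyses from a shared queue.
    """
    queue = Queue()
    workers = []
    for _ in range(max_instances):
        proc = Process(target=execute_task, args=(queue,))
        proc.daemon = True
        proc.start()
        workers.append(proc)
    return queue, workers

# Typical shutdown: push one None per worker so execute_task() returns False.
# for _ in workers:
#     queue.put(None)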
def parse_ida_cmds(self, sid, functions):
    """
        Parse and add the IDA commands dumped by AnalyzeIt,
        and update the function names if needed.
    """
    idac = IDAActionsController()
    funcs = dict.copy(functions)
    fname = self.storage_file + '.idacmd'
    act = None
    if not os.path.exists(fname):
        return funcs
    with open(fname) as fdata:
        for line in fdata:
            if line.startswith('idc.MakeName'):
                addr, name = self.get_addr_data(line)
                try:
                    # update the functions list with idc.MakeName() information
                    funcs[addr]['name'] = name
                except KeyError:
                    app.logger.debug("No function found for %x" % (addr))
                act = idac.add_name(addr, name)
            elif line.startswith('idc.MakeRptCmt'):
                addr, cmt = self.get_addr_data(line)
                act = idac.add_comment(addr, cmt)
            else:
                app.logger.debug("Unknown IDA command %s" % (line))
                continue
            with app.app_context():
                SampleController.add_idaaction(sid, act)
    return funcs
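# --- Illustrative sketch (not part of the original file) ---
# Example of what a dumped AnalyzeIt command line may look like and how the
# address/name pair could be split out. The exact dump format and the real
# get_addr_data() implementation belong to the project; this regex-based
# parser is only an assumption for illustration.
import re

MAKENAME_RE = re.compile(r'idc\.MakeName\((0x[0-9A-Fa-f]+),\s*"(.*)"\)')


def parse_makename(line):
    """
        Return (address, name) from a line such as:
            idc.MakeName(0x401000, "check_license")
    """
    match = MAKENAME_RE.match(line.strip())
    if match is None:
        return None
    return int(match.group(1), 16), match.group(2)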
def apply_result(self):
    with app.app_context():
        samplecontrol = SampleController()
        sample = SampleController.get_by_id(self.sid)
        if sample is None:
            app.logger.error(self.tmessage + "Sample has disappeared...")
            raise IOError
        app.logger.debug(self.tmessage + "APPLY_RESULT")
        # TXT report
        app.logger.info("Creating new analyzeit report")
        SampleController.create_analysis(
            sample, self.txt_report, "analyzeit", True)
        functions = self.parse_machoc_signatures()
        # IDA COMMANDS report
        app.logger.info("Parsing idacommands")
        functions = self.parse_ida_cmds(sample.id, functions)
        # Functions: just push the list
        app.logger.info("Storing functions")
        samplecontrol.add_multiple_functions(self.sid, functions)
        # Global machoc match
        app.logger.info("Calculating machoc80 matches")
        samplecontrol.match_by_machoc80(sample)
    return True
def apply_result(self):
    with app.app_context():
        s_controller = SampleController()
        sample = s_controller.get_by_id(self.sid)
        app.logger.debug(self.tmessage + "APPLY_RESULT")
        s_controller.add_multiple_strings(sample, self.resultstrings)
    app.logger.debug(self.tmessage + "END - TIME %i" %
                     (int(time.time()) - self.tstart))
    return True
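# --- Illustrative sketch (not part of the original file) ---
# resultstrings is filled elsewhere in the task; a minimal ASCII string
# extraction comparable to the `strings` utility could look like this.
# This is an assumption for illustration, not the project's actual
# extraction code.
import re

ASCII_STRINGS_RE = re.compile(rb'[\x20-\x7e]{4,}')


def extract_ascii_strings(data):
    """
        Return printable ASCII runs of at least 4 characters from raw bytes.
    """
    return [m.group().decode('ascii') for m in ASCII_STRINGS_RE.finditer(data)]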
def set_finished(self):
    """
        Set the analysis status to FINISHED.
        Called by the jobpool after the tasks' execution.
    """
    with app.app_context():
        if self.sid:
            sample = Sample.query.get(self.sid)
            if sample:
                sample.analysis_status = AnalysisStatus.FINISHED
                db.session.commit()
    return True
def apply_result(self):
    s_controller = SampleController()
    with app.app_context():
        sample = s_controller.get_by_id(self.sid)
        app.logger.debug(self.tmessage + "APPLY_RESULT")
        # The compilation timestamp (even when faked) IS a file date,
        # so update it.
        s_controller.add_multiple_metadata(sample, self.metadata_extracted)
        s_controller.set_file_date(sample, self.compile_timestamp)
        s_controller.set_import_hash(sample, self.import_hash)
    app.logger.debug(self.tmessage + "END - TIME %i" %
                     (int(time.time()) - self.tstart))
    return True
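# --- Illustrative sketch (not part of the original file) ---
# The import hash and compile timestamp stored above are typically read from
# the PE headers. A plain pefile-based extraction could look like this; it
# only shows the pefile API, not the project's actual extraction code.
import pefile


def extract_pe_basics(path):
    """
        Return (import_hash, compile_timestamp) for a PE file on disk.
    """
    pe = pefile.PE(path)
    return pe.get_imphash(), pe.FILE_HEADER.TimeDateStamp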
def execute(self):
    """
        Extended YARA execution.
        Stores hits in the yaramatched attribute.
    """
    s_controller = SampleController()
    with app.app_context():
        sample = s_controller.get_by_id(self.sid)
        self.tstart = int(time.time())
        app.logger.debug(self.tmessage + "EXECUTE")
        for yar in YaraRule.query.all():
            if run_extended_yara(yar.raw_rule, sample) is True:
                self.yaramatched.append(yar)
    return True
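# --- Illustrative sketch (not part of the original file) ---
# run_extended_yara() is project-specific; the snippet below only shows the
# underlying idea with the plain yara-python API on a raw byte buffer.
import yara

EXAMPLE_RULE = '''
rule example_marker
{
    strings:
        $marker = "MZ"
    condition:
        $marker at 0
}
'''


def plain_yara_matches(data):
    """
        Compile a single rule and return the list of matches on `data`.
    """
    rules = yara.compile(source=EXAMPLE_RULE)
    return rules.match(data=data)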
def apply_result(self):
    """
        Commit the matches to the database.
    """
    with app.app_context():
        s_controller = SampleController()
        sample = s_controller.get_by_id(self.sid)
        app.logger.debug(self.tmessage + "APPLY_RESULT")
        for match in self.yaramatched:
            # Use the static YaraController: instantiating it would create
            # a JobPool, causing exceptions (daemon => child).
            YaraController.add_to_sample(sample, match)
    app.logger.debug(self.tmessage + "END - TIME %i" %
                     (int(time.time()) - self.tstart))
    return True
def execute_yara_task(mqueue):
    """
        Special dedicated YARA worker.
        Dispatches newly created YARA rules on the samples pool.
        There is no analysis in this case, nor any priority consideration,
        which is why it has been kept separate.
    """
    while True:
        yara_task = mqueue.get(True)
        if yara_task is None:
            return False
        try:
            result = yara_task.execute()
            if result:
                result = yara_task.apply_result()
            if not result:
                app.logger.error("Error executing yara task %s" % (yara_task))
        except Exception as e:
            with app.app_context():
                db.session.rollback()
            app.logger.error("Exception executing yara task: %s" % (e))
            app.logger.exception(e)
            continue
    return True
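# --- Illustrative sketch (not part of the original file) ---
# How a freshly created rule could be dispatched to this worker. The
# dispatch_yara_rule() helper and the YaraTask(sid, rule) constructor
# signature are hypothetical, used only to illustrate the queue-based flow.
def dispatch_yara_rule(yara_queue, rule, sample_ids):
    """
        Enqueue one YARA task per sample for the dedicated worker.
    """
    for sid in sample_ids:
        yara_queue.put(YaraTask(sid, rule))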
""" This file is part of Polichombr. (c) 2017 ANSSI-FR Description: Creates the database """ import os.path from poli import app, db from config import SQLALCHEMY_DATABASE_URI from config import SQLALCHEMY_MIGRATE_REPO from migrate.versioning import api from migrate import exceptions try: with app.app_context(): db.create_all() except exceptions.DatabaseAlreadyControlledError: pass if not os.path.exists(SQLALCHEMY_MIGRATE_REPO): api.create(SQLALCHEMY_MIGRATE_REPO, 'database repository') api.version_control(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO) else: api.version_control(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO, api.version(SQLALCHEMY_MIGRATE_REPO))