def get_running_jobs():
    try:
        db = DB()
        jobs = db.get_running_jobs()
        return jsonify({"jobs": [j.serialize() for j in jobs]}), 200
    except Exception as e:
        print(e)
        abort(500)
def get_models():
    try:
        db = DB()
        models = db.get_models()
        return jsonify({"models": [m.serialize() for m in models]}), 200
    except Exception as e:
        print(e)
        abort(500)
def get_binaries():
    try:
        db = DB()
        binaries = db.get_binaries()
        return jsonify({"binaries": [b.serialize() for b in binaries]}), 200
    except Exception as e:
        print(e)
        abort(500)
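# Hedged wiring sketch, not taken from the source: the handlers above follow
# the Flask pattern of returning a (response, status) tuple and using
# jsonify/abort, so they would typically be registered on an app or
# Blueprint. The URL paths below are assumptions for illustration only.
from flask import Flask

app = Flask(__name__)
app.add_url_rule("/jobs/running", view_func=get_running_jobs)
app.add_url_rule("/models", view_func=get_models)
app.add_url_rule("/binaries", view_func=get_binaries)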
def get_binary(binary_id):
    try:
        db = DB()
        binary = db.get_binary_by_id(binary_id)
        if binary:
            return jsonify({"binary": binary.serialize()}), 200
        else:
            return jsonify({"error": "Binary not found"}), 400
    except Exception as e:
        print(e)
        abort(500)
def handle(msg):
    db = DB("eggs.db")
    content_type, chat_type, chat_id = telepot.glance(msg)
    print(content_type)
    if content_type == "text":
        m = msg["text"]
        if m.startswith('/'):
            parts = m.split()
            # "/hatch <dist> <pokemon>" is the only supported command;
            # ignore messages that don't have exactly three tokens so a
            # malformed command can't raise an unpacking ValueError.
            if len(parts) == 3:
                command, dist, pokemon = parts
                if command == "/hatch":
                    db.add_egg(msg["from"], dist, pokemon)
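# Hedged wiring sketch (the token and polling setup are assumptions, not
# taken from the source): a single-argument callback like handle() is
# normally attached to a bot via telepot's MessageLoop.
import telepot
from telepot.loop import MessageLoop

bot = telepot.Bot("YOUR_BOT_TOKEN")  # placeholder token, not a real value
MessageLoop(bot, handle).run_as_thread()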
def get_model(model_id):
    try:
        db = DB()
        model = db.get_model_by_id(model_id)
        if model:
            return jsonify({"model": model.serialize()}), 200
        else:
            return jsonify({"error": "Model not found"}), 400
    except Exception as e:
        print(e)
        abort(500)
def create_binary():
    try:
        db = DB()
        data = request.get_json(silent=True)  # expects at least "name"
        binary = db.add(Binary(data["name"]))
        if binary:
            return jsonify({"binary_id": binary.id}), 200
        else:
            return jsonify({"error": "binary entry couldn't be created"}), 400
    except Exception as e:
        print(e)
        abort(500)
def create_model():
    try:
        db = DB()
        data = request.get_json(silent=True)  # name, desc, benign
        model = db.add(Model(data["name"], data["desc"], data["benign"]))
        if model:
            return jsonify({"model_id": model.id}), 200
        else:
            return jsonify({"error": "model couldn't be created"}), 400
    except Exception as e:
        print(e)
        abort(500)
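# Hedged client-side example of exercising create_model(); the route and
# host below are assumptions, not taken from the source. It shows the JSON
# body the handler reads via request.get_json(): "name", "desc", "benign".
import requests

resp = requests.post(
    "http://localhost:5000/models",  # assumed route
    json={"name": "baseline", "desc": "clean corpus", "benign": True},
)
print(resp.status_code, resp.json())  # expect 200 and {"model_id": ...}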
def get_results(binary_id):
    db = DB()
    res = []
    binary = db.get_binary_by_id(binary_id)
    results = db.get_results_by_binary_id(binary.id)
    if len(results) != 0:
        results = parse_results(results)
        for r in results:
            m = db.get_model_by_id(r.model)
            res.append({"model": m.name, "score": str(int(r.score))})
    return res
def upload_binary_file(binary_id):
    try:
        db = DB()
        binary = db.get_binary_by_id(binary_id)
        if not binary:
            return jsonify({"error": "Binary not found"}), 400
        file = request.files['file']
        if file and file.filename:
            file.save(
                os.path.join(FILE_UPLOAD_FOLDER, binary.filename + ".bin"))
            hm.analyze_binary(binary.id)
        return jsonify({}), 200
    except Exception as e:
        print(e)
        abort(500)
def train_model_thr(model_id, bin_path):
    db = DB()
    model = db.get_model_by_id(model_id)
    model_path = os.path.join(MODEL_FOLDER, model.filename)
    diss_path = os.path.join(model_path, "diss")
    map_path = os.path.join(model_path, "map.json")
    model_file_path = os.path.join(model_path, "model")
    bin_files = os.listdir(bin_path)
    for i, binf in enumerate(bin_files):
        Disassembler(os.path.join(bin_path, binf)).disassemble_to_file(
            os.path.join(diss_path, str(i) + "-" + binf + ".json"))
    ModelCreator(diss_path, map_path, model_file_path).train_model()
def upload_model_file(model_id):
    try:
        db = DB()
        model = db.get_model_by_id(model_id)
        if not model:
            return jsonify({"error": "Model not found"}), 400
        file = request.files['file']
        if file and file.filename:
            file.save(
                os.path.join(FILE_UPLOAD_FOLDER, model.filename + ".zip"))
            hm.create_model(model.id)
        return jsonify({}), 200
    except Exception as e:
        print(e)
        abort(500)
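# Hedged client-side example for the two upload handlers above; the route
# and host are assumptions, not taken from the source. Both handlers read
# request.files['file'], i.e. a multipart form field named "file":
import requests

with open("corpus.zip", "rb") as fh:
    resp = requests.post(
        "http://localhost:5000/models/1/file",  # assumed route
        files={"file": fh},
    )
print(resp.status_code)  # 200 on success, 400 if the model id is unknown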
def get_results(binary_id):
    try:
        db = DB()
        rt = []
        binary = db.get_binary_by_id(binary_id)
        results = db.get_results_by_binary_id(binary.id)
        if len(results) != 0:
            results = parse_results(results)
            for r in results:
                m = db.get_model_by_id(r.model)
                rt.append({"model": m.name, "score": str(int(r.score))})
        return jsonify({"results": rt, "analyzed": binary.analyzed}), 200
    except Exception as e:
        print(e)
        abort(500)
class ObjectRelationsMappingTest(TestCase):
    def setUp(self) -> None:
        # Creating a DB instance and reading its db_file field is the only
        # way to extract the file path.
        self.db = DB('test.db')
        self.db.disconnect()
        # Ensure no stale file from a previously aborted test is present.
        os.unlink(self.db.db_file)
        # Create a fresh DB instance from scratch.
        self.db = DB('test.db')

    def tearDown(self) -> None:
        self.db.disconnect()
        os.unlink(self.db.db_file)

    def test_users_model(self):
        with patch('db_helper.db', self.db):
            u1 = User(None, 1, 'name', 'surname', 'middle', 'tok1')
            u2 = User(124, 1, 'name2', 'surname2', 'middle', 'tok2')
            u3 = User(125, 1, 'name3', 'surname3', 'middle', 'tok3')
            # u1upd updates u1 in place (same token, new chat_id and names).
            u1upd = User(12312, 1, 'name1', 'surname1', 'middle', 'tok1')
            u1_data = {
                'chat_id': 12312, 'type': 1, 'name': 'name1',
                'surname': 'surname1', 'middlename': 'middle',
                'token': 'tok1', 'id': 1
            }
            u2_data = {
                'chat_id': 124, 'type': 1, 'name': 'name2',
                'surname': 'surname2', 'middlename': 'middle',
                'token': 'tok2', 'id': 2
            }
            u3_data = {
                'chat_id': 125, 'type': 1, 'name': 'name3',
                'surname': 'surname3', 'middlename': 'middle',
                'token': 'tok3', 'id': 3
            }
            users = Users()
            assert [asdict(u) for u in users] == [u1_data, u2_data, u3_data]
            assert asdict(users.by_token['tok2']) == u2_data
            assert asdict(users.by_id[1]) == u1_data
            assert asdict(users.by_chat_id[12312]) == u1_data
            assert users.get_by_id(123) is None
            assert asdict(users.get_by_id(1)) == u1_data
            assert len(users) == 3

    def test_problems_model(self):
        with patch('db_helper.db', self.db):
            p1 = Problem(1, 1, 'а', 'Гы', 'текст', 0, 0, r'\d+', 'ЧИСЛО!',
                         123, 'check_int', 'ЛОЖЬ!', 'Крутяк')
            p2 = Problem(1, 1, 'б', 'Гы', 'текст', 0, 0, r'\d+', 'ЧИСЛО!',
                         123, 'check_int', 'ЛОЖЬ!', 'Крутяк')
            p3 = Problem(1, 2, '', 'Гы', 'текст', 0, 0, r'\d+', 'ЧИСЛО!',
                         123, 'check_int', 'ЛОЖЬ!', 'Крутяк')
            # p2upd updates p2 in place (same key, new prob_text).
            p2upd = Problem(1, 1, 'б', 'Гы', 'текст_upd', 0, 0, r'\d+',
                            'ЧИСЛО!', 123, 'check_int', 'ЛОЖЬ!', 'Крутяк')
            p1_data = {
                'lesson': 1, 'prob': 1, 'item': 'а', 'title': 'Гы',
                'prob_text': 'текст', 'prob_type': 0, 'ans_type': 0,
                'ans_validation': '\\d+', 'validation_error': 'ЧИСЛО!',
                'cor_ans': '123', 'cor_ans_checker': 'check_int',
                'wrong_ans': 'ЛОЖЬ!', 'congrat': 'Крутяк', 'id': 1
            }
            p2_data = {
                'lesson': 1, 'prob': 1, 'item': 'б', 'title': 'Гы',
                'prob_text': 'текст_upd', 'prob_type': 0, 'ans_type': 0,
                'ans_validation': '\\d+', 'validation_error': 'ЧИСЛО!',
                'cor_ans': '123', 'cor_ans_checker': 'check_int',
                'wrong_ans': 'ЛОЖЬ!', 'congrat': 'Крутяк', 'id': 2
            }
            p3_data = {
                'lesson': 1, 'prob': 2, 'item': '', 'title': 'Гы',
                'prob_text': 'текст', 'prob_type': 0, 'ans_type': 0,
                'ans_validation': '\\d+', 'validation_error': 'ЧИСЛО!',
                'cor_ans': '123', 'cor_ans_checker': 'check_int',
                'wrong_ans': 'ЛОЖЬ!', 'congrat': 'Крутяк', 'id': 3
            }
            problems = Problems()
            assert [asdict(p) for p in problems] == [p1_data, p2_data, p3_data]
            assert asdict(problems.by_id[1]) == p1_data
            assert asdict(problems.by_key[(1, 1, 'б')]) == p2_data
            assert asdict(problems.get_by_key(1, 1, 'б')) == p2_data
            assert problems.get_by_key(1, 1, 'бs') is None
            assert len(problems) == 3
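# Hedged usage note (the runner invocation and module name are assumptions):
# TestCase classes like ObjectRelationsMappingTest run under the standard
# unittest runner, e.g.:
#
#   python -m unittest test_db.ObjectRelationsMappingTest -v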
def new_binary(name, file):
    db = DB()
    binary = db.add(Binary(name))
    cpdest = os.path.join(FILE_UPLOAD_FOLDER, binary.filename + ".bin")
    os.system("cp {} {}".format(file, cpdest))
    hm.analyze_binary_thr(binary.id)
def new_model(name, desc, benign, zipfile):
    db = DB()
    model = db.add(Model(name, desc, benign))
    cpdest = os.path.join(FILE_UPLOAD_FOLDER, model.filename + ".zip")
    os.system("cp {} {}".format(zipfile, cpdest))
    hm.create_model_thr(model.id)
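# Hedged alternative sketch, not the author's code: shutil.copy performs the
# same copy as the os.system("cp ...") calls in new_binary()/new_model()
# without spawning a shell, so file names containing spaces or quotes cannot
# break the command line. copy_upload is a hypothetical helper name.
import shutil

def copy_upload(src, dest):
    # Copy file contents and permission bits from src to dest.
    shutil.copy(src, dest)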
cpdest = os.path.join(FILE_UPLOAD_FOLDER, m["zipfile"].split("/")[-1]) bin_path = os.path.join(FILE_UPLOAD_FOLDER, m["name"].replace(" ", "")) os.system("mkdir '{}'".format(bin_path)) os.system("cp '{}' '{}'".format(m["zipfile"], cpdest)) os.system("unzip '{}' -d '{}' > /dev/null".format(cpdest, bin_path)) bin_files = os.listdir(bin_path) for f in bin_files: try: new_binary(m["name"], os.path.join(bin_path, f)) except: pass os.system("rm -rf {}".format(bin_path)) os.system("rm {}".format(cpdest)) elif args.csv: db = DB() binary_names = db.get_binary_names() print("[+] {} binary types found".format(len(binary_names))) for bin_name in binary_names: binaries = db.get_binaries_by_name(bin_name[0]) print("[+] Writting to {}".format( os.path.join(args.csv, bin_name[0] + ".csv"))) with open(os.path.join(args.csv, bin_name[0] + ".csv"), "w") as wfile: writer = csv.writer(wfile, delimiter=",", quotechar='"', quoting=csv.QUOTE_MINIMAL) writer.writerow([bin_name[0]]) for bin in binaries: rt = []
def analyze_binary_thr(binary_id):
    db = DB()
    binary = db.get_binary_by_id(binary_id)
    models = db.get_available_models()
    job = db.add(Job("Analyzing binary " + binary.name))
    bin_path = os.path.join(FILE_UPLOAD_FOLDER, binary.filename + ".bin")
    diss_path = os.path.join(DISS_BINARY_FOLDER, binary.filename + ".json")
    file_hash = get_file_sha_hash(bin_path)
    db.binary_add_hash(binary.id, file_hash)
    Disassembler(bin_path).disassemble_to_file(diss_path)
    for i, m in enumerate(models):
        # Log progress as "current/total" (the original had the arguments swapped).
        db.job_add_log(
            job.id,
            "Testing against model {}, {}/{}".format(m.name, i + 1, len(models)))
        model_path = os.path.join(MODEL_FOLDER, m.filename)
        map_path = os.path.join(model_path, "map.json")
        model_file_path = os.path.join(model_path, "model.npz")
        rating = ModelComparator(diss_path, map_path,
                                 model_file_path).get_rating()
        db.add(Result(binary.id, m.id, rating))
    db.job_add_log(job.id, "Cleaning up the file mess")
    db.set_binary_analyzed(binary.id)
    os.system("rm {}".format(bin_path))
    db.set_job_ended(job.id)
def create_model_thr(model_id):
    db = DB()
    model = db.get_model_by_id(model_id)
    job = db.add(Job("Generating model " + model.name))
    bin_path = os.path.join(FILE_UPLOAD_FOLDER, model.filename)
    model_path = os.path.join(MODEL_FOLDER, model.filename)
    diss_path = os.path.join(model_path, "diss")
    map_path = os.path.join(model_path, "map.json")
    model_file_path = os.path.join(model_path, "model")
    os.system("mkdir {}".format(bin_path))
    os.system("mkdir {}".format(model_path))
    os.system("mkdir {}".format(diss_path))
    os.system("unzip {} -d {} > /dev/null".format(bin_path + ".zip", bin_path))
    bin_files = os.listdir(bin_path)
    for i, binf in enumerate(bin_files):
        # Log progress as "current/total" (the original had the arguments swapped).
        db.job_add_log(
            job.id, "Disassembling file {}/{}".format(i + 1, len(bin_files)))
        Disassembler(os.path.join(bin_path, binf)).disassemble_to_file(
            os.path.join(diss_path, str(i) + "-" + binf + ".json"))
    db.job_add_log(job.id, "Creating map")
    MapCreator(diss_path, map_path).create_map()
    db.job_add_log(job.id, "Generating model")
    ModelCreator(diss_path, map_path, model_file_path).create_model()
    db.job_add_log(job.id, "Model generated")
    db.set_model_analyzed(model.id)
    db.job_add_log(job.id, "Cleaning up the file mess")
    os.system("rm {}".format(bin_path + ".zip"))
    os.system("rm -rf {}".format(bin_path))
    db.set_job_ended(job.id)
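# Hedged dispatch sketch (an assumption from the *_thr naming, not confirmed
# by the source): functions like create_model_thr() and analyze_binary_thr()
# appear intended to run on worker threads so HTTP handlers can return
# immediately. A minimal dispatcher could look like:
import threading

def create_model(model_id):
    # Fire-and-forget worker; daemon=True keeps it from blocking shutdown.
    threading.Thread(target=create_model_thr, args=(model_id,),
                     daemon=True).start()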