def call_back(ch, method, properties, file_hash):
    """Cuckoo Processing call back function."""
    app = create_app()
    db.init_app(app)
    report = submit_file_to_cuckoo(file_hash)
    cuckoo_url = os.environ.get('CUCKOO_URL')
    report_list_ids = []
    url_list = []
    # Build the analysis URLs and report id list returned by Cuckoo.
    for items in report:
        url_list.append(str(cuckoo_url + "/analysis/" + str(items)))
        report_list_ids.append(str(items))
    with app.app_context():
        try:
            new_cuckoo = CuckooReports(url=str(url_list),
                                       md5_hash=file_hash.decode("utf-8"),
                                       modify_time=udatetime.utcnow(),
                                       report_ids=report_list_ids)
            db.session.add(new_cuckoo)
            db.session.commit()
            message_data = ujson.dumps(new_cuckoo.to_dict(), indent=2, sort_keys=True)
            # If the file hash is already known, notify the user who created the item.
            match_known_item = UNUM.query.filter_by(md5_hash=file_hash.decode("utf-8")).first()
            if match_known_item:
                cuckoo_notification = Message(sender_id=1,
                                              recipient_id=match_known_item.created_by,
                                              body=message_data)
                db.session.add(cuckoo_notification)
                db.session.commit()
        except Exception:
            error("Problem creating the cuckoo report")
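# Illustrative only: a minimal sketch of how a callback like the one above is typically wired to a
# RabbitMQ queue with pika 1.x. The queue name "cuckoo" and the RABBITMQ_SERVER variable are
# assumptions, not taken from this repo.
import os

import pika

connection = pika.BlockingConnection(
    pika.ConnectionParameters(host=os.environ.get("RABBITMQ_SERVER", "localhost")))
channel = connection.channel()
channel.queue_declare(queue="cuckoo", durable=True)  # assumed queue name
# pika delivers (channel, method, properties, body); here the body is the file hash as bytes.
channel.basic_consume(queue="cuckoo", on_message_callback=call_back, auto_ack=True)
channel.start_consuming()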
def call_back(ch, method, properties, md5_hash):
    """Main function to process documents with mmbot."""
    upload_path = os.environ.get('FILE_FOLDER')
    # Run the mmbot prediction against the uploaded file, which is stored under its md5 hash.
    result = mmb.mmb_predict(str(upload_path + "/" + md5_hash.decode('utf-8')), datatype='filepath')
    json_result = mmb.mmb_prediction_to_json(result)
    if json_result:
        app = create_app()
        db.init_app(app)
        with app.app_context():
            # Store the extracted VBA features and the prediction for this file hash.
            new_mmbot = MmBotTable(
                vba_lang_features=str(json_result[0]["vba_lang_features"]),
                group_access=2,
                vba_avg_param_per_func=str(json_result[0]["vba_avg_param_per_func"]),
                vba_cnt_comment_loc_ratio=str(json_result[0]["vba_cnt_comment_loc_ratio"]),
                vba_cnt_comments=str(json_result[0]["vba_cnt_comments"]),
                vba_cnt_func_loc_ratio=str(json_result[0]["vba_cnt_func_loc_ratio"]),
                vba_cnt_functions=str(json_result[0]["vba_cnt_functions"]),
                vba_cnt_loc=str(json_result[0]["vba_cnt_loc"]),
                vba_entropy_chars=str(json_result[0]["vba_entropy_chars"]),
                vba_entropy_func_names=str(json_result[0]["vba_entropy_func_names"]),
                vba_entropy_words=str(json_result[0]["vba_entropy_words"]),
                vba_mean_loc_per_func=str(json_result[0]["vba_mean_loc_per_func"]),
                function_names=str(json_result[0]["function_names"]),
                prediction=str(json_result[0]["prediction"]),
                confidence=str(json_result[0]["confidence"]),
                md5_hash=md5_hash.decode('utf-8'))
            db.session.add(new_mmbot)
            db.session.commit()
            # Notify the user who uploaded the file with the mmbot results.
            upload_file = UNUM.query.filter_by(md5_hash=md5_hash.decode('utf-8')).first()
            message_data = ujson.dumps(new_mmbot.to_dict(), indent=2, sort_keys=True)
            msg = Message(sender_id=1, recipient_id=upload_file.created_by, body=message_data)
            db.session.add(msg)
            db.session.commit()
            uploaded_user = User.query.filter_by(id=upload_file.created_by).first()
            uploaded_user.add_notification('unread_message_count', uploaded_user.new_messages())
            db.session.commit()
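# Illustrative only: the module-level `mmb` object used above is assumed to be created roughly as
# shown below with the MaliciousMacroBot package; the init arguments are an assumption, not taken
# from this repo.
from mmbot import MaliciousMacroBot

mmb = MaliciousMacroBot()
mmb.mmb_init_model(modelRebuild=False)  # load the pre-trained model shipped with mmbot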
def call_back(ch, method, properties, report_id):
    """IDS Processing call back function."""
    app = create_app()
    db.init_app(app)
    rules_dir = os.environ.get('IDS_RULE_DIR') or "aucr_app/plugins/IDS_Plugin/rules/"
    logs_dir = os.environ.get('IDS_LOGS_DIR') or "aucr_app/plugins/IDS_Plugin/logs/"
    with app.app_context():
        ids_report = IDSRules.query.filter_by(id=report_id.decode('utf-8')).first()
        ids_rules_file = current_app.mongo.db.aucr.find_one({"filename": ids_report.ids_plugin_list_name})
        # Write the rule file from storage to disk, then run Suricata against the uploaded files.
        # Example invocation: suricata -v -k none -c suricata.yml -S signatures.rules -r pcap/test.pcap
        with open(rules_dir + str(ids_report.ids_plugin_list_name), 'w') as test_signature:
            test_signature.write(ids_rules_file["fileobj"])
        args = ["suricata",
                "-c", os.environ.get('SURICATA_CONFIG'),
                "-k", "none",
                "-S", rules_dir + str(ids_report.ids_plugin_list_name),
                "-r", os.environ.get('FILE_FOLDER'),
                "-l", logs_dir]
        subprocess.check_call(args)
        # Index every Suricata eve.json event into Elasticsearch.
        with open(str(logs_dir + "eve.json"), 'r') as eve_json:
            raw_data = eve_json.readlines()
        for item in raw_data:
            data = ujson.loads(item)
            flat_data_dictionary = flatten_dictionary(data)
            flat_data_dictionary["report"]["process_time"] = udatetime.utcnow()
            # TODO create and use bulk index to ES for better performance.
            index_data_to_es("ids_suricata", flat_data_dictionary["report"])
        shutil.rmtree(logs_dir)  # TODO upload result data to object storage.
        os.mkdir(logs_dir)
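# Illustrative only: `index_data_to_es` is defined elsewhere in the project; a minimal sketch of an
# equivalent helper using the elasticsearch 7.x Python client is shown below. The ES_HOST variable
# and the client version are assumptions, not taken from this repo.
import os

from elasticsearch import Elasticsearch

es_client = Elasticsearch(os.environ.get("ES_HOST", "http://localhost:9200"))


def index_data_to_es(index_name, document):
    """Index a single flattened Suricata event into the given Elasticsearch index."""
    es_client.index(index=index_name, body=document)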
def call_back(ch, method, properties, report_id):
    """Yara Processing call back function."""
    app = create_app()
    db.init_app(app)
    with app.app_context():
        yara_report = YaraRules.query.filter_by(id=report_id.decode('utf-8')).first()
        yara_matches = test_yara(yara_report)
        for item in yara_matches:
            # Only record matches for files that are already known in the UNUM table.
            match_known_item = UNUM.query.filter_by(md5_hash=item).first()
            if match_known_item:
                match_known_classification = Classification.query.filter_by(
                    id=match_known_item.classification).first()
                new_yara_result = YaraRuleResults(
                    yara_list_id=yara_report.id,
                    matches=match_known_item.md5_hash,
                    file_matches=match_known_item.id,
                    file_string_matches=yara_matches[item]["strings"],
                    file_classification=match_known_classification.classification,
                    run_time=udatetime.utcnow())
                db.session.add(new_yara_result)
                db.session.commit()
                # Notify the rule owner about the new match.
                message_data = ujson.dumps(new_yara_result.to_dict(), indent=2, sort_keys=True)
                yara_notification = Message(sender_id=1,
                                            recipient_id=yara_report.created_by,
                                            body=message_data)
                db.session.add(yara_notification)
                db.session.commit()
def test_yara(yara_report):
    """Compile the report's Yara rules and scan the upload folder, returning a dict of matches."""
    yara_matches = {}
    try:
        scanner = yara.compile(source=yara_report.yara_rules)
        file_dir = os.environ.get('FILE_FOLDER')
        scan(scanner, file_dir, yara_matches)
        return yara_matches
    except Exception as e:
        logging.warning("Not a valid Yara File " + str(e))
        app = create_app()
        db.init_app(app)
        with app.app_context():
            # Notify every member of the rule's group that the rule failed to compile.
            group_ids = Group.query.filter_by(groups_id=yara_report.group_access).all()
            for player in group_ids:
                yara_notification = Message(
                    sender_id=1,
                    recipient_id=player.username_id,
                    body=("Not a valid Yara File ID:" + str(yara_report.id) + " Error:" + str(e)))
                db.session.add(yara_notification)
                db.session.commit()
        return {}
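# Illustrative only: `scan` is defined elsewhere in the plugin; a minimal sketch of what it could
# look like with yara-python is shown below. The result layout (md5 hash -> {"strings": ...}) is
# inferred from how call_back reads yara_matches[item]["strings"] above and is an assumption.
import hashlib
import os


def scan(scanner, file_dir, yara_matches):
    """Run a compiled yara scanner over every file in file_dir and collect matches keyed by md5."""
    for file_name in os.listdir(file_dir):
        file_path = os.path.join(file_dir, file_name)
        if not os.path.isfile(file_path):
            continue
        matches = scanner.match(file_path)
        if matches:
            with open(file_path, "rb") as file_object:
                file_md5 = hashlib.md5(file_object.read()).hexdigest()
            yara_matches[file_md5] = {"strings": str([match.strings for match in matches])}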
# coding=utf-8
"""Build the classification choices list used by the UNUM plugin."""
from sqlalchemy.exc import ProgrammingError
from yaml_info.yamlinfo import YamlInfo

from aucr_app import db, create_app
from aucr_app.plugins.unum.models import Classification

app = create_app()
db.init_app(app)

CLASSIFICATION_AVAILABLE_CHOICES = None
with app.app_context():
    count = 0
    items_available_choices_list = []
    try:
        classification_data = Classification.query.all()
    except ProgrammingError:
        # Fall back to the bundled YAML defaults when the table does not exist yet.
        classification_data = YamlInfo("aucr_app/plugins/unum/classification.yml", "none", "none").get()
    # Build (value, label) tuples for each classification.
    for items in classification_data:
        count += 1
        new_list = (str(count), items)
        items_available_choices_list.append(new_list)
    CLASSIFICATION_AVAILABLE_CHOICES = items_available_choices_list
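# Illustrative only: a minimal sketch of how CLASSIFICATION_AVAILABLE_CHOICES is typically consumed
# by a WTForms SelectField; the form and field names below are assumptions, not taken from this repo.
from flask_wtf import FlaskForm
from wtforms import SelectField, SubmitField


class ClassificationForm(FlaskForm):
    """Hypothetical example form using the generated (value, label) choice tuples."""

    classification = SelectField("Classification", choices=CLASSIFICATION_AVAILABLE_CHOICES)
    submit = SubmitField("Save")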