def parse_yara(yara_rule: str) -> List[YaraRuleData]:
    """Parse a textual YARA ruleset into a list of YaraRuleData wrappers.

    Each rule is wrapped together with the dict of rules parsed so far,
    which is handed to the YaraRuleData constructor as it is built.
    Rules with duplicate names collapse to the last occurrence (dict keyed
    by rule name).
    """
    parsed = Yaramod().parse_string(yara_rule)
    by_name: Dict[str, YaraRuleData] = {}
    for parsed_rule in parsed.rules:
        wrapped = YaraRuleData(parsed_rule, by_name)
        by_name[wrapped.name] = wrapped
    return list(by_name.values())
def query(priority: str) -> Any:
    """Handle a YARA query request.

    Expects a JSON body with keys ``raw_yara`` (the rule text) and
    ``method`` (``"parse"``, ``"query"``-style, or ``"query_100"``).
    Exactly one rule must be supplied. For ``method == "parse"`` the parsed
    condition is returned directly; otherwise a job object is stored in
    redis under a random 12-character hash and pushed onto the
    ``queue-search`` list, and the job hash is returned.

    Returns a Flask JSON response; (response, 400) tuples on client errors.
    """
    req = request.get_json()
    raw_yara = req["raw_yara"]
    try:
        rules = Yaramod().parse_string(raw_yara).rules
    except Exception as e:
        # Bug fix: original message was "Yara rule parsing failed{e}" with
        # no separator; made consistent with the conversion error below.
        return jsonify({"error": f"Yara rule parsing failed: {e}"}), 400
    if not rules:
        return jsonify({"error": "No rule was specified."}), 400
    if len(rules) > 1:
        return jsonify({"error": "More than one rule specified!"}), 400
    rule = rules[0]
    author_meta = rule.get_meta_with_name("author")
    rule_author = author_meta.value.pure_text if author_meta else ""
    rule_name = rule.name
    try:
        rule_strings = {r_string.identifier: r_string for r_string in rule.strings}
        parsed = yara_traverse(rule.condition, rule_strings)
    except Exception as e:
        logging.exception("YaraParser failed")
        return jsonify({"error": f"Yara rule conversion failed: {e}"}), 400
    if req["method"] == "parse":
        return jsonify({"rule_name": rule_name, "parsed": parsed})
    # SystemRandom is the CSPRNG-backed generator, suitable for job tokens.
    job_hash = "".join(
        random.SystemRandom().choice(string.ascii_uppercase + string.digits)
        for _ in range(12)
    )
    job_obj = {
        "status": "new",
        "max_files": -1,  # -1 means unlimited; overridden for query_100
        "rule_name": rule_name,
        "rule_author": rule_author,
        "parsed": parsed,
        "raw_yara": raw_yara,
        "submitted": int(time.time()),
        "priority": priority,
    }
    if req["method"] == "query_100":
        job_obj.update({"max_files": 100})
    redis.hmset("job:" + job_hash, job_obj)
    redis.rpush("queue-search", job_hash)
    return jsonify({"query_hash": job_hash})
def parse_string(yara_string: str) -> str:
    """Parse a single-rule YARA ruleset and return its traversed condition.

    Returns the result of ``yara_traverse`` on the rule's condition, or the
    literal string ``"{}"`` when the traversal yields ``None``.

    Raises:
        ValueError: if the input does not contain exactly one rule.
    """
    yar = Yaramod()
    rules = yar.parse_string(yara_string)
    # Fix: the original used `assert`, which is stripped under `python -O`
    # and would let a multi-rule input slip through silently.
    if len(rules.rules) != 1:
        raise ValueError("Expected exactly one YARA rule")
    rule = rules.rules[0]
    # Fix: the loop variable was named `string`, shadowing the stdlib
    # `string` module used elsewhere in this file.
    rule_strings = {}
    for rule_string in rule.strings:
        rule_strings[rule_string.identifier] = rule_string
    result = yara_traverse(rule.condition, rule_strings)
    return result if result is not None else "{}"