def is_malware(filename): ret = [] if not os.path.exists("rules_compiled/malware"): os.mkdir("rules_compiled/malware") for n in os.listdir("rules/malware/"): if not os.path.isdir(n): try: rule = yara.compile("rules/malware/" + n) rule.save("rules_compiled/malware/" + n) rule = yara.load("rules_compiled/malware/" + n) m = rule.match(filename) if m: ret += m except: pass # internal fatal error or warning else: pass else: print "use compiled file" for n in os.listdir("rules_compiled/malware/"): try: rule = yara.load("rules_compiled/malware/" + n) m = rule.match(filename) if m: ret += m except: print "yara internal error" return ret
def is_custom_rules(filename):
    """Match *filename* against the user rules in custom/customrules/.

    First run: compile each rule source, cache it under
    custom/customrulesed/ and return after that single pass.
    Later runs: load the cached compiled rules instead.
    Returns a (possibly empty) list of yara matches.
    """
    ret = []
    if not os.path.exists("custom/customrulesed/"):
        os.mkdir("custom/customrulesed/")
        for n in os.listdir("custom/customrules/"):
            try:
                rule = yara.compile("custom/customrules/" + n)
                rule.save("custom/customrulesed/" + n)  # cache the compiled form
                rule = yara.load("custom/customrulesed/" + n)
                m = rule.match(filename)
                if m:
                    ret += m
            except:
                pass  # internal fatal error or warning
        return ret
    else:
        print "use compiled file"
        for n in os.listdir("custom/customrulesed/"):
            try:
                rule = yara.load("custom/customrulesed/" + n)
                m = rule.match(filename)
                if m:
                    ret += m
            except yara.Error, e:  # Python 2 except syntax; this block is Py2
                print "yara internal error", e.args[0]
    return ret
def is_file_packed(filename): i = 0 # 统计数量 ret = [] # 保存结果 # 没有编译过yara规则时 if not os.path.exists("rules_compiled/Packers"): os.mkdir("rules_compiled/Packers") for n in os.listdir("rules/Packers"): try: rule = yara.compile("rules/Packers/" + n) rule.save("rules_compiled/Packers/" + n) rule = yara.load("rules_compiled/Packers/" + n) m = rule.match(filename) if m: ret += m except: print "internal error" # 已经生成了yara规则的二进制文件 else: print "use compiled file" for n in os.listdir("rules_compiled/Packers/"): try: rule = yara.load("rules_compiled/Packers/" + n) m = rule.match(filename) if m: ret += m except: print "yara internal error" return ret
def is_your_target(filename, yara_file):
    """Match *filename* against a user-supplied rule file or rule directory.

    Rules are compiled and cached under rules_compiled/your_target/ and
    the first match is returned.  Returns None when nothing matches, or
    an error string when *yara_file* is neither a file nor a directory.
    """
    if not os.path.exists("rules_compiled/your_target"):
        os.mkdir("rules_compiled/your_target")
    if os.path.isdir(yara_file):
        for n in os.listdir(yara_file):
            if not os.path.isdir("./" + n):
                try:
                    rule = yara.compile(yara_file + "/" + n)
                    rule.save("rules_compiled/your_target/" + n)
                    # BUG FIX: reload from your_target/ -- this previously
                    # loaded from rules_compiled/malware/, the wrong cache,
                    # so every match attempt failed into the bare except.
                    rule = yara.load("rules_compiled/your_target/" + n)
                    m = rule.match(filename)
                    if m:
                        return m
                except:
                    pass  # best effort: skip rules that fail to compile/match
    elif os.path.isfile(yara_file):
        try:
            rule = yara.compile(yara_file)
            rule.save("rules_compiled/your_target/" + yara_file)
            # BUG FIX: same wrong cache directory as above.
            rule = yara.load("rules_compiled/your_target/" + yara_file)
            m = rule.match(filename)
            if m:
                return m
        except:
            pass  # best effort
    else:
        return "[x] Wrong type of input!"
def main(path_to_yara, path_to_ioc):
    """Start the MISP receiver and the scanner/connection-killer worker
    processes, then poll forever for saved_yara_file.yara and run
    collect() against it.  On any failure the pipeline retries itself
    by recursion.
    """
    count = 0  # retry counter used by the except branch below
    try:
        data = get_data_json()
        # NOTE(review): `val` is not defined in this function -- presumably a
        # module-level TLS-verification flag; confirm it exists at call time.
        misp_receiver = MISPReceiver(data, path_to_yara, path_to_ioc, misp_key=data["id"], misp_url=data["url"], misp_verify_cert=val, siem_mode=True, debugon=False)
        misp_receiver.start()
        time.sleep(5)  # give the receiver a head start before the workers
        p1 = Process(target=yaramem)
        p2 = Process(target=stop_conns)
        p1.start()
        p2.start()
        #misp_receiver.join()
        #p1.start()
        #p1.join()
        while True:
            if os.path.isfile("saved_yara_file.yara"):
                try:
                    print("WATE")
                    yara.load("saved_yara_file.yara")  # sanity-check the file loads
                    collect()
                    time.sleep(0.4)
                except Exception as err:
                    print("problem with yara : ", err)
            else:
                print("No sutch file")
        # NOTE(review): unreachable -- the `while True` above never breaks.
        print("SCANNING END")
    except:
        # NOTE(review): `count` is reset to 0 on every (recursive) call, so
        # this branch always sees count == 1 and recurses; "Total system
        # fail" can never be reached, and repeated failures end in a
        # RecursionError.  Confirm the intended retry limit.
        count += 1
        if count != 2:
            main(path_to_yara, path_to_ioc)
        else:
            main_sys_log.error("Total system fail")
def run(self):
    """Collect yara rule sets from the configured directories and run
    them against the analyzed file, appending one result dict per match
    (match strings capped at 20 entries) to self.result.
    """
    ruleset: List[Tuple[str, yara.Rules]] = []
    for rulepath in self.directories_with_rules:
        # An "index.yar"/"index.yas" file lets a directory select which
        # rules run; without one, every .yar/.yara/.yas file is used.
        if not os.path.isdir(rulepath):
            continue
        if os.path.isfile(rulepath + "/index.yas"):
            ruleset.append((rulepath, yara.load(rulepath + "/index.yas")))
        elif os.path.isfile(rulepath + "/index.yar"):
            compiled = yara.compile(
                rulepath + "/index.yar",
                externals={"filename": self.filename},
            )
            ruleset.append((rulepath, compiled))
        else:
            for entry in os.listdir(rulepath):
                candidate = f"{rulepath}/{entry}"
                if not os.path.isfile(candidate):
                    continue
                if candidate.endswith((".yar", ".yara")):
                    compiled = yara.compile(
                        candidate,
                        externals={"filename": self.filename},
                    )
                    ruleset.append((candidate, compiled))
                elif candidate.endswith(".yas"):
                    ruleset.append((candidate, yara.load(candidate)))
    if not ruleset:
        raise AnalyzerRunException("there are no yara rules installed")
    for path, rule in ruleset:
        for match in self._validated_matches(rule):
            # limited to 20 strings reasons because it could be a very long list
            self.result.append({
                "match": str(match),
                "strings": str(match.strings[:20]) if match else "",
                "tags": match.tags,
                "meta": match.meta,
                "path": path,
            })
    return self.result
def __init__(self, input_files, timeout=30):
    """Gather and sort the files to scan, load the compiled ruleset that
    ships next to this module, and remember the scan timeout (seconds)."""
    self.files = sorted(self.collect_files(input_files))
    here = os.path.dirname(os.path.realpath(__file__))
    self.rules = yara.load(os.path.join(here, 'rules/rules.yarc'))
    self.timeout = timeout
def lambda_handler(event, context):
    r''' handles the image files and directs it to ghiro for digital forensics'''
    # Pull the triggering object's bucket/key out of the S3 event record.
    bucket = event['Records'][0]['s3']['bucket']['name']
    srcbucket = s3.Bucket(bucket)
    # Python 2 urllib API; keys arrive URL-encoded in S3 events.
    key = urllib.unquote_plus(
        event['Records'][0]['s3']['object']['key'].encode('utf8'))
    # NOTE(review): toolbucket, rule_file, pwd, dstbucket, s3, s3_client and
    # insertTodb are module-level names -- confirm they are configured.
    try:
        # Fetch and load the precompiled ruleset shipped in the tooling bucket.
        toolbucket.download_file(rule_file, pwd + rule_file)
        rules = yara.load(pwd + rule_file)
    except Exception as e:
        print(
            'Error getting rules from bucket. Make sure they exist and your bucket is in the same region as this function.'
        )
        raise e
    try:
        # Download the triggering object locally and scan it.
        srcbucket.download_file(key, pwd + key)
        m = rules.match(pwd + key)
    except Exception as e:
        print(
            'Error getting object {} from bucket {}. Make sure they exist and your bucket is in the same region as this function.'
            .format(key, bucket))
        raise e
    # if known bad file add to db for pruning
    if len(m) > 0:
        insertTodb(pwd + key)
    else:
        # Clean file: move it to the destination bucket (copy, then delete).
        s3_client.copy_object(Bucket=dstbucket,
                              CopySource={
                                  'Bucket': bucket,
                                  'Key': key
                              },
                              Key=key)
        s3_client.delete_object(Bucket=bucket, Key=key)
def is_malware(filename):
    """ Run all the rules in the malware directory and save the match on the
    array to show them in the console.  Returns the list of per-rule match
    lists, or None when nothing matched. """
    if not os.path.exists("yara_files/rules_compiled/malware"):
        os.mkdir("yara_files/rules_compiled/malware")
    rules_match = []
    for rule_file in os.listdir("yara_files/rules/malware/"):
        # BUG FIX: the directory check used "./" + name, i.e. relative to the
        # CWD; check the entry inside the rules directory instead.
        if not os.path.isdir("yara_files/rules/malware/" + rule_file):
            try:
                rule = yara.compile("yara_files/rules/malware/" + rule_file)
                rule.save("yara_files/rules_compiled/malware/" + rule_file)
                rule = yara.load("yara_files/rules_compiled/malware/" +
                                 rule_file)
                rule_match = rule.match(filename)
                if rule_match:
                    rules_match.append(rule_match)
            except:
                pass  # internal fatal error or warning
    if rules_match:
        return rules_match
def __init__(self, rules_file: str) -> None:
    """Initialize the analyzer from a prebuilt binary YARA rules file.

    Args:
        rules_file: Path to the binary rules file.
    """
    compiled_ruleset = yara.load(rules_file)
    self._rules = compiled_ruleset
def _custom_scan(self, payload, ruleset):
    """Scan *payload* with a caller-supplied ruleset.

    *ruleset* is either a StringIO-like object holding rule source text,
    or a filesystem path pointing at compiled (preferred) or source
    rules.  Failures are logged and the scan is skipped.
    """
    rules = None
    if hasattr(ruleset, 'getvalue'):
        # In-memory (StringIO-like) ruleset: must be rule source text.
        try:
            rules = yara.compile(source=ruleset.getvalue())
        except yara.Error:
            self.log.error(
                "Unable to compile ruleset from passed StringIO object")
    else:
        # Filesystem path: try a precompiled ruleset first, then fall
        # back to compiling it as source.
        try:
            rules = yara.load(filepath=ruleset)
        except yara.Error:
            try:
                rules = yara.compile(filepath=ruleset)
            except yara.Error:
                self.log.error(
                    "Unable to load custom ruleset from filepath {}".
                    format(ruleset))
    if rules:
        rules.match(data=payload, timeout=60, callback=self._scan_callback)
def process_yara_options(cls, config: Dict[str, Any]):
    """Build a yara.Rules object from the plugin's configuration options.

    Exactly one source is honoured, in priority order: ``yara_rules``
    (an inline pattern wrapped into a single generated rule),
    ``yara_source`` (raw rule text), ``yara_file`` (rule source file),
    ``yara_compiled_file`` (precompiled rules).  Returns None (after
    logging an error) when none of them is supplied.
    """
    rules = None
    if config.get('yara_rules', None) is not None:
        rule = config['yara_rules']
        if rule[0] not in ["{", "/"]:
            # Not a hex string ({...}) or a regex (/.../): quote as text.
            rule = f'"{rule}"'
        if config.get('case', False):
            rule += " nocase"
        if config.get('wide', False):
            rule += " wide ascii"
        # Wrap the pattern into a minimal single-rule source.
        rules = yara.compile(
            sources={
                'n': f'rule r1 {{strings: $a = {rule} condition: $a}}'
            })
    elif config.get('yara_source', None) is not None:
        rules = yara.compile(source=config['yara_source'])
    elif config.get('yara_file', None) is not None:
        rules = yara.compile(file=resources.ResourceAccessor().open(
            config['yara_file'], "rb"))
    elif config.get('yara_compiled_file', None) is not None:
        rules = yara.load(file=resources.ResourceAccessor().open(
            config['yara_compiled_file'], "rb"))
    else:
        vollog.error("No yara rules, nor yara rules file were specified")
    return rules
def load_directory(self, rulepath):
    """Load every rule file directly under *rulepath* into self.ruleset.

    .yar/.yara/.rule files are compiled (with the analyzed filename as
    an external), .yas files are loaded as precompiled rules; rule files
    with syntax errors are logged and skipped.  Subdirectories are
    descended into only when self.recursive is set.
    """
    # if you do not have an index file,...
    # .. just extract all the rules in the .yar files
    for f in os.listdir(rulepath):
        full_path = f"{rulepath}/{f}"
        if os.path.isfile(full_path):
            try:
                if (full_path.endswith(".yar")
                        or full_path.endswith(".yara")
                        or full_path.endswith(".rule")):
                    self.ruleset.append((
                        full_path,
                        yara.compile(
                            full_path,
                            externals={"filename": self.filename},
                        ),
                    ))
                elif full_path.endswith(".yas"):
                    self.ruleset.append((full_path, yara.load(full_path)))
            except yara.SyntaxError as e:
                # A broken rule file must not abort the whole directory.
                logger.warning(f"Rule {full_path} "
                               f"has a syntax error {e}")
                continue
        else:
            if self.recursive:
                logger.info(f"Loading directory {full_path}")
                self.load_directory(full_path)
def is_antiVM(self, filename):
    """Match *filename* against the Antidebug_AntiVM yara rule set.

    Each rule source is compiled, cached into the compiled-rules
    directory, reloaded from the cache and matched; the first non-empty
    match is returned immediately.  Returns a falsy value (None or an
    empty match list) when nothing matches or the rule directories are
    not configured.
    """
    hits = None
    # Both the source and the compiled rules directories must be set.
    if not (self.yara_rules_dir and self.yara_compiled_rules_dir):
        return hits
    # NOTE(review): seeding os.path.join with os.path.sep forces an
    # absolute path and would mangle a relative rules dir -- confirm the
    # configured directories are absolute.
    src_dir = os.path.join(os.path.sep, self.yara_rules_dir,
                           'Antidebug_AntiVM')
    dst_dir = os.path.join(os.path.sep, self.yara_compiled_rules_dir,
                           'Antidebug_AntiVM')
    if not os.path.exists(dst_dir):
        os.mkdir(dst_dir)
    for rule_name in os.listdir(src_dir):
        compiled = yara.compile(os.path.join(os.path.sep, src_dir, rule_name))
        cached_path = os.path.join(os.path.sep, dst_dir, rule_name)
        compiled.save(cached_path)
        compiled = yara.load(cached_path)
        hits = compiled.match(filename)
        if hits:
            return hits
    return hits
def is_CVErules(self, filename):
    """Match *filename* against the CVE_Rules yara rule set (known
    exploitable-vulnerability payload signatures).

    Each rule source is compiled, cached, reloaded and matched; the
    first non-empty match is returned immediately.  Returns a falsy
    value when nothing matches or the rule dirs are not configured.
    """
    hits = None
    if not (self.yara_rules_dir and self.yara_compiled_rules_dir):
        return hits
    # NOTE(review): seeding os.path.join with os.path.sep forces an
    # absolute path -- confirm the configured directories are absolute.
    src_dir = os.path.join(os.path.sep, self.yara_rules_dir, 'CVE_Rules')
    dst_dir = os.path.join(os.path.sep, self.yara_compiled_rules_dir,
                           'CVE_Rules')
    if not os.path.exists(dst_dir):
        os.mkdir(dst_dir)
    for rule_name in os.listdir(src_dir):
        compiled = yara.compile(os.path.join(os.path.sep, src_dir, rule_name))
        cached_path = os.path.join(os.path.sep, dst_dir, rule_name)
        compiled.save(cached_path)
        compiled = yara.load(cached_path)
        hits = compiled.match(filename)
        if hits:
            return hits
    return hits
def yara_scan(self):
    """Scan self.filepath with the configured yara rule sets and store
    the results on self.yara_scan_result as dicts of the shape::

        {'namespace': ..., 'rule': ..., 'meta': ...}

    Rule locations come from static_conf["yara_uncompiled_rules"] (a
    filepaths mapping, compiled on the fly) and
    static_conf["yara_compiled_rules"] (a directory of precompiled
    rule files).  Any failure is logged and leaves partial results.
    """
    try:
        self.yara_scan_result = []
        uncompiled = static_conf["yara_uncompiled_rules"]
        compiled_dir = static_conf["yara_compiled_rules"]
        rule_sets = []
        # Compile all source rules in a single pass.
        if uncompiled:
            rule_sets.append(yara.compile(filepaths=uncompiled))
        # Load every precompiled file from the configured directory.
        if compiled_dir:
            for entry in os.listdir(compiled_dir):
                rule_sets.append(yara.load(os.path.join(compiled_dir, entry)))
        # Run each rule set against the target and flatten the matches.
        for rules in rule_sets:
            for match in rules.match(self.filepath):
                self.yara_scan_result.append({
                    "namespace": match.namespace,
                    "rule": match.rule,
                    "meta": match.meta,
                })
    except Exception as e:
        self.logger.exception('%s: %s' % (Exception, e))
def is_webshell(self, filename):
    """Match *filename* against the Webshells yara rule set.

    Each rule source is compiled, cached, reloaded and matched; the
    first non-empty match is returned immediately.  Returns a falsy
    value when nothing matches or the rule dirs are not configured.
    """
    hits = None
    if not (self.yara_rules_dir and self.yara_compiled_rules_dir):
        return hits
    # NOTE(review): seeding os.path.join with os.path.sep forces an
    # absolute path -- confirm the configured directories are absolute.
    src_dir = os.path.join(os.path.sep, self.yara_rules_dir, 'Webshells')
    dst_dir = os.path.join(os.path.sep, self.yara_compiled_rules_dir,
                           'Webshells')
    if not os.path.exists(dst_dir):
        os.mkdir(dst_dir)
    for rule_name in os.listdir(src_dir):
        compiled = yara.compile(os.path.join(os.path.sep, src_dir, rule_name))
        cached_path = os.path.join(os.path.sep, dst_dir, rule_name)
        compiled.save(cached_path)
        compiled = yara.load(cached_path)
        hits = compiled.match(filename)
        if hits:
            return hits
    return hits
def is_exploitkit(self, filename):
    """Match *filename* against the Exploit-Kits yara rule set.

    Each rule source is compiled, cached, reloaded and matched; the
    first non-empty match is returned immediately.  Returns a falsy
    value when nothing matches or the rule dirs are not configured.
    """
    hits = None
    if not (self.yara_rules_dir and self.yara_compiled_rules_dir):
        return hits
    # NOTE(review): seeding os.path.join with os.path.sep forces an
    # absolute path -- confirm the configured directories are absolute.
    src_dir = os.path.join(os.path.sep, self.yara_rules_dir, 'Exploit-Kits')
    dst_dir = os.path.join(os.path.sep, self.yara_compiled_rules_dir,
                           'Exploit-Kits')
    if not os.path.exists(dst_dir):
        os.mkdir(dst_dir)
    for rule_name in os.listdir(src_dir):
        compiled = yara.compile(os.path.join(os.path.sep, src_dir, rule_name))
        cached_path = os.path.join(os.path.sep, dst_dir, rule_name)
        compiled.save(cached_path)
        compiled = yara.load(cached_path)
        hits = compiled.match(filename)
        if hits:
            return hits
    return hits
def is_cryptofeatures(self, filename):
    """Match *filename* against the Crypto yara rule set (encryption
    routines, CRC16/CRC32, hash functions, and similar features).

    Each rule source is compiled, cached, reloaded and matched; the
    first non-empty match is returned immediately.  Returns a falsy
    value when nothing matches or the rule dirs are not configured.
    """
    hits = None
    if not (self.yara_rules_dir and self.yara_compiled_rules_dir):
        return hits
    # NOTE(review): seeding os.path.join with os.path.sep forces an
    # absolute path -- confirm the configured directories are absolute.
    src_dir = os.path.join(os.path.sep, self.yara_rules_dir, 'Crypto')
    dst_dir = os.path.join(os.path.sep, self.yara_compiled_rules_dir,
                           'Crypto')
    if not os.path.exists(dst_dir):
        os.mkdir(dst_dir)
    for rule_name in os.listdir(src_dir):
        compiled = yara.compile(os.path.join(os.path.sep, src_dir, rule_name))
        cached_path = os.path.join(os.path.sep, dst_dir, rule_name)
        compiled.save(cached_path)
        compiled = yara.load(cached_path)
        hits = compiled.match(filename)
        if hits:
            return hits
    return hits
def yara_on_demand(rule, theBuffer, externalVars={}, maxBytes=0):
    """Lazily compile/load *rule*, cache it in yara_on_demand_rules, and
    match it against *theBuffer* (truncated to *maxBytes* when given).

    NOTE(review): mutable default argument `externalVars={}` -- harmless
    only if neither this code nor yara ever mutates it; confirm.
    This block is Python 2 (uses the builtin `buffer`).
    """
    try:
        logging.debug("util: doing on demand yara scan with rule: %s" % rule)
        logging.debug("util: externalVars: %s" % str(externalVars))
        # Cache miss: compile source rules, or load precompiled ones.
        if rule not in yara_on_demand_rules:
            if not is_compiled(rule):
                logging.debug("util: compiling %s for lazy load" % rule)
                yara_on_demand_rules[rule] = yara.compile(
                    rule, externals=externalVars)
            else:
                yara_on_demand_rules[rule] = yara.load(rule)
        # 'EMPTY' placeholder: yara refuses to scan a zero-length buffer.
        if maxBytes and len(theBuffer) > maxBytes:
            matches = yara_on_demand_rules[rule].match(
                data=buffer(theBuffer, 0, maxBytes) or 'EMPTY',
                externals=externalVars)
        else:
            matches = yara_on_demand_rules[rule].match(data=theBuffer
                                                       or 'EMPTY',
                                                       externals=externalVars)
        return matches
    except (QuitScanException, GlobalScanTimeoutError,
            GlobalModuleTimeoutError):
        # Scan-control exceptions must propagate untouched.
        raise
    except:
        logging.exception("util: yara on demand scan failed with rule %s" %
                          rule)
        raise
def collect():
    """Gather candidate file paths (registry run keys, process list),
    scan each with the saved compiled ruleset, and on a match kill any
    process running that binary and archive the file with its matches.

    Relies on module-level state: paths, hKeys, hKeys_run, get_ps,
    get_paths, find_procs_by_name, get_stats_zip.
    """
    with open('paths.json', "r") as p:
        json_paths = json.load(p)
        p.close()  # NOTE(review): redundant -- the with-block closes p
    get_ps()
    # Collect paths from both the Run keys and the other registry hives.
    first = ("RUN", hKeys_run)
    second = ("NO_RUN", hKeys)
    get_paths(first[0], first[1])
    get_paths(second[0], second[1])
    find_procs_by_name()
    counter = 0  # NOTE(review): never used after this -- confirm intent
    compiled_yara = yara.load("saved_yara_file.yara")
    for path in paths:
        try:
            with open(path, "rb", buffering=2000000) as f:
                print(path)
                matches = compiled_yara.match(data=f.read())
                f.close()
                if matches:
                    print("ITS A MATCHHHHH", path)
                    # Kill every running process whose image name matches.
                    base_name = os.path.basename(path)
                    for p in psutil.process_iter():
                        if base_name == p.name():
                            p.kill()
                    get_stats_zip(path, matches)
        except Exception as err:
            # Unreadable/locked files must not abort the sweep.
            print(err)
            continue
def is_packed(self, filename):
    """Match *filename* against the Packers yara rule set (UPX and
    similar packer signatures).

    Each rule source is compiled, cached, reloaded and matched; the
    first non-empty match is returned immediately.  Returns a falsy
    value when nothing matches or the rule dirs are not configured.
    """
    hits = None
    if not (self.yara_rules_dir and self.yara_compiled_rules_dir):
        return hits
    # NOTE(review): seeding os.path.join with os.path.sep forces an
    # absolute path -- confirm the configured directories are absolute.
    src_dir = os.path.join(os.path.sep, self.yara_rules_dir, 'Packers')
    dst_dir = os.path.join(os.path.sep, self.yara_compiled_rules_dir,
                           'Packers')
    if not os.path.exists(dst_dir):
        os.mkdir(dst_dir)
    for rule_name in os.listdir(src_dir):
        compiled = yara.compile(os.path.join(os.path.sep, src_dir, rule_name))
        cached_path = os.path.join(os.path.sep, dst_dir, rule_name)
        compiled.save(cached_path)
        compiled = yara.load(cached_path)
        hits = compiled.match(filename)
        if hits:
            return hits
    return hits
def yara_info(self):
    """Match the target file against the precompiled ruleset configured
    at self.cfg.yara_rules_data and record every matching rule name in
    self.info["yara_info"] as {"ID": ..., "str": rule_name} entries.

    Example match object shape::

        {'tags': [...], 'matches': True, 'namespace': 'default',
         'rule': 'my_rule', 'meta': {}, 'strings': [...]}
    """
    file_path = self.cfg.target_abs_path
    yara_info = []
    # Only scan when the compiled rules file actually exists.
    if os.path.exists(self.cfg.yara_rules_data):
        rules = yara.load(self.cfg.yara_rules_data)
        matches = rules.match(file_path)
        self.log.info(matches)
        if len(matches):
            for item in matches:
                self.log.info(type(item))
                node={}
                node["ID"] = metrics.S_ID_YARA_INFO
                node["str"] = item.rule
                yara_info.append(node)
    self.log.info(yara_info)
    self.info["yara_info"] = yara_info
def yara_info(self):
    """Match the target file against the precompiled ruleset configured
    at self.cfg.yara_rules_data and record every matching rule name in
    self.info["yara_info"] as {"ID": ..., "str": rule_name} entries.

    Example match object shape::

        {'tags': [...], 'matches': True, 'namespace': 'default',
         'rule': 'my_rule', 'meta': {}, 'strings': [...]}
    """
    target = self.cfg.target_abs_path
    findings = []
    # Only scan when the compiled rules file actually exists.
    if os.path.exists(self.cfg.yara_rules_data):
        ruleset = yara.load(self.cfg.yara_rules_data)
        hits = ruleset.match(target)
        self.log.info(hits)
        if len(hits):
            for hit in hits:
                self.log.info(type(hit))
                findings.append({
                    "ID": metrics.S_ID_YARA_INFO,
                    "str": hit.rule,
                })
    self.log.info(findings)
    self.info["yara_info"] = findings
def is_malicious_document(self, filename):
    """Match *filename* against the Malicious_Documents yara rule set
    (malicious MS-Word and similar document signatures).

    Each rule source is compiled, cached, reloaded and matched; the
    first non-empty match is returned immediately.  Returns a falsy
    value when nothing matches or the rule dirs are not configured.
    """
    hits = None
    if not (self.yara_rules_dir and self.yara_compiled_rules_dir):
        return hits
    # NOTE(review): seeding os.path.join with os.path.sep forces an
    # absolute path -- confirm the configured directories are absolute.
    src_dir = os.path.join(os.path.sep, self.yara_rules_dir,
                           'Malicious_Documents')
    dst_dir = os.path.join(os.path.sep, self.yara_compiled_rules_dir,
                           'Malicious_Documents')
    if not os.path.exists(dst_dir):
        os.mkdir(dst_dir)
    for rule_name in os.listdir(src_dir):
        compiled = yara.compile(os.path.join(os.path.sep, src_dir, rule_name))
        cached_path = os.path.join(os.path.sep, dst_dir, rule_name)
        compiled.save(cached_path)
        compiled = yara.load(cached_path)
        hits = compiled.match(filename)
        if hits:
            return hits
    return hits
def scan(app_home, path_to_app):
    """Extract the classes* members of an APK/zip into a temp dir, match
    every extracted file against the compiled ruleset, and write the
    results as JSON to <app_home>/apkid.json.

    Fixed: the temporary extraction directory is now removed in a
    ``finally`` block, so it no longer leaks when an exception occurs
    mid-scan.
    """
    rules_path = os.path.join(setting.get_yara_rules_dir(), "rules.yarc")
    rules = yara.load(rules_path)
    results = {}
    try:
        zf = zipfile.ZipFile(path_to_app, 'r')
        # Only the dex payloads (classes.dex, classes2.dex, ...) are scanned.
        target_member = filter(lambda n: n.startswith('classes'),
                               zf.namelist())
        td = tempfile.mkdtemp()
        try:
            zf.extractall(td, members=target_member)
            zf.close()
            for file_type, file_path in collect_files(td):
                # Key results by "<apk path>!<entry inside the apk>".
                entry_name = file_path.replace('{}/'.format(td), '')
                key_path = '{}!{}'.format(path_to_app, entry_name)
                match_dic = do_yara(file_path, rules)
                if len(match_dic) > 0:
                    results[key_path] = match_dic
            results_json = json.dumps(results, sort_keys=True, indent=4)
            with open(app_home + "/apkid.json", "w") as f:
                f.write(results_json)
        finally:
            # Always clean up the extraction dir, even on failure.
            shutil.rmtree(td)
    except Exception as e:
        print(e)
        print("yara规则匹配失败。")
def test_compilation(self):
    """Ensure all real YARA rules compile correctly."""
    compile_rules.compile_rules('compiled_yara_rules.bin')
    loaded = yara.load('compiled_yara_rules.bin')
    file_count = sum(1 for _ in compile_rules._find_yara_files())
    rule_count = sum(1 for _ in loaded)
    # The number of compiled YARA rules should be >= the number of YARA rule files.
    self.assertGreaterEqual(rule_count, file_count)
def applyara(): copy(FILE_TO_SCAN, normpath(join(dirname(__file__), 'yara'))) rules = yara.load(normpath(join(dirname(__file__), 'my_compiled_rules'))) prevcount, zipcount = 0, 0 while True: for root, subdir, files in walk( normpath(join(dirname(__file__), 'yara'))): for filename in files: file_path = join(root, filename) file_type = magic.from_file(file_path, mime=True) if file_type == 'application/zip': zip_ref = zipfile.ZipFile(file_path, 'r') zip_ref.extractall(FILE_TO_SCAN) zip_ref.close() remove(file_path) zipcount += 1 if prevcount is zipcount: break else: prevcount += 1 for root, subdir, files in walk(normpath(join(dirname(__file__), 'yara'))): for filename in files: file_path = join(root, filename) file_type = magic.from_file(file_path, mime=True) if file_type != 'application/zip': print 'tried to match' matches = rules.match(file_path, callback=mycallback, which_callbacks=yara.CALLBACK_MATCHES)
def hitClamavRule(self): if os.path.exists("./checkrulethread/clamav/clamav_compiled.yar"): rule = yara.load("./checkrulethread/clamav/clamav_compiled.yar") m = rule.match(self.filename) if m: print m elif os.path.exists("./checkrulethread/clamav/clamav.yara"): print "generate rule from clamav2yara" rule = yara.compile("./checkrulethread/clamav/clamav.yara") rule.save("./checkrulethread/clamav/clamav_compiled.yar") rule = yara.load("./checkrulethread/clamav/clamav_compiled.yar") m = rule.match(self.filename) if m: print m else: print "generate yara rule failed"
def __init__(self, compiled_rules, output_formatter=None, timeout=30):
    """Load the compiled ruleset at *compiled_rules* and remember the
    formatter and per-scan timeout (seconds).

    Raises:
        YaraLoadException: when the rules file cannot be loaded.
    """
    try:
        loaded = yara.load(compiled_rules)
    except Exception as e:
        raise YaraLoadException(e) from e
    self.rules = loaded
    self.output_formatter = output_formatter
    self.timeout = timeout
def check_bitcoin(filepath):
    """Return 1 if any compiled Bitcoin rule matches *filepath*, else 0.

    Bug fixed: the original returned 0 from inside the loop's first
    iteration, so only a single rule file was ever consulted.
    """
    for n in os.listdir("rules/Bitcoin"):
        rule = yara.load("rules_compiled/Bitcoin/" + n)
        if rule.match(filepath):
            return 1
    return 0
def get_rules_from_workbench(self):
    """Fetch the single 'yara_rules' sample set from workbench and load
    it as compiled yara rules.  Returns None when no set exists; exits
    the process when more than one set is found (ambiguous)."""
    samples = self.workbench.generate_sample_set('yara_rules')
    if not samples:
        return None
    elif len(samples)>1:
        print 'Error: More than one yara rule set!'
        exit(1)
    else:
        # NOTE(review): get_sample is being *indexed*, not called -- if it
        # is a method this raises TypeError at runtime; confirm whether
        # `get_sample(samples[0])` was intended.
        return yara.load(self.workbench.get_sample[samples[0]])
def __init__(self, input_files, timeout, output_json):
    """Gather and sort the files to scan, load the compiled ruleset that
    ships next to this module, and remember the scan timeout (seconds)
    and the JSON-output flag."""
    self.files = sorted(self.collect_files(input_files))
    here = os.path.dirname(os.path.realpath(__file__))
    self.rules = yara.load(os.path.join(here, 'rules/rules.yarc'))
    self.timeout = timeout
    self.output_json = output_json
def __init__(self):
    """Read the configured rule paths and build self.ruleset: 'yar'
    sources are compiled, 'yas' files are loaded as precompiled rules;
    for a directory, an index.yas (preferred) or index.yar is used.
    """
    Analyzer.__init__(self)
    self.rulepaths = self.get_param('config.rules', None,
                                    'No paths for rules provided.')
    # A single path may be given as a bare string.
    if isinstance(self.rulepaths, str):
        self.rulepaths = [self.rulepaths]
    self.ruleset = []
    for rulepath in self.rulepaths:
        if os.path.isfile(rulepath):
            # Idiom fix: suffix tests via endswith instead of manual slicing.
            if rulepath.endswith('yar'):
                self.ruleset.append(yara.compile(rulepath))
            elif rulepath.endswith('yas'):
                self.ruleset.append(yara.load(rulepath))
        elif os.path.isdir(rulepath):
            if os.path.isfile(rulepath + '/index.yas'):
                self.ruleset.append(yara.load(rulepath + '/index.yas'))
            elif os.path.isfile(rulepath + '/index.yar'):
                self.ruleset.append(yara.compile(rulepath + '/index.yar'))
def process(self, filename, rules=None):
    """Match filename[0] (with filename[1] as the 'filename' external)
    against either a passed compiled-rules blob or the preloaded engine.
    Returns a list of {"rule": rule_name} dicts."""
    externals = {'filename': filename[1]}
    if rules:
        # Wrap the raw compiled-rules blob in a stream so yara can load it.
        ruleBuff = StringIO()
        ruleBuff.write(rules)
        ruleBuff.seek(0)
        engine = yara.load(file=ruleBuff)
        results = engine.match(filename[0], externals=externals)
    else:
        results = self.YaraEngine.match(filename[0], externals=externals)
    return [{"rule": match.rule} for match in results]
def testStringIO(self):
    """Round-trip a compiled ruleset through in-memory streams: load from
    one stream, save to a second, reload from it and re-match."""
    # Python 2/3: StringIO.StringIO on Py2; the name lookup raises on
    # Py3, where io.BytesIO is used instead.
    try:
        yac1 = StringIO.StringIO(YAC_FILE)
        yac2 = StringIO.StringIO()
    except:
        yac1 = io.BytesIO(YAC_FILE)
        yac2 = io.BytesIO()
    r = yara.load(yac1)
    r.save(yac2)
    m = r.match(data="dummy")
    self.assertTrue(len(m) == 1)
    # Reload what was just saved and make sure it still matches.
    yac2.seek(0)
    r = yara.load(yac2)
    m = r.match(data="dummy")
    self.assertTrue(len(m) == 1)
def process(self, tup, rules=None):
    """Match *tup* against either a passed compiled-rules blob or the
    preloaded engine.  Returns [{"rules": rule_name}, ...] on success,
    or the raised exception object on failure."""
    try:
        if rules:
            # Wrap the raw compiled-rules blob in a stream for yara.load.
            ruleBuff = StringIO()
            ruleBuff.write(rules)
            ruleBuff.seek(0)
            rules = yara.load(file=ruleBuff)
            results = rules.match(tup)
        else:
            results = self.YaraEngine.match(tup)
        # BUG FIX: yara match objects expose .rule (singular); .rules raised
        # AttributeError, which the except below returned as the "result".
        # The "rules" dict key is kept so downstream consumers are unaffected.
        results2 = list(map(lambda x: {"rules": x.rule}, results))
        return results2
    except Exception as e:
        return e
def testStringIO(self):
    """Save a freshly compiled always-true rule into an in-memory stream,
    reload it from that stream, and verify it still matches."""
    # Python 2/3: StringIO.StringIO on Py2; the name lookup raises on
    # Py3, where io.BytesIO is used instead.
    try:
        stream = StringIO.StringIO()
    except:
        stream = io.BytesIO()
    compiled = yara.compile(source='rule test { condition: true }')
    compiled.save(stream)
    stream.seek(0)
    reloaded = yara.load(stream)
    matches = reloaded.match(data="dummy")
    self.assertTrue(len(matches) == 1)
def process(self, filename, rules=None):
    """Match filename[0] (with filename[1] as the 'filename' external)
    against a passed rules blob or the preloaded engine.

    *rules*, when given, is a bytes blob first treated as compiled
    rules; on a yara.Error it is recompiled from source.  Returns
    [{"rule": rule_name}, ...] or the raised exception object.
    """
    try:
        if rules:
            ruleBuff = BytesIO()
            ruleBuff.write(rules)
            ruleBuff.seek(0)
            rules = yara.load(file=ruleBuff)
            results = rules.match(filename[0], externals={"filename": filename[1]})
        else:
            results = self.YaraEngine.match(filename[0], externals={"filename": filename[1]})
        results2 = list(map(lambda x: {"rule": x.rule}, results))
        return results2
    except yara.Error:
        # Rules are uncompiled -> compile them
        # NOTE(review): if this branch is reached when *rules* was not
        # passed (i.e. the engine itself raised yara.Error), rules is None
        # and .decode() fails; likewise it fails after yara.load already
        # rebound rules to a Rules object.  Confirm the intended scope.
        rules = yara.compile(source=rules.decode("latin-1"))
        results = rules.match(filename[0], externals={"filename": filename[1]})
        results2 = list(map(lambda x: {"rule": x.rule}, results))
        return results2
    except Exception as e:
        return e
def yara_on_demand(rule, theBuffer, externalVars={}, maxBytes=0):
    """Lazily compile/load *rule*, cache it in yara_on_demand_rules, and
    match it against *theBuffer* (truncated to *maxBytes* when given).

    NOTE(review): mutable default argument `externalVars={}` -- harmless
    only if neither this code nor yara ever mutates it; confirm.
    This block is Python 2 (uses the builtin `buffer`).
    """
    try:
        logging.debug("util: doing on demand yara scan with rule: %s" % rule)
        logging.debug("util: externalVars: %s" % str(externalVars))
        # Cache miss: compile source rules, or load precompiled ones.
        if rule not in yara_on_demand_rules:
            if not is_compiled(rule):
                logging.debug("util: compiling %s for lazy load" % rule)
                yara_on_demand_rules[rule] = yara.compile(rule, externals=externalVars)
            else:
                yara_on_demand_rules[rule] = yara.load(rule)
        # 'EMPTY' placeholder: yara refuses to scan a zero-length buffer.
        if maxBytes and len(theBuffer) > maxBytes:
            matches = yara_on_demand_rules[rule].match(data=buffer(theBuffer, 0, maxBytes) or 'EMPTY', externals=externalVars)
        else:
            matches = yara_on_demand_rules[rule].match(data=theBuffer or 'EMPTY', externals=externalVars)
        return matches
    except (QuitScanException, GlobalScanTimeoutError, GlobalModuleTimeoutError):
        # Scan-control exceptions must propagate untouched.
        raise
    except:
        logging.exception("util: yara on demand scan failed with rule %s" % (rule))
        raise
def init(self, plugins_path, verbose=False):
    """Initialize the plugin engine: load the packed Adware yara ruleset
    from <plugins_path>/adware.y01.  Returns 0 on success, -1 when the
    yara module is unavailable.  (Comments translated from Korean.)
    """
    # Initialize the plugin engine
    self.verbose = verbose
    self.sig_num_yara = 0
    # If the yara module is missing, fail engine loading
    if not LOAD_YARA:
        return -1
    # Load the Adware yara rules
    try:
        b = open(os.path.join(plugins_path, 'adware.y01'), 'rb').read()
        # Signature count is stored as a little-endian uint32 at offset 4.
        self.sig_num_yara = kavutil.get_uint32(b, 4)
        # 'KAVS' magic: payload after the 12-byte header is zlib-compressed
        # compiled yara rules, loaded via an in-memory stream.
        if b[:4] == 'KAVS':
            t = zlib.decompress(b[12:])
            buff = StringIO.StringIO(t)
            self.adware_gen = yara.load(file=buff)
    except:
        # Best effort: scanning proceeds without the adware ruleset.
        self.adware_gen = None
    return 0  # Plugin engine initialized successfully
def load():
    """Return the module-level compiled ruleset, loading it from
    RULES_PATH on first use (memoized in the RULES global)."""
    global RULES
    if RULES:
        return RULES
    RULES = yara.load(RULES_PATH)
    return RULES
def get_rules():
    """Load and return the compiled ruleset that ships next to this module."""
    here = os.path.dirname(os.path.realpath(__file__))
    return yara.load(os.path.join(here, 'rules/rules.yarc'))
def YARA_LOAD(fn):
    """Load the compiled yara rules file *fn* and convert the result
    through YARA_DICT."""
    return YARA_DICT(yara.load(fn))
def YaraEngine(self):
    """Build the yara engine from the compiled rules file passed as the
    first command-line argument."""
    rules_path = sys.argv[1]
    return yara.load(rules_path)
def __init__(self, rule_files, processing_mode='raw', compiled=False, **kwargs):
    """
    Default initializer.

    Keyword arguments:
    rule_files -- (List) Filepaths to yara rule files.
                  (Ex. ['/path/to/file1', '/path/to/file2'])
    processing_mode -- (String) Mode used in processing data.
                       Allowed options include; fixed_buffer,
                       sliding_window, and raw. Default is raw mode.
    compiled -- (Boolean) If True, treat the provided rule file as compiled.

    Optional arguments:
    "fixed_buffer" processing mode:
        Data will be processed by yara in fixed sized buffers.
        buffer_size -- (Integer) Amount of data to buffer before
                       processing in bytes. Default is 1024 bytes.
    "sliding_window" processing mode:
        Data will be processed by yara in fixed sized buffers, but it
        is possible for buffers to "overlap" by controlling the buffer
        increment.
        buffer_size -- (Integer) Amount of data to process in bytes.
                       Default is 1024 bytes.
        window_step -- (Integer) Amount to increment window per chunk.
                       Default is 1 byte.
    """
    # Get handle to logger
    self.logger = logging.getLogger('yaraprocessor')
    # Validate all file names to ensure they exist and can be read
    for f in rule_files:
        if os.path.isfile(f):
            try:
                with open(f):
                    pass
            except IOError:
                raise IOError((errno.EACCES, 'Cannot open/read file.', f))
        else:
            raise IOError((errno.ENOENT, 'Cannot find file.', f))
    if not compiled:
        self._rule_files = self._prepare_rules(rule_files)
        # Try to load the rules into yara
        try:
            self._rules = yara.compile(filepaths=self._rule_files)
        except yara.SyntaxError as e:
            err = ('Rule syntax error. If using compiled rules, you must '
                   'pass the "compiled" argument. Original error: %s' % e)
            raise ProcessorException(err)
        except yara.Error:
            raise
    else:  # rules are compiled
        try:
            # yara.load only accepts a single file
            assert(len(rule_files) == 1)
        except AssertionError:
            err = ('Compiled rules must be compiled to one file. Loading '
                   'from compiled rules does not support multiple rule files.')
            raise ProcessorException(err)
        self._rule_files = rule_files[0]
        try:
            self._rules = yara.load(self._rule_files)
        except yara.Error as e:
            err = ('Generic error loading compiled rules. '
                   'Original error: %s' % e)
            raise ProcessorException(err)
    # Validate that the processing mode is supported
    self._allowed_modes = ['raw', 'fixed_buffer', 'sliding_window']
    if not processing_mode.lower() in self._allowed_modes:
        raise ProcessorException("%s is not a supported processing mode." \
                                 % processing_mode)
    self._processing_mode = processing_mode
    # Optional arguments with defaults
    self._buffer_size = kwargs.get('buffer_size', 1024)
    self._window_step = kwargs.get('window_step', 1)
    # Set window_step to buffer size when processing in fixed buffer mode
    # This makes the analysis code simpler
    if self._processing_mode == 'fixed_buffer':
        self._window_step = self._buffer_size
    # Attribute used to hold data and results to be processed
    self._raw_results = []
    self._formatted_results = []
    self.data = ''
    # Private variables for buffering and window processing
    self._current = ''
    self._next = None
    self._window_index = 0
    self._offset = 0
def YaraEngine(self):
    """Load the compiled yara rules from the path configured under
    Config["yara_rules"]["local_path"]."""
    rules_path = Config["yara_rules"]["local_path"]
    return yara.load(rules_path)
def YaraEngine(self):
    """Load the compiled yara rules from the path configured at
    Config.yara_rules.local_path."""
    rules_path = Config.yara_rules.local_path
    return yara.load(rules_path)