def get_file():
    """Download a single stored sample as a password-protected ('codex') zip.

    Reads a sha1 from request.query.file_hash. Returns a Bottle static_file
    response on success, or a JSON error body with an HTTP error status.
    """
    tmp_folder = "/tmp/mass_download"
    subprocess.call(["mkdir", "-p", tmp_folder])
    file_hash = clean_hash(request.query.file_hash)
    if len(file_hash) != 40:  # only sha1 is accepted here
        # fix: Bottle sets the HTTP status via response.status, not response.code
        response.status = 400
        return jsonize({'message': 'Invalid hash format (use sha1)'})
    pc = PackageController()
    res = pc.searchFile(file_hash)
    if res is None:
        response.status = 404
        return jsonize({'message': 'File not found in the database'})
    if res == 1:  # sample exists but is blocked for download
        response.status = 400
        return jsonize({'message': 'File not available for downloading'})
    res = pc.getFile(file_hash)
    zip_name = os.path.join(tmp_folder, str(file_hash) + '.zip')
    file_name = os.path.join(tmp_folder, str(file_hash) + '.codex')
    with open(file_name, "wb") as fd:  # fix: close the handle even on error
        fd.write(res)
    # -j junk paths, -u update existing zip, -P protect with password 'codex'
    subprocess.call(["zip", "-ju", "-P", "codex", zip_name, file_name])
    return static_file(str(file_hash) + ".zip", root=tmp_folder, download=True)
def db_inconsistency(file_hash):
    # Check the three stores (meta index, sample grid, version records) for
    # a hash and report which piece is missing.
    # Returns: 0 consistent (or undecidable), 1 sample without meta,
    #          2 meta without sample, 3 meta+sample without version.
    # Raises ValueError for a malformed hash.
    if(not valid_hash(file_hash)):
        raise ValueError("db_inconsistency invalid hash")
    pc = PackageController()
    v = VersionController()
    file_id = get_file_id(file_hash)
    if file_id is not None:  # meta exists
        file_bin = pc.getFile(file_id)
        if file_bin is not None:  # sample exists
            version = v.searchVersion(file_id)
            if version is not None:
                return 0  # ok
            else:  # version does not exist
                logging.info(
                    "inconsistency: meta and sample exists. Version does not")
                return 3
        else:  # has meta but not sample
            logging.info("inconsistency: meta exists, sample does not")
            return 2
    else:  # does not have meta
        if len(file_hash) == 64:
            return 0  # cant search in grid by sha256
        if len(file_hash) == 40:
            file_bin = pc.getFile(file_hash)
        else:  # md5
            sha1 = pc.md5_to_sha1(file_hash)
            if sha1 is None:
                return 0  # does not have meta or sample
            # NOTE(review): looks up the grid with the md5 (file_hash), not the
            # resolved sha1 — confirm pc.getFile accepts md5, else this may be a bug.
            file_bin = pc.getFile(file_hash)
        if file_bin is None:
            return 0
        else:
            logging.info("inconsistency: does not have meta. has sample")
            return 1
def get_file():
    """Download a single stored sample as a password-protected ('codex') zip.

    Reads a sha1 from request.query.file_hash. Returns a Bottle static_file
    response on success, or a JSON error body with an HTTP error status.
    """
    tmp_folder = "/tmp/mass_download"
    subprocess.call(["mkdir", "-p", tmp_folder])
    file_hash = clean_hash(request.query.file_hash)
    if len(file_hash) != 40:  # only sha1 is accepted here
        # fix: Bottle sets the HTTP status via response.status, not response.code
        response.status = 400
        return jsonize({'message': 'Invalid hash format (use sha1)'})
    pc = PackageController()
    res = pc.searchFile(file_hash)
    if res is None:
        response.status = 404
        return jsonize({'message': 'File not found in the database'})
    if res == 1:  # sample exists but is blocked for download
        response.status = 400
        return jsonize({'message': 'File not available for downloading'})
    res = pc.getFile(file_hash)
    zip_name = os.path.join(tmp_folder, str(file_hash) + '.zip')
    file_name = os.path.join(tmp_folder, str(file_hash) + '.codex')
    with open(file_name, "wb") as fd:  # fix: close the handle even on error
        fd.write(res)
    # -j junk paths, -u update existing zip, -P protect with password 'codex'
    subprocess.call(["zip", "-ju", "-P", "codex", zip_name, file_name])
    return static_file(str(file_hash) + ".zip", root=tmp_folder, download=True)
def yara():
    """Generate a yara rule from one or more stored samples.

    Expects sha1 hashes in the file_hash[] form field; dumps the samples
    into a working directory, runs the external generator script and
    returns the cleaned rule text as JSON.
    """
    tmp_folder = "/tmp/yara_working_dir"
    subprocess.call(["mkdir", "-p", tmp_folder])
    hashes = request.forms.dict.get("file_hash[]")
    if hashes is None:
        return jsonize({'message': 'Error. no file selected'})
    # A single hash doubles as the working id; several get a random id.
    random_id = hashes[0] if len(hashes) == 1 else id_generator()
    folder_path = os.path.join(tmp_folder, random_id)
    subprocess.call(["mkdir", "-p", folder_path])
    yara_output_file = os.path.join(tmp_folder, random_id + ".txt")
    for file_hash in hashes:
        if len(file_hash) != 40:  # only sha1 accepted
            # fix: Bottle uses response.status, not response.code
            response.status = 400
            return jsonize({'message': 'Invalid hash format (use sha1)'})
        pc = PackageController()
        res = pc.searchFile(file_hash)
        if res is None:
            response.status = 404  # needs a better fix
            return jsonize({'message': 'File not found in the database'})
        res = pc.getFile(file_hash)
        file_name = os.path.join(folder_path, str(file_hash) + ".codex")
        if not os.path.isfile(file_name):
            with open(file_name, "wb") as fd:  # fix: close handle promptly
                fd.write(res)
    yara_cli_output = call_with_output([
        "python", env['yara-script2'], "--opcodes", "--excludegood",
        "--nosimple", "-z", "5", "-m", folder_path, "-o", yara_output_file])
    # The generator sometimes prints the rule instead of writing the file.
    if not os.path.isfile(yara_output_file):
        with open(yara_output_file, 'w+') as fp:
            fp.write(yara_cli_output)
    with open(yara_output_file, 'r') as yara_output_fp:  # fix: close handle
        output_cleaned = yara_output_fp.read().replace(
            "[!] \nRule Name Can Not Contain Spaces or Begin With A Non Alpha Character",
            "")
    output_cleaned = re.sub(
        r"\[\+\] Generating Yara Rule \/tmp\/yara_working_dir\/[A-Z0-9]+\.txt from files located in: /tmp/yara_working_dir/[A-Z0-9]+/",
        "", output_cleaned)
    output_cleaned = re.sub(
        r"rule /tmp/yara_working_dir/([a-zA-Z0-9]+).txt", r"rule \1",
        output_cleaned)
    return jsonize({"message": output_cleaned})
def yara():
    """Generate a yara rule from one or more stored samples.

    Expects sha1 hashes in the file_hash[] form field; dumps the samples
    into a working directory, runs the external generator script and
    returns the cleaned rule text as JSON.
    """
    tmp_folder = "/tmp/yara_working_dir"
    subprocess.call(["mkdir", "-p", tmp_folder])
    hashes = request.forms.dict.get("file_hash[]")
    if hashes is None:
        return jsonize({'message': 'Error. no file selected'})
    # A single hash doubles as the working id; several get a random id.
    random_id = hashes[0] if len(hashes) == 1 else id_generator()
    folder_path = os.path.join(tmp_folder, random_id)
    subprocess.call(["mkdir", "-p", folder_path])
    yara_output_file = os.path.join(tmp_folder, random_id + ".txt")
    for file_hash in hashes:
        if len(file_hash) != 40:  # only sha1 accepted
            response.status = 400
            return jsonize({'message': 'Invalid hash format (use sha1)'})
        pc = PackageController()
        res = pc.searchFile(file_hash)
        if res is None:
            response.status = 404  # needs a better fix
            return jsonize({'message': 'File not found in the database'})
        res = pc.getFile(file_hash)
        file_name = os.path.join(folder_path, str(file_hash) + ".codex")
        if not os.path.isfile(file_name):
            with open(file_name, "wb") as fd:  # fix: close handle promptly
                fd.write(res)
    yara_cli_output = call_with_output([
        "python", envget('yara-script2'), "--opcodes", "--excludegood",
        "--nosimple", "-z", "5", "-m", folder_path, "-o", yara_output_file])
    # The generator sometimes prints the rule instead of writing the file.
    if not os.path.isfile(yara_output_file):
        with open(yara_output_file, 'w+') as fp:
            fp.write(yara_cli_output)
    with open(yara_output_file, 'r') as yara_output_fp:  # fix: close handle
        output_cleaned = yara_output_fp.read().replace(
            "[!] \nRule Name Can Not Contain Spaces or Begin With A Non Alpha Character",
            "")
    output_cleaned = re.sub(
        r"\[\+\] Generating Yara Rule \/tmp\/yara_working_dir\/[A-Z0-9]+\.txt from files located in: /tmp/yara_working_dir/[A-Z0-9]+/",
        "", output_cleaned)
    output_cleaned = re.sub(
        r"rule /tmp/yara_working_dir/([a-zA-Z0-9]+).txt", r"rule \1",
        output_cleaned)
    return jsonize({"message": output_cleaned})
def get_package_file():
    """Download a batch of samples as one password-protected zip.

    Hashes come from file_hash[] (list) or a newline-separated file_hash
    field. Samples that are missing or blocked are reported in a
    readme.txt placed inside the zip.
    """
    tmp_folder = "/tmp/mass_download"
    subprocess.call(["mkdir", "-p", tmp_folder])
    hashes = request.forms.dict.get("file_hash[]")
    if hashes is None:
        hashes = request.forms.get("file_hash").split("\n")
    if hashes is None:
        return jsonize({'message': 'Error. no file selected'})
    random_id = hashes[0] if len(hashes) == 1 else id_generator()
    folder_path = os.path.join(tmp_folder, random_id)
    subprocess.call(["mkdir", "-p", folder_path])
    zip_name = os.path.join(tmp_folder, random_id + ".zip")
    pc = PackageController()
    for file_hash in hashes:
        file_hash = clean_hash(file_hash.replace('\r', ''))
        # Resolve whatever hash type was given to a sha1 via the search index.
        data = "1=" + file_hash
        res = SearchModule.search_by_id(data, 1)
        if len(res) != 0:
            file_hash = res[0]["sha1"]
        res = pc.searchFile(file_hash)
        if res != 1 and res is not None:
            res = pc.getFile(file_hash)
            file_name = os.path.join(folder_path, str(file_hash) + ".codex")
            with open(file_name, "wb") as fd:  # fix: close handles deterministically
                fd.write(res)
        elif res == 1:  # present but blocked for download
            with open(os.path.join(folder_path, 'readme.txt'), 'a+') as fd:
                fd.write(str(file_hash) + " is not available to download.\n")
        elif res is None:
            with open(os.path.join(folder_path, 'readme.txt'), 'a+') as fd:
                fd.write(str(file_hash) + " not found.")
        else:
            print("Unknown res:" + str(res))  # fix: py3-compatible print call
    subprocess.call(["zip", "-P", "codex", "-jr", zip_name, folder_path])
    resp = static_file(str(random_id) + ".zip", root=tmp_folder, download=True)
    # http://johnculviner.com/jquery-file-download-plugin-for-ajax-like-feature-rich-file-downloads/
    resp.set_cookie('fileDownload', 'true')
    return resp
def get_package_file():
    """Download a batch of samples as one password-protected zip.

    Hashes come from file_hash[] (list) or a newline-separated file_hash
    field. Samples that are missing or blocked are reported in a
    readme.txt placed inside the zip.
    """
    tmp_folder = "/tmp/mass_download"
    subprocess.call(["mkdir", "-p", tmp_folder])
    hashes = request.forms.dict.get("file_hash[]")
    if hashes is None:
        hashes = request.forms.get("file_hash").split("\n")
    if hashes is None:
        return jsonize({'message': 'Error. no file selected'})
    random_id = hashes[0] if len(hashes) == 1 else id_generator()
    folder_path = os.path.join(tmp_folder, random_id)
    subprocess.call(["mkdir", "-p", folder_path])
    zip_name = os.path.join(tmp_folder, random_id + ".zip")
    pc = PackageController()
    for file_hash in hashes:
        file_hash = clean_hash(file_hash.replace('\r', ''))
        # Resolve whatever hash type was given to a sha1 via the search index.
        data = "1=" + file_hash
        res = SearchModule.search_by_id(data, 1)
        if len(res) != 0:
            file_hash = res[0]["sha1"]
        res = pc.searchFile(file_hash)
        if res != 1 and res is not None:
            res = pc.getFile(file_hash)
            file_name = os.path.join(folder_path, str(file_hash) + ".codex")
            with open(file_name, "wb") as fd:  # fix: close handles deterministically
                fd.write(res)
        elif res == 1:  # present but blocked for download
            with open(os.path.join(folder_path, 'readme.txt'), 'a+') as fd:
                fd.write(str(file_hash) + " is not available to download.\n")
        elif res is None:
            with open(os.path.join(folder_path, 'readme.txt'), 'a+') as fd:
                fd.write(str(file_hash) + " not found.")
        else:
            print("Unknown res:" + str(res))  # fix: py3-compatible print call
    subprocess.call(["zip", "-P", "codex", "-jr", zip_name, folder_path])
    resp = static_file(str(random_id) + ".zip", root=tmp_folder, download=True)
    # http://johnculviner.com/jquery-file-download-plugin-for-ajax-like-feature-rich-file-downloads/
    resp.set_cookie('fileDownload', 'true')
    return resp
def getBinary(self):
    """Return the sample's raw bytes, loading them lazily at most once.

    Returns None if a previous load attempt already failed.
    """
    if self.binary is not None:  # fix: identity comparison with None
        return self.binary
    if not self.binary_try_to_load:  # a previous load already failed
        return None
    self.binary_try_to_load = False  # attempt the load only once
    if self.pc is None:
        # Temporary PackageController so we don't leave a mongo cursor open.
        tmp_pc = PackageController()
        self.binary = tmp_pc.getFile(self.sample_id)
        return self.binary
    self.binary = self.pc.getFile(self.sample_id)
    return self.binary
def process_file(file_hash):
    """Fetch the sample for file_hash and launch a full analysis.

    Returns 0 on success, None when the sample is not in the package store.
    """
    pc = PackageController()
    res = pc.getFile(file_hash)
    if res is None:  # fix: identity comparison with None
        return None
    sample = Sample()
    sample.setID(file_hash)
    sample.setBinary(res)
    sample.setStorageVersion({})  # empty version map forces a full run
    lc = Launcher()
    lc.launchAnalysisByID(sample)
    return 0
def process_file(file_hash):
    """Fetch the sample for file_hash and launch a full analysis.

    Returns 0 on success, None when the sample is not in the package store.
    """
    pc = PackageController()
    res = pc.getFile(file_hash)
    if res is None:  # fix: identity comparison with None
        return None
    sample = Sample()
    sample.setID(file_hash)
    sample.setBinary(res)
    sample.setStorageVersion({})  # empty version map forces a full run
    lc = Launcher()
    lc.launchAnalysisByID(sample)
    return 0
def save_file_from_vt(hash_id):
    """Download hash_id from VirusTotal and store it if we don't have it.

    Returns the sha1 of the sample, or None when VT has nothing.
    """
    downloaded_file = download_from_virus_total(hash_id)
    if downloaded_file is None:  # fix: identity comparison with None
        return None
    data_bin = downloaded_file
    file_id = hashlib.sha1(data_bin).hexdigest()
    pc = PackageController()
    res = pc.searchFile(file_id)
    if res is None:
        # File not found. Add it to the package (flagged as VT-sourced).
        pc.append(file_id, data_bin, True)
        print("Added: %s" % (file_id,))
    return file_id
def save_file_from_vt(hash_id):
    """Download hash_id from VirusTotal and store it if we don't have it.

    Returns the sha1 of the sample, or None when VT has nothing.
    """
    downloaded_file = download_from_virus_total(hash_id)
    if downloaded_file is None:  # fix: identity comparison with None
        return None
    data_bin = downloaded_file
    file_id = hashlib.sha1(data_bin).hexdigest()
    pc = PackageController()
    res = pc.searchFile(file_id)
    if res is None:
        # File not found. Add it to the package (flagged as VT-sourced).
        pc.append(file_id, data_bin, True)
        print("Added: %s" % (file_id,))
    return file_id
def getBinary(self):
    """Return the sample's raw bytes, loading them lazily at most once.

    Returns None if a previous load attempt already failed.
    """
    if self.binary is not None:  # fix: identity comparison with None
        return self.binary
    if not self.binary_try_to_load:  # a previous load already failed
        return None
    self.binary_try_to_load = False  # attempt the load only once
    if self.pc is None:
        # Temporary PackageController so we don't leave a mongo cursor open.
        tmp_pc = PackageController()
        self.binary = tmp_pc.getFile(self.sample_id)
        return self.binary
    self.binary = self.pc.getFile(self.sample_id)
    return self.binary
def add_file_from_vt(hash_id):
    """Download hash_id from VirusTotal and add it to the package store.

    Returns the sha1 of the sample, or None when VT has nothing.
    """
    # return None  # function disabled - remove this line to re-enable
    downloaded_file = download_from_virus_total(hash_id)
    if downloaded_file is None:  # fix: identity comparison with None
        return None
    data_bin = downloaded_file
    file_id = hashlib.sha1(data_bin).hexdigest()
    pc = PackageController()
    res = pc.searchFile(file_id)
    if res is None:
        # File not found. Add it to the package (flagged as VT-sourced).
        pc.append(file_id, data_bin, True)
    return file_id
def add_file_from_vt(hash_id):
    """Download hash_id from VirusTotal and add it to the package store.

    Returns the sha1 of the sample, or None when VT has nothing.
    """
    # return None  # function disabled - remove this line to re-enable
    downloaded_file = download_from_virus_total(hash_id)
    if downloaded_file is None:  # fix: identity comparison with None
        return None
    data_bin = downloaded_file
    file_id = hashlib.sha1(data_bin).hexdigest()
    pc = PackageController()
    res = pc.searchFile(file_id)
    if res is None:
        # File not found. Add it to the package (flagged as VT-sourced).
        pc.append(file_id, data_bin, True)
    return file_id
def load_to_mongo2(folder_path):
    """Bulk-load every file under folder_path into mongo and analyze it.

    Buffers files into RAM in batches (pausing when free RAM drops below
    30% once more than 100 files are buffered), then appends each new
    sample and launches its analysis. Returns a summary string.
    """
    pc = PackageController()
    ram = Ram()
    files = recursive_read(folder_path)
    if files is None:
        return "No files where found."
    count = 0
    already_loaded = 0
    time_start = datetime.datetime.now()
    uploaded = 0
    in_mem = 0
    lc = Launcher()
    while uploaded < len(files):
        loaded_ram_counter = 0
        data_vector = []
        print("loading files to memory")
        while in_mem < len(files):
            # fix: close each file handle (the original leaked them all)
            # NOTE(review): opened in text mode; binary samples likely need "rb" - confirm
            with open(files[in_mem], "r") as file_cursor:
                data_vector.append(file_cursor.read())
            in_mem = in_mem + 1
            loaded_ram_counter = loaded_ram_counter + 1
            # Only start checking RAM once 100+ files are buffered.
            if loaded_ram_counter > 100:
                if ram.free_percent() < 0.3:
                    print("Ram full")
                    break
        for data in data_vector:
            file_id = hashlib.sha1(data).hexdigest()
            print("loading to db: " + str(file_id))
            res = pc.searchFile(file_id)
            if res is None:  # fix: identity comparison with None
                pc.append(file_id, data)
                sample = Sample()
                sample.setID(file_id)
                sample.setBinary(data)
                sample.setStorageVersion({})
                count += 1
                lc.launchAnalysisByID(sample)
            else:
                already_loaded += 1
            uploaded = uploaded + 1
    result = str(already_loaded) + " were already loaded to mongo.\n"
    result += thetime(time_start, datetime.datetime.now(), count)
    print(result)
    return result
def last_uploaded():
    """Return metadata for the n most recently updated samples as JSON.

    Query parameter n must be a positive integer; errors are reported as
    JSON with an error code and HTTP 400.
    """
    number = request.query.get("n")
    if number is None:
        response.status = 400
        return jsonize({"error": 1, "error_message": "Parameter n is missing"})
    if not number.isdigit():
        response.status = 400
        return jsonize(
            {"error": 2, "error_message": "Parameter n must be a number"})
    if int(number) == 0:
        # fix: error 3 previously returned HTTP 200, unlike errors 1 and 2
        response.status = 400
        return jsonize({"error": 3,
                        "error_message": "Parameter n must be greater than zero."})
    pc = PackageController()
    lasts = pc.last_updated(int(number))
    for i in range(0, len(lasts)):
        # Convert datetime objects so they can be JSON-serialized.
        lasts[i] = change_date_to_str(lasts[i])
    return jsonize(lasts)
def upload_file(data_bin):
    """Store data_bin in the package, keyed by its sha1.

    Returns "ok" when added, "already exists" when present, or
    "virustotal" when the entry exists but is VT-blocked.
    """
    pc = PackageController()
    file_id = hashlib.sha1(data_bin).hexdigest()
    res = pc.searchFile(file_id)
    if res is None:  # fix: identity comparison with None
        # File not found. Add it to the package.
        pc.append(file_id, data_bin)
        print("Added: %s" % (file_id,))
        log_event("file added", str(file_id))
        return "ok"
    if res == 0:  # file already exists
        log_event("file already exists", str(file_id))
        return "already exists"
    # exists and is blocked by virustotal
    log_event("file already exists", str(file_id))
    return "virustotal"
def generic_process_hash(hash_str):
    """Normalize hash_str to a sha1 and (re)process the matching sample.

    Accepts sha1/sha256/md5; returns process_file's result, or None when
    the hash is invalid or cannot be resolved.
    """
    if hash_str is None:
        return None
    hash_str = clean_hash(hash_str)
    if not valid_hash(hash_str):
        return None
    length = len(hash_str)
    if length == 64:  # sha256 -> sha1 via the metadata index
        hash_str = get_file_id(hash_str)
    elif length == 32:  # md5 -> sha1 via the package store
        hash_str = PackageController().md5_to_sha1(hash_str)
    logging.debug("generic_process_hash-->sha1: " + str(hash_str))
    if hash_str is None:
        return None
    return process_file(hash_str)
def generic_process_hash(hash_str):
    """Resolve any supported hash (sha1/sha256/md5) to a sha1 and reprocess it.

    Returns process_file's result, or None for invalid/unresolvable input.
    """
    if hash_str is None:
        return None
    hash_str = clean_hash(hash_str)
    if not valid_hash(hash_str):
        return None
    if len(hash_str) == 64:
        # sha256: resolve through the metadata index.
        hash_str = get_file_id(hash_str)
    elif len(hash_str) == 32:
        # md5: resolve through the package store.
        pc = PackageController()
        hash_str = pc.md5_to_sha1(hash_str)
    logging.debug("generic_process_hash-->sha1: " + str(hash_str))
    return process_file(hash_str) if hash_str is not None else None
def upload_file(data_bin):
    """Store data_bin in the package, keyed by its sha1.

    Returns "ok" when added, "already exists" when present, or
    "virustotal" when the entry exists but is VT-blocked.
    """
    pc = PackageController()
    file_id = hashlib.sha1(data_bin).hexdigest()
    res = pc.searchFile(file_id)
    if res is None:  # fix: identity comparison with None
        # File not found. Add it to the package.
        pc.append(file_id, data_bin)
        print("Added: %s" % (file_id,))
        log_event("file added", str(file_id))
        return "ok"
    if res == 0:  # file already exists
        log_event("file already exists", str(file_id))
        return "already exists"
    # exists and is blocked by virustotal
    log_event("file already exists", str(file_id))
    return "virustotal"
def process_file(file_hash, force=False):
    """Fetch the sample for file_hash and launch its analysis.

    force=True clears the stored version info so every module re-runs.
    Returns 0 on success, None when file_hash is None or not stored.
    """
    if file_hash is None:
        return None
    print("process_file(" + str(file_hash) + ")")  # fix: py3-compatible print
    pc = PackageController()
    res = pc.getFile(file_hash)
    if res is None:  # fix: identity comparison with None
        return None
    sample = Sample()
    sample.setID(file_hash)
    sample.setBinary(res)
    if force:
        sample.setStorageVersion({})
    lc = Launcher()
    lc.launchAnalysisByID(sample)
    log_event("process", str(file_hash))
    return 0
def save_file_from_vt(hash_id):
    """Fetch hash_id from VirusTotal and store it, reporting a status dict.

    Returns {"status": ..., "hash": ...} where status is one of
    unknown / out_of_credits / not_found / added / inconsistency_found.
    """
    downloaded_file = download_from_virus_total(hash_id)
    if downloaded_file is None:  # fix: identity comparison with None
        return {"status": "unknown", "hash": None}
    if downloaded_file.get('status') == "out_of_credits":
        return {"status": "out_of_credits", "hash": None}
    if downloaded_file.get('status') == "not_found":
        return {"status": "not_found", "hash": None}
    if downloaded_file.get('status') == 'ok':
        data_bin = downloaded_file.get('file')
        file_id = hashlib.sha1(data_bin).hexdigest()
        pc = PackageController()
        res = pc.searchFile(file_id)
        if res is None:  # fix: identity comparison with None
            # File not found. Add it to the package (flagged as VT-sourced).
            pc.append(file_id, data_bin, True)
            return {"status": "added", "hash": file_id}
        # We had it but still asked VT: reprocess the stored sample.
        process_file(file_id)
        return {"status": "inconsistency_found", "hash": file_id}
def save_file_from_vt(hash_id):
    """Download hash_id from VirusTotal, store new samples, report status.

    Returns {"status": ..., "hash": ...}; status is one of unknown /
    out_of_credits / not_found / added / inconsistency_found.
    """
    vt_result = download_from_virus_total(hash_id)
    if vt_result is None:
        return {"status": "unknown", "hash": None}
    status = vt_result.get('status')
    if status == "out_of_credits":
        return {"status": "out_of_credits", "hash": None}
    if status == "not_found":
        return {"status": "not_found", "hash": None}
    if status == 'ok':
        binary = vt_result.get('file')
        sha1 = hashlib.sha1(binary).hexdigest()
        controller = PackageController()
        if controller.searchFile(sha1) is None:
            # Not stored yet: add it (flagged as coming from VT).
            controller.append(sha1, binary, True)
            return {"status": "added", "hash": sha1}
        # We had it but still asked VT: reprocess the stored sample.
        process_file(sha1)
        return {"status": "inconsistency_found", "hash": sha1}
def process_file(file_hash, force=False):
    """Load the sample identified by a sha1 and launch its analysis.

    force=True wipes the stored version map so all modules re-run.
    Returns 0 on success, None when the binary is missing; raises
    ValueError for non-sha1 input.
    """
    if not is_sha1(file_hash):
        raise ValueError("process_file only accepts sha1")
    logging.debug("process_file(" + str(file_hash) + ")")
    binary = PackageController().getFile(file_hash)
    if binary is None:
        logging.warning("Error: process_file(" + str(file_hash) +
                        "): pc.getFile returned None")
        return None
    sample = Sample()
    sample.setID(file_hash)
    sample.setBinary(binary)
    if force:
        sample.setStorageVersion({})
    Launcher().launchAnalysisByID(sample)
    log_event("process", str(file_hash))
    return 0
def process_file(file_hash, force=False):
    """Launch analysis for the sample with the given sha1.

    force=True resets the stored version map so every processor re-runs.
    Returns 0 on success, None if the binary is absent; raises ValueError
    on non-sha1 input.
    """
    if not is_sha1(file_hash):
        raise ValueError("process_file only accepts sha1")
    logging.debug("process_file(" + str(file_hash) + ")")
    pc = PackageController()
    binary = pc.getFile(file_hash)
    if binary is None:
        logging.warning("Error: process_file(" + str(file_hash) +
                        "): pc.getFile returned None")
        return None
    sample = Sample()
    sample.setID(file_hash)
    sample.setBinary(binary)
    if force:
        sample.setStorageVersion({})
    launcher = Launcher()
    launcher.launchAnalysisByID(sample)
    log_event("process", str(file_hash))
    return 0
def add_file_from_vt(hash_id):
    """Download hash_id from VirusTotal and add it to the package store.

    If the sample was already stored (DB/VT mismatch) it is reprocessed.
    Returns the sha1 of the sample, or None when VT has nothing.
    """
    downloaded_file = download_from_virus_total(hash_id)
    if downloaded_file is None:  # fix: identity comparison with None
        print("add_file_from_vt(): " + str(hash_id) + " not found in VT.")
        return None
    print("add_file_from_vt(): downloaded_file is not None." + str(hash_id))
    data_bin = downloaded_file
    file_id = hashlib.sha1(data_bin).hexdigest()
    pc = PackageController()
    res = pc.searchFile(file_id)
    if res is None:
        # File not found. Add it to the package (flagged as VT-sourced).
        pc.append(file_id, data_bin, True)
        print(str(hash_id) + " added to DB from VT.")
    else:
        print("add_file_from_vt(): " + str(hash_id) +
              " was found in the DB and asked in VT: BUG. Going to process right now.")
        process_file(file_id)
    return file_id
def db_inconsistency(file_hash):
    # Check the three stores (meta index, sample grid, version records) for
    # a hash and report which piece is missing.
    # Returns: 0 consistent (or undecidable), 1 sample without meta,
    #          2 meta without sample, 3 meta+sample without version.
    # Raises ValueError for a malformed hash.
    if (not valid_hash(file_hash)):
        raise ValueError("db_inconsistency invalid hash")
    pc = PackageController()
    v = VersionController()
    file_id = get_file_id(file_hash)
    if file_id is not None:  # meta exists
        file_bin = pc.getFile(file_id)
        if file_bin is not None:  # sample exists
            version = v.searchVersion(file_id)
            if version is not None:
                return 0  # ok
            else:  # version does not exist
                logging.info(
                    "inconsistency: meta and sample exists. Version does not")
                return 3
        else:  # has meta but not sample
            logging.info("inconsistency: meta exists, sample does not")
            return 2
    else:  # does not have meta
        if len(file_hash) == 64:
            return 0  # cant search in grid by sha256
        if len(file_hash) == 40:
            file_bin = pc.getFile(file_hash)
        else:  # md5
            sha1 = pc.md5_to_sha1(file_hash)
            if sha1 is None:
                return 0  # does not have meta or sample
            # NOTE(review): looks up the grid with the md5 (file_hash), not the
            # resolved sha1 — confirm pc.getFile accepts md5, else this may be a bug.
            file_bin = pc.getFile(file_hash)
        if file_bin is None:
            return 0
        else:
            logging.info("inconsistency: does not have meta. has sample")
            return 1
def __init__(self):
    """Set up file logging and the controllers shared by every launch."""
    formato = '[%(asctime)-15s][%(levelname)s] %(message)s'
    path = os.path.abspath(os.path.dirname(os.path.abspath(__file__)))
    logfile = os.path.join(path, "launcher.log")
    logging.basicConfig(format=formato, filename=logfile, level=logging.INFO)
    self.vc = VersionController()
    self.pc = PackageController()
    self.mdc = MetaController()

def launchOnlyHashingByID(self, sample):
    """Run only the hashing processor for sample and persist the results.

    Returns 0 on success, -1 when the metadata write fails.
    """
    sample.setPackageController(self.pc)
    sample.setMetaController(self.mdc)
    sample.setVersionController(self.vc)
    category = sample.getCategory()
    if category is None:  # fix: identity comparison with None
        category = Cataloger().catalog(sample.getBinary())
        logging.debug("Category not found in DB, categorized as %s",
                      str(category))
    else:
        logging.debug("Category found in DB, categorized as %s",
                      str(category))
    processor = ProcessorFactory().getHashProcessor(category, sample)
    result_dic = processor.process()
    result_version = processor.getVersion()
    if len(result_version) > 0:
        logging.debug("Updating metadata")
        if self.mdc.write(sample.getID(), result_dic) != 0:
            logging.error("Error writing Metadata to DB, sample:%s",
                          sample.getID())
            return -1
        logging.debug("Metadata writed in DB")
        self.vc.updateVersion(sample.getID(), result_version)
        logging.debug("Versions writed to DB")
    else:
        logging.debug("Nothing to update")
    logging.debug("Analysis Finished OK")
    return 0
def api_batch_process_debug_file():
    """Batch-process a newline-separated list of hashes (debug endpoint).

    Streams progress as HTML text. Hashes whose sample is stored but has
    no metadata are processed immediately; unknown ones are fetched from
    VirusTotal; the rest are queued.
    """
    yield "<html><body><pre>"
    yield "Running Batch process\n"
    file_hashes = request.forms.get('file_hash')
    if file_hashes is None:
        response.status = 422
        logging.debug("api_batch_process_debug_file(): file_hash is missing")
        yield "file_hash parameter is missing"
        # fix: stop here; the original fell through and crashed on None.split
        return
    # transform file_hashes in a list of hashes.
    not_found = []
    added_to_queue = 0
    downloaded_from_vt = 0
    for hash_id in file_hashes.split("\n"):
        hash_id = clean_hash(hash_id)
        if hash_id is None:
            continue
        data = "1=" + hash_id
        if len(hash_id) == 40 or len(hash_id) == 32:
            pc = PackageController()
            res = pc.getFile(hash_id)
            if res is not None and len(
                    SearchModule.search_by_id(data, 1, [], False)) == 0:
                # Sample stored but no metadata: process it right away.
                logging.debug("Processing right now: " + str(hash_id))
                process_file(hash_id)
                if envget('auto_get_av_result'):
                    add_task_to_download_av_result(hash_id)
                continue
        res = SearchModule.search_by_id(data, 1, [], False)
        if len(res) == 0:
            # fix: was 'legging.debug' — a NameError at runtime
            logging.debug("process_debug(): metadata of " + str(hash_id) +
                          " was not found. We will look in Pc. \nhash length: " +
                          str(len(hash_id)))
            if len(hash_id) == 40 or len(hash_id) == 32:
                pc = PackageController()
                res = pc.getFile(hash_id)
                if res is not None:
                    logging.debug("process_debug(): hash was found (" +
                                  str(hash_id) + ")")
                else:
                    logging.debug("process_debug(): hash was not found(" +
                                  str(hash_id) + ")")
            logging.debug("process_debug():")
            logging.debug("process_debug(): going to search " + str(hash_id) +
                          " in vt")
            add_response = SearchModule.add_file_from_vt(hash_id)
            sha1 = add_response.get('hash')
            if sha1 is None:
                logging.debug("process_debug(): sha1 is None: " + str(hash_id))
                not_found.append(hash_id)
                continue
            downloaded_from_vt += 1
        else:
            sha1 = res[0]["sha1"]
        added_to_queue += 1
        add_hash_to_process_queue(sha1)
        if envget('auto_get_av_result'):
            add_task_to_download_av_result(sha1)
        yield str(sha1) + "\n"
    responsex = str(added_to_queue) + " files added to the process queue.\n"
    if downloaded_from_vt > 0:
        responsex += str(downloaded_from_vt) + " new hashes.\n"
    if len(not_found) != 0:
        responsex += str(len(not_found)) + " hashes not found.\n"
        responsex += "Not Found:\n"
        for aux in not_found:
            responsex = responsex + str(aux) + "\n"
    yield responsex
    yield "END"
def api_batch_process_debug_file():
    """Batch-process a newline-separated list of hashes (debug endpoint).

    Streams progress as HTML text. Hashes whose sample is stored but has
    no metadata are processed immediately; unknown ones are fetched from
    VirusTotal; the rest are queued.
    """
    yield "<html><body><pre>"
    yield "Running Batch process\n"
    file_hashes = request.forms.get('file_hash')
    if file_hashes is None:
        response.status = 422
        logging.debug("api_batch_process_debug_file(): file_hash is missing")
        yield "file_hash parameter is missing"
        # fix: stop here; the original fell through and crashed on None.split
        return
    # transform file_hashes in a list of hashes.
    not_found = []
    added_to_queue = 0
    downloaded_from_vt = 0
    for hash_id in file_hashes.split("\n"):
        hash_id = clean_hash(hash_id)
        if hash_id is None:
            continue
        data = "1=" + hash_id
        if len(hash_id) == 40 or len(hash_id) == 32:
            pc = PackageController()
            res = pc.getFile(hash_id)
            if res is not None and len(
                    SearchModule.search_by_id(data, 1, [], False)) == 0:
                # Sample stored but no metadata: process it right away.
                logging.debug("Processing right now: " + str(hash_id))
                process_file(hash_id)
                if env['auto_get_av_result']:
                    add_task_to_download_av_result(hash_id)
                continue
        res = SearchModule.search_by_id(data, 1, [], False)
        if len(res) == 0:
            # fix: was 'legging.debug' — a NameError at runtime
            logging.debug("process_debug(): metadata of " + str(hash_id) +
                          " was not found. We will look in Pc. \nhash length: " +
                          str(len(hash_id)))
            if len(hash_id) == 40 or len(hash_id) == 32:
                pc = PackageController()
                res = pc.getFile(hash_id)
                if res is not None:
                    logging.debug("process_debug(): hash was found (" +
                                  str(hash_id) + ")")
                else:
                    logging.debug("process_debug(): hash was not found(" +
                                  str(hash_id) + ")")
            logging.debug("process_debug():")
            logging.debug("process_debug(): going to search " + str(hash_id) +
                          " in vt")
            add_response = SearchModule.add_file_from_vt(hash_id)
            sha1 = add_response.get('hash')
            if sha1 is None:  # fix: identity comparison with None
                logging.debug("process_debug(): sha1 is None: " + str(hash_id))
                not_found.append(hash_id)
                continue
            downloaded_from_vt += 1
        else:
            sha1 = res[0]["sha1"]
        added_to_queue += 1
        add_hash_to_process_queue(sha1)
        if env['auto_get_av_result']:
            add_task_to_download_av_result(sha1)
        yield str(sha1) + "\n"
    responsex = str(added_to_queue) + " files added to the process queue.\n"
    if downloaded_from_vt > 0:
        responsex += str(downloaded_from_vt) + " new hashes.\n"
    if len(not_found) != 0:
        responsex += str(len(not_found)) + " hashes not found.\n"
        responsex += "Not Found:\n"
        for aux in not_found:
            responsex = responsex + str(aux) + "\n"
    yield responsex
    yield "END"