def translate_id(id, str_value):
    """Map a search-field id to its tree_menu path and coerce str_value.

    Hash fields (ids 1 and 12) are re-mapped by hash length
    (32 -> md5, 40 -> sha1, 64 -> sha256 variants).
    Returns (path, value); value is None for unknown type formats.
    """
    str_value = str_value.replace("+", " ")
    if id == 1:
        str_value = clean_hash(str_value)
        largo = len(str_value)
        if largo == 32:
            id = 1
        elif largo == 40:
            id = 2
        elif largo == 64:
            id = 3
        else:
            id = 1
    if id == 12:
        str_value = clean_hash(str_value)
        largo = len(str_value)
        if largo == 32:
            id = 12
        elif largo == 40:
            id = 13
        elif largo == 64:
            id = 14
        else:
            id = 12
    dic = tree_menu.ids[id]
    path = str(dic["path"])
    type_format = dic["type"]
    do = dic.get("do")
    # BUG FIX: original used `do is "clean_hash"` — identity comparison with
    # a string literal relies on interning and is not guaranteed; compare by
    # value instead.
    if do == "clean_hash":
        str_value = clean_hash(str_value)
    if type_format == "string":
        if id == 1 or id == 2 or id == 3:
            # hash values are normalized to lowercase
            value = str(urllib.unquote(str_value).decode('utf8')).lower()
        else:
            value = str(urllib.unquote(str_value).decode('utf8'))
    elif type_format == "int":
        value = int(str_value)
    elif type_format == "float":
        value = float(str_value)
    elif type_format == "check":
        value = "True" if str_value == "true" else "False"
    elif type_format == "s_string":
        aux = str(urllib.unquote(str_value).decode('utf8')).lower()
        value = "'%s'" % (aux, )
    elif type_format == "s_string_no_lower":
        aux = str(urllib.unquote(str_value).decode('utf8'))
        value = "'%s'" % (aux, )
    elif type_format == "s_string_nl":
        aux = str(urllib.unquote(str_value).decode('utf8'))
        value = "'%s'" % (aux, )
    else:
        value = None
    return path, value
def translate_id(id, str_value):
    """Map a search-field id to its tree_menu path and coerce str_value.

    Hash fields (ids 1 and 12) are re-mapped by hash length
    (32 -> md5, 40 -> sha1, 64 -> sha256 variants).
    Returns (path, value); value is None for unknown type formats.
    """
    str_value = str_value.replace("+", " ")
    if id == 1:
        str_value = clean_hash(str_value)
        largo = len(str_value)
        if largo == 32:
            id = 1
        elif largo == 40:
            id = 2
        elif largo == 64:
            id = 3
        else:
            id = 1
    if id == 12:
        str_value = clean_hash(str_value)
        largo = len(str_value)
        if largo == 32:
            id = 12
        elif largo == 40:
            id = 13
        elif largo == 64:
            id = 14
        else:
            id = 12
    dic = tree_menu.ids[id]
    path = str(dic["path"])
    type_format = dic["type"]
    do = dic.get("do")
    # BUG FIX: was `do is "clean_hash"` — identity check against a string
    # literal; use value equality.
    if do == "clean_hash":
        str_value = clean_hash(str_value)
    if type_format == "string":
        if id == 1 or id == 2 or id == 3:
            # hash values are normalized to lowercase
            value = str(urllib.unquote(str_value).decode('utf8')).lower()
        else:
            value = str(urllib.unquote(str_value).decode('utf8'))
    elif type_format == "int":
        value = int(str_value)
    elif type_format == "float":
        value = float(str_value)
    elif type_format == "check":
        value = "True" if str_value == "true" else "False"
    elif type_format == "s_string":
        aux = str(urllib.unquote(str_value).decode('utf8')).lower()
        value = "'%s'" % (aux, )
    elif type_format == "s_string_no_lower":
        aux = str(urllib.unquote(str_value).decode('utf8'))
        value = "'%s'" % (aux, )
    elif type_format == "s_string_nl":
        aux = str(urllib.unquote(str_value).decode('utf8'))
        value = "'%s'" % (aux, )
    else:
        value = None
    return path, value
def get_result_from_av():
    """Download AV scans from VirusTotal for the hash in file_hash.

    md5/sha256 inputs are resolved to sha1 via the search index.
    Error codes: 4 missing param, 5 bad format, 6 not found,
    7 missing VT key, 8 VT lookup failed.
    """
    hash_id = request.query.file_hash
    if len(hash_id) == 0:
        # CONSISTENCY FIX: bottle's documented attribute is response.status
        # (as the newer handlers in this file use); response.code is inert.
        response.status = 400
        return jsonize({'error': 4,
                        'error_message': 'file_hash parameter is missing.'})
    hash_id = clean_hash(hash_id)
    if not valid_hash(hash_id):
        return jsonize({'error': 5, 'error_message': 'Invalid hash format.'})
    if len(hash_id) != 40:
        # not a sha1: resolve through the metadata search
        data = "1=" + str(hash_id)
        res = SearchModule.search_by_id(data, 1, [], True)
        if len(res) == 0:
            response.status = 400
            return jsonize({'error': 6, 'error_message': 'File not found'})
        sha1 = res[0]["sha1"]
    else:
        sha1 = hash_id
    if not vt_key():
        return jsonize({'error': 7,
                        "error_message": "Error: VirusTotal API key missing from secrets.py file"})
    av_result = get_av_result(sha1)
    if av_result is None:  # idiom fix: was "== None"
        return jsonize({"error": 8,
                        "error_message": "Cannot get analysis (hash not found in VT? out of credits?)"})
    return jsonize({"message": "AV scans downloaded."})
def get_file():
    """Serve the sample identified by sha1 as a password-protected zip."""
    tmp_folder = "/tmp/mass_download"
    subprocess.call(["mkdir", "-p", tmp_folder])
    file_hash = clean_hash(request.query.file_hash)
    # only sha1 (40 hex chars) is accepted; removed unused "key" local
    if len(file_hash) != 40:
        response.status = 400  # consistency: bottle uses response.status
        return jsonize({'message': 'Invalid hash format (use sha1)'})
    pc = PackageController()
    res = pc.searchFile(file_hash)
    if res is None:  # idiom fix: was "== None"
        response.status = 404
        return jsonize({'message': 'File not found in the database'})
    if res == 1:
        response.status = 400
        return jsonize({'message': 'File not available for downloading'})
    res = pc.getFile(file_hash)
    zip_name = os.path.join(tmp_folder, str(file_hash) + '.zip')
    file_name = os.path.join(tmp_folder, str(file_hash) + '.codex')
    # "with" guarantees the handle is closed even if write() fails
    with open(file_name, "wb") as fd:
        fd.write(res)
    # -j junk paths, -u update existing entry, -P password "codex"
    subprocess.call(["zip", "-ju", "-P", "codex", zip_name, file_name])
    return static_file(str(file_hash) + ".zip", root=tmp_folder, download=True)
def export_metadata():
    """Dump metadata for all POSTed file hashes into one downloadable .txt."""
    mdc = MetaController()
    hashes = request.forms.dict.get("file_hash[]")
    sections = []
    # renamed loop var: "hash" shadowed the builtin
    for file_hash in hashes:
        file_hash = clean_hash(file_hash.replace('\r', ''))
        res = mdc.read(file_hash)
        dump = dumps(res, indent=4)
        sections.append("\n\n#### File:%s\n" % file_hash)
        sections.append(dump)
    # join once instead of quadratic += concatenation
    dump_to_save = "".join(sections)
    id_random = id_generator()
    tmp_folder = "/tmp/meta_export"
    subprocess.call(["mkdir", "-p", tmp_folder])
    file_name = os.path.join(tmp_folder, str(id_random) + '.txt')
    with open(file_name, "w") as fd:
        fd.write(dump_to_save)
    resp = static_file(str(id_random) + '.txt', root=tmp_folder, download=True)
    # cookie consumed by the client-side jQuery fileDownload plugin
    resp.set_cookie('fileDownload', 'true')
    return resp
def get_file():
    """Serve the sample identified by sha1 as a password-protected zip."""
    tmp_folder = "/tmp/mass_download"
    subprocess.call(["mkdir", "-p", tmp_folder])
    file_hash = clean_hash(request.query.file_hash)
    # only sha1 (40 hex chars) is accepted; removed unused "key" local
    if len(file_hash) != 40:
        response.status = 400  # consistency: bottle uses response.status
        return jsonize({'message': 'Invalid hash format (use sha1)'})
    pc = PackageController()
    res = pc.searchFile(file_hash)
    if res is None:  # idiom fix: was "== None"
        response.status = 404
        return jsonize({'message': 'File not found in the database'})
    if res == 1:
        response.status = 400
        return jsonize({'message': 'File not available for downloading'})
    res = pc.getFile(file_hash)
    zip_name = os.path.join(tmp_folder, str(file_hash) + '.zip')
    file_name = os.path.join(tmp_folder, str(file_hash) + '.codex')
    # "with" guarantees the handle is closed even if write() fails
    with open(file_name, "wb") as fd:
        fd.write(res)
    # -j junk paths, -u update existing entry, -P password "codex"
    subprocess.call(["zip", "-ju", "-P", "codex", zip_name, file_name])
    return static_file(str(file_hash) + ".zip", root=tmp_folder, download=True)
def api_batch_process_file():
    """Queue a newline-separated list of hashes for processing.

    Returns a JSON summary with queued / not-found counts.
    """
    logging.debug("api_batch_process_file(): Running Batch process")
    file_hashes = request.forms.get('file_hash')
    if file_hashes is None:
        # BUG FIX: original passed a *set* literal to jsonize(); sets are
        # not JSON-serializable. Use a dict payload instead.
        return jsonize({"message": "Error: file_hash parameter is missing."})
    not_found = []
    added_to_queue = 0
    downloaded_from_vt = 0
    for hash_id in file_hashes.split("\n"):
        hash_id = clean_hash(hash_id)
        if hash_id is None:
            continue
        data = "1=" + str(hash_id)
        res = SearchModule.search_by_id(data, 1, [], True)
        if len(res) == 0:
            not_found.append(hash_id)
            continue
        sha1 = res[0]["sha1"]
        added_to_queue += 1
        logging.debug(str(hash_id) + " added to queue")
        add_hash_to_process_queue(sha1)
    responsex = str(added_to_queue) + " files added to the process queue.\n"
    if downloaded_from_vt > 0:
        responsex += str(downloaded_from_vt) + " new hashes.\n"
    if len(not_found) != 0:
        responsex += str(len(not_found)) + " hashes not found.\n"
        responsex += "Not Found:\n"
        for aux in not_found:
            responsex = responsex + str(aux) + "\n"
    return jsonize({"message": responsex})
def api_batch_process_file():
    """Queue a newline-separated list of hashes for processing.

    Returns a JSON summary with queued / not-found counts.
    """
    logging.debug("api_batch_process_file(): Running Batch process")
    file_hashes = request.forms.get('file_hash')
    if file_hashes is None:
        # BUG FIX: original passed a *set* literal to jsonize(); sets are
        # not JSON-serializable. Use a dict payload instead.
        return jsonize({"message": "Error: file_hash parameter is missing."})
    not_found = []
    added_to_queue = 0
    downloaded_from_vt = 0
    for hash_id in file_hashes.split("\n"):
        hash_id = clean_hash(hash_id)
        if hash_id is None:
            continue
        data = "1=" + str(hash_id)
        res = SearchModule.search_by_id(data, 1, [], True)
        if len(res) == 0:
            not_found.append(hash_id)
            continue
        sha1 = res[0]["sha1"]
        added_to_queue += 1
        logging.debug(str(hash_id) + " added to queue")
        add_hash_to_process_queue(sha1)
    responsex = str(added_to_queue) + " files added to the process queue.\n"
    if downloaded_from_vt > 0:
        responsex += str(downloaded_from_vt) + " new hashes.\n"
    if len(not_found) != 0:
        responsex += str(len(not_found)) + " hashes not found.\n"
        responsex += "Not Found:\n"
        for aux in not_found:
            responsex = responsex + str(aux) + "\n"
    return jsonize({"message": responsex})
def export_metadata():
    """Dump metadata for all POSTed file hashes into one downloadable .txt."""
    mdc = MetaController()
    hashes = request.forms.dict.get("file_hash[]")
    sections = []
    # renamed loop var: "hash" shadowed the builtin
    for file_hash in hashes:
        file_hash = clean_hash(file_hash.replace('\r', ''))
        res = mdc.read(file_hash)
        dump = dumps(res, indent=4)
        sections.append("\n\n#### File:%s\n" % file_hash)
        sections.append(dump)
    # join once instead of quadratic += concatenation
    dump_to_save = "".join(sections)
    id_random = id_generator()
    tmp_folder = "/tmp/meta_export"
    subprocess.call(["mkdir", "-p", tmp_folder])
    file_name = os.path.join(tmp_folder, str(id_random) + '.txt')
    with open(file_name, "w") as fd:
        fd.write(dump_to_save)
    resp = static_file(str(id_random) + '.txt', root=tmp_folder, download=True)
    # cookie consumed by the client-side jQuery fileDownload plugin
    resp.set_cookie('fileDownload', 'true')
    return resp
def get_result_from_av():
    """Fetch the VirusTotal AV result for the sha1 given in file_hash."""
    file_hash = clean_hash(request.query.file_hash)
    if len(file_hash) != 40:
        # CONSISTENCY FIX: bottle's attribute is response.status, as the
        # newer handlers in this file use; response.code is inert.
        response.status = 400
        return jsonize({'message': 'Invalid hash format (use sha1)'})
    av_result = get_av_result(file_hash)
    if av_result is None:  # idiom fix: was "== None"
        return jsonize("Can not get analysis")
    return jsonize("File processed")
def generic_process_hash(hash_str):
    """Process the file identified by hash_str (md5 or sha1).

    Returns process_file()'s result, or None when the hash is missing,
    invalid, or cannot be resolved to a file id.
    """
    # ROBUSTNESS: guard against None before clean_hash/valid_hash, as the
    # newer variants of this helper do.
    if hash_str is None:
        return None
    hash_str = clean_hash(hash_str)
    if not valid_hash(hash_str):
        return None
    if len(hash_str) == 32:
        # md5: resolve to the internal file id first
        hash_str = get_file_id(hash_str)
    if hash_str is not None:
        return process_file(hash_str)
    return None
def generic_process_hash(hash_str):
    """Process the file identified by hash_str (md5 or sha1).

    Returns process_file()'s result, or None when the hash is missing,
    invalid, or cannot be resolved to a file id.
    """
    # ROBUSTNESS: guard against None before clean_hash/valid_hash, as the
    # newer variants of this helper do.
    if hash_str is None:
        return None
    hash_str = clean_hash(hash_str)
    if not valid_hash(hash_str):
        return None
    if len(hash_str) == 32:
        # md5: resolve to the internal file id first
        hash_str = get_file_id(hash_str)
    if hash_str is not None:
        return process_file(hash_str)
    return None
def get_result_from_av():
    """Fetch the VirusTotal AV result for the sha1 given in file_hash."""
    file_hash = clean_hash(request.query.file_hash)
    if len(file_hash) != 40:
        # CONSISTENCY FIX: bottle's attribute is response.status, as the
        # newer handlers in this file use; response.code is inert.
        response.status = 400
        return jsonize({'message': 'Invalid hash format (use sha1)'})
    av_result = get_av_result(file_hash)
    if av_result is None:  # idiom fix: was "== None"
        return jsonize("Can not get analysis")
    return jsonize("File processed")
def api_process_file():
    """Trigger processing of the file identified by a sha1 in file_hash."""
    file_hash = clean_hash(request.query.file_hash)
    if len(file_hash) != 40:
        # CONSISTENCY FIX: bottle's attribute is response.status; the
        # original's response.code assignment is inert.
        response.status = 400
        return jsonize({'message': 'Invalid hash format (use sha1)'})
    res = process_file(file_hash)
    if res is None:  # idiom fix: was "== None"
        response.status = 404
        return jsonize("File not found in the database")
    return jsonize("File processed")
def api_process_file():
    """Trigger processing of the file identified by a sha1 in file_hash."""
    file_hash = clean_hash(request.query.file_hash)
    if len(file_hash) != 40:
        # CONSISTENCY FIX: bottle's attribute is response.status; the
        # original's response.code assignment is inert.
        response.status = 400
        return jsonize({'message': 'Invalid hash format (use sha1)'})
    res = process_file(file_hash)
    if res is None:  # idiom fix: was "== None"
        response.status = 404
        return jsonize("File not found in the database")
    return jsonize("File processed")
def get_package_file(): tmp_folder="/tmp/mass_download" subprocess.call(["mkdir","-p",tmp_folder]) hashes = request.forms.dict.get("file_hash[]") if hashes is None: hashes = request.forms.get("file_hash").split("\n") if hashes is not None: if len(hashes) == 1: random_id=hashes[0] else: random_id = id_generator() else: return jsonize({'message':'Error. no file selected'}) folder_path=os.path.join(tmp_folder,random_id) subprocess.call(["mkdir","-p",folder_path]) zip_name=os.path.join(tmp_folder,random_id+".zip") pc=PackageController() for file_hash in hashes: file_hash = clean_hash(file_hash.replace('\r','')) data="1="+file_hash res=SearchModule.search_by_id(data,1) if(len(res)==0): pass else: file_hash=res[0]["sha1"] res=pc.searchFile(file_hash) if res != 1 and res is not None: res=pc.getFile(file_hash) file_name=os.path.join(folder_path,str(file_hash)+".codex") fd=open(file_name,"wb") fd.write(res) fd.close() elif res == 1: fd=open(os.path.join(folder_path,'readme.txt'),'a+') fd.write(str(file_hash)+" is not available to download.\n") fd.close() elif res is None: fd=open(os.path.join(folder_path,'readme.txt'),'a+') fd.write(str(file_hash)+" not found.") fd.close() else: print "Unknown res:"+str(res) subprocess.call(["zip","-P","codex","-jr", zip_name,folder_path]) resp = static_file(str(random_id)+".zip",root=tmp_folder,download=True) resp.set_cookie('fileDownload','true'); # http://johnculviner.com/jquery-file-download-plugin-for-ajax-like-feature-rich-file-downloads/ return resp
def get_package_file(): tmp_folder = "/tmp/mass_download" subprocess.call(["mkdir", "-p", tmp_folder]) hashes = request.forms.dict.get("file_hash[]") if hashes is None: hashes = request.forms.get("file_hash").split("\n") if hashes is not None: if len(hashes) == 1: random_id = hashes[0] else: random_id = id_generator() else: return jsonize({'message': 'Error. no file selected'}) folder_path = os.path.join(tmp_folder, random_id) subprocess.call(["mkdir", "-p", folder_path]) zip_name = os.path.join(tmp_folder, random_id + ".zip") pc = PackageController() for file_hash in hashes: file_hash = clean_hash(file_hash.replace('\r', '')) data = "1=" + file_hash res = SearchModule.search_by_id(data, 1) if (len(res) == 0): pass else: file_hash = res[0]["sha1"] res = pc.searchFile(file_hash) if res != 1 and res is not None: res = pc.getFile(file_hash) file_name = os.path.join(folder_path, str(file_hash) + ".codex") fd = open(file_name, "wb") fd.write(res) fd.close() elif res == 1: fd = open(os.path.join(folder_path, 'readme.txt'), 'a+') fd.write(str(file_hash) + " is not available to download.\n") fd.close() elif res is None: fd = open(os.path.join(folder_path, 'readme.txt'), 'a+') fd.write(str(file_hash) + " not found.") fd.close() else: print "Unknown res:" + str(res) subprocess.call(["zip", "-P", "codex", "-jr", zip_name, folder_path]) resp = static_file(str(random_id) + ".zip", root=tmp_folder, download=True) resp.set_cookie('fileDownload', 'true') # http://johnculviner.com/jquery-file-download-plugin-for-ajax-like-feature-rich-file-downloads/ return resp
def get_result_from_av():
    """Resolve the submitted hash to a sha1 and download its VT AV scans."""
    hash_id = request.query.file_hash
    if len(hash_id) == 0:
        response.status = 400
        return jsonize({
            'error': 4,
            'error_message': 'file_hash parameter is missing.'
        })
    hash_id = clean_hash(hash_id)
    if not valid_hash(hash_id):
        return jsonize({'error': 5, 'error_message': 'Invalid hash format.'})
    if len(hash_id) == 40:
        sha1 = hash_id
    else:
        # not a sha1: look the file up and take its sha1 from the metadata
        matches = SearchModule.search_by_id("1=" + str(hash_id), 1, [], True)
        if len(matches) == 0:
            response.status = 400
            return jsonize({'error': 6, 'error_message': 'File not found'})
        sha1 = matches[0]["sha1"]
    key_manager = KeyManager()
    if not key_manager.check_keys_in_secrets():
        return jsonize({
            'error': 7,
            "error_message": "Error: VirusTotal API key missing from secrets.py file"
        })
    av_result = get_av_result(sha1, 'high')
    status = av_result.get('status')
    if status == "added":
        return jsonize({"message": "AV scans downloaded."})
    if status == "already_had_it":
        return jsonize({"message": "File already have AV scans."})
    if status == "not_found":
        return jsonize({"error": 10, "error_message": "Not found on VT."})
    if status == "no_key_available":
        return jsonize({
            "error": 11,
            "error_message": "No key available right now. Please try again later."
        })
    logging.error("av_result for hash=" + str(sha1))
    logging.error("av_result=" + str(av_result))
    return jsonize({"error": 9, "error_message": "Cannot get analysis."})
def generic_process_hash(hash_str):
    """Normalize hash_str to a sha1 and hand it to process_file().

    sha256 (64 chars) is mapped via get_file_id, md5 (32 chars) via
    PackageController.md5_to_sha1. Returns None for missing, invalid,
    or unresolvable hashes.
    """
    if hash_str is None:
        return None
    hash_str = clean_hash(hash_str)
    if not valid_hash(hash_str):
        return None
    length = len(hash_str)
    if length == 64:
        hash_str = get_file_id(hash_str)
    elif length == 32:
        hash_str = PackageController().md5_to_sha1(hash_str)
        logging.debug("generic_process_hash-->sha1: " + str(hash_str))
    if hash_str is None:
        return None
    return process_file(hash_str)
def generic_process_hash(hash_str):
    """Resolve any supported hash to a sha1 and process that file.

    Returns None when the hash is missing, invalid, or unresolvable.
    """
    if hash_str is None:
        return None
    cleaned = clean_hash(hash_str)
    if not valid_hash(cleaned):
        return None
    sha1 = cleaned
    if len(cleaned) == 64:
        # sha256 -> internal file id
        sha1 = get_file_id(cleaned)
    elif len(cleaned) == 32:
        # md5 -> sha1 via the package store
        pc = PackageController()
        sha1 = pc.md5_to_sha1(cleaned)
        logging.debug("generic_process_hash-->sha1: " + str(sha1))
    return process_file(sha1) if sha1 is not None else None
def api_batch_process_file(): print("Running Batch process") file_hashes = request.forms.get('file_hash') #print(dir(request.forms)) #print(request.forms.keys()) #transformar file_hashes a una lista de hashes not_found = [] added_to_queue = 0 downloaded_from_vt = 0 for hash_id in file_hashes.split("\n"): hash_id = clean_hash(hash_id) if hash_id is None: continue data = "1=" + str(hash_id) res = SearchModule.search_by_id(data, 1, [], True) if (len(res) == 0): not_found.append(hash_id) continue """ print "downloading "+str(hash_id)+" from vt" sha1=SearchModule.add_file_from_vt(hash_id) if(sha1==None): print "not found on vt: "+str(hash_id) not_found.append(hash_id) continue else: downloaded_from_vt+=1 """ else: sha1 = res[0]["sha1"] added_to_queue += 1 print str(hash_id) + " added to queue" add_hash_to_process_queue(sha1) if (env['auto_get_av_result']): get_av_result(sha1) responsex = str(added_to_queue) + " files added to the process queue.\n" if (downloaded_from_vt > 0): responsex += str(downloaded_from_vt) + " new hashes.\n" if (len(not_found) != 0): responsex += str(len(not_found)) + " hashes not found.\n" responsex += "Not Found:\n" for aux in not_found: responsex = responsex + str(aux) + "\n" return jsonize({"message": responsex})
def api_batch_process_file(): print("Running Batch process") file_hashes=request.forms.get('file_hash') #print(dir(request.forms)) #print(request.forms.keys()) #transformar file_hashes a una lista de hashes not_found=[] added_to_queue=0 downloaded_from_vt=0 for hash_id in file_hashes.split("\n"): hash_id=clean_hash(hash_id) if hash_id is None: continue data="1="+str(hash_id) res=SearchModule.search_by_id(data,1,[],False) if(len(res)==0): not_found.append(hash_id) continue """ print "downloading "+str(hash_id)+" from vt" sha1=SearchModule.add_file_from_vt(hash_id) if(sha1==None): print "not found on vt: "+str(hash_id) not_found.append(hash_id) continue else: downloaded_from_vt+=1 """ else: sha1=res[0]["sha1"] added_to_queue+=1 print str(hash_id)+" added to queue" add_hash_to_process_queue(sha1) responsex=str(added_to_queue)+" files added to the process queue.\n" if(downloaded_from_vt > 0): responsex+=str(downloaded_from_vt)+" new hashes.\n" if(len(not_found)!=0): responsex+=str(len(not_found))+ " hashes not found.\n" responsex+="Not Found:\n" for aux in not_found: responsex=responsex+str(aux)+"\n" return jsonize({"message":responsex})
def get_metadata():
    """Return stored metadata (JSON, dates stringified) for the given hash."""
    if request.query.file_hash == '':
        response.status = 400
        return jsonize({'message': 'file_hash parameter is missing'})
    file_hash = clean_hash(request.query.file_hash)
    if not valid_hash(file_hash):
        response.status = 400
        return jsonize({'message': 'Invalid hash format (use MD5, SHA1 or SHA2)'})
    file_hash = get_file_id(file_hash)
    if file_hash is None:
        response.status = 404
        return jsonize({'message': 'Metadata not found in the database'})
    mdc = MetaController()
    res = mdc.read(file_hash)
    # BUG FIX: the original logged only when res was None and then dumped
    # None with HTTP 200; sibling versions of this handler 404 on missing
    # metadata and log the successful lookup.
    if res is None:
        response.status = 404
        return jsonize({'message': 'Metadata not found in the database'})
    log_event("metadata", file_hash)
    return dumps(change_date_to_str(res))
def api_batch_process_debug_file(): yield "<html><body><pre>" yield "Running Batch process\n" file_hashes = request.forms.get('file_hash') #print(dir(request.forms)) #print(request.forms.keys()) #transformar file_hashes a una lista de hashes not_found = [] added_to_queue = 0 downloaded_from_vt = 0 for hash_id in file_hashes.split("\n"): hash_id = clean_hash(hash_id) if hash_id is None: continue data = "1=" + hash_id res = SearchModule.search_by_id(data, 1, [], False) if (len(res) == 0): print "downloading " + str(hash_id) + " from vt" sha1 = SearchModule.add_file_from_vt(hash_id) if (sha1 == None): not_found.append(hash_id) continue else: downloaded_from_vt += 1 else: sha1 = res[0]["sha1"] added_to_queue += 1 add_hash_to_process_queue(sha1) if (env['auto_get_av_result']): get_av_result(sha1) yield str(sha1) + "\n" responsex = str(added_to_queue) + " files added to the process queue.\n" if (downloaded_from_vt > 0): responsex += str(downloaded_from_vt) + " new hashes.\n" if (len(not_found) != 0): responsex += str(len(not_found)) + " hashes not found.\n" responsex += "Not Found:\n" for aux in not_found: responsex = responsex + str(aux) + "\n" yield responsex yield "END"
def get_metadata():
    """Return stored metadata (dates stringified) for an md5/sha1 hash."""
    file_hash = clean_hash(request.query.file_hash)
    if file_hash is None:
        return
    # only md5 (32) and sha1 (40) accepted; removed the unused "key" local
    if len(file_hash) != 32 and len(file_hash) != 40:
        # CONSISTENCY FIX: bottle's attribute is response.status; the
        # original's response.code assignment is inert.
        response.status = 400
        return jsonize({'message': 'Invalid hash format (use MD5, SHA1 or SHA2)'})
    mdc = MetaController()
    res = mdc.read(file_hash)
    if res is None:  # idiom fix: was "== None"
        response.status = 404
        return jsonize({'message': 'Metadata not found in the database'})
    log_event("metadata", file_hash)
    return dumps(change_date_to_str(res))
def api_batch_process_debug_file(): yield "<html><body><pre>" yield "Running Batch process\n" file_hashes=request.forms.get('file_hash') #print(dir(request.forms)) #print(request.forms.keys()) #transformar file_hashes a una lista de hashes not_found=[] added_to_queue=0 downloaded_from_vt=0 for hash_id in file_hashes.split("\n"): hash_id=clean_hash(hash_id) if hash_id is None: continue data="1="+hash_id res=SearchModule.search_by_id(data,1,[],False) if(len(res)==0): print "downloading "+str(hash_id)+" from vt" sha1=SearchModule.add_file_from_vt(hash_id) if(sha1==None): not_found.append(hash_id) continue else: downloaded_from_vt+=1 else: sha1=res[0]["sha1"] added_to_queue+=1 add_hash_to_process_queue(sha1) yield str(sha1)+"\n" responsex=str(added_to_queue)+" files added to the process queue.\n" if(downloaded_from_vt > 0): responsex+=str(downloaded_from_vt)+" new hashes.\n" if(len(not_found)!=0): responsex+=str(len(not_found))+ " hashes not found.\n" responsex+="Not Found:\n" for aux in not_found: responsex=responsex+str(aux)+"\n" yield responsex yield "END"
def get_metadata():
    """Return stored metadata (raw JSON dump) for an md5/sha1 hash."""
    file_hash = clean_hash(request.query.file_hash)
    if file_hash is None:
        return
    # only md5 (32) and sha1 (40) accepted; removed the unused "key" local
    # and the commented-out sha256 branch
    if len(file_hash) != 32 and len(file_hash) != 40:
        # CONSISTENCY FIX: bottle's attribute is response.status; the
        # original's response.code assignment is inert.
        response.status = 400
        return jsonize({'message': 'Invalid hash format (use MD5, SHA1 or SHA2)'})
    mdc = MetaController()
    res = mdc.read(file_hash)
    if res is None:  # idiom fix: was "== None"
        response.status = 404
        return jsonize({'message': 'Metadata not found in the database'})
    log_event("metadata", file_hash)
    # NOTE(review): this variant dumps res without change_date_to_str(),
    # unlike its siblings — preserved as-is.
    return dumps(res)
def export_metadata():
    """Export metadata of the POSTed hashes, one .txt per hash, as a zip."""
    mdc = MetaController()
    hashes = request.forms.dict.get("file_hash[]")
    # removed: unused dump_to_save accumulator
    random_id = id_generator()
    tmp_path = "/tmp/meta_export"
    tmp_folder = os.path.join(tmp_path, random_id)
    call_with_output(["mkdir", "-p", tmp_folder])
    # renamed loop var: "hash" shadowed the builtin
    for file_hash in hashes:
        file_hash = clean_hash(file_hash.replace('\r', ''))
        res = mdc.read(file_hash)
        dump = dumps(res, indent=4)
        file_name = os.path.join(tmp_folder, str(file_hash) + '.txt')
        # "with" closes the handle even if write() raises
        with open(file_name, "w") as fd:
            fd.write(dump)
    zip_path = os.path.join(tmp_path, random_id + '.zip')
    call_with_output(["zip", "-jr", zip_path, tmp_folder])
    resp = static_file(str(random_id) + '.zip', root=tmp_path, download=True)
    # cookie consumed by the client-side jQuery fileDownload plugin
    resp.set_cookie('fileDownload', 'true')
    # clean up scratch files; the response already holds the zip handle
    shutil.rmtree(tmp_folder)
    os.remove(zip_path)
    return resp
def export_metadata():
    """Export metadata of the POSTed hashes, one .txt per hash, as a zip."""
    mdc = MetaController()
    hashes = request.forms.dict.get("file_hash[]")
    # removed: unused dump_to_save accumulator
    random_id = id_generator()
    tmp_path = "/tmp/meta_export"
    tmp_folder = os.path.join(tmp_path, random_id)
    call_with_output(["mkdir", "-p", tmp_folder])
    # renamed loop var: "hash" shadowed the builtin
    for file_hash in hashes:
        file_hash = clean_hash(file_hash.replace('\r', ''))
        res = mdc.read(file_hash)
        dump = dumps(res, indent=4)
        file_name = os.path.join(tmp_folder, str(file_hash) + '.txt')
        # "with" closes the handle even if write() raises
        with open(file_name, "w") as fd:
            fd.write(dump)
    zip_path = os.path.join(tmp_path, random_id + '.zip')
    call_with_output(["zip", "-jr", zip_path, tmp_folder])
    resp = static_file(str(random_id) + '.zip', root=tmp_path, download=True)
    # cookie consumed by the client-side jQuery fileDownload plugin
    resp.set_cookie('fileDownload', 'true')
    # clean up scratch files; the response already holds the zip handle
    shutil.rmtree(tmp_folder)
    os.remove(zip_path)
    return resp
def get_result_from_av():
    """Fetch AV scan results from VirusTotal for the requested hash."""
    raw = request.query.file_hash
    if len(raw) == 0:
        response.status = 400
        return jsonize({'error': 4, 'error_message': 'file_hash parameter is missing.'})
    cleaned = clean_hash(raw)
    if not valid_hash(cleaned):
        return jsonize({'error': 5, 'error_message': 'Invalid hash format.'})
    if len(cleaned) != 40:
        # anything but a sha1 goes through the metadata index first
        hits = SearchModule.search_by_id("1=" + str(cleaned), 1, [], True)
        if len(hits) == 0:
            response.status = 400
            return jsonize({'error': 6, 'error_message': 'File not found'})
        sha1 = hits[0]["sha1"]
    else:
        sha1 = cleaned
    if not KeyManager().check_keys_in_secrets():
        return jsonize({'error': 7, "error_message": "Error: VirusTotal API key missing from secrets.py file"})
    av_result = get_av_result(sha1, 'high')
    status = av_result.get('status')
    # status -> response payload dispatch table
    known = {
        "added": {"message": "AV scans downloaded."},
        "already_had_it": {"message": "File already have AV scans."},
        "not_found": {"error": 10, "error_message": "Not found on VT."},
        "no_key_available": {"error": 11, "error_message": "No key available right now. Please try again later."},
    }
    if status in known:
        return jsonize(known[status])
    logging.error("av_result for hash=" + str(sha1))
    logging.error("av_result=" + str(av_result))
    return jsonize({"error": 9, "error_message": "Cannot get analysis."})
def api_batch_process_debug_file():
    """Streaming (debug) batch processor with local-package and VT fallback.

    For each hash: process immediately if the sample exists locally but has
    no metadata; otherwise queue by sha1, downloading from VT when unknown.
    """
    yield "<html><body><pre>"
    yield "Running Batch process\n"
    file_hashes = request.forms.get('file_hash')
    if file_hashes is None:
        response.status = 422
        logging.debug("api_batch_process_debug_file(): file_hash is missing")
        yield "file_hash parameter is missing"
        # BUG FIX: original fell through and crashed on None.split()
        return
    not_found = []
    added_to_queue = 0
    downloaded_from_vt = 0
    for hash_id in file_hashes.split("\n"):
        hash_id = clean_hash(hash_id)
        if hash_id is None:
            continue
        data = "1=" + hash_id
        if len(hash_id) == 40 or len(hash_id) == 32:
            pc = PackageController()
            res = pc.getFile(hash_id)
            if res is not None and len(SearchModule.search_by_id(data, 1, [], False)) == 0:
                # sample present locally but unindexed: process it right away
                logging.debug("Processing right now: " + str(hash_id))
                process_file(hash_id)
                if envget('auto_get_av_result'):
                    add_task_to_download_av_result(hash_id)
                continue
        res = SearchModule.search_by_id(data, 1, [], False)
        if len(res) == 0:
            # BUG FIX: was "legging.debug" (NameError at runtime)
            logging.debug("process_debug(): metadata of " + str(hash_id) +
                          " was not found. We will look in Pc. \nhash length: " +
                          str(len(hash_id)))
            if len(hash_id) == 40 or len(hash_id) == 32:
                pc = PackageController()
                res = pc.getFile(hash_id)
                if res is not None:
                    logging.debug(
                        "process_debug(): hash was found (" + str(hash_id) + ")")
                else:
                    logging.debug(
                        "process_debug(): hash was not found(" + str(hash_id) + ")")
            logging.debug("process_debug():")
            logging.debug("process_debug(): going to search " + str(hash_id) + " in vt")
            add_response = SearchModule.add_file_from_vt(hash_id)
            sha1 = add_response.get('hash')
            if sha1 is None:
                logging.debug("process_debug(): sha1 is None: " + str(hash_id))
                not_found.append(hash_id)
                continue
            downloaded_from_vt += 1
        else:
            sha1 = res[0]["sha1"]
        added_to_queue += 1
        add_hash_to_process_queue(sha1)
        if envget('auto_get_av_result'):
            add_task_to_download_av_result(sha1)
        yield str(sha1) + "\n"
    responsex = str(added_to_queue) + " files added to the process queue.\n"
    if downloaded_from_vt > 0:
        responsex += str(downloaded_from_vt) + " new hashes.\n"
    if len(not_found) != 0:
        responsex += str(len(not_found)) + " hashes not found.\n"
        responsex += "Not Found:\n"
        for aux in not_found:
            responsex = responsex + str(aux) + "\n"
    yield responsex
    yield "END"
def api_batch_process_debug_file():
    """Streaming (debug) batch processor with local-package and VT fallback.

    For each hash: process immediately if the sample exists locally but has
    no metadata; otherwise queue by sha1, downloading from VT when unknown.
    """
    yield "<html><body><pre>"
    yield "Running Batch process\n"
    file_hashes = request.forms.get('file_hash')
    if file_hashes is None:
        response.status = 422
        logging.debug("api_batch_process_debug_file(): file_hash is missing")
        yield "file_hash parameter is missing"
        # BUG FIX: original fell through and crashed on None.split()
        return
    not_found = []
    added_to_queue = 0
    downloaded_from_vt = 0
    for hash_id in file_hashes.split("\n"):
        hash_id = clean_hash(hash_id)
        if hash_id is None:
            continue
        data = "1=" + hash_id
        if len(hash_id) == 40 or len(hash_id) == 32:
            pc = PackageController()
            res = pc.getFile(hash_id)
            if res is not None and len(SearchModule.search_by_id(data, 1, [], False)) == 0:
                # sample present locally but unindexed: process it right away
                logging.debug("Processing right now: " + str(hash_id))
                process_file(hash_id)
                if env['auto_get_av_result']:
                    add_task_to_download_av_result(hash_id)
                continue
        res = SearchModule.search_by_id(data, 1, [], False)
        if len(res) == 0:
            # BUG FIX: was "legging.debug" (NameError at runtime)
            logging.debug("process_debug(): metadata of " + str(hash_id) +
                          " was not found. We will look in Pc. \nhash length: " +
                          str(len(hash_id)))
            if len(hash_id) == 40 or len(hash_id) == 32:
                pc = PackageController()
                res = pc.getFile(hash_id)
                if res is not None:
                    logging.debug("process_debug(): hash was found (" +
                                  str(hash_id) + ")")
                else:
                    logging.debug("process_debug(): hash was not found(" +
                                  str(hash_id) + ")")
            logging.debug("process_debug():")
            logging.debug("process_debug(): going to search " + str(hash_id) + " in vt")
            add_response = SearchModule.add_file_from_vt(hash_id)
            sha1 = add_response.get('hash')
            if sha1 is None:  # idiom fix: was "== None"
                logging.debug("process_debug(): sha1 is None: " + str(hash_id))
                not_found.append(hash_id)
                continue
            downloaded_from_vt += 1
        else:
            sha1 = res[0]["sha1"]
        added_to_queue += 1
        add_hash_to_process_queue(sha1)
        if env['auto_get_av_result']:
            add_task_to_download_av_result(sha1)
        yield str(sha1) + "\n"
    responsex = str(added_to_queue) + " files added to the process queue.\n"
    if downloaded_from_vt > 0:
        responsex += str(downloaded_from_vt) + " new hashes.\n"
    if len(not_found) != 0:
        responsex += str(len(not_found)) + " hashes not found.\n"
        responsex += "Not Found:\n"
        for aux in not_found:
            responsex = responsex + str(aux) + "\n"
    yield responsex
    yield "END"