def api_batch_process_file():
    """Queue a batch of file hashes for processing.

    Reads the 'file_hash' POST parameter (one hash per line), resolves each
    hash to its sha1 through the metadata search and queues it for
    processing.  Returns a jsonized summary message.
    """
    logging.debug("api_batch_process_file(): Running Batch process")
    file_hashes = request.forms.get('file_hash')
    if file_hashes is None:
        # BUG FIX: the original passed a set literal ({"Error: ..."}) to
        # jsonize(); json.dumps cannot serialize a set, so this branch
        # raised TypeError instead of reporting the error.
        return jsonize({"message": "Error: file_hash parameter is missing."})
    # Transform file_hashes into a list of hashes (one per line).
    not_found = []
    added_to_queue = 0
    downloaded_from_vt = 0
    for hash_id in file_hashes.split("\n"):
        hash_id = clean_hash(hash_id)
        if hash_id is None:
            continue
        data = "1=" + str(hash_id)
        res = SearchModule.search_by_id(data, 1, [], True)
        if len(res) == 0:
            not_found.append(hash_id)
            continue
        else:
            sha1 = res[0]["sha1"]
        added_to_queue += 1
        logging.debug(str(hash_id) + " added to queue")
        add_hash_to_process_queue(sha1)
    responsex = str(added_to_queue) + " files added to the process queue.\n"
    if downloaded_from_vt > 0:
        responsex += str(downloaded_from_vt) + " new hashes.\n"
    if len(not_found) != 0:
        responsex += str(len(not_found)) + " hashes not found.\n"
        responsex += "Not Found:\n"
        for aux in not_found:
            responsex = responsex + str(aux) + "\n"
    return jsonize({"message": responsex})
def search():
    """Handle a JSONP search request.

    Runs the metadata search for the query expression, queues the first
    few results for processing, and returns the rows together with the
    list of columns the client should display.
    """
    query = request.query.data
    raw_limit = request.query.limit
    selected_columns = request.query.getall("selected[]")
    logging.debug("search(). columns=" + str(selected_columns))
    limit = 0 if raw_limit == '' else int(raw_limit)
    # Escape the callback name before reflecting it into the response.
    callback_name = cgi.escape(request.query.callback)
    logging.debug("search(). callback=" + str(callback_name))
    results = SearchModule.search_by_id(query, limit, selected_columns, True)
    add_list_to_process_queue(results[0:10])
    # Work out which columns to expose (ugly stuff).
    if not selected_columns:
        visible = ["sha1", "description", "size"]
    else:
        visible = ["sha1"]
    for col in selected_columns:
        node = tree_menu.ids[int(col)]
        visible.append(str(node["path"]).split('.')[-1])
    payload = {"normal": results, "show": visible}
    return jsonp(clean_tree(payload), callback_name)
def api_batch_process_file():
    """Queue a batch of file hashes (one per line in 'file_hash') for
    processing and return a jsonized summary of what happened."""
    logging.debug("api_batch_process_file(): Running Batch process")
    file_hashes = request.forms.get('file_hash')
    if file_hashes is None:
        # BUG FIX: jsonize() was given a set literal, which json.dumps
        # cannot serialize (TypeError); return a proper dict instead.
        return jsonize({"message": "Error: file_hash parameter is missing."})
    # Transform file_hashes into a list of hashes.
    not_found = []
    added_to_queue = 0
    downloaded_from_vt = 0
    for hash_id in file_hashes.split("\n"):
        hash_id = clean_hash(hash_id)
        if hash_id is None:
            continue
        data = "1=" + str(hash_id)
        res = SearchModule.search_by_id(data, 1, [], True)
        if len(res) == 0:
            not_found.append(hash_id)
            continue
        else:
            sha1 = res[0]["sha1"]
        added_to_queue += 1
        logging.debug(str(hash_id) + " added to queue")
        add_hash_to_process_queue(sha1)
    responsex = str(added_to_queue) + " files added to the process queue.\n"
    if downloaded_from_vt > 0:
        responsex += str(downloaded_from_vt) + " new hashes.\n"
    if len(not_found) != 0:
        responsex += str(len(not_found)) + " hashes not found.\n"
        responsex += "Not Found:\n"
        for aux in not_found:
            responsex = responsex + str(aux) + "\n"
    return jsonize({"message": responsex})
def search():
    """Handle a JSONP search request (older variant: 3-argument
    SearchModule.search_by_id, no clean_tree post-processing).

    Runs the metadata search, queues the first results for processing and
    returns the rows plus the columns to display.
    """
    data = request.query.data
    str_lim = request.query.limit
    columns = request.query.getall("selected[]")
    # FIX: replaced Python-2-only debug 'print' statements with
    # logging.debug, consistent with the other search() variants.
    logging.debug("search(). columns=" + str(columns))
    if str_lim == '':
        limit = 0
    else:
        limit = int(str_lim)
    # Escape the JSONP callback to avoid reflecting raw input.
    callback_name = cgi.escape(request.query.callback)
    logging.debug("search(). callback=" + str(callback_name))
    res = SearchModule.search_by_id(data, limit, columns)
    add_list_to_process_queue(res[0:10])
    # Only show a few columns (ugly stuff).
    if len(columns) == 0:
        show = ["sha1", "description", "size"]
    else:
        show = ["sha1"]
    for col in columns:
        dic = tree_menu.ids[int(col)]
        path = str(dic["path"]).split('.')[-1]
        show.append(path)
    responsex = {}
    responsex["normal"] = res
    responsex["show"] = show
    return jsonp(responsex, callback_name)
def search():
    """JSONP search endpoint.

    Looks up metadata matching the query expression, queues the first ten
    results for processing, and answers with the rows and the column names
    the UI should render.
    """
    expression = request.query.data
    limit_param = request.query.limit
    columns = request.query.getall("selected[]")
    logging.debug("search(). columns=" + str(columns))
    if limit_param == '':
        limit = 0
    else:
        limit = int(limit_param)
    # Escape the callback to keep the JSONP response safe.
    callback_name = cgi.escape(request.query.callback)
    logging.debug("search(). callback=" + str(callback_name))
    rows = SearchModule.search_by_id(expression, limit, columns, True)
    add_list_to_process_queue(rows[0:10])
    # Decide which columns to show (ugly stuff).
    show = ["sha1", "description", "size"] if len(columns) == 0 else ["sha1"]
    for col in columns:
        entry = tree_menu.ids[int(col)]
        show.append(str(entry["path"]).split('.')[-1])
    responsex = {"normal": rows, "show": show}
    return jsonp(clean_tree(responsex), callback_name)
def search():
    """Handle a JSONP search request (older variant: 3-argument
    SearchModule.search_by_id, result returned without clean_tree)."""
    data = request.query.data
    str_lim = request.query.limit
    columns = request.query.getall("selected[]")
    # FIX: Python-2-only 'print' debug statements replaced with
    # logging.debug, matching the other search() variants in this file.
    logging.debug("search(). columns=" + str(columns))
    if str_lim == '':
        limit = 0
    else:
        limit = int(str_lim)
    # Escape the JSONP callback name before reflecting it.
    callback_name = cgi.escape(request.query.callback)
    logging.debug("search(). callback=" + str(callback_name))
    res = SearchModule.search_by_id(data, limit, columns)
    add_list_to_process_queue(res[0:10])
    # Only show a few columns (ugly stuff).
    if len(columns) == 0:
        show = ["sha1", "description", "size"]
    else:
        show = ["sha1"]
    for col in columns:
        dic = tree_menu.ids[int(col)]
        path = str(dic["path"]).split('.')[-1]
        show.append(path)
    responsex = {}
    responsex["normal"] = res
    responsex["show"] = show
    return jsonp(responsex, callback_name)
def get_result_from_av():
    """Download the VirusTotal AV scan result for the hash given in the
    'file_hash' query parameter.

    Non-sha1 hashes are resolved to a sha1 through the metadata search.
    Returns a jsonized message on success or an error dict otherwise.
    """
    hash_id = request.query.file_hash
    if len(hash_id) == 0:
        # BUG FIX: Bottle's response object exposes 'status', not 'code';
        # assigning response.code never actually set the HTTP 400 status
        # (the newer variants of this handler already use .status).
        response.status = 400
        return jsonize({'error': 4, 'error_message': 'file_hash parameter is missing.'})
    hash_id = clean_hash(hash_id)
    if not valid_hash(hash_id):
        return jsonize({'error': 5, 'error_message': 'Invalid hash format.'})
    if len(hash_id) != 40:
        # Not a sha1: resolve it through the metadata search.
        data = "1=" + str(hash_id)
        res = SearchModule.search_by_id(data, 1, [], True)
        if len(res) == 0:
            response.status = 400
            return jsonize({'error': 6, 'error_message': 'File not found'})
        else:
            sha1 = res[0]["sha1"]
    else:
        sha1 = hash_id
    if vt_key():
        av_result = get_av_result(sha1)
    else:
        return jsonize({'error': 7, "error_message": "Error: VirusTotal API key missing from secrets.py file"})
    # FIX: identity comparison with None ('is None' instead of '== None').
    if av_result is None:
        return jsonize({"error": 8, "error_message": "Cannot get analysis (hash not found in VT? out of credits?)"})
    return jsonize({"message": "AV scans downloaded."})
def get_package_file():
    """Build and serve a password-protected zip of the requested samples.

    Accepts either a 'file_hash[]' list or a newline-separated 'file_hash'
    form field.  Each hash is resolved to a sha1 when possible, fetched
    from the PackageController, and written into a temp folder that is
    then zipped (password 'codex') and returned as a download.
    """
    tmp_folder = "/tmp/mass_download"
    subprocess.call(["mkdir", "-p", tmp_folder])
    hashes = request.forms.dict.get("file_hash[]")
    if hashes is None:
        # BUG FIX: request.forms.get() returns None when the parameter is
        # absent, so the original unconditional .split() raised
        # AttributeError before the 'no file selected' check could run.
        single = request.forms.get("file_hash")
        if single is None:
            return jsonize({'message': 'Error. no file selected'})
        hashes = single.split("\n")
    if len(hashes) == 1:
        random_id = hashes[0]
    else:
        random_id = id_generator()
    folder_path = os.path.join(tmp_folder, random_id)
    subprocess.call(["mkdir", "-p", folder_path])
    zip_name = os.path.join(tmp_folder, random_id + ".zip")
    pc = PackageController()
    for file_hash in hashes:
        file_hash = clean_hash(file_hash.replace('\r', ''))
        if file_hash is None:
            # ROBUSTNESS: clean_hash() returns None for malformed lines;
            # skip them instead of crashing on "1=" + None below.
            continue
        data = "1=" + file_hash
        res = SearchModule.search_by_id(data, 1)
        if len(res) == 0:
            pass
        else:
            file_hash = res[0]["sha1"]
        res = pc.searchFile(file_hash)
        if res != 1 and res is not None:
            # File content available: write it out with its .codex name.
            file_name = os.path.join(folder_path, str(file_hash) + ".codex")
            res = pc.getFile(file_hash)
            with open(file_name, "wb") as fd:
                fd.write(res)
        elif res == 1:
            with open(os.path.join(folder_path, 'readme.txt'), 'a+') as fd:
                fd.write(str(file_hash) + " is not available to download.\n")
        elif res is None:
            with open(os.path.join(folder_path, 'readme.txt'), 'a+') as fd:
                fd.write(str(file_hash) + " not found.")
        else:
            logging.error("Unknown res:" + str(res))
    subprocess.call(["zip", "-P", "codex", "-jr", zip_name, folder_path])
    resp = static_file(str(random_id) + ".zip", root=tmp_folder, download=True)
    # Cookie consumed by the jquery.fileDownload plugin on the client.
    # http://johnculviner.com/jquery-file-download-plugin-for-ajax-like-feature-rich-file-downloads/
    resp.set_cookie('fileDownload', 'true')
    return resp
def get_package_file():
    """Serve a password-protected zip with the requested sample files.

    Hashes come from 'file_hash[]' (list) or 'file_hash' (newline-separated
    string).  Found files are written to a temp folder, missing ones are
    noted in readme.txt, and the folder is zipped (password 'codex').
    """
    tmp_folder = "/tmp/mass_download"
    subprocess.call(["mkdir", "-p", tmp_folder])
    hashes = request.forms.dict.get("file_hash[]")
    if hashes is None:
        # BUG FIX: guard against a missing 'file_hash' parameter; the
        # original called .split() on None and raised AttributeError.
        single = request.forms.get("file_hash")
        if single is None:
            return jsonize({'message': 'Error. no file selected'})
        hashes = single.split("\n")
    if len(hashes) == 1:
        random_id = hashes[0]
    else:
        random_id = id_generator()
    folder_path = os.path.join(tmp_folder, random_id)
    subprocess.call(["mkdir", "-p", folder_path])
    zip_name = os.path.join(tmp_folder, random_id + ".zip")
    pc = PackageController()
    for file_hash in hashes:
        file_hash = clean_hash(file_hash.replace('\r', ''))
        if file_hash is None:
            # ROBUSTNESS: skip malformed lines instead of crashing on
            # "1=" + None below.
            continue
        data = "1=" + file_hash
        res = SearchModule.search_by_id(data, 1)
        if len(res) == 0:
            pass
        else:
            file_hash = res[0]["sha1"]
        res = pc.searchFile(file_hash)
        if res != 1 and res is not None:
            res = pc.getFile(file_hash)
            file_name = os.path.join(folder_path, str(file_hash) + ".codex")
            with open(file_name, "wb") as fd:
                fd.write(res)
        elif res == 1:
            with open(os.path.join(folder_path, 'readme.txt'), 'a+') as fd:
                fd.write(str(file_hash) + " is not available to download.\n")
        elif res is None:
            with open(os.path.join(folder_path, 'readme.txt'), 'a+') as fd:
                fd.write(str(file_hash) + " not found.")
        else:
            logging.error("Unknown res:" + str(res))
    subprocess.call(["zip", "-P", "codex", "-jr", zip_name, folder_path])
    resp = static_file(str(random_id) + ".zip", root=tmp_folder, download=True)
    # Cookie consumed by the jquery.fileDownload plugin on the client.
    # http://johnculviner.com/jquery-file-download-plugin-for-ajax-like-feature-rich-file-downloads/
    resp.set_cookie('fileDownload', 'true')
    return resp
def get_result_from_av():
    """Fetch VirusTotal AV scan results for the hash in 'file_hash'.

    Non-sha1 hashes are resolved to a sha1 via the metadata search.  The
    outcome of get_av_result() is mapped to the corresponding API answer.
    """
    hash_id = request.query.file_hash
    if len(hash_id) == 0:
        response.status = 400
        return jsonize({
            'error': 4,
            'error_message': 'file_hash parameter is missing.'
        })
    hash_id = clean_hash(hash_id)
    if not valid_hash(hash_id):
        return jsonize({'error': 5, 'error_message': 'Invalid hash format.'})
    if len(hash_id) == 40:
        sha1 = hash_id
    else:
        # Resolve md5/sha256 to the stored sha1.
        lookup = SearchModule.search_by_id("1=" + str(hash_id), 1, [], True)
        if len(lookup) == 0:
            response.status = 400
            return jsonize({'error': 6, 'error_message': 'File not found'})
        sha1 = lookup[0]["sha1"]
    key_manager = KeyManager()
    if not key_manager.check_keys_in_secrets():
        return jsonize({
            'error': 7,
            "error_message": "Error: VirusTotal API key missing from secrets.py file"
        })
    av_result = get_av_result(sha1, 'high')
    status = av_result.get('status')
    if status == "added":
        return jsonize({"message": "AV scans downloaded."})
    if status == "already_had_it":
        return jsonize({"message": "File already have AV scans."})
    if status == "not_found":
        return jsonize({"error": 10, "error_message": "Not found on VT."})
    if status == "no_key_available":
        return jsonize({
            "error": 11,
            "error_message": "No key available right now. Please try again later."
        })
    # Unexpected status: log everything we know and report a generic error.
    logging.error("av_result for hash=" + str(sha1))
    logging.error("av_result=" + str(av_result))
    return jsonize({"error": 9, "error_message": "Cannot get analysis."})
def api_batch_process_file():
    """Queue a batch of hashes for processing; optionally fetch AV results.

    Reads 'file_hash' (one hash per line), resolves each to a sha1 and
    queues it.  When env['auto_get_av_result'] is set, also downloads the
    AV result for each queued sha1.
    """
    logging.debug("api_batch_process_file(): Running Batch process")
    file_hashes = request.forms.get('file_hash')
    if file_hashes is None:
        # BUG FIX: without this guard the .split() below raised
        # AttributeError whenever the 'file_hash' parameter was missing.
        return jsonize({"message": "Error: file_hash parameter is missing."})
    # Transform file_hashes into a list of hashes.
    not_found = []
    added_to_queue = 0
    downloaded_from_vt = 0
    for hash_id in file_hashes.split("\n"):
        hash_id = clean_hash(hash_id)
        if hash_id is None:
            continue
        data = "1=" + str(hash_id)
        res = SearchModule.search_by_id(data, 1, [], True)
        if len(res) == 0:
            not_found.append(hash_id)
            continue
        else:
            sha1 = res[0]["sha1"]
        added_to_queue += 1
        # FIX: Python-2-only 'print' replaced with logging.debug.
        logging.debug(str(hash_id) + " added to queue")
        add_hash_to_process_queue(sha1)
        if env['auto_get_av_result']:
            get_av_result(sha1)
    responsex = str(added_to_queue) + " files added to the process queue.\n"
    if downloaded_from_vt > 0:
        responsex += str(downloaded_from_vt) + " new hashes.\n"
    if len(not_found) != 0:
        responsex += str(len(not_found)) + " hashes not found.\n"
        responsex += "Not Found:\n"
        for aux in not_found:
            responsex = responsex + str(aux) + "\n"
    return jsonize({"message": responsex})
def api_batch_process_file():
    """Queue a batch of hashes (newline-separated 'file_hash' form field)
    for processing and return a jsonized summary."""
    logging.debug("api_batch_process_file(): Running Batch process")
    file_hashes = request.forms.get('file_hash')
    if file_hashes is None:
        # BUG FIX: guard the missing-parameter case; the original crashed
        # with AttributeError on None.split("\n").
        return jsonize({"message": "Error: file_hash parameter is missing."})
    # Transform file_hashes into a list of hashes.
    not_found = []
    added_to_queue = 0
    downloaded_from_vt = 0
    for hash_id in file_hashes.split("\n"):
        hash_id = clean_hash(hash_id)
        if hash_id is None:
            continue
        data = "1=" + str(hash_id)
        res = SearchModule.search_by_id(data, 1, [], False)
        if len(res) == 0:
            not_found.append(hash_id)
            continue
        else:
            sha1 = res[0]["sha1"]
        added_to_queue += 1
        # FIX: Python-2-only 'print' replaced with logging.debug.
        logging.debug(str(hash_id) + " added to queue")
        add_hash_to_process_queue(sha1)
    responsex = str(added_to_queue) + " files added to the process queue.\n"
    if downloaded_from_vt > 0:
        responsex += str(downloaded_from_vt) + " new hashes.\n"
    if len(not_found) != 0:
        responsex += str(len(not_found)) + " hashes not found.\n"
        responsex += "Not Found:\n"
        for aux in not_found:
            responsex = responsex + str(aux) + "\n"
    return jsonize({"message": responsex})
def api_batch_process_debug_file():
    """Batch-process hashes, streaming progress back as chunked HTML.

    Generator handler: yields each queued sha1 as it is processed, then a
    summary.  Hashes missing from the metadata store are fetched from
    VirusTotal.
    """
    yield "<html><body><pre>"
    yield "Running Batch process\n"
    file_hashes = request.forms.get('file_hash')
    if file_hashes is None:
        # BUG FIX: without this guard the .split() below raised
        # AttributeError when the 'file_hash' parameter was missing.
        yield "file_hash parameter is missing"
        return
    # Transform file_hashes into a list of hashes.
    not_found = []
    added_to_queue = 0
    downloaded_from_vt = 0
    for hash_id in file_hashes.split("\n"):
        hash_id = clean_hash(hash_id)
        if hash_id is None:
            continue
        data = "1=" + hash_id
        res = SearchModule.search_by_id(data, 1, [], False)
        if len(res) == 0:
            # Not in the metadata store: try to fetch it from VirusTotal.
            # FIX: Python-2-only 'print' replaced with logging.debug.
            logging.debug("downloading " + str(hash_id) + " from vt")
            sha1 = SearchModule.add_file_from_vt(hash_id)
            # FIX: identity comparison with None ('is None').
            if sha1 is None:
                not_found.append(hash_id)
                continue
            else:
                downloaded_from_vt += 1
        else:
            sha1 = res[0]["sha1"]
        added_to_queue += 1
        add_hash_to_process_queue(sha1)
        if env['auto_get_av_result']:
            get_av_result(sha1)
        yield str(sha1) + "\n"
    responsex = str(added_to_queue) + " files added to the process queue.\n"
    if downloaded_from_vt > 0:
        responsex += str(downloaded_from_vt) + " new hashes.\n"
    if len(not_found) != 0:
        responsex += str(len(not_found)) + " hashes not found.\n"
        responsex += "Not Found:\n"
        for aux in not_found:
            responsex = responsex + str(aux) + "\n"
    yield responsex
    yield "END"
def api_batch_process_debug_file():
    """Batch-process hashes, streaming progress back as chunked HTML.

    Generator handler: hashes not found in the metadata store are fetched
    from VirusTotal; each queued sha1 is yielded, followed by a summary.
    """
    yield "<html><body><pre>"
    yield "Running Batch process\n"
    file_hashes = request.forms.get('file_hash')
    if file_hashes is None:
        # BUG FIX: guard the missing-parameter case; the original crashed
        # with AttributeError on None.split("\n").
        yield "file_hash parameter is missing"
        return
    # Transform file_hashes into a list of hashes.
    not_found = []
    added_to_queue = 0
    downloaded_from_vt = 0
    for hash_id in file_hashes.split("\n"):
        hash_id = clean_hash(hash_id)
        if hash_id is None:
            continue
        data = "1=" + hash_id
        res = SearchModule.search_by_id(data, 1, [], False)
        if len(res) == 0:
            # FIX: Python-2-only 'print' replaced with logging.debug.
            logging.debug("downloading " + str(hash_id) + " from vt")
            sha1 = SearchModule.add_file_from_vt(hash_id)
            # FIX: identity comparison with None ('is None').
            if sha1 is None:
                not_found.append(hash_id)
                continue
            else:
                downloaded_from_vt += 1
        else:
            sha1 = res[0]["sha1"]
        added_to_queue += 1
        add_hash_to_process_queue(sha1)
        yield str(sha1) + "\n"
    responsex = str(added_to_queue) + " files added to the process queue.\n"
    if downloaded_from_vt > 0:
        responsex += str(downloaded_from_vt) + " new hashes.\n"
    if len(not_found) != 0:
        responsex += str(len(not_found)) + " hashes not found.\n"
        responsex += "Not Found:\n"
        for aux in not_found:
            responsex = responsex + str(aux) + "\n"
    yield responsex
    yield "END"
def get_result_from_av():
    """Download VirusTotal AV scans for the hash in the 'file_hash' query
    parameter and translate get_av_result()'s status into an API reply."""
    hash_id = request.query.file_hash
    if len(hash_id) == 0:
        response.status = 400
        return jsonize({'error': 4,
                        'error_message': 'file_hash parameter is missing.'})
    hash_id = clean_hash(hash_id)
    if not valid_hash(hash_id):
        return jsonize({'error': 5, 'error_message': 'Invalid hash format.'})
    # A 40-char hash is already a sha1; otherwise resolve via search.
    if len(hash_id) == 40:
        sha1 = hash_id
    else:
        found = SearchModule.search_by_id("1=" + str(hash_id), 1, [], True)
        if len(found) == 0:
            response.status = 400
            return jsonize({'error': 6, 'error_message': 'File not found'})
        sha1 = found[0]["sha1"]
    key_manager = KeyManager()
    if not key_manager.check_keys_in_secrets():
        return jsonize({'error': 7,
                        "error_message": "Error: VirusTotal API key missing from secrets.py file"})
    av_result = get_av_result(sha1, 'high')
    status = av_result.get('status')
    if status == "added":
        return jsonize({"message": "AV scans downloaded."})
    elif status == "already_had_it":
        return jsonize({"message": "File already have AV scans."})
    elif status == "not_found":
        return jsonize({"error": 10, "error_message": "Not found on VT."})
    elif status == "no_key_available":
        return jsonize({"error": 11,
                        "error_message": "No key available right now. Please try again later."})
    # Anything else is unexpected: log and answer with a generic error.
    logging.error("av_result for hash=" + str(sha1))
    logging.error("av_result=" + str(av_result))
    return jsonize({"error": 9, "error_message": "Cannot get analysis."})
def api_batch_process_debug_file():
    """Batch-process hashes, streaming progress back as chunked HTML.

    For each hash: if the sample exists in the PackageController but has no
    metadata yet, it is processed immediately; otherwise it is looked up in
    the metadata store and, failing that, fetched from VirusTotal before
    being queued.  Each handled sha1 is yielded, followed by a summary.
    """
    yield "<html><body><pre>"
    yield "Running Batch process\n"
    file_hashes = request.forms.get('file_hash')
    if file_hashes is None:
        response.status = 422
        logging.debug("api_batch_process_debug_file(): file_hash is missing")
        yield "file_hash parameter is missing"
        # BUG FIX: without this return, execution fell through and crashed
        # on file_hashes.split() with file_hashes being None.
        return
    # transform file_hashes in a list of hashes.
    not_found = []
    added_to_queue = 0
    downloaded_from_vt = 0
    for hash_id in file_hashes.split("\n"):
        hash_id = clean_hash(hash_id)
        if hash_id is None:
            continue
        data = "1=" + hash_id
        if len(hash_id) == 40 or len(hash_id) == 32:
            # Sample present in the package store but missing metadata:
            # process it right away instead of queueing.
            pc = PackageController()
            res = pc.getFile(hash_id)
            if res is not None and len(
                    SearchModule.search_by_id(data, 1, [], False)) == 0:
                logging.debug("Processing right now: " + str(hash_id))
                process_file(hash_id)
                if env['auto_get_av_result']:
                    add_task_to_download_av_result(hash_id)
                continue
        res = SearchModule.search_by_id(data, 1, [], False)
        if len(res) == 0:
            # BUG FIX: 'legging.debug' was a typo for 'logging.debug' and
            # raised NameError whenever this branch was reached.
            logging.debug("process_debug(): metadata of " + str(hash_id) +
                          " was not found. We will look in Pc. hash length: " +
                          str(len(hash_id)))
            if len(hash_id) == 40 or len(hash_id) == 32:
                pc = PackageController()
                res = pc.getFile(hash_id)
                if res is not None:
                    logging.debug("process_debug(): hash was found (" +
                                  str(hash_id) + ")")
                else:
                    logging.debug("process_debug(): hash was not found(" +
                                  str(hash_id) + ")")
            logging.debug("process_debug():")
            logging.debug("process_debug(): going to search " +
                          str(hash_id) + " in vt")
            add_response = SearchModule.add_file_from_vt(hash_id)
            sha1 = add_response.get('hash')
            if sha1 is None:
                logging.debug("process_debug(): sha1 is None: " + str(hash_id))
                not_found.append(hash_id)
                continue
            else:
                downloaded_from_vt += 1
        else:
            sha1 = res[0]["sha1"]
        added_to_queue += 1
        add_hash_to_process_queue(sha1)
        if env['auto_get_av_result']:
            add_task_to_download_av_result(sha1)
        yield str(sha1) + "\n"
    responsex = str(added_to_queue) + " files added to the process queue.\n"
    if downloaded_from_vt > 0:
        responsex += str(downloaded_from_vt) + " new hashes.\n"
    if len(not_found) != 0:
        responsex += str(len(not_found)) + " hashes not found.\n"
        responsex += "Not Found:\n"
        for aux in not_found:
            responsex = responsex + str(aux) + "\n"
    yield responsex
    yield "END"
def api_batch_process_debug_file():
    """Batch-process hashes, streaming progress back as chunked HTML.

    Variant using envget() for configuration.  Samples already in the
    PackageController without metadata are processed immediately; others
    are looked up in the metadata store or fetched from VirusTotal, then
    queued.  Each handled sha1 is yielded, followed by a summary.
    """
    yield "<html><body><pre>"
    yield "Running Batch process\n"
    file_hashes = request.forms.get('file_hash')
    if file_hashes is None:
        response.status = 422
        logging.debug("api_batch_process_debug_file(): file_hash is missing")
        yield "file_hash parameter is missing"
        # BUG FIX: without this return, execution fell through and crashed
        # on file_hashes.split() with file_hashes being None.
        return
    # transform file_hashes in a list of hashes.
    not_found = []
    added_to_queue = 0
    downloaded_from_vt = 0
    for hash_id in file_hashes.split("\n"):
        hash_id = clean_hash(hash_id)
        if hash_id is None:
            continue
        data = "1=" + hash_id
        if len(hash_id) == 40 or len(hash_id) == 32:
            # Sample present in the package store but missing metadata:
            # process it right away instead of queueing.
            pc = PackageController()
            res = pc.getFile(hash_id)
            if res is not None and len(SearchModule.search_by_id(data, 1, [], False)) == 0:
                logging.debug("Processing right now: " + str(hash_id))
                process_file(hash_id)
                if envget('auto_get_av_result'):
                    add_task_to_download_av_result(hash_id)
                continue
        res = SearchModule.search_by_id(data, 1, [], False)
        if len(res) == 0:
            # BUG FIX: 'legging.debug' was a typo for 'logging.debug' and
            # raised NameError whenever this branch was reached.
            logging.debug("process_debug(): metadata of " + str(hash_id) +
                          " was not found. We will look in Pc. hash length: " +
                          str(len(hash_id)))
            if len(hash_id) == 40 or len(hash_id) == 32:
                pc = PackageController()
                res = pc.getFile(hash_id)
                if res is not None:
                    logging.debug(
                        "process_debug(): hash was found (" + str(hash_id) + ")")
                else:
                    logging.debug(
                        "process_debug(): hash was not found(" + str(hash_id) + ")")
            logging.debug("process_debug():")
            logging.debug("process_debug(): going to search " +
                          str(hash_id) + " in vt")
            add_response = SearchModule.add_file_from_vt(hash_id)
            sha1 = add_response.get('hash')
            if sha1 is None:
                logging.debug("process_debug(): sha1 is None: " + str(hash_id))
                not_found.append(hash_id)
                continue
            else:
                downloaded_from_vt += 1
        else:
            sha1 = res[0]["sha1"]
        added_to_queue += 1
        add_hash_to_process_queue(sha1)
        if envget('auto_get_av_result'):
            add_task_to_download_av_result(sha1)
        yield str(sha1) + "\n"
    responsex = str(added_to_queue) + " files added to the process queue.\n"
    if downloaded_from_vt > 0:
        responsex += str(downloaded_from_vt) + " new hashes.\n"
    if len(not_found) != 0:
        responsex += str(len(not_found)) + " hashes not found.\n"
        responsex += "Not Found:\n"
        for aux in not_found:
            responsex = responsex + str(aux) + "\n"
    yield responsex
    yield "END"