def api_batch_process_debug_file():
    """Batch-process a newline-separated list of file hashes.

    Reads the ``file_hash`` form field, cleans each hash, and looks it up
    via SearchModule; hashes not already indexed are fetched from
    VirusTotal. Every resolved sha1 is queued for processing (and its AV
    result fetched when ``env['auto_get_av_result']`` is set).

    Yields: progress text as an HTML <pre> stream, then a summary line
    block, then the literal string "END".
    """
    yield "<html><body><pre>"
    yield "Running Batch process\n"
    file_hashes = request.forms.get('file_hash')
    # Transform file_hashes into a list of hashes.
    not_found = []            # hashes resolvable neither locally nor via VT
    added_to_queue = 0
    downloaded_from_vt = 0
    for hash_id in file_hashes.split("\n"):
        hash_id = clean_hash(hash_id)
        if hash_id is None:
            continue
        data = "1=" + hash_id
        res = SearchModule.search_by_id(data, 1, [], False)
        if len(res) == 0:
            # Not indexed yet: try to download the sample from VirusTotal.
            print("downloading " + str(hash_id) + " from vt")
            sha1 = SearchModule.add_file_from_vt(hash_id)
            if sha1 is None:  # 'is None', not '== None'
                not_found.append(hash_id)
                continue
            downloaded_from_vt += 1
        else:
            sha1 = res[0]["sha1"]
        added_to_queue += 1
        add_hash_to_process_queue(sha1)
        if env['auto_get_av_result']:
            get_av_result(sha1)
        yield str(sha1) + "\n"
    responsex = str(added_to_queue) + " files added to the process queue.\n"
    if downloaded_from_vt > 0:
        responsex += str(downloaded_from_vt) + " new hashes.\n"
    if len(not_found) != 0:
        responsex += str(len(not_found)) + " hashes not found.\n"
        responsex += "Not Found:\n"
        for aux in not_found:
            responsex = responsex + str(aux) + "\n"
    yield responsex
    yield "END"
def api_batch_process_debug_file():
    """Batch-process a newline-separated list of file hashes.

    Reads the ``file_hash`` form field, cleans each hash, and looks it up
    via SearchModule; hashes not already indexed are fetched from
    VirusTotal. Every resolved sha1 is queued for processing.

    Yields: progress text as an HTML <pre> stream, then a summary line
    block, then the literal string "END".
    """
    yield "<html><body><pre>"
    yield "Running Batch process\n"
    file_hashes = request.forms.get('file_hash')
    # Transform file_hashes into a list of hashes.
    not_found = []            # hashes resolvable neither locally nor via VT
    added_to_queue = 0
    downloaded_from_vt = 0
    for hash_id in file_hashes.split("\n"):
        hash_id = clean_hash(hash_id)
        if hash_id is None:
            continue
        data = "1=" + hash_id
        res = SearchModule.search_by_id(data, 1, [], False)
        if len(res) == 0:
            # Not indexed yet: try to download the sample from VirusTotal.
            print("downloading " + str(hash_id) + " from vt")
            sha1 = SearchModule.add_file_from_vt(hash_id)
            if sha1 is None:  # 'is None', not '== None'
                not_found.append(hash_id)
                continue
            downloaded_from_vt += 1
        else:
            sha1 = res[0]["sha1"]
        added_to_queue += 1
        add_hash_to_process_queue(sha1)
        yield str(sha1) + "\n"
    responsex = str(added_to_queue) + " files added to the process queue.\n"
    if downloaded_from_vt > 0:
        responsex += str(downloaded_from_vt) + " new hashes.\n"
    if len(not_found) != 0:
        responsex += str(len(not_found)) + " hashes not found.\n"
        responsex += "Not Found:\n"
        for aux in not_found:
            responsex = responsex + str(aux) + "\n"
    yield responsex
    yield "END"
def api_batch_process_debug_file():
    """Batch-process a newline-separated list of file hashes (debug variant).

    Reads the ``file_hash`` form field (422 if missing), cleans each hash,
    and for 32/40-char hashes already stored in the PackageController but
    not yet indexed, processes them immediately. Remaining hashes are
    searched via SearchModule and, if unknown, fetched from VirusTotal.
    Every resolved sha1 is queued for processing (and an AV-result download
    task added when ``env['auto_get_av_result']`` is set).

    Yields: progress text as an HTML <pre> stream, then a summary line
    block, then the literal string "END".
    """
    yield "<html><body><pre>"
    yield "Running Batch process\n"
    file_hashes = request.forms.get('file_hash')
    if file_hashes is None:
        response.status = 422
        logging.debug("api_batch_process_debug_file(): file_hash is missing")
        yield "file_hash parameter is missing"
        # Bug fix: stop here; falling through would crash on None.split().
        return
    # Transform file_hashes into a list of hashes.
    not_found = []            # hashes resolvable neither locally nor via VT
    added_to_queue = 0
    downloaded_from_vt = 0
    for hash_id in file_hashes.split("\n"):
        hash_id = clean_hash(hash_id)
        if hash_id is None:
            continue
        data = "1=" + hash_id
        # 40 chars = sha1, 32 chars = md5 (length check only; not verified here).
        if len(hash_id) == 40 or len(hash_id) == 32:
            pc = PackageController()
            res = pc.getFile(hash_id)
            if res is not None and len(
                    SearchModule.search_by_id(data, 1, [], False)) == 0:
                # Sample is stored but unindexed: process it inline.
                logging.debug("Processing right now: " + str(hash_id))
                process_file(hash_id)
                if env['auto_get_av_result']:
                    add_task_to_download_av_result(hash_id)
                continue
        res = SearchModule.search_by_id(data, 1, [], False)
        if len(res) == 0:
            # Bug fix: was 'legging.debug' (NameError) with a string literal
            # broken across a physical line.
            logging.debug("process_debug(): metadata of " + str(hash_id) +
                          " was not found. We will look in Pc. hash length: " +
                          str(len(hash_id)))
            if len(hash_id) == 40 or len(hash_id) == 32:
                pc = PackageController()
                res = pc.getFile(hash_id)
                if res is not None:
                    logging.debug("process_debug(): hash was found (" +
                                  str(hash_id) + ")")
                else:
                    logging.debug("process_debug(): hash was not found(" +
                                  str(hash_id) + ")")
            logging.debug("process_debug():")
            logging.debug("process_debug(): going to search " + str(hash_id) +
                          " in vt")
            add_response = SearchModule.add_file_from_vt(hash_id)
            sha1 = add_response.get('hash')
            if sha1 is None:
                logging.debug("process_debug(): sha1 is None: " + str(hash_id))
                not_found.append(hash_id)
                continue
            downloaded_from_vt += 1
        else:
            sha1 = res[0]["sha1"]
        added_to_queue += 1
        add_hash_to_process_queue(sha1)
        if env['auto_get_av_result']:
            add_task_to_download_av_result(sha1)
        yield str(sha1) + "\n"
    responsex = str(added_to_queue) + " files added to the process queue.\n"
    if downloaded_from_vt > 0:
        responsex += str(downloaded_from_vt) + " new hashes.\n"
    if len(not_found) != 0:
        responsex += str(len(not_found)) + " hashes not found.\n"
        responsex += "Not Found:\n"
        for aux in not_found:
            responsex = responsex + str(aux) + "\n"
    yield responsex
    yield "END"
def api_batch_process_debug_file():
    """Batch-process a newline-separated list of file hashes (debug variant).

    Reads the ``file_hash`` form field (422 if missing), cleans each hash,
    and for 32/40-char hashes already stored in the PackageController but
    not yet indexed, processes them immediately. Remaining hashes are
    searched via SearchModule and, if unknown, fetched from VirusTotal.
    Every resolved sha1 is queued for processing (and an AV-result download
    task added when ``envget('auto_get_av_result')`` is truthy).

    Yields: progress text as an HTML <pre> stream, then a summary line
    block, then the literal string "END".
    """
    yield "<html><body><pre>"
    yield "Running Batch process\n"
    file_hashes = request.forms.get('file_hash')
    if file_hashes is None:
        response.status = 422
        logging.debug("api_batch_process_debug_file(): file_hash is missing")
        yield "file_hash parameter is missing"
        # Bug fix: stop here; falling through would crash on None.split().
        return
    # Transform file_hashes into a list of hashes.
    not_found = []            # hashes resolvable neither locally nor via VT
    added_to_queue = 0
    downloaded_from_vt = 0
    for hash_id in file_hashes.split("\n"):
        hash_id = clean_hash(hash_id)
        if hash_id is None:
            continue
        data = "1=" + hash_id
        # 40 chars = sha1, 32 chars = md5 (length check only; not verified here).
        if len(hash_id) == 40 or len(hash_id) == 32:
            pc = PackageController()
            res = pc.getFile(hash_id)
            if res is not None and len(
                    SearchModule.search_by_id(data, 1, [], False)) == 0:
                # Sample is stored but unindexed: process it inline.
                logging.debug("Processing right now: " + str(hash_id))
                process_file(hash_id)
                if envget('auto_get_av_result'):
                    add_task_to_download_av_result(hash_id)
                continue
        res = SearchModule.search_by_id(data, 1, [], False)
        if len(res) == 0:
            # Bug fix: was 'legging.debug' (NameError) with a string literal
            # broken across a physical line.
            logging.debug("process_debug(): metadata of " + str(hash_id) +
                          " was not found. We will look in Pc. hash length: " +
                          str(len(hash_id)))
            if len(hash_id) == 40 or len(hash_id) == 32:
                pc = PackageController()
                res = pc.getFile(hash_id)
                if res is not None:
                    logging.debug("process_debug(): hash was found (" +
                                  str(hash_id) + ")")
                else:
                    logging.debug("process_debug(): hash was not found(" +
                                  str(hash_id) + ")")
            logging.debug("process_debug():")
            logging.debug("process_debug(): going to search " + str(hash_id) +
                          " in vt")
            add_response = SearchModule.add_file_from_vt(hash_id)
            sha1 = add_response.get('hash')
            if sha1 is None:
                logging.debug("process_debug(): sha1 is None: " + str(hash_id))
                not_found.append(hash_id)
                continue
            downloaded_from_vt += 1
        else:
            sha1 = res[0]["sha1"]
        added_to_queue += 1
        add_hash_to_process_queue(sha1)
        if envget('auto_get_av_result'):
            add_task_to_download_av_result(sha1)
        yield str(sha1) + "\n"
    responsex = str(added_to_queue) + " files added to the process queue.\n"
    if downloaded_from_vt > 0:
        responsex += str(downloaded_from_vt) + " new hashes.\n"
    if len(not_found) != 0:
        responsex += str(len(not_found)) + " hashes not found.\n"
        responsex += "Not Found:\n"
        for aux in not_found:
            responsex = responsex + str(aux) + "\n"
    yield responsex
    yield "END"