def post_process(*args):
    """Celery post-processing hook run after a scan command completes.

    This variant handles gobuster output only: it parses the tool's output
    file for discovered URLs, records each URL as a path row in the
    workspace DB, and collects (url, screenshot_filename) pairs.

    Expects exactly 11 positional args (unpacked below); the signature is
    fixed by the celery task chain that invokes this hook.
    """
    command_name, populated_command, output_base_dir, workspace, vhost, \
        host_dir, simulation, scanned_service_port, scanned_service, \
        scanned_service_protocol, celery_path = args
    screenshot_name = ""
    urls_to_screenshot = []
    urls_to_screenshot_with_filenames = []

    if "gobuster" in populated_command:
        screenshot_name = "gobuster"
        # Screenshots live under <out>/celerystalkReports/screens/<vhost>_<port>_<proto>
        scan_output_base_file_dir = os.path.join(
            output_base_dir, "celerystalkReports", "screens",
            vhost + "_" + str(scanned_service_port) + "_" + scanned_service_protocol)
        # Create the screenshot directory only if missing (replaces the old
        # bare try/os.stat/except pattern, which swallowed every error).
        if not os.path.isdir(scan_output_base_file_dir):
            os.makedirs(scan_output_base_file_dir)

        # The gobuster output file is whatever the shell redirected to:
        #   gobuster ... > <file> 2>&1 &
        post_gobuster_filename = populated_command.split(">")[1].split("&")[0].strip()
        print("Post gobuster filename" + post_gobuster_filename + "\n")

        # Walk the command tokens to recover the -u <url> argument.
        populated_command_list = populated_command.split(" ")
        index = 0
        for arg in populated_command_list:
            if "-u" == populated_command_list[index]:
                if "http" in populated_command_list[index + 1]:
                    scanned_url = populated_command_list[index + 1]
                    #print("Scanned_url: " + scanned_url)
            index = index + 1

        try:
            with open(post_gobuster_filename, 'r') as gobuster_file:
                lines = gobuster_file.read().splitlines()
                print(lines)
                if len(lines) > 300:
                    # TODO: don't submit hundreds of directories to scan,
                    # but we still need a way to tell the user about it.
                    exit()
                for url in lines:
                    url = url.split("?")[0].replace("//", "/")
                    if url.startswith("http"):
                        # Flatten the URL into a filesystem-safe .png name.
                        # FIX: strip "https" before "http"; the old order
                        # removed "http" first, leaving a stray "s" in the
                        # filename for every https URL.
                        url_screenshot_filename = scan_output_base_file_dir + "/" + \
                            url.replace("https", "").replace("http", "") \
                               .replace("/", "_") \
                               .replace("\\", "") \
                               .replace(":", "_") + ".png"
                        url_screenshot_filename = url_screenshot_filename.replace("__", "")
                        db_path = (vhost, scanned_service_port, url, 0,
                                   url_screenshot_filename, workspace)
                        db.insert_new_path(db_path)
                        print("Found Url: " + str(url))
                        urls_to_screenshot_with_filenames.append((url, url_screenshot_filename))
                        urls_to_screenshot.append((url, url_screenshot_filename))
                        #result = lib.utils.take_screenshot(url,url_screenshot_filename)
        except Exception:
            # FIX: was Python-2-only `except Exception, e:` syntax; the bound
            # exception was never used.
            if not simulation:
                print("[!] Could not open {0}".format(post_gobuster_filename))
def post_process(*args):
    """Celery post-processing hook run after a scan command completes.

    Handles gobuster and photon output: parses each tool's output file for
    discovered URLs, records every URL as a path row in the workspace DB,
    and (outside simulation mode) queues a single screenshot task covering
    all URLs that were found.

    Expects exactly 11 positional args (unpacked below); the signature is
    fixed by the celery task chain that invokes this hook.
    """
    command_name, populated_command, output_base_dir, workspace, vhost, \
        host_dir, simulation, scanned_service_port, scanned_service, \
        scanned_service_protocol, celery_path = args
    screenshot_name = ""
    urls_to_screenshot = []
    urls_to_screenshot_with_filenames = []

    if "gobuster" in populated_command:
        screenshot_name = "gobuster"
        # Screenshots live under <out>/celerystalkReports/screens/<vhost>_<port>_<proto>
        scan_output_base_file_dir = os.path.join(
            output_base_dir, "celerystalkReports", "screens",
            vhost + "_" + str(scanned_service_port) + "_" + scanned_service_protocol)
        # Create the screenshot directory only if missing (replaces the old
        # bare try/os.stat/except pattern, which swallowed every error).
        if not os.path.isdir(scan_output_base_file_dir):
            os.makedirs(scan_output_base_file_dir)

        # The gobuster output file is whatever the shell redirected to:
        #   gobuster ... > <file> 2>&1 &
        post_gobuster_filename = populated_command.split(">")[1].split("&")[0].strip()
        print("Post gobuster filename" + post_gobuster_filename + "\n")

        # Walk the command tokens to recover the -u <url> argument.
        populated_command_list = populated_command.split(" ")
        index = 0
        for arg in populated_command_list:
            if "-u" == populated_command_list[index]:
                if "http" in populated_command_list[index + 1]:
                    scanned_url = populated_command_list[index + 1]
                    #print("Scanned_url: " + scanned_url)
            index = index + 1

        try:
            with open(post_gobuster_filename, 'r') as gobuster_file:
                lines = gobuster_file.read().splitlines()
                print(lines)
                if len(lines) > 300:
                    # TODO: don't submit hundreds of directories to scan,
                    # but we still need a way to tell the user about it.
                    exit()
                for url in lines:
                    url = url.split("?")[0].replace("//", "/")
                    if url.startswith("http"):
                        # Flatten the URL into a filesystem-safe .png name.
                        # FIX: strip "https" before "http"; the old order
                        # removed "http" first, leaving a stray "s" in the
                        # filename for every https URL.
                        url_screenshot_filename = scan_output_base_file_dir + "/" + \
                            url.replace("https", "").replace("http", "") \
                               .replace("/", "_") \
                               .replace("\\", "") \
                               .replace(":", "_") + ".png"
                        url_screenshot_filename = url_screenshot_filename.replace("__", "")
                        db_path = (vhost, scanned_service_port, url, 0,
                                   url_screenshot_filename, workspace)
                        db.insert_new_path(db_path)
                        print("Found Url: " + str(url))
                        urls_to_screenshot_with_filenames.append((url, url_screenshot_filename))
                        urls_to_screenshot.append((url, url_screenshot_filename))
                        #result = lib.utils.take_screenshot(url,url_screenshot_filename)
        except Exception:
            if not simulation:
                print("[!] Could not open {0}".format(post_gobuster_filename))

    if "photon" in populated_command:
        screenshot_name = "photon"
        scan_output_base_file_dir = os.path.join(
            output_base_dir, "celerystalkReports", "screens",
            vhost + "_" + str(scanned_service_port) + "_" + scanned_service_protocol)
        if not os.path.isdir(scan_output_base_file_dir):
            os.makedirs(scan_output_base_file_dir)

        # Photon's output file is looked up from the workspace DB rather than
        # parsed out of the shell redirection.
        #post_photon_filename = populated_command.split(">")[1].lstrip()
        post_photon_filename = lib.db.get_output_file_for_command(
            workspace, populated_command)[0][0]
        print("Post photon filename" + post_photon_filename + "\n")

        # Walk the command tokens to recover the -u <url> argument.
        populated_command_list = populated_command.split(" ")
        index = 0
        for arg in populated_command_list:
            if "-u" == populated_command_list[index]:
                if "http" in populated_command_list[index + 1]:
                    scanned_url = populated_command_list[index + 1]
                    #print("Scanned_url: " + scanned_url)
            index = index + 1

        try:
            with open(post_photon_filename, 'r') as photon_file:
                photon_file_json = simplejson.load(photon_file)
                # Only these sections of photon's JSON output contain URLs we
                # want to record/screenshot.
                good_sections = ["internal", "robots", "fuzzable"]
                for section in good_sections:
                    for url in photon_file_json[section]:
                        if url.startswith("http"):
                            # FIX: strip "https" before "http" (see gobuster
                            # branch above for rationale).
                            url_screenshot_filename = scan_output_base_file_dir + "/" + \
                                url.replace("https", "").replace("http", "") \
                                   .replace("/", "_") \
                                   .replace("\\", "") \
                                   .replace(":", "_") + ".png"
                            url_screenshot_filename = url_screenshot_filename.replace("__", "")
                            db_path = (vhost, scanned_service_port, url, 0,
                                       url_screenshot_filename, workspace)
                            db.insert_new_path(db_path)
                            print("Found Url: " + str(url))
                            urls_to_screenshot_with_filenames.append((str(url), url_screenshot_filename))
                            urls_to_screenshot.append((str(url), url_screenshot_filename))
        except Exception:
            if not simulation:
                print("[!] Could not open {0}".format(post_photon_filename))

    if not simulation:
        if len(urls_to_screenshot) > 0:
            # Queue one screenshot task covering every URL found above.
            # scan_output_base_file_dir is guaranteed bound here: the list is
            # only non-empty if one of the branches above ran.
            task_id = uuid()
            populated_command = "firefox-esr {0}-screenshots | {1} | {2}".format(
                screenshot_name, vhost, scan_output_base_file_dir)
            command_name = "Screenshots"
            utils.create_task(command_name, populated_command, vhost,
                              scan_output_base_file_dir, workspace, task_id)
            cel_take_screenshot.delay(urls_to_screenshot_with_filenames, task_id,
                                      vhost, scan_output_base_file_dir, workspace,
                                      command_name, populated_command)
try: with open(post_photon_filename, 'r') as photon_file: photon_file_json = simplejson.load(photon_file) good_sections = ["internal", "robots", "fuzzable"] for section in good_sections: for url in photon_file_json[section]: if url.startswith("http"): url_screenshot_filename = scan_output_base_file_dir + "/" + url.replace("http", "").replace("https", "") \ .replace("/", "_") \ .replace("\\", "") \ .replace(":", "_") + ".png" url_screenshot_filename = url_screenshot_filename.replace("__", "") db_path = (vhost, scanned_service_port, url, 0, url_screenshot_filename, workspace) db.insert_new_path(db_path) print("Found Url: " + str(url)) urls_to_screenshot_with_filenames.append((str(url), url_screenshot_filename)) urls_to_screenshot.append((str(url), url_screenshot_filename)) except Exception, e: if not simulation: print("[!] Could not open {0}".format(post_photon_filename)) if not simulation: if len(urls_to_screenshot) > 0: task_id = uuid() populated_command = "firefox-esr {0}-screenshots | {1} | {2}".format(screenshot_name, vhost, scan_output_base_file_dir)
def post_process(*args):
    """Older post-processing hook (ip-keyed): parses gobuster/photon output,
    records discovered URLs in the workspace DB, and takes screenshots
    inline (no celery screenshot task).

    Expects exactly 11 positional args (unpacked below); the signature is
    fixed by the celery task chain that invokes this hook.
    """
    command_name, populated_command, output_base_dir, workspace, ip, \
        host_dir, simulation, scanned_service_port, scanned_service, \
        scanned_service_protocol, celery_path = args
    # NOTE: a large commented-out "amass" post-processing block (vhost scope
    # checking plus per-vhost celery command chains) was removed from here as
    # dead code; recover it from version control if it is ever needed.

    if "gobuster" in populated_command:
        # Screenshots live under <out>/celerystalkReports/screens/<ip>_<port>_<proto>
        scan_output_base_file_dir = os.path.join(
            output_base_dir, "celerystalkReports", "screens",
            ip + "_" + str(scanned_service_port) + "_" + scanned_service_protocol)
        # Create the screenshot directory only if missing (replaces the old
        # bare try/os.stat/except pattern, which swallowed every error).
        if not os.path.isdir(scan_output_base_file_dir):
            os.makedirs(scan_output_base_file_dir)

        # The gobuster output file is whatever the shell redirected to.
        post_gobuster_filename = populated_command.split(">")[1].split("&")[0].strip()
        print("Post gobuster filename" + post_gobuster_filename + "\n")

        # Walk the command tokens to recover the -u <url> argument.
        populated_command_list = populated_command.split(" ")
        index = 0
        for arg in populated_command_list:
            if "-u" == populated_command_list[index]:
                if "http" in populated_command_list[index + 1]:
                    scanned_url = populated_command_list[index + 1]
                    #print("Scanned_url: " + scanned_url)
            index = index + 1

        with open(post_gobuster_filename, 'r') as gobuster_file:
            lines = gobuster_file.read().splitlines()
            print(lines)
            if len(lines) > 300:
                # TODO: don't submit hundreds of directories to scan,
                # but need a way to tell the user.
                exit()
            for url in lines:
                #url = url.split("?")[0].replace("//","/")
                if url.startswith("http"):
                    # NOTE(review): unlike the newer variants of this hook
                    # there is no "/" between the directory and the filename
                    # here, so the file lands beside the directory rather
                    # than inside it -- confirm whether that is intentional.
                    url_screenshot_filename = scan_output_base_file_dir + \
                        url.replace("http", "").replace("https", "") \
                           .replace("/", "_") \
                           .replace("\\", "") \
                           .replace(":", "_") + ".png"
                    url_screenshot_filename = url_screenshot_filename.replace("__", "")
                    db_path = (ip, scanned_service_port, url, 0,
                               url_screenshot_filename, workspace)
                    db.insert_new_path(db_path)
                    print("Found Url: " + str(url))
                    result = lib.utils.take_screenshot(url, url_screenshot_filename)

    if "photon" in populated_command:
        scan_output_base_file_dir = os.path.join(
            output_base_dir, "celerystalkReports", "screens",
            ip + "_" + str(scanned_service_port) + "_" + scanned_service_protocol)
        if not os.path.isdir(scan_output_base_file_dir):
            os.makedirs(scan_output_base_file_dir)

        # Photon's output file is everything after the shell redirection.
        post_photon_filename = populated_command.split(">")[1].lstrip()
        print("Post photon filename" + post_photon_filename + "\n")

        # Walk the command tokens to recover the -u <url> argument.
        populated_command_list = populated_command.split(" ")
        index = 0
        for arg in populated_command_list:
            if "-u" == populated_command_list[index]:
                if "http" in populated_command_list[index + 1]:
                    scanned_url = populated_command_list[index + 1]
                    #print("Scanned_url: " + scanned_url)
            index = index + 1

        with open(post_photon_filename, 'r') as photon_file:
            lines = photon_file.read().splitlines()
            print(lines)
            if len(lines) > 300:
                # TODO: don't submit hundreds of directories to scan,
                # but need a way to tell the user. Cap at 300 for now.
                lines = lines[:300]
            for url in lines:
                #url = url.split("?")[0].replace("//","/")
                if url.startswith("http"):
                    # NOTE(review): same missing "/" separator as the
                    # gobuster branch above -- confirm before relying on it.
                    url_screenshot_filename = scan_output_base_file_dir + \
                        url.replace("http", "").replace("https", "") \
                           .replace("/", "_") \
                           .replace("\\", "") \
                           .replace(":", "_") + ".png"
                    url_screenshot_filename = url_screenshot_filename.replace("__", "")
                    db_path = (ip, scanned_service_port, url, 0,
                               url_screenshot_filename, workspace)
                    db.insert_new_path(db_path)
                    print("Found Url: " + str(url))
                    result = lib.utils.take_screenshot(url, url_screenshot_filename)
                    print(result)
def import_url(url, workspace, output_base_dir):
    """Import a single user-supplied URL into the workspace.

    Parses the URL, optionally prompts to add an out-of-scope vhost to
    scope, ensures the vhost and service rows exist in the DB, and records
    the URL in the paths table. Prints a skip message for vhosts that are
    explicitly out of scope.
    """
    celery_path = sys.path[0]
    #config, supported_services = config_parser.read_config_ini()
    urls_to_screenshot = []
    scheme = ""  # pre-bind so the except clause below can test it safely
    try:
        parsed_url = urlparse.urlparse(url)
        scheme = parsed_url[0]
        if not scheme:
            print("\n[!] URL parameter (-u) requires that you specify the scheme (http:// or https://)\n")
            exit()
        if ":" in parsed_url[1]:
            # Explicit port in the netloc, e.g. http://host:8080/
            vhost, port = parsed_url[1].split(':')
        else:
            vhost = parsed_url[1]
            # No explicit port: default from the scheme.
            if scheme == "http":
                port = 80
            elif scheme == "https":
                port = 443
        path = parsed_url[2]
    except:
        # FIX: `scheme` used to be unbound here when urlparse itself raised,
        # which turned the intended clean exit into a NameError.
        if not scheme:
            exit()

    in_scope, ip = lib.utils.domain_scope_checker(vhost, workspace)
    proto = "tcp"
    vhost_explicitly_out_of_scope = lib.db.is_vhost_explicitly_out_of_scope(vhost, workspace)
    if not vhost_explicitly_out_of_scope:
        # Vhost is not explicitly out of scope; offer to pull it into scope.
        if in_scope == 0:
            answer = raw_input(
                "[+] {0} is not in scope. Would you like to to add {1}/{0} to the list of in scope hosts?"
                .format(vhost, ip))
            if (answer == "Y") or (answer == "y") or (answer == ""):
                in_scope = 1
        if in_scope == 1:
            is_vhost_in_db = lib.db.is_vhost_in_db(vhost, workspace)
            # NOTE(review): this updates when the vhost is NOT in the DB and
            # creates when it IS -- looks inverted; confirm against lib.db
            # semantics before changing.
            if not is_vhost_in_db:
                lib.db.update_vhosts_in_scope(ip, vhost, workspace, 1)
                lib.db.update_vhosts_submitted(ip, vhost, workspace, 1)
            else:
                # add it to the vhosts db and mark as in scope
                db_vhost = (ip, vhost, 1, 0, 1, workspace)
                lib.db.create_vhost(db_vhost)

            # Output files are named <vhost-or-ip>_<port>_<proto>_ under the
            # host's celerystalkOutput directory.
            if ip == vhost:
                scan_output_base_file_dir = output_base_dir + "/" + ip + \
                    "/celerystalkOutput/" + ip + "_" + str(port) + "_" + proto + "_"
            else:
                scan_output_base_file_dir = output_base_dir + "/" + ip + \
                    "/celerystalkOutput/" + vhost + "_" + str(port) + "_" + proto + "_"
            host_dir = output_base_dir + "/" + ip
            host_data_dir = host_dir + "/celerystalkOutput/"
            # Creates something like /pentest/10.0.0.1, /pentest/10.0.0.2, etc.
            lib.utils.create_dir_structure(ip, host_dir)
            # ScanSummary.log holds each executed command for easy copy/paste;
            # it is not the audit log.
            summary_file_name = host_data_dir + "ScanSummary.log"
            summary_file = open(summary_file_name, 'a')

            # Insert the port/service combo into the services table if new.
            db_service = db.get_service(ip, port, proto, workspace)
            if not db_service:
                #db_string = (ip, port, proto, scheme,'','','',workspace)
                db_string = (vhost, port, proto, scheme, '', '', '', workspace)
                db.create_service(db_string)

            # Insert the URL into the paths table (screenshot happens later).
            db_path = db.get_path(path, workspace)
            if not db_path:
                url_screenshot_filename = scan_output_base_file_dir + url.replace("http", "").replace("https", "") \
                    .replace("/", "_") \
                    .replace("\\", "") \
                    .replace(":", "_") + ".png"
                url_screenshot_filename = url_screenshot_filename.replace("__", "")
                db_path = (vhost, port, url, 0, url_screenshot_filename, workspace)
                # FIX: the original inserted this identical row a second time
                # immediately below; one insert is enough.
                db.insert_new_path(db_path)
                #urls_to_screenshot.append((url, url_screenshot_filename))
                #lib.utils.take_screenshot(urls_to_screenshot)
    else:
        print("[!] {0} is explicitly marked as out of scope. Skipping...".format(vhost))
def process_nmap_data(nmap_report, workspace, target=None):
    """Sync hosts, vhosts, services and default paths from a parsed nmap
    report into the workspace database.

    nmap_report -- parsed report object exposing .hosts, each with .id,
                   .hostnames and .services (assumes a libnmap-style API --
                   TODO confirm against the caller that builds it).
    workspace   -- workspace name used to key every DB row.
    target      -- unused in this function; kept for interface compatibility.
    """
    workspace_mode = lib.db.get_workspace_mode(workspace)[0][0]
    # /etc/services is read once up front; used below to resolve
    # "tcpwrapped" ports to a better-guess service name.
    services_file = open('/etc/services', mode='r')
    services_file_data = services_file.readlines()
    services_file.close()
    #This top part of the for loop determines whether or not to add the host
    for scanned_host in nmap_report.hosts:
        ip = scanned_host.id
        unique_db_ips = lib.db.is_vhost_in_db(
            ip, workspace)  #Returns data if IP is in database
        #print(unique_db_ips)
        vhosts = scanned_host.hostnames
        print("process_nmap_data: " + str(vhosts))
        # First pass: register each hostname (vhost) for this IP.
        for vhost in vhosts:
            print("process_nmap_data: " + vhost)
            vhost_explicitly_out_of_scope = lib.db.is_vhost_explicitly_out_of_scope(
                vhost, workspace)
            if not vhost_explicitly_out_of_scope:
                # if the vhost is not explicitly out of scope, add it to db
                is_vhost_in_db = lib.db.is_vhost_in_db(
                    vhost, workspace)  # Returns data if IP is in database
                if not is_vhost_in_db:
                    db_vhost = (ip, vhost, 1, 0, 0, workspace)
                    lib.db.create_vhost(db_vhost)
                else:
                    if not lib.db.get_in_scope_ip(
                            ip, workspace):  # if it is in the DB but not in scope...
                        print("[+] IP is in the DB, but not in scope. Adding to scope:\t[{0}]".format(ip))
                        lib.db.update_vhosts_in_scope(
                            ip, vhost, workspace, 1
                        )  # update the host to add it to scope, if it was already in scope, do nothing
            else:
                print("[!] {0} is explicitly marked as out of scope. Skipping...".format(ip))
        # Second pass: register the bare IP itself as a vhost.
        if unique_db_ips:  #If this IP was in the db...
            vhost_explicitly_out_of_scope = lib.db.is_vhost_explicitly_out_of_scope(
                ip, workspace)
            if not vhost_explicitly_out_of_scope:  #and if the vhost is not explicitly out of scope
                if not lib.db.get_in_scope_ip(
                        ip, workspace):  # and if it is not in scope...
                    print("[+] IP is in the DB, but not in scope. Adding to scope:\t[{0}]".format(ip))
                    lib.db.update_vhosts_in_scope(
                        ip, ip, workspace, 1
                    )  # update the host to add it to scope, if it was already in scope, do nothing
                # else:
                #     print("[+] [{0}] is already in the DB and considered in scope".format(ip))
            else:
                print("[!] {0} is explicitly marked as out of scope. Skipping...".format(ip))
        else:
            #if this ip was not already in the db, create a new host and mark it as in scope
            #note to self: i dont need to check to see if it is out of scope beccause i already know its not in db at all...
            print("[+] IP not in DB. Adding it to DB and to scope:\t [{0}]".format(ip))
            db_vhost = (ip, ip, 1, 0, 0, workspace)
            db.create_vhost(db_vhost)

        #Now after the host is added, let's add the ports for that host.
        for scanned_service_item in scanned_host.services:
            if scanned_service_item.state == "open":
                scanned_service_port = scanned_service_item.port
                scanned_service_name = scanned_service_item.service
                scanned_service_protocol = scanned_service_item.protocol
                #print(str(scanned_service_port))
                # nmap reports ssl-tunnelled services separately; treat them
                # as https.
                if scanned_service_item.tunnel == 'ssl':
                    scanned_service_name = 'https'
                if scanned_service_name == "tcpwrapped":
                    # Fall back to /etc/services to guess a real service name
                    # for tcpwrapped ports.
                    try:
                        port_proto = "\t" + str(
                            scanned_service_port) + "/" + str(
                                scanned_service_protocol) + "\t"
                        for line in services_file_data:
                            if port_proto in line:
                                scanned_service_name = line.split("\t")[0]
                    except:
                        pass
                #Not using this yet, but I'd like to do send this to searchsploit
                # service_dict entries are optional in nmap output, hence the
                # individual try/except fallbacks to ''.
                try:
                    scanned_service_product = scanned_service_item.service_dict[
                        'product']
                except:
                    scanned_service_product = ''
                try:
                    scanned_service_version = scanned_service_item.service_dict[
                        'version']
                except:
                    scanned_service_version = ''
                try:
                    scanned_service_extrainfo = scanned_service_item.service_dict[
                        'extrainfo']
                except:
                    scanned_service_extrainfo = ''
                #print "Port: {0}\tService: {1}\tProduct & Version: {3} {4} {5}".format(scanned_service_port,scanned_service_name,scanned_service_product,scanned_service_version,scanned_service_extrainfo)
                # Upsert the service row keyed by the bare IP.
                db_service = db.get_service(ip, scanned_service_port,
                                            scanned_service_protocol, workspace)
                if not db_service:
                    db_string = (ip, scanned_service_port,
                                 scanned_service_protocol, scanned_service_name,
                                 scanned_service_product,
                                 scanned_service_version,
                                 scanned_service_extrainfo, workspace)
                    db.create_service(db_string)
                else:
                    db.update_service(ip, scanned_service_port,
                                      scanned_service_protocol,
                                      scanned_service_name, workspace)

                output_base_dir = lib.db.get_output_dir_for_workspace(
                    workspace)[0][0]
                file_end_part = "/" + ip + "/celerystalkOutput/" + ip + "_" + str(
                    scanned_service_port
                ) + "_" + scanned_service_protocol + "_"
                scan_output_base_file_dir = os.path.abspath(output_base_dir +
                                                            file_end_part)
                # For web services, seed a root path row (with its eventual
                # screenshot filename) keyed by the bare IP.
                if (scanned_service_name == 'https') or (scanned_service_name == 'http'):
                    path = scanned_service_name + "://" + ip + ":" + str(
                        scanned_service_port) + "/"
                    db_path = db.get_path(path, workspace)
                    if not db_path:
                        url_screenshot_filename = scan_output_base_file_dir + ".png"
                        db_path = (ip, scanned_service_port, path, 0,
                                   url_screenshot_filename, workspace)
                        db.insert_new_path(db_path)

                # Repeat the service/path upsert once per hostname (vhost).
                for vhost in vhosts:
                    print("process_nmap_data - add service: " + vhost)
                    db_service = db.get_service(vhost, scanned_service_port,
                                                scanned_service_protocol,
                                                workspace)
                    if not db_service:
                        print("service didnt exist, adding: " + vhost +
                              str(scanned_service_port))
                        db_string = (vhost, scanned_service_port,
                                     scanned_service_protocol,
                                     scanned_service_name,
                                     scanned_service_product,
                                     scanned_service_version,
                                     scanned_service_extrainfo, workspace)
                        db.create_service(db_string)
                    else:
                        print("service does exist, updating: " + vhost +
                              str(scanned_service_port))
                        db.update_service(vhost, scanned_service_port,
                                          scanned_service_protocol,
                                          scanned_service_name, workspace)
                    if ip == vhost:
                        scan_output_base_file_dir = os.path.abspath(
                            output_base_dir + "/" + ip + "/celerystalkOutput/" +
                            ip + "_" + str(scanned_service_port) + "_" +
                            scanned_service_protocol)
                    else:
                        scan_output_base_file_dir = os.path.abspath(
                            output_base_dir + "/" + ip + "/celerystalkOutput/" +
                            vhost + "_" + str(scanned_service_port) + "_" +
                            scanned_service_protocol)
                    if (scanned_service_name == 'https') or (scanned_service_name == 'http'):
                        path = scanned_service_name + "://" + vhost + ":" + str(
                            scanned_service_port) + "/"
                        db_path = db.get_path(path, workspace)
                        if not db_path:
                            url_screenshot_filename = scan_output_base_file_dir + ".png"
                            db_path = (vhost, scanned_service_port, path, 0,
                                       url_screenshot_filename, workspace)
                            db.insert_new_path(db_path)
def process_url(url, output_base_dir, workspace, simulation):
    """Submit the scan chain for a single user-supplied URL.

    Parses the URL, resolves the target hostname, records vhost/service/
    path rows in the DB, takes an initial screenshot, and queues the
    configured http/https commands through celery. Prints a task-count
    summary when done.
    """
    celery_path = sys.path[0]
    config, supported_services = config_parser.read_config_ini()
    task_id_list = []
    scheme = ""  # pre-bind so the except clause below can test it safely
    try:
        parsed_url = urlparse.urlparse(url)
        scheme = parsed_url[0]
        if not scheme:
            print("\n[!] URL parameter (-u) requires that you specify the scheme (http:// or https://)\n")
            exit()
        if ":" in parsed_url[1]:
            # Explicit port in the netloc, e.g. http://host:8080/
            target, port = parsed_url[1].split(':')
        else:
            target = parsed_url[1]
            # No explicit port: default from the scheme.
            if scheme == "http":
                port = 80
            elif scheme == "https":
                port = 443
        path = parsed_url[2]
    except:
        # FIX: `scheme` used to be unbound here when urlparse itself raised,
        # which turned the intended clean exit into a NameError.
        if not scheme:
            exit()

    try:
        ip = socket.gethostbyname(target)
    except Exception:
        # FIX: the original printed and fell through, then crashed with a
        # NameError on the unbound `ip`; bail out cleanly instead.
        print("Error getting IP")
        exit()

    proto = "tcp"
    # Output files are named <vhost-or-ip>_<port>_<proto>_ under the host dir.
    if ip == target:
        scan_output_base_file_dir = output_base_dir + "/" + ip + \
            "/celerystalkOutput/" + ip + "_" + str(port) + "_" + proto + "_"
    else:
        scan_output_base_file_dir = output_base_dir + "/" + ip + \
            "/celerystalkOutput/" + target + "_" + str(port) + "_" + proto + "_"
    host_dir = output_base_dir + "/" + ip
    host_data_dir = host_dir + "/celerystalkOutput/"
    # Creates something like /pentest/10.0.0.1, /pentest/10.0.0.2, etc.
    utils.create_dir_structure(ip, host_dir)
    # ScanSummary.log holds each executed command for easy copy/paste; it is
    # not the audit log.
    summary_file_name = host_data_dir + "ScanSummary.log"
    summary_file = open(summary_file_name, 'a')

    db_vhost = (ip, target, 1, 1, workspace)  # in this mode all vhosts are in scope
    #print(db_vhost)
    db.create_vhost(db_vhost)

    # Insert port/service combo into services table
    db_service = db.get_service(ip, port, proto, workspace)
    if not db_service:
        db_string = (ip, port, proto, scheme, workspace)
        db.create_service(db_string)

    # Insert url into paths table and take screenshot
    db_path = db.get_path(path, workspace)
    if not db_path:
        url_screenshot_filename = scan_output_base_file_dir + url.replace("http", "").replace("https", "") \
            .replace("/", "_") \
            .replace("\\", "") \
            .replace(":", "_") + ".png"
        url_screenshot_filename = url_screenshot_filename.replace("__", "")
        db_path = (ip, port, url, 0, url_screenshot_filename, workspace)
        db.insert_new_path(db_path)
        #print("Found Url: " + str(url))
        result = utils.take_screenshot(url, url_screenshot_filename)
        #print(result)

    #TODO: This def might introduce a bug - same code as parse config submit jobs to celery. need to just call that function here
    for section in config.sections():
        if (section == "http") or (section == "https"):
            if section == scheme:
                for (cmd_name, cmd) in config.items(section):
                    outfile = scan_output_base_file_dir + cmd_name
                    populated_command = cmd.replace("[TARGET]", target) \
                                           .replace("[PORT]", str(port)) \
                                           .replace("[OUTPUT]", outfile) \
                                           .replace("[PATH]", path)
                    if simulation:
                        # debug - sends jobs to celery, but with a # in front of every one.
                        populated_command = "#" + populated_command

                    # Grab a UUID from celery.utils so that i can assign it to my task
                    # at init, which allows me to pass it to all of the tasks in the chain.
                    task_id = uuid()
                    result = chain(
                        # insert a row into the database to mark the task as submitted.
                        # a subtask does not get tracked in celery the same way a task
                        # does; for instance, you can't find it in flower.
                        tasks.cel_create_task.subtask(
                            args=(cmd_name, populated_command, target,
                                  outfile + ".txt", workspace, task_id)),
                        # run the command. run_task takes care of marking the task as
                        # started and then completed. The si tells run_cmd to ignore
                        # the data returned from a previous task.
                        tasks.run_cmd.si(cmd_name, populated_command, celery_path,
                                         task_id).set(task_id=task_id),
                        # right now, every executed command gets sent to a generic
                        # post_process task that can do additional stuff based on the
                        # command that just ran.
                        tasks.post_process.si(cmd_name, populated_command,
                                              output_base_dir, workspace, target,
                                              host_dir, simulation, port, scheme,
                                              proto, celery_path),
                    )()  # .apply_async()
                    task_id_list.append(result.task_id)

                    # Append the command to the per-host audit log.
                    host_audit_log = host_dir + "/" + "{0}_executed_commands.txt".format(ip)
                    f = open(host_audit_log, 'a')
                    f.write(populated_command + "\n\n")
                    f.close()
    print("[+] Submitted {0} tasks to queue.\n".format(len(task_id_list)))