def __init__(self):
    # Load the scanner configuration and prepare the sqlmap working directory.
    # Assumes json.loads and time.time are imported at module level.
    with open('./config/config.json', 'r') as myfile:
        data = myfile.read()
    sqlmap.config = loads(data)
    create_project_dir(sqlmap.config["ProjectName"] + "/sqlmap")
    sqlmap.files = create_penetration_data_files(sqlmap.config["ProjectName"], "sqlmap")
    # files[0]: queue of URLs to test, files[1]: negative results, files[2]: positive results
    sqlmap.queue = file_to_set(sqlmap.files[0])
    sqlmap.neg = file_to_set(sqlmap.files[1])
    sqlmap.pos = file_to_set(sqlmap.files[2])
    sqlmap.startTime = time()
    sqlmap.work()
def __init__(self):
    # Same bootstrap as the sqlmap module, but for the XSS scanner.
    with open('./config/config.json', 'r') as myfile:
        data = myfile.read()
    xss.config = loads(data)
    create_project_dir(xss.config["ProjectName"] + "/xss")
    xss.files = create_penetration_data_files(xss.config["ProjectName"], "xss")
    xss.queue = file_to_set(xss.files[0])
    xss.neg = file_to_set(xss.files[1])
    xss.pos = file_to_set(xss.files[2])
    xss.startTime = time()
    xss.work()
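Both initializers read ./config/config.json and use its "ProjectName" key to name the output directory. A minimal sketch of that read, assuming only the "ProjectName" key shown above (the inline JSON string is a stand-in for the real file):

# Sketch of the configuration both scanners load at startup; only "ProjectName"
# is referenced in the snippets above, any other keys would be assumptions.
from json import loads

example_config = '{"ProjectName": "demo_scan"}'   # stand-in for ./config/config.json
config = loads(example_config)
print(config["ProjectName"])                       # -> demo_scan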
def crawl():
    # Report how many links are still queued and dispatch jobs while any remain.
    queue_links = file_to_set(QUEUE_FILE)
    if len(queue_links) > 0:
        print(str(len(queue_links)) + " links in the queue")
        create_jobs()
def create_jobs():
    # Push every queued link onto the thread-safe queue, wait for the workers
    # to drain it, then crawl again in case new links were discovered.
    for link in file_to_set(QUEUE_FILE):
        thread_queue.put(link)
    thread_queue.join()
    crawl()
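create_jobs() only feeds thread_queue; the worker side is not shown in these snippets. A minimal sketch of the worker setup this queue/join pattern typically relies on, where NUMBER_OF_THREADS, create_workers, and the body of work() are assumptions, not the project's own code:

import threading
from queue import Queue

NUMBER_OF_THREADS = 8          # assumed thread count
thread_queue = Queue()

def work():
    # Each worker pulls links until the program exits; task_done() is what lets
    # thread_queue.join() in create_jobs() return once the queue is drained.
    while True:
        link = thread_queue.get()
        # ... crawl / test the link here ...
        thread_queue.task_done()

def create_workers():
    for _ in range(NUMBER_OF_THREADS):
        t = threading.Thread(target=work)
        t.daemon = True          # workers exit with the main thread
        t.start()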
def crawl():
    queue_links = file_to_set(QUEUE_FILE)
    if len(queue_links) > 0:
        create_jobs()
    print('finish')
def boot():
    # Create the project directory and its seed data files, then load them into in-memory sets.
    create_project_dir(Spider.project_name)
    create_data_files(Spider.project_name, Spider.base_url)
    Spider.queue = file_to_set(Spider.queue_file)
    Spider.crawled = file_to_set(Spider.crawled_file)
    Spider.finish = file_to_set(Spider.finish_file)
def crawl():
    queue_links = file_to_set(QUEUE_FILE)
    if len(queue_links) > 0:
        print(f'{len(queue_links)} links in the queue')
        create_jobs()
def boot():
    create_project(Spider.project_name, Spider.base_url)
    Spider.queue_set = file_to_set(Spider.queue_file)
    Spider.crawled_set = file_to_set(Spider.crawled_file)
    Spider.htmls_set = file_to_set(Spider.htmls_file)