예제 #1
0
 def __init__(self):
     """Load the project config, create the sqlmap workspace, restore
     the queue/neg/pos link sets from disk, and start working."""
     with open('./config/config.json', 'r') as config_file:
         sqlmap.config = loads(config_file.read())
     create_project_dir(sqlmap.config["ProjectName"] + "/sqlmap")
     sqlmap.files = create_penetration_data_files(
         sqlmap.config["ProjectName"], "sqlmap")
     # files[0]/[1]/[2] back the queue, negative, and positive sets.
     sqlmap.queue = file_to_set(sqlmap.files[0])
     sqlmap.neg = file_to_set(sqlmap.files[1])
     sqlmap.pos = file_to_set(sqlmap.files[2])
     sqlmap.startTime = time()
     sqlmap.work()
예제 #2
0
파일: xss.py 프로젝트: alifrd/Lacher-Lizard
 def __init__(self):
     """Load the project config, create the xss workspace, restore the
     queue/neg/pos link sets from disk, and start working."""
     with open('./config/config.json', 'r') as config_file:
         xss.config = loads(config_file.read())
     create_project_dir(xss.config["ProjectName"] + "/xss")
     xss.files = create_penetration_data_files(xss.config["ProjectName"],
                                               "xss")
     # files[0]/[1]/[2] back the queue, negative, and positive sets.
     xss.queue, xss.neg, xss.pos = (
         file_to_set(xss.files[0]),
         file_to_set(xss.files[1]),
         file_to_set(xss.files[2]),
     )
     xss.startTime = time()
     xss.work()
예제 #3
0
def crawl():
    """Report how many links are queued and dispatch jobs for them.

    Does nothing when the queue file is empty.
    """
    pending = file_to_set(QUEUE_FILE)
    if pending:
        # f-string produces the same text as the original concatenation.
        print(f"{len(pending)} links in the queue")
        create_jobs()
예제 #4
0
def create_jobs():
    """Feed every link in the queue file to the worker threads, block
    until they finish, then re-check the queue via crawl()."""
    for queued_link in file_to_set(QUEUE_FILE):
        thread_queue.put(queued_link)
    thread_queue.join()
    # Mutual recursion with crawl(): repeats until the queue drains.
    crawl()
예제 #5
0
파일: main.py 프로젝트: lasonadora31/pull
def crawl():
    """Dispatch jobs while the queue file still holds links, then
    report completion."""
    pending = file_to_set(QUEUE_FILE)
    if pending:
        create_jobs()
    print('finish')
예제 #6
0
 def boot():
     """Create the project workspace and hydrate the Spider's three
     link-tracking sets from their backing files."""
     create_project_dir(Spider.project_name)
     create_data_files(Spider.project_name, Spider.base_url)
     # Each class-level set is restored from its corresponding file.
     for attr_name, backing_file in (
             ("queue", Spider.queue_file),
             ("crawled", Spider.crawled_file),
             ("finish", Spider.finish_file)):
         setattr(Spider, attr_name, file_to_set(backing_file))
예제 #7
0
def crawl():
    queue_links= file_to_set(QUEUE_FILE)
    if len(queue_links) > 0:
        print(f'{len(queue_links)} links in the queue')
        create_jobs()
예제 #8
0
 def boot():
     """Create the project and restore the Spider's three link sets
     from their backing files."""
     create_project(Spider.project_name, Spider.base_url)
     # Each class-level set is restored from its corresponding file.
     for attr_name, backing_file in (
             ("queue_set", Spider.queue_file),
             ("crawled_set", Spider.crawled_file),
             ("htmls_set", Spider.htmls_file)):
         setattr(Spider, attr_name, file_to_set(backing_file))