Example #1
0
 def scan(self):
     """Crawl ``self.target`` and probe it for exposed server paths, backup
     files, directory listings and common files; any findings are written
     to ``./report/<domain>.txt``.

     Depends on module-level helpers visible elsewhere in the file:
     ``_requests``, ``crawl``, ``exploit_server_path``,
     ``exploit_backup_path``, ``exploit_directory_path``,
     ``exploit_common_file`` and the shared ``headers`` dict.
     """
     # Local import keeps the method working on both Python 2 and 3;
     # the original relied on a module-level Python 2 ``urlparse`` import.
     try:
         from urllib.parse import urlparse as _urlparse
     except ImportError:  # Python 2 fallback
         from urlparse import urlparse as _urlparse

     def _write_section(report, title, urls):
         # One titled block of result URLs in the report file; reproduces
         # the original header/footer bytes exactly.
         banner = "************" + title + "************\n"
         report.writelines(banner)
         for url in urls:
             report.writelines(url + '\n')
         report.writelines(banner + "\n\n")

     try:
         r = _requests(self.target, headers=headers)
         if isinstance(r, bool):
             # ``_requests`` signals a failed/unreachable URL with a bool.
             print("invalid url please input correct url")  # typo fixed
             return
         self.target_domain = _urlparse(self.target).netloc
         print("start crawl")
         print("*********************")
         hand = crawl(self.target, self.depth, self.concurrent_num)
         crawl_urls = hand.scan()
         print("*********************")
         print("crawl  finish")
         dirs = self.get_dir(crawl_urls)
         print("*********************")
         print("load server path ")
         server_result = exploit_server_path(self.target)
         print("*********************")
         print("load backup path")
         backup_result = exploit_backup_path(self.target, dirs)
         print("*********************")
         print("load directory path")
         directory_result = exploit_directory_path(self.target, dirs)
         print("*********************")
         print("load common file path")
         common_file_result = exploit_common_file(self.target, self.extion, dirs)
         print("************************")
         print("finish scan :: {}".format(self.target))
         print("************************")
         sections = [
             ("server path", server_result),
             ("backup path", backup_result),
             ("directory path", directory_result),
             ("common file path", common_file_result),
         ]
         if any(res for _, res in sections):
             # ``with`` already closes the file; the original's explicit
             # f.close() inside the with-block was redundant and removed.
             with open("./report/" + self.target_domain + ".txt", 'w') as f:
                 for title, urls in sections:
                     if urls:
                         _write_section(f, title, urls)
     except Exception:
         # Narrowed from the original bare ``except:`` so SystemExit /
         # KeyboardInterrupt are no longer swallowed; still best-effort.
         traceback.print_exc()
Example #2
0
def main():
    """Crawl shopping-search result pages for each keyword, print the
    parsed products, then export them to JSON and Excel.

    Depends on module-level helpers defined elsewhere in the file:
    ``crawl``, ``parse``, ``write_json``, ``write_excel`` and the
    ``json_file_name`` constant.
    """
    total_products = []

    # NOTE(review): the duplicate '텀블러' entry is preserved from the
    # original; it makes that keyword be crawled twice — confirm intended.
    shopping_list = ['텀블러', '가습기', '숨셔바요', '텀블러']

    # Iterate the keywords directly instead of the original's fragile
    # index-based range(0, 4), which breaks silently if the list changes.
    for keyword in shopping_list:
        # Pages 2..4 only, matching the original range(2, 5).
        for page_no in range(2, 5):
            page_string = crawl(keyword, page_no)
            total_products += parse(page_string)
        print("--------------------------------------------")

    for product in total_products:
        print(product)

    write_json(json_file_name, total_products)
    write_excel(json_file_name)
Example #3
0
 def scan(self):
     """Scan ``self.target`` for exposed server/backup/directory/common-file
     paths and write any findings to ``report/<domain>.txt``.

     When the target has no scheme, both ``http://`` and ``https://`` are
     probed and the first reachable one wins. The common-file extension is
     taken from ``self.parse_extion`` when set, otherwise guessed via
     ``get_extion_by_sever``, with ``self.extion`` as the final fallback.
     """
     # Local import keeps the method working on both Python 2 and 3;
     # the original relied on a module-level Python 2 ``urlparse`` import.
     try:
         from urllib.parse import urlparse as _urlparse
     except ImportError:  # Python 2 fallback
         from urlparse import urlparse as _urlparse

     def _write_section(report, banner, urls):
         # One titled block of result URLs; the banner string is emitted
         # verbatim before and after the URL list, as in the original.
         report.writelines(banner)
         for url in urls:
             report.writelines(url + '\n')
         report.writelines(banner)

     try:
         if not self.target.startswith("http"):
             # No scheme supplied: try http then https and keep the first
             # candidate that yields a real response (non-bool).
             self.targets = [
                 "http://" + self.target, "https://" + self.target
             ]
             for candidate in self.targets:
                 probe = _requests(candidate, headers=headers)
                 if not isinstance(probe, bool):
                     self.target = candidate
                     break
         r = _requests(self.target, headers=headers)
         if isinstance(r, bool):
             # ``_requests`` signals a failed/unreachable URL with a bool.
             print("{} is invalid url".format(self.target))  # typo fixed
             return
         self.target_domain = _urlparse(self.target).netloc
         hand = crawl(self.target, self.depth, self.concurrent_num)
         crawl_urls = hand.scan()
         dirs = self.get_dir(crawl_urls)
         server_result = exploit_server_path(self.target)
         backup_result = exploit_backup_path(self.target, dirs)
         directory_result = exploit_directory_path(self.target, dirs)
         dirs.append("")  # also probe the web root for common files
         if self.parse_extion:
             extion = self.parse_extion
         else:
             # Guess from the server banner; fall back to the configured
             # default when detection fails (flattens the original's
             # nested if/else, same call in every branch).
             extion = get_extion_by_sever(self.target) or self.extion
         common_file_result = exploit_common_file(self.target, extion, dirs)
         sections = [
             ("****************** server path ******************\n",
              server_result),
             ("****************** backup path ******************\n",
              backup_result),
             ("**************** directory path *****************\n",
              directory_result),
             ("*************** common file path ****************\n",
              common_file_result),
         ]
         if any(res for _, res in sections):
             # ``with`` already closes the file; the original's explicit
             # f.close() inside the with-block was redundant and removed.
             with open("report/" + self.target_domain + ".txt", 'w') as f:
                 for banner, urls in sections:
                     if urls:
                         _write_section(f, banner, urls)
     except Exception:
         # Narrowed from the original bare ``except:`` so SystemExit /
         # KeyboardInterrupt are no longer swallowed; still best-effort.
         traceback.print_exc()