Example #1
0
    def __init__(self):
        """Parse CLI options and port-scan a target host through a Squid proxy.

        Reads ``--proxy`` (proxy URL, e.g. ``http://xxx:3128``) and
        ``--target`` (IP behind the proxy) from ``sys.argv``; prints help and
        exits if either is missing.  For each port in a fixed set of common
        service ports it issues an HTTP GET to ``http://target:port`` via the
        proxy and reports the port as open when the proxy relays a
        200/401/404 response.  Connection failures are treated as "closed"
        and skipped silently.
        """
        req = URLRequest()
        parser = argparse.ArgumentParser(
            prog='Spose by Petruknisme',
            description='Squid Pivoting Open Port Scanner')
        parser.add_argument("--proxy",
                            help="Define proxy address url(http://xxx:3128)",
                            action="store",
                            dest='proxy')
        parser.add_argument("--target",
                            help="Define target IP behind proxy",
                            action="store",
                            dest='target')
        results = parser.parse_args()

        # Both options are required; argparse defaults them to None.
        if results.target is None or results.proxy is None:
            parser.print_help()
            sys.exit()

        target = results.target
        proxy = results.proxy
        common_ports = {
            21, 22, 23, 25, 53, 69, 80, 109, 110, 123, 137, 138, 139, 143, 156,
            389, 443, 546, 547, 995, 993, 2086, 2087, 2082, 2083, 3306, 8080,
            8443, 10000
        }

        print("Using proxy address {}".format(proxy))

        for port in sorted(common_ports):
            try:
                data = req.get("http://{}:{}".format(target, port), proxy)
                code = data.getcode()
                # A 200/404/401 means the proxy reached the service, so the
                # port is open even when the service rejects the request.
                if code in (200, 404, 401):
                    print("{} {} seems OPEN ".format(target, port))
            except Exception:
                # Narrowed from a bare `except:` so Ctrl-C (KeyboardInterrupt)
                # and SystemExit can still abort the scan; any other failure
                # is treated as a closed/filtered port.
                pass
Example #2
0
from database import Database
from logger import Logger
from tqdm import tqdm
import requests
from url_request import URLRequest
from meta_exif_extractor import MetaExifExtractor
from util import Util

# Console color (ANSI SGR escape sequences for terminal output)
G = '\033[92m'  # green
Y = '\033[93m'  # yellow
B = '\033[94m'  # blue
R = '\033[91m'  # red
W = '\033[0m'  # reset to terminal default (ends a colored span)

# Module-level collaborators shared by the class below (project-local helpers).
url_req = URLRequest()  # HTTP request helper
log = Logger()  # console logger
util = Util()  # misc utilities


class HarvestPublicDocument(object):
    def __init__(self):
        """Create a harvester with a fresh database handle and no project selected yet."""
        self.project_id = 0
        self.db = Database()

    def init_crawl(self, domain, proxy_address, project_id):
        """Start document harvesting for *domain* under *project_id*.

        Stores *project_id* on the instance, logs (in green) that link
        gathering from Google Search has begun, then harvests public "pdf"
        documents for the domain through *proxy_address*.
        NOTE(review): this method may continue beyond the visible chunk
        (e.g. with further file types) — confirm against the full file.
        """
        self.project_id = project_id
        log.console_log(
            "{}[*] Gather Link from Google Search for domain {}{}".format(
                G, domain, W))
        self.harvest_public_doc(domain, "pdf", proxy_address)