Example #1
def __init__(self):
    self.proxies = ProxyHandler()
    self.tasker = Tasker()
    self.saver = Saver()
    self.log_handler = LogHandler()
    self.tieba_task = tieba_task
    self.loop_num = LOOP_NUM
Example #2
class Scheduler(object):
    def __init__(self):
        self.proxies = ProxyHandler()
        self.tasker = Tasker()
        self.saver = Saver()
        self.log_handler = LogHandler()
        self.tieba_task = tieba_task
        self.loop_num = LOOP_NUM

    def end_task(self, loop):
        # Save results to MongoDB
        self.saver.run()
        if self.tasker.check_retries():
            self.retry_task(loop)
        else:
            loop.close()
            self.log_handler.logger().info('Loop finished')
            print('Finish!!')

    def retry_task(self, loop):
        # URL batches for each retry pass, already split up
        per_step_urls_list = self.tasker.get_perloop_retry()
        # Get proxies from the proxy pool
        proxy_list = self.proxies.get_proxies()
        for per_step_urls in per_step_urls_list:
            # Tasks to run
            tasks = tieba_task('retry', per_step_urls, proxy_list)
            # Run them on the event loop
            loop.run_until_complete(asyncio.gather(*tasks))
        self.end_task(loop)

    # @run_time
    def start_task(self):
        loop = asyncio.get_event_loop()
        # Get proxies from the proxy pool
        proxy_list = self.proxies.get_proxies()
        # All tasks for this run
        one_task = self.tasker.get_task()
        # self.loop_num: how many passes to split the task into, to keep the Semaphore from getting too large
        for index in range(self.loop_num):
            # URLs for this pass
            per_step_urls = self.tasker.get_perloop(one_task)
            # Tasks to run
            tasks = tieba_task(index, per_step_urls, proxy_list)
            # Run them on the event loop
            loop.run_until_complete(asyncio.gather(*tasks))
        self.end_task(loop)
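
A typical entry point would simply instantiate the scheduler and kick off the first pass; a minimal sketch, assuming the names used by the class above are in scope:

if __name__ == '__main__':
    scheduler = Scheduler()
    scheduler.start_task()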
Example #3
def main():
    parser = argparse.ArgumentParser(
        description='Penta is Pentest automation tool')

    parser.add_argument("-target",
                        dest="target",
                        help="Specify target IP / domain")
    parser.add_argument(
        "-ports",
        dest="ports",
        help=
        "Specify the target port(s) separated by comma. Default: 21,22,25,80,110,443,8080",
        default="21,22,25,80,110,443,8080")
    parser.add_argument("-proxy", dest="proxy", help="Proxy[IP:PORT]")

    options = parser.parse_args()

    checker = Inspect()
    nmap_scan = NmapScanner()
    dns_scan = DnsScanner()
    shodan_search = ShodanSearch()
    ftp_access = FtpConnector()
    ssh_access = SshConnector()
    msf_scan = MetaSploitRPC()
    log_handler = LogHandler()

    hostname = ""
    num_menu = ""

    if options.target is None:
        while hostname == "":
            hostname = input("[*] Specify IP or name domain:")
    else:
        hostname = options.target

    print("[*] Get IP address from host name...")
    ip = socket.gethostbyname(hostname)
    print('[+] The IP address of {} is {}{}{}\n'.format(
        hostname, Colors.GREEN, ip, Colors.END))

    while num_menu != 0:
        num_menu = menu()

        if num_menu == "0":
            sys.exit(1)

        elif num_menu == "1":
            port_list = options.ports.split(',')
            for port in port_list:
                nmap_scan.nmap_scan(ip, port)

            results = nmap_scan.nmap_json_export(ip, options.ports)
            log_filename = "scan_{}.json".format(hostname)

            log_handler.save_logfile(log_filename, results)
            print("[+] {}{}{} was generated".format(Colors.GREEN, log_filename,
                                                    Colors.END))
            print("\n")

        elif num_menu == "2":
            nmap_scan.nmap_menu(ip)
            print("\n")

        elif num_menu == "3":
            print("\n")
            checker.check_option_methods(hostname)
            print("\n")

        elif num_menu == "4":
            print("\n")
            dns_scan.check_dns_info(ip, hostname)
            print("\n")

        elif num_menu == "5":
            shodan_search.shodan_host_info(ip)
            print("\n")

        elif num_menu == "6":
            ftp_access.ftp_connect_anonymous(ip)
            print("\n")

        elif num_menu == "7":
            ssh_access.ssh_connect(ip)
            print("\n")

        elif num_menu == "8":
            msf_scan.msf_scan(ip)
            print("\n")

        elif num_menu == "9":
            # TODO: hydra brute force login --> smb ssh ftp http
            # TODO: malware detect functions e.g avast socks
            pass

        elif num_menu == "99":
            hostname = input("[*] Specify IP or name domain:")
            print("[*] Get IP address from host name...")
            ip = socket.gethostbyname(hostname)
            print('[+] The IP address of {} is {}{}{}\n'.format(
                hostname, Colors.GREEN, ip, Colors.END))

        else:
            print("[-] Incorrect option")
Example #4
from logging import ERROR, INFO
from itertools import count
from xml.dom.minidom import parse
from urllib2 import urlopen

import scrapy

from crawler.items import PlayerIdItem
from utils import LogHandler


#constants
CURRENT_PLAYERS_SITEMAP = 'http://www.nba.com/current_players.xml'
LOGGER = LogHandler(__name__)

class PlayerIdSpider(scrapy.Spider):
	"""
	Scrapes current NBA players' ids from their nba.com page.

	Attributes:
		name (str): Name of spider
		allow_domains (List[str]): Domains which spider is allowed to scrape
		current_players (List[str]): Nba.com urls of current NBA players
		start_urls (List[str]): Urls which spider will scrape
	"""

	name = 'PlayerIdSpider'
	allow_domains = ['http://nba.com/']
	current_players = parse(urlopen(CURRENT_PLAYERS_SITEMAP)).getElementsByTagName('loc')
	start_urls = [p.childNodes[0].data for p in current_players]
Example #5
def test_log_handler():
    """
    test function LogHandler  in Util/LogHandler
    :return:
    """
    log = LogHandler('test')
    log.info('this is a log from test')

    log.rename(name='test1')
    log.info('this is a log from test1')

    log.rename(name='test2')
    log.info('this is a log from test2')
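
The examples only show LogHandler being consumed; the sketch below is one way such a wrapper could look, assuming it simply delegates to logging.Logger (the rename() behaviour and handler setup here are assumptions for illustration, not the projects' actual implementation):

import logging

class LogHandler:
    """Minimal stand-in for the LogHandler used above (assumed API)."""

    def __init__(self, name='root', level=logging.INFO):
        self._logger = logging.getLogger(name)
        self._logger.setLevel(level)
        if not self._logger.handlers:
            self._logger.addHandler(logging.StreamHandler())

    def rename(self, name):
        # Point at a differently named logger, as log.rename(name='test1') does above.
        self._logger = logging.getLogger(name)

    def __getattr__(self, item):
        # Delegate info()/debug()/error() etc. to the underlying logger.
        return getattr(self._logger, item)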
Example #6
class BaseFetcher(ABC):
    name = None
    enabled = True
    urls = []
    logger = LogHandler('Provider')
    use_proxy = False

    @staticmethod
    def sleep(sec):
        def decorator(func):  # Decorator core: takes the decorated function and returns a wrapped version
            if sec:

                def wrapper(self, *args, **kwargs):  # Wrapper step: *args/**kwargs fit any signature
                    self.lock.acquire()
                    try:
                        func(self, *args, **kwargs)  # Call the wrapped function
                        gevent.sleep(sec)
                    except Exception as e:
                        self.logger.debug(e)
                    self.lock.release()

                return wrapper
            else:
                return func

        return decorator

    def __init__(self, tasks, result, pool=None):
        self._tasks = tasks
        self._result = result
        self.timeout = 5
        self.headers = {
            'User-Agent':
            'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.321.132 Safari/537.36',
            'Accept': '*/*',
            'Pragma': 'no-cache',
            'Cache-control': 'no-cache',
            'Referer': 'https://www.google.com/'
        }
        self.pool = pool
        self.lock = BoundedSemaphore()
        self.prepare()  # must run after init

    def request(self, url):
        resp = self._request(url)
        proxies = self.handle(resp)
        self.add_result(proxies)

    def _request(self, url):
        if not Config.proxy:
            return Response()
        if self.use_proxy:
            return requests.get(url,
                                headers=self.headers,
                                proxies={
                                    'http': 'http://' + Config.proxy,
                                    'https': 'https://' + Config.proxy
                                },
                                timeout=self.timeout)
        else:
            return requests.get(url,
                                headers=self.headers,
                                timeout=self.timeout)

    def prepare(self):
        pass

    def add_task(self, func):
        self._tasks.append(func)

    def add_result(self, result):
        if isinstance(result, (list, set)):
            for i in result:
                if isinstance(i, tuple):
                    self._result.add(':'.join(i))
                else:
                    self._result.add(i)
        elif isinstance(result, str):
            self._result.add(result)
        elif isinstance(result, tuple):
            self._result.add(':'.join(result))
        else:
            raise TypeError

    @abstractmethod
    def handle(self, resp: Response):
        pass

    def process_urls(self):
        return self.urls

    def fill_task(self):
        urls = self.process_urls()
        for url in urls:
            task = gevent.spawn(self.request, url)
            self.add_task(task)
            if self.pool is not None:
                self.pool.add(task)

    def __str__(self):
        return '<Provider name=%s, enabled=%s>' % (self.name, self.enabled)

    @classmethod
    def test(cls):
        tasks = []
        result = set()
        cls(tasks, result).fill_task()
        gevent.joinall(tasks)
        print(result)
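
A concrete provider only has to declare its source urls and implement handle(); a minimal sketch assuming the BaseFetcher above, with a hypothetical provider name and URL:

class ExampleFetcher(BaseFetcher):
    name = 'example'
    urls = ['http://example.com/free-proxy-list.txt']  # hypothetical source

    def handle(self, resp: Response):
        # add_result() accepts lists/sets of "ip:port" strings or (ip, port) tuples.
        return {line.strip() for line in resp.text.splitlines() if ':' in line}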
Example #7
# coding:utf-8
import time

import gevent

from manager import ProxyManager
from utils import LogHandler
from verify.proxy_verifier import ProxyGeventVerifier

logger = LogHandler('ProxyVerifyGeventScheduler')


class ProxyVerifyGeventScheduler:
    def run(self):
        try:
            proxy_manager = ProxyManager()
            logger.info("Start proxy verify")
            while True:
                proxies = proxy_manager.proxy_verified_before(minutes=30,
                                                              limit=1000)
                if not len(proxies):
                    logger.info(
                        'Not proxy need to be verified! Sleep [ 5 ] minutes.')
                    proxy_manager.close()
                    break
                verifier = ProxyGeventVerifier()
                start = time.time()
                tasks = verifier.generate_tasks(proxies)
                logger.info('Created %d verify tasks' % len(proxies))
                gevent.joinall(tasks)
                logger.info('Proxy Verify Using %d sec.' %
                            (time.time() - start))
Example #8
import time
from _ssl import SSLError
from json import JSONDecodeError

import gevent
from gevent import socket
from utils._parser import HTTPParseError

from verify.connector import GeventConnector
from model import ProxyModel
from utils import LogHandler
from utils.errors import BadStatusError, RecvTimeout, ConnectTimeout, BadResponseError, ProxySendError
from utils.functions import get_self_ip
from verify.detector import Detector

logger = LogHandler('ProxyGeventVerifier')


class ProxyGeventVerifier:
    def __init__(self, post=False, timeout=4):
        ext_ip = get_self_ip()
        self._method = 'POST' if post else 'GET'
        self.http_judge = Detector('http://httpbin.skactor.tk:8080/anything', ext_ip)
        self.https_judge = Detector('https://httpbin.skactor.tk/anything', ext_ip)
        self._timeout = timeout

    def generate_tasks(self, proxies):
        tasks = []
        for proxy in proxies:
            proxy.init(GeventConnector)
            task = gevent.spawn(self.check, proxy)
            tasks.append(task)
        return tasks
Example #9
def ip_menu(options):
    hostname = ""
    num_menu = ""

    checker = Inspect()
    nmap_scan = NmapScanner()
    dns_scan = DnsScanner()
    shodan_search = ShodanSearch()
    ftp_access = FtpConnector()
    ssh_access = SshConnector()
    msf_rpc_scan = MetaSploitRPC()
    log_handler = LogHandler()

    if options.target is None:
        while hostname == "":
            hostname = input("[>] Specify IP or name domain: ")
    else:
        hostname = options.target

    print("[*] Get IP address from host name...")
    ip = socket.gethostbyname(hostname)
    print('[+] The IP address of {} is {}{}{}\n'.format(hostname, Colors.LIGHTGREEN, ip, Colors.END))

    ip_menu_list()
    while num_menu != 0:
        num_menu = choice_num()
        if num_menu == 0:
            main_menu(options)

        elif num_menu == 1:
            port_list = options.ports.split(',')
            for port in port_list:
                nmap_scan.nmap_scan(ip, port)

            results = nmap_scan.nmap_json_export(ip, options.ports)
            log_filename = "scan_{}.json".format(hostname)

            log_handler.save_logfile(log_filename, results)
            print("[+] {}{}{} was generated".format(Colors.LIGHTGREEN, log_filename, Colors.END))

        elif num_menu == 2:
            nmap_scan.nmap_menu(ip)

        elif num_menu == 3:
            checker.check_option_methods(hostname)

        elif num_menu == 4:
            dns_scan.check_dns_info(ip, hostname)

        elif num_menu == 5:
            shodan_search.shodan_host_info(ip)

        elif num_menu == 6:
            ftp_access.ftp_connect_anonymous(ip)

        elif num_menu == 7:
            ssh_access.ssh_connect(ip)

        elif num_menu == 8:
            msf_rpc_scan.scan(ip)

        elif num_menu == 9:
            # TODO: hydra brute force login --> smb ssh ftp http
            # TODO: malware detect functions e.g avast socks
            pass

        elif num_menu == 99:
            hostname = input("[*] Specify IP or name domain: ")
            print("[*] Get IP address from host name...")
            ip = socket.gethostbyname(hostname)
            print('[+] The IP address of {} is {}{}{}\n'.format(hostname, Colors.LIGHTGREEN, ip, Colors.END))

        else:
            logging.error("Incorrect option")
        ip_menu_list()
Example #10
# coding:utf-8
import datetime

from sqlalchemy import asc, desc, update

from model import IPLocation
from model import ProxyModel
from utils import Config, LogHandler

logger = LogHandler('ProxyManager')


class ProxyManager:
    def __init__(self):
        self.session = Config.Session()

    def get_proxy(self, proxy: ProxyModel) -> ProxyModel:
        return self.session.query(ProxyModel).get(proxy.unique_id)

    def get_iploc(self, ip) -> IPLocation:
        return self.session.query(IPLocation).get(ip)

    def all_proxy(self):
        return self.session.query(ProxyModel).all()

    def all_iploc(self):
        return self.session.query(IPLocation).all()

    def all_usable_proxy(self):
        return self.session.query(ProxyModel).filter(
            ProxyModel.usable == 1).all()
Example #11
import struct
from abc import ABC, abstractmethod
from socket import inet_aton

from utils import LogHandler
from utils.errors import BadStatusError, BadResponseError

__all__ = ['Socks5Ngtr', 'Socks4Ngtr', 'Connect80Ngtr', 'Connect25Ngtr',
           'HttpsNgtr', 'HttpNgtr', 'NGTRS']

SMTP_READY = 2201

logger = LogHandler('Negotiator')


def _CONNECT_request(host, port, **kwargs):
    kwargs.setdefault('User-Agent', 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.321.132 Safari/537.36')
    kw = {
        'host': host,
        'port': port,
        'headers': '\r\n'.join(('%s: %s' % (k, v) for k, v in kwargs.items()))
    }
    req = 'CONNECT {host}:{port} HTTP/1.1\r\nHost: {host}\r\n{headers}\r\nConnection: keep-alive\r\n\r\n'.format(**kw).encode()
    return req


class BaseNegotiator(ABC):
    """Base Negotiator."""

    name = None
    check_anon_lvl = False
Example #12
# coding:utf-8

from gevent import ssl, socket

from verify.connector.base_connector import BaseConnector
from utils import LogHandler
from utils.errors import RecvTimeout, ConnectTimeout, ProxySendError
from utils.response import HTTPSocketResponse, HTTPConnectionClosed
from verify.negotiators import NGTRS

logger = LogHandler('GeventConnector')


class GeventConnector(BaseConnector):
    def __init__(self, protocol, ip, port, timeout=5):
        super(GeventConnector, self).__init__(protocol, ip, port, timeout)
        self._socket = {'conn': None, 'ssl': None}
        self.negotiator = NGTRS[self.protocol.upper()](self)
        self.use_full_path = self.negotiator.use_full_path
        self.parser = None

    def negotiate(self, judge):
        self.negotiator.negotiate(judge)

    @property
    def socket(self):
        return self._socket.get('ssl') or self._socket.get('conn')

    @socket.setter
    def socket(self, value):
        if self._socket.get('conn'):
Example #13
# coding:utf-8
import gc
import time

import gevent

from manager import ProxyFetcherManager, ProxyManager
from model import ProxyModel
from utils import LogHandler
from verify.proxy_verifier import ProxyGeventVerifier

gevent.hub.Hub.NOT_ERROR = (Exception, )
logger = LogHandler('ProxyFetcherScheduler')


class ProxyFetcherScheduler:
    def __gen_fetch_tasks(self, tasks, result):
        start = time.time()
        for FetcherClass in ProxyFetcherManager.fetchers():
            if not FetcherClass.enabled:
                continue
            fetcher = FetcherClass(tasks, result)
            logger.info("Start Fetcher: %s" % fetcher)
            fetcher.fill_task()
        logger.debug('Using %.2f seconds to start fetchers' %
                     (time.time() - start))

    def __wait_fetch(self, tasks):
        start = time.time()
        gevent.joinall(tasks)
        logger.debug('Using %.2f second to finish fetch processes' %