示例#1
0
    if os.name == 'nt':
        import ctypes
        ctypes.windll.kernel32.SetConsoleTitleW('%s %s' % (_name, __version__))

    config = LoadConfig(CONFIG)

    logger = logging.getLogger(__name__)
    logger.setLevel(getattr(logging, config.LOGLEVEL, logging.INFO))
    handler = logging.StreamHandler()
    formatter = logging.Formatter('%(asctime)s %(message)s', datefmt='[%H:%M]')
    handler.setFormatter(formatter)
    logger.addHandler(handler)

    pools = ConnectionPools(CONFIG)
    proxpool = urllib3.ProxyManager(config.PROXADDR, num_pools=10, maxsize=8,
                                    # A little longer than timeout of rear pool
                                    # to avoid trigger front server exception handler
                                    timeout=urllib3.util.timeout.Timeout(connect=90.0, read=310.0))

    frontserver = FrontServer(('', config.FRONTPORT), FrontRequestHandler)
    rearserver = RearServer(('', config.REARPORT), RearRequestHandler)
    for worker in (frontserver.serve_forever, rearserver.serve_forever,
                   pools.reloadConfig):
          thread = threading.Thread(target=worker)
          thread.daemon = True
          thread.start()

    print("=" * 76)
    print('%s %s (urllib3/%s)' % (_name, __version__, urllib3.__version__))
    print()
    print('  FrontServer  : localhost:%s' % config.FRONTPORT)
    print('  RearServer   : localhost:%s' % config.REARPORT)
    def __init__(self, pools_size=4, max_size=4):
        """Build the urllib3 pool manager used for all API requests.

        TLS, certificate and proxy settings are read from the global
        ``Configuration`` object; a ``ProxyManager`` is used when a proxy is
        configured, otherwise a plain ``PoolManager``.

        :param pools_size: number of connection pools to cache.
        :param max_size: maximum number of connections kept per pool.
        """
        # urllib3.PoolManager will pass all kw parameters to connectionpool
        # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75
        # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680
        # ca_certs vs cert_file vs key_file
        # http://stackoverflow.com/a/23957365/2985775

        # Read the configuration once instead of instantiating a fresh
        # Configuration object for every individual setting (the original
        # constructed it nine times).
        config = Configuration()

        # cert_reqs: verify the server certificate unless explicitly disabled.
        cert_reqs = ssl.CERT_REQUIRED if config.verify_ssl else ssl.CERT_NONE

        # ca_certs: fall back to Mozilla's root certificates (certifi) when
        # no custom CA bundle is configured.
        ca_certs = config.ssl_ca_cert or certifi.where()

        # Client certificate / key for mutual TLS (may be None).
        cert_file = config.cert_file
        key_file = config.key_file

        # Proxy settings; an empty/None proxy means a direct connection.
        proxy = config.proxy
        proxy_username = config.proxy_username
        proxy_password = config.proxy_password

        # Allow retries for every HTTP method, including non-idempotent ones.
        retries = urllib3.util.Retry()
        retries.allowed_methods = {'DELETE', 'GET', 'HEAD', 'OPTIONS', 'POST', 'PUT', 'PATCH', 'TRACE'}

        # https pool manager
        if proxy:
            headers = None
            if proxy_username and proxy_password:
                # Pre-build the Proxy-Authorization header for the proxy hop.
                headers = urllib3.make_headers(proxy_basic_auth=proxy_username + ':' + proxy_password)
            self.pool_manager = urllib3.ProxyManager(
                retries=retries,
                num_pools=pools_size,
                maxsize=max_size,
                block=True,
                cert_reqs=cert_reqs,
                ca_certs=ca_certs,
                cert_file=cert_file,
                key_file=key_file,
                proxy_url=proxy,
                proxy_headers=headers
            )
        else:
            self.pool_manager = urllib3.PoolManager(
                retries=retries,
                num_pools=pools_size,
                maxsize=max_size,
                block=True,
                cert_reqs=cert_reqs,
                ca_certs=ca_certs,
                cert_file=cert_file,
                key_file=key_file
            )
示例#3
0
import os
import urllib3
import certifi
import MongoDB
from PIL import Image
from urllib3.contrib.socks import SOCKSProxyManager


# just a test
url = "https://game.nihaoma.top/t1/?template=blue&token=b595fa57&CateID=10#/"


# Direct pool manager: verify server certificates against Mozilla's CA
# bundle shipped with certifi.  (urllib3/certifi imports added above --
# they were referenced but never imported in this snippet.)
http = urllib3.PoolManager(
    cert_reqs = 'CERT_REQUIRED',
    ca_certs = certifi.where()
)
######## Route some sites through the local SSR proxy
# NOTE(review): ProxyManager takes ONE proxy URL; its second positional
# parameter is num_pools, so passing a second URL string there was a bug
# (TypeError at connect time).  A single HTTP proxy URL handles both http
# and https targets (https is tunnelled via CONNECT).
proxy = urllib3.ProxyManager('http://127.0.0.1:1087')

sockproxy = SOCKSProxyManager('socks5://localhost:1086')



'''
通过ssr代理访问 端口1080 
无Referer
'''
# def visitByProxy(url):
#     try:
#         web = proxy.request('GET', url,
#                                  headers={
#                                      'User-Agent':
#                                          'ozilla/5.0 (Macintosh; Intel Mac OS X 10_14_5) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/12.1.1 Safari/605.1.15'
示例#4
0
	url:设置请求的链接
	fields:设置对应请求需要发送的参数,该数据时字典类型,可以通过键值对设置
	headers:用来设置请求的头部信息,比如设置数据的编码格式,数据的参数类型,模拟浏览器发送请求等
"""

# Browser-like request headers (spoofs desktop Chrome).
header = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.116 Safari/537.36', 'Content-type': 'text/json'}
# Pool of candidate HTTP proxy servers.
proxy_list = ["HTTP://103.10.86.203:8080", "HTTP://110.16.80.106:8080", "HTTP://27.128.187.22:3128", "HTTP://183.185.1.47:9797", "HTTP://180.101.99.110:80", "HTTP://175.150.107.61:1133", "HTTP://120.77.206.107:3128", "HTTP://119.131.88.242:9797", "HTTP://125.88.190.1:3128", "HTTP://171.37.79.169:9797", "HTTP://59.57.148.165:9999", "HTTP://183.164.238.74:9999", "HTTP://117.69.200.167:9999", "HTTP://180.101.99.110:80", "HTTP://1.196.161.203:9999", "HTTP://103.10.86.203:8080", "HTTP://27.128.187.22:3128", "HTTP://117.57.90.81:9999", "HTTP://183.185.1.47:9797", "HTTP://117.28.96.18:9999"]
# Pick one proxy at random for this run.
ip = random.choice(proxy_list)

# Simple GET request without a proxy:
# http = urllib3.PoolManager(headers=header)

# Route the request through the randomly chosen proxy.
http = urllib3.ProxyManager(ip, proxy_headers=header)

"""
timeout=urllib3.Timeout(connect=3, read=3):将连接的timeout和读的timeout分开设置。如果想让所有的request都遵循一个timeout,可以将timeout参数定义在PoolManager中

retries=urllib3.Retry(3, redirect=2):进行3次请求重试,但是只进行2次重定向。如果想让所有请求都遵循一个retry策略,可以在PoolManager中定义retry参数
"""
# Fix: HTTP method names are case-sensitive and urllib3 does not upcase
# them -- a lowercase 'get' bypasses the URL-encoding method set and is
# sent literally on the wire, which most servers reject.
response = http.request(
    'GET',
    'http://www.baidu.com',
    timeout=urllib3.Timeout(connect=3, read=2),
    retries=urllib3.Retry(1, redirect=2))
if response.status == 200:
    print(ip)
# 模拟浏览器发送请求
示例#5
0
    def send_request(self,
                     method,
                     target_url,
                     preload_content=True,
                     query_param=None,
                     body_param=None,
                     enc='utf-8',
                     cert_ignore=False):
        """Issue one HTTP request, optionally through the configured proxy.

        :param method: HTTP method name; 'get' (any case) performs a GET,
            everything else performs a POST.
        :param target_url: URL to request.
        :param preload_content: forwarded to urllib3; False streams the body.
        :param query_param: dict of query-string fields for the GET branch.
        :param body_param: dict of parameters for the POST branch; note they
            are urlencoded and appended to the URL, not sent as a body.
        :param enc: encoding used when urlencoding body_param.
        :param cert_ignore: True disables hostname checking and urllib3's
            insecure-request warnings.
        :return: tuple (res, server_header, res_header, res_body, encoding)
            where res is the raw urllib3 response, or None on failure.
        """
        res_header = ''
        res_body = ''
        server_header = '-'
        res = None
        content_type_value = ''

        # Initialize empty parameter set.
        if query_param is None:
            query_param = {}
        if body_param is None:
            body_param = {}

        # Set proxy server.
        http = None
        ctx = ssl.create_default_context()
        ctx.set_ciphers('DEFAULT')

        # Ignore verification of certificate.
        if cert_ignore:
            ctx.check_hostname = False
            urllib3.disable_warnings()

        # ctx.set_ciphers('DEFAULT@SECLEVEL=1')
        if self.proxy != '':
            self.print_message(WARNING,
                               'Set proxy server: {}'.format(self.proxy))
            if self.proxy_user != '':
                # Pre-build the Proxy-Authorization header for the proxy hop.
                headers = urllib3.make_headers(
                    proxy_basic_auth=self.proxy_user + ':' + self.proxy_pass)
                http = urllib3.ProxyManager(timeout=self.con_timeout,
                                            headers=self.http_req_header,
                                            proxy_url=self.proxy,
                                            proxy_headers=headers)
            else:
                http = urllib3.ProxyManager(timeout=self.con_timeout,
                                            headers=self.http_req_header,
                                            proxy_url=self.proxy)
        else:
            # NOTE(review): ssl_version=PROTOCOL_TLSv1 pins TLS 1.0, which
            # modern servers commonly reject -- confirm this is intentional.
            if cert_ignore:
                http = urllib3.PoolManager(timeout=self.con_timeout,
                                           headers=self.http_req_header,
                                           ssl_version=ssl.PROTOCOL_TLSv1,
                                           ssl_context=ctx,
                                           cert_reqs=ssl.CERT_NONE)
            else:
                http = urllib3.PoolManager(timeout=self.con_timeout,
                                           headers=self.http_req_header,
                                           ssl_version=ssl.PROTOCOL_TLSv1,
                                           ssl_context=ctx)

        try:
            if method.lower() == 'get':
                res = http.request('GET',
                                   target_url,
                                   fields=query_param,
                                   preload_content=preload_content,
                                   redirect=self.redirect)
            else:
                # NOTE(review): POST parameters are appended to the query
                # string here rather than sent in the request body.
                encoded_args = urlencode(body_param, encoding=enc)
                res = http.request('POST',
                                   target_url + '?' + encoded_args,
                                   preload_content=preload_content,
                                   redirect=self.redirect)

            # Flatten response headers into one string, remembering the
            # Server and Content-Type values along the way.
            for header in res.headers.items():
                res_header += header[0] + ': ' + header[1] + '\r\n'
                if header[0].lower() == 'server':
                    server_header = header[0] + ': ' + header[1]
                if header[0].lower() == 'content-type':
                    content_type_value = header[1]

            # Detect encoding.
            encoding = self.detect_encoding(res.data, content_type_value)

            # Get response body.
            res_body = res.data.decode(encoding)
        except Exception as e:
            # Best effort: on any failure fall back to the default charset
            # and return whatever has been gathered so far.
            self.print_message(WARNING,
                               'Use default charset: {}'.format(self.encoding))
            encoding = self.encoding
            self.print_exception(e,
                                 'Access is failure : {}'.format(target_url))
            self.write_log(30, 'Accessing is failure : {}'.format(target_url))
        return res, server_header, res_header, res_body, encoding
示例#6
0
File: net.py  Project: muzidudu/PyCrawler
def set_proxy(ip, port):
    """Initialize the module-wide urllib3 proxy connection pool."""
    global HTTP_CONNECTION_POOL
    proxy_url = "http://%s:%s" % (ip, port)
    HTTP_CONNECTION_POOL = urllib3.ProxyManager(proxy_url, retries=False)
    output.print_msg("设置代理成功")
示例#7
0
# NOTE(review): this is a static-analysis (semgrep) test fixture -- the
# insecure cert_reqs values below are intentional targets for the
# `disabled-cert-validation` rule annotations; do not "fix" them.
with socket.create_connection((hostname, 443)) as sock:
    # won't work because of how we are using python to filter
    # todoruleid:disabled-cert-validation
    with context.wrap_socket(sock, server_hostname=hostname, cert_reqs = sss.CERT_NONE) as ssock:
        print(ssock.version())

from urllib3 import PoolManager
manager = PoolManager(10)
r = manager.request('GET', 'http://google.com/')

# ruleid:disabled-cert-validation
manager = PoolManager(10, cert_reqs = ssl.CERT_OPTIONAL)

# ruleid:disabled-cert-validation
proxy = ur3.ProxyManager('http://localhost:3128/', cert_reqs = ssl.CERT_NONE)

# ruleid:disabled-cert-validation
pool = ur3.connectionpool.HTTPSConnectionPool(cert_reqs=ssl.CERT_OPTIONAL)

# ruleid:disabled-cert-validation
pool = ur3.connection_from_url('someurl', cert_reqs= ssl.CERT_NONE)

# ruleid:disabled-cert-validation
pool = ur3.connection_from_url('someurl', cert_reqs='NONE')

# OK; invalid
pool = ur3.connection_from_url('someurl', cert_reqs='CERT NONE')

# ruleid:disabled-cert-validation
pool = ur3.connection_from_url('someurl', cert_reqs="NONE")
示例#8
0
    def create_vuln_yearly_db(self, cve_year, last_modified_date):
        """Download the NVD CVE feed for *cve_year* and rebuild the local DB.

        Fetches the yearly zip feed (optionally through the configured
        proxy), unpacks the contained JSON into ``self.nvd_db_dir`` and feeds
        it to ``extract_vuln_info``.

        :param cve_year: year string substituted into ``self.nvd_zip_url``.
        :param last_modified_date: forwarded to ``extract_vuln_info``.
        :return: NOTE(review): ``yearly_cve_list`` is never appended to in
            this method, so it is always returned empty -- presumably
            ``extract_vuln_info`` persists results elsewhere; confirm.
        """
        # Get cve list from NVD.
        self.utility.write_log(
            20, '[In] Create yearly vulnerability database [{}]'.format(
                self.file_name))

        target_url = self.nvd_zip_url.replace('*', cve_year)
        tmp_file = os.path.join(self.nvd_db_dir, 'temp_' + cve_year + '.zip')

        # Download zip file (include cve list) and uncompress zip file.
        target_json_name = ''
        self.utility.write_log(20, 'Accessing : {}'.format(target_url))
        self.utility.print_message(
            OK, 'Get {} CVE list from {}'.format(cve_year, target_url))

        http = None
        ctx = ssl.create_default_context()
        ctx.set_ciphers('DEFAULT')
        # ctx.set_ciphers('DEFAULT@SECLEVEL=1')
        if self.utility.proxy != '':
            self.utility.print_message(
                WARNING, 'Set proxy server: {}'.format(self.utility.proxy))
            if self.utility.proxy_user != '':
                # Pre-build Proxy-Authorization credentials for the proxy hop.
                headers = urllib3.make_headers(
                    proxy_basic_auth=self.utility.proxy_user + ':' +
                    self.utility.proxy_pass)
                http = urllib3.ProxyManager(timeout=self.con_timeout,
                                            headers=self.http_req_header,
                                            proxy_url=self.utility.proxy,
                                            proxy_headers=headers)
            else:
                http = urllib3.ProxyManager(timeout=self.con_timeout,
                                            headers=self.http_req_header,
                                            proxy_url=self.utility.proxy)
        else:
            http = urllib3.PoolManager(timeout=self.con_timeout,
                                       headers=self.http_req_header,
                                       ssl_version=ssl.PROTOCOL_TLS,
                                       ssl_context=ctx)

        # Stream the zip straight to disk without loading it into memory.
        try:
            with http.request('GET', target_url,
                              preload_content=False) as res, open(
                                  tmp_file, 'wb') as fout:
                shutil.copyfileobj(res, fout)
        except Exception as e:
            self.utility.print_exception(
                e, 'Access is failure : {}'.format(target_url))
            self.utility.write_log(
                30, 'Accessing is failure : {}'.format(target_url))

        # NOTE(review): if the download above failed, tmp_file may be missing
        # or truncated and ZipFile will raise here -- confirm this is the
        # intended failure mode.
        with zipfile.ZipFile(tmp_file, 'r') as downloaded_zip:
            target_json_name = downloaded_zip.namelist()[0]
            downloaded_zip.extractall(self.nvd_db_dir)
        os.remove(tmp_file)

        # Create cve list of cve file.
        yearly_cve_list = []
        # NUL bytes are stripped before JSON parsing to survive corrupt feeds.
        with codecs.open(os.path.join(self.nvd_db_dir, target_json_name),
                         'r',
                         encoding='utf-8') as fin:
            self.extract_vuln_info(json.loads(fin.read().replace('\0', '')),
                                   cve_year, last_modified_date)

        self.utility.write_log(
            20, '[Out] Create yearly vulnerability database [{}]'.format(
                self.file_name))
        return yearly_cve_list
def thread_function(test):
	"""Worker body: fetch youtube.com through the given host:port proxy."""
	manager = urllib3.ProxyManager('http://' + test)
	manager.request('GET', 'https://www.youtube.com')
示例#10
0
#!/usr/bin/env python3

import urllib3

# Direct (non-proxied) pool manager, kept for comparison; not used below.
pool = urllib3.PoolManager(10)

PROXY_ADDRESS = "http://165.24.10.8:8080"

# Route every request through the HTTP proxy.
proxy = urllib3.ProxyManager(PROXY_ADDRESS)

response = proxy.request('GET', 'http://www.packtpub.com')
print(response.status)
# Dump all response headers.  (Bare `headers.keys()` / `headers.values()`
# calls whose results were discarded have been removed -- they were no-ops.)
for header, value in response.headers.items():
    print(header + ":" + value)
示例#11
0
import urllib3
from bs4 import BeautifulSoup as bs

# All requests go through the corporate web proxy.
http = urllib3.ProxyManager('https://wwwproxy.sandia.gov:80')

# Paged search results; the page number is appended to this base URL.
base_url = 'https://petsearch.animalhumanenm.org/search/searchResults.asp?s=adoption&searchTypeId=4&animalType=2%2C15&statusID=3&submitbtn=Find+Animals&pagesize=16&task=view&tpage='
page_one = http.request('GET', base_url + '1')
first_page_content = bs(page_one.data)
示例#12
0
# Fix: compare against None with `is not` (identity), per PEP 8.
if results.onion is not None:  # if search terms set in terminal then change from default to that
    onionsite = results.onion  # set from argparse above in globals section

#TOR SETUP GLOBAL Vars
SOCKS_PORT = 9050  # TOR proxy port that is default from torrc, change to whatever torrc is configured to

# Set socks proxy and wrap the urllib module
#socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, '127.0.0.1', SOCKS_PORT) # sets default proxy for connect
#socket.socket = socks.socksocket # sets default socket to be the sockipy socket

# Perform DNS resolution through the socket
#def getaddrinfo(*args):
#  return [(socket.AF_INET, socket.SOCK_STREAM, 6, '', (args[0], args[1]))]
#socket.getaddrinfo = getaddrinfo
header = {
    'User-Agent': 'JAMES CAMPBELL jamescampbell.us SEARCH BOT! I FOUND YOU!!!!'
}
proxy = urllib3.ProxyManager(
    'http://127.0.0.1:8119/'
)  # using this with privoxy and forwarding to tor, but you could use the other code above and it works fine as well
r1 = proxy.request('GET', onionsite, headers=header)
print(r1.status)  # status code
print(r1.headers)  # header data
print(r1.data.decode('utf8'))  # html raw output
souper = BeautifulSoup(r1.data, "html.parser")
soupera = souper.find_all('a')  # get all a href's
for eachone in soupera:
    print('This is a link: \n', eachone.text)
exit()
# test connect to DuckDuckGo .onion site
示例#13
0
    def __init__(
        self,
        token,
        index,
        http_event_server,
        host="",
        http_event_port="8088",
        http_event_server_ssl=True,
        http_event_collector_ssl_verify=True,
        max_bytes=MAX_CONTENT_BYTES,
        proxy=None,
        timeout=9.05,
        disk_queue=False,
        disk_queue_size=MAX_DISKQUEUE_SIZE,
        disk_queue_compression=5,
        max_queue_cycles=80,
        max_bad_request_cycles=40,
        outage_recheck_time=300,
        num_fails_indicate_outage=10,
    ):
        """Set up a Splunk HTTP Event Collector (HEC) client.

        Builds the server URI list, the shared request headers, a urllib3
        pool manager (direct or via proxy), and an optional on-disk queue
        for events that cannot be delivered immediately.

        :param token: HEC token placed in the Authorization header.
        :param index: default Splunk index for events.
        :param http_event_server: one server hostname or a list of them.
        :param host: event host value; defaults to this machine's FQDN.
        :param http_event_port: HEC port as a string, default "8088".
        :param http_event_server_ssl: build https:// URIs when True.
        :param http_event_collector_ssl_verify: verify TLS certificates.
        :param max_bytes: maximum batched payload size in bytes.
        :param proxy: proxy spec, "host:port" or "scheme://host:port".
        :param timeout: per-request timeout in seconds.
        :param disk_queue: falsy to disable; otherwise the directory under
            which the fallback disk queue is created.
        :param disk_queue_size: maximum size of the disk queue.
        :param disk_queue_compression: compression level for queued data.
        :param max_queue_cycles: retry-cycle cap for queued events.
        :param max_bad_request_cycles: retry-cycle cap for bad requests.
        :param outage_recheck_time: seconds between outage re-checks.
        :param num_fails_indicate_outage: consecutive failures that mark
            an outage.
        """

        self.max_queue_cycles = max_queue_cycles
        self.max_bad_request_cycles = max_bad_request_cycles
        self.outage_recheck_time = outage_recheck_time
        self.num_fails_indicate_outage = num_fails_indicate_outage

        self.retry_diskqueue_interval = 60

        self.timeout = timeout
        self.token = token
        self.default_index = index
        self.batch_events = []
        self.max_byte_length = max_bytes
        self.current_byte_length = 0
        self.server_uri = []

        if proxy:
            proxy_s = proxy.split("://")
            if len(proxy_s) == 2:
                # the form http://host:port is preferred, so we can just use
                # the value directly although this does work, essentially all
                # proxy servies will 403 the CONNECT to an https endpoint over
                # http; so an http://proxy setting may be completely useless
                #
                # No, the above is unique to squid3 proxy in default configs --
                # probably for security reasons. a tinyproxy setting of
                # http://iphere:porthere actually does work! \o/
                self.proxy = proxy
            elif http_event_server_ssl:
                # the old behavior is to default to https proxy if the hec was https
                # this may not work for the various tinyproxy and etc running out there
                # but to make those older configs continue to work, we leave this behavior
                # however, using a form 'http://hostname:port' will disable it
                self.proxy = "https://{0}".format(proxy)
            else:
                self.proxy = "http://{0}".format(proxy)
        else:
            self.proxy = None

        # Set host to specified value or default to localhostname if no value provided
        if host:
            self.host = host
        else:
            self.host = get_fqdn()

        Payload.host = self.host

        # Build and set server_uri for http event collector
        # Defaults to SSL if flag not passed
        # Defaults to port 8088 if port not passed

        servers = http_event_server
        if not isinstance(servers, list):
            servers = [servers]
        for server in servers:
            if http_event_server_ssl:
                self.server_uri.append(
                    self.Server(server, http_event_port, proto="https"))
            else:
                self.server_uri.append(
                    self.Server(server, http_event_port, proto="http"))

        # build headers once
        self.headers = urllib3.make_headers(
            keep_alive=True,
            user_agent="hubble-hec/{0}".format(__version__),
            accept_encoding=True)
        self.headers.update({
            "Content-Type": "application/json",
            "Authorization": "Splunk {0}".format(self.token)
        })

        # 2019-09-24: lowered retries from 3 (9s + 3*9s = 36s) to 1 (9s + 9s = 18s)
        # Each new event could potentially take half a minute with 3 retries.
        # Since Hubble is single threaded, that seems like a horribly long time.
        # (When retries fail, we potentially queue to disk anyway.)
        pm_kw = {
            "timeout":
            self.timeout,
            "retries":
            urllib3.util.retry.Retry(
                total=1,  # total retries; overrides other counts below
                connect=3,  # number of retires on connection errors
                read=3,  # number of retires on read errors
                status=3,  # number of retires on bad status codes
                redirect=10,  # avoid redirect loops by limiting redirects to 10
                respect_retry_after_header=True,
            ),
        }

        if http_event_collector_ssl_verify:
            pm_kw.update({
                "cert_reqs": "CERT_REQUIRED",
                "ca_certs": certifi.where()
            })
        else:
            pm_kw.update({"cert_reqs": "CERT_NONE"})
            urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

        if self.proxy:
            log.debug("using ProxyManager(%s)", self.proxy)
            self.pool_manager = urllib3.ProxyManager(self.proxy, **pm_kw)
        else:
            self.pool_manager = urllib3.PoolManager(**pm_kw)

        if disk_queue:
            # The queue directory name is derived from a digest of the server
            # URIs, so each distinct server set gets its own queue.
            if IS_FIPS_ENABLED:
                md5 = hashlib.md5(usedforsecurity=False)
            else:
                md5 = hashlib.md5()
            uril = sorted([x.uri for x in self.server_uri])
            for url_ in uril:
                md5.update(encode_something_to_bytes(url_))
            actual_disk_queue = os.path.join(disk_queue, md5.hexdigest())
            log.debug("disk_queue for %s: %s", uril, actual_disk_queue)
            self.queue = DiskQueue(actual_disk_queue,
                                   size=disk_queue_size,
                                   compression=disk_queue_compression)
        else:
            self.queue = NoQueue()
示例#14
0
import time
import re
import os
import MongoDB
from PIL import Image
from urllib3.contrib.socks import SOCKSProxyManager

url = "https://game.nihaoma.top/t1/?template=blue&token=b595fa57&CateID=10#/"


# Verify server certificates against Mozilla's CA bundle.
# NOTE(review): urllib3 and certifi are referenced but not imported in this
# snippet -- presumably imported earlier in the original file; confirm.
http = urllib3.PoolManager(
    cert_reqs = 'CERT_REQUIRED',
    ca_certs = certifi.where()
)
######## Route some sites through the local SSR proxy
proxy = urllib3.ProxyManager('http://localhost:1080')

sockproxy = SOCKSProxyManager('socks5://localhost:1080')



'''
通过ssr代理访问 端口1080 
无Referer
'''
def visitByProxy(url):
    try:
        web = proxy.request('GET', url,
                                 headers={
                                     'User-Agent':
                                         'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36 LBBROWSER'
示例#15
0
def make_request(method,
                 host,
                 url,
                 username,
                 password,
                 fields=None,
                 skip_decode=False,
                 get_params=None):
    """Perform a basic-auth HTTP(S) request against ``host + url``.

    A per-call pool manager is built for the host's scheme, honouring the
    ``http_proxy`` / ``https_proxy`` environment variables; HTTPS requests
    verify certificates against the bundle returned by ``certs_file()``.

    :param method: HTTP method, e.g. 'GET' or 'POST'.
    :param host: base URL including scheme ("http://..." or "https://...").
    :param url: path appended to *host*.
    :param username: basic-auth user name.
    :param password: basic-auth password.
    :param fields: optional request fields passed through to urllib3.
    :param skip_decode: when True, return raw bytes without charset decoding.
    :param get_params: currently unused; kept for interface compatibility.
        Fix: the default was a mutable ``{}`` shared across all calls --
        replaced with the None-sentinel idiom.
    :return: tuple of (data, charset).
    :raises HttpNotAuthorized: on a 401/403 response.
    :raises HttpNotFound: on a 404 response.
    :raises Exception: on an unknown scheme or any other non-2xx/3xx status.
    """
    if get_params is None:
        get_params = {}

    # Initialize http and https pool managers
    num_pools = 1
    managers = {}

    if host.lower().startswith("http://"):
        scheme = "http"
        if "http_proxy" in os.environ:
            proxy_url = urllib3.util.url.parse_url(os.environ["http_proxy"])
            managers["http"] = urllib3.ProxyManager(
                proxy_url=proxy_url.url,
                proxy_headers=urllib3.util.make_headers(
                    user_agent=user_agent_identifier(),
                    proxy_basic_auth=proxy_url.auth),
                num_pools=num_pools)
        else:
            managers["http"] = urllib3.PoolManager(num_pools=num_pools)
    elif host.lower().startswith("https://"):
        scheme = "https"
        if "https_proxy" in os.environ:
            proxy_url = urllib3.util.url.parse_url(os.environ["https_proxy"])
            managers["https"] = urllib3.ProxyManager(
                proxy_url=proxy_url.url,
                proxy_headers=urllib3.util.make_headers(
                    user_agent=user_agent_identifier(),
                    proxy_basic_auth=proxy_url.auth),
                num_pools=num_pools,
                cert_reqs=CERT_REQUIRED,
                ca_certs=certs_file())
        else:
            managers["https"] = urllib3.PoolManager(num_pools=num_pools,
                                                    cert_reqs=CERT_REQUIRED,
                                                    ca_certs=certs_file())
    else:
        raise Exception("Unknown scheme")

    charset = None
    headers = urllib3.util.make_headers(basic_auth='{0}:{1}'.format(
        username, password),
                                        accept_encoding=True,
                                        user_agent=user_agent_identifier(),
                                        keep_alive=True)

    response = None
    try:
        manager = managers[scheme]
        # All arguments must be bytes, not unicode
        encoded_request = encode_args(manager.request)
        response = encoded_request(method,
                                   host + url,
                                   headers=dict(headers),
                                   fields=fields)
        data = response.data
        if not skip_decode:
            charset = determine_charset(response)
            if isinstance(data, bytes):
                data = data.decode(charset)
        # Map HTTP error statuses onto the project's exception hierarchy.
        if response.status < 200 or response.status >= 400:
            if response.status in (401, 403):
                raise HttpNotAuthorized(data)
            elif response.status == 404:
                raise HttpNotFound(data)
            else:
                raise Exception(data)
        return data, charset
    except SSLError:
        logger.error("Invalid SSL certificate")
        raise
    finally:
        # Always release the connection back to the pool.
        if response is not None:
            response.close()
示例#16
0
#    async()
    print('sending chunk to async sync_file')
    print(len(pickle.dumps(chunk,-1)))
    print(chunk)
#    async()
    print('----')

# Build the optional proxied HTTP client handed to the Minio SDK.
if proxy_url is None:
    httpClient = None
else:
    import urllib3
    # Proxied client with certificate verification and bounded retries on
    # transient 5xx errors.
    httpClient = urllib3.ProxyManager(
                     proxy_url,
                     timeout=urllib3.Timeout.DEFAULT_TIMEOUT,
                     cert_reqs='CERT_REQUIRED',
                     retries=urllib3.Retry(
                         total=5,
                         backoff_factor=0.2,
                         status_forcelist=[500, 502, 503, 504]
                     )
                 )
# http_client=None lets Minio build its own default pool manager.
client = Minio(endpoint_domain,
               access_key=aws_access_key,
               secret_key=aws_secret_key,
               http_client=httpClient)
# Accumulators for the listing loop below.
objcount = 0
folders = set()
chunk = {}
for obj in client.list_objects_v2(path, prefix=prefix, recursive=True):
#    print(obj)
    # add this object's parent to folders map
    if not (obj.object_name.endswith('/')):
示例#17
0
    username = sys.argv[4]
elif sys.argv[3] == '-U':
    username = []
    for i in open(sys.argv[4], 'r').readlines():
        username.append(i.rstrip())
else:
    print Usage

# NOTE(review): Python 2 syntax (print statements) -- this snippet will not
# run under Python 3 without conversion.
#deal with password args
if sys.argv[5] == '-p':
    password = sys.argv[6]
elif sys.argv[5] == '-P':
    # -P: read one password per line from the given file.
    password = []
    for i in open(sys.argv[6], 'r').readlines():
        password.append(i.rstrip())
else:
    print Usage

# Send every attempt through a local intercepting proxy (e.g. Burp on 8080).
stream = urllib3.ProxyManager("http://127.0.0.1:8080/")
#stream=urllib3.PoolManager()
# NOTE(review): zip() pairs credentials positionally, so this tries u1:p1,
# u2:p2, ... -- it is not a full cartesian-product brute force; confirm.
for i, (u, p) in enumerate(zip(username, password)):
    req = stream.request(method, target, fields={'username': u, 'password': p})
    print req.status
    if '<td><input type="submit" name="submit" value="Log In" class="button"></td>' in req.data:  # put some test string here from response.
        print '[!] Wrong creds'
    else:
        print '[!] found creds--> %s:%s' % (u, p)
    #print stream.headers

### 0x10w1eve1 ###
示例#18
0
# Load the bot settings; on first run, write a skeleton file and exit so the
# user can fill in the token.
try:
    with open(settings_file, 'r') as fh:
        settings = json.load(fh)
    if settings['token'] is None:
        exit()
except FileNotFoundError:
    settings = {'token': None, 'use_proxy': False}
    with open(settings_file, 'w') as fh:
        json.dump(settings, fh, indent=4)
    exit()

if settings['use_proxy']:
    # Route all Telegram API traffic through the fixed proxy.
    proxy_url = "http://proxy.server:3128"
    telepot.api._pools = {
        'default':
        urllib3.ProxyManager(proxy_url=proxy_url,
                             num_pools=3,
                             maxsize=10,
                             retries=False,
                             timeout=30),
    }
    telepot.api._onetime_pool_spec = (urllib3.ProxyManager,
                                      dict(proxy_url=proxy_url,
                                           num_pools=1,
                                           maxsize=1,
                                           retries=False,
                                           timeout=30))

mybot = ChatHandler(Chat, settings['token'], 'db.json')
mybot.start()
示例#19
0
    def __init__(self, configuration, pools_size=4, maxsize=None):
        """Create the urllib3 pool manager backing this API client.

        Uses a ``ProxyManager`` when ``configuration.proxy`` is set and a
        plain ``PoolManager`` otherwise; TLS settings (verification, CA
        bundle, client cert/key) come from *configuration*.

        :param configuration: client Configuration object.
        :param pools_size: number of connection pools to cache.
        :param maxsize: connections allowed per pool; falls back to
            ``configuration.connection_pool_maxsize`` and finally to 4.
        """
        # Require server-certificate verification unless explicitly disabled.
        cert_reqs = ssl.CERT_REQUIRED if configuration.verify_ssl else ssl.CERT_NONE

        # Use the configured CA bundle, else certifi's Mozilla root certs.
        ca_certs = configuration.ssl_ca_cert or certifi.where()

        if maxsize is None:
            if configuration.connection_pool_maxsize is not None:
                maxsize = configuration.connection_pool_maxsize
            else:
                maxsize = 4

        # Keyword arguments shared by both pool-manager flavours.
        pool_kwargs = {
            'num_pools': pools_size,
            'maxsize': maxsize,
            'cert_reqs': cert_reqs,
            'ca_certs': ca_certs,
            'cert_file': configuration.cert_file,
            'key_file': configuration.key_file,
        }
        if configuration.assert_hostname is not None:
            pool_kwargs['assert_hostname'] = configuration.assert_hostname

        # https pool manager
        if configuration.proxy:
            if configuration.proxy_headers:
                proxy_headers = configuration.proxy_headers
            elif configuration.proxy_user and configuration.proxy_pass:
                proxy_headers = urllib3.make_headers(
                    proxy_basic_auth=
                    f'{configuration.proxy_user}:{configuration.proxy_pass}'
                )
            else:
                proxy_headers = None
            self.pool_manager = urllib3.ProxyManager(
                proxy_url=configuration.proxy,
                proxy_headers=proxy_headers,
                **pool_kwargs)
        else:
            self.pool_manager = urllib3.PoolManager(**pool_kwargs)
示例#20
0
File: utils.py  Project: club9822/rest
def _prepare_url_request(host, username, password):
    """
    Return a ProxyManager object (as defined in urllib3 [1]) that can be used
    to perform authorized requests to a specific host.

    Authorization header is constructed and set using "username" and "password"
    parameters. Also set the common HTTP headers that we want to be sent with
    each request.

    [1]: http://urllib3.readthedocs.io/en/latest/reference/#urllib3.poolmanager.ProxyManager  # noqa
    """
    def get_proxy_url(env_proxy, scheme):
        parsed_proxy = urllib3.util.url.parse_url(env_proxy)
        return urllib3.util.url.Url(
            scheme=scheme,
            auth=parsed_proxy.auth,
            host=parsed_proxy.host,
            port=parsed_proxy.port,
            path=parsed_proxy.path,
            query=parsed_proxy.query,
            fragment=parsed_proxy.fragment
        )

    # Initialize http and https pool managers
    num_pools = 1
    managers = {}
    if host.lower().startswith("http://"):
        scheme = "http"
        if os.environ.get("http_proxy"):
            proxy_url = get_proxy_url(os.environ["http_proxy"], scheme)
            managers["http"] = urllib3.ProxyManager(
                proxy_url=proxy_url.url,
                proxy_headers=urllib3.util.make_headers(
                    user_agent=user_agent_identifier(),
                    proxy_basic_auth=proxy_url.auth),
                num_pools=num_pools
            )
        else:
            managers["http"] = urllib3.PoolManager(num_pools=num_pools)
    elif host.lower().startswith("https://"):
        scheme = "https"
        if os.environ.get("https_proxy"):
            proxy_url = get_proxy_url(os.environ["https_proxy"], scheme)
            managers["https"] = urllib3.ProxyManager(
                proxy_url=proxy_url.url,
                proxy_headers=urllib3.util.make_headers(
                    user_agent=user_agent_identifier(),
                    proxy_basic_auth=proxy_url.auth),
                num_pools=num_pools,
                cert_reqs=CERT_REQUIRED,
                ca_certs=certs_file()
            )
        else:
            managers["https"] = urllib3.PoolManager(
                num_pools=num_pools,
                cert_reqs=CERT_REQUIRED,
                ca_certs=certs_file()
            )
    else:
        raise Exception("Unknown scheme")

    headers = urllib3.util.make_headers(
        basic_auth='{0}:{1}'.format(username, password),
        accept_encoding=True,
        user_agent=user_agent_identifier(),
        keep_alive=True
    )

    manager = managers[scheme]

    return headers, manager
Example #21
Score: 0
def test_http_proxy(proxy, time):
    """Probe *proxy* (``host:port``) by fetching youtube.com through it.

    Lets any urllib3 exception (timeout, connection refused, ...) propagate
    to the caller, which signals a dead proxy.

    NOTE(review): the parameter name ``time`` shadows the stdlib module of
    the same name inside this function.
    """
    manager = urllib3.ProxyManager('http://' + proxy)
    manager.request('GET', 'https://www.youtube.com', timeout=time)
Example #22
Score: 0
# Message returned by the platform on an explicit-deny authorization failure.
UNAUTHORIZED_MESSAGE = 'User is not authorized to access this resource with an explicit deny'

DEFAULT_REGION = "us-west-2"

ONBOARDING_SOURCE = "checkov"

# Headers sent with the signup request (browser-like User-Agent included).
SIGNUP_HEADER = {
    'Accept': 'application/json',
    'User-Agent':
    'Mozilla/5.0 (KHTML, like Gecko) Chrome/85.0.4183.83 Safari/537.36',
    'Content-Type': 'application/json;charset=UTF-8'
}

# Route traffic through https_proxy when the environment defines it;
# otherwise use a direct connection pool.
if 'https_proxy' in os.environ:
    http = urllib3.ProxyManager(os.environ['https_proxy'])
else:
    http = urllib3.PoolManager()


class BcPlatformIntegration(object):
    def __init__(self):
        self.bc_api_key = read_key()
        self.s3_client = None
        self.bucket = None
        self.credentials = None
        self.repo_path = None
        self.repo_id = None
        self.timestamp = None
        self.scan_reports = []
        self.bc_api_url = os.getenv('BC_API_URL',
Example #23
Score: 0
                index = random.randint(0, num_proxies - 1)
                value = all_proxies[index]
                proxy['id'] = value[0]
                proxy['ip'] = value[1]
                proxy['port'] = value[2]
                ok = check_proxy(proxy)
            return proxy
        else:
            return ProxyItem()

    def data_count(self):
        """Return the total number of rows stored in the proxy table."""
        query = "select count(*) from proxy"
        return self.dbHelper.count(query)

    def data_select(self):
        """Fetch every row from the proxy table via the DB helper."""
        query = "select * from proxy"
        return self.dbHelper.select(query)

    def data_clear(self):
        """Drop all records from the proxy table via the DB helper."""
        self.dbHelper.clear("proxy")

if __name__ == '__main__':
    # Smoke test: fetch a Baidu static asset through a hard-coded HTTP proxy
    # and print the response status code.
    target_url = "http://www.baidu.com/js/bdsug.js?v=1.0.3.0"
    proxy_addr = "http://%s:%s" % ('185.153.248.130', '80')
    pool = urllib3.ProxyManager(proxy_addr)
    response = pool.request('GET', target_url)
    print(response.status)