Code Example #1
    def pool_for(self, scheme, host, port):
        """
        Given a URL (from which a scheme and host can be extracted),
        return a connection pool (potentially with TLS state)
        which can be used to connect to the URL.
        """

        if scheme is None:
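            # No scheme given: use TLS when client key/cert material is configured, otherwise fall back to plain HTTP.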
            if self._keyfile:
                return urllib3.HTTPSConnectionPool(host, port,
                                                    key_file=self._keyfile,
                                                    cert_file=self._certfile,
                                                    ca_certs=self._cafile)
            else:
                return urllib3.HTTPConnectionPool(host, port)
        elif scheme == "http":
            return urllib3.HTTPConnectionPool(host, port)
        elif scheme == "https":
            if self._keyfile:
                return urllib3.HTTPSConnectionPool(host, port,
                                                    key_file=self._keyfile,
                                                    cert_file=self._certfile,
                                                    ca_certs=self._cafile)
            else:
                raise ValueError("SSL requested without providing certificate")
        elif scheme == "file":
            # FIXME what to do here?
            raise ValueError("Unsupported scheme "+scheme)
        else:
            raise ValueError("Unsupported scheme "+scheme)
Code Example #2
def main():
    """Main function."""
    # Gather all backlink information from the rest
    timestamp = datetime.datetime.now().strftime("%y-%m-%d")
    stats_dir = "/popularity_stats/" + timestamp + "/"
    url = 'http://127.0.0.1:45454/address/online/'
    pool = urllib3.HTTPConnectionPool("127.0.0.1",
                                      45454,
                                      timeout=10)
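    # The index endpoint returns one onion URL per line; strip the scheme and ".onion/" suffix to get bare ids.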
    links = pool.request('GET', url).data.decode('utf-8')
    links = links.replace(".onion/", "").replace("http://", "").split('\n')
    for onion_id in links:
        try:
            # Random delay: 3 minutes plus 1-59 seconds
            delay_time = 180 + random.randrange(1, 60)
            time.sleep(delay_time)
            if not onion_id:
                continue
            content_type = {'Content-Type': 'application/json'}
            onion_url = 'http://' + onion_id + '.onion/'
            print(onion_url)
            backlinks = str(get_backlinks(onion_url))
            url = 'http://127.0.0.1:45454/address/' + onion_id + "/popularity/"
            data = '{"date": "' + timestamp + '", "tor2web_access_count": '
            data = data + '0, "backlinks": ' + backlinks + '}'
            print(data)
            save_popularity_data(data, onion_id)
            pool.urlopen('PUT', url, headers=content_type, body=data)
        except Exception:
            import traceback
            print('generic exception: ' + traceback.format_exc())
Code Example #3
    def test_request_hook_params(self):
        def request_hook(span, request, headers, body):
            span.set_attribute("request_hook_headers", json.dumps(headers))
            span.set_attribute("request_hook_body", body)

        URLLib3Instrumentor().uninstrument()
        URLLib3Instrumentor().instrument(request_hook=request_hook)

        headers = {"header1": "value1", "header2": "value2"}
        body = "param1=1&param2=2"

        pool = urllib3.HTTPConnectionPool("httpbin.org")
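        # Send the POST through the instrumented pool; the request hook should record these headers and body on the span.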
        response = pool.request("POST",
                                "/status/200",
                                body=body,
                                headers=headers)

        self.assertEqual(b"Hello!", response.data)

        span = self.assert_span()

        self.assertIn("request_hook_headers", span.attributes)
        self.assertEqual(span.attributes["request_hook_headers"],
                         json.dumps(headers))
        self.assertIn("request_hook_body", span.attributes)
        self.assertEqual(span.attributes["request_hook_body"], body)
Code Example #4
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('host', type=str)
    parser.add_argument('port', type=int)
    parser.add_argument('x', type=int)
    parser.add_argument('y')  # kept as a string; converted with int() below where needed
    args = parser.parse_args()

    http = urllib3.HTTPConnectionPool(args.host, args.port)
    encoded_args = urlencode({'x': args.x, 'y': args.y})
    url = 'http://' + args.host + ':' + str(args.port) + '/?' + encoded_args
    r = http.request('POST', url)

    data = r.data.decode('utf-8')
    hit = 0
    sink = ''
    print(r.status)
    if r.status == 200:
        split = data.split('=')
        if len(split[1]) > 1:
            hit = int(split[1][0])
            sink = split[2]
        else:
            hit = int(split[1])
        update_opponent_board(args.x, int(args.y), hit)

        print('Hit: {}'.format(hit))
        if sink != '':
            print('Sink: {}'.format(sink))
Code Example #5
 def __init__(self, url, maxsize, proxy=None):
     self.url = url
     self.maxsize = maxsize
     self.proxy = proxy
     if url.find('://') < 0:
         url = 'http://' + url
     p = urlparse.urlsplit(url)
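     # Split optional "user:pass@" credentials out of the netloc and pre-build a Basic auth header for later requests.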
     i = p.netloc.rfind('@')
     self.host = p.netloc[i + 1:]
     if i >= 0:
         self.auth = ('Basic ' +
                      base64.b64encode(p.netloc[:i].encode('U8')).decode())
     else:
         self.auth = None
     self.path = urlparse.urlunsplit(('', '', p.path or '/', p.query, ''))
     self.fullpath = 'http://' + self.host + self.path
     if proxy:
         self.path = self.fullpath
     self.pool = urllib3.HTTPConnectionPool(proxy if proxy else self.host,
                                            maxsize=maxsize,
                                            block=True)
     self.lp_header = None
     self.lp_pool = self.pool
     self.lp_host = self.host
     self.lp_path = self.path
     self.expire = 0
Code Example #6
 def setUp(self):
     self.http = urllib3.HTTPConnectionPool('127.0.0.1',
                                            port=testenv["wsgi_port"],
                                            maxsize=20)
     self.recorder = tracer.recorder
     self.recorder.clear_spans()
     tracer._scope_manager = GeventScopeManager()
Code Example #7
    def __init__(self):
        esurl = Configure.configure().value("elasticsearch.url")
        poolsize = Configure.configure().value("elasticsearch.pool.maxsize")

        self._es_url = esurl
        self._pool_maxsize = poolsize
        self._es_domain = None

        if esurl.startswith("http://"):
            self._es_domain = esurl[7:]
        elif esurl.startswith("https://"):
            self._es_domain = esurl[8:]

        idx = self._es_domain.find("/")
        if idx >= 0:
            self._es_domain = self._es_domain[:idx]
        print("extract domain", self._es_domain)
        self._es_client = urllib3.HTTPConnectionPool(
            self._es_domain, maxsize=self._pool_maxsize)
Code Example #8
File: api.py  Project: songdi/py-telegram-bot-client
 def __init__(self, maxsize: int, block: bool,
              **connection_pool_kwargs):
     # setdefault (rather than get) so the headers dict is actually stored back into the kwargs
     connection_pool_kwargs.setdefault("headers", {}).update({
         "connection": "keep-alive",
         "user-agent": "telegram-bot-client: A Telegram Bot API Python client",
     })
     # from urllib3.connection.default_socket_options
     from socket import IPPROTO_TCP, SOL_SOCKET, TCP_NODELAY, SO_KEEPALIVE
     connection_pool_kwargs["socket_options"] = connection_pool_kwargs.get(
         "socket_options", []) + [
             (IPPROTO_TCP, TCP_NODELAY, 1),
             (SOL_SOCKET, SO_KEEPALIVE, 1),
         ]
     if self.host.startswith("https://"):
         self.pool = urllib3.HTTPSConnectionPool(
             host=self.host[8:],
             maxsize=maxsize,
             block=block,
             **connection_pool_kwargs)
     elif self.host.startswith("http://"):
         self.pool = urllib3.HTTPConnectionPool(
             host=self.host[7:],
             maxsize=maxsize,
             block=block,
             **connection_pool_kwargs)
     else:
         raise TelegramBotException(
             "Telegram Bot API only supports https:// and http://")
Code Example #9
    def __init__(self, cfg):
        self.simhash_size = cfg['simhash']['size']
        self.simhash_expire = cfg['simhash']['expire_after']
        if self.simhash_size > 512:
            raise Exception('do not support simhash longer than 512')

        headers = {
            'User-Agent': 'wayback-discover-diff',
            'Accept-Encoding': 'gzip,deflate',
            'Connection': 'keep-alive'
        }
        cdx_auth_token = cfg.get('cdx_auth_token')
        if cdx_auth_token:
            headers['cookie'] = 'cdx_auth_token=%s' % cdx_auth_token

        self.http = urllib3.HTTPConnectionPool('web.archive.org',
                                               maxsize=50,
                                               retries=4,
                                               headers=headers)
        self.redis = StrictRedis(
            connection_pool=BlockingConnectionPool.from_url(
                cfg['redis_uri'],
                max_connections=50,
                timeout=cfg.get('redis_timeout', 10),
                decode_responses=True))
        self.tpool = ThreadPoolExecutor(max_workers=cfg['threads'])
        self.snapshots_number = cfg['snapshots']['number_per_year']
        self.download_errors = 0
        # Initialize logger
        self._log = logging.getLogger('wayback_discover_diff.worker')
Code Example #10
File: __init__.py  Project: Python3pkg/NivadPy
def _get_connection_pool():
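    # In debug builds, talk to a local dev server on 127.0.0.1:8000 over plain HTTP;
    # otherwise use verified HTTPS against api.nivad.io with certifi's CA bundle.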
    if DEBUG:
        return urllib3.HTTPConnectionPool('127.0.0.1:8000')
    else:
        return urllib3.HTTPSConnectionPool('api.nivad.io',
                                           cert_reqs='CERT_REQUIRED',
                                           ca_certs=certifi.where())
Code Example #11
    def test_basic_http_absolute_url(self):
        url = "http://httpbin.org:666/status/200"
        httpretty.register_uri(httpretty.GET, url, body="Hello!")
        pool = urllib3.HTTPConnectionPool("httpbin.org", port=666)
        response = pool.request("GET", url)

        self.assert_success_span(response, url)
Code Example #12
    def test_url_open_explicit_arg_parameters(self):
        url = "http://httpbin.org:666/status/200"
        httpretty.register_uri(httpretty.GET, url, body="Hello!")
        pool = urllib3.HTTPConnectionPool("httpbin.org", port=666)
        response = pool.urlopen(method="GET", url="/status/200")

        self.assert_success_span(response, url)
Code Example #13
 def request(self, method, body, headers, lp=False):
     headers['host'] = self.lp_host if lp else self.host
     if self.auth:
         headers['authorization'] = self.auth
     elif 'authorization' in headers:
         del headers['authorization']
     pool = self.lp_pool if lp else self.pool
     r = pool.urlopen(method,
                      self.lp_path if lp else self.path,
                      body,
                      headers,
                      assert_same_host=False,
                      timeout=self.lp_timeout if lp else self.timeout)
     for h in ('content-encoding', 'transfer-encoding'):
         if h in r.headers:
             del r.headers[h]
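     # When the server advertises a long-polling endpoint via x-long-polling, cache a dedicated host/path/pool for it.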
     if ('x-long-polling' in r.headers
             and self.lp_header != r.headers['x-long-polling']):
         self.lp_header = r.headers['x-long-polling']
         p = urlparse.urlsplit(self.lp_header)
         self.lp_host = p.netloc or self.host
         if self.proxy:
             self.lp_path = urlparse.urljoin(self.fullpath, self.lp_header)
         else:
             self.lp_path = urlparse.urlunsplit(
                 ('', '', p.path or '/', p.query, ''))
             if self.lp_host == self.host:
                 self.lp_pool = self.pool
             else:
                 self.lp_pool = urllib3.HTTPConnectionPool(
                     p.netloc, maxsize=self.maxsize, block=True)
     return r
Code Example #14
File: test.py  Project: pombredanne/REST-Stockmarket
def play_sessions(runner):
    runner.http = urllib3.HTTPConnectionPool('localhost:8080', maxsize=2)
    for i in xrange(0, runner.nsessions):
        # Select a user
        runner.user = random.choice(runner.users)
        # Play a session
        runner.playSession()
    return runner.counter
Code Example #15
def getInfo(username, password, host):
	http = urllib3.HTTPConnectionPool(host)

	# Login
	request = http.request('GET','/asp/GetRandCount.asp')

	## Parameters
	password64 = base64.b64encode(str.encode(password)).decode('ascii')
Code Example #16
def snipplets():
    '''
    Code for pulling random images from specified boards on 4chan and pushing them to an Apple TV 2 device
    '''

    #Specify boards
    boards = ['b','g','v','wg','fa','trv','an','ck']

    #Specify Transitions and airplay device IP
    transitions = ['None', 'SlideLeft', 'SlideRight', 'Dissolve']
    airpush_ip = '192.168.1.100'
    airpush_port = '7000'
    airpush_target = "/photo"

    #Uses one connection. urllib2 closes and reopens connections, which is a bad time. Don't have a bad time.
    http_pool = urllib3.HTTPConnectionPool(airpush_ip+':'+airpush_port)

    #Track seen images so we don't get duplicates
    seen = set()

    #For the purpose of 'social coding', this infinite loop has been replaced with a loop of 10 iterations
    #while True:
    for i in range(10):
        try:
            chosen_one = random.choice(boards)
            #load thread json
            url = 'http://api.4chan.org/'+chosen_one+'/'+str(random.choice(range(10)))+'.json'
            req = urllib2.Request(url)
            html = urllib2.urlopen(req)
            data = json.loads(html.read())

            #get random thread
            thread = random.choice(data['threads'])
            url = 'http://api.4chan.org/'+chosen_one+'/res/'+str(thread['posts'][0]['no'])+'.json'
            data = getJson(url)

            #Get random image from thread
            images = []
            for post in data['posts']:
                if 'tim' in post.keys():
                    if post['ext'] != '.gif':
                        if str(post['tim'])+post['ext'] not in seen:
                            seen.add(str(post['tim'])+post['ext'])
                            images.append(str(post['tim'])+post['ext'])
            if len(images) > 0:
                image = random.choice(images)

                img = urllib2.urlopen("http://images.4chan.org/"+chosen_one+"/src/"+image)
                
                #Make request to airplay device
                r = http_pool.urlopen('PUT',airpush_target,body=img.read(),headers={'X-Apple-Transition': random.choice(transitions)})

                #display it on screen for 2secs
                time.sleep(2)

        except urllib2.HTTPError:
            print "404lol"
            print url
Code Example #17
 def pattern(self, method, url, params=None, body=None, host='localhost', port=9200, **kwargs):
     # ...
     
     self.pool = urllib3.HTTPConnectionPool(host, port=port)
     url = self.url_prefix + url
     if params:
         url = '%s?%s' % (url, urlencode(params or {}))
     
     response = self.pool.urlopen(method, url, body)
Code Example #18
def internet_on():
    try:
        http = urllib3.HTTPConnectionPool('www.google.com')
        http.urlopen(url='http://www.google.com', timeout=1.0, method='GET')
        logging.info('network is ON')
        return True
    except urllib3.exceptions.HTTPError as err:
        logging.error('network is OFF')
        return False
Code Example #19
    def get(self, host, params=()):
        # type: (object, object) -> object
        """Get metadata by url"""

        self.__is_server_online(host)
        self.__disable_verbose()
        self.__parse_params(params)
        scheme, host = urlparse(host).scheme, urlparse(host).netloc
        self.DEFAULT_HTTP_PROTOCOL = scheme + "://"
        self.urls = self.__get_urls(host)
        response = {}
        self.HEADER['user-agent'] = self.reader.get_random_user_agent()
        log.info("user-agent : " + self.HEADER['user-agent'])
        log.info('Thread num : ' + str(self.threads))

        try:
            httplib.HTTPConnection.debuglevel = self.debug

            if hasattr(urllib3, 'disable_warnings'):
                urllib3.disable_warnings()
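            # Build an HTTP or HTTPS pool from the netloc, defaulting the port to 80/443 when none is given.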
            if scheme == "http":
                self.http = urllib3.HTTPConnectionPool(
                    host.split(':')[0],
                    port=80 if len(host.split(':')) == 1 else int(
                        host.split(':')[1]),
                    block=True,
                    maxsize=10)
            elif scheme == "https":
                self.http = urllib3.HTTPSConnectionPool(
                    host.split(':')[0],
                    port=443 if len(host.split(':')) == 1 else int(
                        host.split(':')[1]),
                    block=True,
                    maxsize=10)
            else:
                log.critical("Unsupported protocol, exiting now")
                sys.exit(1)
            pool = threadpool.ThreadPool(self.threads)
            requests = threadpool.makeRequests(self.request, self.urls)
            for req in requests:
                pool.putRequest(req)
            time.sleep(1)
            pool.wait()
        except exceptions.AttributeError as e:
            log.critical(e.message)
        except KeyboardInterrupt:
            log.warning('Session canceled')
            sys.exit()

        self.counter['total'] = len(self.urls)
        self.counter['pools'] = len(pool.workers)

        response['count'] = self.counter
        response['result'] = self.result

        return response
Code Example #20
 def _create_connection_pool(self):
     headers = {}
     headers.setdefault('USER-AGENT', 'SIMBA APP')
     headers.setdefault('Content-Type', 'application/json')
     headers.setdefault('Accept-Type', 'application/json')
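     # One blocking pool per client; host and pool size come from the application's config.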
     self._http_connection_pool = urllib3.HTTPConnectionPool(host=self.config.get('AUTHENTICATION_HOST'),
                                                             headers=headers,
                                                             block=True,
                                                             maxsize=self.config.get(
                                                                 'AUTHENTICATION_HOST_MAX_CONNECTIONS'))
Code Example #21
    def test_basic_http_success_using_connection_pool(self):
        with urllib3.HTTPConnectionPool(self.http_host, timeout=3) as pool:
            response = pool.request("GET", "/status/200")

            self.assert_success_span(response, self.http_url)

            # Test that when re-using an existing connection, everything still works.
            # Especially relevant for IP capturing.
            response = pool.request("GET", "/status/200")

            self.assert_success_span(response, self.http_url)
Code Example #22
 def __init__(self, host, timeout=1000, maxsize=3):
     '''
         Initialize.
         @param host: server address
         @param timeout: connection timeout, in seconds
     '''
     self.__host = host
     import urllib3
     self.__pool = urllib3.HTTPConnectionPool(host,
                                              timeout=timeout,
                                              maxsize=maxsize)
Code Example #23
File: test_urlfetch.py  Project: ctomiao2/vim
 def test_urlfetch_called_with_http(self):
     """Check that URLFetch is used to fetch non-https resources."""
     resp = MockResponse('OK', 200, False, 'http://www.google.com',
                         {'content-type': 'text/plain'})
     fetch_patch = patch('google.appengine.api.urlfetch.fetch',
                         return_value=resp)
     with fetch_patch as fetch_mock:
         import urllib3
         pool = urllib3.HTTPConnectionPool('www.google.com', '80')
         r = pool.request('GET', '/')
         self.assertEqual(r.status, 200, r.data)
         self.assertEqual(fetch_mock.call_count, 1)
Code Example #24
def do2():
    print("urllib3")
    conn = urllib3.HTTPConnectionPool(domen, maxsize=M)
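    # domen, url, N, M and g_async are assumed to be defined at module level;
    # each of the M greenlets issues N GETs over the shared pool of size M.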

    @g_async
    def f():
        for i in range(N):
            if i % (N // 10) == 0: print(f"{i/N:.0%}")
            rp: urllib3.HTTPResponse = conn.request("GET", url)
            trash = rp.data

    gevent.wait([f() for _ in range(M)])
Code Example #25
 def __init__(self,
              host: str,
              port: int,
              queue: str,
              vc_format: str = "base64"):
     self._host = host
     self._port = port
     self._queue_name = queue
     self._vc_format = vc_format
     self._url = f"http://{self._host}:{self._port}/queuename/{self._queue_name}?object_format=internal"
     self._http = urllib3.HTTPConnectionPool(host=self._host,
                                             port=self._port,
                                             retries=False)
Code Example #26
    def test_urlfetch_called_with_http(self):
        """Check that URLFetch is used to fetch non-https resources."""
        resp = MockResponse("OK", 200, False, "http://www.google.com",
                            {"content-type": "text/plain"})
        fetch_patch = patch("google.appengine.api.urlfetch.fetch",
                            return_value=resp)
        with fetch_patch as fetch_mock:
            import urllib3

            pool = urllib3.HTTPConnectionPool("www.google.com", "80")
            r = pool.request("GET", "/")
            assert r.status == 200, r.data
            assert fetch_mock.call_count == 1
Code Example #27
def main():

    parser = argparse.ArgumentParser()

    parser.add_argument(
        "--mode",
        help="monitoring mode: 'cluster' reports cluster health and TPS, "
             "'node' reports node health and GC information (default: cluster)",
        default="cluster",
        required=False)
    parser.add_argument("--url",
                        help="the url of cluster (default localhost)",
                        default="localhost",
                        required=False)
    parser.add_argument("--port",
                        help="the port of cluster (default 9200)",
                        default=9200,
                        type=int,
                        required=False)
    parser.add_argument("--interval",
                        help="the interval of stat api request (default 1s)",
                        default=1,
                        required=False,
                        type=int)

    args = parser.parse_args()

    mode = args.mode
    url = args.url
    port = args.port
    interval = args.interval

    try:
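        # Create the pool and issue one GET / so an unreachable cluster fails fast before monitoring starts.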

        esRestSession = urllib3.HTTPConnectionPool(url,
                                                   maxsize=1,
                                                   port=port,
                                                   timeout=10)
        esRestSession.request('GET', '/')

    except Exception:

        printError("ElasticSearch is not installed on " + url + ":" +
                   str(port))
        exit(1)

    if mode == "node":
        monitoringNodeStatus(esRestSession, interval)

    else:
        monitoringClusterStatus(esRestSession, interval)
Code Example #28
    def __init__(self, config):
        """
        Initialise the Gisgraphy interface.

        :param config: The :class:`configparser` object.
        """
        url = "localhost"

        self.post_data = {'q': '', 'format': 'json', 'suggest': 'true'}
        self.pool = urllib3.HTTPConnectionPool(
            host=url,
            port=8080,
            maxsize=25,
            headers={'accept': 'application/json'})
Code Example #29
    def setUp(self):
        """
        Setup sink and confirm replication queue is empty
        """
        self.host = os.getenv('RIAK_HOST', 'localhost')
        self.sink = ReplSink(host=self.host, port=8098, queue='q1_ttaaefs')
        self.test_data = b'{"test":"data"}'
        self.http = urllib3.HTTPConnectionPool(host=self.host,
                                               port=8098,
                                               retries=False)

        empty = False
        while not empty:
            rec = self.sink.fetch()
            empty = rec.empty
Code Example #30
    def __init__(self, host, proto="http", port=80, timeout=15, strict=True, retries=None, redirect=True,
                 assert_same_host=False, assert_hostname=None, assert_fingerprint=None, ssl_version=None):
        if "://" in host:
            host = host.split("://")[1].split("/")[0]
            
        socket.gethostbyname(host)

        self.request_cfg = {"timeout": timeout, "retries": retries, "redirect": redirect, "assert_same_host": assert_same_host}

        if proto.lower() == "https":
            self.connection = urllib3.HTTPSConnectionPool(
                host, port=port, timeout=timeout, retries=retries,
                ssl_version=ssl_version, assert_hostname=assert_hostname,
                assert_fingerprint=assert_fingerprint)
        else:
            self.connection = urllib3.HTTPConnectionPool(
                host, port=port, timeout=timeout, retries=retries, strict=strict)

        self.target = host