Example #1
def handle_http_proxy(path_info, real_host, connection_timeout,
                      network_timeout):
    # daily token derived from the current date; hashlib.md5 replaces the
    # md5 module that was removed in Python 3
    token = hashlib.md5(
        ('bird%s' % time.strftime('%Y%m%d')).encode('utf-8')).hexdigest()
    href = 'http://%s%s?token=%s&random=%d' % (
        real_host,
        path_info.replace('/proxy/', '/'),
        token,
        random.randint(0, 100000),
    )
    print(href)
    ret = ''
    try:
        client = HTTPClient.from_url(
            href,
            concurrency=1,
            connection_timeout=connection_timeout,
            network_timeout=network_timeout,
        )
        url = URL(href)
        response = client.get(url.request_uri)
        if response and response.status_code in (200, 304):
            ret = response.read()
        else:
            msg = 'handle_http_proxy response error:%d' % response.status_code
            ret = json.dumps({'result': msg}, ensure_ascii=True, indent=4)
    except Exception as e:
        msg = 'handle_http_proxy error:%s' % e
        ret = json.dumps({'result': msg}, ensure_ascii=True, indent=4)
    return '200 OK', {}, ret
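The handler above returns a (status, headers, body) triple. A minimal sketch of how a WSGI-style dispatcher might consume it is shown below; the upstream host, timeout values, and the application function itself are assumptions for illustration, not part of the original example.

# Hypothetical WSGI wrapper around handle_http_proxy (sketch only; the
# upstream host and timeouts are made up for illustration).
def application(environ, start_response):
    status, headers, body = handle_http_proxy(
        environ.get('PATH_INFO', '/proxy/'),
        'upstream.example.com:8080',
        connection_timeout=10,
        network_timeout=30)
    start_response(status, list(headers.items()))
    if isinstance(body, str):
        body = body.encode('utf-8')
    return [body]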
Example #2
 def superrun(url, group):
     # spawn o.requests greenlets against one shared client, then wait up
     # to o.limit seconds for the whole group to drain
     client = self.make_client(url)
     for _ in range(o.requests):
         group.spawn(run, client, URL(url))
     group.join(timeout=o.limit)
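This method leans on names that were stripped out of the snippet. Only run, o.requests, and o.limit appear in the original; everything in the sketch below is an assumption about what that surrounding harness might look like (in the original, make_client is a method, hence self.make_client).

# Hypothetical surrounding harness for superrun (sketch only).
import gevent.pool
from geventhttpclient import HTTPClient, URL

class _Options(object):
    requests = 1000       # total requests to issue
    limit = 60            # overall timeout for group.join(), in seconds
    concurrency = 10      # connections kept by the shared client

o = _Options()

def run(client, url):
    response = client.get(url.request_uri)
    response.read()
    assert response.status_code == 200

def make_client(url):
    # assumed helper: one client shared by all greenlets in the group
    return HTTPClient.from_url(URL(url), concurrency=o.concurrency)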
Example #3
def one_request():
    h = {
        'User-Agent':
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.76 Safari/537.36',
        'Accept':
        'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'Accept-Encoding': 'gzip, deflate, sdch',
        'Accept-Language': 'zh-CN,zh;q=0.8',
        'Cookie': 'SESSIONID=474906595',
        'Host': 'afgrtbb.tk',
        'Connection': 'keep-alive'
    }
    try:
        href = build_random_param()
        print(href)
        url = URL(href, 'utf-8')
        http = HTTPClient.from_url(
            url,
            concurrency=1,
            connection_timeout=60,
            network_timeout=80,
        )
        # spawn the request asynchronously; the caller is responsible for
        # joining the greenlet and reading the response
        return gevent.spawn(http.get, url.request_uri, h)
    except Exception:
        raise
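one_request only spawns the greenlet; a caller has to join it to get the response. A minimal sketch of that, assuming gevent is imported as in the example and using arbitrary illustration values for the count and timeout:

# Sketch: fire several requests via one_request() and wait for them all.
greenlets = [one_request() for _ in range(10)]
gevent.joinall(greenlets, timeout=90)
for g in greenlets:
    response = g.value
    if response is not None and response.status_code == 200:
        print('200 OK')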
Example #4
def test_https():
    href = u'https://192.168.1.136:8080/stream/webrtc'
    url = URL(href)

    cli = HTTPClient(url.host,
                     port=url.port,
                     ssl=True,
                     ssl_options={'ca_certs': 'ssl_certificate.crt'},
                     connection_timeout=5.0,
                     network_timeout=10.0)
    response = cli.get(url.request_uri)
    s = response.read()
    cli.close()
    print(s)
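For a self-signed endpoint like the one above, the same request can also go through HTTPClient.from_url with certificate verification relaxed; the insecure flag also appears in Example #6 below. This is only a sketch for testing setups.

# Sketch: HTTPS against a self-signed endpoint without a CA bundle.
url = URL(u'https://192.168.1.136:8080/stream/webrtc')
cli = HTTPClient.from_url(url,
                          insecure=True,
                          connection_timeout=5.0,
                          network_timeout=10.0)
response = cli.get(url.request_uri)
print(response.read())
cli.close()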
Example #5
def check_recaptcha(secret, resp, ip):
    try:
        # Google's siteverify endpoint takes the client address in the
        # optional `remoteip` parameter
        url = URL(
            'https://www.google.com/recaptcha/api/siteverify'
            '?secret=%s&response=%s&remoteip=%s' % (secret, resp, ip))
        http = HTTPClient.from_url(url)
        response = http.get(url.request_uri)
        if response.status_code == 200:
            raw_res = response.read()
            res = json.loads(raw_res)
            if res.get('success'):
                return True
    except Exception:
        # any network or JSON error counts as a failed verification
        pass
    return False
Example #6
 def get(self, relay, ca_certs, timeout):
     try:
         # reuse a pooled connection for this front URL when one is available
         return self.pool[relay.fronturl].get(block=False)
     except gevent.queue.Empty:
         insecure = "verify" not in relay.properties
         if ca_certs:
             ssl_options = {
                 'ca_certs': ca_certs,
                 'ssl_version': ssl.PROTOCOL_TLSv1
             }
         else:
             ssl_options = {}
         conn = HTTPClient.from_url(URL(relay.fronturl),
                                    insecure=insecure,
                                    block_size=MAX_PAYLOAD_LENGTH,
                                    connection_timeout=timeout,
                                    network_timeout=timeout,
                                    concurrency=1,
                                    ssl_options=ssl_options)
         return conn
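The method above only ever takes connections out of the pool. A minimal companion for returning them might look like the sketch below; the pool attribute and front URL come from the example, but the put method itself is an assumption, not part of the original project.

 def put(self, relay, conn):
     # hypothetical counterpart to get(): hand a connection back so a later
     # get() can reuse it instead of opening a new one
     try:
         self.pool[relay.fronturl].put(conn, block=False)
     except gevent.queue.Full:
         conn.close()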
Example #7
def request_cluster_config(dev_list, unsync_list=False):
    req_uri = '/admin/agentd_comm'
    conf_q = config.get('constants', 'REDIS_CONFIG_XML_QUEUE_KEY')
    mfc_count = len(dev_list)
    g_pool = gevent.pool.Pool(size=mfc_count)
    sync_flag = True
    if unsync_list:
        sync_flag = False

    LOG.debug("Creating Config request clients")
    conf_clients = []
    for device in dev_list:
        url = URL('http://' + device[2] + ':8080' + req_uri)
        conf_clients.append(
            HTTPClient.from_url(url, concurrency=1, headers_type=dict))

    LOG.debug("Starting to request Config from MFC")
    for i in xrange(mfc_count):
        g_pool.spawn(request_config_mfc_cb, conf_clients[i], dev_list[i],
                     conf_q)
    g_pool.join()
    LOG.debug("Finished collecting Config from MFC")

    for i in xrange(mfc_count):
        conf_clients[i].close()
    """Parse and store the config.

    mfc_uuid is a global hashmap(redis Dict) with ip as key and UUID as value
    parse_config_and_sync will update the sync_dev_list, mfc_uuid for each XML response.
    """
    LOG.debug("Parsing config request output and building the UUID hash.")
    q_len = r.llen(conf_q)
    g_pool = gevent.pool.Pool(size=q_len)
    for _ in xrange(q_len):
        data = r.blpop(conf_q)
        g_pool.spawn(parse_config_and_sync, data, sync_flag)
    g_pool.join()
    """Return list of MFCs which was able to communicate."""
    sync_list = List(key=config.get('constants', 'REDIS_SYNC_DEV_LIST_KEY'),
                     redis=r)
    return list(sync_list)
Example #8
#!/usr/bin/env python

from geventhttpclient import HTTPClient, URL

if __name__ == "__main__":

    url = URL('http://127.0.0.1:80/100.dat')
    http = HTTPClient.from_url(url)
    response = http.get(url.request_uri)
    assert response.status_code == 200

    CHUNK_SIZE = 1024 * 16  # 16KB
    with open('/tmp/100.dat', 'wb') as f:  # binary mode: the body is bytes
        data = response.read(CHUNK_SIZE)
        while data:
            f.write(data)
            data = response.read(CHUNK_SIZE)
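Assuming the response object supports file-like read(size) calls, as the chunked loop above relies on, the same download can be written with shutil.copyfileobj. This is a sketch, not part of the original example.

import shutil
from geventhttpclient import HTTPClient, URL

url = URL('http://127.0.0.1:80/100.dat')
http = HTTPClient.from_url(url)
response = http.get(url.request_uri)
assert response.status_code == 200

# copyfileobj repeatedly calls response.read(16384) under the hood
with open('/tmp/100.dat', 'wb') as f:
    shutil.copyfileobj(response, f, 1024 * 16)
http.close()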
Example #9
# -*- coding: utf-8 -*-
from geventhttpclient import HTTPClient
from geventhttpclient import URL

url = URL("http://gevent.org/")

print(url.request_uri)
print(url.path)

http = HTTPClient(url.host)
response = http.get(url.path)

print(response.status_code)
print(response.get('url'))
print(response.headers)
# print(response.read())

http.close()
Example #10
import time
import gevent.pool
from geventhttpclient import HTTPClient, URL

N = 1000
C = 10

url = URL('http://127.0.0.1/index.html')
qs = url.request_uri


def run(client):
    response = client.get(qs)
    response.read()
    assert response.status_code == 200


client = HTTPClient.from_url(url, concurrency=C)
group = gevent.pool.Pool(size=C)

now = time.time()
for _ in range(N):
    group.spawn(run, client)
group.join()

delta = time.time() - now
req_per_sec = N / delta

print "request count:%d, concurrenry:%d, %f req/s" % (N, C, req_per_sec)
Example #11
 def create_stat_clients():
     LOG.info("Creating Stats request clients")
     for device_id, name, ip in sync_mfcs:
         url = URL('http://' + ip + ':8080' + req_uri)
         stat_clients.append(
             HTTPClient.from_url(url, concurrency=1, headers_type=dict))
Example #12
def get(path="/sub/channel"):
    url = URL("http://%s/%s" % (os.getenv("HTTP_HOST", 'localhost'), path))
    http = HTTPClient.from_url(url)
    response = http.get(path)
    return response
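A quick way to exercise the helper above, assuming something is serving on HTTP_HOST (or localhost by default) and using the example's default channel path:

response = get("/sub/channel")
print(response.status_code)
print(response.read())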