Ejemplo n.º 1
0
def auth_get_repo(user_password):
    """Clone every repository of the authenticated GitHub user.

    :param user_password: ``"<username>+<password>"`` string; currently
        unused because the token below is hard-coded (see the disabled
        block of code kept in the string literal).
    """
    # restkit connection pool shared by all requests in this function.
    pool = ConnectionPool(factory=Connection)
    serverurl = "https://api.github.com"  # NOTE(review): assigned but never read
    '''
    #print 'Enter your username:'******'+')[0]
    
    password = user_password.split('+')[1]
    # Add your username and password here, or prompt for them
    auth=BasicAuth(username, password)
     
    # Use your basic auth to request a token
    # This is just an example from http://developer.github.com/v3/
    authreqdata = { "scopes": [ "public_repo" ], "note": "admin script" }
    resource = Resource('https://api.github.com/authorizations', pool=pool, filters=[auth])
    response = resource.post(headers={ "Content-Type": "application/json" }, payload=json.dumps(authreqdata))
    token = json.loads(response.body_string())['token']
     '''
    """
    Once you have a token, you can pass that in the Authorization header
    You can store this in a cache and throw away the user/password
    This is just an example query.  See http://developer.github.com/v3/ 
    for more about the url structure
    """
    # SECURITY NOTE(review): hard-coded access token committed to source;
    # it should be revoked and supplied via configuration/environment.
    token = '94038d59a46c5ea1aa4f11626a83cde3e8794668'
    resource = Resource('https://api.github.com/user/repos', pool=pool)
    headers = {'Content-Type': 'application/json'}
    headers['Authorization'] = 'token %s' % token
    # GitHub returns a JSON list of repository objects; clone each via its
    # HTTPS clone URL.
    response = resource.get(headers=headers)
    repos = json.loads(response.body_string())
    for each in repos:
        git("clone", each['clone_url'])
Ejemplo n.º 2
0
 def setUp(self):
     """Prepare per-test restkit client options.

     Uses a pool with a tiny ``max_lifetime`` so connections are not kept
     open across tests.
     """
     from socketpool import ConnectionPool
     from restkit.conn import Connection

     short_lived_pool = ConnectionPool(factory=Connection, max_lifetime=0.001)
     self.client_options = {'pool': short_lived_pool}
     super(TestRestkit, self).setUp()
Ejemplo n.º 3
0
 def __init__(self, user, token):
     """Set up the connection pool and token-auth request headers.

     ``user`` is accepted for interface compatibility but is not stored.
     """
     self.pool = ConnectionPool(factory=Connection)
     self.token = token
     auth_value = 'token %s' % self.token
     self.headers = {'Content-Type': 'application/json',
                     'Authorization': auth_value}
Ejemplo n.º 4
0
 def connect(self):
     """Create the server connection.

     Builds a TCP connection pool to the BaoStock server and stores it on
     the shared ``context`` object as ``default_socket_pool``.
     """
     try:
         socket_pool = ConnectionPool(factory=TcpConnector, max_size=1024,
                                      options={'host': cons.BAOSTOCK_SERVER_IP, 'port': cons.BAOSTOCK_SERVER_PORT})
         setattr(context, "default_socket_pool", socket_pool)
     except Exception:
         # Best-effort: report the failure to the user instead of raising.
         # (Message: "Server connection failed, please try again later.")
         print("服务器连接失败,请稍后再试。")
Ejemplo n.º 5
0
 def __init__(self, endpoint, name, **kwargs):
     """Wrap a restkit ``Resource`` for *endpoint* under *name*.

     Strips trailing slashes from the endpoint, supplies a default
     connection pool when the caller did not pass one, and captures the
     JSON (de)serialization hooks used by this wrapper.
     """
     # rstrip('/') is a no-op when there is no trailing slash, so the
     # original endswith() guard is unnecessary.
     endpoint = endpoint.rstrip('/')
     if 'pool' not in kwargs:
         kwargs['pool'] = ConnectionPool(factory=Connection)
     self.json_default = kwargs.get('json_default', json_util.default)
     self.json_object_hook = kwargs.get('json_object_hook',
                                        json_util.object_hook)
     self.resource = Resource(endpoint, **kwargs)
     self.name = name
Ejemplo n.º 6
0
def get_session(backend_name, **options):
    """Return the shared ConnectionPool for *backend_name*, creating it once.

    Pools are cached in the module-level ``_default_session`` dict keyed by
    backend name, so repeated calls for the same backend reuse one pool.

    :param backend_name: socketpool backend identifier (e.g. ``"thread"``).
    :param options: extra keyword arguments forwarded to ``ConnectionPool``.
    :return: the cached (or newly created) pool.
    """
    global _default_session

    # Lazily create the cache; ``not`` also resets an explicit empty dict,
    # matching the original falsy check.
    if not _default_session:
        _default_session = {}

    # Single construction path replaces the original duplicated branches
    # that built the pool with identical arguments in two places.
    if backend_name not in _default_session:
        _default_session[backend_name] = ConnectionPool(factory=Connection,
                                                        backend=backend_name,
                                                        **options)
    return _default_session[backend_name]
Ejemplo n.º 7
0
    def test_stop_reaper_thread(self):
        """Verify that calling stop_reaper will terminate the reaper thread.
        """
        pool = ConnectionPool(MessyConnector, backend='thread')
        assert pool._reaper.running
        pool.stop_reaper()

        # Poll the reaper flag for up to ~10 seconds (1000 * 10 ms).
        attempts = 0
        while attempts < 1000:
            if not pool._reaper.running:
                return
            time.sleep(0.01)
            attempts += 1

        assert False, 'Reaper thread not terminated in time.'
    def __init__(self, *args, **kw):
        """Initialize the SOCKS5 server and start periodic pool-size logging."""
        super(Socks5Server, self).__init__(*args, **kw)
        # Pool of outbound TCP connections to remote hosts (gevent backend).
        self.remote_pool = ConnectionPool(factory=TcpConnector,
                                          max_size=600,
                                          max_lifetime=3,
                                          backend="gevent")

        # Each logger closure receives itself as ``s`` and re-schedules
        # itself via spawn_later, producing one log line every 10 seconds.
        def log_tcp_pool_size(s):
            log("ConnPool size: %d" % self.remote_pool.size)
            spawn_later(10, s, s)

        def log_dns_pool_size(s):
            log("DNSPool size: %d" % len(self.HOSTCACHE))
            spawn_later(10, s, s)

        spawn_later(10, log_tcp_pool_size, log_tcp_pool_size)
        spawn_later(10, log_dns_pool_size, log_dns_pool_size)
Ejemplo n.º 9
0
    def test_del(self):
        """Verify that garbage collection of the pool will release the reaper
        thread.
        """
        pool = ConnectionPool(MessyConnector, backend='thread')
        reaper = pool._reaper
        assert reaper.running

        # Drop the only reference so the pool becomes collectable.
        pool = None

        # Poll the reaper flag for up to ~10 seconds (1000 * 10 ms).
        waited = 0
        while waited < 1000:
            if not reaper.running:
                return
            time.sleep(0.01)
            waited += 1

        assert False, 'Reaper thread not terminated in time.'
Ejemplo n.º 10
0
 def __init__(self,
              url=GITHUB_URL,
              params=None,
              payload=None,
              headers=None,
              filters=None,
              access_token=None):
     """Prepare a restkit Resource for GitHub API calls.

     Falsy params/payload/headers/filters are replaced by fresh defaults;
     when *access_token* is given it is added to the query parameters.
     """
     self.url = url
     self.params = params if params else {}
     self.payload = payload if payload else {}
     default_headers = {'Content-Type': 'application/json'}
     self.headers = headers if headers else default_headers
     self.resource = Resource(
         url,
         pool=ConnectionPool(factory=Connection),
         filters=filters if filters else [],
     )
     if access_token is not None:
         self.params["access_token"] = access_token
Ejemplo n.º 11
0
import timeit

import eventlet
eventlet.monkey_patch()

from restkit import *
from restkit.conn import Connection
from socketpool import ConnectionPool

#set_logging("debug")

# Shared eventlet-backed connection pool and green-thread pool.
pool = ConnectionPool(factory=Connection, backend="eventlet")

epool = eventlet.GreenPool()

urls = [
    "http://yahoo.fr", "http://google.com", "http://friendpaste.com",
    "http://benoitc.io", "http://couchdb.apache.org"
]

# Repeat the URL list ten times to build the benchmark workload.
allurls = list(urls) * 10


def fetch(u):
    # Fetch *u* (following redirects) through the shared eventlet pool and
    # report its status and body length.  (Python 2 print statement.)
    r = request(u, follow_redirect=True, pool=pool)
    print "RESULT: %s: %s (%s)" % (u, r.status, len(r.body_string()))


def extract():
Ejemplo n.º 12
0
#
# This file is part of restkit released under the MIT license.
# See the NOTICE for more information.

from six.moves.urllib import parse as urlparse

from webob import Request
from restkit.contrib.wsgi_proxy import HostProxy

import restkit
from restkit.conn import Connection
from socketpool import ConnectionPool

restkit.set_logging("debug")

# Shared thread-backend pool (max 10 connections) feeding the CouchDB proxy.
pool = ConnectionPool(factory=Connection, max_size=10, backend="thread")
proxy = HostProxy("http://127.0.0.1:5984", pool=pool)


def application(environ, start_response):
    # WSGI entry point that forwards requests to the CouchDB HostProxy.
    # NOTE(review): this definition appears truncated in this view -- the
    # proxied response is built but never returned within the visible lines.
    req = Request(environ)
    if 'RAW_URI' in req.environ:
        # gunicorn so we use real path non encoded
        u = urlparse.urlparse(req.environ['RAW_URI'])
        req.environ['PATH_INFO'] = u.path

    # do smth like adding oauth headers ..
    resp = req.get_response(proxy)

    # rewrite response
    # do auth ...
Ejemplo n.º 13
0
 def test_size_on_isconnected_failure(self):
     """A pool of always-failing connectors stays empty and get() raises."""
     failing_pool = ConnectionPool(MessyConnector)
     assert failing_pool.size == 0
     pytest.raises(MaxTriesError, failing_pool.get)
Ejemplo n.º 14
0
import json
from restkit import Resource, BasicAuth, Connection, request
from socketpool import ConnectionPool
import getpass
 
pool = ConnectionPool(factory=Connection)
serverurl="https://api.github.com"
# NOTE(review): the next line is garbled (scraper artifact) -- it fuses a
# Python 2 print statement with the Resource(...) construction, and the
# ``auth``/``authreqdata`` names used below are never defined.  As written
# this is not valid Python.
print 'Enter your username:'******'https://api.github.com/authorizations', pool=pool, filters=[auth])
response = resource.post(headers={ "Content-Type": "application/json" }, payload=json.dumps(authreqdata))
token = json.loads(response.body_string())['token']
"""
Once you have a token, you can pass that in the Authorization header
You can store this in a cache and throw away the user/password
This is just an example query.  See http://developer.github.com/v3/ 
for more about the url structure
"""
#token = '94038d59a46c5ea1aa4f11626a83cde3e8794668' 
resource = Resource('https://api.github.com/user/repos', pool=pool)
headers = {'Content-Type' : 'application/json' }
headers['Authorization'] = 'token %s' % token
response = resource.get(headers = headers)
repos = json.loads(response.body_string())
Ejemplo n.º 15
0
def make_auctions_app(global_conf,
                      redis_url='redis://localhost:7777/0',
                      external_couch_url='http://localhost:5000/auction',
                      internal_couch_url='http://localhost:9000/',
                      proxy_internal_couch_url='http://localhost:9000/',
                      auctions_db='auctions',
                      hash_secret_key='',
                      timezone='Europe/Kiev',
                      preferred_url_scheme='http',
                      debug=False,
                      auto_build=False,
                      event_source_connection_limit=1000):
    """Configure and return the auctions server application.

    Sample paste.ini configuration:

    [app:main]
    use = egg:openprocurement.auction#auctions_server
    redis_url = redis://:passwod@localhost:1111/0
    external_couch_url = http://localhost:1111/auction
    internal_couch_url = http://localhost:9011/
    auctions_db = auction
    timezone = Europe/Kiev
    """
    # gevent-backed pool used when proxying requests to CouchDB.
    auctions_server.proxy_connection_pool = ConnectionPool(factory=Connection,
                                                           max_size=20,
                                                           backend="gevent")
    auctions_server.proxy_mappings = Memoizer({})
    auctions_server.event_sources_pool = deque([])
    auctions_server.config['PREFERRED_URL_SCHEME'] = preferred_url_scheme
    auctions_server.config['REDIS_URL'] = redis_url
    auctions_server.config['event_source_connection_limit'] = int(
        event_source_connection_limit)
    auctions_server.config['EXT_COUCH_DB'] = urljoin(external_couch_url,
                                                     auctions_db)
    # Public (read-only) CouchDB proxy routes.
    auctions_server.add_url_rule('/' + auctions_db + '/<path:path>',
                                 'couch_server_proxy',
                                 couch_server_proxy,
                                 methods=['GET'])
    auctions_server.add_url_rule('/' + auctions_db + '/',
                                 'couch_server_proxy',
                                 couch_server_proxy,
                                 methods=['GET'],
                                 defaults={'path': ''})

    # Authenticated routes for the secured database.
    auctions_server.add_url_rule('/' + auctions_db + '_secured/<path:path>',
                                 'auth_couch_server_proxy',
                                 auth_couch_server_proxy,
                                 methods=['GET'])
    auctions_server.add_url_rule('/' + auctions_db + '_secured/',
                                 'auth_couch_server_proxy',
                                 auth_couch_server_proxy,
                                 methods=['GET'],
                                 defaults={'path': ''})

    auctions_server.config['INT_COUCH_URL'] = internal_couch_url
    auctions_server.config['PROXY_COUCH_URL'] = proxy_internal_couch_url
    auctions_server.config['COUCH_DB'] = auctions_db
    auctions_server.config['TIMEZONE'] = tz(timezone)
    auctions_server.redis = Redis(auctions_server)
    auctions_server.couch_server = Server(
        auctions_server.config.get('INT_COUCH_URL'),
        session=Session(retry_delays=range(10)))
    # Create the auctions database on first run.
    if auctions_server.config['COUCH_DB'] not in auctions_server.couch_server:
        auctions_server.couch_server.create(auctions_server.config['COUCH_DB'])

    auctions_server.db = auctions_server.couch_server[
        auctions_server.config['COUCH_DB']]
    auctions_server.config['HASH_SECRET_KEY'] = hash_secret_key
    sync_design(auctions_server.db)
    # bool(...) replaces the redundant ``True if x else False`` ternaries;
    # behavior is identical for every truthy/falsy input.
    auctions_server.config['ASSETS_DEBUG'] = bool(debug)
    assets.auto_build = bool(auto_build)
    return auctions_server
Ejemplo n.º 16
0
def make_auctions_app(global_conf,
                      redis_url='redis://localhost:9002/1',
                      redis_password='',
                      redis_database='',
                      sentinel_cluster_name='',
                      sentinels='',
                      external_couch_url='http://localhost:5000/auction',
                      internal_couch_url='http://localhost:9000/',
                      proxy_internal_couch_url='http://localhost:9000/',
                      auctions_db='database',
                      hash_secret_key='',
                      timezone='Europe/Kiev',
                      preferred_url_scheme='http',
                      debug=False,
                      auto_build=False,
                      event_source_connection_limit=1000,
                      limit_replications_progress=99,
                      limit_replications_func='any'):
    """Configure and return the auctions server application.

    Sample paste.ini configuration:

    [app:main]
    use = egg:openprocurement.auction#auctions_server
    redis_url = redis://:passwod@localhost:1111/0
    external_couch_url = http://localhost:1111/auction
    internal_couch_url = http://localhost:9011/
    auctions_db = auction
    timezone = Europe/Kiev
    """
    # NOTE(review): ``debug`` and ``auto_build`` are accepted but unused in
    # this variant of the factory.
    # The server instance is looked up from the component registry.
    auctions_server = components.queryUtility(IAuctionsServer)
    # gevent-backed pool used when proxying requests to CouchDB.
    auctions_server.proxy_connection_pool = ConnectionPool(factory=Connection,
                                                           max_size=20,
                                                           backend='gevent')
    auctions_server.proxy_mappings = Memoizer({})
    auctions_server.event_sources_pool = deque([])
    auctions_server.config['PREFERRED_URL_SCHEME'] = preferred_url_scheme
    auctions_server.config['limit_replications_progress'] = float(
        limit_replications_progress)
    auctions_server.config['limit_replications_func'] = limit_replications_func

    # ``sentinels`` arrives as a JSON string from the ini file.
    auctions_server.config['REDIS'] = {
        'redis': redis_url,
        'redis_password': redis_password,
        'redis_database': redis_database,
        'sentinel_cluster_name': sentinel_cluster_name,
        'sentinel': loads(sentinels)
    }

    auctions_server.config['event_source_connection_limit'] = int(
        event_source_connection_limit)
    auctions_server.config['EXT_COUCH_DB'] = urljoin(external_couch_url,
                                                     auctions_db)
    # Public (read-only) CouchDB proxy routes.
    auctions_server.add_url_rule('/' + auctions_db + '/<path:path>',
                                 'couch_server_proxy',
                                 couch_server_proxy,
                                 methods=['GET'])
    auctions_server.add_url_rule('/' + auctions_db + '/',
                                 'couch_server_proxy',
                                 couch_server_proxy,
                                 methods=['GET'],
                                 defaults={'path': ''})

    # Authenticated routes for the secured database.
    auctions_server.add_url_rule('/' + auctions_db + '_secured/<path:path>',
                                 'auth_couch_server_proxy',
                                 auth_couch_server_proxy,
                                 methods=['GET'])
    auctions_server.add_url_rule('/' + auctions_db + '_secured/',
                                 'auth_couch_server_proxy',
                                 auth_couch_server_proxy,
                                 methods=['GET'],
                                 defaults={'path': ''})

    auctions_server.config['INT_COUCH_URL'] = internal_couch_url
    auctions_server.config['PROXY_COUCH_URL'] = proxy_internal_couch_url
    auctions_server.config['COUCH_DB'] = auctions_db
    auctions_server.config['TIMEZONE'] = tz(timezone)

    auctions_server.couch_server = Server(
        auctions_server.config.get('INT_COUCH_URL'),
        session=Session(retry_delays=range(10)))
    # Create the auctions database on first run.
    if auctions_server.config['COUCH_DB'] not in auctions_server.couch_server:
        auctions_server.couch_server.create(auctions_server.config['COUCH_DB'])

    auctions_server.db = auctions_server.\
        couch_server[auctions_server.config['COUCH_DB']]
    auctions_server.config['HASH_SECRET_KEY'] = hash_secret_key
    sync_design(auctions_server.db)
    # Let registered plugins extend the component registry.
    for entry_point in iter_entry_points(PKG_NAMESPACE):
        plugin = entry_point.load()
        plugin(components)
    return auctions_server
Ejemplo n.º 17
0
 def pool(self):
     """Build a ConnectionPool from the app's COUCHDB_* settings."""
     config = self.app.config
     keepalive = int(config.get('COUCHDB_KEEPALIVE'))
     backend = config.get('COUCHDB_BACKEND')
     return ConnectionPool(Connection,
                           max_size=keepalive,
                           backend=backend)
Ejemplo n.º 18
0
def echo(sock, address):
    """Echo everything received on *sock* back to the peer until EOF."""
    print('New connection from %s:%s' % address)

    # iter() with a sentinel stops as soon as recv() returns empty bytes,
    # which is exactly the original ``if not data: break`` condition.
    for data in iter(lambda: sock.recv(1024), b''):
        sock.send(data)
        print("echoed %r" % data)


if __name__ == '__main__':
    import time

    # Demo: start a gevent echo server, then hammer it through the pool
    # from 20 concurrent greenlets.
    options = {'host': 'localhost', 'port': 6000}
    pool = ConnectionPool(factory=TcpConnector, backend="gevent")
    server = StreamServer(('localhost', 6000), echo)
    gevent.spawn(server.serve_forever)

    def runpool(data):
        # Borrow a pooled connection, send *data*, and assert it is echoed
        # back unchanged.  (Python 2 print statements.)
        print 'ok'
        with pool.connection(**options) as conn:
            print 'sending'
            sent = conn.send(data)
            print 'send %d bytes' % sent
            echo_data = conn.recv(1024)
            print "got %s" % data
            assert data == echo_data

    start = time.time()
    jobs = [gevent.spawn(runpool, "blahblah") for _ in xrange(20)]
Ejemplo n.º 19
0
    from gevent import monkey
    monkey.patch_all()

    import gevent.pool
    import time

    from restkit import *
    from socketpool import ConnectionPool

    # Simple benchmark: N requests against a local server, at most C
    # concurrent greenlets sharing one gevent-backed connection pool.
    url = 'http://127.0.0.1/index.html'

    N = 1000
    C = 10

    Pool = ConnectionPool(factory=Connection,backend="gevent",max_size=C,timeout=300)


    def run():
        # One request through the shared pool; the body must be consumed so
        # the connection can be released back to the pool.
        response = request(url,follow_redirect=True,pool=Pool)
        response.body_string()
        assert response.status_int == 200


    group = gevent.pool.Pool(size=C)

    now = time.time()
    for _ in xrange(N):
        group.spawn(run)
    group.join()
Ejemplo n.º 20
0
import socket
import json
from socketpool import ConnectionPool, TcpConnector
import time

# Target data-server endpoint and receive buffer size for the test below.
HOST = '192.168.0.106'
PORT = 10010
SIZE = 1024
# Thread-backed TCP pool shared by the test helpers below.
POOL = ConnectionPool(factory=TcpConnector, backend="thread", timeout=10)
# The bare string below says: "For testing; Bigfish does not actually use this."
"""
测试用,实际Bigfish并未用到
"""


def socket_test():
    # Manually exercise the data server: open a TCP socket and request
    # XAUUSD M1 bars for 2015-11-03..2015-11-04.
    # NOTE(review): this definition is truncated in this view -- the try
    # block is cut off after ``times = 0``.
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.connect((HOST, PORT))
    # Timestamp doubles as a unique id correlating request and response.
    query_id = time.time()
    message = {
        'opt': "GETDATA",
        'query_id': query_id,
        'symbols': ["XAUUSD"],
        'tf': "M1",
        'start_time': "2015-11-03",
        'end_time': "2015-11-04",
        'size': 90
    }
    try:
        data = {query_id: []}
        times = 0