Code Example #1
File: home.py  Project: schiermike/pylons-websockets
    def ws(self):
        # open websocket
        websock = request.environ["ws4py.websocket"]
        # websock_version = request.environ['wsgi.websocket_version']
        # sec_websocket_extensions = request.environ.get('HTTP_SEC_WEBSOCKET_EXTENSIONS')
        # sec_websocket_key = request.environ.get('HTTP_SEC_WEBSOCKET_KEY')
        # sec_websocket_version = request.environ.get('HTTP_SEC_WEBSOCKET_VERSION')
        endpoint = websock.sock.getpeername()
        # print 'connection established with endpoint %s:%s, version %s / %s, key %s, extensions %s' \
        #       % (endpoint[0], endpoint[1], websock_version, sec_websocket_version, sec_websocket_key, sec_websocket_extensions)

        from gevent import Greenlet

        g1 = Greenlet(websock.run)
        g1.start()

        websock.send("Hello dear Browser! I'll send you redis stuff when I get some")

        g2 = Greenlet(send_stuff_in_intervals, websock)
        #         g2 = Greenlet(send_redis_stuff, websock)
        g2.start()

        g2.join()
        g1.join()

        print "connection closed to %s:%s" % endpoint
Code Example #2
class MongoThread(object):
    """A thread, or a greenlet, that uses a Connection"""
    def __init__(self, test_case):
        self.use_greenlets = test_case.use_greenlets
        self.connection = test_case.c
        self.db = self.connection[DB]
        self.ut = test_case
        self.passed = False

    def start(self):
        if self.use_greenlets:
            self.thread = Greenlet(self.run)
        else:
            self.thread = threading.Thread(target=self.run)

        self.thread.start()

    def join(self):
        self.thread.join()
        self.thread = None

    def run(self):
        self.run_mongo_thread()

        # No exceptions thrown
        self.passed = True

    def run_mongo_thread(self):
        raise NotImplementedError()
Code Example #3
def thrFunc(s):
    print "%s start.\n" %s
    gevent.sleep(2)
    gThr = Greenlet(subFunc)
    gThr.start()
    gThr.join()
    print "%s end.\n" %s
Code Example #4
    def start(self):
        greenlet = Greenlet(self._start)
        greenlet.link_exception(self._logGreenletError)

        TIMER = gevent.greenlet.Greenlet(self._timer)
        TIMER.start()

        # Start and wait until the log server stops (main greenlet).
        greenlet.start()
        greenlet.join()
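
A minimal, self-contained sketch of the link_exception() pattern used above, assuming only gevent is installed (the handler and function names here are illustrative, not taken from the project):

import gevent
from gevent import Greenlet


def log_greenlet_error(greenlet):
    # Called by gevent only if the linked greenlet dies with an unhandled exception.
    print("greenlet failed: %r" % (greenlet.exception,))


def risky():
    raise RuntimeError("boom")


g = Greenlet(risky)
g.link_exception(log_greenlet_error)
g.start()
g.join()          # join() waits but does not re-raise; the callback reports the failure
gevent.sleep(0)   # give the hub a chance to run the link callback before exiting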
Code Example #5
 def StartGame(self):
   self.game_id = self.manager.CreateGame(self.game_name, self.players)[0]
   for p in self.players:
     CardNamespace.players[p].emit('go_to_game_table')
   while False in [CardNamespace.players[p].ready for p in self.players]:
     print >>sys.stderr, [CardNamespace.players[p].ready for p in self.players]
     sleep(0.05)
   g = Greenlet(self.manager.StartGame, self.game_id)
   g.start()
   g.join()
Code Example #6
class MongoThread(object):
    """A thread, or a greenlet, that uses a MongoClient"""
    def __init__(self, test_case):
        self.use_greenlets = test_case.use_greenlets
        self.client = test_case.c
        self.db = self.client[DB]
        self.ut = test_case
        self.passed = False

    def start(self):
        if self.use_greenlets:
            # A Gevent extended Greenlet
            self.thread = Greenlet(self.run)
        else:
            self.thread = threading.Thread(target=self.run)
            self.thread.setDaemon(True)  # Don't hang whole test if thread hangs

        self.thread.start()

    @property
    def alive(self):
        if self.use_greenlets:
            return not self.thread.dead
        else:
            return self.thread.isAlive()

    def join(self):
        self.thread.join(20)
        if self.use_greenlets:
            msg = "Greenlet timeout"
        else:
            msg = "Thread timeout"
        assert not self.alive, msg
        self.thread = None

    def run(self):
        self.run_mongo_thread()

        # No exceptions thrown
        self.passed = True

    def run_mongo_thread(self):
        raise NotImplementedError()

    def disconnect_client(self):
        if isinstance(self.client, MongoClient):
            self.client.close()
        else:
            # Don't kill the replica set monitor.
            self.client.disconnect()
Code Example #7
    def test_socket_reclamation(self):
        # Check that if a thread starts a request and dies without ending
        # the request, that the socket is reclaimed into the pool.
        cx_pool = self.get_pool(
            pair=(host,port),
            max_size=10,
            net_timeout=1000,
            conn_timeout=1000,
            use_ssl=False,
        )

        self.assertEqual(0, len(cx_pool.sockets))

        lock = None
        the_sock = [None]

        def leak_request():
            self.assertEqual(NO_REQUEST, cx_pool._get_request_state())
            cx_pool.start_request()
            self.assertEqual(NO_SOCKET_YET, cx_pool._get_request_state())
            sock_info = cx_pool.get_socket()
            self.assertEqual(sock_info, cx_pool._get_request_state())
            the_sock[0] = id(sock_info.sock)

            if not self.use_greenlets:
                lock.release()

        if self.use_greenlets:
            g = Greenlet(leak_request)
            g.start()
            g.join(1)
            self.assertTrue(g.ready(), "Greenlet is hung")
        else:
            lock = thread.allocate_lock()
            lock.acquire()

            # Start a thread WITHOUT a threading.Thread - important to test that
            # Pool can deal with primitive threads.
            thread.start_new_thread(leak_request, ())

            # Join thread
            acquired = lock.acquire()
            self.assertTrue(acquired, "Thread is hung")

        force_reclaim_sockets(cx_pool, 1)

        # Pool reclaimed the socket
        self.assertEqual(1, len(cx_pool.sockets))
        self.assertEqual(the_sock[0], id(one(cx_pool.sockets).sock))
Code Example #8
File: list_streams.py  Project: Locu/chronology
def check_stream(client, namespace, stream, start, end, limit, timeout,
                 latency):
  def run():
    for event in client.get(stream, start, end, limit=limit, timeout=latency):
      # Yeah, I'm useless.
      pass

  read_greenlet = Greenlet(run)
  read_greenlet.start()
  read_greenlet.join(timeout)
  if not read_greenlet.ready():
    read_greenlet.kill()
    success = False
  else:
    success = read_greenlet.successful()
  return success
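
A hedged sketch of the same join-with-timeout pattern: wait a bounded time, then kill the greenlet if it has not finished (only gevent is assumed; the worker function is a stand-in):

import gevent
from gevent import Greenlet


def slow_reader():
    gevent.sleep(10)  # stands in for a long-running read loop


g = Greenlet(slow_reader)
g.start()
g.join(timeout=0.1)           # returns after 0.1s even if g is still running
if not g.ready():
    g.kill()                  # raises GreenletExit inside the greenlet
    success = False
else:
    success = g.successful()  # True only if it finished without an exception
print(success)                # -> False here, because the reader was killed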
Code Example #9
def test_reply_reconcile(db, config, message, sync_client):
    from inbox.server.models.tables.base import Message, SpoolMessage
    from inbox.server.models.tables.imap import ImapAccount
    from inbox.server.sendmail.base import reply, recipients

    to, subject, body = message
    attachment = None
    cc = '*****@*****.**'
    bcc = None

    account = db.session.query(ImapAccount).get(ACCOUNT_ID)

    # Create email message, store a local copy + send it:
    reply(NAMESPACE_ID, account, THREAD_ID, recipients(to, cc, bcc),
          subject, body, attachment)

    # Sync to verify reconciliation:
    synclet = Greenlet(sync_client.start_sync, ACCOUNT_ID)
    synclet.start()

    print '\nSyncing...'
    Greenlet.join(synclet, timeout=60)

    sync_client.stop_sync(ACCOUNT_ID)

    spool_messages = db.session.query(SpoolMessage).\
        filter_by(subject=THREAD_TOPIC).all()
    assert len(spool_messages) == 1, 'spool message missing'

    resolved_message_id = spool_messages[0].resolved_message_id
    assert resolved_message_id, 'spool message not reconciled'

    inbox_uid = spool_messages[0].inbox_uid
    thread_id = spool_messages[0].thread_id
    g_thrid = spool_messages[0].g_thrid

    killall(synclet)

    reconciled_message = db.session.query(Message).get(resolved_message_id)
    assert reconciled_message.inbox_uid == inbox_uid,\
        'spool message, reconciled message have different inbox_uids'

    assert reconciled_message.thread_id == thread_id,\
        'spool message, reconciled message have different thread_ids'

    assert reconciled_message.g_thrid == g_thrid,\
        'spool message, reconciled message have different g_thrids'
Code Example #10
File: test.py  Project: sublee/lets
def test_job_queue_guarantees_all_jobs():
    queue = lets.JobQueue()
    xs = []
    def f(x):
        gevent.sleep(0.01)
        xs.append(x)
    queue.put(Greenlet(f, 0))
    queue.put(Greenlet(f, 1))
    g = Greenlet(f, 2)
    queue.put(g)
    g.join()
    gevent.sleep(0)
    # before 0.0.23, the worker has done but the worker pool is still full.
    # before 0.0.12, the final job won't be scheduled.
    queue.put(Greenlet(f, 3))
    queue.join()
    assert xs == [0, 1, 2, 3]
Code Example #11
File: test.py  Project: sublee/lets
def test_job_queue_exited():
    results = []
    def f(x, delay=0):
        gevent.sleep(delay)
        results.append(x)
        return x
    queue = lets.JobQueue()
    g1 = Greenlet(f, 1, 0.1)
    g2 = Greenlet(f, 2, 0.1)
    queue.put(g1)
    queue.put(g2)
    g1.join()
    queue.kill()
    queue.join()
    assert results == [1]
    assert g1.get() == 1
    assert isinstance(g2.get(), gevent.GreenletExit)
Code Example #12
File: server.py  Project: jumpscale7/jumpscale_core7
    def start(self):
        print('Starting message server')

        for forwardAddress in self.forwardAddresses:
            client = MessageServerClient(forwardAddress)
            server.forwardClients.add(client)

        self._connect()

        self._storePidInPidFile()

        gevent.core.signal(signal.SIGHUP, self.stop)
        gevent.core.signal(signal.SIGINT, self.stop)
        gevent.core.signal(signal.SIGTERM, self.stop)

        greenlet = Greenlet(self.receiveMessages)
        greenlet.link_exception(self._logGreenletError)

        greenlet2 = Greenlet(self.processLogMessages)
        greenlet2.link_exception(self._logGreenletError)
        greenlet2.start()

        greenlet3 = Greenlet(self._timer)
        greenlet3.link_exception(self._logGreenletError)
        greenlet3.start()

        greenlet.start()

        storeLocallyStr = str(self.storeLocally)
        addresses = [client.address for client in self.forwardClients]
        addressesStr = ', '.join(addresses)

        print(('''\
Message server started
listens on: %s
stores locally: %s
forwards to: %s
pid: %d
pid file: %s''' % (self._address, storeLocallyStr, addressesStr, self._pid, self._pidFile)))

        # Wait until the log server stops (main greenlet).
        try:
            greenlet.join()
        except KeyboardInterrupt:
            # Ignore this error.
            pass
Code Example #13
class MongoThread(object):
    """A thread, or a greenlet, that uses a MongoClient"""
    def __init__(self, test_case):
        self.use_greenlets = test_case.use_greenlets
        self.client = test_case.c
        self.db = self.client[DB]
        self.ut = test_case
        self.passed = False

    def start(self):
        if self.use_greenlets:
            # A Gevent extended Greenlet
            self.thread = Greenlet(self.run)
        else:
            self.thread = threading.Thread(target=self.run)
            self.thread.setDaemon(
                True)  # Don't hang whole test if thread hangs

        self.thread.start()

    @property
    def alive(self):
        if self.use_greenlets:
            return not self.thread.dead
        else:
            return self.thread.isAlive()

    def join(self):
        self.thread.join(10)
        if self.use_greenlets:
            msg = "Greenlet timeout"
        else:
            msg = "Thread timeout"
        assert not self.alive, msg
        self.thread = None

    def run(self):
        self.run_mongo_thread()

        # No exceptions thrown
        self.passed = True

    def run_mongo_thread(self):
        raise NotImplementedError()
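
A minimal sketch of the thread-or-greenlet switch that MongoThread relies on, assuming gevent is installed; the helper name is illustrative:

import threading
from gevent import Greenlet


def start_worker(fn, use_greenlets):
    # Both Greenlet and threading.Thread expose start()/join(), so callers
    # can treat the returned worker uniformly.
    if use_greenlets:
        worker = Greenlet(fn)
    else:
        worker = threading.Thread(target=fn)
        worker.daemon = True  # don't hang interpreter exit if fn hangs
    worker.start()
    return worker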
Code Example #14
File: list_streams.py  Project: tibbetts/chronology
def check_stream(client, namespace, stream, start, end, limit, timeout,
                 latency):
    def run():
        for event in client.get(stream,
                                start,
                                end,
                                limit=limit,
                                timeout=latency):
            # Yeah, I'm useless.
            pass

    read_greenlet = Greenlet(run)
    read_greenlet.start()
    read_greenlet.join(timeout)
    if not read_greenlet.ready():
        read_greenlet.kill()
        success = False
    else:
        success = read_greenlet.successful()
    return success
Code Example #15
def test_send_reconcile(db, config, message, sync_client):
    from inbox.server.models.tables.base import Message, SpoolMessage
    from inbox.server.models.tables.imap import ImapAccount
    from inbox.server.sendmail.base import send, recipients

    to, subject, body = message
    attachment = None
    cc = '*****@*****.**'
    bcc = None

    # Create email message, store a local copy + send it:
    account = db.session.query(ImapAccount).get(ACCOUNT_ID)
    send(account, recipients(to, cc, bcc), subject, body, attachment)

    # Sync to verify reconciliation:
    synclet = Greenlet(sync_client.start_sync, ACCOUNT_ID)
    synclet.start()

    Greenlet.join(synclet, timeout=60)

    sync_client.stop_sync(ACCOUNT_ID)

    spool_messages = db.session.query(SpoolMessage).\
        filter_by(subject=subject).all()
    assert len(spool_messages) == 1, 'spool message missing'

    resolved_message_id = spool_messages[0].resolved_message_id
    assert resolved_message_id, 'spool message not reconciled'

    inbox_uid = spool_messages[0].inbox_uid
    thread_id = spool_messages[0].thread_id

    killall(synclet)

    reconciled_message = db.session.query(Message).get(resolved_message_id)
    assert reconciled_message.inbox_uid == inbox_uid,\
        'spool message, reconciled message have different inbox_uids'

    assert reconciled_message.thread_id == thread_id,\
        'spool message, reconciled message have different thread_ids'
Code Example #16
class TemperatureMonitor(Service):
    __service__ = 'temperature_monitor'

    def start(self):
        patch_all() # :D

        self.background = Background()
        gevent.signal(signal.SIGTERM, self.background.kill)
        gevent.signal(signal.SIGINT, self.background.kill)

        self.service_greenlet = Greenlet(super(TemperatureMonitor, self).start)
        gevent.signal(signal.SIGTERM, self.service_greenlet.kill)
        gevent.signal(signal.SIGINT, self.service_greenlet.kill)

        self.background.start()
        self.service_greenlet.start()
        self.service_greenlet.join()

    def sleep(self, seconds):
        sleep(seconds)

    def handle(self, obj):
        self.logger.debug(u"Request {0}".format(obj.metadata))
Code Example #17
class TemperatureMonitor(Service):
    __service__ = 'temperature_monitor'

    def start(self):
        patch_all()  # :D

        self.background = Background()
        gevent.signal(signal.SIGTERM, self.background.kill)
        gevent.signal(signal.SIGINT, self.background.kill)

        self.service_greenlet = Greenlet(super(TemperatureMonitor, self).start)
        gevent.signal(signal.SIGTERM, self.service_greenlet.kill)
        gevent.signal(signal.SIGINT, self.service_greenlet.kill)

        self.background.start()
        self.service_greenlet.start()
        self.service_greenlet.join()

    def sleep(self, seconds):
        sleep(seconds)

    def handle(self, obj):
        self.logger.debug(u"Request {0}".format(obj.metadata))
Code Example #18
class MongoThread(object):
    """A thread, or a greenlet, that uses a Connection"""
    def __init__(self, test_case):
        self.use_greenlets = test_case.use_greenlets
        self.connection = test_case.c
        self.db = self.connection[DB]
        self.ut = test_case
        self.passed = False

    def start(self):
        if self.use_greenlets:
            # A Gevent extended Greenlet
            self.thread = Greenlet(self.run)
        else:
            self.thread = threading.Thread(target=self.run)
            self.thread.setDaemon(
                True)  # Don't hang whole test if thread hangs

        self.thread.start()

    def join(self):
        self.thread.join(300)
        if self.use_greenlets:
            assert self.thread.dead, "Greenlet timeout"
        else:
            assert not self.thread.isAlive(), "Thread timeout"

        self.thread = None

    def run(self):
        self.run_mongo_thread()

        # No exceptions thrown
        self.passed = True

    def run_mongo_thread(self):
        raise NotImplementedError()
Code Example #19
class MongoThread(object):
    """A thread, or a greenlet, that uses a Connection"""
    def __init__(self, test_case):
        self.use_greenlets = test_case.use_greenlets
        self.connection = test_case.c
        self.db = self.connection[DB]
        self.ut = test_case
        self.passed = False

    def start(self):
        if self.use_greenlets:
            # A Gevent extended Greenlet
            self.thread = Greenlet(self.run)
        else:
            self.thread = threading.Thread(target=self.run)
            self.thread.setDaemon(True) # Don't hang whole test if thread hangs


        self.thread.start()

    def join(self):
        self.thread.join(300)
        if self.use_greenlets:
            assert self.thread.dead, "Greenlet timeout"
        else:
            assert not self.thread.isAlive(), "Thread timeout"

        self.thread = None

    def run(self):
        self.run_mongo_thread()

        # No exceptions thrown
        self.passed = True

    def run_mongo_thread(self):
        raise NotImplementedError()
Code Example #20
File: AEntityProxy.py  Project: Stamped/Stamped
 def join(self):
     self._source.join()
     self._pool.join()
     Greenlet.join(self)
Code Example #21
File: fakeclient.py  Project: duhoobo/swarm
class FakeClient(object):
    """
    A fake client with persistent connection.

    Driven by a dedicated greenlet, it will die trying to operate by the rules
    from the script orderly, round and round.
    """

    def __init__(self, swarm, server, script_):
        self._swarm = swarm
        self._server = server
        self._socket = None
        self._greenlet = Greenlet(self._run)

        self._status = INITIAL
        self._prev_status = None
        self._script = script_
        self._id = id(self)

    def _report(self, status):
        """
        Report to swarm immediately on status change
        """
        if status != self._status:
            self._swarm.commit(CommandSTATUS(self._id, status, self._status))
            self._status, self._prev_status = (status, self._status)

    def _reconnect_server(self):
        """
        Die trying
        """
        while True:
            try:
                # To scatter connect requests
                time.sleep(randint(1, 20))

                self._report(CONNECT)
                self._disconnect_server()
                self._socket = create_connection(self._server, 3)
                self._socket.setsockopt(SOL_SOCKET, SO_RCVBUF, 128)
                self._socket.setsockopt(SOL_SOCKET, SO_SNDBUF, 1024)

                break

            except socket.error as e:
                # A fact: `socket.timeout`, `socket.herror`, and
                # `socket.gaierror` are all subclasses of `socket.error`.
                self._report(e.args[0])
                continue

    def _disconnect_server(self):
        if self._socket:
            self._socket.close()
            self._socket = None

    def _run(self):
        try:
            self._report(STARTED)
            self._reconnect_server()

            while True:
                try:
                    self._report(ACTING)
                    script.execute(self, self._script)
                    self._report(STANDBY)

                except (socket.error, BenchException) as e:
                    self._report(e.args[0])
                    self._reconnect_server()

        except GreenletExit:
            self._report(KILLED)

        except:
            self._report(FATAL)
            # let gevent print this exception
            raise

        finally:
            self._disconnect_server()

    def start(self):
        self._greenlet.start()

    def stop(self):
        self._greenlet.kill()
        self._greenlet.join()

    def send_for_reply(self, data, reply_parser):
        """
        Called by object of Script.

        Exceptions raised here should be handled in `_run`.
        """
        self._socket.sendall(data)

        need = reply_parser(None)
        while need > 0:
            data = self._socket.recv(need)
            if not data:
                raise ServerClosed("server closed")

            need = reply_parser(data)

    def send_noreply(self, data):
        self._socket.sendall(data)

    def close_connection(self):
        raise CloseForcibly("client closed")
Code Example #22
    def _test_pool(self, use_request):
        """
        Test that the connection pool prevents both threads and greenlets from
        using a socket at the same time.

        Sequence:
        gr0: start a slow find()
        gr1: start a fast find()
        gr1: get results
        gr0: get results
        """
        results = {
            'find_fast_result': None,
            'find_slow_result': None,
        }

        cx = get_connection(
            use_greenlets=self.use_greenlets,
            auto_start_request=False
        )

        db = cx.pymongo_test
        db.test.remove(safe=True)
        db.test.insert({'_id': 1}, safe=True)

        history = []

        def find_fast():
            if use_request:
                cx.start_request()

            history.append('find_fast start')

            # With the old connection._Pool, this would throw
            # AssertionError: "This event is already used by another
            # greenlet"
            results['find_fast_result'] = list(db.test.find())
            history.append('find_fast done')

            if use_request:
                cx.end_request()

        def find_slow():
            if use_request:
                cx.start_request()

            history.append('find_slow start')

            # Javascript function that pauses
            where = delay(2)
            results['find_slow_result'] = list(db.test.find(
                    {'$where': where}
            ))

            history.append('find_slow done')

            if use_request:
                cx.end_request()

        if self.use_greenlets:
            gr0, gr1 = Greenlet(find_slow), Greenlet(find_fast)
            gr0.start()
            gr1.start_later(.1)
        else:
            gr0 = threading.Thread(target=find_slow)
            gr1 = threading.Thread(target=find_fast)
            gr0.start()
            time.sleep(.1)
            gr1.start()

        gr0.join()
        gr1.join()

        self.assertEqual([{'_id': 1}], results['find_slow_result'])

        # Fails if there's a bug in socket allocation, since find_fast won't
        # complete
        self.assertEqual([{'_id': 1}], results['find_fast_result'])

        self.assertEqual([
            'find_slow start',
            'find_fast start',
            'find_fast done',
            'find_slow done',
        ], history)
Code Example #23
File: run_fifo.py  Project: hturki/HoneyBadgerBFT
def run_badger_node(myID, N, f, sPK, sSK, ePK, eSK, sendPath, receivePath):
    '''
    Test for the client with random delay channels
    :param i: the current node index
    :param N: the number of parties
    :param t: the number of malicious parties toleranted
    :return None:
    '''
    assert type(sPK) is boldyreva.TBLSPublicKey
    assert type(sSK) is boldyreva.TBLSPrivateKey
    assert type(ePK) is tpke.TPKEPublicKey
    assert type(eSK) is tpke.TPKEPrivateKey

    # Create the listening channel
    recv_queue = listen_to_channel(BASE_PORT + myID)
    recv = recv_queue.get
    print 'server started'

    # Create the sending channels
    send_queues = []
    for i in range(N):
        port = BASE_PORT + i
        send_queues.append(connect_to_channel('127.0.0.1', port, myID))

    def send(j, obj):
        send_queues[j].put(obj)

    # Start the honeybadger instance
    tx_submit = Queue()

    def send_to_hyperledger(transactions):
        global sendConnection
        for tx in transactions:
            if os.path.exists(sendPath):
                if sendConnection is None:
                    print "Opening sending socket at path " + sendPath
                    sendConnection = socket.socket(socket.AF_UNIX,
                                                   socket.SOCK_STREAM)
                    sendConnection.connect(sendPath)
                print "sending length " + str(len(tx))
                sendConnection.send(struct.pack('!Q', len(tx)))
                print "sending tx " + tx
                sendConnection.send(tx)

    hbbft = HoneyBadgerBFT("sid",
                           myID,
                           8,
                           N,
                           f,
                           sPK,
                           sSK,
                           ePK,
                           eSK,
                           send,
                           recv,
                           tx_submit.get,
                           send_to_hyperledger,
                           encode=repr,
                           decode=ast.literal_eval)
    th = Greenlet(hbbft.run)
    th.parent_args = (N, f)
    th.name = __file__ + '.honestParty(%d)' % i
    th.start()

    if os.path.exists(receivePath):
        os.remove(receivePath)

    print "Opening listening socket at path " + receivePath
    server = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    server.bind(receivePath)

    # Listen for incoming connections
    server.listen(1)

    while True:
        # Wait for a connection
        connection, client_address = server.accept()
        try:
            while True:
                message = connection.recv(8)
                if message:
                    print "Message " + message
                    length, = struct.unpack('!Q', message)
                    print length
                    message = connection.recv(length)
                    print message
                    tx_submit.put([message])
                else:
                    print >> sys.stderr, 'no more data from', client_address
                    break

        finally:
            # Clean up the connection
            connection.close()
            os.remove(receivePath)

    th.join()
Code Example #24
File: client-test.py  Project: gdamjan/Scalable
import os, time
from gevent import Greenlet, sleep

l = []
t1 = time.time()
for i in xrange(0, 100000):  # also try a million :)
    t = Greenlet(sleep, 30)
    t.start()
    l.append(t)

t2 = time.time()
print "%i coroutines created in %d seconds." % (len(l), t2 - t1)

for t in l:
    t.join()
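
A variant sketch of the same benchmark that waits on all greenlets at once with gevent.joinall() instead of joining them one by one (a smaller count is used here, and only gevent is assumed):

import time
import gevent
from gevent import Greenlet

greenlets = [Greenlet(gevent.sleep, 1) for _ in range(10000)]
t1 = time.time()
for g in greenlets:
    g.start()
gevent.joinall(greenlets)  # block until every greenlet has finished
print("%i greenlets finished in %.2f seconds" % (len(greenlets), time.time() - t1))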
Code Example #25
File: fakeclient.py  Project: teazj/swarm
class FakeClient(object):
    """
    A fake client with persistent connection.

    Driven by a dedicated greenlet, it will die trying to operate by the rules
    from the script orderly, round and round.
    """
    def __init__(self, swarm, server, script_):
        self._swarm = swarm
        self._server = server
        self._socket = None
        self._greenlet = Greenlet(self._run)

        self._status = INITIAL
        self._prev_status = None
        self._script = script_
        self._id = id(self)

    def _report(self, status):
        """
        Report to swarm immediately on status change
        """
        if status != self._status:
            self._swarm.commit(CommandSTATUS(self._id, status, self._status))
            self._status, self._prev_status = (status, self._status)

    def _reconnect_server(self):
        """
        Die trying
        """
        while True:
            try:
                # To scatter connect requests
                time.sleep(randint(1, 20))

                self._report(CONNECT)
                self._disconnect_server()
                self._socket = create_connection(self._server, 3)
                self._socket.setsockopt(SOL_SOCKET, SO_RCVBUF, 128)
                self._socket.setsockopt(SOL_SOCKET, SO_SNDBUF, 1024)

                break

            except socket.error as e:
                # A fact: `socket.timeout`, `socket.herror`, and
                # `socket.gaierror` are all subclasses of `socket.error`.
                self._report(e.args[0])
                continue

    def _disconnect_server(self):
        if self._socket:
            self._socket.close()
            self._socket = None

    def _run(self):
        try:
            self._report(STARTED)
            self._reconnect_server()

            while True:
                try:
                    self._report(ACTING)
                    script.execute(self, self._script)
                    self._report(STANDBY)

                except (socket.error, BenchException) as e:
                    self._report(e.args[0])
                    self._reconnect_server()

        except GreenletExit:
            self._report(KILLED)

        except:
            self._report(FATAL)
            # let gevent print this exception
            raise

        finally:
            self._disconnect_server()

    def start(self):
        self._greenlet.start()

    def stop(self):
        self._greenlet.kill()
        self._greenlet.join()

    def send_for_reply(self, data, reply_parser):
        """
        Called by object of Script.

        Exceptions raised here should be handled in `_run`.
        """
        self._socket.sendall(data)

        need = reply_parser(None)
        while need > 0:
            data = self._socket.recv(need)
            if not data:
                raise ServerClosed("server closed")

            need = reply_parser(data)

    def send_noreply(self, data):
        self._socket.sendall(data)

    def close_connection(self):
        raise CloseForcibly("client closed")
Code Example #26
    def _test_pool(self, use_request):
        """
        Test that the connection pool prevents both threads and greenlets from
        using a socket at the same time.

        Sequence:
        gr0: start a slow find()
        gr1: start a fast find()
        gr1: get results
        gr0: get results
        """
        cx = get_client(
            use_greenlets=self.use_greenlets,
            auto_start_request=False
        )

        db = cx.pymongo_test
        db.test.remove()
        db.test.insert({'_id': 1})

        history = []

        def find_fast():
            if use_request:
                cx.start_request()

            history.append('find_fast start')

            # With greenlets and the old connection._Pool, this would throw
            # AssertionError: "This event is already used by another
            # greenlet"
            self.assertEqual({'_id': 1}, db.test.find_one())
            history.append('find_fast done')

            if use_request:
                cx.end_request()

        def find_slow():
            if use_request:
                cx.start_request()

            history.append('find_slow start')

            # Javascript function that pauses N seconds per document
            fn = delay(10)
            if (is_mongos(db.connection) or not
                version.at_least(db.connection, (1, 7, 2))):
                # mongos doesn't support eval so we have to use $where
                # which is less reliable in this context.
                self.assertEqual(1, db.test.find({"$where": fn}).count())
            else:
                # 'nolock' allows find_fast to start and finish while we're
                # waiting for this to complete.
                self.assertEqual({'ok': 1.0, 'retval': True},
                                 db.command('eval', fn, nolock=True))

            history.append('find_slow done')

            if use_request:
                cx.end_request()

        if self.use_greenlets:
            gr0, gr1 = Greenlet(find_slow), Greenlet(find_fast)
            gr0.start()
            gr1.start_later(.1)
        else:
            gr0 = threading.Thread(target=find_slow)
            gr0.setDaemon(True)
            gr1 = threading.Thread(target=find_fast)
            gr1.setDaemon(True)

            gr0.start()
            time.sleep(.1)
            gr1.start()

        gr0.join()
        gr1.join()

        self.assertEqual([
            'find_slow start',
            'find_fast start',
            'find_fast done',
            'find_slow done',
        ], history)
Code Example #27
    def _test_pool(self, use_greenlets, use_request):
        """
        Test that the connection pool prevents both threads and greenlets from
        using a socket at the same time.

        Sequence:
        gr0: start a slow find()
        gr1: start a fast find()
        gr1: get results
        gr0: get results
        """
        if use_greenlets:
            try:
                from gevent import monkey, Greenlet
            except ImportError:
                raise SkipTest('gevent not installed')

            # Note we don't do patch_thread() or patch_all() - we're
            # testing here that patch_thread() is unnecessary for
            # the connection pool to work properly.
            monkey.patch_socket()

        try:
            results = {
                'find_fast_result': None,
                'find_slow_result': None,
            }

            cx = get_connection(
                use_greenlets=use_greenlets,
                auto_start_request=False
            )

            db = cx.pymongo_test
            db.test.remove(safe=True)
            db.test.insert({'_id': 1})

            history = []

            def find_fast():
                if use_request:
                    cx.start_request()

                history.append('find_fast start')

                # With the old connection._Pool, this would throw
                # AssertionError: "This event is already used by another
                # greenlet"
                results['find_fast_result'] = list(db.test.find())
                history.append('find_fast done')

                if use_request:
                    cx.end_request()

            def find_slow():
                if use_request:
                    cx.start_request()

                history.append('find_slow start')

                # Javascript function that pauses
                where = delay(2)
                results['find_slow_result'] = list(db.test.find(
                    {'$where': where}
                ))

                history.append('find_slow done')

                if use_request:
                    cx.end_request()

            if use_greenlets:
                gr0, gr1 = Greenlet(find_slow), Greenlet(find_fast)
                gr0.start()
                gr1.start_later(.1)
            else:
                gr0 = threading.Thread(target=find_slow)
                gr1 = threading.Thread(target=find_fast)
                gr0.start()
                time.sleep(.1)
                gr1.start()

            gr0.join()
            gr1.join()

            self.assertEqual([{'_id': 1}], results['find_slow_result'])

            # Fails, since find_fast doesn't complete
            self.assertEqual([{'_id': 1}], results['find_fast_result'])

            self.assertEqual([
                'find_slow start',
                'find_fast start',
                'find_fast done',
                'find_slow done',
            ], history)

        finally:
            # Undo Gevent patching
            reload(socket)
Code Example #28
    def _test_pool(self, use_request):
        """
        Test that the connection pool prevents both threads and greenlets from
        using a socket at the same time.

        Sequence:
        gr0: start a slow find()
        gr1: start a fast find()
        gr1: get results
        gr0: get results
        """
        cx = get_connection(use_greenlets=self.use_greenlets,
                            auto_start_request=False)

        db = cx.pymongo_test
        if not version.at_least(db.connection, (1, 7, 2)):
            raise SkipTest("Need at least MongoDB version 1.7.2 to use"
                           " db.eval(nolock=True)")

        db.test.remove(safe=True)
        db.test.insert({'_id': 1}, safe=True)

        history = []

        def find_fast():
            if use_request:
                cx.start_request()

            history.append('find_fast start')

            # With greenlets and the old connection._Pool, this would throw
            # AssertionError: "This event is already used by another
            # greenlet"
            self.assertEqual({'_id': 1}, db.test.find_one())
            history.append('find_fast done')

            if use_request:
                cx.end_request()

        def find_slow():
            if use_request:
                cx.start_request()

            history.append('find_slow start')

            # Javascript function that pauses 5 sec. 'nolock' allows find_fast
            # to start and finish while we're waiting for this.
            fn = delay(5)
            self.assertEqual({
                'ok': 1.0,
                'retval': True
            }, db.command('eval', fn, nolock=True))

            history.append('find_slow done')

            if use_request:
                cx.end_request()

        if self.use_greenlets:
            gr0, gr1 = Greenlet(find_slow), Greenlet(find_fast)
            gr0.start()
            gr1.start_later(.1)
        else:
            gr0 = threading.Thread(target=find_slow)
            gr0.setDaemon(True)
            gr1 = threading.Thread(target=find_fast)
            gr1.setDaemon(True)

            gr0.start()
            time.sleep(.1)
            gr1.start()

        gr0.join()
        gr1.join()

        self.assertEqual([
            'find_slow start',
            'find_fast start',
            'find_fast done',
            'find_slow done',
        ], history)
Code Example #29
File: geventgreenlet.py  Project: tracedeng/calculus
# Block until all threads complete.
gevent.joinall(threads)
"""


def foo():
    print "foo"
    print gevent.getcurrent()
    gevent.sleep(2)
    return "foo"

def foo2(green):
    print("foo2")
    print gevent.getcurrent()
    return "foo2"


print gevent.getcurrent()
t = Greenlet(foo)
print t.ready()
t.start()
t.link(foo2)
t.join(0)
#t.kill()

print "yes"
print t.ready()
print t.successful()
#print t.get()
print t.value
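
A short, self-contained sketch of the same introspection calls on a greenlet that has actually finished (joining without a timeout so ready()/successful()/value are meaningful; only gevent is assumed):

import gevent
from gevent import Greenlet


def foo():
    gevent.sleep(0.1)
    return "foo"


t = Greenlet(foo)
t.start()
t.join()               # no timeout, so the greenlet is done when this returns
print(t.ready())       # True: the greenlet has finished
print(t.successful())  # True: it finished without raising
print(t.value)         # "foo": the return value
print(t.get())         # same value; get() would re-raise an exception instead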
Code Example #30
File: gevent_demo.py  Project: lijiulin/quietheart
#!/usr/bin/python
import random
import gevent
from gevent import Greenlet


def thrFunc(s):
    print "%s start.\n" % s
    gevent.sleep(2)
    print "%s end.\n" % s


gThr = Greenlet(thrFunc, "startjoin1")
gThr.start()
gThr.join()
gThr = Greenlet(thrFunc, "startjoin2")
gThr.start()
gThr.join()

print "+++++++++"
gThr = Greenlet(thrFunc, "start1")
gThr.start()
gThr = Greenlet(thrFunc, "start2")
gThr.start()
gThr = Greenlet(thrFunc, "start3")
gThr.start()
gThr = Greenlet(thrFunc, "start4")
gThr.start()

print "+++++++++"
gThr1 = Greenlet.spawn(thrFunc, "spawn1")
Code Example #31
File: test.py  Project: seckcoder/lang-learn
import gevent
from gevent import Greenlet
import time

def foo():
    print 'foo'
    print 'foo again'
    return "haha"

def bar():
    print 'bar'
    gevent.sleep(0)
    print 'bar again'

g = Greenlet(foo)

class Study(object):
    def __call__(self, args):
        print "callback"
case = Study()
g.link(case)
g.start()
g.join()
Code Example #32
File: base.py  Project: DrMoriarty/sync-engine
class BaseMailSyncMonitor(Greenlet):
    """
    The SYNC_MONITOR_CLS for all mail sync providers should subclass this.

    Parameters
    ----------
    account_id : int
        Which account to sync.
    email_address : str
        Email address for `account_id`.
    provider : str
        Provider for `account_id`.
    heartbeat : int
        How often to check for commands.
    """

    def __init__(self, account, heartbeat=1):
        bind_context(self, 'mailsyncmonitor', account.id)
        self.shutdown = event.Event()
        # how often to check inbox, in seconds
        self.heartbeat = heartbeat
        self.log = log.new(component='mail sync', account_id=account.id)
        self.account_id = account.id
        self.namespace_id = account.namespace.id
        self.email_address = account.email_address
        self.provider_name = account.verbose_provider

        Greenlet.__init__(self)

    def _run(self):
        try:
            return retry_with_logging(self._run_impl,
                                      account_id=self.account_id,
                                      provider=self.provider_name,
                                      logger=self.log)
        except GreenletExit:
            self._cleanup()
            raise

    def _run_impl(self):
        self.sync = Greenlet(retry_with_logging, self.sync,
                             account_id=self.account_id,
                             provider=self.provider_name,
                             logger=self.log)
        self.sync.start()
        self.sync.join()

        if self.sync.successful():
            return self._cleanup()

        self.log.error('mail sync should run forever',
                       provider=self.provider_name,
                       account_id=self.account_id,
                       exc=self.sync.exception)
        raise self.sync.exception

    def sync(self):
        raise NotImplementedError

    def _cleanup(self):
        self.sync.kill()
        with session_scope(self.namespace_id) as mailsync_db_session:
            map(lambda x: x.set_stopped(mailsync_db_session),
                self.folder_monitors)
        self.folder_monitors.kill()
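
A minimal sketch of the Greenlet-subclass pattern BaseMailSyncMonitor uses: call Greenlet.__init__ in the constructor and override _run(); the Monitor class here is illustrative, not part of the project:

import gevent
from gevent import Greenlet


class Monitor(Greenlet):
    def __init__(self, interval):
        Greenlet.__init__(self)
        self.interval = interval

    def _run(self):
        # Body executed once the instance is start()ed.
        for _ in range(3):
            gevent.sleep(self.interval)


m = Monitor(0.1)
m.start()
m.join()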
Code Example #33
    def _test_pool(self, use_request):
        """
        Test that the connection pool prevents both threads and greenlets from
        using a socket at the same time.

        Sequence:
        gr0: start a slow find()
        gr1: start a fast find()
        gr1: get results
        gr0: get results
        """
        cx = get_connection(
            use_greenlets=self.use_greenlets,
            auto_start_request=False
        )

        db = cx.pymongo_test
        if not version.at_least(db.connection, (1, 7, 2)):
            raise SkipTest("Need at least MongoDB version 1.7.2 to use"
                           " db.eval(nolock=True)")
        
        db.test.remove(safe=True)
        db.test.insert({'_id': 1}, safe=True)

        history = []

        def find_fast():
            if use_request:
                cx.start_request()

            history.append('find_fast start')

            # With greenlets and the old connection._Pool, this would throw
            # AssertionError: "This event is already used by another
            # greenlet"
            self.assertEqual({'_id': 1}, db.test.find_one())
            history.append('find_fast done')

            if use_request:
                cx.end_request()

        def find_slow():
            if use_request:
                cx.start_request()

            history.append('find_slow start')

            # Javascript function that pauses 5 sec. 'nolock' allows find_fast
            # to start and finish while we're waiting for this.
            fn = delay(5)
            self.assertEqual(
                {'ok': 1.0, 'retval': True},
                db.command('eval', fn, nolock=True))

            history.append('find_slow done')

            if use_request:
                cx.end_request()

        if self.use_greenlets:
            gr0, gr1 = Greenlet(find_slow), Greenlet(find_fast)
            gr0.start()
            gr1.start_later(.1)
        else:
            gr0 = threading.Thread(target=find_slow)
            gr0.setDaemon(True)
            gr1 = threading.Thread(target=find_fast)
            gr1.setDaemon(True)

            gr0.start()
            time.sleep(.1)
            gr1.start()

        gr0.join()
        gr1.join()

        self.assertEqual([
            'find_slow start',
            'find_fast start',
            'find_fast done',
            'find_slow done',
        ], history)
Code Example #34
        def do_test():
            if use_greenlets:
                try:
                    from gevent import Greenlet
                    from gevent import monkey

                    # Note we don't do patch_thread() or patch_all() - we're
                    # testing here that patch_thread() is unnecessary for
                    # the connection pool to work properly.
                    monkey.patch_socket()
                except ImportError:
                    outcome.value = SKIP
                    return
    
            cx = get_connection(
                use_greenlets=use_greenlets,
                auto_start_request=False
            )

            db = cx.pymongo_test
            db.test.remove(safe=True)
            db.test.insert({'_id': 1})

            history = []

            def find_fast():
                if use_request:
                    cx.start_request()

                history.append('find_fast start')

                # With the old connection._Pool, this would throw
                # AssertionError: "This event is already used by another
                # greenlet"
                results['find_fast_result'] = list(db.test.find())
                history.append('find_fast done')

                if use_request:
                    cx.end_request()

            def find_slow():
                if use_request:
                    cx.start_request()

                history.append('find_slow start')

                # Javascript function that pauses for half a second
                where = delay(0.5)
                results['find_slow_result'] = list(db.test.find(
                    {'$where': where}
                ))

                history.append('find_slow done')

                if use_request:
                    cx.end_request()

            if use_greenlets:
                gr0, gr1 = Greenlet(find_slow), Greenlet(find_fast)
                gr0.start()
                gr1.start_later(.1)
            else:
                gr0 = threading.Thread(target=find_slow)
                gr1 = threading.Thread(target=find_fast)
                gr0.start()
                time.sleep(0.1)
                gr1.start()

            gr0.join()
            gr1.join()

            self.assertEqual([{'_id': 1}], results['find_slow_result'])

            # Fails, since find_fast doesn't complete
            self.assertEqual([{'_id': 1}], results['find_fast_result'])

            self.assertEqual([
                'find_slow start',
                'find_fast start',
                'find_fast done',
                'find_slow done',
            ], history)

            outcome.value = SUCCESS
Code Example #35
    def _test_pool(self, use_request):
        """
        Test that the connection pool prevents both threads and greenlets from
        using a socket at the same time.

        Sequence:
        gr0: start a slow find()
        gr1: start a fast find()
        gr1: get results
        gr0: get results
        """
        cx = get_client(use_greenlets=self.use_greenlets,
                        auto_start_request=False)

        db = cx.pymongo_test
        db.test.remove()
        db.test.insert({'_id': 1})

        history = []

        def find_fast():
            if use_request:
                cx.start_request()

            history.append('find_fast start')

            # With greenlets and the old connection._Pool, this would throw
            # AssertionError: "This event is already used by another
            # greenlet"
            self.assertEqual({'_id': 1}, db.test.find_one())
            history.append('find_fast done')

            if use_request:
                cx.end_request()

        def find_slow():
            if use_request:
                cx.start_request()

            history.append('find_slow start')

            # Javascript function that pauses N seconds per document
            fn = delay(10)
            if (is_mongos(db.connection)
                    or not version.at_least(db.connection, (1, 7, 2))):
                # mongos doesn't support eval so we have to use $where
                # which is less reliable in this context.
                self.assertEqual(1, db.test.find({"$where": fn}).count())
            else:
                # 'nolock' allows find_fast to start and finish while we're
                # waiting for this to complete.
                self.assertEqual({
                    'ok': 1.0,
                    'retval': True
                }, db.command('eval', fn, nolock=True))

            history.append('find_slow done')

            if use_request:
                cx.end_request()

        if self.use_greenlets:
            gr0, gr1 = Greenlet(find_slow), Greenlet(find_fast)
            gr0.start()
            gr1.start_later(.1)
        else:
            gr0 = threading.Thread(target=find_slow)
            gr0.setDaemon(True)
            gr1 = threading.Thread(target=find_fast)
            gr1.setDaemon(True)

            gr0.start()
            time.sleep(.1)
            gr1.start()

        gr0.join()
        gr1.join()

        self.assertEqual([
            'find_slow start',
            'find_fast start',
            'find_fast done',
            'find_slow done',
        ], history)
Code Example #36
class BaseMailSyncMonitor(Greenlet):
    """
    The SYNC_MONITOR_CLS for all mail sync providers should subclass this.

    Parameters
    ----------
    account_id : int
        Which account to sync.
    email_address : str
        Email address for `account_id`.
    provider : str
        Provider for `account_id`.
    heartbeat : int
        How often to check for commands.
    """

    def __init__(self, account, heartbeat=1):
        bind_context(self, "mailsyncmonitor", account.id)
        self.shutdown = event.Event()
        # how often to check inbox, in seconds
        self.heartbeat = heartbeat
        self.log = log.new(component="mail sync", account_id=account.id)
        self.account_id = account.id
        self.namespace_id = account.namespace.id
        self.email_address = account.email_address
        self.provider_name = account.verbose_provider

        Greenlet.__init__(self)

    def _run(self):
        try:
            return retry_with_logging(
                self._run_impl,
                account_id=self.account_id,
                provider=self.provider_name,
                logger=self.log,
            )
        except GreenletExit:
            self._cleanup()
            raise

    def _run_impl(self):
        self.sync = Greenlet(
            retry_with_logging,
            self.sync,
            account_id=self.account_id,
            provider=self.provider_name,
            logger=self.log,
        )
        self.sync.start()
        self.sync.join()

        if self.sync.successful():
            return self._cleanup()

        self.log.error(
            "mail sync should run forever",
            provider=self.provider_name,
            account_id=self.account_id,
            exc=self.sync.exception,
        )
        raise self.sync.exception

    def sync(self):
        raise NotImplementedError

    def _cleanup(self):
        self.sync.kill()
        with session_scope(self.namespace_id) as mailsync_db_session:
            map(lambda x: x.set_stopped(mailsync_db_session), self.folder_monitors)
        self.folder_monitors.kill()
Code Example #37
File: console.py  Project: whatSocks/Jeev
class ConsoleAdapter(object):
    """
        This adapter will run Jeev in console mode, listening to stdin for messages,
        and writing outgoing messages to stdout.
    """
    def __init__(self, jeev, opts):
        self._jeev = jeev
        self._opts = opts
        self._stdin = None
        self._stdout = None
        self._reader = None
        self._channel = opts.get('console_channel', 'console')
        self._user = opts.get('console_user', 'user')

    def _read_stdin(self):
        self._stdout.write(">>> Jeev Console Adapter\n")
        self._stdout.write(">>> Switch channel using \c channel_name\n")
        self._stdout.write(">>> Switch user using \u user_name\n")
        self._stdout.flush()
        
        while True:
            self._stdout.write('[%s@%s] > ' % (self._user, self._channel))
            self._stdout.flush()

            line = self._stdin.readline()
            if not line:
                break

            if line.startswith('\c'):
                self._channel = line[2:].strip().lstrip('#')
                self._stdout.write("Switched channel to #%s\n" % self._channel)
                self._stdout.flush()

            elif line.startswith('\u'):
                self._user = line[2:].strip()
                self._stdout.write("Switched user %s\n" % self._user)
                self._stdout.flush()

            else:
                message = Message({}, self._channel, self._user, line.strip())
                self._jeev._handle_message(message)

    def start(self):
        self._reader = Greenlet(self._read_stdin)
        self._stdin = FileObject(sys.stdin)
        self._stdout = FileObject(sys.stdout)
        self._reader.start()

    def stop(self):
        self._reader.kill()
        self._reader = None

    def join(self):
        self._reader.join()

    def send_message(self, channel, message):
        self._stdout.write('\r< [#%s] %s\n' % (channel, message))
        self._stdout.write('[%s@%s] > ' % (self._user, self._channel))
        self._stdout.flush()

    def send_messages(self, channel, *messages):
        for message in messages:
            self.send_message(channel, message)
Code Example #38
    def test_socket_reclamation(self):
        if sys.platform.startswith('java'):
            raise SkipTest("Jython can't do socket reclamation")

        # Check that if a thread starts a request and dies without ending
        # the request, that the socket is reclaimed into the pool.
        cx_pool = self.get_pool(
            pair=(host, port),
            max_size=10,
            net_timeout=1000,
            conn_timeout=1000,
            use_ssl=False,
        )

        self.assertEqual(0, len(cx_pool.sockets))

        lock = None
        the_sock = [None]

        def leak_request():
            self.assertEqual(NO_REQUEST, cx_pool._get_request_state())
            cx_pool.start_request()
            self.assertEqual(NO_SOCKET_YET, cx_pool._get_request_state())
            sock_info = cx_pool.get_socket()
            self.assertEqual(sock_info, cx_pool._get_request_state())
            the_sock[0] = id(sock_info.sock)
            cx_pool.maybe_return_socket(sock_info)

            if not self.use_greenlets:
                lock.release()

        if self.use_greenlets:
            g = Greenlet(leak_request)
            g.start()
            g.join(1)
            self.assertTrue(g.ready(), "Greenlet is hung")

            # In Gevent after 0.13.8, join() returns before the Greenlet.link
            # callback fires. Give it a moment to reclaim the socket.
            gevent.sleep(0.1)
        else:
            lock = thread.allocate_lock()
            lock.acquire()

            # Start a thread WITHOUT a threading.Thread - important to test that
            # Pool can deal with primitive threads.
            thread.start_new_thread(leak_request, ())

            # Join thread
            acquired = lock.acquire()
            self.assertTrue(acquired, "Thread is hung")

            # Make sure thread is really gone
            time.sleep(1)

            if 'PyPy' in sys.version:
                gc.collect()

            # Access the thread local from the main thread to trigger the
            # ThreadVigil's delete callback, returning the request socket to
            # the pool.
            # In Python 2.7.0 and lesser, a dead thread's locals are deleted
            # and those locals' weakref callbacks are fired only when another
            # thread accesses the locals and finds the thread state is stale,
            # see http://bugs.python.org/issue1868. Accessing the thread
            # local from the main thread is a necessary part of this test, and
            # realistic: in a multithreaded web server a new thread will access
            # Pool._ident._local soon after an old thread has died.
            cx_pool._ident.get()

        # Pool reclaimed the socket
        self.assertEqual(1, len(cx_pool.sockets))
        self.assertEqual(the_sock[0], id(one(cx_pool.sockets).sock))
        self.assertEqual(0, len(cx_pool._tid_to_sock))
Code Example #39
File: client-test.py  Project: gdamjan/Scalable
import os, time
from gevent import Greenlet, sleep

l = []
t1 = time.time()
for i in xrange(0, 100000):  # also try a million :)
   t = Greenlet(sleep, 30)
   t.start()
   l.append(t)

t2 = time.time()
print "%i coroutines created in %d seconds." % (len(l), t2-t1)

for t in l:
   t.join()

Code Example #40
    def test_socket_reclamation(self):
        if sys.platform.startswith('java'):
            raise SkipTest("Jython can't do socket reclamation")

        # Check that if a thread starts a request and dies without ending
        # the request, that the socket is reclaimed into the pool.
        cx_pool = self.get_pool(
            pair=(host,port),
            max_size=10,
            net_timeout=1000,
            conn_timeout=1000,
            use_ssl=False,
        )

        self.assertEqual(0, len(cx_pool.sockets))

        lock = None
        the_sock = [None]

        def leak_request():
            self.assertEqual(NO_REQUEST, cx_pool._get_request_state())
            cx_pool.start_request()
            self.assertEqual(NO_SOCKET_YET, cx_pool._get_request_state())
            sock_info = cx_pool.get_socket()
            self.assertEqual(sock_info, cx_pool._get_request_state())
            the_sock[0] = id(sock_info.sock)
            cx_pool.maybe_return_socket(sock_info)

            if not self.use_greenlets:
                lock.release()

        if self.use_greenlets:
            g = Greenlet(leak_request)
            g.start()
            g.join(1)
            self.assertTrue(g.ready(), "Greenlet is hung")

            # In Gevent after 0.13.8, join() returns before the Greenlet.link
            # callback fires. Give it a moment to reclaim the socket.
            gevent.sleep(0.1)
        else:
            lock = thread.allocate_lock()
            lock.acquire()

            # Start a thread WITHOUT a threading.Thread - important to test that
            # Pool can deal with primitive threads.
            thread.start_new_thread(leak_request, ())

            # Join thread
            acquired = lock.acquire()
            self.assertTrue(acquired, "Thread is hung")

            # Make sure thread is really gone
            time.sleep(1)

            if 'PyPy' in sys.version:
                gc.collect()

            # Access the thread local from the main thread to trigger the
            # ThreadVigil's delete callback, returning the request socket to
            # the pool.
            # In Python 2.7.0 and lesser, a dead thread's locals are deleted
            # and those locals' weakref callbacks are fired only when another
            # thread accesses the locals and finds the thread state is stale,
            # see http://bugs.python.org/issue1868. Accessing the thread
            # local from the main thread is a necessary part of this test, and
            # realistic: in a multithreaded web server a new thread will access
            # Pool._ident._local soon after an old thread has died.
            cx_pool._ident.get()

        # Pool reclaimed the socket
        self.assertEqual(1, len(cx_pool.sockets))
        self.assertEqual(the_sock[0], id(one(cx_pool.sockets).sock))
        self.assertEqual(0, len(cx_pool._tid_to_sock))
Code Example #41
File: gevent_demo.py  Project: cherry-wb/quietheart
#!/usr/bin/python
import random
import gevent
from gevent import Greenlet

def thrFunc(s):
    print "%s start.\n" %s
    gevent.sleep(2)
    print "%s end.\n" %s

gThr = Greenlet(thrFunc,"startjoin1")
gThr.start()
gThr.join()
gThr = Greenlet(thrFunc,"startjoin2")
gThr.start()
gThr.join()

print "+++++++++"
gThr = Greenlet(thrFunc,"start1")
gThr.start()
gThr = Greenlet(thrFunc,"start2")
gThr.start()
gThr = Greenlet(thrFunc,"start3")
gThr.start()
gThr = Greenlet(thrFunc,"start4")
gThr.start()

print "+++++++++"
gThr1 = Greenlet.spawn(thrFunc,"spawn1")
gThr2 = Greenlet.spawn(thrFunc,"spawn2")
gThr2.join()