Пример #1
0
    def test_wait_for_ssh(self, mock_conn):
        """Test the _wait_for_ssh() helper func.

        ``mock_conn`` patches the connection-attempt callable; the queue
        supplies ``(host, port)`` endpoints for the helper to try.
        """

        # if the connection attempts fail
        mock_conn.return_value = False
        q = g_queue.JoinableQueue(items=[('dummy.host', 1234), ('host', 1234)])
        # timeout=0 keeps the helper from sleeping between attempts
        self.ssh_mod._wait_for_ssh(q, 'ssh', 'cmd', timeout=0, attempts=10)
        # every allowed attempt was spent without a successful connection
        self.assertEqual(mock_conn.call_count, 10)
        # check that "task_done()" has been invoked as many times as elements
        # have been read from the queue
        with self.assertRaises(ValueError):
            q.task_done()

        # if the connection attempt is successful
        mock_conn.reset_mock()
        mock_conn.return_value = True
        q = g_queue.JoinableQueue(items=[('dummy.host', 1234), ('host', 1234)])
        self.ssh_mod._wait_for_ssh(q, 'ssh', 'cmd', timeout=0, attempts=5)
        # success on the first try: no further attempts are made
        self.assertEqual(mock_conn.call_count, 1)

        # first two attempts fail, then new endpoint info is taken from the
        # queue and the new attempt is successful
        mock_conn.reset_mock()
        # connection attempt successful only at the third (host_B) attempt
        mock_conn.side_effect = lambda host, port: host == 'host_B'
        q = g_queue.JoinableQueue(items=[('dummy.host',
                                          1234), ('host_A',
                                                  1234), ('host_B', 1234)])

        self.ssh_mod._wait_for_ssh(q, 'ssh', 'cmd', timeout=0, attempts=5)
        self.assertEqual(mock_conn.call_count, 3)

        # again, task_done() must have balanced every get() from the queue
        with self.assertRaises(ValueError):
            q.task_done()
Пример #2
0
  def __init__(self, num_workers, callable, poll_timeout_secs=10, name=None,
               queue_size=None):
    """Create a worker pool driven by gevent greenlets.

    Args:
      num_workers - Maximum number of greenlets running concurrently in this
          pool.
      callable - Invoked once per task input. Should return the tuple passed
          on to every registered consumer pool, or None when nothing should
          be forwarded.
      poll_timeout_secs - Interval at which the pool checks whether it has
          been signaled to finish.
      name - Optional pool name, used in logging.
      queue_size - Capacity of the input queue. None means unbounded, so
          adding a task input never blocks the caller; a value > 0 means the
          queue can fill up, making adds block until space frees.
    """
    # Identity and configuration.
    self.name = name
    self.callable = callable
    self.poll_timeout_secs = poll_timeout_secs

    # Concurrency machinery: input queue, driver greenlet, shutdown event,
    # and the greenlet pool that runs the actual work.
    self.queue = queue.JoinableQueue(maxsize=queue_size)
    self.driver = gevent.Greenlet(self._Driver)
    self.finish_signaled = gevent.event.Event()
    self.pool = pool.Pool(num_workers)

    # Connected producer/consumer pools.
    self.consumer_pools = []
    self.producer_pools = []
    self.producer_pools_finished = 0

    # Metric counters; attached later, unset until then.
    self.counter_time_since_add = None
    self.counter_num_inputs_added = None
    self.counter_queue_size_at_processing_time = None
Пример #3
0
    def __init__(self,
                 write_key=None,
                 host=None,
                 debug=False,
                 max_queue_size=10000,
                 send=True,
                 on_error=None):
        """Validate the key, build the queue and consumer, and (optionally)
        start the consumer thread."""
        require('write_key', write_key, string_types)

        # Plain configuration attributes.
        self.write_key = write_key
        self.on_error = on_error
        self.debug = debug
        self.send = send

        # Bounded queue feeding the background consumer.
        self.queue = queue.JoinableQueue(max_queue_size)
        self.consumer = Consumer(self.queue,
                                 write_key,
                                 host=host,
                                 on_error=on_error)

        if debug:
            self.log.setLevel(logging.DEBUG)

        # When sending is disabled the consumer is simply never started.
        if send:
            # Let the consumer thread exit cleanly at interpreter shutdown;
            # this avoids exceptions from a daemon thread outliving the
            # interpreter. It does NOT flush the queue — callers must still
            # invoke flush() to guarantee delivery of all messages.
            atexit.register(self.join)
            self.consumer.start()
Пример #4
0
def _wait_for_app(ssh, app, command, queue=None):
    """Use websockets to wait for the app to start.

    Spawns ``_wait_for_ssh`` to drain endpoint candidates from ``queue``
    and a websocket loop that feeds newly discovered ``(host, port)``
    endpoints into it; blocks on ``queue.join()`` until the work is done.
    """
    # JoinableQueue is filled with a dummy item otherwise queue.join() unblocks
    # immediately wo/ actually letting the ws_loop and _wait_for_ssh to run.
    queue = queue or g_queue.JoinableQueue(items=[('dummy.host', 1234)])

    def on_message(result, queue=queue):
        """Callback to process trace message."""
        # fixed typo in the debug message: 'trase' -> 'trace'
        _LOGGER.debug('Endpoint trace msg: %r', result)
        queue.put((result['host'], result['port']))
        return False

    def on_error(result):
        """Callback to process errors."""
        click.echo('Error: %s' % result['_error'], err=True)

    try:
        gevent.spawn(_wait_for_ssh, queue, ssh, command)
        gevent.spawn(
            ws_client.ws_loop, context.GLOBAL.ws_api(), {
                'topic': '/endpoints',
                'filter': app,
                'proto': 'tcp',
                'endpoint': 'ssh'
            }, False, on_message, on_error)

        # Block until every queued endpoint has been marked task_done().
        queue.join()

    except ws_client.WSConnectionError:
        cli.bad_exit('Could not connect to any Websocket APIs')
Пример #5
0
 def __init__(self):
     # Allow enqueuing of several API calls worth of work, which
     # right now allow 1000 key deletions per job.
     self.PAGINATION_MAX = 1000
     # Bounded to 10 jobs' worth of items so producers back off instead of
     # growing the queue without limit.
     self._q = queue.JoinableQueue(self.PAGINATION_MAX * 10)
     # Background greenlet that drains the queue via self._work.
     self._worker = gevent.spawn(self._work)
     # Remember the spawning greenlet (presumably for error propagation —
     # TODO confirm against the rest of the class).
     self._parent_greenlet = gevent.getcurrent()
     self.closing = False
Пример #6
0
 def test_task_done(self):
     """unfinished_tasks increments on get() and drops after task_done()."""
     q = queue.JoinableQueue(0)
     sentinel = object()
     gevent.spawn(q.put, sentinel)
     received = q.get()
     assert received is sentinel, (received, sentinel)
     assert q.unfinished_tasks == 1, q.unfinished_tasks
     q.task_done()
     assert q.unfinished_tasks == 0, q.unfinished_tasks
Пример #7
0
 def test_task_done(self):
     """A single put/get leaves one unfinished task; task_done() clears it."""
     q = queue.JoinableQueue()
     marker = object()
     gevent.spawn(q.put, marker)
     self.assertIs(q.get(), marker)
     self.assertEqual(q.unfinished_tasks, 1)
     q.task_done()
     self.assertEqual(q.unfinished_tasks, 0)
Пример #8
0
 def test_queue_task_done(self):
     # Test to make sure a queue task completed successfully.
     # task_done() with no preceding get() must raise ValueError (the
     # unfinished-task count would go negative).  Uses assertRaises
     # instead of the older try/except/else/fail pattern for a clearer
     # failure message.
     q = Queue.JoinableQueue()  # self.type2test()
     # XXX the same test in subclasses
     with self.assertRaises(ValueError):
         q.task_done()
Пример #9
0
 def test_queue_join(self):
     # Test that a queue join()s successfully, and before anything else
     # (done twice for insurance).
     q = Queue.JoinableQueue()  # self.type2test()
     # XXX the same test in subclass
     self.queue_join_test(q)
     self.queue_join_test(q)
     # After both joins, an extra task_done() must raise ValueError (the
     # task count would go negative).  assertRaises replaces the older
     # try/except/else/fail idiom.
     with self.assertRaises(ValueError):
         q.task_done()
Пример #10
0
    def test_queue_task_done_with_items(self):
        """Constructor ``items`` pre-loads the unfinished-task count.

        join() keeps returning False until every seeded item has been
        retrieved and marked done; one extra task_done() must raise.
        """
        # XXX the same test in subclass
        seeded = [1, 2, 3]
        q = Queue.JoinableQueue(items=seeded)
        for expected in seeded:
            self.assertFalse(q.join(timeout=0.001))
            self.assertEqual(q.get(), expected)
            q.task_done()

        try:
            q.task_done()
        except ValueError:
            pass
        else:
            self.fail("Did not detect task count going negative")
        self.assertTrue(q.join(timeout=0.001))
Пример #11
0
def test_gevent_based_observer(temporary_dir):
    """Integration test: DirWatcher reports a file write as 'modified'."""
    from populus.utils.observers.observers_gevent import DirWatcher
    import gevent
    from gevent import queue

    file_path_a = os.path.join(temporary_dir, 'file-a.txt')

    ensure_file_exists(file_path_a)

    # Watcher callback pushes each change event onto this queue.
    change_queue = queue.JoinableQueue()

    def change_cb(*args):
        change_queue.put(args)

    watcher = DirWatcher(temporary_dir, change_cb)
    watcher.start()

    def empty_queue():
        # Drain any startup noise (e.g. events from file creation above)
        # until a non-blocking get() finds the queue empty.
        while True:
            try:
                gevent.sleep(1)
                change_queue.get(False)
            except queue.Empty:
                break
            else:
                change_queue.task_done()

    def assert_event(*expected):
        # Give the watcher a beat to deliver, then require the next event
        # to match exactly.
        gevent.sleep(1)
        actual = change_queue.get(block=True, timeout=2)
        assert actual == expected
        change_queue.task_done()

    empty_queue()

    # write existing root file
    with open(file_path_a, 'w') as file_a:
        file_a.write('test')
    assert_event(file_path_a, 'modified')
Пример #12
0
import urllib2

def fetch_title(url):
    """Download *url*, parse it as a feed, and return the feed title.

    Returns '' when the feed carries no title.  (Python 2 code: urllib2
    and print statements.)
    """
    print "Running %s" % url
    data = urllib2.urlopen(url).read()
    print "Parsing %s" % url
    d = feedparser.parse(data)
    print "Parsed %s" % d.feed.get('title', '')
    return d.feed.get('title', '')

def worker():
    """Greenlet body: consume URLs from the shared queue ``q`` forever."""
    while True:
        url = q.get()
        try:
            fetch_title(url)
        finally:
            # Mark the item done even if fetching raised, so q.join()
            # can eventually return.
            q.task_done()

if __name__ == '__main__':
    q = queue.JoinableQueue()
    # Spawn five worker greenlets that all drain the shared queue.
    for i in range(5):
         gevent.spawn(worker)

    # NOTE(review): the separator is the two-character sequence '/n', not a
    # newline escape; split('/n') matches it, but '\n' was likely intended.
    for url in "http://www.43folders.com/rss.xml/nhttp://feeds.feedburner.com/43folders/nhttp://www.43folders.com/rss.xml/nhttp://feeds.feedburner.com/43folders/nhttp://feeds.feedburner.com/AMinuteWithBrendan/nhttp://feeds.feedburner.com/AMinuteWithBrendan/nhttp://www.asianart.org/feeds/Lectures,Classes,Symposia.xml/nhttp://www.asianart.org/feeds/Performances.xml/nhttp://feeds.feedburner.com/ajaxian/nhttp://ajaxian.com/index.xml/nhttp://al3x.net/atom.xml/nhttp://feeds.feedburner.com/AmericanDrink/nhttp://feeds.feedburner.com/eod_full/nhttp://feeds.feedburner.com/typepad/notes/nhttp://feeds.dashes.com/AnilDash/nhttp://rss.sciam.com/assignment-impossible/feed/nhttp://blogs.scientificamerican.com/assignment-impossible//nhttp://feeds.feedburner.com/Beautiful-Pixels/nhttp://feeds.feedburner.com/Beautiful-Pixels/nhttp://www.betabeat.com/feed/".split('/n'):
            print "Spawning: %s" % url
            q.put(url)

    q.join()  # block until all tasks are done


Пример #13
0
 def test_issue_45(self):
     """join() must return immediately when no jobs were ever queued."""
     self.switch_expected = False
     empty_q = queue.JoinableQueue()
     empty_q.join()
Пример #14
0
def make_put_interrupt(queue):
    """Build a TestPutInterrupt case bound to the given queue instance."""

    class TestPutInterrupt(GenericGetTestCase):

        # Filling the queue first makes the final put() block and time out.
        Timeout = Full

        def wait(self, timeout):
            # Saturate the queue, then attempt one more put with a timeout.
            while not queue.full():
                queue.put(1)
            return queue.put(2, timeout=timeout)

    # Tag the class name with the queue flavour for readable test output.
    suffix = queue.__class__.__name__
    TestPutInterrupt.__name__ += '_' + suffix
    return TestPutInterrupt


# Generate one interrupt-test class per queue flavour and export each at
# module scope so the test runner can discover it by name.
for obj in [
        queue.Queue(1),
        queue.JoinableQueue(1),
        queue.LifoQueue(1),
        queue.PriorityQueue(1),
        queue.Channel()
]:
    klass = make_put_interrupt(obj)
    globals()[klass.__name__] = klass
del klass, obj  # drop loop temporaries from module scope

del GenericGetTestCase  # the base class itself must not be collected as a test

if __name__ == '__main__':
    main()
Пример #15
0
    globals()[klass.__name__] = klass
del klass, queue_type


def make_put_interrupt(queue):
    # Build a test case bound to this particular queue instance; the class
    # name gets the queue type appended for discovery and reporting.

    class TestPutInterrupt(GenericGetTestCase):

        Timeout = Full

        def wait(self, timeout):
            # Fill the queue so the final put(2, ...) must block and then
            # raise Full when the timeout expires.
            while not queue.full():
                queue.put(1)
            return queue.put(2, timeout=timeout)

    TestPutInterrupt.__name__ += '_' + queue.__class__.__name__
    return TestPutInterrupt


# Generate one interrupt-test class per queue flavour and export each at
# module scope so the test runner can discover it by name.
for obj in [queue.Queue(1), queue.JoinableQueue(1), queue.LifoQueue(1), queue.PriorityQueue(1), queue.Channel()]:
    klass = make_put_interrupt(obj)
    globals()[klass.__name__] = klass
del klass, obj  # drop loop temporaries from module scope


del GenericGetTestCase  # the base class itself must not be collected as a test


if __name__ == '__main__':
    main()
Пример #16
0
    def __init__(self):
        """Start with an empty task registry and a fresh joinable queue."""
        self._all_tasks = {}
        self._queue = queue.JoinableQueue()
Пример #17
0
class Task(object):
    """A deferred call: a function plus the arguments to invoke it with."""

    def __init__(self, func, *args, **kwargs):
        self.func = func
        self.args = args
        self.kwargs = kwargs

    def __call__(self):
        """Run the wrapped function with the stored arguments."""
        self.func(*self.args, **self.kwargs)


def consumer():
    """Greenlet body: execute tasks from ``_task_queue`` forever.

    task_done() is called in a ``finally`` so ``_task_queue.join()`` can
    complete even when a task raises.
    """
    while True:
        # A blocking get() never raises queue.Empty, so the original
        # try/except queue.Empty wrapper was an unreachable branch and
        # has been removed.
        task = _task_queue.get()
        try:
            task()
        finally:
            _task_queue.task_done()


def add_task(task):
    """Enqueue *task* for the consumer greenlet without blocking."""
    _task_queue.put_nowait(task)


# Module-level state: registry of running tasks, the shared task queue, and
# the single consumer greenlet started at import time.
_running_tasks = {}
_task_queue = queue.JoinableQueue()
_consumer = gevent.spawn(consumer)
Пример #18
0
from ..compat import py3k
#assert not py3k, "this script requires python 2."

import psycogreen.gevent
from ..profile import avg_rec_rate
from .. import util
import psycopg2
from sqlalchemy.engine import url
import gevent
from gevent import queue
psycogreen.gevent.patch_psycopg()  # make psycopg2 cooperative under gevent

# Module globals: CLI options and connect() are populated later by setup();
# the work queue and throughput monitor are created at import time.
options = None
work_queue = queue.JoinableQueue()
monitor = avg_rec_rate()
connect = None


def setup(opt):
    global options
    options = opt

    db_url = url.make_url(options.dburl)

    global connect

    def connect():
        conn = psycopg2.connect(
            user=db_url.username,
            password=db_url.password,
            host=db_url.host,