def test_queue_objects(self):
    """Round-trip values of several types through a RedisQueue.

    Verifies that queue serialization preserves both the value and the
    Python type for int, str, float, list and dict payloads.
    """
    # Consistency fix: the sibling tests pass host=REDIS_HOST so the
    # suite can run against a non-default Redis server; this test
    # omitted it and would silently hit localhost instead.
    queue = RedisQueue("test-queue", maxsize=100, host=REDIS_HOST)
    # put int
    queue.put(1)
    v = queue.get_nowait()
    self.assertEqual(v, 1)
    self.assertIsInstance(v, int)
    # put str
    queue.put("a")
    v = queue.get_nowait()
    self.assertEqual(v, "a")
    self.assertIsInstance(v, str)
    # put float
    queue.put(1.)
    v = queue.get_nowait()
    self.assertEqual(v, 1.)
    self.assertIsInstance(v, float)
    # put list
    queue.put([1, 3, 4, 5, "a", "b", "c", 1., 2., 3.])
    v = queue.get_nowait()
    self.assertEqual(v, [1, 3, 4, 5, "a", "b", "c", 1., 2., 3.])
    self.assertIsInstance(v, list)
    # put dict
    queue.put({"x": "y"})
    v = queue.get_nowait()
    self.assertEqual(v, {"x": "y"})
    self.assertIsInstance(v, dict)
def test_queue_objects(self):
    """Values of assorted types survive a put/get round trip intact."""
    queue = RedisQueue("test-queue", maxsize=100, host=REDIS_HOST)
    # Each sample is (payload, expected type after deserialization).
    samples = [
        (1, int),
        ("a", six.string_types),
        (1., float),
        ([1, 3, 4, 5, "a", "b", "c", 1., 2., 3.], list),
        ({"x": "y"}, dict),
    ]
    for payload, expected_type in samples:
        queue.put(payload)
        result = queue.get_nowait()
        assert result == payload
        assert isinstance(result, expected_type)
def test_queue_objects(self):
    """A RedisQueue returns each stored value with its type preserved."""
    queue = RedisQueue("test-queue", maxsize=100, host=REDIS_HOST)
    # (value, expected type) pairs covering the serializable basics.
    for value, kind in (
        (1, int),
        ("a", str),
        (1., float),
        ([1, 3, 4, 5, "a", "b", "c", 1., 2., 3.], list),
        ({"x": "y"}, dict),
    ):
        queue.put(value)
        got = queue.get_nowait()
        self.assertEqual(got, value)
        self.assertIsInstance(got, kind)
def test_queue_objects(self):
    """Putting then getting preserves value and type for basic objects."""
    queue = RedisQueue("test-queue", maxsize=100, host=REDIS_HOST)

    def round_trip(value):
        # Push a value through the queue and hand back what comes out.
        queue.put(value)
        return queue.get_nowait()

    item = round_trip(1)
    assert item == 1
    assert isinstance(item, int)

    item = round_trip("a")
    assert item == "a"
    assert isinstance(item, str)

    item = round_trip(1.)
    assert item == 1.
    assert isinstance(item, float)

    item = round_trip([1, 3, 4, 5, "a", "b", "c", 1., 2., 3.])
    assert item == [1, 3, 4, 5, "a", "b", "c", 1., 2., 3.]
    assert isinstance(item, list)

    item = round_trip({"x": "y"})
    assert item == {"x": "y"}
    assert isinstance(item, dict)
def get_file_contents(self, items, active=True):
    """Push *items* onto a fresh queue and return the backing file's bytes.

    When *active* is false, a single item is drained again before the
    file is read, so the contents reflect one consumed entry.
    """
    data, queue = self.queue(create=True)
    for entry in items:
        queue.put_nowait(entry)
    if not active:
        queue.get_nowait()
    # Rewind so the read captures everything written from the start.
    data.seek(0)
    return data.read()
def main():
    """Benchmark put/get throughput of each persistent queue flavour.

    For every (queue implementation, payload size) combination this
    times three operation mixes over a temp directory and prints
    ops/sec and kB/sec figures.
    """
    import time
    import shutil
    import tempfile  # was relied on from module scope; import locally like the rest
    from itertools import product

    # One factory per implementation so each run starts from a fresh dir.
    def mk_journaled_queue(basedir):
        return JournaledPersistentQueue(basedir + "/journal", create=True)

    def mk_persistent_queue(basedir):
        return PersistentQueue(basedir)

    def mk_comp_persistent_queue(basedir):
        return CompressedPersistentQueue(basedir)

    def mk_json_persistent_queue(basedir):
        return JSONPersistentQueue(basedir)

    queues = [
        ("JSONPersistentQueue", mk_json_persistent_queue),
        ("PersistentQueue", mk_persistent_queue),
        ("CompressedPersistentQueue", mk_comp_persistent_queue),
        ("JournaledPersistentQueue", mk_journaled_queue),
    ]
    tests = [
        ("1-byte", 20000, b"x"),
        ("1k-bytes", 20000, b"x" * 1000),
        ("10k-bytes", 20000, b"x" * 10000),
    ]
    for (qname, qfactory), (name, num, data) in product(queues, tests):
        basedir = tempfile.mkdtemp(prefix="pqueue-%s-%s-" % (qname, name, ))
        try:
            queue = qfactory(basedir)
            operations = [
                ("put/get", lambda: [queue.put_nowait(data), queue.get_nowait()]),
                ("puts", lambda: queue.put_nowait(data)),
                ("gets", lambda: queue.get_nowait()),
            ]
            for op_name, op in operations:
                start = time.time()
                try:
                    for _ in range(num):
                        op()
                finally:
                    # Capture the end time even if an op raises mid-run.
                    end = time.time()
                # Robustness fix: on platforms with a low-resolution
                # clock the measured duration can be 0.0, which would
                # raise ZeroDivisionError in the stats below.
                duration = max(end - start, 1e-9)
                stats = "(%d ops/sec, %0.02f kB/sec)" % (
                    num / duration,
                    (len(data) * num / 1000.0) / duration,
                )
                print(qname, name, num, "%s:" % (op_name, ), duration, stats)
            queue.close()
        finally:
            # Always clean up the temp directory, even on failure.
            shutil.rmtree(basedir)
def test_queue_size(self):
    """maxsize is enforced: puts raise Full at capacity, gets raise Empty."""
    # Debug output to show which Redis server the test is talking to.
    print(REDIS_HOST)
    print(os.getenv('REDIS_PORT_6379_TCP_ADDR'))

    # A queue of capacity one overflows on the second put.
    small = RedisQueue("test-queue-size-1", maxsize=1, host=REDIS_HOST)
    small.put(1)
    with pytest.raises(six.moves.queue.Full):
        small.put(1)

    # A queue of capacity two overflows on the third put...
    larger = RedisQueue("test-queue-size-2", maxsize=2, host=REDIS_HOST)
    larger.put(1)
    larger.put(1)
    with pytest.raises(six.moves.queue.Full):
        larger.put(1)
    # ...and underflows once fully drained.
    larger.get()
    larger.get()
    with pytest.raises(six.moves.queue.Empty):
        larger.get_nowait()
def test_queue_len(self):
    """queue.length grows by one per put and shrinks by one per get."""
    # Consistency fix: other tests in the suite target the server at
    # REDIS_HOST; without host= this one silently used the default host.
    queue = RedisQueue("test-queue-len", maxsize=100, host=REDIS_HOST)
    self.assertEqual(queue.length, 0)
    queue.put(1)
    self.assertEqual(queue.length, 1)
    queue.put(1)
    self.assertEqual(queue.length, 2)
    queue.put(1)
    self.assertEqual(queue.length, 3)
    queue.get_nowait()
    self.assertEqual(queue.length, 2)
    queue.get_nowait()
    self.assertEqual(queue.length, 1)
    queue.get_nowait()
    self.assertEqual(queue.length, 0)
def test_queue_len(self):
    """length tracks each put and get one step at a time."""
    queue = RedisQueue("test-queue-len", maxsize=100, host=REDIS_HOST)
    assert queue.length == 0
    # Three puts grow the length 1 -> 2 -> 3.
    for expected in (1, 2, 3):
        queue.put(1)
        assert queue.length == expected
    # Three gets shrink it back 2 -> 1 -> 0.
    for expected in (2, 1, 0):
        queue.get_nowait()
        assert queue.length == expected
def test_queue_len(self):
    """The length property mirrors the number of queued items exactly."""
    queue = RedisQueue("test-queue-len", maxsize=100, host=REDIS_HOST)
    self.assertEqual(queue.length, 0)
    # Filling: length climbs with every put.
    for size_after_put in range(1, 4):
        queue.put(1)
        self.assertEqual(queue.length, size_after_put)
    # Draining: length falls with every get.
    for size_after_get in range(2, -1, -1):
        queue.get_nowait()
        self.assertEqual(queue.length, size_after_get)
def main():
    """Throughput benchmark for the persistent queue implementations.

    Runs three operation mixes (put/get pairs, puts only, gets only)
    for every implementation/payload-size combination and reports
    ops/sec and kB/sec for each.
    """
    import time
    import shutil
    from itertools import product

    # Factories paired with display names; each builds a queue rooted
    # in a fresh scratch directory.
    implementations = [
        ("JSONPersistentQueue", lambda d: JSONPersistentQueue(d)),
        ("PersistentQueue", lambda d: PersistentQueue(d)),
        ("CompressedPersistentQueue", lambda d: CompressedPersistentQueue(d)),
        ("JournaledPersistentQueue",
         lambda d: JournaledPersistentQueue(d + "/journal", create=True)),
    ]
    payloads = [
        ("1-byte", 20000, b"x"),
        ("1k-bytes", 20000, b"x" * 1000),
        ("10k-bytes", 20000, b"x" * 10000),
    ]
    for (impl_name, make_queue), (case_name, count, payload) in product(
            implementations, payloads):
        workdir = tempfile.mkdtemp(prefix="pqueue-%s-%s-" % (impl_name, case_name, ))
        try:
            queue = make_queue(workdir)
            scenarios = [
                ("put/get", lambda: [queue.put_nowait(payload), queue.get_nowait()]),
                ("puts", lambda: queue.put_nowait(payload)),
                ("gets", lambda: queue.get_nowait()),
            ]
            for scenario_name, step in scenarios:
                begin = time.time()
                try:
                    for _ in range(count):
                        step()
                finally:
                    # Record the stop time even if a step raised.
                    finish = time.time()
                elapsed = finish - begin
                summary = "(%d ops/sec, %0.02f kB/sec)" % (
                    count / elapsed,
                    (len(payload) * count / 1000.0) / elapsed,
                )
                print(impl_name, case_name, count,
                      "%s:" % (scenario_name, ), elapsed, summary)
            queue.close()
        finally:
            # Scratch directory is removed no matter what happened above.
            shutil.rmtree(workdir)