def test_worker_fills_internal_queue_only_until_maximum_queue_size():
    """ Test read workers fill internal queue only to maximum size """
    conn = boto3.client('sqs', region_name='us-east-1')
    # Set visibility timeout low to improve test speed
    queue_url = conn.create_queue(
        QueueName="tester",
        Attributes={'VisibilityTimeout': '1'})['QueueUrl']
    message = json.dumps({
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 'Test message',
        },
    })
    # Send one more message than the internal queue can hold.
    # Loop variable is unused, so name it `_`.
    for _ in range(3):
        conn.send_message(QueueUrl=queue_url, MessageBody=message)

    internal_queue = Queue(maxsize=2)
    worker = ReadWorker(queue_url, internal_queue, BATCHSIZE)
    worker.read_message()

    # The internal queue should only have two messages on it
    internal_queue.get(timeout=1)
    internal_queue.get(timeout=1)

    try:
        internal_queue.get(timeout=1)
    except Empty:
        pass
    else:
        raise AssertionError("The internal queue should be empty")
def test_worker_fills_internal_queue_from_celery_task():
    """ Test read workers fill internal queue with celery tasks """
    # Publish a raw celery-formatted payload to SQS
    sqs_connection = boto.connect_sqs()
    sqs_queue = sqs_connection.create_queue("tester")
    raw_body = '{"body": "KGRwMApTJ3Rhc2snCnAxClMndGVzdHMudGFza3MuaW5kZXhfaW5jcmVtZW50ZXInCnAyCnNTJ2Fy\\nZ3MnCnAzCihscDQKc1Mna3dhcmdzJwpwNQooZHA2ClMnbWVzc2FnZScKcDcKUydUZXN0IG1lc3Nh\\nZ2UyJwpwOApzcy4=\\n", "some stuff": "asdfasf"}'
    sqs_message = Message()
    sqs_message.set_body(raw_body)
    sqs_queue.write(sqs_message)

    # A single read should move the decoded task onto the internal queue
    internal_queue = Queue()
    ReadWorker(sqs_queue, internal_queue, BATCHSIZE).read_message()

    packed = internal_queue.get(timeout=1)
    decoded = decode_message(packed['message'])
    decoded.should.equal({
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 'Test message2',
        },
    })
def test_worker_fills_internal_queue_from_celery_task():
    """ Test read workers fill internal queue with celery tasks """
    # Publish a raw celery-formatted payload to SQS
    sqs_client = boto3.client('sqs', region_name='us-east-1')
    queue_url = sqs_client.create_queue(QueueName="tester")['QueueUrl']
    payload = (
        '{"body": "KGRwMApTJ3Rhc2snCnAxClMndGVzdHMudGFza3MuaW5kZXhfa'
        'W5jcmVtZW50ZXInCnAyCnNTJ2Fy\\nZ3MnCnAzCihscDQKc1Mna3dhcmdzJw'
        'pwNQooZHA2ClMnbWVzc2FnZScKcDcKUydUZXN0IG1lc3Nh\\nZ2UyJwpwOAp'
        'zcy4=\\n", "some stuff": "asdfasf"}'
    )
    sqs_client.send_message(QueueUrl=queue_url, MessageBody=payload)

    # A single read should move the decoded task onto the internal queue
    internal_queue = Queue()
    reader = ReadWorker(queue_url, internal_queue, BATCHSIZE, parent_id=1)
    reader.read_message()

    decoded = decode_message(internal_queue.get(timeout=1)['message'])
    decoded.should.equal({
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 'Test message2',
        },
    })
def test_worker_fills_internal_queue_and_respects_visibility_timeouts():
    """ Test read workers respect visibility timeouts """
    # Capture pyqs log output so the warnings can be asserted on
    logger = logging.getLogger("pyqs")
    logger.handlers.append(MockLoggingHandler())

    # SQS queue with a short visibility timeout
    sqs_client = boto3.client('sqs', region_name='us-east-1')
    queue_url = sqs_client.create_queue(
        QueueName="tester",
        Attributes={'VisibilityTimeout': '1'})['QueueUrl']

    # Add messages (three celery-formatted payloads)
    body = json.dumps({
        "body": ("KGRwMApTJ3Rhc2snCnAxClMndGVzdHMudGFza3MuaW5kZXhfaW5jcmVtZW"
                 "50ZXInCnAyCnNTJ2Fy\nZ3MnCnAzCihscDQKc1Mna3dhcmdzJwpwNQooZHA"
                 "2ClMnbWVzc2FnZScKcDcKUydUZXN0IG1lc3Nh\nZ2UyJwpwOApzcy4=\n"),
        "some stuff": "asdfasf",
    })
    for _ in range(3):
        sqs_client.send_message(QueueUrl=queue_url, MessageBody=body)

    # Read with an internal queue too small to hold everything
    reader = ReadWorker(queue_url, Queue(maxsize=1), BATCHSIZE, parent_id=1)
    reader.read_message()

    # Both timeout warnings should have been logged, in order
    warnings = logger.handlers[0].messages['warning']
    warnings[0].should.contain(
        "Timed out trying to add the following message to the internal queue")
    warnings[1].should.contain(
        "Clearing Local messages since we exceeded their visibility_timeout")
def test_worker_fills_internal_queue_only_until_maximum_queue_size():
    """ Test read workers fill internal queue only to maximum size """
    conn = boto.connect_sqs()
    queue = conn.create_queue("tester")
    queue.set_timeout(1)  # Set visibility timeout low to improve test speed
    message = Message()
    body = json.dumps({
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 'Test message',
        },
    })
    message.set_body(body)
    # Send one more message than the internal queue can hold.
    # Loop variable is unused, so name it `_`.
    for _ in range(3):
        queue.write(message)

    internal_queue = Queue(maxsize=2)
    worker = ReadWorker(queue, internal_queue, BATCHSIZE)
    worker.read_message()

    # The internal queue should only have two messages on it
    internal_queue.get(timeout=1)
    internal_queue.get(timeout=1)

    try:
        internal_queue.get(timeout=1)
    except Empty:
        pass
    else:
        raise AssertionError("The internal queue should be empty")
def test_worker_fills_internal_queue():
    """ Test read workers fill internal queue """
    # Queue a single task message on SQS
    sqs_client = boto3.client('sqs', region_name='us-east-1')
    queue_url = sqs_client.create_queue(QueueName="tester")['QueueUrl']
    task_payload = {
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 'Test message',
        },
    }
    sqs_client.send_message(
        QueueUrl=queue_url, MessageBody=json.dumps(task_payload))

    # A single read should move the message onto the internal queue
    internal_queue = Queue()
    ReadWorker(queue_url, internal_queue, BATCHSIZE).read_message()

    packed = internal_queue.get(timeout=1)
    decode_message(packed['message']).should.equal(task_payload)
def test_worker_fills_internal_queue():
    """ Test read workers fill internal queue """
    # Queue a single task message on SQS
    client = boto3.client('sqs', region_name='us-east-1')
    url = client.create_queue(QueueName="tester")['QueueUrl']
    expected_task = {
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 'Test message',
        },
    }
    client.send_message(QueueUrl=url, MessageBody=json.dumps(expected_task))

    # A single read should move the message onto the internal queue
    internal_queue = Queue()
    reader = ReadWorker(url, internal_queue, BATCHSIZE, parent_id=1)
    reader.read_message()

    packed = internal_queue.get(timeout=1)
    decoded = decode_message(packed['message'])
    decoded.should.equal(expected_task)
def test_worker_fills_internal_queue_from_celery_task():
    """ Test read workers fill internal queue with celery tasks """
    # Publish a raw celery-formatted payload to SQS
    connection = boto.connect_sqs()
    sqs_queue = connection.create_queue("tester")
    sqs_message = Message()
    sqs_message.set_body('{"body": "KGRwMApTJ3Rhc2snCnAxClMndGVzdHMudGFza3MuaW5kZXhfaW5jcmVtZW50ZXInCnAyCnNTJ2Fy\\nZ3MnCnAzCihscDQKc1Mna3dhcmdzJwpwNQooZHA2ClMnbWVzc2FnZScKcDcKUydUZXN0IG1lc3Nh\\nZ2UyJwpwOApzcy4=\\n", "some stuff": "asdfasf"}')
    sqs_queue.write(sqs_message)

    # A single read should move the decoded task onto the internal queue
    internal_queue = Queue()
    ReadWorker(sqs_queue, internal_queue).read_message()

    packed = internal_queue.get(timeout=1)
    decode_message(packed['message']).should.equal({
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 'Test message2',
        },
    })
def test_worker_fills_internal_queue_and_respects_visibility_timeouts():
    """ Test read workers respect visibility timeouts """
    # Capture pyqs log output so the warnings can be asserted on
    logger = logging.getLogger("pyqs")
    logger.handlers.append(MockLoggingHandler())

    # SQS queue with a short visibility timeout
    connection = boto.connect_sqs()
    sqs_queue = connection.create_queue("tester")
    sqs_queue.set_timeout(1)

    # Add messages (three copies of the same celery-formatted payload)
    sqs_message = Message()
    sqs_message.set_body('{"body": "KGRwMApTJ3Rhc2snCnAxClMndGVzdHMudGFza3MuaW5kZXhfaW5jcmVtZW50ZXInCnAyCnNTJ2Fy\\nZ3MnCnAzCihscDQKc1Mna3dhcmdzJwpwNQooZHA2ClMnbWVzc2FnZScKcDcKUydUZXN0IG1lc3Nh\\nZ2UyJwpwOApzcy4=\\n", "some stuff": "asdfasf"}')
    for _ in range(3):
        sqs_queue.write(sqs_message)

    # Read with an internal queue too small to hold everything
    reader = ReadWorker(sqs_queue, Queue(maxsize=1), BATCHSIZE)
    reader.read_message()

    # Both timeout warnings should have been logged, in order
    warnings = logger.handlers[0].messages['warning']
    warnings[0].should.contain("Timed out trying to add the following message to the internal queue")
    warnings[1].should.contain("Clearing Local messages since we exceeded their visibility_timeout")
def test_worker_fills_internal_queue():
    """ Test read workers fill internal queue """
    # Queue a single task message on SQS
    connection = boto.connect_sqs()
    sqs_queue = connection.create_queue("tester")
    expected_task = {
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 'Test message',
        },
    }
    sqs_message = Message()
    sqs_message.set_body(json.dumps(expected_task))
    sqs_queue.write(sqs_message)

    # A single read should move the message onto the internal queue
    internal_queue = Queue()
    ReadWorker(sqs_queue, internal_queue, BATCHSIZE).read_message()

    packed = internal_queue.get(timeout=1)
    decode_message(packed['message']).should.equal(expected_task)
def test_worker_fills_internal_queue_only_until_maximum_queue_size():
    """ Test read workers fill internal queue only to maximum size """
    conn = boto.connect_sqs()
    queue = conn.create_queue("tester")
    queue.set_timeout(1)  # Set visibility timeout low to improve test speed
    message = Message()
    body = json.dumps({
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 'Test message',
        },
    })
    message.set_body(body)
    # Send one more message than the internal queue can hold.
    # Loop variable is unused, so name it `_`.
    for _ in range(3):
        queue.write(message)

    internal_queue = Queue(maxsize=2)
    worker = ReadWorker(queue, internal_queue)
    worker.read_message()

    # The internal queue should only have two messages on it
    internal_queue.get(timeout=1)
    internal_queue.get(timeout=1)

    try:
        internal_queue.get(timeout=1)
    except Empty:
        pass
    else:
        raise AssertionError("The internal queue should be empty")
def test_worker_fills_internal_queue():
    """ Test read workers fill internal queue """
    # Queue a single task message on SQS
    connection = boto.connect_sqs()
    sqs_queue = connection.create_queue("tester")
    expected_task = {
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 'Test message',
        },
    }
    sqs_message = Message()
    sqs_message.set_body(json.dumps(expected_task))
    sqs_queue.write(sqs_message)

    # A single read should move the message onto the internal queue
    internal_queue = Queue()
    ReadWorker(sqs_queue, internal_queue).read_message()

    packed = internal_queue.get(timeout=1)
    decode_message(packed['message']).should.equal(expected_task)
def test_worker_fills_internal_queue_and_respects_visibility_timeouts():
    """ Test read workers respect visibility timeouts """
    # Capture pyqs log output so the warnings can be asserted on
    logger = logging.getLogger("pyqs")
    logger.handlers.append(MockLoggingHandler())

    # SQS queue with a short visibility timeout
    connection = boto.connect_sqs()
    sqs_queue = connection.create_queue("tester")
    sqs_queue.set_timeout(1)

    # Add messages (three copies of the same celery-formatted payload)
    sqs_message = Message()
    sqs_message.set_body('{"body": "KGRwMApTJ3Rhc2snCnAxClMndGVzdHMudGFza3MuaW5kZXhfaW5jcmVtZW50ZXInCnAyCnNTJ2Fy\\nZ3MnCnAzCihscDQKc1Mna3dhcmdzJwpwNQooZHA2ClMnbWVzc2FnZScKcDcKUydUZXN0IG1lc3Nh\\nZ2UyJwpwOApzcy4=\\n", "some stuff": "asdfasf"}')
    for _ in range(3):
        sqs_queue.write(sqs_message)

    # Read with an internal queue too small to hold everything
    reader = ReadWorker(sqs_queue, Queue(maxsize=1))
    reader.read_message()

    # Both timeout warnings should have been logged, in order
    warnings = logger.handlers[0].messages['warning']
    warnings[0].should.contain("Timed out trying to add the following message to the internal queue")
    warnings[1].should.contain("Clearing Local messages since we exceeded their visibility_timeout")
def test_worker_fills_internal_queue_only_until_maximum_queue_size():
    """ Test read workers fill internal queue only to maximum size """
    conn = boto3.client('sqs', region_name='us-east-1')
    # Set visibility timeout low to improve test speed
    queue_url = conn.create_queue(
        QueueName="tester",
        Attributes={'VisibilityTimeout': '1'})['QueueUrl']
    message = json.dumps({
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 'Test message',
        },
    })
    # Send one more message than the internal queue can hold.
    # Loop variable is unused, so name it `_`.
    for _ in range(3):
        conn.send_message(QueueUrl=queue_url, MessageBody=message)

    internal_queue = Queue(maxsize=2)
    worker = ReadWorker(queue_url, internal_queue, BATCHSIZE, parent_id=1)
    worker.read_message()

    # The internal queue should only have two messages on it
    internal_queue.get(timeout=1)
    internal_queue.get(timeout=1)

    try:
        internal_queue.get(timeout=1)
    except Empty:
        pass
    else:
        raise AssertionError("The internal queue should be empty")
def test_worker_fills_internal_queue_from_celery_task():
    """ Test read workers fill internal queue with celery tasks """
    # Publish a raw celery-formatted payload to SQS
    sqs_client = boto3.client('sqs', region_name='us-east-1')
    queue_url = sqs_client.create_queue(QueueName="tester")['QueueUrl']
    payload = (
        '{"body": "KGRwMApTJ3Rhc2snCnAxClMndGVzdHMudGFza3MuaW5kZXhfa'
        'W5jcmVtZW50ZXInCnAyCnNTJ2Fy\\nZ3MnCnAzCihscDQKc1Mna3dhcmdzJw'
        'pwNQooZHA2ClMnbWVzc2FnZScKcDcKUydUZXN0IG1lc3Nh\\nZ2UyJwpwOAp'
        'zcy4=\\n", "some stuff": "asdfasf"}'
    )
    sqs_client.send_message(QueueUrl=queue_url, MessageBody=payload)

    # A single read should move the decoded task onto the internal queue
    internal_queue = Queue()
    ReadWorker(queue_url, internal_queue, BATCHSIZE).read_message()

    decoded = decode_message(internal_queue.get(timeout=1)['message'])
    decoded.should.equal({
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 'Test message2',
        },
    })
def test_read_worker_with_parent_process_dead_and_should_not_exit(os):
    """ Test read workers exit when parent is dead and shutdown is not set """
    # Build an SQS queue for the worker to point at
    connection = boto.connect_sqs()
    sqs_queue = connection.create_queue("tester")

    # Pretend the process was reparented (parent pid is init's 1)
    os.getppid.return_value = 1

    # With no parent process and shutdown unset, run() should return
    reader = ReadWorker(sqs_queue, "foo")
    reader.read_message = Mock()
    reader.run().should.be.none
def test_read_worker_with_parent_process_dead_and_should_not_exit(os):
    """ Test read workers exit when parent is dead and shutdown is not set """
    # Build an SQS queue for the worker to point at
    connection = boto.connect_sqs()
    sqs_queue = connection.create_queue("tester")

    # Pretend the process was reparented (parent pid is init's 1)
    os.getppid.return_value = 1

    # With no parent process and shutdown unset, run() should return
    reader = ReadWorker(sqs_queue, "foo", BATCHSIZE)
    reader.read_message = Mock()
    reader.run().should.be.none
def test_worker_fills_internal_queue_from_celery_task():
    """ Test read workers fill internal queue with celery tasks """
    # Publish a raw celery-formatted payload to SQS
    connection = boto.connect_sqs()
    sqs_queue = connection.create_queue("tester")
    sqs_message = Message()
    sqs_message.set_body('{"body": "KGRwMApTJ3Rhc2snCnAxClMndGVzdHMudGFza3MuaW5kZXhfaW5jcmVtZW50ZXInCnAyCnNTJ2Fy\\nZ3MnCnAzCihscDQKc1Mna3dhcmdzJwpwNQooZHA2ClMnbWVzc2FnZScKcDcKUydUZXN0IG1lc3Nh\\nZ2UyJwpwOApzcy4=\\n", "some stuff": "asdfasf"}')
    sqs_queue.write(sqs_message)

    # Reading should place the decoded task dict on the internal queue
    internal_queue = Queue()
    ReadWorker(sqs_queue, internal_queue).read_message()

    internal_queue.get(timeout=1).should.equal(
        {"task": "tests.tasks.index_incrementer",
         "args": [],
         "kwargs": {"message": "Test message2"}}
    )
def test_worker_fills_internal_queue():
    """ Test read workers fill internal queue """
    # Queue a single task message on SQS
    connection = boto.connect_sqs()
    sqs_queue = connection.create_queue("tester")
    expected_task = {"task": "tests.tasks.index_incrementer", "args": [], "kwargs": {"message": "Test message"}}
    sqs_message = Message()
    sqs_message.set_body(json.dumps(expected_task))
    sqs_queue.write(sqs_message)

    # Reading should place the decoded task dict on the internal queue
    internal_queue = Queue()
    ReadWorker(sqs_queue, internal_queue).read_message()

    internal_queue.get(timeout=1).should.equal(expected_task)
def test_read_worker_with_parent_process_dead_and_should_not_exit(os):
    """ Test read workers exit when parent is dead and shutdown is not set """
    # Build an SQS queue for the worker to point at
    client = boto3.client('sqs', region_name='us-east-1')
    queue_url = client.create_queue(QueueName="tester")['QueueUrl']

    # Pretend the current parent pid no longer matches the recorded parent_id
    os.getppid.return_value = 123

    # With no parent process and shutdown unset, run() should return
    reader = ReadWorker(queue_url, Queue(1), BATCHSIZE, parent_id=1)
    reader.read_message = Mock()
    reader.run().should.be.none
def test_read_worker_with_parent_process_dead_and_should_not_exit(os):
    """ Test read workers exit when parent is dead and shutdown is not set """
    # Build an SQS queue for the worker to point at
    client = boto3.client('sqs', region_name='us-east-1')
    queue_url = client.create_queue(QueueName="tester")['QueueUrl']

    # Pretend the process was reparented (parent pid is init's 1)
    os.getppid.return_value = 1

    # With no parent process and shutdown unset, run() should return
    reader = ReadWorker(queue_url, Queue(1), BATCHSIZE)
    reader.read_message = Mock()
    reader.run().should.be.none
def test_read_worker_with_parent_process_alive_and_should_not_exit(os):
    """ Test read workers do not exit when parent is alive and shutdown is not set """
    # Build an SQS queue for the worker to point at
    connection = boto.connect_sqs()
    sqs_queue = connection.create_queue("tester")

    # Pretend the parent process is still alive
    os.getppid.return_value = 1234

    # Stub read_message so reaching it is observable as an exception
    def raise_if_called():
        raise Exception("Called")

    # With a live parent and shutdown unset, run() should reach read_message()
    reader = ReadWorker(sqs_queue, "foo", BATCHSIZE)
    reader.read_message = raise_if_called
    reader.run.when.called_with().should.throw(Exception, "Called")
def test_read_worker_with_parent_process_alive_and_should_not_exit(os):
    """ Test read workers do not exit when parent is alive and shutdown is not set """
    # Build an SQS queue for the worker to point at
    connection = boto.connect_sqs()
    sqs_queue = connection.create_queue("tester")

    # Pretend the parent process is still alive
    os.getppid.return_value = 1234

    # Stub read_message so reaching it is observable as an exception
    def raise_if_called():
        raise Exception("Called")

    # With a live parent and shutdown unset, run() should reach read_message()
    reader = ReadWorker(sqs_queue, "foo")
    reader.read_message = raise_if_called
    reader.run.when.called_with().should.throw(Exception, "Called")
def test_read_worker_with_parent_process_alive_and_should_not_exit(os):
    """ Test read workers do not exit when parent is alive and shutdown is not set """
    # Build an SQS queue for the worker to point at
    client = boto3.client('sqs', region_name='us-east-1')
    queue_url = client.create_queue(QueueName="tester")['QueueUrl']

    # Pretend the parent process is still alive
    os.getppid.return_value = 1234

    # Stub read_message so reaching it is observable as an exception
    def raise_if_called():
        raise Exception("Called")

    # With a live parent and shutdown unset, run() should reach read_message()
    reader = ReadWorker(queue_url, "foo", BATCHSIZE)
    reader.read_message = raise_if_called
    reader.run.when.called_with().should.throw(Exception, "Called")
def test_read_worker_with_parent_process_alive_and_should_not_exit(os):
    """ Test read workers do not exit when parent is alive and shutdown is not set """
    # Build an SQS queue for the worker to point at
    client = boto3.client('sqs', region_name='us-east-1')
    queue_url = client.create_queue(QueueName="tester")['QueueUrl']

    # Parent pid matches the worker's recorded parent_id, so it is "alive"
    os.getppid.return_value = 1

    # Stub read_message so reaching it is observable as an exception
    def raise_if_called():
        raise Exception("Called")

    # With a live parent and shutdown unset, run() should reach read_message()
    reader = ReadWorker(queue_url, "foo", BATCHSIZE, parent_id=1)
    reader.read_message = raise_if_called
    reader.run.when.called_with().should.throw(Exception, "Called")
def test_worker_fills_internal_queue_and_respects_visibility_timeouts():
    """ Test read workers respect visibility timeouts """
    # Capture pyqs log output so the warnings can be asserted on
    logger = logging.getLogger("pyqs")
    logger.handlers.append(MockLoggingHandler())

    # SQS queue with a short visibility timeout
    sqs_client = boto3.client('sqs', region_name='us-east-1')
    queue_url = sqs_client.create_queue(
        QueueName="tester",
        Attributes={'VisibilityTimeout': '1'})['QueueUrl']

    # Add messages (three celery-formatted payloads)
    body = json.dumps({
        "body": ("KGRwMApTJ3Rhc2snCnAxClMndGVzdHMudGFza3MuaW5kZXhfaW5jcmVtZW"
                 "50ZXInCnAyCnNTJ2Fy\nZ3MnCnAzCihscDQKc1Mna3dhcmdzJwpwNQooZHA"
                 "2ClMnbWVzc2FnZScKcDcKUydUZXN0IG1lc3Nh\nZ2UyJwpwOApzcy4=\n"),
        "some stuff": "asdfasf",
    })
    for _ in range(3):
        sqs_client.send_message(QueueUrl=queue_url, MessageBody=body)

    # Read with an internal queue too small to hold everything
    reader = ReadWorker(queue_url, Queue(maxsize=1), BATCHSIZE)
    reader.read_message()

    # Both timeout warnings should have been logged, in order
    warnings = logger.handlers[0].messages['warning']
    warnings[0].should.contain(
        "Timed out trying to add the following message to the internal queue")
    warnings[1].should.contain(
        "Clearing Local messages since we exceeded their visibility_timeout")
def test_worker_fills_internal_queue_only_until_maximum_queue_size():
    """ Test read workers fill internal queue only to maximum size """
    conn = boto.connect_sqs()
    queue = conn.create_queue("tester")
    message = Message()
    body = json.dumps({"task": "tests.tasks.index_incrementer", "args": [], "kwargs": {"message": "Test message"}})
    message.set_body(body)
    # Send one more message than the internal queue can hold.
    # Loop variable is unused, so name it `_`.
    for _ in range(3):
        queue.write(message)

    internal_queue = Queue(maxsize=2)
    worker = ReadWorker(queue, internal_queue)
    worker.read_message()

    # The internal queue should only have two messages on it
    internal_queue.get(timeout=1)
    internal_queue.get(timeout=1)

    try:
        internal_queue.get(timeout=1)
    except Empty:
        pass
    else:
        raise AssertionError("The internal queue should be empty")
def test_read_worker_with_parent_process_alive_and_should_exit(os):
    """ Test read workers exit when parent is alive and shutdown is set """
    # Build an SQS queue for the worker to point at
    connection = boto.connect_sqs()
    sqs_queue = connection.create_queue("tester")

    # Pretend the parent process is still alive
    os.getppid.return_value = 1234

    # With a live parent but shutdown requested, run() should return
    reader = ReadWorker(sqs_queue, Queue(1), BATCHSIZE)
    reader.read_message = Mock()
    reader.shutdown()
    reader.run().should.be.none