Example #1
def test_worker_processes_tasks_from_internal_queue():
    """
    Test worker processes read from internal queue
    """
    del task_results[:]

    # Setup SQS Queue
    conn = boto.connect_sqs()
    queue = conn.create_queue("tester")

    # Build the SQS message
    message_body = {"task": "tests.tasks.index_incrementer", "args": [], "kwargs": {"message": "Test message"}}
    message = Message()
    body = json.dumps(message_body)
    message.set_body(body)

    # Add message to queue
    internal_queue = Queue()
    internal_queue.put({"message": message, "queue": queue.id, "start_time": time.time(), "timeout": 30})

    # Process message
    worker = ProcessWorker(internal_queue)
    worker.process_message()

    task_results.should.equal(["Test message"])

    # We expect the queue to be empty now
    try:
        internal_queue.get(timeout=1)
    except Empty:
        pass
    else:
        raise AssertionError("The internal queue should be empty")
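
Note: these listings are shown without their imports and test fixtures. A preamble that would satisfy the names used in Example #1 might look like the sketch below; the tests.tasks module path, the moto decorator, and the sure import are assumptions based on how the examples reference these names, not part of the original listing. In the full suite each test is presumably wrapped with moto's SQS mock so that connect_sqs() and create_queue() never touch AWS.

import json
import time
from multiprocessing import Queue
from Queue import Empty  # Python 2; on Python 3 use: from queue import Empty

import boto
import sure  # noqa (enables the .should assertions used throughout)
from boto.sqs.message import Message
from moto import mock_sqs  # assumed: moto fakes the SQS endpoint for these tests

from pyqs.worker import ProcessWorker
from tests.tasks import task_results  # assumed helper module holding processed results
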
Example #2
def test_worker_processes_only_increases_processed_counter_if_a_message_was_processed():
    """
    Test worker process only increases processed counter if a message was processed
    """
    # Setup SQS Queue
    conn = boto.connect_sqs()
    queue = conn.create_queue("tester")

    # Build the SQS Message
    message_body = {"task": "tests.tasks.index_incrementer", "args": [], "kwargs": {"message": 23}}
    message = Message()
    body = json.dumps(message_body)
    message.set_body(body)

    # Add message to internal queue
    internal_queue = Queue(3)
    internal_queue.put({"queue": queue.id, "message": message, "start_time": time.time(), "timeout": 30})

    # And we add a message to the queue later
    def sleep_and_queue(internal_queue):
        time.sleep(1)
        internal_queue.put({"queue": queue.id, "message": message, "start_time": time.time(), "timeout": 30})

    thread = threading.Thread(target=sleep_and_queue, args=(internal_queue,))
    thread.daemon = True
    thread.start()

    # When I Process messages
    worker = ProcessWorker(internal_queue)
    worker._messages_to_process_before_shutdown = 2

    # Then I return from run() after processing 2 messages
    worker.run().should.be.none
Example #3
def test_worker_processes_discard_tasks_that_exceed_their_visibility_timeout():
    """
    Test worker processes discards tasks that exceed their visibility timeout
    """
    # Setup logging
    logger = logging.getLogger("pyqs")
    del logger.handlers[:]
    logger.handlers.append(MockLoggingHandler())

    # Setup SQS Queue
    conn = boto.connect_sqs()
    queue = conn.create_queue("tester")

    # Build the SQS Message
    message_body = {"task": "tests.tasks.index_incrementer", "args": [], "kwargs": {"message": 23}}
    message = Message()
    body = json.dumps(message_body)
    message.set_body(body)

    # Add message to internal queue with timeout of 0 that started long ago
    internal_queue = Queue()
    internal_queue.put({"queue": queue.id, "message": message, "start_time": 0, "timeout": 0})

    # When I process the message
    worker = ProcessWorker(internal_queue)
    worker.process_message()

    # Then I get an error about exceeding the visibility timeout
    msg1 = "Discarding task tests.tasks.index_incrementer with args: [] and kwargs: {u'message': 23} due to exceeding visibility timeout"  # noqa
    logger.handlers[0].messages["warning"][0].lower().should.contain(msg1.lower())
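
MockLoggingHandler itself is not reproduced in any of these listings. A minimal stand-in consistent with how it is used here (formatted records collected per lowercased level name, readable as handler.messages['warning'], handler.messages['error'], and so on) could look like this sketch; it is an approximation, not the project's actual helper.

import logging
from collections import defaultdict


class MockLoggingHandler(logging.Handler):
    """Collect formatted records in memory, keyed by lowercased level name."""

    def __init__(self, *args, **kwargs):
        logging.Handler.__init__(self, *args, **kwargs)
        self.messages = defaultdict(list)

    def emit(self, record):
        # Store the formatted message so tests can assert on substrings of it.
        self.messages[record.levelname.lower()].append(self.format(record))
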
Example #4
def test_worker_processes_shuts_down_after_processing_its_maximum_number_of_messages():
    """
    Test worker processes shutdown after processing maximum number of messages
    """
    # Setup SQS Queue
    conn = boto.connect_sqs()
    queue = conn.create_queue("tester")

    # Build the SQS Message
    message_body = {"task": "tests.tasks.index_incrementer", "args": [], "kwargs": {"message": 23}}
    message = Message()
    body = json.dumps(message_body)
    message.set_body(body)

    # Add message to internal queue
    internal_queue = Queue(3)
    internal_queue.put({"queue": queue.id, "message": message, "start_time": time.time(), "timeout": 30})
    internal_queue.put({"queue": queue.id, "message": message, "start_time": time.time(), "timeout": 30})
    internal_queue.put({"queue": queue.id, "message": message, "start_time": time.time(), "timeout": 30})

    # When I Process messages
    worker = ProcessWorker(internal_queue)
    worker._messages_to_process_before_shutdown = 2

    # Then I return from run()
    worker.run().should.be.none

    # With messages still on the queue
    internal_queue.empty().should.be.false
    internal_queue.full().should.be.false
Example #5
def test_worker_processes_tasks_and_logs_warning_correctly():
    """
    Test worker processes logs WARNING correctly
    """
    # Setup logging
    logger = logging.getLogger("pyqs")
    del logger.handlers[:]
    logger.handlers.append(MockLoggingHandler())

    # Setup SQS Queue
    conn = boto.connect_sqs()
    queue = conn.create_queue("tester")

    # Build the SQS Message
    message_body = {"task": "tests.tasks.index_incrementer", "args": [], "kwargs": {"message": 23}}
    message = Message()
    body = json.dumps(message_body)
    message.set_body(body)

    # Add message to internal queue
    internal_queue = Queue()
    internal_queue.put({"queue": queue.id, "message": message, "start_time": time.time(), "timeout": 30})

    # Process message
    worker = ProcessWorker(internal_queue)
    worker.process_message()

    # Check output
    msg1 = "Task tests.tasks.index_incrementer raised error in 0.0000 seconds: with args: [] and kwargs: {u'message': 23}: Traceback (most recent call last)"  # noqa
    logger.handlers[0].messages["error"][0].lower().should.contain(msg1.lower())
    msg2 = 'raise ValueError("Need to be given basestring, was given {}".format(message))\nValueError: Need to be given basestring, was given 23'  # noqa
    logger.handlers[0].messages["error"][0].lower().should.contain(msg2.lower())
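
The task being exercised, tests.tasks.index_incrementer, is not shown in these listings. Reconstructed from the behaviour asserted above (string messages are appended to task_results, anything else raises ValueError), it plausibly looks like the following Python 2 sketch; treat it as an approximation rather than the suite's actual task definition.

from pyqs import task

task_results = []


@task()
def index_incrementer(message):
    # Strings are recorded in task_results (see Example #1); anything else
    # raises the ValueError asserted in the WARNING/ERROR logging examples.
    if isinstance(message, basestring):
        task_results.append(message)
    else:
        raise ValueError(
            "Need to be given basestring, was given {}".format(message))
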
Example #6
def test_worker_processes_tasks_and_logs_correctly():
    """
    Test worker processes logs INFO correctly
    """
    # Setup logging
    logger = logging.getLogger("pyqs")
    del logger.handlers[:]
    logger.handlers.append(MockLoggingHandler())

    # Setup SQS Queue
    conn = boto.connect_sqs()
    queue = conn.create_queue("tester")

    # Build the SQS message
    message_body = {"task": "tests.tasks.index_incrementer", "args": [], "kwargs": {"message": "Test message"}}
    message = Message()
    body = json.dumps(message_body)
    message.set_body(body)

    # Add message to internal queue
    internal_queue = Queue()
    internal_queue.put({"queue": queue.id, "message": message, "start_time": time.time(), "timeout": 30})

    # Process message
    worker = ProcessWorker(internal_queue)
    worker.process_message()

    # Check output
    expected_result = u"Processed task tests.tasks.index_incrementer in 0.0000 seconds with args: [] and kwargs: {u'message': u'Test message'}"
    logger.handlers[0].messages["info"].should.equal([expected_result])
Example #7
def test_worker_processes_empty_queue():
    """
    Test worker processes read from empty internal queue
    """
    internal_queue = Queue()

    worker = ProcessWorker(internal_queue, INTERVAL)
    worker.process_message()
Example #8
def test_worker_processes_empty_queue():
    """
    Test worker processes read from empty internal queue
    """
    internal_queue = Queue()

    worker = ProcessWorker(internal_queue, INTERVAL)
    worker.process_message()
Example #9
def test_worker_processes_shuts_down_after_processing_its_max_number_of_msgs(
        os):
    """
    Test worker processes shutdown after processing maximum number of messages
    """
    os.getppid.return_value = 1

    # Setup SQS Queue
    conn = boto3.client('sqs', region_name='us-east-1')
    queue_url = conn.create_queue(QueueName="tester")['QueueUrl']

    # Build the SQS Message
    message = {
        'Body': json.dumps({
            'task': 'tests.tasks.index_incrementer',
            'args': [],
            'kwargs': {
                'message': 23,
            },
        }),
        "ReceiptHandle": "receipt-1234",
        "MessageId": "message-id-1",
    }

    # Add message to internal queue
    internal_queue = Queue(3)
    internal_queue.put({
        "queue": queue_url,
        "message": message,
        "start_time": time.time(),
        "timeout": 30,
    })
    internal_queue.put({
        "queue": queue_url,
        "message": message,
        "start_time": time.time(),
        "timeout": 30,
    })
    internal_queue.put({
        "queue": queue_url,
        "message": message,
        "start_time": time.time(),
        "timeout": 30,
    })

    # When I Process messages
    worker = ProcessWorker(internal_queue, INTERVAL, parent_id=1)
    worker._messages_to_process_before_shutdown = 2

    # Then I return from run()
    worker.run().should.be.none

    # With messages still on the queue
    internal_queue.empty().should.be.false
    internal_queue.full().should.be.false
Example #10
def test_worker_processes_shuts_down_after_processing_its_max_number_of_msgs():
    """
    Test worker processes shutdown after processing maximum number of messages
    """
    # Setup SQS Queue
    conn = boto3.client('sqs', region_name='us-east-1')
    queue_url = conn.create_queue(QueueName="tester")['QueueUrl']

    # Build the SQS Message
    message = {
        'Body': json.dumps({
            'task': 'tests.tasks.index_incrementer',
            'args': [],
            'kwargs': {
                'message': 23,
            },
        }),
        "ReceiptHandle": "receipt-1234",
    }

    # Add message to internal queue
    internal_queue = Queue(3)
    internal_queue.put(
        {
            "queue": queue_url,
            "message": message,
            "start_time": time.time(),
            "timeout": 30,
        }
    )
    internal_queue.put(
        {
            "queue": queue_url,
            "message": message,
            "start_time": time.time(),
            "timeout": 30,
        }
    )
    internal_queue.put(
        {
            "queue": queue_url,
            "message": message,
            "start_time": time.time(),
            "timeout": 30,
        }
    )

    # When I Process messages
    worker = ProcessWorker(internal_queue, INTERVAL)
    worker._messages_to_process_before_shutdown = 2

    # Then I return from run()
    worker.run().should.be.none

    # With messages still on the queue
    internal_queue.empty().should.be.false
    internal_queue.full().should.be.false
Example #11
def test_worker_processes_tasks_and_logs_warning_correctly():
    """
    Test worker processes logs WARNING correctly
    """
    # Setup logging
    logger = logging.getLogger("pyqs")
    del logger.handlers[:]
    logger.handlers.append(MockLoggingHandler())

    # Setup SQS Queue
    conn = boto3.client('sqs', region_name='us-east-1')
    queue_url = conn.create_queue(QueueName="tester")['QueueUrl']

    # Build the SQS Message
    message = {
        'Body': json.dumps({
            'task': 'tests.tasks.index_incrementer',
            'args': [],
            'kwargs': {
                'message': 23,
            },
        }),
        "ReceiptHandle": "receipt-1234",
    }

    # Add message to internal queue
    internal_queue = Queue()
    internal_queue.put(
        {
            "queue": queue_url,
            "message": message,
            "start_time": time.time(),
            "timeout": 30,
        }
    )

    # Process message
    worker = ProcessWorker(internal_queue, INTERVAL)
    worker.process_message()

    # Check output
    kwargs = json.loads(message['Body'])['kwargs']
    msg1 = (
        "Task tests.tasks.index_incrementer raised error in 0.0000 seconds: "
        "with args: [] and kwargs: {}: "
        "Traceback (most recent call last)".format(kwargs)
    )  # noqa
    logger.handlers[0].messages['error'][0].lower().should.contain(
        msg1.lower())
    msg2 = (
        'raise ValueError("Need to be given basestring, was given '
        '{}".format(message))\nValueError: Need to be given basestring, '
        'was given 23'
    )  # noqa
    logger.handlers[0].messages['error'][0].lower().should.contain(
        msg2.lower())
Example #12
def test_worker_processes_tasks_with_pre_and_post_process():
    """
    Test worker runs registered callbacks when processing a message
    """

    # Declare these so they can be checked as a side effect to the callbacks
    contexts = []

    def pre_process_with_side_effect(context):
        contexts.append(context)

    def post_process_with_side_effect(context):
        contexts.append(context)

    # When we have a registered pre_process and post_process callback
    register_event("pre_process", pre_process_with_side_effect)
    register_event("post_process", post_process_with_side_effect)

    # And we process a message
    internal_queue = _add_message_to_internal_queue(
        'tests.tasks.index_incrementer')
    worker = ProcessWorker(internal_queue, INTERVAL, parent_id=1)
    worker.process_message()

    pre_process_context = contexts[0]

    # We should run the callbacks with the right task contexts
    pre_process_context['task_name'].should.equal('index_incrementer')
    pre_process_context['args'].should.equal([])
    pre_process_context['kwargs'].should.equal({'message': 'Test message'})
    pre_process_context['full_task_path'].should.equal(
        'tests.tasks.index_incrementer')
    pre_process_context['queue_url'].should.equal(
        'https://queue.amazonaws.com/123456789012/tester')
    pre_process_context['timeout'].should.equal(30)

    assert 'fetch_time' in pre_process_context
    assert 'status' not in pre_process_context

    post_process_context = contexts[1]

    post_process_context['task_name'].should.equal('index_incrementer')
    post_process_context['args'].should.equal([])
    post_process_context['kwargs'].should.equal({'message': 'Test message'})
    post_process_context['full_task_path'].should.equal(
        'tests.tasks.index_incrementer')
    post_process_context['queue_url'].should.equal(
        'https://queue.amazonaws.com/123456789012/tester')
    post_process_context['timeout'].should.equal(30)
    post_process_context['status'].should.equal('success')

    assert 'fetch_time' in post_process_context
    assert 'exception' not in post_process_context

    # And the internal queue should be empty
    _check_internal_queue_is_empty(internal_queue)
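
Example #12 leans on two helpers defined elsewhere in its test module, _add_message_to_internal_queue and _check_internal_queue_is_empty. Judging from how the other boto3-based examples build their queue entries, they plausibly behave like the sketch below; the queue URL matches the moto default asserted in the example, and everything else is an assumption.

import json
import time
from multiprocessing import Queue
from queue import Empty


def _add_message_to_internal_queue(task_path):
    """Build a boto3-style message for task_path and enqueue it."""
    message = {
        'Body': json.dumps({
            'task': task_path,
            'args': [],
            'kwargs': {'message': 'Test message'},
        }),
        'ReceiptHandle': 'receipt-1234',
        'MessageId': 'message-id-1',
    }
    internal_queue = Queue()
    internal_queue.put({
        'queue': 'https://queue.amazonaws.com/123456789012/tester',
        'message': message,
        'start_time': time.time(),
        'timeout': 30,
    })
    return internal_queue


def _check_internal_queue_is_empty(internal_queue):
    """Fail if anything is still waiting on the internal queue."""
    try:
        internal_queue.get(timeout=1)
    except Empty:
        pass
    else:
        raise AssertionError("The internal queue should be empty")
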
Example #13
def test_worker_processes_only_incr_processed_counter_if_a_msg_was_processed():
    """
    Test worker process only increases processed counter if a message was
    processed
    """
    # Setup SQS Queue
    conn = boto3.client('sqs', region_name='us-east-1')
    queue_url = conn.create_queue(QueueName="tester")['QueueUrl']

    # Build the SQS Message
    message = {
        'Body': json.dumps({
            'task': 'tests.tasks.index_incrementer',
            'args': [],
            'kwargs': {
                'message': 23,
            },
        }),
        "ReceiptHandle": "receipt-1234",
    }

    # Add message to internal queue
    internal_queue = Queue(3)
    internal_queue.put(
        {
            "queue": queue_url,
            "message": message,
            "start_time": time.time(),
            "timeout": 30,
        }
    )

    # And we add a message to the queue later
    def sleep_and_queue(internal_queue):
        time.sleep(1)
        internal_queue.put(
            {
                "queue": queue_url,
                "message": message,
                "start_time": time.time(),
                "timeout": 30,
            }
        )

    thread = threading.Thread(target=sleep_and_queue, args=(internal_queue,))
    thread.daemon = True
    thread.start()

    # When I Process messages
    worker = ProcessWorker(internal_queue, INTERVAL)
    worker._messages_to_process_before_shutdown = 2

    # Then I return from run() after processing 2 messages
    worker.run().should.be.none
Example #14
def test_worker_processes_only_incr_processed_counter_if_a_msg_was_processed():
    """
    Test worker process only increases processed counter if a message was
    processed
    """
    # Setup SQS Queue
    conn = boto3.client('sqs', region_name='us-east-1')
    queue_url = conn.create_queue(QueueName="tester")['QueueUrl']

    # Build the SQS Message
    message = {
        'Body': json.dumps({
            'task': 'tests.tasks.index_incrementer',
            'args': [],
            'kwargs': {
                'message': 23,
            },
        }),
        "ReceiptHandle": "receipt-1234",
        "MessageId": "message-id-1",
    }

    # Add message to internal queue
    internal_queue = Queue(3)
    internal_queue.put({
        "queue": queue_url,
        "message": message,
        "start_time": time.time(),
        "timeout": 30,
    })

    # And we add a message to the queue later
    def sleep_and_queue(internal_queue):
        time.sleep(1)
        internal_queue.put({
            "queue": queue_url,
            "message": message,
            "start_time": time.time(),
            "timeout": 30,
        })

    thread = threading.Thread(target=sleep_and_queue, args=(internal_queue, ))
    thread.daemon = True
    thread.start()

    # When I Process messages
    worker = ProcessWorker(internal_queue, INTERVAL, parent_id=1)
    worker._messages_to_process_before_shutdown = 2

    # Then I return from run() after processing 2 messages
    worker.run().should.be.none
Example #15
def test_worker_processes_tasks_and_logs_warning_correctly():
    """
    Test worker processes logs WARNING correctly
    """
    # Setup logging
    logger = logging.getLogger("pyqs")
    del logger.handlers[:]
    logger.handlers.append(MockLoggingHandler())

    # Setup SQS Queue
    conn = boto3.client('sqs', region_name='us-east-1')
    queue_url = conn.create_queue(QueueName="tester")['QueueUrl']

    # Build the SQS Message
    message = {
        'Body': json.dumps({
            'task': 'tests.tasks.index_incrementer',
            'args': [],
            'kwargs': {
                'message': 23,
            },
        }),
        "ReceiptHandle": "receipt-1234",
        "MessageId": "message-id-1",
    }

    # Add message to internal queue
    internal_queue = Queue()
    internal_queue.put({
        "queue": queue_url,
        "message": message,
        "start_time": time.time(),
        "timeout": 30,
    })

    # Process message
    worker = ProcessWorker(internal_queue, INTERVAL, parent_id=1)
    worker.process_message()

    # Check output
    kwargs = json.loads(message['Body'])['kwargs']
    msg1 = (
        "Task tests.tasks.index_incrementer raised error in 0.0000 seconds: "
        "with args: [] and kwargs: {}: "
        "Traceback (most recent call last)".format(kwargs))  # noqa
    logger.handlers[0].messages['error'][0].lower().should.contain(
        msg1.lower())
    msg2 = ('ValueError: Need to be given basestring, ' 'was given 23')  # noqa
    logger.handlers[0].messages['error'][0].lower().should.contain(
        msg2.lower())
Example #16
def test_worker_processes_shuts_down_after_processing_its_maximum_number_of_messages():
    """
    Test worker processes shutdown after processing maximum number of messages
    """
    # Setup SQS Queue
    conn = boto.connect_sqs()
    queue = conn.create_queue("tester")

    # Build the SQS Message
    message_body = {
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 23,
        },
    }
    message = Message()
    body = json.dumps(message_body)
    message.set_body(body)

    # Add message to internal queue
    internal_queue = Queue(3)
    internal_queue.put({
        "queue": queue.id,
        "message": message,
        "start_time": time.time(),
        "timeout": 30
    })
    internal_queue.put({
        "queue": queue.id,
        "message": message,
        "start_time": time.time(),
        "timeout": 30
    })
    internal_queue.put({
        "queue": queue.id,
        "message": message,
        "start_time": time.time(),
        "timeout": 30
    })

    # When I Process messages
    worker = ProcessWorker(internal_queue, INTERVAL)
    worker._messages_to_process_before_shutdown = 2

    # Then I return from run()
    worker.run().should.be.none

    # With messages still on the queue
    internal_queue.empty().should.be.false
    internal_queue.full().should.be.false
Example #17
def test_process_worker_with_parent_process_dead_and_should_not_exit(os):
    """
    Test worker processes exit when parent is dead and shutdown is not set
    """
    # Setup PPID
    os.getppid.return_value = 1

    # When I have no parent process, and shutdown is not set
    worker = ProcessWorker("foo", INTERVAL)
    worker.process_message = Mock()

    # Then I return from run()
    worker.run().should.be.none
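
Examples such as this one (and #9, #18, #29, #30, #36, #37) take an os argument because the decorator that injects it was stripped when the listings were extracted. The usual pattern, and presumably what these tests use, is mock.patch applied to the os module as imported inside pyqs.worker; the sketch below shows how Example #17 would read with the decorator restored. The INTERVAL value is an assumption.

from mock import Mock, patch

from pyqs.worker import ProcessWorker

INTERVAL = 0.01  # assumed polling interval; the real constant lives in the test module


@patch("pyqs.worker.os")
def test_process_worker_with_parent_process_dead_and_should_not_exit(os):
    # Fake the parent PID check inside run() so the worker believes its parent died
    os.getppid.return_value = 1

    worker = ProcessWorker("foo", INTERVAL)
    worker.process_message = Mock()

    worker.run().should.be.none
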
Example #18
def test_process_worker_with_parent_process_dead_and_should_not_exit(os):
    """
    Test worker processes exit when parent is dead and shutdown is not set
    """
    # Setup PPID
    os.getppid.return_value = 1

    # When I have no parent process, and shutdown is not set
    worker = ProcessWorker("foo", INTERVAL)
    worker.process_message = Mock()

    # Then I return from run()
    worker.run().should.be.none
Example #19
def test_worker_processes_tasks_and_logs_correctly():
    logger = logging.getLogger("pyqs")
    logger.handlers.append(MockLoggingHandler())
    message = {"task": "tests.tasks.index_incrementer", "args": [], "kwargs": {"message": "Test message"}}
    internal_queue = Queue()
    internal_queue.put(message)

    worker = ProcessWorker(internal_queue)
    worker.process_message()

    expected_result = (
        "Processing task tests.tasks.index_incrementer with args: [] and kwargs: {'message': 'Test message'}"
    )
    logger.handlers[0].messages["info"].should.equal([expected_result])
Example #20
def test_worker_processes_only_increases_processed_counter_if_a_message_was_processed():
    """
    Test worker process only increases processed counter if a message was processed
    """
    # Setup SQS Queue
    conn = boto.connect_sqs()
    queue = conn.create_queue("tester")

    # Build the SQS Message
    message_body = {
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 23,
        },
    }
    message = Message()
    body = json.dumps(message_body)
    message.set_body(body)

    # Add message to internal queue
    internal_queue = Queue(3)
    internal_queue.put({
        "queue": queue.id,
        "message": message,
        "start_time": time.time(),
        "timeout": 30
    })

    # And we add a message to the queue later
    def sleep_and_queue(internal_queue):
        time.sleep(1)
        internal_queue.put({
            "queue": queue.id,
            "message": message,
            "start_time": time.time(),
            "timeout": 30
        })

    thread = threading.Thread(target=sleep_and_queue, args=(internal_queue, ))
    thread.daemon = True
    thread.start()

    # When I Process messages
    worker = ProcessWorker(internal_queue, INTERVAL)
    worker._messages_to_process_before_shutdown = 2

    # Then I return from run() after processing 2 messages
    worker.run().should.be.none
Example #21
def test_worker_processes_discard_tasks_that_exceed_their_visibility_timeout():
    """
    Test worker processes discards tasks that exceed their visibility timeout
    """
    # Setup logging
    logger = logging.getLogger("pyqs")
    del logger.handlers[:]
    logger.handlers.append(MockLoggingHandler())

    # Setup SQS Queue
    conn = boto3.client('sqs', region_name='us-east-1')
    queue_url = conn.create_queue(QueueName="tester")['QueueUrl']

    # Build the SQS Message
    message = {
        'Body': json.dumps({
            'task': 'tests.tasks.index_incrementer',
            'args': [],
            'kwargs': {
                'message': 23,
            },
        }),
        "ReceiptHandle": "receipt-1234",
    }

    # Add message to internal queue with timeout of 0 that started long ago
    internal_queue = Queue()
    internal_queue.put(
        {
            "queue": queue_url,
            "message": message,
            "start_time": 0,
            "timeout": 0,
        }
    )

    # When I process the message
    worker = ProcessWorker(internal_queue, INTERVAL)
    worker.process_message()

    # Then I get an error about exceeding the visibility timeout
    kwargs = json.loads(message['Body'])['kwargs']
    msg1 = (
        "Discarding task tests.tasks.index_incrementer with args: [] "
        "and kwargs: {} due to exceeding "
        "visibility timeout"
    ).format(kwargs)  # noqa
    logger.handlers[0].messages['warning'][0].lower().should.contain(
        msg1.lower())
Example #22
def test_worker_processes_discard_tasks_that_exceed_their_visibility_timeout():
    """
    Test worker processes discards tasks that exceed their visibility timeout
    """
    # Setup logging
    logger = logging.getLogger("pyqs")
    del logger.handlers[:]
    logger.handlers.append(MockLoggingHandler())

    # Setup SQS Queue
    conn = boto3.client('sqs', region_name='us-east-1')
    queue_url = conn.create_queue(QueueName="tester")['QueueUrl']

    # Build the SQS Message
    message = {
        'Body': json.dumps({
            'task': 'tests.tasks.index_incrementer',
            'args': [],
            'kwargs': {
                'message': 23,
            },
        }),
        "ReceiptHandle": "receipt-1234",
        "MessageId": "message-id-1",
    }

    # Add message to internal queue with timeout of 0 that started long ago
    internal_queue = Queue()
    internal_queue.put({
        "queue": queue_url,
        "message": message,
        "start_time": 0,
        "timeout": 0,
    })

    # When I process the message
    worker = ProcessWorker(internal_queue, INTERVAL, parent_id=1)
    worker.process_message()

    # Then I get an error about exceeding the visibility timeout
    kwargs = json.loads(message['Body'])['kwargs']
    msg1 = ("Discarding task tests.tasks.index_incrementer with args: [] "
            "and kwargs: {} due to exceeding "
            "visibility timeout").format(kwargs)  # noqa
    logger.handlers[0].messages['warning'][0].lower().should.contain(
        msg1.lower())
Example #23
def test_worker_processes_tasks_and_logs_correctly():
    """
    Test worker processes logs INFO correctly
    """
    # Setup logging
    logger = logging.getLogger("pyqs")
    del logger.handlers[:]
    logger.handlers.append(MockLoggingHandler())

    # Setup SQS Queue
    conn = boto3.client('sqs', region_name='us-east-1')
    queue_url = conn.create_queue(QueueName="tester")['QueueUrl']

    # Build the SQS message
    message = {
        'Body': json.dumps({
            'task': 'tests.tasks.index_incrementer',
            'args': [],
            'kwargs': {
                'message': 'Test message',
            },
        }),
        "ReceiptHandle": "receipt-1234",
        "MessageId": "message-id-1",
    }

    # Add message to internal queue
    internal_queue = Queue()
    internal_queue.put({
        "queue": queue_url,
        "message": message,
        "start_time": time.time(),
        "timeout": 30,
    })

    # Process message
    worker = ProcessWorker(internal_queue, INTERVAL, parent_id=1)
    worker.process_message()

    # Check output
    kwargs = json.loads(message['Body'])['kwargs']
    expected_result = (
        u"Processed task tests.tasks.index_incrementer in 0.0000 seconds "
        "with args: [] and kwargs: {}".format(kwargs))
    logger.handlers[0].messages['info'].should.equal([expected_result])
Example #24
def test_worker_processes_tasks_from_internal_queue():
    """
    Test worker processes read from internal queue
    """
    del task_results[:]

    # Setup SQS Queue
    conn = boto3.client('sqs', region_name='us-east-1')
    queue_url = conn.create_queue(QueueName="tester")['QueueUrl']

    # Build the SQS message
    message = {
        'Body': json.dumps({
            'task': 'tests.tasks.index_incrementer',
            'args': [],
            'kwargs': {
                'message': 'Test message',
            },
        }),
        "ReceiptHandle": "receipt-1234",
        "MessageId": "message-id-1",
    }

    # Add message to queue
    internal_queue = Queue()
    internal_queue.put({
        "message": message,
        "queue": queue_url,
        "start_time": time.time(),
        "timeout": 30,
    })

    # Process message
    worker = ProcessWorker(internal_queue, INTERVAL, parent_id=1)
    worker.process_message()

    task_results.should.equal(['Test message'])

    # We expect the queue to be empty now
    try:
        internal_queue.get(timeout=1)
    except Empty:
        pass
    else:
        raise AssertionError("The internal queue should be empty")
Example #25
def test_worker_processes_tasks_from_internal_queue():
    message = {"task": "tests.tasks.index_incrementer", "args": [], "kwargs": {"message": "Test message"}}
    internal_queue = Queue()
    internal_queue.put(message)

    worker = ProcessWorker(internal_queue)
    worker.process_message()

    task_results.should.equal(["Test message"])

    try:
        internal_queue.get(timeout=1)
    except Empty:
        pass
    else:
        raise AssertionError("The internal queue should be empty")
Example #26
def test_worker_processes_tasks_and_logs_correctly():
    """
    Test worker processes logs INFO correctly
    """
    # Setup logging
    logger = logging.getLogger("pyqs")
    del logger.handlers[:]
    logger.handlers.append(MockLoggingHandler())

    # Setup SQS Queue
    conn = boto3.client('sqs', region_name='us-east-1')
    queue_url = conn.create_queue(QueueName="tester")['QueueUrl']

    # Build the SQS message
    message = {
        'Body': json.dumps({
            'task': 'tests.tasks.index_incrementer',
            'args': [],
            'kwargs': {
                'message': 'Test message',
            },
        }),
        "ReceiptHandle": "receipt-1234",
    }

    # Add message to internal queue
    internal_queue = Queue()
    internal_queue.put(
        {
            "queue": queue_url,
            "message": message,
            "start_time": time.time(),
            "timeout": 30,
        }
    )

    # Process message
    worker = ProcessWorker(internal_queue, INTERVAL)
    worker.process_message()

    # Check output
    kwargs = json.loads(message['Body'])['kwargs']
    expected_result = (
        u"Processed task tests.tasks.index_incrementer in 0.0000 seconds "
        "with args: [] and kwargs: {}".format(kwargs)
    )
    logger.handlers[0].messages['info'].should.equal([expected_result])
Example #27
def test_worker_processes_tasks_and_logs_warning_correctly():
    """
    Test worker processes logs WARNING correctly
    """
    # Setup logging
    logger = logging.getLogger("pyqs")
    del logger.handlers[:]
    logger.handlers.append(MockLoggingHandler())

    # Setup SQS Queue
    conn = boto.connect_sqs()
    queue = conn.create_queue("tester")

    # Build the SQS Message
    message_body = {
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 23,
        },
    }
    message = Message()
    body = json.dumps(message_body)
    message.set_body(body)

    # Add message to internal queue
    internal_queue = Queue()
    internal_queue.put({
        "queue": queue.id,
        "message": message,
        "start_time": time.time(),
        "timeout": 30
    })

    # Process message
    worker = ProcessWorker(internal_queue, INTERVAL)
    worker.process_message()

    # Check output
    kwargs = json.loads(body)['kwargs']
    msg1 = "Task tests.tasks.index_incrementer raised error in 0.0000 seconds: with args: [] and kwargs: {}: Traceback (most recent call last)".format(
        kwargs)  # noqa
    logger.handlers[0].messages['error'][0].lower().should.contain(
        msg1.lower())
    msg2 = 'raise ValueError("Need to be given basestring, was given {}".format(message))\nValueError: Need to be given basestring, was given 23'  # noqa
    logger.handlers[0].messages['error'][0].lower().should.contain(
        msg2.lower())
Example #28
def test_worker_processes_tasks_from_internal_queue():
    """
    Test worker processes read from internal queue
    """
    del task_results[:]

    # Setup SQS Queue
    conn = boto3.client('sqs', region_name='us-east-1')
    queue_url = conn.create_queue(QueueName="tester")['QueueUrl']

    # Build the SQS message
    message = {
        'Body': json.dumps({
            'task': 'tests.tasks.index_incrementer',
            'args': [],
            'kwargs': {
                'message': 'Test message',
            },
        }),
        "ReceiptHandle": "receipt-1234",
    }

    # Add message to queue
    internal_queue = Queue()
    internal_queue.put(
        {
            "message": message,
            "queue": queue_url,
            "start_time": time.time(),
            "timeout": 30,
        }
    )

    # Process message
    worker = ProcessWorker(internal_queue, INTERVAL)
    worker.process_message()

    task_results.should.equal(['Test message'])

    # We expect the queue to be empty now
    try:
        internal_queue.get(timeout=1)
    except Empty:
        pass
    else:
        raise AssertionError("The internal queue should be empty")
Example #29
def test_process_worker_with_parent_process_alive_and_should_not_exit(os):
    """
    Test worker processes do not exit when parent is alive and shutdown is not set
    """
    # Setup PPID
    os.getppid.return_value = 1234

    # Setup dummy read_message
    def process_message():
        raise Exception("Called")

    # When I have a parent process, and shutdown is not set
    worker = ProcessWorker("foo")
    worker.process_message = process_message

    # Then process_message() is reached
    worker.run.when.called_with().should.throw(Exception, "Called")
Example #30
def test_process_worker_with_parent_process_alive_and_should_not_exit(os):
    """
    Test worker processes do not exit when parent is alive and shutdown is not set
    """
    # Setup PPID
    os.getppid.return_value = 1234

    # Setup dummy read_message
    def process_message():
        raise Exception("Called")

    # When I have a parent process, and shutdown is not set
    worker = ProcessWorker("foo", INTERVAL)
    worker.process_message = process_message

    # Then process_message() is reached
    worker.run.when.called_with().should.throw(Exception, "Called")
Example #31
def test_worker_processes_tasks_from_internal_queue():
    """
    Test worker processes read from internal queue
    """
    del task_results[:]

    # Setup SQS Queue
    conn = boto.connect_sqs()
    queue = conn.create_queue("tester")

    # Build the SQS message
    message_body = {
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 'Test message',
        },
    }
    message = Message()
    body = json.dumps(message_body)
    message.set_body(body)

    # Add message to queue
    internal_queue = Queue()
    internal_queue.put({
        "message": message,
        "queue": queue.id,
        "start_time": time.time(),
        "timeout": 30
    })

    # Process message
    worker = ProcessWorker(internal_queue, INTERVAL)
    worker.process_message()

    task_results.should.equal(['Test message'])

    # We expect the queue to be empty now
    try:
        internal_queue.get(timeout=1)
    except Empty:
        pass
    else:
        raise AssertionError("The internal queue should be empty")
Example #32
def test_worker_processes_discard_tasks_that_exceed_their_visibility_timeout():
    """
    Test worker processes discards tasks that exceed their visibility timeout
    """
    # Setup logging
    logger = logging.getLogger("pyqs")
    del logger.handlers[:]
    logger.handlers.append(MockLoggingHandler())

    # Setup SQS Queue
    conn = boto.connect_sqs()
    queue = conn.create_queue("tester")

    # Build the SQS Message
    message_body = {
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 23,
        },
    }
    message = Message()
    body = json.dumps(message_body)
    message.set_body(body)

    # Add message to internal queue with timeout of 0 that started long ago
    internal_queue = Queue()
    internal_queue.put({
        "queue": queue.id,
        "message": message,
        "start_time": 0,
        "timeout": 0
    })

    # When I process the message
    worker = ProcessWorker(internal_queue, INTERVAL)
    worker.process_message()

    # Then I get an error about exceeding the visibility timeout
    kwargs = json.loads(body)['kwargs']
    msg1 = "Discarding task tests.tasks.index_incrementer with args: [] and kwargs: {} due to exceeding visibility timeout".format(
        kwargs)  # noqa
    logger.handlers[0].messages['warning'][0].lower().should.contain(
        msg1.lower())
Example #33
def test_worker_processes_tasks_and_logs_correctly():
    """
    Test worker processes logs INFO correctly
    """
    # Setup logging
    logger = logging.getLogger("pyqs")
    del logger.handlers[:]
    logger.handlers.append(MockLoggingHandler())

    # Setup SQS Queue
    conn = boto.connect_sqs()
    queue = conn.create_queue("tester")

    # Build the SQS message
    message_body = {
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 'Test message',
        },
    }
    message = Message()
    body = json.dumps(message_body)
    message.set_body(body)

    # Add message to internal queue
    internal_queue = Queue()
    internal_queue.put({
        "queue": queue.id,
        "message": message,
        "start_time": time.time(),
        "timeout": 30
    })

    # Process message
    worker = ProcessWorker(internal_queue, INTERVAL)
    worker.process_message()

    # Check output
    kwargs = json.loads(body)['kwargs']
    expected_result = u"Processed task tests.tasks.index_incrementer in 0.0000 seconds with args: [] and kwargs: {}".format(
        kwargs)
    logger.handlers[0].messages['info'].should.equal([expected_result])
Example #34
def test_worker_processes_tasks_and_logs_warning_correctly():
    logger = logging.getLogger("pyqs")
    logger.handlers.append(MockLoggingHandler())
    message = {"task": "tests.tasks.index_incrementer", "args": [], "kwargs": {"message": 23}}
    internal_queue = Queue()
    internal_queue.put(message)

    worker = ProcessWorker(internal_queue)
    worker.process_message()

    base_dir = abspath(join(dirname(__file__), pardir, pardir))
    expected_result = (
        "Task tests.tasks.index_incrementer raised error: with"
        " args: [] and kwargs: {'message': 23}: Traceback (most recent call last)"
        ':\n  File "%s/PyQS/pyqs/worker.py", line 101, in '
        "process_message\n    task(*args, **kwargs)\n  File "
        '"%s/PyQS/tests/tasks.py", line 11, in '
        'index_incrementer\n    raise ValueError("Need to be given basestring, was '
        'given {}".format(message))\nValueError: Need to be given basestring, was '
        "given 23\n" % (base_dir, base_dir)
    )
    logger.handlers[0].messages["error"].should.equal([expected_result])
Example #35
def test_worker_processes_tasks_and_logs_correctly():
    """
    Test worker processes logs INFO correctly
    """
    # Setup logging
    logger = logging.getLogger("pyqs")
    del logger.handlers[:]
    logger.handlers.append(MockLoggingHandler())

    # Setup SQS Queue
    conn = boto.connect_sqs()
    queue = conn.create_queue("tester")

    # Build the SQS message
    message_body = {
        'task': 'tests.tasks.index_incrementer',
        'args': [],
        'kwargs': {
            'message': 'Test message',
        },
    }
    message = Message()
    body = json.dumps(message_body)
    message.set_body(body)

    # Add message to internal queue
    internal_queue = Queue()
    internal_queue.put({"queue": queue.id, "message": message})

    # Process message
    worker = ProcessWorker(internal_queue)
    worker.process_message()

    # Check output
    expected_result = u"Processed task tests.tasks.index_incrementer with args: [] and kwargs: {u'message': u'Test message'}"
    logger.handlers[0].messages['info'].should.equal([expected_result])
Example #36
def test_process_worker_with_parent_process_alive_and_should_exit(os):
    """
    Test worker processes exit when parent is alive and shutdown is set
    """
    # Setup PPID
    os.getppid.return_value = 1234

    # When I have a parent process, and shutdown is set
    worker = ProcessWorker("foo", INTERVAL, parent_id=1)
    worker.process_message = Mock()
    worker.shutdown()

    # Then I return from run()
    worker.run().should.be.none
Example #37
def test_process_worker_with_parent_process_alive_and_should_exit(os):
    """
    Test worker processes exit when parent is alive and shutdown is set
    """
    # Setup PPID
    os.getppid.return_value = 1234

    # When I have a parent process, and shutdown is set
    worker = ProcessWorker("foo")
    worker.process_message = Mock()
    worker.shutdown()

    # Then I return from run()
    worker.run().should.be.none
Example #38
def test_worker_processes_empty_queue():
    internal_queue = Queue()

    worker = ProcessWorker(internal_queue)
    worker.process_message()