Example No. 1
    def test_to_task_has_correct_arguments(self, memcache, time, task,
                                           task_retry):
        """Ensure that if no name is passed into the MessageProcessor that it
        creates a default unique name when creating the task.
        """
        from furious.async import MAX_RESTARTS
        from furious.batcher import MessageProcessor

        memcache.get.return_value = 'current-batch'
        time.time.return_value = 100

        task_retry_object = Mock()
        task_retry.return_value = task_retry_object

        processor = MessageProcessor('something', queue='test_queue',
                                     id='someid', parent_id='parentid',
                                     context_id="contextid")

        processor.to_task()

        task_args = {
            'name': 'processor-processor-current-batch-3',
            'url': '/_ah/queue/async/something',
            'countdown': 30,
            'headers': {},
            'retry_options': task_retry_object,
            'payload': json.dumps(processor.to_dict())
        }

        task.assert_called_once_with(**task_args)
        task_retry.assert_called_once_with(task_retry_limit=MAX_RESTARTS)
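Note: the test methods in these examples receive memcache, time, task and task_retry as extra arguments, which implies stacked mock.patch decorators in the original test module that are not shown here. A minimal sketch of the assumed setup follows; the patch targets are guesses inferred from how the mocks are used, not the library's actual test code.

    import unittest

    from mock import Mock, patch


    class MessageProcessorTestCase(unittest.TestCase):

        # Stacked patch decorators inject mocks bottom-up: the decorator
        # closest to the method supplies the first argument after self.
        # The targets below are assumptions, not the real test module.
        @patch('google.appengine.api.taskqueue.TaskRetryOptions')  # -> task_retry
        @patch('google.appengine.api.taskqueue.Task')              # -> task
        @patch('furious.batcher.time')                             # -> time
        @patch('furious.batcher.memcache')                         # -> memcache
        def test_to_task_has_correct_arguments(self, memcache, time, task,
                                               task_retry):
            pass  # body as in Example No. 1 above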
Example No. 2
    def test_to_task_has_correct_arguments(self, memcache, time, task,
                                           task_retry):
        """Ensure that if no name is passed into the MessageProcessor that it
        creates a default unique name when creating the task.
        """
        from furious.async import MAX_RESTARTS
        from furious.batcher import MessageProcessor

        memcache.get.return_value = 'current-batch'
        time.time.return_value = 100

        task_retry_object = Mock()
        task_retry.return_value = task_retry_object

        processor = MessageProcessor('something',
                                     queue='test_queue',
                                     id='someid',
                                     parent_id='parentid',
                                     context_id="contextid")

        processor.to_task()

        task_args = {
            'name': 'processor-processor-current-batch-3',
            'url': '/_ah/queue/async/something',
            'countdown': 30,
            'headers': {},
            'retry_options': task_retry_object,
            'payload': json.dumps(processor.to_dict())
        }

        task.assert_called_once_with(**task_args)
        task_retry.assert_called_once_with(task_retry_limit=MAX_RESTARTS)
Example No. 3
    def test_to_task_has_correct_arguments(self, memcache, time, task):
        """Ensure that if no name is passed into the MessageProcessor that it
        creates a default unique name when creating the task.
        """
        from furious.batcher import MessageProcessor

        memcache.get.return_value = 'current-batch'
        time.time.return_value = 100

        processor = MessageProcessor('something', queue='test_queue')

        processor.to_task()

        task_args = {
            'url': '/_ah/queue/async/something',
            'headers': {},
            'payload': json.dumps({
                'queue': 'test_queue',
                'job': ["something", None, None],
                'task_args': {
                    'countdown': 30,
                    'name': 'processor-processor-current-batch-3'
                },
            }),
            'countdown': 30,
            'name': 'processor-processor-current-batch-3'
        }

        task.assert_called_once_with(**task_args)
Example No. 4
    def test_to_task_with_frequency_passed_in(self, memcache, time):
        """Ensure that if a frequency is passed into the MessageProcessor that
        it uses that frequency when creating the task.
        """
        from furious.batcher import MessageProcessor

        memcache.get.return_value = 'current-batch'
        time.time.return_value = 100

        processor = MessageProcessor('something', queue='test_queue', freq=100)

        task = processor.to_task()

        self.assertEqual(task.name, 'processor-processor-current-batch-1')
Example No. 5
    def test_has_type(self):
        """Ensure that if _type is not furious.async.Async that the correct
        subclass is instantiated.
        """
        from furious.async import async_from_options
        from furious.batcher import MessageProcessor

        async_job = MessageProcessor(dir)

        options = async_job.to_dict()

        result = async_from_options(options)

        self.assertIsInstance(result, MessageProcessor)
Example No. 6
    def test_to_task_with_no_name_passed_in(self, memcache, time):
        """Ensure that if no name is passed into the MessageProcessor that it
        creates a default unique name when creating the task.
        """
        from furious.batcher import MessageProcessor

        memcache.get.return_value = 'current-batch'
        time.time.return_value = 100

        processor = MessageProcessor('something', queue='test_queue')

        task = processor.to_task()

        self.assertEqual(task.name, 'processor-processor-current-batch-3')
Example No. 7
    def test_has_type(self):
        """Ensure that if _type is not furious.async.Async that the correct
        subclass is instantiated.
        """
        from furious.async import async_from_options
        from furious.batcher import MessageProcessor

        async_job = MessageProcessor(dir)

        options = async_job.to_dict()

        result = async_from_options(options)

        self.assertIsInstance(result, MessageProcessor)
Example No. 8
    def test_to_task_with_frequency_passed_in(self, memcache, time):
        """Ensure that if a frequency is passed into the MessageProcessor that
        it uses that frequency when creating the task.
        """
        from furious.batcher import MessageProcessor

        memcache.get.return_value = 'current-batch'
        time.time.return_value = 100

        processor = MessageProcessor('something', queue='test_queue', freq=100)

        task = processor.to_task()

        self.assertEqual(task.name, 'processor-processor-current-batch-1')
Example No. 9
    def test_to_task_with_no_name_passed_in(self, memcache, time):
        """Ensure that if no name is passed into the MessageProcessor that it
        creates a default unique name when creating the task.
        """
        from furious.batcher import MessageProcessor

        memcache.get.return_value = 'current-batch'
        time.time.return_value = 100

        processor = MessageProcessor('something', queue='test_queue')

        task = processor.to_task()

        self.assertEqual(task.name, 'processor-processor-current-batch-3')
Example No. 10
    def test_to_task_has_correct_arguments(self, memcache, time, task,
                                           task_retry):
        """Ensure that if no name is passed into the MessageProcessor that it
        creates a default unique name when creating the task.
        """
        from furious.async import MAX_RESTARTS
        from furious.batcher import MessageProcessor

        memcache.get.return_value = 'current-batch'
        time.time.return_value = 100

        task_retry_object = Mock()
        task_retry.return_value = task_retry_object

        processor = MessageProcessor('something',
                                     queue='test_queue',
                                     id='someid')

        processor.to_task()

        task_args = {
            'url': '/_ah/queue/async/something',
            'headers': {},
            'payload': json.dumps({
                'queue': 'test_queue',
                'job': ("something", None, None),
                'id': 'someid',
                'task_args': {
                    'countdown': 30,
                    'name': 'processor-processor-current-batch-3'
                },
                '_recursion': {
                    'current': 1,
                    'max': 100
                },
                '_type': 'furious.batcher.MessageProcessor',
            }),
            'countdown': 30,
            'name': 'processor-processor-current-batch-3',
            'retry_options': task_retry_object
        }

        task.assert_called_once_with(**task_args)
        task_retry.assert_called_once_with(task_retry_limit=MAX_RESTARTS)
Example No. 11
    def test_to_task_with_name_passed_in(self, memcache, time):
        """Ensure that if a name is passed into the MessageProcessor that it
        uses that name when creating the task.
        """
        from furious.batcher import MessageProcessor

        memcache.get.return_value = 'current-batch'
        time.time.return_value = 100

        processor = MessageProcessor('something', queue='test_queue',
                                     task_args={'name': 'test-name'})

        task = processor.to_task()

        self.assertEqual(task.name, 'test-name-processor-current-batch-3')
Example No. 12
    def test_to_task_with_name_passed_in(self, memcache, time):
        """Ensure that if a name is passed into the MessageProcessor that it
        uses that name when creating the task.
        """
        from furious.batcher import MessageProcessor

        memcache.get.return_value = 'current-batch'
        time.time.return_value = 100

        processor = MessageProcessor('something',
                                     queue='test_queue',
                                     task_args={'name': 'test-name'})

        task = processor.to_task()

        self.assertEqual(task.name, 'test-name-processor-current-batch-3')
Example No. 13
    def test_to_task_with_tag_not_passed_in(self, memcache, time):
        """Ensure that if a tag is not passed into the MessageProcessor that it
        uses a default value when creating the task.
        """
        from furious.batcher import MessageProcessor

        memcache.get.return_value = 'current-batch'
        time.time.return_value = 100

        processor = MessageProcessor('something', queue='test_queue')

        task = processor.to_task()

        self.assertEqual(task.name, 'processor-processor-current-batch-3')

        memcache.get.assert_called_once_with('agg-batch-processor')
Example No. 14
    def test_to_task_with_tag_not_passed_in(self, memcache, time):
        """Ensure that if a tag is not passed into the MessageProcessor that it
        uses a default value when creating the task.
        """
        from furious.batcher import MessageProcessor

        memcache.get.return_value = 'current-batch'
        time.time.return_value = 100

        processor = MessageProcessor('something', queue='test_queue')

        task = processor.to_task()

        self.assertEqual(task.name, 'processor-processor-current-batch-3')

        memcache.get.assert_called_once_with('agg-batch-processor')
Example No. 15
    def get(self):
        from furious import context
        from furious.batcher import Message
        from furious.batcher import MessageProcessor

        try:
            color, value, count = self.get_params()
        except (KeyError, AssertionError):
            response = {
                "success": False,
                "message": "Invalid parameters."
            }
            self.response.write(json.dumps(response))
            return

        payload = {
            "color": color,
            "value": value,
            "timestamp": time.mktime(datetime.datetime.utcnow().timetuple())
        }

        tag = "color"

        # create a context to insert multiple Messages
        with context.new() as ctx:
            # loop through the count adding a task to the context per increment
            for _ in xrange(count):
                # insert the message with the payload
                ctx.add(Message(task_args={"payload": payload, "tag": tag}))

        # insert a processor to fetch the messages in batches
        # this should always be inserted. the logic will keep it from inserting
        # too many processors
        processor = MessageProcessor(
            target=process_messages, args=(tag,), tag=tag,
            task_args={"countdown": 0})
        processor.start()

        response = {
            "success": True,
            "message": "Task inserted successfully with %s" % (payload,)
        }

        self.response.write(json.dumps(response))
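The handler above relies on a get_params helper that is not included in the example. A hypothetical sketch of what it might look like, inferred from the (KeyError, AssertionError) handling above; the parameter names and validation rules are assumptions.

    def get_params(self):
        # Hypothetical helper: pull and validate the request parameters.
        # Missing keys raise KeyError and bad values raise AssertionError,
        # both of which the caller maps to an "Invalid parameters." response.
        params = self.request.params

        color = params["color"].lower()
        value = params["value"]
        count = params.get("count", "1")

        assert value.lstrip("-").isdigit()
        assert count.isdigit() and int(count) > 0

        return color, int(value), int(count)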
Example No. 16
    def test_to_task_has_correct_arguments(self, memcache, time, task,
                                           task_retry):
        """Ensure that if no name is passed into the MessageProcessor that it
        creates a default unique name when creating the task.
        """
        from furious.async import MAX_RESTARTS
        from furious.batcher import MessageProcessor

        memcache.get.return_value = 'current-batch'
        time.time.return_value = 100

        task_retry_object = Mock()
        task_retry.return_value = task_retry_object

        processor = MessageProcessor('something', queue='test_queue',
                                     id='someid')

        processor.to_task()

        task_args = {
            'url': '/_ah/queue/async/something',
            'headers': {},
            'payload': json.dumps({
                'queue': 'test_queue',
                'job': ("something", None, None),
                'id': 'someid',
                'task_args': {
                    'countdown': 30,
                    'name': 'processor-processor-current-batch-3'
                },
                '_recursion': {
                    'current': 1,
                    'max': 100
                },
                '_type': 'furious.batcher.MessageProcessor',
            }),
            'countdown': 30,
            'name': 'processor-processor-current-batch-3',
            'retry_options': task_retry_object
        }

        task.assert_called_once_with(**task_args)
        task_retry.assert_called_once_with(task_retry_limit=MAX_RESTARTS)
Example No. 17
    def get(self):
        from furious import context
        from furious.batcher import Message
        from furious.batcher import MessageProcessor

        try:
            color, value, count = self.get_params()
        except (KeyError, AssertionError):
            response = {"success": False, "message": "Invalid parameters."}
            self.response.write(json.dumps(response))
            return

        payload = {
            "color": color,
            "value": value,
            "timestamp": time.mktime(datetime.datetime.utcnow().timetuple())
        }

        tag = "color"

        # create a context to insert multiple Messages
        with context.new() as ctx:
            # loop through the count adding a task to the context per increment
            for _ in xrange(count):
                # insert the message with the payload
                ctx.add(Message(task_args={"payload": payload, "tag": tag}))

        # insert a processor to fetch the messages in batches
        # this should always be inserted. the logic will keep it from inserting
        # too many processors
        processor = MessageProcessor(target=process_messages,
                                     args=(tag, ),
                                     tag=tag,
                                     task_args={"countdown": 0})
        processor.start()

        response = {
            "success": True,
            "message": "Task inserted successfully with %s" % (payload, )
        }

        self.response.write(json.dumps(response))
Example No. 18
    def test_curent_batch_key_doesnt_exist_in_cache(self, cache):
        """Ensure that if the current batch key doesn't exist in cache that it
        inserts the default value of 1 into cache and returns it.
        """
        from furious.batcher import MessageProcessor

        cache.get.return_value = None

        processor = MessageProcessor('something')

        current_batch = processor.current_batch

        self.assertEqual(current_batch, 1)

        cache.get.assert_called_once_with('agg-batch-processor')
        cache.add.assert_called_once_with('agg-batch-processor', 1)
Example No. 19
    def test_curent_batch_key_exists_in_cache(self, cache):
        """Ensure that if the current batch key exists in cache that it uses it
        and doesn't update it.
        """
        from furious.batcher import MessageProcessor

        cache.get.return_value = 1

        processor = MessageProcessor('something')

        current_batch = processor.current_batch

        cache.get.assert_called_once_with('agg-batch-processor')

        self.assertEqual(current_batch, 1)
        self.assertFalse(cache.add.called)
Example No. 20
def process_messages(tag, retries=0):
    """Processes the messages pulled fromm a queue based off the tag passed in.
    Will insert another processor if any work was processed or the retry count
    is under the max retry count. Will update a aggregated stats object with
    the data in the payload of the messages processed.

    :param tag: :class: `str` Tag to query the queue on
    :param retry: :class: `int` Number of retries the job has processed
    """
    from furious.batcher import bump_batch
    from furious.batcher import MESSAGE_DEFAULT_QUEUE
    from furious.batcher import MessageIterator
    from furious.batcher import MessageProcessor

    from google.appengine.api import memcache

    # since we don't have a flag for checking complete we'll re-insert a
    # processor task with a retry count to catch any work that may still be
    # filtering in. If we've hit our max retry count we just bail out and
    # consider the job complete.
    if retries > 5:
        logging.info("Process messages hit max retry and is exiting")
        return

    # create a message iterator for the tag in batches of 500
    message_iterator = MessageIterator(tag, MESSAGE_DEFAULT_QUEUE, 500)

    client = memcache.Client()

    # get the stats object from cache
    stats = client.gets(tag)

    # JSON-decode it if it exists, otherwise get the default stats.
    stats = json.loads(stats) if stats else get_default_stats()

    work_processed = False

    # loop through the messages pulled from the queue.
    for message in message_iterator:
        work_processed = True

        value = int(message.get("value", 0))
        color = message.get("color").lower()

        # update the total stats with the value pulled
        set_stats(stats["totals"], value)

        # update the specific color's stats with the value pulled
        set_stats(stats["colors"][color], value)

    # insert the stats back into cache
    json_stats = json.dumps(stats)

    # try an add first to see if it's new. We can't trust a get due to
    # a race condition.
    if not client.add(tag, json_stats):
        # if we couldn't add, then let's do a compare-and-set to safely
        # update the stats
        if not client.cas(tag, json_stats):
            raise Exception("Transaction Collision.")

    # bump the process batch id
    bump_batch(tag)

    if work_processed:
        # reset the retries as we've processed work
        retries = 0
    else:
        # no work was processed so increment the retries
        retries += 1

    # insert another processor
    processor = MessageProcessor(
        target=process_messages, args=("colors",),
        kwargs={'retries': retries}, tag="colors")

    processor.start()
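process_messages depends on two helpers, get_default_stats and set_stats, that are not shown in this example. A plausible sketch of their shape, inferred from how the stats dict is indexed above; the tracked fields and the list of colors are assumptions.

COLORS = ("red", "green", "blue", "yellow")  # assumed set of colors


def get_default_stats():
    """Return an empty aggregate: a "totals" bucket plus one bucket per color."""
    def empty_bucket():
        return {"count": 0, "total": 0, "min": None, "max": None}

    return {
        "totals": empty_bucket(),
        "colors": dict((color, empty_bucket()) for color in COLORS),
    }


def set_stats(bucket, value):
    """Fold a single value into one aggregate bucket in place."""
    bucket["count"] += 1
    bucket["total"] += value
    bucket["min"] = value if bucket["min"] is None else min(bucket["min"], value)
    bucket["max"] = value if bucket["max"] is None else max(bucket["max"], value)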
Example No. 21
def process_messages(tag, retries=0):
    """Processes the messages pulled fromm a queue based off the tag passed in.
    Will insert another processor if any work was processed or the retry count
    is under the max retry count. Will update a aggregated stats object with
    the data in the payload of the messages processed.

    :param tag: :class: `str` Tag to query the queue on
    :param retry: :class: `int` Number of retries the job has processed
    """
    from furious.batcher import bump_batch
    from furious.batcher import MESSAGE_DEFAULT_QUEUE
    from furious.batcher import MessageIterator
    from furious.batcher import MessageProcessor

    from google.appengine.api import memcache

    # since we don't have a flag for checking complete we'll re-insert a
    # processor task with a retry count to catch any work that may still be
    # filtering in. If we've hit our max retry count we just bail out and
    # consider the job complete.
    if retries > 5:
        logging.info("Process messages hit max retry and is exiting")
        return

    # create a message iterator for the tag in batches of 500
    message_iterator = MessageIterator(tag, MESSAGE_DEFAULT_QUEUE, 500)

    client = memcache.Client()

    # get the stats object from cache
    stats = client.gets(tag)

    # JSON-decode it if it exists, otherwise get the default stats.
    stats = json.loads(stats) if stats else get_default_stats()

    work_processed = False

    # loop through the messages pulled from the queue.
    for message in message_iterator:
        work_processed = True

        value = int(message.get("value", 0))
        color = message.get("color").lower()

        # update the total stats with the value pulled
        set_stats(stats["totals"], value)

        # update the specific color's stats with the value pulled
        set_stats(stats["colors"][color], value)

    # insert the stats back into cache
    json_stats = json.dumps(stats)

    # try an add first to see if it's new. We can't trust a get due to
    # a race condition.
    if not client.add(tag, json_stats):
        # if we couldn't add, then let's do a compare-and-set to safely
        # update the stats
        if not client.cas(tag, json_stats):
            raise Exception("Transaction Collision.")

    # bump the process batch id
    bump_batch(tag)

    if work_processed:
        # reset the retries as we've processed work
        retries = 0
    else:
        # no work was processed so increment the retries
        retries += 1

    # insert another processor
    processor = MessageProcessor(target=process_messages,
                                 args=("colors", ),
                                 kwargs={'retries': retries},
                                 tag="colors")

    processor.start()