Exemplo n.º 1
0
    def jobs(self):
        """
        Iterate through all existing jobs in the cheapest and quickest possible
        way.

        By default we will:

            - Use the maximum batch size to reduce calls to SQS.  This will
              reduce the cost of running the service, as less requests equals
              less dollars.

            - Wait for as long as possible (*20 seconds*) for a message to be
              sent to us (*if none are in the queue already*).  This way, we'll
              reduce our total request count, and spend less dollars.

        .. note::
            This method is a generator which will continue to return results
            until this SQS queue is emptied.
        """
        total_jobs = self.num_jobs()

        while total_jobs:
            # Pull messages in bulk; BATCH_SIZE and WAIT_SECONDS are class
            # constants tuned to minimize the number of SQS API requests.
            for message in self.queue.get_messages(
                num_messages=self.BATCH_SIZE,
                wait_time_seconds=self.WAIT_SECONDS,
            ):
                yield Job.from_message(message)

            # Re-check the approximate queue size so we stop once drained.
            total_jobs = self.num_jobs()
Exemplo n.º 2
0
    def jobs(self):
        """
        Generate every job currently waiting in the queue, as cheaply and
        quickly as possible.

        The strategy is cost-driven:

            - Messages are fetched at the maximum batch size, so fewer SQS
              requests are issued (and fewer dollars spent).

            - Long polling is used (*up to 20 seconds*) so that an empty queue
              does not trigger rapid-fire polling requests.

        .. note::
            This is a generator; it keeps producing :class:`Job` objects until
            the SQS queue reports itself empty.
        """
        remaining = self.num_jobs()

        while remaining:
            batch = self.queue.get_messages(
                num_messages=self.BATCH_SIZE,
                wait_time_seconds=self.WAIT_SECONDS,
            )
            for raw_message in batch:
                yield Job.from_message(raw_message)

            remaining = self.num_jobs()
Exemplo n.º 3
0
    def test_create_job_from_message(self):
        """A Job built via from_message must preserve the message body."""
        payload = dumps({
            'callable': random_job,
            'args': (),
            'kwargs': {},
        })
        source_message = Message(body=payload)

        created = Job.from_message(source_message)

        self.assertEqual(source_message.get_body(), created.message.get_body())
Exemplo n.º 4
0
    def test_create_job_from_message(self):
        """Job.from_message should wrap the message without altering it."""
        body = dumps({'callable': random_job, 'args': (), 'kwargs': {}})
        original = Message(body=body)

        wrapped = Job.from_message(original)

        self.assertEqual(original.get_body(), wrapped.message.get_body())
Exemplo n.º 5
0
    def jobs(self):
        """
        Generate jobs from the queue as cheaply and quickly as possible.

        Cost-saving defaults:

            - Messages are received at the maximum batch size, cutting the
              number of SQS requests (and therefore dollars) needed.

            - Long polling (*up to 20 seconds*) is used, so an idle queue
              does not cause a burst of empty receive calls.

        .. note::
            This is a generator; duplicate jobs (same non-``None`` ``id``)
            after the first are withheld and handed to
            ``self._cleanup_duplicate_jobs`` once the batch is exhausted.
        """
        received = self.queue.receive_messages(
            AttributeNames=["All"],
            MaxNumberOfMessages=self.BATCH_SIZE,
            WaitTimeSeconds=self.WAIT_SECONDS,
            MessageAttributeNames=["id"],
        )

        # Maps a job id to the list of *extra* jobs seen with that id.
        # Jobs whose id is None are never treated as duplicates of each other.
        groups_by_id = {}

        for raw_message in received:
            job = Job.from_message(raw_message)

            if job.id is not None and job.id in groups_by_id:
                # A repeat of an id already yielded — hold it back for cleanup.
                groups_by_id[job.id].append(job)
            else:
                groups_by_id[job.id] = []
                yield job

        self._cleanup_duplicate_jobs(groups_by_id)