Example #1
    def update(self, remote_ip, uptime):
        self.last_seen = datetime.utcnow()

        if self.last_ip != remote_ip:
            LogEntry.log_event(self.key(), 'Info', 'IP changed - new IP: ' + remote_ip)

        self.last_ip = remote_ip

        if uptime is not None:
            if self.uptime is not None and self.uptime > uptime:
                LogEntry.log_event(self.key(), 'Reboot',
                                   'Reboot - Previous uptime: ' + str(timedelta(seconds=self.uptime)))
                for action_key in self.reboot_actions:
                    try:
                        db.get(action_key).perform_action()
                    except Exception as exp:
                        logging.error('Error executing reboot action: ' + str(exp))

        self.uptime = uptime
        self.put()

        # job got back online
        if self.status == 'offline':
            self.status = 'online'
            LogEntry.log_event(self.key(), 'Info', 'Job back online - IP: ' + remote_ip)

            # perform all back_online actions
            for action_key in self.backonline_actions:
                try:
                    db.get(action_key).perform_action()
                except Exception as exp:
                    logging.error('Error executing backonline action: ' + str(exp))

        # delete previous (waiting) task
        if self.task_name is not None:
            logging.debug('old task: ' + self.task_name)
            Queue().delete_tasks(Task(name=self.task_name))

        task_name = self.name + '_' + datetime.utcnow().strftime('%Y-%m-%d_%H-%M-%S-%f')

        # create a watchdog task that fires if update() is not called again
        # within `interval` minutes (plus 2 minutes of slack)
        taskqueue.add(name=task_name, url='/task', params={'key': self.key()}, countdown=(self.interval + 2) * 60)

        self.task_name = task_name
        self.put()
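
The snippet above is a simple dead-man's switch: every check-in deletes the previously scheduled task and enqueues a new one with a countdown of `interval + 2` minutes, so the `/task` handler only ever runs when the job stops reporting. A minimal sketch of what such a handler might look like (not part of the original snippet; the handler class and its exact behaviour are assumptions):

import logging

import webapp2
from google.appengine.ext import db


class WatchdogTaskHandler(webapp2.RequestHandler):
    """Hypothetical handler for the '/task' URL targeted by the countdown task."""

    def post(self):
        # This task only fires if update() was not called again in time,
        # because every update() deletes the previously scheduled task.
        job = db.get(self.request.get('key'))
        if job is None or job.status == 'offline':
            return

        job.status = 'offline'
        job.put()
        logging.info('Job %s missed its check-in interval', job.name)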
Example #2
import json
import logging
import time

# Note: DeadlineExceededError is assumed to be imported from the surrounding
# project (e.g. the App Engine runtime); it is raised in fetch_messages() below.


class MessageIterator(object):
    """This iterator will return a batch of messages for a given group.

    This iterator should be directly used when trying to avoid the lease
    operation inside a transaction, or when other flows are needed.
    """
    def __init__(self,
                 tag,
                 queue_name,
                 size,
                 duration=60,
                 deadline=10,
                 auto_delete=True):
        """The generator will yield json deserialized payloads from tasks with
        the corresponding tag.

        :param tag: :class: `str` Pull queue tag to query against
        :param queue_name: :class: `str` Name of PULL queue holding tasks to
                           lease.
        :param size: :class: `int` The number of items to pull at once
        :param duration: :class: `int` After this time, the tasks may be leased
                         again. Tracked in seconds
        :param deadline: :class: `int` The time in seconds to wait for the rpc.
        :param auto_delete: :class: `bool` Delete tasks when iteration is
                            complete.

        :return: :class: `iterator` of json deserialized payloads
        """
        from google.appengine.api.taskqueue import Queue

        self.queue_name = queue_name
        self.queue = Queue(name=self.queue_name)

        self.tag = tag
        self.size = size
        self.duration = duration
        self.auto_delete = auto_delete
        self.deadline = deadline

        self._messages = []
        self._processed_messages = []
        self._fetched = False

    def fetch_messages(self):
        """Fetch messages from the specified pull-queue.

        This should only be called a single time by a given MessageIterator
        object.  If the MessageIterator is iterated over again, it should
        return the originally leased messages.
        """
        if self._fetched:
            return

        start = time.time()

        loaded_messages = self.queue.lease_tasks_by_tag(self.duration,
                                                        self.size,
                                                        tag=self.tag,
                                                        deadline=self.deadline)

        # If we are within 0.1 sec of our deadline and no messages were
        # returned, then we are hitting queue contention issues and this
        # should be a DeadlineExceededError.
        # TODO: investigate other ways around this, perhaps async leases, etc.
        if (not loaded_messages
                and round(time.time() - start, 1) >= self.deadline - 0.1):
            raise DeadlineExceededError()

        self._messages.extend(loaded_messages)

        self._fetched = True

        logging.debug("Calling fetch messages with %s:%s:%s:%s:%s:%s" % (len(
            self._messages), len(loaded_messages), len(
                self._processed_messages), self.duration, self.size, self.tag))

    def __iter__(self):
        """Initialize this MessageIterator for iteration.

        If messages have not been fetched, fetch them.  If messages have been
        fetched, reset self._messages and self._processed_messages for
        re-iteration.  The reset is done to prevent deleting messages that were
        never applied.
        """
        if self._processed_messages:
            # If the iterator is used within a transaction, and there is a
            # retry we need to re-process the original messages, not new
            # messages.
            self._messages = list(
                set(self._messages) | set(self._processed_messages))
            self._processed_messages = []

        if not self._messages:
            self.fetch_messages()

        return self

    def next(self):
        """Get the next batch of messages from the previously fetched messages.

        If there's no more messages, check if we should auto-delete the
        messages and raise StopIteration.
        """
        if not self._messages:
            if self.auto_delete:
                self.delete_messages()
            raise StopIteration

        message = self._messages.pop(0)
        self._processed_messages.append(message)
        return json.loads(message.payload)

    def delete_messages(self, only_processed=True):
        """Delete the messages previously leased.

        Unless otherwise directed, only the messages iterated over will be
        deleted.
        """
        # copy so that extending does not mutate self._processed_messages
        messages = list(self._processed_messages)
        if not only_processed:
            messages.extend(self._messages)

        if messages:
            try:
                self.queue.delete_tasks(messages)
            except Exception:
                logging.exception("Error deleting messages")
                raise
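
For context, a minimal usage sketch of the iterator (not from the original source; the queue name 'pull-queue', the tag value and the process() function are assumptions):

def drain_group(tag):
    # A minimal sketch; 'pull-queue' and process() are hypothetical.
    iterator = MessageIterator(tag=tag, queue_name='pull-queue', size=100)

    # Each pass yields one json-deserialized task payload; once the leased
    # batch is exhausted the iterator deletes the processed tasks
    # (auto_delete=True by default) and raises StopIteration.
    for payload in iterator:
        process(payload)  # hypothetical application handler

Because __iter__() merges already-processed messages back into the working list before re-iterating, the same instance can be iterated again on a transaction retry without leasing new tasks.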