Example #1
    def set_non_gd_msg_list_response(self, msg_list, cur_page, _sort_key=itemgetter('pub_time')):
        """ Paginates a list of non-GD messages (from topics or queues) and returns results.
        """
        cur_page = cur_page - 1 if cur_page else 0 # We index lists from 0

        # Set it here because later on it may be shortened to the page_size of elements
        total = len(msg_list)

        # Proceed only if we actually collected any data
        if msg_list:

            # Sort the output before it is returned - the most recently published (youngest) messages come first
            msg_list.sort(key=_sort_key, reverse=True)
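
            # Return only the page requested by the caller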
            start = cur_page * _page_size
            end = start + _page_size
            msg_list = msg_list[start:end]

        for msg in msg_list:

            # Convert float timestamps in all the remaining messages to ISO-8601
            msg['pub_time'] = datetime_from_ms(msg['pub_time'] * 1000.0)
            if msg.get('expiration_time'):
                msg['expiration_time'] = datetime_from_ms(msg['expiration_time'] * 1000.0)

            # Return endpoint information in the same format GD messages are returned in
            msg['endpoint_id'] = msg.pop('published_by_id')
            msg['endpoint_name'] = self.pubsub.get_endpoint_by_id(msg['endpoint_id']).name

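        # Build the pagination metadata out of the total count and the current page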
        search_results = SearchResults(None, None, None, total)
        search_results.set_data(cur_page, _page_size)

        # This goes to the service's response payload object
        self.response.payload.response = msg_list
        self.response.payload._meta = search_results.to_dict()
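
Note: _page_size, SearchResults and datetime_from_ms are helpers defined elsewhere in the Zato module this example comes from and are not shown here. The standalone sketch below isolates just the sort-and-slice pagination arithmetic, with an assumed page size of 50.

from operator import itemgetter

_page_size = 50  # Assumed value; in the service above it is a module-level constant

def paginate(msg_list, cur_page, _sort_key=itemgetter('pub_time')):
    """ Standalone sketch of the sort-then-slice logic used above. """
    cur_page = cur_page - 1 if cur_page else 0  # Lists are indexed from 0
    msg_list.sort(key=_sort_key, reverse=True)  # Youngest messages first
    start = cur_page * _page_size
    return msg_list[start:start + _page_size]

# E.g. with 120 messages and cur_page=2, elements 50..99 of the sorted list are returned
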
Example #2
    def handle(self, _sort_key=itemgetter('pub_time')):
        # Local aliases
        topic_id = self.request.input.topic_id
        paginate = self.request.input.paginate
        cur_page = self.request.input.cur_page
        cur_page = cur_page - 1 if cur_page else 0  # We index lists from 0

        # Response to produce
        msg_list = []

        # Collects responses from all server processes
        is_all_ok, all_data = self.servers.invoke_all(
            'zato.pubsub.topic.get-server-message-list', {
                'topic_id': topic_id,
                'query': self.request.input.query,
            },
            timeout=30)

        # Check if everything is OK on each level - overall, per server and then per process
        if is_all_ok:
            for server_name, server_data in all_data.iteritems():
                if server_data['is_ok']:
                    for server_pid, server_pid_data in server_data['server_data'].iteritems():
                        if server_pid_data['is_ok']:
                            pid_data = server_pid_data['pid_data']['response']['data']
                            msg_list.extend(pid_data)
                        else:
                            self.logger.warn('Caught an error (server_pid_data) %s', server_pid_data['error_info'])
                else:
                    self.logger.warn('Caught an error (server_data) %s', server_data['error_info'])

        else:
            self.logger.warn('Caught an error (all_data) %s', all_data)

        # Set it here because later on it may be shortened to the page_size of elements
        total = len(msg_list)

        # Proceed only if we actually collected any data
        if msg_list:

            # Sort the output before it is returned - the most recently published (youngest) messages come first
            msg_list.sort(key=_sort_key, reverse=True)

            # If pagination is requested, return only the desired page
            if paginate:

                start = cur_page * _page_size
                end = start + _page_size

                msg_list = msg_list[start:end]

        for msg in msg_list:
            # Convert float timestamps in all the remaining messages to ISO-8601
            msg['pub_time'] = datetime_from_ms(msg['pub_time'] * 1000.0)
            if msg.get('expiration_time'):
                msg['expiration_time'] = datetime_from_ms(msg['expiration_time'] * 1000.0)

            # Return endpoint information in the same format GD messages are returned in
            msg['endpoint_id'] = msg.pop('published_by_id')
            msg['endpoint_name'] = self.pubsub.get_endpoint_by_id(msg['endpoint_id']).name

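        # Build the pagination metadata out of the total count and the current page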
        search_results = SearchResults(None, None, None, total)
        search_results.set_data(cur_page, _page_size)

        # Actual data
        self.response.payload.response = msg_list

        # Search metadata
        self.response.payload._meta = search_results.to_dict()
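
Note: the nested loops above mirror the per-server, per-PID structure returned by self.servers.invoke_all. The self-contained sketch below shows the same flattening step against a hypothetical all_data dict of that shape; the real structure is produced by Zato's invoke_all and may carry additional fields.

import logging

logging.basicConfig(level=logging.WARNING)
logger = logging.getLogger(__name__)

def collect_pid_data(all_data):
    """ Sketch: flatten per-server, per-PID responses into a single list,
        assuming the nested shape handled by the service above. """
    msg_list = []
    for server_name, server_data in all_data.items():
        if not server_data['is_ok']:
            logger.warning('Caught an error (server_data) %s', server_data['error_info'])
            continue
        for server_pid, server_pid_data in server_data['server_data'].items():
            if server_pid_data['is_ok']:
                msg_list.extend(server_pid_data['pid_data']['response']['data'])
            else:
                logger.warning('Caught an error (server_pid_data) %s', server_pid_data['error_info'])
    return msg_list

# Hypothetical input of the same shape - one server running one process
all_data = {
    'server1': {
        'is_ok': True,
        'server_data': {
            '1234': {
                'is_ok': True,
                'pid_data': {'response': {'data': [{'pub_time': 1528053100.0, 'published_by_id': 1}]}},
            },
        },
    },
}

print(collect_pid_data(all_data))  # -> the single message published by server1, PID 1234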