Example no. 1
    def set_non_gd_msg_list_response(self, msg_list, cur_page, _sort_key=itemgetter('pub_time')):
        """ Paginates a list of non-GD messages (from topics or queues) and returns results.
        """
        cur_page = cur_page - 1 if cur_page else 0  # We index lists from 0

        # Take the total now because msg_list may later be truncated to page_size elements
        total = len(msg_list)

        # Proceed only if we have collected any data at all
        if msg_list:

            # Sort the output before it is returned - the most recently published (youngest) messages come first
            msg_list.sort(key=_sort_key, reverse=True)
            start = cur_page * _page_size
            end = start + _page_size
            msg_list = msg_list[start:end]

        for msg in msg_list:

            # Convert float timestamps in all the remaining messages to ISO-8601
            msg['pub_time'] = datetime_from_ms(msg['pub_time'] * 1000.0)
            if msg.get('expiration_time'):
                msg['expiration_time'] = datetime_from_ms(msg['expiration_time'] * 1000.0)

            # Return endpoint information in the same format GD messages are returned in
            msg['endpoint_id'] = msg.pop('published_by_id')
            msg['endpoint_name'] = self.pubsub.get_endpoint_by_id(msg['endpoint_id']).name

        search_results = SearchResults(None, None, None, total)
        search_results.set_data(cur_page, _page_size)

        # This goes to the service's response payload object
        self.response.payload.response = msg_list
        self.response.payload._meta = search_results.to_dict()
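
Note on names assumed by the example above: itemgetter comes from the standard-library operator module, while _page_size, datetime_from_ms and SearchResults are module-level names from the surrounding codebase that the excerpt does not show. A minimal sketch of plausible stand-ins follows, so the excerpt can be read in isolation; the bodies are assumptions, not the actual implementations.

    from datetime import datetime
    from operator import itemgetter

    # Assumption: a module-level page-size constant
    _page_size = 50

    def datetime_from_ms(ms):
        # Assumption: converts milliseconds since the epoch to an ISO-8601 string
        return datetime.utcfromtimestamp(ms / 1000.0).isoformat()

    class SearchResults(object):
        # Assumption: a thin container for pagination metadata
        def __init__(self, q, result, qualified_search, total):
            self.total = total

        def set_data(self, cur_page, page_size):
            self.num_pages, rest = divmod(self.total, page_size)
            if rest:
                self.num_pages += 1
            self.cur_page = cur_page + 1  # Report the page as 1-indexed again
            self.page_size = page_size

        def to_dict(self):
            return {
                'total': self.total,
                'num_pages': self.num_pages,
                'cur_page': self.cur_page,
                'page_size': self.page_size,
            }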
Example no. 2
    def _get_data_from_sliceable(self, sliceable, query_ctx, _time_keys=time_keys):

        max_chars = self.request.input.get('max_chars') or 30
        out = []

        now = self.time.utcnow(needs_format=False)

        start = query_ctx.cur_page * query_ctx.page_size
        stop = start + query_ctx.page_size

        for idx, item in enumerate(sliceable[start:stop]):

            # Internally, time is kept as doubles so we need to convert it to a datetime object or null it out.
            for name in _time_keys:
                _value = item[name]
                if _value:
                    item[name] = arrow_get(_value)
                else:
                    item[name] = None

            del _value

            # Compute expiry since the last operation + the time left to expiry
            expiry = item.pop('expiry')
            if expiry:
                item['expiry_op'] = int(expiry)
                item['expiry_left'] = int((item['expires_at'] - now).total_seconds())
            else:
                item['expiry_op'] = None
                item['expiry_left'] = None

            # Now that we have worked with all the time keys needed, we can serialize them to the ISO-8601 format.
            for name in _time_keys:
                if item[name]:
                    item[name] = item[name].isoformat()

            # Shorten the value if possible, but only if it is a string/unicode object
            value = item['value']
            if isinstance(value, basestring):
                len_value = len(value)
                chars_omitted = len_value - max_chars
                chars_omitted = chars_omitted if chars_omitted > 0 else 0

                if chars_omitted:
                    value = value[:max_chars]

                item['value'] = value
                item['chars_omitted'] = chars_omitted

            item['cache_id'] = self.request.input.cache_id
            item['server'] = '{} ({})'.format(self.server.name, self.server.pid)
            out.append(item)

        return SearchResults(None, out, None, len(sliceable))
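
The method above assumes a query_ctx object exposing zero-indexed cur_page and page_size attributes, a module-level time_keys tuple naming the timestamp columns (it must include expires_at for the expiry computation to work), and arrow_get as an alias for arrow.get from the arrow library. The paging window itself is plain arithmetic; a short illustration with hypothetical values:

    # Hypothetical paging context mirroring the attributes the method reads
    class QueryCtx(object):
        def __init__(self, cur_page, page_size):
            self.cur_page = cur_page    # Zero-indexed page number
            self.page_size = page_size

    query_ctx = QueryCtx(cur_page=2, page_size=25)

    start = query_ctx.cur_page * query_ctx.page_size  # 2 * 25 = 50
    stop = start + query_ctx.page_size                # 50 + 25 = 75

    # sliceable[start:stop] then yields items 50..74, i.e. the third page of 25 elements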
Example no. 3
    def handle(self, _sort_key=itemgetter('pub_time')):
        # Local aliases
        topic_id = self.request.input.topic_id
        paginate = self.request.input.paginate
        cur_page = self.request.input.cur_page
        cur_page = cur_page - 1 if cur_page else 0  # We index lists from 0

        # Response to produce
        msg_list = []

        # Collects responses from all server processes
        is_all_ok, all_data = self.servers.invoke_all(
            'zato.pubsub.topic.get-server-message-list', {
                'topic_id': topic_id,
                'query': self.request.input.query,
            },
            timeout=30)

        # Check if everything is OK on each level - overall, per server and then per process
        if is_all_ok:
            for server_name, server_data in all_data.iteritems():
                if server_data['is_ok']:
                    for server_pid, server_pid_data in server_data['server_data'].iteritems():
                        if server_pid_data['is_ok']:
                            pid_data = server_pid_data['pid_data']['response']['data']
                            msg_list.extend(pid_data)
                        else:
                            self.logger.warn(
                                'Caught an error (server_pid_data) %s',
                                server_pid_data['error_info'])
                else:
                    self.logger.warn('Caught an error (server_data) %s',
                                     server_data['error_info'])

        else:
            self.logger.warn('Caught an error (all_data) %s', all_data)

        # Take the total now because msg_list may later be truncated to page_size elements
        total = len(msg_list)

        # Proceed only if we have collected any data at all
        if msg_list:

            # Sort the output before it is returned - the most recently published (youngest) messages come first
            msg_list.sort(key=_sort_key, reverse=True)

            # If pagination is requested, return only the desired page
            if paginate:

                start = cur_page * _page_size
                end = start + _page_size

                msg_list = msg_list[start:end]

        for msg in msg_list:
            # Convert float timestamps in all the remaining messages to ISO-8601
            msg['pub_time'] = datetime_from_ms(msg['pub_time'] * 1000.0)
            if msg.get('expiration_time'):
                msg['expiration_time'] = datetime_from_ms(msg['expiration_time'] * 1000.0)

            # Return endpoint information in the same format GD messages are returned in
            msg['endpoint_id'] = msg.pop('published_by_id')
            msg['endpoint_name'] = self.pubsub.get_endpoint_by_id(msg['endpoint_id']).name

        search_results = SearchResults(None, None, None, total)
        search_results.set_data(cur_page, _page_size)

        # Actual data
        self.response.payload.response = msg_list

        # Search metadata
        self.response.payload._meta = search_results.to_dict()
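
The three levels of is_ok checks in the example above imply a particular shape for what self.servers.invoke_all returns: one flag for the call as a whole, then per-server entries, each holding one entry per server process (PID). The _page_size, datetime_from_ms and SearchResults names are the same ones sketched after Example no. 1. Below is a sketch of the assumed response structure; the keys are the ones the code reads, the values are illustrative only.

    is_all_ok = True
    all_data = {
        'server1': {
            'is_ok': True,
            'server_data': {
                '1234': {  # One entry per server process (PID)
                    'is_ok': True,
                    'pid_data': {
                        'response': {
                            'data': [{
                                'pub_time': 1514764800.0,         # Seconds since the epoch
                                'expiration_time': 1514768400.0,  # Same unit, may be absent
                                'published_by_id': 7,             # Renamed to endpoint_id later
                            }],
                        },
                    },
                },
            },
        },
    }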