def _dispatch_entries(feed):
    """
    Dispatch every entry of *feed*, oldest first, to each event listener.

    Entries are ordered by their ``updated`` timestamp, then each listener
    in ``self._event_listeners`` is invoked for each entry; the resulting
    calls are flattened lazily and handed to ``self._coiterate``.

    NOTE(review): references ``self`` without taking it as a parameter —
    presumably defined as a closure inside a method; confirm against the
    enclosing scope.
    """
    # Sort chronologically by the entry's "updated" timestamp.
    by_updated = sorted(entries(feed), key=lambda e: parse_date(updated(e)))
    # One listener invocation per (entry, listener) pair, flattened lazily.
    listener_calls = chain.from_iterable(
        (listener(entry) for listener in self._event_listeners)
        for entry in by_updated)
    return self._coiterate(listener_calls)
def _dispatch_entries(feed):
    """
    Feed every entry of *feed* to all event listeners in updated order.

    The entries are sorted by their ``updated`` date and each listener in
    ``self._event_listeners`` is applied to each entry; the flattened
    stream of calls is passed to ``self._coiterate``.

    NOTE(review): uses ``self`` although it is not a parameter — looks
    like a closure defined within a method; verify in the enclosing code.
    """
    def _updated_key(entry):
        # Key function: parse the entry's "updated" field into a date.
        return parse_date(updated(entry))

    ordered = sorted(entries(feed), key=_updated_key)
    per_entry = ((el(e) for el in self._event_listeners) for e in ordered)
    return self._coiterate(chain.from_iterable(per_entry))
def read_entries(service_type, url, params, direction, follow_limit=100,
                 log_msg_type=None):
    """
    Read all feed entries and follow in given direction until it is empty

    :param service_type: Service hosting the feed
    :type service_type: A member of :class:`ServiceType`
    :param str url: CF URL to append
    :param dict params: HTTP parameters
    :param direction: Where to continue fetching?
    :type direction: A member of :class:`Direction`
    :param int follow_limit: Maximum number of times to follow in given
        direction
    :param log_msg_type: Message type used to log each successful response;
        no logging is done when ``None``.

    :return: (``list`` of :obj:`Element`, last fetched params) tuple

    :raises ValueError: if *direction* is not a member of
        :class:`Direction`
    """
    if direction == Direction.PREVIOUS:
        direction_link = atom.previous_link
    elif direction == Direction.NEXT:
        direction_link = atom.next_link
    else:
        raise ValueError("Invalid direction")
    if log_msg_type is not None:
        log_cb = log_success_response(log_msg_type, identity, False)
    else:
        log_cb = identity
    all_entries = []
    while follow_limit > 0:
        # Only the feed body is needed; the response object is unused.
        _resp, feed_str = yield service_request(
            service_type, "GET", url, params=params,
            json_response=False).on(log_cb)
        feed = atom.parse(feed_str)
        entries = atom.entries(feed)
        # Stop as soon as a page comes back with no entries.
        if not entries:
            break
        all_entries.extend(entries)
        link = direction_link(feed)
        if link is None:
            break
        # Continue from the query parameters of the follow link.
        params = parse_qs(urlparse(link).query)
        follow_limit -= 1
    yield do_return((all_entries, params))
def read_entries(service_type, url, params, direction, follow_limit=100,
                 log_msg_type=None):
    """
    Read all feed entries and follow in given direction until it is empty

    :param service_type: Service hosting the feed
    :type service_type: A member of :class:`ServiceType`
    :param str url: CF URL to append
    :param dict params: HTTP parameters
    :param direction: Where to continue fetching?
    :type direction: A member of :class:`Direction`
    :param int follow_limit: Maximum number of times to follow in given
        direction

    :return: (``list`` of :obj:`Element`, last fetched params) tuple
    """
    # Pick the atom link extractor matching the requested direction.
    if direction == Direction.PREVIOUS:
        follow = atom.previous_link
    elif direction == Direction.NEXT:
        follow = atom.next_link
    else:
        raise ValueError("Invalid direction")

    # Log successful responses only when a message type was supplied.
    log_cb = (identity if log_msg_type is None
              else log_success_response(log_msg_type, identity, False))

    collected = []
    remaining = follow_limit
    while remaining > 0:
        resp, body = yield service_request(
            service_type, "GET", url,
            params=params, json_response=False).on(log_cb)
        page = atom.parse(body)
        page_entries = atom.entries(page)
        if page_entries == []:
            break
        collected.extend(page_entries)
        link = follow(page)
        if link is None:
            break
        # Carry the follow link's query string into the next request.
        params = parse_qs(urlparse(link).query)
        remaining -= 1
    yield do_return((collected, params))
def extract_CLB_drained_at(feed):
    """
    Extract time when node was changed to DRAINING from a CLB atom feed.

    :param str feed: Atom feed of the node
    :returns: EPOCH in seconds
    :rtype: float
    """
    # TODO: This function temporarily only looks at last entry assuming that
    # it was draining operation. May need to look at all entries in reverse
    # order and check for draining operation. This could include paging to
    # further entries
    latest = atom.entries(atom.parse(feed))[0]
    text = atom.summary(latest)
    # Guard clause: reject anything that is not a successful DRAINING update.
    if 'Node successfully updated' not in text or 'DRAINING' not in text:
        raise ValueError('Unexpected summary: {}'.format(text))
    return timestamp_to_epoch(atom.updated(latest))
def get_clb_node_feed(lb_id, node_id):
    """
    Get the atom feed associated with a CLB node.

    :param int lb_id: Cloud Load balancer ID
    :param int node_id: Node ID of in loadbalancer node

    :returns: Effect of ``list`` of atom entry :class:`Element`
    :rtype: ``Effect``
    """
    collected = []
    page_params = {}
    # Keep fetching pages until one is empty or has no "next" link.
    while True:
        body = yield _node_feed_page(lb_id, node_id, page_params)
        parsed = atom.parse(body)
        page_entries = atom.entries(parsed)
        if page_entries == []:
            break
        collected.extend(page_entries)
        link = atom.next_link(parsed)
        if not link:
            break
        # Continue from the query parameters of the next-page link.
        page_params = parse_qs(urlparse(link).query)
    yield do_return(collected)
def parsed_feed(self, *entries):
    """
    Render the given entry tuples into a feed and return its parsed entries.

    Each item in *entries* is a tuple of format arguments for the
    ``self.entry`` template; the rendered entries are spliced into the
    ``self.feed`` template, parsed, and the entry elements returned.
    """
    rendered = ''.join(self.entry.format(*args) for args in entries)
    feed_xml = self.feed.format(rendered)
    return atom.entries(atom.parse(feed_xml))
def setUp(self):
    """
    Load simple atom feed fixture
    """
    # Parse the fixture once and keep both the feed and its first entry.
    feed = parse(fixture("simple.atom"))
    self.simple_atom = feed
    self.simple_entry = entries(feed)[0]