def _dispatch_entries(feed):
    """Dispatch every entry in *feed*, oldest first, to all event listeners.

    Entries are ordered by their parsed ``updated`` timestamp, then each
    listener is invoked for each entry; the resulting effects are driven
    by ``self._coiterate``.
    """
    # Actually sort by updated date.
    ordered = sorted(entries(feed), key=lambda e: parse_date(updated(e)))
    # Flattened stream: for each entry (outer), fire every listener (inner) —
    # same iteration order as chain.from_iterable over per-entry generators.
    events = (listener(entry)
              for entry in ordered
              for listener in self._event_listeners)
    return self._coiterate(events)
def _dispatch_entries(feed):
    """Feed each entry, in ascending ``updated`` order, to every listener.

    Builds one generator of listener calls per entry, flattens them with
    ``chain.from_iterable``, and hands the combined stream to
    ``self._coiterate`` to drive.
    """
    # Actually sort by updated date.
    by_updated = lambda e: parse_date(updated(e))
    ordered = sorted(entries(feed), key=by_updated)
    per_entry = ((listener(entry) for listener in self._event_listeners)
                 for entry in ordered)
    return self._coiterate(chain.from_iterable(per_entry))
def test_updated(self):
    """
    :func:`otter.indexer.updated` finds the updated timestamp in the
    first entry in the sample simple atom feed
    """
    expected = '2003-12-13T18:30:02Z'
    self.assertEqual(updated(self.simple_entry), expected)
def extract_clb_drained_at(feed):
    """
    Extract time when node was changed to DRAINING from a CLB atom feed.
    Will return node's creation time if node was created with DRAINING.
    Return None if it couldn't be found for any reason.

    :param list feed: ``list`` of atom entry :class:`Elements`
    :returns: drained_at EPOCH in seconds
    :rtype: float
    """
    # First entry whose summary matches the draining pattern, else None.
    match = next(
        (entry for entry in feed if _DRAINING_RE.match(atom.summary(entry))),
        None)
    if match is None:
        return None
    return timestamp_to_epoch(atom.updated(match))
def extract_CLB_drained_at(feed):
    """
    Extract time when node was changed to DRAINING from a CLB atom feed.

    :param str feed: Atom feed of the node
    :returns: EPOCH in seconds
    :rtype: float
    :raises ValueError: if the newest entry's summary is not a
        draining-update message
    """
    # TODO: This function temporarily only looks at last entry assuming that
    # it was draining operation. May need to look at all entries in reverse
    # order and check for draining operation. This could include paging to
    # further entries
    newest = atom.entries(atom.parse(feed))[0]
    summary = atom.summary(newest)
    # Guard clause: bail out unless this looks like a DRAINING update.
    if ('Node successfully updated' not in summary
            or 'DRAINING' not in summary):
        raise ValueError('Unexpected summary: {}'.format(summary))
    return timestamp_to_epoch(atom.updated(newest))