def save_article_stream_to_analytics_store(article_stream, cache, analytics_store):
    for i, article_event in enumerate(article_stream):
        print(u"Processing article event {} - {}:{} {}".format(
            i,
            article_event["item_id"],
            article_event.get("resolved_title", "No Title"),
            article_event["time_updated"]
        ))
        analytics_store.add_event(
            "pocket_read_articles",
            article_event,
            timestamp=from_iso_8601(article_event["timestamp"])
        )

        # We store the time_updated so if anything crashes we can continue from
        # that point and don't get duplicates. The extra five seconds are so that
        # we don't get the last item again.
        last_updated = from_iso_8601(article_event["time_updated"]) + timedelta(seconds=5)
        cache.set(POCKET_CACHE_KEY, to_iso_8601(last_updated))
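The function above leans on a few helpers that aren't shown in this snippet: from_iso_8601, to_iso_8601, timedelta, and the POCKET_CACHE_KEY constant used as the resume checkpoint. A minimal sketch of what they could look like, assuming Python 3.7+ and the standard library datetime module (the names come from the snippet; the actual implementations and the cache key value may differ):

from datetime import datetime, timedelta

# Cache key under which the "resume from here" timestamp is kept.
# The name comes from the snippet above; the value here is illustrative.
POCKET_CACHE_KEY = "pocket:last_updated"


def from_iso_8601(value):
    # Parse an ISO 8601 string, e.g. "2016-08-22T12:41:52", into a datetime.
    return datetime.fromisoformat(value)


def to_iso_8601(value):
    # Serialise a datetime back into an ISO 8601 string for the cache.
    return value.isoformat()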
def convert_timestamp_to_isoformat(timestamp):
    if timestamp == "0":
        return None
    else:
        return to_iso_8601(from_unix_timestamp(timestamp))
def convert_timestamp_to_isoformat(timestamp): if timestamp == "0": return None else: return to_iso_8601(from_unix_timestamp(timestamp))