def feed_create():
    """Create a feed for the current user, or re-use a matching existing one.

    Reads the feed type and feed fields from the POSTed form. If the user
    already has an equivalent feed, optionally connects it to the submitted
    channel instead of creating a duplicate.
    """
    try:
        # Get feed type default to RSS feeds
        feed_type = int(request.form.get('feed_type', FEED_TYPE.RSS))
        feed_class = FEED_TYPE_TO_CLASS[feed_type]
        validation_form = feed_class.create_form
    except (ValueError, KeyError):
        # ValueError: non-integer feed_type; KeyError: unknown feed type.
        # Narrowed from a bare except so unrelated errors aren't swallowed.
        return jsonify_error(status='error', message='Invalid feed type')

    form = validation_form(request.form)
    if not form.validate():
        return jsonify(status='error', message='The passed arguments failed validation')

    existing_feeds = feed_class.for_user_and_form(user=g.user, form=form)
    if existing_feeds.count():
        feed = existing_feeds.get()
        # Did we get a channel_id from the form
        channel_id = form.data.get('channel_id')
        # Update the channel id for this feed
        if channel_id:
            # If this feed is already publishing to a channel don't yank it away.
            if feed.channel_id:
                return jsonify(status='error', message='The feed is already connected to a channel.')
            feed.publish_to_stream = True
            feed.channel_id = channel_id
            feed.put()
    else:
        feed = feed_class.create_feed_from_form(g.user, form).get_result()

    return jsonify(status='ok', data=feed.to_json())
def update_feed_for_error(feed_key):
    """Bump the error counter on the feed identified by *feed_key*."""
    feed = ndb.Key(urlsafe=feed_key).get()
    if feed is None:
        raise ndb.Return(jsonify_error('Unknown feed'))

    logger.info("Incrementing error count for feed: %s errors: %s", feed_key, feed.error_count)

    # A truthy 'noop' form flag acknowledges without mutating the feed.
    if request.form.get('noop'):
        logger.info('Noop feed error feed: %s because off testing', feed_key)
        raise ndb.Return(jsonify(status='ok'))

    yield feed.track_error()
    raise ndb.Return(jsonify(status='ok'))
def update_feed_for_error(feed_key):
    """Increment the error counter for the feed referenced by the urlsafe key."""
    feed = ndb.Key(urlsafe=feed_key).get()
    if not feed:
        raise ndb.Return(jsonify_error("Unknown feed"))
    logger.info("Incrementing error count for feed: %s errors: %s", feed_key, feed.error_count)
    # 'noop' form flag: acknowledge without touching the feed (per the log,
    # used for testing).
    noop = request.form.get("noop")
    if noop:
        logger.info("Noop feed error feed: %s because off testing", feed_key)
        raise ndb.Return(jsonify(status="ok"))
    yield feed.track_error()
    raise ndb.Return(jsonify(status="ok"))
def update_all_feeds(interval_id):
    """Update all feeds for a specific interval.

    Cron-only. Pages through Feed.for_interval(interval_id), keeps feeds in
    the default polling bucket, and enqueues poll tasks in batches.
    """
    if request.headers.get('X-Appengine-Cron') != 'true':
        raise ndb.Return(jsonify_error(message='Not a cron call'))

    # Bug fix: the original iterated FEED_TYPE_TO_CLASS but never used the
    # feed class — the query was always Feed.for_interval(...), so every
    # feed was queued once per feed type. Query exactly once instead.
    feeds = Feed.for_interval(interval_id)
    success = 0
    more = True
    cursor = None
    futures = []
    while more:
        feeds_to_fetch, cursor, more = yield feeds.fetch_page_async(BATCH_SIZE, start_cursor=cursor)
        # Only poll feeds in the default bucket; others belong to the
        # external poller.
        feeds_to_fetch = filter(lambda x: getattr(x, 'external_polling_bucket', DEFAULT_POLLING_BUCKET) == DEFAULT_POLLING_BUCKET, feeds_to_fetch)
        keys = ','.join([x.key.urlsafe() for x in feeds_to_fetch])
        if not keys:
            continue
        futures.append(Queue('poll').add_async(Task(url=url_for('tq_feed_poll-canonical'), method='POST', params={'keys': keys})))
        # Bug fix: count feeds, not batches — the log line reports feeds.
        success += len(feeds_to_fetch)

    for future in futures:
        yield future

    logger.info('queued poll for %d feeds at interval_id=%s', success, interval_id)
    raise ndb.Return(jsonify(status='ok'))
def limit_feeds_for_channel_id(channel_id):
    """Apply posting limits to every visible feed attached to a channel."""
    # Optional throttle knobs from the form; None/falsy means "unchanged".
    max_stories_per_period = cast_int(request.form.get('max_stories_per_period'), default=None)
    schedule_period = cast_int(request.form.get('schedule_period'), default=None)
    dump_excess_in_period = bool(request.form.get('dump_excess_in_period'))

    users_feeds = [f for f in Feed.for_channel(channel_id) if f.visible]

    put_futures = []
    for feed in users_feeds:
        if dump_excess_in_period:
            feed.dump_excess_in_period = True
        # Setting either limit switches the feed to manual scheduling.
        if max_stories_per_period or schedule_period:
            feed.manual_control = True
        if max_stories_per_period:
            feed.max_stories_per_period = max_stories_per_period
        if schedule_period:
            feed.schedule_period = schedule_period
        put_futures.append(feed.put_async())

    # Wait for every datastore write to land before responding.
    for fut in put_futures:
        yield fut

    raise ndb.Return(jsonify(status='ok', data=export_feeds_to_json(users_feeds)))
def feeds_for_channel_id(channel_id):
    """Return the feeds connected to *channel_id* as JSON-able dicts."""
    users_feeds = get_feeds_for_channel(channel_id)
    users_feeds = export_feeds_to_json(users_feeds)
    return jsonify(status='ok', data=users_feeds)
def post_all_feeds():
    """Post all new items for feeds for a specific interval"""
    # Cron-only endpoint; App Engine sets this header on cron requests.
    if request.headers.get("X-Appengine-Cron") != "true":
        raise ndb.Return(jsonify_error(message="Not a cron call"))
    logger.info("Starting a post job")

    futures = []
    for feed_type, feed_class in FEED_TYPE_TO_CLASS.iteritems():
        # Only dirty feeds have something new to publish.
        feeds = feed_class.query(feed_class.is_dirty == True)
        logger.info("Got some feeds_count: %s feeds_type: %s", feeds.count(), feed_type)
        success = 0
        cursor = None
        more = True
        while more:
            feeds_to_fetch, cursor, more = yield feeds.fetch_page_async(BATCH_SIZE, start_cursor=cursor)
            keys = ",".join(x.key.urlsafe() for x in feeds_to_fetch)
            if not keys:
                continue
            task = Task(url=url_for("tq_feed_post-canonical"), method="POST", params={"keys": keys})
            futures.append(Queue().add_async(task))
            success += len(feeds_to_fetch)
        logger.info("queued post for %d feeds feed_type:%s", success, feed_type)

    for future in futures:
        yield future

    logger.info("Finished Post Job")
    yield write_epoch_to_stat(Stat, "post_job")
    raise ndb.Return(jsonify(status="ok"))
def post_all_feeds():
    """Post all new items for feeds for a specific interval"""
    # Cron-only endpoint; App Engine sets this header on cron requests.
    if request.headers.get('X-Appengine-Cron') != 'true':
        raise ndb.Return(jsonify_error(message='Not a cron call'))
    logger.info('Starting a post job')
    futures = []
    for feed_type, feed_class in FEED_TYPE_TO_CLASS.iteritems():
        # Only dirty feeds have unpublished items.
        feeds = feed_class.query(feed_class.is_dirty == True)
        logger.info("Got some feeds_count: %s feeds_type: %s", feeds.count(), feed_type)
        success = 0
        more = True
        cursor = None
        while more:
            feeds_to_fetch, cursor, more = yield feeds.fetch_page_async(BATCH_SIZE, start_cursor=cursor)
            keys = ','.join([x.key.urlsafe() for x in feeds_to_fetch])
            if not keys:
                continue
            futures.append(Queue().add_async(Task(url=url_for('tq_feed_post-canonical'), method='POST', params={'keys': keys})))
            success += len(feeds_to_fetch)
        logger.info('queued post for %d feeds feed_type:%s', success, feed_type)
    # Wait for all enqueue RPCs to finish.
    for future in futures:
        yield future
    logger.info('Finished Post Job')
    # Record when the post job last completed, for monitoring.
    yield write_epoch_to_stat(Stat, 'post_job')
    raise ndb.Return(jsonify(status='ok'))
def update_feed_url(feed_key):
    """Point the feed identified by *feed_key* at a new feed_url."""
    feed = ndb.Key(urlsafe=feed_key).get()
    if feed is None:
        raise ndb.Return(jsonify_error('Unknown feed'))

    new_url = request.form.get('feed_url')
    logger.info("Updating feed: %s old feed url: %s new feed url: %s", feed_key, feed.feed_url, new_url)
    feed.feed_url = new_url

    # 'noop' form flag: acknowledge without persisting the change.
    if request.form.get('noop'):
        logger.info('Noop feed_url update: feed: %s because off testing', feed_key)
        raise ndb.Return(jsonify(status='ok'))

    yield feed.put_async()
    raise ndb.Return(jsonify(status='ok'))
def try_push_resub():
    """Retry PuSH hub subscription for feeds that have a hub but are unsubscribed."""
    if request.headers.get("X-Appengine-Cron") != "true":
        raise ndb.Return(jsonify_error(message="Not a cron call"))

    unsubscribed_feeds = Feed.query(Feed.hub != None, Feed.subscribed_at_hub == False)  # noqa
    qit = unsubscribed_feeds.iter()
    errors = 0
    success = 0
    count = 0
    futures = []
    # Kick off every subscription first so the RPCs run concurrently.
    while (yield qit.has_next_async()):
        feed = qit.next()
        futures.append((feed, Feed.subscribe_to_hub(feed)))

    for feed, future in futures:
        count += 1
        try:
            yield future
            success += 1
        except Exception:
            # Narrowed from a bare except so control-flow exceptions
            # (SystemExit, KeyboardInterrupt) are not swallowed.
            errors += 1
            logger.exception("Failed to PuSH subscribe feed:%s" % (feed.feed_url,))

    logger.info("Tried to call hub for num_unsubscribed_feeds:%s success:%s, errors:%s", count, success, errors)
    raise ndb.Return(jsonify(status="ok"))
def try_push_resub():
    """Retry PuSH hub subscription for feeds with a hub that are unsubscribed."""
    if request.headers.get('X-Appengine-Cron') != 'true':
        raise ndb.Return(jsonify_error(message='Not a cron call'))
    unsubscribed_feeds = Feed.query(Feed.hub != None, Feed.subscribed_at_hub == False)  # noqa
    qit = unsubscribed_feeds.iter()
    errors = 0
    success = 0
    count = 0
    futures = []
    # Start all subscription calls before yielding so they run concurrently.
    while (yield qit.has_next_async()):
        feed = qit.next()
        futures.append((feed, Feed.subscribe_to_hub(feed)))
    for feed, future in futures:
        count += 1
        try:
            yield future
            success += 1
        # NOTE(review): bare except also swallows control-flow exceptions;
        # narrowing to Exception would be safer.
        except:
            errors += 1
            logger.exception('Failed to PuSH subscribe feed:%s' % (feed.feed_url, ))
    logger.info('Tried to call hub for num_unsubscribed_feeds:%s success:%s, errors:%s', count, success, errors)
    raise ndb.Return(jsonify(status='ok'))
def all_feeds():
    """Post all new items for feeds for a specific interval"""
    def feed_to_dict(feed):
        # Minimal per-feed payload the external poller needs.
        return {
            "feed_key": feed.key.urlsafe(),
            "feed_url": feed.feed_url,
            "etag": feed.etag,
            "last_hash": feed.last_fetched_content_hash,
            "update_interval": UPDATE_INTERVAL_TO_MINUTES.get(feed.update_interval),
        }

    bucket = int(request.args.get("bucket_id", 1))
    feeds_response = []
    for feed_cls in [Feed, RssFeed]:
        query = feed_cls.query(feed_cls.external_polling_bucket == bucket)
        cursor, more = None, True
        while more:
            page, cursor, more = yield query.fetch_page_async(1000, start_cursor=cursor)
            feeds_response.extend(feed_to_dict(feed) for feed in page)

    # Tag this batch so a poller run can be correlated in the logs.
    poller_run_id = uuid.uuid4().hex
    logger.info("Poller run %s dispatched with %d feeds", poller_run_id, len(feeds_response))
    response = {"poller_run_id": poller_run_id, "feeds": feeds_response}
    yield write_epoch_to_stat(Stat, "external_poll_get_all_feeds")
    raise ndb.Return(jsonify(status="ok", data=response))
def update_all_feeds(interval_id):
    """Update all feeds for a specific interval"""
    if request.headers.get("X-Appengine-Cron") != "true":
        raise ndb.Return(jsonify_error(message="Not a cron call"))
    # NOTE(review): feed_class is never used inside this loop and the query
    # is always Feed.for_interval(...), so each feed appears to be queued
    # once per entry in FEED_TYPE_TO_CLASS — verify; this looks like
    # duplicate polling.
    for feed_type, feed_class in FEED_TYPE_TO_CLASS.iteritems():
        feeds = Feed.for_interval(interval_id)
        success = 0
        more = True
        cursor = None
        futures = []
        while more:
            feeds_to_fetch, cursor, more = yield feeds.fetch_page_async(BATCH_SIZE, start_cursor=cursor)
            # Keep only feeds polled by this app (default bucket).
            feeds_to_fetch = filter(
                lambda x: getattr(x, "external_polling_bucket", DEFAULT_POLLING_BUCKET) == DEFAULT_POLLING_BUCKET,
                feeds_to_fetch,
            )
            keys = ",".join([x.key.urlsafe() for x in feeds_to_fetch])
            if not keys:
                continue
            futures.append(
                Queue("poll").add_async(
                    Task(url=url_for("tq_feed_poll-canonical"), method="POST", params={"keys": keys})
                )
            )
            # NOTE(review): this counts batches, but the log line below
            # reports "%d feeds".
            success += 1
        for future in futures:
            yield future
        logger.info("queued poll for %d feeds at interval_id=%s", success, interval_id)
    raise ndb.Return(jsonify(status="ok"))
def limit_feeds_for_channel_id(channel_id):
    """Limit all feeds connected to a channel"""
    # Optional throttle knobs from the form; None means "leave unchanged".
    max_stories_per_period = cast_int(request.form.get('max_stories_per_period'), default=None)
    schedule_period = cast_int(request.form.get('schedule_period'), default=None)
    dump_excess_in_period = bool(request.form.get('dump_excess_in_period'))
    users_feeds = [feed for feed in Feed.for_channel(channel_id) if feed.visible]
    futures = []
    for feed in users_feeds:
        if dump_excess_in_period:
            feed.dump_excess_in_period = True
        # Setting either limit flips the feed to manual scheduling.
        if max_stories_per_period or schedule_period:
            feed.manual_control = True
        if max_stories_per_period:
            feed.max_stories_per_period = max_stories_per_period
        if schedule_period:
            feed.schedule_period = schedule_period
        futures.append(feed.put_async())
    # Wait for all datastore writes to complete.
    for future in futures:
        yield future
    users_feeds = export_feeds_to_json(users_feeds)
    raise ndb.Return(jsonify(status='ok', data=users_feeds))
def delete_feed(feed_type, feed_id):
    """Delete one of the current user's feeds and all of its entries."""
    feed = FEED_TYPE_TO_CLASS[feed_type].get_by_id(feed_id, parent=g.user.key)
    if feed is None:
        raise ndb.Return(jsonify_error(message="Can't find that feed"))
    # Remove the entries first, then the feed entity itself.
    yield Entry.delete_for_feed(feed)
    yield feed.key.delete_async()
    raise ndb.Return(jsonify(status='ok'))
def update_feed_url(feed_key):
    """Change the stored feed_url of the feed referenced by the urlsafe key."""
    feed = ndb.Key(urlsafe=feed_key).get()
    if not feed:
        raise ndb.Return(jsonify_error("Unknown feed"))
    logger.info(
        "Updating feed: %s old feed url: %s new feed url: %s", feed_key, feed.feed_url, request.form.get("feed_url")
    )
    feed.feed_url = request.form.get("feed_url")
    # 'noop' form flag: log and skip the datastore write (per the log,
    # used for testing).
    noop = request.form.get("noop")
    if noop:
        logger.info("Noop feed_url update: feed: %s because off testing", feed_key)
        raise ndb.Return(jsonify(status="ok"))
    yield feed.put_async()
    raise ndb.Return(jsonify(status="ok"))
def feeds():
    """Return every visible feed owned by the current user."""
    users_feeds = []
    for feed_class in FEED_TYPE_TO_CLASS.values():
        for feed in feed_class.for_user(g.user):
            if feed.visible:
                users_feeds.append(feed.to_json())
    return jsonify(status='ok', data=users_feeds)
def published_entries_for_feed(feed_type, feed_id):
    """Return one of the user's feeds with its 20 latest published entries."""
    feed = FEED_TYPE_TO_CLASS[feed_type].get_by_id(feed_id, parent=g.user.key)
    if feed is None:
        return jsonify_error(message="Can't find that feed")
    feed_data = feed.to_json()
    latest = Entry.latest(feed, order_by='-published_at', include_overflow=True).fetch(20)
    feed_data['entries'] = [entry.to_json() for entry in latest]
    return jsonify(status='ok', data=feed_data)
def feed(feed_type, feed_id):
    """Get a feed"""
    # The parent=g.user.key lookup scopes the feed to the current user.
    feed = FEED_TYPE_TO_CLASS[feed_type].get_by_id(feed_id, parent=g.user.key)
    if feed is None:
        return jsonify_error(message="Can't find that feed")
    feed_data = feed.to_json()
    feed_data['entries'] = [
        entry.to_dict(include=['guid', 'published', 'extra_info'])
        for entry in Entry.latest_for_feed(feed).fetch(10)
    ]
    return jsonify(status='ok', data=feed_data)
def tq_inbound_feed():
    """Task-queue worker: process one inbound feed payload."""
    # Only accept requests dispatched via the inbound-posts task queue.
    if request.headers.get("X-Appengine-Queuename") != "inbound-posts":
        raise ndb.Return(jsonify_error(message="Not a cron call"))
    form = request.form
    feed_key = form.get("feed_key")
    logger.info("Task to process inbound feed: %s", feed_key)
    yield inbound_feed_process(feed_key, form.get("feed_data"), form.get("etag"), form.get("last_hash"))
    raise ndb.Return(jsonify(status="ok"))
def tq_inbound_feed():
    """Task-queue worker that processes one inbound feed payload."""
    # Only accept requests dispatched via the inbound-posts task queue.
    if request.headers.get('X-Appengine-Queuename') != 'inbound-posts':
        raise ndb.Return(jsonify_error(message='Not a cron call'))
    feed_key = request.form.get('feed_key')
    feed_data = request.form.get('feed_data')
    etag = request.form.get('etag')
    last_hash = request.form.get('last_hash')
    logger.info('Task to process inbound feed: %s', feed_key)
    yield inbound_feed_process(feed_key, feed_data, etag, last_hash)
    raise ndb.Return(jsonify(status='ok'))
def feed_push_update_app(feed_key):
    """Accept a pushed feed body for *feed_key* and process it inline."""
    feed = ndb.Key(urlsafe=feed_key).get()
    if feed is None:
        raise ndb.Return(jsonify_error("Unknown feed"))

    # 'noop' query arg acknowledges without processing the payload.
    if request.args.get("noop"):
        logger.info("Noop feed publish %s because off testing", feed_key)
        raise ndb.Return(jsonify(status="ok"))

    yield inbound_feed_process(
        feed_key=feed_key,
        feed_data=request.stream.read(),
        etag=request.args.get("etag"),
        last_hash=request.args.get("last_hash"),
    )
    yield write_epoch_to_stat(Stat, "external_poll_post_feed")
    raise ndb.Return(jsonify(status="ok"))
def feed_push_update_app(feed_key):
    """Accept a pushed feed body for *feed_key* and process it."""
    feed = ndb.Key(urlsafe=feed_key).get()
    if not feed:
        raise ndb.Return(jsonify_error('Unknown feed'))
    # 'noop' query arg: acknowledge without processing the payload.
    noop = request.args.get('noop')
    if noop:
        logger.info('Noop feed publish %s because off testing', feed_key)
        raise ndb.Return(jsonify(status='ok'))
    post_data = {
        'feed_key': feed_key,
        'feed_data': request.stream.read(),
        'etag': request.args.get('etag'),
        'last_hash': request.args.get('last_hash'),
    }
    # Processed inline rather than via the inbound-posts queue (see below).
    yield inbound_feed_process(**post_data)
    # yield Queue('inbound-posts').add_async(Task(url=url_for('tq_inbound_feed'), method='POST', params=post_data))
    yield write_epoch_to_stat(Stat, 'external_poll_post_feed')
    raise ndb.Return(jsonify(status='ok'))
def unpublished_entries_for_feed(feed_type, feed_id):
    """Return one of the user's feeds with its 20 latest unpublished entries."""
    feed = FEED_TYPE_TO_CLASS[feed_type].get_by_id(feed_id, parent=g.user.key)
    if feed is None:
        return jsonify_error(message="Can't find that feed")
    feed_data = feed.to_json()
    pending = Entry.latest_unpublished(feed).fetch(20)
    feed_data['entries'] = [entry.to_json() for entry in pending]
    return jsonify(status='ok', data=feed_data)
def monitor_jobs():
    """Are the jobs running"""
    # Last-completed epoch recorded by each background job.
    post_value = yield get_epoch_from_stat(Stat, 'post_job')
    get_all_feeds_value = yield get_epoch_from_stat(Stat, 'external_poll_get_all_feeds')
    post_feed_value = yield get_epoch_from_stat(Stat, 'external_poll_post_feed')
    raise ndb.Return(jsonify(status='ok', data={
        'post': post_value,
        'external_poll_get_all_feeds': get_all_feeds_value,
        'external_poll_post_feed': post_feed_value,
    }))
def email_to_feed(email):
    """Turn an inbound email into an entry on the matching feed and publish it."""
    logger.info("Email: %s", email)
    # The local part of the address encodes "<unique_key>_<feed_type>_<version>".
    account, _ = email.split("@", 1)
    logger.info("Account: %s", account)
    unique_key, feed_type, version = account.split("_")
    feed_type = int(feed_type)
    version = int(version)
    logger.info("unique_key: %s feed_type:%s version:%s", unique_key, feed_type, version)
    feed = FEED_TYPE_TO_CLASS[feed_type].for_email(unique_key)
    logger.info("Found feed: %s", feed)
    mail_message = mail.InboundEmailMessage(request.stream.read())
    entry = yield feed.create_entry_from_mail(mail_message)
    yield publish_entry(entry, feed)
    raise ndb.Return(jsonify(status="ok"))
def feed(feed_type, feed_id):
    """Get a feed"""
    # parent=g.user.key scopes the lookup to the current user's feeds.
    feed = FEED_TYPE_TO_CLASS[feed_type].get_by_id(feed_id, parent=g.user.key)
    if not feed:
        return jsonify_error(message="Can't find that feed")
    feed_data = feed.to_json()
    # Attach the 10 most recent entries with a trimmed field set.
    entries = [
        entry.to_dict(include=['guid', 'published', 'extra_info'])
        for entry in Entry.latest_for_feed(feed).fetch(10)
    ]
    feed_data['entries'] = entries
    return jsonify(status='ok', data=feed_data)
def email_to_feed(email):
    """Convert an inbound email into an entry on the matching feed."""
    logger.info('Email: %s', email)
    account, _ = email.split('@', 1)
    logger.info('Account: %s', account)
    # Local part encodes "<unique_key>_<feed_type>_<version>".
    unique_key, feed_type, version = account.split('_')
    feed_type, version = map(int, (feed_type, version))
    logger.info('unique_key: %s feed_type:%s version:%s', unique_key, feed_type, version)
    feed = FEED_TYPE_TO_CLASS[feed_type].for_email(unique_key)
    logger.info('Found feed: %s', feed)
    mail_message = mail.InboundEmailMessage(request.stream.read())
    entry = yield feed.create_entry_from_mail(mail_message)
    yield publish_entry(entry, feed)
    raise ndb.Return(jsonify(status='ok'))
def monitor_jobs():
    """Are the jobs running"""
    # Read the last-completed epoch recorded by each background job.
    post_value = yield get_epoch_from_stat(Stat, 'post_job')
    external_poll_get_all_feeds = yield get_epoch_from_stat(
        Stat, 'external_poll_get_all_feeds')
    external_poll_post_feed = yield get_epoch_from_stat(
        Stat, 'external_poll_post_feed')
    response = {
        'post': post_value,
        'external_poll_get_all_feeds': external_poll_get_all_feeds,
        'external_poll_post_feed': external_poll_post_feed,
    }
    raise ndb.Return(jsonify(status='ok', data=response))
def feed_create():
    """Create a feed for the current user, re-using an existing match."""
    try:
        # Get feed type default to RSS feeds
        feed_type = int(request.form.get('feed_type', FEED_TYPE.RSS))
        feed_class = FEED_TYPE_TO_CLASS[feed_type]
        validation_form = feed_class.create_form
    # NOTE(review): bare except hides unrelated errors; (ValueError,
    # KeyError) would cover bad/unknown feed types.
    except:
        return jsonify_error(status='error', message='Invalid feed type')
    form = validation_form(request.form)
    if not form.validate():
        return jsonify(status='error', message='The passed arguments failed validation')
    existing_feeds = feed_class.for_user_and_form(user=g.user, form=form)
    if existing_feeds.count():
        feed = existing_feeds.get()
        # Did we get a channel_id from the form
        channel_id = form.data.get('channel_id')
        # Update the channel id for this feed
        if channel_id:
            # If this feed is already publishing to a channel don't yank it away.
            if feed.channel_id:
                return jsonify(
                    status='error', message='The feed is already connected to a channel.')
            feed.publish_to_stream = True
            feed.channel_id = channel_id
            feed.put()
    else:
        feed = feed_class.create_feed_from_form(g.user, form).get_result()
    return jsonify(status='ok', data=feed.to_json())
def deferred_task():
    """Run a deferred-library payload posted by the task queue."""
    # Only the App Engine task-queue service sets this header.
    if not request.headers.get("X-AppEngine-QueueName"):
        raise ndb.Return(jsonify_error(message="Not a Task call"))
    payload = request.stream.read()
    try:
        deferred.run(payload)
    except deferred.SingularTaskFailure:
        # 408 makes the task queue retry this task.
        logger.debug("Failure executing task, task retry forced")
        raise ndb.Return(jsonify_error(code=408))
    except deferred.PermanentTaskFailure:
        logger.debug("Permanent Failure")
        raise ndb.Return(jsonify_error(code=500))
    raise ndb.Return(jsonify(status="ok"))
def deferred_task():
    """Execute a deferred-library payload delivered by the task queue."""
    # Only the App Engine task-queue service sets this header.
    if not request.headers.get('X-AppEngine-QueueName'):
        raise ndb.Return(jsonify_error(message='Not a Task call'))
    data = request.stream.read()
    try:
        deferred.run(data)
    except deferred.SingularTaskFailure:
        # 408 response forces the task queue to retry this task.
        raise ndb.Return(jsonify_error(code=408))
    except deferred.PermanentTaskFailure:
        logger.debug("Permanent Failure")
        raise ndb.Return(jsonify_error(code=500))
    raise ndb.Return(jsonify(status='ok'))
def inbound_search_matches():
    """Handle prospective-search match callbacks (currently log-only)."""
    if request.headers.get('X-Appengine-Queuename') != 'default':
        raise ndb.Return(jsonify_error(message='Not a cron call'))
    # Each posted 'id' is the urlsafe key of a subscription that matched.
    keys = [ndb.Key(urlsafe=sub_id) for sub_id in request.form.getlist('id')]
    subs = yield ndb.get_multi_async(keys)
    doc = prospective_search.get_document(request.form)
    for sub in subs:
        logger.info('prospective: Would have sent to %s %s', sub, doc)
    logger.info('prospective: Request form: %s', request.form)
    raise ndb.Return(jsonify(status='ok'))
def feed_change(feed_type, feed_id):
    """Change a feed"""
    feed_class = FEED_TYPE_TO_CLASS[feed_type]
    form = feed_class.update_form(request.form)
    if not form.validate():
        return jsonify_error(message="Invalid update data")
    # parent=g.user.key scopes the lookup to the current user's feeds.
    feed = feed_class.get_by_id(feed_id, parent=g.user.key)
    if feed is None:
        return jsonify_error(message="Can't find that feed")
    # Copy validated form fields onto the entity and persist.
    form.populate_obj(feed)
    feed.put()
    feed_data = feed.to_json()
    feed_data['entries'] = [
        entry.to_dict(include=['title', 'link', 'published', 'published_at'])
        for entry in Entry.latest_for_feed(feed).fetch(10)
    ]
    return jsonify(status='ok', data=feed_data)
def feed_entry_publish(feed_type, feed_id, entry_id):
    """Get a feed"""
    logger.info('Manually publishing Feed:%s Entry: %s', feed_id, entry_id)
    entry_key = ndb.Key(urlsafe=entry_id)
    feed = FEED_TYPE_TO_CLASS[feed_type].get_by_id(feed_id, parent=g.user.key)
    # The entry must belong to a feed owned by the current user.
    if not (feed and entry_key.parent() == feed.key):
        return jsonify_error(message="Can't find that feed")
    entry = entry_key.get()
    if entry is None:
        return jsonify_error(message="Can't find that entry")
    publish_entry(entry, feed, ignore_publish_state=True).get_result()
    # Clear the overflow flag so the entry counts as normally published.
    entry.overflow = False
    entry.put()
    return jsonify(status='ok')
def feed_validate(): """preview a feed""" feed_type = int(request.form.get('feed_type', 1)) form = FEED_TYPE_TO_CLASS[feed_type].preview_form(request.form) if not form.validate(): raise ndb.Return(jsonify(status='error', form_errors=form.errors)) feed = Feed() form.populate_obj(feed) feed.preview = True error = None parsed_feed = None try: parsed_feed, resp, feed = yield fetch_parsed_feed_for_feed(feed) feed.update_feed_from_parsed_feed(parsed_feed) if len(parsed_feed.entries) == 0: error = 'The url you entred is not a valid feed.' except FetchException, e: error = unicode(e)
def tq_feed_poll():
    """Poll some feeds feed"""
    if not request.headers.get('X-AppEngine-QueueName'):
        raise ndb.Return(jsonify_error(message='Not a Task call'))
    keys = request.form.get('keys')
    if not keys:
        logger.info('Task Queue poll no keys')
        raise ndb.Return(jsonify_error(code=500))

    success = 0
    errors = 0
    entries_created = 0
    ndb_keys = [ndb.Key(urlsafe=key) for key in keys.split(',')]
    feeds = yield ndb.get_multi_async(ndb_keys)
    # Feeds handled by the external poller are skipped here.
    feeds = filter(lambda x: not getattr(x, 'use_external_poller', False), feeds)
    logger.info('Got %d feed(s) for polling', len(feeds))

    # Kick off all fetches first so they run concurrently.
    futures = []
    for i, feed in enumerate(feeds):
        if not feed:
            errors += 1
            logger.info("Couldn't find feed for key: %s", ndb_keys[i])
            continue
        futures.append((i, feed.process_feed(None, None)))

    for i, future in futures:
        try:
            parsed_feed, num_new_entries = yield future
            entries_created += num_new_entries
            success += 1
        except Exception:
            # Narrowed from a bare except so control-flow exceptions escape.
            errors += 1
            logger.exception('Failed to update feed:%s, i=%s' % (feeds[i].feed_url, i))

    yield write_epoch_to_stat(Stat, 'poll_job')
    logger.info('Polled feeds entries_created: %s success: %s errors: %s', entries_created, success, errors)
    raise ndb.Return(jsonify(status='ok'))
def tq_feed_poll():
    """Poll some feeds feed"""
    # Task-queue-only endpoint.
    if not request.headers.get("X-AppEngine-QueueName"):
        raise ndb.Return(jsonify_error(message="Not a Task call"))
    keys = request.form.get("keys")
    if not keys:
        logger.info("Task Queue poll no keys")
        raise ndb.Return(jsonify_error(code=500))
    success = 0
    errors = 0
    entries_created = 0
    ndb_keys = [ndb.Key(urlsafe=key) for key in keys.split(",")]
    feeds = yield ndb.get_multi_async(ndb_keys)
    # Skip feeds handled by the external poller.
    feeds = filter(lambda x: not getattr(x, "use_external_poller", False), feeds)
    logger.info("Got %d feed(s) for polling", len(feeds))
    # Kick off all fetches first so they run concurrently.
    futures = []
    for i, feed in enumerate(feeds):
        if not feed:
            errors += 1
            logger.info("Couldn't find feed for key: %s", ndb_keys[i])
            continue
        futures.append((i, feed.process_feed(None, None)))
    for i, future in futures:
        parsed_feed = None
        try:
            parsed_feed, num_new_entries = yield future
            entries_created += num_new_entries
            success += 1
        # NOTE(review): bare except; narrowing to Exception would be safer.
        except:
            errors += 1
            feed = feeds[i]
            logger.exception("Failed to update feed:%s, i=%s" % (feed.feed_url, i))
    yield write_epoch_to_stat(Stat, "poll_job")
    logger.info("Polled feeds entries_created: %s success: %s errors: %s", entries_created, success, errors)
    raise ndb.Return(jsonify(status="ok"))
def save_feed_preview(feed_type, feed_id):
    """preview a saved feed"""
    form = FEED_TYPE_TO_CLASS[feed_type].update_form(request.args)
    logger.info('form errors %s', form.errors)
    logger.info('form.publish_to_stream errors %s', form.publish_to_stream.errors)
    # Bug fix: the original `for errorMessages, fieldName in
    # enumerate(form.errors)` bound the enumerate index to errorMessages,
    # so the inner loop iterated an int and raised TypeError whenever any
    # error existed. Iterate the error mapping directly instead.
    for field_name, error_messages in form.errors.items():
        for err in error_messages:
            logger.info("Feed errrors, %s", err)
    if not form.validate():
        return jsonify_error(message="Invalid update data")
    feed = FEED_TYPE_TO_CLASS[feed_type].get_by_id(feed_id, parent=g.user.key)
    if not feed:
        return jsonify_error(message="Can't find that feed")
    form.populate_obj(feed)
    # Mark as a preview; presumably prevents the changes from going live —
    # confirm against the Feed model.
    feed.preview = True
    preview_entries = Entry.entry_preview(Entry.latest_for_feed_by_added(feed).fetch(3), feed, format=True)
    return jsonify(status='ok', data=preview_entries)
def feed_change(feed_type, feed_id):
    """Change a feed"""
    form = FEED_TYPE_TO_CLASS[feed_type].update_form(request.form)
    if not form.validate():
        return jsonify_error(message="Invalid update data")
    # parent=g.user.key scopes the lookup to the current user's feeds.
    feed = FEED_TYPE_TO_CLASS[feed_type].get_by_id(feed_id, parent=g.user.key)
    if not feed:
        return jsonify_error(message="Can't find that feed")
    # Copy validated form fields onto the entity and persist.
    form.populate_obj(feed)
    feed.put()
    feed_data = feed.to_json()
    entries = [
        entry.to_dict(include=['title', 'link', 'published', 'published_at'])
        for entry in Entry.latest_for_feed(feed).fetch(10)
    ]
    feed_data['entries'] = entries
    return jsonify(status='ok', data=feed_data)
def save_feed_preview(feed_type, feed_id):
    """preview a saved feed"""
    form = FEED_TYPE_TO_CLASS[feed_type].update_form(request.args)
    logger.info('form errors %s', form.errors)
    logger.info('form.publish_to_stream errors %s', form.publish_to_stream.errors)
    # NOTE(review): enumerate() yields (index, key), so errorMessages is an
    # int here and the inner loop raises TypeError whenever form.errors is
    # non-empty; this likely meant `for fieldName, errorMessages in
    # form.errors.items()` — confirm and fix.
    for errorMessages, fieldName in enumerate(form.errors):
        for err in errorMessages:
            logger.info("Feed errrors, %s", err)
    if not form.validate():
        return jsonify_error(message="Invalid update data")
    feed = FEED_TYPE_TO_CLASS[feed_type].get_by_id(feed_id, parent=g.user.key)
    if not feed:
        return jsonify_error(message="Can't find that feed")
    form.populate_obj(feed)
    # Mark as a preview; presumably prevents the changes from going live —
    # confirm against the Feed model.
    feed.preview = True
    preview_entries = Entry.entry_preview(
        Entry.latest_for_feed_by_added(feed).fetch(3), feed, format=True)
    return jsonify(status='ok', data=preview_entries)
def all_feeds():
    """List feeds in the requested external-polling bucket for the poller."""
    def feed_to_dict(feed):
        # Minimal per-feed payload the external poller needs.
        return {
            'feed_key': feed.key.urlsafe(),
            'feed_url': feed.feed_url,
            'etag': feed.etag,
            'last_hash': feed.last_fetched_content_hash,
            'update_interval': UPDATE_INTERVAL_TO_MINUTES.get(feed.update_interval)
        }
    bucket = int(request.args.get('bucket_id', 1))
    feed_clss = [Feed, RssFeed]
    feeds_response = []
    for feed_cls in feed_clss:
        qit = feed_cls.query(feed_cls.external_polling_bucket == bucket)
        more = True
        cursor = None
        while more:
            feeds_to_fetch, cursor, more = yield qit.fetch_page_async(1000, start_cursor=cursor)
            feeds_response.extend((feed_to_dict(feed) for feed in feeds_to_fetch))
    # Tag the batch so poller runs can be correlated in the logs.
    poller_run_id = uuid.uuid4().hex
    logger.info('Poller run %s dispatched with %d feeds', poller_run_id, len(feeds_response))
    response = {
        'poller_run_id': poller_run_id,
        'feeds': feeds_response,
    }
    yield write_epoch_to_stat(Stat, 'external_poll_get_all_feeds')
    raise ndb.Return(jsonify(status='ok', data=response))
def tq_feed_post_job():
    """Post some feeds feed"""
    if not request.headers.get('X-AppEngine-QueueName'):
        raise ndb.Return(jsonify_error(message='Not a Task call'))
    keys = request.form.get('keys')
    if not keys:
        logger.info('Task Queue post no keys')
        raise ndb.Return(jsonify_error(code=500))

    success = 0
    errors = 0
    num_posted = 0
    ndb_keys = [ndb.Key(urlsafe=key) for key in keys.split(',')]
    feeds = yield ndb.get_multi_async(ndb_keys)
    logger.info('Got %d feed(s) for posting', len(feeds))

    # Kick off all publishes first so they run concurrently.
    futures = []
    for feed in feeds:
        futures.append((feed, Entry.publish_for_feed(feed)))

    for feed, future in futures:
        try:
            num_posts = yield future
            if num_posts is not None:
                num_posted += num_posts
            success += 1
        except Exception:
            # Narrowed from a bare except so control-flow exceptions escape.
            errors += 1
            if feed:
                logger.exception('Failed to Publish feed:%s' % (feed.feed_url, ))
            else:
                logger.exception('Failed to publish non-exsistant feed')

    logger.info('Post Feeds success:%s errors: %s num_posted: %s', success, errors, num_posted)
    raise ndb.Return(jsonify(status='ok'))
def tq_feed_post_job():
    """Post some feeds feed"""
    # Task-queue-only endpoint.
    if not request.headers.get("X-AppEngine-QueueName"):
        raise ndb.Return(jsonify_error(message="Not a Task call"))
    keys = request.form.get("keys")
    if not keys:
        logger.info("Task Queue post no keys")
        raise ndb.Return(jsonify_error(code=500))
    success = 0
    errors = 0
    num_posted = 0
    ndb_keys = [ndb.Key(urlsafe=key) for key in keys.split(",")]
    feeds = yield ndb.get_multi_async(ndb_keys)
    logger.info("Got %d feed(s) for posting", len(feeds))
    # Kick off all publishes first so they run concurrently.
    futures = []
    for feed in feeds:
        futures.append((feed, Entry.publish_for_feed(feed)))
    for feed, future in futures:
        try:
            num_posts = yield future
            if num_posts is not None:
                num_posted += num_posts
            success += 1
        # NOTE(review): bare except; narrowing to Exception would be safer.
        except:
            errors += 1
            if feed:
                logger.exception("Failed to Publish feed:%s" % (feed.feed_url,))
            else:
                logger.exception("Failed to publish non-exsistant feed")
    logger.info("Post Feeds success:%s errors: %s num_posted: %s", success, errors, num_posted)
    raise ndb.Return(jsonify(status="ok"))
def me():
    """Return the currently authenticated user."""
    return jsonify(data=g.adn_user, status='ok')
try: parsed_feed, resp, feed = yield fetch_parsed_feed_for_feed(feed) feed.update_feed_from_parsed_feed(parsed_feed) if len(parsed_feed.entries) == 0: error = 'The url you entred is not a valid feed.' except FetchException, e: error = unicode(e) except: error = 'Something went wrong while fetching your URL.' logger.exception('Feed Preview: Failed to update feed:%s' % (feed.feed_url, )) logger.info('Parsed feed: %s', parsed_feed) if error: raise ndb.Return(jsonify(status='error', message=error)) raise ndb.Return(jsonify(status='ok', data=feed.to_json())) @app.route('/api/feeds/<int:feed_type>/<int:feed_id>', methods=['GET']) def feed(feed_type, feed_id): """Get a feed""" feed = FEED_TYPE_TO_CLASS[feed_type].get_by_id(feed_id, parent=g.user.key) if not feed: return jsonify_error(message="Can't find that feed") feed_data = feed.to_json() entries = [ entry.to_dict(include=['guid', 'published', 'extra_info']) for entry in Entry.latest_for_feed(feed).fetch(10)
parsed_feed = None try: parsed_feed, resp, feed = yield fetch_parsed_feed_for_feed(feed) feed.update_feed_from_parsed_feed(parsed_feed) if len(parsed_feed.entries) == 0: error = 'The url you entred is not a valid feed.' except FetchException, e: error = unicode(e) except: error = 'Something went wrong while fetching your URL.' logger.exception('Feed Preview: Failed to update feed:%s' % (feed.feed_url, )) logger.info('Parsed feed: %s', parsed_feed) if error: raise ndb.Return(jsonify(status='error', message=error)) raise ndb.Return(jsonify(status='ok', data=feed.to_json())) @app.route('/api/feeds/<int:feed_type>/<int:feed_id>', methods=['GET']) def feed(feed_type, feed_id): """Get a feed""" feed = FEED_TYPE_TO_CLASS[feed_type].get_by_id(feed_id, parent=g.user.key) if not feed: return jsonify_error(message="Can't find that feed") feed_data = feed.to_json() entries = [entry.to_dict(include=['guid', 'published', 'extra_info']) for entry in Entry.latest_for_feed(feed).fetch(10)] feed_data['entries'] = entries
def feeds(): """List all examples""" users_feeds = [] for feed_type in FEED_TYPE_TO_CLASS.values(): users_feeds += [feed.to_json() for feed in feed_type.for_user(g.user) if feed.visible] return jsonify(status='ok', data=users_feeds)