def handle(self, *args, **options):
    """Queue an asynchronous refresh task for every site feed."""
    urls = Feed.site_objects.all().values_list("url", flat=True)
    for url in urls:
        # Hand the feed off to the task queue; create=False means the
        # feed already exists and is only being refreshed.
        self.stdout.write("Scheduling feed with URL=%s..." % url)
        process_feed.delay(url, create=False)
    # Notify listeners that a full scheduling pass has completed.
    feeds_updated.send(sender=self, instance=self)
def handle(self, *args, **options):
    """Synchronously refresh every site feed and print a timing summary."""
    new_posts_count = 0
    start = datetime.datetime.utcnow().replace(tzinfo=utc)
    for feed_url in Feed.site_objects.all().values_list("url", flat=True):
        # create=False: refresh existing feeds only, don't create new rows.
        new_posts_count += process_feed(feed_url, create=False)
    delta = datetime.datetime.utcnow().replace(tzinfo=utc) - start
    # print() call form replaces the old Python 2 print statement, for
    # consistency with the Python 3 syntax used elsewhere in this file.
    print("Added %s posts in %d seconds" % (new_posts_count, delta.seconds))
    feeds_updated.send(sender=self, instance=self)
def handle(self, *args, **options):
    """Refresh all site feeds in-process and print how long it took."""
    started_at = datetime.now()
    added = 0
    for url in Feed.site_objects.all().values_list("url", flat=True):
        # create=False: only update feeds that already exist.
        added += process_feed(url, create=False)
    elapsed = datetime.now() - started_at
    print("Added {} posts in {} seconds".format(added, elapsed.seconds))
    feeds_updated.send(sender=self, instance=self)
def update_feeds():
    """Refresh every site feed synchronously and emit feeds_updated."""
    started = datetime.now()
    total_new = 0
    for url in Feed.site_objects.all().values_list("url", flat=True):
        # create=False: refresh existing feeds without creating new ones.
        total_new += process_feed(url, create=False)
    took = datetime.now() - started
    print("Added {} posts in {} seconds".format(total_new, took.seconds))
    # Module-level task: there is no command instance, so both are None.
    feeds_updated.send(sender=None, instance=None)
def handle(self, *args, **options):
    """Refresh all site feeds, logging progress to the 'PlanetLogger' logger."""
    new_posts_count = 0
    plogger = logging.getLogger('PlanetLogger')
    plogger.info("Update All Feeds")
    start = datetime.now()
    for feed_url in Feed.site_objects.all().values_list("url", flat=True):
        # create=False: update feeds that already exist.
        new_posts_count += process_feed(feed_url, create=False)
    delta = datetime.now() - start
    # Lazy %-style args: the message is only formatted if INFO is enabled,
    # instead of eagerly building the string with the % operator.
    plogger.info("Added %s posts in %d seconds", new_posts_count, delta.seconds)
    feeds_updated.send(sender=self, instance=self)
def handle(self, *args, **options):
    """Dispatch a refresh of every site feed via the configured backend.

    Supports three modes selected by ASYNC_BACKEND: celery task queue,
    huey task queue, or a plain synchronous call.
    """
    for url in Feed.site_objects.all().values_list("url", flat=True):
        if ASYNC_BACKEND == "celery":
            # Queue through celery and report what was scheduled.
            self.stdout.write("Scheduling feed with URL=%s..." % url)
            process_feed.delay(url, create=False)
        elif ASYNC_BACKEND == "huey":
            # Imported lazily so huey is only required when selected.
            from planet.tasks import huey_process_feed
            huey_process_feed(url, create=False)
        else:
            # No async backend configured: run the update in-process.
            process_feed(url, create=False)
    feeds_updated.send(sender=self, instance=self)
    return
def handle(self, *args, **options):
    """Refresh all site feeds in parallel using a gevent Group pool."""
    plogger = logging.getLogger(__name__)
    plogger.info("Parallel Update All Feeds")
    new_posts_count = 0
    start = datetime.now()
    feed_urls = Feed.site_objects.all().values_list("url", flat=True)
    pool = Group()
    # NOTE(review): unlike the serial variants, process_feed is invoked here
    # without create=False, so it runs with its default create flag — confirm
    # that default matches the intended update-only behavior.
    for result in pool.imap_unordered(process_feed, feed_urls):
        new_posts_count += result
    delta = datetime.now() - start
    # Lazy %-args: formatting is deferred until the record is actually emitted.
    plogger.info("Added %s posts in %d seconds", new_posts_count, delta.seconds)
    feeds_updated.send(sender=self, instance=self)
def update_feeds():
    """Enqueue an asynchronous refresh task for every site feed.

    Intended to be driven by celery beat, e.g.:

        CELERYBEAT_SCHEDULE = {
            'update_feeds': {
                'task': 'planet.tasks.update_feeds',
                'schedule': timedelta(hours=1)
            }
        }
    """
    urls = Feed.site_objects.all().values_list("url", flat=True)
    for url in urls:
        print("Scheduling feed URL={}...".format(url))
        # create=False: the feed already exists; only fetch new entries.
        process_feed.delay(url, create=False)
    print("Done!")
    # Module-level task, so there is no command instance to pass along.
    feeds_updated.send(sender=None, instance=None)
def handle(self, *args, **options):
    """Synchronously refresh every site feed, then signal completion."""
    urls = Feed.site_objects.all().values_list("url", flat=True)
    for url in urls:
        # create=False: refresh existing feeds without creating new ones.
        process_feed(url, create=False)
    feeds_updated.send(sender=self, instance=self)