Example #1
    def post(self):
        """
        Use feedparser to save any blog posts from the given URL since
        the given timestamp
        """

        feed_key = self.request.get("feed_key", None)
        if not feed_key:
            logging.error("No feed key provided")
            return

        feed = ContentFeed.get(feed_key)

        if not feed:
            logging.error("Couldn't find feed in the DB \"%s\"" % feed_key)
            return

        logging.debug("Dequeued feed: \"%s\"" % (feed.url))

        last_update = timeutils.add_utc_tzinfo(feed.last_update)
        logging.debug("Last processed feed on: %s" % last_update.ctime())

        try:
            result = urlfetch.fetch(feed.url)
        except urlfetch.Error as e:
            logging.warning("Exception when fetching feed: \"%s\" %s" %
                            (feed.url, e))
            return
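
The handler above is truncated right after the fetch, so the feedparser step its docstring promises never appears. What follows is a minimal sketch of the missing step, not the original code: it assumes the ContentEntry model used in Example #3 below and that feedparser is importable in the app.

    import feedparser
    from datetime import datetime

    parsed = feedparser.parse(result.content)
    for item in parsed.entries:
        # feedparser exposes publish times as a UTC time.struct_time
        when = getattr(item, "published_parsed", None)
        if when is None:
            continue  # entry carries no usable timestamp
        published = datetime(*when[:6])
        if timeutils.add_utc_tzinfo(published) <= last_update:
            continue  # already saved on an earlier run
        ContentEntry(title=item.title, pub_date=published).put()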
Example #2
    def get(self):
        """
        Query the DB and queue any feeds that haven't been processed since
        update_interval
        """

        update_interval = timedelta(hours=1)

        current_datetime = timeutils.now_utc()

        query = ContentFeed.all()
        query.filter("last_update <", current_datetime - update_interval)

        if query.count() == 0:
            logging.debug("No entries to queue")
        else:
            for feed in query:
                # owner_id lets the consumer look up the member's OAuth
                # token and secret

                last_update = timeutils.add_utc_tzinfo(feed.last_update)
                feed_consumer_params = {
                    "feed_key": feed.key(),
                    "owner_id": feed.owner.user_id()
                }

                try:
                    taskqueue.add(url="/blogs/feed/consumer",
                                  params=feed_consumer_params)
                    logging.debug("Queued feed: \"%s\" %s" %
                                  (feed.url, last_update.ctime()))
                except taskqueue.Error:
                    logging.error("Unable to queue feed: \"%s\"", feed.url)
                    return
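
The ContentFeed model itself never appears in these excerpts. Judging only from the fields the handlers touch (url, last_update, owner), a minimal sketch on the old google.appengine.ext.db API might look like this; the property choices are assumptions:

    from google.appengine.ext import db

    class ContentFeed(db.Model):
        url = db.LinkProperty()              # feed URL the consumer fetches
        last_update = db.DateTimeProperty()  # stored naive; see add_utc_tzinfo
        owner = db.UserProperty()            # member whose OAuth creds apply

The datastore hands DateTimeProperty values back as naive datetimes, which is presumably why both handlers re-attach UTC tzinfo via timeutils.add_utc_tzinfo before comparing or printing them.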
Example #3
    def get(self):
        """
        Query the DB and queue any entries that haven't been uploaded yet
        """

        query = ContentEntry.all()
        query.filter("pub_date <", timeutils.now_utc())
        query.filter("ning_id =", None)

        if query.count() == 0:
            logging.debug("No entries to queue")
        else:
            for entry in query:

                # Backoff method for trying to upload
                if entry.retry_count > 100:
                    logging.info("Too many retries, deleting \"%s\"" %
                        entry.title)
                    entry.delete()
                    continue
                next_try = timeutils.add_utc_tzinfo(entry.pub_date +
                    timedelta(minutes=entry.retry_count**2))
                if next_try > timeutils.now_utc():
                    logging.debug("Too soon to retry, will try again at %s" %
                        next_try.ctime())
                    continue

                entry_consumer_params = {
                    "entry_key": entry.key()}
                try:
                    taskqueue.add(url="/blogs/entry/consumer",
                        params=entry_consumer_params)
                    logging.debug("Queued entry: \"%s\" %s" %
                        (entry.title, entry.pub_date.ctime()))
                except taskqueue.Error:
                    logging.error("Unable to queue entry: \"%s\"",
                        entry.title)
                    return
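
Likewise, ContentEntry is only implied. From the fields used above (title, pub_date, ning_id, retry_count), a plausible sketch, again with assumed property types:

    from google.appengine.ext import db

    class ContentEntry(db.Model):
        title = db.StringProperty()
        pub_date = db.DateTimeProperty()            # stored naive UTC
        ning_id = db.StringProperty(default=None)   # set once uploaded to Ning
        retry_count = db.IntegerProperty(default=0)

Note the backoff is quadratic but anchored to pub_date rather than to the last attempt: the nth retry becomes eligible n**2 minutes after publication (1, 4, 9, ... minutes), so consecutive retries are spaced 2n+1 minutes apart, and an entry is deleted outright once retry_count passes 100 (roughly a week of attempts).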