Example #1
    def c_store():
        # Consumer loop: `p_futures`, `res_q`, `fin_q`, `logger` and
        # `add_entry` all come from the enclosing scope.
        kill = False

        while True:
            if all(each.done() for each in p_futures):
                logger.debug("future is DONE")
                kill = True

            if kill and res_q.empty():  # you can only quit when all your work is done
                break

            try:
                item = res_q.get(block=False)
            except Queue.Empty:  # stdlib Queue module (queue.Empty on Python 3)
                pass
            else:
                logger.debug("grabbing item, queue size: {0}".format(res_q.qsize()))
                add_entry(item)
                logger.debug('...done storing')

                # When done storing, add the feed_id to the "finished" queue
                fin_q.append(item['posts'][0]['feed_id'])
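The snippet above depends on names created elsewhere (p_futures, res_q, fin_q, add_entry), so it is not runnable on its own. Below is a self-contained sketch of the same shutdown pattern, where the consumer only exits once every producer future is done and the results queue is drained. It uses Python 3 spellings (queue.Empty rather than Queue.Empty), and every name in it is illustrative rather than taken from the original project.

import logging
import queue
import threading
from concurrent.futures import ThreadPoolExecutor

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)

res_q = queue.Queue()   # results handed from producers to the consumer
fin_q = []              # feed ids whose entries have been stored

def produce(feed_id):
    # Stand-in for the real fetch/parse work done by each future.
    res_q.put({'posts': [{'feed_id': feed_id}]})

def consume(p_futures):
    done = False
    while True:
        if all(f.done() for f in p_futures):
            done = True
        if done and res_q.empty():  # only quit once all work is drained
            break
        try:
            item = res_q.get(block=False)
        except queue.Empty:
            continue
        logger.debug("storing entries for feed %s", item['posts'][0]['feed_id'])
        fin_q.append(item['posts'][0]['feed_id'])

with ThreadPoolExecutor(max_workers=4) as pool:
    p_futures = [pool.submit(produce, fid) for fid in (1, 2, 3)]
    consumer = threading.Thread(target=consume, args=(p_futures,))
    consumer.start()
    consumer.join()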
Example #2
            # Feed already exists: just associate it with the user.
            result = associate_feed_with_user()
            # should probably update entries after adding...
            return {'message': result, 'data': False}

    else:
        logger.info("totally new feed, adding everything")

        get_result = getfeeds.get_feed_meta(returned_feed)
        logger.debug(get_result)

        storefeeds.store_feed_data(get_result)
        # Re-query to pick up the id of the feed we just stored.
        exists = db_session.query(Feed).filter_by(feed_url=returned_feed).first()
        associate_feed_with_user()
        f_obj = {'url': returned_feed, 'feed_id': exists.id}
        get_entries_res = getfeeds.feed_request(f_obj)
        storefeeds.add_entry(get_entries_res)

        # To add a new feed we need to: actually fetch the feed, add it to the
        # feed table, add it to the userfeed table, and get its new entries
        # (later: add to a cache?).
        # Note: this is currently running parsefeed twice, once to check for
        # existence and once to actually add to the feed table.

        return {'message': "success", 'data': False}


def remove_user_feed(user, uf_id):
    """Remove a user's feed from their subscriptions list.
       Keep the feed and its entries, as other users might be subscribed too."""
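
The body of remove_user_feed is not included in this excerpt. As a rough sketch only, assuming a SQLAlchemy-style UserFeed association model and the same db_session seen above (both assumptions not confirmed by the snippet), the subscription row could be deleted while leaving the feed and its entries in place:

def remove_user_feed(user, uf_id):
    """Remove a user's feed from their subscriptions list.
       Keep the feed and its entries, as other users might be subscribed too."""
    # UserFeed is a hypothetical association model; only the subscription row
    # is removed, the Feed and its Entry rows stay for other subscribers.
    uf = db_session.query(UserFeed).filter_by(id=uf_id, user_id=user.id).first()
    if uf is None:
        return {'message': "subscription not found", 'data': False}
    db_session.delete(uf)
    db_session.commit()
    return {'message': "success", 'data': False}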