import pickle

from flask import jsonify

# `event` and DATA_CACHE_PATH are assumed to be defined elsewhere in this module.


def get_keyword_search_feed(keyword):
    # URL-encoded spaces arrive as '%20'; restore them before querying.
    keyword = keyword.replace('%20', ' ')
    try:
        d = event.get_feed(search=keyword, sortBy='relevance', numEvents=20)
    except Exception:
        d = {'error': "Something's gone wrong"}
    return jsonify(d)

def get_location_search_feed(location):
    # URL-encoded spaces arrive as '%20'; restore them before querying.
    location = location.replace('%20', ' ')
    try:
        d = event.get_feed(location=location, sortBy='relevance', numEvents=20)
    except Exception:
        d = {'error': "Something's gone wrong"}
    return jsonify(d)

def export_keyword_search_feed(search):
    # Fetch the feed for this search term and cache it to disk as a pickle.
    feed_dict = event.get_feed(search=search, sortBy='relevance', numEvents=20)
    path = DATA_CACHE_PATH + 'search/search_' + search.replace(' ', '') + '.pickle'
    with open(path, 'wb') as file:
        pickle.dump(feed_dict, file)
    print("Exported " + path)

def export_category_search_feed(category):
    # Fetch the feed for this category and cache it to disk as a pickle.
    feed_dict = event.get_feed(category=category, sortBy='relevance', numEvents=20)
    path = DATA_CACHE_PATH + 'search/category_' + category + '.pickle'
    with open(path, 'wb') as file:
        pickle.dump(feed_dict, file)
    print("Exported " + path)

def export_location_search_feed(location):
    # Fetch the feed for this location and cache it to disk as a pickle.
    feed_dict = event.get_feed(location=location, sortBy='relevance', numEvents=20)
    path = DATA_CACHE_PATH + 'search/location_' + location.replace(' ', '') + '.pickle'
    with open(path, 'wb') as file:
        pickle.dump(feed_dict, file)
    print("Exported " + path)

def get_category_search_feed(category):
    try:
        d = event.get_feed(category=category, sortBy='relevance', numEvents=20)
    except Exception:
        d = {'error': "Something's gone wrong"}
    return jsonify(d)

def get_default_feed():
    # Default feed: no filters, sorted by relevance.
    try:
        d = event.get_feed(sortBy='relevance', numEvents=35)
    except Exception:
        d = {'error': "Something's gone wrong"}
    return jsonify(d)

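# The jsonify() return values suggest the get_* handlers above are meant to be
# served by a Flask app. A minimal registration sketch is shown below; the app
# object and the URL patterns are assumptions, not taken from this module.
#
#     from flask import Flask
#
#     app = Flask(__name__)
#     app.add_url_rule('/feed', view_func=get_default_feed)
#     app.add_url_rule('/search/<keyword>', view_func=get_keyword_search_feed)
#     app.add_url_rule('/category/<category>', view_func=get_category_search_feed)
#     app.add_url_rule('/location/<location>', view_func=get_location_search_feed)
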
def export_main_feed():
    # Fetch the main feed (sorted by date) and cache it to disk as a pickle.
    feed_dict = event.get_feed(sortBy='date', numEvents=50)
    with open(DATA_CACHE_PATH + 'feed.pickle', 'wb') as file:
        pickle.dump(feed_dict, file)
    print("Exported " + DATA_CACHE_PATH + 'feed.pickle')

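# A hedged counterpart to the export_* functions above: reading a cached feed
# back from disk. The helper name and its missing-file behaviour are
# illustrative assumptions, not part of the original module.
def load_cached_feed(path):
    # Return the previously pickled feed dict at `path`, or None if it cannot be read.
    try:
        with open(path, 'rb') as file:
            return pickle.load(file)
    except (FileNotFoundError, pickle.UnpicklingError):
        return None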