def update_keywords(request):
    """Append a keyword/message pair to the stored keyword list.

    Args:
        request (flask.Request): HTTP request object; expects 'keyword'
            and 'message' query parameters.

    Returns:
        A JSON 'success' response with CORS headers set, or the plain
        string 'failure - wrong arguments' when either parameter is
        missing.
    """
    # BUG FIX: the original guard was `'keyword' and 'message' in request.args`,
    # which only tests membership of 'message' — the bare string 'keyword' is
    # always truthy. Both keys must be checked explicitly.
    if request.args and 'keyword' in request.args and 'message' in request.args:
        keyword = request.args.get('keyword')
        message = request.args.get('message')

        # Download the current keyword list, or create one if it doesn't exist.
        keywords_string = '[]'
        if storage.check_if_exists(BUCKETNAME, KEYWORDSFILE):
            keywords_string = storage.download_blob(BUCKETNAME, KEYWORDSFILE)

        # Turn the stored string back into a list and append the new pair.
        # NOTE(review): literal_eval cannot execute code but will raise on
        # malformed data — presumably this blob is only ever written by this
        # function; confirm there is no other writer.
        keywords_list = ast.literal_eval(keywords_string)
        keywords_list.append((keyword, message))

        # Upload the updated list back to storage.
        storage.upload_blob(BUCKETNAME, str(keywords_list), KEYWORDSFILE)

        retval = jsonify('success')
        # allow CORS for GET and POST requests
        retval.headers.set('Access-Control-Allow-Origin', '*')
        retval.headers.set('Access-Control-Allow-Methods', 'GET, POST')
        return retval
    else:
        return 'failure - wrong arguments'
def get_summary_for_url(url):
    """Return the cached JSON summary for *url*, generating it on a cache miss.

    Returns None when the summarizer yields no usable title or sentences.
    """
    blob_name = "summary/%s.json" % (hashlib.md5(url.encode('utf8')).hexdigest())

    # Serve straight from the bucket when a summary was already stored.
    if storage.blob_exists(BUCKET_NAME, blob_name):
        return storage.get_blob(BUCKET_NAME, blob_name)

    general_data, summary_data = summarize(url)

    # Bail out (implicitly returning None) on unusable summarizer output.
    if "title" not in general_data:
        return
    if "sentences" not in summary_data:
        return

    payload = {
        "title": general_data['title'],
        "author": general_data['author'],
        "date": general_data['publishDate'],
        "article": general_data['article'],
        "image": general_data['image'],
        "sentences": summary_data['sentences'],
        "url": url,
    }
    serialized = json.dumps(payload, indent=True)

    # Persist for future requests, then hand back the fresh summary.
    storage.upload_blob(BUCKET_NAME, blob_name, serialized)
    return serialized
def get_news_bing(cat="ScienceAndTechnology"):
    """Fetch Bing news for *cat* via RapidAPI, archive it, return its public URL.

    Args:
        cat: Bing news category name (default "ScienceAndTechnology").

    Returns:
        The public storage URL of the uploaded JSON snapshot, dated today.
    """
    t = datetime.date.today()
    conn = http.client.HTTPSConnection(BING_HOST)
    headers = {
        'x-bingapis-sdk': "true",
        'x-rapidapi-key': RAPIDAPI_KEY,
        'x-rapidapi-host': BING_HOST
    }
    # FIX: the connection was never closed — release the socket even if the
    # request or read raises.
    try:
        conn.request(
            "GET",
            f"/news?count=100&mkt=en-US&safeSearch=Off&textFormat=Raw&originalImg=true&category={cat}",
            headers=headers)
        res = conn.getresponse()
        data = res.read()
    finally:
        conn.close()
    news = json.loads(data)

    # Re-serialize (pretty-printed) and archive under a per-category, per-day key.
    binary = json.dumps(news, indent=True)
    fn = "news/bing_%s_%s.json" % (cat, t.isoformat())
    storage.upload_blob(BUCKET_NAME, fn, binary)
    return "https://%s.storage.googleapis.com/%s" % (BUCKET_NAME, fn)
def upload_unexpected_response(message):
    """Append *message* to the pipe-delimited log of unexpected queries.

    Args:
        message: The unexpected query text to record.
    """
    # FIX: guard the download the same way update_keywords does — start from
    # an empty string when the blob does not exist yet, instead of failing on
    # the very first unexpected query.
    # NOTE(review): assumes download_blob raises/misbehaves on a missing blob,
    # mirroring the check_if_exists pattern used elsewhere — confirm.
    unexpected_queries = ''
    if storage.check_if_exists(BUCKETNAME, UNEXPECTED_QUERIES_FILE):
        unexpected_queries = storage.download_blob(BUCKETNAME, UNEXPECTED_QUERIES_FILE)

    # Entries are stored as a single '|'-separated string.
    unexpected_queries += f"{message}|"

    # Upload the extended log back to storage.
    storage.upload_blob(BUCKETNAME, unexpected_queries, UNEXPECTED_QUERIES_FILE)
def uploadFileRequest():
    """Write the posted code to a local temp file, upload it to blob storage,
    and remove the temp file.

    Returns:
        A JSON success response.
    """
    data = jsonifyData(request.data)
    temp_path = "temp/" + data["path"]
    try:
        with open(temp_path, "w+") as file:
            file.write(data["code"])
        # Return value of upload_blob was previously bound but unused.
        upload_blob(temp_path, os.path.join(getPath(data), data["path"]))
    finally:
        # FIX: remove the temp file even when the upload raises, so failed
        # requests don't leave stale files behind.
        if os.path.exists(temp_path):
            os.remove(temp_path)
    return jsonify({"type": "success"})
def get_news_newscaf(cat="technology"):
    """Fetch NewsCaf news for *cat* via RapidAPI, archive it, return its public URL.

    Args:
        cat: NewsCaf category name (default "technology").

    Returns:
        The public storage URL of the uploaded JSON snapshot, dated today.
    """
    t = datetime.date.today()
    conn = http.client.HTTPSConnection(NEWSCAF_HOST)
    headers = {'x-rapidapi-key': RAPIDAPI_KEY, 'x-rapidapi-host': NEWSCAF_HOST}
    # FIX: the connection was never closed — release the socket even if the
    # request or read raises.
    try:
        conn.request("GET", f"/apirapid/news/{cat}/", headers=headers)
        res = conn.getresponse()
        data = res.read()
    finally:
        conn.close()
    news = json.loads(data)

    # Re-serialize (pretty-printed) and archive under a per-category, per-day key.
    binary = json.dumps(news, indent=True)
    fn = "newscaf_%s_%s.json" % (cat, t.isoformat())
    storage.upload_blob(BUCKET_NAME, fn, binary)
    return "https://%s.storage.googleapis.com/%s" % (BUCKET_NAME, fn)
# --- Script entry: build and publish the daily "best of HN" podcast. ---

# Create working directories if missing (makedirs is idempotent and also
# creates any missing parents, unlike the previous exists-check + mkdir).
for d in [PODCASTS_FOLDER, NEWS_DATA_FOLDER, TEMP_FOLDER]:
    os.makedirs(d, exist_ok=True)

# Default to yesterday's date so the daily run covers the completed day.
parser = argparse.ArgumentParser()
parser.add_argument(
    "-d", "--date",
    default=(datetime.date.today() - datetime.timedelta(days=1)).isoformat())
args = parser.parse_args()
news_date = args.date

news_file = os.path.join(NEWS_DATA_FOLDER, 'news_data_%s.json' % news_date)

logging.info('getting news data...')
if not os.path.exists(news_file):
    # Cache miss: fetch article data and persist it for re-runs.
    news_data = get_news_data(get_best_hn_urls(NUMBER_ARTICLES, news_date))
    # FIX: close the file handles deterministically (previously used bare
    # open() inside json.dump/json.load, leaking the handle and relying on
    # GC to flush the write).
    with open(news_file, "w") as f:
        json.dump(news_data, f)
else:
    with open(news_file) as f:
        news_data = json.load(f)

# Only synthesize the audio when today's episode isn't already on disk.
fn = os.path.join(PODCASTS_FOLDER, "bestofhn_%s.mp3" % news_date)
if not os.path.exists(fn):
    generate_podcast(news_data, news_date, fn)

# Regenerate and publish the RSS feed pointing at the episode archive.
rss_feed = generate_rss_feed()
print(rss_feed)
storage.upload_blob(BUCKET_NAME, 'bestofhn.rss', rss_feed)