def get(self):
    """Return the mappings for the requested index."""
    data = {"index": request.args.get("index")}
    # Queue the job for a worker and block until its result is available.
    k = str(uuid.uuid4())
    d = {"id": k, "params": data, "endpoint": "mappings"}
    redis_store.rpush("triple", json.dumps(d))
    result = get_key(redis_store, k)
    headers = {"Content-Type": "application/json"}
    return make_response(result, 200, headers)
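# The handlers in this module rely on module-level imports along the lines of
# `import json`, `import uuid`, `import pandas as pd` and
# `from flask import request, make_response, abort`, plus an application-provided
# `redis_store` client and a `get_key` helper. The helper is assumed to block
# until a worker has written the job's result back to Redis under the job id.
# The function below is a minimal illustrative sketch of such a helper under
# that assumption; the name `_get_key_sketch` and its parameters are
# hypothetical, not taken from this codebase.
def _get_key_sketch(redis_store, k, timeout=30, poll_interval=0.5):
    """Poll Redis until a worker has stored the result for job `k`."""
    import time  # local import keeps the sketch self-contained
    deadline = time.time() + timeout
    while time.time() < deadline:
        value = redis_store.get(k)
        if value is not None:
            redis_store.delete(k)  # consume the result once it has been read
            return value.decode("utf-8") if isinstance(value, bytes) else value
        time.sleep(poll_interval)
    raise TimeoutError("no result for job {0} within {1}s".format(k, timeout))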
def post(self):
    """Queue a create_kb job on the gsheets queue and return the worker's result."""
    params = request.get_json()
    gsheets_ns.logger.debug(params)
    # Queue the job for a worker and block until its result is available.
    k = str(uuid.uuid4())
    d = {"id": k, "params": params, "endpoint": "create_kb"}
    gsheets_ns.logger.debug(d)
    redis_store.rpush("gsheets", json.dumps(d))
    result = get_key(redis_store, k)
    try:
        headers = {"Content-Type": "application/json"}
        return make_response(result, 200, headers)
    except Exception as e:
        gsheets_ns.logger.error(e)
        abort(500, "Problem encountered during processing, sorry.")
def post(self):
    """Queue a search job on the triple queue and return the result as JSON or CSV."""
    data = request.get_json()
    triple_ns.logger.debug(data)
    errors = search_param_schema.validate(data, partial=True)
    triple_ns.logger.debug(errors)
    if errors:
        abort(400, str(errors))
    # Queue the job for a worker and block until its result is available.
    k = str(uuid.uuid4())
    d = {"id": k, "params": data, "endpoint": "search"}
    triple_ns.logger.debug(d)
    redis_store.rpush("triple", json.dumps(d))
    result = get_key(redis_store, k)
    try:
        headers = {}
        accept = request.headers.get("Accept")
        if accept == "application/json":
            headers["Content-Type"] = "application/json"
        if accept == "text/csv":
            # result is assumed to be the worker's JSON string.
            if data.get("raw") is True:
                # Flatten the raw hits, expanding the nested "_source" field
                # into top-level columns.
                hits = json.loads(result).get("input_data").get("hits").get("hits")
                df = pd.DataFrame(hits)
                df = pd.concat(
                    [df.drop(["_source"], axis=1), df["_source"].apply(pd.Series)],
                    axis=1)
                result = df.to_csv()
            else:
                result = pd.read_json(result).to_csv()
            headers["Content-Type"] = "text/csv"
            headers["Content-Disposition"] = "attachment; filename={0}.csv".format(k)
        # Raw results are always served as JSON, even when CSV was negotiated above.
        if data.get("raw") is True:
            headers["Content-Type"] = "application/json"
        return make_response(result, 200, headers)
    except Exception as e:
        triple_ns.logger.error(e)
        abort(500, "Problem encountered, check logs.")
def post(self):
    """Validate the parameters, queue a PubMed search job and return the result as JSON or CSV."""
    params = request.get_json()
    pubmed_ns.logger.debug(params)
    # Drop the form-only radio-button field before validation.
    if "optradio" in params:
        del params["optradio"]
    errors = search_param_schema.validate(params, partial=True)
    params["limit"] = 100
    params["list_size"] = -1
    pubmed_ns.logger.debug(errors)
    if errors:
        abort(400, str(errors))
    # Queue the job for a worker and block until its result is available.
    k = str(uuid.uuid4())
    d = {"id": k, "params": params, "endpoint": "search"}
    pubmed_ns.logger.debug(d)
    redis_store.rpush("pubmed", json.dumps(d))
    result = get_key(redis_store, k)
    try:
        headers = {}
        accept = request.headers.get("Accept")
        if accept == "application/json":
            headers["Content-Type"] = "application/json"
        if accept == "text/csv":
            # result is assumed to be the worker's JSON string; raw and non-raw
            # results are converted to CSV the same way.
            result = pd.read_json(result).to_csv()
            headers["Content-Type"] = "text/csv"
            headers["Content-Disposition"] = "attachment; filename={0}.csv".format(k)
        # Raw results are always served as JSON, even when CSV was negotiated above.
        if params.get("raw") is True:
            headers["Content-Type"] = "application/json"
        return make_response(result, 200, headers)
    except Exception as e:
        pubmed_ns.logger.error(e)
        abort(500, "Problem encountered, check logs.")
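# The queues written to above ("triple", "gsheets", "pubmed") are presumably
# drained by a separate worker process that writes each result back under the
# job id so that get_key() can pick it up. The loop below is a minimal
# illustrative sketch of such a consumer under that assumption; the name
# `_worker_loop_sketch` and the `handlers` mapping are hypothetical, not part
# of this codebase.
def _worker_loop_sketch(redis_store, queue_name, handlers):
    """Pop jobs from `queue_name`, dispatch on "endpoint", store results by job id."""
    import json  # local import keeps the sketch self-contained
    while True:
        _, raw_job = redis_store.blpop(queue_name)  # blocking pop of the next job
        job = json.loads(raw_job)
        handler = handlers[job["endpoint"]]  # e.g. {"search": ..., "mappings": ...}
        result = handler(job["params"])
        # Store the serialised result where the API's get_key() call can find it;
        # expire it defensively in case no one ever reads it.
        redis_store.set(job["id"], json.dumps(result), ex=300)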