def index():
    """Flask view: ingest one request/response log message into Elasticsearch.

    Routing metadata (request id, message type, target index name) is taken
    from the request headers via log_helper; the JSON body is parsed and
    handed to process_and_update_elastic_doc.

    Returns:
        str: the processed document on success; a human-readable error
        message when the payload is too large or processing fails.
    """
    request_id = log_helper.extract_request_id(request.headers)
    type_header = request.headers.get(log_helper.TYPE_HEADER_NAME)
    message_type = log_helper.parse_message_type(type_header)
    index_name = log_helper.build_index_name(request.headers)

    body = request.get_json(force=True)

    # Max size is configurable with env var or defaults to constant.
    max_payload_bytes = log_helper.get_max_payload_bytes(MAX_PAYLOAD_BYTES)

    # Reject oversized payloads up front based on the declared length header.
    body_length = request.headers.get(log_helper.LENGTH_HEADER_NAME)
    if body_length and int(body_length) > int(max_payload_bytes):
        too_large_message = (
            "body too large for "
            + index_name
            + "/"
            + log_helper.DOC_TYPE_NAME
            + "/"
            + request_id
            + " adding "
            + message_type
        )
        print(too_large_message)
        sys.stdout.flush()
        return too_large_message

    # force=True can still hand back a JSON-encoded *string* when the client
    # double-encodes the payload; decode once more so body is always a dict.
    # (was: `if not type(body) is dict` — isinstance is the correct check)
    if not isinstance(body, dict):
        body = json.loads(body)

    # NOTE(review): reconnects per request even though a module-level
    # connection exists — presumably intentional for freshness; confirm.
    es = log_helper.connect_elasticsearch()

    try:
        # Now process and update the doc.
        doc = process_and_update_elastic_doc(
            es, message_type, body, request_id, request.headers, index_name
        )
        return str(doc)
    except Exception as ex:
        # Boundary handler: log and return a best-effort message rather than
        # surface a 500 traceback to the logging client.
        print(ex)
        sys.stdout.flush()
        return "problem logging request"
# Request logger service: accepts logged request/response payloads over HTTP
# and indexes them into Elasticsearch.
import json
import logging
import sys
from collections.abc import Iterable

import numpy as np
# FIX: Flask and request were used below but never imported in this file.
from flask import Flask, request

import log_helper

# Default cap on accepted payload size; overridable via env var
# (see log_helper.get_max_payload_bytes).
MAX_PAYLOAD_BYTES = 300000

app = Flask(__name__)

print("starting logger")
sys.stdout.flush()

# Silence werkzeug's per-request access log noise.
log = logging.getLogger("werkzeug")
log.setLevel(logging.ERROR)

# Module-level connection established at startup.
es = log_helper.connect_elasticsearch()


@app.route("/", methods=["GET", "POST"])
def index():
    """Flask view: ingest one request/response log message into Elasticsearch.

    Routing metadata (request id, message type, target index name) is taken
    from the request headers via log_helper; the JSON body is parsed and
    handed to process_and_update_elastic_doc.

    Returns:
        str: the processed document on success; a human-readable error
        message when the payload is too large or processing fails.
    """
    request_id = log_helper.extract_request_id(request.headers)
    type_header = request.headers.get(log_helper.TYPE_HEADER_NAME)
    message_type = log_helper.parse_message_type(type_header)
    index_name = log_helper.build_index_name(request.headers)

    body = request.get_json(force=True)

    # Max size is configurable with env var or defaults to constant.
    max_payload_bytes = log_helper.get_max_payload_bytes(MAX_PAYLOAD_BYTES)

    body_length = request.headers.get(log_helper.LENGTH_HEADER_NAME)
    if body_length and int(body_length) > int(max_payload_bytes):
        too_large_message = (
            "body too large for "
            + index_name
            + "/"
            + log_helper.DOC_TYPE_NAME
            + "/"
            + request_id
            + " adding "
            + message_type
        )
        print(too_large_message)
        sys.stdout.flush()
        return too_large_message

    # force=True can still yield a JSON-encoded string when the client
    # double-encodes; decode once more so body is always a dict.
    if not isinstance(body, dict):
        body = json.loads(body)

    es = log_helper.connect_elasticsearch()

    try:
        # Now process and update the doc.
        doc = process_and_update_elastic_doc(
            es, message_type, body, request_id, request.headers, index_name
        )
        return str(doc)
    except Exception as ex:
        print(ex)
        sys.stdout.flush()
        return "problem logging request"