def post_json(url, **kwargs):
    """
    POST to `url`, expecting a JSON response.

    Accepts either a `json` or `data` keyword; the value is serialized with
    value2json and sent as utf-8 bytes.  Raises (via Log.error) when neither
    is given, when the response body is not valid JSON, or when the status
    code is not 200/201/202.
    """
    if 'json' in kwargs:
        kwargs['data'] = unicode2utf8(value2json(kwargs['json']))
        del kwargs['json']  # do not pass both `json` and `data` through to post()
    elif 'data' in kwargs:
        kwargs['data'] = unicode2utf8(value2json(kwargs['data']))
    else:
        Log.error(u"Expecting `json` parameter")

    response = post(url, **kwargs)
    content = response.content
    try:
        # FIX: guard the parse (matches the sibling post_json variant) so a
        # non-JSON body produces an informative error, not a raw decode error
        details = json2value(utf82unicode(content))
    except Exception as e:
        Log.error(u"Unexpected return value {{content}}", content=content, cause=e)

    if response.status_code not in [200, 201, 202]:
        if "template" in details:
            # body looks like a serialized Exception; chain it as the cause
            Log.error(u"Bad response code {{code}}", code=response.status_code, cause=Except.wrap(details))
        else:
            Log.error(u"Bad response code {{code}}\n{{details}}", code=response.status_code, details=details)
    else:
        return details
def test_rest_get(self):
    """
    Exercise the REST GET endpoint: fill a container with four rows covering
    every (a, b) combination, query with a=1 via the URL query string, and
    expect exactly the two a==1 rows back (in either order).
    """
    settings = self.utils.fill_container({
        "data": [
            {"a": 0, "b": 0},
            {"a": 0, "b": 1},
            {"a": 1, "b": 0},
            {"a": 1, "b": 1}
        ],
        "query": {"from": ""}  # DUMMY LINE
    })

    # build a GET URL that filters on a==1
    url = URL(self.utils.testing.query)
    url.path = "json/" + settings.index
    url.query = {"a": 1}

    response = self.utils.try_till_response(str(url), data=b"")
    self.assertEqual(response.status_code, 200)

    # ORDER DOES NOT MATTER, TEST EITHER
    expected1 = unicode2utf8(convert.value2json(
        [{"a": 1, "b": 0}, {"a": 1, "b": 1}],
        pretty=True))
    expected2 = unicode2utf8(convert.value2json(
        [{"a": 1, "b": 1}, {"a": 1, "b": 0}],
        pretty=True))
    try:
        self.assertEqual(response.all_content, expected1)
    except Exception:
        self.assertEqual(response.all_content, expected2)
def get_raw_json(path):
    """
    GET handler: treat the query string as equality filters over table
    `path` and return the matching rows as pretty-printed JSON.
    """
    with RegisterThread():
        active_data_timer = Timer("total duration")
        body = flask.request.get_data()
        try:
            with active_data_timer:
                args = scrub_args(flask.request.args)
                # `limit` is pulled out of the filters; default to 10 rows
                limit = args.limit if args.limit else 10
                args.limit = None

                frum = find_container(path)
                result = jx.run(
                    {
                        "from": path,
                        "where": {"eq": args},  # remaining query-string args are equality filters
                        "limit": limit,
                        "format": "list"
                    },
                    frum)

                if isinstance(result, Container):  # TODO: REMOVE THIS CHECK, jx SHOULD ALWAYS RETURN Containers
                    result = result.format("list")

            # timer has exited; record the total service time in the response
            result.meta.active_data_response_time = active_data_timer.duration

            response_data = unicode2utf8(convert.value2json(result.data, pretty=True))
            Log.note("Response is {{num}} bytes", num=len(response_data))
            return Response(response_data, status=200)
        except Exception as e:
            e = Except.wrap(e)
            return send_error(active_data_timer, body, e)
def test_multiple_agg_on_same_field(self):
    """
    Smoke test against the real service: two different aggregates (max and
    count) over the same field in one select clause.  Only logs the result;
    no assertions.
    """
    if self.not_real_service():
        return

    test = wrap({
        "query": {
            "from": {
                "type": "elasticsearch",
                "settings": {
                    "host": ES_CLUSTER_LOCATION,
                    "index": "unittest",
                    "type": "test_result"
                }
            },
            "select": [
                {"name": "max_bytes", "value": "run.stats.bytes", "aggregate": "max"},
                {"name": "count", "value": "run.stats.bytes", "aggregate": "count"}
            ]
        }
    })

    query = unicode2utf8(convert.value2json(test.query))
    # EXECUTE QUERY
    with Timer("query"):
        response = http.get(self.testing.query, data=query)
        if response.status_code != 200:
            error(response)
    result = json2value(utf82unicode(response.all_content))

    Log.note("result\n{{result|indent}}", {"result": result})
def relay_post(path):
    """
    Forward a POST at `path` to the backing cache.  Any failure is logged
    and returned to the caller as HTTP 400 with the wrapped exception
    rendered as pretty JSON.
    """
    try:
        return cache.request("post", path, flask.request.headers)
    except Exception as cause:
        cause = Except.wrap(cause)
        Log.warning("could not handle request", cause=cause)
        error_body = unicode2utf8(value2json(cause, pretty=True))
        return Response(
            error_body,
            status=400,
            headers={"Content-Type": "text/html"}
        )
def heartbeat():
    """
    Liveness endpoint: HTTP 200 when the backend responds, otherwise 500
    with the exception serialized as JSON.
    """
    try:
        backend_check()
        return Response(status=200)
    except Exception as problem:
        Log.warning("heartbeat failure", cause=problem)
        payload = unicode2utf8(value2json(problem))
        return Response(
            payload,
            status=500,
            headers={"Content-Type": "application/json"}
        )
def test_timing(self):
    """
    Smoke test against the real service: count and sum of run.duration,
    edged by (suite, chunk) and result.ok, restricted to the last 7 whole
    days, returned as a cube.  Only logs the result; no assertions.
    """
    if self.not_real_service():
        return

    test = wrap({
        "query": {
            "from": {
                "type": "elasticsearch",
                "settings": {
                    "host": ES_CLUSTER_LOCATION,
                    "index": "unittest",
                    "type": "test_result"
                }
            },
            "select": [
                {"name": "count", "value": "run.duration", "aggregate": "count"},
                {"name": "total", "value": "run.duration", "aggregate": "sum"}
            ],
            "edges": [
                {"name": "chunk", "value": ["run.suite", "run.chunk"]},
                "result.ok"
            ],
            "where": {"and": [
                # timestamps are in seconds; Date.milli is milliseconds
                {"lt": {"timestamp": Date.floor(Date.now()).milli / 1000}},
                {"gte": {"timestamp": Date.floor(Date.now() - (Duration.DAY * 7), Duration.DAY).milli / 1000}}
            ]},
            "format": "cube",
            "samples": {"limit": 30}
        }
    })

    query = unicode2utf8(convert.value2json(test.query))
    # EXECUTE QUERY
    with Timer("query"):
        response = http.get(self.testing.query, data=query)
        if response.status_code != 200:
            error(response)
    result = json2value(utf82unicode(response.all_content))

    Log.note("result\n{{result|indent}}", {"result": result})
def _default(path):
    """
    Catch-all route: record the request for the log, then serve the static
    overview page.
    """
    record_request(flask.request, None, flask.request.get_data(), None)
    html_headers = {"Content-Type": "text/html"}
    return Response(
        unicode2utf8(OVERVIEW),
        status=200,
        headers=html_headers
    )
def write(self, key, value, disable_zip=False):
    """
    Write `value` to this S3 bucket under `key`, managing the `.json` /
    `.json.gz` suffix and (unless disable_zip) compressing large payloads.

    :param key: pure key, WITHOUT .json/.zip suffix (enforced)
    :param value: a string, or a file-like object with read()
    :param disable_zip: when True, never gzip the payload
    """
    if key.endswith(".json") or key.endswith(".zip"):
        Log.error("Expecting a pure key")

    try:
        if hasattr(value, "read"):
            # file-like value: stream it to S3
            if disable_zip:
                storage = self.bucket.new_key(key + ".json")
                # NOTE(review): assumes the file-like object supports len() — confirm
                string_length = len(value)
            else:
                storage = self.bucket.new_key(key + ".json.gz")
                string_length = len(value)
                value = convert.bytes2zip(value)
            file_length = len(value)
            Log.note(
                "Sending contents with length {{file_length|comma}} (from string with length {{string_length|comma}})",
                file_length=file_length,
                string_length=string_length)
            # NOTE(review): assumes bytes2zip returns a seekable file-like object — confirm
            value.seek(0)
            storage.set_contents_from_file(value)

            if self.settings.public:
                storage.set_acl('public-read')
            return

        if len(value) > 20 * 1000 and not disable_zip:
            # large enough to be worth gzipping; delete any stale twin so only
            # one of .json / .json.gz exists for this key
            self.bucket.delete_key(key + ".json")
            self.bucket.delete_key(key + ".json.gz")
            # NOTE(review): `str` here is likely Python-2 bytes (file uses unicode2utf8) — confirm
            if isinstance(value, str):
                value = convert.bytes2zip(value)
                key += ".json.gz"
            else:
                value = convert.bytes2zip(unicode2utf8(value))
                key += ".json.gz"
        else:
            # store uncompressed; remove any stale compressed twin
            self.bucket.delete_key(key + ".json.gz")
            # NOTE(review): both branches append ".json" — the isinstance split looks redundant
            if isinstance(value, str):
                key += ".json"
            else:
                key += ".json"

        storage = self.bucket.new_key(key)
        storage.set_contents_from_string(value)

        if self.settings.public:
            storage.set_acl('public-read')
    except Exception as e:
        Log.error(
            "Problem writing {{bytes}} bytes to {{key}} in {{bucket}}",
            key=key,
            bucket=self.bucket.name,
            bytes=len(value),
            cause=e)
def write(self, key, value, disable_zip=False):
    """
    Write `value` to this S3 bucket under `key`, managing the `.json` /
    `.json.gz` suffix and (unless disable_zip) compressing large payloads.
    (Near-duplicate of the other write(); keep the two in sync.)

    :param key: pure key, WITHOUT .json/.zip suffix (enforced)
    :param value: a string, or a file-like object with read()
    :param disable_zip: when True, never gzip the payload
    """
    if key.endswith(".json") or key.endswith(".zip"):
        Log.error("Expecting a pure key")

    try:
        if hasattr(value, "read"):
            # file-like value: stream it to S3
            if disable_zip:
                storage = self.bucket.new_key(key + ".json")
                # NOTE(review): assumes the file-like object supports len() — confirm
                string_length = len(value)
            else:
                storage = self.bucket.new_key(key + ".json.gz")
                string_length = len(value)
                value = convert.bytes2zip(value)
            file_length = len(value)
            Log.note(
                "Sending contents with length {{file_length|comma}} (from string with length {{string_length|comma}})",
                file_length=file_length,
                string_length=string_length)
            # NOTE(review): assumes bytes2zip returns a seekable file-like object — confirm
            value.seek(0)
            storage.set_contents_from_file(value)

            if self.settings.public:
                storage.set_acl('public-read')
            return

        if len(value) > 20 * 1000 and not disable_zip:
            # large enough to be worth gzipping; delete any stale twin so only
            # one of .json / .json.gz exists for this key
            self.bucket.delete_key(key + ".json")
            self.bucket.delete_key(key + ".json.gz")
            # NOTE(review): `str` here is likely Python-2 bytes (file uses unicode2utf8) — confirm
            if isinstance(value, str):
                value = convert.bytes2zip(value)
                key += ".json.gz"
            else:
                value = convert.bytes2zip(unicode2utf8(value))
                key += ".json.gz"
        else:
            # store uncompressed; remove any stale compressed twin
            self.bucket.delete_key(key + ".json.gz")
            # NOTE(review): both branches append ".json" — the isinstance split looks redundant
            if isinstance(value, str):
                key += ".json"
            else:
                key += ".json"

        storage = self.bucket.new_key(key)
        storage.set_contents_from_string(value)

        if self.settings.public:
            storage.set_acl('public-read')
    except Exception as e:
        Log.error(
            "Problem writing {{bytes}} bytes to {{key}} in {{bucket}}",
            key=key,
            bucket=self.bucket.name,
            bytes=len(value),
            cause=e
        )
def relay_post(path):
    """
    Proxy a POST through the cache layer.  Failures are logged and answered
    with HTTP 400, carrying the exception pretty-printed as JSON.
    """
    try:
        return cache.request("post", path, flask.request.headers)
    except Exception as failure:
        failure = Except.wrap(failure)
        Log.warning("could not handle request", cause=failure)
        return Response(
            unicode2utf8(value2json(failure, pretty=True)),
            status=400,
            headers={"Content-Type": "text/html"},
        )
def post_json(url, **kwargs):
    """
    POST to `url`, expecting a JSON response.

    Accepts either a `json` or `data` keyword; the value is serialized with
    value2json and sent as utf-8 bytes.  Raises (via Log.error) when neither
    is given, when the body is not valid JSON, or on a non-200/201 status.
    """
    if 'json' in kwargs:
        kwargs['data'] = unicode2utf8(value2json(kwargs['json']))
        # FIX: the `json` kwarg was left in place, so post() received BOTH
        # `json` and `data` arguments (the sibling post_json deletes it)
        del kwargs['json']
    elif 'data' in kwargs:
        kwargs['data'] = unicode2utf8(value2json(kwargs['data']))
    else:
        Log.error(u"Expecting `json` parameter")

    response = post(url, **kwargs)
    content = response.content
    try:
        details = json2value(utf82unicode(content))
    except Exception as e:
        Log.error(u"Unexpected return value {{content}}", content=content, cause=e)

    # NOTE(review): the other post_json variant also accepts 202 — confirm
    # whether that difference is intentional
    if response.status_code not in [200, 201]:
        Log.error(u"Bad response", cause=Except.wrap(details))
    return details
def heartbeat():
    """
    Health probe: return 200 when backend_check() succeeds; on failure, log
    a warning and answer 500 with the exception serialized as JSON.
    """
    try:
        backend_check()
        return Response(status=200)
    except Exception as failure:
        Log.warning("heartbeat failure", cause=failure)
        body = unicode2utf8(value2json(failure))
        json_headers = {"Content-Type": "application/json"}
        return Response(body, status=500, headers=json_headers)
def find_query(hash):
    """
    FIND QUERY BY HASH, RETURN Response OBJECT
    :param hash: short hash of the saved query (anything after "/" ignored)
    :return: Response OBJECT (200 with query, 404 if unknown, 400 on error)
    """
    with RegisterThread():
        try:
            hash = hash.split("/")[0]  # drop any trailing path segments
            saved = query_finder.find(hash)
            if saved:
                return Response(unicode2utf8(saved), status=200)
            return Response(b'{"type": "ERROR", "template": "not found"}', status=404)
        except Exception as problem:
            problem = Except.wrap(problem)
            Log.warning("problem finding query with hash={{hash}}", hash=hash, cause=problem)
            return Response(unicode2utf8(convert.value2json(problem)), status=400)
def test_simple_query(self):
    """
    Round-trip the simplest possible query ({"from": "unittest"}) against
    the real service and log the result.  No assertions.
    """
    if self.not_real_service():
        return

    payload = unicode2utf8(convert.value2json({"from": "unittest"}))

    # EXECUTE QUERY
    with Timer("query"):
        response = http.get(self.testing.query, data=payload)
        if response.status_code != 200:
            error(response)
    result = json2value(utf82unicode(response.all_content))

    Log.note("result\n{{result|indent}}", {"result": result})
def save(self, query):
    """
    Store `query` in the saved_queries index and return the short hash it
    can be retrieved by.  If the identical query was saved before, the
    existing hash is reused.
    """
    query.meta = None  # meta is per-request noise; exclude it from the saved document
    json = convert.value2json(query)
    hash = unicode2utf8(json)

    # TRY MANY HASHES AT ONCE
    # chain sha1 HASH_BLOCK_SIZE times; each intermediate digest is a candidate
    hashes = [None] * HASH_BLOCK_SIZE
    for i in range(HASH_BLOCK_SIZE):
        hash = hashlib.sha1(hash).digest()
        hashes[i] = hash
    # first 6 digest bytes -> 8 base64 chars; "/" is URL-hostile, use "_"
    short_hashes = [
        convert.bytes2base64(h[0:6]).replace("/", "_") for h in hashes
    ]
    available = {h: True for h in short_hashes}

    # one ES query to learn which candidates are already taken
    existing = self.es.query({
        "from": "saved_queries",
        "where": {"terms": {"hash": short_hashes}},
        "meta": {"timeout": "2second"}
    })

    for e in Cube(select=existing.select, edges=existing.edges, data=existing.data).values():
        if e.query == json:
            # the exact same query is already saved; reuse its hash
            return e.hash
        available[e.hash] = False

    # THIS WILL THROW AN ERROR IF THERE ARE NONE, HOW UNLUCKY!
    best = [h for h in short_hashes if available[h]][0]
    self.queue.add({
        "id": best,
        "value": {
            "hash": best,
            "create_time": Date.now(),
            "last_used": Date.now(),
            "query": json
        }
    })

    Log.note("Saved query as {{hash}}", hash=best)
    return best
def test_save_then_load(self):
    """
    Save a query (meta.save=True), then fetch it back via /find/<hash> and
    verify the stored bytes match what was sent.
    """
    test = {
        "data": [{"a": "b"}],
        "query": {
            "meta": {"save": True},
            "from": TEST_TABLE,
            "select": "a"
        },
        "expecting_list": {
            "meta": {"format": "list"},
            "data": ["b"]
        }
    }

    settings = self.utils.fill_container(test)

    bytes = unicode2utf8(value2json({
        "from": settings.index,
        "select": "a",
        "format": "list"
    }))
    # first 6 sha1 bytes, base64'd, "/" made URL-safe — must match save()'s scheme
    expected_hash = convert.bytes2base64(
        hashlib.sha1(bytes).digest()[0:6]).replace("/", "_")
    wrap(test).expecting_list.meta.saved_as = expected_hash

    self.utils.send_queries(test)

    # ENSURE THE QUERY HAS BEEN INDEXED
    Log.note("Flush saved query (with hash {{hash}})", hash=expected_hash)
    container = elasticsearch.Index(index="saved_queries", type=save_query.DATA_TYPE, kwargs=settings)
    container.flush(forced=True)
    with Timer("wait for 5 seconds"):
        Till(seconds=5).wait()

    url = URL(self.utils.testing.query)
    response = self.utils.try_till_response(
        url.scheme + "://" + url.host + ":" + text_type(url.port) + "/find/" + expected_hash,
        data=b'')
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.all_content, bytes)
def get(*args, **kwargs):
    """
    Stand-in for http.get: run the JSON query found in kwargs["data"]
    through jx and return a response-shaped structure (status_code plus
    the serialized result in content/all_content).
    """
    payload = kwargs.get("data")
    if not payload:
        return wrap({"status_code": 400})

    query = json2value(utf82unicode(payload))
    answer = unicode2utf8(value2json(jx.run(query)))
    return wrap({
        "status_code": 200,
        "all_content": answer,
        "content": answer
    })
def execute_query(self, query):
    """
    Send `query` to the test service and return the parsed JSON result.
    Raises (via Log.error) if the request or parse fails.
    """
    query = wrap(query)

    try:
        payload = unicode2utf8(value2json(query))

        # EXECUTE QUERY
        response = self.try_till_response(self.testing.query, data=payload)
        if response.status_code != 200:
            error(response)
        return json2value(utf82unicode(response.all_content))
    except Exception as e:
        Log.error("Failed query", e)
def _exit():
    """
    Ask the werkzeug development server to shut down.  The overview page is
    returned first; the shutdown hook runs in the finally block, after the
    Response has been constructed.
    """
    with RegisterThread():
        Log.note("Got request to shutdown")
        try:
            return Response(
                unicode2utf8(OVERVIEW),
                status=400,
                headers={"Content-Type": "text/html"}
            )
        finally:
            # only present when running under the werkzeug dev server
            stop = flask.request.environ.get('werkzeug.server.shutdown')
            if stop:
                stop()
            else:
                Log.warning("werkzeug.server.shutdown does not exist")
def send_queries(self, subtest, places=6):
    """
    Run every "expecting_*" clause of `subtest` against the service and
    compare each response to its expected value (to `places` decimal
    places).  Fails if no expectation clause is present.
    """
    subtest = wrap(subtest)

    try:
        # EXECUTE QUERY
        num_expectations = 0
        for i, (k, v) in enumerate(subtest.items()):
            if k.startswith("expecting_"):  # WHAT FORMAT ARE WE REQUESTING
                format = k[len("expecting_"):]
            elif k == "expecting":  # NO FORMAT REQUESTED (TO TEST DEFAULT FORMATS)
                format = None
            else:
                continue

            num_expectations += 1
            expected = v

            subtest.query.format = format
            subtest.query.meta.testing = (num_expectations == 1)  # MARK FIRST QUERY FOR TESTING SO FULL METADATA IS AVAILABLE BEFORE QUERY EXECUTION
            query = unicode2utf8(value2json(subtest.query))
            # EXECUTE QUERY
            response = self.try_till_response(self.testing.query, data=query)

            if response.status_code != 200:
                error(response)
            result = json2value(utf82unicode(response.all_content))

            # re-wrap the query against a live container so comparison sees schema
            container = jx_elasticsearch.new_instance(self._es_test_settings)
            query = QueryOp.wrap(subtest.query, container, container.namespace)

            compare_to_expected(query, result, expected, places)
            Log.note("PASS {{name|quote}} (format={{format}})", name=subtest.name, format=format)
        if num_expectations == 0:
            Log.error(
                "Expecting test {{name|quote}} to have property named 'expecting_*' for testing the various format clauses",
                name=subtest.name)
    except Exception as e:
        Log.error("Failed test {{name|quote}}", {"name": subtest.name}, e)
def test_longest_running_tests(self):
    """
    Smoke test: nested query that averages result.duration per
    (platform, type, suite, test) and sorts the outer list by that
    average, descending.  Only logs the result; no assertions.
    """
    test = wrap({
        "query": {
            "sort": {"sort": -1, "field": "avg"},
            "from": {
                "from": "unittest",
                "where": {"and": [{"gt": {"build.date": "1439337600"}}]},
                "groupby": [
                    "build.platform",
                    "build.type",
                    "run.suite",
                    "result.test"
                ],
                "select": [{"aggregate": "avg", "name": "avg", "value": "result.duration"}],
                "format": "table",
                "limit": 100
            },
            "limit": 100,
            "format": "list"
        }
    })

    query = unicode2utf8(convert.value2json(test.query))
    # EXECUTE QUERY
    with Timer("query"):
        response = http.get(self.testing.query, data=query)
        if response.status_code != 200:
            error(response)
    result = json2value(utf82unicode(response.all_content))

    Log.note("result\n{{result|indent}}", {"result": result})
def test_branch_count(self):
    """
    Smoke test against the real service: count records with missing
    build.id, edged by branch.  Only logs the result; no assertions.
    """
    if self.not_real_service():
        return

    test = wrap({
        "query": {
            "from": {
                "type": "elasticsearch",
                "settings": {
                    "host": ES_CLUSTER_LOCATION,
                    "index": "unittest",
                    "type": "test_result"
                }
            },
            "select": [
                {"aggregate": "count"},
            ],
            "edges": ["build.branch"],
            "where": {"or": [
                {"missing": "build.id"}
                # {"gte": {"timestamp": Date.floor(Date.now() - (Duration.DAY * 7), Duration.DAY).milli / 1000}}
            ]},
            "format": "table"
        }
    })

    query = unicode2utf8(convert.value2json(test.query))
    # EXECUTE QUERY
    with Timer("query"):
        response = http.get(self.testing.query, data=query)
        if response.status_code != 200:
            error(response)
    result = json2value(utf82unicode(response.all_content))

    Log.note("result\n{{result|indent}}", {"result": result})
def send_error(active_data_timer, body, e):
    """
    Build the error Response for a failed query: record the request, log it,
    and return the exception serialized as JSON.  413 when the query was too
    large, 400 otherwise.
    """
    status = 413 if QUERY_TOO_LARGE in e else 400

    record_request(flask.request, None, body, e)
    Log.warning("Could not process\n{{body}}", body=body.decode("latin1"), cause=e)

    details = e.__data__()
    details.meta.timing.total = active_data_timer.duration.seconds

    # REMOVE TRACES, BECAUSE NICER TO HUMANS
    # def remove_trace(e):
    #     e.trace = e.trace[0:1:]
    #     for c in listwrap(e.cause):
    #         remove_trace(c)
    # remove_trace(e)

    return Response(unicode2utf8(value2json(details)), status=status)
def test_failures_by_directory(self):
    """
    Smoke test against the real service: count tests whose path starts
    with "/", edged by test name and ok-status.  Only logs the result;
    no assertions.
    """
    if self.not_real_service():
        return

    test = wrap({
        "query": {
            "from": {
                "type": "elasticsearch",
                "settings": {
                    "host": ES_CLUSTER_LOCATION,
                    "index": "unittest",
                    "type": "test_result"
                }
            },
            "select": [{"aggregate": "count"}],
            "edges": ["result.test", "result.ok"],
            "where": {"prefix": {"result.test": "/"}},
            "format": "table"
        }
    })

    query = unicode2utf8(convert.value2json(test.query))
    # EXECUTE QUERY
    with Timer("query"):
        response = http.get(self.testing.query, data=query)
        if response.status_code != 200:
            error(response)
    result = json2value(utf82unicode(response.all_content))

    Log.note("result\n{{result|indent}}", {"result": result})
import json

from flask import Flask, Response, render_template, session
from flask.json import jsonify

from annotations import oauth
from annotations.oauth import JWT_PAYLOAD, PROFILE_KEY
from annotations.utils import record_request
from mo_logs import constants, startup
from mo_logs.strings import unicode2utf8, utf82unicode
from mo_threads.threads import register_thread
from pyLibrary.env.flask_wrappers import cors_wrapper
from vendor.mo_files import File
from vendor.mo_json import json2value
from vendor.mo_logs import Log

QUERY_SIZE_LIMIT = 10000
# pre-rendered error page, served as utf-8 bytes
ERROR_CONTENT = unicode2utf8(File("public/error.html").read())


@register_thread
def home():
    """Serve the landing page."""
    return render_template("home.html")


@register_thread
def dashboard():
    """Serve the dashboard, showing the logged-in user's profile."""
    return render_template(
        "dashboard.html",
        userinfo=session[PROFILE_KEY],
        # FIX: `json` was used here but never imported (NameError at request time)
        userinfo_pretty=json.dumps(session[JWT_PAYLOAD], indent=4),
    )
def tuid_endpoint(path):
    """
    Serve TUID annotation queries.  Expects a jx-style query over the
    `files` table with exactly branch/revision/path clauses; streams TUIDs
    back as list or table.  202 signals a partially-complete answer.
    """
    try:
        if flask.request.headers.get("content-length", "") in ["", "0"]:
            # ASSUME A BROWSER HIT THIS POINT, SEND text/html RESPONSE BACK
            return Response(
                EXPECTING_QUERY,
                status=400,
                headers={"Content-Type": "text/html"})
        elif int(flask.request.headers["content-length"]) > QUERY_SIZE_LIMIT:
            return Response(
                unicode2utf8("request too large"),
                status=400,
                headers={"Content-Type": "text/html"})

        request_body = flask.request.get_data().strip()
        query = json2value(utf82unicode(request_body))

        # ENSURE THE QUERY HAS THE CORRECT FORM
        if query['from'] != 'files':
            Log.error("Can only handle queries on the `files` table")

        ands = listwrap(query.where['and'])
        if len(ands) != 3:
            Log.error(
                'expecting a simple where clause with following structure\n{{example|json}}',
                example={"and": [
                    {"eq": {"branch": "<BRANCH>"}},
                    {"eq": {"revision": "<REVISION>"}},
                    {"in": {"path": ["<path1>", "<path2>", "...", "<pathN>"]}}
                ]})

        # pull branch, revision and path list out of the three clauses,
        # accepting them in any order
        rev = None
        paths = None
        branch_name = None
        for a in ands:
            rev = coalesce(rev, a.eq.revision)
            paths = unwraplist(coalesce(paths, a['in'].path, a.eq.path))
            branch_name = coalesce(branch_name, a.eq.branch)
        paths = listwrap(paths)

        if len(paths) == 0:
            response, completed = [], True
        elif service.conn.pending_transactions > TOO_BUSY:  # CHECK IF service IS VERY BUSY
            # TODO: BE SURE TO UPDATE STATS TOO
            Log.note("Too many open transactions")
            response, completed = [], False
        else:
            # RETURN TUIDS
            with Timer("tuid internal response time for {{num}} files", {"num": len(paths)}):
                response, completed = service.get_tuids_from_files(
                    revision=rev, files=paths, going_forward=True, repo=branch_name)

            if not completed:
                Log.note(
                    "Request for {{num}} files is incomplete for revision {{rev}}.",
                    num=len(paths), rev=rev)

        if query.meta.format == 'list':
            formatter = _stream_list
        else:
            formatter = _stream_table

        # 202 tells the caller to retry for the remaining files
        return Response(
            formatter(response),
            status=200 if completed else 202,
            headers={"Content-Type": mimetype.JSON})
    except Exception as e:
        e = Except.wrap(e)
        Log.warning("could not handle request", cause=e)
        return Response(
            unicode2utf8(value2json(e, pretty=True)),
            status=400,
            headers={"Content-Type": "text/html"})
def tuid_endpoint(path):
    """
    Serve TUID annotation queries (variant with request statistics).
    Expects a jx-style query over the `files` table with exactly
    branch/revision/path clauses; streams TUIDs back as list or table.
    202 signals a partially-complete answer.
    """
    with RegisterThread():
        try:
            service.statsdaemon.update_requests(requests_total=1)

            if flask.request.headers.get("content-length", "") in ["", "0"]:
                # ASSUME A BROWSER HIT THIS POINT, SEND text/html RESPONSE BACK
                service.statsdaemon.update_requests(requests_complete=1, requests_passed=1)
                return Response(
                    EXPECTING_QUERY,
                    status=400,
                    headers={"Content-Type": "text/html"}
                )
            elif int(flask.request.headers["content-length"]) > QUERY_SIZE_LIMIT:
                service.statsdaemon.update_requests(requests_complete=1, requests_passed=1)
                return Response(
                    unicode2utf8("request too large"),
                    status=400,
                    headers={"Content-Type": "text/html"}
                )

            request_body = flask.request.get_data().strip()
            query = json2value(utf82unicode(request_body))

            # ENSURE THE QUERY HAS THE CORRECT FORM
            if query['from'] != 'files':
                Log.error("Can only handle queries on the `files` table")

            ands = listwrap(query.where['and'])
            if len(ands) != 3:
                Log.error(
                    'expecting a simple where clause with following structure\n{{example|json}}',
                    example={"and": [
                        {"eq": {"branch": "<BRANCH>"}},
                        {"eq": {"revision": "<REVISION>"}},
                        {"in": {"path": ["<path1>", "<path2>", "...", "<pathN>"]}}
                    ]}
                )

            # pull branch, revision and path list out of the three clauses,
            # accepting them in any order
            rev = None
            paths = None
            branch_name = None
            for a in ands:
                rev = coalesce(rev, a.eq.revision)
                paths = unwraplist(coalesce(paths, a['in'].path, a.eq.path))
                branch_name = coalesce(branch_name, a.eq.branch)
            paths = listwrap(paths)

            if len(paths) == 0:
                response, completed = [], True
            elif service.conn.pending_transactions > TOO_BUSY:  # CHECK IF service IS VERY BUSY
                # TODO: BE SURE TO UPDATE STATS TOO
                Log.note("Too many open transactions")
                response, completed = [], False
            elif service.get_thread_count() > TOO_MANY_THREADS:
                Log.note("Too many threads open")
                response, completed = [], False
            else:
                # RETURN TUIDS
                with Timer("tuid internal response time for {{num}} files", {"num": len(paths)}):
                    response, completed = service.get_tuids_from_files(
                        revision=rev, files=paths, going_forward=True, repo=branch_name
                    )

                if not completed:
                    Log.note(
                        "Request for {{num}} files is incomplete for revision {{rev}}.",
                        num=len(paths), rev=rev
                    )

            if query.meta.format == 'list':
                formatter = _stream_list
            else:
                formatter = _stream_table

            service.statsdaemon.update_requests(
                requests_complete=1 if completed else 0,
                requests_incomplete=1 if not completed else 0,
                requests_passed=1
            )

            # 202 tells the caller to retry for the remaining files
            return Response(
                formatter(response),
                status=200 if completed else 202,
                headers={"Content-Type": "application/json"}
            )
        except Exception as e:
            e = Except.wrap(e)
            service.statsdaemon.update_requests(requests_incomplete=1, requests_failed=1)
            Log.warning("could not handle request", cause=e)
            return Response(
                unicode2utf8(value2json(e, pretty=True)),
                status=400,
                headers={"Content-Type": "text/html"}
            )
def sql_query(path):
    """
    Handle POSTed SQL: translate it to a jx query, run it, and return the
    result with per-phase timing embedded in the JSON.  The total time is
    patched into the serialized bytes at the very end, via placeholder
    replacement, so it can include the serialization time itself.
    """
    with RegisterThread():
        query_timer = Timer("total duration")
        request_body = None
        try:
            with query_timer:
                preamble_timer = Timer("preamble", silent=True)
                with preamble_timer:
                    if flask.request.headers.get("content-length", "") in ["", "0"]:
                        # ASSUME A BROWSER HIT THIS POINT, SEND text/html RESPONSE BACK
                        return Response(
                            BLANK,
                            status=400,
                            headers={"Content-Type": "text/html"})
                    elif int(flask.request.headers["content-length"]) > QUERY_SIZE_LIMIT:
                        Log.error("Query is too large")

                    request_body = flask.request.get_data().strip()
                    text = utf82unicode(request_body)
                    data = json2value(text)
                    record_request(flask.request, data, None, None)

                translate_timer = Timer("translate", silent=True)
                with translate_timer:
                    if not data.sql:
                        Log.error("Expecting a `sql` parameter")
                    jx_query = parse_sql(data.sql)
                frum = find_container(jx_query['from'])
                if data.meta.testing:
                    test_mode_wait(jx_query)
                result = jx.run(jx_query, container=frum)
                if isinstance(result, Container):  # TODO: REMOVE THIS CHECK, jx SHOULD ALWAYS RETURN Containers
                    result = result.format(jx_query.format)
                result.meta.jx_query = jx_query

                save_timer = Timer("save")
                with save_timer:
                    if data.meta.save:
                        try:
                            result.meta.saved_as = save_query.query_finder.save(data)
                        except Exception as e:
                            Log.warning("Unexpected save problem", cause=e)

                result.meta.timing.preamble = mo_math.round(preamble_timer.duration.seconds, digits=4)
                result.meta.timing.translate = mo_math.round(translate_timer.duration.seconds, digits=4)
                result.meta.timing.save = mo_math.round(save_timer.duration.seconds, digits=4)
                result.meta.timing.total = "{{TOTAL_TIME}}"  # TIMING PLACEHOLDER

                with Timer("jsonification", silent=True) as json_timer:
                    response_data = unicode2utf8(value2json(result))

            with Timer("post timer", silent=True):
                # IMPORTANT: WE WANT THE TIME OF THE JSON SERIALIZATION IN THE JSON ITSELF.
                # WE CHEAT BY DOING A (HOPEFULLY FAST) STRING REPLACEMENT AT THE VERY END
                # FIX: the replacement must be bytes; the old
                # `b'"total": ' + str(...)` raised TypeError on Python 3
                timing_replacement = unicode2utf8(
                    '"total": %s, "jsonification": %s' % (
                        mo_math.round(query_timer.duration.seconds, digits=4),
                        mo_math.round(json_timer.duration.seconds, digits=4),
                    )
                )
                response_data = response_data.replace(
                    b'"total":"{{TOTAL_TIME}}"', timing_replacement)
                Log.note(
                    "Response is {{num}} bytes in {{duration}}",
                    num=len(response_data),
                    duration=query_timer.duration)

                return Response(
                    response_data,
                    status=200,
                    headers={"Content-Type": result.meta.content_type})
        except Exception as e:
            e = Except.wrap(e)
            return send_error(query_timer, request_body, e)
from active_data import record_request from active_data.actions import QUERY_TOO_LARGE, find_container, save_query, send_error, test_mode_wait from jx_base.container import Container from jx_python import jx from mo_files import File from mo_future import binary_type from mo_json import json2value, value2json from mo_logs import Except, Log from mo_logs.strings import unicode2utf8, utf82unicode import mo_math from mo_threads.threads import RegisterThread from mo_times.timer import Timer from pyLibrary.env.flask_wrappers import cors_wrapper BLANK = unicode2utf8(File("active_data/public/error.html").read()) QUERY_SIZE_LIMIT = 10 * 1024 * 1024 @cors_wrapper def jx_query(path): with RegisterThread(): try: with Timer("total duration") as query_timer: preamble_timer = Timer("preamble", silent=True) with preamble_timer: if flask.request.headers.get("content-length", "") in ["", "0"]: # ASSUME A BROWSER HIT THIS POINT, SEND text/html RESPONSE BACK return Response(BLANK, status=400,