def add_alias(self, alias=None):
    """
    POINT AN ALIAS AT THIS INDEX.

    :param alias: NAME OF THE ALIAS TO ADD; IF NOT GIVEN, USE THE ALIAS
                  FROM settings (SET ALIAS ACCORDING TO LIFECYCLE RULES)
    """
    # BOTH BRANCHES OF THE ORIGINAL WERE IDENTICAL EXCEPT FOR WHICH ALIAS
    # NAME WAS USED, SO THEY ARE COLLAPSED INTO ONE CALL
    name = alias if alias else self.settings.alias
    self.cluster_state = None
    self.cluster._post(
        "/_aliases",
        data=convert.unicode2utf8(convert.value2json({
            "actions": [{
                "add": {
                    "index": self.settings.index,
                    "alias": name
                }
            }]
        })),
        timeout=coalesce(self.settings.timeout, 30)
    )
def solve():
    """
    FLASK ENDPOINT: READ A JSON PROBLEM FROM THE REQUEST BODY, SOLVE IT,
    AND RETURN THE SOLUTION AS JSON (400 WITH ERROR DETAILS ON FAILURE)
    """
    try:
        data = convert.json2value(convert.utf82unicode(flask.request.data))
        solved = noop.solve(data)
        response_data = convert.unicode2utf8(convert.value2json(solved))

        return Response(
            response_data,
            direct_passthrough=True,  # FOR STREAMING
            status=200,
            headers={
                "access-control-allow-origin": "*",
                "content-type": "application/json"
            }
        )
    except Exception as e:  # FIX: was py2-only `except Exception, e`
        e = Except.wrap(e)
        Log.warning("Could not process", cause=e)
        e = e.as_dict()

        return Response(
            convert.unicode2utf8(convert.value2json(e)),
            status=400,
            headers={
                "access-control-allow-origin": "*",
                "content-type": "application/json"
            }
        )
def get_treeherder_job(self):
    """
    FLASK ENDPOINT: RETURN MARKUP FOR THE branch/revision (AND OPTIONAL
    task_id/buildername/timestamp) GIVEN IN THE REQUEST ARGS
    """
    try:
        with Timer("Process Request"):
            args = Dict(**flask.request.args)

            # IS THE branch/revision PENDING?
            result = self.get_markup(
                unwraplist(args.branch),
                unwraplist(args.revision),
                unwraplist(args.task_id),
                unwraplist(args.buildername),
                unwraplist(args.timestamp)
            )

            response_data = convert.unicode2utf8(convert.value2json(result))
            return Response(
                response_data,
                status=200,
                headers={
                    "access-control-allow-origin": "*",
                    "content-type": "text/plain"
                }
            )
    except Exception as e:  # FIX: was py2-only `except Exception, e`
        e = Except.wrap(e)
        Log.warning("Could not process", cause=e)
        e = e.as_dict()

        return Response(
            convert.unicode2utf8(convert.value2json(e)),
            status=400,
            headers={
                "access-control-allow-origin": "*",
                "content-type": "application/json"
            }
        )
def test_rest_get(self):
    """Round trip: fill a container, then GET the rows where a==1 via the json/ endpoint."""
    settings = self.utils.fill_container({
        "data": [
            {"a": 0, "b": 0},
            {"a": 0, "b": 1},
            {"a": 1, "b": 0},
            {"a": 1, "b": 1}
        ],
        "query": {"from": ""}  # DUMMY LINE
    })

    endpoint = URL(self.utils.service_url)
    endpoint.path = "json/" + settings.index
    endpoint.query = {"a": 1}

    response = self.utils.try_till_response(str(endpoint), data=b"")
    self.assertEqual(response.status_code, 200)

    # ORDER DOES NOT MATTER, TEST EITHER
    candidates = [
        convert.unicode2utf8(convert.value2json(rows, pretty=True))
        for rows in (
            [{"a": 1, "b": 0}, {"a": 1, "b": 1}],
            [{"a": 1, "b": 1}, {"a": 1, "b": 0}]
        )
    ]
    try:
        self.assertEqual(response.all_content, candidates[0])
    except Exception:
        self.assertEqual(response.all_content, candidates[1])
def put(self, path, **kwargs):
    # HTTP PUT TO THE ES CLUSTER AT self.settings.host:port
    # Mapping DATA IS JSON-ENCODED; ANYTHING ELSE MUST ALREADY BE A utf8 STRING
    # RETURNS THE PARSED JSON RESPONSE; RAISES (VIA Log.error) ON ANY FAILURE
    url = self.settings.host + ":" + unicode(self.settings.port) + path

    data = kwargs.get(b'data')
    if data == None:
        pass
    elif isinstance(data, Mapping):
        # SERIALIZE THE Mapping TO JSON, THEN ENCODE TO utf8 BYTES
        kwargs[b'data'] = data = convert.unicode2utf8(convert.value2json(data))
    elif not isinstance(kwargs["data"], str):
        Log.error("data must be utf8 encoded string")

    if self.debug:
        sample = kwargs.get(b'data', "")[:300]  # ONLY LOG A PREFIX OF THE BODY
        Log.note("{{url}}:\n{{data|indent}}", url=url, data=sample)
    #
    response = http.put(url, **kwargs)
    if response.status_code not in [200]:
        Log.error(response.reason+": "+response.all_content)
    if self.debug:
        Log.note("response: {{response}}", response= utf82unicode(response.all_content)[0:300:])

    details = mo_json.json2value(utf82unicode(response.content))
    if details.error:
        Log.error(convert.quote2string(details.error))
    if details._shards.failed > 0:
        # ANY SHARD FAILURE IS TREATED AS A TOTAL FAILURE
        Log.error("Shard failures {{failures|indent}}",
            failures="---\n".join(r.replace(";", ";\n") for r in details._shards.failures.reason)
        )
    return details
def get_active_data(settings):
    """
    PULL RECENT FAILING unittest RESULTS FROM ActiveData AND WRITE THEM
    AS A TAB-DELIMITED FILE TO settings.output.activedata
    """
    query = {
        "limit": 100000,
        "from": "unittest",
        "where": {"and": [
            {"eq": {"result.ok": False}},
            {"gt": {"run.timestamp": RECENT.milli}}
        ]},
        "select": [
            "result.ok",
            "build.branch",
            "build.platform",
            "build.release",
            "build.revision",  # FIX: WAS LISTED TWICE; DUPLICATE COLUMN REMOVED
            "build.type",
            "build.date",
            "run.timestamp",
            "run.suite",
            "run.chunk",
            "result.test",
            "run.stats.status.test_status"
        ],
        "format": "table"
    }

    result = http.post("http://activedata.allizom.org/query", data=convert.unicode2utf8(convert.value2json(query)))
    query_result = convert.json2value(convert.utf82unicode(result.all_content))

    tab = convert.table2tab(query_result.header, query_result.data)
    File(settings.output.activedata).write(tab)
def test_multiple_agg_on_same_field(self):
    """Two aggregates (max, count) over the same field in a single query."""
    if self.not_real_service():
        return

    spec = wrap({"query": {
        "from": {
            "type": "elasticsearch",
            "settings": {
                "host": ES_CLUSTER_LOCATION,
                "index": "unittest",
                "type": "test_result"
            }
        },
        "select": [
            {"name": "max_bytes", "value": "run.stats.bytes", "aggregate": "max"},
            {"name": "count", "value": "run.stats.bytes", "aggregate": "count"}
        ]
    }})

    payload = convert.unicode2utf8(convert.value2json(spec.query))
    # EXECUTE QUERY
    with Timer("query"):
        response = http.get(self.service_url, data=payload)
        if response.status_code != 200:
            error(response)
    result = convert.json2value(convert.utf82unicode(response.all_content))

    Log.note("result\n{{result|indent}}", {"result": result})
def get_raw_json(path):
    """
    FLASK ENDPOINT: RUN A SIMPLE eq-FILTER QUERY OVER path USING THE
    REQUEST ARGS, RETURN THE RESULT IN "list" FORMAT
    """
    active_data_timer = Timer("total duration")
    body = flask.request.get_data()
    try:
        with active_data_timer:
            args = wrap(Data(**flask.request.args))
            limit = args.limit if args.limit else 10
            args.limit = None  # limit IS NOT A FILTER TERM; KEEP IT OUT OF THE where
            frum = wrap_from(path)
            result = jx.run(
                {
                    "from": path,
                    "where": {"eq": args},
                    "limit": limit,
                    "format": "list"
                },
                frum
            )

            if isinstance(result, Container):  # TODO: REMOVE THIS CHECK, jx SHOULD ALWAYS RETURN Containers
                result = result.format("list")
        result.meta.active_data_response_time = active_data_timer.duration

        response_data = convert.unicode2utf8(convert.value2json(result.data, pretty=True))
        Log.note("Response is {{num}} bytes", num=len(response_data))
        return Response(response_data, status=200)
    except Exception as e:  # FIX: was py2-only `except Exception, e`
        e = Except.wrap(e)
        return _send_error(active_data_timer, body, e)
def test_branch_count(self):
    """Count records that are missing build.id, grouped by build.branch."""
    if self.not_real_service():
        return

    spec = wrap({"query": {
        "from": {
            "type": "elasticsearch",
            "settings": {
                "host": ES_CLUSTER_LOCATION,
                "index": "unittest",
                "type": "test_result"
            }
        },
        "select": [{"aggregate": "count"}],
        "edges": ["build.branch"],
        "where": {"or": [
            {"missing": "build.id"}
            # {"gte": {"timestamp": Date.floor(Date.now() - (Duration.DAY * 7), Duration.DAY).milli / 1000}}
        ]},
        "format": "table"
    }})

    payload = convert.unicode2utf8(convert.value2json(spec.query))
    # EXECUTE QUERY
    with Timer("query"):
        response = http.get(self.service_url, data=payload)
        if response.status_code != 200:
            error(response)
    result = convert.json2value(convert.utf82unicode(response.all_content))

    Log.note("result\n{{result|indent}}", {"result": result})
def set_refresh_interval(self, seconds):
    # SET THE ES INDEX refresh_interval; seconds <= 0 MEANS DISABLE REFRESH (-1)
    # REQUEST ENCODING AND SUCCESS FLAG DIFFER BY ES VERSION
    if seconds <= 0:
        interval = -1
    else:
        interval = unicode(seconds) + "s"

    if self.cluster.version.startswith("0.90."):
        response = self.cluster.put(
            "/" + self.settings.index + "/_settings",
            data='{"index":{"refresh_interval":' + convert.value2json(interval) + '}}'
        )

        # 0.90 REPORTS SUCCESS VIA `ok`
        result = convert.json2value(utf82unicode(response.all_content))
        if not result.ok:
            Log.error("Can not set refresh interval ({{error}})", {
                "error": utf82unicode(response.all_content)
            })
    elif any(map(self.cluster.version.startswith, ["1.4.", "1.5.", "1.6.", "1.7."])):
        response = self.cluster.put(
            "/" + self.settings.index + "/_settings",
            data=convert.unicode2utf8('{"index":{"refresh_interval":' + convert.value2json(interval) + '}}')
        )

        # 1.x REPORTS SUCCESS VIA `acknowledged`
        result = convert.json2value(utf82unicode(response.all_content))
        if not result.acknowledged:
            Log.error("Can not set refresh interval ({{error}})", {
                "error": utf82unicode(response.all_content)
            })
    else:
        Log.error("Do not know how to handle ES version {{version}}", version=self.cluster.version)
def store_data(path):
    # RECEIVE DATA, VERIFY Hawk AUTHORIZATION (OR FALL BACK TO PUBLIC
    # PATTERN-MATCHING AUTH), SUBMIT THE DATA, AND RESPOND WITH ITS LINK
    # NOTE(review): the outer `try:` has no handler visible in this chunk;
    # its except/finally presumably follows — confirm in the full file
    try:
        request = flask.request
        auth = request.headers.get('Authorization')
        if not auth:
            # USE PATTERN MATCHING AUTH
            for c in all_creds:
                if c.path == path:
                    return store_public_data(path, c)
            raise Log.error(
                "No authentication provided. path={{path}} data.length={{length}}",
                path=path,
                length=len(request.get_data()),
            )

        try:
            # VERIFY THE Hawk SIGNATURE ON THE REQUEST
            receiver = Receiver(
                lookup_credentials,
                auth,
                request.url,
                request.method,
                content=request.get_data(),
                content_type=request.headers['Content-Type'],
                seen_nonce=seen_nonce
            )
        except Exception, e:
            e = Except.wrap(e)
            raise Log.error(
                "Authentication failed. path={{path}} data.length={{length}}\n{{auth|indent}}",
                path=path,
                length=len(request.get_data()),
                auth=auth,
                cause=e
            )

        # THE AUTHENTICATED USER MUST ALSO BE PERMITTED ACCESS TO path
        permissions = lookup_user(receiver.parsed_header["id"])
        if path not in listwrap(permissions.resources):
            Log.error("{{user}} not allowed access to {{resource}}", user=permissions.hawk.id, resource=path)

        link, id = submit_data(path, permissions, request.json)

        response_content = convert.unicode2utf8(convert.value2json({
            "link": link,
            "etl": {"id": id}
        }))
        # SIGN THE RESPONSE SO THE CLIENT CAN VERIFY IT
        receiver.respond(
            content=response_content,
            content_type=RESPONSE_CONTENT_TYPE
        )

        return Response(
            response_content,
            status=200,
            headers={
                b'Server-Authorization': receiver.response_header,
                b'content-type': RESPONSE_CONTENT_TYPE
            }
        )
def post_json(url, **kwargs):
    """
    POST, ASSUMING THE RESPONSE IS JSON; RETURN THE PARSED RESPONSE
    """
    if b"json" in kwargs:
        kwargs[b"data"] = convert.unicode2utf8(convert.value2json(kwargs[b"json"]))
    elif b'data' in kwargs:  # FIX: was `elif b'data':`, which is always truthy
        kwargs[b"data"] = convert.unicode2utf8(convert.value2json(kwargs[b"data"]))
    else:
        Log.error("Expecting `json` parameter")

    response = post(url, **kwargs)
    c = response.content
    try:
        details = convert.json2value(convert.utf82unicode(c))
    except Exception as e:  # FIX: was py2-only `except Exception, e`
        Log.error("Unexpected return value {{content}}", content=c, cause=e)
    # FIX: THE ORIGINAL PARSED details BUT NEVER RETURNED IT; SIBLING
    # IMPLEMENTATIONS IN THIS FILE RETURN THE PARSED RESPONSE
    return details
def default_page(path):
    """CATCH-ALL HANDLER: RESPOND WITH THE STATIC ERROR PAGE (400)."""
    headers = {
        "access-control-allow-origin": "*",
        "content-type": "text/html"
    }
    return Response(convert.unicode2utf8(ERROR_PAGE), status=400, headers=headers)
def post_json(url, **kwargs):
    """
    POST kwargs["data"] AS JSON; PARSE AND RETURN THE JSON RESPONSE
    """
    kwargs["data"] = convert.unicode2utf8(convert.value2json(kwargs["data"]))
    raw = post(url, **kwargs).all_content
    return convert.json2value(convert.utf82unicode(raw))
def test_missing_auth(self):
    """POSTing to the bad URL without credentials must be rejected with 403."""
    # MAKE SOME DATA
    payload = {
        "constant": "this is a test",
        "random-data": convert.bytes2base64(Random.bytes(100))
    }

    response = requests.post(settings.bad_url, data=convert.unicode2utf8(convert.value2json(payload)))
    self.assertEqual(response.status_code, 403)
def post_json(url, **kwargs):
    """
    POST kwargs["data"] AS JSON AND RETURN THE PARSED JSON RESPONSE
    """
    kwargs["data"] = convert.unicode2utf8(convert.value2json(kwargs["data"]))
    reply = post(url, **kwargs)
    return convert.json2value(convert.utf82unicode(reply.all_content))
def test_timing(self):
    """Count and sum run.duration over the last week, by (suite, chunk) and result.ok."""
    if self.not_real_service():
        return

    upper = Date.floor(Date.now()).milli / 1000
    lower = Date.floor(Date.now() - (Duration.DAY * 7), Duration.DAY).milli / 1000

    spec = wrap({"query": {
        "from": {
            "type": "elasticsearch",
            "settings": {
                "host": ES_CLUSTER_LOCATION,
                "index": "unittest",
                "type": "test_result"
            }
        },
        "select": [
            {"name": "count", "value": "run.duration", "aggregate": "count"},
            {"name": "total", "value": "run.duration", "aggregate": "sum"}
        ],
        "edges": [
            {"name": "chunk", "value": ["run.suite", "run.chunk"]},
            "result.ok"
        ],
        "where": {"and": [
            {"lt": {"timestamp": upper}},
            {"gte": {"timestamp": lower}}
        ]},
        "format": "cube",
        "samples": {"limit": 30}
    }})

    payload = convert.unicode2utf8(convert.value2json(spec.query))
    # EXECUTE QUERY
    with Timer("query"):
        response = http.get(self.service_url, data=payload)
        if response.status_code != 200:
            error(response)
    result = convert.json2value(convert.utf82unicode(response.all_content))

    Log.note("result\n{{result|indent}}", {"result": result})
def post_json(url, **kwargs):
    """
    POST, ASSUMING THE RESPONSE IS JSON; RETURN THE PARSED RESPONSE
    """
    if b"json" in kwargs:
        kwargs[b"data"] = convert.unicode2utf8(convert.value2json(kwargs[b"json"]))
    elif b'data' in kwargs:
        kwargs[b"data"] = convert.unicode2utf8(convert.value2json(kwargs[b"data"]))
    else:
        Log.error("Expecting `json` parameter")

    response = post(url, **kwargs)
    c = response.content
    try:
        details = mo_json.json2value(convert.utf82unicode(c))
    except Exception as e:  # FIX: was py2-only `except Exception, e`
        Log.error("Unexpected return value {{content}}", content=c, cause=e)
    # FIX: THE ORIGINAL PARSED details BUT NEVER RETURNED IT; SIBLING
    # IMPLEMENTATIONS IN THIS FILE RETURN THE PARSED RESPONSE
    return details
def write(self, key, value, disable_zip=False):
    """
    WRITE value TO THE BUCKET UNDER key

    :param key: "PURE" KEY (NO .json/.json.gz SUFFIX; ADDED HERE)
    :param value: A FILE-LIKE OBJECT (STREAMED) OR A STRING
    :param disable_zip: True TO FORCE PLAIN .json STORAGE
    """
    if key.endswith(".json") or key.endswith(".zip"):
        Log.error("Expecting a pure key")
    try:
        if hasattr(value, "read"):
            # STREAMING CASE: UPLOAD FROM THE FILE-LIKE OBJECT
            if disable_zip:
                storage = self.bucket.new_key(key + ".json")
                string_length = len(value)
            else:
                storage = self.bucket.new_key(key + ".json.gz")
                string_length = len(value)
                value = convert.bytes2zip(value)
            file_length = len(value)
            Log.note(
                "Sending contents with length {{file_length|comma}} (from string with length {{string_length|comma}})",
                file_length=file_length,
                string_length=string_length)
            value.seek(0)
            storage.set_contents_from_file(value)

            if self.settings.public:
                storage.set_acl('public-read')
            return

        if len(value) > 20 * 1000 and not disable_zip:
            # LARGE STRINGS ARE GZIPPED; REMOVE ANY STALE VARIANTS FIRST
            self.bucket.delete_key(key + ".json")
            self.bucket.delete_key(key + ".json.gz")
            if isinstance(value, str):
                value = convert.bytes2zip(value)
            else:
                value = convert.bytes2zip(convert.unicode2utf8(value))
            key += ".json.gz"
        else:
            self.bucket.delete_key(key + ".json.gz")
            # FIX: BOTH BRANCHES OF THE ORIGINAL isinstance CHECK APPENDED
            # ".json"; THE REDUNDANT CONDITIONAL IS REMOVED
            key += ".json"

        storage = self.bucket.new_key(key)
        storage.set_contents_from_string(value)

        if self.settings.public:
            storage.set_acl('public-read')
    except Exception as e:  # FIX: was py2-only `except Exception, e`
        Log.error(
            "Problem writing {{bytes}} bytes to {{key}} in {{bucket}}",
            key=key,
            bucket=self.bucket.name,
            bytes=len(value),
            cause=e)
def save(self, query):
    # PERSIST query AND RETURN A SHORT, URL-SAFE HASH THAT IDENTIFIES IT
    # REPEATED SHA1 ROUNDS PRODUCE A BLOCK OF CANDIDATE HASHES, SO A
    # COLLISION CAN BE SKIPPED WITHOUT RE-QUERYING THE STORE
    query.meta = None
    json = convert.value2json(query)
    hash = convert.unicode2utf8(json)  # NOTE: `hash` IS REUSED AS THE ROLLING DIGEST BELOW

    # TRY MANY HASHES AT ONCE
    hashes = [None] * HASH_BLOCK_SIZE
    for i in range(HASH_BLOCK_SIZE):
        hash = hashlib.sha1(hash).digest()
        hashes[i] = hash
    # FIRST 6 BYTES -> base64, WITH "/" REPLACED SO THE HASH IS URL-SAFE
    short_hashes = [
        convert.bytes2base64(h[0:6]).replace("/", "_") for h in hashes
    ]
    available = {h: True for h in short_hashes}

    # WHICH OF THE CANDIDATE HASHES ARE ALREADY TAKEN?
    existing = self.es.query({
        "from": {
            "type": "elasticsearch",
            "settings": self.es.settings
        },
        "where": {
            "terms": {
                "hash": short_hashes
            }
        },
        "meta": {
            "timeout": "2second"
        }
    })

    for e in Cube(select=existing.select, edges=existing.edges, data=existing.data).values():
        if e.query == json:
            # IDENTICAL QUERY ALREADY SAVED; REUSE ITS HASH
            return e.hash
        available[e.hash] = False

    # THIS WILL THROW AN ERROR IF THERE ARE NONE, HOW UNLUCKY!
    best = [h for h in short_hashes if available[h]][0]

    self.queue.add({
        "id": best,
        "value": {
            "hash": best,
            "create_time": Date.now(),
            "last_used": Date.now(),
            "query": json
        }
    })

    Log.note("Saved query as {{hash}}", hash=best)

    return best
def _exit():
    """ASK THE werkzeug SERVER TO SHUT DOWN, THEN SHOW THE OVERVIEW PAGE."""
    Log.note("Got request to shutdown")
    stopper = flask.request.environ.get('werkzeug.server.shutdown')
    if stopper:
        stopper()
    else:
        Log.warning("werkzeug.server.shutdown does not exist")

    return Response(
        convert.unicode2utf8(OVERVIEW),
        status=400,
        headers={"Content-Type": "text/html"}
    )
def write(self, key, value, disable_zip=False):
    """
    WRITE value TO THE BUCKET UNDER key

    :param key: "PURE" KEY (NO .json/.json.gz SUFFIX; ADDED HERE)
    :param value: A FILE-LIKE OBJECT (STREAMED) OR A STRING
    :param disable_zip: True TO FORCE PLAIN .json STORAGE
    """
    if key.endswith(".json") or key.endswith(".zip"):
        Log.error("Expecting a pure key")
    try:
        if hasattr(value, "read"):
            # STREAMING CASE: UPLOAD FROM THE FILE-LIKE OBJECT
            if disable_zip:
                storage = self.bucket.new_key(key + ".json")
                string_length = len(value)
            else:
                storage = self.bucket.new_key(key + ".json.gz")
                string_length = len(value)
                value = convert.bytes2zip(value)
            file_length = len(value)
            Log.note("Sending contents with length {{file_length|comma}} (from string with length {{string_length|comma}})", file_length= file_length, string_length=string_length)
            value.seek(0)
            storage.set_contents_from_file(value)

            if self.settings.public:
                storage.set_acl('public-read')
            return

        if len(value) > 20 * 1000 and not disable_zip:
            # LARGE STRINGS ARE GZIPPED; REMOVE ANY STALE VARIANTS FIRST
            self.bucket.delete_key(key + ".json")
            self.bucket.delete_key(key + ".json.gz")
            if isinstance(value, str):
                value = convert.bytes2zip(value)
            else:
                value = convert.bytes2zip(convert.unicode2utf8(value))
            key += ".json.gz"
        else:
            self.bucket.delete_key(key + ".json.gz")
            # FIX: BOTH BRANCHES OF THE ORIGINAL isinstance CHECK APPENDED
            # ".json"; THE REDUNDANT CONDITIONAL IS REMOVED
            key += ".json"

        storage = self.bucket.new_key(key)
        storage.set_contents_from_string(value)

        if self.settings.public:
            storage.set_acl('public-read')
    except Exception as e:  # FIX: was py2-only `except Exception, e`
        Log.error(
            "Problem writing {{bytes}} bytes to {{key}} in {{bucket}}",
            key=key,
            bucket=self.bucket.name,
            bytes=len(value),
            cause=e
        )
def test_simple_query(self):
    """Smoke test: a bare {"from": "unittest"} query returns HTTP 200."""
    if self.not_real_service():
        return

    payload = convert.unicode2utf8(convert.value2json({"from": "unittest"}))
    # EXECUTE QUERY
    with Timer("query"):
        response = http.get(self.service_url, data=payload)
        if response.status_code != 200:
            error(response)
    result = convert.json2value(convert.utf82unicode(response.all_content))

    Log.note("result\n{{result|indent}}", {"result": result})
def post_json(url, **kwargs):
    """
    POST, ASSUMING THE RESPONSE IS JSON; RETURN THE PARSED RESPONSE.
    ACCEPTS EITHER A `json` OR `data` KEYWORD FOR THE PAYLOAD.
    """
    if b"json" in kwargs:
        kwargs[b"data"] = convert.unicode2utf8(convert.value2json(kwargs[b"json"]))
    elif b'data' in kwargs:
        kwargs[b"data"] = convert.unicode2utf8(convert.value2json(kwargs[b"data"]))
    else:
        Log.error("Expecting `json` parameter")

    response = post(url, **kwargs)
    content = response.content
    try:
        details = mo_json.json2value(convert.utf82unicode(content))
    except Exception as e:
        Log.error("Unexpected return value {{content}}", content=content, cause=e)

    if response.status_code not in [200, 201]:
        Log.error("Bad response", cause=Except.wrap(details))

    return details
def find_query(hash):
    """
    FIND QUERY BY HASH, RETURN Response OBJECT
    :param hash:
    :return: Response OBJECT
    """
    try:
        hash = hash.split("/")[0]
        query = query_finder.find(hash)

        if not query:
            return Response(b'{"type": "ERROR", "template": "not found"}', status=404)
        else:
            return Response(convert.unicode2utf8(query), status=200)
    except Exception as e:  # FIX: was py2-only `except Exception, e`
        e = Except.wrap(e)
        Log.warning("problem finding query with hash={{hash}}", hash=hash, cause=e)
        return Response(convert.unicode2utf8(convert.value2json(e)), status=400)
def get_treeherder_job(self):
    """
    FLASK ENDPOINT: RETURN MARKUP FOR THE branch/revision (AND OPTIONAL
    task_id/buildername/timestamp) GIVEN IN THE REQUEST ARGS
    """
    try:
        with Timer("Process Request"):
            args = Dict(**flask.request.args)

            # IS THE branch/revision PENDING?
            result = self.get_markup(
                unwraplist(args.branch),
                unwraplist(args.revision),
                unwraplist(args.task_id),
                unwraplist(args.buildername),
                unwraplist(args.timestamp)
            )

            response_data = convert.unicode2utf8(convert.value2json(result))
            return Response(
                response_data,
                status=200,
                headers={
                    "access-control-allow-origin": "*",
                    "content-type": "text/plain"
                }
            )
    except Exception as e:  # FIX: was py2-only `except Exception, e`
        e = Except.wrap(e)
        Log.warning("Could not process", cause=e)
        e = e.as_dict()

        return Response(
            convert.unicode2utf8(convert.value2json(e)),
            status=400,
            headers={
                "access-control-allow-origin": "*",
                "content-type": "application/json"
            }
        )
def test_save_then_load(self):
    # SAVE A QUERY (VIA meta.save), THEN FETCH IT BACK THROUGH /find/<hash>
    test = {
        "data": [{"a": "b"}],
        "query": {
            "meta": {"save": True},
            "from": TEST_TABLE,
            "select": "a"
        },
        "expecting_list": {
            "meta": {"format": "list"},
            "data": ["b"]
        }
    }

    settings = self.utils.fill_container(test)

    bytes = unicode2utf8(value2json({
        "from": settings.index,
        "select": "a",
        "format": "list"
    }))
    # THE SERVICE COMPUTES THE HASH THE SAME WAY: sha1, FIRST 6 BYTES,
    # base64 WITH "/" REPLACED TO BE URL-SAFE
    expected_hash = convert.bytes2base64(hashlib.sha1(bytes).digest()[0:6]).replace("/", "_")
    wrap(test).expecting_list.meta.saved_as = expected_hash

    self.utils.send_queries(test)

    # ENSURE THE QUERY HAS BEEN INDEXED
    container = elasticsearch.Index(index="saved_queries", kwargs=settings)
    container.flush()
    Till(seconds=5).wait()

    url = URL(self.utils.service_url)

    response = self.utils.try_till_response(url.scheme + "://" + url.host + ":" + unicode(url.port) + "/find/" + expected_hash, data=b'')
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.all_content, bytes)
def add_alias(self, alias=None):
    """
    POINT AN ALIAS AT THIS INDEX.

    :param alias: NAME OF THE ALIAS TO ADD; IF NOT GIVEN, USE THE ALIAS
                  FROM settings (SET ALIAS ACCORDING TO LIFECYCLE RULES)
    """
    # BOTH BRANCHES OF THE ORIGINAL WERE IDENTICAL EXCEPT FOR WHICH ALIAS
    # NAME WAS USED, SO THEY ARE COLLAPSED INTO ONE CALL
    name = alias if alias else self.settings.alias
    self.cluster_state = None
    self.cluster._post(
        "/_aliases",
        data=convert.unicode2utf8(convert.value2json({
            "actions": [
                {"add": {"index": self.settings.index, "alias": name}}
            ]
        })),
        timeout=coalesce(self.settings.timeout, 30)
    )
def execute_query(self, query):
    """
    SERIALIZE query, POST IT TO THE SERVICE, RETURN THE PARSED RESULT
    """
    query = wrap(query)

    try:
        query = convert.unicode2utf8(convert.value2json(query))
        # EXECUTE QUERY
        response = self.try_till_response(self.service_url, data=query)
        if response.status_code != 200:
            error(response)
        result = convert.json2value(convert.utf82unicode(response.all_content))

        return result
    except Exception as e:  # FIX: was py2-only `except Exception, e`
        Log.error("Failed query", e)
def post(self, path, **kwargs):
    """
    HTTP POST TO THE ES CLUSTER AT self.settings.host:port
    Mapping DATA IS JSON-ENCODED; RETURNS THE PARSED RESPONSE
    RAISES (VIA Log.error) ON ANY FAILURE, INCLUDING SHARD FAILURES
    """
    url = self.settings.host + ":" + unicode(self.settings.port) + path

    try:
        wrap(kwargs).headers["Accept-Encoding"] = "gzip,deflate"

        data = kwargs.get(b'data')
        if data == None:
            pass
        elif isinstance(data, Mapping):
            kwargs[b'data'] = data = convert.unicode2utf8(convert.value2json(data))
        elif not isinstance(kwargs["data"], str):
            Log.error("data must be utf8 encoded string")

        if self.debug:
            sample = kwargs.get(b'data', "")[:300]  # ONLY LOG A PREFIX OF THE BODY
            Log.note("{{url}}:\n{{data|indent}}", url=url, data=sample)

        if self.debug:
            Log.note("POST {{url}}", url=url)
        response = http.post(url, **kwargs)
        if response.status_code not in [200, 201]:
            Log.error(response.reason.decode("latin1") + ": " + strings.limit(response.content.decode("latin1"), 100 if self.debug else 10000))
        if self.debug:
            Log.note("response: {{response}}", response=utf82unicode(response.content)[:130])
        details = mo_json.json2value(utf82unicode(response.content))
        if details.error:
            Log.error(convert.quote2string(details.error))
        if details._shards.failed > 0:
            # ANY SHARD FAILURE IS TREATED AS A TOTAL FAILURE
            Log.error("Shard failures {{failures|indent}}",
                failures="---\n".join(r.replace(";", ";\n") for r in details._shards.failures.reason)
            )
        return details
    except Exception as e:  # FIX: was py2-only `except Exception, e`
        if url[0:4] != "http":
            suggestion = " (did you forget \"http://\" prefix on the host name?)"
        else:
            suggestion = ""

        if kwargs.get("data"):
            Log.error(
                "Problem with call to {{url}}" + suggestion + "\n{{body|left(10000)}}",
                url=url,
                body=strings.limit(kwargs["data"], 100 if self.debug else 10000),
                cause=e
            )
        else:
            Log.error("Problem with call to {{url}}" + suggestion, url=url, cause=e)
def post(self, path, **kwargs):
    """
    HTTP POST TO THE ES CLUSTER AT self.settings.host:port
    Mapping DATA IS JSON-ENCODED; RETURNS THE PARSED RESPONSE
    RAISES (VIA Log.error) ON ANY FAILURE, INCLUDING SHARD FAILURES
    """
    url = self.settings.host + ":" + unicode(self.settings.port) + path

    try:
        wrap(kwargs).headers["Accept-Encoding"] = "gzip,deflate"

        data = kwargs.get(b'data')
        if data == None:
            pass
        elif isinstance(data, Mapping):
            kwargs[b'data'] = data = convert.unicode2utf8(convert.value2json(data))
        elif not isinstance(kwargs["data"], str):
            Log.error("data must be utf8 encoded string")

        if self.debug:
            sample = kwargs.get(b'data', "")[:300]  # ONLY LOG A PREFIX OF THE BODY
            Log.note("{{url}}:\n{{data|indent}}", url=url, data=sample)

        if self.debug:
            Log.note("POST {{url}}", url=url)
        response = http.post(url, **kwargs)
        if response.status_code not in [200, 201]:
            Log.error(response.reason.decode("latin1") + ": " + strings.limit(response.content.decode("latin1"), 100 if self.debug else 10000))
        if self.debug:
            Log.note("response: {{response}}", response=utf82unicode(response.content)[:130])
        details = convert.json2value(utf82unicode(response.content))
        if details.error:
            Log.error(convert.quote2string(details.error))
        if details._shards.failed > 0:
            # ANY SHARD FAILURE IS TREATED AS A TOTAL FAILURE
            Log.error("Shard failures {{failures|indent}}",
                failures="---\n".join(r.replace(";", ";\n") for r in details._shards.failures.reason)
            )
        return details
    except Exception as e:  # FIX: was py2-only `except Exception, e`
        if url[0:4] != "http":
            suggestion = " (did you forget \"http://\" prefix on the host name?)"
        else:
            suggestion = ""

        if kwargs.get("data"):
            Log.error(
                "Problem with call to {{url}}" + suggestion + "\n{{body|left(10000)}}",
                url=url,
                body=strings.limit(kwargs["data"], 100 if self.debug else 10000),
                cause=e
            )
        else:
            Log.error("Problem with call to {{url}}" + suggestion, url=url, cause=e)
def _send_error(active_data_timer, body, e):
    """LOG THE FAILURE, RECORD THE REQUEST, AND RETURN THE ERROR AS A 400 JSON RESPONSE."""
    record_request(flask.request, None, body, e)
    Log.warning("Could not process\n{{body}}", body=body.decode("latin1"), cause=e)

    e = e.__data__()
    e.meta.timing.total = active_data_timer.duration.seconds

    # TRACES COULD BE TRIMMED HERE TO BE NICER TO HUMANS, BUT ARE LEFT INTACT
    return Response(convert.unicode2utf8(convert.value2json(e)), status=400)
def get(*args, **kwargs):
    """SIMULATE AN HTTP GET: RUN THE JSON QUERY IN-PROCESS AND RETURN A RESPONSE-LIKE OBJECT."""
    body = kwargs.get("data")
    if not body:
        return wrap({"status_code": 400})

    text = replace_vars(convert.utf82unicode(body))
    result = jx.run(convert.json2value(text))

    output_bytes = convert.unicode2utf8(convert.value2json(result))
    return wrap({
        "status_code": 200,
        "all_content": output_bytes,
        "content": output_bytes
    })
def test_longest_running_tests(self):
    """Top-100 slowest tests (by mean result.duration) since build.date 1439337600."""
    inner = {
        "from": "unittest",
        "where": {"and": [{"gt": {"build.date": "1439337600"}}]},
        "groupby": [
            "build.platform",
            "build.type",
            "run.suite",
            "result.test"
        ],
        "select": [{"aggregate": "avg", "name": "avg", "value": "result.duration"}],
        "format": "table",
        "limit": 100
    }
    spec = wrap({"query": {
        "sort": {"sort": -1, "field": "avg"},
        "from": inner,
        "limit": 100,
        "format": "list"
    }})

    payload = convert.unicode2utf8(convert.value2json(spec.query))
    # EXECUTE QUERY
    with Timer("query"):
        response = http.get(self.service_url, data=payload)
        if response.status_code != 200:
            error(response)
    result = convert.json2value(convert.utf82unicode(response.all_content))

    Log.note("result\n{{result|indent}}", {"result": result})
def store_public_data(path, permissions):
    """
    :param path: THE BUCKET TO USE
    :param permissions: THE DATA PATTERN EXPECTED
    :return: LINK TO DATA
    """
    try:
        request = flask.request
        if request.content_length > permissions.max_size or len(request.get_data()) > permissions.max_size:
            Log.error("Not acceptable, too big")

        json_data = wrap(request.json)
        # EVERY LEAF IN THE PERMITTED PATTERN MUST BE PRESENT IN THE PAYLOAD
        for k, _ in permissions.pattern.leaves():
            if not json_data[k]:
                Log.error("Not acceptable\n{{data|json}}", data=json_data)

        link, id = submit_data(path, permissions, request.json)

        response_content = convert.unicode2utf8(convert.value2json({
            "link": link,
            "etl": {"id": id}
        }))

        return Response(
            response_content,
            status=200,
            headers={
                'content-type': RESPONSE_CONTENT_TYPE
            }
        )
    except Exception as e:  # FIX: was py2-only `except Exception, e`
        e = Except.wrap(e)
        Log.warning("Error", cause=e)
        return Response(
            RESPONSE_4XX,
            status=403,
            headers={
                'content-type': "text/plain"
            }
        )
def test_failures_by_directory(self):
    """Count results whose test path starts with "/", grouped by test and ok-status."""
    if self.not_real_service():
        return

    spec = wrap({"query": {
        "from": {
            "type": "elasticsearch",
            "settings": {
                "host": ES_CLUSTER_LOCATION,
                "index": "unittest",
                "type": "test_result"
            }
        },
        "select": [{"aggregate": "count"}],
        "edges": ["result.test", "result.ok"],
        "where": {"prefix": {"result.test": "/"}},
        "format": "table"
    }})

    payload = convert.unicode2utf8(convert.value2json(spec.query))
    # EXECUTE QUERY
    with Timer("query"):
        response = http.get(self.service_url, data=payload)
        if response.status_code != 200:
            error(response)
    result = convert.json2value(convert.utf82unicode(response.all_content))

    Log.note("result\n{{result|indent}}", {"result": result})
def set_refresh_interval(self, seconds, **kwargs):
    """
    :param seconds:  -1 FOR NO REFRESH
    :param kwargs: ANY OTHER REQUEST PARAMETERS
    :return: None
    """
    # seconds <= 0 DISABLES REFRESH (-1); OTHERWISE FORMAT AS "<n>s"
    if seconds <= 0:
        interval = -1
    else:
        interval = unicode(seconds) + "s"

    if self.cluster.version.startswith("0.90."):
        response = self.cluster.put(
            "/" + self.settings.index + "/_settings",
            data='{"index":{"refresh_interval":' + convert.value2json(interval) + '}}',
            **kwargs
        )

        # 0.90 REPORTS SUCCESS VIA `ok`
        result = mo_json.json2value(utf82unicode(response.all_content))
        if not result.ok:
            Log.error("Can not set refresh interval ({{error}})", {
                "error": utf82unicode(response.all_content)
            })
    elif any(map(self.cluster.version.startswith, ["1.4.", "1.5.", "1.6.", "1.7."])):
        response = self.cluster.put(
            "/" + self.settings.index + "/_settings",
            data=convert.unicode2utf8('{"index":{"refresh_interval":' + convert.value2json(interval) + '}}'),
            **kwargs
        )

        # 1.x REPORTS SUCCESS VIA `acknowledged`
        result = mo_json.json2value(utf82unicode(response.all_content))
        if not result.acknowledged:
            Log.error("Can not set refresh interval ({{error}})", {
                "error": utf82unicode(response.all_content)
            })
    else:
        Log.error("Do not know how to handle ES version {{version}}", version=self.cluster.version)
def send_queries(self, subtest):
    """
    RUN subtest.query ONCE PER expecting_* PROPERTY (EACH NAMES A RESPONSE
    FORMAT) AND COMPARE EACH RESPONSE AGAINST ITS EXPECTED VALUE
    """
    subtest = wrap(subtest)

    try:
        # EXECUTE QUERY
        num_expectations = 0
        for k, v in subtest.items():
            if k.startswith("expecting_"):  # WHAT FORMAT ARE WE REQUESTING
                format = k[len("expecting_"):]
            elif k == "expecting":  # NO FORMAT REQUESTED (TO TEST DEFAULT FORMATS)
                format = None
            else:
                continue

            num_expectations += 1
            expected = v

            subtest.query.format = format
            subtest.query.meta.testing = True  # MARK ALL QUERIES FOR TESTING SO FULL METADATA IS AVAILABLE BEFORE QUERY EXECUTION
            query = convert.unicode2utf8(convert.value2json(subtest.query))
            # EXECUTE QUERY
            response = self.try_till_response(self.service_url, data=query)

            if response.status_code != 200:
                error(response)
            result = convert.json2value(convert.utf82unicode(response.all_content))

            # HOW TO COMPARE THE OUT-OF-ORDER DATA?
            compare_to_expected(subtest.query, result, expected)
            Log.note("Test result compares well")
        if num_expectations == 0:
            Log.error(
                "Expecting test {{name|quote}} to have property named 'expecting_*' for testing the various format clauses",
                {"name": subtest.name}
            )
    except Exception as e:  # FIX: was py2-only `except Exception, e`
        Log.error("Failed test {{name|quote}}", {"name": subtest.name}, e)
def get_bugs(settings):
    """
    FETCH RECENT, UNEXPIRED intermittent-failure BUGS AND PARSE THEIR
    SUMMARIES INTO THE BUG RECORDS
    """
    request_bugs = convert.unicode2utf8(convert.value2json({
        "query": {"filtered": {
            "query": {"match_all": {}},
            "filter": {"and": [
                {"term": {"keyword": "intermittent-failure"}},
                {"range": {"expires_on": {"gt": Date.now().milli}}},
                {"range": {"modified_ts": {"gt": RECENT.milli}}}
            ]}
        }},
        "from": 0,
        "size": 200000,
        "sort": [],
        "facets": {},
        "fields": ["bug_id", "bug_status", "short_desc", "status_whiteboard"]
    }))

    result = http.post(settings.bugs.url, data=request_bugs).all_content
    bugs = UniqueIndex(["bug_id"], convert.json2value(convert.utf82unicode(result)).hits.hits.fields)

    for i, b in enumerate(bugs):
        try:
            parse_short_desc(b)
        except Exception as e:  # FIX: was py2-only `except Exception, e`
            Log.warning("can not parse {{bug_id}} {{short_desc}}", bug_id=b.bug_id, short_desc=b.short_desc, cause=e)
from pyLibrary.dot import unwrap
from pyLibrary.maths.randoms import Random
from modatasubmission import Client


settings = jsons.ref.get("file://~/MoDataSubmissionClient.json")

# FIRST: SUBMIT A SMALL RANDOM PAYLOAD THROUGH THE Client API
data = {
    "constant": "this is a test",
    "random-data": convert.bytes2base64(Random.bytes(100))
}
link, id = Client(settings.url, unwrap(settings.hawk)).send(data)
Log.note("Success!  Located at {{link}} id={{id}}", link=link, id=id)

# SECOND: SUBMIT THE EXAMPLE PAYLOAD DIRECTLY OVER HTTP
data = convert.unicode2utf8(convert.value2json(settings.example))
response = requests.post(
    settings.url,
    data=data,
    headers={'Content-Type': b'application/json'}
)
if response.status_code != 200:
    Log.error("Expecting a pass")
details = convert.json2value(convert.utf82unicode(response.content))
Log.note("Success!  Located at {{link}} id={{id}}", link=details.link, id=details.etl.id)
# PARSE RECENT BUGS, FETCH THEIR RECENT COMMENTS, AND COLLECT PARSE ERRORS
result = http.post(settings.bugs.url, data=request_bugs).all_content
bugs = UniqueIndex(["bug_id"], convert.json2value(convert.utf82unicode(result)).hits.hits.fields)

for i, b in enumerate(bugs):
    try:
        parse_short_desc(b)
    except Exception as e:  # FIX: was py2-only `except Exception, e`
        Log.warning("can not parse {{bug_id}} {{short_desc}}", bug_id=b.bug_id, short_desc=b.short_desc, cause=e)

request_comments = convert.unicode2utf8(convert.value2json({
    "query": {"filtered": {
        "query": {"match_all": {}},
        "filter": {"and": [
            {"terms": {"bug_id": bugs.keys()}},
            {"range": {"modified_ts": {"gt": RECENT.milli}}}
        ]}
    }},
    "from": 0,
    "size": 200000,
    "sort": [],
    "facets": {},
    "fields": ["bug_id", "modified_by", "modified_ts", "comment"]
}))

comments = convert.json2value(convert.utf82unicode(http.post(settings.comments.url, data=request_comments).all_content)).hits.hits.fields

results = []
for c in comments:
    errors = parse_comment(bugs[c.bug_id], c)
    results.extend(errors)

tab = convert.list2tab(results)
def _default(path):
    """FALLBACK HANDLER: LOG THE REQUEST AND SERVE THE OVERVIEW PAGE."""
    record_request(flask.request, None, flask.request.get_data(), None)

    html = convert.unicode2utf8(OVERVIEW)
    return Response(html, status=200, headers={"Content-Type": "text/html"})
def query(path):
    """
    FLASK ENDPOINT: ACCEPT A JSON (OR SQL) QUERY, RUN IT, AND RETURN THE
    RESULT WITH DETAILED PER-PHASE TIMING EMBEDDED IN THE JSON RESPONSE
    """
    with CProfiler():
        try:
            with Timer("total duration") as query_timer:
                preamble_timer = Timer("preamble")
                with preamble_timer:
                    if flask.request.headers.get("content-length", "") in ["", "0"]:
                        # ASSUME A BROWSER HIT THIS POINT, SEND text/html RESPONSE BACK
                        return Response(BLANK, status=400, headers={"Content-Type": "text/html"})
                    elif int(flask.request.headers["content-length"]) > QUERY_SIZE_LIMIT:
                        Log.error("Query is too large")

                    request_body = flask.request.get_data().strip()
                    text = convert.utf82unicode(request_body)
                    text = replace_vars(text, flask.request.args)
                    data = convert.json2value(text)
                    record_request(flask.request, data, None, None)
                    if data.meta.testing:
                        _test_mode_wait(data)

                translate_timer = Timer("translate")
                with translate_timer:
                    if data.sql:
                        data = parse_sql(data.sql)
                    frum = wrap_from(data['from'])
                    result = jx.run(data, frum=frum)

                    if isinstance(result, Container):  # TODO: REMOVE THIS CHECK, jx SHOULD ALWAYS RETURN Containers
                        result = result.format(data.format)

                save_timer = Timer("save")
                with save_timer:
                    if data.meta.save:
                        try:
                            result.meta.saved_as = save_query.query_finder.save(data)
                        except Exception as e:  # FIX: was py2-only `except Exception, e`
                            Log.warning("Unexpected save problem", cause=e)

                result.meta.timing.preamble = Math.round(preamble_timer.duration.seconds, digits=4)
                result.meta.timing.translate = Math.round(translate_timer.duration.seconds, digits=4)
                result.meta.timing.save = Math.round(save_timer.duration.seconds, digits=4)
                result.meta.timing.total = "{{TOTAL_TIME}}"  # TIMING PLACEHOLDER

                with Timer("jsonification") as json_timer:
                    response_data = convert.unicode2utf8(convert.value2json(result))

            with Timer("post timer"):
                # IMPORTANT: WE WANT THE TIME OF THE JSON SERIALIZATION, AND HAVE IT IN THE JSON ITSELF.
                # WE CHEAT BY DOING A (HOPEFULLY FAST) STRING REPLACEMENT AT THE VERY END
                timing_replacement = b'"total": ' + str(Math.round(query_timer.duration.seconds, digits=4)) + \
                    b', "jsonification": ' + str(Math.round(json_timer.duration.seconds, digits=4))
                response_data = response_data.replace(b'"total": "{{TOTAL_TIME}}"', timing_replacement)
                Log.note("Response is {{num}} bytes in {{duration}}", num=len(response_data), duration=query_timer.duration)

                return Response(
                    response_data,
                    status=200,
                    headers={"Content-Type": result.meta.content_type}
                )
        except Exception as e:  # FIX: was py2-only `except Exception, e`
            e = Except.wrap(e)
            return _send_error(query_timer, request_body, e)
from active_data import record_request, cors_wrapper from active_data.actions import save_query from mo_dots import coalesce, join_field, split_field, wrap, listwrap from pyLibrary import convert from mo_files import File from mo_math import Math from pyLibrary.queries import jx, meta, wrap_from from pyLibrary.queries.containers import Container, STRUCT from pyLibrary.queries.meta import TOO_OLD from mo_testing.fuzzytestcase import assertAlmostEqual from mo_threads import Till from mo_times.dates import Date from mo_times.durations import MINUTE from mo_times.timer import Timer BLANK = convert.unicode2utf8(File("active_data/public/error.html").read()) QUERY_SIZE_LIMIT = 10 * 1024 * 1024 @cors_wrapper def query(path): with CProfiler(): try: with Timer("total duration") as query_timer: preamble_timer = Timer("preamble") with preamble_timer: if flask.request.headers.get("content-length", "") in ["", "0"]: # ASSUME A BROWSER HIT THIS POINT, SEND text/html RESPONSE BACK return Response(BLANK, status=400,