def query(self, _query):
    """
    Translate a jx query into the appropriate Elasticsearch request and
    return the result.

    :param _query: a jx query (dict or QueryOp) to run against this container
    :return: whatever the dispatched handler (deep/aggs/set op) returns
    :raises Exception: via Log.error() when the query can not be handled
    """
    try:
        query = QueryOp.wrap(_query, container=self, namespace=self.namespace)

        # reject aggregates ES does not understand before sending anything
        for s in listwrap(query.select):
            # BUGFIX: use identity comparison with None (was `!= None`)
            if s.aggregate is not None and not aggregates.get(s.aggregate):
                Log.error(
                    "ES can not aggregate {{name}} because {{aggregate|quote}} is not a recognized aggregate",
                    name=s.name,
                    aggregate=s.aggregate
                )

        frum = query["from"]
        if isinstance(frum, QueryOp):
            # sub-query: evaluate the inner query first, then run the
            # outer query locally over its result
            result = self.query(frum)
            q2 = query.copy()
            q2.frum = result
            return jx.run(q2)

        # dispatch to the specialized translator that accepts this query
        if is_deepop(self.es, query):
            return es_deepop(self.es, query)
        if is_aggsop(self.es, query):
            return es_aggsop(self.es, frum, query)
        if is_setop(self.es, query):
            return es_setop(self.es, query)
        Log.error("Can not handle")
    except Exception as e:
        e = Except.wrap(e)
        if "Data too large, data for" in e:
            # fielddata circuit breaker tripped; clear the ES cache, then report
            http.post(self.es.cluster.url / "_cache/clear")
            Log.error("Problem (Tried to clear Elasticsearch cache)", e)
        Log.error("problem", e)
def query(self, _query):
    """
    Execute a jx query against this Elasticsearch table.

    The query is normalized through every namespace, validated, then handed
    to the first specialized translator that accepts it.

    :param _query: a jx query (dict or QueryOp)
    :return: result of the dispatched deep/aggs/set handler
    :raises Exception: via Log.error() when the query can not be handled
    """
    try:
        query = QueryOp.wrap(_query, table=self)
        for namespace in self.namespaces:
            query = namespace.convert(query)

        # verify every requested aggregate is one ES knows about
        for select in listwrap(query.select):
            if not aggregates.get(select.aggregate):
                Log.error(
                    "ES can not aggregate {{name}} because {{aggregate|quote}} is not a recognized aggregate",
                    name=select.name,
                    aggregate=select.aggregate
                )

        source = query["from"]
        if isinstance(source, QueryOp):
            # nested query: evaluate the inner query, then run the outer
            # query locally over its result
            inner_result = self.query(source)
            outer = query.copy()
            outer.frum = inner_result
            return jx.run(outer)

        # hand off to the first translator that accepts this query
        if is_deepop(self._es, query):
            return es_deepop(self._es, query)
        if is_aggsop(self._es, query):
            return es_aggsop(self._es, source, query)
        if is_setop(self._es, query):
            return es_setop(self._es, query)
        Log.error("Can not handle")
    except Exception as e:
        e = Except.wrap(e)
        if "Data too large, data for" in e:
            # fielddata blew the breaker; try clearing the cache before reporting
            http.post(self._es.cluster.path + "/_cache/clear")
            Log.error("Problem (Tried to clear Elasticsearch cache)", e)
        Log.error("problem", e)
def get_active_data(settings):
    """
    Pull recent failing unittest results from ActiveData and write them,
    tab-delimited, to the configured output file.

    :param settings: configuration; settings.output.activedata is the
        destination file path
    """
    query = {
        "limit": 100000,
        "from": "unittest",
        "where": {"and": [
            {"eq": {"result.ok": False}},
            {"gt": {"run.timestamp": RECENT.milli}}
        ]},
        "select": [
            "result.ok",
            "build.branch",
            "build.platform",
            "build.release",
            "build.revision",  # BUGFIX: was listed twice; duplicate removed
            "build.type",
            "build.date",
            "run.timestamp",
            "run.suite",
            "run.chunk",
            "result.test",
            "run.stats.status.test_status"
        ],
        "format": "table"
    }

    result = http.post(
        "http://activedata.allizom.org/query",
        data=convert.unicode2utf8(convert.value2json(query))
    )
    query_result = convert.json2value(convert.utf82unicode(result.all_content))
    tab = convert.table2tab(query_result.header, query_result.data)
    File(settings.output.activedata).write(tab)
def query(self, _query):
    """
    Translate a jx query to an Elasticsearch request and return the result.

    Tries the ES 1.4 translators first, then falls back to the ES 0.9 ones.

    :param _query: a jx query (dict or QueryOp) to run against this schema
    :return: result of whichever specialized handler accepts the query
    :raises Exception: via Log.error() when no handler can run the query
    """
    try:
        query = QueryOp.wrap(_query, schema=self)
        for n in self.namespaces:
            query = n.convert(query)
        if self.typed:
            # rewrite the query for the "typed" storage layout
            query = Typed().convert(query)

        # reject aggregates that ES 1.4 does not recognize
        for s in listwrap(query.select):
            if not aggregates1_4.get(s.aggregate):
                Log.error(
                    "ES can not aggregate {{name}} because {{aggregate|quote}} is not a recognized aggregate",
                    name=s.name,
                    aggregate=s.aggregate,
                )

        frum = query["from"]
        if isinstance(frum, QueryOp):
            # sub-query: evaluate it first, then run the outer query locally
            result = self.query(frum)
            q2 = query.copy()
            q2.frum = result
            return jx.run(q2)

        if is_deepop(self._es, query):
            return es_deepop(self._es, query)
        if is_aggsop(self._es, query):
            return es_aggsop(self._es, frum, query)
        if is_setop(self._es, query):
            return es_setop(self._es, query)
        # fall back to the ES 0.9 translators
        if es09_setop.is_setop(query):
            return es09_setop.es_setop(self._es, None, query)
        if es09_aggop.is_aggop(query):
            return es09_aggop.es_aggop(self._es, None, query)
        Log.error("Can not handle")
    except Exception as e:  # was Python-2-only "except Exception, e"
        e = Except.wrap(e)
        if "Data too large, data for" in e:
            # fielddata circuit breaker tripped; clear the cache, then report
            http.post(self._es.cluster.path + "/_cache/clear")
            Log.error("Problem (Tried to clear Elasticsearch cache)", e)
        Log.error("problem", e)
def query(self, _query):
    """
    Run a jx query against this ES index (legacy FromES interface).

    :param _query: a jx query (dict or Query) to run
    :return: result of the dispatched aggs/field/set handler
    :raises Exception: via Log.error() when not ready, or when the query
        can not be handled
    """
    try:
        if not self.ready:
            Log.error("Must use with clause for any instance of FromES")

        query = Query(_query, schema=self)

        # TODO: restore the MVEL column handling that was disabled here
        # (get_columns + _MVEL(frum) was failing; see history)

        for s in listwrap(query.select):
            if not aggregates1_4[s.aggregate]:
                # BUGFIX: report the offending select clause `s`, not
                # self.select[0], which may be a different clause entirely
                Log.error("ES can not aggregate " + s.name + " because '" + s.aggregate + "' is not a recognized aggregate")

        frum = query["from"]
        if isinstance(frum, Query):
            # sub-query: evaluate it first, then run the outer query locally
            result = self.query(frum)
            q2 = query.copy()
            q2.frum = result
            return qb.run(q2)

        if is_aggsop(self._es, query):
            return es_aggsop(self._es, frum, query)
        if is_fieldop(self._es, query):
            return es_fieldop(self._es, query)
        if is_setop(self._es, query):
            return es_setop(self._es, query)
        Log.error("Can not handle")
    except Exception as e:  # was Python-2-only "except Exception, e"
        e = Except.wrap(e)
        if "Data too large, data for" in e:
            # fielddata circuit breaker tripped; clear the cache, then report
            http.post(self._es.cluster.path + "/_cache/clear")
            Log.error("Problem (Tried to clear Elasticsearch cache)", e)
        Log.error("problem", e)
def post(self, path, **kwargs):
    """
    POST to this ES cluster at `path` and return the parsed JSON response.

    :param path: url path, appended to "host:port"
    :param kwargs: passed through to http.post(); `data` may be a Mapping
        (serialized to utf8 JSON here) or an already-utf8-encoded str
    :return: parsed response details
    :raises Exception: via Log.error() on http errors, ES-reported errors,
        or shard failures
    """
    url = self.settings.host + ":" + unicode(self.settings.port) + path
    try:
        wrap(kwargs).headers["Accept-Encoding"] = "gzip,deflate"

        # NOTE(review): b'data' and "data" are the same dict key under
        # Python 2 only; this mix breaks on Python 3 — verify before porting
        data = kwargs.get(b'data')
        if data is None:  # BUGFIX: identity comparison (was `== None`)
            pass
        elif isinstance(data, Mapping):
            # serialize dict-like payloads to utf8 JSON
            kwargs[b'data'] = data = convert.unicode2utf8(convert.value2json(data))
        elif not isinstance(kwargs["data"], str):
            Log.error("data must be utf8 encoded string")

        if self.debug:
            sample = kwargs.get(b'data', "")[:300]
            Log.note("{{url}}:\n{{data|indent}}", url=url, data=sample)

        if self.debug:
            Log.note("POST {{url}}", url=url)
        response = http.post(url, **kwargs)
        if response.status_code not in [200, 201]:
            Log.error(response.reason.decode("latin1") + ": " + strings.limit(response.content.decode("latin1"), 100 if self.debug else 10000))
        if self.debug:
            Log.note("response: {{response}}", response=utf82unicode(response.content)[:130])
        details = convert.json2value(utf82unicode(response.content))
        if details.error:
            Log.error(convert.quote2string(details.error))
        if details._shards.failed > 0:
            Log.error(
                "Shard failures {{failures|indent}}",
                failures="---\n".join(r.replace(";", ";\n") for r in details._shards.failures.reason)
            )
        return details
    except Exception as e:  # was Python-2-only "except Exception, e"
        if url[0:4] != "http":
            suggestion = " (did you forget \"http://\" prefix on the host name?)"
        else:
            suggestion = ""
        if kwargs.get("data"):
            Log.error(
                "Problem with call to {{url}}" + suggestion + "\n{{body|left(10000)}}",
                url=url,
                body=strings.limit(kwargs["data"], 100 if self.debug else 10000),
                cause=e
            )
        else:
            Log.error("Problem with call to {{url}}" + suggestion, url=url, cause=e)
def post(self, path, **kwargs):
    """
    POST to this ES cluster at `path` and return the parsed JSON response.

    :param path: url path, appended to "host:port"
    :param kwargs: passed through to http.post(); `data` may be a Mapping
        (serialized to utf8 JSON here) or an already-utf8-encoded str
    :return: parsed response details
    :raises Exception: via Log.error() on http errors, ES-reported errors,
        or shard failures
    """
    url = self.settings.host + ":" + unicode(self.settings.port) + path
    try:
        wrap(kwargs).headers["Accept-Encoding"] = "gzip,deflate"

        # NOTE(review): b'data' and "data" are the same dict key under
        # Python 2 only; this mix breaks on Python 3 — verify before porting
        data = kwargs.get(b'data')
        if data is None:  # BUGFIX: identity comparison (was `== None`)
            pass
        elif isinstance(data, Mapping):
            # serialize dict-like payloads to utf8 JSON
            kwargs[b'data'] = data = convert.unicode2utf8(convert.value2json(data))
        elif not isinstance(kwargs["data"], str):
            Log.error("data must be utf8 encoded string")

        if self.debug:
            sample = kwargs.get(b'data', "")[:300]
            Log.note("{{url}}:\n{{data|indent}}", url=url, data=sample)

        if self.debug:
            Log.note("POST {{url}}", url=url)
        response = http.post(url, **kwargs)
        if response.status_code not in [200, 201]:
            Log.error(response.reason.decode("latin1") + ": " + strings.limit(response.content.decode("latin1"), 100 if self.debug else 10000))
        if self.debug:
            Log.note("response: {{response}}", response=utf82unicode(response.content)[:130])
        details = mo_json.json2value(utf82unicode(response.content))
        if details.error:
            Log.error(convert.quote2string(details.error))
        if details._shards.failed > 0:
            Log.error(
                "Shard failures {{failures|indent}}",
                failures="---\n".join(r.replace(";", ";\n") for r in details._shards.failures.reason)
            )
        return details
    except Exception as e:  # was Python-2-only "except Exception, e"
        if url[0:4] != "http":
            suggestion = " (did you forget \"http://\" prefix on the host name?)"
        else:
            suggestion = ""
        if kwargs.get("data"):
            Log.error(
                "Problem with call to {{url}}" + suggestion + "\n{{body|left(10000)}}",
                url=url,
                body=strings.limit(kwargs["data"], 100 if self.debug else 10000),
                cause=e
            )
        else:
            Log.error("Problem with call to {{url}}" + suggestion, url=url, cause=e)
def _post(self, path, **kwargs):
    """
    POST to this ES cluster at `path` and return the parsed JSON response.

    :param path: url path, appended to "host:port"
    :param kwargs: passed through to http.post(); `data`, if present, must
        already be a utf8-encoded str
    :return: parsed response details
    :raises Exception: via Log.error() on http errors, ES-reported errors,
        or shard failures
    """
    url = self.settings.host + ":" + unicode(self.settings.port) + path
    try:
        wrap(kwargs).headers["Accept-Encoding"] = "gzip,deflate"

        if "data" in kwargs and not isinstance(kwargs["data"], str):
            Log.error("data must be utf8 encoded string")

        if self.debug:
            sample = kwargs.get("data", "")[:300]
            Log.note("{{url}}:\n{{data|indent}}", url=url, data=sample)
        response = http.post(url, **kwargs)
        if response.status_code not in [200, 201]:
            Log.error(response.reason + ": " + response.all_content)
        if self.debug:
            Log.note("response: {{response}}", response=utf82unicode(response.all_content)[:130])
        details = convert.json2value(utf82unicode(response.all_content))
        if details.error:
            Log.error(convert.quote2string(details.error))
        if details._shards.failed > 0:
            Log.error(
                "Shard failures {{failures|indent}}",
                failures="---\n".join(r.replace(";", ";\n") for r in details._shards.failures.reason)
            )
        return details
    except Exception as e:  # was Python-2-only "except Exception, e"
        if url[0:4] != "http":
            suggestion = " (did you forget \"http://\" prefix on the host name?)"
        else:
            suggestion = ""
        if kwargs.get("data"):
            Log.error(
                "Problem with call to {{url}}" + suggestion + "\n{{body|left(10000)}}",
                url=url,
                body=kwargs["data"][0:10000] if self.debug else kwargs["data"][0:100],
                cause=e
            )
        else:
            Log.error("Problem with call to {{url}}" + suggestion, {"url": url}, e)
def _post(self, path, **kwargs):
    """
    POST to this ES cluster at `path` and return the parsed JSON response.

    :param path: url path, appended to "host:port"
    :param kwargs: passed through to http.post(); `data`, if present, must
        already be a utf8-encoded str
    :return: parsed response details
    :raises Exception: via Log.error() on http errors, ES-reported errors,
        or shard failures
    """
    url = self.settings.host + ":" + unicode(self.settings.port) + path
    try:
        wrap(kwargs).headers["Accept-Encoding"] = "gzip,deflate"

        if "data" in kwargs and not isinstance(kwargs["data"], str):
            Log.error("data must be utf8 encoded string")

        if self.debug:
            sample = kwargs.get("data", "")[:300]
            Log.note("{{url}}:\n{{data|indent}}", url=url, data=sample)
        response = http.post(url, **kwargs)
        if response.status_code not in [200, 201]:
            Log.error(response.reason + ": " + response.content)
        if self.debug:
            Log.note("response: {{response}}", response=utf82unicode(response.content)[:130])
        details = convert.json2value(utf82unicode(response.content))
        if details.error:
            Log.error(convert.quote2string(details.error))
        if details._shards.failed > 0:
            Log.error(
                "Shard failures {{failures|indent}}",
                failures="---\n".join(r.replace(";", ";\n") for r in details._shards.failures.reason)
            )
        return details
    except Exception as e:  # was Python-2-only "except Exception, e"
        if url[0:4] != "http":
            suggestion = " (did you forget \"http://\" prefix on the host name?)"
        else:
            suggestion = ""
        if kwargs.get("data"):
            Log.error(
                "Problem with call to {{url}}" + suggestion + "\n{{body|left(10000)}}",
                url=url,
                body=kwargs["data"][0:10000] if self.debug else kwargs["data"][0:100],
                cause=e
            )
        else:
            Log.error("Problem with call to {{url}}" + suggestion, {"url": url}, e)
def get_bugs(settings):
    """
    Pull recent intermittent-failure bugs and their comments from the
    configured ES endpoints, parse them, and write a tab-delimited report.

    :param settings: configuration; uses settings.bugs.url,
        settings.comments.url and settings.output.tab
    """
    request_bugs = convert.unicode2utf8(convert.value2json({
        "query": {"filtered": {
            "query": {"match_all": {}},
            "filter": {"and": [
                {"term": {"keyword": "intermittent-failure"}},
                {"range": {"expires_on": {"gt": Date.now().milli}}},
                {"range": {"modified_ts": {"gt": RECENT.milli}}}
            ]}
        }},
        "from": 0,
        "size": 200000,
        "sort": [],
        "facets": {},
        "fields": ["bug_id", "bug_status", "short_desc", "status_whiteboard"]
    }))
    result = http.post(settings.bugs.url, data=request_bugs).all_content
    bugs = UniqueIndex(["bug_id"], convert.json2value(convert.utf82unicode(result)).hits.hits.fields)

    for i, b in enumerate(bugs):
        try:
            parse_short_desc(b)
        except Exception as e:  # was Python-2-only "except Exception, e"
            Log.warning("can not parse {{bug_id}} {{short_desc}}", bug_id=b.bug_id, short_desc=b.short_desc, cause=e)

    request_comments = convert.unicode2utf8(convert.value2json({
        "query": {"filtered": {
            "query": {"match_all": {}},
            "filter": {"and": [
                {"terms": {"bug_id": bugs.keys()}},
                {"range": {"modified_ts": {"gt": RECENT.milli}}}
            ]}
        }},
        "from": 0,
        "size": 200000,
        "sort": [],
        "facets": {},
        "fields": ["bug_id", "modified_by", "modified_ts", "comment"]
    }))
    comments = convert.json2value(convert.utf82unicode(http.post(settings.comments.url, data=request_comments).all_content)).hits.hits.fields

    results = []
    for c in comments:
        errors = parse_comment(bugs[c.bug_id], c)
        results.extend(errors)

    tab = convert.list2tab(results)
    File(settings.output.tab).write(tab)


def parse_short_desc(bug):
    # NOTE(review): this definition may continue beyond the visible chunk;
    # only the 2-or-3-part case is handled here — verify against the full file
    parts = bug.short_desc.split("|")
    if len(parts) in [2, 3]:
        bug.result.test = parts[0].strip()