"id", "tdad_id", "status", "last_updated", "severity", "confidence", "details", "solution" ], "where": {"and": [ {"terms": {"tdad_id": evaled_tests}}, {"term": {"reason": REASON}} ]} }) found_alerts = Q.unique_index(alerts, "tdad_id") current_alerts = Q.unique_index(current_alerts, "tdad_id") new_alerts = found_alerts - current_alerts changed_alerts = current_alerts & found_alerts obsolete_alerts = Q.filter(current_alerts - found_alerts, {"not": {"term": {"status": "obsolete"}}}) if debug: Log.note("Update Alerts: ({{num_new}} new, {{num_change}} changed, {{num_delete}} obsoleted)", { "num_new": len(new_alerts), "num_change": len(changed_alerts), "num_delete": len(obsolete_alerts) }) if new_alerts: for a in new_alerts:
def talos_alert_revision(settings):
    """Roll individual sustained-median Talos alerts up into one alert per revision.

    Pulls recent non-obsolete sustained-median alerts from the alerts DB,
    groups them by revision, and builds a single revision-level summary alert
    for each revision.  New summaries are inserted, the underlying sustained
    alerts are linked to their revision-level parent in the `hierarchy` table,
    changed summaries are updated in place, and summaries whose revision no
    longer has live sustained alerts are marked obsolete.

    settings: configuration Struct; this code reads `settings.alerts`
        (DB connection info), `settings.query["from"]` (ElasticSearch config),
        `settings.dimension.filename`, and `settings.param.debug`.
    NOTE(review): relies on module-level names defined elsewhere in this
        file: REASON, SEVERITY, NOW, LOOK_BACK, TEMPLATE, SUBJECT, TBPL_PATH,
        MECURIAL_PATH, and the talos_sustained_median module — confirm they
        are in scope.
    """
    assert settings.alerts != None
    settings.db.debug = settings.param.debug
    with DB(settings.alerts) as db:
        with ESQuery(ElasticSearch(settings.query["from"])) as esq:
            dbq = DBQuery(db)
            esq.addDimension(CNV.JSON2object(File(settings.dimension.filename).read()))

            #TODO: REMOVE, LEAVE IN DB
            # Debug-only: push the current email template/subject for this
            # reason code into the `reasons` table so the DB copy stays in sync
            # with the constants in this module.
            if db.debug:
                db.execute("update reasons set email_subject={{subject}}, email_template={{template}} where code={{reason}}", {
                    "template": CNV.object2JSON(TEMPLATE),
                    "subject": CNV.object2JSON(SUBJECT),
                    "reason": REASON
                })
                db.flush()

            #EXISTING SUSTAINED EXCEPTIONS
            # Live (non-obsolete) sustained-median alerts created inside the
            # look-back window; these are the children to be summarized.
            existing_sustained_alerts = dbq.query({
                "from": "alerts",
                "select": "*",
                "where": {"and": [
                    {"term": {"reason": talos_sustained_median.REASON}},
                    {"not": {"term": {"status": "obsolete"}}},
                    {"range": {"create_time": {"gte": NOW - LOOK_BACK}}}
                ]}
            })
            # Index by (revision, test) so exceptions can be pulled per revision.
            tests = Q.index(existing_sustained_alerts, ["revision", "details.Talos.Test"])

            #EXISTING REVISION-LEVEL ALERTS
            # NOTE(review): the `or` clause matches more rows than just the
            # revisions of interest (any recent, obsolete, or sustained-median
            # alert with this REASON) — presumably to pick up summaries that
            # should be refreshed or obsoleted; confirm intent.
            old_alerts = dbq.query({
                "from": "alerts",
                "select": "*",
                "where": {"and": [
                    {"term": {"reason": REASON}},
                    {"or": [
                        {"terms": {"revision": set(existing_sustained_alerts.revision)}},
                        {"term": {"reason": talos_sustained_median.REASON}},
                        {"term": {"status": "obsolete"}},
                        {"range": {"create_time": {"gte": NOW - LOOK_BACK}}}
                    ]}
                ]}
            })
            old_alerts = Q.unique_index(old_alerts, "revision")

            #SUMMARIZE
            known_alerts = StructList()
            for revision in set(existing_sustained_alerts.revision):
                #FIND TOTAL TDAD FOR EACH INTERESTING REVISION
                # Count of all Talos test results for this revision, used as
                # the denominator in the summary ("N exceptions of M tests").
                total_tests = esq.query({
                    "from": "talos",
                    "select": {"name": "count", "aggregate": "count"},
                    "where": {"and": [
                        {"terms": {"Talos.Revision": revision}}
                    ]}
                })
                total_exceptions = tests[(revision, )]  # FILTER BY revision

                parts = StructList()
                for g, exceptions in Q.groupby(total_exceptions, ["details.Talos.Test"]):
                    # The highest-confidence (then largest-diff) exception in
                    # this test group is used as the representative example.
                    worst_in_test = Q.sort(exceptions, ["confidence", "details.diff_percent"]).last()
                    example = worst_in_test.details
                    # ADD SOME DATAZILLA SPECIFIC URL PARAMETERS
                    branch = example.Talos.Branch.replace("-Non-PGO", "")
                    example.tbpl.url.branch = TBPL_PATH.get(branch, branch)
                    example.mercurial.url.branch = MECURIAL_PATH.get(branch, branch)
                    example.datazilla.url.branch = example.Talos.Branch  #+ ("" if worst_in_test.Talos.Branch.pgo else "-Non-PGO")
                    example.datazilla.url.x86 = "true" if example.Talos.Platform == "x86" else "false"
                    example.datazilla.url.x86_64 = "true" if example.Talos.Platform == "x86_64" else "false"
                    # End of chart window: explicit max push date, or a window
                    # mirrored around push_date when no max is recorded.
                    example.datazilla.url.stop = nvl(example.push_date_max, (2*example.push_date) - example.push_date_min)

                    # NOTE(review): this empty-group guard sits after the URL
                    # decoration above — harmless, but could short-circuit
                    # earlier; confirm groupby can even yield an empty group.
                    num_except = len(exceptions)
                    if num_except == 0:
                        continue

                    part = {
                        "test": g.details.Talos.Test,
                        "num_exceptions": num_except,
                        "num_tests": total_tests,
                        "confidence": worst_in_test.confidence,
                        "example": example
                    }
                    parts.append(part)

                # Highest-confidence test first; its example represents the revision.
                parts = Q.sort(parts, [{"field": "confidence", "sort": -1}])
                worst_in_revision = parts[0].example

                known_alerts.append({
                    "status": "new",
                    "create_time": CNV.milli2datetime(worst_in_revision.push_date),
                    "reason": REASON,
                    "revision": revision,
                    # One summary alert per revision, so the revision doubles
                    # as the tdad id.
                    "tdad_id": revision,
                    "details": {
                        "revision": revision,
                        "total_tests": total_tests,
                        "total_exceptions": len(total_exceptions),
                        "tests": parts,
                        "example": worst_in_revision
                    },
                    "severity": SEVERITY,
                    # NOTE(review): worst_in_revision is a `details` struct;
                    # `.result.confidence` may be Null unless details carries a
                    # `result` attribute — confirm against the alert schema.
                    "confidence": worst_in_revision.result.confidence
                })
            known_alerts = Q.unique_index(known_alerts, "revision")

            #NEW ALERTS, JUST INSERT
            # Set difference on the unique indexes: revisions summarized now
            # but with no existing revision-level alert.
            new_alerts = known_alerts - old_alerts
            if new_alerts:
                for revision in new_alerts:
                    revision.id = SQL("util.newid()")
                    revision.last_updated = NOW
                db.insert_list("alerts", new_alerts)

            #SHOW SUSTAINED ALERTS ARE COVERED
            # Link each sustained alert (child p) to the revision-level alert
            # (parent r) for the same revision, skipping pairs already present
            # in hierarchy (h.parent is missing only for unlinked children).
            db.execute(""" INSERT INTO hierarchy (parent, child) SELECT r.id parent, p.id child FROM alerts p LEFT JOIN hierarchy h on h.child=p.id LEFT JOIN alerts r on r.revision=p.revision AND r.reason={{parent_reason}} WHERE {{where}} """, {
                "where": esfilter2sqlwhere(db, {"and": [
                    {"term": {"p.reason": talos_sustained_median.REASON}},
                    {"terms": {"p.revision": Q.select(existing_sustained_alerts, "revision")}},
                    {"missing": "h.parent"}
                ]}),
                "parent_reason": REASON
            })

            #CURRENT ALERTS, UPDATE IF DIFFERENT
            for known_alert in known_alerts & old_alerts:
                if len(nvl(known_alert.solution, "").strip()) != 0:
                    continue  # DO NOT TOUCH SOLVED ALERTS

                old_alert = old_alerts[known_alert]
                # Refresh the stored alert only when something material changed;
                # significant_difference() suppresses churn from tiny deltas.
                if old_alert.status == 'obsolete' or significant_difference(known_alert.severity, old_alert.severity) or significant_difference(known_alert.confidence, old_alert.confidence):
                    known_alert.last_updated = NOW
                    db.update("alerts", {"id": old_alert.id}, known_alert)

            #OLD ALERTS, OBSOLETE
            # Revision-level alerts with no surviving summary: mark obsolete
            # and drop their details payload.
            for old_alert in old_alerts - known_alerts:
                if old_alert.status == 'obsolete':
                    continue
                db.update("alerts", {"id": old_alert.id}, {"status": "obsolete", "last_updated": NOW, "details": None})