def transform(self, id, datazilla):
    try:
        r = datazilla.json_blob

        #ADD DATAZILLA MARKUP
        r.datazilla = {
            "id": id,
            "date_loaded": datazilla.date_loaded * 1000,
            "error_flag": datazilla.error_flag,
            "test_run_id": datazilla.test_run_id,
            "processed_flag": datazilla.processed_flag,
            "error_msg": datazilla.error_msg
        }

        #CONVERT UNIX TIMESTAMP TO MILLISECOND TIMESTAMP
        r.testrun.date *= 1000

        def mainthread_transform(r):
            if r == None:
                return None

            #PIVOT THE [value, filename] PAIRS INTO ONE RECORD PER FILE
            output = Struct()

            for i in r.mainthread_readbytes:
                output[literal_field(i[1])].name = i[1]
                output[literal_field(i[1])].readbytes = i[0]
            r.mainthread_readbytes = None

            for i in r.mainthread_writebytes:
                output[literal_field(i[1])].name = i[1]
                output[literal_field(i[1])].writebytes = i[0]
            r.mainthread_writebytes = None

            for i in r.mainthread_readcount:
                output[literal_field(i[1])].name = i[1]
                output[literal_field(i[1])].readcount = i[0]
            r.mainthread_readcount = None

            for i in r.mainthread_writecount:
                output[literal_field(i[1])].name = i[1]
                output[literal_field(i[1])].writecount = i[0]
            r.mainthread_writecount = None

            r.mainthread = output.values()

        mainthread_transform(r.results_aux)
        mainthread_transform(r.results_xperf)

        #ADD PUSH LOG INFO
        try:
            branch = r.test_build.branch
            if branch.endswith("-Non-PGO"):
                r.test_build.branch = branch
                r.test_build.pgo = False
                branch = branch[0:-8]
            else:
                r.test_build.pgo = True

            with Profiler("get from pushlog"):
                if not self.pushlog:
                    #NO PUSHLOG MEANS WE DO NOTHING TO MARKUP TEST RESULTS
                    pass
                elif self.pushlog[branch]:
                    possible_dates = self.pushlog[branch][r.test_build.revision]
                    if possible_dates:
                        r.test_build.push_date = int(Math.round(possible_dates[0].date * 1000))
                    else:
                        if r.test_build.revision == 'NULL':
                            r.test_build.no_pushlog = True  # OOPS! SOMETHING BROKE
                        elif CNV.milli2datetime(Math.min(r.testrun.date, r.datazilla.date_loaded)) < PUSHLOG_TOO_OLD:
                            Log.note("{{branch}} @ {{revision}} has no pushlog, transforming anyway", r.test_build)
                            r.test_build.no_pushlog = True
                        else:
                            Log.note("{{branch}} @ {{revision}} has no pushlog, try again later", r.test_build)
                            return []  # TRY AGAIN LATER
                else:
                    with self.locker:
                        if branch not in self.unknown_branches:
                            Log.note("Whole branch {{branch}} has no pushlog", {"branch": branch})
                            self.unknown_branches.add(branch)

                    if CNV.milli2datetime(Math.min(r.testrun.date, r.datazilla.date_loaded)) < PUSHLOG_TOO_OLD:
                        r.test_build.no_pushlog = True
                    else:
                        r.test_build.no_pushlog = True
                        #return [r]  #TODO: DO THIS IF WE FIGURE OUT HOW TO HANDLE THE VERY LARGE NUMBER OF RESULTS WITH NO PUSHLOG
        except Exception, e:
            Log.warning("{{branch}} @ {{revision}} has no pushlog", r.test_build, e)

        new_records = []

        # RECORD THE UNKNOWN PART OF THE TEST RESULTS
        remainder = r.copy()
        remainder.results = None
        if len(remainder.keys()) > 4:
            new_records.append(remainder)

        #RECORD TEST RESULTS
        total = StructList()
        if r.testrun.suite in ["dromaeo_css", "dromaeo_dom"]:
            #dromaeo IS SPECIAL, REPLICATES ARE IN SETS OF FIVE
            #RECORD ALL RESULTS
            for i, (test_name, replicates) in enumerate(r.results.items()):
                for g, sub_results in Q.groupby(replicates, size=5):
                    new_record = Struct(
                        test_machine=r.test_machine,
                        datazilla=r.datazilla,
                        testrun=r.testrun,
                        test_build=r.test_build,
                        result={
                            "test_name": unicode(test_name) + "." + unicode(g),
                            "ordering": i,
                            "samples": sub_results
                        }
                    )
                    try:
                        s = stats(sub_results)
                        new_record.result.stats = s
                        total.append(s)
                    except Exception, e:
                        Log.warning("can not reduce series to moments", e)
                    new_records.append(new_record)