def pt_update(self, json_data, file_data=None):
    j = PTJson(json_data, obj_name="artifact json", exception_type=SuspiciousOperation)
    try:
        self.description = j.get_str('description', defval=self.description)
        self.ttl_days = j.get_int('ttl_days', defval=self.ttl_days)
        self.uploaded_dt = timezone.now()
        self.filename = j.get_str('filename', defval=self.filename, require=not self.filename)
        self.mime = j.get_str('mime', defval=self.mime)
        self.expires_dt = timezone.now() + timedelta(days=self.ttl_days)
        self.inline = j.get_bool('inline', defval=self.inline)
        self.compression = j.get_bool('compression', defval=self.compression)
        linked_uuids = j.get_list('linked_uuids', [])
        unlinked_uuids = j.get_list('unlinked_uuids', [])
    except SuspiciousOperation as e:
        return pt_rest_bad_req(str(e))

    if not self.mime:
        self.mime = mimetypes.guess_type(self.filename)[0]
    if not self.mime:
        self.mime = 'application/octet-stream'

    if file_data:
        data = file_data.get('file', None)
        if data is None:
            return pt_rest_bad_req("can't get file data")
        ret = self._pt_save_file(data.read())
        if ret is not None:
            return ret

    for uuid in linked_uuids:
        if not pt_is_valid_uuid(uuid):
            return pt_rest_bad_req("trying to link resource with invalid UUID format: %s" % uuid)
    for uuid in unlinked_uuids:
        if not pt_is_valid_uuid(uuid):
            return pt_rest_bad_req("trying to unlink resource with invalid UUID format: %s" % uuid)

    exists = self.id
    self.save()

    for uuid in linked_uuids:
        try:
            al = ArtifactLinkModel.objects.get(artifact=self, linked_uuid=uuid)
            if al.deleted:
                # link existed but was soft-deleted, revive it
                al.deleted = False
                al.save()
            continue
        except ArtifactLinkModel.DoesNotExist:
            al = ArtifactLinkModel(artifact=self, linked_uuid=uuid)
            al.save()

    for uuid in unlinked_uuids:
        try:
            al = ArtifactLinkModel.objects.get(artifact=self, linked_uuid=uuid)
        except ArtifactLinkModel.DoesNotExist:
            continue
        al.deleted = True
        al.save()

    return pt_rest_ok(message="Artifact has been %s, uuid: %s" %
                      ("updated" if exists else "created", self.uuid),
                      uuid=self.uuid)
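# --- usage sketch (not part of the model) -----------------------------------
# A minimal sketch of how a view might drive pt_update() above, assuming a
# Django request whose body carries the artifact JSON and whose multipart
# payload exposes the upload under the 'file' key (matching the
# file_data.get('file') lookup). The view name and URL wiring are hypothetical.
import json

def artifact_upload_view(request, artifact):
    payload = json.loads(request.body) if request.body else {}
    # request.FILES is dict-like and its values support .read(), which is
    # all pt_update() needs from file_data
    return artifact.pt_update(payload, file_data=request.FILES)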
def pt_update(self, json_data):
    from perftracker.models.test import TestModel

    j = PTJson(json_data, obj_name="job json", exception_type=SuspiciousOperation)

    project_name = j.get_str('project_name', require=True)  # raises SuspiciousOperation if the key is missing

    self.uuid = j.get_uuid('uuid', defval=uuid.uuid1())
    self.title = j.get_str('job_title')
    if not self.title:
        self.title = j.get_str('title', require=True)
    self.cmdline = j.get_str('cmdline')
    self.project = ProjectModel.pt_get_by_name(j.get_str('project_name'))

    append = False if self.deleted else j.get_bool('append')

    now = timezone.now()

    env_nodes_json = j.get_list('env_nodes')
    tests_json = j.get_list('tests')

    key2test = {}
    tests_to_delete = {}
    tests_to_commit = {}
    test_seq_num = 0

    # process existing tests
    if self.id:
        for t in TestModel.objects.filter(job=self):
            test_seq_num = max(t.seq_num, test_seq_num)
            u = str(t.uuid)
            if append:
                t.pt_validate_uniqueness(key2test)
                tests_to_commit[u] = t
            else:
                tests_to_delete[u] = t

    for t in tests_json:
        if not len(t.keys()):
            continue
        u = TestModel.pt_get_uuid(t)
        if u in tests_to_delete:
            tests_to_commit[u] = tests_to_delete[u]
            del tests_to_delete[u]
        else:
            test_seq_num += 1
            try:
                tests_to_commit[u] = TestModel.objects.get(uuid=u)
            except TestModel.MultipleObjectsReturned:
                # drop the duplicates of this test (by the test uuid, not the job uuid)
                TestModel.objects.filter(uuid=u).delete()
                tests_to_commit[u] = TestModel(uuid=u, seq_num=test_seq_num)
            except TestModel.DoesNotExist:
                tests_to_commit[u] = TestModel(uuid=u, seq_num=test_seq_num)

        tests_to_commit[u].pt_update(t)
        tests_to_commit[u].pt_validate_uniqueness(key2test)

    self.suite_name = j.get_str('suite_name')
    self.suite_ver = j.get_str('suite_ver')
    self.author = j.get_str('author')
    self.product_name = j.get_str('product_name')
    self.product_ver = j.get_str('product_ver')

    regression_tag = json_data.get('regression_tag', '')

    links = json_data.get('links', None)
    if links is None or links == "":
        self.links = json.dumps({})
    elif isinstance(links, dict):
        self.links = json.dumps(links)
    elif not links.startswith("{"):
        self.links = json.dumps({links: links})
    else:
        self.links = json.dumps(j.get_dict('links'))

    self.upload = now

    begin = j.get_datetime('begin', now)
    end = j.get_datetime('end', now)

    self.tests_total = 0
    self.tests_completed = 0
    self.tests_failed = 0
    self.tests_errors = 0
    self.tests_warnings = 0
    self.testcases_total = 0
    self.testcases_errors = 0

    self.deleted = False

    if not append or j.get_bool('is_edited') or not self.duration or not self.begin:
        self.duration = end - begin
        self.begin = begin
        self.end = end
    else:
        # job is being appended, do correct duration math
        if self.end < begin:
            # 1st upload
            self.duration += end - begin
        else:
            # subsequent upload
            self.duration += end - self.end
        self.end = end

    if self.begin and (self.begin.tzinfo is None or self.begin.tzinfo.utcoffset(self.begin) is None):
        raise SuspiciousOperation("'begin' datetime object must include timezone: %s" % str(self.begin))
    if self.end and (self.end.tzinfo is None or self.end.tzinfo.utcoffset(self.end) is None):
        raise SuspiciousOperation("'end' datetime object must include timezone: %s" % str(self.end))

    self.save()

    # process env_nodes; try not to delete and re-create all the nodes each time
    # because normally this is static information
    env_nodes_to_update = EnvNodeModel.pt_find_env_nodes_for_update(self, env_nodes_json)
    if env_nodes_to_update:
        EnvNodeModel.objects.filter(job=self).delete()
        for env_node_json in env_nodes_to_update:
            serializer = EnvNodeUploadSerializer(job=self, data=env_node_json)
            if serializer.is_valid():
                serializer.save()
            else:
                raise SuspiciousOperation(str(serializer.errors) + ", original json: " + str(env_node_json))

    testcases = {}

    # A Test Case (aka Section in a comparison) is defined by 2 possible scenarios:
    # - tests with the same tag and different categories
    # - tests with no categories and the same group
    for t in tests_to_commit.values():
        t.job = self
        t.pt_save()

        self.tests_total += 1
        if t.pt_status_is_completed():
            self.tests_completed += 1

        test_ok = True
        if t.pt_status_is_failed():
            self.tests_failed += 1
            test_ok = False
        if t.errors:
            test_ok = False
        if t.warnings:
            self.tests_warnings += 1

        self.tests_errors += int(not test_ok)

        testcase = t.tag if t.category else t.group
        if testcase in testcases:
            testcases[testcase] = testcases[testcase] and test_ok
        else:
            testcases[testcase] = test_ok

    self.testcases_total = len(testcases)
    self.testcases_errors = len([1 for ok in testcases.values() if not ok])

    if tests_to_delete:
        TestModel.pt_delete_tests(tests_to_delete.keys())

    if regression_tag is not None:
        from perftracker.models.regression import RegressionModel
        r = RegressionModel.pt_on_job_save(self, regression_tag)
        self.regression_original = r
        self.regression_linked = r

    self.save()
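# --- standalone sketch (illustration only) ----------------------------------
# A standalone sketch of the 'links' normalization above, assuming the same
# accepted shapes: None/"" -> {}, dict -> stored as-is, bare string s ->
# {s: s}, JSON object string -> parsed dict (json.loads stands in here for
# PTJson's get_dict()).
import json

def normalize_links(links):
    if links is None or links == "":
        return json.dumps({})
    if isinstance(links, dict):
        return json.dumps(links)
    if not links.startswith("{"):
        return json.dumps({links: links})
    return json.dumps(json.loads(links))

assert normalize_links(None) == "{}"
assert normalize_links({"a": "http://x"}) == '{"a": "http://x"}'
assert normalize_links("http://x") == '{"http://x": "http://x"}'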
def pt_update(self, job, json_data, validate_only=False):
    j = PTJson(json_data, obj_name="test json", exception_type=SuspiciousOperation)

    if 'seq_num' in json_data:
        self.seq_num = json_data['seq_num']
    if not self.seq_num:
        self.seq_num = 0

    self.tag = j.get_str('tag', require=True)
    j.obj_name = self.tag

    self.binary = j.get_str('binary')
    self.cmdline = j.get_str('cmdline')
    self.description = j.get_str('description')

    scores = j.get_list('scores', require=True)
    deviations = j.get_list('deviations')

    self.scores = str(scores)
    self.deviations = str(deviations) if deviations else str([0] * len(scores))

    self.category = j.get_str('category')
    self.metrics = j.get_str('metrics', 'loops/sec')
    self.links = json.dumps(j.get_dict('links'))
    self.attribs = json.dumps(j.get_dict('attribs'))
    self.less_better = j.get_bool('less_better')
    self.begin = j.get_datetime('begin')
    self.end = j.get_datetime('end')

    self.status = j.get_str('status', "SUCCESS")
    if self.status not in TEST_STATUSES:
        raise SuspiciousOperation("invalid 'status' value '%s', must be one of: %s" %
                                  (self.status, str(TEST_STATUSES)))

    # 'errors' and 'warnings' may be either a count or a list of messages
    e = j.get('errors', 0)
    self.errors = e if isinstance(e, int) else len(j.get_list('errors'))

    w = j.get('warnings', 0)
    self.warnings = w if isinstance(w, int) else len(j.get_list('warnings'))

    dur_sec = j.get_float('duration_sec', 0)
    self.duration = timedelta(seconds=int(dur_sec)) if dur_sec else self.end - self.begin

    self.job = job
    self.group = j.get_str('group')
    TestGroupModel.pt_get_by_tag(self.group)  # ensure the appropriate TestGroupModel object exists

    self.samples = j.get_int('samples', len(scores))
    self.avg_score = numpy.mean(scores)
    self.min_score = min(scores)
    self.max_score = max(scores)
    self.avg_dev = numpy.mean(deviations) if deviations else numpy.std(scores)
    self.min_dev = min(deviations) if deviations else self.avg_dev
    self.max_dev = max(deviations) if deviations else self.avg_dev
    self.avg_plusmin = int(round(100 * abs(self.avg_dev / self.avg_score))) if self.avg_score else 0
    self.min_plusmin = int(round(100 * abs(self.min_dev / self.min_score))) if self.min_score else 0
    self.max_plusmin = int(round(100 * abs(self.max_dev / self.max_score))) if self.max_score else 0

    if self.begin and (self.begin.tzinfo is None or self.begin.tzinfo.utcoffset(self.begin) is None):
        raise SuspiciousOperation("'begin' datetime object must include timezone: %s" % str(self.begin))
    if self.end and (self.end.tzinfo is None or self.end.tzinfo.utcoffset(self.end) is None):
        raise SuspiciousOperation("'end' datetime object must include timezone: %s" % str(self.end))

    if not validate_only:
        try:
            obj = TestModel.objects.get(uuid=self.uuid)
        except TestModel.DoesNotExist:
            obj = None
        if obj is None or not self.pt_is_equal_to(obj):
            self.save()
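# --- worked example (illustration only) --------------------------------------
# A worked example of the score statistics above, assuming numpy semantics:
# with explicit deviations the +/- percentages come from them, otherwise from
# the population standard deviation of the scores.
import numpy

scores = [100.0, 110.0, 90.0]
deviations = []                                             # none supplied in the upload

avg_score = numpy.mean(scores)                              # 100.0
avg_dev = numpy.std(scores)                                 # ~8.165 (population std-dev)
avg_plusmin = int(round(100 * abs(avg_dev / avg_score)))    # 8 (i.e. +/-8%)
min_plusmin = int(round(100 * abs(avg_dev / min(scores))))  # 9 (against the min score, 90)
max_plusmin = int(round(100 * abs(avg_dev / max(scores))))  # 7 (against the max score, 110)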
def pt_update(self, json_data):
    from perftracker.models.test import TestModel

    j = PTJson(json_data, obj_name="job json", exception_type=SuspiciousOperation)

    project_name = j.get_str('project_name', require=True)

    self.uuid = j.get_uuid('uuid', require=True)
    self.title = j.get_str('job_title')
    if not self.title:
        self.title = j.get_str('title', require=True)
    self.cmdline = j.get_str('cmdline')
    self.project = ProjectModel.pt_get_by_name(j.get_str('project_name'))

    now = timezone.now()

    env_nodes_json = j.get_list('env_nodes')
    tests_json = j.get_list('tests', require=True)

    for t in tests_json:
        if 'uuid' not in t:
            raise SuspiciousOperation("test doesn't have 'uuid' key: %s" % str(t))
        test = TestModel(job=self, uuid=t['uuid'])
        test.pt_update(self, t, validate_only=True)  # FIXME: double pt_update() call (here and below)

    self.suite_name = j.get_str('suite_name')
    self.suite_ver = j.get_str('suite_ver')
    self.author = j.get_str('author')
    self.product_name = j.get_str('product_name')
    self.product_ver = j.get_str('product_ver')
    self.links = json.dumps(j.get_dict('links'))
    self.regression_tag = json_data.get('regression_tag', '')

    self.upload = now

    begin = j.get_datetime('begin', now)
    end = j.get_datetime('end', now)

    self.tests_total = 0
    self.tests_completed = 0
    self.tests_failed = 0
    self.tests_errors = 0
    self.tests_warnings = 0

    append = False if self.deleted else j.get_bool('append')
    self.deleted = False

    if append:
        if self.duration:
            self.duration += end - begin
        else:
            self.duration = end - begin
        if not self.begin:
            self.begin = begin
        self.end = end
    else:
        self.duration = end - begin
        self.begin = begin
        self.end = end

    if self.begin and (self.begin.tzinfo is None or self.begin.tzinfo.utcoffset(self.begin) is None):
        raise SuspiciousOperation("'begin' datetime object must include timezone: %s" % str(self.begin))
    if self.end and (self.end.tzinfo is None or self.end.tzinfo.utcoffset(self.end) is None):
        raise SuspiciousOperation("'end' datetime object must include timezone: %s" % str(self.end))

    self.save()

    # process env_nodes; try not to delete and re-create all the nodes each time
    # because normally this is static information
    env_nodes_to_update = EnvNodeModel.pt_find_env_nodes_for_update(self, env_nodes_json)
    if env_nodes_to_update:
        EnvNodeModel.objects.filter(job=self).delete()
        for env_node_json in env_nodes_to_update:
            serializer = EnvNodeUploadSerializer(job=self, data=env_node_json)
            if serializer.is_valid():
                serializer.save()
            else:
                raise SuspiciousOperation(str(serializer.errors) + ", original json: " + str(env_node_json))

    # process tests
    tests = TestModel.objects.filter(job=self)

    test_seq_num = 0
    uuid2test = {}
    for t in tests:
        uuid2test[str(t.uuid)] = t
        if test_seq_num <= t.seq_num:
            test_seq_num = t.seq_num

    for t in tests_json:
        test_uuid = t['uuid']
        if test_uuid not in uuid2test:
            uuid2test[test_uuid] = TestModel(job=self, uuid=test_uuid)
            test_seq_num += 1
            uuid2test[test_uuid].seq_num = test_seq_num

        test = uuid2test[test_uuid]
        test.pt_update(self, t)

        self.tests_total += 1
        if test.pt_status_is_completed():
            self.tests_completed += 1
        if test.pt_status_is_failed():
            self.tests_failed += 1
        if test.errors:
            self.tests_errors += 1
        if test.warnings:
            self.tests_warnings += 1

        uuid2test.pop(test_uuid, None)

    if not append:
        # tests left in uuid2test were not re-uploaded, drop them
        TestModel.pt_delete_tests(uuid2test.keys())
    else:
        # in append mode the remaining tests stay in the job, so count them in the stats
        for t in uuid2test.values():
            self.tests_total += 1
            if t.pt_status_is_completed():
                self.tests_completed += 1
            if t.pt_status_is_failed():
                self.tests_failed += 1
            if t.errors:
                self.tests_errors += 1
            if t.warnings:
                self.tests_warnings += 1

    self.save()
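# --- standalone sketch (illustration only) ----------------------------------
# A minimal illustration of the append-mode duration math above, assuming
# timezone-aware datetimes: each appended upload extends the accumulated
# total by its own begin..end span. The helper name is hypothetical.
from datetime import datetime, timedelta, timezone

def append_duration(duration, begin, end):
    return (duration + (end - begin)) if duration else (end - begin)

t0 = datetime(2024, 1, 1, 12, 0, tzinfo=timezone.utc)
d = append_duration(None, t0, t0 + timedelta(minutes=10))     # 0:10:00
d = append_duration(d, t0 + timedelta(hours=1),
                    t0 + timedelta(hours=1, minutes=5))       # 0:15:00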
def pt_update(self, json_data):
    from perftracker.models.test import TestModel

    j = PTJson(json_data, obj_name="job json", exception_type=SuspiciousOperation)

    project_name = j.get_str('project_name', require=True)

    self.uuid = j.get_uuid('uuid', defval=uuid.uuid1())
    self.title = j.get_str('job_title')
    if not self.title:
        self.title = j.get_str('title', require=True)
    self.cmdline = j.get_str('cmdline')
    self.project = ProjectModel.pt_get_by_name(j.get_str('project_name'))

    append = False if self.deleted else j.get_bool('append')

    now = timezone.now()

    env_nodes_json = j.get_list('env_nodes')
    tests_json = j.get_list('tests')

    key2test = {}
    tests_to_delete = {}
    tests_to_commit = {}
    test_seq_num = 0

    # process existing tests
    if self.id:
        for t in TestModel.objects.filter(job=self):
            test_seq_num = max(t.seq_num, test_seq_num)
            u = str(t.uuid)
            if append:
                t.pt_validate_uniqueness(key2test)
                tests_to_commit[u] = t
            else:
                tests_to_delete[u] = t

    for t in tests_json:
        if not len(t.keys()):
            continue
        u = TestModel.pt_get_uuid(t)
        if u in tests_to_delete:
            tests_to_commit[u] = tests_to_delete[u]
            del tests_to_delete[u]
        else:
            test_seq_num += 1
            try:
                tests_to_commit[u] = TestModel.objects.get(uuid=u)
            except TestModel.MultipleObjectsReturned:
                # drop the duplicates of this test (by the test uuid, not the job uuid)
                TestModel.objects.filter(uuid=u).delete()
                tests_to_commit[u] = TestModel(uuid=u, seq_num=test_seq_num)
            except TestModel.DoesNotExist:
                tests_to_commit[u] = TestModel(uuid=u, seq_num=test_seq_num)

        tests_to_commit[u].pt_update(t)
        tests_to_commit[u].pt_validate_uniqueness(key2test)

    self.suite_name = j.get_str('suite_name')
    self.suite_ver = j.get_str('suite_ver')
    self.author = j.get_str('author')
    self.product_name = j.get_str('product_name')
    self.product_ver = j.get_str('product_ver')
    self.links = json.dumps(j.get_dict('links'))

    regression_tag = json_data.get('regression_tag', '')

    self.upload = now

    begin = j.get_datetime('begin', now)
    end = j.get_datetime('end', now)

    self.tests_total = 0
    self.tests_completed = 0
    self.tests_failed = 0
    self.tests_errors = 0
    self.tests_warnings = 0

    self.deleted = False

    if append and not j.get_bool('is_edited'):
        if self.duration:
            self.duration += end - begin
        else:
            self.duration = end - begin
        if not self.begin:
            self.begin = begin
        self.end = end
    else:
        self.duration = end - begin
        self.begin = begin
        self.end = end

    if self.begin and (self.begin.tzinfo is None or self.begin.tzinfo.utcoffset(self.begin) is None):
        raise SuspiciousOperation("'begin' datetime object must include timezone: %s" % str(self.begin))
    if self.end and (self.end.tzinfo is None or self.end.tzinfo.utcoffset(self.end) is None):
        raise SuspiciousOperation("'end' datetime object must include timezone: %s" % str(self.end))

    self.save()

    # process env_nodes; try not to delete and re-create all the nodes each time
    # because normally this is static information
    env_nodes_to_update = EnvNodeModel.pt_find_env_nodes_for_update(self, env_nodes_json)
    if env_nodes_to_update:
        EnvNodeModel.objects.filter(job=self).delete()
        for env_node_json in env_nodes_to_update:
            serializer = EnvNodeUploadSerializer(job=self, data=env_node_json)
            if serializer.is_valid():
                serializer.save()
            else:
                raise SuspiciousOperation(str(serializer.errors) + ", original json: " + str(env_node_json))

    for t in tests_to_commit.values():
        t.job = self
        t.pt_save()

        self.tests_total += 1
        if t.pt_status_is_completed():
            self.tests_completed += 1
        if t.pt_status_is_failed():
            self.tests_failed += 1
        if t.errors:
            self.tests_errors += 1
        if t.warnings:
            self.tests_warnings += 1

    if tests_to_delete:
        TestModel.pt_delete_tests(tests_to_delete.keys())

    if regression_tag is not None:
        from perftracker.models.regression import RegressionModel
        r = RegressionModel.pt_on_job_save(self, regression_tag)
        self.regression_original = r
        self.regression_linked = r

    self.save()
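# --- standalone sketch (illustration only) ----------------------------------
# A standalone sketch of the timezone-awareness check repeated above,
# following the standard datetime convention: an object is aware iff tzinfo
# is set and utcoffset() returns a value.
from datetime import datetime, timezone

def is_tz_aware(dt):
    return dt.tzinfo is not None and dt.tzinfo.utcoffset(dt) is not None

assert is_tz_aware(datetime.now(timezone.utc))
assert not is_tz_aware(datetime.now())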
def pt_update(self, json_data):
    from perftracker.models.test import TestModel
    from perftracker.models.test_group import TestGroupModel

    j = PTJson(json_data, obj_name="job json", exception_type=SuspiciousOperation)

    project_name = j.get_str('project_name', require=True)

    self.uuid = j.get_uuid('uuid', defval=uuid.uuid1())
    self.title = j.get_str('job_title')
    if not self.title:
        self.title = j.get_str('title', require=True)
    self.cmdline = j.get_str('cmdline')
    self.project = ProjectModel.pt_get_by_name(j.get_str('project_name'))

    append = False if self.deleted else j.get_bool('append')

    now = timezone.now()

    env_nodes_json = j.get_list('env_nodes')
    tests_json = j.get_list('tests')

    key2test = {}
    tests_to_delete = {}
    tests_to_commit = {}
    test_seq_num = 0

    # evaluate the queryset once to populate the cache
    tests_from_db_by_uuid, tests_list_from_db = self._pt_db_get(TestModel.objects.filter(job=self))

    # preload test groups
    tests_groups_by_tag, tests_groups = self._pt_db_get(TestGroupModel.objects.all(), value_to_key='tag')

    # process existing tests
    if self.id:
        for t in tests_list_from_db:
            test_seq_num = max(t.seq_num, test_seq_num)
            u = str(t.uuid)
            if append:
                t.pt_validate_uniqueness(key2test)
                tests_to_commit[u] = t
            else:
                tests_to_delete[u] = t

    for t in tests_json:
        if not len(t.keys()):
            continue
        u = TestModel.pt_get_uuid(t)
        if u in tests_to_delete:
            tests_to_commit[u] = tests_to_delete[u]
            del tests_to_delete[u]
        else:
            test_seq_num += 1
            if not tests_to_commit.get(u):
                tests_to_commit[u] = TestModel(uuid=u, seq_num=test_seq_num)

        tests_to_commit[u].pt_update(t, tests_groups_by_tag)
        tests_to_commit[u].pt_validate_uniqueness(key2test)

    self.suite_name = j.get_str('suite_name')
    self.suite_ver = j.get_str('suite_ver')
    self.author = j.get_str('author')
    self.product_name = j.get_str('product_name')
    self.product_ver = j.get_str('product_ver')
    self.links = json.dumps(j.get_dict('links'))

    regression_tag = json_data.get('regression_tag', '')

    self.upload = now

    begin = j.get_datetime('begin', now)
    end = j.get_datetime('end', now)

    self.tests_total = 0
    self.tests_completed = 0
    self.tests_failed = 0
    self.tests_errors = 0
    self.tests_warnings = 0

    self.deleted = False

    if append and not j.get_bool('is_edited'):
        if self.duration:
            self.duration += end - begin
        else:
            self.duration = end - begin
        if not self.begin:
            self.begin = begin
        self.end = end
    else:
        self.duration = end - begin
        self.begin = begin
        self.end = end

    if self.begin and (self.begin.tzinfo is None or self.begin.tzinfo.utcoffset(self.begin) is None):
        raise SuspiciousOperation("'begin' datetime object must include timezone: %s" % str(self.begin))
    if self.end and (self.end.tzinfo is None or self.end.tzinfo.utcoffset(self.end) is None):
        raise SuspiciousOperation("'end' datetime object must include timezone: %s" % str(self.end))

    self.save()

    # process env_nodes; try not to delete and re-create all the nodes each time
    # because normally this is static information
    env_nodes_to_update = EnvNodeModel.pt_find_env_nodes_for_update(self, env_nodes_json)
    if env_nodes_to_update:
        EnvNodeModel.objects.filter(job=self).delete()
        for env_node_json in env_nodes_to_update:
            serializer = EnvNodeUploadSerializer(job=self, data=env_node_json)
            if serializer.is_valid():
                serializer.save()
            else:
                raise SuspiciousOperation(str(serializer.errors) + ", original json: " + str(env_node_json))

    if settings.DATABASES['default']['ENGINE'] == 'django.db.backends.postgresql':
        bulk_mgr = BulkCreateManager(chunk_size=5000)
        for t in tests_to_commit.values():
            t.job = self

            self.tests_total += 1
            if t.pt_status_is_completed():
                self.tests_completed += 1
            if t.pt_status_is_failed():
                self.tests_failed += 1
            if t.errors:
                self.tests_errors += 1
            if t.warnings:
                self.tests_warnings += 1

            db_test = tests_from_db_by_uuid.get(str(t.uuid))
            if not db_test:
                bulk_mgr.add(t)   # new test, create it in bulk
            elif t.pt_is_equal_to(db_test):
                continue          # unchanged, nothing to write
            else:
                t.pt_save()       # existing test changed, update it individually
        bulk_mgr.done()
    else:
        for t in tests_to_commit.values():
            t.job = self
            t.pt_save()

            self.tests_total += 1
            if t.pt_status_is_completed():
                self.tests_completed += 1
            if t.pt_status_is_failed():
                self.tests_failed += 1
            if t.errors:
                self.tests_errors += 1
            if t.warnings:
                self.tests_warnings += 1

    if not append and tests_to_delete:
        TestModel.pt_delete_tests(tests_to_delete.keys())

    if regression_tag is not None:
        from perftracker.models.regression import RegressionModel
        r = RegressionModel.pt_on_job_save(self, regression_tag)
        self.regression_original = r
        self.regression_linked = r

    self.save()
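# --- standalone sketch (illustration only) ----------------------------------
# A minimal sketch of the BulkCreateManager interface assumed by the postgres
# branch above (constructor with chunk_size, add(), done()); the real helper
# may differ. Instances are queued per model class and flushed with Django's
# bulk_create(), which skips Model.save() and the pre/post-save signals.
from collections import defaultdict

class BulkCreateManager:
    def __init__(self, chunk_size=100):
        self._create_queues = defaultdict(list)
        self.chunk_size = chunk_size

    def _commit(self, model_class):
        model_class.objects.bulk_create(self._create_queues[model_class])
        self._create_queues[model_class] = []

    def add(self, obj):
        model_class = type(obj)
        self._create_queues[model_class].append(obj)
        if len(self._create_queues[model_class]) >= self.chunk_size:
            self._commit(model_class)

    def done(self):
        # flush any partially filled chunks
        for model_class, objs in self._create_queues.items():
            if objs:
                model_class.objects.bulk_create(objs)
        self._create_queues.clear()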