Example #1
    def pt_update(self, json_data):
        from perftracker.models.test import TestModel

        j = PTJson(json_data,
                   obj_name="job json",
                   exception_type=SuspiciousOperation)

        project_name = j.get_str('project_name', require=True)
        self.uuid = j.get_uuid('uuid', defval=uuid.uuid1())

        self.title = j.get_str('job_title')
        if not self.title:
            self.title = j.get_str('title', require=True)
        self.cmdline = j.get_str('cmdline')
        self.project = ProjectModel.pt_get_by_name(j.get_str('project_name'))

        append = False if self.deleted else j.get_bool('append')

        now = timezone.now()

        env_nodes_json = j.get_list('env_nodes')
        tests_json = j.get_list('tests')

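        # bookkeeping for the merge: key2test is passed to pt_validate_uniqueness()
        # to detect duplicate tests, tests_to_delete collects existing tests to be
        # dropped, tests_to_commit collects tests to be (re)saved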
        key2test = {}
        tests_to_delete = {}
        tests_to_commit = {}
        test_seq_num = 0

        # process existing tests
        if self.id:
            for t in TestModel.objects.filter(job=self):
                test_seq_num = max(t.seq_num, test_seq_num)
                u = str(t.uuid)
                if append:
                    t.pt_validate_uniqueness(key2test)
                    tests_to_commit[u] = t
                else:
                    tests_to_delete[u] = t

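        # merge the uploaded tests with the existing ones: tests present in the
        # upload are kept (or re-fetched by uuid), unknown uuids get a fresh
        # TestModel with the next sequence number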
        for t in tests_json:
            if not t:
                continue
            u = TestModel.pt_get_uuid(t)
            if u in tests_to_delete:
                tests_to_commit[u] = tests_to_delete[u]
                del tests_to_delete[u]
            else:
                test_seq_num += 1
                try:
                    tests_to_commit[u] = TestModel.objects.get(uuid=u)
                except TestModel.MultipleObjectsReturned:
                    # duplicated test uuid in the DB: drop the duplicates and
                    # re-create the test from scratch
                    TestModel.objects.filter(uuid=u).delete()
                    tests_to_commit[u] = TestModel(uuid=u,
                                                   seq_num=test_seq_num)
                except TestModel.DoesNotExist:
                    tests_to_commit[u] = TestModel(uuid=u,
                                                   seq_num=test_seq_num)
            tests_to_commit[u].pt_update(t)
            tests_to_commit[u].pt_validate_uniqueness(key2test)

        self.suite_name = j.get_str('suite_name')
        self.suite_ver = j.get_str('suite_ver')
        self.author = j.get_str('author')
        self.product_name = j.get_str('product_name')
        self.product_ver = j.get_str('product_ver')
        regression_tag = json_data.get('regression_tag', '')

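        # 'links' may arrive as an empty value, a dict, a plain string or a
        # JSON object string; normalize every variant to a JSON-encoded dict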
        links = json_data.get('links', None)
        if links is None or links == "":
            self.links = json.dumps({})
        elif isinstance(links, dict):
            self.links = json.dumps(links)
        elif not links.startswith("{"):
            self.links = json.dumps({links: links})
        else:
            self.links = json.dumps(j.get_dict('links'))

        self.upload = now

        begin = j.get_datetime('begin', now)
        end = j.get_datetime('end', now)

        self.tests_total = 0
        self.tests_completed = 0
        self.tests_failed = 0
        self.tests_errors = 0
        self.tests_warnings = 0

        self.testcases_total = 0
        self.testcases_errors = 0

        self.deleted = False

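        # a fresh or edited upload resets begin/end/duration; an append adds the
        # duration of the new chunk on top of the existing job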
        if (not append or j.get_bool('is_edited')
                or not self.duration or not self.begin):
            self.duration = end - begin
            self.begin = begin
            self.end = end
        else:
            # job is being appended, do correct duration math
            if self.end < begin:  # 1st upload
                self.duration += end - begin
            else:  # subsequent upload
                self.duration += end - self.end
            self.end = end

        if self.begin and (self.begin.tzinfo is None
                           or self.begin.tzinfo.utcoffset(self.begin) is None):
            raise SuspiciousOperation(
                "'begin' datetime object must include timezone: %s" %
                str(self.begin))
        if self.end and (self.end.tzinfo is None
                         or self.end.tzinfo.utcoffset(self.end) is None):
            raise SuspiciousOperation(
                "'end' datetime object must include timezone: %s" %
                str(self.end))

        self.save()

        # process env_nodes, try not to delete and re-create all the nodes each time because normally this is static information
        env_nodes_to_update = EnvNodeModel.pt_find_env_nodes_for_update(
            self, env_nodes_json)
        if env_nodes_to_update:
            EnvNodeModel.objects.filter(job=self).delete()
            for env_node_json in env_nodes_to_update:
                serializer = EnvNodeUploadSerializer(job=self,
                                                     data=env_node_json)
                if serializer.is_valid():
                    serializer.save()
                else:
                    raise SuspiciousOperation(
                        str(serializer.errors) + ", original json: " +
                        str(env_node_json))

        testcases = {}
        #  Test Case (aka Section in comparison) is defined by 2 possible scenarios:
        #    - tests with the same tag and different categories
        #    - tests with no categories, and same group

        for t in tests_to_commit.values():
            t.job = self
            t.pt_save()

            self.tests_total += 1
            if t.pt_status_is_completed():
                self.tests_completed += 1
            test_ok = True
            if t.pt_status_is_failed():
                self.tests_failed += 1
                test_ok = False
            if t.errors:
                test_ok = False
            if t.warnings:
                self.tests_warnings += 1

            self.tests_errors += int(not test_ok)
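            # a testcase (tag for categorized tests, group otherwise) is OK only
            # if all of its tests are OK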
            testcase = t.tag if t.category else t.group
            if testcase in testcases:
                testcases[testcase] = testcases[testcase] and test_ok
            else:
                testcases[testcase] = test_ok

        self.testcases_total = len(testcases)
        self.testcases_errors = len([1 for ok in testcases.values() if not ok])

        if tests_to_delete:
            TestModel.pt_delete_tests(tests_to_delete.keys())

        if regression_tag is not None:
            from perftracker.models.regression import RegressionModel
            r = RegressionModel.pt_on_job_save(self, regression_tag)
            self.regression_original = r
            self.regression_linked = r

        self.save()
Example #2
    def pt_update(self, json_data):
        from perftracker.models.test import TestModel

        j = PTJson(json_data,
                   obj_name="job json",
                   exception_type=SuspiciousOperation)

        project_name = j.get_str('project_name', require=True)
        self.uuid = j.get_uuid('uuid', defval=uuid.uuid1())

        self.title = j.get_str('job_title')
        if not self.title:
            self.title = j.get_str('title', require=True)
        self.cmdline = j.get_str('cmdline')
        self.project = ProjectModel.pt_get_by_name(j.get_str('project_name'))

        append = False if self.deleted else j.get_bool('append')

        now = timezone.now()

        env_nodes_json = j.get_list('env_nodes')
        tests_json = j.get_list('tests')

        key2test = {}
        tests_to_delete = {}
        tests_to_commit = {}
        test_seq_num = 0

        # process existing tests
        if self.id:
            for t in TestModel.objects.filter(job=self):
                test_seq_num = max(t.seq_num, test_seq_num)
                u = str(t.uuid)
                if append:
                    t.pt_validate_uniqueness(key2test)
                    tests_to_commit[u] = t
                else:
                    tests_to_delete[u] = t

        for t in tests_json:
            if not t:
                continue
            u = TestModel.pt_get_uuid(t)
            if u in tests_to_delete:
                tests_to_commit[u] = tests_to_delete[u]
                del tests_to_delete[u]
            else:
                test_seq_num += 1
                try:
                    tests_to_commit[u] = TestModel.objects.get(uuid=u)
                except TestModel.MultipleObjectsReturned:
                    # drop the duplicated test entries and re-create the test
                    TestModel.objects.filter(uuid=u).delete()
                    tests_to_commit[u] = TestModel(uuid=u,
                                                   seq_num=test_seq_num)
                except TestModel.DoesNotExist:
                    tests_to_commit[u] = TestModel(uuid=u,
                                                   seq_num=test_seq_num)
            tests_to_commit[u].pt_update(t)
            tests_to_commit[u].pt_validate_uniqueness(key2test)

        self.suite_name = j.get_str('suite_name')
        self.suite_ver = j.get_str('suite_ver')
        self.author = j.get_str('author')
        self.product_name = j.get_str('product_name')
        self.product_ver = j.get_str('product_ver')
        self.links = json.dumps(j.get_dict('links'))
        regression_tag = json_data.get('regression_tag', '')

        self.upload = now

        begin = j.get_datetime('begin', now)
        end = j.get_datetime('end', now)

        self.tests_total = 0
        self.tests_completed = 0
        self.tests_failed = 0
        self.tests_errors = 0
        self.tests_warnings = 0

        self.deleted = False

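        # append (without edits) accumulates duration; any other upload resets
        # begin/end/duration from scratch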
        if append and not j.get_bool('is_edited'):
            if self.duration:
                self.duration += end - begin
            else:
                self.duration = end - begin
            if not self.begin:
                self.begin = begin
            self.end = end
        else:
            self.duration = end - begin
            self.begin = begin
            self.end = end

        if self.begin and (self.begin.tzinfo is None
                           or self.begin.tzinfo.utcoffset(self.begin) is None):
            raise SuspiciousOperation(
                "'begin' datetime object must include timezone: %s" %
                str(self.begin))
        if self.end and (self.end.tzinfo is None
                         or self.end.tzinfo.utcoffset(self.end) is None):
            raise SuspiciousOperation(
                "'end' datetime object must include timezone: %s" %
                str(self.end))

        self.save()

        # process env_nodes, try not to delete and re-create all the nodes each time because normally this is static information
        env_nodes_to_update = EnvNodeModel.pt_find_env_nodes_for_update(
            self, env_nodes_json)
        if env_nodes_to_update:
            EnvNodeModel.objects.filter(job=self).delete()
            for env_node_json in env_nodes_to_update:
                serializer = EnvNodeUploadSerializer(job=self,
                                                     data=env_node_json)
                if serializer.is_valid():
                    serializer.save()
                else:
                    raise SuspiciousOperation(
                        str(serializer.errors) + ", original json: " +
                        str(env_node_json))

        for t in tests_to_commit.values():
            t.job = self
            t.pt_save()

            self.tests_total += 1
            if t.pt_status_is_completed():
                self.tests_completed += 1
            if t.pt_status_is_failed():
                self.tests_failed += 1
            if t.errors:
                self.tests_errors += 1
            if t.warnings:
                self.tests_warnings += 1

        if tests_to_delete:
            TestModel.pt_delete_tests(tests_to_delete.keys())

        if regression_tag is not None:
            from perftracker.models.regression import RegressionModel
            r = RegressionModel.pt_on_job_save(self, regression_tag)
            self.regression_original = r
            self.regression_linked = r

        self.save()
Example #3
    def pt_update(self, json_data):
        from perftracker.models.test import TestModel

        j = PTJson(json_data,
                   obj_name="job json",
                   exception_type=SuspiciousOperation)

        project_name = j.get_str('project_name', require=True)
        self.uuid = j.get_uuid('uuid', require=True)

        self.title = j.get_str('job_title')
        if not self.title:
            self.title = j.get_str('title', require=True)
        self.cmdline = j.get_str('cmdline')
        self.project = ProjectModel.pt_get_by_name(j.get_str('project_name'))

        now = timezone.now()

        env_nodes_json = j.get_list('env_nodes')
        tests_json = j.get_list('tests', require=True)

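        # first pass: validate every uploaded test before touching the database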
        for t in tests_json:
            if 'uuid' not in t:
                raise SuspiciousOperation("test doesn't have 'uuid' key: %s" %
                                          str(t))
            test = TestModel(job=self, uuid=t['uuid'])
            # FIXME: double pt_update() call (here and below)
            test.pt_update(self, t, validate_only=True)

        self.suite_name = j.get_str('suite_name')
        self.suite_ver = j.get_str('suite_ver')
        self.author = j.get_str('author')
        self.product_name = j.get_str('product_name')
        self.product_ver = j.get_str('product_ver')
        self.links = json.dumps(j.get_dict('links'))
        self.regression_tag = json_data.get('regression_tag', '')

        self.upload = now

        begin = j.get_datetime('begin', now)
        end = j.get_datetime('end', now)

        self.tests_total = 0
        self.tests_completed = 0
        self.tests_failed = 0
        self.tests_errors = 0
        self.tests_warnings = 0

        append = False if self.deleted else j.get_bool('append')

        self.deleted = False

        if append:
            if self.duration:
                self.duration += end - begin
            else:
                self.duration = end - begin
            if not self.begin:
                self.begin = begin
            self.end = end
        else:
            self.duration = end - begin
            self.begin = begin
            self.end = end

        if self.begin and (self.begin.tzinfo is None
                           or self.begin.tzinfo.utcoffset(self.begin) is None):
            raise SuspiciousOperation(
                "'begin' datetime object must include timezone: %s" %
                str(self.begin))
        if self.end and (self.end.tzinfo is None
                         or self.end.tzinfo.utcoffset(self.end) is None):
            raise SuspiciousOperation(
                "'end' datetime object must include timezone: %s" %
                str(self.end))

        self.save()

        # process env_nodes, try not to delete and re-create all the nodes each time because normally this is static information
        env_nodes_to_update = EnvNodeModel.pt_find_env_nodes_for_update(
            self, env_nodes_json)
        if env_nodes_to_update:
            EnvNodeModel.objects.filter(job=self).delete()
            for env_node_json in env_nodes_to_update:
                serializer = EnvNodeUploadSerializer(job=self,
                                                     data=env_node_json)
                if serializer.is_valid():
                    serializer.save()
                else:
                    raise SuspiciousOperation(
                        str(serializer.errors) + ", original json: " +
                        str(env_node_json))

        # process tests
        tests = TestModel.objects.filter(job=self)
        test_seq_num = 0
        uuid2test = {}
        for t in tests:
            uuid2test[str(t.uuid)] = t
            if test_seq_num <= t.seq_num:
                test_seq_num = t.seq_num

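        # second pass: reuse existing tests by uuid or create new ones, and pop
        # matched entries so that only stale tests remain in uuid2test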
        for t in tests_json:
            test_uuid = t['uuid']

            if test_uuid not in uuid2test:
                uuid2test[test_uuid] = TestModel(job=self, uuid=test_uuid)
                test_seq_num += 1
                uuid2test[test_uuid].seq_num = test_seq_num

            test = uuid2test[test_uuid]

            test.pt_update(self, t)

            self.tests_total += 1
            if test.pt_status_is_completed():
                self.tests_completed += 1
            if test.pt_status_is_failed():
                self.tests_failed += 1
            if test.errors:
                self.tests_errors += 1
            if test.warnings:
                self.tests_warnings += 1
            uuid2test.pop(test_uuid, None)

        if not append:
            TestModel.pt_delete_tests(uuid2test.keys())

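        # tests left in uuid2test were not part of this upload; they still
        # contribute to the job statistics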
        for t in uuid2test.values():
            self.tests_total += 1
            if t.pt_status_is_completed():
                self.tests_completed += 1
            if t.pt_status_is_failed():
                self.tests_failed += 1
            if t.errors:
                self.tests_errors += 1
            if t.warnings:
                self.tests_warnings += 1

        self.save()
Example #4
    def pt_update(self, json_data):
        from perftracker.models.test import TestModel
        from perftracker.models.test_group import TestGroupModel

        j = PTJson(json_data, obj_name="job json", exception_type=SuspiciousOperation)

        project_name = j.get_str('project_name', require=True)
        self.uuid = j.get_uuid('uuid', defval=uuid.uuid1())

        self.title = j.get_str('job_title')
        if not self.title:
            self.title = j.get_str('title', require=True)
        self.cmdline = j.get_str('cmdline')
        self.project = ProjectModel.pt_get_by_name(j.get_str('project_name'))

        append = False if self.deleted else j.get_bool('append')

        now = timezone.now()

        env_nodes_json = j.get_list('env_nodes')
        tests_json = j.get_list('tests')

        key2test = {}
        tests_to_delete = {}
        tests_to_commit = {}
        test_seq_num = 0
        # triggering populating queryset cache
        tests_from_db_by_uuid, tests_list_from_db = self._pt_db_get(TestModel.objects.filter(job=self))
        # preload test groups
        tests_groups_by_tag, tests_groups = self._pt_db_get(TestGroupModel.objects.all(), value_to_key='tag')

        # process existing tests
        if self.id:
            for t in tests_list_from_db:
                test_seq_num = max(t.seq_num, test_seq_num)
                u = str(t.uuid)
                if append:
                    t.pt_validate_uniqueness(key2test)
                    tests_to_commit[u] = t
                else:
                    tests_to_delete[u] = t

        for t in tests_json:
            if not t:
                continue
            u = TestModel.pt_get_uuid(t)
            if u in tests_to_delete:
                tests_to_commit[u] = tests_to_delete[u]
                del tests_to_delete[u]
            else:
                test_seq_num += 1
                if not tests_to_commit.get(u):
                    tests_to_commit[u] = TestModel(uuid=u, seq_num=test_seq_num)

            tests_to_commit[u].pt_update(t, tests_groups_by_tag)
            tests_to_commit[u].pt_validate_uniqueness(key2test)

        self.suite_name = j.get_str('suite_name')
        self.suite_ver  = j.get_str('suite_ver')
        self.author     = j.get_str('author')
        self.product_name = j.get_str('product_name')
        self.product_ver  = j.get_str('product_ver')
        self.links = json.dumps(j.get_dict('links'))
        regression_tag = json_data.get('regression_tag', '')

        self.upload = now

        begin = j.get_datetime('begin', now)
        end = j.get_datetime('end', now)

        self.tests_total = 0
        self.tests_completed = 0
        self.tests_failed = 0
        self.tests_errors = 0
        self.tests_warnings = 0

        self.deleted = False

        if append and not j.get_bool('is_edited'):
            if self.duration:
                self.duration += end - begin
            else:
                self.duration = end - begin
            if not self.begin:
                self.begin = begin
            self.end = end
        else:
            self.duration = end - begin
            self.begin = begin
            self.end = end


        if self.begin and (self.begin.tzinfo is None or self.begin.tzinfo.utcoffset(self.begin) is None):
            raise SuspiciousOperation("'begin' datetime object must include timezone: %s" % str(self.begin))
        if self.end and (self.end.tzinfo is None or self.end.tzinfo.utcoffset(self.end) is None):
            raise SuspiciousOperation("'end' datetime object must include timezone: %s" % str(self.end))

        self.save()

        # process env_nodes, try not to delete and re-create all the nodes each time because normally this is static information
        env_nodes_to_update = EnvNodeModel.pt_find_env_nodes_for_update(self, env_nodes_json)
        if env_nodes_to_update:
            EnvNodeModel.objects.filter(job=self).delete()
            for env_node_json in env_nodes_to_update:
                serializer = EnvNodeUploadSerializer(job=self, data=env_node_json)
                if serializer.is_valid():
                    serializer.save()
                else:
                    raise SuspiciousOperation(str(serializer.errors) + ", original json: " + str(env_node_json))

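        # on PostgreSQL new tests are inserted in bulk batches; other backends
        # fall back to saving every test individually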
        if settings.DATABASES['default']['ENGINE'] == 'django.db.backends.postgresql':
            bulk_mgr = BulkCreateManager(chunk_size=5000)
            for t in tests_to_commit.values():
                t.job = self
                self.tests_total += 1
                if t.pt_status_is_completed():
                    self.tests_completed += 1
                if t.pt_status_is_failed():
                    self.tests_failed += 1
                if t.errors:
                    self.tests_errors += 1
                if t.warnings:
                    self.tests_warnings += 1
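                # queue only tests that are missing from the DB for bulk insert;
                # rows that already exist are left untouched in this path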
                db_test = tests_from_db_by_uuid.get(str(t.uuid))
                if not db_test:
                    bulk_mgr.add(t)
                elif t.pt_is_equal_to(db_test):
                    continue
            bulk_mgr.done()
        else:
            for t in tests_to_commit.values():
                t.job = self
                t.pt_save()
                self.tests_total += 1
                if t.pt_status_is_completed():
                    self.tests_completed += 1
                if t.pt_status_is_failed():
                    self.tests_failed += 1
                if t.errors:
                    self.tests_errors += 1
                if t.warnings:
                    self.tests_warnings += 1

        if not append and tests_to_delete:
            TestModel.pt_delete_tests(tests_to_delete.keys())

        if regression_tag is not None:
            from perftracker.models.regression import RegressionModel
            r = RegressionModel.pt_on_job_save(self, regression_tag)
            self.regression_original = r
            self.regression_linked   = r

        self.save()