def main(args):
    """Find the newest good (passing) build older than a known-bad build.

    Using the group/project/suite/test/kernel_build/arch entries of *args*,
    query SQUAD for the most recent build created before the bad one in
    which the given test passed, and print its qa-reports URL (or
    "No good build" when none exists).
    """
    # Some configuration, might get parameterized later
    SquadApi.configure(args.get('squadapi_url'))
    squad = Squad()

    def getid(s):
        # Extract the numeric id embedded in a SQUAD resource URL/string.
        # Fixed: regex is now a raw string (was '\d+', an invalid escape).
        return int(re.search(r'\d+', s).group())

    group = squad.group(args.get('group'))
    project = group.project(args.get('project'))
    bad_suite = project.suite(args.get('suite'))
    bad_test = args.get('test')
    bad_build = project.build(args.get('kernel_build'))
    bad_env = project.environment(args.get('arch'))

    print('Looking at the next good build in %s/%s for build %s'
          % (group.slug, project.slug, bad_build.version), flush=True)

    # Newest (ordering by descending build id) passing occurrence of the
    # test prior to the bad build, limited to one result.
    tests = squad.tests(
        build__created_at__lt=bad_build.created_at,
        suite=bad_suite.id,
        environment=bad_env.id,
        metadata__name=bad_test,
        ordering='-build_id',
        result=True,
        count=1,
    )
    if len(tests):
        test = first(tests)
        build = Build(getid(test.build))
        print('%s: https://qa-reports.linaro.org/%s/%s/build/%s'
              % (build.version, group.slug, project.slug, build.version))
    else:
        print('No good build')
class SubmitTuxbuildCommandTest(unittest.TestCase):
    """Tests for the ``submit-tuxbuild`` management command."""

    testing_server = "http://localhost:%s" % settings.DEFAULT_SQUAD_PORT
    testing_token = "193cd8bb41ab9217714515954e8724f651ef8601"
    # Maximum time (seconds) to wait for the subprocess before killing it.
    process_timeout = 60

    def setUp(self):
        self.squad = Squad()
        SquadApi.configure(url=self.testing_server, token=self.testing_token)

    def submit_tuxbuild(self, tuxbuild):
        """Run ``./manage.py submit-tuxbuild`` on *tuxbuild*.

        Returns the Popen object annotated with ``ok`` (True when the
        command exited 0) and decoded ``out``/``err`` streams.
        """
        argv = [
            "./manage.py",
            "--squad-host",
            self.testing_server,
            "--squad-token",
            self.testing_token,
            "submit-tuxbuild",
            "--group",
            "my_group",
            "--project",
            "my_project",
            tuxbuild,
        ]

        env = os.environ.copy()
        env['LOG_LEVEL'] = 'INFO'
        proc = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE, env=env)
        proc.ok = False
        try:
            # Fixed: communicate() was called without a timeout, making the
            # TimeoutExpired handler below unreachable.
            out, err = proc.communicate(timeout=self.process_timeout)
            proc.ok = proc.returncode == 0
        except sp.TimeoutExpired:
            # Fixed: the original referenced an undefined self.logger and fed
            # a single argument to a two-placeholder format string (TypeError).
            print('Running "%s" timed out after %i seconds!'
                  % (" ".join(argv), self.process_timeout))
            proc.kill()
            out, err = proc.communicate()
        proc.out = out.decode("utf-8")
        proc.err = err.decode("utf-8")
        return proc

    def test_submit_tuxbuild_build(self):
        proc = self.submit_tuxbuild("tests/data/submit/tuxbuild/build.json")
        self.assertTrue(proc.ok, msg=proc.err)
        self.assertTrue(proc.err.count("Submitting 1 tests, 2 metrics") == 3)

        project = self.squad.group("my_group").project("my_project")

        # Check results for next-20201021, which has 2 instances in build.json
        build = project.build("next-20201021")
        base_kconfig = [
            'defconfig',
            'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/lkft.config',
            'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/lkft-crypto.config',
            'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/distro-overrides.config',
            'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/systemd.config',
            'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/virtio.config',
        ]

        # Make sure metadata values match expected values
        urls = [
            'https://builds.tuxbuild.com/%s/' % _id
            for _id in ['B3TECkH4_1X9yKoWOPIhew', 't8NSUfTBZiSPbBVaXLH7kw']
        ]
        configs = [url + "config" for url in urls]
        expected_metadata = {
            'git_repo': "https://gitlab.com/Linaro/lkft/mirrors/next/linux-next",
            'git_ref': None,
            'git_commit': "5302568121ba345f5c22528aefd72d775f25221e",
            'git_sha': "5302568121ba345f5c22528aefd72d775f25221e",
            'git_short_log': '5302568121ba ("Add linux-next specific files for 20201021")',
            'git_describe': "next-20201021",
            'kconfig': [
                base_kconfig + ["CONFIG_ARM64_MODULE_PLTS=y"],
                base_kconfig + ["CONFIG_IGB=y", "CONFIG_UNWINDER_FRAME_POINTER=y"]
            ],
            'git_branch': os.environ.get("KERNEL_BRANCH"),
            'make_kernelversion': "5.9.0",
            'kernel_version': "5.9.0",
            'config': configs,
            'download_url': urls,
            'duration': 541,
        }
        for expected_key in expected_metadata.keys():
            self.assertEqual(expected_metadata[expected_key],
                             getattr(build.metadata, expected_key))

        # Make sure there's no extra attributes in the metadata object
        metadata_attrs = build.metadata.__dict__
        del metadata_attrs["id"]
        self.assertEqual(sorted(expected_metadata.keys()),
                         sorted(metadata_attrs.keys()))

        # Check results for v4.4.4, which has 1 instance in build.json
        build = project.build("v4.4.4")

        # Make sure metadata values match expected values
        url = 'https://builds.tuxbuild.com/%s/' % 'B3TECkH4_1X9yKoWOPIhew'
        config = url + "config"
        expected_metadata = {
            'git_repo': "https://gitlab.com/Linaro/lkft/mirrors/next/linux-next",
            'git_ref': None,
            'git_commit': "5302568121ba345f5c22528aefd72d775f25221e",
            'git_sha': "5302568121ba345f5c22528aefd72d775f25221e",
            'git_short_log': '5302568121ba ("Add linux-next specific files for 20201021")',
            'git_describe': "v4.4.4",
            'kconfig': base_kconfig + ["CONFIG_ARM64_MODULE_PLTS=y"],
            'git_branch': os.environ.get("KERNEL_BRANCH"),
            'make_kernelversion': "5.9.0",
            'kernel_version': "5.9.0",
            'config': config,
            'download_url': url,
            'duration': 541,
        }
        for expected_key in expected_metadata.keys():
            self.assertEqual(expected_metadata[expected_key],
                             getattr(build.metadata, expected_key))

        # Make sure there's no extra attributes in the metadata object
        metadata_attrs = build.metadata.__dict__
        del metadata_attrs["id"]
        self.assertEqual(sorted(expected_metadata.keys()),
                         sorted(metadata_attrs.keys()))

        for arch in ["arm64", "x86"]:
            environment = (self.squad.group("my_group").project(
                "my_project").environment(arch))
            self.assertIsNotNone(environment,
                                 "environment %s does not exist" % (arch))

        suite = self.squad.group("my_group").project("my_project").suite(
            "build")
        self.assertIsNotNone(suite)

        test = first(self.squad.tests(name="gcc-9-defconfig-b9979cfa"))
        self.assertEqual("build/gcc-9-defconfig-b9979cfa", test.name)
        self.assertEqual("pass", test.status)

        test = first(self.squad.tests(name="gcc-9-defconfig-5b09568e"))
        self.assertEqual("build/gcc-9-defconfig-5b09568e", test.name)
        self.assertEqual("fail", test.status)

        metric = first(
            self.squad.metrics(name="gcc-9-defconfig-b9979cfa-warnings"))
        self.assertEqual("build/gcc-9-defconfig-b9979cfa-warnings",
                         metric.name)
        self.assertEqual(1, metric.result)

        metric = first(
            self.squad.metrics(name="gcc-9-defconfig-5b09568e-warnings"))
        self.assertEqual("build/gcc-9-defconfig-5b09568e-warnings",
                         metric.name)
        self.assertEqual(2, metric.result)

        metric = first(
            self.squad.metrics(name="gcc-9-defconfig-5b09568e-duration"))
        self.assertEqual("build/gcc-9-defconfig-5b09568e-duration",
                         metric.name)
        self.assertEqual(541, metric.result)

    def test_submit_tuxbuild_buildset(self):
        os.environ["KERNEL_BRANCH"] = "master"
        proc = self.submit_tuxbuild("tests/data/submit/tuxbuild/buildset.json")
        self.assertTrue(proc.ok, msg=proc.out)
        self.assertTrue(proc.err.count("Submitting 1 tests, 2 metrics") == 3)

        build = self.squad.group("my_group").project("my_project").build(
            "next-20201030")

        # Make sure metadata values match expected values
        urls = [
            'https://builds.tuxbuild.com/%s/' % _id for _id in [
                '9NeOU1kd65bhMrL4eyI2yA', 'cjLreGasHSZj3OctZlNdpw',
                'x5Mi9j6xZItTGqVtOKmnVw'
            ]
        ]
        configs = [url + "config" for url in urls]
        expected_metadata = {
            'git_repo': "https://gitlab.com/Linaro/lkft/mirrors/next/linux-next",
            'git_ref': None,
            'git_commit': "4e78c578cb987725eef1cec7d11b6437109e9a49",
            'git_sha': "4e78c578cb987725eef1cec7d11b6437109e9a49",
            'git_short_log': '4e78c578cb98 ("Add linux-next specific files for 20201030")',
            'git_describe': "next-20201030",
            'kconfig': [['allnoconfig'], ['tinyconfig'], ['x86_64_defconfig']],
            'git_branch': os.environ.get("KERNEL_BRANCH"),
            'make_kernelversion': "5.10.0-rc1",
            'kernel_version': "5.10.0-rc1",
            'config': configs,
            'download_url': urls,
            'duration': 541,
        }
        for expected_key in expected_metadata.keys():
            self.assertEqual(expected_metadata[expected_key],
                             getattr(build.metadata, expected_key))

        # Make sure there's no extra attributes in the metadata object
        metadata_attrs = build.metadata.__dict__
        del metadata_attrs["id"]
        self.assertEqual(sorted(expected_metadata.keys()),
                         sorted(metadata_attrs.keys()))

        environment = (self.squad.group("my_group").project(
            "my_project").environment("x86"))
        self.assertIsNotNone(environment)

        suite = self.squad.group("my_group").project("my_project").suite(
            "build")
        self.assertIsNotNone(suite)

        test = first(self.squad.tests(name="gcc-8-allnoconfig"))
        self.assertEqual("build/gcc-8-allnoconfig", test.name)
        self.assertEqual("pass", test.status)

        test = first(self.squad.tests(name="gcc-8-tinyconfig"))
        self.assertEqual("build/gcc-8-tinyconfig", test.name)
        self.assertEqual("pass", test.status)

        test = first(self.squad.tests(name="gcc-8-x86_64_defconfig"))
        self.assertEqual("build/gcc-8-x86_64_defconfig", test.name)
        self.assertEqual("pass", test.status)

        metric = first(self.squad.metrics(name="gcc-8-allnoconfig-warnings"))
        self.assertEqual("build/gcc-8-allnoconfig-warnings", metric.name)
        self.assertEqual(0, metric.result)

        metric = first(self.squad.metrics(name="gcc-8-tinyconfig-warnings"))
        self.assertEqual("build/gcc-8-tinyconfig-warnings", metric.name)
        self.assertEqual(0, metric.result)

        metric = first(
            self.squad.metrics(name="gcc-8-x86_64_defconfig-warnings"))
        self.assertEqual("build/gcc-8-x86_64_defconfig-warnings", metric.name)
        self.assertEqual(0, metric.result)

        metric = first(
            self.squad.metrics(name="gcc-8-x86_64_defconfig-duration"))
        self.assertEqual("build/gcc-8-x86_64_defconfig-duration", metric.name)
        self.assertEqual(541, metric.result)

    def test_submit_tuxbuild_empty(self):
        proc = self.submit_tuxbuild("")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn("No such file or directory: ''", proc.err)

    def test_submit_tuxbuild_malformed(self):
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/malformed.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn("Failed to load json", proc.err)

    def test_submit_tuxbuild_missing(self):
        proc = self.submit_tuxbuild("tests/data/submit/tuxbuild/missing.json")
        self.assertFalse(proc.ok)
        self.assertIn(
            "No such file or directory: 'tests/data/submit/tuxbuild/missing.json'",
            proc.err,
        )

    def test_submit_tuxbuild_empty_build_status(self):
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/empty_build_status.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: '' is not one of ['fail', 'pass']",
            proc.err,
        )
        self.assertIn(
            "Failed validating 'enum' in schema['items'][0]['properties']['build_status']",
            proc.err)

    def test_submit_tuxbuild_malformed_build_status(self):
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/malformed_build_status.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: {'build': 'pass'} is not of type 'string'",
            proc.err,
        )
        self.assertIn(
            "Failed validating 'type' in schema['items'][0]['properties']['build_status']",
            proc.err)

    def test_submit_tuxbuild_missing_build_status(self):
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/missing_build_status.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: 'build_status' is a required property",
            proc.err,
        )

    def test_submit_tuxbuild_empty_kconfig(self):
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/empty_kconfig.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn("Failed to validate tuxbuild data: [] is too short",
                      proc.err)
        self.assertIn(
            "Failed validating 'minItems' in schema['items'][0]['properties']['kconfig']",
            proc.err)

    def test_submit_tuxbuild_malformed_kconfig(self):
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/malformed_kconfig.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: {'CONFIG_ARM64_MODULE_PLTS': 'y'} is not of type 'string'",
            proc.err,
        )
        self.assertIn(
            "Failed validating 'type' in schema['items'][0]['properties']['kconfig']['items'][0]",
            proc.err,
        )

    def test_submit_tuxbuild_missing_kconfig(self):
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/missing_kconfig.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: 'kconfig' is a required property",
            proc.err,
        )
class SubmitTuxbuildCommandIntegrationTest(unittest.TestCase): testing_server = "http://*****:*****@unittest.mock.patch.dict(os.environ, {'KERNEL_BRANCH': 'master'}) def test_submit_tuxbuild_build(self): proc = self.submit_tuxbuild(os.path.join(self.build_dir, "build.json")) self.assertTrue(proc.ok, msg=proc.err) self.assertTrue(proc.err.count('Submitting 1 tests, 2 metrics') == 1) project = self.squad.group('my_group').project('my_project') build = project.build('next-20220217') self.assertIsNotNone(build) testrun = first(build.testruns()) self.assertIsNotNone(testrun) base_kconfig = [ 'defconfig', 'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/lkft.config', 'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/lkft-crypto.config', 'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/distro-overrides.config', 'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/systemd.config', 'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/virtio.config', ] # Make sure metadata values match expected values expected_metadata = { 'git_repo': 'https://gitlab.com/Linaro/lkft/mirrors/next/linux-next', 'git_ref': 'master', 'git_sha': '3c30cf91b5ecc7272b3d2942ae0505dd8320b81c', 'git_short_log': '3c30cf91b5ec ("Add linux-next specific files for 20220217")', 'git_describe': 'next-20220217', 'kconfig': base_kconfig + ['CONFIG_IGB=y', 'CONFIG_UNWINDER_FRAME_POINTER=y', 'CONFIG_SYN_COOKIES=y'], 'kernel_version': '5.17.0-rc4', 'config': 'https://builds.tuxbuild.com/25EZVbc7oK6aCJfKV7V3dtFOMq5/config', 'download_url': 'https://builds.tuxbuild.com/25EZVbc7oK6aCJfKV7V3dtFOMq5/', 'duration': 422, 'toolchain': 'gcc-11', } for k, v in expected_metadata.items(): self.assertEqual(getattr(testrun.metadata, k), v, msg=k) environment = self.squad.group('my_group').project('my_project').environment('x86_64') self.assertIsNotNone(environment) 
suite = self.squad.group('my_group').project('my_project').suite('build') self.assertIsNotNone(suite) test = first(self.squad.tests(name='gcc-11-lkftconfig')) self.assertEqual('build/gcc-11-lkftconfig', test.name) self.assertEqual('pass', test.status) metric = first(self.squad.metrics(name='gcc-11-lkftconfig-warnings')) self.assertEqual('build/gcc-11-lkftconfig-warnings', metric.name) self.assertEqual(1, metric.result) metric = first(self.squad.metrics(name='gcc-11-lkftconfig-duration')) self.assertEqual('build/gcc-11-lkftconfig-duration', metric.name) self.assertEqual(422, metric.result) build.delete() @unittest.mock.patch.dict(os.environ, {'KERNEL_BRANCH': 'master'}) def test_submit_tuxbuild_buildset(self): proc = self.submit_tuxbuild(os.path.join(self.buildset_dir, "build.json")) self.assertTrue(proc.ok, msg=proc.out) self.assertTrue(proc.err.count('Submitting 1 tests, 2 metrics') == 3) project = self.squad.group('my_group').project('my_project') build = project.build('next-20220217') self.assertIsNotNone(build) testruns = build.testruns() self.assertIsNotNone(testruns) base_metadata = { 'git_repo': 'https://gitlab.com/Linaro/lkft/mirrors/next/linux-next', 'git_ref': 'master', 'git_sha': '3c30cf91b5ecc7272b3d2942ae0505dd8320b81c', 'git_short_log': '3c30cf91b5ec ("Add linux-next specific files for 20220217")', 'git_describe': 'next-20220217', 'kernel_version': '5.17.0-rc4', 'toolchain': 'gcc-8', } expected_metadata = [ dict(base_metadata, **{ 'config': 'https://builds.tuxbuild.com/25EZULlT5YOdXc5Hix07IGcbFtA/config', 'download_url': 'https://builds.tuxbuild.com/25EZULlT5YOdXc5Hix07IGcbFtA/', 'kconfig': ['allnoconfig'], 'duration': 324, }), dict(base_metadata, **{ 'config': 'https://builds.tuxbuild.com/25EZUJH3rXb2Ev1z5QUnTc6UKMU/config', 'download_url': 'https://builds.tuxbuild.com/25EZUJH3rXb2Ev1z5QUnTc6UKMU/', 'kconfig': ['tinyconfig'], 'duration': 350, }), dict(base_metadata, **{ 'config': 'https://builds.tuxbuild.com/25EZUJt40js6qte4xtKeLTnajQd/config', 
'download_url': 'https://builds.tuxbuild.com/25EZUJt40js6qte4xtKeLTnajQd/', 'kconfig': ['x86_64_defconfig'], 'duration': 460, }) ] for tr in testruns.values(): metadata = expected_metadata.pop(0) for k, v in metadata.items(): self.assertEqual(getattr(tr.metadata, k), v, msg=k) environment = project.environment('x86_64') self.assertIsNotNone(environment) suite = project.suite('build') self.assertIsNotNone(suite) test = first(self.squad.tests(name='gcc-8-allnoconfig')) self.assertEqual('build/gcc-8-allnoconfig', test.name) self.assertEqual('pass', test.status) test = first(self.squad.tests(name='gcc-8-tinyconfig')) self.assertEqual('build/gcc-8-tinyconfig', test.name) self.assertEqual('pass', test.status) test = first(self.squad.tests(name='gcc-8-x86_64_defconfig')) self.assertEqual('build/gcc-8-x86_64_defconfig', test.name) self.assertEqual('pass', test.status) metric = first(self.squad.metrics(name='gcc-8-allnoconfig-warnings')) self.assertEqual('build/gcc-8-allnoconfig-warnings', metric.name) self.assertEqual(0, metric.result) metric = first(self.squad.metrics(name='gcc-8-tinyconfig-warnings')) self.assertEqual('build/gcc-8-tinyconfig-warnings', metric.name) self.assertEqual(1, metric.result) metric = first(self.squad.metrics(name='gcc-8-x86_64_defconfig-warnings')) self.assertEqual('build/gcc-8-x86_64_defconfig-warnings', metric.name) self.assertEqual(0, metric.result) metric = first(self.squad.metrics(name='gcc-8-allnoconfig-duration')) self.assertEqual('build/gcc-8-allnoconfig-duration', metric.name) self.assertEqual(324, metric.result) metric = first(self.squad.metrics(name='gcc-8-tinyconfig-duration')) self.assertEqual('build/gcc-8-tinyconfig-duration', metric.name) self.assertEqual(350, metric.result) metric = first(self.squad.metrics(name='gcc-8-x86_64_defconfig-duration')) self.assertEqual('build/gcc-8-x86_64_defconfig-duration', metric.name) self.assertEqual(460, metric.result) build.delete() def test_submit_tuxbuild_empty(self): proc = 
self.submit_tuxbuild(os.path.join(self.root_dir, 'empty.json')) self.assertFalse(proc.ok, msg=proc.err) self.assertIn('Failed to load build json', proc.err) def test_submit_tuxbuild_missing(self): proc = self.submit_tuxbuild(os.path.join(self.root_dir, 'missing.json')) self.assertFalse(proc.ok, msg=proc.err) self.assertIn('No such file or directory', proc.err)
class SquadTest(unittest.TestCase):
    """Smoke tests for the top-level Squad API listing endpoints.

    Fixed: most tests used ``self.assertTrue(True, len(...))``, which is a
    tautology — the second argument of assertTrue is only the failure
    message, so those assertions could never fail.  They now assert that
    each endpoint actually returned a result.
    """

    def setUp(self):
        self.squad = Squad()

    def test_groups(self):
        groups = self.squad.groups()
        self.assertIsNotNone(groups)

    def test_not_found_groups(self):
        groups = self.squad.groups(name__startswith='no group with this name')
        self.assertEqual(0, len(groups))

    def test_groups_with_count(self):
        all_groups = self.squad.groups(count=ALL)
        self.assertEqual(2, len(all_groups))

        one_groups = self.squad.groups(count=1)
        self.assertEqual(1, len(one_groups))

    def test_not_found_group(self):
        not_found_group = self.squad.group('this-group-does-not-really-exist')
        self.assertIsNone(not_found_group)

    def test_group(self):
        group = self.squad.group('my_group')
        self.assertIsNotNone(group)

    def test_projects(self):
        projects = self.squad.projects()
        self.assertIsNotNone(projects)

    def test_builds(self):
        builds = self.squad.builds()
        self.assertIsNotNone(builds)

    def test_testjobs(self):
        testjobs = self.squad.testjobs()
        self.assertIsNotNone(testjobs)

    def test_testruns(self):
        testruns = self.squad.testruns()
        self.assertIsNotNone(testruns)

    def test_tests(self):
        tests = self.squad.tests()
        self.assertIsNotNone(tests)

    def test_suites(self):
        suites = self.squad.suites()
        self.assertIsNotNone(suites)

    def test_environments(self):
        environments = self.squad.environments()
        self.assertIsNotNone(environments)

    def test_backends(self):
        backends = self.squad.backends()
        self.assertIsNotNone(backends)

    def test_emailtemplates(self):
        emailtemplates = self.squad.emailtemplates()
        self.assertIsNotNone(emailtemplates)

    def test_knownissues(self):
        knownissues = self.squad.knownissues()
        self.assertIsNotNone(knownissues)

    def test_suitemetadata(self):
        suitemetadata = self.squad.suitemetadata()
        self.assertIsNotNone(suitemetadata)

    def test_annotations(self):
        annotations = self.squad.annotations()
        self.assertIsNotNone(annotations)

    def test_metricthresholds(self):
        metricthresholds = self.squad.metricthresholds()
        self.assertIsNotNone(metricthresholds)

    def test_reports(self):
        reports = self.squad.reports()
        self.assertIsNotNone(reports)
class SubmitTuxbuildCommandTest(unittest.TestCase):
    """Tests for the ``submit-tuxbuild`` management command (older variant).

    NOTE(review): this class has the same name as an earlier
    SubmitTuxbuildCommandTest in this file; at import time this later
    definition shadows the earlier one, so only one of them actually runs.
    Renaming is out of scope for this edit — confirm which definition is
    meant to survive.
    """

    testing_server = "http://localhost:%s" % settings.DEFAULT_SQUAD_PORT
    testing_token = "193cd8bb41ab9217714515954e8724f651ef8601"
    # Maximum time (seconds) to wait for the subprocess before killing it.
    process_timeout = 60

    def setUp(self):
        self.squad = Squad()
        SquadApi.configure(url=self.testing_server, token=self.testing_token)

    def submit_tuxbuild(self, tuxbuild):
        """Run ``./manage.py submit-tuxbuild`` on *tuxbuild*.

        Returns the Popen object annotated with ``ok`` (True when the
        command exited 0) and decoded ``out``/``err`` streams.
        """
        argv = [
            "./manage.py",
            "--squad-host",
            self.testing_server,
            "--squad-token",
            self.testing_token,
            "submit-tuxbuild",
            "--group",
            "my_group",
            "--project",
            "my_project",
            tuxbuild,
        ]
        proc = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
        proc.ok = False
        try:
            # Fixed: communicate() was called without a timeout, making the
            # TimeoutExpired handler below unreachable.
            out, err = proc.communicate(timeout=self.process_timeout)
            proc.ok = proc.returncode == 0
        except sp.TimeoutExpired:
            # Fixed: the original referenced an undefined self.logger and fed
            # a single argument to a two-placeholder format string (TypeError).
            print('Running "%s" timed out after %i seconds!'
                  % (" ".join(argv), self.process_timeout))
            proc.kill()
            out, err = proc.communicate()
        proc.out = out.decode("utf-8")
        proc.err = err.decode("utf-8")
        return proc

    def test_submit_tuxbuild_build(self):
        proc = self.submit_tuxbuild("tests/data/submit/tuxbuild/build.json")
        self.assertTrue(proc.ok, msg=proc.err)
        self.assertTrue(proc.err.count("Submitting 1 tests") == 3)

        build = (self.squad.group("my_group").project("my_project").build(
            "next-20201021"))
        self.assertIsNotNone(build)

        build = (
            self.squad.group("my_group").project("my_project").build("v4.4.4"))
        self.assertIsNotNone(build)

        for arch in ["arm64", "x86"]:
            environment = (self.squad.group("my_group").project(
                "my_project").environment(arch))
            self.assertIsNotNone(environment,
                                 "environment %s does not exist" % (arch))

        suite = self.squad.group("my_group").project("my_project").suite(
            "build")
        self.assertIsNotNone(suite)

        test = first(self.squad.tests(name="gcc-9-defconfig-b9979cfa"))
        self.assertEqual("build/gcc-9-defconfig-b9979cfa", test.name)
        self.assertEqual("pass", test.status)

        test = first(self.squad.tests(name="gcc-9-defconfig-5b09568e"))
        self.assertEqual("build/gcc-9-defconfig-5b09568e", test.name)
        self.assertEqual("fail", test.status)

    def test_submit_tuxbuild_buildset(self):
        proc = self.submit_tuxbuild("tests/data/submit/tuxbuild/buildset.json")
        self.assertTrue(proc.ok, msg=proc.out)
        self.assertIn("Submitting 3 tests", proc.err)

        build = (self.squad.group("my_group").project("my_project").build(
            "next-20201030"))
        self.assertIsNotNone(build)

        environment = (self.squad.group("my_group").project(
            "my_project").environment("x86"))
        self.assertIsNotNone(environment)

        suite = self.squad.group("my_group").project("my_project").suite(
            "build")
        self.assertIsNotNone(suite)

        test = first(self.squad.tests(name="gcc-8-allnoconfig"))
        self.assertEqual("build/gcc-8-allnoconfig", test.name)
        self.assertEqual("pass", test.status)

        test = first(self.squad.tests(name="gcc-8-tinyconfig"))
        self.assertEqual("build/gcc-8-tinyconfig", test.name)
        self.assertEqual("pass", test.status)

        test = first(self.squad.tests(name="gcc-8-x86_64_defconfig"))
        self.assertEqual("build/gcc-8-x86_64_defconfig", test.name)
        self.assertEqual("pass", test.status)

    def test_submit_tuxbuild_empty(self):
        proc = self.submit_tuxbuild("")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn("No such file or directory: ''", proc.err)

    def test_submit_tuxbuild_malformed(self):
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/malformed.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn("Failed to load json", proc.err)

    def test_submit_tuxbuild_missing(self):
        proc = self.submit_tuxbuild("tests/data/submit/tuxbuild/missing.json")
        self.assertFalse(proc.ok)
        self.assertIn(
            "No such file or directory: 'tests/data/submit/tuxbuild/missing.json'",
            proc.err,
        )

    def test_submit_tuxbuild_empty_build_status(self):
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/empty_build_status.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: '' is not one of ['fail', 'pass']",
            proc.err,
        )
        self.assertIn(
            "Failed validating 'enum' in schema['items'][0]['properties']['build_status']",
            proc.err)

    def test_submit_tuxbuild_malformed_build_status(self):
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/malformed_build_status.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: {'build': 'pass'} is not of type 'string'",
            proc.err,
        )
        self.assertIn(
            "Failed validating 'type' in schema['items'][0]['properties']['build_status']",
            proc.err)

    def test_submit_tuxbuild_missing_build_status(self):
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/missing_build_status.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: 'build_status' is a required property",
            proc.err,
        )

    def test_submit_tuxbuild_empty_kconfig(self):
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/empty_kconfig.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn("Failed to validate tuxbuild data: [] is too short",
                      proc.err)
        self.assertIn(
            "Failed validating 'minItems' in schema['items'][0]['properties']['kconfig']",
            proc.err)

    def test_submit_tuxbuild_malformed_kconfig(self):
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/malformed_kconfig.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: {'CONFIG_ARM64_MODULE_PLTS': 'y'} is not of type 'string'",
            proc.err,
        )
        self.assertIn(
            "Failed validating 'type' in schema['items'][0]['properties']['kconfig']['items'][0]",
            proc.err,
        )

    def test_submit_tuxbuild_missing_kconfig(self):
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/missing_kconfig.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: 'kconfig' is a required property",
            proc.err,
        )
def main(args):
    """Fetch tests for the requested builds/suites/architectures from SQUAD
    and run stability analysis on them via ``find_stable_tests``.

    *args* is the parsed CLI namespace (squadapi_url, group, project,
    builds, n, tests, suites, archs, no_arch, color).
    """
    global do_color
    do_color = args.color

    SquadApi.configure(args.squadapi_url)
    squad = Squad()

    print(f'I: Fetching group {args.group}')
    group = squad.group(args.group)

    print(f'I: Fetching project {args.project}')
    project = group.project(args.project)

    # Either the explicitly requested build versions, or the N most recent.
    # Fixed: the redundant "x and len(x)" checks are plain truthiness tests.
    build_filters = {}
    if args.builds:
        build_filters["version__in"] = join(args.builds)
    else:
        build_filters["count"] = args.n

    test_filters = {}
    if args.tests:
        test_filters["metadata__name__in"] = join(args.tests)

    if args.suites:
        print(
            f'I: Fetching {args.group}/{args.project} suites ({args.suites})')
        suites = project.suites(slug__in=join(args.suites))
        test_filters["suite__id__in"] = join(
            [str(_id) for _id in suites.keys()])
    else:
        print(f'I: Fetching {args.group}/{args.project} suites')
        suites = project.suites()

    if args.no_arch:
        envs = {}
    elif args.archs:
        print(
            f'I: Fetching {args.group}/{args.project} environments ({args.archs})'
        )
        envs = project.environments(slug__in=join(args.archs))
        test_filters["environment__id__in"] = join(
            [str(_id) for _id in envs.keys()])
    else:
        print(f'I: Fetching {args.group}/{args.project} environments')
        envs = project.environments()

    tests = []
    print(f'I: Fetching {args.n} builds ({build_filters}):', flush=True)
    for build in project.builds(**build_filters).values():
        print(f'D: Fetching build {build.version} tests ({test_filters})',
              flush=True)
        num_tests = 0
        for test in build.tests(**test_filters).values():
            # linux-log-parser results are noise for stability analysis.
            if test.name.startswith('linux-log-parser'):
                continue
            tests.append(test)
            # Progress indicator: one dot per 1000 collected tests.
            if num_tests % 1000 == 0:
                print('.', end='', flush=True)
            num_tests += 1
        if num_tests:
            print()

    print('I: Finding stable tests')
    find_stable_tests(
        tests,
        envs=envs,
        suites=suites,
    )