def fill(self):
    """Build and return ``self.values``: one fetched SQUAD object set per context entry."""
    logger.debug('Building report context')
    squad = Squad()
    self.values = {
        entry.name: squad.fetch(SquadObject.get_type(entry.type), **entry.filters)
        for entry in self.context
    }
    return self.values
def setUp(self):
    """Point the API client at the local test server and cache common fixtures."""
    api_url = 'http://localhost:%s' % settings.DEFAULT_SQUAD_PORT
    SquadApi.configure(url=api_url, token='193cd8bb41ab9217714515954e8724f651ef8601')
    squad = Squad()
    self.project = first(squad.projects(slug='my_project'))
    self.build = first(squad.builds(version='my_build'))
    self.build2 = first(squad.builds(version='my_build2'))
def setUp(self):
    """Configure the API client against the local test server."""
    SquadApi.configure(
        url="http://localhost:%s" % settings.DEFAULT_SQUAD_PORT,
        token="193cd8bb41ab9217714515954e8724f651ef8601",
    )
    self.squad = Squad()
    self.group_slug = 'my_group'
def setUp(self):
    """Record server/token/group fixtures and configure the API client."""
    self.squad = Squad()
    self.testing_server = 'http://localhost:%s' % settings.DEFAULT_SQUAD_PORT
    self.testing_token = '193cd8bb41ab9217714515954e8724f651ef8601'
    self.group = 'my_group'
    self.slug = 'create-project-via-cmdline'
    SquadApi.configure(self.testing_server, self.testing_token)
class SubmitResultsShortcutTest(TestCase):
    """Exercise the submit_results() shortcut against a live test server."""

    def setUp(self):
        self.squad = Squad()
        SquadApi.configure(
            url="http://localhost:%s" % settings.DEFAULT_SQUAD_PORT,
            token="193cd8bb41ab9217714515954e8724f651ef8601",
        )

    def test_basic(self):
        # A well-formed submission is accepted and its tests become queryable.
        ok = submit_results(
            group_project_slug="my_group/my_project",
            build_version="my_build",
            env_slug="my_env",
            tests={"testa": "pass", "testb": {"result": "pass", "log": "the log"}},
            metrics={"metrica": 42},
            metadata={"job_id": "12345", "a-metadata-field": "value"},
        )
        found = self.squad.tests(name="testa")
        self.assertTrue(len(found) > 0)
        self.assertTrue(ok)

    def test_malformed_data(self):
        # job_id already exists (same one as test_basic), so the server must
        # reject the submission and none of its tests may be stored.
        ok = submit_results(
            group_project_slug="my_group/my_project",
            build_version="my_build",
            env_slug="my_env",
            tests={
                "test-malformed": "pass",
                "testb": {"result": "pass", "log": "the log"},
            },
            metrics={"metrica": 42},
            metadata={"job_id": "12345", "a-metadata-field": "value"},
        )
        found = self.squad.tests(name="test-malformed")
        self.assertTrue(len(found) == 0)
        self.assertFalse(ok)
def setUp(self):
    """Configure the client and verify the tuxbuild fixture directories exist."""
    self.squad = Squad()
    # testing_server/testing_token are presumably class attributes defined
    # outside this view — TODO confirm.
    SquadApi.configure(url=self.testing_server, token=self.testing_token)
    self.root_dir = os.path.join("tests", "data", "submit_tuxbuild")
    self.build_dir = os.path.join(self.root_dir, "build-x86-gcc")
    self.buildset_dir = os.path.join(self.root_dir, "buildset-x86")
    for fixture_dir in (self.root_dir, self.build_dir, self.buildset_dir):
        self.assertTrue(os.path.exists(fixture_dir))
class SubmitResultsShortcutTest(TestCase):
    """Exercise submit_results(), including the server-side duplicate rejection."""

    def setUp(self):
        self.squad = Squad()
        SquadApi.configure(
            url="http://localhost:%s" % settings.DEFAULT_SQUAD_PORT,
            token="193cd8bb41ab9217714515954e8724f651ef8601",
        )

    def test_basic(self):
        # A well-formed submission is accepted and its tests become queryable.
        ok = submit_results(
            group_project_slug="my_group/my_project",
            build_version="my_build",
            env_slug="my_env",
            tests={"testa": "pass", "testb": {"result": "pass", "log": "the log"}},
            metrics={"metrica": 42},
            metadata={"job_id": "12345", "a-metadata-field": "value"},
        )
        found = self.squad.tests(name="testa")
        self.assertTrue(len(found) > 0)
        self.assertTrue(ok)

    def test_malformed_data(self):
        # job_id already exists
        with self.assertLogs(logger='squad_client.core.models', level=logging.ERROR) as cm:
            ok = submit_results(
                group_project_slug="my_group/my_project",
                build_version="my_build",
                env_slug="my_env",
                tests={
                    "test-malformed": "pass",
                    "testb": {"result": "pass", "log": "the log"},
                },
                metrics={"metrica": 42},
                metadata={"job_id": "12345", "a-metadata-field": "value"},
            )
        self.assertIn(
            'ERROR:squad_client.core.models:Failed to submit results: There is already a test run with job_id 12345',
            cm.output
        )
        found = self.squad.tests(name="test-malformed")
        self.assertTrue(len(found) == 0)
        self.assertFalse(ok)
def test_create_project(self):
    """Create a project via the model API and verify it round-trips by id."""
    group = Squad().group('my_group')
    slug = 'test-create-project'
    project = Project()
    project.slug = slug
    project.group = group
    project.enabled_plugins_list = ['linux-log-parser']
    project.save()
    fetched = first(Squad().projects(slug=slug, group__slug=group.slug))
    self.assertEqual(project.id, fetched.id)
    project.delete()
class WatchjobShortcutTest(TestCase):
    """Exercise the watchjob() shortcut against a live test server."""

    def setUp(self):
        self.squad = Squad()
        SquadApi.configure(
            url="http://localhost:%s" % settings.DEFAULT_SQUAD_PORT,
            token="193cd8bb41ab9217714515954e8724f651ef8601",
        )

    def test_basic(self):
        watched_id = "watched-job-id"
        ok = watchjob(
            group_project_slug="my_group/my_project",
            build_version="my_build",
            env_slug="my_env",
            backend_name="my_backend",
            testjob_id=watched_id,
        )
        self.assertTrue(ok)
        testjobs = self.squad.testjobs()
        self.assertTrue(len(testjobs) > 0)
        # The watched job id must appear among the server's testjobs.
        self.assertTrue(any(tj.job_id == watched_id for tj in testjobs.values()))
class SubmitJobShortcutTest(TestCase):
    """Exercise the submit_job() shortcut against a live test server."""

    def setUp(self):
        self.squad = Squad()
        SquadApi.configure(
            url="http://localhost:%s" % settings.DEFAULT_SQUAD_PORT,
            token="193cd8bb41ab9217714515954e8724f651ef8601",
        )

    def test_basic(self):
        ok = submit_job(
            group_project_slug="my_group/my_project",
            build_version="my_build",
            env_slug="my_submitted_env",
            backend_name="my_backend",
            definition="tests/data/dummy-definition.yaml",
        )
        self.assertTrue(ok)
        testjobs = self.squad.testjobs()
        self.assertTrue(len(testjobs) > 0)
        # The submitted job's environment must appear among the server's testjobs.
        self.assertTrue(any(tj.environment == "my_submitted_env" for tj in testjobs.values()))
def test_create_project(self):
    """create_project() on a group yields exactly one matching project."""
    slug = 'test-create-project2'
    self.group.create_project(slug=slug)
    matches = Squad().projects(slug=slug, group__slug=self.group.slug)
    self.assertEqual(1, len(matches))
    first(matches).delete()
def main(args):
    """Print a link to the most recent build, older than the given "bad" build,
    in which the given test passed on the given architecture.

    args: mapping providing 'squadapi_url', 'group', 'project', 'suite',
    'test', 'kernel_build' and 'arch'.
    """
    # Some configuration, might get parameterized later
    SquadApi.configure(args.get('squadapi_url', None))
    squad = Squad()

    def getid(s):
        # Extract the numeric id embedded in an API reference string.
        # Raw string fixes the invalid '\d' escape (SyntaxWarning on 3.12+);
        # a def replaces the PEP8-discouraged lambda assignment.
        return int(re.search(r'\d+', s).group())

    group = squad.group(args.get('group', None))
    project = group.project(args.get('project', None))
    bad_suite = project.suite(args.get('suite', None))
    bad_test = args.get('test', None)
    bad_build = project.build(args.get('kernel_build', None))
    bad_env = project.environment(args.get('arch', None))

    print('Looking at the next good build in %s/%s for build %s' % (group.slug, project.slug, bad_build.version), flush=True)
    # Newest passing occurrence of the test on builds older than the bad one.
    tests = squad.tests(build__created_at__lt=bad_build.created_at,
                        suite=bad_suite.id,
                        environment=bad_env.id,
                        metadata__name=bad_test,
                        ordering='-build_id',
                        result=True,
                        count=1)
    if len(tests):
        test = first(tests)
        build = Build(getid(test.build))
        print('%s: https://qa-reports.linaro.org/%s/%s/build/%s' % (build.version, group.slug, project.slug, build.version))
    else:
        print('No good build')
class SquadSubmitTest(unittest.TestCase):
    """Build a full TestRun object graph by hand and submit it to the server."""

    def setUp(self):
        self.squad = Squad()
        SquadApi.configure(url='http://localhost:%s' % settings.DEFAULT_SQUAD_PORT,
                           token='193cd8bb41ab9217714515954e8724f651ef8601')

    def test_submit(self):
        # Assemble the object hierarchy: group -> project -> env/build.
        group = Group()
        group.slug = 'my_group'

        project = Project()
        project.slug = 'my_project'
        project.group = group

        env = Environment()
        env.slug = 'my_env'
        env.project = project

        build = Build()
        build.project = project
        build.version = 'my_build'

        # One test and one metric attached to a testrun on that build.
        testrun = TestRun()
        testrun.build = build

        test = Test()
        test.name = 'test1'
        test.status = PASS
        test.log = 'test1 log'

        metric = Metric()
        metric.name = 'metric1'
        metric.result = 42

        testrun.environment = env
        testrun.add_test(test)
        testrun.add_metric(metric)
        testrun.log = 'really long log'
        testrun.metadata = {
            'metadata1': 'value1',
            'metadata2': 'value2',
            'job_id': '123'
        }
        testrun.submit_results()

        # The submitted test must be retrievable with matching fields.
        results = self.squad.tests(name='test1')
        self.assertTrue(len(results) > 0)
        retrieved = first(results)
        self.assertEqual(retrieved.log, test.log)
        self.assertEqual(retrieved.name, test.name)
class CreateOrUpdateProjectTest(TestCase):
    """End-to-end tests for the './manage.py create-or-update-project' subcommand."""

    def setUp(self):
        self.squad = Squad()
        self.testing_server = 'http://localhost:%s' % settings.DEFAULT_SQUAD_PORT
        self.testing_token = '193cd8bb41ab9217714515954e8724f651ef8601'
        SquadApi.configure(self.testing_server, self.testing_token)
        self.group = 'my_group'
        self.slug = 'create-project-via-cmdline'

    def manage_create_or_update_project(self, group=None, slug=None, name=None,
                                        description=None, settings=None,
                                        is_public=None, html_mail=None,
                                        moderate_notifications=None,
                                        is_archived=None, email_template=None,
                                        plugins=None,
                                        important_metadata_keys=None,
                                        wait_before_notification_timeout=None,
                                        notification_timeout=None,
                                        data_retention=None, no_overwrite=False,
                                        thresholds=None,
                                        build_confidence_count=None,
                                        build_confidence_threshold=None):
        """Run the subcommand with the given options and return the finished
        Popen object, annotated with .ok (exit status), .out and .err (text).
        """
        argv = [
            './manage.py', '--squad-host', self.testing_server,
            '--squad-token', self.testing_token, 'create-or-update-project'
        ]
        if group:
            argv += ['--group', group]
        if slug:
            argv += ['--slug', slug]
        if name:
            argv += ['--name', name]
        if description:
            argv += ['--description', description]
        if settings:
            argv += ['--settings', settings]
        # Tri-state flags: None means "do not pass either form".
        if is_public is not None:
            argv += ['--is-public'] if is_public else ['--is-private']
        if html_mail is not None:
            argv += ['--html-mail'] if html_mail else ['--no-html-mail']
        if moderate_notifications is not None:
            argv += ['--moderate-notifications'] if moderate_notifications else ['--no-moderate-notifications']
        if is_archived:
            argv += ['--is-archived']
        if email_template:
            argv += ['--email-template', email_template]
        if plugins and len(plugins):
            argv += ['--plugins', ','.join(plugins)]
        if important_metadata_keys and len(important_metadata_keys):
            argv += ['--important-metadata-keys', ','.join(important_metadata_keys)]
        if wait_before_notification_timeout is not None:
            argv += ['--wait-before-notification-timeout', str(wait_before_notification_timeout)]
        if notification_timeout is not None:
            argv += ['--notification-timeout', str(notification_timeout)]
        if data_retention is not None:
            argv += ['--data-retention', str(data_retention)]
        if no_overwrite:
            argv += ['--no-overwrite']
        if thresholds:
            argv += ['--thresholds'] + thresholds
        if build_confidence_count:
            argv += ['--build_confidence_count', str(build_confidence_count)]
        if build_confidence_threshold:
            # BUG FIX: the '--' prefix was missing here, so the value was
            # passed as a stray positional argument and the command failed.
            argv += ['--build_confidence_threshold', str(build_confidence_threshold)]

        proc = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
        proc.ok = False
        try:
            out, err = proc.communicate()
            proc.ok = (proc.returncode == 0)
        except sp.TimeoutExpired:
            # NOTE(review): unreachable while communicate() is called without
            # timeout=; kept in case one is added.  The original log call used
            # two placeholders with a single value, which would itself have
            # raised TypeError here — fixed to a single-placeholder message.
            self.logger.error('Running "%s" timed out!' % ' '.join(argv))
            proc.kill()
            out, err = proc.communicate()
        proc.out = out.decode('utf-8')
        proc.err = err.decode('utf-8')
        return proc

    def test_empty(self):
        # Without --group/--slug the subcommand must refuse to run.
        proc = self.manage_create_or_update_project()
        self.assertFalse(proc.ok)
        self.assertIn('the following arguments are required: --group, --slug',
                      proc.err)

    def test_basics(self):
        proc = self.manage_create_or_update_project(group=self.group,
                                                    slug=self.slug)
        self.assertTrue(proc.ok)
        project = first(
            self.squad.projects(group__slug=self.group, slug=self.slug))
        self.assertIsNotNone(project)
        self.assertIn('Project saved', proc.out)

    def test_no_overwrite(self):
        proc = self.manage_create_or_update_project(group=self.group,
                                                    slug=self.slug)
        self.assertTrue(proc.ok)

        # A second invocation with --no-overwrite must fail.
        proc = self.manage_create_or_update_project(group=self.group,
                                                    slug=self.slug,
                                                    name='trying to edit',
                                                    no_overwrite=True)
        self.assertFalse(proc.ok)
        self.assertIn('Project exists already', proc.err)

    def test_all_parameters(self):
        name = 'new name'
        description = 'project description'
        settings = '{"SETTING_KEY": "SETTING VALUE"}'
        is_public = True
        html_mail = False
        moderate_notifications = False
        is_archived = False
        plugins = ['linux-log-parser']
        important_metadata_keys = ['important-key-1', 'important key 2']
        wait_before_notification_timeout = 60
        notification_timeout = 120
        data_retention = 1
        thresholds = ["my-threshold"]
        build_confidence_count = 25
        build_confidence_threshold = 95
        proc = self.manage_create_or_update_project(
            group=self.group,
            slug=self.slug,
            name=name,
            description=description,
            settings=settings,
            is_public=is_public,
            html_mail=html_mail,
            moderate_notifications=moderate_notifications,
            is_archived=is_archived,
            plugins=plugins,
            important_metadata_keys=important_metadata_keys,
            wait_before_notification_timeout=wait_before_notification_timeout,
            notification_timeout=notification_timeout,
            data_retention=data_retention,
            thresholds=thresholds,
            build_confidence_count=build_confidence_count,
            build_confidence_threshold=build_confidence_threshold,
        )
        self.assertTrue(proc.ok)
        project = first(
            self.squad.projects(group__slug=self.group, slug=self.slug))
        self.assertIsNotNone(project)
        self.assertIn('Project saved', proc.out)
        self.assertIn('MetricThreshold saved', proc.out)
        self.assertEqual(description, project.description)
        self.assertEqual(settings, project.project_settings)
        self.assertEqual(is_public, project.is_public)
        self.assertEqual(html_mail, project.html_mail)
        self.assertEqual(moderate_notifications, project.moderate_notifications)
        self.assertEqual(is_archived, project.is_archived)
        self.assertEqual(plugins, project.enabled_plugins_list)
        self.assertEqual('\n'.join(important_metadata_keys),
                         project.important_metadata_keys)
        self.assertEqual(wait_before_notification_timeout,
                         project.wait_before_notification)
        self.assertEqual(notification_timeout, project.notification_timeout)
        self.assertEqual(data_retention, project.data_retention_days)
        self.assertEqual(1, len(project.thresholds().values()))
        threshold = first(project.thresholds())
        self.assertEqual(thresholds[0], threshold.name)
        self.assertEqual(build_confidence_count, project.build_confidence_count)
        self.assertEqual(build_confidence_threshold,
                         project.build_confidence_threshold)
def test_save_project_settings(self):
    """Saving project_settings must not break subsequent project lookup."""
    self.project.project_settings = 'SETTING: value'
    self.project.save()
    reloaded = first(Squad().projects(slug=self.project.slug))
    self.assertTrue(reloaded is not None)
def setUp(self):
    """Fetch the two build fixtures used throughout these tests."""
    squad = Squad()
    self.build = first(squad.builds(version='my_build'))
    self.build2 = first(squad.builds(version='my_build2'))
def setUp(self):
    # Fetch two testruns to exercise below: one with metadata, one without.
    # NOTE(review): the result is indexed with [1] and [2] — presumably a
    # mapping keyed by testrun id rather than a 0-based sequence; confirm
    # against squad_client's Squad.testruns() return type.
    self.testruns = Squad().testruns(count=2)
    self.testrun = self.testruns[1]
    self.testrun_no_metadata = self.testruns[2]
def setUp(self):
    """Configure the API client without a token (anonymous access)."""
    SquadApi.configure(url="http://localhost:%s" % settings.DEFAULT_SQUAD_PORT)
    self.squad = Squad()
class CreateOrUpdateShortcutTest(TestCase):
    """Tests for the create_or_update_project() shortcut.

    Each test creates a uniquely-slugged project on the live test server,
    verifies it via a fresh query, and deletes it at the end.
    """

    def setUp(self):
        self.squad = Squad()
        SquadApi.configure(
            url="http://localhost:%s" % settings.DEFAULT_SQUAD_PORT,
            token="193cd8bb41ab9217714515954e8724f651ef8601",
        )
        self.group_slug = 'my_group'

    def assertEqualProjects(self, project1, project2):
        # Field-by-field comparison of two project objects (id plus every
        # attribute the shortcut can set).
        self.assertEqual(project1.id, project2.id)
        self.assertEqual(project1.name, project2.name)
        self.assertEqual(project1.description, project2.description)
        self.assertEqual(project1.is_public, project2.is_public)
        self.assertEqual(project1.html_mail, project2.html_mail)
        self.assertEqual(project1.moderate_notifications, project2.moderate_notifications)
        self.assertEqual(project1.is_archived, project2.is_archived)
        self.assertEqual(project1.enabled_plugins_list, project2.enabled_plugins_list)
        self.assertEqual(project1.important_metadata_keys, project2.important_metadata_keys)
        self.assertEqual(project1.wait_before_notification, project2.wait_before_notification)
        self.assertEqual(project1.notification_timeout, project2.notification_timeout)
        self.assertEqual(project1.data_retention_days, project2.data_retention_days)

    def test_minimum_parameters(self):
        # group_slug + slug alone are enough to create a project.
        project_slug = 'project-with-minimum-parameteres'
        project, errors = create_or_update_project(
            group_slug=self.group_slug,
            slug=project_slug,
        )
        self.assertIsNotNone(project)
        self.assertEqual(0, len(errors))
        check_project = first(
            self.squad.projects(group__slug=self.group_slug, slug=project_slug))
        self.assertEqual(check_project.id, project.id)
        project.delete()

    def test_all_parameters(self):
        # Every supported keyword set at once; the stored project must match
        # the returned one field-for-field.
        project_slug = 'project-with-all-parameteres'
        project, errors = create_or_update_project(
            group_slug=self.group_slug,
            slug=project_slug,
            name='project name',
            description='project description',
            settings='{"SETTING_KEY": "SETTING VALUE"}',
            is_public=True,
            html_mail=False,
            moderate_notifications=False,
            is_archived=False,
            plugins=['linux-log-parser'],
            important_metadata_keys="important-key-1,important key 2",
            wait_before_notification_timeout=60,
            notification_timeout=120,
            data_retention=1,
        )
        self.assertIsNotNone(project)
        self.assertEqual(0, len(errors))
        check_project = first(
            self.squad.projects(group__slug=self.group_slug, slug=project_slug))
        self.assertEqualProjects(check_project, project)
        project.delete()

    def test_overwrite(self):
        # With overwrite=True a second call may change existing fields.
        project_slug = 'project-with-overwritten-data'
        project, errors = create_or_update_project(
            group_slug=self.group_slug,
            slug=project_slug,
            name='new name',
        )
        self.assertIsNotNone(project)
        self.assertEqual(0, len(errors))
        check_project = first(
            self.squad.projects(group__slug=self.group_slug, slug=project_slug))
        self.assertEqual(check_project.id, project.id)
        self.assertEqual(check_project.name, project.name)

        project_edited, errors = create_or_update_project(
            group_slug=self.group_slug,
            slug=project_slug,
            name='new name edited',
            overwrite=True,
        )
        self.assertIsNotNone(project_edited)
        self.assertEqual(0, len(errors))
        check_project = first(
            self.squad.projects(group__slug=self.group_slug, slug=project_slug))
        self.assertEqual(check_project.id, project_edited.id)
        self.assertEqual(check_project.name, project_edited.name)
        project_edited.delete()

    def test_overwrite_selected_fields_only(self):
        # Overwriting only the name must leave all other fields untouched.
        project_slug = 'project-with-overwritten-data-specific-fields-only'
        description = 'project description'
        settings = '{"SETTING_KEY": "SETTING VALUE"}'
        is_public = True
        html_mail = False
        moderate_notifications = False
        is_archived = False
        plugins = ['linux-log-parser']
        important_metadata_keys = 'important-key-1,important key 2'
        wait_before_notification_timeout = 60
        notification_timeout = 120
        data_retention = 1
        project, errors = create_or_update_project(
            group_slug=self.group_slug,
            slug=project_slug,
            name='new name',
            description=description,
            settings=settings,
            is_public=is_public,
            html_mail=html_mail,
            moderate_notifications=moderate_notifications,
            is_archived=is_archived,
            plugins=plugins,
            important_metadata_keys=important_metadata_keys,
            wait_before_notification_timeout=wait_before_notification_timeout,
            notification_timeout=notification_timeout,
            data_retention=data_retention,
        )
        self.assertIsNotNone(project)
        self.assertEqual(0, len(errors))
        check_project = first(
            self.squad.projects(group__slug=self.group_slug, slug=project_slug))
        self.assertEqualProjects(check_project, project)

        project_edited, errors = create_or_update_project(
            group_slug=self.group_slug,
            slug=project_slug,
            name='new name edited',
            overwrite=True,
        )
        self.assertIsNotNone(project_edited)
        self.assertEqual(0, len(errors))
        check_project = first(
            self.squad.projects(group__slug=self.group_slug, slug=project_slug))
        self.assertEqualProjects(check_project, project_edited)
        self.assertEqual(description, project_edited.description)
        self.assertEqual(is_public, project_edited.is_public)
        self.assertEqual(html_mail, project_edited.html_mail)
        self.assertEqual(moderate_notifications, project_edited.moderate_notifications)
        self.assertEqual(is_archived, project_edited.is_archived)
        self.assertEqual(plugins, project_edited.enabled_plugins_list)
        self.assertEqual(important_metadata_keys, project_edited.important_metadata_keys)
        self.assertEqual(wait_before_notification_timeout, project_edited.wait_before_notification)
        self.assertEqual(notification_timeout, project_edited.notification_timeout)
        self.assertEqual(data_retention, project_edited.data_retention_days)
        project_edited.delete()

    def test_no_overwrite(self):
        # With overwrite=False editing an existing project must fail cleanly.
        project_slug = 'project-without-overwritten-data'
        project, errors = create_or_update_project(
            group_slug=self.group_slug,
            slug=project_slug,
            name='new name',
        )
        self.assertIsNotNone(project)
        self.assertEqual(0, len(errors))
        check_project = first(
            self.squad.projects(group__slug=self.group_slug, slug=project_slug))
        self.assertEqual(check_project.id, project.id)
        self.assertEqual(check_project.name, project.name)

        project_edited, errors = create_or_update_project(
            group_slug=self.group_slug,
            slug=project_slug,
            name='new name edited',
            overwrite=False,
        )
        self.assertIsNone(project_edited)
        self.assertEqual(1, len(errors))
        self.assertEqual(['Project exists already'], errors)
        project.delete()
class SubmitTuxbuildCommandTest(unittest.TestCase):
    """End-to-end tests for the './manage.py submit-tuxbuild' subcommand."""

    testing_server = "http://localhost:%s" % settings.DEFAULT_SQUAD_PORT
    testing_token = "193cd8bb41ab9217714515954e8724f651ef8601"

    def setUp(self):
        self.squad = Squad()
        SquadApi.configure(url=self.testing_server, token=self.testing_token)

    def submit_tuxbuild(self, tuxbuild):
        """Run the subcommand on the given tuxbuild json file and return the
        finished Popen object annotated with .ok, .out and .err.
        """
        argv = [
            "./manage.py",
            "--squad-host", self.testing_server,
            "--squad-token", self.testing_token,
            "submit-tuxbuild",
            "--group", "my_group",
            "--project", "my_project",
            tuxbuild,
        ]
        env = os.environ.copy()
        env['LOG_LEVEL'] = 'INFO'
        proc = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE, env=env)
        proc.ok = False
        try:
            out, err = proc.communicate()
            proc.ok = proc.returncode == 0
        except sp.TimeoutExpired:
            # NOTE(review): unreachable while communicate() has no timeout=;
            # kept in case one is added.  The original log call used two
            # format placeholders with a single value, which would itself
            # have raised TypeError here — fixed to a one-placeholder message.
            self.logger.error('Running "%s" timed out!' % " ".join(argv))
            proc.kill()
            out, err = proc.communicate()
        proc.out = out.decode("utf-8")
        proc.err = err.decode("utf-8")
        return proc

    def test_submit_tuxbuild_build(self):
        proc = self.submit_tuxbuild("tests/data/submit/tuxbuild/build.json")
        self.assertTrue(proc.ok, msg=proc.err)
        self.assertTrue(proc.err.count("Submitting 1 tests, 2 metrics") == 3)

        project = self.squad.group("my_group").project("my_project")

        # Check results for next-20201021, which has 2 instances in build.json
        build = project.build("next-20201021")
        base_kconfig = [
            'defconfig',
            'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/lkft.config',
            'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/lkft-crypto.config',
            'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/distro-overrides.config',
            'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/systemd.config',
            'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/virtio.config',
        ]

        # Make sure metadata values match expected values
        urls = [
            'https://builds.tuxbuild.com/%s/' % _id
            for _id in ['B3TECkH4_1X9yKoWOPIhew', 't8NSUfTBZiSPbBVaXLH7kw']
        ]
        configs = [url + "config" for url in urls]
        expected_metadata = {
            'git_repo': "https://gitlab.com/Linaro/lkft/mirrors/next/linux-next",
            'git_ref': None,
            'git_commit': "5302568121ba345f5c22528aefd72d775f25221e",
            'git_sha': "5302568121ba345f5c22528aefd72d775f25221e",
            'git_short_log': '5302568121ba ("Add linux-next specific files for 20201021")',
            'git_describe': "next-20201021",
            'kconfig': [
                base_kconfig + ["CONFIG_ARM64_MODULE_PLTS=y"],
                base_kconfig + ["CONFIG_IGB=y", "CONFIG_UNWINDER_FRAME_POINTER=y"],
            ],
            'git_branch': os.environ.get("KERNEL_BRANCH"),
            'make_kernelversion': "5.9.0",
            'kernel_version': "5.9.0",
            'config': configs,
            'download_url': urls,
            'duration': 541,
        }
        for expected_key in expected_metadata.keys():
            self.assertEqual(expected_metadata[expected_key],
                             getattr(build.metadata, expected_key))

        # Make sure there's no extra attributes in the metadata object
        metadata_attrs = build.metadata.__dict__
        del metadata_attrs["id"]
        self.assertEqual(sorted(expected_metadata.keys()), sorted(metadata_attrs.keys()))

        # Check results for v4.4.4, which has 1 instance in build.json
        build = project.build("v4.4.4")

        # Make sure metadata values match expected values
        url = 'https://builds.tuxbuild.com/%s/' % 'B3TECkH4_1X9yKoWOPIhew'
        config = url + "config"
        expected_metadata = {
            'git_repo': "https://gitlab.com/Linaro/lkft/mirrors/next/linux-next",
            'git_ref': None,
            'git_commit': "5302568121ba345f5c22528aefd72d775f25221e",
            'git_sha': "5302568121ba345f5c22528aefd72d775f25221e",
            'git_short_log': '5302568121ba ("Add linux-next specific files for 20201021")',
            'git_describe': "v4.4.4",
            'kconfig': base_kconfig + ["CONFIG_ARM64_MODULE_PLTS=y"],
            'git_branch': os.environ.get("KERNEL_BRANCH"),
            'make_kernelversion': "5.9.0",
            'kernel_version': "5.9.0",
            'config': config,
            'download_url': url,
            'duration': 541,
        }
        for expected_key in expected_metadata.keys():
            self.assertEqual(expected_metadata[expected_key],
                             getattr(build.metadata, expected_key))

        # Make sure there's no extra attributes in the metadata object
        metadata_attrs = build.metadata.__dict__
        del metadata_attrs["id"]
        self.assertEqual(sorted(expected_metadata.keys()), sorted(metadata_attrs.keys()))

        for arch in ["arm64", "x86"]:
            environment = (self.squad.group("my_group").project(
                "my_project").environment(arch))
            self.assertIsNotNone(environment,
                                 "environment %s does not exist" % (arch))

        suite = self.squad.group("my_group").project("my_project").suite(
            "build")
        self.assertIsNotNone(suite)

        test = first(self.squad.tests(name="gcc-9-defconfig-b9979cfa"))
        self.assertEqual("build/gcc-9-defconfig-b9979cfa", test.name)
        self.assertEqual("pass", test.status)

        test = first(self.squad.tests(name="gcc-9-defconfig-5b09568e"))
        self.assertEqual("build/gcc-9-defconfig-5b09568e", test.name)
        self.assertEqual("fail", test.status)

        metric = first(
            self.squad.metrics(name="gcc-9-defconfig-b9979cfa-warnings"))
        self.assertEqual("build/gcc-9-defconfig-b9979cfa-warnings", metric.name)
        self.assertEqual(1, metric.result)

        metric = first(
            self.squad.metrics(name="gcc-9-defconfig-5b09568e-warnings"))
        self.assertEqual("build/gcc-9-defconfig-5b09568e-warnings", metric.name)
        self.assertEqual(2, metric.result)

        metric = first(
            self.squad.metrics(name="gcc-9-defconfig-5b09568e-duration"))
        self.assertEqual("build/gcc-9-defconfig-5b09568e-duration", metric.name)
        self.assertEqual(541, metric.result)

    def test_submit_tuxbuild_buildset(self):
        os.environ["KERNEL_BRANCH"] = "master"
        proc = self.submit_tuxbuild("tests/data/submit/tuxbuild/buildset.json")
        self.assertTrue(proc.ok, msg=proc.out)
        self.assertTrue(proc.err.count("Submitting 1 tests, 2 metrics") == 3)

        build = self.squad.group("my_group").project("my_project").build(
            "next-20201030")

        # Make sure metadata values match expected values
        urls = [
            'https://builds.tuxbuild.com/%s/' % _id for _id in [
                '9NeOU1kd65bhMrL4eyI2yA', 'cjLreGasHSZj3OctZlNdpw',
                'x5Mi9j6xZItTGqVtOKmnVw'
            ]
        ]
        configs = [url + "config" for url in urls]
        expected_metadata = {
            'git_repo': "https://gitlab.com/Linaro/lkft/mirrors/next/linux-next",
            'git_ref': None,
            'git_commit': "4e78c578cb987725eef1cec7d11b6437109e9a49",
            'git_sha': "4e78c578cb987725eef1cec7d11b6437109e9a49",
            'git_short_log': '4e78c578cb98 ("Add linux-next specific files for 20201030")',
            'git_describe': "next-20201030",
            'kconfig': [['allnoconfig'], ['tinyconfig'], ['x86_64_defconfig']],
            'git_branch': os.environ.get("KERNEL_BRANCH"),
            'make_kernelversion': "5.10.0-rc1",
            'kernel_version': "5.10.0-rc1",
            'config': configs,
            'download_url': urls,
            'duration': 541,
        }
        for expected_key in expected_metadata.keys():
            self.assertEqual(expected_metadata[expected_key],
                             getattr(build.metadata, expected_key))

        # Make sure there's no extra attributes in the metadata object
        metadata_attrs = build.metadata.__dict__
        del metadata_attrs["id"]
        self.assertEqual(sorted(expected_metadata.keys()), sorted(metadata_attrs.keys()))

        environment = (self.squad.group("my_group").project(
            "my_project").environment("x86"))
        self.assertIsNotNone(environment)

        suite = self.squad.group("my_group").project("my_project").suite(
            "build")
        self.assertIsNotNone(suite)

        test = first(self.squad.tests(name="gcc-8-allnoconfig"))
        self.assertEqual("build/gcc-8-allnoconfig", test.name)
        self.assertEqual("pass", test.status)

        test = first(self.squad.tests(name="gcc-8-tinyconfig"))
        self.assertEqual("build/gcc-8-tinyconfig", test.name)
        self.assertEqual("pass", test.status)

        test = first(self.squad.tests(name="gcc-8-x86_64_defconfig"))
        self.assertEqual("build/gcc-8-x86_64_defconfig", test.name)
        self.assertEqual("pass", test.status)

        metric = first(self.squad.metrics(name="gcc-8-allnoconfig-warnings"))
        self.assertEqual("build/gcc-8-allnoconfig-warnings", metric.name)
        self.assertEqual(0, metric.result)

        metric = first(self.squad.metrics(name="gcc-8-tinyconfig-warnings"))
        self.assertEqual("build/gcc-8-tinyconfig-warnings", metric.name)
        self.assertEqual(0, metric.result)

        metric = first(
            self.squad.metrics(name="gcc-8-x86_64_defconfig-warnings"))
        self.assertEqual("build/gcc-8-x86_64_defconfig-warnings", metric.name)
        self.assertEqual(0, metric.result)

        metric = first(
            self.squad.metrics(name="gcc-8-x86_64_defconfig-duration"))
        self.assertEqual("build/gcc-8-x86_64_defconfig-duration", metric.name)
        self.assertEqual(541, metric.result)

    def test_submit_tuxbuild_empty(self):
        proc = self.submit_tuxbuild("")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn("No such file or directory: ''", proc.err)

    def test_submit_tuxbuild_malformed(self):
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/malformed.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn("Failed to load json", proc.err)

    def test_submit_tuxbuild_missing(self):
        proc = self.submit_tuxbuild("tests/data/submit/tuxbuild/missing.json")
        self.assertFalse(proc.ok)
        self.assertIn(
            "No such file or directory: 'tests/data/submit/tuxbuild/missing.json'",
            proc.err,
        )

    def test_submit_tuxbuild_empty_build_status(self):
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/empty_build_status.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: '' is not one of ['fail', 'pass']",
            proc.err,
        )
        self.assertIn(
            "Failed validating 'enum' in schema['items'][0]['properties']['build_status']",
            proc.err)

    def test_submit_tuxbuild_malformed_build_status(self):
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/malformed_build_status.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: {'build': 'pass'} is not of type 'string'",
            proc.err,
        )
        self.assertIn(
            "Failed validating 'type' in schema['items'][0]['properties']['build_status']",
            proc.err)

    def test_submit_tuxbuild_missing_build_status(self):
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/missing_build_status.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: 'build_status' is a required property",
            proc.err,
        )

    def test_submit_tuxbuild_empty_kconfig(self):
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/empty_kconfig.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn("Failed to validate tuxbuild data: [] is too short",
                      proc.err)
        self.assertIn(
            "Failed validating 'minItems' in schema['items'][0]['properties']['kconfig']",
            proc.err)

    def test_submit_tuxbuild_malformed_kconfig(self):
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/malformed_kconfig.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: {'CONFIG_ARM64_MODULE_PLTS': 'y'} is not of type 'string'",
            proc.err,
        )
        self.assertIn(
            "Failed validating 'type' in schema['items'][0]['properties']['kconfig']['items'][0]",
            proc.err,
        )

    def test_submit_tuxbuild_missing_kconfig(self):
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/missing_kconfig.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: 'kconfig' is a required property",
            proc.err,
        )
def setUp(self):
    """Configure the API client against the local test server."""
    SquadApi.configure(url='http://localhost:%s' % settings.DEFAULT_SQUAD_PORT,
                       token='193cd8bb41ab9217714515954e8724f651ef8601')
    self.squad = Squad()
def setUp(self):
    # Fresh Squad client per test; assumes SquadApi was configured elsewhere
    # (e.g. module- or suite-level setup) — TODO confirm.
    self.squad = Squad()
def setUp(self):
    """Grab a single testrun fixture for the tests below."""
    testruns = Squad().testruns(count=1)
    self.testrun = first(testruns)
class SquadTest(unittest.TestCase):
    """Smoke-tests for the top-level listing endpoints of the Squad client.

    BUGFIX: the original listing tests used ``self.assertTrue(True, len(x))``,
    which always passes — assertTrue's second argument is only the failure
    message, so nothing was actually asserted. Those calls are replaced with
    :meth:`_assert_listing`, which checks something real. Tests asserting
    exact counts or lookups are unchanged.
    """

    def setUp(self):
        self.squad = Squad()

    def _assert_listing(self, collection):
        # The endpoint must return a non-None, sized collection; the call
        # itself raising (or returning None) is the failure mode we care about.
        self.assertIsNotNone(collection)
        self.assertGreaterEqual(len(collection), 0)

    def test_groups(self):
        self._assert_listing(self.squad.groups())

    def test_not_found_groups(self):
        groups = self.squad.groups(name__startswith='no group with this name')
        self.assertEqual(0, len(groups))

    def test_groups_with_count(self):
        all_groups = self.squad.groups(count=ALL)
        self.assertEqual(2, len(all_groups))
        one_groups = self.squad.groups(count=1)
        self.assertEqual(1, len(one_groups))

    def test_not_found_group(self):
        not_found_group = self.squad.group('this-group-does-not-really-exist')
        self.assertEqual(None, not_found_group)

    def test_group(self):
        group = self.squad.group('my_group')
        self.assertTrue(group is not None)

    def test_projects(self):
        self._assert_listing(self.squad.projects())

    def test_builds(self):
        self._assert_listing(self.squad.builds())

    def test_testjobs(self):
        self._assert_listing(self.squad.testjobs())

    def test_testruns(self):
        self._assert_listing(self.squad.testruns())

    def test_tests(self):
        self._assert_listing(self.squad.tests())

    def test_suites(self):
        self._assert_listing(self.squad.suites())

    def test_environments(self):
        self._assert_listing(self.squad.environments())

    def test_backends(self):
        self._assert_listing(self.squad.backends())

    def test_emailtemplates(self):
        self._assert_listing(self.squad.emailtemplates())

    def test_knownissues(self):
        self._assert_listing(self.squad.knownissues())

    def test_suitemetadata(self):
        self._assert_listing(self.squad.suitemetadata())

    def test_annotations(self):
        self._assert_listing(self.squad.annotations())

    def test_metricthresholds(self):
        self._assert_listing(self.squad.metricthresholds())

    def test_reports(self):
        self._assert_listing(self.squad.reports())
def setUp(self): self.squad = Squad() SquadApi.configure(url=self.testing_server, token=self.testing_token)
def setUp(self): self.suite = first(Squad().suites(slug='my_suite'))
def setUp(self): self.build = first(Squad().builds(count=1))
# NOTE(review): this class is whitespace-mangled and partially destroyed by a
# redaction artifact — `testing_server = "http://*****:*****@unittest.mock...`
# has swallowed the class attributes, setUp and (presumably) the
# submit_tuxbuild helper, so the original definitions cannot be reconstructed
# from this view; code left byte-identical. The visible tests submit tuxbuild
# build.json files via the CLI and verify testrun metadata, environments,
# suites, tests and metrics created on the server, deleting the build at the
# end. `self.build_dir` / `self.buildset_dir` / `self.root_dir` are presumably
# set in the redacted setUp — TODO confirm against the original file.
class SubmitTuxbuildCommandIntegrationTest(unittest.TestCase): testing_server = "http://*****:*****@unittest.mock.patch.dict(os.environ, {'KERNEL_BRANCH': 'master'}) def test_submit_tuxbuild_build(self): proc = self.submit_tuxbuild(os.path.join(self.build_dir, "build.json")) self.assertTrue(proc.ok, msg=proc.err) self.assertTrue(proc.err.count('Submitting 1 tests, 2 metrics') == 1) project = self.squad.group('my_group').project('my_project') build = project.build('next-20220217') self.assertIsNotNone(build) testrun = first(build.testruns()) self.assertIsNotNone(testrun) base_kconfig = [ 'defconfig', 'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/lkft.config', 'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/lkft-crypto.config', 'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/distro-overrides.config', 'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/systemd.config', 'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/virtio.config', ] # Make sure metadata values match expected values expected_metadata = { 'git_repo': 'https://gitlab.com/Linaro/lkft/mirrors/next/linux-next', 'git_ref': 'master', 'git_sha': '3c30cf91b5ecc7272b3d2942ae0505dd8320b81c', 'git_short_log': '3c30cf91b5ec ("Add linux-next specific files for 20220217")', 'git_describe': 'next-20220217', 'kconfig': base_kconfig + ['CONFIG_IGB=y', 'CONFIG_UNWINDER_FRAME_POINTER=y', 'CONFIG_SYN_COOKIES=y'], 'kernel_version': '5.17.0-rc4', 'config': 'https://builds.tuxbuild.com/25EZVbc7oK6aCJfKV7V3dtFOMq5/config', 'download_url': 'https://builds.tuxbuild.com/25EZVbc7oK6aCJfKV7V3dtFOMq5/', 'duration': 422, 'toolchain': 'gcc-11', } for k, v in expected_metadata.items(): self.assertEqual(getattr(testrun.metadata, k), v, msg=k) environment = self.squad.group('my_group').project('my_project').environment('x86_64') self.assertIsNotNone(environment) 
suite = self.squad.group('my_group').project('my_project').suite('build') self.assertIsNotNone(suite) test = first(self.squad.tests(name='gcc-11-lkftconfig')) self.assertEqual('build/gcc-11-lkftconfig', test.name) self.assertEqual('pass', test.status) metric = first(self.squad.metrics(name='gcc-11-lkftconfig-warnings')) self.assertEqual('build/gcc-11-lkftconfig-warnings', metric.name) self.assertEqual(1, metric.result) metric = first(self.squad.metrics(name='gcc-11-lkftconfig-duration')) self.assertEqual('build/gcc-11-lkftconfig-duration', metric.name) self.assertEqual(422, metric.result) build.delete() @unittest.mock.patch.dict(os.environ, {'KERNEL_BRANCH': 'master'}) def test_submit_tuxbuild_buildset(self): proc = self.submit_tuxbuild(os.path.join(self.buildset_dir, "build.json")) self.assertTrue(proc.ok, msg=proc.out) self.assertTrue(proc.err.count('Submitting 1 tests, 2 metrics') == 3) project = self.squad.group('my_group').project('my_project') build = project.build('next-20220217') self.assertIsNotNone(build) testruns = build.testruns() self.assertIsNotNone(testruns) base_metadata = { 'git_repo': 'https://gitlab.com/Linaro/lkft/mirrors/next/linux-next', 'git_ref': 'master', 'git_sha': '3c30cf91b5ecc7272b3d2942ae0505dd8320b81c', 'git_short_log': '3c30cf91b5ec ("Add linux-next specific files for 20220217")', 'git_describe': 'next-20220217', 'kernel_version': '5.17.0-rc4', 'toolchain': 'gcc-8', } expected_metadata = [ dict(base_metadata, **{ 'config': 'https://builds.tuxbuild.com/25EZULlT5YOdXc5Hix07IGcbFtA/config', 'download_url': 'https://builds.tuxbuild.com/25EZULlT5YOdXc5Hix07IGcbFtA/', 'kconfig': ['allnoconfig'], 'duration': 324, }), dict(base_metadata, **{ 'config': 'https://builds.tuxbuild.com/25EZUJH3rXb2Ev1z5QUnTc6UKMU/config', 'download_url': 'https://builds.tuxbuild.com/25EZUJH3rXb2Ev1z5QUnTc6UKMU/', 'kconfig': ['tinyconfig'], 'duration': 350, }), dict(base_metadata, **{ 'config': 'https://builds.tuxbuild.com/25EZUJt40js6qte4xtKeLTnajQd/config', 
'download_url': 'https://builds.tuxbuild.com/25EZUJt40js6qte4xtKeLTnajQd/', 'kconfig': ['x86_64_defconfig'], 'duration': 460, }) ] for tr in testruns.values(): metadata = expected_metadata.pop(0) for k, v in metadata.items(): self.assertEqual(getattr(tr.metadata, k), v, msg=k) environment = project.environment('x86_64') self.assertIsNotNone(environment) suite = project.suite('build') self.assertIsNotNone(suite) test = first(self.squad.tests(name='gcc-8-allnoconfig')) self.assertEqual('build/gcc-8-allnoconfig', test.name) self.assertEqual('pass', test.status) test = first(self.squad.tests(name='gcc-8-tinyconfig')) self.assertEqual('build/gcc-8-tinyconfig', test.name) self.assertEqual('pass', test.status) test = first(self.squad.tests(name='gcc-8-x86_64_defconfig')) self.assertEqual('build/gcc-8-x86_64_defconfig', test.name) self.assertEqual('pass', test.status) metric = first(self.squad.metrics(name='gcc-8-allnoconfig-warnings')) self.assertEqual('build/gcc-8-allnoconfig-warnings', metric.name) self.assertEqual(0, metric.result) metric = first(self.squad.metrics(name='gcc-8-tinyconfig-warnings')) self.assertEqual('build/gcc-8-tinyconfig-warnings', metric.name) self.assertEqual(1, metric.result) metric = first(self.squad.metrics(name='gcc-8-x86_64_defconfig-warnings')) self.assertEqual('build/gcc-8-x86_64_defconfig-warnings', metric.name) self.assertEqual(0, metric.result) metric = first(self.squad.metrics(name='gcc-8-allnoconfig-duration')) self.assertEqual('build/gcc-8-allnoconfig-duration', metric.name) self.assertEqual(324, metric.result) metric = first(self.squad.metrics(name='gcc-8-tinyconfig-duration')) self.assertEqual('build/gcc-8-tinyconfig-duration', metric.name) self.assertEqual(350, metric.result) metric = first(self.squad.metrics(name='gcc-8-x86_64_defconfig-duration')) self.assertEqual('build/gcc-8-x86_64_defconfig-duration', metric.name) self.assertEqual(460, metric.result) build.delete() def test_submit_tuxbuild_empty(self): proc = 
self.submit_tuxbuild(os.path.join(self.root_dir, 'empty.json')) self.assertFalse(proc.ok, msg=proc.err) self.assertIn('Failed to load build json', proc.err) def test_submit_tuxbuild_missing(self): proc = self.submit_tuxbuild(os.path.join(self.root_dir, 'missing.json')) self.assertFalse(proc.ok, msg=proc.err) self.assertIn('No such file or directory', proc.err)
def setUp(self): SquadApi.configure(url='http://localhost:%s' % settings.DEFAULT_SQUAD_PORT, token='193cd8bb41ab9217714515954e8724f651ef8601') self.group = first(Squad().groups(slug='my_group'))
class SubmitCommandTest(unittest.TestCase):
    """End-to-end tests for ``./manage.py submit``, driven through a subprocess."""

    testing_server = 'http://localhost:%s' % settings.DEFAULT_SQUAD_PORT
    testing_token = '193cd8bb41ab9217714515954e8724f651ef8601'

    def setUp(self):
        self.squad = Squad()
        SquadApi.configure(url=self.testing_server, token=self.testing_token)

    def manage_submit(self, results=None, result_name=None, result_value=None,
                      metrics=None, metadata=None, attachments=None, logs=None,
                      environment=None):
        """Run ``./manage.py submit`` with the given options.

        Returns the finished subprocess object with extra attributes
        attached: ``ok`` (exit status == 0) and ``out``/``err`` decoded
        as UTF-8.

        NOTE(review): ``environment`` is accepted but never used — the
        argv below hard-codes ``--environment test_submit_env``; confirm
        whether any caller relies on passing it.
        """
        argv = [
            './manage.py', '--squad-host', self.testing_server,
            '--squad-token', self.testing_token, 'submit',
            '--group', 'my_group', '--project', 'my_project',
            '--build', 'my_build6', '--environment', 'test_submit_env',
        ]
        if logs:
            argv += ['--logs', logs]
        if results:
            argv += ['--results', results]
        if metrics:
            argv += ['--metrics', metrics]
        if metadata:
            argv += ['--metadata', metadata]
        if attachments:
            argv += ['--attachments', attachments]
        if result_name:
            argv += ['--result-name', result_name]
        if result_value:
            argv += ['--result-value', result_value]
        proc = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
        proc.ok = False
        try:
            out, err = proc.communicate()
            proc.ok = (proc.returncode == 0)
        except sp.TimeoutExpired:
            # BUGFIX: the original handler formatted a two-placeholder
            # string ('%s' ... '%i') with a single argument, which raises
            # TypeError, and referenced `self.logger`, which is never
            # defined on this class. Also note communicate() above is
            # called without a timeout, so this branch is currently
            # unreachable; kept for safety if a timeout is added later.
            import logging  # local import: module import block is outside this view
            logging.getLogger(__name__).error(
                'Running "%s" timed out!', ' '.join(argv))
            proc.kill()
            out, err = proc.communicate()
        proc.out = out.decode('utf-8')
        proc.err = err.decode('utf-8')
        return proc

    def test_submit_empty(self):
        proc = self.manage_submit()
        self.assertFalse(proc.ok)
        self.assertIn(
            'At least one of --result-name, --results, --metrics is required',
            proc.err)

    def test_submit_single_test(self):
        proc = self.manage_submit(result_name='single-test', result_value='pass')
        self.assertTrue(proc.ok)
        self.assertIn('1 tests', proc.err)
        test = first(self.squad.tests(name='single-test'))
        self.assertEqual('single-test', test.name)
        self.assertEqual('pass', test.status)

    def test_submit_invalid_result_value(self):
        proc = self.manage_submit(result_name='single-invalid-test',
                                  result_value='not-valid')
        self.assertFalse(proc.ok)
        self.assertIn("result-value: invalid choice: 'not-valid'", proc.err)

    def test_submit_results_json(self):
        proc = self.manage_submit(
            results='tests/submit_results/sample_results.json')
        self.assertTrue(proc.ok)
        self.assertIn('2 tests', proc.err)
        test = first(self.squad.tests(name='json-test-1'))
        self.assertEqual('json-test-1', test.name)
        self.assertEqual('pass', test.status)
        test = first(self.squad.tests(name='json-test-2'))
        self.assertEqual('json-test-2', test.name)
        self.assertEqual('fail', test.status)
        self.assertEqual('json-test-2 log', test.log)

    def test_submit_results_malformed_json(self):
        proc = self.manage_submit(
            results='tests/submit_results/sample_results_malformed.json')
        self.assertFalse(proc.ok)
        self.assertIn('Failed parsing file', proc.err)

    def test_submit_results_yaml(self):
        proc = self.manage_submit(
            results='tests/submit_results/sample_results.yaml')
        self.assertTrue(proc.ok)
        self.assertIn('2 tests', proc.err)
        test = first(self.squad.tests(name='yaml-test-1'))
        self.assertEqual('yaml-test-1', test.name)
        self.assertEqual('pass', test.status)
        test = first(self.squad.tests(name='yaml-test-2'))
        self.assertEqual('yaml-test-2', test.name)
        self.assertEqual('fail', test.status)
        self.assertEqual('yaml-test-2 log', test.log)

    def test_submit_results_malformed_yaml(self):
        proc = self.manage_submit(
            results='tests/submit_results/sample_results_malformed.yaml')
        self.assertFalse(proc.ok)
        self.assertIn('Failed parsing file', proc.err)

    def test_submit_single_metric(self):
        proc = self.manage_submit(
            metrics='tests/submit_results/sample_metrics.json')
        self.assertTrue(proc.ok)
        self.assertIn('1 metrics', proc.err)

    def test_submit_everything(self):
        proc = self.manage_submit(
            results='tests/submit_results/sample_results.json',
            metrics='tests/submit_results/sample_metrics.json',
            metadata='tests/submit_results/sample_metadata.json',
            logs='tests/submit_results/sample_log.log')
        self.assertTrue(proc.ok)
        self.assertIn('2 tests, 1 metrics', proc.err)
        testrun = first(self.squad.testruns(job_id='jsonmetadatajobid1'))
        self.assertEqual('jsonmetadatajobid1', testrun.job_id)
        self.assertEqual(2, len(testrun.tests()))
        self.assertEqual(1, len(testrun.metrics()))