class SubmitResultsShortcutTest(TestCase):
    """Tests for the submit_results() shortcut against a local SQUAD server."""

    def setUp(self):
        """Point the SQUAD API client at the local test server."""
        self.squad = Squad()
        SquadApi.configure(
            url=f"http://localhost:{settings.DEFAULT_SQUAD_PORT}",
            token="193cd8bb41ab9217714515954e8724f651ef8601",
        )

    def test_basic(self):
        """A well-formed submission succeeds and its tests become queryable."""
        success = submit_results(
            group_project_slug="my_group/my_project",
            build_version="my_build",
            env_slug="my_env",
            tests={"testa": "pass", "testb": {"result": "pass", "log": "the log"}},
            metrics={"metrica": 42},
            metadata={"job_id": "12345", "a-metadata-field": "value"},
        )

        results = self.squad.tests(name="testa")
        self.assertTrue(len(results) > 0)
        self.assertTrue(success)

    def test_malformed_data(self):
        """A duplicate job_id makes the submission fail and record nothing."""
        # job_id already exists
        success = submit_results(
            group_project_slug="my_group/my_project",
            build_version="my_build",
            env_slug="my_env",
            tests={
                "test-malformed": "pass",
                "testb": {"result": "pass", "log": "the log"},
            },
            metrics={"metrica": 42},
            metadata={"job_id": "12345", "a-metadata-field": "value"},
        )

        results = self.squad.tests(name="test-malformed")
        self.assertTrue(len(results) == 0)
        self.assertFalse(success)
# Example 2
class SubmitResultsShortcutTest(TestCase):
    """Tests for submit_results(), including the error-logging path."""

    def setUp(self):
        """Point the SQUAD API client at the local test server."""
        self.squad = Squad()
        SquadApi.configure(
            url=f"http://localhost:{settings.DEFAULT_SQUAD_PORT}",
            token="193cd8bb41ab9217714515954e8724f651ef8601",
        )

    def test_basic(self):
        """A valid submission returns True and the tests can be read back."""
        success = submit_results(
            group_project_slug="my_group/my_project",
            build_version="my_build",
            env_slug="my_env",
            tests={"testa": "pass", "testb": {"result": "pass", "log": "the log"}},
            metrics={"metrica": 42},
            metadata={"job_id": "12345", "a-metadata-field": "value"},
        )

        self.assertTrue(len(self.squad.tests(name="testa")) > 0)
        self.assertTrue(success)

    def test_malformed_data(self):
        """Submitting a duplicate job_id fails and emits an ERROR log record."""
        # job_id already exists
        with self.assertLogs(logger='squad_client.core.models', level=logging.ERROR) as cm:
            success = submit_results(
                group_project_slug="my_group/my_project",
                build_version="my_build",
                env_slug="my_env",
                tests={
                    "test-malformed": "pass",
                    "testb": {"result": "pass", "log": "the log"},
                },
                metrics={"metrica": 42},
                metadata={"job_id": "12345", "a-metadata-field": "value"},
            )

            self.assertIn(
                'ERROR:squad_client.core.models:Failed to submit results: There is already a test run with job_id 12345',
                cm.output
            )

        self.assertTrue(len(self.squad.tests(name="test-malformed")) == 0)
        self.assertFalse(success)
# Example 3
class SquadSubmitTest(unittest.TestCase):
    """End-to-end test for TestRun.submit_results() against a local server."""

    def setUp(self):
        """Point the SQUAD API client at the local test server."""
        self.squad = Squad()
        SquadApi.configure(
            url='http://localhost:%s' % settings.DEFAULT_SQUAD_PORT,
            token='193cd8bb41ab9217714515954e8724f651ef8601',
        )

    def test_submit(self):
        """Build the full object graph, submit it, and read the test back."""
        group = Group()
        group.slug = 'my_group'

        project = Project()
        project.group = group
        project.slug = 'my_project'

        env = Environment()
        env.project = project
        env.slug = 'my_env'

        build = Build()
        build.project = project
        build.version = 'my_build'

        testrun = TestRun()
        testrun.build = build
        testrun.environment = env

        test = Test()
        test.name = 'test1'
        test.status = PASS
        test.log = 'test1 log'
        testrun.add_test(test)

        metric = Metric()
        metric.name = 'metric1'
        metric.result = 42
        testrun.add_metric(metric)

        testrun.log = 'really long log'
        testrun.metadata = {
            'metadata1': 'value1',
            'metadata2': 'value2',
            'job_id': '123',
        }

        testrun.submit_results()

        # The submitted test must be queryable with its original log/name.
        results = self.squad.tests(name='test1')
        self.assertTrue(len(results) > 0)
        fetched = first(results)
        self.assertEqual(fetched.log, test.log)
        self.assertEqual(fetched.name, test.name)
# Example 4
def main(args):
    """Print the most recent build, older than the given "bad" build, in
    which the given test passed for the same suite and environment.

    *args* is a dict-like object with keys: ``squadapi_url``, ``group``,
    ``project``, ``suite``, ``test``, ``kernel_build`` and ``arch``.
    Output goes to stdout; prints "No good build" when nothing matches.
    """
    # Some configuration, might get parameterized later
    SquadApi.configure(args.get('squadapi_url', None))
    squad = Squad()

    def getid(url):
        # API object references look like ".../<numeric-id>/"; pull out the
        # first run of digits.  Raw string fixes the invalid "\d" escape
        # the original lambda used.
        return int(re.search(r'\d+', url).group())

    group = squad.group(args.get('group', None))
    project = group.project(args.get('project', None))
    bad_suite = project.suite(args.get('suite', None))
    bad_test = args.get('test', None)
    bad_build = project.build(args.get('kernel_build', None))
    bad_env = project.environment(args.get('arch', None))

    print('Looking at the next good build in %s/%s for build %s' %
          (group.slug, project.slug, bad_build.version),
          flush=True)

    # Latest passing run of the test (ordering by descending build id) that
    # predates the bad build, restricted to the same suite and environment.
    tests = squad.tests(build__created_at__lt=bad_build.created_at,
                        suite=bad_suite.id,
                        environment=bad_env.id,
                        metadata__name=bad_test,
                        ordering='-build_id',
                        result=True,
                        count=1)

    if len(tests):
        test = first(tests)
        build = Build(getid(test.build))
        print('%s: https://qa-reports.linaro.org/%s/%s/build/%s' %
              (build.version, group.slug, project.slug, build.version))
    else:
        print('No good build')
# Example 5
class SubmitTuxbuildCommandTest(unittest.TestCase):
    """Tests for the ``submit-tuxbuild`` manage.py subcommand.

    Each test shells out to ``./manage.py`` against a local SQUAD test
    server and inspects stdout/stderr plus the data stored server-side.
    """

    testing_server = "http://localhost:%s" % settings.DEFAULT_SQUAD_PORT
    testing_token = "193cd8bb41ab9217714515954e8724f651ef8601"

    def setUp(self):
        """Point the SQUAD API client at the local test server."""
        self.squad = Squad()
        SquadApi.configure(url=self.testing_server, token=self.testing_token)

    def submit_tuxbuild(self, tuxbuild):
        """Run ``./manage.py submit-tuxbuild`` for *tuxbuild* (a json path).

        Returns the Popen object annotated with ``ok`` (True when the exit
        status was 0) and the decoded ``out``/``err`` streams.
        """
        argv = [
            "./manage.py",
            "--squad-host",
            self.testing_server,
            "--squad-token",
            self.testing_token,
            "submit-tuxbuild",
            "--group",
            "my_group",
            "--project",
            "my_project",
            tuxbuild,
        ]

        env = os.environ.copy()
        env['LOG_LEVEL'] = 'INFO'
        proc = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE, env=env)
        proc.ok = False

        try:
            # A (generous) timeout makes the TimeoutExpired handler below
            # reachable; communicate() without one never raises it.
            out, err = proc.communicate(timeout=600)
            proc.ok = proc.returncode == 0
        except sp.TimeoutExpired as e:
            # Fixed: the original called the undefined ``self.logger`` with a
            # mismatched '%s ... %i' format and a single string argument,
            # which raised TypeError right when the error should be reported.
            import logging
            logging.getLogger(__name__).error(
                'Running "%s" timed out after %i seconds!',
                " ".join(argv), e.timeout)
            proc.kill()
            out, err = proc.communicate()

        proc.out = out.decode("utf-8")
        proc.err = err.decode("utf-8")
        return proc

    def _assert_metadata(self, metadata, expected):
        """Assert *metadata* carries exactly the *expected* keys and values.

        Factored out of the three call sites that previously repeated this
        comparison inline.  Note: deliberately mutates ``metadata.__dict__``
        by dropping "id" before comparing attribute sets, exactly as the
        original inline code did.
        """
        for key, value in expected.items():
            self.assertEqual(value, getattr(metadata, key))

        # Make sure there's no extra attributes in the metadata object
        metadata_attrs = metadata.__dict__
        del metadata_attrs["id"]
        self.assertEqual(sorted(expected.keys()),
                         sorted(metadata_attrs.keys()))

    def test_submit_tuxbuild_build(self):
        """Submit build.json and verify stored metadata, tests and metrics."""
        proc = self.submit_tuxbuild("tests/data/submit/tuxbuild/build.json")
        self.assertTrue(proc.ok, msg=proc.err)
        self.assertTrue(proc.err.count("Submitting 1 tests, 2 metrics") == 3)
        project = self.squad.group("my_group").project("my_project")

        # Check results for next-20201021, which has 2 instances in build.json
        build = project.build("next-20201021")

        base_kconfig = [
            'defconfig',
            'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/lkft.config',
            'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/lkft-crypto.config',
            'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/distro-overrides.config',
            'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/systemd.config',
            'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/virtio.config',
        ]

        # Make sure metadata values match expected values
        urls = [
            'https://builds.tuxbuild.com/%s/' % _id
            for _id in ['B3TECkH4_1X9yKoWOPIhew', 't8NSUfTBZiSPbBVaXLH7kw']
        ]
        configs = [url + "config" for url in urls]
        expected_metadata = {
            'git_repo': "https://gitlab.com/Linaro/lkft/mirrors/next/linux-next",
            'git_ref': None,
            'git_commit': "5302568121ba345f5c22528aefd72d775f25221e",
            'git_sha': "5302568121ba345f5c22528aefd72d775f25221e",
            'git_short_log': '5302568121ba ("Add linux-next specific files for 20201021")',
            'git_describe': "next-20201021",
            'kconfig': [
                base_kconfig + ["CONFIG_ARM64_MODULE_PLTS=y"],
                base_kconfig + ["CONFIG_IGB=y", "CONFIG_UNWINDER_FRAME_POINTER=y"],
            ],
            'git_branch': os.environ.get("KERNEL_BRANCH"),
            'make_kernelversion': "5.9.0",
            'kernel_version': "5.9.0",
            'config': configs,
            'download_url': urls,
            'duration': 541,
        }
        self._assert_metadata(build.metadata, expected_metadata)

        # Check results for v4.4.4, which has 1 instance in build.json
        build = project.build("v4.4.4")
        # Make sure metadata values match expected values
        url = 'https://builds.tuxbuild.com/%s/' % 'B3TECkH4_1X9yKoWOPIhew'
        config = url + "config"
        expected_metadata = {
            'git_repo': "https://gitlab.com/Linaro/lkft/mirrors/next/linux-next",
            'git_ref': None,
            'git_commit': "5302568121ba345f5c22528aefd72d775f25221e",
            'git_sha': "5302568121ba345f5c22528aefd72d775f25221e",
            'git_short_log': '5302568121ba ("Add linux-next specific files for 20201021")',
            'git_describe': "v4.4.4",
            'kconfig': base_kconfig + ["CONFIG_ARM64_MODULE_PLTS=y"],
            'git_branch': os.environ.get("KERNEL_BRANCH"),
            'make_kernelversion': "5.9.0",
            'kernel_version': "5.9.0",
            'config': config,
            'download_url': url,
            'duration': 541,
        }
        self._assert_metadata(build.metadata, expected_metadata)

        for arch in ["arm64", "x86"]:
            environment = (self.squad.group("my_group").project(
                "my_project").environment(arch))
            self.assertIsNotNone(environment,
                                 "environment %s does not exist" % (arch))

        suite = self.squad.group("my_group").project("my_project").suite(
            "build")
        self.assertIsNotNone(suite)

        test = first(self.squad.tests(name="gcc-9-defconfig-b9979cfa"))
        self.assertEqual("build/gcc-9-defconfig-b9979cfa", test.name)
        self.assertEqual("pass", test.status)

        test = first(self.squad.tests(name="gcc-9-defconfig-5b09568e"))
        self.assertEqual("build/gcc-9-defconfig-5b09568e", test.name)
        self.assertEqual("fail", test.status)

        metric = first(
            self.squad.metrics(name="gcc-9-defconfig-b9979cfa-warnings"))
        self.assertEqual("build/gcc-9-defconfig-b9979cfa-warnings",
                         metric.name)
        self.assertEqual(1, metric.result)

        metric = first(
            self.squad.metrics(name="gcc-9-defconfig-5b09568e-warnings"))
        self.assertEqual("build/gcc-9-defconfig-5b09568e-warnings",
                         metric.name)
        self.assertEqual(2, metric.result)

        metric = first(
            self.squad.metrics(name="gcc-9-defconfig-5b09568e-duration"))
        self.assertEqual("build/gcc-9-defconfig-5b09568e-duration",
                         metric.name)
        self.assertEqual(541, metric.result)

    def test_submit_tuxbuild_buildset(self):
        """Submit buildset.json and verify stored metadata, tests and metrics."""
        # NOTE(review): this leaks into the process environment (no cleanup);
        # the expected git_branch below reads it back, so the test stays
        # self-consistent either way.
        os.environ["KERNEL_BRANCH"] = "master"
        proc = self.submit_tuxbuild("tests/data/submit/tuxbuild/buildset.json")
        self.assertTrue(proc.ok, msg=proc.out)
        self.assertTrue(proc.err.count("Submitting 1 tests, 2 metrics") == 3)

        build = self.squad.group("my_group").project("my_project").build(
            "next-20201030")

        # Make sure metadata values match expected values
        urls = [
            'https://builds.tuxbuild.com/%s/' % _id for _id in [
                '9NeOU1kd65bhMrL4eyI2yA', 'cjLreGasHSZj3OctZlNdpw',
                'x5Mi9j6xZItTGqVtOKmnVw'
            ]
        ]
        configs = [url + "config" for url in urls]
        expected_metadata = {
            'git_repo': "https://gitlab.com/Linaro/lkft/mirrors/next/linux-next",
            'git_ref': None,
            'git_commit': "4e78c578cb987725eef1cec7d11b6437109e9a49",
            'git_sha': "4e78c578cb987725eef1cec7d11b6437109e9a49",
            'git_short_log': '4e78c578cb98 ("Add linux-next specific files for 20201030")',
            'git_describe': "next-20201030",
            'kconfig': [['allnoconfig'], ['tinyconfig'], ['x86_64_defconfig']],
            'git_branch': os.environ.get("KERNEL_BRANCH"),
            'make_kernelversion': "5.10.0-rc1",
            'kernel_version': "5.10.0-rc1",
            'config': configs,
            'download_url': urls,
            'duration': 541,
        }
        self._assert_metadata(build.metadata, expected_metadata)

        environment = (self.squad.group("my_group").project(
            "my_project").environment("x86"))
        self.assertIsNotNone(environment)

        suite = self.squad.group("my_group").project("my_project").suite(
            "build")
        self.assertIsNotNone(suite)

        test = first(self.squad.tests(name="gcc-8-allnoconfig"))
        self.assertEqual("build/gcc-8-allnoconfig", test.name)
        self.assertEqual("pass", test.status)

        test = first(self.squad.tests(name="gcc-8-tinyconfig"))
        self.assertEqual("build/gcc-8-tinyconfig", test.name)
        self.assertEqual("pass", test.status)

        test = first(self.squad.tests(name="gcc-8-x86_64_defconfig"))
        self.assertEqual("build/gcc-8-x86_64_defconfig", test.name)
        self.assertEqual("pass", test.status)

        metric = first(self.squad.metrics(name="gcc-8-allnoconfig-warnings"))
        self.assertEqual("build/gcc-8-allnoconfig-warnings", metric.name)
        self.assertEqual(0, metric.result)

        metric = first(self.squad.metrics(name="gcc-8-tinyconfig-warnings"))
        self.assertEqual("build/gcc-8-tinyconfig-warnings", metric.name)
        self.assertEqual(0, metric.result)

        metric = first(
            self.squad.metrics(name="gcc-8-x86_64_defconfig-warnings"))
        self.assertEqual("build/gcc-8-x86_64_defconfig-warnings", metric.name)
        self.assertEqual(0, metric.result)

        metric = first(
            self.squad.metrics(name="gcc-8-x86_64_defconfig-duration"))
        self.assertEqual("build/gcc-8-x86_64_defconfig-duration", metric.name)
        self.assertEqual(541, metric.result)

    def test_submit_tuxbuild_empty(self):
        """An empty path fails with a file-not-found error."""
        proc = self.submit_tuxbuild("")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn("No such file or directory: ''", proc.err)

    def test_submit_tuxbuild_malformed(self):
        """Invalid JSON fails with a load error."""
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/malformed.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn("Failed to load json", proc.err)

    def test_submit_tuxbuild_missing(self):
        """A nonexistent file fails with a file-not-found error."""
        proc = self.submit_tuxbuild("tests/data/submit/tuxbuild/missing.json")
        self.assertFalse(proc.ok)
        self.assertIn(
            "No such file or directory: 'tests/data/submit/tuxbuild/missing.json'",
            proc.err,
        )

    def test_submit_tuxbuild_empty_build_status(self):
        """An empty build_status is rejected by schema validation."""
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/empty_build_status.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: '' is not one of ['fail', 'pass']",
            proc.err,
        )
        self.assertIn(
            "Failed validating 'enum' in schema['items'][0]['properties']['build_status']",
            proc.err)

    def test_submit_tuxbuild_malformed_build_status(self):
        """A non-string build_status is rejected by schema validation."""
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/malformed_build_status.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: {'build': 'pass'} is not of type 'string'",
            proc.err,
        )
        self.assertIn(
            "Failed validating 'type' in schema['items'][0]['properties']['build_status']",
            proc.err)

    def test_submit_tuxbuild_missing_build_status(self):
        """A missing build_status is rejected by schema validation."""
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/missing_build_status.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: 'build_status' is a required property",
            proc.err,
        )

    def test_submit_tuxbuild_empty_kconfig(self):
        """An empty kconfig list is rejected by schema validation."""
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/empty_kconfig.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn("Failed to validate tuxbuild data: [] is too short",
                      proc.err)
        self.assertIn(
            "Failed validating 'minItems' in schema['items'][0]['properties']['kconfig']",
            proc.err)

    def test_submit_tuxbuild_malformed_kconfig(self):
        """A non-string kconfig entry is rejected by schema validation."""
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/malformed_kconfig.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: {'CONFIG_ARM64_MODULE_PLTS': 'y'} is not of type 'string'",
            proc.err,
        )
        self.assertIn(
            "Failed validating 'type' in schema['items'][0]['properties']['kconfig']['items'][0]",
            proc.err,
        )

    def test_submit_tuxbuild_missing_kconfig(self):
        """A missing kconfig is rejected by schema validation."""
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/missing_kconfig.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: 'kconfig' is a required property",
            proc.err,
        )
# Example 6
class SubmitTuxbuildCommandIntegrationTest(unittest.TestCase):
    """Integration tests for the ``submit-tuxbuild`` manage.py subcommand.

    NOTE(review): this class was damaged in extraction. The line below fuses
    a redacted ``testing_server`` value with a ``@unittest.mock.patch.dict``
    decorator, and the ``setUp``/``submit_tuxbuild`` helpers as well as the
    ``build_dir``/``buildset_dir``/``root_dir`` attributes the tests rely on
    are not visible here. Restore the original from version control before
    changing any code in this class.
    """

    testing_server = "http://*****:*****@unittest.mock.patch.dict(os.environ, {'KERNEL_BRANCH': 'master'})
    def test_submit_tuxbuild_build(self):
        """Submit a single tuxbuild build.json and verify the stored data."""
        # submit_tuxbuild / build_dir presumably come from the elided part of
        # this class -- TODO confirm against the original file.
        proc = self.submit_tuxbuild(os.path.join(self.build_dir, "build.json"))
        self.assertTrue(proc.ok, msg=proc.err)
        self.assertTrue(proc.err.count('Submitting 1 tests, 2 metrics') == 1)
        project = self.squad.group('my_group').project('my_project')

        build = project.build('next-20220217')
        self.assertIsNotNone(build)

        testrun = first(build.testruns())
        self.assertIsNotNone(testrun)

        base_kconfig = [
            'defconfig',
            'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/lkft.config',
            'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/lkft-crypto.config',
            'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/distro-overrides.config',
            'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/systemd.config',
            'https://raw.githubusercontent.com/Linaro/meta-lkft/sumo/recipes-kernel/linux/files/virtio.config',
        ]

        # Make sure metadata values match expected values
        expected_metadata = {
            'git_repo': 'https://gitlab.com/Linaro/lkft/mirrors/next/linux-next',
            'git_ref': 'master',
            'git_sha': '3c30cf91b5ecc7272b3d2942ae0505dd8320b81c',
            'git_short_log': '3c30cf91b5ec ("Add linux-next specific files for 20220217")',
            'git_describe': 'next-20220217',
            'kconfig': base_kconfig + ['CONFIG_IGB=y', 'CONFIG_UNWINDER_FRAME_POINTER=y', 'CONFIG_SYN_COOKIES=y'],
            'kernel_version': '5.17.0-rc4',
            'config': 'https://builds.tuxbuild.com/25EZVbc7oK6aCJfKV7V3dtFOMq5/config',
            'download_url': 'https://builds.tuxbuild.com/25EZVbc7oK6aCJfKV7V3dtFOMq5/',
            'duration': 422,
            'toolchain': 'gcc-11',
        }

        for k, v in expected_metadata.items():
            self.assertEqual(getattr(testrun.metadata, k), v, msg=k)

        environment = self.squad.group('my_group').project('my_project').environment('x86_64')
        self.assertIsNotNone(environment)

        suite = self.squad.group('my_group').project('my_project').suite('build')
        self.assertIsNotNone(suite)

        test = first(self.squad.tests(name='gcc-11-lkftconfig'))
        self.assertEqual('build/gcc-11-lkftconfig', test.name)
        self.assertEqual('pass', test.status)

        metric = first(self.squad.metrics(name='gcc-11-lkftconfig-warnings'))
        self.assertEqual('build/gcc-11-lkftconfig-warnings', metric.name)
        self.assertEqual(1, metric.result)

        metric = first(self.squad.metrics(name='gcc-11-lkftconfig-duration'))
        self.assertEqual('build/gcc-11-lkftconfig-duration', metric.name)
        self.assertEqual(422, metric.result)

        # Clean up server-side so subsequent tests start from a fresh build.
        build.delete()

    @unittest.mock.patch.dict(os.environ, {'KERNEL_BRANCH': 'master'})
    def test_submit_tuxbuild_buildset(self):
        """Submit a buildset (three builds) and verify each testrun's data."""
        proc = self.submit_tuxbuild(os.path.join(self.buildset_dir, "build.json"))
        self.assertTrue(proc.ok, msg=proc.out)
        # One "Submitting ..." line per build in the set.
        self.assertTrue(proc.err.count('Submitting 1 tests, 2 metrics') == 3)
        project = self.squad.group('my_group').project('my_project')

        build = project.build('next-20220217')
        self.assertIsNotNone(build)

        testruns = build.testruns()
        self.assertIsNotNone(testruns)

        # Metadata shared by all three builds in the set.
        base_metadata = {
            'git_repo': 'https://gitlab.com/Linaro/lkft/mirrors/next/linux-next',
            'git_ref': 'master',
            'git_sha': '3c30cf91b5ecc7272b3d2942ae0505dd8320b81c',
            'git_short_log': '3c30cf91b5ec ("Add linux-next specific files for 20220217")',
            'git_describe': 'next-20220217',
            'kernel_version': '5.17.0-rc4',
            'toolchain': 'gcc-8',
        }

        # Per-build overrides, in the order the testruns are expected back.
        expected_metadata = [
            dict(base_metadata, **{
                'config': 'https://builds.tuxbuild.com/25EZULlT5YOdXc5Hix07IGcbFtA/config',
                'download_url': 'https://builds.tuxbuild.com/25EZULlT5YOdXc5Hix07IGcbFtA/',
                'kconfig': ['allnoconfig'],
                'duration': 324,
            }),
            dict(base_metadata, **{
                'config': 'https://builds.tuxbuild.com/25EZUJH3rXb2Ev1z5QUnTc6UKMU/config',
                'download_url': 'https://builds.tuxbuild.com/25EZUJH3rXb2Ev1z5QUnTc6UKMU/',
                'kconfig': ['tinyconfig'],
                'duration': 350,
            }),
            dict(base_metadata, **{
                'config': 'https://builds.tuxbuild.com/25EZUJt40js6qte4xtKeLTnajQd/config',
                'download_url': 'https://builds.tuxbuild.com/25EZUJt40js6qte4xtKeLTnajQd/',
                'kconfig': ['x86_64_defconfig'],
                'duration': 460,
            })
        ]

        # NOTE(review): assumes testruns.values() iterates in submission
        # order, matching expected_metadata -- confirm against the API.
        for tr in testruns.values():
            metadata = expected_metadata.pop(0)
            for k, v in metadata.items():
                self.assertEqual(getattr(tr.metadata, k), v, msg=k)

        environment = project.environment('x86_64')
        self.assertIsNotNone(environment)

        suite = project.suite('build')
        self.assertIsNotNone(suite)

        test = first(self.squad.tests(name='gcc-8-allnoconfig'))
        self.assertEqual('build/gcc-8-allnoconfig', test.name)
        self.assertEqual('pass', test.status)

        test = first(self.squad.tests(name='gcc-8-tinyconfig'))
        self.assertEqual('build/gcc-8-tinyconfig', test.name)
        self.assertEqual('pass', test.status)

        test = first(self.squad.tests(name='gcc-8-x86_64_defconfig'))
        self.assertEqual('build/gcc-8-x86_64_defconfig', test.name)
        self.assertEqual('pass', test.status)

        metric = first(self.squad.metrics(name='gcc-8-allnoconfig-warnings'))
        self.assertEqual('build/gcc-8-allnoconfig-warnings', metric.name)
        self.assertEqual(0, metric.result)

        metric = first(self.squad.metrics(name='gcc-8-tinyconfig-warnings'))
        self.assertEqual('build/gcc-8-tinyconfig-warnings', metric.name)
        self.assertEqual(1, metric.result)

        metric = first(self.squad.metrics(name='gcc-8-x86_64_defconfig-warnings'))
        self.assertEqual('build/gcc-8-x86_64_defconfig-warnings', metric.name)
        self.assertEqual(0, metric.result)

        metric = first(self.squad.metrics(name='gcc-8-allnoconfig-duration'))
        self.assertEqual('build/gcc-8-allnoconfig-duration', metric.name)
        self.assertEqual(324, metric.result)

        metric = first(self.squad.metrics(name='gcc-8-tinyconfig-duration'))
        self.assertEqual('build/gcc-8-tinyconfig-duration', metric.name)
        self.assertEqual(350, metric.result)

        metric = first(self.squad.metrics(name='gcc-8-x86_64_defconfig-duration'))
        self.assertEqual('build/gcc-8-x86_64_defconfig-duration', metric.name)
        self.assertEqual(460, metric.result)

        # Clean up server-side so subsequent tests start from a fresh build.
        build.delete()

    def test_submit_tuxbuild_empty(self):
        """An empty JSON file fails with a load error."""
        proc = self.submit_tuxbuild(os.path.join(self.root_dir, 'empty.json'))
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn('Failed to load build json', proc.err)

    def test_submit_tuxbuild_missing(self):
        """A nonexistent JSON file fails with a file-not-found error."""
        proc = self.submit_tuxbuild(os.path.join(self.root_dir, 'missing.json'))
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn('No such file or directory', proc.err)
# Example 7
class SquadTest(unittest.TestCase):
    """Smoke tests for the Squad() top-level listing endpoints."""

    def setUp(self):
        self.squad = Squad()

    def _assert_fetched(self, collection):
        """Assert a listing endpoint actually returned a sized collection.

        The original assertions read ``assertTrue(True, len(...))``, which
        can never fail: the first argument is the constant True and the
        second is only the failure message.  This performs a real check
        (the endpoint responded and returned something ``len()``-able)
        without assuming the test server has any particular data.
        """
        self.assertIsNotNone(collection)
        self.assertGreaterEqual(len(collection), 0)

    def test_groups(self):
        self._assert_fetched(self.squad.groups())

    def test_not_found_groups(self):
        # A filter that matches nothing must yield an empty result set.
        groups = self.squad.groups(name__startswith='no group with this name')
        self.assertEqual(0, len(groups))

    def test_groups_with_count(self):
        # count=ALL returns everything; count=1 caps the result size.
        all_groups = self.squad.groups(count=ALL)
        self.assertEqual(2, len(all_groups))

        one_groups = self.squad.groups(count=1)
        self.assertEqual(1, len(one_groups))

    def test_not_found_group(self):
        not_found_group = self.squad.group('this-group-does-not-really-exist')
        self.assertEqual(None, not_found_group)

    def test_group(self):
        group = self.squad.group('my_group')
        self.assertTrue(group is not None)

    def test_projects(self):
        self._assert_fetched(self.squad.projects())

    def test_builds(self):
        self._assert_fetched(self.squad.builds())

    def test_testjobs(self):
        self._assert_fetched(self.squad.testjobs())

    def test_testruns(self):
        self._assert_fetched(self.squad.testruns())

    def test_tests(self):
        self._assert_fetched(self.squad.tests())

    def test_suites(self):
        self._assert_fetched(self.squad.suites())

    def test_environments(self):
        self._assert_fetched(self.squad.environments())

    def test_backends(self):
        self._assert_fetched(self.squad.backends())

    def test_emailtemplates(self):
        self._assert_fetched(self.squad.emailtemplates())

    def test_knownissues(self):
        self._assert_fetched(self.squad.knownissues())

    def test_suitemetadata(self):
        self._assert_fetched(self.squad.suitemetadata())

    def test_annotations(self):
        self._assert_fetched(self.squad.annotations())

    def test_metricthresholds(self):
        self._assert_fetched(self.squad.metricthresholds())

    def test_reports(self):
        self._assert_fetched(self.squad.reports())
# Example 8
class SubmitCommandTest(unittest.TestCase):
    """End-to-end tests for the ``submit`` management command.

    Each test shells out to ``./manage.py submit`` against a local SQUAD
    instance, then inspects the captured output and the API state.
    """

    testing_server = 'http://localhost:%s' % settings.DEFAULT_SQUAD_PORT
    testing_token = '193cd8bb41ab9217714515954e8724f651ef8601'
    # Upper bound (seconds) for a single submit invocation.
    submit_timeout = 60

    def setUp(self):
        self.squad = Squad()
        SquadApi.configure(url=self.testing_server, token=self.testing_token)

    def manage_submit(self,
                      results=None,
                      result_name=None,
                      result_value=None,
                      metrics=None,
                      metadata=None,
                      attachments=None,
                      logs=None,
                      environment=None):
        """Invoke ``./manage.py submit`` with the given options.

        Returns the ``Popen`` object augmented with ``ok`` (True when the
        command exited with status 0) and the decoded ``out``/``err``
        streams.
        """
        argv = [
            './manage.py', '--squad-host', self.testing_server,
            '--squad-token', self.testing_token, 'submit', '--group',
            'my_group', '--project', 'my_project', '--build', 'my_build6',
            '--environment', 'test_submit_env'
        ]

        if logs:
            argv += ['--logs', logs]
        if results:
            argv += ['--results', results]
        if metrics:
            argv += ['--metrics', metrics]
        if metadata:
            argv += ['--metadata', metadata]
        if attachments:
            argv += ['--attachments', attachments]
        if result_name:
            argv += ['--result-name', result_name]
        if result_value:
            argv += ['--result-value', result_value]

        proc = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
        proc.ok = False

        try:
            # Fix: without a timeout, communicate() blocks forever and the
            # TimeoutExpired handler below was unreachable.
            out, err = proc.communicate(timeout=self.submit_timeout)
            proc.ok = (proc.returncode == 0)
        except sp.TimeoutExpired:
            # Fix: the original called self.logger (undefined on a plain
            # TestCase) with a two-placeholder format string and a single
            # argument, which would itself raise TypeError. Log lazily via
            # the stdlib logging module instead.
            import logging
            logging.error('Running "%s" timed out after %i seconds!',
                          ' '.join(argv), self.submit_timeout)
            proc.kill()
            out, err = proc.communicate()

        proc.out = out.decode('utf-8')
        proc.err = err.decode('utf-8')
        return proc

    def test_submit_empty(self):
        """Submitting with no payload options must be rejected."""
        proc = self.manage_submit()
        self.assertFalse(proc.ok)
        self.assertIn(
            'At least one of --result-name, --results, --metrics is required',
            proc.err)

    def test_submit_single_test(self):
        """A single test submitted via --result-name/--result-value."""
        proc = self.manage_submit(result_name='single-test',
                                  result_value='pass')
        self.assertTrue(proc.ok)
        self.assertIn('1 tests', proc.err)

        test = first(self.squad.tests(name='single-test'))
        self.assertEqual('single-test', test.name)
        self.assertEqual('pass', test.status)

    def test_submit_invalid_result_value(self):
        """An unknown --result-value is rejected by argparse."""
        proc = self.manage_submit(result_name='single-invalid-test',
                                  result_value='not-valid')
        self.assertFalse(proc.ok)
        self.assertIn("result-value: invalid choice: 'not-valid'", proc.err)

    def test_submit_results_json(self):
        """Results from a JSON file are created on the server."""
        proc = self.manage_submit(
            results='tests/submit_results/sample_results.json')
        self.assertTrue(proc.ok)
        self.assertIn('2 tests', proc.err)

        test = first(self.squad.tests(name='json-test-1'))
        self.assertEqual('json-test-1', test.name)
        self.assertEqual('pass', test.status)

        test = first(self.squad.tests(name='json-test-2'))
        self.assertEqual('json-test-2', test.name)
        self.assertEqual('fail', test.status)
        self.assertEqual('json-test-2 log', test.log)

    def test_submit_results_malformed_json(self):
        """A malformed JSON results file is rejected with a parse error."""
        proc = self.manage_submit(
            results='tests/submit_results/sample_results_malformed.json')
        self.assertFalse(proc.ok)
        self.assertIn('Failed parsing file', proc.err)

    def test_submit_results_yaml(self):
        """Results from a YAML file are created on the server."""
        proc = self.manage_submit(
            results='tests/submit_results/sample_results.yaml')
        self.assertTrue(proc.ok)
        self.assertIn('2 tests', proc.err)

        test = first(self.squad.tests(name='yaml-test-1'))
        self.assertEqual('yaml-test-1', test.name)
        self.assertEqual('pass', test.status)

        test = first(self.squad.tests(name='yaml-test-2'))
        self.assertEqual('yaml-test-2', test.name)
        self.assertEqual('fail', test.status)
        self.assertEqual('yaml-test-2 log', test.log)

    def test_submit_results_malformed_yaml(self):
        """A malformed YAML results file is rejected with a parse error."""
        proc = self.manage_submit(
            results='tests/submit_results/sample_results_malformed.yaml')
        self.assertFalse(proc.ok)
        self.assertIn('Failed parsing file', proc.err)

    def test_submit_single_metric(self):
        """Metrics from a JSON file are accepted."""
        proc = self.manage_submit(
            metrics='tests/submit_results/sample_metrics.json')
        self.assertTrue(proc.ok)
        self.assertIn('1 metrics', proc.err)

    def test_submit_everything(self):
        """Results, metrics, metadata and logs submitted in one call."""
        proc = self.manage_submit(
            results='tests/submit_results/sample_results.json',
            metrics='tests/submit_results/sample_metrics.json',
            metadata='tests/submit_results/sample_metadata.json',
            logs='tests/submit_results/sample_log.log')
        self.assertTrue(proc.ok)
        self.assertIn('2 tests, 1 metrics', proc.err)

        testrun = first(self.squad.testruns(job_id='jsonmetadatajobid1'))
        self.assertEqual('jsonmetadatajobid1', testrun.job_id)

        self.assertEqual(2, len(testrun.tests()))
        self.assertEqual(1, len(testrun.metrics()))
# Beispiel #9 (stray separator from the page this code was collected from;
# kept as a comment so the file stays valid Python)
# 0
class SubmitTuxbuildCommandTest(unittest.TestCase):
    """End-to-end tests for the ``submit-tuxbuild`` management command.

    Each test shells out to ``./manage.py submit-tuxbuild`` against a local
    SQUAD instance, then inspects the captured output and the API state.
    """

    testing_server = "http://localhost:%s" % settings.DEFAULT_SQUAD_PORT
    testing_token = "193cd8bb41ab9217714515954e8724f651ef8601"
    # Upper bound (seconds) for a single submit-tuxbuild invocation.
    submit_timeout = 60

    def setUp(self):
        self.squad = Squad()
        SquadApi.configure(url=self.testing_server, token=self.testing_token)

    def submit_tuxbuild(self, tuxbuild):
        """Invoke ``./manage.py submit-tuxbuild`` on *tuxbuild*.

        Returns the ``Popen`` object augmented with ``ok`` (True when the
        command exited with status 0) and the decoded ``out``/``err``
        streams.
        """
        argv = [
            "./manage.py",
            "--squad-host",
            self.testing_server,
            "--squad-token",
            self.testing_token,
            "submit-tuxbuild",
            "--group",
            "my_group",
            "--project",
            "my_project",
            tuxbuild,
        ]

        proc = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
        proc.ok = False

        try:
            # Fix: without a timeout, communicate() blocks forever and the
            # TimeoutExpired handler below was unreachable.
            out, err = proc.communicate(timeout=self.submit_timeout)
            proc.ok = proc.returncode == 0
        except sp.TimeoutExpired:
            # Fix: the original called self.logger (undefined on a plain
            # TestCase) with a two-placeholder format string and a single
            # argument, which would itself raise TypeError. Log lazily via
            # the stdlib logging module instead.
            import logging
            logging.error('Running "%s" timed out after %i seconds!',
                          " ".join(argv), self.submit_timeout)
            proc.kill()
            out, err = proc.communicate()

        proc.out = out.decode("utf-8")
        proc.err = err.decode("utf-8")
        return proc

    def test_submit_tuxbuild_build(self):
        """A single-build tuxbuild file creates builds, envs and tests."""
        proc = self.submit_tuxbuild("tests/data/submit/tuxbuild/build.json")
        self.assertTrue(proc.ok, msg=proc.err)
        self.assertTrue(proc.err.count("Submitting 1 tests") == 3)

        build = (self.squad.group("my_group").project("my_project").build(
            "next-20201021"))
        self.assertIsNotNone(build)

        build = (
            self.squad.group("my_group").project("my_project").build("v4.4.4"))
        self.assertIsNotNone(build)

        for arch in ["arm64", "x86"]:
            environment = (self.squad.group("my_group").project(
                "my_project").environment(arch))
            self.assertIsNotNone(environment,
                                 "environment %s does not exist" % (arch))

        suite = self.squad.group("my_group").project("my_project").suite(
            "build")
        self.assertIsNotNone(suite)

        test = first(self.squad.tests(name="gcc-9-defconfig-b9979cfa"))
        self.assertEqual("build/gcc-9-defconfig-b9979cfa", test.name)
        self.assertEqual("pass", test.status)

        test = first(self.squad.tests(name="gcc-9-defconfig-5b09568e"))
        self.assertEqual("build/gcc-9-defconfig-5b09568e", test.name)
        self.assertEqual("fail", test.status)

    def test_submit_tuxbuild_buildset(self):
        """A buildset tuxbuild file creates one build with three tests."""
        proc = self.submit_tuxbuild("tests/data/submit/tuxbuild/buildset.json")
        self.assertTrue(proc.ok, msg=proc.out)
        self.assertIn("Submitting 3 tests", proc.err)

        build = (self.squad.group("my_group").project("my_project").build(
            "next-20201030"))
        self.assertIsNotNone(build)

        environment = (self.squad.group("my_group").project(
            "my_project").environment("x86"))
        self.assertIsNotNone(environment)

        suite = self.squad.group("my_group").project("my_project").suite(
            "build")
        self.assertIsNotNone(suite)

        test = first(self.squad.tests(name="gcc-8-allnoconfig"))
        self.assertEqual("build/gcc-8-allnoconfig", test.name)
        self.assertEqual("pass", test.status)

        test = first(self.squad.tests(name="gcc-8-tinyconfig"))
        self.assertEqual("build/gcc-8-tinyconfig", test.name)
        self.assertEqual("pass", test.status)

        test = first(self.squad.tests(name="gcc-8-x86_64_defconfig"))
        self.assertEqual("build/gcc-8-x86_64_defconfig", test.name)
        self.assertEqual("pass", test.status)

    def test_submit_tuxbuild_empty(self):
        """An empty path argument is reported as a missing file."""
        proc = self.submit_tuxbuild("")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn("No such file or directory: ''", proc.err)

    def test_submit_tuxbuild_malformed(self):
        """A malformed tuxbuild JSON file is rejected."""
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/malformed.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn("Failed to load json", proc.err)

    def test_submit_tuxbuild_missing(self):
        """A nonexistent tuxbuild file is reported."""
        proc = self.submit_tuxbuild("tests/data/submit/tuxbuild/missing.json")
        self.assertFalse(proc.ok)
        self.assertIn(
            "No such file or directory: 'tests/data/submit/tuxbuild/missing.json'",
            proc.err,
        )

    def test_submit_tuxbuild_empty_build_status(self):
        """An empty build_status fails schema validation."""
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/empty_build_status.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: '' is not one of ['fail', 'pass']",
            proc.err,
        )
        self.assertIn(
            "Failed validating 'enum' in schema['items'][0]['properties']['build_status']",
            proc.err)

    def test_submit_tuxbuild_malformed_build_status(self):
        """A non-string build_status fails schema validation."""
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/malformed_build_status.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: {'build': 'pass'} is not of type 'string'",
            proc.err,
        )
        self.assertIn(
            "Failed validating 'type' in schema['items'][0]['properties']['build_status']",
            proc.err)

    def test_submit_tuxbuild_missing_build_status(self):
        """A missing build_status key fails schema validation."""
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/missing_build_status.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: 'build_status' is a required property",
            proc.err,
        )

    def test_submit_tuxbuild_empty_kconfig(self):
        """An empty kconfig list fails schema validation."""
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/empty_kconfig.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn("Failed to validate tuxbuild data: [] is too short",
                      proc.err)
        self.assertIn(
            "Failed validating 'minItems' in schema['items'][0]['properties']['kconfig']",
            proc.err)

    def test_submit_tuxbuild_malformed_kconfig(self):
        """A non-string first kconfig entry fails schema validation."""
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/malformed_kconfig.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: {'CONFIG_ARM64_MODULE_PLTS': 'y'} is not of type 'string'",
            proc.err,
        )
        self.assertIn(
            "Failed validating 'type' in schema['items'][0]['properties']['kconfig']['items'][0]",
            proc.err,
        )

    def test_submit_tuxbuild_missing_kconfig(self):
        """A missing kconfig key fails schema validation."""
        proc = self.submit_tuxbuild(
            "tests/data/submit/tuxbuild/missing_kconfig.json")
        self.assertFalse(proc.ok, msg=proc.err)
        self.assertIn(
            "Failed to validate tuxbuild data: 'kconfig' is a required property",
            proc.err,
        )
# Beispiel #10 (stray separator from the page this code was collected from;
# kept as a comment so the file stays valid Python)
# 0
class SubmitCommandTest(unittest.TestCase):

    testing_server = 'http://localhost:%s' % settings.DEFAULT_SQUAD_PORT
    testing_token = '193cd8bb41ab9217714515954e8724f651ef8601'

    def setUp(self):
        """Create a client and point the API layer at the test server."""
        self.squad = Squad()
        SquadApi.configure(
            url=self.testing_server,
            token=self.testing_token,
        )

    def manage_submit(self,
                      results=None,
                      results_layout=None,
                      result_name=None,
                      result_value=None,
                      metrics=None,
                      metadata=None,
                      attachments=None,
                      logs=None,
                      environment=None):
        """Invoke ``./manage.py submit`` with the given options.

        Returns the ``Popen`` object augmented with ``ok`` (True when the
        command exited with status 0) and the decoded ``out``/``err``
        streams.
        """
        timeout = 60  # seconds; upper bound for one submit invocation
        argv = [
            './manage.py', '--squad-host', self.testing_server,
            '--squad-token', self.testing_token, 'submit', '--group',
            'my_group', '--project', 'my_project', '--build', 'my_build6',
            '--environment', 'test_submit_env'
        ]

        if logs:
            argv += ['--logs', logs]
        if results:
            argv += ['--results', results]
        if results_layout:
            argv += ['--results-layout', results_layout]
        if metrics:
            argv += ['--metrics', metrics]
        if metadata:
            argv += ['--metadata', metadata]
        if attachments:
            argv += ['--attachments', attachments]
        if result_name:
            argv += ['--result-name', result_name]
        if result_value:
            argv += ['--result-value', result_value]

        proc = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
        proc.ok = False

        try:
            # Fix: without a timeout, communicate() blocks forever and the
            # TimeoutExpired handler below was unreachable.
            out, err = proc.communicate(timeout=timeout)
            proc.ok = (proc.returncode == 0)
        except sp.TimeoutExpired:
            # Fix: the original called self.logger (undefined on a plain
            # TestCase) with a two-placeholder format string and a single
            # argument, which would itself raise TypeError. Log lazily via
            # the stdlib logging module instead.
            import logging
            logging.error('Running "%s" timed out after %i seconds!',
                          ' '.join(argv), timeout)
            proc.kill()
            out, err = proc.communicate()

        proc.out = out.decode('utf-8')
        proc.err = err.decode('utf-8')
        return proc

    def test_submit_empty(self):
        """Submitting with no payload options must be rejected."""
        result = self.manage_submit()
        self.assertFalse(result.ok)
        expected = 'At least one of --result-name, --results, --metrics is required'
        self.assertIn(expected, result.err)

    def test_submit_single_test(self):
        """A single test submitted via --result-name/--result-value."""
        result = self.manage_submit(result_value='pass',
                                    result_name='single-test')
        self.assertTrue(result.ok)
        self.assertIn('1 tests', result.err)

        submitted = first(self.squad.tests(name='single-test'))
        self.assertEqual('single-test', submitted.name)
        self.assertEqual('pass', submitted.status)

    def test_submit_invalid_result_value(self):
        """An unknown --result-value is rejected by argparse."""
        result = self.manage_submit(result_value='not-valid',
                                    result_name='single-invalid-test')
        self.assertFalse(result.ok)
        self.assertIn("result-value: invalid choice: 'not-valid'", result.err)

    def test_submit_results_json(self):
        """Results from a JSON file are created on the server."""
        result = self.manage_submit(
            results='tests/submit_results/sample_results.json')
        self.assertTrue(result.ok)
        self.assertIn('2 tests', result.err)

        # (name, expected status, expected log or None)
        for name, status, log in (('json-test-1', 'pass', None),
                                  ('json-test-2', 'fail', 'json-test-2 log')):
            submitted = first(self.squad.tests(name=name))
            self.assertEqual(name, submitted.name)
            self.assertEqual(status, submitted.status)
            if log is not None:
                self.assertEqual(log, submitted.log)

    def test_submit_results_malformed_json(self):
        """A malformed JSON results file is rejected with a parse error."""
        result = self.manage_submit(
            results='tests/submit_results/sample_results_malformed.json')
        self.assertFalse(result.ok)
        self.assertIn('Failed parsing file', result.err)

    def test_submit_results_tuxbuild_json(self):
        """Tuxbuild-layout results are expanded into build/* tests."""
        result = self.manage_submit(
            results_layout='tuxbuild_json',
            results='tests/data/submit/tuxbuild/build.json')
        self.assertTrue(result.ok, msg=result.err)
        self.assertIn("Submitting 2 tests", result.err)

        for name, status in (("gcc-9-defconfig-b9979cfa", "pass"),
                             ("gcc-9-defconfig-5b09568e", "fail")):
            submitted = first(self.squad.tests(name=name))
            self.assertEqual("build/%s" % name, submitted.name)
            self.assertEqual(status, submitted.status)

    def test_submit_results_tuxbuild_buildset_json(self):
        """Buildset tuxbuild results are expanded into build/* tests."""
        proc = self.manage_submit(
            results='tests/data/submit/tuxbuild/buildset.json',
            results_layout='tuxbuild_json')
        # Fix: the command's exit status was never checked, unlike every
        # sibling test; a failed submit would previously go unnoticed here.
        self.assertTrue(proc.ok, msg=proc.err)
        self.assertIn("Submitting 3 tests", proc.err)

        test = first(self.squad.tests(name="gcc-8-allnoconfig"))
        self.assertEqual("build/gcc-8-allnoconfig", test.name)
        self.assertEqual("pass", test.status)

        test = first(self.squad.tests(name="gcc-8-tinyconfig"))
        self.assertEqual("build/gcc-8-tinyconfig", test.name)
        self.assertEqual("pass", test.status)

        test = first(self.squad.tests(name="gcc-8-x86_64_defconfig"))
        self.assertEqual("build/gcc-8-x86_64_defconfig", test.name)
        self.assertEqual("pass", test.status)

    def test_submit_results_tuxbuild_json_malformed(self):
        """A malformed tuxbuild JSON results file is rejected."""
        result = self.manage_submit(
            results_layout='tuxbuild_json',
            results='tests/data/submit/tuxbuild/malformed.json')
        self.assertFalse(result.ok, msg=result.err)
        self.assertIn("Failed to load json", result.err)

    def test_submit_results_tuxbuild_json_missing(self):
        """A nonexistent tuxbuild results file is reported."""
        result = self.manage_submit(
            results_layout="tuxbuild_json",
            results="tests/data/submit/tuxbuild/missing.json")
        self.assertFalse(result.ok)
        self.assertIn(
            "Requested file tests/data/submit/tuxbuild/missing.json doesn't exist",
            result.err)

    def test_submit_results_tuxbuild_json_results_opt_missing(self):
        """--results-layout alone is not enough; a payload is required."""
        result = self.manage_submit(results_layout="tuxbuild_json")
        self.assertFalse(result.ok)
        expected = "At least one of --result-name, --results, --metrics is required"
        self.assertIn(expected, result.err)

    def test_submit_results_tuxbuild_json_layout_arg_bad(self):
        """An unknown --results-layout choice is rejected by argparse."""
        result = self.manage_submit(
            results_layout="bad_layout",
            results="tests/data/submit/tuxbuild/build.json")
        self.assertFalse(result.ok)
        self.assertIn(
            "argument --results-layout: invalid choice: 'bad_layout'",
            result.err)

    def test_submit_results_tuxbuild_json_empty_kconfig(self):
        """An empty kconfig list in tuxbuild JSON is reported."""
        result = self.manage_submit(
            results_layout="tuxbuild_json",
            results="tests/data/submit/tuxbuild/empty_kconfig.json")
        self.assertFalse(result.ok)
        expected = ("Failed to load tuxbuild json due to a missing kconfig "
                    "value: list index out of range")
        self.assertIn(expected, result.err)

    def test_submit_results_tuxbuild_json_missing_kconfig(self):
        """A tuxbuild entry without a kconfig key is reported."""
        result = self.manage_submit(
            results_layout="tuxbuild_json",
            results="tests/data/submit/tuxbuild/missing_kconfig.json")
        self.assertFalse(result.ok)
        expected = ("Failed to load tuxbuild json due to a missing "
                    "variable: 'kconfig'")
        self.assertIn(expected, result.err)

    def test_submit_results_yaml(self):
        """Results from a YAML file are created on the server."""
        result = self.manage_submit(
            results='tests/submit_results/sample_results.yaml')
        self.assertTrue(result.ok)
        self.assertIn('2 tests', result.err)

        # (name, expected status, expected log or None)
        for name, status, log in (('yaml-test-1', 'pass', None),
                                  ('yaml-test-2', 'fail', 'yaml-test-2 log')):
            submitted = first(self.squad.tests(name=name))
            self.assertEqual(name, submitted.name)
            self.assertEqual(status, submitted.status)
            if log is not None:
                self.assertEqual(log, submitted.log)

    def test_submit_results_malformed_yaml(self):
        """A malformed YAML results file is rejected with a parse error."""
        result = self.manage_submit(
            results='tests/submit_results/sample_results_malformed.yaml')
        self.assertFalse(result.ok)
        self.assertIn('Failed parsing file', result.err)

    def test_submit_results_bad_extension(self):
        """Only JSON/YAML results file extensions are accepted."""
        path = "tests/submit_results/sample_results.txt"
        result = self.manage_submit(results=path)
        self.assertFalse(result.ok)
        self.assertIn(
            'File "%s" does not have a JSON or YAML file extension' % path,
            result.err)

    def test_submit_single_metric(self):
        """Metrics from a JSON file are accepted."""
        result = self.manage_submit(
            metrics='tests/submit_results/sample_metrics.json')
        self.assertTrue(result.ok)
        self.assertIn('1 metrics', result.err)

    def test_submit_everything(self):
        """Results, metrics, metadata and logs submitted in one call."""
        result = self.manage_submit(
            logs='tests/submit_results/sample_log.log',
            metadata='tests/submit_results/sample_metadata.json',
            metrics='tests/submit_results/sample_metrics.json',
            results='tests/submit_results/sample_results.json')
        self.assertTrue(result.ok, msg=result.err)
        self.assertIn('2 tests, 1 metrics', result.err)

        testrun = first(self.squad.testruns(job_id='jsonmetadatajobid1'))
        self.assertEqual('jsonmetadatajobid1', testrun.job_id)
        self.assertEqual(2, len(testrun.tests()))
        self.assertEqual(1, len(testrun.metrics()))