def test_mixed_multinode(self):
    """With a mixed pipeline/non-pipeline pool, a multinode job is still created per role.

    Fixes: the original leaked the file handle from ``yaml.load(open(...))``
    and used ``yaml.load`` without a Loader, which is deprecated and unsafe.
    The submission file is plain YAML, so ``safe_load`` inside ``with`` is
    both sufficient and correct.
    """
    user = self.factory.make_user()
    device_type = self.factory.make_device_type()
    self.factory.make_device(device_type, 'fakeqemu1')
    self.factory.make_device(device_type, 'fakeqemu2')
    self.factory.make_device(device_type, 'fakeqemu3')
    self.factory.make_device(device_type, 'fakeqemu4')
    with open(os.path.join(os.path.dirname(__file__), 'kvm-multinode.yaml'), 'r') as submission_file:
        submission = yaml.safe_load(submission_file)
    role_list = submission['protocols'][MultinodeProtocol.name]['roles']
    # Strip any tag requirements so every device of the type is eligible.
    for role in role_list:
        if 'tags' in role_list[role]:
            del role_list[role]['tags']
    job_list = TestJob.from_yaml_and_user(yaml.dump(submission), user)
    self.assertEqual(len(job_list), 2)
    # make the list mixed
    fakeqemu1 = Device.objects.get(hostname='fakeqemu1')
    fakeqemu1.is_pipeline = False
    fakeqemu1.save(update_fields=['is_pipeline'])
    fakeqemu3 = Device.objects.get(hostname='fakeqemu3')
    fakeqemu3.is_pipeline = False
    fakeqemu3.save(update_fields=['is_pipeline'])
    device_list = Device.objects.filter(device_type=device_type, is_pipeline=True)
    self.assertEqual(len(device_list), 2)
    self.assertIsInstance(device_list, RestrictedResourceQuerySet)
    self.assertIsInstance(list(device_list), list)
    # Re-submission must still produce both role jobs against the
    # remaining pipeline devices.
    job_list = TestJob.from_yaml_and_user(yaml.dump(submission), user)
    self.assertEqual(len(job_list), 2)
    for job in job_list:
        self.assertEqual(job.requested_device_type, device_type)
def setUp(self):
    """Create the users, groups, device types, devices and test jobs shared by these tests."""
    super().setUp()
    # Superuser so submissions bypass per-device permission checks.
    self.admin_user = User.objects.create(
        username=self.factory.get_unique_user(), is_superuser=True)
    # set up auth data.
    self.group1 = self.factory.make_group(name="test-group1")
    self.group2 = self.factory.make_group(name="test-group2")
    self.user1 = self.factory.make_user()
    self.user1.groups.add(self.group1)
    self.user2 = self.factory.make_user()
    self.user2.groups.add(self.group2)
    # user3 deliberately belongs to no group.
    self.user3 = self.factory.make_user()
    # Create device types.
    self.qemu_device_type = self.factory.make_device_type(name="qemu")
    self.bbb_device_type = self.factory.make_device_type(name="bbb")
    self.lxc_device_type = self.factory.make_device_type(name="lxc")
    self.all_device_types = [
        self.qemu_device_type,
        self.bbb_device_type,
        self.lxc_device_type,
    ]
    # Create devices.
    self.qemu_device1 = self.factory.make_device(
        device_type=self.qemu_device_type, hostname="qemu-1")
    self.qemu_device2 = self.factory.make_device(
        device_type=self.qemu_device_type, hostname="qemu-2")
    self.qemu_device3 = self.factory.make_device(
        device_type=self.qemu_device_type, hostname="qemu-3")
    self.all_qemu_devices = [
        self.qemu_device1,
        self.qemu_device2,
        self.qemu_device3,
    ]
    self.bbb_device1 = self.factory.make_device(
        device_type=self.bbb_device_type, hostname="bbb-1")
    self.bbb_device2 = self.factory.make_device(
        device_type=self.bbb_device_type, hostname="bbb-2")
    self.all_bbb_devices = [self.bbb_device1, self.bbb_device2]
    self.all_devices = self.all_qemu_devices + self.all_bbb_devices
    self.definition = self.factory.make_job_data_from_file("qemu.yaml")
    # Create testjobs.
    self.qemu_job1 = TestJob.from_yaml_and_user(self.definition, self.admin_user)
    self.qemu_job2 = TestJob.from_yaml_and_user(self.definition, self.admin_user)
    # The vland group definition is multinode and expands to two jobs.
    self.all_bbb_jobs = TestJob.from_yaml_and_user(
        self.factory.make_job_data_from_file("bbb-bbb-vland-group.yaml"),
        self.admin_user,
    )
    self.bbb_job1 = self.all_bbb_jobs[0]
    self.bbb_job2 = self.all_bbb_jobs[1]
    self.all_qemu_jobs = [self.qemu_job1, self.qemu_job2]
    self.all_jobs = self.all_qemu_jobs + self.all_bbb_jobs
def testjob_submission(job_definition, user, check_device=None, original_job=None):
    """
    Single submission frontend for JSON or YAML
    :param job_definition: string of the job submission
    :param user: user attempting the submission
    :param check_device: set specified device as the target **and**
    thereby set job as a health check job. (JSON only)
    :param original_job: job being resubmitted, forwarded to the YAML
    parser so the new job records its origin (YAML only)
    :return: a job or a list of jobs
    :raises: SubmissionException, Device.DoesNotExist,
        DeviceType.DoesNotExist, DevicesUnavailableException,
        JSONDataError, JSONDecodeError, ValueError
    """
    if is_deprecated_json(job_definition):
        allow_health = False
        job_json = simplejson.loads(job_definition)
        target_device = None
        if 'target' in job_json:
            target_device = Device.objects.get(hostname=job_json['target'])
        if check_device:
            # Redirect the submission at the health-check device and mark
            # it as a health check before re-serialising.
            job_json['target'] = check_device.hostname
            job_json['health-check'] = True
            job_definition = simplejson.dumps(job_json)
            allow_health = True
        try:
            # returns a single job or a list (not a QuerySet) of job objects.
            job = TestJob.from_json_and_user(job_definition, user, health_check=allow_health)
            if isinstance(job, list):
                # multinode health checks not supported
                return job
            job.health_check = allow_health
            if check_device:
                job.requested_device = check_device
            elif target_device:
                job.requested_device = target_device
            job.save(update_fields=['health_check', 'requested_device'])
        except (JSONDataError, ValueError) as exc:
            if check_device:
                # A failed health-check submission takes the device offline.
                check_device.put_into_maintenance_mode(
                    user, "Job submission failed for health job for %s: %s" % (check_device, exc))
                raise JSONDataError("Health check job submission failed for %s: %s" % (check_device, exc))
            else:
                raise JSONDataError("Job submission failed: %s" % exc)
    else:
        validate_job(job_definition)
        # returns a single job or a list (not a QuerySet) of job objects.
        job = TestJob.from_yaml_and_user(job_definition, user, original_job=original_job)
        if check_device and isinstance(check_device, Device) and not isinstance(job, list):
            # the slave must neither know nor care if this is a health check,
            # only the master cares and that has the database connection.
            job.health_check = True
            job.requested_device = check_device
            job.save(update_fields=['health_check', 'requested_device'])
    return job
def testjob_submission(job_definition, user, check_device=None):
    """
    Single submission frontend for JSON or YAML
    :param job_definition: string of the job submission
    :param user: user attempting the submission
    :param check_device: set specified device as the target **and**
    thereby set job as a health check job. (JSON only)
    :return: a job or a list of jobs
    :raises: SubmissionException, Device.DoesNotExist,
        DeviceType.DoesNotExist, DevicesUnavailableException,
        JSONDataError, JSONDecodeError, ValueError
    """
    if is_deprecated_json(job_definition):
        allow_health = False
        job_json = simplejson.loads(job_definition)
        target_device = None
        if 'target' in job_json:
            target_device = Device.objects.get(hostname=job_json['target'])
        if check_device:
            # Redirect the submission at the health-check device before
            # re-serialising the JSON.
            job_json['target'] = check_device.hostname
            job_json['health-check'] = True
            job_definition = simplejson.dumps(job_json)
            allow_health = True
        try:
            # returns a single job or a list (not a QuerySet) of job objects.
            job = TestJob.from_json_and_user(job_definition, user, health_check=allow_health)
            if isinstance(job, list):
                # multinode health checks not supported
                return job
            job.health_check = allow_health
            if check_device:
                job.requested_device = check_device
            elif target_device:
                job.requested_device = target_device
            job.save(update_fields=['health_check', 'requested_device'])
        except (JSONDataError, ValueError) as exc:
            if check_device:
                # A failed health-check submission takes the device offline.
                check_device.put_into_maintenance_mode(
                    user, "Job submission failed for health job for %s: %s" % (check_device, exc))
                raise JSONDataError("Health check job submission failed for %s: %s" % (check_device, exc))
            else:
                raise JSONDataError("Job submission failed: %s" % exc)
    else:
        validate_job(job_definition)
        # returns a single job or a list (not a QuerySet) of job objects.
        job = TestJob.from_yaml_and_user(job_definition, user)
        if check_device and isinstance(check_device, Device) and not isinstance(job, list):
            # the slave must neither know nor care if this is a health check,
            # only the master cares and that has the database connection.
            job.health_check = True
            job.requested_device = check_device
            job.save(update_fields=['health_check', 'requested_device'])
    return job
def test_from_yaml_and_user_reuses_tag_objects(self):
    """Two submissions requesting the same tag name must share the same Tag rows."""
    self.factory.ensure_tag('tag')
    known_tags = list(Tag.objects.filter(name='tag'))
    self.factory.make_device(device_type=self.device_type,
                             hostname="fakeqemu1", tags=known_tags)

    def submit():
        # Each submission comes from a distinct user, ruling out any
        # per-user caching explanation for shared tags.
        return TestJob.from_yaml_and_user(
            self.factory.make_job_json(tags=['tag']),
            self.factory.make_user())

    first_job = submit()
    second_job = submit()
    first_pks = {tag.pk for tag in first_job.tags.all()}
    second_pks = {tag.pk for tag in second_job.tags.all()}
    self.assertEqual(first_pks, second_pks)
def test_from_yaml_unsupported_tags(self):
    """Submission must be rejected when no device supports the requested tags."""
    self.factory.make_device(self.device_type, 'fakeqemu1')
    self.factory.ensure_tag('usb')
    self.factory.ensure_tag('sata')
    # The only device carries neither tag, so the scheduler has nowhere
    # to run this job and must refuse the submission.
    with self.assertRaises(DevicesUnavailableException):
        TestJob.from_yaml_and_user(
            self.factory.make_job_json(tags=['usb', 'sata']),
            self.factory.make_user())
def test_from_yaml_and_user_reuses_tag_objects(self):
    """Jobs submitted with the same tag name end up referencing identical Tag objects."""
    self.factory.ensure_tag('tag')
    existing = list(Tag.objects.filter(name='tag'))
    self.factory.make_device(
        device_type=self.device_type, hostname="fakeqemu1", tags=existing)
    # Submit the same tagged job twice, each time as a fresh user.
    jobs = [
        TestJob.from_yaml_and_user(
            self.factory.make_job_json(tags=['tag']),
            self.factory.make_user())
        for _ in range(2)
    ]
    pk_sets = [{tag.pk for tag in job.tags.all()} for job in jobs]
    self.assertEqual(pk_sets[0], pk_sets[1])
def test_from_yaml_unsupported_tags(self):
    """A job asking for tags no device carries must raise DevicesUnavailableException."""
    self.factory.make_device(self.device_type, 'fakeqemu1')
    for tag_name in ('usb', 'sata'):
        self.factory.ensure_tag(tag_name)
    raised = False
    try:
        TestJob.from_yaml_and_user(
            self.factory.make_job_json(tags=['usb', 'sata']),
            self.factory.make_user())
    except DevicesUnavailableException:
        raised = True
    if not raised:
        self.fail(
            "Device tags failure: job submitted without any devices supporting the requested tags"
        )
def test_from_json_and_user_matches_available_tags(self):
    """
    With more than one device of the requested type supporting tags, the
    tag list set for the TestJob must match the list requested — not a
    shorter list from a different device, nor a combined list from
    multiple devices.
    """
    device_type = self.factory.ensure_device_type(name='panda')
    common = [
        self.factory.ensure_tag('common_tag1'),
        self.factory.ensure_tag('common_tag2'),
    ]
    # First device carries only the two common tags.
    self.factory.make_device(device_type=device_type, hostname="panda4",
                             tags=common)
    # Second device carries the common tags plus one unique tag.
    full_list = common + [self.factory.ensure_tag('unique_tag')]
    self.factory.make_device(device_type=device_type, hostname="panda5",
                             tags=full_list)
    job = TestJob.from_json_and_user(
        self.factory.make_job_json(
            tags=['common_tag1', 'common_tag2', 'unique_tag']),
        self.factory.make_user())
    self.assertEqual(set(job.tags.all()), set(full_list))
    self.factory.cleanup()
def test_from_json_and_user_sets_target(self):
    """The JSON 'target' field must become the job's requested_device."""
    board = self.factory.make_device(hostname='panda01')
    submitter = self.factory.make_user()
    job = TestJob.from_json_and_user(
        self.factory.make_job_json(target='panda01'), submitter)
    self.assertEqual(board, job.requested_device)
    self.factory.cleanup()
def test_parameter_support(self):  # pylint: disable=too-many-locals
    """Test-definition parameters must survive parsing and appear in the job metadata."""
    data = self.factory.make_job_data()
    # Pick the (single) 'test' action block from the job data.
    test_block = [block for block in data['actions'] if 'test' in block][0]
    smoke = test_block['test']['definitions'][0]
    smoke['parameters'] = {
        'VARIABLE_NAME_1': "first variable value",
        'VARIABLE_NAME_2': "second value"
    }
    job = TestJob.from_yaml_and_user(yaml.dump(data), self.user)
    job_def = yaml.load(job.definition)
    job_ctx = job_def.get('context', {})
    device = Device.objects.get(hostname='fakeqemu1')
    device_config = device.load_device_configuration(job_ctx, system=False)  # raw dict
    parser = JobParser()
    obj = PipelineDevice(device_config, device.hostname)
    pipeline_job = parser.parse(job.definition, obj, job.id, None, "", output_dir='/tmp')
    # Validation must pass even if the qemu binary is absent on the host.
    allow_missing_path(pipeline_job.pipeline.validate_actions, self, 'qemu-system-x86_64')
    pipeline = pipeline_job.describe()
    device_values = _get_device_metadata(pipeline['device'])
    try:
        testdata, _ = TestData.objects.get_or_create(testjob=job)
    except (MultipleObjectsReturned):
        self.fail('multiple objects')
    # Store only non-empty device metadata as job attributes.
    for key, value in device_values.items():
        if not key or not value:
            continue
        testdata.attributes.create(name=key, value=value)
    retval = _get_job_metadata(pipeline['job']['actions'])
    self.assertIn('test.0.common.definition.parameters.VARIABLE_NAME_2', retval)
    self.assertIn('test.0.common.definition.parameters.VARIABLE_NAME_1', retval)
    self.assertEqual(retval['test.0.common.definition.parameters.VARIABLE_NAME_1'], 'first variable value')
    self.assertEqual(retval['test.0.common.definition.parameters.VARIABLE_NAME_2'], 'second value')
def test_match_devices_without_map(self):
    """
    Without a map, there is no support for knowing which interfaces to put
    onto a VLAN, so these devices cannot be assigned to a VLAN testjob
    See http://localhost/static/docs/v2/vland.html#vland-and-interface-tags-in-lava

    Fix: use ``yaml.safe_load`` — plain ``yaml.load`` without a Loader is
    deprecated and unsafe; sample jobs and job definitions are plain YAML.
    """
    self.bbb3 = self.factory.make_device(self.factory.bbb_type, hostname='bbb-03')
    self.cubie2 = self.factory.make_device(self.factory.cubie_type, hostname='cubie2')
    devices = [self.bbb3, self.cubie2]
    self.factory.ensure_tag('usb-eth')
    self.factory.ensure_tag('sata')
    self.factory.bbb1.tags = Tag.objects.filter(name='usb-eth')
    self.factory.bbb1.save()
    self.factory.cubie1.tags = Tag.objects.filter(name='sata')
    self.factory.cubie1.save()
    user = self.factory.make_user()
    sample_job_file = os.path.join(os.path.dirname(__file__), 'sample_jobs', 'bbb-cubie-vlan-group.yaml')
    with open(sample_job_file, 'r') as test_support:
        data = yaml.safe_load(test_support)
    vlan_job = TestJob.from_yaml_and_user(yaml.dump(data), user)
    assignments = {}
    for job in vlan_job:
        device = find_device_for_job(job, devices)
        self.assertIsNone(device)
        # no map defined
        self.assertFalse(match_vlan_interface(device, yaml.safe_load(job.definition)))
        assignments[job.device_role] = device
    self.assertIsNone(assignments['client'])
    self.assertIsNone(assignments['server'])
def test_level_input(self):
    """A scanned result carrying a 'level' maps to exactly one passing TestCase."""
    job = TestJob.from_yaml_and_user(self.factory.make_job_yaml(), self.user)
    suite = TestSuite.objects.create(job=job, name="lava")
    suite.save()
    scanned = {
        "case": "test-overlay",
        "definition": "lava",
        "duration": 0.01159811019897461,
        "level": "1.3.3.2",
        "result": "pass",
    }
    mapped = map_scanned_results(
        results=scanned, job=job, starttc=None, endtc=None, meta_filename=None)
    self.assertTrue(mapped)
    mapped.save()
    # Exactly one TestCase must have been created for the suite.
    self.assertEqual(1, TestCase.objects.filter(suite=suite).count())
    testcase = TestCase.objects.get(suite=suite)
    self.assertIsInstance(testcase.metadata, string_types)
    self.assertEqual(testcase.result, TestCase.RESULT_PASS)
    self.factory.cleanup()
def test_parameter_support(self):
    """Test-definition parameters must appear in the action metadata extracted from the pipeline."""
    data = self.factory.make_job_data()
    # Pick the (single) 'test' action block from the job data.
    test_block = [block for block in data["actions"] if "test" in block][0]
    smoke = test_block["test"]["definitions"][0]
    smoke["parameters"] = {
        "VARIABLE_NAME_1": "first variable value",
        "VARIABLE_NAME_2": "second value",
    }
    job = TestJob.from_yaml_and_user(yaml_dump(data), self.user)
    job_def = yaml_safe_load(job.definition)
    job_ctx = job_def.get("context", {})
    job_ctx.update(
        {"no_kvm": True}
    )  # override to allow unit tests on all types of systems
    device = Device.objects.get(hostname="fakeqemu1")
    device_config = device.load_configuration(job_ctx)  # raw dict
    parser = JobParser()
    obj = PipelineDevice(device_config)
    pipeline_job = parser.parse(job.definition, obj, job.id, None, "")
    # Validation must pass even if the qemu binary is absent on the host.
    allow_missing_path(
        pipeline_job.pipeline.validate_actions, self, "qemu-system-x86_64"
    )
    pipeline = pipeline_job.describe()
    testdata, _ = TestData.objects.get_or_create(testjob=job)
    retval = _get_action_metadata(pipeline["job"]["actions"])
    self.assertIn("test.0.common.definition.parameters.VARIABLE_NAME_2", retval)
    self.assertIn("test.0.common.definition.parameters.VARIABLE_NAME_1", retval)
    self.assertEqual(
        retval["test.0.common.definition.parameters.VARIABLE_NAME_1"],
        "first variable value",
    )
    self.assertEqual(
        retval["test.0.common.definition.parameters.VARIABLE_NAME_2"],
        "second value",
    )
def test_job_multi(self):
    """A multi-test job definition parses and its pipeline description maps to metadata.

    Fix: use ``yaml.safe_load`` for the stored definition — plain
    ``yaml.load`` without a Loader is deprecated and unsafe, and job
    definitions are plain YAML data.
    """
    MetaType.objects.all().delete()
    multi_test_file = os.path.join(os.path.dirname(__file__), 'multi-test.yaml')
    self.assertTrue(os.path.exists(multi_test_file))
    with open(multi_test_file, 'r') as test_support:
        data = test_support.read()
    job = TestJob.from_yaml_and_user(data, self.user)
    job_def = yaml.safe_load(job.definition)
    job_ctx = job_def.get('context', {})
    device = Device.objects.get(hostname='fakeqemu1')
    device_config = device.load_device_configuration(job_ctx, system=False)  # raw dict
    parser = JobParser()
    obj = PipelineDevice(device_config, device.hostname)
    pipeline_job = parser.parse(job.definition, obj, job.id, None, "", output_dir='/tmp')
    # Validation must pass even if the qemu binary is absent on the host.
    allow_missing_path(pipeline_job.pipeline.validate_actions, self, 'qemu-system-x86_64')
    pipeline = pipeline_job.describe()
    map_metadata(yaml.dump(pipeline), job)
def test_case_as_url(self):
    """A valid case name reverses to a testcase URL; whitespace in the name must not."""
    job = TestJob.from_yaml_and_user(self.factory.make_job_yaml(), self.user)
    test_dict = {
        "definition": "unit-test",
        "case": "unit-test",
        "level": "1.3.4.1",  # list of numbers, generates a much longer YAML string than just the count
        "result": "pass",
    }
    pattern = "[-_a-zA-Z0-9.\\(\\)]+"
    match = re.search(pattern, test_dict["case"])
    self.assertIsNotNone(match)  # passes
    self.assertEqual(match.group(0), test_dict["case"])
    suite, _ = TestSuite.objects.get_or_create(
        name=test_dict["definition"], job=job)
    case, _ = TestCase.objects.get_or_create(
        suite=suite, name=test_dict["case"], result=TestCase.RESULT_PASS)
    # Both the short and the fully-qualified URL forms must reverse.
    self.assertIsNotNone(reverse("lava.results.testcase", args=[case.id]))
    self.assertIsNotNone(
        reverse("lava.results.testcase", args=[job.id, suite.name, case.id]))
    self.assertIsNotNone(
        map_scanned_results(test_dict, job, None, None, None))
    # now break the reverse pattern
    test_dict["case"] = "unit test"  # whitespace in the case name
    match = re.search(pattern, test_dict["case"])
    self.assertIsNotNone(match)
    with self.assertRaises(NoReverseMatch):
        reverse(
            "lava.results.testcase",
            args=[job.id, suite.name, test_dict["case"]],
        )
def test_group_visibility(self):
    """Group visibility requires the group to exist; once created, viewing_groups is recorded."""
    self.factory.cleanup()
    dt = self.factory.make_device_type(name='name')
    board = self.factory.make_device(device_type=dt, hostname='name-1')
    board.save()
    definition = self.factory.make_job_data()
    definition['visibility'] = {'group': ['newgroup']}
    definition['job_name'] = 'unittest_visibility'
    self.assertIsNotNone(yaml.dump(definition))
    self.assertIsNotNone(list(Device.objects.filter(device_type=dt)))
    submitter = self.factory.make_user()
    submitter.user_permissions.add(
        Permission.objects.get(codename='add_testjob'))
    submitter.save()
    # 'newgroup' does not exist yet: the submission must be refused.
    with self.assertRaises(SubmissionException):
        TestJob.from_yaml_and_user(yaml.dump(definition), submitter)
    self.factory.make_group('newgroup')
    known_groups = list(Group.objects.filter(name__in=['newgroup']))
    job = TestJob.from_yaml_and_user(yaml.dump(definition), submitter)
    job.refresh_from_db()
    self.assertEqual(submitter, job.submitter)
    self.assertEqual(job.visibility, TestJob.VISIBLE_GROUP)
    self.assertEqual(known_groups, list(job.viewing_groups.all()))
    self.factory.cleanup()
def test_group_visibility(self):
    """Submitting group visibility before the group exists fails; afterwards viewing_groups is set."""
    self.factory.cleanup()
    device_type = self.factory.make_device_type(name="name")
    device = self.factory.make_device(device_type=device_type, hostname="name-1")
    device.save()
    definition = self.factory.make_job_data()
    definition["visibility"] = {"group": ["newgroup"]}
    definition["job_name"] = "unittest_visibility"
    self.assertIsNotNone(yaml.safe_dump(definition))
    self.assertIsNotNone(list(Device.objects.filter(device_type=device_type)))
    submitter = self.factory.make_user()
    submitter.user_permissions.add(
        Permission.objects.get(codename="add_testjob"))
    submitter.save()
    # 'newgroup' does not exist yet: the submission must be refused.
    with self.assertRaises(SubmissionException):
        TestJob.from_yaml_and_user(yaml.safe_dump(definition), submitter)
    self.factory.make_group("newgroup")
    known_groups = list(Group.objects.filter(name__in=["newgroup"]))
    job = TestJob.from_yaml_and_user(yaml.safe_dump(definition), submitter)
    job.refresh_from_db()
    self.assertEqual(submitter, job.submitter)
    self.assertEqual(job.visibility, TestJob.VISIBLE_GROUP)
    self.assertEqual(known_groups, list(job.viewing_groups.all()))
    self.factory.cleanup()
def test_from_json_and_user_repeat_parameter_zero(self):
    """A repeat of 0 must be stripped entirely: neither 'repeat' nor 'repeat_count' survive."""
    device_type = self.factory.make_device_type('base')
    device = self.factory.make_device(device_type=device_type, hostname="generic")
    shell_action = {
        'command': 'lava_test_shell',
        'parameters': {
            'repeat': 0,
            'testdef_repos': [{
                'git-repo': 'git://server/test.git',
                'testdef': 'testdef.yaml'
            }],
        }
    }
    job_data = {
        'timeout': 1,
        'target': device.hostname,
        'actions': [shell_action],
    }
    job_json = simplejson.dumps(job_data, sort_keys=True, indent=4 * ' ')
    job = TestJob.from_json_and_user(job_json, self.factory.make_user())
    definition_data = simplejson.loads(job.definition)
    self.assertEqual(len(definition_data['actions']), 1)
    stored_parameters = definition_data['actions'][0]['parameters']
    self.assertNotIn('repeat_count', stored_parameters)
    self.assertNotIn('repeat', stored_parameters)
    self.assertEqual(job.status, TestJob.SUBMITTED)
    self.factory.cleanup()
def test_repositories(self):
    """Action metadata extracted from a parsed job must match the test definitions in the YAML."""
    job = TestJob.from_yaml_and_user(self.factory.make_job_yaml(), self.user)
    job_def = yaml_safe_load(job.definition)
    job_ctx = job_def.get("context", {})
    job_ctx.update(
        {"no_kvm": True}
    )  # override to allow unit tests on all types of systems
    device = Device.objects.get(hostname="fakeqemu1")
    device_config = device.load_configuration(job_ctx)  # raw dict
    parser = JobParser()
    obj = PipelineDevice(device_config)
    pipeline_job = parser.parse(job.definition, obj, job.id, None, "")
    # Validation must pass even if the qemu binary is absent on the host.
    allow_missing_path(
        pipeline_job.pipeline.validate_actions, self, "qemu-system-x86_64"
    )
    pipeline = pipeline_job.describe()
    testdata, _ = TestData.objects.get_or_create(testjob=job)
    retval = _get_action_metadata(pipeline["job"]["actions"])
    # One boot action plus two test definitions, keyed by action index.
    self.assertEqual(
        retval,
        {
            "test.1.common.definition.from": "git",
            "test.0.common.definition.repository": "git://git.linaro.org/lava-team/lava-functional-tests.git",
            "test.0.common.definition.name": "smoke-tests",
            "test.1.common.definition.repository": "http://git.linaro.org/lava-team/lava-functional-tests.git",
            "boot.0.common.method": "qemu",
            "test.1.common.definition.name": "singlenode-advanced",
            "test.0.common.definition.from": "git",
            "test.0.common.definition.path": "lava-test-shell/smoke-tests-basic.yaml",
            "test.1.common.definition.path": "lava-test-shell/single-node/singlenode03.yaml",
        },
    )
def test_level_input(self):
    """Scanned results with action levels map to TestCases carrying action_data/action_level."""
    user = self.factory.make_user()
    job = TestJob.from_yaml_and_user(self.factory.make_job_yaml(), user)
    suite = TestSuite.objects.create(job=job, name='test-suite')
    suite.save()
    # The sample deliberately uses python-specific tags
    # (!!python/object/apply:...OrderedDict), so full yaml.load is
    # required here; safe_load would reject these tags.
    result_sample = """
results:
  lava-test-shell: !!python/object/apply:collections.OrderedDict
    - - [ping-test, fail]
  power_off: !!python/object/apply:collections.OrderedDict
    - - [status, Complete]
      - [level, 5.1]
"""
    scanned = yaml.load(result_sample)
    ret = map_scanned_results(scanned_dict=scanned, job=job)
    self.assertTrue(ret)
    for testcase in TestCase.objects.filter(suite=suite):
        if testcase.name == 'power_off':
            # NOTE(review): 'unicode' implies Python 2 era code — confirm
            # before reusing under Python 3.
            self.assertTrue(type(testcase.metadata) in [str, unicode])
            self.assertTrue(type(testcase.action_data) == OrderedDict)
            self.assertEqual(testcase.action_data['status'], 'Complete')
            self.assertEqual(testcase.action_data['level'], 5.1)
            self.assertEqual(testcase.action_level, '5.1')
            self.assertEqual(testcase.result, TestCase.RESULT_UNKNOWN)
    self.factory.cleanup()
def test_level_input(self):
    """Scanned results with action levels map to TestCases with the matching action_level."""
    user = self.factory.make_user()
    job = TestJob.from_yaml_and_user(
        self.factory.make_job_yaml(), user)
    suite = TestSuite.objects.create(
        job=job,
        name='test-suite'
    )
    suite.save()
    # Full yaml.load is required: the sample uses python-specific tags
    # (!!python/object/apply:...OrderedDict) that safe_load rejects.
    result_sample = """
results:
  lava-test-shell: !!python/object/apply:collections.OrderedDict
    - - [ping-test, fail]
  power_off: !!python/object/apply:collections.OrderedDict
    - - [status, Complete]
      - [level, 5.1]
"""
    scanned = yaml.load(result_sample)
    ret = map_scanned_results(scanned_dict=scanned, job=job)
    self.assertTrue(ret)
    for testcase in TestCase.objects.filter(suite=suite):
        if testcase.name == 'power_off':
            # NOTE(review): 'unicode' implies Python 2 era code — confirm
            # before reusing under Python 3.
            self.assertTrue(type(testcase.metadata) in [str, unicode])
            self.assertTrue(type(testcase.action_data) == OrderedDict)
            self.assertEqual(testcase.action_data['status'], 'Complete')
            self.assertEqual(testcase.action_data['level'], 5.1)
            self.assertEqual(testcase.action_level, '5.1')
            self.assertEqual(testcase.result, TestCase.RESULT_UNKNOWN)
    self.factory.cleanup()
def test_case_as_url(self):
    """A hyphenated case name reverses to a testcase URL; a name with whitespace must not."""
    job = TestJob.from_yaml_and_user(self.factory.make_job_yaml(), self.user)
    test_dict = {
        'definition': 'unit-test',
        'case': 'unit-test',
        'level': '1.3.4.1',  # list of numbers, generates a much longer YAML string than just the count
        'result': 'pass'
    }
    pattern = '[-_a-zA-Z0-9.\\(\\)]+'
    found = re.search(pattern, test_dict['case'])
    self.assertIsNotNone(found)  # passes
    self.assertEqual(found.group(0), test_dict['case'])
    suite, _ = TestSuite.objects.get_or_create(
        name=test_dict["definition"], job=job)
    case, _ = TestCase.objects.get_or_create(
        suite=suite, name=test_dict['case'], result=TestCase.RESULT_PASS)
    # Both the short and the fully-qualified URL forms must reverse.
    self.assertIsNotNone(reverse('lava.results.testcase', args=[case.id]))
    self.assertIsNotNone(
        reverse('lava.results.testcase', args=[job.id, suite.name, case.id]))
    self.assertIsNotNone(map_scanned_results(test_dict, job, {}, None))
    # now break the reverse pattern
    test_dict['case'] = 'unit test'  # whitespace in the case name
    found = re.search(pattern, test_dict['case'])
    self.assertIsNotNone(found)
    with self.assertRaises(NoReverseMatch):
        reverse('lava.results.testcase',
                args=[job.id, suite.name, test_dict['case']])
def test_same_type_devices_with_map(self):
    """With an interface map defined, two same-type devices get distinct assignments.

    Fix: use ``yaml.safe_load`` — plain ``yaml.load`` without a Loader is
    deprecated and unsafe; sample jobs and job definitions are plain YAML.
    """
    bbb2 = self.factory.make_device(self.factory.bbb_type, hostname='bbb-02')
    devices = list(Device.objects.filter(status=Device.IDLE).order_by('is_public'))
    user = self.factory.make_user()
    sample_job_file = os.path.join(os.path.dirname(__file__), 'sample_jobs', 'bbb-bbb-vland-group.yaml')
    with open(sample_job_file, 'r') as test_support:
        data = yaml.safe_load(test_support)
    vlan_job = TestJob.from_yaml_and_user(yaml.dump(data), user)
    assignments = {}
    for job in vlan_job:
        device = find_device_for_job(job, devices)
        self.assertIsNotNone(device)
        self.assertEqual(device.device_type, job.requested_device_type)
        # map has been defined
        self.assertTrue(match_vlan_interface(device, yaml.safe_load(job.definition)))
        assignments[job.device_role] = device
        # A device may serve only one role in the group.
        if device in devices:
            devices.remove(device)
    assign_jobs()
    self.factory.bbb1.refresh_from_db()
    bbb2.refresh_from_db()
    self.assertIsNotNone(self.factory.bbb1.current_job)
    self.assertIsNotNone(bbb2.current_job)
    self.assertIsNotNone(self.factory.bbb1.current_job.actual_device)
    self.assertIsNotNone(bbb2.current_job.actual_device)  # pylint: disable=no-member
    self.assertNotEqual(self.factory.bbb1.current_job, bbb2.current_job)
    self.assertNotEqual(
        self.factory.bbb1.current_job.actual_device,
        bbb2.current_job.actual_device)  # pylint: disable=no-member
def test_match_devices_without_map(self):
    """Without an interface map, jobs still match devices by type/tag, but no VLAN interface matches.

    Fix: use ``yaml.safe_load`` — plain ``yaml.load`` without a Loader is
    deprecated and unsafe; sample jobs and job definitions are plain YAML.
    """
    devices = Device.objects.filter(
        status=Device.IDLE).order_by('is_public')
    self.factory.ensure_tag('usb-eth')
    self.factory.ensure_tag('sata')
    self.factory.bbb1.tags = Tag.objects.filter(name='usb-eth')
    self.factory.bbb1.save()
    self.factory.cubie1.tags = Tag.objects.filter(name='sata')
    self.factory.cubie1.save()
    user = self.factory.make_user()
    sample_job_file = os.path.join(os.path.dirname(__file__), 'bbb-cubie-vlan-group.yaml')
    with open(sample_job_file, 'r') as test_support:
        data = yaml.safe_load(test_support)
    vlan_job = TestJob.from_yaml_and_user(yaml.dump(data), user)
    assignments = {}
    for job in vlan_job:
        device = find_device_for_job(job, devices)
        self.assertEqual(device.device_type, job.requested_device_type)
        # no map defined
        self.assertFalse(
            match_vlan_interface(device, yaml.safe_load(job.definition)))
        assignments[job.device_role] = device
    self.assertEqual(assignments['client'].hostname, self.factory.bbb1.hostname)
    self.assertEqual(assignments['server'].hostname, self.factory.cubie1.hostname)
def test_metastore(self):
    """Oversized result entries are diverted to a metadata-store file referenced from the TestCase."""
    field = TestCase._meta.get_field('metadata')
    level = '1.3.5.1'
    # artificially inflate results to represent a set of kernel messages
    results = {
        'definition': 'lava',
        'case': 'unit-test',
        # list of numbers, generates a much longer YAML string than just the count
        # NOTE(review): under Python 3 'range' is a lazy object, not a list —
        # confirm the YAML dump of it still exceeds field.max_length here.
        'extra': range(int(field.max_length / 2)),
        'result': 'pass'
    }
    stub = "%s-%s-%s.yaml" % (results['definition'], results['case'], level)
    job = TestJob.from_yaml_and_user(
        self.factory.make_job_yaml(), self.user)
    meta_filename = os.path.join(job.output_dir, 'metadata', stub)
    filename = "%s/job-%s/pipeline/%s/%s-%s.yaml" % (job.output_dir, job.id,
                                                     level.split('.')[0],
                                                     level, results['definition'])
    mkdir(os.path.dirname(filename))
    if os.path.exists(meta_filename):
        # isolate from other unit tests
        os.unlink(meta_filename)
    self.assertEqual(meta_filename, create_metadata_store(results, job, level))
    self.assertTrue(map_scanned_results(results, job, meta_filename))
    self.assertEqual(TestCase.objects.filter(name='unit-test').count(), 1)
    test_data = yaml.load(TestCase.objects.filter(name='unit-test')[0].metadata, Loader=yaml.CLoader)
    # The oversized 'extra' payload is replaced by the metadata file path.
    self.assertEqual(test_data['extra'], meta_filename)
    self.assertTrue(os.path.exists(meta_filename))
    with open(test_data['extra'], 'r') as extra_file:
        data = yaml.load(extra_file, Loader=yaml.CLoader)
    self.assertIsNotNone(data)
    os.unlink(meta_filename)
    shutil.rmtree(job.output_dir)
def test_from_json_and_user_sets_device_type(self):
    """The requested device type on the job matches the type named in the JSON."""
    requested_type = self.factory.ensure_device_type(name='panda')
    submitter = self.factory.make_user()
    job = TestJob.from_json_and_user(
        self.factory.make_job_json(device_type='panda'), submitter)
    self.assertEqual(requested_type, job.requested_device_type)
    self.factory.cleanup()
def testjob_submission(job_definition, user, original_job=None):
    """
    Single submission frontend for YAML

    :param job_definition: string of the job submission
    :param user: user attempting the submission
    :param original_job: optional job this submission is a resubmission of
    :return: a job or a list of jobs
    :raises: SubmissionException, Device.DoesNotExist,
        DeviceType.DoesNotExist, DevicesUnavailableException, ValueError
    """
    try:
        # accept JSON but store as YAML
        json.loads(job_definition)
        is_json = True
    except json.decoder.JSONDecodeError:
        is_json = False
    if is_json:
        # explicitly convert to YAML.
        # JSON cannot have comments anyway.
        job_definition = yaml.safe_dump(yaml.safe_load(job_definition))
    validate_job(job_definition)
    # returns a single job or a list (not a QuerySet) of job objects.
    return TestJob.from_yaml_and_user(job_definition, user, original_job=original_job)
def definition(self, job_id):
    """
    Name
    ----
    `scheduler.jobs.definition` (`job_id`)

    Description
    -----------
    Return the job definition

    Arguments
    ---------
    `job_id`: string
        Job id

    Return value
    ------------
    The job definition or an error.
    """
    try:
        job = TestJob.get_by_job_number(job_id)
    except TestJob.DoesNotExist:
        raise xmlrpclib.Fault(
            404, "Job '%s' was not found." % job_id)
    if not job.can_view(self.user):
        raise xmlrpclib.Fault(
            403, "Job '%s' not available to user '%s'." % (job_id, self.user))
    # Multinode jobs store the expanded group definition separately from
    # the definition as originally submitted.
    if job.is_multinode:
        return xmlrpclib.Binary(job.multinode_definition)
    else:
        return xmlrpclib.Binary(job.original_definition)
def test_set(self):
    """Results sharing a 'set' name must land in the same TestSet within the suite."""
    job = TestJob.from_yaml_and_user(
        self.factory.make_job_yaml(), self.user)
    result_samples = [
        {"case": "linux-INLINE-lscpu", "definition": "smoke-tests-basic", "result": "fail", "set": "listing"},
        {"case": "linux-INLINE-lspci", "definition": "smoke-tests-basic", "result": "fail", "set": "listing"}
    ]
    suite = TestSuite.objects.create(
        job=job,
        name='test-suite'
    )
    suite.save()
    self.assertEqual('/results/%s/test-suite' % job.id, suite.get_absolute_url())
    for sample in result_samples:
        ret = map_scanned_results(results=sample, job=job)
        self.assertTrue(ret)
    self.assertEqual(2, TestCase.objects.count())
    val = URLValidator()
    # NOTE(review): the samples carry "result": "fail" yet the loop expects
    # RESULT_PASS, and the samples name suite 'smoke-tests-basic' while the
    # filter uses the 'test-suite' suite — if the filter matches nothing the
    # loop body (and these assertions) never run. Confirm intent.
    for testcase in TestCase.objects.filter(suite=suite):
        self.assertEqual(testcase.suite, suite)
        self.assertIsNotNone(testcase.name)
        self.assertIsNotNone(testcase.result)
        self.assertIsNone(testcase.metadata)
        self.assertEqual(testcase.result, TestCase.RESULT_PASS)
        self.assertEqual(testcase.test_set.name, 'listing')
        self.assertTrue(testcase.name.startswith('linux-INLINE-'))
        val('http://localhost/%s' % testcase.get_absolute_url())
    self.factory.cleanup()
def test_duration(self):
    """ActionData.duration round-trips through the database as a Decimal."""
    TestJob.from_yaml_and_user(
        self.factory.make_job_yaml(), self.user)
    meta = MetaType(name='fake', metatype=MetaType.DEPLOY_TYPE)
    meta.save()
    record = ActionData(meta_type=meta, action_level='1.2.3', action_name='fake')
    record.save()
    record.duration = '1.2'
    record.save(update_fields=['duration'])
    record = ActionData.objects.get(id=record.id)  # reload
    # unit tests check the instance as well as the value.
    self.assertIsInstance(record.duration, decimal.Decimal)
    self.assertEqual(float(record.duration), 1.2)
    record.timeout = 300
    record.save(update_fields=['timeout'])
    self.assertEqual(record.timeout, 300)
def test_make_ssh_guest_yaml(self):
    """Host and guest jobs of a MultiNode ssh group match reference YAML.

    The host job must equal sample_jobs/qemu-ssh-parent.yaml and every
    dynamic-connection guest must equal qemu-ssh-guest-1.yaml once the
    per-job sub_id is stripped; sub_ids must be unique and sequential.
    """
    hostname = "fakeqemu3"
    device = self.factory.make_device(self.device_type, hostname)
    try:
        jobs = TestJob.from_yaml_and_user(
            self.factory.make_job_yaml(), self.factory.make_user()
        )
    except DevicesUnavailableException as exc:
        self.fail(exc)
    sub_id = []       # every sub_id seen, host and guests
    group_size = 0    # taken from the first job's protocol block
    path = os.path.join(os.path.dirname(os.path.join(__file__)), "sample_jobs")
    host_role = []
    for job in jobs:
        data = yaml_safe_load(job.definition)
        params = data["protocols"]["lava-multinode"]
        # target_group is random per-submission; normalise for comparison.
        params["target_group"] = "replaced"
        if not group_size:
            group_size = params["group_size"]
        if job.device_role == "host":
            self.assertFalse(job.dynamic_connection)
            self.assertEqual(
                job.requested_device_type.name, device.device_type.name
            )
            self.assertIn(params["sub_id"], [0, 1, 2])
            sub_id.append(params["sub_id"])
            comparison = yaml_safe_load(
                open(os.path.join(path, "qemu-ssh-parent.yaml"), "r").read()
            )
            self.assertIn("protocols", data)
            self.assertIn("lava-multinode", data["protocols"])
            self.assertIn("sub_id", data["protocols"]["lava-multinode"])
            # sub_id differs per job, so remove it before comparing.
            del comparison["protocols"]["lava-multinode"]["sub_id"]
            del data["protocols"]["lava-multinode"]["sub_id"]
            self.assertEqual(data, comparison)
            self.assertEqual(job.device_role, "host")
            host_role.append(job.device_role)
        else:
            # Guests are dynamic connections with no device type of their own.
            self.assertTrue(job.dynamic_connection)
            self.assertTrue(isinstance(params["sub_id"], int))
            sub_id.append(params["sub_id"])
            self.assertIsNone(job.requested_device_type)
            deploy = [action for action in data["actions"] if "deploy" in action][0]
            self.assertEqual(deploy["deploy"]["connection"], "ssh")
            # validate each job
            del data["protocols"]["lava-multinode"]["sub_id"]
            self.assertEqual(
                data,
                yaml_safe_load(
                    open(os.path.join(path, "qemu-ssh-guest-1.yaml"), "r").read()
                ),
            )
            self.assertIsNone(job.requested_device_type)
            self.assertIsNone(job.actual_device)
            host_role.append(data["host_role"])
    # Every job declares (or is) role "host"; sub_ids cover 0..group_size-1.
    self.assertFalse(any(role for role in host_role if role != "host"))
    self.assertEqual(len(sub_id), group_size)
    self.assertEqual(sub_id,
                     list(range(group_size)))
def test_match_devices_without_map(self):
    """
    Without a map, there is no support for knowing which interfaces to
    put onto a VLAN, so these devices cannot be assigned to a VLAN testjob
    See http://localhost/static/docs/v2/vland.html#vland-and-interface-tags-in-lava
    """
    # Fix: dropped the unused locals `devices` and `assignments` that the
    # original built but never read.
    self.bbb3 = self.factory.make_device(self.factory.bbb_type, hostname="bbb-03")
    self.cubie2 = self.factory.make_device(self.factory.cubie_type, hostname="cubie2")
    self.factory.ensure_tag("usb-eth")
    self.factory.ensure_tag("sata")
    self.factory.bbb1.tags.set(Tag.objects.filter(name="usb-eth"))
    self.factory.cubie1.tags.set(Tag.objects.filter(name="sata"))
    user = self.factory.make_user()
    sample_job_file = os.path.join(os.path.dirname(__file__),
                                   "sample_jobs", "bbb-cubie-vlan-group.yaml")
    with open(sample_job_file, "r") as test_support:
        data = yaml_safe_load(test_support)
    vlan_job = TestJob.from_yaml_and_user(yaml_safe_dump(data), user)
    for job in vlan_job:
        # Neither new device carries an interface map, so no VLAN match.
        self.assertFalse(
            match_vlan_interface(self.bbb3, yaml_safe_load(job.definition)))
        self.assertFalse(
            match_vlan_interface(self.cubie2, yaml_safe_load(job.definition)))
def test_bad_input(self): job = TestJob.from_yaml_and_user(self.factory.make_job_yaml(), self.user) # missing {'results'} key result_samples = [ { "definition": "lava", "result": "pass" }, { "case": "test-runscript-overlay", "result": "pass" }, { "case": "test-runscript-overlay", "definition": "lava" }, {}, ] for sample in result_samples: ret = map_scanned_results(results=sample, job=job, markers={}, meta_filename=None) self.assertFalse(ret) self.factory.cleanup()
def test_select_device(self):
    """select_device only returns a device once it is fully assigned.

    Walks through the assignment states: no actual_device, no worker,
    then reserved-with-current-job, asserting only the last succeeds.
    """
    self.restart()
    hostname = 'fakeqemu3'
    device_dict = DeviceDictionary(hostname=hostname)
    device_dict.parameters = self.conf
    device_dict.save()
    device = self.factory.make_device(self.device_type, hostname)
    job = TestJob.from_yaml_and_user(
        self.factory.make_job_yaml(), self.factory.make_user())
    # this uses the system jinja2 path - local changes to the qemu.jinja2
    # will not be available.
    selected = select_device(job, self.dispatchers)
    self.assertIsNone(selected)
    job.actual_device = device
    # Still None: the device has no worker_host yet.
    selected = select_device(job, self.dispatchers)
    self.assertIsNone(selected)
    device.worker_host = self.worker
    selected = select_device(job, self.dispatchers)
    self.assertIsNone(selected)
    # device needs to be in reserved state
    # fake up the assignment which needs a separate test
    job.actual_device = device
    job.save()
    device.current_job = job
    device.status = Device.RESERVED
    device.save()
    selected = select_device(job, self.dispatchers)
    self.assertEqual(selected, device)
def test_from_json_and_user_sets_date_submitted(self):
    """submit_time falls between timestamps taken around submission."""
    lower_bound = datetime.datetime.now()
    job = TestJob.from_json_and_user(
        self.factory.make_job_json(), self.factory.make_user())
    upper_bound = datetime.datetime.now()
    self.assertTrue(lower_bound < job.submit_time < upper_bound)
def test_testjob_manager_view_private(self):
    """visible_by_user honours device-type view permissions and ownership."""
    # Restrict viewing of the qemu device type to group1.
    GroupDeviceTypePermission.objects.assign_perm(
        DeviceType.VIEW_PERMISSION, self.group1, self.qemu_device_type
    )
    self.qemu_job1.is_public = False
    self.qemu_job1.save()
    # A private job submitted by user1 (a member of group1).
    self.user1_job = TestJob.from_yaml_and_user(self.definition, self.user1)
    self.user1_job.is_public = False
    self.user1_job.save()
    # user2 is not in group1, so only the unrestricted bbb jobs are visible.
    self.assertEqual(
        set(TestJob.objects.all().visible_by_user(self.user2)),
        {self.bbb_job1, self.bbb_job2},
    )
    # user1 additionally sees qemu_job2 (group1 permission) and user1_job
    # (submitter).
    self.assertEqual(
        set(TestJob.objects.all().visible_by_user(self.user1)),
        {self.qemu_job2, self.bbb_job1, self.bbb_job2, self.user1_job},
    )
    # AnonymousUser sees only the jobs which are not view restricted.
    self.assertEqual(
        set(TestJob.objects.all().visible_by_user(AnonymousUser())),
        {self.bbb_job1, self.bbb_job2},
    )
def test_set(self):
    """Results carrying a 'set' key are grouped into a named TestSet."""
    job = TestJob.from_yaml_and_user(
        self.factory.make_job_yaml(), self.user)
    # Two results in the same set ("listing") within one suite.
    result_samples = [
        {"case": "linux-INLINE-lscpu", "definition": "smoke-tests-basic", "result": "fail", "set": "listing"},
        {"case": "linux-INLINE-lspci", "definition": "smoke-tests-basic", "result": "fail", "set": "listing"}
    ]
    suite = TestSuite.objects.create(
        job=job,
        name='test-suite'
    )
    suite.save()
    self.assertEqual('/results/%s/test-suite' % job.id, suite.get_absolute_url())
    for sample in result_samples:
        ret = map_scanned_results(results=sample, job=job, meta_filename=None)
        self.assertTrue(ret)
    self.assertEqual(2, TestCase.objects.count())
    val = URLValidator()
    for testcase in TestCase.objects.filter(suite=suite):
        self.assertEqual(testcase.suite, suite)
        self.assertIsNotNone(testcase.name)
        self.assertIsNotNone(testcase.result)
        self.assertIsNone(testcase.metadata)
        # NOTE(review): samples say "fail" yet RESULT_PASS is asserted —
        # presumably the cases this filter matches were stored elsewhere;
        # confirm which rows this loop actually sees.
        self.assertEqual(testcase.result, TestCase.RESULT_PASS)
        self.assertEqual(testcase.test_set.name, 'listing')
        self.assertTrue(testcase.name.startswith('linux-INLINE-'))
        # Each test case must have a URL that passes Django's URLValidator.
        val('http://localhost/%s' % testcase.get_absolute_url())
    self.factory.cleanup()
def test_from_json_and_user_does_not_set_device_type_from_target(self):
    """Naming a target device must not populate requested_device_type."""
    panda = self.factory.ensure_device_type(name='panda')
    self.factory.make_device(device_type=panda, hostname='panda01')
    submitted = TestJob.from_json_and_user(
        self.factory.make_job_json(target='panda01'),
        self.factory.make_user())
    self.assertEqual(None, submitted.requested_device_type)
def test_from_json_and_user_repeat_parameter_zero(self):
    """A repeat count of 0 is stripped from the stored definition."""
    device_type = self.factory.make_device_type('base')
    device = self.factory.make_device(device_type=device_type,
                                      hostname="generic")
    repeat = 0
    job_data = {
        'timeout': 1,
        'target': device.hostname,
        'actions': [
            {
                'command': 'lava_test_shell',
                'parameters': {
                    'repeat': repeat,
                    'testdef_repos': [
                        {
                            'git-repo': 'git://server/test.git',
                            'testdef': 'testdef.yaml'
                        }
                    ],
                }
            }
        ],
    }
    job_json = simplejson.dumps(job_data, sort_keys=True, indent=4 * ' ')
    job = TestJob.from_json_and_user(job_json, self.factory.make_user())
    definition_data = simplejson.loads(job.definition)
    self.assertEqual(len(definition_data['actions']), 1)
    # Neither the original key nor a rewritten repeat_count may survive.
    self.assertNotIn('repeat_count',
                     definition_data['actions'][0]['parameters'])
    self.assertNotIn('repeat', definition_data['actions'][0]['parameters'])
    self.assertEqual(job.status, TestJob.SUBMITTED)
    self.factory.cleanup()
def definition(self, job_id):
    """
    Name
    ----
    `scheduler.jobs.definition` (`job_id`)

    Description
    -----------
    Return the job definition

    Arguments
    ---------
    `job_id`: string
        Job id

    Return value
    ------------
    The job definition or an error.
    Note: for MultiNode jobs, the original MultiNode definition
    is returned.
    """
    try:
        job = TestJob.get_by_job_number(job_id)
    except TestJob.DoesNotExist:
        raise xmlrpc.client.Fault(404, "Job '%s' was not found." % job_id)

    # Visibility is enforced per-user, not just per-job.
    if not job.can_view(self.user):
        raise xmlrpc.client.Fault(
            403, "Job '%s' not available to user '%s'." % (job_id, self.user))

    if job.is_multinode:
        return job.multinode_definition
    return job.original_definition
def test_job(self):
    """Parsing a pipeline job records MetaType/ActionData metadata.

    Runs the JobParser on a submitted job, feeds the described pipeline to
    map_metadata and checks the resulting TestData/ActionData rows.
    """
    user = self.factory.make_user()
    job = TestJob.from_yaml_and_user(
        self.factory.make_job_yaml(), user)
    job_def = yaml.load(job.definition)
    job_ctx = job_def.get('context', {})
    device = Device.objects.get(hostname='fakeqemu1')
    device_config = device.load_device_configuration(job_ctx)  # raw dict
    parser = JobParser()
    obj = PipelineDevice(device_config, device.hostname)
    pipeline_job = parser.parse(job.definition, obj, job.id, None,
                                output_dir='/tmp')
    pipeline_job.pipeline.validate_actions()
    pipeline = pipeline_job.describe()
    map_metadata(yaml.dump(pipeline), job)
    # Exactly one deploy and one boot MetaType should have been created.
    self.assertEqual(MetaType.objects.filter(
        metatype=MetaType.DEPLOY_TYPE).count(), 1)
    self.assertEqual(MetaType.objects.filter(
        metatype=MetaType.BOOT_TYPE).count(), 1)
    count = ActionData.objects.all().count()
    self.assertEqual(TestData.objects.all().count(), 1)
    testdata = TestData.objects.all()[0]
    self.assertEqual(testdata.testjob, job)
    # Every action level belongs to the single TestData row.
    for actionlevel in ActionData.objects.all():
        self.assertEqual(actionlevel.testdata, testdata)
    action_levels = []
    for testdata in job.test_data.all():
        action_levels.extend(testdata.actionlevels.all())
    self.assertEqual(count, len(action_levels))
    # `count` is reused: now the number of deploy-typed ActionData rows.
    count = ActionData.objects.filter(
        meta_type__metatype=MetaType.DEPLOY_TYPE).count()
    self.assertNotEqual(ActionData.objects.filter(
        meta_type__metatype=MetaType.BOOT_TYPE).count(), 0)
    self.assertEqual(ActionData.objects.filter(
        meta_type__metatype=MetaType.UNKNOWN_TYPE).count(), 0)
    for actionlevel in ActionData.objects.filter(
            meta_type__metatype=MetaType.BOOT_TYPE):
        self.assertEqual(actionlevel.testdata.testjob.id, job.id)
    self.assertEqual(ActionData.objects.filter(
        meta_type__metatype=MetaType.DEPLOY_TYPE,
        testdata__testjob=job
    ).count(), count)
def test_case_as_url(self):
    """Test case names must fit the URL pattern; whitespace breaks reverse()."""
    job = TestJob.from_yaml_and_user(
        self.factory.make_job_yaml(), self.user)
    test_dict = {
        'definition': 'unit-test',
        'case': 'unit-test',
        # list of numbers, generates a much longer YAML string than just the count
        'level': '1.3.4.1',
        'result': 'pass'
    }
    # Same character class as the testcase URL route.
    pattern = '[-_a-zA-Z0-9.\\(\\)]+'
    matches = re.search(pattern, test_dict['case'])
    self.assertIsNotNone(matches)  # passes
    self.assertEqual(matches.group(0), test_dict['case'])
    suite, _ = TestSuite.objects.get_or_create(
        name=test_dict["definition"], job=job)
    case, _ = TestCase.objects.get_or_create(suite=suite,
                                             name=test_dict['case'],
                                             result=TestCase.RESULT_PASS)
    # Both the id-only and the job/suite/case reverse routes must resolve.
    self.assertIsNotNone(reverse('lava.results.testcase', args=[case.id]))
    self.assertIsNotNone(reverse('lava.results.testcase',
                                 args=[job.id, suite.name, case.id]))
    self.assertIsNotNone(map_scanned_results(test_dict, job, None))
    # now break the reverse pattern
    test_dict['case'] = 'unit test'  # whitespace in the case name
    matches = re.search(pattern, test_dict['case'])
    self.assertIsNotNone(matches)
    self.assertRaises(NoReverseMatch, reverse, 'lava.results.testcase',
                      args=[job.id, suite.name, test_dict['case']])
def test_pipeline_results(self):
    """Scanned YAML results can be merged into the JobPipeline store."""
    # NOTE(review): the YAML layout inside this literal is significant —
    # it must parse as a one-element list whose 'results' value applies
    # collections.OrderedDict to a list of [name, result] pairs.
    result_sample = """
- results: !!python/object/apply:collections.OrderedDict
  - - [linux-linaro-ubuntu-pwd, pass]
    - [linux-linaro-ubuntu-uname, pass]
    - [linux-linaro-ubuntu-vmstat, pass]
    - [linux-linaro-ubuntu-ifconfig, pass]
    - [linux-linaro-ubuntu-lscpu, pass]
    - [linux-linaro-ubuntu-lsb_release, pass]
    - [linux-linaro-ubuntu-netstat, pass]
    - [linux-linaro-ubuntu-ifconfig-dump, pass]
    - [linux-linaro-ubuntu-route-dump-a, pass]
    - [linux-linaro-ubuntu-route-ifconfig-up-lo, pass]
    - [linux-linaro-ubuntu-route-dump-b, pass]
    - [linux-linaro-ubuntu-route-ifconfig-up, pass]
    - [ping-test, fail]
    - [realpath-check, fail]
    - [ntpdate-check, pass]
    - [curl-ftp, pass]
    - [tar-tgz, pass]
    - [remove-tgz, pass]
"""
    # Expected contents of the store after the merge below.
    result_store = {
        'result_sample': OrderedDict([
            ('linux-linaro-ubuntu-pwd', 'pass'),
            ('linux-linaro-ubuntu-uname', 'pass'),
            ('linux-linaro-ubuntu-vmstat', 'pass'),
            ('linux-linaro-ubuntu-ifconfig', 'pass'),
            ('linux-linaro-ubuntu-lscpu', 'pass'),
            ('linux-linaro-ubuntu-lsb_release', 'pass'),
            ('linux-linaro-ubuntu-netstat', 'pass'),
            ('linux-linaro-ubuntu-ifconfig-dump', 'pass'),
            ('linux-linaro-ubuntu-route-dump-a', 'pass'),
            ('linux-linaro-ubuntu-route-ifconfig-up-lo', 'pass'),
            ('linux-linaro-ubuntu-route-dump-b', 'pass'),
            ('linux-linaro-ubuntu-route-ifconfig-up', 'pass'),
            ('ping-test', 'fail'),
            ('realpath-check', 'fail'),
            ('ntpdate-check', 'pass'),
            ('curl-ftp', 'pass'),
            ('tar-tgz', 'pass'),
            ('remove-tgz', 'pass')])}
    name = "result_sample"
    user = self.factory.make_user()
    job = TestJob.from_yaml_and_user(
        self.factory.make_job_json(), user)
    store = JobPipeline.get(job.id)
    # NOTE(review): !!python/object/apply needs the unsafe yaml.load loader;
    # acceptable only because the input is a test literal, never user data.
    scanned = yaml.load(result_sample)
    if isinstance(scanned, list) and len(scanned) == 1:
        if 'results' in scanned[0] and isinstance(scanned[0], dict):
            store.pipeline.update({name: scanned[0]['results']})
            # too often to save the results?
            store.save()
    self.assertIsNotNone(store.pipeline)
    self.assertIsNot({}, store.pipeline)
    self.assertIs(type(store.pipeline), dict)
    self.assertIn('result_sample', store.pipeline)
    self.assertIs(type(store.pipeline['result_sample']), OrderedDict)
    self.assertEqual(store.pipeline, result_store)
def test_result_store(self):
    """A JobPipeline store exists for a freshly submitted job."""
    submitted = TestJob.from_yaml_and_user(
        self.factory.make_job_yaml(), self.user)
    pipeline_store = JobPipeline.get(submitted.id)
    self.assertIsNotNone(pipeline_store)
    self.assertIsInstance(pipeline_store, JobPipeline)
    self.assertIs(type(pipeline_store.pipeline), dict)
    self.factory.cleanup()
def test_from_json_and_user_errors_on_unsupported_tags(self):
    """
    Tests that tags which do exist but are not defined for the
    any of the devices of the requested type cause the submission
    to be rejected with Devices Unavailable.
    """
    device_type = self.factory.ensure_device_type(name='panda')
    # The only panda device carries neither tag1 nor tag2.
    self.factory.make_device(device_type=device_type, hostname="panda2")
    self.factory.ensure_tag('tag1')
    self.factory.ensure_tag('tag2')
    try:
        TestJob.from_json_and_user(
            self.factory.make_job_json(tags=['tag1', 'tag2']),
            self.factory.make_user())
    except DevicesUnavailableException:
        # Expected: no device supports the requested tags.
        pass
    else:
        self.fail("Device tags failure: job submitted without any devices supporting the requested tags")
def test_make_ssh_guest_yaml(self):
    """Host and guest jobs of a MultiNode ssh group match reference YAML.

    The host job must equal devices/qemu-ssh-parent.yaml and each
    dynamic-connection guest must equal qemu-ssh-guest-1.yaml once the
    per-job sub_id is stripped; sub_ids must be unique and sequential.
    """
    hostname = 'fakeqemu3'
    device = self.factory.make_device(self.device_type, hostname)
    try:
        jobs = TestJob.from_yaml_and_user(
            self.factory.make_job_yaml(), self.factory.make_user())
    except DevicesUnavailableException as exc:
        self.fail(exc)
    sub_id = []       # every sub_id seen, host and guests
    group_size = 0    # taken from the first job's protocol block
    path = os.path.join(os.path.dirname(os.path.join(__file__)), 'devices')
    host_role = []
    for job in jobs:
        data = yaml.load(job.definition)
        params = data['protocols']['lava-multinode']
        # target_group is random per-submission; normalise for comparison.
        params['target_group'] = 'replaced'
        if not group_size:
            group_size = params['group_size']
        if job.device_role == 'host':
            self.assertFalse(job.dynamic_connection)
            self.assertEqual(job.requested_device_type.name,
                             device.device_type.name)
            self.assertIn(params['sub_id'], [0, 1, 2])
            sub_id.append(params['sub_id'])
            comparison = yaml.load(
                open(os.path.join(path, 'qemu-ssh-parent.yaml'), 'r').read())
            self.assertIn('protocols', data)
            self.assertIn('lava-multinode', data['protocols'])
            self.assertIn('sub_id', data['protocols']['lava-multinode'])
            # sub_id differs per job, so strip it before comparing.
            del comparison['protocols']['lava-multinode']['sub_id']
            del data['protocols']['lava-multinode']['sub_id']
            self.assertEqual(
                data,
                comparison
            )
            self.assertEqual(job.device_role, 'host')
            host_role.append(job.device_role)
        else:
            self.assertTrue(job.dynamic_connection)
            # BUGFIX: the arguments were swapped as
            # assertNotIn(sub_id, params['sub_id']), testing membership of a
            # list inside an int. Check instead that this sub_id is unseen.
            self.assertNotIn(params['sub_id'], sub_id)
            sub_id.append(params['sub_id'])
            self.assertIsNone(job.requested_device_type)
            deploy = [action for action in data['actions'] if 'deploy' in action][0]
            self.assertEqual(deploy['deploy']['connection'], 'ssh')
            # validate each job
            del data['protocols']['lava-multinode']['sub_id']
            self.assertEqual(
                data,
                yaml.load(open(os.path.join(path, 'qemu-ssh-guest-1.yaml'), 'r').read())
            )
            self.assertIsNone(job.requested_device_type)
            self.assertIsNone(job.actual_device)
            host_role.append(data['host_role'])
    # Every job declares (or is) role "host"; sub_ids cover 0..group_size-1.
    self.assertFalse(any(role for role in host_role if role != 'host'))
    self.assertEqual(len(sub_id), group_size)
    self.assertEqual(sub_id, list(range(group_size)))
def get_testcase_results_csv(self, job_id, suite_name, case_name):
    """
    Name
    ----
    `get_testcase_results_csv` (`job_id`, `suite_name`, `case_name`)

    Description
    -----------
    Get the test case results of given job id, suite and test case
    name in CSV format.

    Arguments
    ---------
    `job_id`: string
        Job id for which the results are required.
    `suite_name`: string
        Name of the suite for which the results are required.
    `case_name`: string
        Name of the test case for which the results are required.

    Return value
    ------------
    This function returns an XML-RPC structures of test case results
    in CSV format, provided the user is authenticated with an username
    and token.
    """
    self._authenticate()
    if not job_id:
        raise xmlrpclib.Fault(400, "Bad request: TestJob id was not "
                              "specified.")
    try:
        job = TestJob.get_by_job_number(job_id)

        if not job.can_view(self.user):
            raise xmlrpclib.Fault(
                401, "Permission denied for user to job %s" % job_id)
        # NOTE(review): BytesIO + csv matches Python 2 semantics (this
        # module uses xmlrpclib); under Python 3 csv writes str and would
        # need io.StringIO — confirm the target interpreter.
        output = io.BytesIO()
        writer = csv.DictWriter(
            output,
            quoting=csv.QUOTE_ALL,
            extrasaction='ignore',
            fieldnames=testcase_export_fields())
        writer.writeheader()
        # One header row plus the single matching test case row.
        test_suite = job.testsuite_set.get(name=suite_name)
        test_case = test_suite.testcase_set.get(name=case_name)
        writer.writerow(export_testcase(test_case))
    except TestJob.DoesNotExist:
        raise xmlrpclib.Fault(404, "Specified job not found.")
    except TestSuite.DoesNotExist:
        raise xmlrpclib.Fault(404, "Specified test suite not found.")
    except TestCase.DoesNotExist:
        raise xmlrpclib.Fault(404, "Specified test case not found.")

    return output.getvalue()
def test_from_json_and_user_can_submit_to_anonymous(self):
    """A normal user may submit to an anonymous bundle stream."""
    submitter = self.factory.make_user()
    anon_user = User.objects.get_or_create(username="******")[0]
    stream = BundleStream.objects.create(
        slug='anonymous', is_anonymous=True, user=anon_user,
        is_public=True)
    stream.save()
    job_json = self.make_job_json_for_stream_name('/anonymous/anonymous/')
    job = TestJob.from_json_and_user(job_json, submitter)
    self.assertEqual(submitter, job.submitter)
def make_testjob(self, definition=None, submitter=None, **kwargs):
    """Create and save a TestJob, defaulting definition and submitter.

    Parameters
    ----------
    definition : str, optional
        JSON job definition; a fresh one is generated when omitted.
    submitter : User, optional
        Submitting user; a fresh user is created when omitted.
    """
    if definition is None:
        definition = self.make_job_json()
    if submitter is None:
        submitter = self.make_user()
    # NOTE(review): kwargs['user'] is populated here but kwargs is never
    # passed to from_json_and_user below — looks like dead code; confirm
    # whether kwargs was meant to be forwarded.
    if 'user' not in kwargs:
        kwargs['user'] = submitter
    testjob = TestJob.from_json_and_user(definition, submitter)
    testjob.save()
    return testjob
def test_from_json_and_user_sets_tag_from_device_tags(self):
    """Requesting an existing device tag attaches it to the job."""
    panda_type = self.factory.ensure_device_type(name='panda')
    self.factory.ensure_tag('tag')
    device_tags = list(Tag.objects.filter(name='tag'))
    self.factory.make_device(device_type=panda_type, hostname="panda1",
                             tags=device_tags)
    job = TestJob.from_json_and_user(
        self.factory.make_job_json(tags=['tag']),
        self.factory.make_user())
    self.assertEqual({tag.name for tag in job.tags.all()}, {'tag'})
def test_from_json_and_user_sets_is_public_from_bundlestream(self):
    """A private bundle stream makes the submitted job private too."""
    restricted_group = Group.objects.create(name='group')
    submitter = self.factory.make_user()
    submitter.groups.add(restricted_group)
    stream = BundleStream.objects.create(
        group=restricted_group, slug='blah', is_public=False,
        is_anonymous=False)
    stream.save()
    job_json = self.make_job_json_for_stream_name(stream.pathname)
    job = TestJob.from_json_and_user(job_json, submitter)
    self.assertEqual(False, job.is_public)
def test_new_pipeline_store(self):
    """JobPipeline.get returns a store only for known job ids."""
    submitter = self.factory.make_user()
    job = TestJob.from_yaml_and_user(
        self.factory.make_job_json(), submitter)
    # An unknown key yields no store at all.
    self.assertIsNone(JobPipeline.get('foo'))
    pipeline_store = JobPipeline.get(job.id)
    self.assertIsNotNone(pipeline_store)
    self.assertIsInstance(pipeline_store, JobPipeline)
    self.assertIs(type(pipeline_store.pipeline), dict)
def show(self, job_id):
    """
    Name
    ----
    `scheduler.jobs.show` (`job_id`)

    Description
    -----------
    Show job details

    Arguments
    ---------
    `job_id`: string
        Job id

    Return value
    ------------
    This function returns a dictionary of details about the
    specified test job.
    """
    try:
        job = TestJob.get_by_job_number(job_id)
    except TestJob.DoesNotExist:
        raise xmlrpclib.Fault(
            404, "Job '%s' was not found." % job_id)

    # Visibility is enforced per-user, not just per-job.
    if not job.can_view(self.user):
        raise xmlrpclib.Fault(
            403, "Job '%s' not available to user '%s'." % (job_id, self.user))

    # Either field may legitimately be absent (e.g. dynamic connections).
    device_hostname = None
    if job.actual_device is not None:
        device_hostname = job.actual_device.hostname

    device_type = None
    if job.requested_device_type is not None:
        device_type = job.requested_device_type.name

    return {"id": job.display_id,
            "description": job.description,
            "device": device_hostname,
            "device_type": device_type,
            "health_check": job.health_check,
            "pipeline": True,
            "health": job.get_health_display(),
            "state": job.get_state_display(),
            "submitter": job.submitter.username,
            "submit_time": job.submit_time,
            "start_time": job.start_time,
            "end_time": job.end_time,
            "tags": [t.name for t in job.tags.all()],
            "visibility": job.get_visibility_display(),
            "failure_comment": job.failure_comment,
            }
def test_from_yaml_and_user_sets_multiple_tag_from_device_tags(self):
    """Requesting several device tags attaches all of them to the job."""
    device_tags = [
        self.factory.ensure_tag('tag1'),
        self.factory.ensure_tag('tag2'),
    ]
    self.factory.make_device(self.device_type, hostname='fakeqemu1',
                             tags=device_tags)
    job = TestJob.from_yaml_and_user(
        self.factory.make_job_json(tags=['tag1', 'tag2']),
        self.factory.make_user())
    self.assertEqual({tag.name for tag in job.tags.all()},
                     {'tag1', 'tag2'})