def test_extra_options(self):
    """Extra qemu options injected via the device dict must end up as strings.

    The raw YAML list deliberately contains an integer (1) so that the
    conversion of every element of ``sub_command`` to ``str`` can be checked.
    """
    device = NewDevice(os.path.join(os.path.dirname(__file__), '../devices/kvm01.yaml'))
    kvm_yaml = os.path.join(os.path.dirname(__file__), 'sample_jobs/kvm-inline.yaml')
    with open(kvm_yaml) as sample_job_data:
        job_data = yaml.load(sample_job_data)
    device['actions']['boot']['methods']['qemu']['parameters']['extra'] = yaml.load("""
- -smp
- 1
- -global
- virtio-blk-device.scsi=off
- -device virtio-scsi-device,id=scsi
- --append "console=ttyAMA0 root=/dev/vda rw"
""")
    # yaml.load keeps the bare 1 as an int at this point
    self.assertIsInstance(device['actions']['boot']['methods']['qemu']['parameters']['extra'][1], int)
    parser = JobParser()
    job = parser.parse(yaml.dump(job_data), device, 4212, None, None, None, output_dir='/tmp/')
    job.validate()
    boot_image = [action for action in job.pipeline.actions if action.name == 'boot_image_retry'][0]
    boot_qemu = [action for action in boot_image.internal_pipeline.actions if action.name == 'boot_qemu_image'][0]
    qemu = [action for action in boot_qemu.internal_pipeline.actions if action.name == 'execute-qemu'][0]
    self.assertIsInstance(qemu.sub_command, list)
    for item in qemu.sub_command:
        self.assertIsInstance(item, str)
    self.assertIn('virtio-blk-device.scsi=off', qemu.sub_command)
    # after validation the integer must have been stringified
    self.assertIn('1', qemu.sub_command)
    self.assertNotIn(1, qemu.sub_command)
def test_deployment(self):
    """Validate the cubietruck removable-media deployment.

    Checks the USB media parameters on the device, then verifies the
    namespace data published by the storage-deploy / dd-image actions.
    """
    job_parser = JobParser()
    cubie = NewDevice(os.path.join(os.path.dirname(__file__), '../devices/cubie1.yaml'))
    sample_job_file = os.path.join(os.path.dirname(__file__), 'sample_jobs/cubietruck-removable.yaml')
    with open(sample_job_file) as sample_job_data:
        job = job_parser.parse(sample_job_data, cubie, 4212, None, "", output_dir='/tmp/')
    job.logger = DummyLogger()
    job.validate()
    self.assertIn('usb', cubie['parameters']['media'].keys())
    # the second deploy block in the job is the removable-media one
    deploy_params = [methods for methods in job.parameters['actions'] if 'deploy' in methods.keys()][1]['deploy']
    self.assertIn('device', deploy_params)
    self.assertIn(deploy_params['device'], cubie['parameters']['media']['usb'])
    media_entry = cubie['parameters']['media']['usb'][deploy_params['device']]
    self.assertIn('uuid', media_entry)
    self.assertIn('device_id', media_entry)
    self.assertNotIn('boot_part', media_entry)
    deploy_action = [action for action in job.pipeline.actions if action.name == 'storage-deploy'][0]
    tftp_deploy_action = [action for action in job.pipeline.actions if action.name == 'tftp-deploy'][0]
    self.assertIsNotNone(deploy_action)
    test_dir = deploy_action.get_namespace_data(
        action='test', label='results', key='lava_test_results_dir',
        parameters=tftp_deploy_action.parameters)
    self.assertIsNotNone(test_dir)
    self.assertIn('/lava-', test_dir)
    self.assertIsInstance(deploy_action, MassStorage)
    self.assertIn('image', deploy_action.parameters.keys())
    dd_action = [action for action in deploy_action.internal_pipeline.actions if action.name == 'dd-image'][0]
    self.assertEqual(
        dd_action.boot_params[dd_action.parameters['device']]['uuid'],
        'usb-SanDisk_Ultra_20060775320F43006019-0:0')
    self.assertIsNotNone(dd_action.get_namespace_data(action=dd_action.name, label='u-boot', key='boot_part'))
    self.assertIsNotNone(dd_action.get_namespace_data(action='uboot-from-media', label='uuid', key='boot_part'))
    self.assertEqual(
        '0', '%s' % dd_action.get_namespace_data(action=dd_action.name, label='u-boot', key='boot_part'))
    self.assertIsInstance(dd_action.get_namespace_data(action='uboot-from-media', label='uuid', key='boot_part'), str)
    self.assertEqual('0:1', dd_action.get_namespace_data(action='uboot-from-media', label='uuid', key='boot_part'))
def create_fastboot_job(self, filename, output_dir='/tmp/'):  # pylint: disable=no-self-use
    """Parse the given sample job against the nexus4-01 device and return the job."""
    device = NewDevice(os.path.join(os.path.dirname(__file__), '../devices/nexus4-01.yaml'))
    job_yaml = os.path.join(os.path.dirname(__file__), filename)
    with open(job_yaml) as sample_job_data:
        parser = JobParser()
        job = parser.parse(sample_job_data, device, 4212, None, output_dir=output_dir)
    return job
def test_parameter_support(self):  # pylint: disable=too-many-locals
    """Test definition parameters must be exported into the job metadata."""
    data = self.factory.make_job_data()
    action_block = [block for block in data['actions'] if 'test' in block][0]
    definition = action_block['test']['definitions'][0]
    definition['parameters'] = {
        'VARIABLE_NAME_1': "first variable value",
        'VARIABLE_NAME_2': "second value"
    }
    job = TestJob.from_yaml_and_user(yaml.dump(data), self.user)
    job_def = yaml.load(job.definition)
    job_ctx = job_def.get('context', {})
    device = Device.objects.get(hostname='fakeqemu1')
    device_config = device.load_device_configuration(job_ctx, system=False)  # raw dict
    parser = JobParser()
    obj = PipelineDevice(device_config, device.hostname)
    pipeline_job = parser.parse(job.definition, obj, job.id, None, "", output_dir='/tmp')
    # qemu binary may be absent on the test host - tolerate that here
    allow_missing_path(pipeline_job.pipeline.validate_actions, self, 'qemu-system-x86_64')
    pipeline = pipeline_job.describe()
    device_values = _get_device_metadata(pipeline['device'])
    try:
        testdata, _ = TestData.objects.get_or_create(testjob=job)
    except MultipleObjectsReturned:
        self.fail('multiple objects')
    for key, value in device_values.items():
        if not key or not value:
            continue
        testdata.attributes.create(name=key, value=value)
    retval = _get_job_metadata(pipeline['job']['actions'])
    self.assertIn('test.0.common.definition.parameters.VARIABLE_NAME_2', retval)
    self.assertIn('test.0.common.definition.parameters.VARIABLE_NAME_1', retval)
    self.assertEqual(retval['test.0.common.definition.parameters.VARIABLE_NAME_1'], 'first variable value')
    self.assertEqual(retval['test.0.common.definition.parameters.VARIABLE_NAME_2'], 'second value')
def test_job_parameters(self):
    """
    Test that the job parameters match expected structure
    """
    self.maxDiff = None  # pylint: disable=invalid-name
    job_parser = JobParser()
    cubie = NewDevice(os.path.join(os.path.dirname(__file__), '../devices/cubie1.yaml'))
    sample_job_file = os.path.join(os.path.dirname(__file__), 'sample_jobs/cubietruck-removable.yaml')
    with open(sample_job_file) as sample_job_data:
        job = job_parser.parse(sample_job_data, cubie, 4212, None, None, None, output_dir='/tmp/')
        try:
            job.validate()
        except JobError:
            self.fail(job.pipeline.errors)
        sample_job_data.close()
    description_ref = pipeline_reference('cubietruck-removable.yaml')
    self.assertEqual(description_ref, job.pipeline.describe(False))
    mass_storage = None  # deploy
    for action in job.pipeline.actions:
        if isinstance(action, DeployAction) and isinstance(action, MassStorage):
            self.assertTrue(action.valid)
            agent = action.parameters['download']['tool']
            # needs to be a full path but on the device, so avoid os.path
            self.assertTrue(agent.startswith('/'))
            self.assertIn(action.parameters['device'], job.device['parameters']['media']['usb'])
            mass_storage = action
    self.assertIsNotNone(mass_storage)
    self.assertIn('device', mass_storage.parameters)
    self.assertIn(mass_storage.parameters['device'], cubie['parameters']['media']['usb'])
    self.assertIsNotNone(mass_storage.get_common_data('u-boot', 'device'))
    u_boot_params = cubie['actions']['boot']['methods']['u-boot']
    self.assertEqual(mass_storage.get_common_data('bootloader_prompt', 'prompt'),
                     u_boot_params['parameters']['bootloader_prompt'])
def test_device_parser(self):
    """Parse a u-boot ramdisk job against bbb-01 and validate the UBootAction.

    Checks device attribute access (power_state), the structure of the
    u-boot boot method configuration, and that the UBootInterrupt action
    sees the expected device commands.
    """
    job_parser = JobParser()
    device = NewDevice(
        os.path.join(os.path.dirname(__file__), '../devices/bbb-01.yaml'))
    # power_state is exposed both as a dict key and as an attribute;
    # hostname is a dict key only, not an attribute.
    self.assertIn('power_state', device)
    self.assertEqual(device.power_state, 'off')
    self.assertTrue(hasattr(device, 'power_state'))
    self.assertFalse(hasattr(device, 'hostname'))
    self.assertIn('hostname', device)
    sample_job_file = os.path.join(os.path.dirname(__file__), 'sample_jobs/uboot-ramdisk.yaml')
    with open(sample_job_file) as sample_job_data:
        job = job_parser.parse(sample_job_data, device, 4212, None, None, None)
    uboot_action = None
    for action in job.pipeline.actions:
        if isinstance(action, DeployAction):
            self.assertIn('ramdisk', action.parameters)
        if isinstance(action, BootAction):
            self.assertIn('method', action.parameters)
            self.assertEqual('u-boot', action.parameters['method'])
            methods = device['actions']['boot']['methods']
            self.assertIn('ramdisk', methods['u-boot'])
            self.assertIn('bootloader_prompt', methods['u-boot']['parameters'])
            # the command list named by parameters['commands'] must exist
            self.assertIsNotNone(methods[action.parameters['method']][
                action.parameters['commands']]['commands'])
            for line in methods[action.parameters['method']][
                    action.parameters['commands']]['commands']:
                self.assertIsNotNone(line)
            self.assertIsInstance(action, UBootAction)
            uboot_action = action
    self.assertIsNotNone(uboot_action)
    uboot_action.validate()
    self.assertTrue(uboot_action.valid)
    for action in uboot_action.internal_pipeline.actions:
        if isinstance(action, UBootInterrupt):
            # interrupt needs the PDU / serial commands from the device
            self.assertIn('power_on', action.job.device['commands'])
            self.assertIn('hard_reset', action.job.device['commands'])
            self.assertIn('connect', action.job.device['commands'])
            self.assertEqual(
                action.job.device['commands']['connect'].split(' ')[0], 'telnet')
        if isinstance(action, UBootAction):
            self.assertIn('method', action.parameters)
            self.assertIn('commands', action.parameters)
            self.assertIn('ramdisk', action.parameters['u-boot'])
            self.assertIn(action.parameters['commands'],
                          action.parameters[action.parameters['method']])
            self.assertIn(
                'commands', action.parameters[action.parameters['method']][
                    action.parameters['commands']])
            self.assertIsNotNone(action.parameters['u-boot']['ramdisk'])
            self.assertTrue(
                type(action.parameters['u-boot']['ramdisk']['commands']) == list)
            self.assertTrue(
                len(action.parameters['u-boot']['ramdisk']['commands']) > 2
            )
def create_bbb_job(self, filename, output_dir="/tmp/"):  # pylint: disable=no-self-use
    """Parse the given sample job against the bbb-01 device and return the job."""
    device = NewDevice(os.path.join(os.path.dirname(__file__), "../devices/bbb-01.yaml"))
    job_yaml = os.path.join(os.path.dirname(__file__), filename)
    with open(job_yaml) as sample_job_data:
        parser = JobParser()
        job = parser.parse(sample_job_data, device, 4212, None, output_dir=output_dir)
    return job
def test_configure(self):
    """Configure the vland protocol and verify its parameters.

    Configures once from the job device, then configures again with a
    second device (bbb2) to confirm the parameters are unchanged when the
    protocol has already been configured within the same job.
    """
    with open(self.filename) as yaml_data:
        alpha_data = yaml.load(yaml_data)
    self.assertIn('protocols', alpha_data)
    self.assertTrue(VlandProtocol.accepts(alpha_data))
    vprotocol = VlandProtocol(alpha_data, self.job_id)
    vprotocol.set_up()
    with open(self.filename) as sample_job_data:
        parser = JobParser()
        job = parser.parse(sample_job_data, self.device, 4212, None, "", output_dir='/tmp/')
    ret = vprotocol.configure(self.device, job)
    if not ret:
        print(vprotocol.errors)
    self.assertTrue(ret)
    # each (switch, port) pair must map to exactly one vlan name
    nodes = {}
    for name in vprotocol.names:
        vlan = vprotocol.params[name]
        # self.assertNotIn('tags', vlan)
        uid = ' '.join([vlan['switch'], str(vlan['port'])])
        nodes[uid] = name
    self.assertEqual(len(nodes.keys()), len(vprotocol.names))
    self.assertIn('vlan_one', vprotocol.names)
    self.assertNotIn('vlan_two', vprotocol.names)
    self.assertIn('switch', vprotocol.params['vlan_one'])
    self.assertIn('port', vprotocol.params['vlan_one'])
    self.assertIsNotNone(vprotocol.multinode_protocol)
    bbb2 = NewDevice(
        os.path.join(os.path.dirname(__file__), '../devices/bbb-01.yaml'))
    bbb2['parameters']['interfaces']['eth0']['switch'] = '192.168.0.2'
    bbb2['parameters']['interfaces']['eth0']['port'] = '6'
    bbb2['parameters']['interfaces']['eth1']['switch'] = '192.168.0.2'
    bbb2['parameters']['interfaces']['eth1']['port'] = '4'
    expected_params = {
        'vlan_one': {
            'switch': '192.168.0.1',
            'iface': 'eth1',
            'port': 7,
            'tags': ['100M', 'RJ45', '10M']
        }
    }
    self.assertEqual(vprotocol.params, expected_params)
    # already configured the vland protocol in the same job
    self.assertTrue(vprotocol.configure(bbb2, job))
    self.assertEqual(vprotocol.params, expected_params)
    self.assertTrue(vprotocol.valid)
    self.assertEqual(vprotocol.names, {'vlan_one': '4212vlanone'})
def test_job(self):
    """Parse the vland alpha job and check protocol setup and timeouts."""
    with open(self.filename) as yaml_data:
        alpha_data = yaml.load(yaml_data)
    self.assertIn('protocols', alpha_data)
    self.assertIn(VlandProtocol.name, alpha_data['protocols'])
    with open(self.filename) as sample_job_data:
        parser = JobParser()
        job = parser.parse(sample_job_data, self.device, 4212, None, output_dir='/tmp/')
    description_ref = pipeline_reference('bbb-group-vland-alpha.yaml')
    self.assertEqual(description_ref, job.pipeline.describe(False))
    job.validate()
    self.assertNotEqual(
        [],
        [protocol.name for protocol in job.protocols if protocol.name == MultinodeProtocol.name])
    ret = {"message": {"kvm01": {"vlan_name": "name", "vlan_tag": 6}}, "response": "ack"}
    self.assertEqual(
        ('name', 6),
        (ret['message']['kvm01']['vlan_name'], ret['message']['kvm01']['vlan_tag'],))
    self.assertIn('protocols', job.parameters)
    self.assertIn(VlandProtocol.name, job.parameters['protocols'])
    self.assertIn(MultinodeProtocol.name, job.parameters['protocols'])
    vprotocol = [vprotocol for vprotocol in job.protocols if vprotocol.name == VlandProtocol.name][0]
    self.assertTrue(vprotocol.valid)
    self.assertEqual(vprotocol.names, {'vlan_one': 'arbitraryg000', 'vlan_two': 'arbitraryg001'})
    # check_timeout only accepts a dict carrying a deploy_vlans request
    self.assertFalse(vprotocol.check_timeout(120, {'request': 'no call'}))
    self.assertRaises(JobError, vprotocol.check_timeout, 60, 'deploy_vlans')
    self.assertRaises(JobError, vprotocol.check_timeout, 60, {'request': 'deploy_vlans'})
    self.assertTrue(vprotocol.check_timeout(120, {'request': 'deploy_vlans'}))
    for vlan_name in job.parameters['protocols'][VlandProtocol.name]:
        if vlan_name == 'yaml_line':
            continue
        self.assertIn(vlan_name, vprotocol.params)
        self.assertIn('switch', vprotocol.params[vlan_name])
        self.assertIn('port', vprotocol.params[vlan_name])
def test_device_environment(self):
    """Device environment overrides must reach the export-device-env action."""
    data = """
# YAML syntax.
overrides:
 DEBEMAIL: "*****@*****.**"
 DEBFULLNAME: "Neil Williams"
    """
    job_parser = JobParser()
    device = NewDevice(os.path.join(os.path.dirname(__file__), '../devices/bbb-01.yaml'))
    sample_job_file = os.path.join(os.path.dirname(__file__), 'sample_jobs/uboot-ramdisk.yaml')
    with open(sample_job_file) as sample_job_data:
        job = job_parser.parse(
            sample_job_data, device, 4212, None,
            output_dir='/tmp', env_dut=data)
    self.assertEqual(job.parameters['env_dut'], data)
    job.validate()
    boot_actions = [
        action.internal_pipeline.actions
        for action in job.pipeline.actions if action.name == 'uboot-action'][0]
    retry = [action for action in boot_actions if action.name == 'uboot-retry'][0]
    boot_env = [
        action for action in retry.internal_pipeline.actions
        if action.name == 'export-device-env'][0]
    found = False
    for line in boot_env.env:
        if 'DEBFULLNAME' in line:
            found = True
            # assert that the string containing a space still contains that space and is quoted
            self.assertIn('\\\'Neil Williams\\\'', line)
    self.assertTrue(found)
def test_prompt_from_job(self):
    """
    Support setting the prompt after login via the job

    Loads a known YAML, adds a prompt to the dict and re-parses the job.
    Checks that the prompt is available in the expect_shell_connection action.
    """
    factory = Factory()
    job = factory.create_job('sample_jobs/ipxe-ramdisk.yaml')
    job.validate()
    bootloader = [action for action in job.pipeline.actions if action.name == 'bootloader-action'][0]
    retry = [action for action in bootloader.internal_pipeline.actions if action.name == 'bootloader-retry'][0]
    expect = [action for action in retry.internal_pipeline.actions if action.name == 'expect-shell-connection'][0]
    # remember the parameters of the first parse for comparison
    check = expect.parameters
    device = NewDevice(os.path.join(os.path.dirname(__file__), '../devices/x86-01.yaml'))
    extra_yaml = os.path.join(os.path.dirname(__file__), 'sample_jobs/ipxe.yaml')
    with open(extra_yaml) as data:
        sample_job_string = data.read()
    parser = JobParser()
    sample_job_data = yaml.load(sample_job_string)
    boot = [item['boot'] for item in sample_job_data['actions'] if 'boot' in item][0]
    sample_job_string = yaml.dump(sample_job_data)
    job = parser.parse(sample_job_string, device, 4212, None, None, None, output_dir='/tmp')
    job.validate()
    bootloader = [action for action in job.pipeline.actions if action.name == 'bootloader-action'][0]
    retry = [action for action in bootloader.internal_pipeline.actions if action.name == 'bootloader-retry'][0]
    expect = [action for action in retry.internal_pipeline.actions if action.name == 'expect-shell-connection'][0]
    self.assertNotEqual(check, expect.parameters)
def test_uboot_checksum(self):
    """The bbb ramdisk/nfs job declares a sha256sum but no md5sum."""
    device = NewDevice(
        os.path.join(os.path.dirname(__file__), '../devices/bbb-01.yaml'))
    bbb_yaml = os.path.join(os.path.dirname(__file__), 'sample_jobs/bbb-ramdisk-nfs.yaml')
    with open(bbb_yaml) as sample_job_data:
        parser = JobParser()
        job = parser.parse(sample_job_data, device, 4212, None, "", output_dir='/tmp/')
    deploy = [action for action in job.pipeline.actions
              if action.name == 'tftp-deploy'][0]
    download = [action for action in deploy.internal_pipeline.actions
                if action.name == 'download-retry'][0]
    helper = [action for action in download.internal_pipeline.actions
              if action.name == 'file-download'][0]
    remote = helper.parameters[helper.key]
    self.assertIsNone(remote.get('md5sum', None))
    self.assertIsNotNone(remote.get('sha256sum', None))
def test_secondary_media(self):
    """
    Test UBootSecondaryMedia validation

    Checks that the media parameters (kernel, ramdisk, dtb, root uuid)
    are published to the common data and that the boot partition
    reference is assembled from the device_id and boot_part.
    """
    job_parser = JobParser()
    cubie = NewDevice(
        os.path.join(os.path.dirname(__file__), '../devices/cubie1.yaml'))
    sample_job_file = os.path.join(
        os.path.dirname(__file__), 'sample_jobs/cubietruck-removable.yaml')
    # was: bare open() with no close - use a context manager so the
    # file handle is always released
    with open(sample_job_file) as sample_job_data:
        job = job_parser.parse(sample_job_data, cubie, 4212, None, output_dir='/tmp/')
    job.validate()
    u_boot_media = job.pipeline.actions[1].internal_pipeline.actions[0]
    self.assertIsInstance(u_boot_media, UBootSecondaryMedia)
    self.assertEqual([], u_boot_media.errors)
    self.assertEqual(u_boot_media.parameters['kernel'], '/boot/vmlinuz-3.16.0-4-armmp-lpae')
    self.assertEqual(
        u_boot_media.parameters['kernel'],
        u_boot_media.get_common_data('file', 'kernel'))
    self.assertEqual(
        u_boot_media.parameters['ramdisk'],
        u_boot_media.get_common_data('file', 'ramdisk'))
    self.assertEqual(
        u_boot_media.parameters['dtb'],
        u_boot_media.get_common_data('file', 'dtb'))
    self.assertEqual(
        u_boot_media.parameters['root_uuid'],
        u_boot_media.get_common_data('uuid', 'root'))
    part_reference = '%s:%s' % (
        job.device['parameters']['media']['usb'][
            u_boot_media.get_common_data('u-boot', 'device')]['device_id'],
        u_boot_media.parameters['boot_part'])
    self.assertEqual(part_reference, u_boot_media.get_common_data('uuid', 'boot_part'))
    self.assertEqual(part_reference, "0:1")
def test_compatibility(self):
    """Check job/parser compatibility handling against DeployImages.

    A higher declared compatibility than the pipeline supports must raise
    JobError; equal or zero compatibility must parse cleanly.
    """
    factory = Factory()
    job = factory.create_kvm_job('sample_jobs/kvm.yaml', mkdtemp())
    pipe = job.describe()
    self.assertEqual(pipe['compatibility'], DeployImages.compatibility)
    self.assertEqual(job.compatibility, DeployImages.compatibility)
    kvm_yaml = os.path.join(os.path.dirname(__file__), 'sample_jobs/kvm.yaml')
    # was: yaml.load(open(...)) which leaked the file handle
    with open(kvm_yaml, 'r') as kvm_data:
        job_def = yaml.load(kvm_data)
    job_def['compatibility'] = job.compatibility
    parser = JobParser()
    device = NewDevice(os.path.join(os.path.dirname(__file__), '../devices/kvm01.yaml'))
    try:
        job = parser.parse(yaml.dump(job_def), device, 4212, None, output_dir=mkdtemp())
    except NotImplementedError:
        # some deployments listed in basics.yaml are not implemented yet
        pass
    self.assertIsNotNone(job)
    # one more than the supported level must be rejected
    job_def['compatibility'] = job.compatibility + 1
    self.assertRaises(
        JobError, parser.parse,
        yaml.dump(job_def), device, 4212, None, mkdtemp()
    )
    # zero is always acceptable
    job_def['compatibility'] = 0
    try:
        job = parser.parse(yaml.dump(job_def), device, 4212, None, output_dir=mkdtemp())
    except NotImplementedError:
        # some deployments listed in basics.yaml are not implemented yet
        pass
    self.assertIsNotNone(job)
def test_panda_lxc_template(self):
    """Render the panda jinja2 template and parse an lxc job against it."""
    # silence the unittest and dispatcher loggers for this test
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    logger = logging.getLogger('unittests')
    logger.disabled = True
    logger.propagate = False
    logger = logging.getLogger('dispatcher')
    logging.disable(logging.DEBUG)
    logger.disabled = True
    logger.propagate = False
    data = """{% extends 'panda.jinja2' %}
{% set power_off_command = '/usr/local/lab-scripts/snmp_pdu_control --hostname pdu15 --command off --port 07' %}
{% set hard_reset_command = '/usr/local/lab-scripts/snmp_pdu_control --hostname pdu15 --command reboot --port 07' %}
{% set connection_command = 'telnet serial4 7010' %}
{% set power_on_command = '/usr/local/lab-scripts/snmp_pdu_control --hostname pdu15 --command on --port 07' %}"""
    self.assertTrue(self.validate_data('staging-panda-01', data))
    test_template = prepare_jinja_template('staging-panda-01', data, system_path=self.system)
    rendered = test_template.render()
    template_dict = yaml.load(rendered)
    # write the rendered device dict to a temporary file for NewDevice
    # NOTE(review): os.write with a str assumes a Python 2 runtime - confirm
    fdesc, device_yaml = tempfile.mkstemp()
    os.write(fdesc, yaml.dump(template_dict))
    panda = NewDevice(device_yaml)
    lxc_yaml = os.path.join(os.path.dirname(__file__), 'panda-lxc-aep.yaml')
    with open(lxc_yaml) as sample_job_data:
        parser = JobParser()
        job = parser.parse(sample_job_data, panda, 4577, None, "", output_dir='/tmp')
    os.close(fdesc)
    job.validate()
def test_prompt_from_job(self):  # pylint: disable=too-many-locals
    """
    Support setting the prompt after login via the job

    Loads a known YAML, adds a prompt to the dict and re-parses the job.
    Checks that the prompt is available in the expect_shell_connection action.
    """
    factory = Factory()
    job = factory.create_bbb_job('sample_jobs/uboot.yaml')
    job.validate()
    uboot = [action for action in job.pipeline.actions if action.name == 'uboot-action'][0]
    retry = [action for action in uboot.internal_pipeline.actions if action.name == 'uboot-retry'][0]
    expect = [action for action in retry.internal_pipeline.actions if action.name == 'expect-shell-connection'][0]
    # keep the first-parse parameters for the final comparison
    check = expect.parameters
    device = NewDevice(os.path.join(os.path.dirname(__file__), '../devices/bbb-01.yaml'))
    extra_yaml = os.path.join(os.path.dirname(__file__), 'sample_jobs/uboot.yaml')
    with open(extra_yaml) as data:
        sample_job_string = data.read()
    parser = JobParser()
    sample_job_data = yaml.load(sample_job_string)
    boot = [item['boot'] for item in sample_job_data['actions'] if 'boot' in item][0]
    self.assertIsNotNone(boot)
    sample_job_string = yaml.dump(sample_job_data)
    job = parser.parse(sample_job_string, device, 4212, None, output_dir='/tmp')
    job.validate()
    uboot = [action for action in job.pipeline.actions if action.name == 'uboot-action'][0]
    retry = [action for action in uboot.internal_pipeline.actions if action.name == 'uboot-retry'][0]
    expect = [action for action in retry.internal_pipeline.actions if action.name == 'expect-shell-connection'][0]
    self.assertNotEqual(check, expect.parameters)
def test_secondary_media(self):
    """
    Test UBootSecondaryMedia validation
    """
    job_parser = JobParser()
    cubie = NewDevice(os.path.join(os.path.dirname(__file__), '../devices/cubie1.yaml'))
    sample_job_file = os.path.join(os.path.dirname(__file__), 'sample_jobs/cubietruck-removable.yaml')
    sample_job_data = open(sample_job_file)
    job = job_parser.parse(sample_job_data, cubie, 4212, None, None, None, output_dir='/tmp/')
    job.validate()
    sample_job_data.close()
    # the second uboot-action carries the secondary media action first
    u_boot_media = [
        action for action in job.pipeline.actions
        if action.name == 'uboot-action'][1].internal_pipeline.actions[0]
    self.assertIsInstance(u_boot_media, UBootSecondaryMedia)
    self.assertEqual([], u_boot_media.errors)
    self.assertEqual(u_boot_media.parameters['kernel'], '/boot/vmlinuz-3.16.0-4-armmp-lpae')
    # each media parameter must be mirrored into the common data
    self.assertEqual(
        u_boot_media.parameters['kernel'], u_boot_media.get_common_data('file', 'kernel'))
    self.assertEqual(
        u_boot_media.parameters['ramdisk'], u_boot_media.get_common_data('file', 'ramdisk'))
    self.assertEqual(
        u_boot_media.parameters['dtb'], u_boot_media.get_common_data('file', 'dtb'))
    self.assertEqual(
        u_boot_media.parameters['root_uuid'], u_boot_media.get_common_data('uuid', 'root'))
    part_reference = '%s:%s' % (
        job.device['parameters']['media']['usb'][u_boot_media.get_common_data('u-boot', 'device')]['device_id'],
        u_boot_media.parameters['boot_part']
    )
    self.assertEqual(part_reference, u_boot_media.get_common_data('uuid', 'boot_part'))
    self.assertEqual(part_reference, "0:1")
def test_compatibility(self):
    """
    Test compatibility support.

    The class to use in the comparison will change according to which
    class is related to the change which caused the compatibility to be
    modified.
    """
    factory = Factory()
    job = factory.create_kvm_job('sample_jobs/kvm.yaml', mkdtemp())
    pipe = job.describe()
    self.assertEqual(pipe['compatibility'], ExpectShellSession.compatibility)
    self.assertEqual(job.compatibility, ExpectShellSession.compatibility)
    kvm_yaml = os.path.join(os.path.dirname(__file__), 'sample_jobs/kvm.yaml')
    # was: yaml.load(open(...)) which leaked the file handle
    with open(kvm_yaml, 'r') as kvm_data:
        job_def = yaml.load(kvm_data)
    job_def['compatibility'] = job.compatibility
    parser = JobParser()
    device = NewDevice(os.path.join(os.path.dirname(__file__), '../devices/kvm01.yaml'))
    try:
        job = parser.parse(yaml.dump(job_def), device, 4212, None, output_dir=mkdtemp())
    except NotImplementedError:
        # some deployments listed in basics.yaml are not implemented yet
        pass
    self.assertIsNotNone(job)
    # one above the supported level must raise JobError
    job_def['compatibility'] = job.compatibility + 1
    self.assertRaises(
        JobError, parser.parse,
        yaml.dump(job_def), device, 4212, None, mkdtemp()
    )
    # zero is always acceptable
    job_def['compatibility'] = 0
    try:
        job = parser.parse(yaml.dump(job_def), device, 4212, None, output_dir=mkdtemp())
    except NotImplementedError:
        # some deployments listed in basics.yaml are not implemented yet
        pass
    self.assertIsNotNone(job)
def test_device_environment_validity(self):  # pylint: disable=invalid-name
    """
    Use non-YAML syntax a bit like existing device config syntax.
    Ensure this syntax is picked up as invalid.
    """
    data = """
# YAML syntax.
overrides:
 DEBEMAIL = "*****@*****.**"
 DEBFULLNAME: "Neil Williams"
    """
    job_parser = JobParser()
    device = NewDevice(
        os.path.join(os.path.dirname(__file__), '../devices/bbb-01.yaml'))
    sample_job_file = os.path.join(os.path.dirname(__file__), 'sample_jobs/uboot-ramdisk.yaml')
    with open(sample_job_file) as sample_job_data:
        job = job_parser.parse(
            sample_job_data, device, 4212, None, "",
            output_dir='/tmp', env_dut=data)
        job.logger = DummyLogger()
    # the raw data is stored even though it is invalid
    self.assertEqual(job.parameters['env_dut'], data)
    # validation must reject the '=' syntax in the overrides block
    with self.assertRaises(JobError):
        job.validate()
def create_ssh_job(self, filename, output_dir=None):  # pylint: disable=no-self-use
    """Parse the given sample job against the ssh-host-01 device and return the job."""
    device = NewDevice(os.path.join(os.path.dirname(__file__), '../devices/ssh-host-01.yaml'))
    job_yaml = os.path.join(os.path.dirname(__file__), filename)
    with open(job_yaml) as sample_job_data:
        parser = JobParser()
        job = parser.parse(sample_job_data, device, 0, socket_addr=None, output_dir=output_dir)
    return job
def test_deployment(self):
    """Validate the removable-media deployment (legacy common_data API).

    Checks the USB media parameters on the device and the boot_part data
    published by the dd-image action.
    """
    job_parser = JobParser()
    cubie = NewDevice(os.path.join(os.path.dirname(__file__), "../devices/cubie1.yaml"))
    sample_job_file = os.path.join(os.path.dirname(__file__), "sample_jobs/cubietruck-removable.yaml")
    # was: bare open() never closed - use a context manager to avoid
    # leaking the file handle
    with open(sample_job_file) as sample_job_data:
        job = job_parser.parse(sample_job_data, cubie, 4212, None, output_dir="/tmp/")
    job.validate()
    self.assertIn("usb", cubie["parameters"]["media"].keys())
    deploy_params = [methods for methods in job.parameters["actions"] if "deploy" in methods.keys()][0]["deploy"]
    self.assertIn("device", deploy_params)
    self.assertIn(deploy_params["device"], cubie["parameters"]["media"]["usb"])
    self.assertIn("uuid", cubie["parameters"]["media"]["usb"][deploy_params["device"]])
    self.assertIn("device_id", cubie["parameters"]["media"]["usb"][deploy_params["device"]])
    self.assertNotIn("boot_part", cubie["parameters"]["media"]["usb"][deploy_params["device"]])
    deploy_action = job.pipeline.actions[0]
    self.assertIn("lava_test_results_dir", deploy_action.data)
    self.assertIn("/lava-", deploy_action.data["lava_test_results_dir"])
    self.assertIsInstance(deploy_action, MassStorage)
    self.assertIn("image", deploy_action.parameters.keys())
    dd_action = deploy_action.internal_pipeline.actions[1]
    self.assertEqual(
        dd_action.boot_params[dd_action.parameters["device"]]["uuid"],
        "usb-SanDisk_Ultra_20060775320F43006019-0:0"
    )
    self.assertEqual("0", "%s" % dd_action.get_common_data("u-boot", "boot_part"))
    self.assertTrue(type(dd_action.get_common_data("uuid", "boot_part")) is str)
    self.assertEqual("0:1", dd_action.get_common_data("uuid", "boot_part"))
def parse_job_file(self, filename, oob_file):
    """
    Uses the parsed device_config instead of the old Device class
    so it can fail before the Pipeline is made.
    Avoids loading all configuration for all supported devices for every job.

    Returns a (runner, parameters) tuple: the pipeline runner and job
    parameters for pipeline jobs, or the legacy runner and the parsed
    JSON document for everything else.
    """
    if is_pipeline_job(filename):
        # Prepare the pipeline from the file using the parser.
        device = None  # secondary connections do not need a device
        if self.args.target:
            device = NewDevice(self.args.target)  # DeviceParser
        parser = JobParser()
        job = None
        try:
            # was: open() without close - use a context manager so the
            # handle is released; TypeError/AttributeError still cover a
            # missing/None env_dut_path
            with open(self.args.env_dut_path, 'r') as env_fd:
                env_dut = str(env_fd.read())
        except (TypeError, AttributeError):
            env_dut = None
        try:
            with open(filename) as f_in:
                job = parser.parse(
                    f_in, device, self.args.job_id,
                    socket_addr=self.args.socket_addr,
                    output_dir=self.args.output_dir,
                    env_dut=env_dut)
        except JobError as exc:
            # lazy %-style args instead of eager string interpolation
            logging.error("Invalid job submission: %s", exc)
            exit(1)
        # FIXME: NewDevice schema needs a validation parser
        # device.check_config(job)
        return get_pipeline_runner(job), job.parameters
    # everything else is assumed to be JSON
    with open(filename) as json_data:
        return run_legacy_job, json.load(json_data)
def test_job_no_tags(self):
    """A job with empty interface tags must still configure the vland protocol."""
    with open(self.filename) as yaml_data:
        alpha_data = yaml.load(yaml_data)
    # removed tags from original job to simulate job where any interface tags will be acceptable
    for vlan_key, vlan_value in alpha_data['protocols'][VlandProtocol.name].items():
        alpha_data['protocols'][VlandProtocol.name][vlan_key] = {'tags': []}
    self.assertEqual(
        alpha_data['protocols'][VlandProtocol.name],
        {'vlan_one': {'tags': []}}
    )
    parser = JobParser()
    job = parser.parse(yaml.dump(alpha_data), self.device, 4212, None, None, None, output_dir='/tmp/')
    job.validate()
    vprotocol = [vprotocol for vprotocol in job.protocols if vprotocol.name == VlandProtocol.name][0]
    self.assertTrue(vprotocol.valid)
    self.assertEqual(vprotocol.names, {'vlan_one': '4212vlanone'})
    # check_timeout only accepts a dict carrying a deploy_vlans request
    self.assertFalse(vprotocol.check_timeout(120, {'request': 'no call'}))
    self.assertRaises(JobError, vprotocol.check_timeout, 60, 'deploy_vlans')
    self.assertRaises(JobError, vprotocol.check_timeout, 60, {'request': 'deploy_vlans'})
    self.assertTrue(vprotocol.check_timeout(120, {'request': 'deploy_vlans'}))
    for vlan_name in job.parameters['protocols'][VlandProtocol.name]:
        if vlan_name == 'yaml_line':
            continue
        self.assertIn(vlan_name, vprotocol.params)
        self.assertIn('switch', vprotocol.params[vlan_name])
        self.assertIn('port', vprotocol.params[vlan_name])
def test_job(self):
    """Map pipeline metadata to the database and check the ActionData rows."""
    user = self.factory.make_user()
    job = TestJob.from_yaml_and_user(self.factory.make_job_yaml(), user)
    job_def = yaml.load(job.definition)
    job_ctx = job_def.get('context', {})
    device = Device.objects.get(hostname='fakeqemu1')
    device_config = device.load_device_configuration(job_ctx)  # raw dict
    parser = JobParser()
    obj = PipelineDevice(device_config, device.hostname)
    pipeline_job = parser.parse(job.definition, obj, job.id, None, output_dir='/tmp')
    pipeline_job.pipeline.validate_actions()
    pipeline = pipeline_job.describe()
    map_metadata(yaml.dump(pipeline), job)
    # exactly one deploy and one boot metatype are expected
    self.assertEqual(MetaType.objects.filter(metatype=MetaType.DEPLOY_TYPE).count(), 1)
    self.assertEqual(MetaType.objects.filter(metatype=MetaType.BOOT_TYPE).count(), 1)
    count = ActionData.objects.all().count()
    self.assertEqual(TestData.objects.all().count(), 1)
    testdata = TestData.objects.all()[0]
    self.assertEqual(testdata.testjob, job)
    for actionlevel in ActionData.objects.all():
        self.assertEqual(actionlevel.testdata, testdata)
    action_levels = []
    for testdata in job.test_data.all():
        action_levels.extend(testdata.actionlevels.all())
    self.assertEqual(count, len(action_levels))
    count = ActionData.objects.filter(meta_type__metatype=MetaType.DEPLOY_TYPE).count()
    self.assertNotEqual(ActionData.objects.filter(meta_type__metatype=MetaType.BOOT_TYPE).count(), 0)
    self.assertEqual(ActionData.objects.filter(meta_type__metatype=MetaType.UNKNOWN_TYPE).count(), 0)
    for actionlevel in ActionData.objects.filter(meta_type__metatype=MetaType.BOOT_TYPE):
        self.assertEqual(actionlevel.testdata.testjob.id, job.id)
    self.assertEqual(
        ActionData.objects.filter(
            meta_type__metatype=MetaType.DEPLOY_TYPE,
            testdata__testjob=job
        ).count(), count)
def test_job_parameters(self):
    """
    Test that the job parameters match expected structure
    """
    self.maxDiff = None  # pylint: disable=invalid-name
    job_parser = JobParser()
    cubie = NewDevice(os.path.join(os.path.dirname(__file__), '../devices/cubie1.yaml'))
    sample_job_file = os.path.join(os.path.dirname(__file__), 'sample_jobs/cubietruck-removable.yaml')
    with open(sample_job_file) as sample_job_data:
        job = job_parser.parse(sample_job_data, cubie, 4212, None, "", output_dir='/tmp/')
        job.logger = DummyLogger()
        try:
            job.validate()
        except JobError:
            self.fail(job.pipeline.errors)
        sample_job_data.close()
    description_ref = pipeline_reference('cubietruck-removable.yaml')
    self.assertEqual(description_ref, job.pipeline.describe(False))
    mass_storage = None  # deploy
    for action in job.pipeline.actions:
        if isinstance(action, DeployAction) and isinstance(action, MassStorage):
            self.assertTrue(action.valid)
            agent = action.parameters['download']['tool']
            # needs to be a full path but on the device, so avoid os.path
            self.assertTrue(agent.startswith('/'))
            self.assertIn(action.parameters['device'], job.device['parameters']['media']['usb'])
            mass_storage = action
    self.assertIsNotNone(mass_storage)
    self.assertIn('device', mass_storage.parameters)
    self.assertIn(mass_storage.parameters['device'], cubie['parameters']['media']['usb'])
    self.assertIsNotNone(
        mass_storage.get_namespace_data(action='storage-deploy', label='u-boot', key='device'))
    u_boot_params = cubie['actions']['boot']['methods']['u-boot']
    self.assertEqual(
        mass_storage.get_namespace_data(action='uboot-retry', label='bootloader_prompt', key='prompt'),
        u_boot_params['parameters']['bootloader_prompt'])
def test_job_multi(self):
    """Map metadata for a multi-test job definition on fakeqemu1."""
    MetaType.objects.all().delete()
    multi_test_file = os.path.join(os.path.dirname(__file__), 'multi-test.yaml')
    self.assertTrue(os.path.exists(multi_test_file))
    with open(multi_test_file, 'r') as test_support:
        data = test_support.read()
    job = TestJob.from_yaml_and_user(data, self.user)
    job_def = yaml.load(job.definition)
    job_ctx = job_def.get('context', {})
    device = Device.objects.get(hostname='fakeqemu1')
    device_config = device.load_device_configuration(
        job_ctx, system=False)  # raw dict
    parser = JobParser()
    obj = PipelineDevice(device_config, device.hostname)
    pipeline_job = parser.parse(job.definition, obj, job.id, None, "", output_dir='/tmp')
    # the qemu binary may be absent on the test host; tolerate a missing path
    allow_missing_path(pipeline_job.pipeline.validate_actions, self, 'qemu-system-x86_64')
    pipeline = pipeline_job.describe()
    map_metadata(yaml.dump(pipeline), job)
def test_deployment(self):
    """Check storage-deploy parameters for the cubietruck removable-media job."""
    job_parser = JobParser()
    cubie = NewDevice(os.path.join(os.path.dirname(__file__), '../devices/cubie1.yaml'))
    sample_job_file = os.path.join(os.path.dirname(__file__), 'sample_jobs/cubietruck-removable.yaml')
    with open(sample_job_file) as sample_job_data:
        job = job_parser.parse(sample_job_data, cubie, 4212, None, None, None, output_dir='/tmp/')
    job.validate()
    self.assertIn('usb', cubie['parameters']['media'].keys())
    # the second deploy block in the job is the storage deployment
    deploy_params = [methods for methods in job.parameters['actions'] if 'deploy' in methods.keys()][1]['deploy']
    self.assertIn('device', deploy_params)
    self.assertIn(deploy_params['device'], cubie['parameters']['media']['usb'])
    self.assertIn('uuid', cubie['parameters']['media']['usb'][deploy_params['device']])
    self.assertIn('device_id', cubie['parameters']['media']['usb'][deploy_params['device']])
    self.assertNotIn('boot_part', cubie['parameters']['media']['usb'][deploy_params['device']])
    deploy_action = [action for action in job.pipeline.actions if action.name == 'storage-deploy'][0]
    self.assertIn('lava_test_results_dir', deploy_action.data)
    self.assertIn('/lava-', deploy_action.data['lava_test_results_dir'])
    self.assertIsInstance(deploy_action, MassStorage)
    self.assertIn('image', deploy_action.parameters.keys())
    dd_action = [action for action in deploy_action.internal_pipeline.actions if action.name == 'dd-image'][0]
    self.assertEqual(
        dd_action.boot_params[dd_action.parameters['device']]['uuid'],
        'usb-SanDisk_Ultra_20060775320F43006019-0:0')
    # NOTE(review): uses the older get_common_data API; other copies of this
    # test use get_namespace_data — confirm which API this tree provides.
    self.assertEqual('0', '%s' % dd_action.get_common_data('u-boot', 'boot_part'))
    self.assertTrue(type(dd_action.get_common_data('uuid', 'boot_part')) is str)
    self.assertEqual('0:1', dd_action.get_common_data('uuid', 'boot_part'))
def test_primary_interface(self):
    """Validate the lava-vland-overlay action for a job using the primary interface."""
    with open(self.filename) as config_stream:
        job_dict = yaml.load(config_stream)
    for nic in self.device['parameters']['interfaces']:
        # jinja2 processing of tags: [] results in tags:
        if self.device['parameters']['interfaces'][nic]['tags'] == []:
            self.device['parameters']['interfaces'][nic]['tags'] = None
    job = JobParser().parse(yaml.dump(job_dict), self.device, 4212, None, "", output_dir='/tmp/')
    deploy = next(a for a in job.pipeline.actions if a.name == 'tftp-deploy')
    prepare = next(a for a in deploy.internal_pipeline.actions if a.name == 'prepare-tftp-overlay')
    overlay = next(a for a in prepare.internal_pipeline.actions if a.name == 'lava-overlay')
    vland_overlay = next(a for a in overlay.internal_pipeline.actions if a.name == 'lava-vland-overlay')
    vland_overlay.validate()
    job.logger = DummyLogger()
    job.validate()
def test_secondary_media(self):
    """
    Test UBootSecondaryMedia validation
    """
    job_parser = JobParser()
    cubie = NewDevice(
        os.path.join(os.path.dirname(__file__), '../devices/cubie1.yaml'))
    sample_job_file = os.path.join(
        os.path.dirname(__file__), 'sample_jobs/cubietruck-removable.yaml')
    sample_job_data = open(sample_job_file)
    job = job_parser.parse(sample_job_data, cubie, 4212, None, "", output_dir='/tmp/')
    job.logger = DummyLogger()
    job.validate()
    sample_job_data.close()
    # the second uboot-action in the pipeline boots from the secondary media
    u_boot_media = [
        action for action in job.pipeline.actions
        if action.name == 'uboot-action'
    ][1].internal_pipeline.actions[0]
    self.assertIsInstance(u_boot_media, UBootSecondaryMedia)
    self.assertEqual([], u_boot_media.errors)
    self.assertEqual(u_boot_media.parameters['kernel'],
                     '/boot/vmlinuz-3.16.0-4-armmp-lpae')
    # each media parameter must be mirrored into the action's namespace data
    self.assertEqual(
        u_boot_media.parameters['kernel'],
        u_boot_media.get_namespace_data(action=u_boot_media.name, label='file', key='kernel'))
    self.assertEqual(
        u_boot_media.parameters['ramdisk'],
        u_boot_media.get_namespace_data(action=u_boot_media.name, label='file', key='ramdisk'))
    self.assertEqual(
        u_boot_media.parameters['dtb'],
        u_boot_media.get_namespace_data(action=u_boot_media.name, label='file', key='dtb'))
    self.assertEqual(
        u_boot_media.parameters['root_uuid'],
        u_boot_media.get_namespace_data(action=u_boot_media.name, label='uuid', key='root'))
    device = u_boot_media.get_namespace_data(action='storage-deploy', label='u-boot', key='device')
    self.assertIsNotNone(device)
    # boot partition is referenced as <device_id>:<boot_part>
    part_reference = '%s:%s' % (
        job.device['parameters']['media']['usb'][device]['device_id'],
        u_boot_media.parameters['boot_part'])
    self.assertEqual(
        part_reference,
        u_boot_media.get_namespace_data(action=u_boot_media.name, label='uuid', key='boot_part'))
    self.assertEqual(part_reference, "0:1")
def test_device_environment_validity(self):
    """
    Use non-YAML syntax a bit like existing device config syntax.
    Ensure this syntax is picked up as invalid.
    """
    # NOTE(review): line breaks inside this literal were reconstructed from a
    # whitespace-collapsed source — confirm exact whitespace against history.
    data = """
# YAML syntax.
overrides:
 DEBEMAIL = "*****@*****.**"
 DEBFULLNAME: "Neil Williams"
"""
    job_parser = JobParser()
    device = NewDevice(os.path.join(os.path.dirname(__file__), '../devices/bbb-01.yaml'))
    sample_job_file = os.path.join(os.path.dirname(__file__), 'sample_jobs/uboot-ramdisk.yaml')
    with open(sample_job_file) as sample_job_data:
        job = job_parser.parse(
            sample_job_data, device, 4212, None, None, None,
            output_dir='/tmp', env_dut=data)
    # env_dut is stored verbatim on the job parameters
    self.assertEqual(
        job.parameters['env_dut'],
        data
    )
    # the invalid 'overrides' syntax must cause validation to fail
    with self.assertRaises(JobError):
        job.validate()
def test_prompt_from_job(self):  # pylint: disable=too-many-locals
    """
    Support setting the prompt after login via the job

    Loads a known YAML, adds a prompt to the dict and re-parses the job.
    Checks that the prompt is available in the expect_shell_connection action.
    """
    job = self.factory.create_job('sample_jobs/ipxe-ramdisk.yaml')
    job.validate()
    bootloader = [
        action for action in job.pipeline.actions
        if action.name == 'bootloader-action'
    ][0]
    retry = [
        action for action in bootloader.internal_pipeline.actions
        if action.name == 'bootloader-retry'
    ][0]
    expect = [
        action for action in retry.internal_pipeline.actions
        if action.name == 'expect-shell-connection'
    ][0]
    # remember the original parameters to compare after re-parsing
    check = expect.parameters
    device = NewDevice(
        os.path.join(os.path.dirname(__file__), '../devices/x86-01.yaml'))
    extra_yaml = os.path.join(os.path.dirname(__file__), 'sample_jobs/ipxe.yaml')
    with open(extra_yaml) as data:
        sample_job_string = data.read()
    parser = JobParser()
    sample_job_data = yaml.load(sample_job_string)
    boot = [
        item['boot'] for item in sample_job_data['actions'] if 'boot' in item
    ][0]
    self.assertIsNotNone(boot)
    sample_job_string = yaml.dump(sample_job_data)
    job = parser.parse(sample_job_string, device, 4212, None, "", output_dir='/tmp')
    job.logger = DummyLogger()
    job.validate()
    bootloader = [
        action for action in job.pipeline.actions
        if action.name == 'bootloader-action'
    ][0]
    retry = [
        action for action in bootloader.internal_pipeline.actions
        if action.name == 'bootloader-retry'
    ][0]
    expect = [
        action for action in retry.internal_pipeline.actions
        if action.name == 'expect-shell-connection'
    ][0]
    if sys.version < '3':
        # skipping in 3 due to "RecursionError: maximum recursion depth exceeded in comparison"
        self.assertNotEqual(check, expect.parameters)
def test_repositories(self):  # pylint: disable=too-many-locals
    """Check job metadata extracted from the pipeline description of a qemu job."""
    job = TestJob.from_yaml_and_user(self.factory.make_job_yaml(), self.user)
    job_def = yaml.load(job.definition)
    job_ctx = job_def.get('context', {})
    job_ctx.update(
        {'no_kvm': True})  # override to allow unit tests on all types of systems
    device = Device.objects.get(hostname='fakeqemu1')
    device_config = device.load_device_configuration(
        job_ctx, system=False)  # raw dict
    parser = JobParser()
    obj = PipelineDevice(device_config, device.hostname)
    pipeline_job = parser.parse(job.definition, obj, job.id, None, "", output_dir='/tmp')
    # the qemu binary may be absent on the test host; tolerate a missing path
    allow_missing_path(pipeline_job.pipeline.validate_actions, self, 'qemu-system-x86_64')
    pipeline = pipeline_job.describe()
    device_values = _get_device_metadata(pipeline['device'])
    self.assertEqual(device_values, {'target.device_type': 'qemu'})
    # metadata extraction must cope with a missing device_type key
    del pipeline['device']['device_type']
    self.assertNotIn('device_type', pipeline['device'])
    device_values = _get_device_metadata(pipeline['device'])
    try:
        testdata, _ = TestData.objects.get_or_create(testjob=job)
    except (MultipleObjectsReturned):
        self.fail('multiple objects')
    for key, value in device_values.items():
        if not key or not value:
            continue
        testdata.attributes.create(name=key, value=value)
    retval = _get_job_metadata(pipeline['job']['actions'])
    # version attribute varies by install; exclude it from the comparison
    if 'lava-server-version' in retval:
        del retval['lava-server-version']
    self.assertEqual(
        retval,
        {
            'test.1.common.definition.from': 'git',
            'test.0.common.definition.repository': 'git://git.linaro.org/qa/test-definitions.git',
            'test.0.common.definition.name': 'smoke-tests',
            'test.1.common.definition.repository': 'http://git.linaro.org/lava-team/lava-functional-tests.git',
            'boot.0.common.method': 'qemu',
            'test.1.common.definition.name': 'singlenode-advanced',
            'test.0.common.definition.from': 'git',
            'test.0.common.definition.path': 'ubuntu/smoke-tests-basic.yaml',
            'test.1.common.definition.path': 'lava-test-shell/single-node/singlenode03.yaml'
        })
def create_x15_job(self, filename, output_dir='/tmp/'):  # pylint: disable=no-self-use
    """Parse *filename* against the x15-01 device and return the pipeline job."""
    base_dir = os.path.dirname(__file__)
    target = NewDevice(os.path.join(base_dir, '../devices/x15-01.yaml'))
    job_path = os.path.join(base_dir, filename)
    with open(job_path) as job_stream:
        return JobParser().parse(job_stream, target, 4212, None, "", output_dir=output_dir)
def create_uefi_job(self, filename, output_dir=None):  # pylint: disable=no-self-use
    """Parse *filename* against the mustang-uefi device and return the job."""
    base_dir = os.path.dirname(__file__)
    mustang = NewDevice(os.path.join(base_dir, '../devices/mustang-uefi.yaml'))
    with open(os.path.join(base_dir, filename)) as job_stream:
        return JobParser().parse(
            job_stream, mustang, 0,
            socket_addr=None, master_cert=None, slave_cert=None,
            output_dir=output_dir)
def create_job(self, sample_job, device_file, output_dir='/tmp/'):  # pylint: disable=no-self-use
    """Parse *sample_job* against *device_file* and return the pipeline job.

    :param sample_job: job definition path relative to this file
    :param device_file: device configuration path relative to this file
    :param output_dir: directory handed to the parser for job output
    """
    device = NewDevice(os.path.join(os.path.dirname(__file__), device_file))
    # renamed from 'yaml', which shadowed the module-level yaml import
    job_yaml = os.path.join(os.path.dirname(__file__), sample_job)
    with open(job_yaml) as sample_job_data:
        parser = JobParser()
        job = parser.parse(sample_job_data, device, 4212, None, None, None, output_dir=output_dir)
    return job
def create_job(self, sample_job, device_file, output_dir='/tmp/'):  # pylint: disable=no-self-use
    """Parse *sample_job* against *device_file* and return the pipeline job.

    :param sample_job: job definition path relative to this file
    :param device_file: device configuration path relative to this file
    :param output_dir: directory handed to the parser for job output
    """
    device = NewDevice(os.path.join(os.path.dirname(__file__), device_file))
    # renamed from 'yaml', which shadowed the module-level yaml import
    job_yaml = os.path.join(os.path.dirname(__file__), sample_job)
    with open(job_yaml) as sample_job_data:
        parser = JobParser()
        job = parser.parse(sample_job_data, device, 4212, None, "", output_dir=output_dir)
    return job
def create_mustang_job(self, filename, output_dir='/tmp/'):  # pylint: disable=no-self-use
    """Parse *filename* against the mustang-grub-efi device with a dummy logger."""
    base_dir = os.path.dirname(__file__)
    mustang = NewDevice(os.path.join(base_dir, '../devices/mustang-grub-efi.yaml'))
    with open(os.path.join(base_dir, filename)) as job_stream:
        job = JobParser().parse(job_stream, mustang, 4212, None, "", output_dir=output_dir)
    job.logger = DummyLogger()
    return job
def create_uefi_job(self, filename, output_dir=None):  # pylint: disable=no-self-use
    """Parse *filename* against the mustang-uefi device with a dummy logger."""
    base_dir = os.path.dirname(__file__)
    mustang = NewDevice(os.path.join(base_dir, '../devices/mustang-uefi.yaml'))
    with open(os.path.join(base_dir, filename)) as job_stream:
        job = JobParser().parse(
            job_stream, mustang, 0, None,
            dispatcher_config="", output_dir=output_dir)
    job.logger = DummyLogger()
    return job
def parse_job_file(self, filename, oob_file):
    """
    Uses the parsed device_config instead of the old Device class
    so it can fail before the Pipeline is made.
    Avoids loading all configuration for all supported devices for every job.

    Returns a (runner, parameters) pair for pipeline jobs, or
    (run_legacy_job, json-dict) for anything else.
    """
    if is_pipeline_job(filename):
        # Prepare the pipeline from the file using the parser.
        device = None  # secondary connections do not need a device
        if self.args.target:
            device = NewDevice(self.args.target)  # DeviceParser
        parser = JobParser()
        job = None
        # Load the configuration files (this should *not* fail)
        env_dut = None
        if self.args.env_dut_path is not None:
            with open(self.args.env_dut_path, 'r') as f_in:
                env_dut = f_in.read()
        dispatcher_config = None
        if self.args.dispatcher_config is not None:
            with open(self.args.dispatcher_config, "r") as f_in:
                dispatcher_config = f_in.read()
        try:
            # Create the ZMQ config
            zmq_config = None
            if self.args.socket_addr is not None:
                zmq_config = ZMQConfig(self.args.socket_addr,
                                       self.args.master_cert,
                                       self.args.slave_cert,
                                       self.args.ipv6)
            # Generate the pipeline
            with open(filename) as f_in:
                job = parser.parse(f_in, device, self.args.job_id,
                                   zmq_config=zmq_config,
                                   dispatcher_config=dispatcher_config,
                                   output_dir=self.args.output_dir,
                                   env_dut=env_dut)
            # Generate the description
            description = job.describe()
            description_file = os.path.join(self.args.output_dir, 'description.yaml')
            # exist_ok avoids a race between an exists() check and makedirs()
            os.makedirs(self.args.output_dir, 0o755, exist_ok=True)
            with open(description_file, 'w') as f_describe:
                f_describe.write(yaml.dump(description))
        except JobError as exc:
            # lazy %-style args rather than eager string interpolation
            logging.error("Invalid job submission: %s", exc)
            exit(1)
        # FIXME: NewDevice schema needs a validation parser
        # device.check_config(job)
        return get_pipeline_runner(job), job.parameters
    # everything else is assumed to be JSON; close the handle after loading
    # (the previous version leaked the file object from json.load(open(...)))
    with open(filename) as f_json:
        return run_legacy_job, json.load(f_json)
def create_fake_qemu_job(self, output_dir=None):  # pylint: disable=no-self-use
    """Parse sample_jobs/basics.yaml for kvm01; None if a deployment is unimplemented."""
    base_dir = os.path.dirname(__file__)
    device = NewDevice(os.path.join(base_dir, '../devices/kvm01.yaml'))
    job_file = os.path.join(base_dir, 'sample_jobs/basics.yaml')
    parser = JobParser()
    try:
        with open(job_file) as job_stream:
            return parser.parse(job_stream, device, 4212, None, output_dir=output_dir)
    except NotImplementedError:
        # some deployments listed in basics.yaml are not implemented yet
        return None
def create_job(self, filename, output_dir=None):
    """Parse *filename* against a kvm target and attach a LavaContext (legacy API)."""
    target = self.create_kvm_target()
    job_path = os.path.join(os.path.dirname(__file__), filename)
    # handle kept open on self deliberately — presumably closed in teardown
    self.sample_job_data = open(job_path)
    self.parser = JobParser()
    job = self.parser.parse(self.sample_job_data, target, output_dir=output_dir)
    job.context = LavaContext(
        target.config.hostname, get_config(), sys.stderr, job.parameters, '/tmp')
    return job
def create_fake_qemu_job(self):
    """Build a legacy job from sample_jobs/basics.yaml using a fake qemu binary."""
    factory = Factory()
    fake_qemu = os.path.join(
        os.path.dirname(__file__), '..', '..', 'tests', 'test-config', 'bin', 'fake-qemu')
    target = factory.create_kvm_target({'qemu-binary': fake_qemu})
    basics = os.path.join(os.path.dirname(__file__), 'sample_jobs/basics.yaml')
    # handle kept open on self deliberately — presumably closed in teardown
    self.sample_job_data = open(basics)
    self.parser = JobParser()
    return self.parser.parse(self.sample_job_data, target)
def create_kvm_job(self, filename, output_dir="/tmp/"):  # pylint: disable=no-self-use
    """Parse *filename* for kvm01; None if a deployment is unimplemented."""
    base_dir = os.path.dirname(__file__)
    device = NewDevice(os.path.join(base_dir, "../devices/kvm01.yaml"))
    parser = JobParser()
    try:
        with open(os.path.join(base_dir, filename)) as job_stream:
            return parser.parse(job_stream, device, 4212, None, output_dir=output_dir)
    except NotImplementedError:
        # some deployments listed in basics.yaml are not implemented yet
        return None
def test_primary_media(self):
    """Definitions of secondary media must not block submissions using primary media."""
    device = NewDevice(os.path.join(os.path.dirname(__file__), '../devices/bbb-01.yaml'))
    job_file = os.path.join(os.path.dirname(__file__), 'sample_jobs/uboot-ramdisk.yaml')
    with open(job_file) as job_stream:
        job = JobParser().parse(job_stream, device, 4212, None, None, None, output_dir='/tmp/')
    job.validate()
    self.assertEqual(job.pipeline.errors, [])
    self.assertIn('usb', device['parameters']['media'].keys())
def test_job(self):
    """Parse a qemu job, map its pipeline metadata and verify ActionData bookkeeping."""
    MetaType.objects.all().delete()
    TestJob.objects.all().delete()
    job = TestJob.from_yaml_and_user(self.factory.make_job_yaml(), self.user)
    job_def = yaml.load(job.definition)
    job_ctx = job_def.get('context', {})
    job_ctx.update(
        {'no_kvm': True})  # override to allow unit tests on all types of systems
    device = Device.objects.get(hostname='fakeqemu1')
    device_config = device.load_device_configuration(
        job_ctx, system=False)  # raw dict
    parser = JobParser()
    obj = PipelineDevice(device_config, device.hostname)
    pipeline_job = parser.parse(job.definition, obj, job.id, None, "", output_dir='/tmp')
    # the qemu binary may be absent on the test host; tolerate a missing path
    allow_missing_path(pipeline_job.pipeline.validate_actions, self, 'qemu-system-x86_64')
    pipeline = pipeline_job.describe()
    map_metadata(yaml.dump(pipeline), job)
    # exactly one deploy metatype and one boot metatype should have been created
    self.assertEqual(
        MetaType.objects.filter(metatype=MetaType.DEPLOY_TYPE).count(), 1)
    self.assertEqual(
        MetaType.objects.filter(metatype=MetaType.BOOT_TYPE).count(), 1)
    count = ActionData.objects.all().count()
    self.assertEqual(TestData.objects.all().count(), 1)
    testdata = TestData.objects.all()[0]
    self.assertEqual(testdata.testjob, job)
    # every action level must belong to the single TestData object
    for actionlevel in ActionData.objects.all():
        self.assertEqual(actionlevel.testdata, testdata)
    action_levels = []
    for testdata in job.testdata_set.all():
        action_levels.extend(testdata.actionlevels.all())
    self.assertEqual(count, len(action_levels))
    count = ActionData.objects.filter(
        meta_type__metatype=MetaType.DEPLOY_TYPE).count()
    self.assertNotEqual(
        ActionData.objects.filter(
            meta_type__metatype=MetaType.BOOT_TYPE).count(), 0)
    self.assertEqual(
        ActionData.objects.filter(
            meta_type__metatype=MetaType.UNKNOWN_TYPE).count(), 0)
    # every boot-type action level must reference this job
    for actionlevel in ActionData.objects.filter(
            meta_type__metatype=MetaType.BOOT_TYPE):
        self.assertEqual(actionlevel.testdata.testjob.id, job.id)
    self.assertEqual(
        ActionData.objects.filter(meta_type__metatype=MetaType.DEPLOY_TYPE,
                                  testdata__testjob=job).count(), count)
def create_custom_job(self, data, output_dir='/tmp/'):  # pylint: disable=no-self-use
    """Parse an in-memory job definition *data* against the bbb-01 device."""
    bbb = NewDevice(
        os.path.join(os.path.dirname(__file__), '../devices/bbb-01.yaml'))
    return JobParser().parse(data, bbb, 4212, None, None, None, output_dir=output_dir)
def test_primary_media(self):
    """
    Test that definitions of secondary media do not block submissions using primary media
    """
    job_parser = JobParser()
    bbb = NewDevice(os.path.join(os.path.dirname(__file__), "../devices/bbb-01.yaml"))
    sample_job_file = os.path.join(os.path.dirname(__file__), "sample_jobs/uboot-ramdisk.yaml")
    # context manager so the sample job file is always closed
    # (the previous version leaked the file handle)
    with open(sample_job_file) as sample_job_data:
        job = job_parser.parse(sample_job_data, bbb, 4212, None, output_dir="/tmp/")
    job.validate()
    self.assertEqual(job.pipeline.errors, [])
    self.assertIn("usb", bbb["parameters"]["media"].keys())
def create_fake_qemu_job(self, output_dir=None):  # pylint: disable=no-self-use
    """Parse sample_jobs/basics.yaml for kvm01; None on any LAVAError."""
    base_dir = os.path.dirname(__file__)
    device = NewDevice(os.path.join(base_dir, '../devices/kvm01.yaml'))
    job_file = os.path.join(base_dir, 'sample_jobs/basics.yaml')
    parser = JobParser()
    try:
        with open(job_file) as job_stream:
            return parser.parse(job_stream, device, 4212, None, "", output_dir=output_dir)
    except LAVAError:
        # some deployments listed in basics.yaml are not implemented yet
        return None
def test_name(self):
    """A test definition name containing spaces must be rejected at validation."""
    deploy = [
        action for action in self.job.pipeline.actions
        if action.name == 'deployimages'
    ][0]
    overlay = [
        action for action in deploy.internal_pipeline.actions
        if action.name == 'lava-overlay'
    ][0]
    testdef = [
        action for action in overlay.internal_pipeline.actions
        if action.name == 'test-definition'
    ][0]
    # the unmodified job must validate cleanly first
    testdef.validate()
    self.assertEqual([], testdef.errors)
    device = NewDevice(
        os.path.join(os.path.dirname(__file__), '../devices/kvm01.yaml'))
    kvm_yaml = os.path.join(os.path.dirname(__file__), 'sample_jobs/kvm.yaml')
    parser = JobParser()
    with open(kvm_yaml, 'r') as sample_job_data:
        content = yaml.load(sample_job_data)
    data = [
        block['test'] for block in content['actions'] if 'test' in block
    ][0]
    definitions = [
        block for block in data['definitions'] if 'path' in block
    ][0]
    # inject an invalid name (contains a space)
    definitions['name'] = 'smoke tests'
    job = parser.parse(yaml.dump(content), device, 4212, None, "", output_dir='/tmp/')
    deploy = [
        action for action in job.pipeline.actions
        if action.name == 'deployimages'
    ][0]
    overlay = [
        action for action in deploy.internal_pipeline.actions
        if action.name == 'lava-overlay'
    ][0]
    testdef = [
        action for action in overlay.internal_pipeline.actions
        if action.name == 'test-definition'
    ][0]
    testdef.validate()
    self.assertNotEqual([], testdef.errors)
    self.assertIn(
        'Invalid characters found in test definition name: smoke tests',
        job.pipeline.errors)
def create_ssh_job(self, filename, output_dir=None):  # pylint: disable=no-self-use
    """Parse *filename* against the ssh-host-01 device and return the job."""
    base_dir = os.path.dirname(__file__)
    host = NewDevice(os.path.join(base_dir, '../devices/ssh-host-01.yaml'))
    with open(os.path.join(base_dir, filename)) as job_stream:
        return JobParser().parse(job_stream, host, 0, socket_addr=None, output_dir=output_dir)
def test_job_bad_tags(self):
    """A vland tag the device does not support must fail job validation."""
    with open(self.filename) as config_stream:
        alpha_data = yaml.load(config_stream)
    vlans = alpha_data['protocols'][VlandProtocol.name]
    # replaced tags from original job to simulate job where an unsupported tag is specified
    for vlan_key in vlans:
        vlans[vlan_key] = {'tags': ['spurious']}
    self.assertEqual(
        alpha_data['protocols'][VlandProtocol.name],
        {'vlan_one': {'tags': ['spurious']}}
    )
    job = JobParser().parse(
        yaml.dump(alpha_data), self.device, 4212, None, None, None, output_dir='/tmp/')
    self.assertRaises(JobError, job.validate)
def test_empty_device_environment(self):
    """A None env_dut must be passed through to the job parameters unchanged."""
    device = NewDevice(os.path.join(os.path.dirname(__file__), '../devices/bbb-01.yaml'))
    job_file = os.path.join(os.path.dirname(__file__), 'sample_jobs/uboot-ramdisk.yaml')
    with open(job_file) as job_stream:
        job = JobParser().parse(
            job_stream, device, 4212, None,
            output_dir='/tmp', env_dut=None)
    self.assertEqual(
        job.parameters['env_dut'],
        None
    )
def test_primary_media(self):
    """Definitions of secondary media must not block submissions using primary media."""
    device = NewDevice(os.path.join(os.path.dirname(__file__), '../devices/bbb-01.yaml'))
    job_file = os.path.join(os.path.dirname(__file__), 'sample_jobs/uboot-ramdisk.yaml')
    with open(job_file) as job_stream:
        job = JobParser().parse(job_stream, device, 4212, None, "", output_dir='/tmp/')
    job.logger = DummyLogger()
    job.validate()
    self.assertEqual(job.pipeline.errors, [])
    self.assertIn('usb', device['parameters']['media'].keys())
def test_configure(self):
    """Configure the vland protocol for two devices within one job and check its params."""
    with open(self.filename) as yaml_data:
        alpha_data = yaml.load(yaml_data)
    self.assertIn('protocols', alpha_data)
    self.assertTrue(VlandProtocol.accepts(alpha_data))
    vprotocol = VlandProtocol(alpha_data, self.job_id)
    vprotocol.set_up()
    with open(self.filename) as sample_job_data:
        parser = JobParser()
        job = parser.parse(sample_job_data, self.device, 4212, None, None, None, output_dir='/tmp/')
    ret = vprotocol.configure(self.device, job)
    if not ret:
        # aid debugging on failure before the assertion fires
        print(vprotocol.errors)
    self.assertTrue(ret)
    # each vlan must map to a unique (switch, port) pair
    nodes = {}
    for name in vprotocol.names:
        vlan = vprotocol.params[name]
        # self.assertNotIn('tags', vlan)
        uid = ' '.join([vlan['switch'], str(vlan['port'])])
        nodes[uid] = name
    self.assertEqual(len(nodes.keys()), len(vprotocol.names))
    self.assertIn('vlan_one', vprotocol.names)
    self.assertNotIn('vlan_two', vprotocol.names)
    self.assertIn('switch', vprotocol.params['vlan_one'])
    self.assertIn('port', vprotocol.params['vlan_one'])
    self.assertIsNotNone(vprotocol.multinode_protocol)
    # a second device with different switch/port assignments
    bbb2 = NewDevice(os.path.join(os.path.dirname(__file__), '../devices/bbb-01.yaml'))
    bbb2['hostname'] = 'bbb2'
    bbb2['parameters']['interfaces']['eth0']['switch'] = '192.168.0.2'
    bbb2['parameters']['interfaces']['eth0']['port'] = '6'
    bbb2['parameters']['interfaces']['eth1']['switch'] = '192.168.0.2'
    bbb2['parameters']['interfaces']['eth1']['port'] = '4'
    self.assertEqual(
        vprotocol.params, {
            'vlan_one': {
                'switch': '192.168.0.1',
                'port': 7,
                'tags': ['100M', 'RJ45', '10M']
            }
        }
    )
    # already configured the vland protocol in the same job
    self.assertTrue(vprotocol.configure(bbb2, job))
    # params must be unchanged by the second configure call
    self.assertEqual(
        vprotocol.params, {
            'vlan_one': {
                'switch': '192.168.0.1',
                'port': 7,
                'tags': ['100M', 'RJ45', '10M']}
        }
    )
    self.assertTrue(vprotocol.valid)
    self.assertEqual(vprotocol.names, {'vlan_one': '4212vlanone'})
def create_kvm_job(self, filename, output_dir='/tmp/'):  # pylint: disable=no-self-use
    """Parse *filename* for kvm01 with a dummy logger; None on any LAVAError."""
    base_dir = os.path.dirname(__file__)
    device = NewDevice(os.path.join(base_dir, '../devices/kvm01.yaml'))
    parser = JobParser()
    try:
        with open(os.path.join(base_dir, filename)) as job_stream:
            job = parser.parse(job_stream, device, 4212, None, "", output_dir=output_dir)
            job.logger = DummyLogger()
    except LAVAError as exc:
        print(exc)
        # some deployments listed in basics.yaml are not implemented yet
        return None
    return job
def setUp(self):
    """Silence dispatcher loggers and load the uboot-ramdisk sample job blocks."""
    for logger_name in ('dispatcher', 'lava-dispatcher'):
        noisy = logging.getLogger(logger_name)
        noisy.disabled = True
        noisy.propagate = False
    self.device = NewDevice(os.path.join(os.path.dirname(__file__), '../devices/bbb-01.yaml'))
    bbb_yaml = os.path.join(os.path.dirname(__file__), 'sample_jobs/uboot-ramdisk.yaml')
    with open(bbb_yaml) as sample_job_data:
        self.base_data = yaml.load(sample_job_data)
    self.deploy_block = next(
        block for block in self.base_data['actions'] if 'deploy' in block)['deploy']
    self.boot_block = next(
        block for block in self.base_data['actions'] if 'boot' in block)['boot']
    self.parser = JobParser()
def create_bbb_lxc_job(self, filename, output_dir='/tmp/'):  # pylint: disable=no-self-use
    """Parse *filename* against bbb-01 with job id 4577 and a dummy logger."""
    base_dir = os.path.dirname(__file__)
    bbb = NewDevice(os.path.join(base_dir, '../devices/bbb-01.yaml'))
    with open(os.path.join(base_dir, filename)) as job_stream:
        job = JobParser().parse(job_stream, bbb, 4577, None, "", output_dir=output_dir)
    job.logger = DummyLogger()
    return job
def test_uboot_checksum(self):
    """The bbb ramdisk job must carry a sha256sum but no md5sum for the download."""
    device = NewDevice(os.path.join(os.path.dirname(__file__), '../devices/bbb-01.yaml'))
    bbb_yaml = os.path.join(os.path.dirname(__file__), 'sample_jobs/bbb-ramdisk-nfs.yaml')
    with open(bbb_yaml) as job_stream:
        job = JobParser().parse(job_stream, device, 4212, None, None, None, output_dir='/tmp/')
    deploy = next(a for a in job.pipeline.actions if a.name == 'tftp-deploy')
    download = next(a for a in deploy.internal_pipeline.actions if a.name == 'download_retry')
    helper = next(a for a in download.internal_pipeline.actions if a.name == 'file_download')
    remote = helper.parameters[helper.key]
    self.assertIsNone(remote.get('md5sum', None))
    self.assertIsNotNone(remote.get('sha256sum', None))
def test_primary_interface(self):
    """Validate the lava-vland-overlay action for a job using the primary interface."""
    with open(self.filename) as config_stream:
        job_dict = yaml.load(config_stream)
    for nic in self.device['parameters']['interfaces']:
        # jinja2 processing of tags: [] results in tags:
        if self.device['parameters']['interfaces'][nic]['tags'] == []:
            self.device['parameters']['interfaces'][nic]['tags'] = None
    job = JobParser().parse(yaml.dump(job_dict), self.device, 4212, None, None, None, output_dir='/tmp/')
    deploy = next(a for a in job.pipeline.actions if a.name == 'tftp-deploy')
    prepare = next(a for a in deploy.internal_pipeline.actions if a.name == 'prepare-tftp-overlay')
    overlay = next(a for a in prepare.internal_pipeline.actions if a.name == 'lava-overlay')
    vland_overlay = next(a for a in overlay.internal_pipeline.actions if a.name == 'lava-vland-overlay')
    vland_overlay.validate()
    job.validate()
def test_compatibility(self):  # pylint: disable=too-many-locals
    """Parse a public job against its actual device and check the reported compatibility."""
    user = self.factory.make_user()
    # public set in the YAML
    yaml_str = self.factory.make_job_json()
    yaml_data = yaml.load(yaml_str)
    job = TestJob.from_yaml_and_user(
        yaml_str, user)
    self.assertTrue(job.is_public)
    self.assertTrue(job.can_view(user))
    # initial state prior to validation
    self.assertEqual(job.pipeline_compatibility, 0)
    self.assertNotIn('compatibility', yaml_data)
    # FIXME: dispatcher master needs to make this kind of test more accessible.
    definition = yaml.load(job.definition)
    self.assertNotIn('protocols', definition)
    job.actual_device = Device.objects.get(hostname='fakeqemu1')
    job_def = yaml.load(job.definition)
    job_ctx = job_def.get('context', {})
    parser = JobParser()
    device = job.actual_device
    try:
        device_config = device.load_device_configuration(job_ctx, system=False)  # raw dict
    except (jinja2.TemplateError, yaml.YAMLError, IOError) as exc:
        # FIXME: report the exceptions as useful user messages
        self.fail("[%d] jinja2 error: %s" % (job.id, exc))
    if not device_config or not isinstance(device_config, dict):
        # it is an error to have a pipeline device without a device dictionary as it will never get any jobs.
        msg = "Administrative error. Device '%s' has no device dictionary." % device.hostname
        self.fail('[%d] device-dictionary error: %s' % (job.id, msg))
    device_object = PipelineDevice(device_config, device.hostname)  # equivalent of the NewDevice in lava-dispatcher, without .yaml file.
    # FIXME: drop this nasty hack once 'target' is dropped as a parameter
    if 'target' not in device_object:
        device_object.target = device.hostname
    device_object['hostname'] = device.hostname
    parser_device = device_object
    try:
        # pass (unused) output_dir just for validation as there is no zmq socket either.
        pipeline_job = parser.parse(
            job.definition, parser_device,
            job.id, None, None, None, output_dir=job.output_dir)
    except (AttributeError, JobError, NotImplementedError, KeyError, TypeError) as exc:
        self.fail('[%s] parser error: %s' % (job.sub_id, exc))
    description = pipeline_job.describe()
    self.assertIn('compatibility', description)
    # compatibility must be at least that of the qemu boot strategy
    self.assertGreaterEqual(description['compatibility'], BootQEMU.compatibility)