def parse_action(job_data, name, device, pipeline, test_info, test_count):
    """
    Select and populate the strategy class for one action in the job.

    If protocols are defined, each Action may need to be aware of the
    protocol parameters, so those are merged into the action parameters.

    :param job_data: mapping of action names to action parameter dicts
    :param name: action key to parse ('boot', 'test' or 'deploy')
    :param device: device the pipeline is being built for
    :param pipeline: Pipeline being populated
    :param test_info: per-namespace list of test class/parameter dicts
    :param test_count: 1-based count of test actions seen in this
        namespace; used to derive the 0-based 'stage' value
    """
    parameters = job_data[name]
    parameters['test_info'] = test_info
    if 'protocols' in pipeline.job.parameters:
        parameters.update(pipeline.job.parameters['protocols'])

    if name == 'boot':
        Boot.select(device, parameters)(pipeline, parameters)
    elif name == 'test':
        # stage starts at 0
        parameters['stage'] = test_count - 1
        LavaTest.select(device, parameters)(pipeline, parameters)
    elif name == 'deploy':
        # Attach deployment_data only when a test class in this namespace
        # needs it. (Idiom fix: generator expression instead of building a
        # throwaway list inside any().)
        if parameters['namespace'] in test_info:
            if any(testclass['class'].needs_deployment_data()
                   for testclass in test_info[parameters['namespace']]):
                parameters.update(
                    {'deployment_data': get_deployment_data(parameters.get('os', ''))})
        # A preseed file also requires deployment data for the declared OS.
        if 'preseed' in parameters:
            parameters.update(
                {'deployment_data': get_deployment_data(parameters.get('os', ''))})
        Deployment.select(device, parameters)(pipeline, parameters)
def parse_action(job_data, name, device, pipeline):
    """
    Select and populate the strategy class for one action in the job.

    If protocols are defined, each Action may need to be aware of the
    protocol parameters, so those are merged into the action parameters.
    """
    parameters = job_data[name]
    if 'protocols' in pipeline.job.parameters:
        parameters.update(pipeline.job.parameters['protocols'])

    if name == 'boot':
        selected = Boot.select(device, parameters)
        selected(pipeline, parameters)
    elif name == 'test':
        selected = LavaTest.select(device, parameters)
        selected(pipeline, parameters)
    elif name == 'deploy':
        # deployment data for the declared OS is needed before selection
        parameters['deployment_data'] = get_deployment_data(parameters.get('os', ''))
        selected = Deployment.select(device, parameters)
        selected(pipeline, parameters)
def parse_action(job_data, name, device, pipeline):
    """
    Select and populate the strategy class for one action in the job.

    If protocols are defined, each Action may need to be aware of the
    protocol parameters, so those are merged into the action parameters.
    """
    params = job_data[name]
    if 'protocols' in pipeline.job.parameters:
        params.update(pipeline.job.parameters['protocols'])
    if name == 'deploy':
        # deployment data for the declared OS is needed before selection
        params.update(
            {'deployment_data': get_deployment_data(params.get('os', ''))})
    # dispatch table instead of an if/elif chain; unknown names fall through
    strategies = {'boot': Boot, 'test': LavaTest, 'deploy': Deployment}
    strategy = strategies.get(name)
    if strategy is not None:
        strategy.select(device, params)(pipeline, params)
def parse_action(job_data, name, device, pipeline, test_action, count):
    """
    Select and populate the strategy class for one action in the job.

    If protocols are defined, each Action may need to be aware of the
    protocol parameters, so those are merged into the action parameters.
    """
    parameters = job_data[name]
    if 'protocols' in pipeline.job.parameters:
        parameters.update(pipeline.job.parameters['protocols'])

    # guard-clause dispatch: each branch handles one action kind and returns
    if name == 'boot':
        Boot.select(device, parameters)(pipeline, parameters)
        return
    if name == 'test':
        # stage starts at 0
        parameters['stage'] = count - 1
        LavaTest.select(device, parameters)(pipeline, parameters)
        return
    if name == 'deploy':
        if 'type' not in parameters:
            parameters['deployment_data'] = get_deployment_data(
                parameters.get('os', ''))
        parameters['test_action'] = test_action
        Deployment.select(device, parameters)(pipeline, parameters)
def parse(self, content, device, job_id, zmq_config, dispatcher_config,
          output_dir=None, env_dut=None):
    """
    Build a Job and its Pipeline from a YAML job definition.

    :param content: YAML job definition string
    :param device: device the job will run on
    :param job_id: identifier for the new Job
    :param zmq_config: ZMQ logging configuration passed to the Job
    :param dispatcher_config: optional YAML string of dispatcher settings
    :param output_dir: directory for job output artifacts
    :param env_dut: environment overrides for the device under test
    :raises JobError: on an unknown action name or a compatibility mismatch
    :return: the populated Job with its pipeline attached
    """
    self.loader = yaml.Loader(content)
    self.loader.compose_node = self.compose_node
    self.loader.construct_mapping = self.construct_mapping
    data = self.loader.get_single_data()
    job = Job(job_id, data, zmq_config)
    test_counts = {}
    job.device = device
    job.parameters['output_dir'] = output_dir
    job.parameters['env_dut'] = env_dut
    # Load the dispatcher config
    job.parameters['dispatcher'] = {}
    if dispatcher_config is not None:
        # NOTE(review): yaml.load without an explicit Loader can construct
        # arbitrary Python objects; dispatcher_config is operator-supplied,
        # but consider yaml.safe_load if plain mappings suffice — confirm.
        config = yaml.load(dispatcher_config)
        if isinstance(config, dict):
            job.parameters['dispatcher'] = config
    # Setup the logging now that we have the parameters
    job.setup_logging()
    level_tuple = Protocol.select_all(job.parameters)
    # sort the list of protocol objects by the protocol class level.
    job.protocols = [
        item[0](job.parameters, job_id)
        for item in sorted(level_tuple, key=lambda entry: entry[1])
    ]
    pipeline = Pipeline(job=job)
    self._timeouts(data, job)

    # deploy and boot classes can populate the pipeline differently depending
    # on the test action type they are linked with (via namespacing).
    # This code builds an information dict for each namespace which is then
    # passed as a parameter to each Action class to use.
    test_info = {}
    test_actions = [action for action in data['actions'] if 'test' in action]
    for test_action in test_actions:
        test_parameters = test_action['test']
        test_type = LavaTest.select(device, test_parameters)
        namespace = test_parameters.get('namespace', 'common')
        # group test classes by namespace (setdefault replaces the
        # if-present/else-insert branching)
        test_info.setdefault(namespace, []).append(
            {'class': test_type, 'parameters': test_parameters})

    # FIXME: also read permissable overrides from device config and set from job data
    # FIXME: ensure that a timeout for deployment 0 does not get set as the timeout for deployment 1 if 1 is default
    for action_data in data['actions']:
        action_data.pop('yaml_line', None)
        for name in action_data:
            # Set a default namespace if needed
            namespace = action_data[name].setdefault('namespace', 'common')
            test_counts.setdefault(namespace, 1)

            if name in ('deploy', 'boot', 'test'):
                parse_action(action_data, name, device, pipeline,
                             test_info, test_counts[namespace])
                if name == 'test':
                    test_counts[namespace] += 1
            elif name == 'repeat':
                count = action_data[name]['count']  # first list entry must be the count dict
                repeats = action_data[name]['actions']
                for c_iter in range(count):
                    for repeating in repeats:  # block of YAML to repeat
                        for repeat_action in repeating:  # name of the action for this block
                            if repeat_action == 'yaml_line':
                                continue
                            repeating[repeat_action]['repeat-count'] = c_iter
                            namespace = repeating[repeat_action].setdefault(
                                'namespace', 'common')
                            test_counts.setdefault(namespace, 1)
                            parse_action(repeating, repeat_action, device,
                                         pipeline, test_info,
                                         test_counts[namespace])
                            if repeat_action == 'test':
                                test_counts[namespace] += 1
            elif name == 'command':
                action = CommandAction()
                action.parameters = action_data[name]
                pipeline.add_action(action)
            else:
                # BUGFIX: the placeholder was '%' with no conversion type,
                # which raises "ValueError: incomplete format" instead of
                # reporting the offending action name.
                raise JobError("Unknown action name '%s'" % name)

    # there's always going to need to be a finalize_process action
    finalize = FinalizeAction()
    pipeline.add_action(finalize)
    finalize.populate(None)
    data['output_dir'] = output_dir
    job.pipeline = pipeline
    if 'compatibility' in data:
        try:
            job_c = int(job.compatibility)
            data_c = int(data['compatibility'])
        except ValueError as exc:
            raise JobError('invalid compatibility value: %s' % exc)
        if job_c < data_c:
            raise JobError(
                'Dispatcher unable to meet job compatibility requirement. '
                '%d > %d' % (job_c, data_c))
    return job