Example #1
 def test_add_action_to_pipeline(self):
     action = Action()
     action.name = "test-action"
     action.description = "test action only"
     action.summary = "starter"
     self.assertEqual(action.description, "test action only")
     self.assertEqual(action.summary, "starter")
     # action needs to be added to a top level pipe first
     with self.assertRaises(LAVABug):
         Pipeline(action)
     pipe = Pipeline()
     with self.assertRaises(LAVABug):
         pipe.add_action(None)
     with self.assertRaises(LAVABug):
         pipe.add_action(pipe)
     pipe.add_action(action)
     self.assertEqual(pipe.actions, [action])
     self.assertEqual(action.level, "1")
     try:
         description = pipe.describe()
     except Exception as exc:  # pylint: disable=broad-except
         self.fail(exc)
     self.assertIsNotNone(description)
     self.assertIsInstance(description, list)
     self.assertIn('description', description[0])
     self.assertIn('level', description[0])
     self.assertIn('summary', description[0])
     self.assertIn('max_retries', description[0])
     self.assertIn('timeout', description[0])
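
The Pipeline/Action contract exercised here can be pictured with a small stand-in. The following is a minimal, self-contained sketch, not the real lava_dispatcher classes: the exception name (LAVABug), the default max_retries and timeout values, and the level-numbering rule are assumptions for illustration only. Other examples below show variants of the same API that raise RuntimeError and expose pipe.children instead of pipe.actions.

    class LAVABug(Exception):
        """Stand-in for the exception the real Pipeline raises on misuse."""


    class Action:
        def __init__(self):
            self.name = None
            self.description = None
            self.summary = None
            self.level = None
            self.max_retries = 1  # assumed default
            self.timeout = 30     # assumed default, in seconds


    class Pipeline:
        def __init__(self, parent=None):
            # an internal pipeline needs a parent action that already belongs
            # to a pipeline, hence the exception asserted in the test above
            if parent is not None and parent.level is None:
                raise LAVABug("action needs to be added to a top level pipe first")
            self.parent = parent
            self.actions = []

        def add_action(self, action):
            if not isinstance(action, Action):
                raise LAVABug("only Action instances can be added to a pipeline")
            # nested pipelines extend the parent's level: "2" -> "2.1", "2.2", ...
            prefix = "%s." % self.parent.level if self.parent else ""
            action.level = "%s%d" % (prefix, len(self.actions) + 1)
            self.actions.append(action)

        def describe(self):
            # one dict per action in this pipeline, with the keys the test checks
            return [{'description': a.description, 'level': a.level,
                     'summary': a.summary, 'max_retries': a.max_retries,
                     'timeout': a.timeout} for a in self.actions]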
Example #2
 def test_summary_exists(self):
     for subclass in Action.__subclasses__():  # pylint: disable=no-member
         if not hasattr(subclass, 'name'):
             continue
          if not hasattr(subclass, 'summary') and subclass.name not in self.allowed:
             self.fail(subclass)
Example #3
 def test_definition_exists(self):
     for subclass in Action.__subclasses__():
         if not subclass.name:
             continue
          if not hasattr(subclass, 'definition') and subclass.name not in self.allowed:
             self.fail(subclass)
Example #4
    def test_composite_action_aggregates_errors_from_sub_actions(self):  # pylint: disable=invalid-name
        # Unable to call Action.validate() as there is no job in this unit test
        sub1 = Action()
        sub1.__errors__ = [1]
        sub2 = Action()
        sub2.name = "sub2"
        sub2.__errors__ = [2]

        pipe = Pipeline()
        sub1.name = "sub1"
        pipe.add_action(sub1)
        pipe.add_action(sub2)
        self.assertEqual([1, 2], pipe.errors)
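
The aggregation asserted here is easy to picture with a short, self-contained sketch; it illustrates the expected behaviour rather than the real lava_dispatcher Pipeline: the errors property simply concatenates the __errors__ lists of the added actions in insertion order.

    class SketchPipeline:
        """Hypothetical stand-in used only to illustrate error aggregation."""

        def __init__(self):
            self.actions = []

        def add_action(self, action):
            self.actions.append(action)

        @property
        def errors(self):
            collected = []
            for action in self.actions:
                collected.extend(getattr(action, '__errors__', []))
            return collected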
Example #5
 def test_add_action_to_pipeline(self):
     action = Action()
     action.name = "test-action"
     action.description = "test action only"
     action.summary = "starter"
     self.assertEqual(action.description, "test action only")
     self.assertEqual(action.summary, "starter")
     # action needs to be added to a top level pipe first
     with self.assertRaises(RuntimeError):
         Pipeline(action)
     pipe = Pipeline()
     with self.assertRaises(RuntimeError):
         pipe.add_action(None)
     with self.assertRaises(RuntimeError):
         pipe.add_action(pipe)
     pipe.add_action(action)
     self.assertNotEqual(pipe.children, {pipe: []})
     self.assertEqual(pipe.children, {pipe: [action]})
     self.assertEqual(action.level, "1")
     try:
         simplejson.loads(pipe.describe())
      except Exception as exc:  # pylint: disable=broad-except
          self.fail(exc)
Example #6
 def test_add_action_to_pipeline(self):
     action = Action()
     action.name = "test-action"
     action.description = "test action only"
     action.summary = "starter"
     self.assertEqual(action.description, "test action only")
     self.assertEqual(action.summary, "starter")
     # action needs to be added to a top level pipe first
     with self.assertRaises(RuntimeError):
         Pipeline(action)
     pipe = Pipeline()
     with self.assertRaises(RuntimeError):
         pipe.add_action(None)
     with self.assertRaises(RuntimeError):
         pipe.add_action(pipe)
     pipe.add_action(action)
     self.assertNotEqual(pipe.children, {pipe: []})
     self.assertEqual(pipe.children, {pipe: [action]})
     self.assertEqual(action.level, "1")
     try:
         simplejson.loads(pipe.describe())
      except Exception as exc:  # pylint: disable=broad-except
          self.fail(exc)
Example #7
    def parse(self, content, device, output_dir=None):
        self.loader = yaml.Loader(content)
        self.loader.compose_node = self.compose_node
        self.loader.construct_mapping = self.construct_mapping
        data = self.loader.get_single_data()

        job = Job(data)

        job.device = device
        job.parameters['output_dir'] = output_dir
        pipeline = Pipeline(job=job)
        for action_data in data['actions']:
            line = action_data.pop('yaml_line', None)
            for name in action_data:
                if name == "deploy":
                    # allow the classmethod to check the parameters
                    deploy = Deployment.select(device, action_data[name])(pipeline)
                    deploy.action.parameters = action_data[name]  # still need to pass the parameters to the instance
                    if 'test' in data['actions']:
                        deploy.action.parameters = action_data['test']
                    deploy.action.yaml_line = line
                    device.deployment_data = deployment_data.get(deploy.action.parameters['os'])
                    deploy.action.parameters = {'deployment_data': device.deployment_data}
                else:
                    action_class = Action.find(name)
                    # select the specific action of this class for this job
                    action = action_class()
                    # put parameters (like rootfs_type, results_dir) into the actions.
                    if type(action_data[name]) == dict:
                        action.parameters = action_data[name]
                    elif name == "commands":
                        # FIXME
                        pass
                    elif type(action_data[name]) == list:
                        for param in action_data[name]:
                            action.parameters = param
                    action.summary = name
                    pipeline.add_action(action)
                # uncomment for debug
                # print action.parameters

        # the only parameters sent to the job are job parameters
        # like job_name, logging_level or target_group.
        data.pop('actions')
        data['output_dir'] = output_dir
        job.set_pipeline(pipeline)
        return job
Example #8
    def test_composite_action_aggregates_errors_from_sub_actions(self):  # pylint: disable=invalid-name
        # Unable to call Action.validate() as there is no job in this unit test
        sub1 = Action()
        sub1.__errors__ = [1]
        sub2 = Action()
        sub2.name = "sub2"
        sub2.__errors__ = [2]

        pipe = Pipeline()
        sub1.name = "sub1"
        pipe.add_action(sub1)
        pipe.add_action(sub2)
        self.assertEqual([1, 2], pipe.errors)
Example #9
    def parse(self,
              content,
              device,
              job_id,
              socket_addr,
              master_cert,
              slave_cert,
              output_dir=None,
              env_dut=None):
        self.loader = yaml.Loader(content)
        self.loader.compose_node = self.compose_node
        self.loader.construct_mapping = self.construct_mapping
        data = self.loader.get_single_data()

        self.context['default_action_duration'] = Timeout.default_duration()
        self.context['default_test_duration'] = Timeout.default_duration()
        self.context['default_connection_duration'] = Timeout.default_duration(
        )
        job = Job(job_id, socket_addr, master_cert, slave_cert, data)
        counts = {}
        job.device = device
        job.parameters['output_dir'] = output_dir
        job.parameters['env_dut'] = env_dut
        job.parameters['target'] = device.target
        level_tuple = Protocol.select_all(job.parameters)
        # sort the list of protocol objects by the protocol class level.
        job.protocols = [
            item[0](job.parameters, job_id)
            for item in sorted(level_tuple,
                               key=lambda level_tuple: level_tuple[1])
        ]
        pipeline = Pipeline(job=job)
        self._timeouts(data, job)

        # some special handling is needed to tell the overlay classes about the presence or absence of a test action
        test_action = True
        test_list = [action for action in data['actions'] if 'test' in action]
        if test_list and 'test' not in test_list[0]:
            test_action = False

        # FIXME: also read permissible overrides from device config and set from job data
        # FIXME: ensure that a timeout for deployment 0 does not get set as the timeout for deployment 1 if 1 is default
        for action_data in data['actions']:
            action_data.pop('yaml_line', None)
            for name in action_data:
                if isinstance(
                        action_data[name], dict
                ):  # FIXME: commands are not fully implemented & may produce a list
                    action_data[name].update(self._map_context_defaults())
                counts.setdefault(name, 1)
                if name == 'deploy' or name == 'boot' or name == 'test':
                    parse_action(action_data, name, device, pipeline,
                                 test_action)
                elif name == 'repeat':
                    count = action_data[name][
                        'count']  # first list entry must be the count dict
                    repeats = action_data[name]['actions']
                    for c_iter in range(count):
                        for repeating in repeats:  # block of YAML to repeat
                            for repeat_action in repeating:  # name of the action for this block
                                if repeat_action == 'yaml_line':
                                    continue
                                repeating[repeat_action][
                                    'repeat-count'] = c_iter
                                parse_action(repeating, repeat_action, device,
                                             pipeline, test_action)

                else:
                    # May only end up being used for submit as other actions all need strategy method objects
                    # select the specific action of this class for this job
                    action = Action.select(name)()
                    action.job = job
                    # put parameters (like rootfs_type, results_dir) into the actions.
                    if isinstance(action_data[name], dict):
                        action.parameters = action_data[name]
                    elif name == "commands":
                        # FIXME
                        pass
                    elif isinstance(action_data[name], list):
                        for param in action_data[name]:
                            action.parameters = param
                    action.summary = name
                    action.timeout = Timeout(
                        action.name, self.context['default_action_duration'])
                    action.connection_timeout = Timeout(
                        action.name,
                        self.context['default_connection_duration'])
                    pipeline.add_action(action)
                counts[name] += 1

        # there's always going to need to be a finalize_process action
        finalize = FinalizeAction()
        pipeline.add_action(finalize)
        finalize.populate(self._map_context_defaults())
        data['output_dir'] = output_dir
        job.set_pipeline(pipeline)
        if 'compatibility' in data:
            try:
                job_c = int(job.compatibility)
                data_c = int(data['compatibility'])
            except ValueError as exc:
                raise JobError('invalid compatibility value: %s' % exc)
            if job_c < data_c:
                raise JobError(
                    'Dispatcher unable to meet job compatibility requirement. %d < %d'
                    % (job_c, data_c))
        return job
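
The 'repeat' branch above is easiest to read as a pure expansion step: every action block inside the repeat is handed to parse_action() count times, tagged with its iteration number. A small self-contained sketch of that expansion follows; the data shape is an assumption for illustration, not the real parser.

    def expand_repeat(repeat_block):
        """Yield (action_name, parameters) pairs for a block of the form
        {'count': N, 'actions': [...]} in the order the parser above would
        pass them to parse_action()."""
        for c_iter in range(repeat_block['count']):
            for repeating in repeat_block['actions']:   # block of YAML to repeat
                for name, params in repeating.items():  # name of the action for this block
                    if name == 'yaml_line':             # parser bookkeeping, skip it
                        continue
                    tagged = dict(params)
                    tagged['repeat-count'] = c_iter
                    yield name, tagged

    # A boot/test pair repeated twice expands to boot, test, boot, test with
    # repeat-count 0, 0, 1, 1.
    block = {'count': 2, 'actions': [{'boot': {'method': 'qemu'}},
                                     {'test': {'definitions': []}}]}
    print([(name, p['repeat-count']) for name, p in expand_repeat(block)])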
Example #10
 def test_references_a_device(self):
     device = object()
     cmd = Action()
     cmd.device = device
     self.assertIs(cmd.device, device)
Example #11
 def test_action_is_valid_if_there_are_not_errors(self):  # pylint: disable=invalid-name
     action = Action()
     action.__errors__ = [1]
     self.assertFalse(action.valid)
     action.__errors__ = []
     self.assertTrue(action.valid)
Example #12
 def test_create_internal_pipeline(self):
     action = Action()
     action.name = "internal_pipe"
     action.description = "test action only"
     action.summary = "starter"
     pipe = Pipeline()
     pipe.add_action(action)
     self.assertEqual(len(pipe.children[pipe]), 1)
     self.assertEqual(action.level, "1")
     action = Action()
     action.name = "child_action"
     action.summary = "child"
     action.description = "action implementing an internal pipe"
     with self.assertRaises(RuntimeError):
         Pipeline(action)
     pipe.add_action(action)
     self.assertEqual(action.level, "2")
     self.assertEqual(len(pipe.children[pipe]), 2)
     # a formal RetryAction would contain a pre-built pipeline which can be inserted directly
     retry_pipe = Pipeline(action)
     action = Action()
     action.name = "inside_action"
     action.description = "action inside the internal pipe"
     action.summary = "child"
     retry_pipe.add_action(action)
     self.assertEqual(len(retry_pipe.children[retry_pipe]), 1)
     self.assertEqual(action.level, "2.1")
Example #13
    def test_complex_pipeline(self):  # pylint: disable=too-many-statements
        action = Action()
        action.name = "starter_action"
        action.description = "test action only"
        action.summary = "starter"
        pipe = Pipeline()
        pipe.add_action(action)
        self.assertEqual(action.level, "1")
        action = Action()
        action.name = "pipe_action"
        action.description = "action implementing an internal pipe"
        action.summary = "child"
        pipe.add_action(action)
        self.assertEqual(action.level, "2")
        # a formal RetryAction would contain a pre-built pipeline which can be inserted directly
        retry_pipe = Pipeline(action)
        action = Action()
        action.name = "child_action"
        action.description = "action inside the internal pipe"
        action.summary = "child"
        retry_pipe.add_action(action)
        self.assertEqual(action.level, "2.1")
        action = Action()
        action.name = "second-child-action"
        action.description = "second action inside the internal pipe"
        action.summary = "child2"
        retry_pipe.add_action(action)
        self.assertEqual(action.level, "2.2")
        action = Action()
        action.name = "baby_action"
        action.description = "action implementing an internal pipe"
        action.summary = "baby"
        retry_pipe.add_action(action)
        self.assertEqual(action.level, "2.3")
        inner_pipe = Pipeline(action)
        action = Action()
        action.name = "single_action"
        action.description = "single line action"
        action.summary = "single"
        inner_pipe.add_action(action)
        self.assertEqual(action.level, "2.3.1")

        action = Action()
        action.name = "step_out"
        action.description = "step out of inner pipe"
        action.summary = "brother"
        retry_pipe.add_action(action)
        self.assertEqual(action.level, "2.4")
        action = Action()
        action.name = "top-level"
        action.description = "top level"
        action.summary = "action"
        pipe.add_action(action)
        self.assertEqual(action.level, "3")
        self.assertEqual(len(pipe.describe()), 3)
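
The dotted levels asserted throughout this test come from a simple rule: each pipeline numbers its actions from 1 and prefixes them with the level of its parent action. A tiny self-contained sketch of that rule (an illustration, not the real implementation) follows; describe() reports only top-level actions, which is why the final assertion expects 3.

    def child_level(parent_level, position):
        """Level of the position-th action (1-based) in a pipeline whose parent
        action sits at parent_level (None for the top-level pipeline)."""
        if parent_level is None:
            return str(position)
        return "%s.%d" % (parent_level, position)

    assert child_level(None, 2) == "2"       # pipe_action
    assert child_level("2", 3) == "2.3"      # baby_action inside Pipeline(pipe_action)
    assert child_level("2.3", 1) == "2.3.1"  # single_action inside Pipeline(baby_action)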
Example #14
    def parse(self,
              content,
              device,
              job_id,
              socket_addr,
              output_dir=None,
              env_dut=None):
        self.loader = yaml.Loader(content)
        self.loader.compose_node = self.compose_node
        self.loader.construct_mapping = self.construct_mapping
        data = self.loader.get_single_data()

        self.context['default_action_duration'] = Timeout.default_duration()
        self.context['default_test_duration'] = Timeout.default_duration()
        job = Job(job_id, socket_addr, data)
        counts = {}
        job.device = device
        job.parameters['output_dir'] = output_dir
        job.parameters['env_dut'] = env_dut
        job.parameters['target'] = device.target
        for instance in Protocol.select_all(job.parameters):
            job.protocols.append(instance(job.parameters))
        pipeline = Pipeline(job=job)
        self._timeouts(data, job)

        # FIXME: also read permissible overrides from device config and set from job data
        # FIXME: ensure that a timeout for deployment 0 does not get set as the timeout for deployment 1 if 1 is default
        for action_data in data['actions']:
            action_data.pop('yaml_line', None)
            for name in action_data:
                if type(
                        action_data[name]
                ) is dict:  # FIXME: commands are not fully implemented & may produce a list
                    action_data[name]['default_action_timeout'] = self.context[
                        'default_action_duration']
                    action_data[name]['default_test_timeout'] = self.context[
                        'default_test_duration']
                counts.setdefault(name, 1)
                if name == 'deploy' or name == 'boot' or name == 'test':
                    # reset the context before adding a second deployment and again before third etc.
                    if name == 'deploy' and counts[name] >= 2:
                        reset_context = ResetContext()
                        reset_context.section = name
                        pipeline.add_action(reset_context)
                    parse_action(action_data, name, device, pipeline)
                elif name == 'repeat':
                    count = action_data[name][
                        'count']  # first list entry must be the count dict
                    repeats = action_data[name]['actions']
                    for c_iter in xrange(count):
                        for repeating in repeats:  # block of YAML to repeat
                            for repeat_action in repeating:  # name of the action for this block
                                if repeat_action == 'yaml_line':
                                    continue
                                repeating[repeat_action][
                                    'repeat-count'] = c_iter
                                parse_action(repeating, repeat_action, device,
                                             pipeline)

                else:
                    # May only end up being used for submit as other actions all need strategy method objects
                    # select the specific action of this class for this job
                    action = Action.select(name)()
                    action.job = job
                    # put parameters (like rootfs_type, results_dir) into the actions.
                    if type(action_data[name]) == dict:
                        action.parameters = action_data[name]
                    elif name == "commands":
                        # FIXME
                        pass
                    elif type(action_data[name]) == list:
                        for param in action_data[name]:
                            action.parameters = param
                    action.summary = name
                    action.timeout = Timeout(
                        action.name, self.context['default_action_duration'])
                    pipeline.add_action(action)
                counts[name] += 1

        # there's always going to need to be a finalize_process action
        pipeline.add_action(FinalizeAction())
        data['output_dir'] = output_dir
        job.set_pipeline(pipeline)
        return job
Example #15
    def parse(self, content, device, job_id, socket_addr, output_dir=None,
              env_dut=None):
        self.loader = yaml.Loader(content)
        self.loader.compose_node = self.compose_node
        self.loader.construct_mapping = self.construct_mapping
        data = self.loader.get_single_data()

        self.context['default_action_duration'] = Timeout.default_duration()
        self.context['default_test_duration'] = Timeout.default_duration()
        self.context['default_connection_duration'] = Timeout.default_duration()
        job = Job(job_id, socket_addr, data)
        counts = {}
        job.device = device
        job.parameters['output_dir'] = output_dir
        job.parameters['env_dut'] = env_dut
        job.parameters['target'] = device.target
        level_tuple = Protocol.select_all(job.parameters)
        # sort the list of protocol objects by the protocol class level.
        job.protocols = [item[0](job.parameters) for item in sorted(level_tuple, key=lambda level_tuple: level_tuple[1])]
        pipeline = Pipeline(job=job)
        self._timeouts(data, job)

        # FIXME: also read permissible overrides from device config and set from job data
        # FIXME: ensure that a timeout for deployment 0 does not get set as the timeout for deployment 1 if 1 is default
        for action_data in data['actions']:
            action_data.pop('yaml_line', None)
            for name in action_data:
                if type(action_data[name]) is dict:  # FIXME: commands are not fully implemented & may produce a list
                    action_data[name].update(self._map_context_defaults())
                counts.setdefault(name, 1)
                if name == 'deploy' or name == 'boot' or name == 'test':
                    parse_action(action_data, name, device, pipeline)
                elif name == 'repeat':
                    count = action_data[name]['count']  # first list entry must be the count dict
                    repeats = action_data[name]['actions']
                    for c_iter in xrange(count):
                        for repeating in repeats:  # block of YAML to repeat
                            for repeat_action in repeating:  # name of the action for this block
                                if repeat_action == 'yaml_line':
                                    continue
                                repeating[repeat_action]['repeat-count'] = c_iter
                                parse_action(repeating, repeat_action, device, pipeline)

                else:
                    # May only end up being used for submit as other actions all need strategy method objects
                    # select the specific action of this class for this job
                    action = Action.select(name)()
                    action.job = job
                    # put parameters (like rootfs_type, results_dir) into the actions.
                    if type(action_data[name]) == dict:
                        action.parameters = action_data[name]
                    elif name == "commands":
                        # FIXME
                        pass
                    elif type(action_data[name]) == list:
                        for param in action_data[name]:
                            action.parameters = param
                    action.summary = name
                    action.timeout = Timeout(action.name, self.context['default_action_duration'])
                    action.connection_timeout = Timeout(action.name, self.context['default_connection_duration'])
                    pipeline.add_action(action)
                counts[name] += 1

        # there's always going to need to be a finalize_process action
        finalize = FinalizeAction()
        pipeline.add_action(finalize)
        finalize.populate(self._map_context_defaults())
        data['output_dir'] = output_dir
        job.set_pipeline(pipeline)
        logger = logging.getLogger('dispatcher')
        logger.warning("pipeline contains %s", pipeline)
        if 'compatibility' in data:
            try:
                job_c = int(job.compatibility)
                data_c = int(data['compatibility'])
            except ValueError as exc:
                raise JobError('invalid compatibility value: %s' % exc)
            if job_c < data_c:
                raise JobError('Dispatcher unable to meet job compatibility requirement. %d < %d' % (job_c, data_c))
        return job
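
The protocol setup above reuses the name level_tuple for both the list and the lambda argument, which obscures a simple idea: Protocol.select_all() appears to return (protocol_class, level) pairs, and the classes are instantiated in ascending level order. A small sketch under that assumption (not the real Protocol API):

    def instantiate_protocols(level_tuples, parameters):
        """level_tuples: iterable of (protocol_class, level) pairs."""
        ordered = sorted(level_tuples, key=lambda pair: pair[1])
        return [cls(parameters) for cls, _level in ordered]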
Example #16
 def test_action_is_valid_if_there_are_not_errors(self):  # pylint: disable=invalid-name
     action = Action()
     action.__errors__ = [1]
     self.assertFalse(action.valid)
     action.__errors__ = []
     self.assertTrue(action.valid)
Example #17
    def parse(self, content, device, job_id, socket_addr, output_dir=None,
              env_dut=None):
        self.loader = yaml.Loader(content)
        self.loader.compose_node = self.compose_node
        self.loader.construct_mapping = self.construct_mapping
        data = self.loader.get_single_data()

        self.context['default_action_duration'] = Timeout.default_duration()
        self.context['default_test_duration'] = Timeout.default_duration()
        job = Job(job_id, socket_addr, data)
        counts = {}
        job.device = device
        job.parameters['output_dir'] = output_dir
        job.parameters['env_dut'] = env_dut
        job.parameters['target'] = device.target
        level_tuple = Protocol.select_all(job.parameters)
        # sort the list of protocol objects by the protocol class level.
        job.protocols = [item[0](job.parameters) for item in sorted(level_tuple, key=lambda level_tuple: level_tuple[1])]
        pipeline = Pipeline(job=job)
        self._timeouts(data, job)

        # FIXME: also read permissible overrides from device config and set from job data
        # FIXME: ensure that a timeout for deployment 0 does not get set as the timeout for deployment 1 if 1 is default
        for action_data in data['actions']:
            action_data.pop('yaml_line', None)
            for name in action_data:
                if type(action_data[name]) is dict:  # FIXME: commands are not fully implemented & may produce a list
                    action_data[name]['default_action_timeout'] = self.context['default_action_duration']
                    action_data[name]['default_test_timeout'] = self.context['default_test_duration']
                counts.setdefault(name, 1)
                if name == 'deploy' or name == 'boot' or name == 'test':
                    # reset the context before adding a second deployment and again before third etc.
                    if name == 'deploy' and counts[name] >= 2:
                        reset_context = ResetContext()
                        reset_context.section = name
                        pipeline.add_action(reset_context)
                    parse_action(action_data, name, device, pipeline)
                elif name == 'repeat':
                    count = action_data[name]['count']  # first list entry must be the count dict
                    repeats = action_data[name]['actions']
                    for c_iter in xrange(count):
                        for repeating in repeats:  # block of YAML to repeat
                            for repeat_action in repeating:  # name of the action for this block
                                if repeat_action == 'yaml_line':
                                    continue
                                repeating[repeat_action]['repeat-count'] = c_iter
                                parse_action(repeating, repeat_action, device, pipeline)

                else:
                    # May only end up being used for submit as other actions all need strategy method objects
                    # select the specific action of this class for this job
                    action = Action.select(name)()
                    action.job = job
                    # put parameters (like rootfs_type, results_dir) into the actions.
                    if type(action_data[name]) == dict:
                        action.parameters = action_data[name]
                    elif name == "commands":
                        # FIXME
                        pass
                    elif type(action_data[name]) == list:
                        for param in action_data[name]:
                            action.parameters = param
                    action.summary = name
                    action.timeout = Timeout(action.name, self.context['default_action_duration'])
                    pipeline.add_action(action)
                counts[name] += 1

        # there's always going to need to be a finalize_process action
        pipeline.add_action(FinalizeAction())
        data['output_dir'] = output_dir
        job.set_pipeline(pipeline)
        return job
Example #18
 def test_definition_exists(self):
     for subclass in Action.__subclasses__():
         if not subclass.name:
             continue
         if not hasattr(subclass, 'definition') and subclass.name not in self.allowed:
             self.fail(subclass)
Example #19
    def test_kvm_simulation(self):  # pylint: disable=too-many-statements
        """
        Build a pipeline which simulates a KVM LAVA job
        without using the formal objects (to avoid validating
        data known to be broken). The details are entirely
        arbitrary.
        """
        factory = Factory()
        job = factory.create_kvm_job('sample_jobs/kvm.yaml')
        pipe = Pipeline()
        action = Action()
        action.name = "deploy_linaro_image"
        action.description = "deploy action using preset subactions in an internal pipe"
        action.summary = "deploy_linaro_image"
        action.job = job
        # deliberately unlikely location
        # a successful validation would need to use the cwd
        action.parameters = {
            "image": "file:///none/images/bad-kvm-debian-wheezy.img"
        }
        pipe.add_action(action)
        self.assertEqual(action.level, "1")
        deploy_pipe = Pipeline(action)
        action = Action()
        action.name = "downloader"
        action.description = "download image wrapper, including an internal retry pipe"
        action.summary = "downloader"
        action.job = job
        deploy_pipe.add_action(action)
        self.assertEqual(action.level, "1.1")
        # a formal RetryAction would contain a pre-built pipeline which can be inserted directly
        retry_pipe = Pipeline(action)
        action = Action()
        action.name = "wget"
        action.description = "do the download with retries"
        action.summary = "wget"
        action.job = job
        retry_pipe.add_action(action)
        self.assertEqual(action.level, "1.1.1")
        action = Action()
        action.name = "checksum"
        action.description = "checksum the downloaded file"
        action.summary = "md5sum"
        action.job = job
        deploy_pipe.add_action(action)
        self.assertEqual(action.level, "1.2")
        action = Action()
        action.name = "overlay"
        action.description = "apply lava overlay"
        action.summary = "overlay"
        action.job = job
        deploy_pipe.add_action(action)
        self.assertEqual(action.level, "1.3")
        action = Action()
        action.name = "boot"
        action.description = "boot image"
        action.summary = "qemu"
        action.job = job
        # cmd_line built from device configuration
        action.parameters = {
            'cmd_line': [
                'qemu-system-x86_64', '-machine accel=kvm:tcg',
                '-hda'
                '%s' % "tbd", '-nographic', '-net', 'nic,model=virtio'
                '-net user'
            ]
        }
        pipe.add_action(action)
        self.assertEqual(action.level, "2")

        action = Action()
        action.name = "simulated"
        action.description = "lava test shell"
        action.summary = "simulated"
        action.job = job
        # a formal lava test shell action would include an internal pipe
        # which would handle the run.sh
        pipe.add_action(action)
        self.assertEqual(action.level, "3")
        # just a fake action
        action = Action()
        action.name = "fake"
        action.description = "faking results"
        action.summary = "fake action"
        action.job = job
        pipe.add_action(action)
        self.assertEqual(action.level, "4")
        self.assertEqual(len(pipe.describe()), 4)
Example #20
 def test_create_internal_pipeline(self):
     action = Action()
     action.name = "internal_pipe"
     action.description = "test action only"
     action.summary = "starter"
     pipe = Pipeline()
     pipe.add_action(action)
     self.assertEqual(len(pipe.actions), 1)
     self.assertEqual(action.level, "1")
     action = Action()
     action.name = "child_action"
     action.summary = "child"
     action.description = "action implementing an internal pipe"
     with self.assertRaises(LAVABug):
         Pipeline(action)
     pipe.add_action(action)
     self.assertEqual(action.level, "2")
     self.assertEqual(len(pipe.actions), 2)
     # a formal RetryAction would contain a pre-built pipeline which can be inserted directly
     retry_pipe = Pipeline(action)
     action = Action()
     action.name = "inside_action"
     action.description = "action inside the internal pipe"
     action.summary = "child"
     retry_pipe.add_action(action)
     self.assertEqual(len(retry_pipe.actions), 1)
     self.assertEqual(action.level, "2.1")
Example #21
 def test_references_a_device(self):
     device = object()
     cmd = Action()
     cmd.device = device
     self.assertIs(cmd.device, device)
Example #22
    def test_complex_pipeline(self):  # pylint: disable=too-many-statements
        action = Action()
        action.name = "starter_action"
        action.description = "test action only"
        action.summary = "starter"
        pipe = Pipeline()
        pipe.add_action(action)
        self.assertEqual(action.level, "1")
        action = Action()
        action.name = "pipe_action"
        action.description = "action implementing an internal pipe"
        action.summary = "child"
        pipe.add_action(action)
        self.assertEqual(action.level, "2")
        # a formal RetryAction would contain a pre-built pipeline which can be inserted directly
        retry_pipe = Pipeline(action)
        action = Action()
        action.name = "child_action"
        action.description = "action inside the internal pipe"
        action.summary = "child"
        retry_pipe.add_action(action)
        self.assertEqual(action.level, "2.1")
        action = Action()
        action.name = "second-child-action"
        action.description = "second action inside the internal pipe"
        action.summary = "child2"
        retry_pipe.add_action(action)
        self.assertEqual(action.level, "2.2")
        action = Action()
        action.name = "baby_action"
        action.description = "action implementing an internal pipe"
        action.summary = "baby"
        retry_pipe.add_action(action)
        self.assertEqual(action.level, "2.3")
        inner_pipe = Pipeline(action)
        action = Action()
        action.name = "single_action"
        action.description = "single line action"
        action.summary = "single"
        inner_pipe.add_action(action)
        self.assertEqual(action.level, "2.3.1")

        action = Action()
        action.name = "step_out"
        action.description = "step out of inner pipe"
        action.summary = "brother"
        retry_pipe.add_action(action)
        self.assertEqual(action.level, "2.4")
        action = Action()
        action.name = "top-level"
        action.description = "top level"
        action.summary = "action"
        pipe.add_action(action)
        self.assertEqual(action.level, "3")
        self.assertEqual(len(pipe.describe()), 3)
Example #23
    def test_kvm_simulation(self):  # pylint: disable=too-many-statements
        """
        Build a pipeline which simulates a KVM LAVA job
        without using the formal objects (to avoid validating
        data known to be broken). The details are entirely
        arbitrary.
        """
        factory = Factory()
        job = factory.create_kvm_job('sample_jobs/kvm.yaml')
        pipe = Pipeline()
        action = Action()
        action.name = "deploy_linaro_image"
        action.description = "deploy action using preset subactions in an internal pipe"
        action.summary = "deploy_linaro_image"
        action.job = job
        # deliberately unlikely location
        # a successful validation would need to use the cwd
        action.parameters = {"image": "file:///none/images/bad-kvm-debian-wheezy.img"}
        pipe.add_action(action)
        self.assertEqual(action.level, "1")
        deploy_pipe = Pipeline(action)
        action = Action()
        action.name = "downloader"
        action.description = "download image wrapper, including an internal retry pipe"
        action.summary = "downloader"
        action.job = job
        deploy_pipe.add_action(action)
        self.assertEqual(action.level, "1.1")
        # a formal RetryAction would contain a pre-built pipeline which can be inserted directly
        retry_pipe = Pipeline(action)
        action = Action()
        action.name = "wget"
        action.description = "do the download with retries"
        action.summary = "wget"
        action.job = job
        retry_pipe.add_action(action)
        self.assertEqual(action.level, "1.1.1")
        action = Action()
        action.name = "checksum"
        action.description = "checksum the downloaded file"
        action.summary = "md5sum"
        action.job = job
        deploy_pipe.add_action(action)
        self.assertEqual(action.level, "1.2")
        action = Action()
        action.name = "overlay"
        action.description = "apply lava overlay"
        action.summary = "overlay"
        action.job = job
        deploy_pipe.add_action(action)
        self.assertEqual(action.level, "1.3")
        action = Action()
        action.name = "boot"
        action.description = "boot image"
        action.summary = "qemu"
        action.job = job
        # cmd_line built from device configuration
        action.parameters = {
            'cmd_line': [
                'qemu-system-x86_64',
                '-machine accel=kvm:tcg',
                '-hda'
                '%s' % "tbd",
                '-nographic',
                '-net',
                'nic,model=virtio'
                '-net user'
            ]
        }
        pipe.add_action(action)
        self.assertEqual(action.level, "2")

        action = Action()
        action.name = "simulated"
        action.description = "lava test shell"
        action.summary = "simulated"
        action.job = job
        # a formal lava test shell action would include an internal pipe
        # which would handle the run.sh
        pipe.add_action(action)
        self.assertEqual(action.level, "3")
        # just a fake action
        action = Action()
        action.name = "fake"
        action.description = "faking results"
        action.summary = "fake action"
        action.job = job
        pipe.add_action(action)
        self.assertEqual(action.level, "4")
        self.assertEqual(len(pipe.describe()), 4)
Example #24
    def parse(self, content, device, output_dir=None):
        self.loader = yaml.Loader(content)
        self.loader.compose_node = self.compose_node
        self.loader.construct_mapping = self.construct_mapping
        data = self.loader.get_single_data()

        job = Job(data)

        job.device = device
        job.parameters['output_dir'] = output_dir
        pipeline = Pipeline(job=job)
        for action_data in data['actions']:
            line = action_data.pop('yaml_line', None)
            for name in action_data:
                if name == "deploy":
                    # allow the classmethod to check the parameters
                    deploy = Deployment.select(device,
                                               action_data[name])(pipeline)
                    deploy.action.parameters = action_data[
                        name]  # still need to pass the parameters to the instance
                    if 'test' in data['actions']:
                        deploy.action.parameters = action_data['test']
                    deploy.action.yaml_line = line
                    device.deployment_data = deployment_data.get(
                        deploy.action.parameters['os'])
                    deploy.action.parameters = {
                        'deployment_data': device.deployment_data
                    }
                elif name == "boot":
                    boot = Boot.select(device, action_data[name])(pipeline)
                    boot.action.parameters = action_data[name]
                    boot.action.yaml_line = line
#                elif name == "test":
#                    lavatest = LavaTest.select(device, action_data[name])(pipeline)
#                    lavatest.action.parameters = action_data[name]
#                    lavatest.action.yaml_line = line
                else:
                    # May only end up being used for submit as other actions all need strategy method objects
                    # select the specific action of this class for this job
                    action = Action.find(name)()
                    # put parameters (like rootfs_type, results_dir) into the actions.
                    if type(action_data[name]) == dict:
                        action.parameters = action_data[name]
                    elif name == "commands":
                        # FIXME
                        pass
                    elif type(action_data[name]) == list:
                        for param in action_data[name]:
                            action.parameters = param
                    action.summary = name
                    pipeline.add_action(action)
                # uncomment for debug
                # print action.parameters

        # there's always going to need to be a finalize_process action
        pipeline.add_action(FinalizeAction())
        # the only parameters sent to the job are job parameters
        # like job_name, logging_level or target_group.
        data.pop('actions')
        data['output_dir'] = output_dir
        job.set_pipeline(pipeline)
        return job