Example #1
 def setUp(self):
     super(TestTimeout, self).setUp()
     self.parameters = {
         "job_name": "fakejob",
         'timeouts': {
             'job': {
                 'seconds': 3
             }
         },
         "actions": [
             {
                 'deploy': {
                     'namespace': 'common',
                     'failure_retry': 3
                 },
                 'boot': {
                     'namespace': 'common',
                     'failure_retry': 4
                 },
                 'test': {
                     'namespace': 'common',
                     'failure_retry': 5
                 }
             }
         ]
     }
     self.fakejob = TestTimeout.FakeJob(self.parameters)
     # copy of the _timeout function from parser.
     if 'timeouts' in self.parameters:
         if 'job' in self.parameters['timeouts']:
             duration = Timeout.parse(self.parameters['timeouts']['job'])
             self.fakejob.timeout = Timeout(self.parameters['job_name'], duration)
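
Both this example and the ones below treat Timeout.parse as a converter from a
duration mapping such as {'seconds': 3} or {'minutes': 2} into a plain number of
seconds. A minimal sketch of that assumed behaviour, using a hypothetical
parse_timeout helper rather than the real LAVA implementation:

import datetime

def parse_timeout(data):
    # Hypothetical stand-in for Timeout.parse: convert a mapping of
    # days/hours/minutes/seconds into a total duration in seconds.
    if not isinstance(data, dict):
        raise ValueError("timeout data must be a dict")
    delta = datetime.timedelta(
        days=data.get('days', 0),
        hours=data.get('hours', 0),
        minutes=data.get('minutes', 0),
        seconds=data.get('seconds', 0),
    )
    return delta.total_seconds()

assert parse_timeout({'seconds': 3}) == 3
assert parse_timeout({'minutes': 2}) == 120
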
Example #2
    def _check_data(self, data):
        try:
            json_data = json.loads(data)
        except (ValueError, TypeError) as exc:
            raise JobError("Invalid data for %s protocol: %s %s" % (self.name, data, exc))
        if not isinstance(json_data, dict):
            raise JobError("Invalid data type %s for protocol %s" % (data, self.name))
        if not json_data:
            raise JobError("No data to be sent over protocol %s" % self.name)
        if 'request' not in json_data:
            raise JobError("Bad API call over protocol - missing request")
        if json_data["request"] == "aggregate":
            raise JobError("Pipeline submission has not been implemented.")
        if "poll_delay" in json_data:
            self.settings['poll_delay'] = int(json_data["poll_delay"])
        if 'timeout' in json_data:
            if isinstance(json_data['timeout'], dict):
                self.poll_timeout.duration = Timeout.parse(json_data['timeout'])
            elif isinstance(json_data['timeout'], int) or isinstance(json_data['timeout'], float):
                self.poll_timeout.duration = json_data['timeout']
            else:
                self.logger.debug(json_data['timeout'])
                raise JobError("Invalid timeout request")
            self.logger.debug("Setting poll timeout of %s seconds", int(self.poll_timeout.duration))
        if 'messageID' not in json_data:
            raise JobError("Missing messageID")
        # handle conversion of api calls to internal functions
        json_data['request'] = json_data['request'].replace('-', '_')

        return json_data
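
_check_data only accepts a JSON object that carries at least a 'request' and a
'messageID', rejects the 'aggregate' request, and optionally reads 'poll_delay'
and 'timeout'. A hypothetical payload that would pass every check above (the
request name and messageID are invented for illustration, not taken from the
LAVA API):

import json

# Hypothetical payload for _check_data; the values are illustrative only.
payload = json.dumps({
    "request": "lava-send",       # '-' is rewritten to '_' by _check_data
    "messageID": "client-job-1",  # required, otherwise JobError is raised
    "timeout": {"minutes": 5},    # dict form goes through Timeout.parse
    "poll_delay": 3,              # coerced to int for settings['poll_delay']
})
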
Example #3
 def setup(self, parameters):
     """
     Retrieve the poll_timeout from the protocol parameters which are set after init.
     """
     if MultinodeProtocol.name not in parameters:
         return
     if 'timeout' in parameters[MultinodeProtocol.name]:
         self.base_message = {
             'timeout': Timeout.parse(parameters[MultinodeProtocol.name]['timeout'])
         }
Example #4
 def test_multinode_timeout(self):
     """
     Test the protocol timeout is assigned to the action
     """
     testshell = [action for action in self.client_job.pipeline.actions if isinstance(action, MultinodeTestAction)][0]
     testshell.validate()
     self.assertIn(30, [p.poll_timeout.duration for p in testshell.protocols])
     self.assertIn('minutes', testshell.parameters['lava-multinode']['timeout'])
     self.assertEqual(10, testshell.parameters['lava-multinode']['timeout']['minutes'])
     self.assertEqual(
         testshell.signal_director.base_message['timeout'],
         Timeout.parse(testshell.parameters['lava-multinode']['timeout'])
     )
Example #5
 def test_action_connection_timeout(self):
     """
     Test connection timeout specified for a particular action
     """
     y_file = os.path.join(os.path.dirname(__file__), './sample_jobs/uboot-ramdisk.yaml')
     with open(y_file, 'r') as uboot_ramdisk:
         data = yaml.load(uboot_ramdisk)
     connection_timeout = Timeout.parse(data['timeouts']['connection'])
     self.assertEqual(connection_timeout, 240)
     job = self.create_custom_job(yaml.dump(data))
     boot = [action for action in job.pipeline.actions if action.name == 'uboot-action'][0]
     retry = [action for action in boot.internal_pipeline.actions if action.name == 'uboot-retry'][0]
     self.assertEqual(retry.timeout.duration, 90)  # Set by the job global action timeout
     self.assertEqual(retry.connection_timeout.duration, 45)
Example #6
    def test_testshell(self):
        testshell = None
        for action in self.job.pipeline.actions:
            self.assertIsNotNone(action.name)
            if isinstance(action, TestShellRetry):
                testshell = action.pipeline.actions[0]
                break
        self.assertIsInstance(testshell, TestShellAction)
        self.assertTrue(testshell.valid)

        if 'timeout' in testshell.parameters:
            time_int = Timeout.parse(testshell.parameters['timeout'])
        else:
            time_int = Timeout.default_duration()
        self.assertEqual(
            datetime.timedelta(seconds=time_int).total_seconds(),
            testshell.timeout.duration
        )
Example #7
 def __init__(self, parameters, job_id):
     super(MultinodeProtocol, self).__init__(parameters, job_id)
     self.blocks = 4 * 1024
     # how long between polls (in seconds)
     self.system_timeout = Timeout('system', LAVA_MULTINODE_SYSTEM_TIMEOUT)
     self.settings = None
     self.sock = None
     self.base_message = None
     self.logger = logging.getLogger('dispatcher')
     self.delayed_start = False
     params = parameters['protocols'][self.name]
     if 'request' in params and 'lava-start' == params['request'] and 'expect_role' in params:
         if params['expect_role'] != params['role']:
             self.delayed_start = True
             self.system_timeout.duration = Timeout.parse(params['timeout'])
         else:
             self.errors = "expect_role must not match the role declaring lava_start"
             self.logger.warning(self.errors)
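
The constructor above only enables delayed_start when the protocol block asks
for 'lava-start' and names an expect_role different from its own role. A
hypothetical parameters fragment that would take that branch (the role names
and timeout value are invented for illustration):

# Hypothetical protocol parameters for MultinodeProtocol; values are illustrative.
parameters = {
    'protocols': {
        'lava-multinode': {
            'role': 'client',
            'request': 'lava-start',
            'expect_role': 'server',    # differs from 'role', so delayed_start is set
            'timeout': {'minutes': 5},  # parsed into system_timeout.duration
        }
    }
}
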
Example #8
def build_action(action_data, testdata, submission):
    # test for a known section
    logger = logging.getLogger('lava-master')
    if 'section' not in action_data:
        logger.warning("Invalid action data - missing section")
        return

    metatype = MetaType.get_section(action_data['section'])
    if metatype is None:  # 0 is allowed
        logger.debug("Unrecognised metatype in action_data: %s", action_data['section'])
        return
    # lookup the type from the job definition.
    type_name = MetaType.get_type_name(action_data, submission)
    if not type_name:
        logger.debug(
            "type_name failed for %s metatype %s",
            action_data['section'], MetaType.TYPE_CHOICES[metatype])
        return
    action_meta, _ = MetaType.objects.get_or_create(name=type_name,
                                                    metatype=metatype)
    max_retry = action_data.get('max_retries')

    # find corresponding test case
    match_case = None
    test_cases = TestCase.objects.filter(suite__job=testdata.testjob, suite__name='lava')
    for case in test_cases:
        if 'level' in case.action_metadata:
            if case.action_metadata['level'] == action_data['level']:
                match_case = case

    # maps the static testdata derived from the definition to the runtime pipeline construction
    ActionData.objects.create(
        action_name=action_data['name'],
        action_level=action_data['level'],
        action_summary=action_data['summary'],
        testdata=testdata,
        action_description=action_data['description'],
        meta_type=action_meta,
        max_retries=max_retry,
        timeout=int(Timeout.parse(action_data['timeout'])),
        testcase=match_case
    )
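
build_action reads only a handful of keys from each entry in the pipeline
description. A hypothetical action_data entry that exercises every key the
function touches (the values are invented for illustration):

# Hypothetical pipeline-description entry consumed by build_action;
# the values are illustrative only.
action_data = {
    'section': 'boot',
    'level': '2.1',
    'name': 'uboot-retry',
    'summary': 'boot the device with u-boot',
    'description': 'interactive u-boot retry action',
    'max_retries': 3,               # optional, read with .get()
    'timeout': {'minutes': 5},      # stored as int(Timeout.parse(...))
}
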
Example #9
 def test_multi_deploy(self):
     self.assertIsNotNone(self.parsed_data)
     job = Job(4212, self.parsed_data, None)
     job.timeout = Timeout("Job", Timeout.parse({'minutes': 2}))
     pipeline = Pipeline(job=job)
     device = TestMultiDeploy.FakeDevice()
     self.assertIsNotNone(device)
     job.device = device
     job.logger = DummyLogger()
     job.pipeline = pipeline
     counts = {}
     for action_data in self.parsed_data['actions']:
         for name in action_data:
             counts.setdefault(name, 1)
             parameters = action_data[name]
             test_deploy = TestMultiDeploy.TestDeploy(pipeline, parameters, job)
             self.assertEqual(
                 {},
                 test_deploy.action.data
             )
             counts[name] += 1
     # check that only one action has the example set
     self.assertEqual(
         ['nowhere'],
         [detail['deploy']['example'] for detail in self.parsed_data['actions'] if 'example' in detail['deploy']]
     )
     self.assertEqual(
         ['faked', 'valid'],
         [detail['deploy']['parameters'] for detail in self.parsed_data['actions'] if 'parameters' in detail['deploy']]
     )
     self.assertIsInstance(pipeline.actions[0], TestMultiDeploy.TestDeployAction)
     self.assertIsInstance(pipeline.actions[1], TestMultiDeploy.TestDeployAction)
     self.assertIsInstance(pipeline.actions[2], TestMultiDeploy.TestDeployAction)
     job.validate()
     self.assertEqual([], job.pipeline.errors)
     job.run()
     self.assertNotEqual(pipeline.actions[0].data, {'fake-deploy': pipeline.actions[0].parameters})
     self.assertEqual(pipeline.actions[1].data, {'fake-deploy': pipeline.actions[2].parameters})
     # check that values from previous DeployAction runs have been cleared
     self.assertEqual(pipeline.actions[2].data, {'fake-deploy': pipeline.actions[2].parameters})
Example #10
    def test_panda_template(self):
        logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
        logger = logging.getLogger('unittests')
        logger.disabled = True
        logger.propagate = False
        data = """{% extends 'panda.jinja2' %}
{% set connection_command = 'telnet serial4 7012' %}
{% set hard_reset_command = '/usr/bin/pduclient --daemon staging-master --hostname pdu15 --command reboot --port 05' %}
{% set power_off_command = '/usr/bin/pduclient --daemon staging-master --hostname pdu15 --command off --port 05' %}
{% set power_on_command = '/usr/bin/pduclient --daemon staging-master --hostname pdu15 --command on --port 05' %}"""
        self.assertTrue(self.validate_data('staging-panda-01', data))
        context = {'extra_kernel_args': 'intel_mmio=on mmio=on'}
        template_dict = prepare_jinja_template('staging-panda-01',
                                               data,
                                               job_ctx=context,
                                               raw=False)
        self.assertIn('bootloader-commands',
                      template_dict['timeouts']['actions'])
        self.assertEqual(
            180.0,
            Timeout.parse(
                template_dict['timeouts']['actions']['bootloader-commands']))
        commands = template_dict['actions']['boot']['methods']['u-boot'][
            'ramdisk']['commands']
        checked = False
        self.assertIsNotNone(commands)
        self.assertIsInstance(commands, list)
        self.assertIn('usb start', commands)
        for line in commands:
            if 'setenv bootargs' in line:
                self.assertIn('console=ttyO2', line)
                self.assertIn(' ' + context['extra_kernel_args'] + ' ', line)
                checked = True
        self.assertTrue(checked)
        checked = False
        for line in commands:
            if 'setenv initrd_high' in line:
                checked = True
        self.assertTrue(checked)
Example #11
 def test_action_connection_timeout(self):
     """
     Test connection timeout specified for a particular action
     """
     y_file = os.path.join(os.path.dirname(__file__),
                           './sample_jobs/uboot-ramdisk.yaml')
     with open(y_file, 'r') as uboot_ramdisk:
         data = yaml.load(uboot_ramdisk)
     connection_timeout = Timeout.parse(data['timeouts']['connection'])
     self.assertEqual(connection_timeout, 240)
     job = self.create_custom_job(yaml.dump(data))
     boot = [
         action for action in job.pipeline.actions
         if action.name == 'uboot-action'
     ][0]
     retry = [
         action for action in boot.internal_pipeline.actions
         if action.name == 'uboot-retry'
     ][0]
     self.assertEqual(retry.timeout.duration,
                      90)  # Set by the job global action timeout
     self.assertEqual(retry.connection_timeout.duration, 45)
Example #12
    def test_panda_template(self):
        logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
        logger = logging.getLogger("unittests")
        logger.disabled = True
        logger.propagate = False
        data = """{% extends 'panda.jinja2' %}
{% set connection_command = 'telnet serial4 7012' %}
{% set hard_reset_command = '/usr/bin/pduclient --daemon staging-master --hostname pdu15 --command reboot --port 05' %}
{% set power_off_command = '/usr/bin/pduclient --daemon staging-master --hostname pdu15 --command off --port 05' %}
{% set power_on_command = '/usr/bin/pduclient --daemon staging-master --hostname pdu15 --command on --port 05' %}"""
        self.assertTrue(self.validate_data("staging-panda-01", data))
        context = {"extra_kernel_args": "intel_mmio=on mmio=on"}
        template_dict = prepare_jinja_template(
            "staging-panda-01", data, job_ctx=context, raw=False
        )
        self.assertIn("bootloader-commands", template_dict["timeouts"]["actions"])
        self.assertEqual(
            180.0,
            Timeout.parse(template_dict["timeouts"]["actions"]["bootloader-commands"]),
        )
        commands = template_dict["actions"]["boot"]["methods"]["u-boot"]["ramdisk"][
            "commands"
        ]
        checked = False
        self.assertIsNotNone(commands)
        self.assertIsInstance(commands, list)
        self.assertIn("usb start", commands)
        for line in commands:
            if "setenv bootargs" in line:
                self.assertIn("console=ttyO2", line)
                self.assertIn(" " + context["extra_kernel_args"] + " ", line)
                checked = True
        self.assertTrue(checked)
        checked = False
        for line in commands:
            if "setenv initrd_high" in line:
                checked = True
        self.assertTrue(checked)
Example #13
 def _timeouts(self, data, job):  # pylint: disable=no-self-use
     if 'job' in data.get('timeouts', {}):
         duration = Timeout.parse(data['timeouts']['job'])
         job.timeout = Timeout('job', duration)
Example #14
 def _timeouts(self, data, job):  # pylint: disable=no-self-use
     if "job" in data.get("timeouts", {}):
         duration = Timeout.parse(data["timeouts"]["job"])
         job.timeout = Timeout("job", duration)
Example #15
 def _timeouts(self, data, job):
     if "job" in data.get("timeouts", {}):
         duration = Timeout.parse(data["timeouts"]["job"])
         job.timeout = Timeout("job", duration)
Example #16
    test_cases = TestCase.objects.filter(suite__job=testdata.testjob,
                                         suite__name='lava')
    for case in test_cases:
        if 'level' in case.action_metadata:
            if case.action_metadata['level'] == action_data['level']:
                match_case = case

    # maps the static testdata derived from the definition to the runtime pipeline construction
    ActionData.objects.create(action_name=action_data['name'],
                              action_level=action_data['level'],
                              action_summary=action_data['summary'],
                              testdata=testdata,
                              action_description=action_data['description'],
                              meta_type=action_meta,
                              max_retries=max_retry,
                              timeout=int(Timeout.parse(
                                  action_data['timeout'])),
                              testcase=match_case)


def walk_actions(data, testdata, submission):
    for action in data:
        build_action(action, testdata, submission)
        if 'pipeline' in action:
            walk_actions(action['pipeline'], testdata, submission)


def map_metadata(description, job):
    """
    Generate metadata from the combination of the pipeline definition
    file (after any parsing for protocols) and the pipeline description
    into static metadata (TestData) related to this specific job
Example #17
 def _timeouts(self, data, job):
     if data.get('timeouts', None) is not None:
         if 'job' in data['timeouts']:
             duration = Timeout.parse(data['timeouts']['job'])
             job.timeout = Timeout(data['job_name'], duration)