Example #1
def validate_clean(clean_file, testbed_file, lint=True):
    """ Validates the clean yaml using device abstraction to collect
        the proper schemas

        Args:
            clean_file (str/dict): clean datafile
            testbed_file (str/dict): testbed datafile
            lint (bool, optional): Run YAML linting on the clean_file. Defaults to True.

        Returns:
            {
                'warnings': ['Warning example', ...],
                'exceptions': [ValueError, ...]
            }
    """
    warnings = []
    exceptions = []
    validation_results = {'warnings': warnings, 'exceptions': exceptions}

    if lint:
        lint_messages = do_lint(clean_file)
        for message in lint_messages:
            # we want to use the str representation not the object
            warnings.append(str(message))

    # these sections are not true stages and therefore can't be loaded
    sections_to_ignore = [
        'images',
        'order'
    ]

    base_schema = {
        Optional('clean_devices'): list,
        'cleaners': {
            Any(): {
                'module': str,
                Optional('devices'): list,
                Optional('platforms'): list,
                Optional('groups'): list,
                Any(): Any()
            }
        },
        'devices': {}
    }

    try:
        # Load the yaml without parsing markup.
        # Mock PyatsUse.validate to prevent calling functions like
        # translate_host or import_from_name.
        with patch.object(PyatsUse, 'validate') as mockvalid:
            # return data on Use.validate
            mockvalid.side_effect = lambda *x, **y: x[1]
            loaded_tb = testbed_loader(testbed_file,
                                       locations={},
                                       markupprocessor=TestbedMarkupProcessor(
                                           reference=True,
                                           callable=False,
                                           env_var=False,
                                           include_file=False,
                                           ask=False,
                                           encode=False))
    except Exception:
        exceptions.append(
            Exception("Could not load the testbed file. Use "
                      "'pyats validate testbed <file>' to validate "
                      "the testbed file.")
        )
        loaded_tb = testbed_loader({})

    loader = Loader(enable_extensions=True,
                    markupprocessor=MarkupProcessor(reference=True,
                                                    callable=False,
                                                    env_var=False,
                                                    include_file=False,
                                                    ask=False,
                                                    encode=False))

    try:
        clean_dict = loader.load(clean_file, locations={})
    except Exception as e:
        exceptions.append(e)
        return validation_results

    try:
        clean_json = load_clean_json()
    except Exception as e:
        exceptions.append(e)

    from genie.libs.clean.recovery import recovery_processor

    for dev in clean_dict.get('devices', {}):
        schema = base_schema.setdefault('devices', {}).setdefault(dev, {})
        schema.update({Optional('order'): list})
        schema.update({Optional('device_recovery'): dict})
        schema.update({Optional('images'): Or(list, dict)})

        clean_data = clean_dict["devices"][dev]

        try:
            dev = loaded_tb.devices[dev]
        except KeyError as e:
            warnings.append(
                "The device {dev} specified in the clean yaml does "
                "not exist in the testbed.".format(dev=e))
            # can't validate the schema, so allow anything under this device
            schema.update({Any(): Any()})
            continue
        except Exception as e:
            exceptions.append(e)
            schema.update({Any(): Any()})
            continue

        # update stages with image
        if clean_data.get('images'):
            setattr(dev, 'clean', clean_data)
            try:
                # Get abstracted ImageHandler class
                abstract = Lookup.from_device(dev, packages={'clean': clean})
                ImageHandler = abstract.clean.stages.image_handler.ImageHandler
                image_handler = ImageHandler(dev, dev.clean['images'])
                initialize_clean_sections(image_handler, clean_data['order'])
            except Exception as e:
                # If the device does not have custom.abstraction defined
                # then we cannot load the correct stages to test the
                # correct schema. Skip this device.
                exceptions.append(Exception(dev.name+': '+str(e)))
                schema.update({Any(): Any()})
                continue

        for section in clean_data:

            # ignore sections that aren't true stages
            if section in sections_to_ignore:
                continue

            if section == 'device_recovery':
                schema.update({'device_recovery': recovery_processor.schema})
                continue

            clean_data[section].pop('change_order_if_fail', None)
            clean_data[section].pop('change_order_if_pass', None)

            # when no data is provided under stage, change None to dict
            # this is needed for schema validation
            if clean_data[section] is None:
                clean_data[section] = {}

            # Load it up so we can grab the schema from the stage
            # If source isn't provided then check if it is inside the clean json
            try:
                if 'source' not in clean_data:
                    task = get_clean_function(section, clean_json, dev)
                else:
                    task = load_class(clean_data, dev)
            except Exception as e:
                # Stage cannot be found. Allow any schema to prevent schema error
                # and skip this stage
                exceptions.append(str(e))
                schema.update({section: Any()})
                continue

            # Add the stage schema to the base schema
            if hasattr(task, 'schema'):
                schema.update({task.__name__: task.schema})

    try:
        Schema(base_schema).validate(clean_dict)
    except Exception as e:
        exceptions.append(pretty_schema_exception(e))

    return validation_results
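
A minimal usage sketch (illustrative only; the file paths below are hypothetical and
the same imports that validate_clean itself relies on are assumed to be in scope):

results = validate_clean('clean.yaml', 'testbed.yaml', lint=True)
# 'warnings' holds plain strings, 'exceptions' holds exception objects
for warning in results['warnings']:
    print('WARNING: ' + warning)
for exc in results['exceptions']:
    print('ERROR: ' + str(exc))
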
Example #2
def validate_clean(clean_dict, testbed_dict):
    """ Validates the clean yaml using device abstraction to collect
        the proper schemas

        Args:
            clean_dict (dict): clean datafile
            testbed_dict (dict): testbed datafile

        Returns:
            {
                'warnings': ['Warning example', ...],
                'exceptions': [ValueError, ...]
            }
    """
    warnings = []
    exceptions = []
    validation_results = {'warnings': warnings, 'exceptions': exceptions}

    # these sections are not true stages and therefore can't be loaded
    sections_to_ignore = [
        'images',
        'order'
    ]

    base_schema = {
        'cleaners': {
            Any(): {
                'module': str,
                Optional('devices'): list,
                Optional('platforms'): list,
                Optional('groups'): list,
                Any(): Any()
            }
        },
        'devices': {}
    }

    try:
        loaded_tb = testbed_loader(testbed_dict)
    except Exception:
        exceptions.append(
            Exception("Could not load the testbed file. Use "
                      "'pyats validate testbed <file>' to validate "
                      "the testbed file.")
        )

    clean_json = load_clean_json()
    from genie.libs.clean.stages.recovery import recovery_processor

    for dev in clean_dict.get('devices', {}):
        schema = base_schema.setdefault('devices', {}).setdefault(dev, {})
        schema.update({Optional('order'): list})
        schema.update({Optional('device_recovery'): dict})
        schema.update({Optional('images'): Or(list, dict)})

        clean_data = clean_dict["devices"][dev]

        try:
            dev = loaded_tb.devices[dev]
        except KeyError as e:
            warnings.append(
                "The device {dev} specified in the clean yaml does "
                "not exist in the testbed.".format(dev=e))
            # can't validate the schema, so allow anything under this device
            schema.update({Any(): Any()})
            continue

        # update stages with image
        if clean_data.get('images'):
            setattr(dev, 'clean', clean_data)
            # Get abstracted ImageHandler class
            abstract = Lookup.from_device(dev, packages={'clean': clean})
            ImageHandler = abstract.clean.stages.image_handler.ImageHandler

            # Image handler
            image_handler = ImageHandler(dev, dev.clean['images'])
            initialize_clean_sections(image_handler, clean_data['order'])

        for section in clean_data:
            # ignore sections that aren't true stages
            if section in sections_to_ignore:
                continue

            if section == 'device_recovery':
                schema.update({'device_recovery': recovery_processor.schema})
                continue

            # when no data is provided under stage, change None to dict
            # this is needed for schema validation
            if clean_data[section] is None:
                clean_data[section] = {}

            # Load it up so we can grab the schema from the stage
            # If source isn't provided then check if it is inside the clean json
            if 'source' not in clean_data:
                task = get_clean_function(section, clean_json, dev)
            else:
                task = load_class(clean_data, dev)

            # Add the stage schema to the base schema
            if hasattr(task, 'schema'):
                schema.update({task.__name__: task.schema})

    try:
        Schema(base_schema).validate(clean_dict)
    except Exception as e:
        exceptions.append(pretty_schema_exception(e))

    return validation_results
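
A minimal usage sketch for this dict-based variant (illustrative only; the device
name, OS, cleaner name, and stage names below are hypothetical and would need to
match a real testbed and the clean stages available for that platform):

clean = {
    'cleaners': {
        'DeviceClean': {'module': 'genie.libs.clean', 'devices': ['R1']}
    },
    'devices': {
        'R1': {'order': ['connect'], 'connect': None}
    }
}
testbed = {'devices': {'R1': {'os': 'iosxe', 'connections': {}}}}

results = validate_clean(clean, testbed)
if results['exceptions']:
    raise results['exceptions'][0]
print('\n'.join(results['warnings']))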