Example #1

    def validate(cls, cfg):
        """Validate arguments

        :param cfg: project configuration
        :type cfg: dict
        """
        schema_yml = """
        measures_to_ignore: list(str(), required=False)
        residential_simulation_controls: map(required=False)
        measures: list(include('measure-spec'), required=False)
        reporting_measures: list(include('measure-spec'), required=False)
        simulation_output: map(required=False)
        timeseries_csv_export: map(required=False)
        server_directory_cleanup: map(required=False)
        ---
        measure-spec:
            measure_dir_name: str(required=True)
            arguments: map(required=False)
        """
        workflow_generator_args = cfg['workflow_generator']['args']
        schema_yml = re.sub(r'^ {8}', '', schema_yml, flags=re.MULTILINE)
        schema = yamale.make_schema(content=schema_yml, parser='ruamel')
        data = yamale.make_data(content=json.dumps(workflow_generator_args), parser='ruamel')
        yamale.validate(schema, data, strict=True)
        return cls.validate_measures_and_arguments(cfg)
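
The re.sub call strips the eight leading spaces that the triple-quoted schema picks up from the method body, so yamale sees a flush-left YAML document. textwrap.dedent is an equivalent way to do that (a sketch, not the project's code):

import textwrap

# Remove the common leading indentation from an indented triple-quoted schema
schema_yml = textwrap.dedent("""
    measure-spec:
        measure_dir_name: str(required=True)
        arguments: map(required=False)
""")
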
Example #2

def qc_metadata(metadatafile):
    print("Start metadata validation...")
    schema = yamale.make_schema('../example/dummyschema.yaml')
    data = yamale.make_data(metadatafile)
    # Validate data against the schema. Throws a ValueError if data is invalid.
    yamale.validate(schema, data)
    print("...complete!")
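
dummyschema.yaml is not shown here; the sketch below illustrates the same pattern with inline content instead of files (the schema fields and metadata values are made up for illustration):

import yamale

# Illustrative schema and metadata passed as strings rather than file paths
schema = yamale.make_schema(content="""
name: str()
version: int()
tags: list(str(), required=False)
""")
data = yamale.make_data(content="""
name: my-dataset
version: 3
tags: [qc, metadata]
""")
yamale.validate(schema, data)  # raises an error if the data does not match
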
Example #3

def validate(data: Dict, schema: Path) -> None:
    schema = yamale.make_schema(schema,
                                validators=yamale_validators.all_validators())
    try:
        yamale.validate(schema, [(data, "<dict literal>")], strict=True)
    except ValueError as validation_error:
        raise YamaleValidationException(validation_error)
Example #4

def main(directory=None, file=None, schema=None, recursive=False, quiet=False):
  'Program entry point.'

  yamale_schema = yamale.make_schema(schema)
  search = "**/*.yaml" if recursive else "*.yaml"
  has_errors = []

  files = list(file)
  for d in directory:
    files = files + glob.glob(os.path.join(d, search), recursive=recursive)

  for document in files:
    yamale_data = yamale.make_data(document)
    try:
      yamale.validate(yamale_schema, yamale_data)
      if not quiet:
        print(f'✅  {document} -> {os.path.basename(schema)}')
    except ValueError as e:
      has_errors.append(document)
      print(e)
      print(f'❌ {document} -> {os.path.basename(schema)}')

  if len(has_errors) > 0:
    raise SystemExit(f"❌ Errors found in {len(has_errors)} documents.")
Example #5

    def validate(self, bug_desc: t.Union[str, OrderedDict]) -> bool:
        if type(bug_desc) == str:
            bug_data = self._load(bug_desc)
        elif type(bug_desc) == OrderedDict:
            bug_data = bug_desc
        else:
            print("Unsupported data type for bug description")
            return False

        # validate only the sections of the bug that the schema covers
        if self._keys is None:
            data = yamale.make_data(bug_data)
        else:
            # print(f"validating bug description for sections {self._keys}")
            data_str = ''
            for key in self._keys:
                data_str += f"{key}: {bug_data[key]}\n"
            data = yamale.make_data(content=data_str)

        # Validate data against the schema. Throws a ValueError if data is invalid.
        try:
            yamale.validate(self._schema, data)
            # print('Validation success! 👍')
            return True
        except yamale.YamaleError as e:
            # print('Validation failed!\n')
            for result in e.results:
                # for error in result.errors:
                print(result.errors[0])

        return False
Example #6

def recipe_with_schema(filename):
    """Check if the recipe content matches schema."""
    schema_file = os.path.join(os.path.dirname(__file__), 'recipe_schema.yml')
    logger.debug("Checking recipe against schema %s", schema_file)
    recipe = yamale.make_data(filename)
    schema = yamale.make_schema(schema_file)
    yamale.validate(schema, recipe)
Example #7

def test_config_file_schema():
    schema = yamale.make_schema(os.path.join(ROOT_DIR, 'src/config/schema.yaml'))
    source_files = os.listdir(SOURCE_DIR)
    for source_file in source_files:
        print(source_file)
        data = yamale.make_data(os.path.join(SOURCE_DIR, source_file))
        yamale.validate(schema, data, strict=True)
Example #8

    def make_from_config(cls, config_path: str, **kwargs) -> YABU:
        # Loads the config schema to validate the config
        schema = yamale.make_schema(
            path.join(path.dirname(__file__), "resources/config.schema.yaml"))

        # Tries to load config file
        try:
            config = yamale.make_data(config_path)
        except FileNotFoundError:
            _LOGGER.error(
                "Configuration file '{}' not found".format(config_path))
            raise exceptions.ConfigNotFound(config_path)
        except ScannerError as e:
            _LOGGER.error("Invalid configuration file '{}'\n{}".format(
                config_path, e))
            raise exceptions.InvalidConfig(e)

        # Tries to validate the configuration with the schema
        try:
            yamale.validate(schema, config)
        except YamaleError as e:
            _LOGGER.error("Invalid configuration file '{}'\n{}".format(
                config_path, e))
            raise exceptions.InvalidConfig(e)

        _LOGGER.info("Configuration loaded")

        # create instance from config
        config, _ = config[0]
        return cls(**config, **kwargs)
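
The config, _ = config[0] unpacking works because yamale.make_data returns a list with one (document, source) tuple per YAML document in the input; a minimal sketch of that structure (the content is illustrative):

import yamale

data = yamale.make_data(content="name: demo\nreplicas: 2\n")
document, source = data[0]   # first (and only) document plus its source label
print(document["name"])      # -> demo
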
Example #9

def delete():
    msg = {'err': None, 'res': None}

    try:
        # schema validation
        yamale.validate(schema_delete,
                        yamale.make_data(content=request.data.decode('utf-8')))

        body = yaml.load(request.data, Loader=yaml.Loader)

        temp = f"""
apiVersion: autoscaling.k8s.io/v1beta2
kind: VerticalPodAutoscaler
metadata:
  name: {body['name']}
  namespace: {body['namespace']}
"""
        cmd = f'cat << EOF | kubectl delete -f -\n{temp}\nEOF\n'
        st, res = subprocess.getstatusoutput(cmd)
        if st != 0:
            logger.error(res)
            msg['err'] = res

    except Exception as e:
        logger.error(str(e))
        msg['err'] = str(e)

    return jsonify(msg)
Example #10

def delete():
    msg = {
        'err': None,
        'res': None
    }

    try:
        # schema validation
        yamale.validate(schema_delete, yamale.make_data(content=request.data.decode('utf-8')))
        body = yaml.load(request.data, Loader=yaml.Loader)
        name = body['name']
        namespace = body['namespace']

        temp = """
    spec:
      template:
        spec:
          nodeSelector:
    """
        temp = yaml.load(temp, Loader=yaml.Loader)
        temp = json.dumps(temp)

        # multiline command! : """command"""
        cmd = f"""kubectl patch deployment {name} -n {namespace} --patch '{temp}'"""
        st, res = subprocess.getstatusoutput(cmd)
        print(res)
        if st != 0:
            logger.error(res)
            msg['err'] = res
    except Exception as e:
        logger.error(str(e))
        msg['err'] = str(e)

    return jsonify(msg)
Example #11

    def validate(cls, cfg):
        """Validate arguments

        :param cfg: project configuration
        :type cfg: dict
        """
        schema_yml = """
        measures_to_ignore: list(str(), required=False)
        build_existing_model: map(required=False)
        emissions: list(include('scenario-spec'), required=False)
        reporting_measures: list(include('measure-spec'), required=False)
        simulation_output_report: map(required=False)
        server_directory_cleanup: map(required=False)
        ---
        scenario-spec:
            scenario_name: str(required=True)
            type: str(required=True)
            elec_folder: str(required=True)
            gas_value: num(required=False)
            propane_value: num(required=False)
            oil_value: num(required=False)
            wood_value: num(required=False)
        measure-spec:
            measure_dir_name: str(required=True)
            arguments: map(required=False)
        """
        workflow_generator_args = cfg['workflow_generator']['args']
        schema_yml = re.sub(r'^ {8}', '', schema_yml, flags=re.MULTILINE)
        schema = yamale.make_schema(content=schema_yml, parser='ruamel')
        data = yamale.make_data(content=json.dumps(workflow_generator_args),
                                parser='ruamel')
        yamale.validate(schema, data, strict=True)
        return True
Example #12

    def _validate_config(cls, name):
        config_file = ConfigSet.__CONFIG_MAP[name]
        schema_file = ConfigSet.__SCHEMA_MAP[name]
        logger.info("validating config " + config_file + " against schema " +
                    schema_file + " for " + name)
        schema = yamale.make_schema(schema_file)
        data = yamale.make_data(config_file)
        yamale.validate(schema, data)
Example #13

def count_exception_lines(schema, data):
    try:
        yamale.validate(schema, data)
    except ValueError as exp:
        message = str(exp)
        count = len(message.split('\n'))
        return count
    raise Exception("Data valid")
Example #14

def validate_schema(file):
    validators = DefaultValidators.copy()
    PathValidator.configuration_file = file
    validators[PathValidator.tag] = PathValidator

    data = yamale.make_data(file)
    schema = yamale.make_schema(V2_SCHEMA, validators=validators)
    yamale.validate(schema, data)
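
PathValidator and V2_SCHEMA above come from the surrounding project and are not shown; a minimal sketch of what such a custom yamale validator generally looks like (the Path class and the schema.yaml path below are illustrative):

import os
import yamale
from yamale.validators import DefaultValidators, Validator

class Path(Validator):
    """Illustrative custom validator: the value must be an existing path."""
    tag = 'path'

    def _is_valid(self, value):
        return isinstance(value, str) and os.path.exists(value)

validators = DefaultValidators.copy()
validators[Path.tag] = Path
# A schema can then use the new tag, e.g.:  config_file: path()
schema = yamale.make_schema('schema.yaml', validators=validators)
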
Example #15

def test_config_file_schema():
    schema = yamale.make_schema(path_utils.path_to('schema_yaml'))
    source_files = os.listdir(path_utils.path_to('sources_dir'))
    for source_file in source_files:
        print(source_file)
        data = yamale.make_data(
            os.path.join(path_utils.path_to('sources_dir'), source_file))
        yamale.validate(schema, data, strict=True)
Example #16

def match_exception_lines(schema, data, expected, strict=False):
    with pytest.raises(ValueError) as e:
        yamale.validate(schema, data, strict)

    got = e.value.results[0].errors
    got.sort()
    expected.sort()
    assert got == expected
Example #17

def validate_yaml(schema_file: str, data_file: str):
    if not os.path.isfile(schema_file):
        raise RuntimeError(f"Schema yaml file is missing: {schema_file}")
    if not os.path.isfile(data_file):
        raise RuntimeError(f"Data yaml file is missing: {data_file}")

    schema = yamale.make_schema(schema_file)
    data = yamale.make_data(data_file)
    yamale.validate(schema, data)
Example #18

    def __init__(self, configFile):
        self.loadedYaml = yamale.make_data(configFile)
        loadedYamlSchema = yamale.make_schema(
            str(Path(__file__).parent) + "/resources/configuration_schema.yml")
        try:
            yamale.validate(loadedYamlSchema, self.loadedYaml)
        except yamale.YamaleError as e:
            print('Validation failed!\n%s' % str(e))
            exit(1)
Example #19

def test_parameter_group_info_schema():
    # Validate the test parameter group info data against its schema

    test_info1_str = pkg_resources.resource_string(
        __name__, "test_parameter_group_info1.yml").decode("utf-8")
    test_info1_data = yamale.make_data(content=test_info1_str)
    # make_data returns a list of (document, path) tuples; validate the first one
    test_info1_data0 = test_info1_data
    if isinstance(test_info1_data, list):
        test_info1_data0 = test_info1_data[0]
    yamale.validate(_group_info_schema, [test_info1_data0])
Example #20

def create():
    msg = {
        'err': None,
        'res': None
    }

    try:
        # schema validation
        yamale.validate(schema_create, yamale.make_data(content=request.data.decode('utf-8')))

        # name
        body = yaml.load(request.data, Loader=yaml.Loader)

        metric_type = ""
        if body['type'].lower() == "resource":
            metric_type = "Resource"
        elif body['type'].lower() == "custom":
            metric_type = "Pod"
        elif body['type'].lower() == "external":
            metric_type = "External"

        temp = f"""
apiVersion: autoscaling/v2beta2
kind: HorizontalPodAutoscaler
metadata:
  name: {body['name']}
  namespace: {body['namespace']}
spec:
  scaleTargetRef:
    apiVersion: apps/v1
    kind: Deployment
    name: {body['name']}
  minReplicas: {body['min']}
  maxReplicas: {body['max']}
  metrics:
  - type: {metric_type}
    resource:
      name: {body['metric']}
      target:
        type: Utilization
        averageUtilization: {body['avg']}
"""
        temp = yaml.load(temp, Loader=yaml.Loader)
        temp = yaml.dump(temp)

        cmd = f'cat << EOF | kubectl apply -f -\n{temp}\nEOF\n'
        st, res = subprocess.getstatusoutput(cmd)
        if st != 0:
            logger.error(res)
            msg['err'] = res

    except Exception as e:
        logger.error(str(e))
        msg['err'] = str(e)

    return jsonify(msg)
Example #21

    def load_yaml_to_dict(self):
        """
        Load default runconfig, override with user input, and convert to dict
        Leading namespaces can be stripped off down the line
        """
        # assign default config and yamale schema
        # assume defaults have already been yamale validated
        try:
            default_cfg = f'{helpers.WORKFLOW_SCRIPTS_DIR}/defaults/{self.workflow_name}.yaml'
            schema = yamale.make_schema(
                f'{helpers.WORKFLOW_SCRIPTS_DIR}/schemas/{self.workflow_name}.yaml',
                parser='ruamel')
        except Exception:
            err_str = f'workflow {self.workflow_name} does not have a schema.'
            raise ValueError(err_str)

        # set run config type
        run_config_is_txt = False
        # if newlines then run_config is YAML string (primarily for unit test)
        if self.args.run_config_path is not None:
            if '\n' in self.args.run_config_path:
                run_config_is_txt = True

        # validate yaml file taken from command line
        try:
            if run_config_is_txt:
                data = yamale.make_data(content=self.args.run_config_path,
                                        parser='ruamel')
            else:
                data = yamale.make_data(self.args.run_config_path,
                                        parser='ruamel')
        except yamale.YamaleError as e:
            err_str = f'Yamale unable to load {self.workflow_name} runconfig yaml {self.args.run_config_path} for validation.'
            raise yamale.YamaleError(err_str) from e
        try:
            yamale.validate(schema, data)
        except yamale.YamaleError as e:
            err_str = f'Validation fail for {self.workflow_name} runconfig yaml {self.args.run_config_path}.'
            raise yamale.YamaleError(err_str) from e

        # load default config
        parser = YAML(typ='safe')
        with open(default_cfg, 'r') as f:
            self.cfg = parser.load(f)

        # load user config based on input type
        if run_config_is_txt:
            self.user = parser.load(self.args.run_config_path)
        else:
            with open(self.args.run_config_path) as f_yaml:
                self.user = parser.load(f_yaml)

        # copy user-supplied config into the default config
        helpers.deep_update(self.cfg, self.user)
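
helpers.deep_update is project-specific and not shown; a recursive merge of the user config into the defaults typically looks roughly like this (a sketch, not the project's implementation):

def deep_update(original, update):
    """Recursively merge update into original, in place."""
    for key, value in update.items():
        if isinstance(value, dict) and isinstance(original.get(key), dict):
            deep_update(original[key], value)
        else:
            original[key] = value
    return original
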
Example #22

def validate_schema(file):
    validators = DefaultValidators.copy()
    PathValidator.configuration_file = file
    validators[PathValidator.tag] = PathValidator

    data = yamale.make_data(file)
    schema = yamale.make_schema(
        V2_SCHEMA,
        validators=validators
    )
    yamale.validate(schema, data)
Example #23

def valid_conf(schema_file, config_file):
    schema_yamale = yamale.make_schema(schema_file)
    config_yamale = yamale.make_data(config_file)

    try:
        yamale.validate(schema_yamale, config_yamale)
    except ValueError as e:
        for r in e.results:
            for err in r.errors:
                print(f"[ERROR] {err}")
        sys.exit(1)
Example #24

def main(assets_glob, assets_schema, custom_validators):
    # Run only if schema is set
    if assets_schema:
        schema = yamale.make_schema(assets_schema,
                                    validators=validators.load(custom_validators))

        for f in sorted(glob.glob(assets_glob, recursive=True)):
            log('INFO', f"Validating {f} against schema {assets_schema}")
            yamale.validate(schema, yamale.make_data(f))

        log('INFO', "... finished")
Example #25

def load_query(filep):
    """Load a YAML query from the open file fp.

    An exception will be raised if the query is invalid.
    """
    content = filep.read()

    query = yamale.make_data(content=content)

    yamale.validate(schema, query)

    return query[0][0]
Example #26

def delete():
    msg = {'err': None, 'res': None}

    try:
        # schema validation
        yamale.validate(schema_delete,
                        yamale.make_data(content=request.data.decode('utf-8')))

        body = yaml.load(request.data, Loader=yaml.Loader)

        temp = f"""
apiVersion: apps/v1
kind: Deployment
metadata:
 name: {body['name']}
 namespace: {body['namespace']}
---
apiVersion: v1
kind: Service
metadata:
 name: {body['name']}
 namespace: {body['namespace']}
"""

        k = f"{body['namespace']}/{body['name']}"
        v = mydb.get(db_path, k)
        if v is not None:
            obj = json.loads(v.decode())
            if "monitorPorts" in obj:
                temp += f"""---
apiVersion: monitoring.coreos.com/v1
kind: ServiceMonitor
metadata:
 name: {body['name']}
 namespace: {body['namespace']}
"""

        # delete deployment & service
        cmd = f'cat << EOF | kubectl delete -f -\n{temp}\nEOF\n'
        st, res = subprocess.getstatusoutput(cmd)
        if st != 0:
            logger.error(res)
            msg['err'] = res

        # delete scheduleHint from the db (integrity risk!!!) but it's an etri requirement.
        k = f"{body['namespace']}/{body['name']}"
        mydb.delete(db_path, k)

    except Exception as e:
        logger.error(str(e))
        msg['err'] = str(e)

    return jsonify(msg)
Example #27

    def verify_recipe(self, filename):
        data = yamale.make_data(filename)
        try:
            yamale.validate(self.recipe_schema, data)
        except YamaleError as e:
            print('Validation failed!\n')
            for result in e.results:
                print("Error validating data '%s' with '%s'\n\t" %
                      (result.data, result.schema))
                for error in result.errors:
                    print('\t%s' % error)
            exit(1)
        logging.info(f"Builder file {filename} is ✔ good")
Example #28

def validate(spicespath, schemapath):

    try:
        schema = yamale.make_schema(schemapath, parser='ruamel')
        data = yamale.make_data(spicespath, parser='ruamel')
        yamale.validate(schema, data)
        return data
    except yamale.YamaleError as e:
        for result in e.results:
            logger.error("Error validating data "
                         "'%s' with '%s'\n\t" % (result.data, result.schema))
            for error in result.errors:
                raise ValidationError(error)
Example #29

def read_config(config_file: str) -> Config:
    config_file_path = Path(config_file)
    if not config_file_path.is_file():
        logger.error(f'Config file does not exist at {config_file_path.resolve()}')
        sys.exit(1)
    schema = yamale.make_schema(Path('schema.yml'))
    data = yamale.make_data(config_file_path)
    try:
        yamale.validate(schema, data)
    except ValueError:
        logger.exception('Config file validation failed')
        sys.exit(1)
    return parse_config_file(config_file_path)
Example #30

def load_and_validate(schema, fname):
    data = yamale.make_data(fname)
    try:
        yamale.validate(schema, data, strict=True)
    except yamale.YamaleError as e:
        for result in e.results:
            print('Error validating data {0} with {1}:'.format(
                result.data, result.schema),
                  file=sys.stderr)
            for error in result.errors:
                print('', error, sep='\t', file=sys.stderr)
        exit(1)
    return data[0][0]
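
strict=True makes yamale reject keys that are not declared in the schema, in addition to checking the declared ones; a small sketch of the difference, with schema and data inlined for illustration:

import yamale

schema = yamale.make_schema(content="name: str()\n")
data = yamale.make_data(content="name: demo\nextra: 1\n")

yamale.validate(schema, data, strict=False)   # passes: the undeclared 'extra' key is ignored
try:
    yamale.validate(schema, data, strict=True)
except yamale.YamaleError as e:
    print(e.results[0].errors)                # reports the undeclared 'extra' key
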
Example #31

    def __validate(self, config: dict, config_path: str) -> dict:
        if 'version' not in config:
            raise ConfigException('no version field in config')
        schema_path = '{schemas_path}/config_schema_v{version}.yaml'.format(
            schemas_path=os.path.dirname(os.path.realpath(__file__)),
            version=config['version'])
        if not os.path.isfile(schema_path):
            raise ConfigException('unknown config version ' +
                                  str(config['version']))
        schema = yamale.make_schema(schema_path, parser='pyyaml')
        data = yamale.make_data(config_path, parser='pyyaml')
        yamale.validate(schema, data)
        return config
Example #32

def _validate(schema_path, data_path, parser):
    schema = schemas.get(schema_path)
    try:
        if not schema:
            schema = yamale.make_schema(schema_path, parser)
            schemas[schema_path] = schema
        data = yamale.make_data(data_path, parser)
        yamale.validate(schema, data)
    except Exception as e:
        error = '\nError!\n'
        error += 'Schema: %s\n' % schema_path
        error += 'Data file: %s\n' % data_path
        error += traceback.format_exc()
        print(error)
        raise ValueError('Validation failed!')
Example #33

    def validate(self, validators=None):
        schema = self.schema
        yaml = self.yaml
        base_dir = self.base_dir

        if schema is None:
            return

        if type(yaml) != list:
            yaml = [yaml]

        if base_dir is not None:
            schema = os.path.join(base_dir, schema)
            yaml = {os.path.join(base_dir, y) for y in yaml}

        # Run yaml through glob and flatten list
        yaml = set(itertools.chain(*map(glob.glob, yaml)))

        # Remove schema from set of data files
        yaml = yaml - {schema}

        yamale_schema = yamale.make_schema(schema, validators=validators)
        yamale_data = itertools.chain(*map(yamale.make_data, yaml))

        return yamale.validate(yamale_schema, yamale_data) is not None
Example #34

from glob import glob
import sys
import yamale

schema = yamale.make_schema('./docs/schema.yaml')

data = yamale.make_data('./docs/current.settings.yaml')
yamale.validate(schema, data)

templates = glob('mia/templates/*/settings.yaml')
for template in templates:
    sys.stdout.write('Checking %s against schema... ' % template)
    data = yamale.make_data(template)
    yamale.validate(schema, data)
    print("done!")
Example #35

def test_bad_validate():
    sch.validate(types['schema'], types['bad'])
Example #36

def test_bad_nested():
    sch.validate(nested['schema'], nested['bad'])
Example #37

def test_bad_custom():
    assert sch.validate(custom['schema'], custom['bad'])
Example #38

def test_bad_lists():
    assert sch.validate(lists['schema'], lists['bad'])
Example #39

def test_good(data_map):
    yamale.validate(data_map['schema'], data_map['good'])
Example #40

def test_bad_keywords():
    assert sch.validate(keywords['schema'], keywords['bad'])
Example #41

def good_gen(data_map):
    yamale.validate(data_map['schema'], data_map['good'])
Example #42

def good_gen(data_map):
    sch.validate(data_map['schema'], data_map['good'])