Example #1
    def validate(self, bug_desc: t.Union[str, OrderedDict]) -> bool:
        if type(bug_desc) == str:
            bug_data = self._load(bug_desc)
        elif type(bug_desc) == OrderedDict:
            bug_data = bug_desc
        else:
            print(f"Unsupported data type for bug description")
            return False

        # validate only the sections of the bug description that the schema covers
        if self._keys is None:
            data = yamale.make_data(bug_data)
        else:
            # print(f"validating bug description for sections {self._keys}")
            data_str = ''
            for key in self._keys:
                data_str += f"{key}: {bug_data[key]}\n"
            data = yamale.make_data(content=data_str)

        # Validate data against the schema. Throws a ValueError if data is invalid.
        try:
            yamale.validate(self._schema, data)
            # print('Validation success! 👍')
            return True
        except yamale.YamaleError as e:
            # print('Validation failed!\n')
            for result in e.results:
                # for error in result.errors:
                print(result.errors[0])

        return False
Example #2
    def load_yaml_to_dict(self):
        """
        Load default runconfig, override with user input, and convert to dict
        Leading namespaces can be stripped off down the line
        """
        # assign default config and yamale schema
        # assume defaults have already been yamale validated
        try:
            default_cfg = f'{helpers.WORKFLOW_SCRIPTS_DIR}/defaults/{self.workflow_name}.yaml'
            schema = yamale.make_schema(
                f'{helpers.WORKFLOW_SCRIPTS_DIR}/schemas/{self.workflow_name}.yaml',
                parser='ruamel')
        except:
            err_str = f'workflow {self.workflow_name} does not have a schema.'
            raise ValueError(err_str)

        # set run config type
        run_config_is_txt = False
        # if newlines then run_config is YAML string (primarily for unit test)
        if self.args.run_config_path is not None:
            if '\n' in self.args.run_config_path:
                run_config_is_txt = True

        # validate yaml file taken from command line
        try:
            if run_config_is_txt:
                data = yamale.make_data(content=self.args.run_config_path,
                                        parser='ruamel')
            else:
                data = yamale.make_data(self.args.run_config_path,
                                        parser='ruamel')
        except yamale.YamaleError as e:
            err_str = f'Yamale unable to load {self.workflow_name} runconfig yaml {self.args.run_config_path} for validation.'
            raise yamale.YamaleError(err_str) from e
        try:
            yamale.validate(schema, data)
        except yamale.YamaleError as e:
            err_str = f'Validation fail for {self.workflow_name} runconfig yaml {self.args.run_config_path}.'
            raise yamale.YamaleError(err_str) from e

        # load default config
        parser = YAML(typ='safe')
        with open(default_cfg, 'r') as f:
            self.cfg = parser.load(f)

        # load user config based on input type
        if run_config_is_txt:
            self.user = parser.load(self.args.run_config_path)
        else:
            with open(self.args.run_config_path) as f_yaml:
                self.user = parser.load(f_yaml)

        # copy user-supplied config into default config
        helpers.deep_update(self.cfg, self.user)
Example #3
def validate_yaml(config_file_path: str,
                  cloud: str,
                  json_schema: bool = False) -> Literal[True]:
    if json_schema:
        print("TODO")
    else:
        CLOUD_TO_SCHEMA = {
            "aws": aws_main_schema,
            "google": gcp_main_schema,
            "azurerm": azure_main_schema,
            "local": local_main_schema,
            "helm": helm_main_schema,
        }
        DEFAULT_SCHEMA = vanilla_main_schema
        data = yamale.make_data(config_file_path, parser="ruamel")
        schema = CLOUD_TO_SCHEMA.get(cloud, DEFAULT_SCHEMA)
        yamale_result = yamale.validate(schema, data, _raise_error=False)
        errors = []
        for result in yamale_result:
            errors.extend(result.errors)

        if len(errors) > 0:
            _print_errors(errors)
            raise UserErrors(f"{config_file_path} is not a valid Opta file.")

    return True
Example #4
    def validate(cls, cfg):
        """Validate arguments

        :param cfg: project configuration
        :type cfg: dict
        """
        schema_yml = """
        measures_to_ignore: list(str(), required=False)
        build_existing_model: map(required=False)
        emissions: list(include('scenario-spec'), required=False)
        reporting_measures: list(include('measure-spec'), required=False)
        simulation_output_report: map(required=False)
        server_directory_cleanup: map(required=False)
        ---
        scenario-spec:
            scenario_name: str(required=True)
            type: str(required=True)
            elec_folder: str(required=True)
            gas_value: num(required=False)
            propane_value: num(required=False)
            oil_value: num(required=False)
            wood_value: num(required=False)
        measure-spec:
            measure_dir_name: str(required=True)
            arguments: map(required=False)
        """
        workflow_generator_args = cfg['workflow_generator']['args']
        schema_yml = re.sub(r'^ {8}', '', schema_yml, flags=re.MULTILINE)
        schema = yamale.make_schema(content=schema_yml, parser='ruamel')
        data = yamale.make_data(content=json.dumps(workflow_generator_args),
                                parser='ruamel')
        yamale.validate(schema, data, strict=True)
        return True
Example #5
    def validate(cls, cfg):
        """Validate arguments

        :param cfg: project configuration
        :type cfg: dict
        """
        schema_yml = """
        measures_to_ignore: list(str(), required=False)
        residential_simulation_controls: map(required=False)
        measures: list(include('measure-spec'), required=False)
        reporting_measures: list(include('measure-spec'), required=False)
        simulation_output: map(required=False)
        timeseries_csv_export: map(required=False)
        server_directory_cleanup: map(required=False)
        ---
        measure-spec:
            measure_dir_name: str(required=True)
            arguments: map(required=False)
        """
        workflow_generator_args = cfg['workflow_generator']['args']
        schema_yml = re.sub(r'^ {8}', '', schema_yml, flags=re.MULTILINE)
        schema = yamale.make_schema(content=schema_yml, parser='ruamel')
        data = yamale.make_data(content=json.dumps(workflow_generator_args), parser='ruamel')
        yamale.validate(schema, data, strict=True)
        return cls.validate_measures_and_arguments(cfg)
Example #6
    def make_from_config(cls, config_path: str, **kwargs) -> YABU:
        # Loads the config schema to validate the config
        schema = yamale.make_schema(
            path.join(path.dirname(__file__), "resources/config.schema.yaml"))

        # Tries to load config file
        try:
            config = yamale.make_data(config_path)
        except FileNotFoundError:
            _LOGGER.error(
                "Configuration file '{}' not found".format(config_path))
            raise exceptions.ConfigNotFound(config_path)
        except ScannerError as e:
            _LOGGER.error("Invalid configuration file '{}'\n{}".format(
                config_path, e))
            raise exceptions.InvalidConfig(e)

        # Tries to validate the configuration with the schema
        try:
            yamale.validate(schema, config)
        except YamaleError as e:
            _LOGGER.error("Invalid configuration file '{}'\n{}".format(
                config_path, e))
            raise exceptions.InvalidConfig(e)

        _LOGGER.info("Configuration loaded")

        # create instance from config
        config, _ = config[0]
        return cls(**config, **kwargs)
Example #7
def recipe_with_schema(filename):
    """Check if the recipe content matches schema."""
    schema_file = os.path.join(os.path.dirname(__file__), 'recipe_schema.yml')
    logger.debug("Checking recipe against schema %s", schema_file)
    recipe = yamale.make_data(filename)
    schema = yamale.make_schema(schema_file)
    yamale.validate(schema, recipe)
Example #8
def _validate(schema_path, data_path, parser, strict, _raise_error):
    schema = schemas.get(schema_path)
    if not schema:
        schema = yamale.make_schema(schema_path, parser)
        schemas[schema_path] = schema
    data = yamale.make_data(data_path, parser)
    return yamale.validate(schema, data, strict, _raise_error)
Example #9
    def validate(self, yaml_path):

        # TODO: this is weird, the observer should handle this instead
        validated_at = self.validated_timestamps.get(yaml_path)
        if validated_at is not None and (datetime.now() -
                                         validated_at).seconds < 3:
            return

        self.validated_timestamps[yaml_path] = datetime.now()

        schema_path = self.schema
        base_dir = self.base_dir

        if schema_path is None:
            return

        if base_dir is not None:
            schema_path = os.path.join(base_dir, schema_path)

        # Run yaml through glob and flatten list
        yamale_schema = yamale.make_schema(schema_path)
        try:
            yamale_data = yamale.make_data(yaml_path)
            for result in yamale.validate(yamale_schema,
                                          yamale_data,
                                          _raise_error=False):
                if result.isValid():
                    logging.info('[valid] %s', yaml_path)
                else:
                    logging.error(
                        '[invalid] %s',
                        str(yaml_path) + '\n\t' + '\n\t'.join(result.errors))
        except ScannerError as se:
            logging.error('[exception] %s', str(yaml_path) + '\n\t' + str(se))
Example #10
def delete():
    msg = {'err': None, 'res': None}

    try:
        # schema validation
        yamale.validate(schema_delete,
                        yamale.make_data(content=request.data.decode('utf-8')))

        body = yaml.load(request.data, Loader=yaml.Loader)

        temp = f"""
apiVersion: autoscaling.k8s.io/v1beta2
kind: VerticalPodAutoscaler
metadata:
  name: {body['name']}
  namespace: {body['namespace']}
"""
        cmd = f'cat << EOF | kubectl delete -f -\n{temp}\nEOF\n'
        st, res = subprocess.getstatusoutput(cmd)
        if st != 0:
            logger.error(res)
            msg['err'] = res

    except Exception as e:
        logger.error(str(e))
        msg['err'] = str(e)

    return jsonify(msg)
Example #11
def test_config_file_schema():
    schema = yamale.make_schema(os.path.join(ROOT_DIR, 'src/config/schema.yaml'))
    source_files = os.listdir(SOURCE_DIR)
    for source_file in source_files:
        print(source_file)
        data = yamale.make_data(os.path.join(SOURCE_DIR, source_file))
        yamale.validate(schema, data, strict=True)
Example #12
def qc_metadata(metadatafile):
    print("Start metadata validation...")
    schema = yamale.make_schema('../example/dummyschema.yaml')
    data = yamale.make_data(metadatafile)
    # Validate data against the schema. Throws a ValueError if data is invalid.
    yamale.validate(schema, data)
    print("...complete!")
Example #13
def delete():
    msg = {
        'err': None,
        'res': None
    }

    try:
        # schema validation
        yamale.validate(schema_delete, yamale.make_data(content=request.data.decode('utf-8')))
        body = yaml.load(request.data, Loader=yaml.Loader)
        name = body['name']
        namespace = body['namespace']

        temp = """
    spec:
      template:
        spec:
          nodeSelector:
    """
        temp = yaml.load(temp, Loader=yaml.Loader)
        temp = json.dumps(temp)

        # multiline command! : """command"""
        cmd = f"""kubectl patch deployment {name} -n {namespace} --patch '{temp}'"""
        st, res = subprocess.getstatusoutput(cmd)
        print(res)
        if st != 0:
            logger.error(res)
            msg['err'] = res
    except Exception as e:
        logger.error(str(e))
        msg['err'] = str(e)

    return jsonify(msg)
Example #14
    def test_site_infra(self):
        schema = yamale.make_schema(
            './config_validation_engine/validators/schema/site_infra_schema.yaml',
            validators=val.all_config_validators())
        data = yamale.make_data(
            './config_validation_engine/validators/tests/site_infra_data.yaml')
        assert yamale.validate(schema, data)
Example #15
def main(directory=None, file=None, schema=None, recursive=False, quiet=False):
  'Program entry point.'

  yamale_schema = yamale.make_schema(schema)
  search = "**/*.yaml" if recursive else "*.yaml"
  has_errors = []

  files = list(file)
  for d in directory:
    files = files + glob.glob(os.path.join(d, search), recursive=recursive)

  for document in files:
    yamale_data = yamale.make_data(document)
    try:
      yamale.validate(yamale_schema, yamale_data)
      if quiet:
        pass
      else:
        print(f'✅  {document} -> {os.path.basename(schema)}')
    except ValueError as e:
      has_errors.append(document)
      print(e)
      print(f'❌ {document} -> {os.path.basename(schema)}')

  if len(has_errors) > 0:
    raise SystemExit(f"❌ Errors found in {len(has_errors)} documents.")
Example #16
    def _validate_config(cls, name):
        config_file = ConfigSet.__CONFIG_MAP[name]
        schema_file = ConfigSet.__SCHEMA_MAP[name]
        logger.info("validating config " + config_file + " against schema " +
                    schema_file + " for " + name)
        schema = yamale.make_schema(schema_file)
        data = yamale.make_data(config_file)
        yamale.validate(schema, data)
Example #17
def test_config_file_schema():
    schema = yamale.make_schema(path_utils.path_to('schema_yaml'))
    source_files = os.listdir(path_utils.path_to('sources_dir'))
    for source_file in source_files:
        print(source_file)
        data = yamale.make_data(
            os.path.join(path_utils.path_to('sources_dir'), source_file))
        yamale.validate(schema, data, strict=True)
Example #18
def validate_schema(file):
    validators = DefaultValidators.copy()
    PathValidator.configuration_file = file
    validators[PathValidator.tag] = PathValidator

    data = yamale.make_data(file)
    schema = yamale.make_schema(V2_SCHEMA, validators=validators)
    yamale.validate(schema, data)
Example #19
def test_parameter_group_info_schema():
    # Validate test schema against schema... schema

    test_info1_str = pkg_resources.resource_string(__name__, "test_parameter_group_info1.yml").decode("utf-8")
    test_info1_data = yamale.make_data(content = test_info1_str)
    test_schema1_data0 = test_info1_data
    if isinstance(test_info1_data, list):
        test_schema1_data0 = test_info1_data[0]
    yamale.validate(_group_info_schema, [test_schema1_data0])
Example #20
def validate_yaml(schema_file: str, data_file: str):
    if not os.path.isfile(schema_file):
        raise RuntimeError(f"Schema yaml file is missing: {schema_file}")
    if not os.path.isfile(data_file):
        raise RuntimeError(f"Data yaml file is missing: {data_file}")

    schema = yamale.make_schema(schema_file)
    data = yamale.make_data(data_file)
    yamale.validate(schema, data)
Example #21
    def __init__(self, configFile):
        self.loadedYaml = yamale.make_data(configFile)
        loadedYamlSchema = yamale.make_schema(
            str(Path(__file__).parent) + "/resources/configuration_schema.yml")
        try:
            yamale.validate(loadedYamlSchema, self.loadedYaml)
        except yamale.YamaleError as e:
            print('Validation failed!\n%s' % str(e))
            exit(1)
Example #22
def create():
    msg = {
        'err': None,
        'res': None
    }

    try:
        # schema validation
        yamale.validate(schema_create, yamale.make_data(content=request.data.decode('utf-8')))

        # name
        body = yaml.load(request.data, Loader=yaml.Loader)

        metric_type = ""
        if body['type'].lower() == "resource":
            metric_type = "Resource"
        elif body['type'].lower() == "custom":
            metric_type = "Pod"
        elif body['type'].lower() == "external":
            metric_type = "External"

        temp = f"""
apiVersion: autoscaling/v2beta2
kind: HorizontalPodAutoscaler
metadata:
  name: {body['name']}
  namespace: {body['namespace']}
spec:
  scaleTargetRef:
    apiVersion: apps/v1
    kind: Deployment
    name: {body['name']}
  minReplicas: {body['min']}
  maxReplicas: {body['max']}
  metrics:
  - type: {metric_type}
    resource:
      name: {body['metric']}
      target:
        type: Utilization
        averageUtilization: {body['avg']}
"""
        temp = yaml.load(temp, Loader=yaml.Loader)
        temp = yaml.dump(temp)

        cmd = f'cat << EOF | kubectl apply -f -\n{temp}\nEOF\n'
        st, res = subprocess.getstatusoutput(cmd)
        if st != 0:
            logger.error(res)
            msg['err'] = res

    except Exception as e:
        logger.error(str(e))
        msg['err'] = str(e)

    return jsonify(msg)
Example #23
def test_validate_errors(use_schema_string, use_data_string, expected_message_re):
    schema_path = get_fixture('types.yaml')
    data_path = get_fixture('types_bad_data.yaml')
    if use_schema_string:
        with io.open(schema_path, encoding='utf-8') as f:
            schema = yamale.make_schema(content=f.read())
    else:
        schema = yamale.make_schema(schema_path)
    if use_data_string:
        with io.open(data_path, encoding='utf-8') as f:
            data = yamale.make_data(content=f.read())
    else:
        data = yamale.make_data(data_path)
    with pytest.raises(yamale.yamale_error.YamaleError) as excinfo:
        yamale.validate(schema, data)
    assert re.match(expected_message_re, excinfo.value.message, re.MULTILINE), \
        'Message {} should match {}'.format(
            excinfo.value.message, expected_message_re
        )
Example #24
def valid_conf(schema_file, config_file):
    schema_yamale = yamale.make_schema(schema_file)
    config_yamale = yamale.make_data(config_file)

    try:
        yamale.validate(schema_yamale, config_yamale)
    except ValueError as e:
        for r in e.results:
            for err in r.errors:
                print(f"[ERROR] {err}")
        sys.exit(1)
Example #25
def main(assets_glob, assets_schema, custom_validators):
    # Run only if schema is set
    if assets_schema:
        schema = yamale.make_schema(assets_schema,
                                    validators=validators.load(custom_validators))

        for f in sorted(glob.glob(assets_glob, recursive=True)):
            log('INFO', f"Validating {f} against schema {assets_schema}")
            yamale.validate(schema, yamale.make_data(f))

        log('INFO', "... finished")
Example #26
def validate_schema(file):
    validators = DefaultValidators.copy()
    PathValidator.configuration_file = file
    validators[PathValidator.tag] = PathValidator

    data = yamale.make_data(file)
    schema = yamale.make_schema(
        V2_SCHEMA,
        validators=validators
    )
    yamale.validate(schema, data)
Example #27
def delete():
    msg = {'err': None, 'res': None}

    try:
        # schema validation
        yamale.validate(schema_delete,
                        yamale.make_data(content=request.data.decode('utf-8')))

        body = yaml.load(request.data, Loader=yaml.Loader)

        temp = f"""
apiVersion: apps/v1
kind: Deployment
metadata:
 name: {body['name']}
 namespace: {body['namespace']}
---
apiVersion: v1
kind: Service
metadata:
 name: {body['name']}
 namespace: {body['namespace']}
"""

        k = f"{body['namespace']}/{body['name']}"
        v = mydb.get(db_path, k)
        if v is not None:
            obj = json.loads(v.decode())
            if "monitorPorts" in obj:
                temp += f"""---
apiVersion: monitoring.coreos.com/v1
kind: ServiceMonitor
metadata:
 name: {body['name']}
 namespace: {body['namespace']}
"""

        # delete deployment & service
        cmd = f'cat << EOF | kubectl delete -f -\n{temp}\nEOF\n'
        st, res = subprocess.getstatusoutput(cmd)
        if st != 0:
            logger.error(res)
            msg['err'] = res

        # delete scheduleHint in db (integrity risk!!!) but it's ETRI's requirement.
        k = f"{body['namespace']}/{body['name']}"
        mydb.delete(db_path, k)

    except Exception as e:
        logger.error(str(e))
        msg['err'] = str(e)

    return jsonify(msg)
Example #28
def load_query(filep):
    """Load a YAML query from the open file fp.

    An exception will be raised if the query is invalid.
    """
    content = filep.read()

    query = yamale.make_data(content=content)

    yamale.validate(schema, query)

    return query[0][0]
Example #29
def _validate(schema_path, data_path, parser, strict, _raise_error):
    schema = schemas.get(schema_path)
    try:
        if not schema:
            schema = yamale.make_schema(schema_path, parser)
            schemas[schema_path] = schema
    except (SyntaxError, ValueError) as e:
        results = [Result([str(e)])]
        if not _raise_error:
            return results
        raise YamaleError(results)
    data = yamale.make_data(data_path, parser)
    return yamale.validate(schema, data, strict, _raise_error)
Example #30
def read_config(config_file: str) -> Config:
    config_file_path = Path(config_file)
    if not config_file_path.is_file():
        logger.error(f'Config file does not exist at {config_file_path.resolve()}')
        sys.exit(1)
    schema = yamale.make_schema(Path('schema.yml'))
    data = yamale.make_data(config_file_path)
    try:
        yamale.validate(schema, data)
    except ValueError:
        logger.exception('Config file validation failed')
        sys.exit(1)
    return parse_config_file(config_file_path)
Example #31
    def verify_recipe(self, filename):
        data = yamale.make_data(filename)
        try:
            yamale.validate(self.recipe_schema, data)
        except YamaleError as e:
            print('Validation failed!\n')
            for result in e.results:
                print("Error validating data '%s' with '%s'\n\t" %
                      (result.data, result.schema))
                for error in result.errors:
                    print('\t%s' % error)
            exit(1)
        logging.info(f"Builder file {filename} is ✔ good")
Example #32
def _validate(schema_path, data_path, parser):
    schema = schemas.get(schema_path)
    try:
        if not schema:
            schema = yamale.make_schema(schema_path, parser)
            schemas[schema_path] = schema
        data = yamale.make_data(data_path, parser)
        yamale.validate(schema, data)
    except Exception as e:
        error = '\nError!\n'
        error += 'Schema: %s\n' % schema_path
        error += 'Data file: %s\n' % data_path
        error += traceback.format_exc()
        print(error)
        raise ValueError('Validation failed!')
Example #33
}

maps = {
    'schema': 'map.yaml',
    'bad': 'map_bad.yaml',
    'good': 'map_good.yaml'
}

test_data = [types, nested, custom, keywords, lists, maps]

for d in test_data:
    for key in d.keys():
        if key == 'schema':
            d[key] = yamale.make_schema(get_fixture(d[key]))
        else:
            d[key] = yamale.make_data(get_fixture(d[key]))


def test_tests():
    ''' Make sure the test runner is working.'''
    assert 1 + 1 == 2


def test_flat_make_schema():
    assert isinstance(types['schema']['string'], val.String)


def test_nested_schema():
    nested_schema = nested['schema']
    assert isinstance(nested_schema['string'], val.String)
    assert isinstance(nested_schema.dict['list'], (list, tuple))
Example #34
}

lists = {
    'schema': 'lists.yaml',
    'bad': 'lists_bad.yaml',
    'good': 'lists_good.yaml'
}

test_data = [types, nested, custom, keywords, lists]

for d in test_data:
    for key in d.keys():
        if key == 'schema':
            d[key] = sch.make_schema(get_fixture(d[key]))
        else:
            d[key] = sch.make_data(get_fixture(d[key]))


def test_tests():
    ''' Make sure the test runner is working.'''
    assert 1 + 1 == 2


def test_flat_make_schema():
    assert isinstance(types['schema']['string'], val.String)


def test_nested_schema():
    nested_schema = nested['schema']
    assert isinstance(nested_schema['string'], val.String)
    assert isinstance(nested_schema.dict['list'], (list, tuple))
Example #35
from glob import glob
import sys
import yamale

schema = yamale.make_schema('./docs/schema.yaml')

data = yamale.make_data('./docs/current.settings.yaml')
yamale.validate(schema, data)

templates = glob('mia/templates/*/settings.yaml')
for template in templates:
    sys.stdout.write('Checking %s against schema... ' % template)
    data = yamale.make_data(template)
    yamale.validate(schema, data)
    print("done!")