def run(self, event, context):

        gh_hook = json.loads(event['body'])
        repo = gh_hook['repository']['full_name']
        sha = gh_hook['pull_request']['head']['sha']

        try:
            hooks_yml = get_github().get_repo(repo, lazy=True).get_file_contents('.hooks.yml', ref=sha)
            logger.info("Fetched .hooks.yml from repo {}".format(repo))
        except github.GithubException:
            logger.error("Missig .hooks.yml on repo {}".format(repo))
            send_status(event, context, gh_hook, self.configname, 'success', ".hooks.yml not present in branch")
            return

        try:
            hook_config = yaml.safe_load(hooks_yml.decoded_content)
            logger.info("Basic yml validation passed")
        except Exception as e:
            logger.error("Failed to decode hook yaml: " + e.message)
            send_status(event, context, gh_hook, self.configname, 'failure', "Could not decode branch .hooks.yml")
            return

        logger.info("Advanced schema validation")
        c = Core(source_data=hook_config,
                 schema_files=[os.path.join(os.path.dirname(__file__), "..", "hooks.schema.yml")])
        c.validate(raise_exception=False)
        vc = len(c.validation_errors)
        if vc > 0:
            for err in c.validation_errors:
                logger.error(" - {}".format(err))
            send_status(event, context, gh_hook, self.configname, 'failure', ".hooks.yml has {} validation errors; see log".format(vc))
            return

        send_status(event, context, gh_hook, self.configname, 'success', ".hooks.yml present and valid")
def incoming(event, context):
    """
    Validate the incoming event from the API gateway
    """

    print json.dumps(event)  # not logger.info() so it doesn't show up in logview itself :)

    # validate the secret
    if not validate_secret(event['headers'].get('X-Hub-Signature'), event['body']):
        return {"body": json.dumps({"error": "invalid signature"}), "statusCode": 403}

    # Get the hook info
    try:
        hookdata = json.loads(event['body'])
    except Exception:
        logger.error("Failed to decode json")
        return {"body": json.dumps({"error": "json decode failure"}), "statusCode": 500}

    # this will only work, for now, with hooks that include repo information
    if 'repository' not in hookdata:
        logger.error("No repository in the hook, no processing")
        return {"body": json.dumps({"error": "unsupported hook type; missing repository information"}), "statusCode": 501}

    repo = hookdata['repository']['full_name']

    # Now, we fetch the config from the repo to see what hooks we should trigger
    try:
        hooks_yml = get_github().get_repo(repo, lazy=True).get_file_contents('.hooks.yml')
        logger.info("Fetched .hooks.yml from repo {}".format(repo))
    except github.GithubException:
        logger.error("Missig .hooks.yml on repo {}".format(repo))
        return {"body": json.dumps({"error": "no .hooks.yml present"}), "statusCode": 501}

    try:
        hook_config = yaml.safe_load(hooks_yml.decoded_content)
    except Exception:
        logger.error("Failed to decode hook yaml")
        return {"body": json.dumps({"error": "hook yaml failure"}), "statusCode": 500}

    # Schema based validation
    c = Core(source_data=hook_config, schema_files=[os.path.join(os.path.dirname(__file__), "..", "hooks.schema.yml")])
    c.validate(raise_exception=False)
    if len(c.validation_errors) > 0:
        logger.error(c.validation_errors)
        return {"body": json.dumps({"error": "invalid hooks configuration"}), "statusCode": 501}

    ghevent = event['headers'].get('X-GitHub-Event', '')

    # Check hooks!
    logger.info("Qualifying checks:")
    for name, check in all_checks.get_all_checks().iteritems():
        check_config = check.qualify(ghevent, hookdata, hook_config)
        if check_config is not False:  # explicit comparison so "empty" (falsy) but valid configs still qualify
            logger.info("- {} passed qualify, invoking secondary call".format(name))
            invoke_secondary(name, check_config, event)
        else:
            logger.info("- {} did not qualify, skipping".format(name))

    # all done!
    return {"body": json.dumps({"message": "Thanks"}), "statusCode": 200}
Example #3
    def _validate_cfg(self):
        """
        Open and parse the YAML configuration file and ensure it matches
        our Schema for a Dogen configuration.
        """
        # Fail early if descriptor file is not found
        if not os.path.exists(self.descriptor):
            raise Error("Descriptor file '%s' could not be found. Please make sure you specified correct path." % self.descriptor)

        schema_path = os.path.join(self.pwd, "schema", "kwalify_schema.yaml")
        schema = {}
        with open(schema_path, 'r') as fh:
            schema = yaml.safe_load(fh)

        if schema is None:
            raise Error("couldn't read a valid schema at %s" % schema_path)

        for plugin in self.plugins:
            plugin.extend_schema(schema)

        with open(self.descriptor, 'r') as stream:
            self.cfg = yaml.safe_load(stream)

        c = Core(source_data=self.cfg, schema_data=schema)
        try:
            c.validate(raise_exception=True)
        except SchemaError as e:
            raise Error(e)
Example #4
    def test_files_with_unicode_content_failing(self):
        """
        These tests should fail with the specified exception
        """
        _fail_tests = [
            # Test mapping with unicode key and value but wrong type
            (u"1f.yaml", SchemaError),
            # Test unicode filename with validation errors
            (u"2få.yaml", SchemaError),
            # Test unicode data inside seq but wrong type
            (u"3f.yaml", SchemaError),
        ]

        for failing_test, exception_type in _fail_tests:
            # f = self.f(os.path.join("fail", failing_test))
            f = unicode(self.f(failing_test))

            with open(f, "r") as stream:
                yaml_data = yaml.load(stream)
                data = yaml_data["data"]
                schema = yaml_data["schema"]
                errors = yaml_data["errors"]

            try:
                print(u"Running test files: {}".format(f))
                c = Core(source_data=data, schema_data=schema)
                c.validate()
            except exception_type:
                pass  # OK
            else:
                raise AssertionError(u"Exception {} not raised as expected... FILES: {} : {}".format(exception_type, exception_type))

            compare(sorted(c.validation_errors), sorted(errors), prefix=u"Wrong validation errors when parsing files : {}".format(f))
Example #5
    def load(self, config_file):
        """Load configuration from config_file."""
        with resource_stream(__name__, 'config-schema.yaml') as schema_stream:
            schema = yaml.load(schema_stream)

        core = Core(source_file=config_file, schema_data=schema)
        self.config = core.validate(raise_exception=True)
Example #6
def load_config(path):

    """validates, loads and configures the yaml document at the specified path

    :param path: the path to the file
    :return: the parsed yaml document
    :raises SchemaError: if the yaml document does not validate
    """

    validator = Core(source_file=path, schema_data=config_schema)
    validator.validate(raise_exception=True)

    pattern = re.compile(r'^(.*)<%= ENV\[\'(.*)\'\] %>(.*)$')
    yaml.add_implicit_resolver('!env_regex', pattern)

    def env_regex(loader, node):
        value = loader.construct_scalar(node)
        front, variable_name, back = pattern.match(value).groups()
        return str(front) + os.environ[variable_name] + str(back)

    yaml.add_constructor('!env_regex', env_regex)

    with open(path, 'r') as stream:
        doc = yaml.load(stream)
        return doc
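A standalone usage sketch of the environment-variable substitution performed by load_config above; the variable name and document are hypothetical and PyYAML >= 5.1 is assumed:

import os
import re
import yaml

# Tag any plain scalar shaped like <%= ENV['NAME'] %> with !env_regex ...
pattern = re.compile(r"^(.*)<%= ENV\['(.*)'\] %>(.*)$")
yaml.add_implicit_resolver('!env_regex', pattern)

# ... and resolve that tag by splicing in the named environment variable.
def env_regex(loader, node):
    value = loader.construct_scalar(node)
    front, variable_name, back = pattern.match(value).groups()
    return front + os.environ[variable_name] + back

yaml.add_constructor('!env_regex', env_regex)

os.environ['DB_HOST'] = 'db.example.com'  # hypothetical variable for the demo
print(yaml.load("db_url: <%= ENV['DB_HOST'] %>/mydb", Loader=yaml.FullLoader))
# -> {'db_url': 'db.example.com/mydb'}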
Example #7
    def test_files_with_unicode_content_success(self):
        """
        These tests should pass with no exception raised
        """
        _pass_tests = [
            # Test mapping with unicode key and value
            u"1s.yaml",
            # Test unicode filename
            u"2så.yaml",
            # Test sequence with unicode keys
            u"3s.yaml",
        ]

        for passing_test_files in _pass_tests:
            f = unicode(self.f(passing_test_files))

            with open(f, "r") as stream:
                yaml_data = yaml.load(stream)
                data = yaml_data["data"]
                schema = yaml_data["schema"]

            try:
                print(u"Running test files: {}".format(f))
                c = Core(source_data=data, schema_data=schema)
                c.validate()
                compare(c.validation_errors, [], prefix="No validation errors should exist...")
            except Exception as e:
                print(u"ERROR RUNNING FILES: {}".format(f))
                raise e

            # This serves as an extra schema validation that tests more complex structures than testrule.py does
            compare(c.root_rule._schema_str, schema, prefix=u"Parsed rules are not correct, something has changed... files : {}".format(f))
Example #8
def validate_result(data):
    try:
        validator = Core(source_data=data, schema_data=result_schema)
        validator.validate(raise_exception=True)
    except SchemaError as se:
        raise PresenceError(se)

    return data
Example #9
def check_schema_test(opts, file):
    logging.info("check schema...: %s" % file)
    try:
        c = Core(source_file=file, schema_files=[opts.yaml_schema])
        c.validate(raise_exception=True)
    except SchemaError, e:
        print "check schema: %-80s  %s" % (file, RET_ERROR)
        raise
Example #10
 def validate_config(self):
     try:
         c = Core(source_file="/Users/JohnS5/dev/replication_manager/src/webapp/bdr_app.yml",
             schema_files=['/Users/JohnS5/dev/replication_manager/src/webapp/schema.yml'])
         return c.validate(raise_exception=True)
     except Exception as e:
         print "LOG: ERROR: config file is not valid"
         print e
         return None
Example #11
def validate_with_schema(source_data, schema_file):
    core = Core(source_data=source_data, schema_files=[schema_file])
    try:
        core.validate(raise_exception=True)
    except Exception as error:
        if len(core.errors) > 0:
            show_validation_errors(source_data, core.errors)
        else:
            raise error
def validate_config_yaml(package_name):
  """Check that an integration's config.yaml file has a valid schema

  Raises:
    Exception if the config.yaml file has an improper schema
  """
  resource_path = os.path.join('schema_files', 'config_schema.yaml')
  file_path = pkg_resources.resource_filename(resource_package, resource_path)
  schema_validator = Core(source_file=os.path.join(package_name, 'config.yaml'), schema_files=[file_path])
  schema_validator.validate(raise_exception=True)
Example #13
 def load(self, validate=True):
     schema_file = os.path.join(sys._MEIPASS, 'schema.yml') \
         if hasattr(sys, '_MEIPASS') else self._default_schema_path
     try:
         self._yaml = anyconfig.load(self.path, ignore_missing=False)
     except FileNotFoundError:
         panic('ERROR - %s configuration file does not exist' % self.path)
     if validate:
         validator = Core(source_file=self.path, schema_files=[schema_file])
         validator.validate(raise_exception=True)
Example #14
    def test_validation_error_but_not_raise_exception(self):
        """
        Test that if 'raise_exception=False' when validating that no exception is raised.

        Currently file 2a.yaml & 2b.yaml is designed to cause exception.
        """
        c = Core(source_file=self.f("cli", "2a.yaml"), schema_files=[self.f("cli", "2b.yaml")])
        c.validate(raise_exception=False)

        assert c.validation_errors == ["Value: 1 is not of type 'str' : /0", "Value: 2 is not of type 'str' : /1", "Value: 3 is not of type 'str' : /2"]
Example #15
def check_schema_test(opts, file):
    logging.info("check schema...: %s" % file)
    try:
        c = Core(source_file=file, schema_files=[opts.yaml_schema])
        c.validate(raise_exception=True)
    except SchemaError as e:
        six.print_("check schema: %-80s  ERROR" % file)
        raise
    else:
        six.print_("check schema: %-80s  OK" % file)
Example #16
def test_component_data_valid():
    """ Check that the content of data fits with masonry schema v2 """
    validator = Core(source_data={}, schema_data=get_schema())
    for component_file in iglob('*/component.yaml'):
        print(component_file)
        source_data = yaml.load(open(component_file))
        validator.source = source_data
        try:
            validator.validate(raise_exception=True)
        except:
            assert False, "Error found in: {0}".format(component_file)
Example #17
def _validate(suite, contexts):

    schema = os.path.join(sys.path[0], "utils/schema.yml")
    ext = os.path.join(sys.path[0], "utils/ext_schema.py")
    c = Core(source_data=suite, schema_files=[schema], extensions=[ext])
    c.validate()

    if suite['context'] in contexts:
        raise SchemaError("duplicate 'context' value detected")

    contexts.append(suite['context'])
def validate(p):
    for portfolio_file_name in os.listdir(p):
        portfolios_file_path = os.path.sep.join([p, portfolio_file_name])
        logger.info('Validating {}'.format(portfolios_file_path))
        core = Core(
            source_file=portfolios_file_path,
            schema_files=[resolve_from_site_packages('schema.yaml')]
        )
        core.validate(raise_exception=True)
        click.echo("Finished validating: {}".format(portfolios_file_path))
    click.echo("Finished validating: OK")
def _validate_against_schema(config):
    """Verify that config file is compliant with the defined schema"""
    logging.info("Validating config file against the schema")
    try:
        c = Core(source_data=config, schema_files=[CONFIG_SCHEMA])
        c.validate(raise_exception=True)
    except Exception as e:
        logging.error("Failed when validating schema: %s", e)
        logging.info("Dumping rendered template:\n%s",
                     dump_rendered_config_file(config))
        raise
Example #20
    def test_files_with_unicode_content_failing(self, tmpdir):
        """
        These tests should fail with the specified exception
        """
        # To trigger schema exception we must pass in a source file
        fail_data_2f_yaml = {
            'schema': {
                'type': 'map',
                'mapping': {
                    'msg': {
                        'type': 'int',
                    },
                }
            },
            'data': {
                'msg': 'Foobar',
            },
            'errors': ["Value 'Foobar' is not of type 'int'. Path: '/msg'"]
        }

        source_f = tmpdir.join(u"2få.json")
        source_f.write(yaml.safe_dump(fail_data_2f_yaml, allow_unicode=True))

        _fail_tests = [
            # Test mapping with unicode key and value but wrong type
            (u"1f.yaml", SchemaError),
            # Test unicode filename with validation errors.
            # It is not possible to package a file with unicode characters
            # like åäö in the filename in some python versions.
            # Mock a file with åäö during testing to properly simulate this again.
            (unicode(source_f), SchemaError),
            # Test unicode data inside seq but wrong type
            (u"3f.yaml", SchemaError),
        ]

        for failing_test, exception_type in _fail_tests:
            f = self.f(failing_test)

            with open(f, "r") as stream:
                yaml_data = yaml.safe_load(stream)
                data = yaml_data["data"]
                schema = yaml_data["schema"]
                errors = yaml_data["errors"]

            try:
                print(u"Running test files: {0}".format(f))
                c = Core(source_data=data, schema_data=schema)
                c.validate()
            except exception_type:
                pass  # OK
            else:
                raise AssertionError(u"Exception {0} not raised as expected... FILES: {1} : {2}".format(exception_type, exception_type))

            compare(sorted(c.validation_errors), sorted(errors), prefix=u"Wrong validation errors when parsing files : {0}".format(f))
Example #21
def validate(yml_rule: Dict, schema_path: str):
    """
    This uses pykwalify to validate the given Rule YAML file against the Rule
    schema.
    """
    try:
        c = Core(source_data=yml_rule,
                 schema_files=[schema_path],
                 fix_ruby_style_regex=True)
        c.validate(raise_exception=True)
    except pykwalify.errors.SchemaError as e:
        raise click.ClickException(e.msg)
Example #22
def validate(f):
    logger.info("Validating {}".format(f.name))
    c = Core(
        source_file=f.name,
        schema_files=[asset_helpers.resolve_from_site_packages("schema.yaml")],
        extensions=[
            asset_helpers.resolve_from_site_packages("puppet_schema_extensions.py")
        ],
    )
    c.validate(raise_exception=True)
    click.echo("Finished validating: {}".format(f.name))
    click.echo("Finished validating: OK")
Example #23
    def test_validation_error_but_not_raise_exception(self):
        """
        Test that if 'raise_exception=False' when validating that no exception is raised.

        Currently file 2a.yaml & 2b.yaml is designed to cause exception.
        """
        c = Core(source_file=self.f("cli", "2a.yaml"), schema_files=[self.f("cli", "2b.yaml")])
        c.validate(raise_exception=False)

        assert c.validation_errors == [
            "Value '1' is not of type 'str'. Path: '/0'", "Value '2' is not of type 'str'. Path: '/1'", "Value '3' is not of type 'str'. Path: '/2'"
        ]
Example #24
def test_policies(path, caplog):
    """
    Triggers test failure by looking for any logged warnings.
    """
    kwalify = Kwalify(
        source_file=path,
        schema_data=schema,
        strict_rule_validation=True,
    )
    data = kwalify.validate(raise_exception=False)
    n_log_captures = len(caplog.records)
    assert n_log_captures == 0
Example #25
    def test_files_with_unicode_content_success(self, tmpdir):
        """
        These tests should pass with no exception raised
        """
        fail_data_2s_yaml = {
            'schema': {
                'type': 'map',
                'mapping': {
                    'msg': {
                        'type': 'int',
                    },
                }
            },
            'data': {
                'msg': 123,
            },
            'errors': []
        }

        source_f = tmpdir.join(u"2så.json")
        source_f.write(yaml.safe_dump(fail_data_2s_yaml, allow_unicode=True))

        _pass_tests = [
            # Test mapping with unicode key and value
            u"1s.yaml",
            # # Test unicode filename.
            # It is not possible to package a file with unicode characters
            # like åäö in the filename in some python versions.
            # Mock a file with åäö during testing to properly simulate this again.
            unicode(source_f),
            # Test sequence with unicode keys
            u"3s.yaml",
        ]

        for passing_test_files in _pass_tests:
            f = self.f(passing_test_files)

            with open(f, "r") as stream:
                yaml_data = yaml.safe_load(stream)
                data = yaml_data["data"]
                schema = yaml_data["schema"]

            try:
                print(u"Running test files: {0}".format(f))
                c = Core(source_data=data, schema_data=schema)
                c.validate()
                compare(c.validation_errors, [], prefix="No validation errors should exist...")
            except Exception as e:
                print(u"ERROR RUNNING FILES: {0}".format(f))
                raise e

            # This serves as an extra schema validation that tests more complex structures than testrule.py does
            compare(c.root_rule.schema_str, schema, prefix=u"Parsed rules are not correct, something has changed... files : {0}".format(f))
Example #26
    def __validate(self):
        for schema in self.schemas:
            core = Core(source_data=self._descriptor,
                        schema_data=schema, allow_assertions=True)
            try:
                core.validate(raise_exception=True)
                return
            except Exception as ex:
                # We log this as debug, because we support multiple schemas
                logger.debug("Schema validation failed: %s" % ex)

        raise CekitError("Cannot validate schema: %s" % (self.__class__.__name__))
def validate_fittings(file_path):
    with open(file_path, 'r') as document_stream:
        document = document_stream.read()
        for settings in yaml.load_all(document):
            logging.debug("Validating source data %s", settings)

            c = Core(source_data=settings, schema_files=["schema.yaml"])
            try:
                c.validate(raise_exception=True)
            except pykwalify.errors.SchemaError as schema_error:
                logging.error("Validation of %s failed.", file_path)
                logging.error(schema_error)
Example #28
def validate_yaml_schema(
    yaml_file_content: Text, schema_path: Text, show_validation_errors: bool = True
) -> None:
    """
    Validate yaml content.

    Args:
        yaml_file_content: the content of the yaml file to be validated
        schema_path: the schema of the yaml file
        show_validation_errors: if true, validation errors are shown
    """
    from pykwalify.core import Core
    from pykwalify.errors import SchemaError
    from ruamel.yaml import YAMLError
    import pkg_resources
    import rasa.utils.io
    import logging

    log = logging.getLogger("pykwalify")
    if show_validation_errors:
        log.setLevel(logging.WARN)
    else:
        log.setLevel(logging.CRITICAL)

    try:
        source_data = rasa.utils.io.read_yaml(yaml_file_content)
    except YAMLError:
        raise InvalidYamlFileError(
            "The provided yaml file is invalid. You can use "
            "http://www.yamllint.com/ to validate the yaml syntax "
            "of your file."
        )
    except DuplicateKeyError as e:
        raise InvalidYamlFileError(
            "The provided yaml file contains a duplicated key: '{}'. You can use "
            "http://www.yamllint.com/ to validate the yaml syntax "
            "of your file.".format(str(e))
        )

    try:
        schema_file = pkg_resources.resource_filename(PACKAGE_NAME, schema_path)

        c = Core(source_data=source_data, schema_files=[schema_file])
        c.validate(raise_exception=True)
    except SchemaError:
        raise InvalidYamlFileError(
            "Failed to validate yaml file. "
            "Please make sure the file is correct and all "
            "mandatory parameters are specified; to do so, "
            "take a look at the errors logged during "
            "validation previous to this exception."
        )
Example #29
 def validate_config(self):
     try:
         c = Core(
             source_file=
             "/Users/JohnS5/dev/replication_manager/src/webapp/bdr_app.yml",
             schema_files=[
                 '/Users/JohnS5/dev/replication_manager/src/webapp/schema.yml'
             ])
         return c.validate(raise_exception=True)
     except Exception as e:
         print "LOG: ERROR: config file is not valid"
         print e
         return None
def validate_fittings(file_path):
    with open(file_path, 'r') as document_stream:
        document = document_stream.read()
        for settings in yaml.load_all(document):
            logging.debug("Validating source data %s",
                         settings)

            c = Core(source_data=settings, schema_files=["schema.yaml"])
            try:
                c.validate(raise_exception=True)
            except pykwalify.errors.SchemaError as schema_error:
                logging.error("Validation of %s failed.", file_path)
                logging.error(schema_error)
Example #31
def validate(path: str) -> None:
    """Validate the given manifest."""
    logging.getLogger(pykwalify.__name__).setLevel(logging.CRITICAL)

    with dfetch.resources.schema_path() as schema_path:
        try:
            validator = Core(source_file=path, schema_files=[str(schema_path)])
        except ScannerError as err:
            raise RuntimeError(f"{schema_path} is not a valid YAML file!") from err

    try:
        validator.validate(raise_exception=True)
    except SchemaError as err:
        raise RuntimeError(err.msg) from err
Example #32
 def __init__(self, api_name):
     api_definition_file = "{0}.yaml".format(api_name)
     api_parameters_file = "{0}-params.yaml".format(api_name)
     c = Core(source_file=api_definition_file,
              schema_files=[determine_path() + "/schemas/schema.yaml"])
     c.validate(raise_exception=True)
     with open(api_parameters_file, 'r') as fp:
         params = yaml.load(fp)
     with open(api_definition_file, 'r') as fp:
         self.schema = yaml.load(fp)
     self.headers = traverse_schema(self.schema['Headers'],
                                    GLOBAL_PARAMETER_PATTERN, params)
     self.base = traverse_schema(self.schema['Base'],
                                 GLOBAL_PARAMETER_PATTERN, params)
Example #33
def validate_file_schema(file_url, info):
    """Validate if a file has the correct schema."""
    core = Core(source_file=file_url, schema_files=[FILE_PATH + 'schema.yaml'])
    is_valid = False
    try:
        core.validate(raise_exception=True)
        is_valid = True
    except (CoreError, SchemaError):
        util.cloudwatch_log(
            info.context,
            'Error: An error occurred validating vulnerabilities file')
        is_valid = False
        raise InvalidSchema()
    return is_valid
Example #34
    def __validate(self):
        if not self.schema:
            return

        try:
            core = Core(source_data=self._descriptor,
                        schema_data=self.schema,
                        allow_assertions=True)

            core.validate(raise_exception=True)
        except SchemaError as ex:
            raise CekitError(
                "Cannot validate schema: {}".format(self.__class__.__name__),
                ex)
Example #35
def validate_yaml_schema(yaml_file_content: Text, schema_path: Text) -> None:
    """
    Validate yaml content.

    Args:
        yaml_file_content: the content of the yaml file to be validated
        schema_path: the schema of the yaml file
    """
    from pykwalify.core import Core
    from pykwalify.errors import SchemaError
    from ruamel.yaml import YAMLError
    import pkg_resources
    import logging

    log = logging.getLogger("pykwalify")
    log.setLevel(logging.CRITICAL)

    try:
        # we need "rt" since
        # it will add meta information to the parsed output. this meta information
        # will include e.g. at which line an object was parsed. this is very
        # helpful when we validate files later on and want to point the user to the
        # right line
        source_data = rasa.shared.utils.io.read_yaml(
            yaml_file_content, reader_type=["safe", "rt"])
    except (YAMLError, DuplicateKeyError) as e:
        raise YamlSyntaxException(underlying_yaml_exception=e)

    schema_file = pkg_resources.resource_filename(PACKAGE_NAME, schema_path)
    schema_utils_file = pkg_resources.resource_filename(
        PACKAGE_NAME, RESPONSES_SCHEMA_FILE)
    schema_extensions = pkg_resources.resource_filename(
        PACKAGE_NAME, SCHEMA_EXTENSIONS_FILE)

    c = Core(
        source_data=source_data,
        schema_files=[schema_file, schema_utils_file],
        extensions=[schema_extensions],
    )

    try:
        c.validate(raise_exception=True)
    except SchemaError:
        raise YamlValidationException(
            "Please make sure the file is correct and all "
            "mandatory parameters are specified. Here are the errors "
            "found during validation",
            c.errors,
            content=source_data,
        )
Example #36
    def __init__(self, config_path, schema_path, config_changes):
        with open(config_path, 'rt') as src:
            config = read_config(src)
        make_config_changes(config, config_changes)

        self.multi_stage = 'stages' in config
        if self.multi_stage:
            ordered_changes = OrderedDict(
                sorted(
                    config['stages'].items(),
                    key=lambda (k, v): v['number'],
                ))
            self.ordered_stages = OrderedDict()
            for name, changes in ordered_changes.items():
                current_config = copy.deepcopy(config)
                del current_config['stages']
                del changes['number']
                merge_recursively(current_config, changes)
                self.ordered_stages[name] = current_config

        # Validate the configuration and the training stages
        with open(os.path.expandvars(schema_path)) as schema_file:
            schema = yaml.safe_load(schema_file)
            core = Core(source_data=config, schema_data=schema)
            core.validate(raise_exception=True)
            if self.multi_stage:
                for stage in self.ordered_stages.values():
                    core = Core(source_data=config, schema_data=schema)
                    core.validate(raise_exception=True)
        super(Configuration, self).__init__(config)
    def __init__(self, d: Dict[Any, Any]) -> None:
        validator = Validator(schema_data=schemas.config, source_data=d)
        validator.validate()
        config = self._merge_dicts(self._DEFAULTS, d)

        self.logging = LoggingConfig(config['logging'])
        self.vagrant = VagrantConfig(
            Path(config['vagrant']['vagrant_files_home']), )
        self.runner = RunnerConfig(
            token=config['runner']['token'],
            interval=timedelta(seconds=config['runner']['interval']),
            instances=config['runner']['instances'],
            endpoint=config['runner']['endpoint'],
        )
Example #38
            def collect_configs(search_type, search_dir, filename_list,
                                include, gen):
                if len(filename_list) == 0:
                    return  # no configs of this type found, bail out

                check_include = len(include) > 0 or self.wip
                with Dir(search_dir):
                    for config_filename in filename_list:
                        is_include = False

                        # loading WIP configs requires a pre-load of the config to check
                        # removing this requirement would require a massive rework of Metadata()
                        # deemed not worth it - AMH 10/9/18
                        is_wip = False
                        if self.wip:
                            full_path = os.path.join(search_dir,
                                                     config_filename)
                            with open(full_path, 'r') as f:
                                cfg_data = yaml.load(f)
                                if cfg_data.get('mode', None) == 'wip':
                                    is_wip = True

                        if not is_wip and check_include:
                            if check_include and config_filename in include:
                                is_include = config_filename in include
                                self.logger.debug("include: " +
                                                  config_filename)
                                include.remove(config_filename)
                            else:
                                self.logger.debug(
                                    "Skipping {} {} since it is not in the include list"
                                    .format(search_type, config_filename))
                                continue

                        try:
                            schema_path = os.path.join(
                                os.path.dirname(os.path.realpath(__file__)),
                                "schema_{}.yml".format(search_type))
                            c = Core(source_file=config_filename,
                                     schema_files=[schema_path])
                            c.validate(raise_exception=True)

                            gen(search_dir, config_filename, self.disabled
                                or is_include or is_wip)
                        except Exception:
                            self.logger.error(
                                "Configuration file failed to load: {}".format(
                                    os.path.join(search_dir, config_filename)))
                            raise
Example #39
    def test_create_empty_core_object(self, tmpdir):
        """
        If creating a core object without any source or schema file, an exception should be raised.
        """
        with pytest.raises(CoreError) as ex:
            Core()
        assert "No source file/data was loaded" in str(ex.value)

        # To trigger schema exception we must pass in a source file
        source_f = tmpdir.join("bar.json")
        source_f.write("3.14159")

        with pytest.raises(CoreError) as ex:
            Core(source_file=str(source_f))
        assert "No schema file/data was loaded" in str(ex.value)
Example #40
def main(argv):
    if len(argv) < 2:
        print('you must provide a file path & a schema path')
        sys.exit(1)

    file_path = argv[0]
    schema_path = argv[1]
    c = Core(source_file=file_path, schema_files=[schema_path])
    try:
        c.validate(raise_exception=True)
    except Exception as err:
        print 'Validation failed: %s' % (file_path, )
        print err
        sys.exit(1)
    sys.exit(0)
Example #41
    def _validate(self):
        regexp = re.compile(r"^cff-version: (['|\"])?(?P<semver>[\d\.]*)(['\"])?\s*$")
        semver = None
        has_no_cff_version_key = True
        for line in self.cffstr.split("\n"):
            if line[0:12] == "cff-version:":
                has_no_cff_version_key = False
                matched = re.match(regexp, line)
                if matched is not None:
                    semver = matched.groupdict()["semver"]
                    break

        if has_no_cff_version_key:
            raise ValueError("Unable to identify the schema version. Does the CFF include the 'cff-version' key?")
        if semver is None:
            raise ValueError("Unrecognized value for key \"cff-version\".")

        schema_urls = {
            "1.0.1": "https://raw.githubusercontent.com/citation-file-format/schema/1.0.1/CFF-Core/schema.yaml",
            "1.0.2": "https://raw.githubusercontent.com/citation-file-format/schema/1.0.2/CFF-Core/schema.yaml",
            "1.0.3": "https://raw.githubusercontent.com/citation-file-format/schema/1.0.3-1/CFF-Core/schema.yaml",
            "1.1.0": "https://raw.githubusercontent.com/citation-file-format/citation-file-format/1.1.0/schema.yaml"
        }

        try:
            schema_url = schema_urls[semver]
        except KeyError:
            versions = '"' + '", "'.join(sorted(schema_urls.keys())) + '"'
            raise Exception("\"{0}\" is not a supported release. Instead, use one of {1}."
                            .format(semver, versions))

        r = requests.get(schema_url)
        r.raise_for_status()
        self.schema = r.text

        with tempfile.TemporaryDirectory() as tmpdir:

            datafile = os.path.join(tmpdir, "data.yaml")
            schemafile = os.path.join(tmpdir, "schema.yaml")
            with open(datafile, "w") as f:
                f.write(self.cffstr)
            with open(schemafile, "w") as f:
                f.write(self.schema)

            c = Core(source_file=datafile, schema_files=[schemafile])
            c.validate(raise_exception=self.raise_exception)

        return self
Example #42
class Parser:
    def __init__(self, schema_file=None):

        self.validator = None
        self.data = None
        self.schema_file = None

        if schema_file is not None and os.path.exists(schema_file):
            self.schema_file = schema_file
        else:
            raise Exception("Couldn't not find schema file")

    def parse(self, source_file=None):

        if source_file is not None:
            try:
                self.validator = Core(source_file=source_file,
                                      schema_files=[self.schema_file])
            except Exception as e:
                raise

        if self.validator is None:
            raise Exception("No validator initialized; a source_file must be provided")

        try:
            self.data = self.validator.validate()
            return self.data
        except Exception as e:
            raise
Example #43
 def test_load_wrong_schema_files_type(self):
     """
     It should only be possible to send in a list type as 'schema_files' object
     """
     with pytest.raises(CoreError) as ex:
         Core(source_file=None, schema_files={})
     assert "schema_files must be of list type" in str(ex.value)
Example #44
def parse_data(site):
    data = yaml.load(file(op.join(op.dirname(__file__), 'games.yaml')))

    try:
        core = Core(source_data=data, schema_files=['schema.yaml'])
        core.validate(raise_exception=True)
    except Exception as error:
        if len(core.errors) > 0:
            show_validation_errors(data, core.errors)
        else:
            raise error

    for item in data:
        parse_global_tags(site, item.get('meta', {}), 'genre')
        parse_items(site, item, 'clones')
        parse_items(site, item, 'reimplementations')
Example #45
def checkFile(path):
    """
    Validate one file
    """
    with open(path, 'r') as yamlfile:
        contents = yaml.load_all(yamlfile)
        for i, doc in enumerate(contents):
            c = Core(source_data=doc, schema_files=[schemaFile])

            try:
                c.validate(raise_exception=True)
            except Exception as e:
                print("Schema validation error in {} entry {}".format(path, i))
                raise e

            # validate uniqueness
            key = unique_doc_key(doc)
            if key in doc_unique_keys:
                raise Exception("Entry {index} is duplicate: {path} {key}".format(
                    index=i,
                    path=path,
                    key=key
                ))
            doc_unique_keys.add(key)

            if 'emails' in doc:
                for address in doc['emails']:
                    if address['address'] in emails:
                        raise Exception("Email in entry {index} is duplicate: {email} {path} {key}".format(
                            index=i,
                            email=address['address'],
                            path=path,
                            key=key
                        ))
                    emails.add(address['address'])

            if 'urls' in doc:
                for url in doc['urls']:
                    if url['url'] in urls:
                        raise Exception("URL in entry {index} is duplicate: {url} {path} {key}".format(
                            index=i,
                            url=url['url'],
                            path=path,
                            key=key
                        ))
                    urls.add(url['url'])
Example #46
    def begin(self):
        schema_file = self.load_yaml(
            os.path.dirname(__file__) + "/schemas/input.yaml")

        core = Core(source_data=self.app_args.yaml_file,
                    schema_data=schema_file)
        core.validate(raise_exception=True)

        self.directory_manager.archive_outputs()

        if self.options.execution_mode == 'serial':
            self.execute_serial()
        else:
            # parallel is default
            self.execute_parallel()

        LOG.info('All frameworks have finished execution')
Example #47
    def parse(self, source_file=None):

        if source_file is not None:
            try:
                self.validator = Core(source_file=source_file,
                                      schema_files=[self.schema_file])
            except Exception as e:
                raise

        if self.validator is None:
            raise Exception("No validator initialized; a source_file must be provided")

        try:
            self.data = self.validator.validate()
            return self.data
        except Exception as e:
            raise
Example #48
def test_yaml_advanced_validation():
    schema = os.path.join(os.path.dirname(__file__), "..", "..", "..", "lib", "galaxy", "webapps", "galaxy", "job_config_schema.yml")
    integration_tests_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..", "test", "integration")
    valid_files = [
        ADVANCED_JOB_CONF_YAML,
        os.path.join(integration_tests_dir, "delay_job_conf.yml"),
        os.path.join(integration_tests_dir, "embedded_pulsar_metadata_job_conf.yml"),
        os.path.join(integration_tests_dir, "io_injection_job_conf.yml"),
        os.path.join(integration_tests_dir, "resubmission_job_conf.yml"),
        os.path.join(integration_tests_dir, "resubmission_default_job_conf.yml"),
    ]
    for valid_file in valid_files:
        c = Core(
            source_file=valid_file,
            schema_files=[schema],
        )
        c.validate()
Example #49
def validate_parameters(parameters, kliko):
    """
    validate a set of parameters given a kliko definition

    args:
        parameters (dict): A structure that should follow the given kliko structure
        kliko (dict): A nested dict which defines the valid parameters in Kliko format

    returns:
        str: the parsed parameters

    raises:
        an exception if the string can't be parsed or does not match the defined valid parameters
    """
    schema = convert_to_parameters_schema(kliko)
    c = Core(source_data=parameters, schema_data=schema)
    c.validate(raise_exception=True)
    return True
Example #50
def validate_kliko(kliko, version=SCHEMA_VERSION):
    """
    validate a kliko yaml string

    args:
        kliko: a parsed kliko object

    returns:
        dict: a (nested) kliko structure

    raises:
        an exception if the string can't be parsed or does not follow the Kliko schema
    """
    # first try to parse it, to make sure it is parsable

    schema_file = os.path.join(here, "schemas/%s.yml" % version)
    c = Core(source_data=kliko, schema_files=[schema_file])
    c.validate(raise_exception=True)
    return kliko
Example #51
    def validate_domain_yaml(cls, yaml):
        """Validate domain yaml."""
        from pykwalify.core import Core

        log = logging.getLogger('pykwalify')
        log.setLevel(logging.WARN)

        schema_file = pkg_resources.resource_filename(__name__,
                                                      "schemas/domain.yml")
        source_data = utils.read_yaml_string(yaml)
        c = Core(source_data=source_data,
                 schema_files=[schema_file])
        try:
            c.validate(raise_exception=True)
        except SchemaError:
            raise ValueError("Failed to validate your domain yaml. "
                             "Make sure the file is correct, to do so"
                             "take a look at the errors logged during "
                             "validation previous to this exception. ")
Example #52
def parse_data(site):
    base = op.join(op.dirname(__file__), 'games')
    data = []
    for fn in sorted(os.listdir(base)):
        if fn.endswith('.yaml'):
            data.extend(yaml.load(open(op.join(base, fn))))

    try:
        core = Core(source_data=data, schema_files=['schema.yaml'])
        core.validate(raise_exception=True)
    except Exception as error:
        if len(core.errors) > 0:
            show_validation_errors(data, core.errors)
        else:
            raise error

    for item in data:
        parse_global_tags(site, item.get('meta', {}), 'genre')
        parse_items(site, item, 'remakes')
        parse_items(site, item, 'clones')
Example #53
def import_from_yaml(statechart: Iterable[str], ignore_schema: bool=False, ignore_validation: bool=False) -> Statechart:
    """
    Import a statechart from a YAML representation.
    YAML is first validated against *sismic.io.yaml.SCHEMA_PATH*, and resulting statechart is validated
    using its *validate* method.

    :param statechart: string or any equivalent object
    :param ignore_schema: set to *True* to disable yaml validation.
    :param ignore_validation: set to *True* to disable statechart validation.
    :return: a *Statechart* instance
    """
    data = yaml.load(statechart)  # type: dict
    if not ignore_schema:
        checker = Core(source_data=data, schema_files=[SCHEMA_PATH])
        checker.validate(raise_exception=True)

    sc = import_from_dict(data)
    if not ignore_validation:
        sc.validate()
    return sc
Example #54
def import_from_yaml(statechart: str, validate_yaml=True, validate_statechart=True) -> StateChart:
    """
    Import a statechart from a YAML representation.
    YAML is first validated against ``io.SCHEMA``.

    :param statechart: string or any equivalent object
    :param validate_yaml: set to ``False`` to disable yaml validation.
    :param validate_statechart: set to ``False`` to disable statechart validation
      (see ``model.StateChart.validate``).
    :return: a ``StateChart`` instance
    """
    statechart_data = yaml.load(statechart)
    if validate_yaml:
        checker = Core(source_data=statechart_data, schema_files=[SCHEMA_PATH])
        checker.validate(raise_exception=True)

    sc = _import_from_dict(statechart_data['statechart'])
    if validate_statechart:
        sc.validate()

    return sc
Example #55
    def __init__(self, config_path, schema_path, config_changes):
        with open(config_path, 'rt') as src:
            config = read_config(src)
        make_config_changes(config, config_changes)

        self.multi_stage = 'stages' in config
        if self.multi_stage:
            ordered_changes = OrderedDict(
                sorted(config['stages'].items(),
                       key=lambda (k, v): v['number'],))
            self.ordered_stages = OrderedDict()
            for name, changes in ordered_changes.items():
                current_config = copy.deepcopy(config)
                del current_config['stages']
                del changes['number']
                merge_recursively(current_config, changes)
                self.ordered_stages[name] = current_config

        # Validate the configuration and the training stages
        with open(os.path.expandvars(schema_path)) as schema_file:
            schema = yaml.safe_load(schema_file)
            core = Core(source_data=config, schema_data=schema)
            core.validate(raise_exception=True)
            if self.multi_stage:
                for stage in self.ordered_stages.values():
                    core = Core(source_data=config, schema_data=schema)
                    core.validate(raise_exception=True)
        super(Configuration, self).__init__(config)
Example #56
def _validate(args, app_desc):
    path = _find_config(args, app_desc)
    # Allow empty mapping (not allowed by pykwalify)
    raw_config = _order_load_path(path)
    if raw_config.get(app_desc.app_name, None) is None:
        raw_config[app_desc.app_name] = {}
        config_p = tempfile.NamedTemporaryFile(delete=False, suffix=".yml")
        _ordered_dump(raw_config, config_p)
        config_p.flush()
        path = config_p.name

    fp = tempfile.NamedTemporaryFile(delete=False, suffix=".yml")
    _ordered_dump(app_desc.schema.raw_schema, fp)
    fp.flush()
    name = fp.name
    if Core is None:
        raise Exception("Cannot validate file, pykwalify is not installed.")
    c = Core(
        source_file=path,
        schema_files=[name],
    )
    c.validate()
Example #57
def load_config(file_name):
    """
    Load the file, verify that it conforms to the schema,
    and return the configuration.
    """
    import yaml
    from pykwalify.core import Core
    from pykwalify.errors import SchemaError

    # Disable most logging for pykwalify
    import logging
    logging.getLogger('pykwalify').setLevel(logging.CRITICAL)
    logging.getLogger('pykwalify').addHandler(logging.NullHandler())

    try:
        with open(file_name, 'r') as conf_file:
            data = yaml.safe_load(conf_file)
            validator = Core(
                source_data=data,
                schema_files=[dirname(__file__) + "/rebench-schema.yml"])
            try:
                validator.validate(raise_exception=True)
            except SchemaError as err:
                errors = [escape_braces(val_err) for val_err in validator.validation_errors]
                raise UIError(
                    "Validation of " + file_name + " failed.\n{ind}" +
                    "\n{ind}".join(errors) + "\n", err)
            return data
    except IOError as err:
        if err.errno == 2:
            assert err.strerror == "No such file or directory"
            raise UIError("The requested config file (%s) could not be opened. %s.\n"
                          % (file_name, err.strerror), err)
        raise UIError(str(err) + "\n", err)
    except yaml.YAMLError as err:
        raise UIError("Parsing of the config file "
                      + file_name + " failed.\nError " + str(err) + "\n", err)
Example #58
    def validate_deploy(self):
        """
        Validates the deployment yaml file with the schema
        :raises pykwalify.errors.SchemaError: if validation fails
        :raises pykwalify.errors.CoreError: for other type of errors
        """
        logging.debug(self.__class__.__name__ + ': ' + sys._getframe().f_code.co_name)
        if not self.deploy_file:
            raise AssertionError

        try:
            c = Core(source_file=self.deploy_file,
                     schema_files=[self.schema_file],
                     extensions=[self.schema_functions_file])
            c.validate()
        except CoreError as e:
            # Most probably there is something wrong with the source files
            logging.debug(self.__class__.__name__ + ': ' + sys._getframe().f_code.co_name + ': ' + e.msg)
            raise
        except SchemaError as e:
            # The deploy file is not valid
            logging.debug(self.__class__.__name__ + ': ' + sys._getframe().f_code.co_name + ': ' + e.msg)
            print("The deployment file at '%s' is not valid." % (self.deploy_file_host,))
            raise
Example #59
 def __init__(self, component_directory=None, component_dict=None):
     """ Initialize a component object by identifying the system and
     component key, loading the metadata from the component.yaml, and
     creating a mapping of the controls it satisfies. If a component dict
     is passed in no special mappings needs to be created because imports come
     from certifications
     """
     self.validator = Core(
         source_data={}, schema_data=OPENCONTROL_V2_SCHEMA
     )
     if component_directory and not component_dict:
         self.component_directory = component_directory
         system_dir, self.component_key = os.path.split(component_directory)
         self.system_key = os.path.split(system_dir)[-1]
         self.load_metadata(component_directory)
         self.justification_mapping = self.prepare_justifications()
     elif component_dict and not component_directory:
         self.system_key = component_dict['system_key']
         self.component_key = component_dict['component_key']
         self.meta = component_dict