Example #1
    def validate(self, config):
        schema = load_schema(str(self.VERSION))
        jsonschema.validate(config, schema)

        if have_parameter_reference_pattern(config):
            raise jsonschema.SchemaError(
                'Do not support parameter reference in config version <= 2')
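
A minimal, self-contained sketch of the pattern Example 1 relies on: jsonschema.validate rejects data that violates the schema, and the method then raises SchemaError by hand for a feature (parameter references) that the schema alone cannot forbid. The schema, config, and the "${...}" reference syntax below are illustrative assumptions, not taken from the original project.

import jsonschema

schema = {"type": "object", "properties": {"name": {"type": "string"}}}
config = {"name": "demo", "value": "${params.threshold}"}  # hypothetical reference syntax

# Step 1: structural validation; raises ValidationError on bad data.
jsonschema.validate(config, schema)

# Step 2: hand-rolled check for a feature the schema alone cannot express.
if any("${" in str(value) for value in config.values()):
    raise jsonschema.SchemaError(
        'Do not support parameter reference in config version <= 2')
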
Example #2
def http_get_schema(scid):
    """Retrieve schema via HTTP."""
    validate_scid(scid)
    url = SCHEMA_URL_FORMAT % scid
    try:
        schema = json.loads(http_get(url))
    except (ValueError, EnvironmentError) as ex:
        raise jsonschema.SchemaError('Schema fetch failure: %s' % ex)
    jsonschema.Draft3Validator.check_schema(schema)
    return schema
Example #3
def http_get_schema(scid):
    """Retrieve schema via HTTP."""
    url = RAW_SCHEMA_URL_FORMAT % scid
    try:
        content = urlopen(url).read().decode('utf-8')
        schema = json.loads(content)
    except (ValueError, EnvironmentError) as ex:
        raise jsonschema.SchemaError('Schema fetch failure: %s' % ex)
    jsonschema.Draft3Validator.check_schema(schema)
    return schema
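
Both HTTP helpers above validate the downloaded document before returning it. A short sketch of that final step, using a deliberately malformed schema invented for illustration: Draft3Validator.check_schema raises jsonschema.SchemaError when its argument is not itself a valid Draft 3 schema.

import jsonschema

good_schema = {"type": "object"}
bad_schema = {"type": 42}  # "type" must be a string or a list, so this is rejected

jsonschema.Draft3Validator.check_schema(good_schema)  # passes silently

try:
    jsonschema.Draft3Validator.check_schema(bad_schema)
except jsonschema.SchemaError as ex:
    print("rejected:", ex.message)
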
Example #4
    def load(self, schemafile=None):
        """Load and process the schema file"""
        if schemafile is not None:
            self._schemafile = schemafile

        try:
            # Use a context manager so the file handle is closed promptly.
            with open(self._schemafile) as schema_fh:
                self.data = json.load(schema_fh)
        except (IOError, ValueError) as e:
            msg = "Could not load schema file '{0}': '{1}'".format(
                self._schemafile, e)
            raise jsonschema.SchemaError(msg)
Example #5
    def schema_val(self, messages=None):
        "Perform validation with processed YAML and Schema"
        self._ymlproc = YAMLProcessor(self._ymlfile)
        self._schemaproc = SchemaProcessor(self._schemafile)
        valid = True

        log.debug(
            "BEGIN: Schema-based validation for YAML '%s' with schema '%s'",
            self._ymlfile,
            self._schemafile,
        )

        # Make sure the yml and schema have been loaded
        if self._ymlproc.loaded and self._schemaproc.loaded:
            # Load all of the yaml documents. Could be more than one in the same YAML file.
            for docnum, data in enumerate(
                    yaml.load_all(self._ymlproc.data, Loader=yaml.Loader)):

                # Since YAML allows integer keys but JSON does not, we need to first
                # dump the data as a JSON string to encode all of the potential integers
                # as strings, and then read it back out into the YAML format. Kind of
                # a clunky workaround but it works as expected.
                data = yaml.load(json.dumps(data), Loader=yaml.Loader)

                # Now we want to get a validator ready
                v = jsonschema.Draft4Validator(self._schemaproc.data)

                # Loop through the errors (if any) and set valid = False if any are found
                # Display the error message
                for error in v.iter_errors(data):
                    msg = ("Schema-based validation failed for YAML file '" +
                           self._ymlfile + "'")
                    self.ehandler.process(docnum, self._ymlproc.doclines,
                                          error, messages)
                    valid = False

                if not valid:
                    log.error(msg)

        elif not self._ymlproc.loaded:
            raise util.YAMLError("YAML must be loaded in order to validate.")
        elif not self._schemaproc.loaded:
            raise jsonschema.SchemaError(
                "Schema must be loaded in order to validate.")

        log.debug("END: Schema-based validation complete for '%s'",
                  self._ymlfile)
        return valid
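
The core of Example 5 is the iter_errors loop, which keeps validating after the first failure so every problem in a document gets reported. A self-contained sketch of that behaviour with an illustrative schema and data (both invented here):

import jsonschema

schema = {
    "type": "object",
    "properties": {
        "name": {"type": "string"},
        "count": {"type": "integer"},
    },
    "required": ["name", "count"],
}
data = {"name": 123, "count": "three"}

validator = jsonschema.Draft4Validator(schema)
for error in validator.iter_errors(data):
    # error.path locates the offending element, error.message explains it.
    print(list(error.path), error.message)
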
Example #6
    def test_validate_invalid_data(self, jschema_m):
        jschema_m.ValidationError = jsonschema.ValidationError
        jschema_m.SchemaError = jsonschema.SchemaError
        paths = [(("a", 0), r"\['a'\]\[0\]"), ((), "")]
        for path, details in paths:
            msg = "Invalid data: error."
            if details:
                msg += "\nField: {0}".format(details)
            with self.assertRaisesRegexp(ValueError, msg):
                jschema_m.validate.side_effect = jsonschema.ValidationError(
                    "error", path=path)
                self.api._validate_data([], {})
            jschema_m.validate.assert_called_with([], {})
            jschema_m.validate.reset_mock()

            msg = "Invalid schema: error."
            if details:
                msg += "\nField: {0}".format(details)
            with self.assertRaisesRegexp(ValueError, msg):
                jschema_m.validate.side_effect = jsonschema.SchemaError(
                    "error", schema_path=path)
                self.api._validate_data([], {})
            jschema_m.validate.assert_called_with([], {})
Example #7
def init_base_logging(directory="./log",
                      verbose=0,
                      silent=False,
                      color=False,
                      no_file=False,
                      truncate=True,
                      config_location=None):
    """
    Initialize the Icetea logging by creating a directory to store logs
    for this run and initialize the console logger for Icetea itself.

    :param directory: Directory where to store the resulting logs
    :param verbose: Log level as integer
    :param silent: If True, limit console logging to WARN level
    :param no_file: If True, do not log to a file
    :param color: If True, use colored console logging (requires the coloredlogs module)
    :param truncate: If True, enable log truncation
    :param config_location: Location of config file.
    :raises IOError: if unable to read the configuration file.
    :raises OSError: if the log path already exists.
    :raises ImportError: if colored logging was requested but the coloredlogs module is not installed.
    """
    global LOGPATHDIR
    global STANDALONE_LOGGING
    global TRUNCATE_LOG
    global COLOR_ON
    global SILENT_ON
    global VERBOSE_LEVEL

    if config_location:
        try:
            _read_config(config_location)
        except IOError as error:
            raise IOError(
                "Unable to read from configuration file {}: {}".format(
                    config_location, error))
        except jsonschema.SchemaError as error:
            raise jsonschema.SchemaError("Logging configuration schema "
                                         "file malformed: {}".format(error))

    LOGPATHDIR = os.path.join(
        directory,
        datetime.datetime.now().strftime("%Y-%m-%d_%H%M%S.%f").rstrip("0"))

    # Initialize the simple console logger for IceteaManager
    icetealogger = logging.getLogger("icetea")
    icetealogger.propagate = False
    icetealogger.setLevel(logging.DEBUG)
    stream_handler = logging.StreamHandler()
    formatter = BenchFormatter(
        LOGGING_CONFIG.get("IceteaManager").get("format"),
        LOGGING_CONFIG.get("IceteaManager").get("dateformat"))
    if not color:
        stream_handler.setFormatter(formatter)
    elif color and not COLORS:
        raise ImportError("Missing coloredlogs module. Please install with "
                          "pip to use colors in logging.")
    else:

        class ColoredBenchFormatter(coloredlogs.ColoredFormatter):
            """
            This is defined as an internal class here because coloredlogs is an optional
            dependency.
            """
            converter = datetime.datetime.fromtimestamp

            def formatTime(self, record, datefmt=None):
                date_and_time = self.converter(record.created, tz=pytz.utc)
                if "%F" in datefmt:
                    msec = "%03d" % record.msecs
                    datefmt = datefmt.replace("%F", msec)
                str_time = date_and_time.strftime(datefmt)
                return str_time

        COLOR_ON = color
        stream_handler.setFormatter(
            ColoredBenchFormatter(
                LOGGING_CONFIG.get("IceteaManager").get("format"),
                LOGGING_CONFIG.get("IceteaManager").get("dateformat"),
                LEVEL_FORMATS, FIELD_STYLES))

    SILENT_ON = silent
    VERBOSE_LEVEL = verbose
    if not no_file:
        try:
            os.makedirs(LOGPATHDIR)
        except OSError:
            raise OSError("Log path %s already exists." % LOGPATHDIR)
        filename = LOGGING_CONFIG.get("IceteaManager").get("file").get(
            "name", "icetea.log")
        icetealogger = _add_filehandler(icetealogger,
                                        get_base_logfilename(filename),
                                        formatter, "IceteaManager")
    if verbose and not silent:
        stream_handler.setLevel(logging.DEBUG)
    elif silent:
        stream_handler.setLevel(logging.WARN)
    else:
        stream_handler.setLevel(
            getattr(logging,
                    LOGGING_CONFIG.get("IceteaManager").get("level")))
    icetealogger.addHandler(stream_handler)
    TRUNCATE_LOG = truncate
    if TRUNCATE_LOG:
        icetealogger.addFilter(ContextFilter())
    STANDALONE_LOGGING = False
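
A hedged usage sketch for Example 7: call init_base_logging with a custom logging configuration and handle the exceptions its docstring lists. The import path (icetea_lib.LogManager) and the configuration filename are assumptions for illustration and may not match every Icetea version.

import jsonschema
from icetea_lib import LogManager

try:
    LogManager.init_base_logging(directory="./log",
                                 verbose=1,
                                 config_location="logging_config.json")
except IOError as err:
    print("Could not read logging configuration:", err)
except jsonschema.SchemaError as err:
    print("Logging configuration did not match its schema:", err)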