Example #1
    def perform(self, multiple=False, object_cache=None):
        from . import changeset # to avoid a circular import

        if self.state in ('pending-approval', 'performed', 'failed'):
            return
        try:
            jsonschema.validate(self.data, changeset.schema)
        except jsonschema.ValidationError as e:
            raise exceptions.SchemaValidationError(e)
        if self.pk:
            try:
                Changeset.objects.select_for_update().get(pk=self.pk, version=self.version)
            except Changeset.DoesNotExist as e:
                raise exceptions.ChangesetConflict() from e
        updater = changeset.SourceUpdater(self.base_href, self.author, self.committer, multiple=multiple,
                                          object_cache=object_cache)
        try:
            with transaction.atomic():
                updater.perform_updates(self.data)
        except Exception:
            self.state = 'failed'
            self.performed = now()
            self.save()
            raise
        else:
            self.state = 'performed'
            self.performed = now()
            self.save()
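The validate-and-wrap idiom at the top of this method is worth seeing in isolation: the jsonschema error is caught and re-raised as a domain exception so callers never depend on jsonschema directly. A minimal standalone sketch (SchemaValidationError here stands in for any project-specific error class):

import jsonschema

class SchemaValidationError(Exception):
    pass

def check(data, schema):
    try:
        jsonschema.validate(data, schema)
    except jsonschema.ValidationError as e:
        # re-raise as a domain exception; chain the original for debugging
        raise SchemaValidationError(str(e)) from e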
Example #2
    def validate_response(cls, schema, resp, body):
        # Only check the response if the status code is a success code
        # TODO(cyeoh): Eventually we should be able to verify that a failure
        # code, if it exists, is something that we expect. This is explicitly
        # declared in the V3 API and so we should be able to export this in
        # the response schema. For now we'll ignore it.
        if resp.status in HTTP_SUCCESS:
            cls.expected_success(schema['status_code'], resp.status)

            # Check the body of a response
            body_schema = schema.get('response_body')
            if body_schema:
                try:
                    jsonschema.validate(body, body_schema)
                except jsonschema.ValidationError as ex:
                    msg = ("HTTP response body is invalid (%s)") % ex
                    raise exceptions.InvalidHTTPResponseBody(msg)
            else:
                if body:
                    msg = ("HTTP response body should not exist (%s)") % body
                    raise exceptions.InvalidHTTPResponseBody(msg)

            # Check the header of a response
            header_schema = schema.get('response_header')
            if header_schema:
                try:
                    jsonschema.validate(resp, header_schema)
                except jsonschema.ValidationError as ex:
                    msg = ("HTTP response header is invalid (%s)") % ex
                    raise exceptions.InvalidHTTPResponseHeader(msg)
Example #3
def validate_report_all_errors(instance, schema):
    try:
        validate(instance, schema)
    except ValidationError:
        v = Draft4Validator(schema)
        msg = ','.join([err.message for err in v.iter_errors(instance)])
        raise ValidateAllErrors(msg)
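A small self-contained illustration of the aggregation pattern above, assuming only jsonschema is installed (the schema and instance are made up): iter_errors() yields every violation instead of stopping at the first one, which is what makes the joined message possible.

import jsonschema

schema = {
    "type": "object",
    "properties": {"a": {"type": "integer"}, "b": {"type": "string"}},
    "required": ["a", "b"],
}
validator = jsonschema.Draft4Validator(schema)
messages = [err.message for err in validator.iter_errors({"a": "oops"})]
print(','.join(messages))  # reports both the type error for 'a' and the missing 'b'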
Example #4
 def validate_metadata(self, metadata, reviewer=False, required_fields=False):
     """
     Validates registration_metadata field.
     """
     schema = create_jsonschema_from_metaschema(self.schema,
                                                required_fields=required_fields,
                                                is_reviewer=reviewer)
     try:
         jsonschema.validate(metadata, schema)
     except jsonschema.ValidationError as e:
         for page in self.schema['pages']:
             for question in page['questions']:
                 if e.relative_schema_path[0] == 'required':
                     raise ValidationError(
                         'For your registration the \'{}\' field is required'.format(question['title'])
                     )
                 elif e.relative_schema_path[0] == 'additionalProperties':
                     raise ValidationError(
                         'For your registration the \'{}\' field is extraneous and not permitted in your response.'.format(question['qid'])
                     )
                 elif e.relative_path[0] == question['qid']:
                     if 'options' in question:
                         raise ValidationError(
                             'For your registration your response to the \'{}\' field is invalid, your response must be one of the provided options.'.format(
                                 question['title'],
                             ),
                         )
                     raise ValidationError(
                         'For your registration your response to the \'{}\' field is invalid.'.format(question['title']),
                     )
         raise ValidationError(e.message)
     except jsonschema.SchemaError as e:
         raise ValidationValueError(e.message)
     return
Example #5
def text_to_transcript(text_file, output_file):
    text = open(text_file).read()

    filedir = os.path.dirname(os.path.realpath(__file__))
    schema_path = os.path.join(
        filedir, "alignment-schemas/transcript_schema.json")

    transcript_schema = json.load(open(schema_path))

    paragraphs = text.split("\n\n")
    out = []
    for para in paragraphs:
        para = para.replace("\n", " ")
        if para == "" or para.startswith("#"):
            continue

        line = {
            "speaker": "narrator",
            "line": para
        }
        out.append(line)

    jsonschema.validate(out, transcript_schema)
    if output_file is None:
        print(json.dumps(out, indent=4))
    else:
        with open(output_file, 'w') as f:
            f.write(json.dumps(out, indent=4))
    return
Example #6
    def test_method_security_headers(self, app):
        api = restplus.Api(app, authorizations={
            'apikey': {
                'type': 'apiKey',
                'in': 'header',
                'name': 'X-API'
            }
        })

        @api.route('/secure/')
        class Secure(restplus.Resource):
            @api.doc('secure', security='apikey')
            def get(self):
                pass

        @api.route('/unsecure/')
        class Unsecure(restplus.Resource):
            @api.doc('unsecure')
            def get(self):
                pass

        data = api.as_postman()

        validate(data, schema)
        requests = dict((r['name'], r['headers']) for r in data['requests'])

        assert requests['unsecure'] == ''
        assert requests['secure'] == 'X-API:'
Example #7
    def test_export_with_namespace(self, app):
        api = restplus.Api(app)
        ns = api.namespace('test', 'A test namespace')

        @ns.route('/test')
        class Test(restplus.Resource):
            @api.doc('test_post')
            def post(self):
                '''A test post'''
                pass

        data = api.as_postman()

        validate(data, schema)

        assert len(data['requests']) == 1
        request = data['requests'][0]
        assert request['name'] == 'test_post'
        assert request['description'] == 'A test post'

        assert len(data['folders']) == 1
        folder = data['folders'][0]
        assert folder['name'] == 'test'
        assert folder['description'] == 'A test namespace'

        assert request['folder'] == folder['id']
Example #8
 def test_minumum_fail(self):
   
   #Test number
   data1 = {
     "prop01": 9,
     "prop02": 21
   }
   
   #Test integer
   data2 = {
     "prop01": 10,
     "prop02": 19
   }
   
   try:
     jsonschema.validate(data1, self.schema)
   except ValueError:
     pass
   else:
      self.fail("Expected failure for %s" % repr(data1))
   
   try:
     jsonschema.validate(data2, self.schema)
   except ValueError:
     pass
   else:
      self.fail("Expected failure for %s" % repr(data2))
Example #9
    def test_url_variables_enabled(self, app):
        api = restplus.Api(app)

        parser = api.parser()
        parser.add_argument('int', type=int)
        parser.add_argument('default', type=int, default=5)
        parser.add_argument('str', type=str)

        @api.route('/test/')
        class Test(restplus.Resource):
            @api.expect(parser)
            def get(self):
                pass

        data = api.as_postman(urlvars=True)

        validate(data, schema)

        assert len(data['requests']) == 1
        request = data['requests'][0]
        qs = parse_qs(urlparse(request['url']).query, keep_blank_values=True)

        assert 'int' in qs
        assert qs['int'][0] == '0'

        assert 'default' in qs
        assert qs['default'][0] == '5'

        assert 'str' in qs
        assert qs['str'][0] == ''
Example #10
def posts_put():
    """  Endpoint to post blog posts """

    # Construct the request data object
    data = request.json
    
    # First validate data object is valid JSON
    # If not valid return 422 error
    try:
        validate(data, post_schema)
    except ValidationError as error:
        data = {"message": error.message}
        return Response(json.dumps(data), 422, mimetype="application/json")

    # Post data object to database
    post = models.Post(title=data["title"], body=data["body"])
    session.add(post)
    session.commit()

    # Response to client of successful post
    data = json.dumps(post.as_dictionary())
    headers = {"Location": url_for("post_get", id=post.id)}
    return Response(data, 201, headers=headers,
                    mimetype="application/json")
Example #11
    def _run(self):
        for log_type in [ 'file:logging' , 'syslog:logging' ]:
            if log_type in self._config.sections():
                try:
                    jsonschema.validate(self._config_dict[log_type], CHECK_CONFIG_LOGGING[log_type])
                except jsonschema.exceptions.ValidationError as err:
                    self.log.fatal("invalid {0} section: {1}".format(log_type, err))
                    sys.exit(1)
                self._app_logging()

        self.log.info("starting up")
        for section in self._config_dict.keys():
            if section.endswith(':source'):
                self._worker_start(section)
        while not self._terminate:
            self._process_message()
        self.log.info("shutting down worker threads")
        for _worker in self._worker.keys():
            self._worker_stop(_worker)
        self.log.info("shutdown signal send to worker threads")
        self.log.info("waiting for worker threads to terminate")
        for _worker in self._worker.keys():
            self._worker_join(_worker)
        self.log.info("all worker threads gone")
        self.log.info("cleanup up message queue")
        while True:
            if not self._process_message():
                break
        self.log.info("successfully shutdown")
Example #12
def validate_schema(schema_to_validate_path):
    with open(DRAFT_SCHEMA_PATH, 'r') as f:
        draft_schema = json.loads(f.read())
    with open(schema_to_validate_path, 'r') as f:
        schema = json.loads(f.read())
    validate(schema, draft_schema)
    print "SUCCESS: Schema is valid against JSON Schema Draft 4."
Example #13
def songs_post():
    """ Add a new post """
    data = request.json
    
    # Try to validate data
    try:
        validate(data, song_schema)
    except ValidationError as error:
        data = {"message": error.message}
        return Response(json.dumps(data), 422, mimetype="application/json")
    # Check that the file exists in the database
    fileInDB = session.query(models.File).get(data["file"]["id"])

    if not fileInDB:
        data = {"message": "Cannot find the file in database"}
        return Response(json.dumps(data), 404, mimetype="application/json")
        
    # Add the song to the database
    song = models.Song(file=fileInDB)
    session.add(song)
    session.commit()

    # Return a 201 Created, containing the song as JSON and with the
    # Location header set to the location of the song
    data = json.dumps(song.as_dictionary())
    headers = {"Location": url_for("songs_get")}
    return Response(data, 201, headers=headers,
                    mimetype="application/json")
Example #14
 def validate(self):
     format_check = jsonschema.FormatChecker()
     try:
         jsonschema.validate(
             self._snapcraft, self._schema, format_checker=format_check)
     except jsonschema.ValidationError as e:
         raise errors.SnapcraftSchemaError.from_validation_error(e)
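As an aside, the format_checker argument is what makes "format" keywords enforceable at all; by default jsonschema ignores them. A minimal sketch using the built-in ipv4 check, which needs no extra dependencies (the schema and value are illustrative):

import jsonschema

schema = {"type": "string", "format": "ipv4"}
jsonschema.validate("999.999.999.999", schema)  # passes: formats are ignored without a checker
try:
    jsonschema.validate("999.999.999.999", schema,
                        format_checker=jsonschema.FormatChecker())
except jsonschema.ValidationError as e:
    print(e.message)  # the format checker rejects the bogus address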
Example #15
def validate(obj, schema, format_checker=None, safe=True):
    """
    Validate target object with given schema object, loaded from JSON schema.

    See also: https://python-jsonschema.readthedocs.org/en/latest/validate/

    :param obj: Target object (a dict or a dict-like object) to validate
    :param schema: Schema object (a dict or a dict-like object)
        instantiated from schema JSON file or schema JSON string
    :param format_checker: A format checker object whose class inherits
        from jsonschema.FormatChecker; a default one is created if None is
        given.
    :param safe: If False, the exception (jsonschema.ValidationError or
        jsonschema.SchemaError) is re-raised when a validation error occurs;
        if True, the error is returned as part of the result tuple.

    :return: (True if validation succeeded else False, error message)
    """
    try:
        if format_checker is None:
            format_checker = jsonschema.FormatChecker()  # :raises: NameError
        try:
            jsonschema.validate(obj, schema, format_checker=format_checker)
            return (True, '')
        except (jsonschema.ValidationError, jsonschema.SchemaError,
                Exception) as exc:
            if safe:
                return (False, str(exc))
            else:
                raise

    except NameError:
        return (True, "Validation module (jsonschema) is not available")

    return (True, '')
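A possible call site for this wrapper, assuming the function above is in scope as validate (the schema and objects are illustrative):

schema = {"type": "object", "required": ["name"]}
ok, err = validate({"name": "alpha"}, schema)  # -> (True, '')
ok, err = validate({}, schema)                 # -> (False, "'name' is a required property ...")
if not ok:
    print("validation failed:", err)
# with safe=False the jsonschema exception propagates instead of being returned:
# validate({}, schema, safe=False)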
Example #16
    def _validate_schema(self, body, type='image'):
        if type in ['image', 'images']:
            resp, schema = self.get_schema(type)
        else:
            raise ValueError("%s is not a valid schema type" % type)

        jsonschema.validate(body, schema)
Example #17
def validateExamples(examples, schemas, maxExamples, shuffle):
  failures = []
  numberOfSuccessfulValidations = 0
  unchecked = []

  numChecked = 0
  latestReportTime = time.perf_counter()
  
  if shuffle:
    random.shuffle(examples)
  for path, type, version, id, json in examples:
    schemaKey = type + "-" + version
    if schemaKey in schemas:
      try:
        validate(json, schemas[schemaKey])
        numberOfSuccessfulValidations += 1
      except Exception as e:
        failures.append((path, type, id, e))
    else:
      unchecked.append((path, type, id, json))
    
    numChecked += 1
    
    if maxExamples > 0 and numChecked >= maxExamples:
      print("Reached limit of " + str(maxExamples) + " examples to validate. Breaking.")
      break
      
    if time.perf_counter() - latestReportTime > 5:
      print("Checked " + str(numChecked) + " / " + str(len(examples)) + " examples.", flush=True)
      latestReportTime = time.perf_counter()
      
  return failures, unchecked, numberOfSuccessfulValidations
Example #18
    def _validate_change(self, change):
        change_path = change['path'][0]
        change_op = change['op']
        allowed_methods = self.allowed_operations.get(change_path)

        if not allowed_methods:
            msg = _("Attribute '{0}' is invalid").format(change_path)
            raise webob.exc.HTTPForbidden(explanation=six.text_type(msg))

        if change_op not in allowed_methods:
            msg = _("Method '{method}' is not allowed for a path with name "
                    "'{name}'. Allowed operations are: "
                    "'{ops}'").format(method=change_op,
                                      name=change_path,
                                      ops=', '.join(allowed_methods))

            raise webob.exc.HTTPForbidden(explanation=six.text_type(msg))

        property_to_update = {change_path: change['value']}

        try:
            jsonschema.validate(property_to_update, schemas.PKG_UPDATE_SCHEMA)
        except jsonschema.ValidationError as e:
            LOG.error(_LE("Schema validation error occured: {error}")
                      .format(error=e))
            raise webob.exc.HTTPBadRequest(explanation=e.message)
Example #19
	def schema_validator(self, data, schema_path):
		import jsonschema
		schema = self.load_json(schema_path)
		try:
			jsonschema.validate(data, schema)
		except jsonschema.ValidationError as e:
			self.validation_error(e.message, e.path)
Example #20
def _check_topology_schema(topo):
    try:
        jsonschema.validate(topo, TOPOLOGY_SCHEMA)

        # Check the nodes property against compute schemas
        for node in topo["topology"].get("nodes", []):
            schema = None
            if node["node_type"] == "dynamips":
                schema = copy.deepcopy(dynamips_vm.VM_CREATE_SCHEMA)

            if schema:
                # Properties sent to the compute but stored in another place in the topology
                delete_properties = ["name", "node_id"]
                for prop in delete_properties:
                    del schema["properties"][prop]
                schema["required"] = [p for p in schema["required"] if p not in delete_properties]

                jsonschema.validate(node.get("properties", {}), schema)

    except jsonschema.ValidationError as e:
        error = "Invalid data in topology file: {} in schema: {}".format(
            e.message,
            json.dumps(e.schema))
        log.critical(error)
        raise aiohttp.web.HTTPConflict(text=error)
Example #21
 def test_networks_schema(self):
     checker = jsonschema.FormatChecker()
     release_data = self.env.create_release()
     jsonschema.validate(
         release_data['networks_metadata'],
         release.NETWORKS_SCHEMA,
         format_checker=checker)
Example #22
 def test_ldap_validation_fail_if_permissions_not_enum(self):
     self.mfs.add_entries({jenkins_yaml_path: '\n'.join(
         [
           'ldap:',
           '  server: ldap://mirantis.com:3268',
           '  root_bind:',
           '    dn: dc=mirantis,dc=com',
           '    allow_blank: false',
           '  search:',
           '    user_filter: userPrincipalName={0}',
           '  manager:',
           '    name: [email protected]',
           '    password: passwd',
           '  access:',
           '  - name: test',
           '    permissions:',
           '    - test',
           '    - credentials',
           '    - gerrit',
           'cli_user:',
           '  name: jenkins-manager',
           '  public_key: sssh-rsa AAAAB3NzaC'
         ])
     })
     repo_data = yaml_reader.read(jenkins_yaml_path)
     with pytest.raises(jsonschema.ValidationError) as excinfo:
         jsonschema.validate(repo_data, self.schema)
     assert excinfo.value.message == "'test' is not one of " \
                                     "['overall', 'credentials', 'gerrit', 'manage-ownership', " \
                                     "'slave', 'job', 'run', 'view', 'scm']"
Example #23
 def test_valid_oneof_ldap_data(self):
     self.mfs.add_entries({jenkins_yaml_path: '\n'.join(
         [
           'ldap:',
           '  server: ldap://mirantis.com:3268',
           '  root_bind:',
           '    dn: dc=mirantis,dc=com',
           '    allow_blank: false',
           '  search:',
           '    user_filter: userPrincipalName={0}',
           '  manager:',
           '    name: [email protected]',
           '    password: passwd',
           '  access:',
           '  - name: amihura',
           '    permissions:',
           '    - overall',
           '    - credentials',
           '    - gerrit',
           'cli_user:',
           '  name: jenkins-manager',
           '  public_key: sssh-rsa AAAAB3NzaC'
         ])
     })
     repo_data = yaml_reader.read(jenkins_yaml_path)
     jsonschema.validate(repo_data, self.schema)
Example #24
def test_measurement_list():
    metrics = mon_client("mini-mon",
                         "password",
                         auth_url,
                         endpoint).metrics

    start = datetime.datetime.now()
    ago = datetime.timedelta(minutes=2)
    start = start - ago

    result = metrics.list_measurements(name="cpu.idle_perc",
                                       start_time=start.isoformat(),
                                       merge_metrics=True)

    schema = {'type': 'object',
              'properties': {
                  'dimensions': {'type': 'object'},
                  'measurements': {'type': 'array',
                                   'items': {'type': 'array',
                                             'items': [{'type': 'string'},
                                                       {'type': 'number'},
                                                       {'type': 'object'}]}},
                  'columns': {'type': 'array',
                              'items': [{'type': 'string'},
                                        {'type': 'string'},
                                        {'type': 'string'}]},
                  'name': {'type': 'string'}}}

    for measure in result:
        jsonschema.validate(measure, schema)
Example #25
def validate_schema(instance, schema_url):
    """ This function tests the graph with a JSON schema
    """
    schema = requests.get(schema_url).json()

    # validate() does not return a value if valid, thus, not keeping track of it.
    validate(instance=instance, schema=schema, format_checker=FormatChecker())
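To spell out the comment above with a throwaway schema: validate() returns None when the instance is valid and raises ValidationError otherwise, so callers key off the exception rather than a return value.

from jsonschema import validate, ValidationError

assert validate(3, {"type": "integer"}) is None  # success: nothing is returned
try:
    validate("three", {"type": "integer"})
except ValidationError as err:
    print(err.message)  # 'three' is not of type 'integer'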
Example #26
def test_impact_with_object_and_application_period_validation():
    json = {"severity": {"id": "7ffab232-3d48-4eea-aa2c-22f8680230b6"},
            "application_periods": [{"begin": "2014-06-20T17:00:00Z","end":"2014-07-28T17:00:00Z"}],
            "objects": [{"id": "stop_area:RTP:SA:3786125","type": "network"},{"id": "network:RTP:LI:378","type": "network"},
                        {"id": "line:RTP:LI:378","type": "line"}]
            }
    validate(json, formats.impact_input_format)
Example #27
def test_alarm_history():
    alarms = mon_client("mini-mon",
                        "password",
                        auth_url,
                        endpoint).alarms

    result = alarms.history_list()

    id = result[0]['alarm_id']

    result = alarms.history(alarm_id=id)

    schema = {'type': 'object',
              'properties': {
                  'new_state': {'type': 'string'},
                  'timestamp': {'type': 'string'},
                  'metrics': {'type': 'array',
                              'items': [{'type': 'object',
                                         'properties': {
                                             'dimensions': {'type': 'object'},
                                             'id': {'type': 'null'},
                                             'name': {'type': 'string'}}}]},
                  'alarm_id': {'type': 'string'},
                  'reason': {'type': 'string'},
                  'reason_data': {'type': 'string'},
                  'sub_alarms': {'type': 'array'},
                  'old_state': {'type': 'string'},
                  'id': {'type': 'string'}}}

    for alarm in result:
        jsonschema.validate(alarm, schema)
Example #28
 def test_ldap_validation_fail_if_access_name_is_not_string(self):
     self.mfs.add_entries({jenkins_yaml_path: '\n'.join(
         [
           'ldap:',
           '  server: ldap://mirantis.com:3268',
           '  root_bind:',
           '    dn: dc=mirantis,dc=com',
           '    allow_blank: false',
           '  search:',
           '    user_filter: userPrincipalName={0}',
           '  manager:',
           '    name: [email protected]',
           '    password: passwd',
           '  access:',
           '  - name: 123',
           '    permissions:',
           '    - overall',
           '    - credentials',
           '    - gerrit',
           'cli_user:',
           '  name: jenkins-manager',
           '  public_key: ssh-rsa AAB'
         ])
     })
     repo_data = yaml_reader.read(jenkins_yaml_path)
     with pytest.raises(jsonschema.ValidationError) as excinfo:
         jsonschema.validate(repo_data, self.schema)
     assert excinfo.value.message == "123 is not of type 'string'"
Example #29
    def test_parameter_schema(self):
        runnertype = self._create_save_runnertype(metadata=True)
        saved = self._create_save_action(runnertype, metadata=True)
        retrieved = Action.get_by_id(saved.id)

        # validate generated schema
        schema = util_schema.get_schema_for_action_parameters(retrieved)
        self.assertDictEqual(schema, PARAM_SCHEMA)
        validator = util_schema.get_validator()
        validator.check_schema(schema)

        # use schema to validate parameters
        jsonschema.validate({"r2": "abc", "p1": "def"}, schema, validator)
        jsonschema.validate({"r2": "abc", "p1": "def", "r1": {"r1a": "ghi"}}, schema, validator)
        self.assertRaises(jsonschema.ValidationError, jsonschema.validate,
                          '{"r2": "abc", "p1": "def"}', schema, validator)
        self.assertRaises(jsonschema.ValidationError, jsonschema.validate,
                          {"r2": "abc"}, schema, validator)
        self.assertRaises(jsonschema.ValidationError, jsonschema.validate,
                          {"r2": "abc", "p1": "def", "r1": 123}, schema, validator)

        # cleanup
        self._delete([retrieved])
        try:
            retrieved = Action.get_by_id(saved.id)
        except ValueError:
            retrieved = None
        self.assertIsNone(retrieved, 'managed to retrieve after failure.')
Example #30
def check_appliance(appliance):
    global images
    images = set()
    global md5sums
    md5sums = set()

    with open('schemas/appliance.json') as f:
        schema = json.load(f)

    with open(os.path.join('appliances', appliance)) as f:
        appliance_json = json.load(f)
        jsonschema.validate(appliance_json, schema)

    if 'images' in appliance_json:
        for image in appliance_json['images']:
            if image['filename'] in images:
                print('Duplicate image filename ' + image['filename'])
                sys.exit(1)
            if image['md5sum'] in md5sums:
                print('Duplicate image md5sum ' + image['md5sum'])
                sys.exit(1)
            images.add(image['filename'])
            md5sums.add(image['md5sum'])

        for version in appliance_json['versions']:
            for image in version['images'].values():
                found = False
                for i in appliance_json['images']:
                    if i['filename'] in image:
                        found = True

                if not found:
                    print('Missing relation ' + image + ' in ' + appliance)
                    sys.exit(1)
Example #31
 def test_get_node_link_data_output(self):
     subgroup = choice(['Global', 'Syria', 'Iraq'])
     year = randint(2009, 2016)
     output = self.gssn_qpu.get_node_link_data(subgroup, year)
     jsonschema.validate(output, json_schema)
Example #32
def test_generate_json_for_encoding(app):
    """Test if the JSON generated for Sorenson has a correct structure.

    Wrong JSON won't return any meaningful errors, so it's very hard to debug.
    """
    sorenson_schema = {
      "$schema": "http://json-schema.org/draft-04/schema#",
      "title": "Schema for Sorenson encoding.",
      "type": "object",
      "properties": {
        "QueueId": {
          "type": "string"
        },
        "JobMediaInfo": {
          "type": "object",
          "properties": {
            "CompressionPresetList": {
              "type": "array",
              "items": [
                {
                  "type": "object",
                  "properties": {
                    "PresetId": {
                      "type": "string"
                    }
                  },
                  "required": [
                    "PresetId"
                  ]
                }
              ]
            },
            "DestinationList": {
              "type": "array",
              "items": [
                {
                  "type": "object",
                  "properties": {
                    "FileUri": {
                      "type": "string"
                    }
                  },
                  "required": [
                    "FileUri"
                  ]
                }
              ]
            },
            "SourceMediaList": {
              "type": "array",
              "items": [
                {
                  "type": "object",
                  "properties": {
                    "Password": {
                      "type": "string"
                    },
                    "UserName": {
                      "type": "string"
                    },
                    "FileUri": {
                      "type": "string"
                    }
                  },
                  "required": [
                    "Password",
                    "UserName",
                    "FileUri"
                  ]
                }
              ]
            }
          },
          "required": [
            "CompressionPresetList",
            "DestinationList",
            "SourceMediaList"
          ]
        },
        "Name": {
          "type": "string"
        }
      },
      "required": [
        "QueueId",
        "JobMediaInfo",
        "Name"
      ]
    }

    expected_output = {
        'QueueId': '064153dd-ade2-4824-8458-88e6ea03d395',
        'JobMediaInfo': {
            'CompressionPresetList': [{
                'PresetId': 'dc2187a3-8f64-4e73-b458-7370a88d92d7'
            }],
            'DestinationList': [{
                'FileUri': '/tmp/test_output_file.mp4'
            }],
            'SourceMediaList': [{
                'Password': '',
                'UserName': '',
                'FileUri': '/tmp/test_input_file.mp4'
            }]},
        'Name': 'CDS File:/tmp/test_input_file.mp4 ' +
                'Preset:dc2187a3-8f64-4e73-b458-7370a88d92d7'
    }
    output = generate_json_for_encoding('/tmp/test_input_file.mp4',
                                        '/tmp/test_output_file.mp4',
                                        'dc2187a3-8f64-4e73-b458-7370a88d92d7')

    validate(output, sorenson_schema)
    assert output == expected_output
Example #33
def test_get_books():
    response = requests.get('http://localhost:8000/books')

    assert response.status_code == 200
    jsonschema.validate(response.json(), schema_books)
Example #34
def test_get_book():
    response = requests.get(
        'http://localhost:8000/books/8b91b84b-04e4-4496-9635-66468c2f3e41')

    assert response.status_code == 200
    jsonschema.validate(response.json(), schema_book)
Example #35
 def validate(self, json):
     print('SchemaValidator validate')
     return validate(json, self.schema)
Example #36
def assert_valid_schema(data, schema_file):
    """ Checks whether the given data matches the schema """

    schema = _load_json_schema(schema_file)
    return validate(data, schema)
Example #37
def test_push_mirror_contents(tmpdir, mutable_mock_env_path, env_deactivate,
                              install_mockery, mock_packages, mock_fetch,
                              mock_stage, mock_gnupghome):
    working_dir = tmpdir.join('working_dir')

    mirror_dir = working_dir.join('mirror')
    mirror_url = 'file://{0}'.format(mirror_dir.strpath)

    signing_key_dir = spack_paths.mock_gpg_keys_path
    signing_key_path = os.path.join(signing_key_dir, 'package-signing-key')
    with open(signing_key_path) as fd:
        signing_key = fd.read()

    ci.import_signing_key(signing_key)

    spack_yaml_contents = """
spack:
 definitions:
   - packages: [patchelf]
 specs:
   - $packages
 mirrors:
   test-mirror: {0}
""".format(mirror_url)

    print('spack.yaml:\n{0}\n'.format(spack_yaml_contents))

    filename = str(tmpdir.join('spack.yaml'))
    with open(filename, 'w') as f:
        f.write(spack_yaml_contents)

    with tmpdir.as_cwd():
        env_cmd('create', 'test', './spack.yaml')
        with ev.read('test') as env:
            spec_map = ci.get_concrete_specs(
                'patchelf', 'patchelf', '', 'FIND_ANY')
            concrete_spec = spec_map['patchelf']
            spec_yaml = concrete_spec.to_yaml(hash=ht.build_hash)
            yaml_path = str(tmpdir.join('spec.yaml'))
            with open(yaml_path, 'w') as ypfd:
                ypfd.write(spec_yaml)

            install_cmd('--keep-stage', yaml_path)

            # env, spec, yaml_path, mirror_url, build_id
            ci.push_mirror_contents(
                env, concrete_spec, yaml_path, mirror_url, '42')

            buildcache_path = os.path.join(mirror_dir.strpath, 'build_cache')

            # Test generating buildcache index while we have bin mirror
            buildcache_cmd('update-index', '--mirror-url', mirror_url)
            index_path = os.path.join(buildcache_path, 'index.json')
            with open(index_path) as idx_fd:
                index_object = json.load(idx_fd)
                validate(index_object, db_idx_schema)

            # Now that index is regenerated, validate "buildcache list" output
            buildcache_list_output = buildcache_cmd('list', output=str)
            assert('patchelf' in buildcache_list_output)

            # Also test buildcache_spec schema
            bc_files_list = os.listdir(buildcache_path)
            for file_name in bc_files_list:
                if file_name.endswith('.spec.yaml'):
                    spec_yaml_path = os.path.join(buildcache_path, file_name)
                    with open(spec_yaml_path) as yaml_fd:
                        yaml_object = syaml.load(yaml_fd)
                        validate(yaml_object, spec_yaml_schema)

            logs_dir = working_dir.join('logs_dir')
            if not os.path.exists(logs_dir.strpath):
                os.makedirs(logs_dir.strpath)

            ci.copy_stage_logs_to_artifacts(concrete_spec, logs_dir.strpath)

            logs_dir_list = os.listdir(logs_dir.strpath)

            assert('spack-build-out.txt' in logs_dir_list)

            # Also just make sure that if something goes wrong with the
            # stage logs copy, no exception is thrown
            ci.copy_stage_logs_to_artifacts(None, logs_dir.strpath)

            dl_dir = working_dir.join('download_dir')
            if not os.path.exists(dl_dir.strpath):
                os.makedirs(dl_dir.strpath)

            buildcache_cmd('download', '--spec-yaml', yaml_path, '--path',
                           dl_dir.strpath, '--require-cdashid')

            dl_dir_list = os.listdir(dl_dir.strpath)

            assert(len(dl_dir_list) == 3)
Example #38
 def validate_form(form, schema_file_name):
     with open(f'./json_schema/{schema_file_name}') as schema_file:
         schema = json.load(schema_file)
         validate(instance=form, schema=schema)
Example #39
def ocs_install_verification(
    timeout=600, skip_osd_distribution_check=False, ocs_registry_image=None,
    post_upgrade_verification=False,
):
    """
    Perform steps necessary to verify a successful OCS installation

    Args:
        timeout (int): Number of seconds for timeout which will be used in the
            checks used in this function.
        skip_osd_distribution_check (bool): If True, skip the check for OSD
            distribution.
        ocs_registry_image (str): Specific image to check if it was installed
            properly.
        post_upgrade_verification (bool): Set to True if this function is
            called after upgrade.

    """
    from ocs_ci.ocs.node import get_typed_nodes
    from ocs_ci.ocs.resources.pvc import get_deviceset_pvcs
    from ocs_ci.ocs.resources.pod import get_ceph_tools_pod, get_all_pods
    number_of_worker_nodes = len(get_typed_nodes())
    namespace = config.ENV_DATA['cluster_namespace']
    log.info("Verifying OCS installation")

    # Verify OCS CSV is in Succeeded phase
    log.info("verifying ocs csv")
    operator_selector = get_selector_for_ocs_operator()
    ocs_package_manifest = PackageManifest(
        resource_name=defaults.OCS_OPERATOR_NAME, selector=operator_selector,
    )
    channel = config.DEPLOYMENT.get('ocs_csv_channel')
    ocs_csv_name = ocs_package_manifest.get_current_csv(channel=channel)
    ocs_csv = CSV(
        resource_name=ocs_csv_name, namespace=namespace
    )
    log.info(f"Check if OCS operator: {ocs_csv_name} is in Succeeded phase.")
    ocs_csv.wait_for_phase(phase="Succeeded", timeout=timeout)
    # Verify if OCS CSV has proper version.
    csv_version = ocs_csv.data['spec']['version']
    ocs_version = config.ENV_DATA['ocs_version']
    log.info(
        f"Check if OCS version: {ocs_version} matches with CSV: {csv_version}"
    )
    assert ocs_version in csv_version, (
        f"OCS version: {ocs_version} mismatch with CSV version {csv_version}"
    )
    # Verify if OCS CSV has the same version in provided CI build.
    ocs_registry_image = ocs_registry_image or config.DEPLOYMENT.get(
        'ocs_registry_image'
    )
    if ocs_registry_image and ocs_registry_image.endswith(".ci"):
        ocs_registry_image = ocs_registry_image.split(":")[1]
        log.info(
            f"Check if OCS registry image: {ocs_registry_image} matches with "
            f"CSV: {csv_version}"
        )
        assert ocs_registry_image in csv_version, (
            f"OCS registry image version: {ocs_registry_image} mismatch with "
            f"CSV version {csv_version}"
        )

    # Verify OCS Cluster Service (ocs-storagecluster) is Ready
    storage_cluster_name = config.ENV_DATA['storage_cluster_name']
    log.info("Verifying status of storage cluster: %s", storage_cluster_name)
    storage_cluster = StorageCluster(
        resource_name=storage_cluster_name,
        namespace=namespace,
    )
    log.info(
        f"Check if StorageCluster: {storage_cluster_name} is in "
        f"Ready phase"
    )
    storage_cluster.wait_for_phase(phase='Ready', timeout=timeout)

    # Verify pods in running state and proper counts
    log.info("Verifying pod states and counts")
    pod = OCP(
        kind=constants.POD, namespace=namespace
    )
    # ocs-operator
    assert pod.wait_for_resource(
        condition=constants.STATUS_RUNNING,
        selector=constants.OCS_OPERATOR_LABEL,
        timeout=timeout
    )
    # rook-ceph-operator
    assert pod.wait_for_resource(
        condition=constants.STATUS_RUNNING,
        selector=constants.OPERATOR_LABEL,
        timeout=timeout
    )
    # noobaa
    assert pod.wait_for_resource(
        condition=constants.STATUS_RUNNING,
        selector=constants.NOOBAA_APP_LABEL,
        resource_count=2,
        timeout=timeout
    )
    # mons
    assert pod.wait_for_resource(
        condition=constants.STATUS_RUNNING,
        selector=constants.MON_APP_LABEL,
        resource_count=3,
        timeout=timeout
    )
    # csi-cephfsplugin
    assert pod.wait_for_resource(
        condition=constants.STATUS_RUNNING,
        selector=constants.CSI_CEPHFSPLUGIN_LABEL,
        resource_count=number_of_worker_nodes,
        timeout=timeout
    )
    # csi-cephfsplugin-provisioner
    assert pod.wait_for_resource(
        condition=constants.STATUS_RUNNING,
        selector=constants.CSI_CEPHFSPLUGIN_PROVISIONER_LABEL,
        resource_count=2,
        timeout=timeout
    )
    # csi-rbdplugin
    assert pod.wait_for_resource(
        condition=constants.STATUS_RUNNING,
        selector=constants.CSI_RBDPLUGIN_LABEL,
        resource_count=number_of_worker_nodes,
        timeout=timeout
    )
    # csi-rbdplugin-provisioner
    assert pod.wait_for_resource(
        condition=constants.STATUS_RUNNING,
        selector=constants.CSI_RBDPLUGIN_PROVISIONER_LABEL,
        resource_count=2,
        timeout=timeout
    )
    # osds
    osd_count = (
        int(storage_cluster.data['spec']['storageDeviceSets'][0]['count'])
        * int(storage_cluster.data['spec']['storageDeviceSets'][0]['replica'])
    )
    assert pod.wait_for_resource(
        condition=constants.STATUS_RUNNING,
        selector=constants.OSD_APP_LABEL,
        resource_count=osd_count,
        timeout=timeout
    )
    # mgr
    assert pod.wait_for_resource(
        condition=constants.STATUS_RUNNING,
        selector=constants.MGR_APP_LABEL,
        timeout=timeout
    )
    # mds
    assert pod.wait_for_resource(
        condition=constants.STATUS_RUNNING,
        selector=constants.MDS_APP_LABEL,
        resource_count=2,
        timeout=timeout
    )

    # rgw check only for VMware
    if config.ENV_DATA.get('platform') == constants.VSPHERE_PLATFORM:
        assert pod.wait_for_resource(
            condition=constants.STATUS_RUNNING,
            selector=constants.RGW_APP_LABEL,
            resource_count=1,
            timeout=timeout
        )

    # Verify ceph health
    log.info("Verifying ceph health")
    health_check_tries = 20
    health_check_delay = 30
    if post_upgrade_verification:
        # In case of upgrade with FIO we have to wait longer time to see
        # health OK. See discussion in BZ:
        # https://bugzilla.redhat.com/show_bug.cgi?id=1817727
        health_check_tries = 60
    assert utils.ceph_health_check(
        namespace, health_check_tries, health_check_delay
    )

    # Verify StorageClasses (1 ceph-fs, 1 ceph-rbd)
    log.info("Verifying storage classes")
    storage_class = OCP(
        kind=constants.STORAGECLASS, namespace=namespace
    )
    storage_cluster_name = config.ENV_DATA['storage_cluster_name']
    required_storage_classes = {
        f'{storage_cluster_name}-cephfs',
        f'{storage_cluster_name}-ceph-rbd'
    }
    storage_classes = storage_class.get()
    storage_class_names = {
        item['metadata']['name'] for item in storage_classes['items']
    }
    assert required_storage_classes.issubset(storage_class_names)

    # Verify OSDs are distributed
    if not skip_osd_distribution_check:
        log.info("Verifying OSDs are distributed evenly across worker nodes")
        ocp_pod_obj = OCP(kind=constants.POD, namespace=namespace)
        osds = ocp_pod_obj.get(selector=constants.OSD_APP_LABEL)['items']
        deviceset_count = get_deviceset_count()
        node_names = [osd['spec']['nodeName'] for osd in osds]
        for node in node_names:
            assert not node_names.count(node) > deviceset_count, (
                "OSD's are not distributed evenly across worker nodes"
            )

    # Verify that CSI driver object contains provisioner names
    log.info("Verifying CSI driver object contains provisioner names.")
    csi_driver = OCP(kind="CSIDriver")
    assert {defaults.CEPHFS_PROVISIONER, defaults.RBD_PROVISIONER} == (
        {item['metadata']['name'] for item in csi_driver.get()['items']}
    )

    # Verify node and provisioner secret names in storage class
    log.info("Verifying node and provisioner secret names in storage class.")
    sc_rbd = storage_class.get(
        resource_name=constants.DEFAULT_STORAGECLASS_RBD
    )
    sc_cephfs = storage_class.get(
        resource_name=constants.DEFAULT_STORAGECLASS_CEPHFS
    )
    assert sc_rbd['parameters']['csi.storage.k8s.io/node-stage-secret-name'] == constants.RBD_NODE_SECRET
    assert sc_rbd['parameters']['csi.storage.k8s.io/provisioner-secret-name'] == constants.RBD_PROVISIONER_SECRET
    assert sc_cephfs['parameters']['csi.storage.k8s.io/node-stage-secret-name'] == constants.CEPHFS_NODE_SECRET
    assert sc_cephfs['parameters']['csi.storage.k8s.io/provisioner-secret-name'] == constants.CEPHFS_PROVISIONER_SECRET
    log.info("Verified node and provisioner secret names in storage class.")

    # Verify ceph osd tree output
    log.info(
        "Verifying ceph osd tree output and checking for device set PVC names "
        "in the output."
    )
    deviceset_pvcs = [pvc.name for pvc in get_deviceset_pvcs()]
    ct_pod = get_ceph_tools_pod()
    osd_tree = ct_pod.exec_ceph_cmd(ceph_cmd='ceph osd tree', format='json')
    schemas = {
        'root': constants.OSD_TREE_ROOT,
        'rack': constants.OSD_TREE_RACK,
        'host': constants.OSD_TREE_HOST,
        'osd': constants.OSD_TREE_OSD,
        'region': constants.OSD_TREE_REGION,
        'zone': constants.OSD_TREE_ZONE
    }
    schemas['host']['properties']['name'] = {'enum': deviceset_pvcs}
    for item in osd_tree['nodes']:
        validate(instance=item, schema=schemas[item['type']])
        if item['type'] == 'host':
            deviceset_pvcs.remove(item['name'])
    assert not deviceset_pvcs, (
        f"These device set PVCs are not given in ceph osd tree output "
        f"- {deviceset_pvcs}"
    )
    log.info(
        "Verified ceph osd tree output. Device set PVC names are given in the "
        "output."
    )

    # TODO: Verify ceph osd tree output have osd listed as ssd
    # TODO: Verify ceph osd tree output have zone or rack based on AZ

    # Verify CSI snapshotter sidecar container is not present
    log.info("Verifying CSI snapshotter is not present.")
    provisioner_pods = get_all_pods(
        namespace=defaults.ROOK_CLUSTER_NAMESPACE,
        selector=[
            constants.CSI_CEPHFSPLUGIN_PROVISIONER_LABEL,
            constants.CSI_RBDPLUGIN_PROVISIONER_LABEL
        ]
    )
    for pod_obj in provisioner_pods:
        pod_info = pod_obj.get()
        for container, image in get_images(data=pod_info).items():
            assert ('snapshot' not in container) and ('snapshot' not in image), (
                f"Snapshot container is present in {pod_obj.name} pod. "
                f"Container {container}. Image {image}"
            )
    deployments = ocs_csv.get()['spec']['install']['spec']['deployments']
    rook_ceph_operator_deployment = [
        deployment_val for deployment_val in deployments if deployment_val['name'] == 'rook-ceph-operator'
    ]
    assert {'name': 'CSI_ENABLE_SNAPSHOTTER', 'value': 'false'} in (
        rook_ceph_operator_deployment[0]['spec']['template']['spec']['containers'][0]['env']
    ), "CSI_ENABLE_SNAPSHOTTER value is not set to 'false'."
    log.info("Verified: CSI snapshotter is not present.")

    # Verify pool crush rule is with "type": "zone"
    if utils.get_az_count() == 3:
        log.info("Verifying pool crush rule is with type: zone")
        crush_dump = ct_pod.exec_ceph_cmd(
            ceph_cmd='ceph osd crush dump', format=''
        )
        pool_names = [
            constants.METADATA_POOL, constants.DEFAULT_BLOCKPOOL,
            constants.DATA_POOL
        ]
        crush_rules = [rule for rule in crush_dump['rules'] if rule['rule_name'] in pool_names]
        for crush_rule in crush_rules:
            assert [
                item for item in crush_rule['steps'] if item.get('type') == 'zone'
            ], f"{crush_rule['rule_name']} is not with type as zone"
        log.info("Verified - pool crush rule is with type: zone")
Example #40
def test_schema(tasks):
    checker = jsonschema.FormatChecker()
    jsonschema.validate(tasks, TASKS_SCHEMA, format_checker=checker)
Example #41
 def test_validate_json(self, load_schema, load_json):
     try:
         validate(instance=load_json, schema=load_schema)
     except Exception:
         pytest.fail("Invalid JSON")
Example #42
import requests
from jsonschema import validate, ValidationError
from DictToObj import DictToObj

# getting the schema for /people endpoint from swapi
schema_url = "https://swapi.dev/api/people/schema"
schema = requests.get(schema_url).json()

# getting the actual data for a specific person
luke_url = "https://swapi.dev/api/people/1"
Luke = requests.get(luke_url).json()

try:
    validate(Luke, schema)
except ValidationError as err:
    raise err
else:
    Luke = DictToObj(Luke)
    print(Luke.name, Luke.eye_color)
Example #43
def prepare_db_info(request, test_info, executable, niter, manifest_metadata):
    """Fixture for preparing and validating data to submit to a database.

    Fixture prepares data and metadata to submit to a database. One of the steps
    is parsing of build information from build manifest. After preparation,
    it checks if data contains required properties.
    """
    FIELDS_FOR_ID = ['run_id', 'timetest', 'model', 'device', 'niter']

    run_id = request.config.getoption("db_submit")
    if not run_id:
        yield
        return

    # add db_metadata
    db_meta_path = request.config.getoption("db_metadata")
    if db_meta_path:
        with open(db_meta_path, "r") as db_meta_f:
            test_info["db_info"].update(json.load(db_meta_f))

    # add test info
    info = {
        # results will be added immediately before uploading to DB in `pytest_runtest_makereport`
        "run_id": run_id,
        "timetest": str(executable.stem),
        "model": request.node.funcargs["instance"]["model"],
        "device": request.node.funcargs["instance"]["device"],
        "niter": niter,
        "test_name": request.node.name,
        "os":
        "_".join([str(item) for item in [get_os_name(), *get_os_version()]])
    }
    info['_id'] = hashlib.sha256(''.join(
        [str(info[key]) for key in FIELDS_FOR_ID]).encode()).hexdigest()
    test_info["db_info"].update(info)

    # add manifest metadata
    test_info["db_info"].update(manifest_metadata)

    # validate db_info
    schema = """
    {
        "type": "object",
        "properties": {
            "device": {
                "type": "object",
                "properties": {
                    "name": {"type": "string"}
                },
                "required": ["name"]
            },
            "model": {
                "type": "object",
                "properties": {
                    "path": {"type": "string"},
                    "name": {"type": "string"},
                    "precision": {"type": "string"},
                    "framework": {"type": "string"}
                },
                "required": ["path", "name", "precision", "framework"]
            },
            "run_id": {"type": "string"},
            "timetest": {"type": "string"},
            "niter": {"type": "integer"},
            "test_name": {"type": "string"},
            "results": {"type": "object"},
            "os": {"type": "string"},
            "_id": {"type": "string"}
        },
        "required": ["device", "model", "run_id", "timetest", "niter", "test_name", "os", "_id"],
        "additionalProperties": true
    }
    """
    schema = json.loads(schema)

    try:
        validate(instance=test_info["db_info"], schema=schema)
    except ValidationError:
        request.config.option.db_submit = False
        raise
    yield
Example #44
else:
    data_file = "d5.json"
    schema_file = "schema1.json"

try:
    with open(data_file, "r") as read_file:
        data = json.load(read_file)
except Exception as err:
    print("Something went wrong with data_file \'", data_file, "\'", sep='')
    report(str(err))
    exit()

try:
    with open(schema_file, "r") as read_schema:
        schema = json.load(read_schema)
except Exception as err:
    print("Something went wrong with schema_file \'",
          schema_file,
          "\'",
          sep='')
    report(str(err))
    exit()

print("Validating the input data using JSON-schema:")
try:
    validate(instance=data, schema=schema)
    print("Correct data")
except jsonschema.exceptions.ValidationError as ve:
    print("ERROR!")
    report(str(ve))
Example #45
 def validate_output(self, process_output, spec_schema, process_name,
                     process_type):
     try:
         validate(process_output, spec_schema)
     except (exceptions.ValidationError, exceptions.SchemaError) as e:
         self.missing_outputs.append((f'{process_type}:{process_name}', e))
Example #46
 def compare(self, a, b):
     try:
         jsonschema.validate(a, b)
     except jsonschema.exceptions.ValidationError:
         raise django.core.exceptions.ValidationError(
             '%(value)s failed JSON schema check', params={'value': a})
Example #47
def validate(data, schema, set_default=True):
    """Validate data with JSON schema at given path.

    Args:
        data (object): data to validate. Can be a config dict or a pandas data frame.
        schema (str): Path to JSON schema used for validation. The schema can also be
            in YAML format. If validating a pandas data frame, the schema has to
            describe a row record (i.e., a dict with column names as keys pointing
            to row values). See http://json-schema.org. The path is interpreted
            relative to the Snakefile when this function is called.
        set_default (bool): set default values defined in schema. See
            http://python-jsonschema.readthedocs.io/en/latest/faq/ for more
            information
    """
    try:
        import jsonschema
        from jsonschema import validators, RefResolver
    except ImportError:
        raise WorkflowError(
            "The Python 3 package jsonschema must be installed "
            "in order to use the validate directive.")

    if not os.path.isabs(schema):
        frame = inspect.currentframe().f_back
        # if workflow object is not available this has not been started from a workflow
        if "workflow" in frame.f_globals:
            workflow = frame.f_globals["workflow"]
            schema = os.path.join(workflow.current_basedir, schema)

    schemafile = schema
    schema = _load_configfile(schema, filetype="Schema")
    resolver = RefResolver(
        urljoin("file:", schemafile),
        schema,
        handlers={
            "file": lambda uri: _load_configfile(re.sub("^file://", "", uri))
        },
    )

    # Taken from http://python-jsonschema.readthedocs.io/en/latest/faq/
    def extend_with_default(validator_class):
        validate_properties = validator_class.VALIDATORS["properties"]

        def set_defaults(validator, properties, instance, schema):
            for property, subschema in properties.items():
                if "default" in subschema:
                    instance.setdefault(property, subschema["default"])

            for error in validate_properties(validator, properties, instance,
                                             schema):
                yield error

        return validators.extend(validator_class, {"properties": set_defaults})

    Validator = validators.validator_for(schema)
    if Validator.META_SCHEMA["$schema"] != schema["$schema"]:
        logger.warning(
            "No validator found for JSON Schema version identifier '{}'".
            format(schema["$schema"]))
        logger.warning(
            "Defaulting to validator for JSON Schema version '{}'".format(
                Validator.META_SCHEMA["$schema"]))
        logger.warning("Note that schema file may not be validated correctly.")
    DefaultValidator = extend_with_default(Validator)

    if not isinstance(data, dict):
        try:
            import pandas as pd

            recordlist = []
            if isinstance(data, pd.DataFrame):
                for i, record in enumerate(data.to_dict("records")):
                    record = {
                        k: v
                        for k, v in record.items() if not pd.isnull(v)
                    }
                    try:
                        if set_default:
                            DefaultValidator(
                                schema, resolver=resolver).validate(record)
                            recordlist.append(record)
                        else:
                            jsonschema.validate(record,
                                                schema,
                                                resolver=resolver)
                    except jsonschema.exceptions.ValidationError as e:
                        raise WorkflowError(
                            "Error validating row {} of data frame.".format(i),
                            e)
                if set_default:
                    newdata = pd.DataFrame(recordlist, data.index)
                    newcol = ~newdata.columns.isin(data.columns)
                    n = len(data.columns)
                    for col in newdata.loc[:, newcol].columns:
                        data.insert(n, col, newdata.loc[:, col])
                        n = n + 1
                return
        except ImportError:
            pass
        raise WorkflowError("Unsupported data type for validation.")
    else:
        try:
            if set_default:
                DefaultValidator(schema, resolver=resolver).validate(data)
            else:
                jsonschema.validate(data, schema, resolver=resolver)
        except jsonschema.exceptions.ValidationError as e:
            raise WorkflowError("Error validating config file.", e)
    def expect_file_to_be_valid_json(
        self,
        schema=None,
        result_format=None,
        include_config=True,
        catch_exceptions=None,
        meta=None,
    ):
        """
        Args:
            schema (str or None):
                Optional path to a JSON schema file against which the JSON data file is validated; \
                if None, the file only needs to parse as valid JSON.

            result_format (str or None):
                Which output mode to use: `BOOLEAN_ONLY`, `BASIC`, `COMPLETE`, or `SUMMARY`. \
                For more detail, see :ref:`result_format <result_format>`.

            include_config (boolean):
                If True, then include the expectation config as part of the result object. \
                For more detail, see :ref:`include_config`.

            catch_exceptions (boolean or None):
                If True, then catch exceptions and include them as part of the result object. \
                For more detail, see :ref:`catch_exceptions`.

            meta (dict or None):
                A JSON-serializable dictionary (nesting allowed) that will \
                be included in the output without modification. \
                For more detail, see :ref:`meta`.

        Returns:
            A JSON-serializable expectation result object.

        Exact fields vary depending on the values passed to :ref:`result_format <result_format>` and \
        :ref:`include_config`, :ref:`catch_exceptions`, and :ref:`meta`.

        """
        if schema is None:
            try:
                with open(self._path) as f:
                    json.load(f)
                success = True
            except ValueError:
                success = False
        else:
            try:
                with open(schema) as s:
                    schema_data = s.read()
                sdata = json.loads(schema_data)
                with open(self._path) as f:
                    json_data = f.read()
                jdata = json.loads(json_data)
                jsonschema.validate(jdata, sdata)
                success = True
            except jsonschema.ValidationError:
                success = False
            except jsonschema.SchemaError:
                # a malformed schema is the caller's error, so let it propagate
                raise
        return {"success": success}
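
A hypothetical call site for the expectation above; the asset object and the file path are assumptions, not part of the original:

# `asset` is assumed to be a file-backed data asset exposing the method above;
# "event.schema.json" is a placeholder path.
result = asset.expect_file_to_be_valid_json(schema="event.schema.json")
assert result["success"] in (True, False)
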
Example #49
0
 def _validate_changes(self, configuration):
     service_schema = {
         "title": "service",
         "type": "object",
         "properties": {
             "http_interface": {
                 "type": "object",
                 "properties": {
                     "internal": {
                         "type": "boolean"
                     },
                     "restrict_access_to": {
                         "type": "array",
                         "items": {
                             "type": "string"
                         }
                     },
                     "container_port": {
                         "type": "number"
                     },
                     "health_check_path": {
                         "type": "string",
                         "pattern": "^/.*$"
                     }
                 },
                 "required": ["internal", "restrict_access_to",
                              "container_port"]
             },
             "memory_reservation": {
                 "type": "number",
                 "minimum": 10,
                 "maximum": 30000
             },
             "fargate": {
                 "type": "object",
                 "properties": {
                     "cpu": {
                         "type": "number",
                         "minimum": 256,
                         "maximum": 4096
                     },
                     "memory": {
                         "type": "number",
                         "minimum": 512,
                         "maximum": 30720
                     }
                 }
             },
             "command": {
                 "oneOf": [{
                     "type": "string"
                 }, {
                     "type": "null"
                 }]
             }
         },
         "required": ["memory_reservation", "command"]
     }
     schema = {
         # "$schema": "http://json-schema.org/draft-04/schema#",
         "title": "configuration",
         "type": "object",
         "properties": {
             "notifications_arn": {
                 "type": "string"
             },
             "services": {
                 "type": "object",
                 "patternProperties": {
                     "^[a-zA-Z]+$": service_schema
                 }
             },
             "cloudlift_version": {
                 "type": "string"
             }
         },
         "required": ["cloudlift_version", "services"]
     }
     try:
         validate(configuration, schema)
     except ValidationError as validation_error:
         raise UnrecoverableException(
             validation_error.message + " in " +
             ".".join(str(p) for p in validation_error.relative_path))
     log_bold("Schema valid!")
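
The dotted error path built above comes from ValidationError.relative_path; a small sketch with illustrative data (note the path deque can contain integers, hence the str() calls):

from jsonschema import validate, ValidationError

schema = {
    "type": "object",
    "properties": {
        "services": {
            "type": "object",
            "properties": {
                "web": {
                    "type": "object",
                    "properties": {"memory_reservation": {"type": "number"}},
                }
            },
        }
    },
}
try:
    validate({"services": {"web": {"memory_reservation": "lots"}}}, schema)
except ValidationError as e:
    # prints: services.web.memory_reservation
    print(".".join(str(p) for p in e.relative_path))
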
    def _validate_changes(self, configuration):
        log_bold("\nValidating schema..")
        # TODO: add cidr etc validation
        schema = {
            # "$schema": "http://json-schema.org/draft-04/schema#",
            "title": "configuration",
            "type": "object",
            "properties": {
                self.environment: {
                    "type": "object",
                    "properties": {
                        "cluster": {
                            "type": "object",
                            "properties": {
                                "min_instances": {"type": "integer"},
                                "max_instances": {"type": "integer"},
                                "instance_type": {"type": "string"},
                                "key_name": {"type": "string"},
                            },
                            "required": [
                                "min_instances",
                                "max_instances",
                                "instance_type",
                                "key_name"
                            ]
                        },
                        "environment": {
                            "type": "object",
                            "properties": {
                                "notifications_arn": {"type": "string"},
                                "ssl_certificate_arn": {"type": "string"}
                            },
                            "required": [
                                "notifications_arn",
                                "ssl_certificate_arn"
                            ]
                        },
                        "region": {"type": "string"},
                        "vpc": {
                            "type": "object",
                            "oneOf": [
                                {
                                    "properties": {
                                        "create_new": {
                                            "enum": [True]
                                        },
                                        "cidr": {
                                            "type": "string"
                                        },
                                        "nat-gateway": {
                                            "type": "object",
                                            "properties": {
                                                "elastic-ip-allocation-id": {
                                                    "type": "string"
                                                }
                                            },
                                            "required": [
                                                "elastic-ip-allocation-id"
                                            ]
                                        },
                                        "subnets": {
                                            "type": "object",
                                            "properties": {
                                                "private": {
                                                    "type": "object",
                                                    "properties": {
                                                        "subnet-1": {
                                                            "type": "object",
                                                            "properties": {
                                                                "cidr": {
                                                                    "type": "string"
                                                                }
                                                            },
                                                            "required": [
                                                                "cidr"
                                                            ]
                                                        },
                                                        "subnet-2": {
                                                            "type": "object",
                                                            "properties": {
                                                                "cidr": {
                                                                    "type": "string"
                                                                }
                                                            },
                                                            "required": [
                                                                "cidr"
                                                            ]
                                                        }
                                                    },
                                                    "required": [
                                                        "subnet-1",
                                                        "subnet-2"
                                                    ]
                                                },
                                                "public": {
                                                    "type": "object",
                                                    "properties": {
                                                        "subnet-1": {
                                                            "type": "object",
                                                            "properties": {
                                                                "cidr": {
                                                                    "type": "string"
                                                                }
                                                            },
                                                            "required": [
                                                                "cidr"
                                                            ]
                                                        },
                                                        "subnet-2": {
                                                            "type": "object",
                                                            "properties": {
                                                                "cidr": {
                                                                    "type": "string"
                                                                }
                                                            },
                                                            "required": [
                                                                "cidr"
                                                            ]
                                                        }
                                                    },
                                                    "required": [
                                                        "subnet-1",
                                                        "subnet-2"
                                                    ]
                                                }
                                            },
                                            "required": [
                                                "private",
                                                "public"
                                            ]
                                        }
                                    }
                                },
                                {
                                    "properties": {
                                        "create_new": {
                                            "enum": [False]
                                        },
                                        "subnets": {
                                            "type": "object",
                                            "properties": {
                                                "private": {
                                                    "type": "object",
                                                    "patternProperties": {
                                                        "^subnet-[0-9]$": {
                                                            "type": "object",
                                                            "properties": {
                                                                "id": {
                                                                    "type": "string"
                                                                }
                                                            },
                                                            "required": [
                                                                "id"
                                                            ]
                                                        },
                                                    },
                                                    "additionalProperties": False,
                                                    "maxProperties": 5,
                                                    "minProperties": 1
                                                },
                                                "public": {
                                                    "type": "object",
                                                    "patternProperties": {
                                                        "^subnet-[0-9]$": {
                                                            "type": "object",
                                                            "properties": {
                                                                "id": {
                                                                    "type": "string"
                                                                }
                                                            },
                                                            "required": [
                                                                "id"
                                                            ]
                                                        },
                                                    },
                                                    "additionalProperties": False,
                                                    "maxProperties": 5,
                                                    "minProperties": 1
                                                }
                                            },
                                            "required": [
                                                "private",
                                                "public"
                                            ]
                                        }
                                    }
                                }
                            ],
                            "required": [
                                "subnets"
                            ]
                        }

                    },
                    "required": [
                        "cluster",
                        "environment",
                        "region",
                        "vpc"
                    ]
                }
            },
            "required": [self.environment]
        }
        try:
            validate(configuration, schema)
        except ValidationError as validation_error:
            error_path = ".".join(str(p) for p in validation_error.relative_path)
            if error_path:
                raise UnrecoverableException(validation_error.message + " in " + error_path)
            else:
                raise UnrecoverableException(validation_error.message)
        log_bold("Schema valid!")
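
The subnet blocks above combine patternProperties with additionalProperties: False to whitelist key names; a minimal sketch with illustrative data:

from jsonschema import validate, ValidationError

subnet_block = {
    "type": "object",
    "patternProperties": {
        "^subnet-[0-9]$": {
            "type": "object",
            "properties": {"id": {"type": "string"}},
            "required": ["id"],
        }
    },
    "additionalProperties": False,
    "minProperties": 1,
    "maxProperties": 5,
}

validate({"subnet-1": {"id": "subnet-0abc123"}}, subnet_block)  # passes
try:
    validate({"subnet-x": {"id": "subnet-0abc123"}}, subnet_block)
except ValidationError as e:
    print(e.message)  # "subnet-x" matches no pattern and is rejected
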
 def validate(self, schema):
     from jsonschema import validate
     with open(schema) as fp:
         schema = json.load(fp)
     validate(self.data, schema)
Example #52
0
 def validateCustomLevel(self, level):
     lk = json.loads(level)
     jsonschema.validate(instance=lk, schema=levelJsonSchema)
 def validate_params(self):
     validate(self.params, self.get_schema())
     DBUtil.validate_article_existence(self.dynamodb,
                                       self.params['article_id'],
                                       status='public')
Example #54
0
 def test_returns_valid_schema(self):
     setting = make_setting_field(factory.make_name("name"),
                                  factory.make_name("label"))
     # doesn't raise ValidationError
     validate(setting, SETTING_PARAMETER_FIELD_SCHEMA)
                prim['prim_type'] = 'forward'
                prim['grid_size'] = grid_size
                prim['start_v'] = vi
                prim['end_v'] = vf
                prim['assm'] = sim_set[(vi, vf)][sequence_idx][0]
                prim['guart'] = sim_set[(vi, vf)][sequence_idx][2]
                prim['all_test_info'] = list(sim_set[(vi, vf)][sequence_idx])
                all_prims[prim_id] = prim
                prim_id += 1
    return all_prims



def export_json(data, outfile):
    # dump the passed-in data rather than the module-level all_prims
    b = json.dumps(data, indent=2, cls=CustomJSONEncoder)
    b = b.replace('"##<', "").replace('>##"', "")
    with open(outfile, 'w') as f:
        f.write(b)

def import_json(infile):
    with open(infile) as f:
        data = json.load(f, object_pairs_hook=OrderedDict)
    return data


all_prims = recompute()
export_json(all_prims, '10px_prims.json')
my_prims = import_json('10px_prims.json')
for instance in my_prims:
    validate(instance=my_prims[instance], schema=schema)
Example #56
0
 def from_dict(cls, d):
     validate(d, cls.schema)
     attrs = [d.get(key, None) for key in cls._fields]
     return cls(*attrs)
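
The class behind from_dict above is not shown; a hedged sketch assuming a namedtuple-style class (the Point shape is purely illustrative):

from collections import namedtuple
from jsonschema import validate

class Point(namedtuple("Point", ["x", "y"])):
    schema = {
        "type": "object",
        "properties": {"x": {"type": "number"}, "y": {"type": "number"}},
        "required": ["x"],
    }

    @classmethod
    def from_dict(cls, d):
        validate(d, cls.schema)  # reject malformed input before construction
        return cls(*(d.get(key) for key in cls._fields))

print(Point.from_dict({"x": 1.0}))  # Point(x=1.0, y=None)
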
Example #57
0
    def validate_schema(self):
        # Count each expected type
        found = {}
        result = True

        # Stored errors are dicts of the following form:
        # { "message": "", "type": "<schema type>" }
        entries = {
            k: {
                "type": "unknown",
                "err": []
            }
            for k in self.datastate.keys()
        }
        missing_entries = list(self.schema.keys())
        for k in self.schema.keys():
            found[k] = 0
        try:
            # TODO take it from repo
            with open("rudder.jsonschema", "r") as json_file:
                rudder_schema = json.load(json_file)
            resolver = RefResolver.from_schema(rudder_schema)
            # Iterate over input to compare with schema
            for data_key, data_entry in self.datastate.items():
                for schema_key, schema_entry in self.schema.items():
                    try:
                        validate(instance=data_entry,
                                 schema=schema_entry["schema"],
                                 format_checker=draft7_format_checker,
                                 resolver=resolver)
                        found[schema_key] = found[schema_key] + 1
                        entries[data_key]["type"] = schema_key
                    except Exception as e:
                        entries[data_key]["err"].append({
                            "message": str(e),
                            "type": schema_key
                        })

            # Compare with expected occurrences
            for k in self.schema.keys():
                if "min" in self.schema[k] and self.schema[k]["min"] > found[k]:
                    print("Expected at least %s %s, but found %s" %
                          (self.schema[k]["min"], k, found[k]))
                    result = False
                elif "max" in self.schema[k] and self.schema[k]["max"] < found[k]:
                    print("Expected at most %s %s, but found %s" %
                          (self.schema[k]["max"], k, found[k]))
                    result = False
                else:
                    missing_entries.remove(k)

            # Display missing schema
            if not result:
                # Print failures for the first error
                print("\n")
                first_wrong_entry = [{
                    "name": k,
                    "value": entries[k]
                } for k in entries.keys() if entries[k]["type"] == "unknown"][0]
                print("ERROR for data entry %s:" % first_wrong_entry["name"])
                print("Could be due to one of the following errors:")
                for i in first_wrong_entry["value"]["err"]:
                    if i["type"] == missing_entries[0]:
                        print(i["message"])

                # Print parsing resume
                print("\n")
                print("Parsing summary:")
                to_print = {k: entries[k]["type"] for k in entries.keys()}
                print(json.dumps(to_print, indent=2, sort_keys=True))

        except Exception as err:
            print(err)
            result = False
        finally:
            return result
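
RefResolver.from_schema, as used above, resolves "$ref" pointers against the root schema (it is deprecated in newer jsonschema releases in favor of the referencing library); a small sketch with illustrative definitions:

from jsonschema import RefResolver, validate

root = {"definitions": {"nonempty": {"type": "string", "minLength": 1}}}
resolver = RefResolver.from_schema(root)
validate(instance="node1.example.local",
         schema={"$ref": "#/definitions/nonempty"},
         resolver=resolver)
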
Example #58
0
 def validate(self, config=None):
     # TODO(sskripnick): remove this checking when config schema
     # is done for all available engines
     if hasattr(self, "CONFIG_SCHEMA"):
         jsonschema.validate(config or self.config, self.CONFIG_SCHEMA)
 def decorated_function_wrapper(**kwargs):
     validate(kwargs['query'], schema)
     return f(**kwargs)
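
A fuller sketch of the decorator pattern implied above; the factory name and sample endpoint are assumptions, while the inner wrapper mirrors the original:

import functools
from jsonschema import validate

def validates_query(schema):
    def decorator(f):
        @functools.wraps(f)
        def decorated_function_wrapper(**kwargs):
            validate(kwargs["query"], schema)  # reject bad input before dispatch
            return f(**kwargs)
        return decorated_function_wrapper
    return decorator

@validates_query({"type": "object", "required": ["q"]})
def search(query):
    return query["q"]

print(search(query={"q": "jsonschema"}))
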
Example #60
0
def validate_game_data(data):
    """Validate game data against a JSON Schema"""
    return validate(data, schema)
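
Note that jsonschema.validate returns None on success and raises on failure, so the function above always returns None; a boolean-returning variant would need to catch the error (sketch, helper name is illustrative):

from jsonschema import validate, ValidationError

def is_valid_game_data(data, schema):
    try:
        validate(data, schema)
        return True
    except ValidationError:
        return False
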