def test_register_metaschema_property():
    r"""Test errors in register_metaschema_property.

    Each case builds a replacement MetaschemaProperty subclass via
    ``type(...)`` and asserts that class creation (which triggers
    registration) raises ValueError.
    """
    # Error when property class already registered
    args = ('ReplacementClassSchema', (MetaschemaProperty, ),
            {'name': existing_class})
    assert_raises(ValueError, type, *args)
    # Error when replacement class has schema
    # NOTE(review): this case appeared twice verbatim in the original;
    # the redundant duplicate has been removed.
    args = ('ReplacementClassSchema', (MetaschemaProperty, ),
            {'name': existing_validator, 'schema': {}})
    assert_raises(ValueError, type, *args)

    # Error when validate set
    def fake_validate(*args, **kwargs):  # pragma: no cover
        return

    args = ('ReplacementClassSchema', (MetaschemaProperty, ),
            {'name': existing_validator, '_validate': fake_validate})
    assert_raises(ValueError, type, *args)
    # Error when property not in existing metaschema
    get_metaschema()  # ensures it has been initialized
    args = ('ReplacementClassSchema', (MetaschemaProperty, ),
            {'name': non_existant})
    assert_raises(ValueError, type, *args)
def test_get_metaschema():
    r"""Test get_metaschema and ensure the metaschema is current."""
    backup = os.path.join(tempfile.gettempdir(), metaschema._metaschema_fbase)
    cached = metaschema.get_metaschema()
    try:
        # Hide the on-disk copy so get_metaschema regenerates from scratch
        shutil.move(metaschema._metaschema_fname, backup)
        metaschema._metaschema = None
        regenerated = metaschema.get_metaschema()
        regen_id = regenerated.get('$id', regenerated.get('id', None))
        cached_id = cached.get('$id', cached.get('id', None))
        assert regen_id is not None
        assert cached_id is not None
        if regen_id != cached_id:  # pragma: debug
            # Different ids indicate a jsonschema version mismatch, not
            # necessarily a failure — warn rather than assert.
            warnings.warn(
                ("The locally generated metaschema would have a different "
                 "id than the default (%s vs. %s). Check that your "
                 "installation of jsonschema is up to date.")
                % (regen_id, cached_id))
        else:
            try:
                assert_equal(regenerated, cached)
            except AssertionError:  # pragma: debug
                print("Old:\n%s" % pprint.pformat(cached))
                print("New:\n%s" % pprint.pformat(regenerated))
                raise
    except BaseException:  # pragma: debug
        # Restore the original metaschema file before propagating the error
        shutil.move(backup, metaschema._metaschema_fname)
        raise
    shutil.move(backup, metaschema._metaschema_fname)
def regen_metaschema():
    r"""Regenerate the yggdrasil metaschema."""
    from yggdrasil import metaschema
    fname = metaschema._metaschema_fname
    if os.path.isfile(fname):
        os.remove(fname)
    # Clear cached objects so the next call rebuilds everything
    metaschema._metaschema = None
    metaschema._validator = None
    metaschema.get_metaschema()
def update_serializer(self, *args, **kwargs):
    # Transform scalar into array for table
    if kwargs.get('type', 'array') != 'array':
        # Pull every metaschema property out of kwargs into the scalar typedef
        scalar_def = {k: kwargs.pop(k)
                      for k in get_metaschema()['properties'].keys()
                      if k in kwargs}
        if scalar_def['type'] == 'object':
            # Objects become one column per property, titled by field name
            field_names = self.get_field_names() or list(
                scalar_def['properties'].keys())
            assert len(scalar_def['properties']) == len(field_names)
            table_def = {
                'type': 'array',
                'items': [dict(scalar_def['properties'][n], title=n)
                          for n in field_names]}
        else:
            table_def = {'type': 'array', 'items': [scalar_def]}
        kwargs.update(table_def)
    out = super(AsciiTableSerialize, self).update_serializer(*args, **kwargs)
    self.initialized = (self.typedef != self.default_datatype)
    self.update_format_str()
    self.update_field_names()
    self.update_field_units()
    return out
def __init__(self, *args, **kwargs):
    super(TestMetaschemaProperty, self).__init__(*args, **kwargs)
    # Containers of example values populated by subclasses
    for attr in ('_valid', '_invalid', '_encode_errors',
                 '_valid_normalize_schema'):
        setattr(self, attr, [])
    self._valid_compare = [(0, 0)]
    self._invalid_compare = [(0, 1)]
    self.validator = get_validator()(get_metaschema())
def _normalize_datatype(normalizer, value, instance, schema):
    r"""Normalize the datatype if the type information is in the comm."""
    # Only act on dicts that do not already carry an explicit datatype
    if not isinstance(instance, dict) or ('datatype' in instance):
        return instance
    # Move any top-level metaschema properties into a nested datatype dict
    datatype = {k: instance.pop(k)
                for k in list(metaschema.get_metaschema()['properties'].keys())
                if k in instance}
    if datatype:
        instance['datatype'] = datatype
    return instance
def _normalize_datatype(normalizer, value, instance, schema):
    r"""Normalize the datatype if the type information is in the comm."""
    # Only act on dicts that do not already carry an explicit datatype
    if not isinstance(instance, dict) or ('datatype' in instance):
        return instance
    type_props = list(metaschema.get_metaschema()['properties'].keys())
    # 'args' belongs to the driver when one is specified, so don't treat
    # it as type information in that case
    if ('driver' in instance) and ('args' in type_props):
        type_props.remove('args')
    extracted = {}
    migrate_keys(instance, [extracted], include_key_list=type_props)
    if extracted:
        instance['datatype'] = extracted
    return instance
def update_serializer(self, *args, **kwargs):
    # Transform scalar into array for table
    if kwargs.get('type', 'array') != 'array':
        # Extract metaschema properties from kwargs as the scalar typedef
        scalar_def = {k: kwargs.pop(k)
                      for k in get_metaschema()['properties'].keys()
                      if k in kwargs}
        # Wrap the scalar as a single-column array
        kwargs.update(type='array', items=[scalar_def])
    out = super(AsciiTableSerialize, self).update_serializer(*args, **kwargs)
    self.update_format_str()
    self.update_field_names()
    self.update_field_units()
    return out
def _normalize_modelio_elements(normalizer, value, instance, schema):
    r"""Normalize case of models singular.

    Registers named input/output entries in the normalizer's iodict,
    siphons non-comm keywords into a per-io "extra" buffer, and pairs
    driver-style input/output entries that share the same ``args`` value.
    Returns the (possibly mutated) instance.
    """
    # Index 2 of the schema path identifies which io list ('inputs' or
    # 'outputs') is currently being normalized.
    io = normalizer.current_schema_path[2]
    # Register io if dict set
    iodict = getattr(normalizer, 'iodict', None)
    s = getattr(normalizer, 'schema_registry', None)
    if (iodict is not None) and isinstance(instance, dict) and ('name' in instance):
        # Register io if dict set
        if instance['name'] not in iodict[io]:
            iodict[io][instance['name']] = instance
            # Move non-comm keywords to a buffer
            if (s is not None):
                comm_keys = s.get_component_keys('comm')
                type_keys = list(
                    metaschema.get_metaschema()['properties'].keys())
                extra_keys = {}
                # Everything NOT in comm_keys + type_keys is moved out of the
                # instance into extra_keys (migrate_keys mutates instance).
                migrate_keys(instance, [extra_keys], comm_keys + type_keys)
                iodict['%s_extra' % io][instance['name']] = extra_keys
                # type_dict = {}
                # migrate_keys(instance, [type_dict], comm_keys)
                # instance.setdefault('datatype', {})
                # instance['datatype'].update(type_dict)
        # Add driver to list
        if ('driver' in instance) and ('args' in instance):
            opp_map = {'inputs': 'output', 'outputs': 'input'}
            # Scan the opposite direction's pending drivers for a match on
            # args; pop the match and record the connected pair.  The pop
            # during enumeration is safe because we break immediately after.
            for i, (opp_arg, opp_name) in enumerate(
                    iodict['%s_drivers' % opp_map[io]]):
                if instance['args'] == opp_arg:
                    if io == 'inputs':
                        iodict['pairs'].append(
                            (iodict['%s_drivers' % opp_map[io]].pop(i)[1],
                             instance['name']))
                    else:  # pragma: debug
                        # This won't be called because inputs are processed first
                        # but this code is here for symmetries sake
                        iodict['pairs'].append(
                            (instance['name'],
                             iodict['%s_drivers' % opp_map[io]].pop(i)[1]))
                    break
            else:
                # No match found (for-else): queue this entry so a later
                # opposite-direction entry can pair with it.
                iodict['%s_drivers' % io[:-1]].append(
                    (instance['args'], instance['name']))
    return instance
def update_serializer(self, *args, **kwargs):
    # Transform scalar into array for table
    if kwargs.get('type', 'array') != 'array':
        # Extract metaschema properties from kwargs as the scalar typedef
        scalar_def = {k: kwargs.pop(k)
                      for k in get_metaschema()['properties'].keys()
                      if k in kwargs}
        # Wrap the scalar as a single-column array
        kwargs.update(type='array', items=[scalar_def])
    out = super(AsciiTableSerialize, self).update_serializer(*args, **kwargs)
    # Coerce string-valued serialization attributes to bytes
    for attr in ('format_str', 'delimiter', 'newline', 'comment'):
        val = getattr(self, attr, None)
        if isinstance(val, backwards.string_types):
            setattr(self, attr, backwards.as_bytes(val))
    self.update_format_str()
    self.update_field_names()
    self.update_field_units()
    return out
def test_create_normalizer():
    r"""Test create normalizer with default types."""
    norm_cls = normalizer.create(get_metaschema())
    # 'int' instances are passed through unchanged
    result = norm_cls({'type': 'int'}).normalize('1')
    assert_equal(result, '1')
def update_serializer(self, extract=False, skip_type=False, **kwargs):
    r"""Update serializer with provided information.

    Args:
        extract (bool, optional): If True, the updated typedef will be
            the bare minimum as extracted from total set of provided
            keywords, otherwise the entire set will be used. Defaults to
            False.
        skip_type (bool, optional): If True, everything is updated except
            the data type. Defaults to False.
        **kwargs: Additional keyword arguments are processed as part of
            they type definition and are parsed for old-style keywords.

    Raises:
        RuntimeError: If there are keywords that are not valid typedef
            keywords (current or old-style).

    """
    # Snapshot the current datatype so compatibility can be checked after
    # the update (only once the serializer has been initialized).
    old_datatype = None
    if self.initialized:
        old_datatype = copy.deepcopy(self.datatype)
    _metaschema = get_metaschema()
    # Raise an error if the types are not compatible
    seritype = kwargs.pop('seritype', self.seritype)
    if (seritype != self._seritype) and (seritype != 'default'):  # pragma: debug
        raise Exception("Cannot change types form %s to %s." %
                        (self._seritype, seritype))
    # Remove metadata keywords unrelated to serialization
    # TODO: Find a better way of tracking these
    _remove_kws = ['body', 'address', 'size', 'id', 'incomplete', 'raw',
                   'commtype', 'filetype', 'response_address', 'request_id',
                   'append', 'in_temp', 'is_series', 'working_dir', 'fmts',
                   'model_driver', 'env', 'send_converter', 'recv_converter',
                   'typedef_base']
    kws = list(kwargs.keys())
    for k in kws:
        if (k in _remove_kws) or k.startswith('zmq'):
            kwargs.pop(k)
    # Set attributes and remove unused metadata keys
    for k in self._schema_properties.keys():
        if (k in kwargs) and (k != 'datatype'):
            setattr(self, k, kwargs.pop(k))
    # Create preliminary typedef: explicit 'datatype' plus any metaschema
    # properties passed as top-level keywords.
    typedef = kwargs.pop('datatype', {})
    for k in _metaschema['properties'].keys():
        if k in kwargs:
            typedef[k] = kwargs.pop(k)
    # Update extra keywords (anything left over is stashed, not an error)
    if (len(kwargs) > 0):
        self.extra_kwargs.update(kwargs)
        self.debug("Extra kwargs: %s" % str(self.extra_kwargs))
    # Update type
    if not skip_type:
        # Update typedef from oldstyle keywords in extra_kwargs
        typedef = self.update_typedef_from_oldstyle(typedef)
        if typedef.get('type', None):
            if extract:
                cls = get_type_class(typedef['type'])
                typedef = cls.extract_typedef(typedef)
            self.datatype = get_type_from_def(typedef)
        # Check to see if new datatype is compatible with new one
        if old_datatype is not None:
            errors = list(
                compare_schema(self.typedef, old_datatype._typedef) or ())
            if errors:
                raise RuntimeError(
                    ("Updated datatype is not compatible with the existing one."
                     + " New:\n%s\nOld:\n%s\n") % (
                         pprint.pformat(self.typedef),
                         pprint.pformat(old_datatype._typedef)))
    # Enforce that strings used with messages are in bytes
    for k in self._attr_conv:
        v = getattr(self, k, None)
        if isinstance(v, (str, bytes)):
            setattr(self, k, tools.str2bytes(v))
def metaschema(cls):
    r"""JSON meta schema for validating schemas for this type."""
    # Delegate to the module-level cached metaschema
    schema = get_metaschema()
    return schema
def validator(self):
    r"""Schema validator."""
    # Build a validator class for the metaschema and instantiate it
    validator_cls = get_validator()
    return validator_cls(get_metaschema())
def test_create_metaschema():
    r"""Test errors in create_metaschema."""
    # Metaschema should already exist, so recreation without overwrite fails
    assert metaschema.get_metaschema()
    assert_raises(RuntimeError, metaschema.create_metaschema, overwrite=False)
def test_create_metaschema():
    r"""Test errors in create_metaschema."""
    # Metaschema should already exist, so recreation without overwrite fails
    current = metaschema.get_metaschema()
    assert current
    with pytest.raises(RuntimeError):
        metaschema.create_metaschema(overwrite=False)
def test_normalize_schema():
    r"""Test normalize_schema method on Normalizer."""
    norm_cls = normalizer.create(get_metaschema())
    # An unrecognized validator entry should be tolerated
    norm_cls.normalize_schema({'type': 'int'},
                              normalizer_validators={'invalid': None})