def test_type_to_schema__with_spec():
    """A TypeWithSpec instance maps to an object schema built from its spec fields."""

    class MyTypeWithSpec(TypeWithSpec):
        def get_spec(self) -> ArgList:
            return [Field('field', int, False)]

    expected = {
        'type': 'object',
        'properties': {'field': type_to_schema(int)},
        'required': ['field'],
    }
    assert type_to_schema(MyTypeWithSpec(), False) == expected
def test_type_to_schema__class():
    """A plain class with an annotated __init__ maps to an object schema."""

    class Holder:
        def __init__(self, field: int):
            self.field = field

    expected = {
        'type': 'object',
        'properties': {'field': type_to_schema(int)},
        'required': ['field'],
    }
    assert type_to_schema(Holder, False) == expected
def test_type_to_schema__with_spec__list_different_types():
    """A list-like spec whose items have differing types is rejected with ValueError."""

    class MyTypeWithSpec(TypeWithSpec):
        def get_spec(self) -> ArgList:
            # two fields with different types in a list-shaped spec
            return [Field('field', int, False), Field('field', str, False)]

        def is_list(self):
            return True

        def list_size(self):
            return 1

    with pytest.raises(ValueError):
        type_to_schema(MyTypeWithSpec(), False)
def test_make_object__additional_and_default():
    """make_object with additional-properties enabled and a default embeds both keys."""
    result = make_object(None, int, True, 'DEFAULT')
    expected = {
        'type': 'object',
        'additionalProperties': type_to_schema(int),
        'default': 'DEFAULT',
    }
    assert result == expected
def test_make_array__default():
    """make_array carries a provided default value into the schema."""
    result = make_array(int, has_default=True, default=[1])
    expected = {
        'type': 'array',
        'items': type_to_schema(int),
        'default': [1],
    }
    assert result == expected
def test_make_array__max():
    """make_array translates maximum_size into the schema's maxItems."""
    result = make_array(int, maximum_size=10)
    expected = {
        'type': 'array',
        'items': type_to_schema(int),
        'maxItems': 10,
    }
    assert result == expected
def test_df__schema(dtype_df):
    """DataFrame dataset type serializes as an object wrapping a 'values' array of rows."""
    # NOTE(review): another test with this exact name appears later in this file; if both
    # live in one module, pytest collects only the later definition — consider renaming.
    expected = {
        'type': 'object',
        'required': ['values'],
        'properties': {
            'values': {
                'type': 'array',
                'items': {
                    'type': 'object',
                    'properties': {'a': {'type': 'number'}},
                    'required': ['a'],
                },
            },
        },
    }
    assert spec.type_to_schema(dtype_df) == expected
def test_np__schema(dtype_np):
    """Numpy dataset type maps to fixed-size nested array schemas (outer 5, inner 1)."""
    expected = {
        'type': 'array',
        'minItems': 5,
        'maxItems': 5,
        'items': {
            'type': 'array',
            'minItems': 1,
            'maxItems': 1,
            'items': {'type': 'number'},
        },
    }
    assert spec.type_to_schema(dtype_np) == expected
def test_df__schema(dtype_df):
    """DataFrame dataset type serializes to a flat object schema with column 'a'."""
    # NOTE(review): a test with this exact name also appears earlier in this file; if both
    # live in one module, pytest collects only this later definition — consider renaming.
    expected = {
        'type': 'object',
        'properties': {'a': {'type': 'integer'}},
        'required': ['a'],
    }
    assert spec.type_to_schema(dtype_df) == expected
def test_type_to_schema__with_spec__list():
    """A list-like spec with one uniform item type maps to a fixed-size array schema."""

    class MyTypeWithSpec(TypeWithSpec):
        def get_spec(self) -> ArgList:
            return [Field('field', int, False)]

        def is_list(self):
            return True

        def list_size(self):
            return 1

    assert type_to_schema(MyTypeWithSpec(), False) == make_array(int, 1, 1)
def test_torch__tensors_list(first_tensor, second_tensor):
    """Analyzing a list of tensors yields per-item tensor types, an int-keyed object
    schema, and a lossless serialize/deserialize round trip."""
    # this import ensures that this dataset type is registered in `DatasetAnalyzer`
    from ebonite.ext.torch.dataset import TorchTensorDatasetType  # noqa

    # NOTE(review): another test with this exact name appears later in this file; if both
    # live in one module, pytest collects only the later definition — consider renaming.
    tensors = [first_tensor, second_tensor]
    tdt = DatasetAnalyzer.analyze(tensors)

    assert len(tdt.items) == 2
    first_item, second_item = tdt.items
    assert first_item.shape == (5, 5)
    assert first_item.dtype == 'int32'
    assert first_item.list_size() == 5
    assert second_item.shape == (5, 10)
    assert second_item.dtype == 'float32'
    assert second_item.list_size() == 5

    # NOTE(review): property keys here are ints (0, 1), unlike the string keys ('0', '1')
    # in the sibling torch list test — presumably intentional, but worth confirming.
    expected_schema = {
        'type': 'object',
        'required': [0, 1],
        'properties': {
            0: {
                'type': 'array',
                'minItems': 5,
                'maxItems': 5,
                'items': {
                    'type': 'array',
                    'minItems': 5,
                    'maxItems': 5,
                    'items': {'type': 'integer'},
                },
            },
            1: {
                'type': 'array',
                'minItems': 5,
                'maxItems': 5,
                'items': {
                    'type': 'array',
                    'minItems': 10,
                    'maxItems': 10,
                    'items': {'type': 'number'},
                },
            },
        },
    }
    assert type_to_schema(tdt) == expected_schema

    round_tripped = tdt.deserialize(tdt.serialize(tensors))
    assert len(tensors) == len(round_tripped)
    for original, restored in zip(tensors, round_tripped):
        assert torch.equal(original, restored)
        assert original.dtype == restored.dtype
def test_feed_dict_type__openapi_schema_3d(tftt_3d):
    """A 3-D tensor type maps to triply nested arrays: open batch dim, then 32, then 20."""
    expected = {
        'type': 'array',
        'items': {
            'type': 'array',
            'minItems': 32,
            'maxItems': 32,
            'items': {
                'type': 'array',
                'minItems': 20,
                'maxItems': 20,
                'items': {'type': 'number'},
            },
        },
    }
    assert type_to_schema(tftt_3d) == expected
def test_torch__tensors_list(tdt_list, first_tensor, second_tensor):
    """A tensor-list dataset type exposes torch as a requirement, describes each item's
    open-batch shape, produces a string-keyed object schema, and round-trips tensors."""
    # NOTE(review): a test with this exact name also appears earlier in this file; if both
    # live in one module, pytest collects only this later definition — consider renaming.
    assert tdt_list.requirements.modules == ['torch']

    assert len(tdt_list.items) == 2
    first_item, second_item = tdt_list.items
    assert first_item.shape == (None, 5)
    assert first_item.dtype == 'int32'
    assert second_item.shape == (None, 10)
    assert second_item.dtype == 'float32'

    # outer arrays carry no min/maxItems: the batch dimension (None) is unconstrained
    expected_schema = {
        'type': 'object',
        'required': ['0', '1'],
        'properties': {
            '0': {
                'type': 'array',
                'items': {
                    'type': 'array',
                    'minItems': 5,
                    'maxItems': 5,
                    'items': {'type': 'integer'},
                },
            },
            '1': {
                'type': 'array',
                'items': {
                    'type': 'array',
                    'minItems': 10,
                    'maxItems': 10,
                    'items': {'type': 'number'},
                },
            },
        },
    }
    assert type_to_schema(tdt_list) == expected_schema

    tensors = [first_tensor, second_tensor]
    round_tripped = tdt_list.deserialize(tdt_list.serialize(tensors))
    assert len(tensors) == len(round_tripped)
    for original, restored in zip(tensors, round_tripped):
        assert torch.equal(original, restored)
        assert original.dtype == restored.dtype
def test_torch__single_tensor(first_tensor):
    """Analyzing a single tensor records the torch requirement, an open-batch shape,
    a nested array schema, and a lossless serialize/deserialize round trip."""
    # this import ensures that this dataset type is registered in `DatasetAnalyzer`
    from ebonite.ext.torch.dataset import TorchTensorDatasetType  # noqa

    tdt = DatasetAnalyzer.analyze(first_tensor)

    assert tdt.requirements.modules == ['torch']
    assert tdt.shape == (None, 5)
    assert tdt.dtype == 'int32'

    # outer array carries no min/maxItems: the batch dimension (None) is unconstrained
    expected_schema = {
        'type': 'array',
        'items': {
            'type': 'array',
            'minItems': 5,
            'maxItems': 5,
            'items': {'type': 'integer'},
        },
    }
    assert type_to_schema(tdt) == expected_schema

    restored = tdt.deserialize(tdt.serialize(first_tensor))
    assert torch.equal(first_tensor, restored)
    assert first_tensor.dtype == restored.dtype
def test_type_to_schema__generic_list():
    """List[int] maps to an array schema with integer items."""
    expected = {'type': 'array', 'items': type_to_schema(int)}
    assert type_to_schema(List[int], False) == expected
def test_type_to_schema__generic_map_not_str_key():
    """Dict types with non-string keys are rejected with ValueError."""
    with pytest.raises(ValueError):
        type_to_schema(Dict[int, int], False)
def test_type_to_schema__generic_map():
    """Dict[str, int] maps to an object schema with additional integer properties."""
    expected = {'type': 'object', 'additionalProperties': type_to_schema(int)}
    assert type_to_schema(Dict[str, int], False) == expected
def test_type_to_schema__builtin_default():
    """A builtin type with a default value embeds 'default' in its schema."""
    expected = {'type': 'integer', 'default': 5}
    assert type_to_schema(int, True, 5) == expected
def test_type_to_schema__builtin_no_default():
    """A builtin type without a default produces a bare type schema."""
    expected = {'type': 'integer'}
    assert type_to_schema(int, False) == expected
def test_make_array():
    """make_array with only an item type yields a minimal array schema."""
    result = make_array(int)
    expected = {'type': 'array', 'items': type_to_schema(int)}
    assert result == expected