def test_unordered_columns(df_type, data):
    """Round trip must succeed even when columns arrive in a different order."""
    shuffled = data[list(reversed(data.columns))]
    restored = deserialize(serialize(shuffled, df_type), df_type)
    assert data.equals(restored), f'{data} \n!=\n{restored}'
    assert restored is not data
def wrapper(self) -> 'ModelWrapper':
    """Lazily deserialize the wrapper from its meta and cache the result.

    :raises ValueError: if neither a wrapper instance nor its meta was provided
    """
    if self._wrapper is not None:
        return self._wrapper
    if self._wrapper_meta is None:
        raise ValueError(
            "Either 'wrapper' or 'wrapper_meta' should be provided")
    self._wrapper = deserialize(self._wrapper_meta, ModelWrapper)
    return self._wrapper
def __call__(self, *args, **kwargs):
    """Serialize the arguments, invoke the remote method and deserialize the result.

    Parameters must be passed either all positionally or all by keyword.

    :raises ValueError: on mixed positional/keyword use, on extra parameters,
        or when a declared parameter is not supplied
    """
    if args and kwargs:
        raise ValueError(
            'Parameters should be passed either in positional or in keyword fashion, not both'
        )
    if len(args) > len(self.method.args) or len(kwargs) > len(
            self.method.args):
        raise ValueError(
            f'Too much parameters given, expected: {len(self.method.args)}'
        )
    # Sentinel instead of None: previously an argument explicitly passed as
    # None was treated as missing and rejected; None is a legitimate value.
    _missing = object()
    data = {}
    for i, arg in enumerate(self.method.args):
        obj = _missing
        if len(args) > i:
            obj = args[i]
        if arg.name in kwargs:
            obj = kwargs[arg.name]
        if obj is _missing:
            raise ValueError(
                f'Parameter with name "{arg.name}" (position {i}) should be passed'
            )
        data[arg.name] = serialize(obj, arg.type)
    logger.debug('Calling server method "%s", args: %s ...',
                 self.method.name, data)
    out = self.call_method(self.method.name, data)
    logger.debug('Server call returned %s', out)
    return deserialize(out, self.method.out_type)
def serde_and_compare(obj, obj_type=None, true_payload=None, check_payload=True):
    """Serialize *obj*, deserialize the payload back and assert equality.

    When *obj_type* is omitted it is inferred from the instance and the
    restored object must be exactly that type; when given explicitly, the
    restored object only has to be a subclass (unless *obj_type* is itself a
    Serializer subclass). If *true_payload* is supplied it is compared with
    the produced payload (when *check_payload*) and then used as the input
    for deserialization.
    """
    type_was_inferred = obj_type is None
    if type_was_inferred:
        obj_type = type(obj)
    expect_subtype = (not type_was_inferred) and not issubclass(obj_type, Serializer)

    payload = pyjackson.serialize(obj, obj_type)
    if true_payload is not None:
        if check_payload:
            assert true_payload == payload
        payload = true_payload

    new_obj = pyjackson.deserialize(payload, obj_type)
    if expect_subtype:
        assert issubclass(type(new_obj), obj_type), '{} type must be subtype of {}'.format(
            new_obj, obj_type)
    elif type_was_inferred:
        assert isinstance(new_obj, obj_type)
    assert obj == new_obj
def test_dataframe_type(df_type):
    """DataFrameType declares pandas as a requirement and round-trips a frame."""
    assert df_type.requirements.modules == ['pandas']
    frame = pd.DataFrame([{'a': 1, 'b': 1}, {'a': 2, 'b': 2}])
    restored = deserialize(serialize(frame, df_type), df_type)
    assert frame.equals(restored)
def test_df_type(df_type_fx, request):
    """A DataFrameType subclass survives meta (de)serialization unchanged."""
    df_type = request.getfixturevalue(df_type_fx)
    assert issubclass(df_type, DataFrameType)
    recovered = deserialize(serialize(df_type), DatasetType)
    assert df_type == recovered
def test_dict_with_list_dataset_type():
    """Dict-of-list data is analyzed, (de)serialized and meta-serialized correctly."""
    data = {'a': ['b']}
    dt = DatasetAnalyzer.analyze(data)
    assert dt == DictDatasetType(
        {'a': TupleLikeListDatasetType([PrimitiveDatasetType('str')])})

    # the data itself is its own payload in both directions
    assert serialize(data, dt) == data
    assert deserialize(data, dt) == data

    # malformed input raises the dedicated errors
    with pytest.raises(DeserializationError):
        deserialize('', dt)
    with pytest.raises(SerializationError):
        serialize('', dt)

    # meta payload of the type, and of a holder wrapping it
    expected_meta = {
        'type': 'dict',
        'item_types': {
            'a': {
                'type': 'tuple_like_list',
                'items': [{
                    'type': 'primitive',
                    'ptype': 'str'
                }]
            }
        }
    }
    assert serialize(dt) == expected_meta
    assert serialize(DTHolder(dt)) == {'dt': expected_meta}
def test_dataframe_type():
    """An explicitly constructed DataFrameType round-trips a matching frame."""
    frame = pd.DataFrame([{'a': 1, 'b': 1}, {'a': 2, 'b': 2}])
    df_type = DataFrameType(['a', 'b'])
    restored = deserialize(serialize(frame, df_type), df_type)
    assert frame.equals(restored)
def test_unsized(times):
    """Unsized multidim array containers serialize to nested lists and back."""
    rows = [[1, 2] for _ in range(times)]
    container = MultidimUnsizedArrayContainer(MockNumpyNdarray(rows))
    payload = serialize(container)
    assert payload['arr'] == rows
    assert deserialize(payload, MultidimUnsizedArrayContainer) == container
def test_dataframe_type(df_type, data):
    """The serialized frame must survive an actual JSON encode/decode cycle."""
    assert df_type.requirements.modules == ['pandas']
    via_json = json.loads(json.dumps(serialize(data, df_type)))
    restored = deserialize(via_json, df_type)
    assert data.equals(restored)
def test_with_serde(pd_model: Model):
    """Input deserialized via the method signature is accepted by execute()."""
    interface = model_interface(pd_model)
    raw = {'values': [{'a': 1, 'b': 1}]}
    in_type, _ = pd_model.wrapper.method_signature('predict')
    interface.execute('predict', {'vector': deserialize(raw, in_type)})
def test_with_serde(pd_model: Model):
    """Payload deserialized with the model's input meta feeds predict()."""
    interface = model_interface(pd_model)
    raw = {'values': [{'a': 1, 'b': 1}]}
    interface.predict(deserialize(raw, pd_model.input_meta))
def test_all(df):
    """Analyze -> serialize -> JSON -> deserialize yields an equal, distinct frame."""
    df_type = DatasetAnalyzer.analyze(df)
    via_json = json.loads(json.dumps(serialize(df, df_type)))
    restored = deserialize(via_json, df_type)
    assert restored is not df
    pandas_assert(restored, df)
def test_all(data2):
    """Full JSON round trip preserves the analyzed frame."""
    df_type = DatasetAnalyzer.analyze(data2)
    via_json = json.loads(json.dumps(serialize(data2, df_type)))
    restored = deserialize(via_json, df_type)
    assert data2.equals(restored)
    assert restored is not data2
def test_multidim():
    """2-D array containers serialize to nested lists and round-trip."""
    matrix = [[1, 2, 3], [4, 5, 6]]
    container = MultidimArrayContainer(MockNumpyNdarray(matrix))
    payload = serialize(container)
    assert payload['arr'] == matrix
    assert deserialize(payload, MultidimArrayContainer) == container
def test_sized():
    """Fixed-size array containers serialize to plain lists and round-trip."""
    values = [1, 2, 3]
    container = SizedArrayContainer(MockNumpyNdarray(values))
    payload = serialize(container)
    assert payload['arr'] == values
    assert deserialize(payload, SizedArrayContainer) == container
def test_type_hierarchy__type_import():
    """A dotted type name in the payload triggers an on-the-fly import."""
    payload = {
        'type': 'tests.not_imported_directly.ChildClass',
        'field': 'aaa'
    }
    restored = deserialize(payload, RootClass)
    assert isinstance(restored, RootClass)
    assert restored.__class__.__name__ == 'ChildClass'
    assert restored.field == 'aaa'
    # re-serializing produces exactly the same payload
    assert serialize(restored) == payload
def test_pipeline__load(meta, model, task_saved_art):
    """A persisted pipeline reloads its models lazily via the meta repository."""
    task_saved_art.push_model(model)
    pipeline = model.as_pipeline('predict')
    task_saved_art.add_pipeline(pipeline)
    pipeline = deserialize(serialize(meta.get_pipeline_by_id(pipeline.id)), Pipeline)
    assert pipeline is not None
    assert len(pipeline.models) == 0  # models are not materialized yet
    pipeline.bind_meta_repo(meta)
    pipeline.load()
    assert len(pipeline.models) == 1
    assert model.name in pipeline.models
    assert pipeline.models[model.name] == model
def create_configurable(cls: Type[Configurable], kind):
    """Interactively build a Configurable of the given *kind*.

    *kind* may be a short alias registered in ``cls.KNOWN``. The user is
    prompted for every field of the resolved subtype; each answer is cast to
    the field's declared type (the first union member for Union fields).

    :raises NotImplementedError: when a field's type cannot be cast from the
        prompted value
    """
    kind = cls.KNOWN.get(kind, kind)
    args = {"type": kind}
    clazz = resolve_subtype(cls, args)
    for field in get_class_fields(clazz):
        try:
            # unwrap Optional/Union down to its first concrete member
            target = field.type.__args__[0] if is_union(field.type) else field.type
            answer = click.prompt(f"{field.name} value?", default=field.default)
            args[field.name] = target(answer)
        except ValueError:
            raise NotImplementedError(
                f"Not yet implemented for type {field.type}")
    return deserialize(args, cls)
def _deserialize_json(interface: Interface, method: str, request_json: dict):
    """Deserialize each JSON argument according to the method's exposed signature.

    :raises MalformedHTTPRequestException: on unknown argument names or on
        values that fail deserialization
    """
    args = {a.name: a for a in interface.exposed_method_args(method)}
    try:
        return {name: deserialize(value, args[name].type)
                for name, value in request_json.items()}
    except KeyError:
        raise MalformedHTTPRequestException(
            f'Invalid request: arguments are {set(args.keys())}, got {set(request_json.keys())}'
        )
    except DeserializationError as e:
        raise MalformedHTTPRequestException(e.args[0])
def _extract_request_data(method_args):
    """Pull call arguments out of the current flask request.

    JSON bodies are deserialized per argument using the declared types; any
    other content type falls back to merged form fields and uploaded files.

    :param method_args: iterable of argument descriptors (with .name and .type)
    :return: mapping of argument name to (deserialized) value
    """
    args = {a.name: a for a in method_args}
    if request.content_type == 'application/json':
        body = request.json
        try:
            request_data = {name: deserialize(value, args[name].type)
                            for name, value in body.items()}
        except KeyError:
            # an argument name not declared by the method was supplied
            raise WrongArgumentsError(args.keys(), body.keys())
    else:
        request_data = dict(itertools.chain(request.form.items(),
                                            request.files.items()))
    rlogger.debug('Got request[%s] with data %s', flask.g.ebonite_id, request_data)
    return request_data
def test_datetime():
    """Frames with datetime columns survive a full JSON round trip."""
    frame = pd.DataFrame([{'a': 1, 'b': datetime.now()},
                          {'a': 2, 'b': datetime.now()}])
    df_type = DatasetAnalyzer.analyze(frame)
    assert issubclass(df_type, DataFrameType)
    via_json = json.loads(json.dumps(serialize(frame, df_type)))
    restored = deserialize(via_json, df_type)
    assert frame.equals(restored)
    assert restored is not frame
def test_tuple_like_list_dataset_type_deserialize(tlldt):
    """Valid payloads deserialize to lists; bad ones raise DeserializationError."""
    assert deserialize(['c', 3], tlldt) == ['c', 3]
    with pytest.raises(DeserializationError):
        tlldt.deserialize('abc')
def test_primitive_dataset_type_deserialize(dt):
    """Primitives deserialize truthily; bad input raises DeserializationError."""
    assert deserialize(123, dt)
    with pytest.raises(DeserializationError):
        dt.deserialize('abc')
def test_dict_dataset_type_deserialize(ddt):
    """Dicts deserialize unchanged; bad input raises DeserializationError."""
    assert deserialize({'a': 3}, ddt) == {'a': 3}
    with pytest.raises(DeserializationError):
        ddt.deserialize('abc')
def test_list_dataset_type_deserialize(ldt):
    """Lists deserialize unchanged; bad input raises DeserializationError."""
    assert deserialize([3, 3], ldt) == [3, 3]
    with pytest.raises(DeserializationError):
        ldt.deserialize('abc')
def test_tuple_dataset_type_deserialize(tdt):
    """Tuples deserialize unchanged; bad input raises DeserializationError."""
    assert deserialize(('c', 3), tdt) == ('c', 3)
    with pytest.raises(DeserializationError):
        tdt.deserialize('abc')
class ExternalSerializer(StaticSerializer): real_type = External @classmethod def serialize(cls, instance: External) -> dict: return {'a': instance.b} @classmethod def deserialize(cls, obj: dict) -> object: return External(obj['a']) payload = serialize(External('value')) # {'a': 'value'} new_instance = deserialize(payload, External) # External('value') class Container: def __init__(self, externals: List[External]): self.externals = externals container_payload = serialize(Container([External('value')])) new_container = deserialize(container_payload, Container) class SizedListSerializer(Serializer): real_type = list def __init__(self, size: int):
def load(cls) -> "DeployConfig":
    """Deserialize the static SSCI deploy section into a DeployConfig."""
    raw = SSCIConf.DEPLOY
    return deserialize(raw, cls)
from pyjackson import deserialize, serialize
from pyjackson.decorators import type_field


# @type_field registers 'type_alias' as the discriminator key: each subclass's
# class-level value is written into the payload on serialize and used to pick
# the concrete subclass on deserialize.
@type_field('type_alias')
class Parent:
    type_alias = 'parent'  # also could be None for abstract parents


class Child1(Parent):
    type_alias = 'child1'

    def __init__(self, a: int):
        self.a = a


class Child2(Parent):
    type_alias = 'child2'

    def __init__(self, b: str):
        self.b = b


# The discriminator selects the concrete class in both directions:
serialize(Child1(1), Parent)  # {'type_alias': 'child1', 'a': 1}
deserialize({'type_alias': 'child2', 'b': 'b'}, Parent)  # Child2('b')