Example #1
def dump(self) -> FilesContextManager:
    with self.io.dump(self.model) as artifact:
        yield artifact + Blobs({
            self.methods_json:
            InMemoryBlob(dumps(self.methods).encode('utf-8')),
            self.requirements_json:
            InMemoryBlob(dumps(self.requirements).encode('utf-8'))
        })
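The `dump` method above is a generator: it augments the artifact produced by `self.io.dump` with two extra in-memory blobs before yielding it, and it only behaves as a `FilesContextManager` if it is wrapped by something like `contextlib.contextmanager` elsewhere in the class (that decorator, and the exact semantics of `Blobs` and `InMemoryBlob`, are assumptions here). A minimal self-contained sketch of the same pattern, using a plain {filename: bytes} dict in place of the blob types:

import contextlib
import json


@contextlib.contextmanager
def dump_with_extras(base_files, methods, requirements):
    # Stand-in for Blobs/InMemoryBlob: a plain {filename: bytes} mapping.
    extras = {
        'methods.json': json.dumps(methods).encode('utf-8'),
        'requirements.json': json.dumps(requirements).encode('utf-8'),
    }
    yield {**base_files, **extras}


with dump_with_extras({'model.pkl': b'...'}, ['predict'], ['numpy']) as artifact:
    print(sorted(artifact))  # ['methods.json', 'model.pkl', 'requirements.json']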
Example #2
def get_kwargs(cls, image: Image) -> dict:
    return dict(id=image.id,
                name=image.name,
                author=image.author,
                creation_date=image.creation_date,
                task_id=image.task_id,
                params=dumps(image.params),
                source=dumps(image.source),
                environment_id=image.environment_id)
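Most of the `get_kwargs` classmethods in this list follow the same shape: scalar columns are copied as-is, while nested structures are flattened to JSON strings with `dumps` so they fit into a single storage column; presumably the resulting dict is passed straight to the class constructor. A generic sketch of that pattern with only the standard library (the `ImageRecord` class, the `from_obj` helper and the use of `json.dumps` are illustrative assumptions, not the project's actual models):

import json
from dataclasses import dataclass
from types import SimpleNamespace


@dataclass
class ImageRecord:
    # Flat record: nested fields are stored as JSON strings.
    id: int
    name: str
    params: str

    @classmethod
    def get_kwargs(cls, image) -> dict:
        return dict(id=image.id,
                    name=image.name,
                    params=json.dumps(image.params))

    @classmethod
    def from_obj(cls, image) -> 'ImageRecord':
        return cls(**cls.get_kwargs(image))


record = ImageRecord.from_obj(SimpleNamespace(id=1, name='img', params={'port': 9000}))
print(record.params)  # {"port": 9000}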
Example #3
def get_kwargs(cls, pipeline: Pipeline) -> dict:
    return dict(id=pipeline.id,
                name=pipeline.name,
                author=pipeline.author,
                creation_date=pipeline.creation_date,
                steps=dumps(pipeline.steps),
                input_data=dumps(pipeline.input_data),
                output_data=dumps(pipeline.output_data),
                task_id=pipeline.task_id)
Example #4
def get_kwargs(cls, model: Model) -> dict:
    return dict(id=model.id,
                name=model.name,
                author=model.author,
                creation_date=model.creation_date,
                wrapper=dumps(model.wrapper_meta),
                artifact=dumps(model.artifact),
                requirements=dumps(model.requirements),
                description=model.description,
                params=dumps(model.params),
                task_id=model.task_id)
Example #5
def get_kwargs(cls, model: Model) -> dict:
    return dict(id=model.id,
                name=model.name,
                author=model.author,
                creation_date=model.creation_date,
                wrapper=dumps(model.wrapper),
                artifact=dumps(model.artifact_req_persisted),
                input_meta=dumps(model.input_meta),
                output_meta=dumps(model.output_meta),
                requirements=dumps(model.requirements),
                task_id=model.task_id)
Example #6
def get_kwargs(cls, model: Model) -> dict:
    return dict(id=model.id,
                name=model.name,
                author=model.author,
                creation_date=model.creation_date,
                wrapper=dumps(model.wrapper_meta),
                artifact=dumps(model.artifact),
                requirements=dumps(model.requirements),
                description=model.description,
                params=dumps(model.params),
                task_id=model.task_id,
                images=[SImage.from_obj(i) for i in model.images.values()])
Example #7
def get_kwargs(cls, task: Task) -> dict:
    return dict(
        id=task.id,
        name=task.name,
        author=task.author,
        creation_date=task.creation_date,
        project_id=task.project_id,
        models=[SModel.from_obj(m) for m in task.models.values()],
        images=[SImage.from_obj(i) for i in task.images.values()],
        pipelines=[SPipeline.from_obj(p) for p in task.pipelines.values()],
        datasets=dumps(task.datasets),
        metrics=dumps(task.metrics),
        evaluation_sets=dumps(task.evaluation_sets))
Example #8
def get_sources(self):
    """Returns models meta file and custom requirements"""
    return {
        MODELS_META_PATH:
        dumps([model.without_artifacts() for model in self.models]),
        **self._get_sources()
    }
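`get_sources` apparently returns a mapping from a file name inside the build context to the content that should be written there; here the serialized models metadata is added on top of whatever `_get_sources` collects. A hedged sketch of how such a mapping could be materialized on disk (the `write_sources` helper is an assumption, not part of the library):

import os


def write_sources(sources: dict, target_dir: str) -> None:
    # Write a {relative_path: content} mapping into target_dir,
    # accepting both text and binary content.
    for rel_path, content in sources.items():
        path = os.path.join(target_dir, rel_path)
        os.makedirs(os.path.dirname(path) or '.', exist_ok=True)
        mode = 'wb' if isinstance(content, bytes) else 'w'
        with open(path, mode) as f:
            f.write(content)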
Example #9
def test_ndarray():
    nat = DatasetAnalyzer.analyze(np.array([1, 2, 3]))
    assert issubclass(nat, NumpyNdarrayDatasetType)
    payload = dumps(nat)
    nat2 = loads(payload, DatasetType)

    assert nat == nat2
Example #10
def test_ndarray(nat):
    assert issubclass(nat, NumpyNdarrayDatasetType)
    assert nat.requirements.modules == ['numpy']
    payload = dumps(nat)
    nat2 = loads(payload, DatasetType)

    assert nat == nat2
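In these round-trip tests `loads` receives the abstract base class (`DatasetType`) and still reconstructs the concrete subclass, which suggests the payload produced by `dumps` carries a type tag of some kind. A minimal stand-alone illustration of that idea using a name registry over JSON (the `type` field and the registry are assumptions about the general technique, not the library's actual wire format):

import json

_REGISTRY = {}


def register(cls):
    # Make a class discoverable by name when deserializing.
    _REGISTRY[cls.__name__] = cls
    return cls


@register
class NdarrayType:
    def __init__(self, shape):
        self.shape = shape

    def __eq__(self, other):
        return type(self) is type(other) and self.shape == other.shape


def dumps_tagged(obj) -> str:
    return json.dumps({'type': type(obj).__name__, **vars(obj)})


def loads_tagged(payload: str):
    data = json.loads(payload)
    cls = _REGISTRY[data.pop('type')]
    return cls(**data)


nat = NdarrayType(shape=[3])
assert loads_tagged(dumps_tagged(nat)) == nat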
Example #11
def get_kwargs(cls, image: Image) -> dict:
    return dict(id=image.id,
                name=image.name,
                author=image.author,
                creation_date=image.creation_date,
                model_id=image.model_id,
                params=dumps(image.params))
Example #12
def test_number():
    ndt = DatasetAnalyzer.analyze(np.float32(.5))
    assert issubclass(ndt, NumpyNumberDatasetType)
    assert ndt.requirements.modules == ['numpy']
    payload = dumps(ndt)
    ndt2 = loads(payload, DatasetType)
    assert ndt == ndt2
Example #13
def get_kwargs(cls, instance: RuntimeInstance) -> dict:
    return dict(id=instance.id,
                name=instance.name,
                author=instance.author,
                creation_date=instance.creation_date,
                image_id=instance.image_id,
                environment_id=instance.environment_id,
                params=dumps(instance.params))
Example #14
def test_feed_dict_type__serialization(tensor):
    obj = {tensor: np.array([[1]])}
    fdt = DatasetAnalyzer.analyze(obj)

    payload = dumps(obj, fdt)
    obj2 = loads(payload, fdt)

    assert obj[tensor] == obj2[tensor.name]
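Note that after the round trip the feed dict is keyed by tensor name rather than by the tensor object, which is why the test compares `obj[tensor]` with `obj2[tensor.name]`. A self-contained sketch of that name-keyed round trip using only numpy and JSON (the helper names and the list-based encoding are assumptions, not the library's serializers):

import json

import numpy as np


def dump_feed_dict(feed: dict) -> str:
    # Serialize {tensor_or_name: ndarray} to JSON, keying entries by name.
    return json.dumps({getattr(k, 'name', k): v.tolist() for k, v in feed.items()})


def load_feed_dict(payload: str) -> dict:
    return {name: np.array(v) for name, v in json.loads(payload).items()}


obj = {'weight': np.array([[1]])}
obj2 = load_feed_dict(dump_feed_dict(obj))
assert np.array_equal(obj['weight'], obj2['weight'])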
Example #15
def test_feed_dict_type__self_serialization(tftt):
    from ebonite.ext.tensorflow_v2 import TFTensorDatasetType

    assert issubclass(tftt, TFTensorDatasetType)
    assert tftt.requirements.modules == ['tensorflow']
    payload = dumps(tftt)
    tftt2 = loads(payload, DatasetType)
    assert tftt == tftt2
Example #16
def test_feed_dict_type__self_serialization(fdt, tensor):
    from ebonite.ext.tensorflow import FeedDictDatasetType

    assert issubclass(fdt, FeedDictDatasetType)
    assert set(fdt.requirements.modules) == {'tensorflow', 'numpy'}
    payload = dumps(fdt)
    fdt2 = loads(payload, DatasetType)
    assert fdt == fdt2
Example #17
def test_feed_dict_type__serialization():
    tensor = tf.placeholder('float', (1, 1), name="weight")
    obj = {tensor: np.array([[1]])}
    fdt = DatasetAnalyzer.analyze(obj)

    payload = dumps(obj, fdt)
    obj2 = loads(payload, fdt)

    assert obj[tensor] == obj2[tensor.name]
Example #18
def test_feed_dict_type__self_serialization():
    tensor = tf.placeholder('float', (1, 1), name="weight")
    fdt = DatasetAnalyzer.analyze({
        tensor: np.array([[1]]),
        'a': np.array([[1]])
    })
    assert issubclass(fdt, FeedDictDatasetType)
    payload = dumps(fdt)
    fdt2 = loads(payload, DatasetType)
    assert fdt == fdt2
Example #19
def get_sources(self):
    """Returns model metadata file and sources of custom modules from requirements"""
    return {
        MODEL_META_PATH: dumps(self.model.without_artifacts()),
        **self._get_sources(),
        **{
            os.path.basename(f): read(f)
            for f in self.server.additional_sources
        }
    }
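This provider, like the two that follow, also merges in extra files configured on the server: each path in `self.server.additional_sources` is read and keyed by its base name, so the file lands in the root of the build context. The `read` helper is not shown in these examples; one plausible shape for it (an assumption, and the real implementation may well return bytes instead of text) is:

def read(path: str) -> str:
    # Return the file's content so it can be bundled under its basename.
    with open(path, 'r', encoding='utf-8') as f:
        return f.read()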
Example #20
def get_sources(self):
    """Returns pipeline metadata file and sources of custom modules from requirements"""
    meta = PipelineMeta(self.pipeline, {
        k: v.without_artifacts() for k, v in self.pipeline.models.items()
    })
    return {
        PIPELINE_META_PATH: dumps(meta),
        **self._get_sources(),
        **{os.path.basename(f): read(f) for f in self.server.additional_sources}
    }
Example #21
def get_sources(self):
    """Returns models meta file and custom requirements"""
    return {
        MODELS_META_PATH:
        dumps([model.without_artifacts() for model in self.models]),
        **self._get_sources(),
        **{
            os.path.basename(f): read(f)
            for f in self.server.additional_sources
        }
    }
Example #22
def get_sources(self):
    """Returns model metadata file and sources of custom modules from requirements"""
    return {
        MODEL_META_PATH: dumps(self.model.without_artifacts()),
        **self._get_sources()
    }
Example #23
def test_number():
    ndt = DatasetAnalyzer.analyze(np.float32(.5))
    assert issubclass(ndt, NumpyNumberDatasetType)
    payload = dumps(ndt)
    ndt2 = loads(payload, DatasetType)
    assert ndt == ndt2
Example #24
def test_feed_dict_type__serialization(tftt, tensor_data):
    payload = dumps(tensor_data, tftt)
    tensor_data2 = loads(payload, tftt)

    tf.assert_equal(tensor_data, tensor_data2)
Example #25
def get_kwargs(cls, environment: RuntimeEnvironment) -> dict:
    return dict(id=environment.id,
                name=environment.name,
                author=environment.author,
                creation_date=environment.creation_date,
                params=dumps(environment.params))