def dump(self, model) -> ArtifactCollection:
    """
    Dumps model artifacts as :class:`~ebonite.core.objects.ArtifactCollection`

    :param model: model object to serialize via this wrapper's io pairs
    :return: context manager with :class:`~ebonite.core.objects.ArtifactCollection`
    """
    model_blob, refs = self._serialize_model(model)
    # main model payload is stored under a fixed filename
    blobs = {self.model_filename: InMemoryBlob(model_blob)}
    artifact_cms = []
    uuids = []
    for uuid, (io, obj) in refs.items():
        # each referenced object gets two entries: its serialized io (here)
        # and its own artifact collection (dumped below under the uuid path)
        blobs[uuid + self.io_ext] = InMemoryBlob(self._serialize_io(io))
        artifact_cms.append(io.dump(obj))
        uuids.append(uuid)
    # local import — presumably avoids a circular import; TODO confirm
    from ebonite.core.objects.artifacts import _enter_all_cm, _ExitAllCm, _RelativePathWrapper
    # enter every context manager up front, then keep them ALL open for the
    # duration of the yield: the artifacts may be backed by temporary
    # resources that are released when their cm exits
    additional_artifacts = _enter_all_cm(artifact_cms)
    with _ExitAllCm(artifact_cms):
        # namespace each sub-collection under its uuid so paths don't collide
        additional_artifacts = [
            _RelativePathWrapper(art, uuid)
            for art, uuid in zip(additional_artifacts, uuids)
        ]
        yield CompositeArtifactCollection([Blobs(blobs)] + additional_artifacts)
def dump(self) -> FilesContextManager:
    """Yield the model's artifacts extended with JSON-encoded metadata.

    Adds two extra in-memory blobs on top of whatever ``self.io`` produces:
    the serialized methods description and the serialized requirements.
    """
    with self.io.dump(self.model) as artifact:
        metadata = Blobs({
            self.methods_json: InMemoryBlob(dumps(self.methods).encode('utf-8')),
            self.requirements_json: InMemoryBlob(dumps(self.requirements).encode('utf-8')),
        })
        yield artifact + metadata
def artifact_collection():
    """Fixture: a composite artifact collection with nested relative paths.

    Builds a base ``Blobs`` collection, re-mounts it three times under
    relative prefixes, then re-mounts that composite twice more —
    exercising both flat and nested composition.
    """
    base = Blobs({
        '1': InMemoryBlob(bytes(123)),  # 123 zero bytes
        '2': InMemoryBlob(bytes(321)),  # 321 zero bytes
    })
    composite = base
    for prefix in ('first', 'second', 'third'):
        composite = composite + _RelativePathWrapper(base, prefix)
    result = composite + _RelativePathWrapper(composite, 'go')
    result = result + _RelativePathWrapper(composite, 'be')
    return result
def _push_artifact(self, model_id: str, blobs: typing.Dict[str, Blob]) -> ArtifactCollection:
    """Cache in-memory copies of ``blobs`` under ``model_id`` and return them.

    :raises ArtifactExistsError: if an artifact is already cached for this id
    """
    if model_id in self._cache:
        raise ArtifactExistsError(model_id, self)
    # snapshot each blob's bytes so the cache is independent of the originals
    copies = {name: InMemoryBlob(blob.bytes()) for name, blob in blobs.items()}
    self._cache[model_id] = Blobs(copies)
    return self._cache[model_id]
def dump(self) -> ArtifactCollection:
    """
    Dumps `torch.nn.Module` instance to :class:`.InMemoryBlob`
    and creates :class:`.ArtifactCollection` from it

    :return: context manager with :class:`~ebonite.core.objects.ArtifactCollection`
    """
    serialized = BytesIO()
    torch.save(self.model, serialized)
    payload = serialized.getvalue()
    yield Blobs({self.model_file_name: InMemoryBlob(payload)})
def push_artifact(self, artifact_type, artifact_id: str, blobs: typing.Dict[str, Blob]) -> ArtifactCollection:
    """Cache in-memory copies of ``blobs`` under a type-namespaced id.

    The cache key is ``'{artifact_type}/{artifact_id}'``.

    :raises ArtifactExistsError: if an artifact is already cached for this key
    """
    full_id = f'{artifact_type}/{artifact_id}'
    if full_id in self._cache:
        raise ArtifactExistsError(full_id, self)
    # snapshot each blob's bytes so the cache is independent of the originals
    copies = {name: InMemoryBlob(blob.bytes()) for name, blob in blobs.items()}
    self._cache[full_id] = Blobs(copies)
    return self._cache[full_id]
def dump(self) -> ArtifactCollection:
    """
    Dumps model artifacts as :class:`~ebonite.core.objects.ArtifactCollection`

    :return: context manager with :class:`~ebonite.core.objects.ArtifactCollection`
    """
    import contextlib

    model_blob, refs = self._serialize_model()
    blobs = {self.model_filename: InMemoryBlob(model_blob)}
    # BUGFIX: previously each `wrapper.dump()` context manager was exited
    # before the combined collection was yielded, so artifacts backed by
    # temporary resources (e.g. a TemporaryDirectory) could already be
    # invalid by the time the caller used them. Keep every cm open across
    # the yield with an ExitStack instead.
    with contextlib.ExitStack() as stack:
        additional_artifacts = []
        for uuid, wrapper in refs.items():
            # each referenced wrapper gets its serialized form plus its own
            # artifact collection, namespaced under the uuid
            blobs[uuid + self.wrapper_ext] = InMemoryBlob(
                self._serialize_wrapper(wrapper))
            artifact = stack.enter_context(wrapper.dump())
            additional_artifacts.append(_RelativePathWrapper(artifact, uuid))
        yield CompositeArtifactCollection([Blobs(blobs)] + additional_artifacts)
def dump(self, model: _TfModel) -> FilesContextManager:
    """
    Dumps session to temporary directory and creates
    :class:`~ebonite.core.objects.ArtifactCollection` from it

    :return: context manager with :class:`~ebonite.core.objects.ArtifactCollection`
    """
    # metadata depends only on plain attributes — compute it up front
    meta_payload = json.dumps([model.tensor_names, model.is_frozen]).encode('utf-8')
    dumper = self._get_dumper(model.is_frozen)
    with tempfile.TemporaryDirectory(prefix='ebonite_tensor_') as tempdir:
        with dumper.dump(model.get_session(), tempdir) as artifact:
            yield artifact + Blobs({self.meta_json: InMemoryBlob(meta_payload)})
def test_task__delete_model_with_artifacts(task_saved, model, artifact_repo):
    # Attach unpersisted artifacts so deletion has to clean them up too.
    model._unpersisted_artifacts = Blobs({'data': InMemoryBlob(b'data')})
    task_saved.bind_artifact_repo(artifact_repo)
    task_saved.push_model(model)
    # Pushing must assign both ids.
    assert model.id is not None
    assert model.task_id is not None
    task_saved.delete_model(model)
    # Deletion removes the model from the task and unbinds it (ids reset).
    assert len(task_saved.models) == 0
    assert model.id is None
    assert model.task_id is None
def dump(self, model) -> ArtifactCollection:
    """
    Dumps `torch.nn.Module` instance to :class:`.InMemoryBlob`
    and creates :class:`.ArtifactCollection` from it

    :return: context manager with :class:`~ebonite.core.objects.ArtifactCollection`
    """
    # JIT-compiled modules need torch.jit.save and a distinct filename
    if isinstance(model, torch.jit.ScriptModule):
        save, filename = torch.jit.save, self.model_jit_file_name
    else:
        save, filename = torch.save, self.model_file_name
    serialized = BytesIO()
    save(model, serialized)
    yield Blobs({filename: InMemoryBlob(serialized.getvalue())})
def write(self, dataset: Dataset) -> Tuple[DatasetReader, ArtifactCollection]:
    """Persist the dataset payload as a single 'data' blob with a test reader."""
    payload = dataset.data.encode('utf8')
    artifacts = Blobs({'data': InMemoryBlob(payload)})
    return TestDatasetReader(), artifacts
def artifact():
    """Fixture: a minimal artifact collection holding one in-memory blob."""
    blob = InMemoryBlob(b'kek')
    return Blobs({'kek': blob})
def dump(self, model) -> FilesContextManager:
    """Yield a fixed single-blob collection (test stub; ignores ``model``)."""
    stub_blob = InMemoryBlob(b'test')
    yield Blobs({'test.bin': stub_blob})
def write(self, dataset: Dataset) -> Tuple[DatasetReader, ArtifactCollection]:
    """Convert the dataset to one file-blob and pair it with a matching reader."""
    reader = OneFileDatasetReader(dataset.dataset_type)
    blob = InMemoryBlob(self.convert(dataset.data))
    return reader, Blobs.from_blobs({self.FILENAME: blob})
def get_artifacts(self):
    """Return a fixed one-entry artifact collection (test stub)."""
    payload = InMemoryBlob(b'test_bytes')
    return Blobs({'test.bin': payload})
def dump(self) -> ArtifactCollection:
    """Yield the model's ``b`` attribute rendered as UTF-8 text in one blob."""
    payload = str(self.model.b).encode('utf-8')
    yield Blobs({self.model_filename: InMemoryBlob(payload)})
def blobs():
    """Fixture: two in-memory blobs whose payload equals their key."""
    return {name: InMemoryBlob(name.encode()) for name in ('blob1', 'blob2')}
def condition(path, value):
    # NOTE(review): `blob_dict` comes from an enclosing scope not visible in
    # this chunk — presumably the materialized blobs under test; confirm
    # against the caller. Comparison assumes InMemoryBlob implements
    # payload-based equality — TODO confirm.
    assert blob_dict[path] == InMemoryBlob(value)