def dump(self, session, path) -> FilesContextManager:
    # path = os.path.join(path, TF_MODEL_FILENAME)  # TODO check if it is needed
    saver = tf.train.Saver()
    saver.save(session, path)
    yield Blobs({
        name: LocalFileBlob(os.path.join(path, name))
        for name in os.listdir(path)
    })

def _get_artifact(self, model_id: str) -> ArtifactCollection:
    path = os.path.join(self.path, model_id)
    if not os.path.exists(path):
        raise NoSuchArtifactError(model_id, self)
    return Blobs({
        os.path.relpath(file, path): LocalFileBlob(os.path.join(self.path, file))
        for file in glob.glob(os.path.join(path, '**'), recursive=True)
        if os.path.isfile(file)
    })

def dump(self, session, path) -> FilesContextManager:
    with session.as_default(), session.graph.as_default():
        saver = tf.train.Saver(save_relative_paths=True)
        saver.save(session, os.path.join(path, TF_MODEL_FILENAME))
        yield Blobs({
            name: LocalFileBlob(os.path.join(path, name))
            for name in os.listdir(path)
        })

def dump(self, session, path) -> FilesContextManager:
    tf.train.write_graph(session.graph.as_graph_def(), path, TF_MODEL_FILENAME, as_text=False)
    yield Blobs({
        TF_MODEL_FILENAME: LocalFileBlob(os.path.join(path, TF_MODEL_FILENAME))
    })

def dump(self, model) -> FilesContextManager:
    with tempfile.TemporaryDirectory(prefix='ebonite_tf_v2') as tmpdir:
        dir_path = os.path.join(tmpdir, self.model_dir_name)
        model.save(dir_path)
        shutil.make_archive(dir_path, 'zip', dir_path)
        yield Blobs({
            self.model_dir_name + self.ext: LocalFileBlob(dir_path + self.ext)
        })

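# A minimal restore sketch for the archive produced above (assumptions: the
# zipped blob has already been materialized to `archive_path`, and loading is
# done with plain tf.keras; the function name and paths here are illustrative,
# not the project's API).
import shutil
import tempfile

import tensorflow as tf


def load_from_archive(archive_path: str):
    # Unpack the zipped SavedModel directory and load it back with Keras
    tmpdir = tempfile.mkdtemp(prefix='ebonite_tf_v2_load')
    shutil.unpack_archive(archive_path, tmpdir, 'zip')
    return tf.keras.models.load_model(tmpdir)
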
def get_artifacts(self) -> ArtifactCollection:
    """Return model binaries"""
    artifacts = CompositeArtifactCollection([
        _RelativePathWrapper(m.artifact_any, os.path.join(MODEL_BIN_PATH, m.name))
        for m in self.pipeline.models.values()
    ])
    if len(self.server.additional_binaries) > 0:
        artifacts = CompositeArtifactCollection([
            artifacts,
            Blobs({os.path.basename(f): LocalFileBlob(f) for f in self.server.additional_binaries})
        ])
    return artifacts

def get_artifacts(self) -> ArtifactCollection:
    """Return model binaries"""
    artifacts = _RelativePathWrapper(self.model.artifact_any, MODEL_BIN_PATH)
    if len(self.server.additional_binaries) > 0:
        artifacts = CompositeArtifactCollection([
            artifacts,
            Blobs({
                os.path.basename(f): LocalFileBlob(f)
                for f in self.server.additional_binaries
            })
        ])
    return artifacts

def dump(self, model) -> ArtifactCollection:
    """
    Dumps `catboost.CatBoostClassifier` or `catboost.CatBoostRegressor` instance
    to :class:`.LocalFileBlob` and creates :class:`.ArtifactCollection` from it

    :return: context manager with :class:`~ebonite.core.objects.ArtifactCollection`
    """
    model_file = tempfile.mktemp()
    try:
        model.save_model(model_file)
        yield Blobs({self._get_model_file_name(model): LocalFileBlob(model_file)})
    finally:
        os.remove(model_file)

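# tempfile.mktemp is documented as unsafe: the returned name can be claimed by
# another process before the file is created. A sketch of the same dump built
# on a temporary directory instead, mirroring the lightgbm/xgboost dumpers
# below; `self._get_model_file_name(model)` is reused from the original,
# everything else is illustrative rather than the project's actual code.
def dump(self, model) -> ArtifactCollection:
    with tempfile.TemporaryDirectory(prefix='ebonite_catboost_dump') as tmpdir:
        model_name = self._get_model_file_name(model)
        model_file = os.path.join(tmpdir, model_name)
        model.save_model(model_file)
        yield Blobs({model_name: LocalFileBlob(model_file)})
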
def _push_artifact(self, model_id: str, blobs: typing.Dict[str, Blob]) -> ArtifactCollection:
    path = os.path.join(self.path, model_id)
    if os.path.exists(path):
        raise ArtifactExistsError(model_id, self)
    os.makedirs(path, exist_ok=True)

    result = {}
    for filepath, blob in blobs.items():
        join = os.path.join(path, filepath)
        os.makedirs(os.path.dirname(join), exist_ok=True)
        logger.debug('Writing artifact %s to %s', blob, join)
        blob.materialize(join)
        result[filepath] = LocalFileBlob(join)
    return Blobs(result)

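# Illustrative round trip for the two repository methods above (assumptions:
# `repo` is an instance of the local artifact repository these methods belong
# to, and the blob path and model id are made up for the example; the private
# methods are called directly here only to show the push/get contract).
blobs = {'model.pkl': LocalFileBlob('/tmp/model.pkl')}
pushed = repo._push_artifact('my-model-id', blobs)   # materializes files under repo.path/my-model-id
restored = repo._get_artifact('my-model-id')         # rebuilds Blobs from the files found on disk
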
def dump(self) -> FilesContextManager:
    model: lgb.Booster = self.model
    with tempfile.TemporaryDirectory(prefix='ebonite_lightgbm_dump') as f:
        path = os.path.join(f, self.model_path)
        model.save_model(path)
        yield Blobs({self.model_path: LocalFileBlob(path)})

def dump(self, model: xgboost.Booster) -> FilesContextManager:
    with tempfile.TemporaryDirectory(prefix='ebonite_xgboost_dump') as f:
        path = os.path.join(f, self.model_path)
        model.save_model(path)
        yield Blobs({self.model_path: LocalFileBlob(path)})

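# The matching load side for the xgboost dump above reconstructs the Booster
# from the materialized blob. A minimal sketch (the function name and path are
# illustrative, not the project's API):
import xgboost


def load(path: str) -> xgboost.Booster:
    booster = xgboost.Booster()
    booster.load_model(path)  # reads the file written by save_model above
    return booster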