def from_artifact_uri(artifact_uri, store):
    """
    Resolve an artifact URI for an Experiment Run (e.g., /local/file/path
    or s3://my/bucket) into an ArtifactRepository instance capable of
    logging and downloading artifacts on behalf of this URI.

    :param artifact_uri: Root artifact URI for the run.
    :param store: An instance of AbstractStore which the artifacts are
        registered in (required for ``dbfs:/`` URIs).
    """
    # Repository classes are imported lazily inside each branch to avoid
    # creating a circular import loop.
    if artifact_uri.startswith("s3:/"):
        from mlflow.store.s3_artifact_repo import S3ArtifactRepository
        return S3ArtifactRepository(artifact_uri)
    if artifact_uri.startswith("gs:/"):
        from mlflow.store.gcs_artifact_repo import GCSArtifactRepository
        return GCSArtifactRepository(artifact_uri)
    if artifact_uri.startswith("wasbs:/"):
        from mlflow.store.azure_blob_artifact_repo import AzureBlobArtifactRepository
        return AzureBlobArtifactRepository(artifact_uri)
    if artifact_uri.startswith("sftp:/"):
        from mlflow.store.sftp_artifact_repo import SFTPArtifactRepository
        return SFTPArtifactRepository(artifact_uri)
    if artifact_uri.startswith("dbfs:/"):
        from mlflow.store.dbfs_artifact_repo import DbfsArtifactRepository
        # DBFS access is proxied through the store's HTTP credentials, so
        # only a DatabricksStore can back this repository.
        if not isinstance(store, DatabricksStore):
            raise MlflowException(
                '`store` must be an instance of DatabricksStore.')
        return DbfsArtifactRepository(artifact_uri, store.http_request_kwargs)
    # Anything without a recognized scheme is treated as a local path.
    from mlflow.store.local_artifact_repo import LocalArtifactRepository
    return LocalArtifactRepository(artifact_uri)
def dbfs_artifact_repo_factory(artifact_uri):
    """
    Return an ArtifactRepository subclass for storing artifacts on DBFS.

    This factory method is used with URIs of the form ``dbfs:/<path>``.
    DBFS-backed artifact storage can only be used together with the RestStore.

    :param artifact_uri: DBFS root artifact URI (string).
    :return: Subclass of ArtifactRepository capable of storing artifacts on DBFS.
    """
    root_uri = artifact_uri.rstrip('/')
    # FUSE use can be disabled explicitly by setting the env var to "false".
    fuse_disabled = os.environ.get(USE_FUSE_ENV_VAR, "").lower() == "false"
    if not fuse_disabled and mlflow.utils.databricks_utils.is_dbfs_fuse_available():
        # The DBFS FUSE mount is available, so write artifacts directly to
        # /dbfs/... using local filesystem APIs.
        fuse_path = strip_prefix(root_uri, "dbfs:/")
        return LocalArtifactRepository("file:///dbfs/{}".format(fuse_path))
    return DbfsRestArtifactRepository(root_uri)
def from_artifact_uri(artifact_uri):
    """
    Resolve an artifact URI for an Experiment Run (e.g., /local/file/path
    or s3://my/bucket) into an ArtifactRepository instance capable of
    logging and downloading artifacts on behalf of this URI.
    """
    # Repository classes are imported lazily inside each branch to avoid
    # creating a circular import loop.
    if artifact_uri.startswith("s3:/"):
        from mlflow.store.s3_artifact_repo import S3ArtifactRepository
        return S3ArtifactRepository(artifact_uri)
    if artifact_uri.startswith("gs:/"):
        from mlflow.store.gcs_artifact_repo import GCSArtifactRepository
        return GCSArtifactRepository(artifact_uri)
    if artifact_uri.startswith("wasbs:/"):
        from mlflow.store.azure_blob_artifact_repo import AzureBlobArtifactRepository
        return AzureBlobArtifactRepository(artifact_uri)
    # Anything without a recognized scheme is treated as a local path.
    from mlflow.store.local_artifact_repo import LocalArtifactRepository
    return LocalArtifactRepository(artifact_uri)
def local_artifact_repo(local_artifact_root):
    """Build a LocalArtifactRepository whose root is *local_artifact_root*,
    converted from a local filesystem path into a ``file:`` URI."""
    from mlflow.utils.file_utils import path_to_local_file_uri
    root_uri = path_to_local_file_uri(local_artifact_root)
    return LocalArtifactRepository(artifact_uri=root_uri)
def local_artifact_repo(local_artifact_root):
    """Build a LocalArtifactRepository rooted directly at *local_artifact_root*."""
    repo = LocalArtifactRepository(artifact_uri=local_artifact_root)
    return repo