Example #1
File: _plugin.py  Project: turbaszek/dbnd
def dbnd_setup_plugin():
    # Map the Azure Blob Storage scheme to its client builder, and register a
    # resolver that recognizes Azure Blob paths.
    register_file_system(AZURE_BLOB_FS_NAME, build_azure_blob_fs_client)
    register_file_system_name_custom_resolver(match_azure_blob_path)

    from dbnd_azure.env import AzureCloudConfig

    register_config_cls(AzureCloudConfig)
Example #2
    def test_model_marshalling_target_to_value_remote_target(self, model):
        pytest.importorskip("dbnd_aws")
        model_marshaller = TensorflowKerasModelMarshaller()
        from dbnd_aws.fs import build_s3_fs_client
        from targets.fs import FileSystems, register_file_system

        # Register the S3 client so the "s3://" path below resolves to a remote target.
        register_file_system(FileSystems.s3, build_s3_fs_client)
        t = target("s3://path_to/my_bucket")
        # Loading a Keras model from a remote target is expected to fail.
        with pytest.raises(DatabandRuntimeError):
            model_marshaller.target_to_value(t)
Example #3
File: _plugin.py  Project: turbaszek/dbnd
def dbnd_setup_plugin():
    # Register both HDFS client implementations as configuration classes.
    from dbnd_hdfs.fs.hdfs_hdfscli import HdfsCli
    from dbnd_hdfs.fs.hdfs_pyox import HdfsPyox

    register_config_cls(HdfsCli)
    register_config_cls(HdfsPyox)

    from dbnd_hdfs.fs.hdfs import create_hdfs_client

    register_file_system("hdfs", create_hdfs_client)
Example #4
def dbnd_setup_plugin():
    from dbnd_gcp.dataflow.dataflow_config import DataflowConfig
    from dbnd_gcp.env import GcpEnvConfig

    register_config_cls(GcpEnvConfig)
    register_config_cls(DataflowConfig)

    # Dataproc config is only registered when the dbnd-spark plugin is enabled.
    if is_plugin_enabled("dbnd-spark"):
        from dbnd_gcp.dataproc.dataproc_config import DataprocConfig

        register_config_cls(DataprocConfig)

    register_file_system(FileSystems.gcs, build_gcs_client)
Example #5
def dbnd_setup_plugin():
    # register configs
    from dbnd_aws.emr.emr_config import EmrConfig
    from dbnd_aws.env import AwsEnvConfig

    register_config_cls(EmrConfig)
    register_config_cls(AwsEnvConfig)

    # AWS Batch config is only registered when the dbnd-docker plugin is enabled.
    if is_plugin_enabled("dbnd-docker"):
        from dbnd_aws.batch.aws_batch_ctrl import AwsBatchConfig

        register_config_cls(AwsBatchConfig)

    # register the S3 file system
    from dbnd_aws.fs import build_s3_fs_client

    register_file_system(FileSystems.s3, build_s3_fs_client)
Example #6
    def as_dict(self):
        return dict(self.sub_results)

    @property
    def op(self):
        return safe_get_context_manager_dag().get_task(
            self.sub_results[0][1].task_id)

    def __getitem__(self, key):
        return self.as_dict()[key]


# File system registered below for the "xcom" and "jinja" schemes.
class AirflowXComFileSystem(FileSystem):
    def exists(self, path):
        return False

    def open_read(self, path, mode="r"):
        return FileWrapper(io.BufferedReader(io.FileIO(path, mode)))

    def open_write(self, path, mode="w"):
        return AtomicLocalFile(path, fs=self, mode=mode)


class XComAtomicFile(AtomicLocalFile):
    def move_to_final_destination(self):
        self.fs.move_from_local(self.tmp_path, self.path)


# Both schemes map to the same AirflowXComFileSystem.
register_file_system("xcom", AirflowXComFileSystem)
register_file_system("jinja", AirflowXComFileSystem)
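
Across these examples the pattern is the same: register_file_system maps a URL scheme (a plain string such as "hdfs", or a FileSystems constant) either to a builder callable (Examples #1, #3, #4, #5) or to a FileSystem subclass (Example #6). A minimal sketch of that shape for a made-up "myfs" scheme follows; MyFileSystem, its behavior, and the targets.fs.file_system import path are illustrative assumptions, not code taken from the project.

from targets.fs import register_file_system

# Assumption: FileSystem is importable from this module; the excerpts above use
# the class without showing where it is defined.
from targets.fs.file_system import FileSystem


class MyFileSystem(FileSystem):
    """Hypothetical file system backing the made-up "myfs://" scheme."""

    def exists(self, path):
        # A real implementation would probe its backing store.
        return False

    def open_read(self, path, mode="r"):
        # Illustration only: read the path as a local file.
        return open(path, mode)


def dbnd_setup_plugin():
    # Passing the class mirrors Example #6; Examples #1 and #3-#5 pass a builder
    # function such as build_s3_fs_client instead.
    register_file_system("myfs", MyFileSystem)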