def setup(self):
     variables = Variables()
     print(variables['storage'])
     self.service = Parameter.expand(variables['storage'])[0]
     self.p = Provider(service=self.service)
     self.sourcedir = path_expand("~/.cloudmesh/storage/test")
     print()
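These setup methods read the target service from the cloudmesh variable store; below is a minimal sketch of how that variable might be seeded before the tests run (the service name 'local' is only an illustration):

# Sketch: seed the cloudmesh variable store so that
# Parameter.expand(variables['storage'])[0] resolves to a concrete service.
from cloudmesh.common.variables import Variables
from cloudmesh.common.parameter import Parameter

variables = Variables()
variables['storage'] = 'local'      # or a parameter list such as 'awss3,google'
service = Parameter.expand(variables['storage'])[0]
print(service)                      # -> 'local'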
 def test_list_bucket(self):
     HEADING()
     from cloudmesh.google.storage.Provider import Provider
     provider = Provider(service=cloud)
     StopWatch.start("test_list_bucket_google")
     provider.list_bucket()
     StopWatch.stop("test_list_bucket_google")
Example #3
    def __init__(self,
                 source,
                 destination,
                 name="local",
                 parallelism=4):
        """
        :param name: The name of the queue (used as a collection in mongodb)
        :param source: The name of the service in cloudmesh.storage from
                       which to copy
        :param destination: The name of the service in cloudmesh.storage to
                            which to copy
        :param parallelism: The number of parallel threads
        """
        self.source = source
        self.destination = destination
        self.parallelism = parallelism

        config = Config()

        self.source_spec = config[f"cloudmesh.storage.{source}"]
        self.destination_spec = config[f"cloudmesh.storage.{destination}"]

        self.provider_source = Provider(service=source)
        self.provider_destination = Provider(service=destination)

        self.name = name
        self.collection = f"storage-queue-{name}-{source}-{destination}"
        self.number = 0

        #
        # TODO: create collection in mongodb
        #
        Console.ok(f"Collection: {self.name}")
Example #4
 def setup(self):
     variables = Variables()
     self.service = Parameter.expand(variables['storage'])[0]
     self.p = Provider(service=self.service)
     self.sourcedir = os.path.expanduser(
         "~/Documents/cloudmesh/storage/test")
     print()
Example #5
    def test_copy(self):
        HEADING()
        sources = ['local', 'awss3', 'azure', 'oracle', 'google']
        # sources = ['local', 'oracle']
        local_source = "~/.cloudmesh/storage/test"

        sizes = [1]
        for size in sizes:
            file = f"size-{size}-file.txt"
            print("0" * 100)
            print(file)
            print("0" * 100)
            pass_flag = True

            for source in sources:
                targets = sources.copy()
                targets.remove(source)

                for target in targets:

                    storage = target

                    if source == "local":
                        src = str(Path(Path(local_source) / file))
                    else:
                        src = file

                    if target == "local":
                        dst = str(Path(Path(local_source) / file))
                        storage = source
                    elif target == "azure":
                        dst = '/'
                    else:
                        dst = file

                    print("0"*100)
                    provider = Provider(service=storage)

                    banner(f"copy {source}:{src} to {target}:{dst}")
                    texttag = f"copy {source}:{src} to {target}:{dst}"

                    StopWatch.start(texttag)

                    try:
                        response = provider.copy(f'{source}:{src}',
                                                 f'{target}:{dst}')
                        if response is None:
                            Console.error(f"NULL response for copy {source}:"
                                          f"{src} to {target}:{dst}")
                            pass_flag = False
                    except Exception as e:
                        pass_flag = False
                        Console.error(f"Exception: copy {source}:{src} to "
                                      f"{target}:{dst} - {e}")

                    StopWatch.stop(texttag)
                    print("0" * 100)

        assert pass_flag
 def test_blob_metadata(self):
     HEADING()
     from cloudmesh.google.storage.Provider import Provider
     provider = Provider(service=cloud)
     blob_name = 'a/a.txt'
     StopWatch.start("test_blob_metadata")
     provider.blob_metadata(blob_name)
     StopWatch.stop("test_blob_metadata")
 def test_create_bucket(self):
     HEADING()
     from cloudmesh.google.storage.Provider import Provider
     provider = Provider(service=cloud)
     new_bucket_name = 'cloudmesh_gcp2'
     StopWatch.start("test_create_bucket_google")
     provider.create_bucket(new_bucket_name)
     StopWatch.stop("test_create_bucket_google")
 def test_copy_blob_btw_buckets(self):
     HEADING()
     from cloudmesh.google.storage.Provider import Provider
     provider = Provider(service=cloud)
     blob_name = 'a/a.txt'
     bucket_name_dest = 'cloudmesh_gcp2'
     blob_name_dest = 'a/a.txt'
     StopWatch.start("test_copy_blob_btw_buckets")
     provider.copy_blob_btw_buckets(blob_name, bucket_name_dest,
                                    blob_name_dest)
     StopWatch.stop("test_copy_blob_btw_buckets")
Example #9
class Provider(object):
    def __init__(self, config="~/.cloudmesh/cloudmesh.yaml"):

        self.config = Config(config_path=config)

    def copy(self, source_cloud, source, target_cloud, target, local_dir=None):

        self.provider_source = Provider(service=source_cloud,
                                        config=self.config)
        self.provider_target = Provider(service=target_cloud,
                                        config=self.config)

        if local_dir is None:
            unique = uuid.uuid4()
            self.local_dir = path_expand(f"~/.cloudmesh/storage/tmp/{unique}")
        else:
            self.local_dir = local_dir

        print(f"copy {source_cloud}:{source} -> {target_cloud}:{target}")
        status = None

        if source_cloud == "local" and target_cloud == "local":

            shutil.copy(source, target)

        else:

            #
            # first copy from source provider to local
            #

            try:
                _local = f"{self.local_dir}/{source}"
                result = self.provider_source.get(source=source,
                                                  destination=_local)
            except Exception as e:
                Console.error("Error fetching directory to local")
                print(e)
                raise SystemError
            #
            # second copy from local to target provider
            #
            try:
                result = self.provider_target.put(source=_local,
                                                  destination=target)
            except Exception as e:
                Console.error("Error copying from local to target")
                print(e)
                raise SystemError

        return status
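A usage sketch of the copy provider above, staging the transfer through a local temporary directory; the service and file names are illustrative only:

# Sketch: copy one object between two configured storage services.
transfer = Provider()                       # reads ~/.cloudmesh/cloudmesh.yaml
transfer.copy(source_cloud="awss3",
              source="data/report.txt",
              target_cloud="azure",
              target="data/report.txt")
# Internally the file is first fetched from awss3 into a unique directory
# under ~/.cloudmesh/storage/tmp and then uploaded to azure.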
Example #10
 def setup(self):
     StopWatch.start("vdir setup")
     self.vdir = Vdir()
     self.endpoint = 'box:/test.txt'
     self.directory_and_name = '/testdir/test'
     self.directory = 'testdir'
     self.file = 'test'
     self.create_file('~/.cloudmesh/vdir/test/test.txt', 'test file')
     self.destination = path_expand("~/.cloudmesh/vdir/test")
     variables = Variables()
     service = Parameter.expand(variables['storage'])[0]
     self.p = Provider(service=service)
     self.p.put(source='~/.cloudmesh/vdir/test/test.txt', destination='/',
                recursive=False)
     StopWatch.stop("vdir setup")
Example #11
    def test_googletoawsDir2(self):
        HEADING()

        sourcecloud = "google"
        targetcloud = "aws"
        sourceFile = "a1/testfolder/"
        targetFile = "a1/testfolder_1/"

        Benchmark.Start("GOOGLE_TO_AWS3")
        awsProvider = Provider(service=sourcecloud)
        testResult = awsProvider.copyFiles(source_cloud=sourcecloud,
                                           source_file=sourceFile,
                                           target_cloud=targetcloud,
                                           target_file=targetFile)
        Benchmark.Stop()
Example #12
    def test_googletoaws(self):
        HEADING()
        sourcecloud = "google"
        targetcloud = "aws"
        sourceFile = "uploadtest_awsgoogle.txt"
        targetFile = "uploadtest_googeaws.txt"

        Benchmark.Start("GOOGLE_TO_AWS1")
        awsProvider = Provider(service=sourcecloud)
        testResult = awsProvider.copyFiles(source_cloud=sourcecloud,
                                           source_file=sourceFile,
                                           target_cloud=targetcloud,
                                           target_file=targetFile)

        Benchmark.Stop()
class TestObjectstorage(object):

    def setup(self):
        variables = Variables()
        self.service = Parameter.expand(variables['storage'])[0]
        self.p = Provider(service=self.service)
        self.sourcedir = path_expand("~/.cloudmesh/storage/test")
        print()


    def test_list(self):
        HEADING()
        StopWatch.start("LIST Directory")
        contents = self.p.list(self.p.service, "/")
        StopWatch.stop("LIST Directory")
        for c in contents:
            pprint(c)

        assert len(contents) > 0
        found = False
        for entry in contents:
            if entry["cm"]["name"] == "a1.txt":
                found = True
        assert found


    def test_results(self):
        HEADING()
        # storage = self.p.service
        service = self.service
        banner(f"Benchmark results for {service} Storage")
        StopWatch.benchmark()
Example #14
class TestLocal(object):

    def setup_class(self):
        # variables = Variables()
        # service = Parameter.expand(variables['storage'])[0]

        self.service = "local"
        self.p = Provider(service=self.service)

    def test_00__config(self):
        VERBOSE(self.p)
        VERBOSE(self.p.kind)
        assert self.p.kind == self.service

    def test_01_create_source(self):
        HEADING()

        self.sourcedir = path_expand("~/.cloudmesh/storage/test/")
        create_file("~/.cloudmesh/storage/README.md", "content of a")
        create_file("~/.cloudmesh/storage/test/a/a.txt", "content of a")
        create_file("~/.cloudmesh/storage/test/a/b/b.txt", "content of b")
        create_file("~/.cloudmesh/storage/test/a/b/c/c.txt", "content of c")

        # test if the files are ok
        assert True

    def test_02_list(self):
        HEADING()
        src = '/'
        contents = self.p.list(source=src)

        VERBOSE(contents, label="c")

        for c in contents:
            VERBOSE(c)

    def test_05_search(self):
        HEADING()
        src = '/'
        filename = 'a.txt'
        #
        # bug use named arguments
        #
        files = self.p.search(directory=src, filename=filename, recursive=True)
        pprint(files)

        assert len(files) > 0
    def test_googletoawsDir2(self):
        HEADING()

        sourcecloud = "google"
        targetcloud = "aws"
        sourceFile = "a1/testfolder/"
        targetFile = "a1/testfolder2/"

        StopWatch.start("google_to_aws_directory2")
        awsProvider = Provider(service=sourcecloud)
        try:
            testResult = awsProvider.copyFiles(source_cloud=sourcecloud,
                                               source_file=sourceFile,
                                               target_cloud=targetcloud,
                                               target_file=targetFile)
            StopWatch.status("google_to_aws_directory2", "Success")
        finally:
            StopWatch.stop("google_to_aws_directory2")
    def test_awstogoogleDir(self):
        HEADING()

        sourcecloud = "aws"
        targetcloud = "google"
        sourceFile = "uploadtest1.txt"
        targetFile = "a2/testAwsToGoogle.txt"

        StopWatch.start("aws_to_google_directory")
        awsProvider = Provider(service=sourcecloud)
        try:
            testResult = awsProvider.copyFiles(source_cloud=sourcecloud,
                                               source_file=sourceFile,
                                               target_cloud=targetcloud,
                                               target_file=targetFile)
            StopWatch.status("aws_to_google_directory", "Success")
        finally:
            StopWatch.stop("aws_to_google_directory")
    def test_googletoaws(self):
        HEADING()

        sourcecloud = "google"
        targetcloud = "aws"
        for fileSize in fileSizes:
            targetFile = f"{sourcecloud}_to_{targetcloud}_fileSize_{fileSize}.txt"
            sourceFile = f'local_to_google_fileSize_{fileSize}.txt'
            StopWatch.start(targetFile)
            provider = Provider(service=sourcecloud)
            try:
                testResult = provider.copyFiles(source_cloud=sourcecloud,
                                                source_file=sourceFile,
                                                target_cloud=targetcloud,
                                                target_file=targetFile)
                StopWatch.status(targetFile, "Success")
            finally:
                StopWatch.stop(targetFile)
    def test_setup_provider(self):

        global provider
        global config
        global bucket

        provider = Provider(service=cloud)
        assert provider.kind == "google"
        config = Config()
        bucket = config[f'cloudmesh.storage.{cloud}.default.directory']
Example #19
 def get(self, name):
     try:
         doc = self.col.find_one({'cm.name': name, 'type': 'fileendpoint'})
         if doc is not None:
             self.col.update_one({
                 'cm.name': name,
                 'type': 'fileendpoint'
             }, {'$set': {
                 'modified': datetime.utcnow()
             }})
             service = doc['provider']
             source = os.path.join(doc['cloud_directory'], doc['filename'])
             destination = '~/.cloudmesh'
             p = Provider(service)
             file = p.get(source, destination, False)
             return file
         else:
             Console.error("File not found.")
     except Exception as e:
         print(e)
    def test_googletolocal(self):
        HEADING()

        local_test = "~/.cloudmesh/storage/test"

        sourcecloud = "google"
        targetcloud = "local"
        for fileSize in fileSizes:
            targetFileName = f"{sourcecloud}_to_{targetcloud}_fileSize_{fileSize}.txt"
            targetFile = path_expand(f'{local_test}/{targetFileName}')
            sourceFile = f'local_to_google_fileSize_{fileSize}.txt'
            StopWatch.start(targetFileName)
            provider = Provider(service=sourcecloud)
            try:
                testResult = provider.copyFiles(source_cloud=sourcecloud,
                                                source_file=sourceFile,
                                                target_cloud=targetcloud,
                                                target_file=targetFile)
                StopWatch.status(targetFileName, "Success")
            finally:
                StopWatch.stop(targetFileName)
    def test_localtoaws(self):
        HEADING()

        local_test = "~/.cloudmesh/storage/test"

        sourcecloud = "local"
        targetcloud = "aws"
        for fileSize in fileSizes:
            sourceFile = path_expand(
                f'{local_test}/test_file_size_{fileSize}.txt')
            # targetFile = f'local_to_aws_fileSize_{fileSize}.txt'
            targetFile = f'{sourcecloud}_to_{targetcloud}_fileSize_{fileSize}.txt'
            StopWatch.start(targetFile)
            provider = Provider(service=sourcecloud)
            try:
                testResult = provider.copyFiles(source_cloud=sourcecloud,
                                                source_file=sourceFile,
                                                target_cloud=targetcloud,
                                                target_file=targetFile)
                StopWatch.status(targetFile, "Success")
            finally:
                StopWatch.stop(targetFile)
 def test_rename_blob(self):
     HEADING()
     from cloudmesh.google.storage.Provider import Provider
     provider = Provider(service=cloud)
     blob_name = 'top_folder11/sub_folder7/test2'
     provider.create_dir(blob_name)
     new_name = 'top_folder11/sub_folder7/test2_new'
     StopWatch.start("test_rename_blob")
     provider.rename_blob(blob_name, new_name)
     StopWatch.stop("test_rename_blob")
Example #23
class TestStorageBox(object):
    def create_file(self, location, content):
        Shell.mkdir(os.path.dirname(path_expand(location)))
        writefile(location, content)

    def setup(self):
        variables = Variables()
        self.service = Parameter.expand(variables['storage'])[0]
        self.p = Provider(service=self.service)
        self.sourcedir = path_expand("~/.cloudmesh/storage/test")
        print()

    def test_create_source(self):
        HEADING()
        home = self.sourcedir
        # Setup a dir
        self.content = []
        self.files = [
            "a/a1.txt", "a/a2.txt", "a/a3.txt", "a/b/b1.txt", "a/b/b2.txt",
            "a/b/b3.txt", "a/b/c/c1.txt", "a/b/c/c2.txt", "a/b/c/c3.txt",
            "a/b/d/d1.txt", "a/b/d/d2.txt", "a/b/d/d3.txt", "a/b/d/a1.txt"
        ]

        for f in self.files:
            location = f"{home}/{f}"
            self.create_file(location, f"content of {f}")
            self.content.append(location)

        # setup empty dir in a
        d1 = Path(path_expand(f"{home}/a/empty"))
        d1.mkdir(parents=True, exist_ok=True)

        for f in self.files:
            assert os.path.isfile(f"{home}/{f}")

        assert os.path.isdir(f"{home}/a/empty")

    def test_put_and_get(self):
        HEADING()
        home = self.sourcedir
        StopWatch.start("PUT file")
        test_file = self.p.put(self.p.service, f"{home}/a/a1.txt", "/")
        StopWatch.stop("PUT file")
        assert test_file is not None

        StopWatch.start("GET file")
        test_file = self.p.get(self.p.service, f"/a1.txt", f"{home}/hello.txt")
        StopWatch.stop("GET file")
        assert test_file is not None

        content = readfile(f"{home}/hello.txt")
        assert "a1.txt" in content

    def test_list(self):
        HEADING()
        StopWatch.start("LIST Directory")
        contents = self.p.list(self.p.service, "/")
        StopWatch.stop("LIST Directory")
        for c in contents:
            pprint(c)

        assert len(contents) > 0
        found = False
        for entry in contents:
            if entry["cm"]["name"] == "a1.txt":
                found = True
        assert found

    def test_create_dir(self):
        HEADING()
        src = '/a/created_dir'
        StopWatch.start("CREATE DIR")
        directory = self.p.createdir(self.p.service, src)
        StopWatch.stop("CREATE DIR")
        pprint(directory)

        assert directory is not None
        assert "a/created_dir" in directory[0]["name"]

    def test_search(self):
        HEADING()
        src = '/'
        filename = "a1.txt"
        StopWatch.start("SEARCH file")
        search_files = self.p.search(self.p.service, src, filename, True)
        StopWatch.stop("SEARCH file")
        pprint(search_files)
        assert len(search_files) > 0
        assert search_files[0]["name"] == filename

    def test_delete(self):
        HEADING()
        src = "/a/created_dir"
        StopWatch.start("DELETE Directory")
        contents = self.p.delete(self.p.service, src)
        StopWatch.stop("DELETE Directory")
        deleted = False
        for entry in contents:
            if "created_dir" in entry["cm"]["name"]:
                if entry["cm"]["status"] == "deleted":
                    deleted = True
        assert deleted

    def test_recursive_put(self):
        # must be implemented by student from ~/.cloudmesh/storage/test
        # make sure all files are in the list see self.content which contains
        # all files
        home = self.sourcedir
        StopWatch.start("PUT Directory --recursive")
        upl_files = self.p.put(self.p.service, f"{home}", "/a", True)
        StopWatch.stop("PUT Directory --recursive")
        pprint(upl_files)

        assert upl_files is not None

    def test_recursive_get(self):
        # must be implemented by student into ~/.cloudmesh/storage/test/get
        # see self.content which contains all files but you must add get/
        home = self.sourcedir
        d2 = Path(path_expand(f"{home}/get"))
        d2.mkdir(parents=True, exist_ok=True)
        StopWatch.start("GET Directory --recursive")
        dnld_files = self.p.get(self.p.service, "/a", f"{home}/get", True)
        StopWatch.stop("GET Directory --recursive")
        pprint(dnld_files)

        assert dnld_files is not None

    def test_recursive_delete(self):
        # must be implemented by student into ~/.cloudmesh/storage/test/get
        # see self.content which contains all files but you must add get/
        src = "/a/a/b/c"
        StopWatch.start("DELETE Sub-directory")
        del_files = self.p.delete(self.p.service, src)
        StopWatch.stop("DELETE Sub-directory")

        assert len(del_files) > 0

    def test_exhaustive_list(self):
        # must be implemented by student into ~/.cloudmesh/storage/test/
        # see self.content which contains all files that you can test against
        # in the list return. all of them must be in there
        StopWatch.start("LIST Directory --recursive")
        contents = self.p.list(self.p.service, "/a", True)
        StopWatch.stop("LIST Directory --recursive")

        assert len(contents) > 0

    def test_selective_list(self):
        # must be implemented by student into ~/.cloudmesh/storage/test/a/b
        # see self.content which contains all files that you can test against
        # in the list return. all of them must be in there but not more?
        # I am unsure if we implemented a selective list. If not let us know
        # full list for now is fine
        StopWatch.start("LIST Sub-directory --recursive")
        contents = self.p.list(self.p.service, "/a/a/b", True)
        StopWatch.stop("LIST Sub-directory --recursive")

        assert len(contents) > 0

    def test_search_b1(self):
        # search for b1.txt
        src = '/a'
        filename = 'b1.txt'
        StopWatch.start("SEARCH file --recursive")
        search_files = self.p.search(self.p.service, src, filename, True)
        StopWatch.stop("SEARCH file --recursive")

        assert search_files is not None

    def test_search_b1_dir(self):
        # search for b/b1.txt; note that this search includes the directory in the filename
        src = '/a'
        filename = '/b/b1.txt'
        StopWatch.start("SEARCH file under a sub-dir --r")
        search_files = self.p.search(self.p.service, src, filename, True)
        StopWatch.stop("SEARCH file under a sub-dir --r")
        assert search_files is not None

    def test_search_a1(self):
        # search for a1.txt which should return 2 entries
        src = '/a'
        filename = 'a1.txt'
        StopWatch.start("SEARCH file under root dir --r")
        search_files = self.p.search(self.p.service, src, filename, True)
        StopWatch.stop("SEARCH file under root dir --r")

        assert len(search_files) == 2

    def test_results(self):
        HEADING()
        # storage = self.p.service
        service = self.service
        banner(f"Benchmark results for {service} Storage")
        StopWatch.benchmark()
Example #24
    def setup_class(self):
        # variables = Variables()
        # service = Parameter.expand(variables['storage'])[0]

        self.service = "local"
        self.p = Provider(service=self.service)
Example #25
class TestLocal(object):

    def setup_class(self):
        # variables = Variables()
        # service = Parameter.expand(variables['storage'])[0]

        self.service = "local"
        self.p = Provider(service=self.service)

    def test_00__config(self):
        VERBOSE(self.p)
        VERBOSE(self.p.kind)
        assert self.p.kind == self.service

    def test_01_create_source(self):
        HEADING()

        self.sourcedir = path_expand(f"{location}/test/")
        create_file(f"{location}/README.md", "content of a")
        create_file(f"{location}/test/a/a.txt", "content of a")
        create_file(f"{location}/test/a/b/b.txt", "content of b")
        create_file(f"{location}/test/a/b/c/c.txt", "content of c")

        # test if the files are ok
        assert True

    def test_02_list(self):
        HEADING()
        StopWatch.start("list")
        src = '/'
        contents = self.p.list(source=src)

        VERBOSE(contents, label="c")

        for c in contents:
            VERBOSE(c)
        StopWatch.stop("list")

    def test_05_search(self):
        HEADING()
        StopWatch.start("search")
        src = '/'
        filename = 'a.txt'
        #
        # bug use named arguments
        #
        files = self.p.search(directory=src, filename=filename, recursive=True)
        # pprint(files)
        StopWatch.stop("search")

        assert len(files) > 0

    def test_02_put(self):
        HEADING()
        StopWatch.start("put")
        src = path_expand("{location}/test/a/a.txt")
        dst = f"{location}/destination"
        test_file = self.p.put(src, dst)
        # pprint(test_file)
        StopWatch.stop("put")

        assert test_file is not None

    def test_03_get(self):
        HEADING()
        StopWatch.start("get")
        src = path_expand(f"{location}/destination/a.txt")
        dst = path_expand(f"{location}/destination/test.txt")
        file = self.p.get(src, dst)
        # pprint(file)
        StopWatch.stop("get")

        assert file is not None

        # assert len(content) > 0

    def test_06_create_dir(self):
        HEADING()
        dst = f"{location}/destination"
        src = path_expand("{dst}/created_dir")
        StopWatch.start("create_dir")
        directory = self.p.create_dir(src)
        # pprint(directory)
        StopWatch.stop("create_dir")

        assert directory is not None

    def test_07_delete(self):
        HEADING()
        dst = f"{location}/destination"
        src = path_expand("{dst}/created_dir")
        StopWatch.start("delete")
        self.p.delete(src)
        StopWatch.stop("delete")

    def test_benchmark(self):
        Benchmark.print(sysinfo=False, csv=True, tag=cloud)
Example #26

Benchmark.debug()

user = Config()["cloudmesh.profile.user"]
variables = Variables()
VERBOSE(variables.dict())

cloud = variables.parameter('storage')

print(f"Test run for {cloud}")

if cloud is None:
    raise ValueError("storage is not set")

provider = Provider(service=cloud)
print('provider:', provider, provider.kind)


def create_file(location, content):
    d = Path(os.path.dirname(path_expand(location)))
    print()
    print("TESTDIR:", d)

    d.mkdir(parents=True, exist_ok=True)

    writefile(path_expand(location), content)


location = "/tmp/cloudmesh/storage"
Example #27
Benchmark.debug()

location = "/tmp/cloudmesh/storage"

user = Config()["cloudmesh.profile.user"]
variables = Variables()
VERBOSE(variables.dict())

service = variables.parameter('storage')

print(f"Test run for {service}")

if service is None:
    raise ValueError("storage is not set")

provider = Provider(service=service)
print('provider:', provider, provider.kind)


@pytest.mark.incremental
class TestStorage(object):
    def create_file(self, location, content):
        print(f"create: {location}")
        Shell.mkdir(os.path.dirname(path_expand(location)))
        writefile(location, content)

    def test_clean(self):
        HEADING()

        try:
            shutil.rmtree(location)
        except FileNotFoundError:
            # nothing to remove on a fresh test run
            pass
Example #28
    def do_storage(self, args, arguments):
        """
        ::

           Usage:
             storage [--storage=SERVICE] create dir DIRECTORY
             storage [--storage=SERVICE] get SOURCE DESTINATION [--recursive]
             storage [--storage=SERVICE] put SOURCE DESTINATION [--recursive]
             storage [--storage=SERVICE] list [SOURCE] [--recursive] [--output=OUTPUT]
             storage [--storage=SERVICE] delete SOURCE
             storage [--storage=SERVICE] search  DIRECTORY FILENAME [--recursive] [--output=OUTPUT]
             storage [--storage=SERVICE] sync SOURCE DESTINATION [--name=NAME] [--async]
             storage [--storage=SERVICE] sync status [--name=NAME]
             storage config list [--output=OUTPUT]
             storage copy SOURCE DESTINATION [--recursive]


           This command does some useful things.

           Arguments:
             SOURCE        SOURCE can be a directory or file
             DESTINATION   DESTINATION can be a directory or file
             DIRECTORY     DIRECTORY refers to a folder on the cloud service


           Options:
             --storage=SERVICE  specify the cloud service name like aws or
                                azure or box or google

           Description:
             commands used to upload, download, list files on different
             cloud storage services.

             storage put [options..]
               Uploads the file specified in the filename to specified
               cloud from the SOURCEDIR.

             storage get [options..]
               Downloads the file specified in the filename from the
               specified cloud to the DESTDIR.

             storage delete [options..]
                Deletes the file specified in the filename from the
                specified cloud.

             storage list [options..]
               lists all the files from the container name specified on
               the specified cloud.

             storage create dir [options..]
               creates a folder with the directory name specified on the
               specified cloud.

             storage search [options..]
               searches for the source in all the folders on the specified
               cloud.

             sync SOURCE DESTINATION
               puts the content of source to the destination.
                If --recursive is specified this is done recursively from
                   the source
                If --async is specified, this is done asynchronously
                If a name is specified, the process can also be monitored
                   with the status command by name.
                If the name is not specified all data is monitored.

             sync status
               The status for the asynchronous sync can be seen with this
               command

             config list
               Lists the configured storage services in the yaml file

             storage copy SOURCE DESTINATION
               Copies files from source storage to destination storage.
               The syntax of SOURCE and DESTINATION is:
               SOURCE - awss3:source.txt
               DESTINATION - azure:target.txt

           Example:
              set storage=azureblob
              storage put SOURCE DESTINATION --recursive

              is the same as
              storage --storage=azureblob put SOURCE DESTINATION --recursive

              storage copy azure:source.txt oracle:target.txt

        """
        # arguments.CONTAINER = arguments["--container"]

        map_parameters(arguments, "recursive", "storage")
        VERBOSE(arguments)

        if arguments.storage is None:
            if arguments.copy is None:
                try:
                    v = Variables()
                    arguments.storage = v['storage']
                except Exception as e:
                    arguments.storage = None
                    raise ValueError("Storage provider is not defined")
            else:
                if arguments.DESTINATION.split(":")[0] == "local":
                    arguments.storage = arguments.SOURCE.split(":")[0]
                else:
                    arguments.storage = arguments.DESTINATION.split(":")[0]

        arguments.storage = Parameter.expand(arguments.storage)

        if arguments["get"]:
            provider = Provider(arguments.storage[0])

            result = provider.get(arguments.SOURCE, arguments.DESTINATION,
                                  arguments.recursive)

        elif arguments.put:
            provider = Provider(arguments.storage[0])

            result = provider.put(arguments.SOURCE, arguments.DESTINATION,
                                  arguments.recursive)

        elif arguments.create and arguments.dir:
            provider = Provider(arguments.storage[0])

            result = provider.create_dir(arguments.DIRECTORY)

        elif arguments.list:

            source = arguments.SOURCE or '.'

            for storage in arguments.storage:
                provider = Provider(storage)

                result = provider.list(source, arguments.recursive)

        elif arguments.delete:

            for storage in arguments.storage:
                provider = Provider(storage)

                provider.delete(arguments.SOURCE)

        elif arguments.search:

            for storage in arguments.storage:
                provider = Provider(storage)

                provider.search(arguments.DIRECTORY, arguments.FILENAME,
                                arguments.recursive)

        elif arguments.sync:
            # TODO: implement
            raise NotImplementedError

        elif arguments.copy:
            VERBOSE(f"COPY: Executing Copy command from {arguments.SOURCE} to "
                    f"{arguments.DESTINATION} providers")
            print(f"DEBUG storage.py: INITIALIZE with {arguments.storage[0]} "
                  "provider.")

            provider = Provider(arguments.storage[0])

            result = provider.copy(arguments.SOURCE, arguments.DESTINATION,
                                   arguments.recursive)
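For reference, a short sketch of what the copy branch above effectively executes for the service-prefixed paths described in the docstring (service and file names are illustrative):

# Sketch: equivalent of "storage copy awss3:source.txt azure:target.txt"
provider = Provider("awss3")
result = provider.copy("awss3:source.txt", "azure:target.txt", False)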
Example #29
class TestVdir(object):
    def create_file(self, location, content):
        d = Path(os.path.dirname(path_expand(location)))
        print()
        print("TESTDIR:", d)

        d.mkdir(parents=True, exist_ok=True)

        writefile(path_expand(location), content)

    def setup(self):
        StopWatch.start("vdir setup")
        self.vdir = Vdir()
        self.endpoint = 'box:/test.txt'
        self.directory_and_name = '/testdir/test'
        self.directory = 'testdir'
        self.file = 'test'
        self.create_file('~/.cloudmesh/vdir/test/test.txt', 'test file')
        self.destination = path_expand("~/.cloudmesh/vdir/test")
        variables = Variables()
        service = Parameter.expand(variables['storage'])[0]
        self.p = Provider(service=service)
        self.p.put(source='~/.cloudmesh/vdir/test/test.txt',
                   destination='/',
                   recursive=False)
        StopWatch.stop("vdir setup")

    def test_collection(self):
        HEADING()
        StopWatch.start("vdir collection")
        col = self.vdir.col
        StopWatch.stop("vdir collection")

        assert col.name == 'local-vdir'

    @pytest.fixture(scope='class')
    def dummy_file(self):
        self.endpoint = 'box:/test.txt'
        self.directory_and_name = 'test'

        StopWatch.start("vdir add")
        self.vdir = Vdir()
        testfile = self.vdir.add(endpoint=self.endpoint,
                                 dir_and_name=self.directory_and_name)
        StopWatch.stop("vdir add")
        return testfile

    @pytest.fixture(scope='class')
    def dummy_dir(self):
        self.directory = 'testdir'

        StopWatch.start("vdir mkdir")
        self.vdir = Vdir()
        testdir = self.vdir.mkdir(dirname=self.directory)
        StopWatch.stop("vdir mkdir")
        return testdir

    def test_mkdir(self, dummy_dir):
        HEADING()

        assert dummy_dir is not None

    def test_add(self, dummy_file):
        HEADING()

        assert dummy_file is not None

    def test_ls(self):
        HEADING()
        StopWatch.start("vdir ls")
        results = self.vdir.ls(directory=None)
        StopWatch.stop("vdir ls")

        assert results is not None

    def test_get(self):
        HEADING()
        StopWatch.start("vdir get")
        file = self.vdir.get(name=self.file, destination=self.destination)
        StopWatch.stop("vdir get")
        print(file)

        assert file is not None

    def test_status(self):
        HEADING()
        StopWatch.start("vdir status")
        file = self.vdir.status(dir_or_name=self.file)
        StopWatch.stop("vdir status")

        assert file is not None

    def test_cd(self):
        HEADING()
        StopWatch.start("vdir cd")
        self.vdir.cd(dirname=self.directory)
        StopWatch.stop("vdir cd")

        assert self.vdir.directory == self.directory

    def test_delete(self):
        HEADING()
        StopWatch.start("vdir delete")
        file = self.vdir.delete(dir_or_name=self.file)
        directory = self.vdir.delete(dir_or_name=self.directory)
        StopWatch.stop("vdir delete")

        assert all(obj is not None for obj in [file, directory])

    def test_results(self):
        HEADING()

        StopWatch.benchmark()
Example #30
    def do_storage(self, args, arguments):
        """
        ::

          Usage:
                storage [--storage=SERVICE] create dir DIRECTORY
                storage [--storage=SERVICE] get SOURCE DESTINATION [--recursive]
                storage [--storage=SERVICE] put SOURCE DESTINATION [--recursive]
                storage [--storage=SERVICE] list SOURCE [--recursive]
                storage [--storage=SERVICE] delete SOURCE
                storage [--storage=SERVICE] search  DIRECTORY FILENAME [--recursive]


          This command does some useful things.

          Arguments:
              SOURCE        SOURCE can be a directory or file
              DESTINATION   DESTINATION can be a directory or file
              DIRECTORY     DIRECTORY refers to a folder on the cloud service


          Options:
              --storage=SERVICE  specify the cloud service name like aws or azure or box or google
          Description:
                commands used to upload, download, list files on different cloud storage services.

                storage put [options..]
                    Uploads the file specified in the filename to specified cloud from the SOURCEDIR.

                storage get [options..]
                    Downloads the file specified in the filename from the specified cloud to the DESTDIR.

                storage delete [options..]
                    Deletes the file specified in the filename from the specified cloud.

                storage list [options..]
                    lists all the files from the container name specified on the specified cloud.

                storage create dir [options..]
                    creates a folder with the directory name specified on the specified cloud.

                storage search [options..]
                    searches for the source in all the folders on the specified cloud.

          Example:
            set storage=azureblob
            storage put SOURCE DESTINATION --recursive

            is the same as
            storage --storage=azureblob put SOURCE DESTINATION --recursive

        """
        # arguments.CONTAINER = arguments["--container"]

        map_parameters(arguments,
                       "recursive",
                       "storage")
        arguments.storage = arguments["--storage"]
        pprint(arguments)

        m = Provider()

        service = None

        #
        # BUG
        # services = Parameter.expand(arguments.storage)
        # service = services[0]
        # if services is None:
        #  ... do second try

        ##### BUG
        try:
            service = arguments["--storage"][0]
        except Exception as e:
            try:
                v = Variables()
                service = v['storage']
            except Exception as e:
                service = None

        if service is None:
            Console.error("storage service not defined")
            return

        # bug this is now done twice ....
        if arguments.storage is None:
            variables = Variables()
            arguments.storage = variables['storage']

        ##### Previous code needs to be modified

        if arguments.get:
            m.get(arguments.storage, arguments.SOURCE, arguments.DESTINATION,
                  arguments.recursive)

        elif arguments.put:
            m.put(arguments.storage, arguments.SOURCE, arguments.DESTINATION,
                  arguments.recursive)

        elif arguments.list:
            print('in List')
            m.list(arguments.storage, arguments.SOURCE, arguments.recursive)

        elif arguments.create and arguments.dir:
            m.createdir(arguments.storage, arguments.DIRECTORY)

        elif arguments.delete:
            m.delete(arguments.storage, arguments.SOURCE)

        elif arguments['search']:
            m.search(arguments.storage, arguments.DIRECTORY, arguments.FILENAME,
                     arguments.recursive)