def test_config():
    """Smoke test: a Config object can be constructed and rendered."""
    HEADING(myself())
    cfg = Config()
    print(cfg)
    print(cfg.dict())
    # pprint(cfg.credentials('local'))
    assert cfg is not None
def __init__(self, cloud=None):
    """Initialize the Google Cloud storage provider.

    :param cloud: name of the cloud entry to use; defaults to "google".
    """
    if cloud is None:
        self.cloud = "google"
    else:
        self.cloud = cloud
    config = Config()
    # Build the GCS client from the service-account JSON path stored in
    # the cloudmesh configuration.
    self.gcs_client = storage.Client.from_service_account_json(
        config.credentials('google_cloud')
        ['GOOGLE_CLOUD_CREDENTIALS_JSON'])
    # IDIOM FIX: call the bound method on the instance instead of the
    # unbound ``LocalProvider.create(localprovider, ...)`` form.
    # Local staging directory, e.g. "<cwd>" + "googledump".
    localprovider = LocalProvider()
    self.dir = localprovider.create(str(os.getcwd()), self.cloud + 'dump')
def __init__(self, cloud=None):
    """Initialize the AWS S3 storage provider.

    :param cloud: name of the cloud entry to use; defaults to "aws".
        (Default added for consistency with the Google provider; passing
        an explicit value behaves exactly as before.)
    """
    if cloud is None:
        self.cloud = "aws"
    else:
        self.cloud = cloud
    config = Config()
    # BUG FIX: look up credentials for the *resolved* cloud name.  The
    # original passed the raw ``cloud`` argument, which is ``None`` when
    # the caller relies on the default.
    credentials = config.credentials(self.cloud)
    self.session = boto3.Session(
        aws_access_key_id=credentials['S3_ACCESS_ID'],
        aws_secret_access_key=credentials['S3_SECRET_KEY'])
    self.s3 = self.session.resource('s3')
    # IDIOM FIX: bound-method call instead of
    # ``LocalProvider.create(localprovider, ...)``.
    localprovider = LocalProvider()
    self.dir = localprovider.create(str(os.getcwd()), self.cloud + 'dump')
def main():
    """
    Main function for the Data Manager.

    Parses the docopt command line and dispatches the selected
    data-management action.
    """
    arguments = docopt(__doc__, version=1.0)

    if arguments['data'] and arguments['add']:
        cloud = arguments['PROVIDER']
        file_name = arguments['FILENAME']
        upload_file_by_name(cloud, file_name)
    elif arguments['data'] and arguments['list']:
        # Default to the table rendering when --format is not given.
        output_format = arguments["--format"] or 'table'
        if output_format == "table":
            print(Config().table())
        elif output_format == "yaml":
            print(Config())
    elif arguments['data'] and arguments['get']:
        cloud = arguments['PROVIDER']
        user_uuid = arguments['USER_UUID']
        file_name = arguments['FILENAME']
        get_file_by_name(cloud, file_name, user_uuid)
    elif arguments['data'] and arguments['ls']:
        cloud = arguments['PROVIDER']
        files = get_files(cloud)
        Config().print(files)
    elif arguments['data'] and arguments['copy']:
        file_name = arguments['FILENAME']
        source = arguments['PROVIDER']
        dest = arguments['DEST']
        copy_file(file_name, source, dest)
    elif arguments['data'] and arguments['rsync']:
        # rsync is not implemented yet; the arguments are still parsed.
        source = arguments['SOURCE']
        dest = arguments['DEST']
        file_name = arguments['FILENAME']
        # rsync_file(file_name, source, dest)
    elif arguments['data'] and arguments['del']:
        cloud = arguments['PROVIDER']
        file_name = arguments['FILENAME']
        delete_file(cloud, file_name)
    elif arguments['update'] and arguments['user'] and arguments['file']:
        # Parsed but not yet acted upon.
        username = arguments['USER']
        file_name = arguments['FILENAME']
def __init__(self):
    """Open the MongoDB connections used by the data manager."""
    config = Config()
    # Read the database section once instead of three separate calls.
    db_config = config.database()
    connect(db_config['database'],
            host=db_config['host'],
            port=db_config['port'])
    # NOTE(review): host, port, and database name below are hardcoded and
    # ignore the config values used for connect() above -- presumably they
    # should also come from config.database(); confirm before changing.
    self.client = MongoClient('localhost', 27017)
    self.db = self.client['mongoengine_test']
    self.filecollection = self.db.get_collection("file")
    self.usercollection = self.db.get_collection("user")
    self.vdircollection = self.db.get_collection("virtualdirectory")
    # Property definitions generated from the YAML schema.
    self.fileproperty = generate("File")
    self.userproperty = generate("User")
    self.vdirproperty = generate("Virtualdirectory")
def get_files(service):
    """Return the file listing for *service* from its storage provider."""
    cfg = Config()
    service_entry = cfg['cloud']['data'][service]
    # NOTE(review): other variants of get_files pass the *kind* string to
    # Provider() and read "location" from the config entry; here the whole
    # entry is handed to get_provider and the provider object itself is
    # subscripted for "location" -- confirm this is intentional.
    resolver = Provider()
    backend = resolver.get_provider(service_entry)
    return backend.list(backend["location"])
def get_file_by_name(service, filename, user_uuid):
    """Download *filename* from *service* and record it in Mongo for *user_uuid*."""
    cfg = Config()
    kind = cfg.data[service]['kind']
    backend = Provider(kind).get_provider(kind)
    local_path = backend.download(kind,
                                  cfg.data[service]['location'],
                                  filename)
    db = Mongo()
    db.save_file_to_db(service, local_path, filename, user_uuid)
def test_config():
    """Smoke test: a Config object can be constructed and printed."""
    HEADING(myself())
    cfg = Config()
    print(cfg)
    # pprint(cfg.credentials('local'))
    assert cfg is not None
    # assert 'cloud' in cfg.cloud
def test_local_list():
    """Print the local_a/local_b config entries and their locations."""
    HEADING(myself())
    cfg = Config()
    print(cfg)
    src = cfg.data['local_a']
    pprint(src)
    dst = cfg.data['local_b']
    pprint(dst)
    pprint(src["location"])
    pprint(dst["location"])
def get_files(service):
    """Return a JSON listing of the files stored by *service*.

    Each result row is a single-element list holding a dict with a
    1-based serial number and the file name (shape preserved from the
    original implementation).
    """
    config = Config()
    kind = config.data[service]['kind']
    provider = Provider(kind)
    provider = provider.get_provider(kind)
    files_list = provider.list(kind, config.data[service]['location'])
    # IDIOM FIX: enumerate instead of a manual counter, and no longer
    # shadow the ``list`` builtin.
    filelist = [[{'SNo': number, 'Filename': name}]
                for number, name in enumerate(files_list, start=1)]
    return jsonify(results=filelist)
def test_local_exists():
    """End-to-end check of LocalProvider create/exists/copy on two dirs."""
    HEADING(myself())
    cfg = Config()
    print(cfg)
    src = cfg.data['local_a']
    pprint(src)
    dst = cfg.data['local_b']
    pprint(dst)
    pprint(src["location"])
    pprint(dst["location"])

    lp = LocalProvider()

    # create both directories
    lp.create(src["location"])
    lp.create(dst["location"])

    # both must now exist
    assert lp.exists(src["location"])
    assert lp.exists(dst["location"])

    # create a file in the source directory
    file_path = src["location"] + "/a.txt"
    print(file_path)
    lp.create(file_path, dir=False)
    assert lp.exists(file_path)

    # copy it to the destination; both copies must exist afterwards
    from_path = src["location"] + "/a.txt"
    to_path = dst["location"] + "/a.txt"
    print(from_path, to_path)
    lp.copy(from_path, to_path)
    assert lp.exists(to_path)
    assert lp.exists(from_path)
def copy_file(filename, service, dest):
    """Copy *filename* from the *service* cloud to the *dest* cloud.

    The file is downloaded from the source provider, recorded in Mongo,
    then uploaded to the destination provider.  A copy onto the same
    cloud is rejected.
    """
    if service == dest:
        # BUG FIX: the original had a bare ``exit`` expression (a no-op,
        # since exit was never called); return explicitly so the copy is
        # skipped.  Also fixed the message grammar.
        print("Target cloud needs to be different than the source cloud")
        return
    config = Config()
    kind = config.data[service]['kind']
    provider = Provider(kind)
    provider = provider.get_provider(kind)
    destination = provider.get_provider(dest)
    file_path = provider.download(kind,
                                  config.data[service]['location'],
                                  filename)
    mongo = Mongo()
    # NOTE(review): the sibling get_file_by_name passes a user_uuid as a
    # fourth argument here -- confirm save_file_to_db has a default for it.
    mongo.save_file_to_db(service, file_path, filename)
    destination.upload(kind, config.data[service]['location'], filename)
from mongoengine import *
import datetime
from pymongo import MongoClient
from cloudmesh_data.data.Config import Config
from cloudmesh_data.data.util.retrieve_yaml_definition_properties import generate
from cloudmesh_data.data.util import get_file_size_and_checksum

#
# BUG: needs to come from config
#
config = Config()
# Read the database section once instead of three separate calls.
db_config = config.database()
connect(db_config['database'],
        host=db_config['host'],
        port=db_config['port'])

# NOTE(review): host, port, and database name are hardcoded despite the
# config lookup above (see the BUG note) -- confirm before switching them
# to the config values.
client = MongoClient('localhost', 27017)
db = client['mongoengine_test']
filecollection = db.get_collection("file")
usercollection = db.get_collection("user")
vdircollection = db.get_collection("virtualdirectory")

# To print all records for the file table:
# for file in vdircollection.find():
#     print(file)

# To delete all records:
# myquery = {}
# usercollection.delete_many({})

# Property definition generated from the YAML schema.
fileproperty = generate("File")
def delete_file(service, filename):
    """Remove *filename* from the storage backend behind *service*."""
    cfg = Config()
    kind = cfg.data[service]['kind']
    backend = Provider(kind).get_provider(kind)
    backend.delete(kind, cfg.data[service]['location'], filename)
def upload_file_by_name(service, filename):
    """Upload *filename* to the storage backend behind *service*."""
    cfg = Config()
    kind = cfg.data[service]['kind']
    backend = Provider(kind).get_provider(kind)
    backend.upload(kind, cfg.data[service]['location'], filename)
def main():
    """
    Main function for the Data Manager.

    Parses the docopt command line and dispatches the selected
    data-management action.  Several actions are still stubbed out;
    their calls remain commented while the arguments are parsed.
    """
    arguments = docopt(__doc__, version=1.0)

    if arguments['data'] and arguments['add']:
        cloud = arguments['PROVIDER']
        bucket_name = arguments['BUCKETNAME']
        file_name = arguments['FILENAME']
        # upload_file_by_name(cloud, bucket_name, file_name)
    elif arguments['data'] and arguments['list']:
        # Default to the table rendering when --format is not given.
        output_format = arguments["--format"] or 'table'
        if output_format == "table":
            print(Config().table())
        elif output_format == "yaml":
            print(Config())
    elif arguments['data'] and arguments['get']:
        cloud = arguments['PROVIDER']
        bucket_name = arguments['BUCKETNAME']
        user_uuid = arguments['USER_UUID']
        file_name = arguments['FILENAME']
        # get_file_by_name(cloud, bucket_name, file_name, user_uuid)
    elif arguments['data'] and arguments['ls']:
        cloud = arguments['PROVIDER']
        bucket_name = arguments['BUCKETNAME']
        # files = get_files(cloud, bucket_name)
        # x = PrettyTable(["SNo", "Filename"])
        # for i, f in enumerate(files, start=1):
        #     x.add_row([i, f])
        # print(x)
    elif arguments['data'] and arguments['copy']:
        file_name = arguments['FILENAME']
        source = arguments['PROVIDER']
        source_bucket = arguments['PROVIDER_BUCKET']
        dest = arguments['DEST']
        dest_bucket = arguments['DEST_BUCKET']
        # copy_file(file_name, source, source_bucket, dest, dest_bucket)
    elif arguments['data'] and arguments['rsync']:
        source = arguments['SOURCE']
        dest = arguments['DEST']
        file_name = arguments['FILENAME']
        # rsync_file(file_name, source, dest)
    elif arguments['data'] and arguments['del']:
        cloud = arguments['PROVIDER']
        bucket_name = arguments['BUCKETNAME']
        file_name = arguments['FILENAME']
        # delete_file(cloud, bucket_name, file_name)
    elif arguments['update'] and arguments['user'] and arguments['file']:
        # Parsed but not yet acted upon.
        username = arguments['USER']
        file_name = arguments['FILENAME']
from google.cloud import storage
import os
import yaml
import logging
from cloudmesh_data.data.Config import Config

# Route debug output to a local log file.
logging.basicConfig(filename='debug.log', level=logging.DEBUG)

# Load the data-manager YAML configuration.
# NOTE(review): dataMap is not used below -- its only consumer was the
# commented-out GOOGLE_APPLICATION_CREDENTIALS export; confirm whether
# this load can be dropped.
with open('cloudmesh-data.yaml', 'r') as f:
    dataMap = yaml.safe_load(f)

# os.environ["GOOGLE_APPLICATION_CREDENTIALS"] =
#     dataMap['cloud']['data']['google_cloud']['credentials']['GOOGLE_CLOUD_CREDENTIALS_JSON']

# Build the GCS client from the service-account JSON path in the config.
config = Config()
gcs_client = storage.Client.from_service_account_json(
    config.credentials('google_cloud')['GOOGLE_CLOUD_CREDENTIALS_JSON'])
# storage_client = storage.Client()
# The name for the bucket
# print(bucket_name)