class FdsDownload(object):
    '''
    Download object="OPMS/monitor.txt" from the FDS bucket "xxxxxx-test" to verify that the
    link pulling files from OSS into FDS is working; if it is not, switch the DNS resolution.
    '''

    def __init__(self):
        self.config = FDSClientConfiguration(
            region_name=current_app.config['FDSREGIONNAME'],
            endpoint=current_app.config['FDSENDPOINT'],
            enable_https=current_app.config['ENABLEHTTPS'],
            enable_cdn_for_upload=current_app.config['ENABLECDNFORUPLOAD'],
            enable_cdn_for_download=current_app.config['ENABLECDNFORDOWNLOAD'])
        self.client = GalaxyFDSClient(current_app.config['FDSACCESSKEYID'],
                                      current_app.config['FDSACCESSKEYSECRET'],
                                      config=self.config)

    # Retry three times before switching the DNS resolution
    @retry(tries=APP_ENV.TRIES, delay=APP_ENV.DELAY)
    def download(self):
        try:
            f = self.client.get_object(current_app.config['FDSBUCKETNAME'],
                                       current_app.config['FDSKEY'])
            print(datetime.now(), "monitor.txt exists")
            # with open("success.txt", 'wb+') as file:
            #     file.writelines(f.stream)
            time.sleep(0.5)
            self.client.delete_object(current_app.config['FDSBUCKETNAME'],
                                      current_app.config['FDSKEY'])
            print(datetime.now(), "verification file exists; monitor.txt deleted successfully")
        except Exception as e:
            print(datetime.now(), "monitor.txt does not exist")
            raise e
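A minimal usage sketch, assuming the snippet's own dependencies (Flask's current_app, the retry decorator, APP_ENV, and the FDS imports) are in scope; the Flask app, the config values, and the call site below are illustrative assumptions, not part of the original.

# Hypothetical invocation of the health check above; all config values are placeholders.
from flask import Flask

app = Flask(__name__)
app.config.update(
    FDSREGIONNAME='cnbj1', FDSENDPOINT='cnbj1-fds.api.xiaomi.net',
    ENABLEHTTPS=True, ENABLECDNFORUPLOAD=False, ENABLECDNFORDOWNLOAD=False,
    FDSACCESSKEYID='YOUR_ACCESS_KEY', FDSACCESSKEYSECRET='YOUR_SECRET_KEY',
    FDSBUCKETNAME='xxxxxx-test', FDSKEY='OPMS/monitor.txt')

with app.app_context():
    # Raises after APP_ENV.TRIES attempts if monitor.txt cannot be fetched.
    FdsDownload().download()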
def __init__(self, ak=None, sk=None, endpoint=None):
    self._fds_prefix = r'fds://'
    self._fds_prefix_len = len(self._fds_prefix)
    self._local_config = LocalConfig()
    self._ak = self._local_config.ak if ak is None else ak
    if self._ak is None:
        self.config()
    self._sk = self._local_config.sk if sk is None else sk
    if self._sk is None:
        self.config()
    self._endpoint = self._local_config.endpoint if endpoint is None else endpoint
    if self._endpoint is None:
        self.config()
    logger.debug("endpoint: " + self._endpoint)
    self._fds_config = FDSClientConfiguration(
        region_name="awsde0",
        enable_https=False,
        enable_cdn_for_download=False,
        enable_cdn_for_upload=False,
        endpoint=self._endpoint)
    self._fds = GalaxyFDSClient(access_key=self._ak,
                                access_secret=self._sk,
                                config=self._fds_config)
def write_fds_file(object_content, name):
    bucket_name = "mlmodel"
    AK = "AKYX4NXAR6QJOK2RBW"
    SK = "MuS+R5nNehuCogsqtztD+HT0JTsmjHS88NW8J8Z+"
    ENDPOINT = "cnbj1-fds.api.xiaomi.net"
    config = FDSClientConfiguration()
    config.set_endpoint(ENDPOINT)
    client = GalaxyFDSClient(AK, SK, config)
    client.put_object(bucket_name, name, object_content)
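For reference, a small usage sketch of the helper above; the object name and payload are made-up examples, and put_object accepts either string/bytes content or a file-like object, as the other snippets here show.

# Illustrative call of write_fds_file; the name and content are arbitrary examples.
csv_payload = "1,0\n2,1\n3,0\n"
write_fds_file(csv_payload, "datasets/sample_labels.csv")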
def __init__(self):
    self.config = FDSClientConfiguration(
        region_name=current_app.config['FDSREGIONNAME'],
        endpoint=current_app.config['FDSENDPOINT'],
        enable_https=current_app.config['ENABLEHTTPS'],
        enable_cdn_for_upload=current_app.config['ENABLECDNFORUPLOAD'],
        enable_cdn_for_download=current_app.config['ENABLECDNFORDOWNLOAD'])
    self.client = GalaxyFDSClient(current_app.config['FDSACCESSKEYID'],
                                  current_app.config['FDSACCESSKEYSECRET'],
                                  config=self.config)
class multithreadingClientTest(unittest.TestCase):

    def setUp(self):
        config = FDSClientConfiguration(region_name, False, False, False)
        self.client = GalaxyFDSClient(access_key, access_secret, config)
        self.bucket_name = "1024"

    def checkAndDeleteBucket(self, index):
        for i in range(50):
            self.client.list_objects(self.bucket_name, "", "/")
        print index

    def testMultithreadingClient(self):
        pool = ThreadPool(50)
        if not self.client.does_bucket_exist(self.bucket_name):
            self.client.create_bucket(self.bucket_name)
        pool.map(self.checkAndDeleteBucket, range(50))
        pool.close()
        pool.join()
def read_fds_file(paths):
    bucket_name = "mlmodel"
    AK = ""
    SK = ""
    ENDPOINT = "cnbj1-fds.api.xiaomi.net"
    config = FDSClientConfiguration()
    config.set_endpoint(ENDPOINT)
    client = GalaxyFDSClient(AK, SK, config)
    X = []
    y = []
    parts = []
    for path in paths:
        print("read path:", path)
        try:
            obj = client.get_object(bucket_name, path)
            for chunk in obj.stream:
                parts.append(chunk)
        except GalaxyFDSClientException as e:
            print("reading fds something is wrong")
            print(e.message)
    whole_file = "".join(parts)
    del parts
    whole_lines = whole_file.split("\n")
    del whole_file
    gc.collect()
    whole_lines.pop()
    for i in range(len(whole_lines)):
        eles = whole_lines[i].strip().split(",")
        if len(eles) == 2:
            X.append(int(eles[0]))
            y.append([int(eles[1])])
    del whole_lines
    gc.collect()
    return X, y
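Under Python 3 the chunks yielded by obj.stream are bytes, so the "".join(parts) step above would raise a TypeError; below is a sketch of the same join-and-parse step adjusted for bytes, assuming the stored object is UTF-8 text.

# Python 3 variant of the join/parse step (assumes the object body is UTF-8 text).
whole_file = b"".join(parts).decode("utf-8")
whole_lines = whole_file.split("\n")
whole_lines.pop()  # drop the empty entry after the trailing newline
for line in whole_lines:
    eles = line.strip().split(",")
    if len(eles) == 2:
        X.append(int(eles[0]))
        y.append([int(eles[1])])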
def cli(ctx, ak, sk, endpoint, https, cdn_download, timeout, part_size):
    ctx.ensure_object(dict)
    global fds_client
    global fds_ak
    global fds_sk
    global fds_endpoint
    global fds_config
    if ctx.invoked_subcommand != "config":
        local_config = LocalConfig()
        env_ak = os.environ.get("FDS_AK")
        env_sk = os.environ.get("FDS_SK")
        env_endpoint = os.environ.get("FDS_ENDPOINT")
        ak = ak or env_ak or local_config.ak
        if not ak:
            raise WrongEnvironmentException(
                "Please setup ak, the loading order is: command line > environment > config file"
            )
        sk = sk or env_sk or local_config.sk
        if not sk:
            raise WrongEnvironmentException(
                "Please setup sk, the loading order is: command line > environment > config file"
            )
        endpoint = endpoint or env_endpoint or local_config.endpoint
        if not endpoint:
            raise WrongEnvironmentException(
                "Please setup endpoint, the loading order is: command line > environment > config file"
            )
        config = FDSClientConfiguration(
            region_name="awsde0",
            enable_https=https,
            enable_cdn_for_download=cdn_download,
            enable_cdn_for_upload=False,
            endpoint=endpoint,
            part_size=part_size,
            timeout=timeout,
        )
        fds_config = config
        fds_ak = ak
        fds_sk = sk
        fds_endpoint = endpoint
        fds_client = GalaxyFDSClient(access_key=ak, access_secret=sk, config=config)
def bucket_name_completer(prefix, parsed_args, **kwargs):
    parse_argument(args=parsed_args)
    if not (access_key is None) and not (secret_key is None) and not (region is None):
        argcomplete.warn(str(enable_https) + ' ' + str(enable_cdn) + ' ' + str(region))
        fds_config = FDSClientConfiguration(region_name=region,
                                            enable_https=enable_https,
                                            enable_cdn_for_download=enable_cdn,
                                            enable_cdn_for_upload=enable_cdn)
        fds_client = GalaxyFDSClient(access_key=access_key,
                                     access_secret=secret_key,
                                     config=fds_config)
        bucket_list = get_buckets(fds_client=fds_client)
        rtn = []
        for i in bucket_list:
            if i.startswith(prefix):
                rtn.append(i)
        return rtn
    return ['a', 'b', 'c']
def __init__(self, ak=None, sk=None, endpoint=None):
    self._fds_prefix = r'fds://'
    self._fds_prefix_len = len(self._fds_prefix)
    self._local_config = LocalConfig()
    self._ak = self._local_config.ak if ak is None else ak
    if self._ak is None:
        self.config()
    self._sk = self._local_config.sk if sk is None else sk
    if self._sk is None:
        self.config()
    self._endpoint = self._local_config.endpoint if endpoint is None else endpoint
    if self._endpoint is None:
        self.config()
    self._fds_config = FDSClientConfiguration(region_name="awsde0",
                                              enable_https=False,
                                              enable_cdn_for_download=False,
                                              enable_cdn_for_upload=False,
                                              endpoint=self._endpoint)
    self._fds = GalaxyFDSClient(access_key=self._ak,
                                access_secret=self._sk,
                                config=self._fds_config)
# create a random tag
import hashlib
tag = hashlib.md5(os.urandom(32)).hexdigest()[:8]
bucket = "torcs-" + tag

# WARNING: the following two won't do, since a child process can't change
# the settings of its parent
# os.environ['XIAOMI_FDS_DEFAULT_BUCKET'] = bucket
# os.system("export XIAOMI_FDS_DEFAULT_BUCKET='%s'" % bucket)

# permanently, for the next restart
with open(os.path.expanduser("~/.bashrc"), "a") as f:
    f.write("export XIAOMI_FDS_DEFAULT_BUCKET='%s'\n" % bucket)
print 'Warning: You must restart a terminal window for this to take effect.'

# endpoint is read from ~/.config/xiaomi/config
# client = GalaxyFDSClient(XIAOMI_ACCESS_KEY_ID, XIAOMI_SECRET_ACCESS_KEY)
client = GalaxyFDSClient()

if not client.does_bucket_exist(bucket):
    try:
        print 'Create bucket ', bucket
        client.create_bucket(bucket)
    except GalaxyFDSClientException as e:
        print e.message

# no need to delete, since duplicate objects will be replaced when put
# try:
#     client.delete_object(bucket, "tf_train-1.0.tar.gz")
# except GalaxyFDSClientException as e:
#     print e.message

tensorflow_model = 'TFTrainerPredictor-1.4.tar.gz'
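The snippet breaks off right after naming the tarball, before any upload happens. Purely as a hedged sketch (not the original continuation), one way such a tarball could be pushed with the same client:

# Hypothetical upload of the tarball named above; not part of the original script.
try:
    with open(tensorflow_model, 'rb') as f:
        client.put_object(bucket, tensorflow_model, f)
except GalaxyFDSClientException as e:
    print e.message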
def main(): parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, epilog="Doc - http://docs.api.xiaomi.com/fds/") parser.add_argument('-m', '--method', nargs='?', metavar='method', const='put', type=str, dest='method', help='Method of the request. Can be one of put/get/delete/post/head (default: put)' ).completer = ChoicesCompleter(('put', 'get', 'delete', 'post', 'head')) parser.add_argument('-b', '--bucket', nargs='?', metavar='bucket', type=str, dest='bucket', help='Name of bucket to operate' ).completer = bucket_name_completer parser.add_argument('-o', '--object', nargs='?', metavar='object', type=str, dest='object', help='Name of object to operate' ) parser.add_argument('-r', '--region', nargs='?', metavar='region', type=str, dest='region', help='Can be one of cnbj0/cnbj1/cnbj2/awsbj0/awsusor0/awssgp0/awsde0 (default: cnbj0)' ) parser.add_argument('-e', '--end_point', nargs='?', metavar='end point', type=str, dest='end_point', help='can be [cnbj1.fds.api.xiaomi.com] or empty' ) parser.add_argument('-c', '--CDN', metavar='CDN', action='store_const', const=False, dest='CDN', default=False, help='If toggled, CDN is enabled' ) parser.add_argument('-p', '--presigned_url', action='store_true', dest='presigned_url', default=False, help='If toggled, generate presigned url' ) parser.add_argument('--https', metavar='https', nargs='?', dest='https', default=True, help='If toggled, https is enabled' ) parser.add_argument('--ak', nargs='?', metavar='ACCESS_KEY', dest='ak', help='Specify access key' ) parser.add_argument('--sk', nargs='?', metavar='SECRET_KEY', dest='sk', help='Specify secret key' ) parser.add_argument('-L', '--list', nargs='?', metavar='list directory', const='', type=str, dest='list_dir', help='List Bucket/Object under current user') parser.add_argument('-l', '--list_objects', nargs='?', metavar='list objects', const='', type=str, dest='list_objects', help='List Bucket/Object under current user') parser.add_argument('-d', '--data', nargs='?', metavar='data file', dest='data_file', help='file to be uploaded or stored') parser.add_argument('-D', '--directory', nargs='?', metavar='data dir', dest='data_dir', help="Directory to be uploaded or stored. 
Use '/' to download all objects under the bucket") parser.add_argument('-R', '--recursive', action='store_true', dest='recursive', default=False, help='If toggled, download the directory recursively') parser.add_argument('--offset', nargs='?', metavar='offset', type=int, const=0, default=0, dest='offset', help='offset of object to be read') parser.add_argument('--length', nargs='?', metavar='length', type=int, dest='length', const=-1, default=-1, help='length of object to be read') parser.add_argument('--metadata', nargs='?', metavar='meta data of object to be uploaded', dest='metadata', help='example: "content-type:text/json;x-xiaomi-meta-user-defined:foo"') parser.add_argument('--start', nargs='?', metavar='start mark', type=str, dest='start_mark', const=None, default=None, help='used with -l or -L option, returned object name should be *no less* than start mark in dictionary order' ) parser.add_argument('--debug', metavar='debug', action='store_const', const=True, default=False, dest='debug', help='If toggled, print debug log') parser.add_argument('--expiration', nargs='?', type=str, metavar='expiration in hour', default='1.0', dest='expiration_in_hour', help='used with --presigned_url, set expiration of presigned url generated from now on(hour), default to one hour') parser.add_argument('--force', action='store_true', dest='force_delete', default=False, help='If toggled, delete bucket and objects') parser.add_argument('--disable_trash', action='store_true', dest='disable_trash', default=False, help='If toggled, delete object without move to trash') parser.add_argument('-P', '--object_prefix', nargs='?', metavar="object's prefix", type=str, dest='object_prefix', help="object's prefix") parser.add_argument('--lifecycle', nargs='?', metavar='lifecycle config, json format', dest='lifecycle', const=True, default=None, help='''Put or get lifecycle configof the bucket. Please use \\" instead of " in this argument when putting lifecycle config due to shell may eat double quotes.''') parser.add_argument('--cors', nargs='?', metavar='cors config, json format', dest='cors', const=True, default=None, help='''Put or get cors config of the bucket. Please use \\" instead of " in this argument when putting cors config due to shell may eat double quotes.''') parser.add_argument('--lifecycle-rule', nargs='?', metavar='lifecycle rule, json format', dest='lifecycle_rule', const=True, default=None, help='''Add/update or get one rule of lifecycle config of the bucket. Please use \\" instead of " in this argument when putting lifecycle config due to shell may eat double quotes.''') parser.add_argument('--cors-rule', nargs='?', metavar='cors rule, json format', dest='cors_rule', const=True, default=None, help='''Add/update or get one rule of cors config of the bucket. 
Please use \\" instead of " in this argument when putting cors config due to shell may eat double quotes.''') parser.add_argument('--webp-quality', nargs='?', dest='webp_quality', const=-1, default=None, help='Integer indicates webp quality, -1 will disable bucket auto convert webp') parser.add_argument('--gif-extract-type', nargs='?', dest='gif_extract_type', const='unknown', default=None, help='String indicates gif extract type, unknown will disable bucket auto gif extract') parser.add_argument('--restore-archived', action='store_true', dest='restore_archived', default=False, help='''If toggled, restore archived object as standard.''') group = parser.add_argument_group('acl') group.add_argument('--gratee', nargs='+', metavar='user, group, ALL_USERS, AUTHENTICATED_USERS', dest='gratee', help='Add acl to bucket') group.add_argument('--permission', nargs='?', metavar="READ, WRITE, READ_OBJECTS, FULL_CONTROL", dest='permission', choices=['READ', 'WRITE', 'READ_OBJECTS', 'FULL_CONTROL'], help='Add acl to bucket') cp = parser.add_argument_group('cp') cp.add_argument('-srcb', '--src_bucket', nargs='?', metavar='bucket, name', dest='src_bucket_name', help='Copy object from src_bucket to dst_bucket') cp.add_argument('-srco', '--src_object', nargs='?', metavar='object, name', dest='src_object_name', help='Copy object from src_bucket to dst_bucket') cp.add_argument('-dstb', '--dst_bucket', nargs='?', metavar='bucket, name', dest='dst_bucket_name', help='Copy object from src_bucket to dst_bucket') cp.add_argument('-dsto', '--dst_object', nargs='?', metavar='object, name', dest='dst_object_name', help='Copy object from src_bucket to dst_bucket') argcomplete.autocomplete(parser) args = parser.parse_args() # set logging log_format = '%(asctime)-15s [%(filename)s:%(lineno)d] %(message)s' logging.basicConfig(format=log_format) global logger logger = logging.getLogger('fds.cmd') debug_enabled = args.debug if debug_enabled: logger.setLevel(logging.DEBUG) else: logger.setLevel(logging.INFO) ## read config parse_argument(args=args) check_region(region=region) check_bucket_name(bucket_name=bucket_name) global fds_config fds_config = FDSClientConfiguration(region_name=region, enable_https=enable_https, enable_cdn_for_download=enable_cdn, enable_cdn_for_upload=enable_cdn, threshold_size=multipart_upload_threshold_size, part_size=multipart_upload_buffer_size) global end_point if not end_point is None: fds_config.set_endpoint(end_point) global fds_client fds_client = GalaxyFDSClient(access_key=access_key, access_secret=secret_key, config=fds_config) global force_delete global disable_trash global object_prefix global recursive, webp_quality, gif_extract_type try: if presigned_url: expiration = int(1000 * (float(eval(expiration_in_hour)) * 3600 + float((datetime.utcnow() - datetime(1970, 1, 1, 0, 0, 0, 0)).total_seconds()))) meta = parse_metadata_from_str(metadata=metadata) content_type = None if meta and 'content-type' in meta.metadata: content_type = meta.metadata['content-type'] url = fds_client.generate_presigned_uri(fds_config.get_base_uri(), bucket_name=bucket_name, object_name=object_name, expiration=expiration, http_method=method.upper(), content_type=content_type) print(url) elif not (list_dir is None): if not (bucket_name is None): list_directory(bucket_name=bucket_name, object_name_prefix=list_dir, start_mark=start_mark) else: list_buckets(fds_client=fds_client, prefix=list_dir, start_mark=start_mark) elif not (list_objects is None): if not (bucket_name is None): if object_name is not None: 
list_version_ids(bucket_name=bucket_name, object_name=object_name) else: list_object(bucket_name=bucket_name, object_name_prefix=list_objects, start_mark=start_mark) else: list_buckets(fds_client=fds_client, prefix=list_objects, start_mark=start_mark) pass else: if method == 'put': if src_bucket_name and src_object_name and dst_bucket_name and dst_object_name: copy_object(src_bucket_name, src_object_name, dst_bucket_name, dst_object_name) elif object_name: if data_dir: put_directory(data_dir=data_dir, bucket_name=bucket_name, object_name_prefix=object_name, metadata=metadata) elif gratee and permission: put_object_acl(bucket_name, object_name, gratee, permission) elif restore_archived: restore_archived_object(bucket_name, object_name) else: put_object(data_file=data_file, bucket_name=bucket_name, object_name=object_name, metadata=metadata) elif gratee and permission: put_bucket_acl(bucket_name, gratee, permission) elif lifecycle: put_bucket_lifecycle_config(bucket_name, lifecycle) elif lifecycle_rule: put_bucket_lifecycle_rule(bucket_name, lifecycle_rule) elif cors: put_bucket_cors_config(bucket_name, cors) elif cors_rule: put_bucket_cors_rule(bucket_name, cors_rule) elif webp_quality: set_bucket_default_webp_quality(bucket_name=bucket_name, webp_quality=webp_quality) elif gif_extract_type: set_bucket_default_gif_extract_type(bucket_name=bucket_name, gif_extract_type=gif_extract_type) else: put_bucket(bucket_name) pass elif method == 'get': if object_prefix: download_directory(bucket_name=bucket_name, object_prefix=object_prefix, data_dir=data_dir, recursive=recursive) elif object_name: get_object(data_file=data_file, bucket_name=bucket_name, object_name=object_name, metadata=metadata, offset=offset, length=length, webp_quality=webp_quality, gif_extract_type=gif_extract_type) elif lifecycle: get_bucket_lifecycle_config(bucket_name) elif cors: get_bucket_cors_config(bucket_name) else: get_bucket_acl(bucket_name=bucket_name) pass elif method == 'post': post_object(data_file=data_file, bucket_name=bucket_name, metadata=metadata) pass elif method == 'delete': if object_name: if gratee and permission: delete_object_acl(bucket_name, object_name, gratee, permission) else: delete_object(bucket_name=bucket_name, object_name=object_name, enable_trash=not disable_trash) elif object_prefix is not None: delete_objects(bucket_name=bucket_name, object_prefix=object_prefix, enable_trash=not disable_trash) elif force_delete: delete_bucket_and_objects(bucket_name=bucket_name) else: if gratee and permission: delete_bucket_acl(bucket_name, gratee, permission) else: delete_bucket(bucket_name=bucket_name) pass elif method == 'head': if object_name: if not head_object(bucket_name=bucket_name, object_name=object_name): exit(1) else: if not head_bucket(bucket_name=bucket_name): exit(1) else: parser.print_help() except Exception as e: print(e) print("\n") ex_type, ex, tb = sys.exc_info() traceback.print_tb(tb) # sys.stderr.write(str(e)) # sys.stderr.flush() if debug_enabled: logger.debug(e, exc_info=True) exit(1)
def setUp(self):
    config = FDSClientConfiguration(region_name, False, False, False)
    self.client = GalaxyFDSClient(access_key, access_secret, config)
    self.bucket_name = "1024"
def main(): parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, epilog="Doc - http://docs.api.xiaomi.com/fds/") parser.add_argument( '-m', '--method', nargs='?', metavar='method', const='put', type=str, dest='method', help= 'Method of the request. Can be one of put/get/delete/post/head (default: put)' ).completer = ChoicesCompleter(('put', 'get', 'delete', 'post', 'head')) parser.add_argument( '-b', '--bucket', nargs='?', metavar='bucket', type=str, dest='bucket', help='Name of bucket to operate').completer = bucket_name_completer parser.add_argument('-o', '--object', nargs='?', metavar='object', type=str, dest='object', help='Name of object to operate') parser.add_argument( '-r', '--region', nargs='?', metavar='region', type=str, dest='region', help= 'Can be one of cnbj0/cnbj1/cnbj2/awsbj0/awsusor0/awssgp0/awsde0 (default: cnbj0)' ) parser.add_argument('-e', '--end_point', nargs='?', metavar='end point', type=str, dest='end_point', help='can be [cnbj1.fds.api.xiaomi.com] or empty') parser.add_argument('-c', '--CDN', metavar='CDN', action='store_const', const=False, dest='CDN', default=False, help='If toggled, CDN is enabled') parser.add_argument('-p', '--presigned_url', action='store_true', dest='presigned_url', default=False, help='If toggled, generate presigned url') parser.add_argument('--https', metavar='https', nargs='?', dest='https', default=True, help='If toggled, https is enabled') parser.add_argument('--ak', nargs='?', metavar='ACCESS_KEY', dest='ak', help='Specify access key') parser.add_argument('--sk', nargs='?', metavar='SECRET_KEY', dest='sk', help='Specify secret key') parser.add_argument('-L', '--list', nargs='?', metavar='list directory', const='', type=str, dest='list_dir', help='List Bucket/Object under current user') parser.add_argument('-l', '--list_objects', nargs='?', metavar='list objects', const='', type=str, dest='list_objects', help='List Bucket/Object under current user') parser.add_argument('-d', '--data', nargs='?', metavar='data file', dest='data_file', help='file to be uploaded or stored') parser.add_argument('-D', '--directory', nargs='?', metavar='data dir', dest='data_dir', help='directory to be uploaded or stored') parser.add_argument('--offset', nargs='?', metavar='offset', type=long, const=0, default=0, dest='offset', help='offset of object to be read') parser.add_argument('--length', nargs='?', metavar='length', type=long, dest='length', const=-1, default=-1, help='length of object to be read') parser.add_argument( '--metadata', nargs='?', metavar='meta data of object to be uploaded', dest='metadata', help='example: "content-type:text/json;x-xiaomi-meta-user-defined:foo"' ) parser.add_argument( '--start', nargs='?', metavar='start mark', type=str, dest='start_mark', const=None, default=None, help= 'used with -l or -L option, returned object name should be *no less* than start mark in dictionary order' ) parser.add_argument('--debug', metavar='debug', action='store_const', const=True, default=False, dest='debug', help='If toggled, print debug log') parser.add_argument( '--expiration', nargs='?', type=str, metavar='expiration in hour', default='1.0', dest='expiration_in_hour', help= 'used with --presigned_url, set expiration of presigned url generated from now on(hour), default to one hour' ) argcomplete.autocomplete(parser) args = parser.parse_args() # set logging log_format = '%(asctime)-15s [%(filename)s:%(lineno)d] %(message)s' logging.basicConfig(format=log_format) global logger logger = 
logging.getLogger('fds.cmd') debug_enabled = args.debug if debug_enabled: logger.setLevel(logging.DEBUG) else: logger.setLevel(logging.INFO) ## read config parse_argument(args=args) check_region(region=region) check_bucket_name(bucket_name=bucket_name) global fds_config fds_config = FDSClientConfiguration(region_name=region, enable_https=enable_https, enable_cdn_for_download=enable_cdn, enable_cdn_for_upload=enable_cdn) global end_point if not end_point is None: fds_config.set_endpoint(end_point) global fds_client fds_client = GalaxyFDSClient(access_key=access_key, access_secret=secret_key, config=fds_config) try: if presigned_url: expiration = int( 1000 * (float(eval(expiration_in_hour)) * 3600 + float( (datetime.utcnow() - datetime(1970, 1, 1, 0, 0, 0, 0)).total_seconds()))) meta = parse_metadata_from_str(metadata=metadata) content_type = None if meta and 'content-type' in meta.metadata: content_type = meta.metadata['content-type'] url = fds_client.generate_presigned_uri(fds_config.get_base_uri(), bucket_name=bucket_name, object_name=object_name, expiration=expiration, http_method=method.upper(), content_type=content_type) print url elif not (list_dir is None): if not (bucket_name is None): list_directory(bucket_name=bucket_name, object_name_prefix=list_dir, start_mark=start_mark) else: list_buckets(fds_client=fds_client, prefix=list_dir, start_mark=start_mark) elif not (list_objects is None): if not (bucket_name is None): list_object(bucket_name=bucket_name, object_name_prefix=list_objects, start_mark=start_mark) else: list_buckets(fds_client=fds_client, prefix=list_objects, start_mark=start_mark) pass else: if method == 'put': if object_name: if data_dir: put_directory(data_dir=data_dir, bucket_name=bucket_name, object_name_prefix=object_name, metadata=metadata) else: put_object(data_file=data_file, bucket_name=bucket_name, object_name=object_name, metadata=metadata) else: put_bucket(bucket_name) pass elif method == 'get': if object_name: get_object(data_file=data_file, bucket_name=bucket_name, object_name=object_name, metadata=metadata, offset=offset, length=length) else: get_bucket_acl(bucket_name=bucket_name) pass elif method == 'post': post_object(data_file=data_file, bucket_name=bucket_name, metadata=metadata) pass elif method == 'delete': if object_name: delete_object(bucket_name=bucket_name, object_name=object_name) else: delete_bucket(bucket_name=bucket_name) pass elif method == 'head': if object_name: if not head_object(bucket_name=bucket_name, object_name=object_name): exit(1) else: if not head_bucket(bucket_name=bucket_name): exit(1) else: parser.print_help() except Exception as e: sys.stderr.write(e.message) sys.stderr.flush() if debug_enabled: logger.debug(e, exc_info=True) exit(1)
def main(): parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, epilog="Doc - http://docs.api.xiaomi.com/fds/") parser.add_argument( '-m', '--method', nargs='?', metavar='method', const='put', type=str, dest='method', help= 'Method of the request. Can be one of put/get/delete/post/head (default: put)' ).completer = ChoicesCompleter(('put', 'get', 'delete', 'post', 'head')) parser.add_argument( '-b', '--bucket', nargs='?', metavar='bucket', type=str, dest='bucket', help='Name of bucket to operate').completer = bucket_name_completer parser.add_argument('-o', '--object', nargs='?', metavar='object', type=str, dest='object', help='Name of object to operate') parser.add_argument( '-r', '--region', nargs='?', metavar='region', type=str, dest='region', help= 'Can be one of cnbj0/cnbj1/cnbj2/awsbj0/awsusor0/awssgp0/awsde0 (default: cnbj0)' ) parser.add_argument('-e', '--end_point', nargs='?', metavar='end point', type=str, dest='end_point', help='can be [cnbj1.fds.api.xiaomi.com] or empty') parser.add_argument('-c', '--CDN', metavar='CDN', action='store_const', const=False, dest='CDN', default=False, help='If toggled, CDN is enabled') parser.add_argument('-p', '--presigned_url', action='store_true', dest='presigned_url', default=False, help='If toggled, generate presigned url') parser.add_argument('--https', metavar='https', nargs='?', dest='https', default=True, help='If toggled, https is enabled') parser.add_argument('--ak', nargs='?', metavar='ACCESS_KEY', dest='ak', help='Specify access key') parser.add_argument('--sk', nargs='?', metavar='SECRET_KEY', dest='sk', help='Specify secret key') parser.add_argument('-L', '--list', nargs='?', metavar='list directory', const='', type=str, dest='list_dir', help='List Bucket/Object under current user') parser.add_argument('-l', '--list_objects', nargs='?', metavar='list objects', const='', type=str, dest='list_objects', help='List Bucket/Object under current user') parser.add_argument('-d', '--data', nargs='?', metavar='data file', dest='data_file', help='file to be uploaded or stored') parser.add_argument( '-D', '--directory', nargs='?', metavar='data dir', dest='data_dir', help= "Directory to be uploaded or stored. 
Use '/' to download all objects under the bucket" ) parser.add_argument('-R', '--recursive', action='store_true', dest='recursive', default=False, help='If toggled, download the directory recursively') parser.add_argument('--offset', nargs='?', metavar='offset', type=int, const=0, default=0, dest='offset', help='offset of object to be read') parser.add_argument('--length', nargs='?', metavar='length', type=int, dest='length', const=-1, default=-1, help='length of object to be read') parser.add_argument( '--metadata', nargs='?', metavar='meta data of object to be uploaded', dest='metadata', help='example: "content-type:text/json;x-xiaomi-meta-user-defined:foo"' ) parser.add_argument( '--start', nargs='?', metavar='start mark', type=str, dest='start_mark', const=None, default=None, help= 'used with -l or -L option, returned object name should be *no less* than start mark in dictionary order' ) parser.add_argument('--debug', metavar='debug', action='store_const', const=True, default=False, dest='debug', help='If toggled, print debug log') parser.add_argument( '--expiration', nargs='?', type=str, metavar='expiration in hour', default='1.0', dest='expiration_in_hour', help= 'used with --presigned_url, set expiration of presigned url generated from now on(hour), default to one hour' ) parser.add_argument('--force', action='store_true', dest='force_delete', default=False, help='If toggled, delete bucket and objects') parser.add_argument('--disable_trash', action='store_true', dest='disable_trash', default=False, help='If toggled, delete object without move to trash') parser.add_argument('-P', '--object_prefix', nargs='?', metavar="object's prefix", type=str, dest='object_prefix', help="object's prefix") parser.add_argument( '--lifecycle', nargs='?', metavar='lifecycle config, json format', dest='lifecycle', const=True, default=None, help= '''Put or get lifecycle configof the bucket. Please use \\" instead of " in this argument when putting lifecycle config due to shell may eat double quotes.''' ) parser.add_argument( '--lifecycle-rule', nargs='?', metavar='lifecycle rule, json format', dest='lifecycle_rule', const=True, default=None, help= '''Add/update or get one rule of lifecycle config of the bucket. 
Please use \\" instead of " in this argument when putting lifecycle config due to shell may eat double quotes.''' ) parser.add_argument( '--webp-quality', nargs='?', dest='webp_quality', const=-1, default=None, help= 'Integer indicates webp quality, -1 will disable bucket auto convert webp' ) parser.add_argument( '--restore-archived', action='store_true', dest='restore_archived', default=False, help='''If toggled, restore archived object as standard.''') group = parser.add_argument_group('acl') group.add_argument('--gratee', nargs='+', metavar='user, group, ALL_USERS, AUTHENTICATED_USERS', dest='gratee', help='Add acl to bucket') group.add_argument( '--permission', nargs='?', metavar="READ, WRITE, READ_OBJECTS, FULL_CONTROL", dest='permission', choices=['READ', 'WRITE', 'READ_OBJECTS', 'FULL_CONTROL'], help='Add acl to bucket') cp = parser.add_argument_group('cp') cp.add_argument('-srcb', '--src_bucket', nargs='?', metavar='bucket, name', dest='src_bucket_name', help='Copy object from src_bucket to dst_bucket') cp.add_argument('-srco', '--src_object', nargs='?', metavar='object, name', dest='src_object_name', help='Copy object from src_bucket to dst_bucket') cp.add_argument('-dstb', '--dst_bucket', nargs='?', metavar='bucket, name', dest='dst_bucket_name', help='Copy object from src_bucket to dst_bucket') cp.add_argument('-dsto', '--dst_object', nargs='?', metavar='object, name', dest='dst_object_name', help='Copy object from src_bucket to dst_bucket') argcomplete.autocomplete(parser) args = parser.parse_args() # set logging log_format = '%(asctime)-15s [%(filename)s:%(lineno)d] %(message)s' logging.basicConfig(format=log_format) global logger logger = logging.getLogger('fds.cmd') debug_enabled = args.debug if debug_enabled: logger.setLevel(logging.DEBUG) else: logger.setLevel(logging.INFO) ## read config parse_argument(args=args) check_region(region=region) check_bucket_name(bucket_name=bucket_name) global fds_config fds_config = FDSClientConfiguration( region_name=region, enable_https=enable_https, enable_cdn_for_download=enable_cdn, enable_cdn_for_upload=enable_cdn, threshold_size=multipart_upload_threshold_size, part_size=multipart_upload_buffer_size) global end_point if not end_point is None: fds_config.set_endpoint(end_point) global fds_client fds_client = GalaxyFDSClient(access_key=access_key, access_secret=secret_key, config=fds_config) global force_delete global disable_trash global object_prefix global recursive, webp_quality try: if presigned_url: expiration = int( 1000 * (float(eval(expiration_in_hour)) * 3600 + float( (datetime.utcnow() - datetime(1970, 1, 1, 0, 0, 0, 0)).total_seconds()))) meta = parse_metadata_from_str(metadata=metadata) content_type = None if meta and 'content-type' in meta.metadata: content_type = meta.metadata['content-type'] url = fds_client.generate_presigned_uri(fds_config.get_base_uri(), bucket_name=bucket_name, object_name=object_name, expiration=expiration, http_method=method.upper(), content_type=content_type) print(url) elif not (list_dir is None): if not (bucket_name is None): list_directory(bucket_name=bucket_name, object_name_prefix=list_dir, start_mark=start_mark) else: list_buckets(fds_client=fds_client, prefix=list_dir, start_mark=start_mark) elif not (list_objects is None): if not (bucket_name is None): if object_name is not None: list_version_ids(bucket_name=bucket_name, object_name=object_name) else: list_object(bucket_name=bucket_name, object_name_prefix=list_objects, start_mark=start_mark) else: list_buckets(fds_client=fds_client, 
prefix=list_objects, start_mark=start_mark) pass else: if method == 'put': if src_bucket_name and src_object_name and dst_bucket_name and dst_object_name: copy_object(src_bucket_name, src_object_name, dst_bucket_name, dst_object_name) elif object_name: if data_dir: put_directory(data_dir=data_dir, bucket_name=bucket_name, object_name_prefix=object_name, metadata=metadata) elif gratee and permission: put_object_acl(bucket_name, object_name, gratee, permission) elif restore_archived: restore_archived_object(bucket_name, object_name) else: put_object(data_file=data_file, bucket_name=bucket_name, object_name=object_name, metadata=metadata) elif gratee and permission: put_bucket_acl(bucket_name, gratee, permission) elif lifecycle: put_bucket_lifecycle_config(bucket_name, lifecycle) elif lifecycle_rule: put_bucket_lifecycle_rule(bucket_name, lifecycle_rule) elif webp_quality: set_bucket_default_webp_quality(bucket_name=bucket_name, webp_quality=webp_quality) else: put_bucket(bucket_name) pass elif method == 'get': if object_prefix: download_directory(bucket_name=bucket_name, object_prefix=object_prefix, data_dir=data_dir, recursive=recursive) elif object_name: get_object(data_file=data_file, bucket_name=bucket_name, object_name=object_name, metadata=metadata, offset=offset, length=length, webp_quality=webp_quality) elif lifecycle: get_bucket_lifecycle_config(bucket_name) else: get_bucket_acl(bucket_name=bucket_name) pass elif method == 'post': post_object(data_file=data_file, bucket_name=bucket_name, metadata=metadata) pass elif method == 'delete': if object_name: delete_object(bucket_name=bucket_name, object_name=object_name, enable_trash=not disable_trash) elif object_prefix is not None: delete_objects(bucket_name=bucket_name, object_prefix=object_prefix, enable_trash=not disable_trash) elif force_delete: delete_bucket_and_objects(bucket_name=bucket_name) else: delete_bucket(bucket_name=bucket_name) pass elif method == 'head': if object_name: if not head_object(bucket_name=bucket_name, object_name=object_name): exit(1) else: if not head_bucket(bucket_name=bucket_name): exit(1) else: parser.print_help() except Exception as e: print(e) print("\n") ex_type, ex, tb = sys.exc_info() traceback.print_tb(tb) # sys.stderr.write(str(e)) # sys.stderr.flush() if debug_enabled: logger.debug(e, exc_info=True) exit(1)
class FDSCli(object): """ Advanced fds cli you deserved! """ def __init__(self, ak=None, sk=None, endpoint=None): self._fds_prefix = r'fds://' self._fds_prefix_len = len(self._fds_prefix) self._local_config = LocalConfig() self._ak = self._local_config.ak if ak is None else ak if self._ak is None: self.config() self._sk = self._local_config.sk if sk is None else sk if self._sk is None: self.config() self._endpoint = self._local_config.endpoint if endpoint is None else endpoint if self._sk is None: self.config() logger.debug("endpoint: " + self._endpoint) self._fds_config = FDSClientConfiguration( region_name="awsde0", enable_https=False, enable_cdn_for_download=False, enable_cdn_for_upload=False, endpoint=self._endpoint) self._fds = GalaxyFDSClient(access_key=self._ak, access_secret=self._sk, config=self._fds_config) def config(self): """ config command configures ak sk and endpoint :return: """ default_ak = self._local_config.ak default_sk = self._local_config.sk default_endpoint = self._local_config.endpoint ak = input("enter access key id[default: %s]: " % default_ak) if ak == '': ak = default_ak sk = input("enter secret access key[default: %s]: " % default_sk) if sk == '': sk = default_sk endpoint = input("enter endpoint[default: %s]: " % default_endpoint) if endpoint == '': endpoint = default_endpoint self._local_config.ak = ak self._local_config.sk = sk self._local_config.endpoint = endpoint def mb(self, fds_url): """ create(make) a bucket :param fds_url: fds url format like fds://bucket_name_to_make """ url = FDSURL(fds_url) if not url.is_bucket_url(): CLIPrinter.wrong_format() bucket_name = url.bucket_name() try: self._fds.create_bucket(bucket_name) except GalaxyFDSClientException as e: CLIPrinter.fail(e.message) return CLIPrinter.done("create bucket [%s]" % bucket_name) def rb(self, fds_url, force=False): """ delete(remove) a bucket :param fds_url: fds url format like fds://bucket_name_to_delete :param force: remove a bucket even if this bucket is not empty """ url = FDSURL(fds_url) if not url.is_bucket_url(): CLIPrinter.wrong_format() return bucket_name = url.bucket_name() if force: all_objects = self._fds.list_all_objects(bucket_name, '', '') names = [] try: for o in all_objects: names.append(o.object_name) except GalaxyFDSClientException as e: CLIPrinter.warn(e.message) try: self._fds.delete_objects(bucket_name, names) except GalaxyFDSClientException as e: CLIPrinter.fail(e.message) return try: self._fds.delete_bucket(bucket_name) except GalaxyFDSClientException as e: CLIPrinter.fail(e.message) return CLIPrinter.done("remove bucket [%s]" % bucket_name) def rm(self, fds_url): """ delete(remove) a object :param fds_url: fds url format like fds://bucket_name/object_name_to_delete """ url = FDSURL(fds_url) bucket_name = url.bucket_name() if url.is_bucket_url(): CLIPrinter.fail("please enter a object resource address to remove") return object_name = url.object_name() try: self._fds.delete_object(bucket_name, object_name) except GalaxyFDSClientException as e: CLIPrinter.fail(e.message) return CLIPrinter.done("remove object: [%s] in bucket [%s]" % (object_name, bucket_name)) def ls(self, fds_url=None, recursive=False, human='k'): """ list all buckets or objects in a bucket :param fds_url: fds url format like fds://bucket :param recursive: recursive listing """ if human != 'k' and human != 'm' and human != 'g': CLIPrinter.fail("human should be in 'k|m|g'") return # bucket_url is None means listing all bucket name if fds_url is None: buckets = None try: buckets = 
self._fds.list_authorized_buckets() except GalaxyFDSClientException as e: CLIPrinter.fail(e.message) return for bucket in buckets: CLIPrinter.print_bucket(bucket) else: delimiter = "/" if recursive: delimiter = "" url = FDSURL(fds_url) bucket_name = url.bucket_name() prefix = '' if url.is_bucket_url() or url.is_object_dir(): if url.is_object_dir(): prefix = url.object_dir() results = self._fds.list_all_objects(bucket_name, prefix, delimiter) try: for result in results: metadata = self._fds.get_object_metadata( bucket_name, result.object_name) CLIPrinter.print_object(result.object_name, metadata, human) except GalaxyFDSClientException as e: CLIPrinter.fail(e.message) else: object_name = url.object_name() metadata = None try: metadata = self._fds.get_object_metadata( bucket_name, object_name) except GalaxyFDSClientException as e: CLIPrinter.fail(e.message) return if metadata is not None: CLIPrinter.print_object(object_name, metadata, human) def presigned(self, fds_url, expires=1, cdn=False): """ presigned command generates presigned url for download project :param fds_url: format url like fds://bucket/a.txt :param expires: expiration time in minutes :return: presigned url for downloading """ url = FDSURL(fds_url) bucket_name = url.bucket_name() if url.is_bucket_url(): CLIPrinter.fail("%uri is illegal" % fds_url) object_name = url.object_name() expiration = int(1000 * (float(expires) * 3600 + float( (datetime.utcnow() - datetime(1970, 1, 1, 0, 0, 0, 0)).total_seconds()))) try: if cdn: base_uri = self._fds_config.get_cdn_base_uri() else: base_uri = self._fds_config.get_base_uri() u = self._fds.generate_presigned_uri(base_uri, bucket_name, object_name, expiration) except GalaxyFDSClientException as e: CLIPrinter.fail(e.message) return CLIPrinter.done('generated presigned url: ' + u) def ttl(self, fds_url): """ ttl command shows the lifecycle information of a bucket or a object :param fds_url: format url like fds://bucket/a.txt or fds://bucket/ """ url = FDSURL(fds_url) bucket_name = url.bucket_name() try: ttl = self._fds.get_lifecycle_config(bucket_name) except GalaxyFDSClientException as e: CLIPrinter.fail(e.message) return if url.is_bucket_url(): CLIPrinter.print_lifecycle(ttl) elif url.is_object_url(): if not self._fds.does_object_exists(bucket_name, url.object_name()): CLIPrinter.fail("object does not exists") if url.is_object_dir(): prefix = url.object_dir() else: prefix = url.object_name() rules = [rule for rule in ttl['rules'] if rule['prefix'] in prefix] CLIPrinter.print_lifecycle({"rules": rules}) else: CLIPrinter.wrong_format() def cp(self, src, dst, recursive=False, autodetect_mimetype=False): """ cp command do lots of things. 1. file upload 2. batch files upload 3. file download 4. batch files download 5. rename object 6. object copy 7. 
batch objects copy :param src: source fds url format like fds://bucket :param dst: target fds url format like fds://bucket :param recursive: recursive listing """ if FDSURL.is_fds_url(src) and FDSURL.is_fds_url(dst): src_url = FDSURL(src) dst_url = FDSURL(dst) if src_url.is_object_url(): self._cp(src_url, dst_url) elif not src_url.is_object_url() and not dst_url.is_object_url(): self._cp_batch(src_url, dst_url, recursive) else: CLIPrinter.wrong_format() elif FDSURL.is_fds_url(src) and not FDSURL.is_fds_url(dst): src_url = FDSURL(src) if src_url.is_object_url(): self._download(src_url, dst) elif src_url.is_object_dir() and os.path.isdir(dst): self._download_batch(src_url, dst, recursive) else: CLIPrinter.wrong_format() elif not FDSURL.is_fds_url(src) and FDSURL.is_fds_url(dst): dst_url = FDSURL(dst) if os.path.isfile(src): self._upload(src, dst_url, autodetect_mimetype=autodetect_mimetype) elif os.path.isdir(src) and not dst_url.is_object_url(): self._upload_batch(src, dst_url, recursive, autodetect_mimetype=autodetect_mimetype) else: CLIPrinter.wrong_format() else: CLIPrinter.fail("don't support copy file from local to local") def _cp(self, src_url, dst_url): src_bucket_name = src_url.bucket_name() src_object_name = src_url.object_name() dst_bucket_name = dst_url.bucket_name() if dst_url.is_object_url(): dst_object_name = dst_url.object_name() else: dst_object_name = src_object_name try: self._fds.copy_object(src_bucket_name, src_object_name, dst_bucket_name, dst_object_name) except GalaxyFDSClientException as e: CLIPrinter.fail(e.message) CLIPrinter.done("copy %s/%s to %s/%s" % (src_bucket_name, src_object_name, dst_bucket_name, dst_object_name)) def _cp_batch(self, src_url, dst_url, recursive): src_bucket_name = src_url.bucket_name() dst_bucket_name = dst_url.bucket_name() prefix = "" if src_url.is_object_dir(): prefix = src_url.object_dir() delimiter = "/" if recursive: delimiter = "" all_objects = self._fds.list_all_objects(bucket_name=src_bucket_name, prefix=prefix, delimiter=delimiter) try: for o in all_objects: o_name = o.object_name self._fds.copy_object(src_bucket_name, o_name, dst_bucket_name, o_name) CLIPrinter.done( "copy %s/%s to %s/%s" % (src_bucket_name, o_name, dst_bucket_name, o_name)) except GalaxyFDSClientException as e: CLIPrinter.fail(e.message) def _download(self, src_url, dst): src_bucket_name = src_url.bucket_name() src_object_name = src_url.object_name() if os.path.isdir(dst): if dst == '.' 
or dst == '..': dst_name = src_url.file_name() elif dst.endswith('/'): dst_name = dst + src_url.file_name() else: dst_name = dst + '/' + src_object_name.split('/')[-1] else: dst_name = dst mtime = None if os.path.isfile(dst_name): local_md5 = file_md5(dst_name) remote_md5 = self._fds.get_object_metadata( src_bucket_name, src_object_name).metadata.get(Common.CONTENT_MD5) if remote_md5 is not None and local_md5 == remote_md5: CLIPrinter.done( "download %s/%s to local(skip because of same md5)" % (src_bucket_name, src_object_name)) return mtime = os.path.getmtime(dst_name) try: fds_object = self._fds.get_object(bucket_name=src_bucket_name, object_name=src_object_name, stream=True) except GalaxyFDSClientException as e: CLIPrinter.fail(e.message) return lm = fds_object.metadata.metadata['last-modified'] remote_modified = rfc822_timestamp(lm) # if last-modified of local file is not less last-modified of remote file, skip if mtime is not None and datetime.fromtimestamp( mtime) >= remote_modified: CLIPrinter.done( "download %s/%s to local(skip because of updated)" % (src_bucket_name, src_object_name)) return length_left = IS_PY3 and sys.maxsize or sys.maxint try: with open(dst_name, 'wb') as f: for chunk in fds_object.stream: length = min(length_left, len(chunk)) f.write(chunk[0:length]) length_left -= length if length_left <= 0: break except Exception as exception: print(exception) finally: fds_object.stream.close() CLIPrinter.done("download %s/%s to local" % (src_bucket_name, src_object_name)) def _download_batch(self, src_url, dst, recursive): src_bucket_name = src_url.bucket_name() prefix = "" if src_url.is_object_dir(): prefix = src_url.object_dir() delimiter = "/" if recursive: delimiter = "" all_objects = self._fds.list_all_objects(bucket_name=src_bucket_name, prefix=prefix, delimiter=delimiter) try: for o in all_objects: o_name = o.object_name url = FDSURL(fds_prefix + src_bucket_name + "/" + o_name) if url.is_object_url(): self._download(url, dst) except GalaxyFDSClientException as e: CLIPrinter.fail(e.message) def _upload(self, filename, dst_url, autodetect_mimetype, sync=False): if not os.path.exists(filename): CLIPrinter.warn("{} is a bad file".format(filename)) return dst_bucket_name = dst_url.bucket_name() if dst_url.is_object_url(): dst_object_name = dst_url.object_name() elif sync: dst_object_name = filename[2:] elif dst_url.is_object_dir(): dst_object_name = dst_url.object_dir() + os.path.basename(filename) else: dst_object_name = os.path.basename(filename) try: if self._fds.does_object_exists(dst_bucket_name, dst_object_name): # check md5 firstly metadata = self._fds.get_object_metadata( dst_bucket_name, dst_object_name) if metadata.metadata.get(Common.CONTENT_MD5) is not None: local_md5 = file_md5(filename) if local_md5 == metadata.metadata.get(Common.CONTENT_MD5): CLIPrinter.done( 'upload object %s/%s(skip because of same md5)' % (dst_bucket_name, dst_object_name)) return # check last-modified mtime = None if os.path.isfile(filename): mtime = os.path.getmtime(filename) lm = metadata.metadata[Common.LAST_MODIFIED] remote_modified = rfc822_timestamp(lm) # if last-modified of local file is not less last-modified of remote file, skip if mtime is not None and datetime.fromtimestamp( mtime) <= remote_modified: CLIPrinter.done( 'upload object %s/%s(skip because of updated)' % (dst_bucket_name, dst_object_name)) return except Exception as e: CLIPrinter.fail(e.message) return mimetype = None if autodetect_mimetype: mimetype = mimetypes.guess_type(filename)[0] metadata = 
FDSObjectMetadata() if mimetype is not None: metadata.add_header(Common.CONTENT_TYPE, mimetype) result = None with open(filename, "rb") as f: file_length = os.path.getsize(filename) if file_length < multipart_upload_buffer_size: try: result = self._fds.put_object(dst_bucket_name, dst_object_name, f, metadata=metadata) except GalaxyFDSClientException as e: CLIPrinter.fail(e.message) else: try: upload_token = self._fds.init_multipart_upload( dst_bucket_name, dst_object_name) part_number = 1 result_list = [] while True: data = f.read(multipart_upload_buffer_size) if len(data) <= 0: break for i in range(max_upload_retry_time): upload_result = None try: upload_result = self._fds.upload_part( dst_bucket_name, dst_object_name, upload_token.upload_id, part_number, data) result_list.append(upload_result) break except GalaxyFDSClientException as e: sleep_seconds = (i + 1) * 10 CLIPrinter.warn( "upload part %d failed, retry after %d seconds" % (part_number, sleep_seconds)) time.sleep(sleep_seconds) part_number = part_number + 1 upload_part_result = UploadPartResultList( {"uploadPartResultList": result_list}) result = self._fds.complete_multipart_upload( upload_token.bucket_name, upload_token.object_name, upload_token.upload_id, metadata, json.dumps(upload_part_result)) except Exception as e: self._fds.abort_multipart_upload(dst_bucket_name, dst_object_name, upload_token.upload_id) CLIPrinter.fail(e.message) if result is not None: CLIPrinter.done('upload object %s/%s' % (dst_bucket_name, dst_object_name)) else: CLIPrinter.fail('upload object %s/%s' % (dst_bucket_name, dst_object_name)) def _upload_batch(self, d, dst_url, recursive, autodetect_mimetype, sync=False): for root, dirs, files in os.walk(d): relative_dir = os.path.relpath(root, d) if relative_dir != '.' and relative_dir != '..' and relative_dir.startswith( '.'): CLIPrinter.warn('skipping hidden dir ' + relative_dir) continue for filename in files: object_name = os.path.join(root, filename) object_name = '/'.join(object_name.split('\\')) self._upload(object_name, dst_url, autodetect_mimetype, sync) if not recursive: break def sync(self, src, dst, autodetect_mimetype=False): """ sync command syncs between (local directory and fds) (fds and local directory) (fds and fds) :param src: src can be a fds bucket url like fds://bucketname or '.' :param dst: src can be a fds bucket url like fds://bucketname or '.' :param delete: todo delete target file if source file is deleted :param exclude: todo :param include: todo """ if FDSURL.is_fds_url(src) and not FDSURL.is_fds_url(dst): src_url = FDSURL(src) if not src_url.is_bucket_url() or not dst.strip() == '.': CLIPrinter.wrong_format() src_bucket_name = src_url.bucket_name() all_objects = self._fds.list_all_objects( bucket_name=src_bucket_name, prefix='', delimiter='') try: for o in all_objects: o_name = o.object_name url = FDSURL(fds_prefix + src_bucket_name + '/' + o_name) if '/' not in o_name: self._download(url, dst) elif url.is_object_url(): o_file_name = o_name.split('/')[-1] o_dir = o_name.split(o_file_name)[0] mkdirs(o_dir) self._download(url, o_dir) except GalaxyFDSClientException as e: CLIPrinter.fail(e.message) elif not FDSURL.is_fds_url(src) and FDSURL.is_fds_url(dst): dst_url = FDSURL(dst) if not src.strip() == '.' 
or not dst_url.is_bucket_url(): CLIPrinter.wrong_format() self._upload_batch(src, dst_url, True, autodetect_mimetype, True) elif FDSURL.is_fds_url(src) and FDSURL.is_fds_url(dst): self.cp(src, dst) else: CLIPrinter.wrong_format() def make_public(self, url): if not FDSURL.is_fds_url(url): CLIPrinter.wrong_format() return url = FDSURL(url) if url.is_object_url(): try: self._fds.set_public(url.bucket_name(), url.object_name()) except GalaxyFDSClientException as e: CLIPrinter.fail(e.message) return elif url.is_bucket_url(): try: acl = AccessControlList() grant = Grant(Grantee(UserGroups.ALL_USERS), Permission.READ) grant.type = GrantType.GROUP acl.add_grant(grant) self._fds.set_bucket_acl(url.bucket_name(), acl) except GalaxyFDSClientException as e: CLIPrinter.fail(e.message) return else: CLIPrinter.wrong_format() return def make_outside(self, url, close=False): if not FDSURL.is_fds_url(url): CLIPrinter.wrong_format() return url = FDSURL(url) if url.is_bucket_url(): try: if close: self._fds.set_bucket_outside_access( url.bucket_name(), False) else: self._fds.set_bucket_outside_access( url.bucket_name(), True) except GalaxyFDSClientException as e: CLIPrinter.fail(e.message) return elif url.is_object_url(): try: if close: self._fds.set_object_outside_access( url.bucket_name(), url.object_name(), False) else: self._fds.set_object_outside_access( url.bucket_name(), url.object_name(), True) except GalaxyFDSClientException as e: CLIPrinter.fail(e.message) return else: CLIPrinter.wrong_format() return def info(self): print("Access Key ID: {}".format(self._local_config.ak)) print("Access Secret Key: {}".format(self._local_config.sk)) print("Endpoint: {}".format(self._local_config.endpoint))
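The FDSCli class above is only the command implementation; its launcher is not shown. One plausible entry point (an assumption, not the original code) maps the public methods to subcommands with the fire package, which also exposes the __init__ parameters (ak, sk, endpoint) as flags.

# Hypothetical launcher for FDSCli; the real project may wire it up differently.
import fire

if __name__ == '__main__':
    fire.Fire(FDSCli)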
import glob
import os
import sys

from fds import GalaxyFDSClient, GalaxyFDSClientException
from fds.model.fds_object_metadata import FDSObjectMetadata

# endpoint is auto-read from ~/.config/xiaomi/config
client = GalaxyFDSClient()
bucket = os.environ.get('XIAOMI_FDS_DEFAULT_BUCKET') or 'johndoe'

# LOG_DIR = '/home/mi/Documents/github/changbinglin/aicontest/logs'
DIRS = [
    'camera_data/raw_train', 'camera_data/raw_val', 'keras_model',
    'tensorflow_model'
]
if len(sys.argv) > 1:
    LOG_DIR = sys.argv[1]

metadata = FDSObjectMetadata()
metadata.add_header('x-xiaomi-meta-mode', '33188')  # give rights: rw-r--r--

try:
    for directory in DIRS:
        for log in glob.glob(directory + '/*'):
            if os.path.isfile(log):
                print log.split('/')[-1]
                if not client.does_object_exists(bucket, log):
                    with open(log, 'r') as f:
                        data = f.read()
                        # path_to = '/'.join(LOG_DIR.split('/')[-3:])
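The script above is cut off inside the try block, before the upload itself. As an assumption-labeled sketch (not the original continuation), one way to finish an upload with the prepared metadata, using only calls that appear elsewhere in these snippets:

# Hypothetical upload step; not part of the original, which breaks off above.
def upload_file(client, bucket, path, metadata):
    with open(path, 'rb') as f:
        client.put_object(bucket, path, f, metadata=metadata)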
def main():
    parser = argparse.ArgumentParser(
        description="FDS command-line tool",
        epilog="Doc - http://docs.api.xiaomi.com/fds/")
    parser.add_argument('-m', '--method', nargs='?', metavar='method', const='put', type=str, dest='method',
                        help='Method of the request. Can be one of put/get/delete/post/head (default: put)'
                        ).completer = ChoicesCompleter(('put', 'get', 'delete', 'post', 'head'))
    parser.add_argument('-b', '--bucket', nargs='?', metavar='bucket', type=str, dest='bucket',
                        help='Name of the bucket to operate on'
                        ).completer = bucket_name_completer
    parser.add_argument('-o', '--object', nargs='?', metavar='object', type=str, dest='object',
                        help='Name of the object to operate on')
    parser.add_argument('-r', '--region', nargs='?', metavar='region', type=str, dest='region',
                        help='Can be one of cnbj0/cnbj1/cnbj2/awsbj0/awsusor0/awssgp0/awsde0 (default: cnbj0)')
    parser.add_argument('-e', '--end_point', nargs='?', metavar='end point', type=str, dest='end_point',
                        help='Can be e.g. cnbj1.fds.api.xiaomi.com, or empty')
    parser.add_argument('-c', '--CDN', action='store_const', const=True, dest='CDN', default=False,
                        help='If toggled, CDN is enabled')
    parser.add_argument('--https', metavar='https', nargs='?', dest='https', default=True,
                        help='Whether https is enabled (default: enabled)')
    parser.add_argument('--ak', nargs='?', metavar='ACCESS_KEY', dest='ak',
                        help='Specify access key')
    parser.add_argument('--sk', nargs='?', metavar='SECRET_KEY', dest='sk',
                        help='Specify secret key')
    parser.add_argument('-L', '--list', nargs='?', metavar='list directory', const='', type=str, dest='list_dir',
                        help='List directories under a bucket, or buckets under the current user')
    parser.add_argument('-l', '--list_objects', nargs='?', metavar='list objects', const='', type=str, dest='list_objects',
                        help='List objects under a bucket, or buckets under the current user')
    parser.add_argument('-d', '--data', nargs='?', metavar='data file', dest='data_file',
                        help='File to be uploaded, or path where a downloaded object is stored')
    parser.add_argument('--offset', nargs='?', metavar='offset', type=int, const=0, default=0, dest='offset',
                        help='Offset of the object to be read')
    parser.add_argument('--length', nargs='?', metavar='length', type=int, dest='length', const=-1, default=-1,
                        help='Length of the object to be read')
    parser.add_argument('--metadata', nargs='?', metavar='meta data of object to be uploaded', dest='metadata',
                        help='Example: "content-type:text/json;x-xiaomi-meta-user-defined:foo"')
    parser.add_argument('--start', nargs='?', metavar='start mark', type=str, dest='start_mark', const=None, default=None,
                        help='Used with the -l or -L option; returned object names are *no less* than the start mark in dictionary order')
    parser.add_argument('--debug', action='store_const', const=True, default=False, dest='debug',
                        help='If toggled, print debug log')
    argcomplete.autocomplete(parser)
    args = parser.parse_args()

    # set up logging
    log_format = '%(asctime)-15s %(message)s'
    logging.basicConfig(format=log_format)
    global logger
    logger = logging.getLogger('fds.cmd')
    debug_enabled = args.debug
    if debug_enabled:
        logger.setLevel(logging.DEBUG)
    else:
        logger.setLevel(logging.INFO)

    # read config (parse_argument/check_region/check_bucket_name are module-level helpers)
    parse_argument(args=args)
    check_region(region=region)
    check_bucket_name(bucket_name=bucket_name)

    global fds_config
    fds_config = FDSClientConfiguration(region_name=region,
                                        enable_https=enable_https,
                                        enable_cdn_for_download=enable_cdn,
                                        enable_cdn_for_upload=enable_cdn)
    global end_point
    if end_point is not None:
        fds_config.set_endpoint(end_point)
    global fds_client
    fds_client = GalaxyFDSClient(access_key=access_key,
                                 access_secret=secret_key,
                                 config=fds_config)

    try:
        if list_dir is not None:
            if bucket_name is not None:
                list_directory(bucket_name=bucket_name, object_name_prefix=list_dir, start_mark=start_mark)
            else:
                list_buckets(fds_client=fds_client, prefix=list_dir, start_mark=start_mark)
        elif list_objects is not None:
            if bucket_name is not None:
                list_object(bucket_name=bucket_name, object_name_prefix=list_objects, start_mark=start_mark)
            else:
                list_buckets(fds_client=fds_client, prefix=list_objects, start_mark=start_mark)
        else:
            if method == 'put':
                if object_name:
                    put_object(data_file=data_file, bucket_name=bucket_name, object_name=object_name, metadata=metadata)
                else:
                    put_bucket(bucket_name)
            elif method == 'get':
                if object_name:
                    get_object(data_file=data_file, bucket_name=bucket_name, object_name=object_name,
                               metadata=metadata, offset=offset, length=length)
                else:
                    get_bucket_acl(bucket_name=bucket_name)
            elif method == 'post':
                post_object(data_file=data_file, bucket_name=bucket_name, metadata=metadata)
            elif method == 'delete':
                if object_name:
                    delete_object(bucket_name=bucket_name, object_name=object_name)
                else:
                    delete_bucket(bucket_name=bucket_name)
            elif method == 'head':
                if object_name:
                    if not head_object(bucket_name=bucket_name, object_name=object_name):
                        exit(1)
                else:
                    if not head_bucket(bucket_name=bucket_name):
                        exit(1)
            else:
                parser.print_help()
                print("Config:")
                print("put the following json into ~/.config/fds/client.config")
                print("{")
                print("  \"ak\":\"ACCESS_KEY\",")
                print("  \"sk\":\"SECRET_KEY\",")
                print("  \"region\":\"REGION\",")
                print("  \"end_point\":\"END_POINT\" (optional)")
                print("}")
                print("Usage Example:")
                print("\t[create bucket]\n\t\tfds -m put -b BUCKET_NAME")
                print("\t[list buckets]\n\t\tfds -l")
                print("\t[list objects under bucket]\n\t\tfds -l -b BUCKET_NAME")
                print("\t[list directory under bucket]\n\t\tfds -L DIR -b BUCKET_NAME")
                print("\t[create object under bucket]\n\t\tfds -m put -b BUCKET_NAME -o OBJECT_NAME -d FILE_PATH")
                print("\t[create object with pipeline]\n\t\tcat file | fds -m put -b BUCKET_NAME -o OBJECT_NAME")
    except Exception as e:
        sys.stderr.write(str(e))
        sys.stderr.flush()
        if debug_enabled:
            logger.debug(e, exc_info=True)
        exit(1)
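The `main()` above leans on module-level helpers (`parse_argument`, `check_region`, `check_bucket_name`) that are defined elsewhere in the tool. As a rough, hypothetical sketch of how the credential lookup described by the help text could work, the snippet below reads the same `~/.config/fds/client.config` JSON and lets the `--ak`/`--sk`/`--region`/`--end_point` flags override it; `load_client_config` and `resolve_credentials` are illustrative names, not part of the SDK.

import json
import os


def load_client_config(path=os.path.expanduser('~/.config/fds/client.config')):
    """Read the JSON config described in the CLI help, or return {} if absent."""
    if not os.path.isfile(path):
        return {}
    with open(path) as f:
        return json.load(f)


def resolve_credentials(args):
    """Command-line values win over the config file in this sketch."""
    cfg = load_client_config()
    access_key = args.ak or cfg.get('ak')
    secret_key = args.sk or cfg.get('sk')
    region = args.region or cfg.get('region', 'cnbj0')
    end_point = args.end_point or cfg.get('end_point')
    return access_key, secret_key, region, end_point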
# FDS download
# https://github.com/XiaoMi/galaxy-fds-sdk-python
# install the fds-sdk package with:
#   pip install galaxy-fds-sdk
import os
import sys

from fds import GalaxyFDSClient, GalaxyFDSClientException

# the FDS credentials are expected in environment variables
XIAOMI_ACCESS_KEY_ID = os.environ.get('XIAOMI_ACCESS_KEY_ID')
XIAOMI_SECRET_ACCESS_KEY = os.environ.get('XIAOMI_SECRET_ACCESS_KEY')

# get the bucket name
bucket = os.environ.get('XIAOMI_FDS_DEFAULT_BUCKET')
if bucket is None:
    print('Error: bucket not found; set XIAOMI_FDS_DEFAULT_BUCKET.')
    sys.exit(1)

# the endpoint is read from ~/.config/xiaomi/config
client = GalaxyFDSClient()

model_types = ['keras_model/', 'tensorflow_model/']
for mt in model_types:
    object_list = client.list_objects(bucket, mt)
    print('found', object_list)
    for obj in object_list.objects:
        print('download', obj.object_name)
        try:
            client.download_object(bucket, obj.object_name, obj.object_name)
        except GalaxyFDSClientException as e:
            print(e.message)
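`download_object` writes to a local path equal to the object name, so prefixes such as `keras_model/` need matching local directories. Below is a minimal variant, assuming the same bucket and credentials, that creates the directories first and streams the bytes via `get_object` (the same streaming pattern used elsewhere in this document); `download_prefix` is an illustrative name, and pagination of the listing is ignored for brevity.

import os

from fds import GalaxyFDSClient, GalaxyFDSClientException


def download_prefix(client, bucket, prefix):
    """Download every object under `prefix` to the matching local path."""
    listing = client.list_objects(bucket, prefix)
    for obj in listing.objects:
        local_path = obj.object_name
        dir_name = os.path.dirname(local_path)
        if dir_name:
            # object names contain '/', so create the local directories first
            os.makedirs(dir_name, exist_ok=True)
        try:
            fds_object = client.get_object(bucket_name=bucket,
                                           object_name=obj.object_name,
                                           stream=True)
            with open(local_path, 'wb') as f:
                for chunk in fds_object.stream:
                    f.write(chunk)
        except GalaxyFDSClientException as e:
            print('failed to download', obj.object_name, ':', e.message)


# example: download_prefix(GalaxyFDSClient(), bucket, 'keras_model/')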
class FDSCli(object):
    """ Advanced fds cli you deserve! """

    def __init__(self, ak=None, sk=None, endpoint=None):
        self._fds_prefix = r'fds://'
        self._fds_prefix_len = len(self._fds_prefix)
        self._local_config = LocalConfig()
        self._ak = self._local_config.ak if ak is None else ak
        if self._ak is None:
            self.config()
        self._sk = self._local_config.sk if sk is None else sk
        if self._sk is None:
            self.config()
        self._endpoint = self._local_config.endpoint if endpoint is None else endpoint
        if self._endpoint is None:
            self.config()
        self._fds_config = FDSClientConfiguration(region_name="awsde0",
                                                  enable_https=False,
                                                  enable_cdn_for_download=False,
                                                  enable_cdn_for_upload=False,
                                                  endpoint=self._endpoint)
        self._fds = GalaxyFDSClient(access_key=self._ak,
                                    access_secret=self._sk,
                                    config=self._fds_config)

    def config(self):
        """
        config command configures ak, sk and endpoint
        """
        default_ak = self._local_config.ak
        default_sk = self._local_config.sk
        default_endpoint = self._local_config.endpoint
        ak = input("enter access key id[default: %s]: " % default_ak)
        if ak == '':
            ak = default_ak
        sk = input("enter secret access key[default: %s]: " % default_sk)
        if sk == '':
            sk = default_sk
        endpoint = input("enter endpoint[default: %s]: " % default_endpoint)
        if endpoint == '':
            endpoint = default_endpoint
        self._local_config.ak = ak
        self._local_config.sk = sk
        self._local_config.endpoint = endpoint

    def mb(self, fds_url):
        """
        create (make) a bucket
        :param fds_url: fds url, format like fds://bucket_name_to_make
        """
        url = FDSURL(fds_url)
        if not url.is_bucket_url():
            CLIPrinter.wrong_format()
            return
        bucket_name = url.bucket_name()
        try:
            self._fds.create_bucket(bucket_name)
        except GalaxyFDSClientException as e:
            CLIPrinter.fail(e.message)
            return
        CLIPrinter.done("create bucket [%s]" % bucket_name)

    def rb(self, fds_url, force=False):
        """
        delete (remove) a bucket
        :param fds_url: fds url, format like fds://bucket_name_to_delete
        :param force: remove the bucket even if it is not empty
        """
        url = FDSURL(fds_url)
        if not url.is_bucket_url():
            CLIPrinter.wrong_format()
            return
        bucket_name = url.bucket_name()
        if force:
            # drain the bucket batch by batch before deleting it
            result = self._fds.list_objects(bucket_name, '', '')
            while True:
                names = []
                try:
                    for object_summary in result.objects:
                        names.append(object_summary.object_name)
                except GalaxyFDSClientException as e:
                    CLIPrinter.warn(e.message)
                try:
                    self._fds.delete_objects(bucket_name, names)
                except GalaxyFDSClientException as e:
                    CLIPrinter.fail(e.message)
                    return
                if result.is_truncated:
                    result = self._fds.list_next_batch_of_objects(result)
                else:
                    break
        try:
            self._fds.delete_bucket(bucket_name)
        except GalaxyFDSClientException as e:
            CLIPrinter.fail(e.message)
            return
        CLIPrinter.done("remove bucket [%s]" % bucket_name)

    def rm(self, fds_url):
        """
        delete (remove) an object
        :param fds_url: fds url, format like fds://bucket_name/object_name_to_delete
        """
        url = FDSURL(fds_url)
        bucket_name = url.bucket_name()
        if url.is_bucket_url():
            CLIPrinter.fail("please enter an object resource address to remove")
            return
        object_name = url.object_name()
        try:
            self._fds.delete_object(bucket_name, object_name)
        except GalaxyFDSClientException as e:
            CLIPrinter.fail(e.message)
            return
        CLIPrinter.done("remove object: [%s] in bucket [%s]" % (object_name, bucket_name))

    def ls(self, fds_url=None, recursive=False, human='k'):
        """
        list all buckets, or objects in a bucket
        :param fds_url: fds url, format like fds://bucket
        :param recursive: recursive listing
        """
        if human != 'k' and human != 'm' and human != 'g':
            CLIPrinter.fail("human should be in 'k|m|g'")
            return
        # fds_url is None means listing all bucket names
        if fds_url is None:
            buckets = None
            try:
                buckets = self._fds.list_authorized_buckets()
            except GalaxyFDSClientException as e:
                CLIPrinter.fail(e.message)
                return
            for bucket in buckets:
                CLIPrinter.print_bucket(bucket)
        else:
            delimiter = "/"
            if recursive:
                delimiter = ""
            url = FDSURL(fds_url)
            bucket_name = url.bucket_name()
            prefix = ''
            if url.is_bucket_url() or url.is_object_dir():
                if url.is_object_dir():
                    prefix = url.object_dir()
                results = self._fds.list_all_objects(bucket_name, prefix, delimiter)
                try:
                    for result in results:
                        metadata = self._fds.get_object_metadata(bucket_name, result.object_name)
                        CLIPrinter.print_object(result.object_name, metadata, human)
                except GalaxyFDSClientException as e:
                    CLIPrinter.fail(e.message)
            else:
                object_name = url.object_name()
                metadata = None
                try:
                    metadata = self._fds.get_object_metadata(bucket_name, object_name)
                except GalaxyFDSClientException as e:
                    CLIPrinter.fail(e.message)
                    return
                if metadata is not None:
                    CLIPrinter.print_object(object_name, metadata, human)

    def presigned(self, fds_url, expires=1, cdn=False):
        """
        presigned command generates a presigned url for downloading an object
        :param fds_url: url, format like fds://bucket/a.txt
        :param expires: expiration time in hours
        :return: presigned url for downloading
        """
        url = FDSURL(fds_url)
        bucket_name = url.bucket_name()
        if url.is_bucket_url():
            CLIPrinter.fail("%s is illegal" % fds_url)
            return
        object_name = url.object_name()
        # absolute expiration time in milliseconds since the epoch
        expiration = int(1000 * (float(expires) * 3600 +
                                 float((datetime.utcnow() - datetime(1970, 1, 1, 0, 0, 0, 0)).total_seconds())))
        try:
            if cdn:
                base_uri = self._fds_config.get_cdn_base_uri()
            else:
                base_uri = self._fds_config.get_base_uri()
            u = self._fds.generate_presigned_uri(base_uri, bucket_name, object_name, expiration)
        except GalaxyFDSClientException as e:
            CLIPrinter.fail(e.message)
            return
        CLIPrinter.done('generated presigned url: ' + u)

    def ttl(self, fds_url):
        """
        ttl command shows the lifecycle information of a bucket or an object
        :param fds_url: url, format like fds://bucket/a.txt or fds://bucket/
        """
        url = FDSURL(fds_url)
        bucket_name = url.bucket_name()
        try:
            ttl = self._fds.get_lifecycle_config(bucket_name)
        except GalaxyFDSClientException as e:
            CLIPrinter.fail(e.message)
            return
        if url.is_bucket_url():
            CLIPrinter.print_lifecycle(ttl)
        elif url.is_object_url():
            if not self._fds.does_object_exists(bucket_name, url.object_name()):
                CLIPrinter.fail("object does not exist")
                return
            if url.is_object_dir():
                prefix = url.object_dir()
            else:
                prefix = url.object_name()
            rules = [rule for rule in ttl['rules'] if rule['prefix'] in prefix]
            CLIPrinter.print_lifecycle({"rules": rules})
        else:
            CLIPrinter.wrong_format()

    def cp(self, src, dst, recursive=False, autodetect_mimetype=False):
        """
        cp command does lots of things:
        1. file upload
        2. batch file upload
        3. file download
        4. batch file download
        5. rename object
        6. object copy
        7. batch object copy
        :param src: source, fds url format like fds://bucket
        :param dst: target, fds url format like fds://bucket
        :param recursive: recursive copy
        """
        if FDSURL.is_fds_url(src) and FDSURL.is_fds_url(dst):
            src_url = FDSURL(src)
            dst_url = FDSURL(dst)
            if src_url.is_object_url():
                self._cp(src_url, dst_url)
            elif not src_url.is_object_url() and not dst_url.is_object_url():
                self._cp_batch(src_url, dst_url, recursive)
            else:
                CLIPrinter.wrong_format()
        elif FDSURL.is_fds_url(src) and not FDSURL.is_fds_url(dst):
            src_url = FDSURL(src)
            if src_url.is_object_url():
                self._download(src_url, dst)
            elif src_url.is_object_dir() and os.path.isdir(dst):
                self._download_batch(src_url, dst, recursive)
            else:
                CLIPrinter.wrong_format()
        elif not FDSURL.is_fds_url(src) and FDSURL.is_fds_url(dst):
            dst_url = FDSURL(dst)
            if os.path.isfile(src):
                self._upload(src, dst_url, autodetect_mimetype=autodetect_mimetype)
            elif os.path.isdir(src) and not dst_url.is_object_url():
                self._upload_batch(src, dst_url, recursive, autodetect_mimetype=autodetect_mimetype)
            else:
                CLIPrinter.wrong_format()
        else:
            CLIPrinter.fail("copying from local to local is not supported")

    def _cp(self, src_url, dst_url):
        src_bucket_name = src_url.bucket_name()
        src_object_name = src_url.object_name()
        dst_bucket_name = dst_url.bucket_name()
        if dst_url.is_object_url():
            dst_object_name = dst_url.object_name()
        else:
            dst_object_name = src_object_name
        try:
            self._fds.copy_object(src_bucket_name, src_object_name, dst_bucket_name, dst_object_name)
        except GalaxyFDSClientException as e:
            CLIPrinter.fail(e.message)
            return
        CLIPrinter.done("copy %s/%s to %s/%s" % (src_bucket_name, src_object_name,
                                                 dst_bucket_name, dst_object_name))

    def _cp_batch(self, src_url, dst_url, recursive):
        src_bucket_name = src_url.bucket_name()
        dst_bucket_name = dst_url.bucket_name()
        prefix = ""
        if src_url.is_object_dir():
            prefix = src_url.object_dir()
        delimiter = "/"
        if recursive:
            delimiter = ""
        all_objects = self._fds.list_all_objects(bucket_name=src_bucket_name, prefix=prefix, delimiter=delimiter)
        try:
            for o in all_objects:
                o_name = o.object_name
                self._fds.copy_object(src_bucket_name, o_name, dst_bucket_name, o_name)
                CLIPrinter.done("copy %s/%s to %s/%s" % (src_bucket_name, o_name, dst_bucket_name, o_name))
        except GalaxyFDSClientException as e:
            CLIPrinter.fail(e.message)

    def _download(self, src_url, dst):
        src_bucket_name = src_url.bucket_name()
        src_object_name = src_url.object_name()
        if os.path.isdir(dst):
            if dst == '.' or dst == '..':
                dst_name = src_url.file_name()
            elif dst.endswith('/'):
                dst_name = dst + src_url.file_name()
            else:
                dst_name = dst + '/' + src_object_name.split('/')[-1]
        else:
            dst_name = dst
        mtime = None
        if os.path.isfile(dst_name):
            local_md5 = file_md5(dst_name)
            remote_md5 = self._fds.get_object_metadata(src_bucket_name, src_object_name).metadata.get(Common.CONTENT_MD5)
            if remote_md5 is not None and local_md5 == remote_md5:
                CLIPrinter.done("download %s/%s to local (skip because of same md5)" % (src_bucket_name, src_object_name))
                return
            mtime = os.path.getmtime(dst_name)
        try:
            fds_object = self._fds.get_object(bucket_name=src_bucket_name, object_name=src_object_name, stream=True)
        except GalaxyFDSClientException as e:
            CLIPrinter.fail(e.message)
            return
        lm = fds_object.metadata.metadata['last-modified']
        remote_modified = rfc822_timestamp(lm)
        # skip if the local file's last-modified is not earlier than the remote object's
        if mtime is not None and datetime.fromtimestamp(mtime) >= remote_modified:
            CLIPrinter.done("download %s/%s to local (skip because local copy is up to date)" % (src_bucket_name, src_object_name))
            return
        length_left = IS_PY3 and sys.maxsize or sys.maxint
        try:
            with open(dst_name, 'wb') as f:
                for chunk in fds_object.stream:
                    length = min(length_left, len(chunk))
                    f.write(chunk[0:length])
                    length_left -= length
                    if length_left <= 0:
                        break
        except Exception as exception:
            print(exception)
        finally:
            fds_object.stream.close()
        CLIPrinter.done("download %s/%s to local" % (src_bucket_name, src_object_name))

    def _download_batch(self, src_url, dst, recursive):
        src_bucket_name = src_url.bucket_name()
        prefix = ""
        if src_url.is_object_dir():
            prefix = src_url.object_dir()
        delimiter = "/"
        if recursive:
            delimiter = ""
        all_objects = self._fds.list_all_objects(bucket_name=src_bucket_name, prefix=prefix, delimiter=delimiter)
        try:
            for o in all_objects:
                o_name = o.object_name
                url = FDSURL(fds_prefix + src_bucket_name + "/" + o_name)
                if url.is_object_url():
                    self._download(url, dst)
        except GalaxyFDSClientException as e:
            CLIPrinter.fail(e.message)

    def _upload(self, filename, dst_url, autodetect_mimetype, sync=False):
        if not os.path.exists(filename):
            CLIPrinter.warn("{} does not exist".format(filename))
            return
        dst_bucket_name = dst_url.bucket_name()
        if dst_url.is_object_url():
            dst_object_name = dst_url.object_name()
        elif sync:
            dst_object_name = filename[2:]
        elif dst_url.is_object_dir():
            dst_object_name = dst_url.object_dir() + os.path.basename(filename)
        else:
            dst_object_name = os.path.basename(filename)
        try:
            if self._fds.does_object_exists(dst_bucket_name, dst_object_name):
                # check md5 first
                metadata = self._fds.get_object_metadata(dst_bucket_name, dst_object_name)
                if metadata.metadata.get(Common.CONTENT_MD5) is not None:
                    local_md5 = file_md5(filename)
                    if local_md5 == metadata.metadata.get(Common.CONTENT_MD5):
                        CLIPrinter.done('upload object %s/%s (skip because of same md5)' % (dst_bucket_name, dst_object_name))
                        return
                # check last-modified
                mtime = None
                if os.path.isfile(filename):
                    mtime = os.path.getmtime(filename)
                lm = metadata.metadata[Common.LAST_MODIFIED]
                remote_modified = rfc822_timestamp(lm)
                # skip if the local file's last-modified is not later than the remote object's
                if mtime is not None and datetime.fromtimestamp(mtime) <= remote_modified:
                    CLIPrinter.done('upload object %s/%s (skip because remote copy is up to date)' % (dst_bucket_name, dst_object_name))
                    return
        except Exception as e:
            CLIPrinter.fail(str(e))
            return
        mimetype = None
        if autodetect_mimetype:
            mimetype = mimetypes.guess_type(filename)[0]
        metadata = FDSObjectMetadata()
        if mimetype is not None:
            metadata.add_header(Common.CONTENT_TYPE, mimetype)
        result = None
        with open(filename, "rb") as f:
            file_length = os.path.getsize(filename)
            if file_length < multipart_upload_buffer_size:
                try:
                    result = self._fds.put_object(dst_bucket_name, dst_object_name, f, metadata=metadata)
                except GalaxyFDSClientException as e:
                    CLIPrinter.fail(e.message)
            else:
                # large file: multipart upload with per-part retries
                try:
                    upload_token = self._fds.init_multipart_upload(dst_bucket_name, dst_object_name)
                    part_number = 1
                    result_list = []
                    while True:
                        data = f.read(multipart_upload_buffer_size)
                        if len(data) <= 0:
                            break
                        for i in range(max_upload_retry_time):
                            upload_result = None
                            try:
                                upload_result = self._fds.upload_part(dst_bucket_name, dst_object_name,
                                                                      upload_token.upload_id, part_number, data)
                                result_list.append(upload_result)
                                break
                            except GalaxyFDSClientException as e:
                                sleep_seconds = (i + 1) * 10
                                CLIPrinter.warn("upload part %d failed, retry after %d seconds" % (part_number, sleep_seconds))
                                time.sleep(sleep_seconds)
                        part_number = part_number + 1
                    upload_part_result = UploadPartResultList({"uploadPartResultList": result_list})
                    result = self._fds.complete_multipart_upload(upload_token.bucket_name,
                                                                 upload_token.object_name,
                                                                 upload_token.upload_id,
                                                                 metadata,
                                                                 json.dumps(upload_part_result))
                except Exception as e:
                    self._fds.abort_multipart_upload(dst_bucket_name, dst_object_name, upload_token.upload_id)
                    CLIPrinter.fail(str(e))
        if result is not None:
            CLIPrinter.done('upload object %s/%s' % (dst_bucket_name, dst_object_name))
        else:
            CLIPrinter.fail('upload object %s/%s' % (dst_bucket_name, dst_object_name))

    def _upload_batch(self, d, dst_url, recursive, autodetect_mimetype, sync=False):
        for root, dirs, files in os.walk(d):
            relative_dir = os.path.relpath(root, d)
            if relative_dir != '.' and relative_dir != '..' and relative_dir.startswith('.'):
                CLIPrinter.warn('skipping hidden dir ' + relative_dir)
                continue
            for filename in files:
                object_name = os.path.join(root, filename)
                object_name = '/'.join(object_name.split('\\'))
                self._upload(object_name, dst_url, autodetect_mimetype, sync)
            if not recursive:
                break

    def sync(self, src, dst, autodetect_mimetype=False):
        """
        sync command syncs between (local directory and fds), (fds and local directory), or (fds and fds)
        :param src: src can be a fds bucket url like fds://bucketname or '.'
        :param dst: dst can be a fds bucket url like fds://bucketname or '.'
        :param delete: todo - delete target file if source file is deleted
        :param exclude: todo
        :param include: todo
        """
        if FDSURL.is_fds_url(src) and not FDSURL.is_fds_url(dst):
            src_url = FDSURL(src)
            if not src_url.is_bucket_url() or not dst.strip() == '.':
                CLIPrinter.wrong_format()
                return
            src_bucket_name = src_url.bucket_name()
            all_objects = self._fds.list_all_objects(bucket_name=src_bucket_name, prefix='', delimiter='')
            try:
                for o in all_objects:
                    o_name = o.object_name
                    url = FDSURL(fds_prefix + src_bucket_name + '/' + o_name)
                    if '/' not in o_name:
                        self._download(url, dst)
                    elif url.is_object_url():
                        o_file_name = o_name.split('/')[-1]
                        o_dir = o_name.split(o_file_name)[0]
                        mkdirs(o_dir)
                        self._download(url, o_dir)
            except GalaxyFDSClientException as e:
                CLIPrinter.fail(e.message)
        elif not FDSURL.is_fds_url(src) and FDSURL.is_fds_url(dst):
            dst_url = FDSURL(dst)
            if not src.strip() == '.' or not dst_url.is_bucket_url():
                CLIPrinter.wrong_format()
                return
            self._upload_batch(src, dst_url, True, autodetect_mimetype, True)
        elif FDSURL.is_fds_url(src) and FDSURL.is_fds_url(dst):
            self.cp(src, dst)
        else:
            CLIPrinter.wrong_format()

    def make_public(self, url):
        if not FDSURL.is_fds_url(url):
            CLIPrinter.wrong_format()
            return
        url = FDSURL(url)
        if url.is_object_url():
            try:
                self._fds.set_public(url.bucket_name(), url.object_name())
            except GalaxyFDSClientException as e:
                CLIPrinter.fail(e.message)
                return
        elif url.is_bucket_url():
            try:
                acl = AccessControlList()
                grant = Grant(Grantee(UserGroups.ALL_USERS), Permission.READ)
                grant.type = GrantType.GROUP
                acl.add_grant(grant)
                self._fds.set_bucket_acl(url.bucket_name(), acl)
            except GalaxyFDSClientException as e:
                CLIPrinter.fail(e.message)
                return
        else:
            CLIPrinter.wrong_format()
            return

    def make_outside(self, url, close=False):
        if not FDSURL.is_fds_url(url):
            CLIPrinter.wrong_format()
            return
        url = FDSURL(url)
        if url.is_bucket_url():
            try:
                if close:
                    self._fds.set_bucket_outside_access(url.bucket_name(), False)
                else:
                    self._fds.set_bucket_outside_access(url.bucket_name(), True)
            except GalaxyFDSClientException as e:
                CLIPrinter.fail(e.message)
                return
        elif url.is_object_url():
            try:
                if close:
                    self._fds.set_object_outside_access(url.bucket_name(), url.object_name(), False)
                else:
                    self._fds.set_object_outside_access(url.bucket_name(), url.object_name(), True)
            except GalaxyFDSClientException as e:
                CLIPrinter.fail(e.message)
                return
        else:
            CLIPrinter.wrong_format()
            return

    def info(self):
        print("Access Key ID: {}".format(self._local_config.ak))
        print("Access Secret Key: {}".format(self._local_config.sk))
        print("Endpoint: {}".format(self._local_config.endpoint))
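FDSCli relies on a few helpers that are defined elsewhere in the CLI module, notably file_md5 (used by the skip-if-same-md5 checks in _download and _upload) and rfc822_timestamp (used to parse the Last-Modified header). The sketches below show one plausible implementation of each; they are assumptions rather than the module's actual code, and the exact encoding stored in Content-MD5 (hex vs. base64) may differ.

import hashlib
from email.utils import parsedate_to_datetime


def file_md5(path, chunk_size=1024 * 1024):
    """Hex MD5 digest of a local file, read in chunks to bound memory use."""
    digest = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()


def rfc822_timestamp(value):
    """Parse an RFC 822 date such as 'Wed, 01 Jan 2020 00:00:00 GMT'.

    Returns a naive datetime in local time so it can be compared with
    datetime.fromtimestamp(os.path.getmtime(...)) as the class does above.
    """
    return parsedate_to_datetime(value).astimezone().replace(tzinfo=None)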