def start(self):
    """Show detail information for an object or CDN-enabled container.

    Builds the request payload, queries the API via
    ``actions.CloudActions.detail_show`` and prints the returned
    headers as a table when the lookup succeeds.
    """
    # Package up the Payload
    payload = http.prep_payload(auth=self.auth, container=None, source=None,
                                args=ARGS)
    # Prep Actions.
    self.go = actions.CloudActions(payload=payload)
    report.reporter(msg='PAYLOAD\t: "%s"' % payload, log=True, lvl='debug',
                    prt=False)
    with multi.spinner():
        # Use the CDN endpoint when CDN information was requested.
        if ARGS.get('cdn_info'):
            url = payload['cnet']
        else:
            url = payload['url']
        message = self.go.detail_show(url=url)
    try:
        if message.status_code != 404:
            report.reporter(msg='Object Found...')
            report.reporter(
                msg=report.print_virt_table(dict(message.headers)))
        else:
            report.reporter(msg='Nothing Found...')
    except ValueError as exp:
        # dict() on the headers raises ValueError for non-hashable data,
        # which generally means the item was not found.
        # Fix: corrected the "Likley" typo in the user-facing message.
        report.reporter(msg=('Non-hashable Type, Likely Item is not found.'
                             ' Additional Data: %s' % exp))
def start(self): """Retrieve a long list of all files in a container.""" # Package up the Payload payload = http.prep_payload(auth=self.auth, container=None, source=None, args=ARGS) # Prep Actions. self.go = actions.CloudActions(payload=payload) report.reporter(msg='PAYLOAD\t: "%s"' % payload, log=True, lvl='debug', prt=False) with multi.spinner(): if ARGS.get('cdn_info'): url = payload['cnet'] else: url = payload['url'] message = self.go.detail_show(url=url) if isinstance(message, list): report.reporter(msg='Item Found...') report.reporter(msg=report.print_virt_table(dict(message))) else: report.reporter(msg=message)
def start(self):
    """Toggle the CDN state of a container.

    When ``purge`` objects were supplied, each named object is purged
    from the CDN; otherwise the CDN command is run against the
    container itself.
    """
    report.reporter(msg='Toggling CDN on Container %s.'
                        % ARGS.get('container'))
    # Package up the Payload
    payload = http.prep_payload(auth=self.auth,
                                container=ARGS.get('container',
                                                   basic.rand_string()),
                                source=None,
                                args=ARGS)
    report.reporter(msg='PAYLOAD\t: "%s"' % payload, log=True, lvl='debug',
                    prt=False)
    # Set the actions class up
    self.go = actions.CloudActions(payload=payload)
    with multi.spinner():
        if ARGS.get('purge'):
            for obj in ARGS.get('purge'):
                # Perform the purge
                self.go.container_cdn_command(url=payload['cnet'],
                                              container=payload['c_name'],
                                              sfile=obj)
        else:
            self.go.container_cdn_command(url=payload['cnet'],
                                          container=payload['c_name'])
def start(self): """Retrieve a long list of all files in a container.""" # Package up the Payload payload = http.prep_payload( auth=self.auth, container=None, source=None, args=ARGS ) # Prep Actions. self.go = actions.CloudActions(payload=payload) report.reporter( msg='PAYLOAD\t: "%s"' % payload, log=True, lvl='debug', prt=False ) with multi.spinner(): if ARGS.get('cdn_info'): url = payload['cnet'] else: url = payload['url'] message = self.go.detail_show(url=url) if isinstance(message, list): report.reporter(msg='Item Found...') report.reporter(msg=report.print_virt_table(dict(message))) else: report.reporter(msg=message)
def start(self):
    """Show detail information for an object or CDN-enabled container.

    Builds the request payload, queries the API via
    ``actions.CloudActions.detail_show`` and prints the returned
    headers as a table when the lookup succeeds.
    """
    # Package up the Payload
    payload = http.prep_payload(auth=self.auth, container=None, source=None,
                                args=ARGS)
    # Prep Actions.
    self.go = actions.CloudActions(payload=payload)
    report.reporter(msg='PAYLOAD\t: "%s"' % payload, log=True, lvl="debug",
                    prt=False)
    with multi.spinner():
        # Use the CDN endpoint when CDN information was requested.
        if ARGS.get("cdn_info"):
            url = payload["cnet"]
        else:
            url = payload["url"]
        message = self.go.detail_show(url=url)
    try:
        if message.status_code != 404:
            report.reporter(msg="Object Found...")
            report.reporter(
                msg=report.print_virt_table(dict(message.headers)))
        else:
            report.reporter(msg="Nothing Found...")
    except ValueError as exp:
        # dict() on the headers raises ValueError for non-hashable data,
        # which generally means the item was not found.
        # Fix: corrected the "Likley" typo in the user-facing message.
        report.reporter(msg=("Non-hashable Type, Likely Item is not found."
                             " Additional Data: %s" % exp))
def start(self):
    """Return a list of objects from the API for a container.

    Lists either the objects of one container or (with
    ``all_containers``) the containers themselves, applies the
    optional pattern/filter narrowing and prints the result table.
    """

    def _check_list(list_object):
        # Normalize an empty listing to (None, None, None) so the caller
        # can always unpack three values.
        if list_object:
            return list_object
        else:
            return None, None, None

    def _list(payload, go, last_obj):
        """Retrieve a long list of all files in a container.

        :return final_list, list_count, last_obj:
        """
        if ARGS.get("all_containers") is None:
            return _check_list(
                list_object=go.object_lister(url=payload["url"],
                                             container=payload["c_name"],
                                             last_obj=last_obj)
            )
        else:
            return _check_list(
                list_object=go.container_lister(url=payload["url"],
                                                last_obj=last_obj)
            )

    # Package up the Payload
    payload = http.prep_payload(auth=self.auth,
                                container=ARGS.get("container"),
                                source=None,
                                args=ARGS)

    # Prep Actions.
    self.go = actions.CloudActions(payload=payload)

    report.reporter(msg="API Access for a list of Objects in %s"
                        % payload["c_name"],
                    log=True)
    report.reporter(msg='PAYLOAD\t: "%s"' % payload, log=True, lvl="debug",
                    prt=False)

    last_obj = None
    with multi.spinner():
        objects, list_count, last_obj = _list(payload=payload, go=self.go,
                                              last_obj=last_obj)
        # Fix: only filter when a listing was actually returned.  An
        # empty container yields (None, None, None) from _check_list,
        # and iterating None in the filter comprehension raised
        # TypeError.
        if objects and ARGS.get("pattern_match"):
            objects = basic.match_filter(idx_list=objects,
                                         pattern=ARGS["pattern_match"],
                                         dict_type=True)
        if objects and ARGS.get("filter") is not None:
            objects = [obj for obj in objects
                       if ARGS.get("filter") in obj.get("name")]

    # Count the number of objects returned.
    if objects is False:
        report.reporter(msg="Nothing found.")
    elif objects is not None:
        num_files = len(objects)
        if num_files < 1:
            report.reporter(msg="Nothing found.")
        else:
            return_objects = []
            for obj in objects:
                # Drop verbose fields so the table stays readable.
                for item in ["hash", "last_modified", "content_type"]:
                    if item in obj:
                        obj.pop(item)
                return_objects.append(obj)
            report.reporter(msg=report.print_horiz_table(return_objects))
            report.reporter(msg='I found "%d" Item(s).' % num_files)
    else:
        report.reporter(msg="Nothing found.")
def _deleterator(payload):
    """Multipass Object Delete.

    Lists the container, optionally filters the result by pattern or
    by explicitly named objects, then feeds the remaining object
    names to the concurrent delete workers.
    """
    report.reporter(msg='Getting file list')
    with multi.spinner():
        # Get all objects in a Container
        objects, list_count, last_obj = self.action(
            url=payload['url'], container=payload['c_name']
        )
        if ARGS.get('pattern_match'):
            objects = basic.match_filter(
                idx_list=objects,
                pattern=ARGS['pattern_match'],
                dict_type=True
            )
        # Count the number of objects returned.
        if objects is False:
            report.reporter(msg='No Container found.')
            return
        elif objects is not None:
            # Load the queue
            obj_list = [obj['name'] for obj in objects]
            num_files = len(obj_list)
            if num_files < 1:
                report.reporter(msg='No Objects found.')
                return
        else:
            report.reporter(msg='Nothing found.')
            return
        # Get The rate of concurrency
        concurrency = multi.set_concurrency(args=ARGS,
                                            file_count=num_files)
        # Narrow the delete to explicitly named objects when supplied.
        if ARGS.get('object'):
            obj_names = ARGS.get('object')
            obj_list = [obj for obj in obj_list if obj in obj_names]
            if not obj_list:
                return 'Nothing Found to Delete.'
            num_files = len(obj_list)
        report.reporter(
            msg=('Performing Object Delete for "%s" object(s)...'
                 % num_files)
        )
        kwargs = {'url': payload['url'],
                  'container': payload['c_name'],
                  'cf_job': getattr(self.go, 'object_deleter')}
        multi.job_processer(
            num_jobs=num_files,
            objects=obj_list,
            job_action=multi.doerator,
            concur=concurrency,
            kwargs=kwargs
        )


# NOTE(review): `payload` here comes from the enclosing scope (not
# visible in this chunk) -- this call assumes the surrounding method has
# already built it; confirm against the full file.
_deleterator(payload=payload)
def _index_local_files():
    """Build the index of local files that are candidates for upload.

    Returns the raw index, or the pattern-filtered index when a
    ``pattern_match`` argument was given.
    """
    with multi.spinner():
        indexed_files = methods.get_local_files()
    pattern = ARGS.get('pattern_match')
    if not pattern:
        return indexed_files
    return basic.match_filter(idx_list=indexed_files, pattern=pattern)
def start(self): """This is the archive method. Uses archive (TAR) feature to compress files and then upload the TAR Ball to a specified container. """ # Index Local Files for Upload f_indexed = methods.get_local_files() if ARGS.get('pattern_match'): f_indexed = basic.match_filter( idx_list=f_indexed, pattern=ARGS['pattern_match'] ) num_files = len(f_indexed) report.reporter(msg='MESSAGE: "%s" Files have been found.' % num_files) # Package up the Payload payload = http.prep_payload( auth=self.auth, container=ARGS.get('container', basic.rand_string()), source=None, args=ARGS ) report.reporter( msg='PAYLOAD\t: "%s"' % payload, log=True, lvl='debug', prt=False ) # Set the actions class up self.go = actions.CloudActions(payload=payload) self.go.container_create( url=payload['url'], container=payload['c_name'] ) self.action = getattr(self.go, 'object_putter') with multi.spinner(): # Compression Job wfile = methods.compress_files(file_list=f_indexed) source, name = os.path.split(wfile) report.reporter(msg='MESSAGE: "%s" is being uploaded.' % name) # Perform the upload self.action(url=payload['url'], container=payload['c_name'], source=source, u_file=wfile) # Remove the archive unless instructed not too. if ARGS.get('no_cleanup') is None: basic.remove_file(wfile)
def start(self): """This is the upload method. Uses file_upload is to simply upload all files and folders to a specified container. """ # Index Local Files for Upload with multi.spinner(): f_indexed = methods.get_local_files() if ARGS.get('pattern_match'): f_indexed = basic.match_filter( idx_list=f_indexed, pattern=ARGS['pattern_match'] ) num_files = len(f_indexed) # Get The rate of concurrency concurrency = multi.set_concurrency(args=ARGS, file_count=num_files) # Package up the Payload payload = multi.manager_dict( http.prep_payload( auth=self.auth, container=ARGS.get('container', basic.rand_string()), source=basic.get_local_source(), args=ARGS ) ) LOG.info('MESSAGE\t: "%s" Files have been found.', num_files) LOG.debug('PAYLOAD\t: "%s"', payload) # Set the actions class up self.go = actions.CloudActions(payload=payload) kwargs = {'url': payload['url'], 'container': payload['c_name']} # get that the container exists if not create it. self.go.container_create(**kwargs) kwargs['source'] = payload['source'] kwargs['cf_job'] = getattr(self.go, 'object_putter') multi.job_processer( num_jobs=num_files, objects=f_indexed, job_action=multi.doerator, concur=concurrency, kwargs=kwargs ) if ARGS.get('delete_remote') is True: self.remote_delete(payload=payload, f_indexed=f_indexed)
def _index_local_files():
    """Collect the local file index, optionally narrowed by pattern."""
    with multi.spinner():
        local_index = methods.get_local_files()
    if ARGS.get('pattern_match'):
        local_index = basic.match_filter(
            idx_list=local_index, pattern=ARGS['pattern_match']
        )
    return local_index
def start(self): """This is the archive method. Uses archive (TAR) feature to compress files and then upload the TAR Ball to a specified container. """ # Index Local Files for Upload f_indexed = methods.get_local_files() if ARGS.get('pattern_match'): f_indexed = basic.match_filter(idx_list=f_indexed, pattern=ARGS['pattern_match']) num_files = len(f_indexed) report.reporter(msg='MESSAGE: "%s" Files have been found.' % num_files) # Package up the Payload payload = http.prep_payload(auth=self.auth, container=ARGS.get('container', basic.rand_string()), source=None, args=ARGS) report.reporter( msg='PAYLOAD : [ %s ]' % payload, prt=False, lvl='debug', ) # Set the actions class up self.go = actions.CloudActions(payload=payload) self.go.container_create(url=payload['url'], container=payload['c_name']) self.action = getattr(self.go, 'object_putter') with multi.spinner(): # Compression Job wfile = methods.compress_files(file_list=f_indexed) source, name = os.path.split(wfile) report.reporter(msg='MESSAGE: "%s" is being uploaded.' % name) # Perform the upload self.action(url=payload['url'], container=payload['c_name'], source=source, u_file=wfile) # Remove the archive unless instructed not too. if ARGS.get('no_cleanup') is None: basic.remove_file(wfile)
def start(self):
    """Show detail information for an object or CDN-enabled container.

    Builds the request payload, queries the API via
    ``actions.CloudActions.detail_show`` and prints the returned
    headers as a table when the lookup succeeds.
    """
    # Package up the Payload
    payload = http.prep_payload(
        auth=self.auth,
        container=None,
        source=None,
        args=ARGS
    )
    # Prep Actions.
    self.go = actions.CloudActions(payload=payload)
    report.reporter(
        msg='PAYLOAD : [ %s ]' % payload,
        prt=False,
        lvl='debug',
    )
    with multi.spinner():
        # Use the CDN endpoint when CDN information was requested.
        if ARGS.get('cdn_info'):
            url = payload['cnet']
        else:
            url = payload['url']
        message = self.go.detail_show(url=url)
    try:
        if message.status_code != 404:
            report.reporter(msg='Object Found...')
            report.reporter(
                msg=report.print_virt_table(dict(message.headers)),
                log=False
            )
        else:
            report.reporter(msg='Nothing Found...')
    except ValueError as exp:
        # dict() on the headers raises ValueError for non-hashable data,
        # which generally means the item was not found.
        # Fix: corrected the "Likley" typo in the user-facing message.
        report.reporter(
            msg=('Non-hashable Type, Likely Item is not found.'
                 ' Additional Data: %s' % exp)
        )
def start(self):
    """Toggle the CDN state of a container.

    When ``purge`` objects were supplied, each named object is purged
    from the CDN; otherwise the CDN command is run against the
    container itself.
    """
    report.reporter(
        msg='Toggling CDN on Container %s.' % ARGS.get('container')
    )

    # Package up the Payload
    payload = http.prep_payload(
        auth=self.auth,
        container=ARGS.get('container', basic.rand_string()),
        source=None,
        args=ARGS
    )
    report.reporter(
        msg='PAYLOAD\t: "%s"' % payload, log=True, lvl='debug', prt=False
    )

    # Set the actions class up
    self.go = actions.CloudActions(payload=payload)
    with multi.spinner():
        if ARGS.get('purge'):
            for obj in ARGS.get('purge'):
                # Perform the purge
                self.go.container_cdn_command(url=payload['cnet'],
                                              container=payload['c_name'],
                                              sfile=obj)
        else:
            self.go.container_cdn_command(url=payload['cnet'],
                                          container=payload['c_name'])
def start(self):
    """Update objects found in a container.

    Lists the container (optionally narrowed by pattern/filter) and
    runs the object updater either against one named object or
    concurrently against every object found.
    """

    def _check_list(list_object):
        # Normalize an empty listing to (None, None, None) so the caller
        # can always unpack three values.
        if list_object:
            return list_object
        else:
            return None, None, None

    def _list(l_payload, go, l_last_obj):
        """Retrieve a long list of all files in a container.

        :return final_list, list_count, last_obj:
        """
        # object_lister(url, container, object_count=None, last_obj=None)
        return _check_list(
            list_object=go.object_lister(
                url=l_payload['url'],
                container=l_payload['c_name'],
                last_obj=l_last_obj
            )
        )

    # Package up the Payload
    payload = http.prep_payload(
        auth=self.auth,
        container=ARGS.get('container'),
        source=None,
        args=ARGS
    )

    # Prep Actions.
    self.go = actions.CloudActions(payload=payload)

    report.reporter(
        msg='API Access for a list of Objects in %s' % payload['c_name'],
        log=True
    )
    report.reporter(
        msg='PAYLOAD\t: "%s"' % payload,
        log=True,
        lvl='debug',
        prt=False
    )

    last_obj = None
    with multi.spinner():
        objects, list_count, last_obj = _list(
            l_payload=payload, go=self.go, l_last_obj=last_obj
        )
        # Fix: test the *value*, not key presence -- the parsed ARGS
        # mapping carries the key with a None value when the option was
        # not supplied, so `'pattern_match' in ARGS` was always true.
        # Also guard against a None listing before filtering.
        if objects and ARGS.get('pattern_match'):
            objects = basic.match_filter(
                idx_list=objects,
                pattern=ARGS['pattern_match'],
                dict_type=True
            )
        if objects and ARGS.get('filter') is not None:
            objects = [obj for obj in objects
                       if ARGS.get('filter') in obj.get('name')]

    # Count the number of objects returned.  `not objects` covers the
    # False, None and empty cases; previously `len(objects)` raised
    # TypeError when the listing came back as None.
    if not objects:
        report.reporter(msg='Nothing found.')
    elif ARGS.get('object'):
        self.go.object_updater(
            url=payload['url'],
            container=payload['c_name'],
            u_file=last_obj
        )
    else:
        kwargs = {
            'url': payload['url'],
            'container': payload['c_name'],
            'cf_job': getattr(self.go, 'object_updater'),
        }
        object_names = [i['name'] for i in objects]
        num_files = len(object_names)
        concurrency = multi.set_concurrency(
            args=ARGS, file_count=num_files
        )
        multi.job_processer(
            num_jobs=num_files,
            objects=object_names,
            job_action=multi.doerator,
            concur=concurrency,
            kwargs=kwargs
        )
def start(self):
    """Return a list of objects from the API for a container.

    Lists either the objects of one container or (with
    ``all_containers``) the containers themselves, applies the
    optional pattern/filter narrowing and prints the result table.
    """

    def _list(payload, go, last_obj):
        """Retrieve a long list of all files in a container.

        :return final_list, list_count, last_obj:
        """
        if ARGS.get('all_containers') is None:
            return go.object_lister(url=payload['url'],
                                    container=payload['c_name'],
                                    last_obj=last_obj)
        else:
            return go.container_lister(url=payload['url'],
                                       last_obj=last_obj)

    # Package up the Payload
    payload = http.prep_payload(auth=self.auth,
                                container=ARGS.get('container'),
                                source=None,
                                args=ARGS)

    # Prep Actions.
    self.go = actions.CloudActions(payload=payload)

    report.reporter(msg='API Access for a list of Objects in %s'
                        % payload['c_name'],
                    log=True)
    report.reporter(msg='PAYLOAD\t: "%s"' % payload, log=True, lvl='debug',
                    prt=False)

    last_obj = None
    with multi.spinner():
        objects, list_count, last_obj = _list(payload=payload, go=self.go,
                                              last_obj=last_obj)
        # Fix: only filter when a listing was actually returned -- the
        # filter comprehension raised TypeError when the lister yielded
        # None or False.
        if objects and ARGS.get('pattern_match'):
            objects = basic.match_filter(idx_list=objects,
                                         pattern=ARGS['pattern_match'],
                                         dict_type=True)
        if objects and ARGS.get('filter') is not None:
            objects = [
                obj for obj in objects
                if ARGS.get('filter') in obj.get('name')
            ]

    # Count the number of objects returned.
    if objects is False:
        report.reporter(msg='Nothing found.')
    elif objects is not None:
        num_files = len(objects)
        if num_files < 1:
            report.reporter(msg='Nothing found.')
        else:
            return_objects = []
            for obj in objects:
                # Drop verbose fields so the table stays readable.
                for item in ['hash', 'last_modified', 'content_type']:
                    if item in obj:
                        obj.pop(item)
                return_objects.append(obj)
            report.reporter(msg=report.print_horiz_table(return_objects))
            report.reporter(msg='I found "%d" Item(s).' % num_files)
    else:
        report.reporter(msg='Nothing found.')
def start(self):
    """Clone objects from one container to another.

    NOTE: This method was intended for use with inter-datacenter
    cloning of objects.
    """
    # Package up the Payload
    payload = http.prep_payload(
        auth=self.auth,
        container=ARGS.get('source_container'),
        source=None,
        args=ARGS
    )

    # Prep action class
    self.go = actions.CloudActions(payload=payload)

    # Ensure we have a target region.
    target_region = ARGS.get('target_region')
    if target_region is None:
        raise turbo.NoSource('No target Region was specified.')
    else:
        target_region = target_region.upper()

    # check for a target type URL
    if ARGS.get('target_snet') is True:
        target_type = 'internalURL'
    else:
        target_type = 'publicURL'

    # Format the target URL
    target_url = auth.get_surl(
        region=target_region, cf_list=payload['acfep'], lookup=target_type
    )
    if target_url is None:
        raise turbo.NoSource('No url was found from the target region')
    else:
        payload['turl'] = target_url

    # Ensure we have a target Container.
    target_container = ARGS.get('target_container')
    if target_container is None:
        raise turbo.NoSource('No target Container was specified.')
    else:
        payload['tc_name'] = target_container

    # Check if the source and target containers exist. If not Create them.
    # Source Container.
    self.go.container_create(url=payload['url'],
                             container=payload['c_name'])
    # Target Container.
    self.go.container_create(url=target_url,
                             container=target_container)

    report.reporter(msg='Getting Object list from the Source.')
    with multi.spinner():
        # Get a list of Objects from the Source/Target container.
        objects, list_count, last_obj = self.go.object_lister(
            url=payload['url'],
            container=payload['c_name']
        )

        if ARGS.get('pattern_match'):
            objects = basic.match_filter(
                idx_list=objects,
                pattern=ARGS['pattern_match'],
                dict_type=True
            )

        if objects is None:
            raise turbo.NoSource('The source container is empty.')

        # Get the number of objects and set Concurrency
        num_files = len(objects)
        concurrency = multi.set_concurrency(args=ARGS,
                                            file_count=num_files)

        report.reporter(msg='Beginning Sync Operation.')
        kwargs = {'surl': payload['url'],
                  'turl': payload['turl'],
                  'scontainer': payload['c_name'],
                  'tcontainer': payload['tc_name'],
                  'cf_job': getattr(self.go, 'object_syncer')}

        multi.job_processer(
            num_jobs=num_files,
            objects=objects,
            job_action=multi.doerator,
            concur=concurrency,
            kwargs=kwargs
        )
def start(self):
    """Return a list of objects from the API for a container.

    Lists either the objects of one container or (with
    ``all_containers``) the containers themselves, applies optional
    pattern/filter narrowing and prints the result table.  With
    ``object_index`` only the last-seen object name is printed.
    """

    def _check_list(list_object):
        # Normalize an empty listing to (None, None, None) so the caller
        # can always unpack three values.
        if list_object:
            return list_object
        else:
            return None, None, None

    def _list(payload, go, last_obj):
        """Retrieve a long list of all files in a container.

        :return final_list, list_count, last_obj:
        """
        if ARGS.get('all_containers') is None:
            return _check_list(
                list_object=go.object_lister(
                    url=payload['url'],
                    container=payload['c_name'],
                    last_obj=last_obj
                )
            )
        else:
            return _check_list(
                list_object=go.container_lister(
                    url=payload['url'],
                    last_obj=last_obj
                )
            )

    # Package up the Payload
    payload = http.prep_payload(
        auth=self.auth,
        container=ARGS.get('container'),
        source=None,
        args=ARGS
    )

    # Prep Actions.
    self.go = actions.CloudActions(payload=payload)

    report.reporter(
        msg='API Access for a list of Objects in %s' % payload['c_name'],
        log=True
    )
    report.reporter(
        msg='PAYLOAD : [ %s ]' % payload,
        prt=False,
        lvl='debug',
    )

    last_obj = None
    with multi.spinner():
        objects, list_count, last_obj = _list(payload=payload, go=self.go,
                                              last_obj=last_obj)
        # Fix: test the *value*, not key presence -- the parsed ARGS
        # mapping carries the key with a None value when the option was
        # not supplied, so `'pattern_match' in ARGS` was always true.
        # Also guard against a None/False listing before filtering.
        if objects and ARGS.get('pattern_match'):
            objects = basic.match_filter(
                idx_list=objects,
                pattern=ARGS['pattern_match'],
                dict_type=True
            )
        if objects and ARGS.get('filter') is not None:
            objects = [obj for obj in objects
                       if ARGS.get('filter') in obj.get('name')]

    # Count the number of objects returned.
    if objects is False:
        report.reporter(msg='Nothing found.')
    elif ARGS.get('object_index'):
        report.reporter(
            msg=report.print_horiz_table([{'name': last_obj}]),
            log=False
        )
    elif objects is not None:
        num_files = len(objects)
        if num_files < 1:
            report.reporter(msg='Nothing found.')
        else:
            return_objects = []
            for obj in objects:
                # Drop verbose fields so the table stays readable.
                for item in ['hash', 'last_modified', 'content_type']:
                    if item in obj:
                        obj.pop(item)
                return_objects.append(obj)
            report.reporter(
                msg=report.print_horiz_table(return_objects),
                log=False
            )
            report.reporter(msg='I found "%d" Item(s).' % num_files)
    else:
        report.reporter(msg='Nothing found.')
def start(self):
    """Update objects found in a container.

    Lists the container (optionally narrowed by pattern/filter) and
    runs the object updater either against one named object or
    concurrently against every object found.
    """

    def _check_list(list_object):
        # Normalize an empty listing to (None, None, None) so the caller
        # can always unpack three values.
        if list_object:
            return list_object
        else:
            return None, None, None

    def _list(l_payload, go, l_last_obj):
        """Retrieve a long list of all files in a container.

        :return final_list, list_count, last_obj:
        """
        # object_lister(url, container, object_count=None, last_obj=None)
        return _check_list(
            list_object=go.object_lister(
                url=l_payload['url'],
                container=l_payload['c_name'],
                last_obj=l_last_obj
            )
        )

    # Package up the Payload
    payload = http.prep_payload(
        auth=self.auth,
        container=ARGS.get('container'),
        source=None,
        args=ARGS
    )

    # Prep Actions.
    self.go = actions.CloudActions(payload=payload)

    report.reporter(
        msg='API Access for a list of Objects in %s' % payload['c_name'],
        log=True
    )
    report.reporter(
        msg='PAYLOAD : [ %s ]' % payload,
        prt=False,
        lvl='debug',
    )

    last_obj = None
    with multi.spinner():
        objects, list_count, last_obj = _list(
            l_payload=payload, go=self.go, l_last_obj=last_obj
        )
        # Fix: test the *value*, not key presence -- the parsed ARGS
        # mapping carries the key with a None value when the option was
        # not supplied, so `'pattern_match' in ARGS` was always true.
        # Also guard against a None listing before filtering.
        if objects and ARGS.get('pattern_match'):
            objects = basic.match_filter(
                idx_list=objects,
                pattern=ARGS['pattern_match'],
                dict_type=True
            )
        if objects and ARGS.get('filter') is not None:
            objects = [obj for obj in objects
                       if ARGS.get('filter') in obj.get('name')]

    # Count the number of objects returned.  `not objects` covers the
    # False, None and empty cases; previously `len(objects)` raised
    # TypeError when the listing came back as None.
    if not objects:
        report.reporter(msg='Nothing found.')
    elif ARGS.get('object'):
        self.go.object_updater(
            url=payload['url'],
            container=payload['c_name'],
            u_file=last_obj
        )
    else:
        kwargs = {
            'url': payload['url'],
            'container': payload['c_name'],
            'cf_job': getattr(self.go, 'object_updater'),
        }
        object_names = [i['name'] for i in objects]
        num_files = len(object_names)
        concurrency = multi.set_concurrency(
            args=ARGS, file_count=num_files
        )
        multi.job_processer(
            num_jobs=num_files,
            objects=object_names,
            job_action=multi.doerator,
            concur=concurrency,
            kwargs=kwargs
        )
def start(self):
    """Clone objects from one container to another.

    NOTE: This method was intended for use with inter-datacenter
    cloning of objects.
    """
    # Package up the Payload
    payload = http.prep_payload(auth=self.auth,
                                container=ARGS.get('source_container'),
                                source=None,
                                args=ARGS)

    # Prep action class
    self.go = actions.CloudActions(payload=payload)

    # Ensure we have a target region.
    target_region = ARGS.get('target_region')
    if target_region is None:
        raise turbo.NoSource('No target Region was specified.')
    else:
        target_region = target_region.upper()

    # check for a target type URL
    if ARGS.get('target_snet') is True:
        target_type = 'internalURL'
    else:
        target_type = 'publicURL'

    # Format the target URL
    target_url = auth.get_surl(region=target_region,
                               cf_list=payload['acfep'],
                               lookup=target_type)
    if target_url is None:
        raise turbo.NoSource('No url was found from the target region')
    else:
        payload['turl'] = target_url

    # Ensure we have a target Container.
    target_container = ARGS.get('target_container')
    if target_container is None:
        raise turbo.NoSource('No target Container was specified.')
    else:
        payload['tc_name'] = target_container

    # Check if the source and target containers exist. If not Create them.
    # Source Container.
    self.go.container_create(url=payload['url'],
                             container=payload['c_name'])
    # Target Container.
    self.go.container_create(url=target_url,
                             container=target_container)

    report.reporter(msg='Getting Object list from the Source.')
    with multi.spinner():
        # Get a list of Objects from the Source/Target container.
        objects, list_count, last_obj = self.go.object_lister(
            url=payload['url'], container=payload['c_name'])

        if ARGS.get('pattern_match'):
            objects = basic.match_filter(idx_list=objects,
                                         pattern=ARGS['pattern_match'],
                                         dict_type=True)

        if objects is None:
            raise turbo.NoSource('The source container is empty.')

        # Get the number of objects and set Concurrency
        num_files = len(objects)
        concurrency = multi.set_concurrency(args=ARGS,
                                            file_count=num_files)

        report.reporter(msg='Beginning Sync Operation.')
        kwargs = {
            'surl': payload['url'],
            'turl': payload['turl'],
            'scontainer': payload['c_name'],
            'tcontainer': payload['tc_name'],
            'cf_job': getattr(self.go, 'object_syncer')
        }

        multi.job_processer(num_jobs=num_files,
                            objects=objects,
                            job_action=multi.doerator,
                            concur=concurrency,
                            kwargs=kwargs)
def start(self):
    """Download objects from a container to a local path.

    Lists the remote container, builds the matching local directory
    structure, then downloads the objects concurrently.
    """
    # Package up the Payload
    payload = http.prep_payload(
        auth=self.auth,
        container=ARGS.get('container'),
        source=ARGS.get('source'),
        args=ARGS
    )
    self.go = actions.CloudActions(payload=payload)
    self.action = getattr(self.go, 'object_lister')
    LOG.info('Attempting Download of Remote path %s', payload['c_name'])
    if ARGS.get('verbose'):
        LOG.info(
            'Accessing API for a list of Objects in %s', payload['c_name']
        )
    report.reporter(
        msg='PAYLOAD\t: "%s"' % payload,
        log=True,
        lvl='debug',
        prt=False
    )
    report.reporter(msg='getting file list')
    with multi.spinner():
        # Get all objects in a Container
        objects, list_count, last_obj = self.action(
            url=payload['url'],
            container=payload['c_name'],
            last_obj=ARGS.get('index_from')
        )
        if ARGS.get('pattern_match'):
            objects = basic.match_filter(
                idx_list=objects,
                pattern=ARGS['pattern_match'],
                dict_type=True
            )

    # Count the number of objects returned.
    if objects is False:
        report.reporter(msg='No Container found.')
        return
    elif objects is not None:
        num_files = len(objects)
        if num_files < 1:
            report.reporter(msg='No Objects found.')
            return
    else:
        report.reporter(msg='No Objects found.')
        return

    # Get The rate of concurrency
    concurrency = multi.set_concurrency(args=ARGS, file_count=num_files)

    # Load the queue
    obj_list = [obj['name'] for obj in objects if obj.get('name')]

    report.reporter(msg='Building Directory Structure.')
    with multi.spinner():
        # Narrow the download to named objects or a directory prefix.
        if ARGS.get('object'):
            obj_names = ARGS.get('object')
            obj_list = [obj for obj in obj_list if obj in obj_names]
            num_files = len(obj_list)
        elif ARGS.get('dir'):
            objpath = ARGS.get('dir')
            obj_list = [obj for obj in obj_list if obj.startswith(objpath)]
            num_files = len(obj_list)

        # from objects found set a unique list of directories
        unique_dirs = basic.set_unique_dirs(object_list=obj_list,
                                            root_dir=payload['source'])
        for udir in unique_dirs:
            basic.mkdir_p(path=udir)

    kwargs = {'url': payload['url'],
              'container': payload['c_name'],
              'source': payload['source'],
              'cf_job': getattr(self.go, 'object_downloader')}

    report.reporter(msg='Performing Object Download.')
    multi.job_processer(
        num_jobs=num_files,
        objects=obj_list,
        job_action=multi.doerator,
        concur=concurrency,
        kwargs=kwargs
    )
    # When the job count was capped, tell the user where to resume from.
    if ARGS.get('max_jobs') is not None:
        report.reporter(
            msg=('This is the last object downloaded. [ %s ]' % last_obj),
            log=True
        )
def start(self):
    """Delete objects from a container, then optionally the container.

    The object delete pass runs first; when no explicit object list or
    pattern was supplied and ``save_container`` is not set, the (now
    empty) container itself is deleted as well.
    """

    def _deleterator(payload):
        """Multipass Object Delete."""
        report.reporter(msg='Getting file list')
        with multi.spinner():
            # Get all objects in a Container
            objects, list_count, last_obj = self.action(
                url=payload['url'], container=payload['c_name']
            )
            if ARGS.get('pattern_match'):
                objects = basic.match_filter(
                    idx_list=objects,
                    pattern=ARGS['pattern_match'],
                    dict_type=True
                )
            # Count the number of objects returned.
            if objects is False:
                report.reporter(msg='No Container found.')
                return
            elif objects is not None:
                # Load the queue
                obj_list = [obj['name'] for obj in objects]
                num_files = len(obj_list)
                if num_files < 1:
                    report.reporter(msg='No Objects found.')
                    return
            else:
                report.reporter(msg='Nothing found.')
                return
            # Get The rate of concurrency
            concurrency = multi.set_concurrency(args=ARGS,
                                                file_count=num_files)
            # Narrow the delete to explicitly named objects when given.
            if ARGS.get('object'):
                obj_names = ARGS.get('object')
                obj_list = [obj for obj in obj_list if obj in obj_names]
                if not obj_list:
                    return 'Nothing Found to Delete.'
                num_files = len(obj_list)
            report.reporter(
                msg=('Performing Object Delete for "%s" object(s)...'
                     % num_files)
            )
            kwargs = {'url': payload['url'],
                      'container': payload['c_name'],
                      'cf_job': getattr(self.go, 'object_deleter')}
            multi.job_processer(
                num_jobs=num_files,
                objects=obj_list,
                job_action=multi.doerator,
                concur=concurrency,
                kwargs=kwargs
            )

    # Fix: removed the stray `_deleterator(payload=payload)` call that
    # sat directly after the nested definition -- at that point `payload`
    # was not yet assigned, so it raised NameError at runtime.

    # Package up the Payload
    payload = http.prep_payload(
        auth=self.auth,
        container=ARGS.get('container'),
        source=None,
        args=ARGS
    )
    report.reporter(
        msg='PAYLOAD\t: "%s"' % payload, log=True, lvl='debug', prt=False
    )
    self.go = actions.CloudActions(payload=payload)
    self.action = getattr(self.go, 'object_lister')
    report.reporter(
        msg='Accessing API for list of Objects in %s' % payload['c_name'],
        log=True,
        lvl='info',
        prt=True
    )

    # Delete the objects and report when done.
    _deleterator(payload=payload)

    # If nothing narrowed the delete, remove the container too unless
    # the user asked to keep it.
    sup_args = [ARGS.get('object'), ARGS.get('pattern_match')]
    if ARGS.get('save_container') is None and not any(sup_args):
        report.reporter(msg='Performing Container Delete.')
        with multi.spinner():
            self.go.container_deleter(url=payload['url'],
                                      container=payload['c_name'])
def start(self):
    """Download objects from a container to a local path.

    Lists the remote container, builds the matching local directory
    structure, then downloads the objects concurrently.
    """
    # Package up the Payload
    payload = http.prep_payload(auth=self.auth,
                                container=ARGS.get('container'),
                                source=ARGS.get('source'),
                                args=ARGS)
    self.go = actions.CloudActions(payload=payload)
    self.action = getattr(self.go, 'object_lister')
    LOG.info('Attempting Download of Remote path %s', payload['c_name'])
    if ARGS.get('verbose'):
        LOG.info('Accessing API for a list of Objects in %s',
                 payload['c_name'])
    report.reporter(
        msg='PAYLOAD : [ %s ]' % payload,
        prt=False,
        lvl='debug',
    )
    report.reporter(msg='getting file list')
    with multi.spinner():
        # Get all objects in a Container
        objects, list_count, last_obj = self.action(
            url=payload['url'],
            container=payload['c_name'],
            last_obj=ARGS.get('index_from'))
        if ARGS.get('pattern_match'):
            objects = basic.match_filter(idx_list=objects,
                                         pattern=ARGS['pattern_match'],
                                         dict_type=True)

    # Count the number of objects returned.
    if objects is False:
        report.reporter(msg='No Container found.')
        return
    elif objects is not None:
        num_files = len(objects)
        if num_files < 1:
            report.reporter(msg='No Objects found.')
            return
    else:
        report.reporter(msg='No Objects found.')
        return

    # Get The rate of concurrency
    concurrency = multi.set_concurrency(args=ARGS, file_count=num_files)

    # Load the queue
    obj_list = [obj['name'] for obj in objects if obj.get('name')]

    report.reporter(msg='Building Directory Structure.')
    with multi.spinner():
        # Narrow the download to named objects or a directory prefix.
        if ARGS.get('object'):
            obj_names = ARGS.get('object')
            obj_list = [obj for obj in obj_list if obj in obj_names]
            num_files = len(obj_list)
        elif ARGS.get('dir'):
            objpath = ARGS.get('dir')
            obj_list = [obj for obj in obj_list if obj.startswith(objpath)]
            num_files = len(obj_list)

        # from objects found set a unique list of directories
        unique_dirs = basic.set_unique_dirs(object_list=obj_list,
                                            root_dir=payload['source'])
        for udir in unique_dirs:
            basic.mkdir_p(path=udir)

    kwargs = {
        'url': payload['url'],
        'container': payload['c_name'],
        'source': payload['source'],
        'cf_job': getattr(self.go, 'object_downloader')
    }

    report.reporter(msg='Performing Object Download.')
    multi.job_processer(num_jobs=num_files,
                        objects=obj_list,
                        job_action=multi.doerator,
                        concur=concurrency,
                        kwargs=kwargs)
    # When the job count was capped, tell the user where to resume from.
    if ARGS.get('max_jobs') is not None:
        report.reporter(msg=('This is the last object downloaded. [ %s ]'
                             % last_obj),
                        log=True)