def remote_delete(self, payload):
    """Run when the remote delete option is set.

    NOTE: Remote delete will delete ALL Objects in a remote container
    which differ from the objects in the SOURCED LOCAL FILESYSTEM.

    I.e., if this option is run against one directory and then another,
    any objects that differ will be deleted based on the index
    information found in the LOCAL FILE SYSTEM on the LAST command run.

    :param payload: ``dict``
    """
    report.reporter(msg='Getting file list for REMOTE DELETE')

    # From the remote system see if we have differences in the local system
    f_indexed = self._index_local_files()

    objects = self.go.object_lister(
        url=payload['url'], container=payload['c_name']
    )

    source = payload['source']
    obj_names = [
        basic.jpath(root=source, inode=obj.get('name'))
        for obj in objects[0]
    ]
    obj_names = set(obj_names)

    # Sort the difference between remote files and local files.
    objects = [obj for obj in obj_names if obj not in f_indexed]
    if objects:
        # Set Basic Data for file delete.
        num_files = len(objects)
        report.reporter(
            msg=('MESSAGE: "%d" Files have been found to be removed'
                 ' from the REMOTE CONTAINER.' % num_files)
        )
        concurrency = multi.set_concurrency(args=ARGS, file_count=num_files)

        # Delete the difference in Files.
        report.reporter(msg='Performing REMOTE DELETE')

        del_objects = [
            basic.get_sfile(ufile=obj, source=payload['source'])
            for obj in objects if obj is not None
        ]

        kwargs = {
            'url': payload['url'],
            'container': payload['c_name'],
            'cf_job': getattr(self.go, 'object_deleter')
        }

        multi.job_processer(
            num_jobs=num_files,
            objects=del_objects,
            job_action=multi.doerator,
            concur=concurrency,
            kwargs=kwargs
        )
    else:
        report.reporter(
            msg='No Difference between REMOTE and LOCAL Directories.'
        )
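# A minimal, self-contained sketch of the set difference remote_delete
# computes above. The function and argument names here are illustrative
# only and are not part of turbolift.
def diff_remote_against_local(local_index, remote_names):
    """Return remote names that have no local counterpart."""
    local = set(local_index)
    # Anything present remotely but absent locally is a delete candidate.
    return [name for name in set(remote_names) if name not in local]

# Example: only 'stale.log' would be queued for deletion.
# diff_remote_against_local(['a.txt', 'b.txt'], ['a.txt', 'stale.log'])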
# NOTE: _deleterator appears inside an enclosing start() method; `self`,
# `ARGS`, and the `payload` used in the trailing call come from that scope.
def _deleterator(payload):
    """Multipass Object Delete."""
    report.reporter(msg='Getting file list')
    with multi.spinner():
        # Get all objects in a Container
        objects, list_count, last_obj = self.action(
            url=payload['url'], container=payload['c_name']
        )
        if ARGS.get('pattern_match'):
            objects = basic.match_filter(
                idx_list=objects,
                pattern=ARGS['pattern_match'],
                dict_type=True
            )

        # Count the number of objects returned.
        if objects is False:
            report.reporter(msg='No Container found.')
            return
        elif objects is not None:
            # Load the queue
            obj_list = [obj['name'] for obj in objects]
            num_files = len(obj_list)
            if num_files < 1:
                report.reporter(msg='No Objects found.')
                return
        else:
            report.reporter(msg='Nothing found.')
            return

    # Get The rate of concurrency
    concurrency = multi.set_concurrency(args=ARGS, file_count=num_files)

    if ARGS.get('object'):
        obj_names = ARGS.get('object')
        obj_list = [obj for obj in obj_list if obj in obj_names]
        if not obj_list:
            return 'Nothing Found to Delete.'
        num_files = len(obj_list)

    report.reporter(
        msg=('Performing Object Delete for "%s" object(s)...' % num_files)
    )
    kwargs = {
        'url': payload['url'],
        'container': payload['c_name'],
        'cf_job': getattr(self.go, 'object_deleter')
    }
    multi.job_processer(
        num_jobs=num_files,
        objects=obj_list,
        job_action=multi.doerator,
        concur=concurrency,
        kwargs=kwargs
    )

_deleterator(payload=payload)
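# Hedged sketch of the pattern filtering used above: basic.match_filter is
# assumed to behave roughly like this regex filter over object records.
import re

def match_filter_sketch(idx_list, pattern, dict_type=False):
    """Keep entries whose name matches the given regex pattern."""
    compiled = re.compile(pattern)
    if dict_type:
        # Object listings are dicts with a 'name' key.
        return [obj for obj in idx_list
                if compiled.search(obj.get('name', ''))]
    return [name for name in idx_list if compiled.search(name)]

# Example: keep only gzipped logs from a listing.
# match_filter_sketch([{'name': 'a.log.gz'}, {'name': 'b.txt'}],
#                     r'\.gz$', dict_type=True)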
def start(self):
    """This is the upload method.

    Uses file_upload to upload all files and folders to a specified
    container.
    """
    # Index Local Files for Upload
    with multi.spinner():
        f_indexed = methods.get_local_files()

    if ARGS.get('pattern_match'):
        f_indexed = basic.match_filter(
            idx_list=f_indexed, pattern=ARGS['pattern_match']
        )

    num_files = len(f_indexed)

    # Get The rate of concurrency
    concurrency = multi.set_concurrency(args=ARGS, file_count=num_files)

    # Package up the Payload
    payload = multi.manager_dict(
        http.prep_payload(
            auth=self.auth,
            container=ARGS.get('container', basic.rand_string()),
            source=basic.get_local_source(),
            args=ARGS
        )
    )
    LOG.info('MESSAGE\t: "%s" Files have been found.', num_files)
    LOG.debug('PAYLOAD\t: "%s"', payload)

    # Set the actions class up
    self.go = actions.CloudActions(payload=payload)

    kwargs = {'url': payload['url'], 'container': payload['c_name']}

    # Check that the container exists; if not, create it.
    self.go.container_create(**kwargs)
    kwargs['source'] = payload['source']
    kwargs['cf_job'] = getattr(self.go, 'object_putter')

    multi.job_processer(
        num_jobs=num_files,
        objects=f_indexed,
        job_action=multi.doerator,
        concur=concurrency,
        kwargs=kwargs
    )

    if ARGS.get('delete_remote') is True:
        self.remote_delete(payload=payload, f_indexed=f_indexed)
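# Hedged sketch of concurrency selection: cap the requested worker count at
# the number of files so small jobs don't spawn idle workers. turbolift's
# multi.set_concurrency is assumed to behave along these lines.
def set_concurrency_sketch(requested, file_count):
    """Never run more workers than there are files (minimum of one)."""
    return max(1, min(requested, file_count))

# Example: set_concurrency_sketch(requested=50, file_count=3) -> 3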
def remote_delete(self, payload, f_indexed):
    """Run when the remote delete option is set.

    NOTE: Remote delete will delete ALL Objects in a remote container
    which differ from the objects in the SOURCED LOCAL FILESYSTEM.

    I.e., if this option is run against one directory and then another,
    any objects that differ will be deleted based on the index
    information found in the LOCAL FILE SYSTEM on the LAST command run.

    :param payload: ``dict``
    :param f_indexed: ``list`` of indexed local files
    """
    report.reporter(msg='Getting file list for REMOTE DELETE')
    objects = self.go.object_lister(
        url=payload['url'], container=payload['c_name']
    )

    source = payload['source']
    obj_names = [
        basic.jpath(root=source, inode=obj.get('name'))
        for obj in objects[0]
    ]

    # From the remote system see if we have differences in the local system
    objects = multi.return_diff().difference(
        target=f_indexed, source=obj_names
    )

    if objects:
        # Set Basic Data for file delete.
        num_files = len(objects)
        LOG.info('MESSAGE\t: "%s" Files have been found to be removed from'
                 ' the REMOTE CONTAINER.', num_files)
        concurrency = multi.set_concurrency(
            args=ARGS, file_count=num_files
        )

        # Delete the difference in Files.
        report.reporter(msg='Performing Remote Delete')
        objects = [
            basic.get_sfile(ufile=obj, source=payload['source'])
            for obj in objects
        ]

        kwargs = {
            'url': payload['url'],
            'container': payload['c_name'],
            'cf_job': getattr(self.go, 'object_deleter')
        }

        multi.job_processer(
            num_jobs=num_files,
            objects=objects,
            job_action=multi.doerator,
            concur=concurrency,
            kwargs=kwargs
        )
    else:
        report.reporter(
            msg='No Difference between REMOTE and LOCAL Directories.'
        )
def start(self):
    """This is the upload method.

    Uses file_upload to upload all files and folders to a specified
    container.
    """
    f_indexed = self._index_local_files()
    num_files = len(f_indexed)

    # Get The rate of concurrency
    concurrency = multi.set_concurrency(args=ARGS, file_count=num_files)

    # Package up the Payload
    payload = multi.manager_dict(
        http.prep_payload(
            auth=self.auth,
            container=ARGS.get('container', basic.rand_string()),
            source=basic.get_local_source(),
            args=ARGS
        )
    )
    report.reporter(msg='MESSAGE : "%s" Files found.' % num_files)
    report.reporter(
        msg='PAYLOAD : [ %s ]' % payload, prt=False, lvl='debug'
    )

    # Set the actions class up
    self.go = actions.CloudActions(payload=payload)

    kwargs = {'url': payload['url'], 'container': payload['c_name']}

    # Check that the container exists; if not, create it.
    self.go.container_create(**kwargs)
    kwargs['source'] = payload['source']
    kwargs['cf_job'] = getattr(self.go, 'object_putter')

    multi.job_processer(
        num_jobs=num_files,
        objects=f_indexed,
        job_action=multi.doerator,
        concur=concurrency,
        kwargs=kwargs
    )

    if ARGS.get('delete_remote') is True:
        self.remote_delete(payload=payload)
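# Hedged sketch of the fan-out that job_processer implies: run cf_job over a
# list of objects with a bounded worker pool. turbolift's real queueing uses
# its own multiprocessing machinery; this ThreadPoolExecutor version is
# illustrative only.
from concurrent.futures import ThreadPoolExecutor

def job_processer_sketch(objects, job_action, concur, kwargs):
    """Apply job_action to every object with at most `concur` workers."""
    with ThreadPoolExecutor(max_workers=concur) as pool:
        # list() forces evaluation so worker exceptions surface here.
        list(pool.map(lambda obj: job_action(obj, **kwargs), objects))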
def start(self):
    """Return a list of objects from the API for a container."""

    def _check_list(list_object):
        if list_object:
            return list_object
        else:
            return None, None, None

    def _list(l_payload, go, l_last_obj):
        """Retrieve a long list of all files in a container.

        :return final_list, list_count, last_obj:
        """
        # object_lister(url, container, object_count=None, last_obj=None)
        return _check_list(
            list_object=go.object_lister(
                url=l_payload['url'],
                container=l_payload['c_name'],
                last_obj=l_last_obj
            )
        )

    # Package up the Payload
    payload = http.prep_payload(
        auth=self.auth,
        container=ARGS.get('container'),
        source=None,
        args=ARGS
    )

    # Prep Actions.
    self.go = actions.CloudActions(payload=payload)

    report.reporter(
        msg='API Access for a list of Objects in %s' % payload['c_name'],
        log=True
    )
    report.reporter(
        msg='PAYLOAD\t: "%s"' % payload,
        log=True,
        lvl='debug',
        prt=False
    )

    last_obj = None
    with multi.spinner():
        objects, list_count, last_obj = _list(
            l_payload=payload, go=self.go, l_last_obj=last_obj
        )
        if ARGS.get('pattern_match'):
            objects = basic.match_filter(
                idx_list=objects,
                pattern=ARGS['pattern_match'],
                dict_type=True
            )
        if ARGS.get('filter') is not None:
            objects = [obj for obj in objects
                       if ARGS.get('filter') in obj.get('name')]

    # Count the number of objects returned.
    if not objects:
        # Covers a False return, a None return, and an empty listing.
        report.reporter(msg='Nothing found.')
    elif ARGS.get('object'):
        self.go.object_updater(
            url=payload['url'],
            container=payload['c_name'],
            u_file=last_obj
        )
    else:
        kwargs = {
            'url': payload['url'],
            'container': payload['c_name'],
            'cf_job': getattr(self.go, 'object_updater'),
        }
        object_names = [i['name'] for i in objects]
        num_files = len(object_names)
        concurrency = multi.set_concurrency(
            args=ARGS, file_count=num_files
        )
        multi.job_processer(
            num_jobs=num_files,
            objects=object_names,
            job_action=multi.doerator,
            concur=concurrency,
            kwargs=kwargs
        )
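# Hedged sketch of the marker-style pagination that object_lister implies:
# fetch pages until a short page comes back, passing the last object name as
# the marker. The fetch_page callable is a stand-in, not a turbolift API.
def list_all_objects(fetch_page, page_size=10000):
    """Accumulate a full listing by passing the last name as a marker."""
    final_list, last_obj = [], None
    while True:
        page = fetch_page(marker=last_obj, limit=page_size)
        final_list.extend(page)
        if page:
            last_obj = page[-1]['name']
        if len(page) < page_size:
            # A short page means the listing is exhausted.
            return final_list, len(final_list), last_obj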
def start(self):
    """Clone objects from one container to another.

    NOTE: This method was intended for use with inter-datacenter
    cloning of objects.
    """
    # Package up the Payload
    payload = http.prep_payload(
        auth=self.auth,
        container=ARGS.get('source_container'),
        source=None,
        args=ARGS
    )

    # Prep action class
    self.go = actions.CloudActions(payload=payload)

    # Ensure we have a target region.
    target_region = ARGS.get('target_region')
    if target_region is None:
        raise turbo.NoSource('No target Region was specified.')
    else:
        target_region = target_region.upper()

    # Check for a target type URL.
    if ARGS.get('target_snet') is True:
        target_type = 'internalURL'
    else:
        target_type = 'publicURL'

    # Format the target URL.
    target_url = auth.get_surl(
        region=target_region, cf_list=payload['acfep'], lookup=target_type
    )
    if target_url is None:
        raise turbo.NoSource('No url was found from the target region')
    else:
        payload['turl'] = target_url

    # Ensure we have a target Container.
    target_container = ARGS.get('target_container')
    if target_container is None:
        raise turbo.NoSource('No target Container was specified.')
    else:
        payload['tc_name'] = target_container

    # Check if the source and target containers exist. If not, create them.
    # Source Container.
    self.go.container_create(
        url=payload['url'], container=payload['c_name']
    )
    # Target Container.
    self.go.container_create(url=target_url, container=target_container)

    report.reporter(msg='Getting Object list from the Source.')
    with multi.spinner():
        # Get a list of Objects from the Source/Target container.
        objects, list_count, last_obj = self.go.object_lister(
            url=payload['url'], container=payload['c_name']
        )

        if ARGS.get('pattern_match'):
            objects = basic.match_filter(
                idx_list=objects,
                pattern=ARGS['pattern_match'],
                dict_type=True
            )

        if objects is None:
            raise turbo.NoSource('The source container is empty.')

        # Get the number of objects and set Concurrency
        num_files = len(objects)
        concurrency = multi.set_concurrency(
            args=ARGS, file_count=num_files
        )

    report.reporter(msg='Beginning Sync Operation.')
    kwargs = {
        'surl': payload['url'],
        'turl': payload['turl'],
        'scontainer': payload['c_name'],
        'tcontainer': payload['tc_name'],
        'cf_job': getattr(self.go, 'object_syncer')
    }

    multi.job_processer(
        num_jobs=num_files,
        objects=objects,
        job_action=multi.doerator,
        concur=concurrency,
        kwargs=kwargs
    )
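# Hedged sketch of the endpoint selection get_surl is assumed to perform:
# pick a region's endpoint from a Keystone-style service catalog entry,
# preferring the service network ('internalURL') when requested. The catalog
# shape here is illustrative, not turbolift's exact structure.
def pick_endpoint(endpoints, region, use_snet=False):
    """Return the endpoint URL for a region, or None if absent."""
    lookup = 'internalURL' if use_snet else 'publicURL'
    for entry in endpoints:
        if entry.get('region', '').upper() == region.upper():
            return entry.get(lookup)
    return None

# Example:
# pick_endpoint([{'region': 'DFW', 'publicURL': 'https://storage.example/v1'}],
#               region='dfw')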
def start(self):
    """Retrieve a long list of all files in a container."""
    # Package up the Payload
    payload = http.prep_payload(
        auth=self.auth,
        container=ARGS.get('container'),
        source=ARGS.get('source'),
        args=ARGS
    )
    self.go = actions.CloudActions(payload=payload)
    self.action = getattr(self.go, 'object_lister')

    LOG.info('Attempting Download of Remote path %s', payload['c_name'])

    if ARGS.get('verbose'):
        LOG.info(
            'Accessing API for a list of Objects in %s', payload['c_name']
        )

    report.reporter(
        msg='PAYLOAD : [ %s ]' % payload, prt=False, lvl='debug'
    )

    report.reporter(msg='Getting file list')
    with multi.spinner():
        # Get all objects in a Container
        objects, list_count, last_obj = self.action(
            url=payload['url'],
            container=payload['c_name'],
            last_obj=ARGS.get('index_from')
        )

        if ARGS.get('pattern_match'):
            objects = basic.match_filter(
                idx_list=objects,
                pattern=ARGS['pattern_match'],
                dict_type=True
            )

        # Count the number of objects returned.
        if objects is False:
            report.reporter(msg='No Container found.')
            return
        elif objects is not None:
            num_files = len(objects)
            if num_files < 1:
                report.reporter(msg='No Objects found.')
                return
        else:
            report.reporter(msg='No Objects found.')
            return

    # Get The rate of concurrency
    concurrency = multi.set_concurrency(args=ARGS, file_count=num_files)

    # Load the queue
    obj_list = [obj['name'] for obj in objects if obj.get('name')]

    report.reporter(msg='Building Directory Structure.')
    with multi.spinner():
        if ARGS.get('object'):
            obj_names = ARGS.get('object')
            obj_list = [obj for obj in obj_list if obj in obj_names]
            num_files = len(obj_list)
        elif ARGS.get('dir'):
            objpath = ARGS.get('dir')
            obj_list = [obj for obj in obj_list
                        if obj.startswith(objpath)]
            num_files = len(obj_list)

        # From the objects found, set a unique list of directories.
        unique_dirs = basic.set_unique_dirs(
            object_list=obj_list, root_dir=payload['source']
        )
        for udir in unique_dirs:
            basic.mkdir_p(path=udir)

    kwargs = {
        'url': payload['url'],
        'container': payload['c_name'],
        'source': payload['source'],
        'cf_job': getattr(self.go, 'object_downloader')
    }

    report.reporter(msg='Performing Object Download.')
    multi.job_processer(
        num_jobs=num_files,
        objects=obj_list,
        job_action=multi.doerator,
        concur=concurrency,
        kwargs=kwargs
    )

    if ARGS.get('max_jobs') is not None:
        report.reporter(
            msg=('This is the last object downloaded. [ %s ]' % last_obj),
            log=True
        )
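# Hedged sketch of the directory scaffolding above: derive the unique set of
# parent directories from object names and create each one, roughly what
# basic.set_unique_dirs plus basic.mkdir_p are assumed to do together.
import os

def build_dirs_for_objects(object_list, root_dir):
    """Create every parent directory needed to hold the listed objects."""
    unique_dirs = {
        os.path.join(root_dir, os.path.dirname(name))
        for name in object_list if os.path.dirname(name)
    }
    for udir in sorted(unique_dirs):
        # exist_ok makes this an idempotent mkdir -p.
        os.makedirs(udir, exist_ok=True)

# Example: creates ./downloads/logs and ./downloads/data.
# build_dirs_for_objects(['logs/a.log', 'data/b.csv', 'top.txt'], 'downloads')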