def _index_local_files():
    """Build and return the index of local files eligible for upload.

    The filesystem walk runs behind a progress spinner.  When a
    ``pattern_match`` argument is present, only the matching entries
    are returned.
    """
    with multi.spinner():
        file_index = methods.get_local_files()

    pattern = ARGS.get('pattern_match')
    if pattern:
        return basic.match_filter(idx_list=file_index, pattern=pattern)
    return file_index
def start(self):
    """Upload all indexed local files and folders to a container.

    Local files are indexed (optionally narrowed by ``pattern_match``),
    a shared job payload is prepared, the target container is created if
    missing, and the files are uploaded concurrently.  When
    ``delete_remote`` is set, remote objects without a local counterpart
    are removed afterwards.
    """
    # Index the local files behind a spinner, honoring any filter pattern.
    with multi.spinner():
        f_indexed = methods.get_local_files()

    pattern = ARGS.get('pattern_match')
    if pattern:
        f_indexed = basic.match_filter(idx_list=f_indexed, pattern=pattern)

    num_files = len(f_indexed)

    # Work out how many concurrent workers this job should run.
    concurrency = multi.set_concurrency(args=ARGS, file_count=num_files)

    # Assemble the payload shared by all workers.
    payload = multi.manager_dict(
        http.prep_payload(
            auth=self.auth,
            container=ARGS.get('container', basic.rand_string()),
            source=basic.get_local_source(),
            args=ARGS
        )
    )

    LOG.info('MESSAGE\t: "%s" Files have been found.', num_files)
    LOG.debug('PAYLOAD\t: "%s"', payload)

    # Prepare the cloud actions and make sure the container exists.
    self.go = actions.CloudActions(payload=payload)
    kwargs = {'url': payload['url'], 'container': payload['c_name']}
    self.go.container_create(**kwargs)

    kwargs['source'] = payload['source']
    kwargs['cf_job'] = getattr(self.go, 'object_putter')

    # Fan the uploads out across the worker pool.
    multi.job_processer(
        num_jobs=num_files,
        objects=f_indexed,
        job_action=multi.doerator,
        concur=concurrency,
        kwargs=kwargs
    )

    if ARGS.get('delete_remote') is True:
        self.remote_delete(payload=payload, f_indexed=f_indexed)
def start(self):
    """Archive indexed local files and upload the tarball.

    The indexed files (optionally filtered by ``pattern_match``) are
    packed into a single tar archive, which is then uploaded to the
    target container.  The local archive is removed afterwards unless
    ``no_cleanup`` was requested.
    """
    # Index the local files, applying any filter pattern.
    f_indexed = methods.get_local_files()
    pattern = ARGS.get('pattern_match')
    if pattern:
        f_indexed = basic.match_filter(idx_list=f_indexed, pattern=pattern)
    num_files = len(f_indexed)

    report.reporter(msg='MESSAGE: "%s" Files have been found.' % num_files)

    # Assemble the job payload; an archive upload has no per-file source.
    payload = http.prep_payload(
        auth=self.auth,
        container=ARGS.get('container', basic.rand_string()),
        source=None,
        args=ARGS
    )
    report.reporter(
        msg='PAYLOAD\t: "%s"' % payload,
        log=True,
        lvl='debug',
        prt=False
    )

    # Prepare cloud actions and ensure the target container exists.
    self.go = actions.CloudActions(payload=payload)
    self.go.container_create(
        url=payload['url'],
        container=payload['c_name']
    )
    self.action = getattr(self.go, 'object_putter')

    with multi.spinner():
        # Build the tarball from the indexed files.
        wfile = methods.compress_files(file_list=f_indexed)
        source, name = os.path.split(wfile)
        report.reporter(msg='MESSAGE: "%s" is being uploaded.' % name)

        # Ship the archive up to the container.
        self.action(
            url=payload['url'],
            container=payload['c_name'],
            source=source,
            u_file=wfile
        )

        # Drop the local archive unless cleanup was disabled.
        if ARGS.get('no_cleanup') is None:
            basic.remove_file(wfile)
def start(self):
    """Upload all indexed local files and folders to a container.

    Steps: index local files (optionally narrowed by ``pattern_match``),
    compute the worker concurrency, build the shared payload, ensure the
    target container exists, then upload concurrently.  When
    ``delete_remote`` is set, remote objects with no local counterpart
    are removed afterwards via ``self.remote_delete``.
    """
    # Index Local Files for Upload
    with multi.spinner():
        f_indexed = methods.get_local_files()

    if ARGS.get('pattern_match'):
        f_indexed = basic.match_filter(
            idx_list=f_indexed,
            pattern=ARGS['pattern_match']
        )
    num_files = len(f_indexed)

    # Get the rate of concurrency for the worker pool.
    concurrency = multi.set_concurrency(args=ARGS, file_count=num_files)

    # Package up the payload shared by all workers.  NOTE(review): the
    # rand_string() default is evaluated eagerly even when a container
    # name is supplied — harmless, but worth knowing.
    payload = multi.manager_dict(
        http.prep_payload(
            auth=self.auth,
            container=ARGS.get('container', basic.rand_string()),
            source=basic.get_local_source(),
            args=ARGS
        )
    )

    LOG.info('MESSAGE\t: "%s" Files have been found.', num_files)
    LOG.debug('PAYLOAD\t: "%s"', payload)

    # Set the actions class up.
    self.go = actions.CloudActions(payload=payload)
    kwargs = {'url': payload['url'], 'container': payload['c_name']}

    # Ensure the container exists; create it if it does not.
    self.go.container_create(**kwargs)

    kwargs['source'] = payload['source']
    # Direct attribute access — getattr() with a constant string literal
    # was a needless dynamic lookup.
    kwargs['cf_job'] = self.go.object_putter

    multi.job_processer(
        num_jobs=num_files,
        objects=f_indexed,
        job_action=multi.doerator,
        concur=concurrency,
        kwargs=kwargs
    )

    if ARGS.get('delete_remote') is True:
        self.remote_delete(payload=payload, f_indexed=f_indexed)
def _index_local_files():
    """Return the local file index, filtered by ``pattern_match`` if set."""
    with multi.spinner():
        indexed = methods.get_local_files()

    if not ARGS.get('pattern_match'):
        return indexed
    return basic.match_filter(
        idx_list=indexed,
        pattern=ARGS['pattern_match']
    )
def start(self):
    """Archive local files and upload the tarball to a container.

    Uses the archive (TAR) feature to compress the indexed files and
    then uploads the resulting tarball to the target container.  The
    local archive is removed afterwards unless ``no_cleanup`` was
    requested.
    """
    # Index Local Files for Upload
    f_indexed = methods.get_local_files()
    if ARGS.get('pattern_match'):
        f_indexed = basic.match_filter(
            idx_list=f_indexed,
            pattern=ARGS['pattern_match']
        )
    num_files = len(f_indexed)

    report.reporter(msg='MESSAGE: "%s" Files have been found.' % num_files)

    # Package up the payload; archive uploads carry no per-file source.
    payload = http.prep_payload(
        auth=self.auth,
        container=ARGS.get('container', basic.rand_string()),
        source=None,
        args=ARGS
    )
    # BUG FIX: with prt=False and no log=True the payload message was
    # silently dropped (neither printed nor logged).  log=True routes it
    # to the debug log, matching the sibling archive method.
    report.reporter(
        msg='PAYLOAD : [ %s ]' % payload,
        log=True,
        prt=False,
        lvl='debug',
    )

    # Set the actions class up and ensure the container exists.
    self.go = actions.CloudActions(payload=payload)
    self.go.container_create(
        url=payload['url'],
        container=payload['c_name']
    )
    self.action = getattr(self.go, 'object_putter')

    with multi.spinner():
        # Compression Job
        wfile = methods.compress_files(file_list=f_indexed)
        source, name = os.path.split(wfile)
        report.reporter(msg='MESSAGE: "%s" is being uploaded.' % name)

        # Perform the upload
        self.action(
            url=payload['url'],
            container=payload['c_name'],
            source=source,
            u_file=wfile
        )

        # Remove the archive unless instructed not to.
        if ARGS.get('no_cleanup') is None:
            basic.remove_file(wfile)
# Build the job payload from the authentication details and arguments.
payload = http.prep_payload(
    auth=auth,
    container=args.get('container'),
    source=args.get('source'),
    args=args
)

# Load all of our available cloud actions
from turbolift.clouderator import actions
cf_actions = actions.CloudActions(payload=payload)

# Upload file(s)
# =============================================================================
import turbolift.utils.multi_utils as multi
from turbolift import methods

# Index every local file under the source, then count them.
f_indexed = methods.get_local_files()
num_files = len(f_indexed)

# Describe the upload job for the worker pool.
kwargs = {
    'url': payload['url'],               # Defines the Upload URL
    'container': payload['c_name'],      # Sets the container
    'source': payload['source'],         # Local source being uploaded
    'cf_job': cf_actions.object_putter   # Worker callable for each object
}

# Fan the upload out across 25 concurrent workers.
multi.job_processer(
    num_jobs=num_files,
    objects=f_indexed,
    job_action=multi.doerator,
    concur=25,
    kwargs=kwargs
)
# Package up the Payload
import turbolift.utils.http_utils as http

payload = http.prep_payload(
    auth=authentication,
    container=args.get("container"),
    source=args.get("source"),
    args=args
)

# Load all of our available cloud actions
from turbolift.clouderator import actions
cf_actions = actions.CloudActions(payload=payload)

# Upload file(s)
# =============================================================================
import turbolift.utils.multi_utils as multi
from turbolift import methods

# Index all of the local files and count them.
f_indexed = methods.get_local_files()
num_files = len(f_indexed)

# Describe the upload job for the worker pool.
kwargs = {
    "url": payload["url"],                # Defines the Upload URL
    "container": payload["c_name"],       # Sets the container
    "source": payload["source"],          # Local source being uploaded
    "cf_job": cf_actions.object_putter,   # Worker callable for each object
}

# Fan the upload out across 25 concurrent workers.
multi.job_processer(
    num_jobs=num_files,
    objects=f_indexed,
    job_action=multi.doerator,
    concur=25,
    kwargs=kwargs
)