def parse_region():
    """Pull region/auth url information from context.

    :return (auth_url, is_rax): auth URL to use and whether it is a known
        Rackspace endpoint.
    :raises turbo.SystemProblem: when no region was specified.
    :raises turbo.AuthenticationProblem: when no region/auth URL is usable.
    """
    base_auth_url = 'identity.api.rackspacecloud.com/v2.0/tokens'

    if ARGS.get('os_region'):
        region = ARGS.get('os_region')
    elif ARGS.get('os_rax_auth'):
        region = ARGS.get('os_rax_auth')
    else:
        raise turbo.SystemProblem('You Are required to specify a REGION')

    # BUG FIX: compare strings by value -- "region is 'LON'" relied on
    # CPython string interning and is not a reliable comparison.
    if region == 'LON':
        return ARGS.get('os_auth_url', 'lon.%s' % base_auth_url), True
    elif region.lower() in info.__rax_regions__:
        return ARGS.get('os_auth_url', '%s' % base_auth_url), True
    else:
        if ARGS.get('os_auth_url'):
            # BUG FIX: fixed typo 'racksapce' which could never match a
            # real Rackspace auth URL.
            if 'rackspace' in ARGS.get('os_auth_url'):
                return ARGS.get('os_auth_url', '%s' % base_auth_url), True
            else:
                return ARGS.get('os_auth_url'), False
        else:
            LOG.error('FAILURE: No Region Found. ARGS DUMP:\t %s', ARGS)
            raise turbo.AuthenticationProblem('You Are required to specify a'
                                              ' REGION and an AUTHURL')
def start(self):
    """Toggle CDN state (or purge CDN objects) for a container.

    Builds the request payload, then either purges each object given via
    the ``purge`` argument from the CDN, or toggles CDN on/off for the
    whole container.
    """
    report.reporter(msg='Toggling CDN on Container %s.'
                        % ARGS.get('container'))
    # Package up the Payload
    payload = http.prep_payload(
        auth=self.auth,
        container=ARGS.get('container', basic.rand_string()),
        source=None,
        args=ARGS
    )
    report.reporter(msg='PAYLOAD\t: "%s"' % payload,
                    log=True,
                    lvl='debug',
                    prt=False)
    # Set the actions class up
    self.go = actions.CloudActions(payload=payload)
    with multi.spinner():
        if ARGS.get('purge'):
            for obj in ARGS.get('purge'):
                # Perform the purge
                self.go.container_cdn_command(url=payload['cnet'],
                                              container=payload['c_name'],
                                              sfile=obj)
        else:
            # No objects given: toggle CDN for the container itself.
            self.go.container_cdn_command(url=payload['cnet'],
                                          container=payload['c_name'])
def get_authversion():
    """Get or infer the auth version.

    :return authversion: normalized version string, 'v1.0' or 'v2.0'.
    :raises ValueError: on an unsupported version or misplaced endpoint.
    :raises AttributeError: when v1 auth lacks an 'st_auth' endpoint.
    """
    authversion = ARGS.get('auth_version')
    authversion = AUTH_VERSION_MAP.get(authversion) or authversion
    if authversion:
        supported = ['v1.0', 'v2.0']
        if authversion not in supported:
            raise ValueError("Auth Version must be one of %s." % supported)
    else:
        # infer version if possible else v2.0
        if any((ARGS.get(s) for s in ('st_auth', 'st_user', 'st_key'))):
            authversion = 'v1.0'
        # BUG FIX: default missing keys to '' so the membership test
        # cannot raise "argument of type 'NoneType' is not iterable".
        elif '/v1.0' in ARGS.get('os_auth_url', ''):
            raise ValueError("Specify v1 auth endpoint with 'st_auth'"
                             "instead of 'os_auth_url'")
        elif '/v2.0' in ARGS.get('st_auth', ''):
            raise ValueError("Specify v2 auth endpoint with 'os_auth_url'"
                             "instead of 'st_auth'")
        else:
            authversion = 'v2.0'
    if authversion == 'v1.0':
        if not ARGS.get('st_auth'):
            # TODO(samstav): automatically determine this for rax & hp
            raise AttributeError("Specify the v1 auth endpoint "
                                 "with 'st_auth'")
    ARGS['auth_version'] = authversion
    return authversion
def time_delta(lmobj, compare_time=None):
    """Check to see if a date delta exists based on filter for an object.

    :param lmobj: last-modified timestamp string for the object.
    :param compare_time: optional explicit timestamp to compare against.
    :return True|False:
    """
    fmt, date, delta, now = basic.time_stamp()
    # Parse the object's timestamp with the canonical format.
    odate = date.strptime(lmobj, fmt)

    if compare_time:
        # Explicit comparison time given: newer-than test.
        return date.strptime(compare_time, fmt) > odate

    # Otherwise apply the configured offset/factor window against "now".
    factor = ARGS.get('time_factor', 1)
    offset = ARGS.get('time_offset')
    return (odate + delta(**{offset: factor})) <= now
def cdn_toggle(headers):
    """Set headers to Enable or Disable the CDN.

    Mutates *headers* in place; returns None (the ``dict.update`` result),
    matching the original contract.
    """
    # BUG FIX: the old expression fell back to the raw 'disable' flag
    # value, so a truthy disable produced "X-CDN-Enabled": True and
    # re-enabled the CDN.  Disabling must always send False.
    if ARGS.get("disable"):
        enable_or_disable = False
    else:
        enable_or_disable = ARGS.get("enabled", False)
    return headers.update(
        {"X-CDN-Enabled": enable_or_disable,
         "X-TTL": ARGS.get("cdn_ttl"),
         "X-Log-Retention": ARGS.get("cdn_logs")}
    )
def _obj_index(b_path, m_path):
    # Closure helper: pages through the object listing API using marker
    # paths; relies on url / fheaders / self from the enclosing scope.
    f_list = []
    l_obj = None
    while True:
        resp = http.get_request(url=url, rpath=m_path, headers=fheaders)
        self.resp_exception(resp=resp)
        return_list = resp.json()
        time_offset = ARGS.get("time_offset")
        for obj in return_list:
            if time_offset is not None:
                # BUG FIX: filter on the object's own last_modified
                # timestamp -- the old code passed the offset name itself
                # to time_delta(), which expects a timestamp string.
                if cloud.time_delta(lmobj=obj.get("last_modified")) is True:
                    f_list.append(obj)
            else:
                f_list.append(obj)
        if not f_list:
            # BUG FIX: guard the empty case -- f_list[-1] raised
            # IndexError when nothing was returned or everything was
            # filtered out.
            return f_list
        last_obj_in_list = f_list[-1].get("name")
        if ARGS.get("max_jobs", ARGS.get("object_index")) is not None:
            max_jobs = ARGS.get("max_jobs", ARGS.get("object_index"))
            if max_jobs <= len(f_list):
                # Enough objects collected; truncate to the cap.
                return f_list[:max_jobs]
            elif l_obj == last_obj_in_list:
                # BUG FIX: compare markers by value, not identity ("is").
                return f_list
            else:
                l_obj = last_obj_in_list
                m_path = _marker_type(base=b_path, last=last_obj_in_list)
        else:
            if l_obj == last_obj_in_list:
                # Marker unchanged: the listing is exhausted.
                return f_list
            else:
                l_obj = last_obj_in_list
                m_path = _marker_type(base=b_path, last=last_obj_in_list)
def object_updater(self, url, container, u_file):
    """Update an existing object in a swift container.

    This method will place new headers on an existing object.

    :param url: parsed endpoint URL (``url.path`` is used for quoting).
    :param container: container name holding the object.
    :param u_file: object name to update.
    """
    for retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                 delay=2,
                                 obj=u_file):
        # HTML Encode the path for the file
        rpath = http.quoter(url=url.path,
                            cont=container,
                            ufile=u_file)
        # NOTE(review): fheaders aliases the shared payload headers dict,
        # so updates below persist across calls -- confirm intended.
        fheaders = self.payload['headers']
        if ARGS.get('object_headers') is not None:
            fheaders.update(ARGS.get('object_headers'))
        if ARGS.get('save_perms') is not None:
            # Record local file permission/ownership stats as metadata.
            fheaders.update(basic.stat_file(local_file=u_file))
        with meth.operation(retry, obj='%s %s' % (fheaders, u_file)):
            self._header_poster(url=url,
                                rpath=rpath,
                                fheaders=fheaders)
def cdn_toggle(headers):
    """Set headers to Enable or Disable the CDN.

    Mutates *headers* in place; returns None (the ``dict.update`` result),
    matching the original contract.
    """
    # BUG FIX: the old expression fell back to the raw 'disable' flag
    # value, so a truthy disable produced 'X-CDN-Enabled': True and
    # re-enabled the CDN.  Disabling must always send False.
    if ARGS.get('disable'):
        enable_or_disable = False
    else:
        enable_or_disable = ARGS.get('enabled', False)
    return headers.update({'X-CDN-Enabled': enable_or_disable,
                           'X-TTL': ARGS.get('cdn_ttl'),
                           'X-Log-Retention': ARGS.get('cdn_logs')})
def get_authversion():
    """Determine the swift auth version, inferring it when unset.

    Normalizes the configured version through AUTH_VERSION_MAP, infers
    v1/v2 from the supplied credentials/endpoints when absent, stores the
    result back into ``ARGS['auth_version']`` and returns it.
    """
    version = ARGS.get('auth_version')
    version = AUTH_VERSION_MAP.get(version) or version

    if version:
        # An explicit version must be one we support.
        supported = ['v1.0', 'v2.0']
        if version not in supported:
            raise ValueError("Auth Version must be one of %s." % supported)
    elif any(ARGS.get(key) for key in ('st_auth', 'st_user', 'st_key')):
        # v1-style credentials imply v1 auth.
        version = 'v1.0'
    elif '/v1.0' in ARGS.get('os_auth_url', ''):
        raise ValueError("Specify v1 auth endpoint with 'st_auth'"
                         "instead of 'os_auth_url'")
    elif '/v2.0' in ARGS.get('st_auth', ''):
        raise ValueError("Specify v2 auth endpoint with 'os_auth_url'"
                         "instead of 'st_auth'")
    else:
        # Nothing to infer from: default to v2.
        version = 'v2.0'

    if version == 'v1.0' and not ARGS.get('st_auth'):
        raise AttributeError("Specify the v1 auth endpoint "
                             "with 'st_auth'")

    ARGS['auth_version'] = version
    return version
def _deleterator(payload):
    """Multipass Object Delete.

    Closure helper: lists the container's objects, optionally filters
    them, then fans the deletes out over worker jobs.  Relies on
    self / ARGS from the enclosing scope.
    """
    report.reporter(msg='Getting file list')
    with multi.spinner():
        # Get all objects in a Container
        objects, list_count, last_obj = self.action(
            url=payload['url'],
            container=payload['c_name']
        )
        if ARGS.get('pattern_match'):
            # Keep only objects whose names match the given pattern.
            objects = basic.match_filter(
                idx_list=objects,
                pattern=ARGS['pattern_match'],
                dict_type=True
            )
        # Count the number of objects returned.
        if objects is False:
            report.reporter(msg='No Container found.')
            return
        elif objects is not None:
            # Load the queue
            obj_list = [obj['name'] for obj in objects]
            num_files = len(obj_list)
            if num_files < 1:
                report.reporter(msg='No Objects found.')
                return
        else:
            report.reporter(msg='Nothing found.')
            return
        # Get The rate of concurrency
        concurrency = multi.set_concurrency(args=ARGS,
                                            file_count=num_files)
        if ARGS.get('object'):
            # Restrict the delete to the explicitly requested objects.
            obj_names = ARGS.get('object')
            obj_list = [obj for obj in obj_list if obj in obj_names]
            if not obj_list:
                return 'Nothing Found to Delete.'
            num_files = len(obj_list)
        report.reporter(
            msg=('Performing Object Delete for "%s" object(s)...'
                 % num_files)
        )
        kwargs = {'url': payload['url'],
                  'container': payload['c_name'],
                  'cf_job': getattr(self.go, 'object_deleter')}
        multi.job_processer(
            num_jobs=num_files,
            objects=obj_list,
            job_action=multi.doerator,
            concur=concurrency,
            kwargs=kwargs
        )
# Kick off the delete pass immediately with the prepared payload.
_deleterator(payload=payload)
def start(self):
    """Return a list of objects from the API for a container."""

    def _check_list(list_object):
        # Normalize an empty/False listing into a uniform 3-tuple.
        if list_object:
            return list_object
        else:
            return None, None, None

    def _list(payload, go, last_obj):
        """Retrieve a long list of all files in a container.

        :return final_list, list_count, last_obj:
        """
        if ARGS.get("all_containers") is None:
            return _check_list(
                list_object=go.object_lister(url=payload["url"],
                                             container=payload["c_name"],
                                             last_obj=last_obj)
            )
        else:
            # List containers on the account instead of objects.
            return _check_list(
                list_object=go.container_lister(url=payload["url"],
                                                last_obj=last_obj)
            )

    # Package up the Payload
    payload = http.prep_payload(auth=self.auth,
                                container=ARGS.get("container"),
                                source=None,
                                args=ARGS)
    # Prep Actions.
    self.go = actions.CloudActions(payload=payload)
    report.reporter(msg="API Access for a list of Objects in %s"
                        % payload["c_name"],
                    log=True)
    report.reporter(msg='PAYLOAD\t: "%s"' % payload,
                    log=True,
                    lvl="debug",
                    prt=False)
    last_obj = None
    with multi.spinner():
        objects, list_count, last_obj = _list(payload=payload,
                                              go=self.go,
                                              last_obj=last_obj)
    if ARGS.get("pattern_match"):
        objects = basic.match_filter(idx_list=objects,
                                     pattern=ARGS["pattern_match"],
                                     dict_type=True)
    if ARGS.get("filter") is not None:
        # Substring filter on the object names.
        objects = [obj for obj in objects
                   if ARGS.get("filter") in obj.get("name")]
    # Count the number of objects returned.
    if objects is False:
        report.reporter(msg="Nothing found.")
    elif objects is not None:
        num_files = len(objects)
        if num_files < 1:
            report.reporter(msg="Nothing found.")
        else:
            return_objects = []
            for obj in objects:
                # Trim verbose fields so the table stays readable.
                for item in ["hash", "last_modified", "content_type"]:
                    if item in obj:
                        obj.pop(item)
                return_objects.append(obj)
            report.reporter(msg=report.print_horiz_table(return_objects))
            report.reporter(msg='I found "%d" Item(s).' % num_files)
    else:
        report.reporter(msg="Nothing found.")
def cdn_toggle(headers):
    """Set headers to Enable or Disable the CDN.

    Mutates *headers* in place; returns None (the ``dict.update`` result),
    matching the original contract.
    """
    # BUG FIX: the old expression fell back to the raw 'disable' flag
    # value, so a truthy disable produced 'X-CDN-Enabled': True and
    # re-enabled the CDN.  Disabling must always send False.
    if ARGS.get('disable'):
        enable_or_disable = False
    else:
        enable_or_disable = ARGS.get('enabled', False)
    return headers.update({
        'X-CDN-Enabled': enable_or_disable,
        'X-TTL': ARGS.get('cdn_ttl'),
        'X-Log-Retention': ARGS.get('cdn_logs')
    })
def start(self): """This is the archive method. Uses archive (TAR) feature to compress files and then upload the TAR Ball to a specified container. """ # Index Local Files for Upload f_indexed = methods.get_local_files() if ARGS.get('pattern_match'): f_indexed = basic.match_filter( idx_list=f_indexed, pattern=ARGS['pattern_match'] ) num_files = len(f_indexed) report.reporter(msg='MESSAGE: "%s" Files have been found.' % num_files) # Package up the Payload payload = http.prep_payload( auth=self.auth, container=ARGS.get('container', basic.rand_string()), source=None, args=ARGS ) report.reporter( msg='PAYLOAD\t: "%s"' % payload, log=True, lvl='debug', prt=False ) # Set the actions class up self.go = actions.CloudActions(payload=payload) self.go.container_create( url=payload['url'], container=payload['c_name'] ) self.action = getattr(self.go, 'object_putter') with multi.spinner(): # Compression Job wfile = methods.compress_files(file_list=f_indexed) source, name = os.path.split(wfile) report.reporter(msg='MESSAGE: "%s" is being uploaded.' % name) # Perform the upload self.action(url=payload['url'], container=payload['c_name'], source=source, u_file=wfile) # Remove the archive unless instructed not too. if ARGS.get('no_cleanup') is None: basic.remove_file(wfile)
def parse_reqtype():
    """Setup our Authentication POST.

    username and setup are only used in APIKEY/PASSWORD Authentication

    :return auth_body: dict payload for the identity POST request.
    :raises AttributeError: when no credential source is available.
    """
    setup = {'username': ARGS.get('os_user')}
    if ARGS.get('os_token') is not None:
        auth_body = {'auth': {'token': {'id': ARGS.get('os_token')}}}
    elif ARGS.get('os_password') is not None:
        prefix = 'passwordCredentials'
        setup['password'] = ARGS.get('os_password')
        auth_body = {'auth': {prefix: setup}}
    elif ARGS.get('os_apikey') is not None:
        prefix = 'RAX-KSKEY:apiKeyCredentials'
        setup['apiKey'] = ARGS.get('os_apikey')
        auth_body = {'auth': {prefix: setup}}
    else:
        # BUG FIX: traceback.format_exc() is meaningless outside an
        # exception handler (it logged "NoneType: None"); log the real
        # problem instead.
        LOG.error('No Password, APIKey, or Token Specified')
        raise AttributeError('No Password, APIKey, or Token Specified')
    if ARGS.get('os_tenant'):
        auth_body['auth']['tenantName'] = ARGS.get('os_tenant')
    LOG.debug('AUTH Request Type > %s', auth_body)
    return auth_body
def parse_region():
    """Pull region/auth url information from context.

    :return auth_url: full auth endpoint for the selected provider.
    :raises turbo.SystemProblem: when no usable region/URL is given.
    """
    if ARGS.get('os_rax_auth'):
        region = ARGS.get('os_rax_auth')
        auth_url = 'identity.api.rackspacecloud.com/v2.0/tokens'
        # BUG FIX: compare strings by value -- "region is 'LON'" relied
        # on CPython string interning and is not a reliable comparison.
        if region == 'LON':
            return ARGS.get('os_auth_url', 'https://lon.%s' % auth_url)
        elif region.lower() in info.__rax_regions__:
            return ARGS.get('os_auth_url', 'https://%s' % auth_url)
        else:
            raise turbo.SystemProblem('No Known RAX Region Was Specified')
    elif ARGS.get('os_hp_auth'):
        region = ARGS.get('os_hp_auth')
        auth_url = 'https://%s.identity.hpcloudsvc.com:35357/v2.0/tokens'
        if region.lower() in info.__hpc_regions__:
            return ARGS.get('os_auth_url', auth_url % region)
        else:
            raise turbo.SystemProblem('No Known HP Region Was Specified')
    elif ARGS.get('os_auth_url'):
        # An explicit auth URL wins when no provider shortcut is used.
        return ARGS.get('os_auth_url')
    else:
        raise turbo.SystemProblem(
            'You Are required to specify an Auth URL, Region or Plugin'
        )
def parse_reqtype():
    """Setup our Authentication POST.

    username and setup are only used in APIKEY/PASSWORD Authentication

    :return auth_body: dict payload for the v2 identity POST, or None
        for v1 auth (which needs no request body).
    :raises AttributeError: when no credential source is available.
    """
    if get_authversion() == 'v1.0':
        # v1 auth is header-based; no POST body is required.
        return
    else:
        setup = {'username': ARGS.get('os_user')}
        if ARGS.get('os_token') is not None:
            auth_body = {'auth': {'token': {'id': ARGS.get('os_token')}}}
        elif ARGS.get('os_password') is not None:
            prefix = 'passwordCredentials'
            setup['password'] = ARGS.get('os_password')
            auth_body = {'auth': {prefix: setup}}
        elif ARGS.get('os_apikey') is not None:
            prefix = 'RAX-KSKEY:apiKeyCredentials'
            setup['apiKey'] = ARGS.get('os_apikey')
            auth_body = {'auth': {prefix: setup}}
        else:
            # BUG FIX: traceback.format_exc() is meaningless outside an
            # exception handler (it logged "NoneType: None"); log the
            # real problem instead.
            LOG.error('No Password, APIKey, or Token Specified')
            raise AttributeError('No Password, APIKey, or Token Specified')
        if ARGS.get('os_tenant'):
            auth_body['auth']['tenantName'] = ARGS.get('os_tenant')
        LOG.debug('AUTH Request Type > %s', auth_body)
        return auth_body
def start(self): """This is the upload method. Uses file_upload is to simply upload all files and folders to a specified container. """ # Index Local Files for Upload with multi.spinner(): f_indexed = methods.get_local_files() if ARGS.get('pattern_match'): f_indexed = basic.match_filter( idx_list=f_indexed, pattern=ARGS['pattern_match'] ) num_files = len(f_indexed) # Get The rate of concurrency concurrency = multi.set_concurrency(args=ARGS, file_count=num_files) # Package up the Payload payload = multi.manager_dict( http.prep_payload( auth=self.auth, container=ARGS.get('container', basic.rand_string()), source=basic.get_local_source(), args=ARGS ) ) LOG.info('MESSAGE\t: "%s" Files have been found.', num_files) LOG.debug('PAYLOAD\t: "%s"', payload) # Set the actions class up self.go = actions.CloudActions(payload=payload) kwargs = {'url': payload['url'], 'container': payload['c_name']} # get that the container exists if not create it. self.go.container_create(**kwargs) kwargs['source'] = payload['source'] kwargs['cf_job'] = getattr(self.go, 'object_putter') multi.job_processer( num_jobs=num_files, objects=f_indexed, job_action=multi.doerator, concur=concurrency, kwargs=kwargs ) if ARGS.get('delete_remote') is True: self.remote_delete(payload=payload, f_indexed=f_indexed)
def start(self): """This is the upload method. Uses file_upload is to simply upload all files and folders to a specified container. """ # Index Local Files for Upload f_indexed = methods.get_local_files() if ARGS.get('pattern_match'): f_indexed = basic.match_filter( idx_list=f_indexed, pattern=ARGS['pattern_match'] ) num_files = len(f_indexed) # Get The rate of concurrency concurrency = multi.set_concurrency(args=ARGS, file_count=num_files) # Package up the Payload payload = multi.manager_dict( http.prep_payload( auth=self.auth, container=ARGS.get('container', basic.rand_string()), source=basic.get_local_source(), args=ARGS ) ) LOG.info('MESSAGE\t: "%s" Files have been found.', num_files) LOG.debug('PAYLOAD\t: "%s"', payload) # Set the actions class up self.go = actions.CloudActions(payload=payload) kwargs = {'url': payload['url'], 'container': payload['c_name']} # get that the container exists if not create it. self.go.container_create(**kwargs) kwargs['source'] = payload['source'] kwargs['cf_job'] = getattr(self.go, 'object_putter') multi.job_processer( num_jobs=num_files, objects=f_indexed, job_action=multi.doerator, concur=concurrency, kwargs=kwargs ) if ARGS.get('delete_remote') is True: self.remote_delete(payload=payload, f_indexed=f_indexed)
def set_headers(headers):
    """Set the headers used in the Cloud Files Request.

    Merges any user-specified base headers into *headers* in place.

    :return headers:
    """
    base_headers = ARGS.get('base_headers')
    # Only merge when custom base headers were actually provided.
    if base_headers:
        headers.update(base_headers)
    return headers
def start(self): """This is the archive method. Uses archive (TAR) feature to compress files and then upload the TAR Ball to a specified container. """ # Index Local Files for Upload f_indexed = methods.get_local_files() if ARGS.get('pattern_match'): f_indexed = basic.match_filter(idx_list=f_indexed, pattern=ARGS['pattern_match']) num_files = len(f_indexed) report.reporter(msg='MESSAGE: "%s" Files have been found.' % num_files) # Package up the Payload payload = http.prep_payload(auth=self.auth, container=ARGS.get('container', basic.rand_string()), source=None, args=ARGS) report.reporter( msg='PAYLOAD : [ %s ]' % payload, prt=False, lvl='debug', ) # Set the actions class up self.go = actions.CloudActions(payload=payload) self.go.container_create(url=payload['url'], container=payload['c_name']) self.action = getattr(self.go, 'object_putter') with multi.spinner(): # Compression Job wfile = methods.compress_files(file_list=f_indexed) source, name = os.path.split(wfile) report.reporter(msg='MESSAGE: "%s" is being uploaded.' % name) # Perform the upload self.action(url=payload['url'], container=payload['c_name'], source=source, u_file=wfile) # Remove the archive unless instructed not too. if ARGS.get('no_cleanup') is None: basic.remove_file(wfile)
def _parse_v2_region():
    """Pull region/auth url information from context.

    :return auth_url: full tokens endpoint for the selected provider.
    :raises turbo.SystemProblem: when no usable region/URL is given.
    """
    if ARGS.get('os_rax_auth'):
        region = ARGS.get('os_rax_auth')
        auth_url = 'identity.api.rackspacecloud.com/v2.0/tokens'
        # BUG FIX: compare strings by value -- "region is 'LON'" relied
        # on CPython string interning and is not a reliable comparison.
        if region == 'LON':
            return ARGS.get('os_auth_url', 'https://lon.%s' % auth_url)
        elif region.lower() in info.__rax_regions__:
            return ARGS.get('os_auth_url', 'https://%s' % auth_url)
        else:
            raise turbo.SystemProblem('No Known RAX Region Was Specified')
    elif ARGS.get('os_hp_auth'):
        region = ARGS.get('os_hp_auth')
        auth_url = 'https://%s.identity.hpcloudsvc.com:35357/v2.0/tokens'
        if region.lower() in info.__hpc_regions__:
            return ARGS.get('os_auth_url', auth_url % region)
        else:
            raise turbo.SystemProblem('No Known HP Region Was Specified')
    elif 'os_auth_url' in ARGS:
        auth_url = ARGS.get('os_auth_url')
        # Ensure the endpoint targets the tokens resource.
        if not auth_url.endswith('/tokens'):
            auth_url = '%s/tokens' % auth_url
        return auth_url
    else:
        raise turbo.SystemProblem(
            'You Are required to specify an Auth URL, Region or Plugin')
def start(self):
    """This is the upload method.

    Uses file_upload is to simply upload all files and folders to a
    specified container.
    """
    f_indexed = self._index_local_files()
    num_files = len(f_indexed)
    # Get The rate of concurrency
    concurrency = multi.set_concurrency(args=ARGS, file_count=num_files)
    # Package up the Payload
    payload = multi.manager_dict(
        http.prep_payload(
            auth=self.auth,
            container=ARGS.get('container', basic.rand_string()),
            source=basic.get_local_source(),
            args=ARGS
        )
    )
    report.reporter(msg='MESSAGE : "%s" Files found.' % num_files)
    report.reporter(
        msg='PAYLOAD : [ %s ]' % payload,
        prt=False,
        lvl='debug'
    )
    # Set the actions class up
    self.go = actions.CloudActions(payload=payload)
    kwargs = {'url': payload['url'],
              'container': payload['c_name']}
    # get that the container exists if not create it.
    self.go.container_create(**kwargs)
    kwargs['source'] = payload['source']
    kwargs['cf_job'] = getattr(self.go, 'object_putter')
    multi.job_processer(
        num_jobs=num_files,
        objects=f_indexed,
        job_action=multi.doerator,
        concur=concurrency,
        kwargs=kwargs
    )
    if ARGS.get('delete_remote') is True:
        # Mirror deletions: remove remote objects not present locally.
        self.remote_delete(payload=payload)
def container_create(self, url, container):
    """Create a container if it is not Found.

    :param url:
    :param container:
    :return True|False: True when the container was created.
    """
    rty_count = ARGS.get("error_retry")
    for retry in basic.retryloop(attempts=rty_count,
                                 delay=5,
                                 obj=container):
        rpath = http.quoter(url=url.path, cont=container)
        fheaders = self.payload["headers"]
        with meth.operation(retry, obj="%s %s" % (fheaders, rpath)):
            resp = self._header_getter(url=url,
                                       rpath=rpath,
                                       fheaders=fheaders)
            # Check that the status was a good one
            if resp.status_code == 404:
                report.reporter(msg="Creating Container => %s" % container)
                # BUG FIX: validate the PUT response, not the stale
                # 404 HEAD response captured above.
                resp = http.put_request(url=url,
                                        rpath=rpath,
                                        headers=fheaders)
                self.resp_exception(resp=resp)
                report.reporter(msg='Container "%s" Created' % container)
                return True
            else:
                report.reporter(msg='Container "%s" Found' % container)
                return False
def container_cdn_command(self, url, container, sfile=None):
    """Command your CDN enabled Container.

    :param url:
    :param container:
    :param sfile: optional object name; when given the object is purged
        from the CDN, otherwise CDN state is toggled on the container.
    """
    rty_count = ARGS.get("error_retry")
    for retry in basic.retryloop(attempts=rty_count,
                                 delay=2,
                                 obj=sfile):
        cheaders = self.payload["headers"]
        if sfile is not None:
            rpath = http.quoter(url=url.path,
                                cont=container,
                                ufile=sfile)
            # perform CDN Object DELETE
            adddata = "%s %s" % (cheaders, container)
            with meth.operation(retry, obj=adddata):
                resp = http.delete_request(url=url,
                                           rpath=rpath,
                                           headers=cheaders)
                self.resp_exception(resp=resp)
        else:
            rpath = http.quoter(url=url.path,
                                cont=container)
            # Adds the CDN enable/disable headers in place.
            http.cdn_toggle(headers=cheaders)
            # perform CDN Enable PUT
            adddata = "%s %s" % (cheaders, container)
            with meth.operation(retry, obj=adddata):
                resp = http.put_request(url=url,
                                        rpath=rpath,
                                        headers=cheaders)
                self.resp_exception(resp=resp)
        report.reporter(
            msg="OBJECT %s MESSAGE %s %s %s" % (rpath,
                                                resp.status_code,
                                                resp.reason,
                                                resp.request),
            prt=False,
            lvl="debug",
        )
def start(self):
    """Show detail headers for a container or object.

    Performs the detail request against the (CDN) endpoint and prints the
    returned headers, or a not-found notice.
    """
    # Package up the Payload
    payload = http.prep_payload(auth=self.auth,
                                container=None,
                                source=None,
                                args=ARGS)
    # Prep Actions.
    self.go = actions.CloudActions(payload=payload)
    report.reporter(msg='PAYLOAD\t: "%s"' % payload,
                    log=True,
                    lvl='debug',
                    prt=False)
    with multi.spinner():
        # Use the CDN endpoint when CDN info was requested.
        if ARGS.get('cdn_info'):
            url = payload['cnet']
        else:
            url = payload['url']
        message = self.go.detail_show(url=url)
        try:
            if message.status_code != 404:
                report.reporter(msg='Object Found...')
                report.reporter(
                    msg=report.print_virt_table(dict(message.headers)))
            else:
                report.reporter(msg='Nothing Found...')
        except ValueError as exp:
            # BUG FIX: corrected user-facing typo "Likley" -> "Likely".
            report.reporter(msg=('Non-hashable Type, Likely Item is not'
                                 ' found. Additional Data: %s' % exp))
def object_deleter(self, url, container, u_file):
    """Deletes an objects in a container.

    :param url:
    :param container:
    :param u_file: object name to delete.
    """
    rty_count = ARGS.get('error_retry')
    for retry in basic.retryloop(attempts=rty_count, delay=2, obj=u_file):
        # Open Connection
        conn = http.open_connection(url=url)
        # Open connection and perform operation
        with meth.operation(retry, conn):
            rpath = http.quoter(url=url.path,
                                cont=container,
                                ufile=u_file)
            # Make a connection
            resp = self._header_getter(conn=conn,
                                       rpath=rpath,
                                       fheaders=self.payload['headers'],
                                       retry=retry)
            # Only delete objects that actually exist.
            if not resp.status == 404:
                # Perform delete.
                self._deleter(conn=conn,
                              rpath=rpath,
                              fheaders=self.payload['headers'],
                              retry=retry)
def spinner(work_q=None):
    """Show a fancy spinner while we have work running.

    :param work_q:
    :return:
    """
    verbose_mode = ARGS.get('verbose') is True
    quiet_mode = ARGS.get('quiet') is True
    if verbose_mode or quiet_mode:
        # No spinner when output is verbose or suppressed entirely.
        yield
    else:
        set_itd = IndicatorThread(work_q=work_q)
        try:
            itd = set_itd.indicator_thread()
            yield
        finally:
            # Always stop the indicator thread on exit.
            itd.terminate()
def start(self): """Retrieve a long list of all files in a container.""" # Package up the Payload payload = http.prep_payload( auth=self.auth, container=None, source=None, args=ARGS ) # Prep Actions. self.go = actions.CloudActions(payload=payload) report.reporter( msg='PAYLOAD\t: "%s"' % payload, log=True, lvl='debug', prt=False ) with multi.spinner(): if ARGS.get('cdn_info'): url = payload['cnet'] else: url = payload['url'] message = self.go.detail_show(url=url) if isinstance(message, list): report.reporter(msg='Item Found...') report.reporter(msg=report.print_virt_table(dict(message))) else: report.reporter(msg=message)
def _downloader(self, url, rpath, fheaders, lfile, source, skip=False):
    """Download a specified object in the container.

    :param url:
    :param rpath:
    :param fheaders:
    :param lfile:
    :param source: local root directory; None means use lfile as-is.
    :param skip: when True, download without the sync/md5 check.
    """
    resp = None
    # Resolve the local target path.
    if source is None:
        local_f = lfile
    else:
        local_f = basic.jpath(root=source, inode=lfile)
    if self._checker(url, rpath, local_f, fheaders, skip) is True:
        report.reporter(
            msg='Downloading remote %s to local file %s' % (rpath, lfile),
            prt=False,
            lvl='debug',
        )
        # Perform Object GET
        resp = http.get_request(url=url,
                                rpath=rpath,
                                headers=fheaders,
                                stream=True)
        self.resp_exception(resp=resp)
        # Avoid clobbering an existing local file of the same name.
        local_f = basic.collision_rename(file_name=local_f)
        # Open our source file and write it
        with open(local_f, 'wb') as f_name:
            for chunk in resp.iter_content(chunk_size=2048):
                if chunk:
                    f_name.write(chunk)
                    f_name.flush()
        resp.close()
    if ARGS.get('restore_perms') is not None:
        # Make a connection
        if resp is None:
            resp = self._header_getter(url=url,
                                       rpath=rpath,
                                       fheaders=fheaders)
        all_headers = resp.headers
        if all([
            'x-object-meta-group' in all_headers,
            'x-object-meta-owner' in all_headers,
            'x-object-meta-perms' in all_headers
        ]):
            # Restore saved owner/group/mode metadata onto the file.
            basic.restor_perms(local_file=local_f, headers=all_headers)
        else:
            report.reporter(
                msg=('No Permissions were restored, because none were'
                     ' saved on the object "%s"' % rpath),
                lvl='warn',
                log=True)
def _checker(self, conn, rpath, lpath, fheaders, retry, skip):
    """Check to see if a local file and a target file are different.

    :param conn:
    :param rpath:
    :param lpath:
    :param retry:
    :param fheaders:
    :param skip: bypass all checks and force the transfer.
    :return True|False:
    """
    # Forced transfer: nothing to compare.
    if skip is True:
        return True
    # Without sync mode every file is transferred.
    if not ARGS.get('sync'):
        return True
    resp = self._header_getter(conn=conn,
                               rpath=rpath,
                               fheaders=fheaders,
                               retry=retry)
    # A missing remote object always needs the transfer.
    if resp.status == 404:
        return True
    # Transfer only when the checksums differ... er, match the original
    # contract: True when md5_checker reports True.
    return cloud.md5_checker(resp=resp, local_f=lpath) is True
def object_downloader(self, url, container, source, u_file):
    """Download an Object from a Container.

    :param url:
    :param container:
    :param source: local root directory for the downloaded file.
    :param u_file: object name to download.
    """
    rty_count = ARGS.get('error_retry')
    for retry in basic.retryloop(attempts=rty_count, delay=2, obj=u_file):
        # Open Connection
        conn = http.open_connection(url=url)
        # Perform operation
        with meth.operation(retry, conn):
            fheaders = self.payload['headers']
            rpath = http.quoter(url=url.path,
                                cont=container,
                                ufile=u_file)
            # Perform Download.
            self._downloader(conn=conn,
                             rpath=rpath,
                             fheaders=fheaders,
                             lfile=u_file,
                             source=source,
                             retry=retry)
def _downloader(self, url, rpath, fheaders, lfile, source, skip=False):
    """Download a specified object in the container.

    :param url:
    :param rpath:
    :param fheaders:
    :param lfile:
    :param source: local root directory; None means use lfile as-is.
    :param skip: when True, download without the sync/md5 check.
    """
    resp = None
    # Resolve the local target path.
    if source is None:
        local_f = lfile
    else:
        local_f = basic.jpath(root=source, inode=lfile)
    if self._checker(url, rpath, local_f, fheaders, skip) is True:
        report.reporter(
            msg='Downloading remote %s to local file %s' % (rpath, lfile),
            prt=False,
            lvl='debug',
        )
        # Perform Object GET
        resp = http.get_request(
            url=url, rpath=rpath, headers=fheaders, stream=True
        )
        self.resp_exception(resp=resp)
        # Avoid clobbering an existing local file of the same name.
        local_f = basic.collision_rename(file_name=local_f)
        # Open our source file and write it
        with open(local_f, 'wb') as f_name:
            for chunk in resp.iter_content(chunk_size=2048):
                if chunk:
                    f_name.write(chunk)
                    f_name.flush()
        resp.close()
    if ARGS.get('restore_perms') is not None:
        # Make a connection
        if resp is None:
            resp = self._header_getter(
                url=url, rpath=rpath, fheaders=fheaders
            )
        all_headers = resp.headers
        if all(['x-object-meta-group' in all_headers,
                'x-object-meta-owner' in all_headers,
                'x-object-meta-perms' in all_headers]):
            # Restore saved owner/group/mode metadata onto the file.
            basic.restor_perms(local_file=local_f, headers=all_headers)
        else:
            report.reporter(
                msg=('No Permissions were restored, because none were'
                     ' saved on the object "%s"' % rpath),
                lvl='warn',
                log=True
            )
def object_lister(self, url, container, object_count=None, last_obj=None):
    """Builds a long list of objects found in a container.

    NOTE: This could be millions of Objects.

    :param url:
    :param container:
    :param object_count: optional known object count (skips the header
        lookup when provided).
    :param last_obj: marker object name to resume listing from.
    :return None | list:
    """
    for retry in basic.retryloop(attempts=ARGS.get("error_retry"),
                                 obj="Object List"):
        fheaders = self.payload["headers"]
        fpath = http.quoter(url=url.path, cont=container)
        with meth.operation(retry, obj="%s %s" % (fheaders, fpath)):
            resp = self._header_getter(url=url,
                                       rpath=fpath,
                                       fheaders=fheaders)
            if resp.status_code == 404:
                report.reporter(msg="Not found. %s | %s"
                                    % (resp.status_code, resp.request))
                return None, None, None
            else:
                if object_count is None:
                    # Bail early when the container reports no objects.
                    object_count = resp.headers.get(
                        "x-container-object-count")
                    if object_count:
                        object_count = int(object_count)
                        if not object_count > 0:
                            return None, None, None
                    else:
                        return None, None, None
            # Set the number of loops that we are going to do
            return self._list_getter(url=url,
                                     filepath=fpath,
                                     fheaders=fheaders,
                                     last_obj=last_obj)
def start(self): """Retrieve a long list of all files in a container.""" # Package up the Payload payload = http.prep_payload(auth=self.auth, container=None, source=None, args=ARGS) # Prep Actions. self.go = actions.CloudActions(payload=payload) report.reporter(msg='PAYLOAD\t: "%s"' % payload, log=True, lvl='debug', prt=False) with multi.spinner(): if ARGS.get('cdn_info'): url = payload['cnet'] else: url = payload['url'] message = self.go.detail_show(url=url) if isinstance(message, list): report.reporter(msg='Item Found...') report.reporter(msg=report.print_virt_table(dict(message))) else: report.reporter(msg=message)
def container_lister(self, url, last_obj=None):
    """Builds a long list of objects found in a container.

    NOTE: This could be millions of Objects.

    :param url:
    :param last_obj: marker container name to resume listing from.
    :return None | list:
    """
    for retry in basic.retryloop(attempts=ARGS.get("error_retry"),
                                 obj="Container List"):
        fheaders = self.payload["headers"]
        fpath = http.quoter(url=url.path)
        with meth.operation(retry, obj="%s %s" % (fheaders, fpath)):
            resp = self._header_getter(url=url,
                                       rpath=fpath,
                                       fheaders=fheaders)
            head_check = resp.headers
            # Bail early when the account reports no containers.
            container_count = head_check.get("x-account-container-count")
            if container_count:
                container_count = int(container_count)
                if not container_count > 0:
                    return None
            else:
                return None
            # Set the number of loops that we are going to do
            return self._list_getter(url=url,
                                     filepath=fpath,
                                     fheaders=fheaders,
                                     last_obj=last_obj)
def post_request(url, headers, body=None, rpath=None):
    """Perform HTTP(s) POST request based on Provided Params.

    :param url:
    :param rpath:
    :param headers:
    :param body:
    :return resp:
    """
    try:
        # Flatten the parsed URL, then attach the resource path if given.
        full_url = urlparse.urlunparse(url)
        if rpath is not None:
            full_url = urlparse.urljoin(full_url, rpath)
        resp = requests.post(full_url,
                             data=body,
                             headers=headers,
                             timeout=ARGS.get('timeout', 60))
    except Exception as exp:
        LOG.error('Not able to perform Request ERROR: %s', exp)
        raise AttributeError(
            "Failure to perform Authentication %s ERROR:\n%s"
            % (exp, traceback.format_exc()))
    else:
        return resp
def container_lister(self, url, last_obj=None):
    """Builds a long list of objects found in a container.

    NOTE: This could be millions of Objects.

    :param url:
    :param last_obj: marker container name to resume listing from.
    :return None | list:
    """
    for retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                 obj='Container List'):
        fheaders = self.payload['headers']
        fpath = http.quoter(url=url.path)
        with meth.operation(retry, obj='%s %s' % (fheaders, fpath)):
            resp = self._header_getter(url=url,
                                       rpath=fpath,
                                       fheaders=fheaders)
            head_check = resp.headers
            # Bail early when the account reports no containers.
            container_count = head_check.get('x-account-container-count')
            if container_count:
                container_count = int(container_count)
                if not container_count > 0:
                    return None
            else:
                return None
            # Set the number of loops that we are going to do
            return self._list_getter(url=url,
                                     filepath=fpath,
                                     fheaders=fheaders,
                                     last_obj=last_obj)
def container_create(self, url, container):
    """Create a container if it is not Found.

    :param url:
    :param container:
    :return True|False: True when the container was created.
    """
    rty_count = ARGS.get('error_retry')
    for retry in basic.retryloop(attempts=rty_count,
                                 delay=5,
                                 obj=container):
        rpath = http.quoter(url=url.path, cont=container)
        fheaders = self.payload['headers']
        with meth.operation(retry, obj='%s %s' % (fheaders, rpath)):
            resp = self._header_getter(url=url,
                                       rpath=rpath,
                                       fheaders=fheaders)
            # Check that the status was a good one
            if resp.status_code == 404:
                report.reporter(msg='Creating Container => %s' % container)
                # BUG FIX: validate the PUT response, not the stale
                # 404 HEAD response captured above.
                resp = http.put_request(url=url,
                                        rpath=rpath,
                                        headers=fheaders)
                self.resp_exception(resp=resp)
                report.reporter(msg='Container "%s" Created' % container)
                return True
            else:
                report.reporter(msg='Container "%s" Found' % container)
                return False
def container_cdn_command(self, url, container, sfile=None):
    """Command your CDN enabled Container.

    When ``sfile`` is provided the named object is purged from the CDN
    with a DELETE; otherwise the container's CDN state is toggled with
    a PUT.

    :param url: parsed CDN management URL
    :param container: name of the CDN-enabled container
    :param sfile: optional object name to purge from the CDN
    """
    rty_count = ARGS.get('error_retry')
    for retry in basic.retryloop(attempts=rty_count,
                                 delay=2,
                                 obj=sfile):
        # Open Connection
        conn = http.open_connection(url=url)

        with meth.operation(retry, conn):
            cheaders = self.payload['headers']
            if sfile is not None:
                rpath = http.quoter(url=url.path,
                                    cont=container,
                                    ufile=sfile)
                # perform CDN Object DELETE
                conn.request('DELETE', rpath, headers=cheaders)
                resp, read = http.response_get(conn=conn, retry=retry)
                self.resp_exception(resp=resp, rty=retry)
            else:
                rpath = http.quoter(url=url.path,
                                    cont=container)
                http.cdn_toggle(headers=cheaders)
                # Toggle the container's CDN state (a PUT, despite the
                # old "POST" note in earlier revisions).
                conn.request('PUT', rpath, headers=cheaders)
                resp, read = http.response_get(conn=conn, retry=retry)
                self.resp_exception(resp=resp, rty=retry)

            report.reporter(
                msg=('OBJECT %s MESSAGE %s %s %s' % (rpath, resp.status,
                                                     resp.reason, resp.msg)),
                prt=False,
                lvl='debug'
            )
def start(self):
    """Retrieve and display details for an object or container.

    Builds the request payload, queries either the CDN endpoint or the
    storage endpoint (depending on the ``cdn_info`` flag), and prints
    the returned headers as a table when something is found.
    """
    # Package up the Payload
    payload = http.prep_payload(auth=self.auth,
                                container=None,
                                source=None,
                                args=ARGS)
    # Prep Actions.
    self.go = actions.CloudActions(payload=payload)
    report.reporter(msg='PAYLOAD\t: "%s"' % payload,
                    log=True,
                    lvl="debug",
                    prt=False)
    with multi.spinner():
        if ARGS.get("cdn_info"):
            url = payload["cnet"]
        else:
            url = payload["url"]
        message = self.go.detail_show(url=url)
    try:
        if message.status_code != 404:
            report.reporter(msg="Object Found...")
            report.reporter(msg=report.print_virt_table(dict(message.headers)))
        else:
            report.reporter(msg="Nothing Found...")
    except ValueError as exp:
        # BUGFIX: corrected the typo "Likley" -> "Likely" in this
        # user-facing message.
        report.reporter(msg=("Non-hashable Type, Likely Item is not found."
                             " Additional Data: %s" % exp))
def container_cdn_command(self, url, container, sfile=None):
    """Command your CDN enabled Container.

    When ``sfile`` is supplied, that object is purged from the CDN via
    a DELETE request; otherwise the container's CDN state is toggled
    via a PUT request.

    :param url: parsed CDN management URL
    :param container: name of the CDN-enabled container
    :param sfile: optional object name to purge
    """
    max_attempts = ARGS.get('error_retry')
    for retry in basic.retryloop(attempts=max_attempts,
                                 delay=2,
                                 obj=sfile):
        connection = http.open_connection(url=url)
        with meth.operation(retry, connection):
            cheaders = self.payload['headers']
            if sfile is None:
                # No object given: toggle CDN on the container itself.
                rpath = http.quoter(url=url.path, cont=container)
                http.cdn_toggle(headers=cheaders)
                verb = 'PUT'
            else:
                # Object given: purge it from the CDN edge.
                rpath = http.quoter(url=url.path,
                                    cont=container,
                                    ufile=sfile)
                verb = 'DELETE'

            connection.request(verb, rpath, headers=cheaders)
            resp, read = http.response_get(conn=connection, retry=retry)
            self.resp_exception(resp=resp, rty=retry)

            report.reporter(
                msg=('OBJECT %s MESSAGE %s %s %s' % (rpath, resp.status,
                                                     resp.reason, resp.msg)),
                prt=False,
                lvl='debug')
def _base_headers(headers):
    """Merge the configured base headers into *headers* and return it.

    BUGFIX: ``dict.update`` returns ``None``, so the original
    ``return headers.update(...)`` always returned ``None`` despite the
    documented ``:return headers:`` contract. Update first, then return
    the mutated dict.

    :param headers: dict of headers to extend in place
    :return headers: the same dict, updated with ARGS["base_headers"]
    """
    headers.update(ARGS.get("base_headers"))
    return headers
def _base_headers(headers):
    """Merge the configured base headers into *headers* and return it.

    BUGFIX: ``dict.update`` returns ``None``, so the original
    ``return headers.update(...)`` always returned ``None`` despite the
    documented ``:return headers:`` contract. Update first, then return
    the mutated dict.

    :param headers: dict of headers to extend in place
    :return headers: the same dict, updated with ARGS['base_headers']
    """
    headers.update(ARGS.get('base_headers'))
    return headers
def parse_auth_response(auth_response):
    """Parse the auth response and return the tenant, token, and username.

    :param auth_response: the full object returned from an auth call
    :returns: tuple (token, tenant, username, internalurl, externalurl,
        cdnurl, cf_list) — endpoint entries are None when the service
        catalog does not contain a matching service
    :raises turbo.NoTenantIdFound: when neither tenant nor user data
        can be located in the response
    :raises turbo.SystemProblem: when no region has been configured
    """
    access = auth_response.get('access')
    token = access.get('token').get('id')

    if 'tenant' in access.get('token'):
        tenant = access.get('token').get('tenant').get('name')
        user = access.get('user').get('name')
    elif 'user' in access:
        tenant = None
        user = access.get('user').get('name')
    else:
        LOG.error('No Token Found to Parse.\nHere is the DATA: %s\n%s',
                  auth_response, traceback.format_exc())
        raise turbo.NoTenantIdFound('When attempting to grab the '
                                    'tenant or user nothing was found.')

    # BUGFIX: initialize the endpoint holders up front. Previously a
    # service catalog without a matching 'cloudFilesCDN' / service_type
    # entry left these names unbound and the function crashed with a
    # NameError instead of returning None endpoints.
    cdn = cfl = None
    inet = enet = cnet = None

    scat = access.pop('serviceCatalog')
    for srv in scat:
        if srv.get('name') in info.__srv_types__:
            if srv.get('name') == 'cloudFilesCDN':
                cdn = srv.get('endpoints')
            if srv.get('name') == ARGS.get('service_type'):
                cfl = srv.get('endpoints')

    if ARGS.get('os_region') is not None:
        region = ARGS.get('os_region')
    elif ARGS.get('os_rax_auth') is not None:
        region = ARGS.get('os_rax_auth')
    else:
        raise turbo.SystemProblem('No Region Set')

    if cfl is not None:
        inet = get_surl(region=region, cf_list=cfl, lookup='internalURL')
        enet = get_surl(region=region, cf_list=cfl, lookup='publicURL')
    if cdn is not None:
        cnet = get_surl(region=region, cf_list=cdn, lookup='publicURL')

    return token, tenant, user, inet, enet, cnet, cfl
def _index_local_files():
    """Index Local Files for Upload.

    Returns the local file index, filtered through the
    ``pattern_match`` expression when that argument is set.
    """
    with multi.spinner():
        file_index = methods.get_local_files()
        if not ARGS.get('pattern_match'):
            return file_index
        return basic.match_filter(idx_list=file_index,
                                  pattern=ARGS['pattern_match'])
def _list(payload, go, last_obj):
    """Retrieve a long list of all files in a container.

    :param payload: prepared request payload dict
    :param go: CloudActions instance that performs the listing
    :param last_obj: marker name; listing resumes after this entry
    :return final_list, list_count, last_obj:
    """
    if ARGS.get("all_containers") is not None:
        # List every container on the account.
        return go.container_lister(url=payload["url"],
                                   last_obj=last_obj)
    # Default: list objects within the single named container.
    return go.object_lister(url=payload["url"],
                            container=payload["c_name"],
                            last_obj=last_obj)
def get_local_files():
    """Find all files specified in the "source" path.

    This creates a list for all of files using the full path.
    """

    def _acceptable(node):
        """Return True for inodes that should be indexed.

        Symlinks, mount points, and files larger than 4831838208 bytes
        are excluded.

        :param node: full path to test
        :return True|False:
        """
        if os.path.islink(node) or os.path.ismount(node):
            return False
        return not os.path.getsize(node) > 4831838208

    def _expand(location):
        """Return the list of file paths found at *location*.

        :param location: file or directory path
        :return list: full path of every file at/below *location*
        :raises turbo.NoFileProvided: when the path does not exist
        """
        full_path = basic.real_full_path(location.encode('utf8'))
        if os.path.isdir(full_path):
            return [
                basic.jpath(root=root, inode=name)
                for root, _, files in os.walk(full_path)
                for name in files
            ]
        elif os.path.isfile(full_path):
            return [full_path]
        else:
            raise turbo.NoFileProvided('No Path was Found for %s'
                                       % full_path)

    try:
        sources = ARGS.get('source')
        if not isinstance(sources, list):
            sources = [sources]

        # Index every source, keeping only acceptable regular files.
        f_index = [
            node
            for source in sources
            for node in _expand(location=source)
            if _acceptable(node=node)
        ]
    except Exception as exp:
        raise turbo.SystemProblem('Died for some reason. MESSAGE:\t%s' % exp)
    else:
        report.reporter(msg='FILE LIST:\t%s' % f_index,
                        lvl='debug',
                        prt=False)
        return f_index
def object_putter(self, url, container, source, u_file):
    """This is the Sync method which uploads files to the swift
    repository if they are not already found. If a file "name" is found
    locally and in the swift repository an MD5 comparison is done
    between the two files. If the MD5 is miss-matched the local file is
    uploaded to the repository. If custom meta data is specified, and
    the object exists the method will put the metadata onto the object.

    :param url: parsed storage URL
    :param container: name of the target container
    :param source: local source root the file was indexed from
    :param u_file: full local path of the file to upload
    """
    for retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                 delay=2,
                                 obj=u_file):
        # Open connection and perform operation
        # Get the path ready for action
        sfile = basic.get_sfile(ufile=u_file, source=source)

        # BUGFIX: build the (possibly dir-prefixed) container name in a
        # local variable. The original re-assigned ``container`` inside
        # this retry loop, so each retry appended the dir prefix again
        # ("container/dir/dir/...").
        target = container
        if ARGS.get('dir'):
            target = '%s/%s' % (container, ARGS['dir'].strip('/'))
        rpath = http.quoter(url=url.path, cont=target, ufile=sfile)

        fheaders = self.payload['headers']
        if ARGS.get('object_headers') is not None:
            fheaders.update(ARGS.get('object_headers'))
        if ARGS.get('save_perms') is not None:
            # Preserve local file ownership/permission metadata.
            fheaders.update(basic.stat_file(local_file=u_file))

        with meth.operation(retry, obj='%s %s' % (fheaders, u_file)):
            self._putter(url=url,
                         fpath=u_file,
                         rpath=rpath,
                         fheaders=fheaders)
def get_headers():
    """Setup headers for authentication request.

    :return dict | None: v1.0 auth headers when the auth version is
        v1.0; None otherwise (no headers are built here for v2.0)
    :raises AttributeError: when v1.0 credentials are incomplete
    """
    if get_authversion() != 'v1.0':
        # Nothing to build for other auth versions.
        return None

    user = ARGS.get('st_user')
    key = ARGS.get('st_key')
    if user and key:
        return {'X-Auth-User': user, 'X-Auth-Key': key}

    LOG.error(traceback.format_exc())
    raise AttributeError('Missing Password, APIKey, Token, or User')
def _index_local_files():
    """Index Local Files for Upload.

    :return list: every local file found under the configured source,
        optionally narrowed by the ``pattern_match`` expression
    """
    with multi.spinner():
        file_index = methods.get_local_files()
        pattern = ARGS.get('pattern_match')
        if pattern:
            return basic.match_filter(idx_list=file_index,
                                      pattern=ARGS['pattern_match'])
        return file_index