def start(self): """This is the archive method. Uses archive (TAR) feature to compress files and then upload the TAR Ball to a specified container. """ report.reporter(msg='Toggling CDN on Container %s.' % ARGS.get('container')) # Package up the Payload payload = http.prep_payload(auth=self.auth, container=ARGS.get('container', basic.rand_string()), source=None, args=ARGS) report.reporter(msg='PAYLOAD\t: "%s"' % payload, log=True, lvl='debug', prt=False) # Set the actions class up self.go = actions.CloudActions(payload=payload) with multi.spinner(): if ARGS.get('purge'): for obj in ARGS.get('purge'): # Perform the purge self.go.container_cdn_command(url=payload['cnet'], container=payload['c_name'], sfile=obj) else: self.go.container_cdn_command(url=payload['cnet'], container=payload['c_name'])
def _putter(self, conn, fpath, rpath, fheaders, retry, skip=False):
    """Place object into the container.

    :param conn:
    :param fpath:
    :param rpath:
    :param fheaders:
    :param retry:
    """
    if self._checker(conn, rpath, fpath, fheaders, retry, skip) is True:
        report.reporter(msg='OBJECT ORIGIN %s RPATH %s' % (fpath, rpath),
                        prt=False,
                        lvl='debug')

        if basic.file_exists(fpath) is False:
            return None
        else:
            with open(fpath, 'rb') as f_open:
                conn.request('PUT', rpath, body=f_open, headers=fheaders)
            resp, read = http.response_get(conn=conn, retry=retry)
            self.resp_exception(resp=resp, rty=retry)

            report.reporter(
                msg=('MESSAGE %s %s %s' % (resp.status, resp.reason, resp.msg)),
                prt=False,
                lvl='debug'
            )
def _downloader(self, url, rpath, fheaders, lfile, source, skip=False):
    """Download a specified object in the container.

    :param url:
    :param rpath:
    :param fheaders:
    :param lfile:
    :param skip:
    """
    resp = None

    if source is None:
        local_f = lfile
    else:
        local_f = basic.jpath(root=source, inode=lfile)

    if self._checker(url, rpath, local_f, fheaders, skip) is True:
        report.reporter(
            msg='Downloading remote %s to local file %s' % (rpath, lfile),
            prt=False,
            lvl='debug',
        )

        # Perform Object GET
        resp = http.get_request(url=url,
                                rpath=rpath,
                                headers=fheaders,
                                stream=True)
        self.resp_exception(resp=resp)
        local_f = basic.collision_rename(file_name=local_f)

        # Open our source file and write it
        with open(local_f, 'wb') as f_name:
            for chunk in resp.iter_content(chunk_size=2048):
                if chunk:
                    f_name.write(chunk)
                    f_name.flush()
        resp.close()

    if ARGS.get('restore_perms') is not None:
        # Make a connection
        if resp is None:
            resp = self._header_getter(url=url,
                                       rpath=rpath,
                                       fheaders=fheaders)

        all_headers = resp.headers

        if all(['x-object-meta-group' in all_headers,
                'x-object-meta-owner' in all_headers,
                'x-object-meta-perms' in all_headers]):
            basic.restor_perms(local_file=local_f, headers=all_headers)
        else:
            report.reporter(
                msg=('No Permissions were restored, because none were'
                     ' saved on the object "%s"' % rpath),
                lvl='warn',
                log=True
            )
def container_cdn_command(self, url, container, sfile=None):
    """Command your CDN enabled Container.

    :param url:
    :param container:
    """
    rty_count = ARGS.get('error_retry')
    for retry in basic.retryloop(attempts=rty_count, delay=2, obj=sfile):
        # Open Connection
        conn = http.open_connection(url=url)
        with meth.operation(retry, conn):
            cheaders = self.payload['headers']
            if sfile is not None:
                rpath = http.quoter(url=url.path,
                                    cont=container,
                                    ufile=sfile)
                # perform CDN Object DELETE
                conn.request('DELETE', rpath, headers=cheaders)
                resp, read = http.response_get(conn=conn, retry=retry)
                self.resp_exception(resp=resp, rty=retry)
            else:
                rpath = http.quoter(url=url.path, cont=container)
                http.cdn_toggle(headers=cheaders)
                # perform CDN Enable PUT
                conn.request('PUT', rpath, headers=cheaders)
                resp, read = http.response_get(conn=conn, retry=retry)
                self.resp_exception(resp=resp, rty=retry)

            report.reporter(
                msg=('OBJECT %s MESSAGE %s %s %s'
                     % (rpath, resp.status, resp.reason, resp.msg)),
                prt=False,
                lvl='debug'
            )
def job_processer(num_jobs, objects, job_action, concur, kwargs=None, opt=None):
    """Process all jobs in batches.

    :param num_jobs:
    :param objects:
    :param job_action:
    :param concur:
    :param kwargs:
    :param opt:
    """
    count = 0
    batch_size = basic.batcher(num_files=num_jobs)
    while objects:
        count += 1
        report.reporter(msg='Job Count %s' % count)
        work = [
            objects.pop(objects.index(obj)) for obj in objects[0:batch_size]
        ]
        work_q = basic_queue(work)
        with spinner(work_q=work_q):
            worker_proc(
                job_action=job_action,
                concurrency=concur,
                queue=work_q,
                opt=opt,
                kwargs=kwargs
            )
            basic.stupid_hack(wait=.2)
        work_q.close()
def object_lister(self, url, container, object_count=None, last_obj=None):
    """Builds a long list of objects found in a container.

    NOTE: This could be millions of Objects.

    :param url:
    :param container:
    :param object_count:
    :param last_obj:
    :return None | list:
    """
    for retry in basic.retryloop(attempts=ARGS.get("error_retry"),
                                 obj="Object List"):
        fheaders = self.payload["headers"]
        fpath = http.quoter(url=url.path, cont=container)
        with meth.operation(retry, obj="%s %s" % (fheaders, fpath)):
            resp = self._header_getter(url=url,
                                       rpath=fpath,
                                       fheaders=fheaders)
            if resp.status_code == 404:
                report.reporter(msg="Not found. %s | %s" % (resp.status_code,
                                                            resp.request))
                return None, None, None
            else:
                if object_count is None:
                    object_count = resp.headers.get(
                        "x-container-object-count"
                    )
                    if object_count:
                        object_count = int(object_count)
                        if not object_count > 0:
                            return None, None, None
                    else:
                        return None, None, None

                # Set the number of loops that we are going to do
                return self._list_getter(url=url,
                                         filepath=fpath,
                                         fheaders=fheaders,
                                         last_obj=last_obj)
def container_cdn_command(self, url, container, sfile=None):
    """Command your CDN enabled Container.

    :param url:
    :param container:
    """
    rty_count = ARGS.get("error_retry")
    for retry in basic.retryloop(attempts=rty_count, delay=2, obj=sfile):
        cheaders = self.payload["headers"]
        if sfile is not None:
            rpath = http.quoter(url=url.path, cont=container, ufile=sfile)
            # perform CDN Object DELETE
            adddata = "%s %s" % (cheaders, container)
            with meth.operation(retry, obj=adddata):
                resp = http.delete_request(url=url,
                                           rpath=rpath,
                                           headers=cheaders)
                self.resp_exception(resp=resp)
        else:
            rpath = http.quoter(url=url.path, cont=container)
            http.cdn_toggle(headers=cheaders)
            # perform CDN Enable PUT
            adddata = "%s %s" % (cheaders, container)
            with meth.operation(retry, obj=adddata):
                resp = http.put_request(url=url,
                                        rpath=rpath,
                                        headers=cheaders)
                self.resp_exception(resp=resp)

        report.reporter(
            msg="OBJECT %s MESSAGE %s %s %s" % (rpath,
                                                resp.status_code,
                                                resp.reason,
                                                resp.request),
            prt=False,
            lvl="debug",
        )
def authenticate(): """Authentication For Openstack API. Pulls the full Openstack Service Catalog Credentials are the Users API Username and Key/Password "osauth" has a Built in Rackspace Method for Authentication Set a DC Endpoint and Authentication URL for the OpenStack environment """ # Setup the request variables url = auth.parse_region() a_url = http.parse_url(url=url, auth=True) auth_json = auth.parse_reqtype() # remove the prefix for the Authentication URL if Found LOG.debug("POST == REQUEST DICT > JSON DUMP %s", auth_json) auth_json_req = json.dumps(auth_json) headers = {"Content-Type": "application/json"} # Send Request request = ("POST", a_url.path, auth_json_req, headers) resp_read = auth.request_process(aurl=a_url, req=request) LOG.debug("POST Authentication Response %s", resp_read) try: auth_resp = json.loads(resp_read) except ValueError as exp: LOG.error("Authentication Failure %s\n%s", exp, traceback.format_exc()) raise turbo.SystemProblem("JSON Decode Failure. ERROR: %s - RESP %s" % (exp, resp_read)) else: auth_info = auth.parse_auth_response(auth_resp) token, tenant, user, inet, enet, cnet, acfep = auth_info report.reporter(msg=("API Access Granted. TenantID: %s Username: %s" % (tenant, user)), prt=False, log=True) return token, tenant, user, inet, enet, cnet, a_url, acfep
def container_cdn_command(self, url, container, sfile=None):
    """Command your CDN enabled Container.

    :param url:
    :param container:
    """
    rty_count = ARGS.get('error_retry')
    for retry in basic.retryloop(attempts=rty_count, delay=2, obj=sfile):
        # Open Connection
        conn = http.open_connection(url=url)
        with meth.operation(retry, conn):
            cheaders = self.payload['headers']
            if sfile is not None:
                rpath = http.quoter(url=url.path,
                                    cont=container,
                                    ufile=sfile)
                # perform CDN Object DELETE
                conn.request('DELETE', rpath, headers=cheaders)
                resp, read = http.response_get(conn=conn, retry=retry)
                self.resp_exception(resp=resp, rty=retry)
            else:
                rpath = http.quoter(url=url.path, cont=container)
                http.cdn_toggle(headers=cheaders)
                # perform CDN Enable PUT
                conn.request('PUT', rpath, headers=cheaders)
                resp, read = http.response_get(conn=conn, retry=retry)
                self.resp_exception(resp=resp, rty=retry)

            report.reporter(
                msg=('OBJECT %s MESSAGE %s %s %s'
                     % (rpath, resp.status, resp.reason, resp.msg)),
                prt=False,
                lvl='debug'
            )
def _putter(self, conn, fpath, rpath, fheaders, retry, skip=False):
    """Place object into the container.

    :param conn:
    :param fpath:
    :param rpath:
    :param fheaders:
    :param retry:
    """
    if self._checker(conn, rpath, fpath, fheaders, retry, skip) is True:
        report.reporter(
            msg='OBJECT ORIGIN %s RPATH %s' % (fpath, rpath),
            prt=False,
            lvl='debug'
        )

        if basic.file_exists(fpath) is False:
            return None
        else:
            with open(fpath, 'rb') as f_open:
                conn.request('PUT', rpath, body=f_open, headers=fheaders)
            resp, read = http.response_get(conn=conn, retry=retry)
            self.resp_exception(resp=resp, rty=retry)

            report.reporter(
                msg=('MESSAGE %s %s %s' % (resp.status,
                                           resp.reason,
                                           resp.msg)),
                prt=False,
                lvl='debug'
            )
def set_concurrency(args, file_count):
    """Return the concurrency rate for the job.

    Concurrency is a user specified variable set when the arguments are
    parsed. However, if the number of things turbolift has to do is less
    than the desired concurrency, the concurrency rate is lowered to the
    number of operations.

    :param args:
    :param file_count:
    :return int:
    """
    def verbose(ccr):
        report.reporter(
            msg='MESSAGE: We are creating %s Processes' % ccr,
            prt=False
        )
        return ccr

    _cc = args.get('cc')

    if _cc > file_count:
        report.reporter(
            msg=('MESSAGE: There are fewer things to do than the number of'
                 ' concurrent processes specified by either an override'
                 ' or the system defaults. I am leveling the number of'
                 ' concurrent processes to the number of jobs to perform.'),
            lvl='warn'
        )
        return verbose(ccr=file_count)
    else:
        return verbose(ccr=_cc)
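# Illustrative sketch (not part of the library): how the leveling behaves,
# assuming `args` is the parsed-argument dict holding the 'cc' value.
#
#   set_concurrency(args={'cc': 25}, file_count=10)    # returns 10 (leveled)
#   set_concurrency(args={'cc': 25}, file_count=5000)  # returns 25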
def start(self): """Retrieve a long list of all files in a container.""" # Package up the Payload payload = http.prep_payload(auth=self.auth, container=None, source=None, args=ARGS) # Prep Actions. self.go = actions.CloudActions(payload=payload) report.reporter(msg='PAYLOAD\t: "%s"' % payload, log=True, lvl="debug", prt=False) with multi.spinner(): if ARGS.get("cdn_info"): url = payload["cnet"] else: url = payload["url"] message = self.go.detail_show(url=url) try: if message.status_code != 404: report.reporter(msg="Object Found...") report.reporter(msg=report.print_virt_table(dict(message.headers))) else: report.reporter(msg="Nothing Found...") except ValueError as exp: report.reporter(msg=("Non-hashable Type, Likley Item is not found." " Additional Data: %s" % exp))
def start(self): """Retrieve a long list of all files in a container.""" # Package up the Payload payload = http.prep_payload(auth=self.auth, container=None, source=None, args=ARGS) # Prep Actions. self.go = actions.CloudActions(payload=payload) report.reporter(msg='PAYLOAD\t: "%s"' % payload, log=True, lvl='debug', prt=False) with multi.spinner(): if ARGS.get('cdn_info'): url = payload['cnet'] else: url = payload['url'] message = self.go.detail_show(url=url) try: if message.status_code != 404: report.reporter(msg='Object Found...') report.reporter( msg=report.print_virt_table(dict(message.headers))) else: report.reporter(msg='Nothing Found...') except ValueError as exp: report.reporter(msg=('Non-hashable Type, Likley Item is not found.' ' Additional Data: %s' % exp))
def job_processer(num_jobs, objects, job_action, concur, kwargs=None, opt=None):
    """Process all jobs in batches.

    :param num_jobs:
    :param objects:
    :param job_action:
    :param concur:
    :param kwargs:
    :param opt:
    """
    count = 0
    batch_size = basic.batcher(num_files=num_jobs)
    for work in basic.batch_gen(data=objects,
                                batch_size=batch_size,
                                count=num_jobs):
        count += 1
        report.reporter(msg='Job Count %s' % count)
        work_q = basic_queue(work)
        with spinner(work_q=work_q):
            worker_proc(job_action=job_action,
                        concurrency=concur,
                        queue=work_q,
                        opt=opt,
                        kwargs=kwargs)
            basic.stupid_hack(wait=1)
        work_q.close()
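# Usage sketch (mirrors the callers shown elsewhere in this module, e.g. the
# upload and delete actions); the payload values here are illustrative only.
#
#   kwargs = {'url': payload['url'],
#             'container': payload['c_name'],
#             'cf_job': getattr(self.go, 'object_deleter')}
#   multi.job_processer(num_jobs=num_files,
#                       objects=obj_list,
#                       job_action=multi.doerator,
#                       concur=concurrency,
#                       kwargs=kwargs)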
def _putter(self, url, fpath, rpath, fheaders, skip=False):
    """Place object into the container.

    :param url:
    :param fpath:
    :param rpath:
    :param fheaders:
    """
    if self._checker(url, rpath, fpath, fheaders, skip) is True:
        report.reporter(
            msg='OBJECT ORIGIN %s RPATH %s' % (fpath, rpath),
            prt=False,
            lvl='debug'
        )

        if basic.file_exists(fpath) is False:
            return None
        else:
            with open(fpath, 'rb') as f_open:
                resp = http.put_request(
                    url=url, rpath=rpath, body=f_open, headers=fheaders
                )
                self.resp_exception(resp=resp)

            report.reporter(
                msg=('MESSAGE %s %s %s' % (resp.status_code,
                                           resp.reason,
                                           resp.request)),
                prt=False,
                lvl='debug'
            )
def _downloader(self, url, rpath, fheaders, lfile, source, skip=False):
    """Download a specified object in the container.

    :param url:
    :param rpath:
    :param fheaders:
    :param lfile:
    :param skip:
    """
    resp = None

    if source is None:
        local_f = lfile
    else:
        local_f = basic.jpath(root=source, inode=lfile)

    if self._checker(url, rpath, local_f, fheaders, skip) is True:
        report.reporter(
            msg='Downloading remote %s to local file %s' % (rpath, lfile),
            prt=False,
            lvl='debug',
        )

        # Perform Object GET
        resp = http.get_request(
            url=url, rpath=rpath, headers=fheaders, stream=True
        )
        self.resp_exception(resp=resp)
        local_f = basic.collision_rename(file_name=local_f)

        # Open our source file and write it
        with open(local_f, 'wb') as f_name:
            for chunk in resp.iter_content(chunk_size=2048):
                if chunk:
                    f_name.write(chunk)
                    f_name.flush()
        resp.close()

    if ARGS.get('restore_perms') is not None:
        # Make a connection
        if resp is None:
            resp = self._header_getter(
                url=url, rpath=rpath, fheaders=fheaders
            )

        all_headers = resp.headers

        if all(['x-object-meta-group' in all_headers,
                'x-object-meta-owner' in all_headers,
                'x-object-meta-perms' in all_headers]):
            basic.restor_perms(local_file=local_f, headers=all_headers)
        else:
            report.reporter(
                msg=('No Permissions were restored, because none were'
                     ' saved on the object "%s"' % rpath),
                lvl='warn',
                log=True
            )
def _deleterator(payload): """Multipass Object Delete.""" report.reporter(msg='Getting file list') with multi.spinner(): # Get all objects in a Container objects, list_count, last_obj = self.action( url=payload['url'], container=payload['c_name'] ) if ARGS.get('pattern_match'): objects = basic.match_filter( idx_list=objects, pattern=ARGS['pattern_match'], dict_type=True ) # Count the number of objects returned. if objects is False: report.reporter(msg='No Container found.') return elif objects is not None: # Load the queue obj_list = [obj['name'] for obj in objects] num_files = len(obj_list) if num_files < 1: report.reporter(msg='No Objects found.') return else: report.reporter(msg='Nothing found.') return # Get The rate of concurrency concurrency = multi.set_concurrency(args=ARGS, file_count=num_files) if ARGS.get('object'): obj_names = ARGS.get('object') obj_list = [obj for obj in obj_list if obj in obj_names] if not obj_list: return 'Nothing Found to Delete.' num_files = len(obj_list) report.reporter( msg=('Performing Object Delete for "%s" object(s)...' % num_files) ) kwargs = {'url': payload['url'], 'container': payload['c_name'], 'cf_job': getattr(self.go, 'object_deleter')} multi.job_processer( num_jobs=num_files, objects=obj_list, job_action=multi.doerator, concur=concurrency, kwargs=kwargs ) _deleterator(payload=payload)
def get_local_files(): """Find all files specified in the "source" path. This creates a list for all of files using the full path. """ def not_list(item): """Exclude items. :param item: :return True|False: """ if all([not os.path.islink(item), not os.path.ismount(item)]): if not os.path.getsize(item) > 4831838208: return True else: return False def indexer(location): """Return a list of indexed files. :param location: :return: """ _location = basic.real_full_path(location.encode('utf8')) if os.path.isdir(_location): r_walk = os.walk(_location) indexes = [(root, fls) for root, sfs, fls in r_walk] return [ basic.jpath(root=inx[0], inode=inode) for inx in indexes for inode in inx[1] ] elif os.path.isfile(_location): return [_location] else: raise turbo.NoFileProvided('No Path was Found for %s' % _location) try: d_paths = ARGS.get('source') if not isinstance(d_paths, list): d_paths = [d_paths] # Local Index Path c_index = [indexer(location=d_path) for d_path in d_paths] # make sure my files are only files, and compare it with the not_list f_index = [ item for subl in c_index for item in subl if not_list(item=item) ] except Exception as exp: raise turbo.SystemProblem('Died for some reason. MESSAGE:\t%s' % exp) else: report.reporter(msg='FILE LIST:\t%s' % f_index, lvl='debug', prt=False) return f_index
def authenticate(): """Authentication For Openstack API. Pulls the full Openstack Service Catalog Credentials are the Users API Username and Key/Password "osauth" has a Built in Rackspace Method for Authentication Set a DC Endpoint and Authentication URL for the OpenStack environment """ # Setup the request variables a_url = "https://zebra.zerovm.org/auth/v1.0" #a_url = http.parse_url(url=url, auth=True) auth_json = auth.parse_reqtype() print auth_json # remove the prefix for the Authentication URL if Found # LOG.debug('POST == REQUEST DICT > JSON DUMP %s', auth_json) # auth_json_req = json.dumps(auth_json) headers = { 'Content-Type': 'application/json', "X-Auth-User": auth_json['auth']['passwordCredentials']['username'], "X-Auth-Key": auth_json['auth']['passwordCredentials']['password']} # Send Request try: auth_resp = requests.get( url=a_url, headers=headers ) if auth_resp.status_code >= 300: raise SystemExit( 'Authentication Failure, %s %s' % (auth_resp.status_code, auth_resp.reason) ) except ValueError as exp: LOG.error('Authentication Failure %s\n%s', exp, traceback.format_exc()) raise turbo.SystemProblem('JSON Decode Failure. ERROR: %s' % exp) else: LOG.debug('POST Authentication Response %s', auth_resp.json()) #auth_info = auth.parse_auth_response(auth_resp.json()) #token, tenant, user, inet, enet, cnet, acfep = auth_info token = auth_resp.headers['x-auth-token'] tenant, user = auth_json['auth']['passwordCredentials']['username'].split(":") inet = urlparse.urlparse(auth_resp.headers['x-storage-url']) enet = inet cnet = None acfep = inet report.reporter( msg=('API Access Granted. TenantID: %s Username: %s' % (tenant, user)), prt=False, log=True ) return token, tenant, user, inet, enet, cnet, urlparse.urlparse(a_url), acfep
def get_request(url, headers, rpath, stream=False):
    """Perform an HTTP GET request.

    :param url:
    :param headers:
    :param rpath:
    :param stream:
    :return resp:
    """
    try:
        _url = urlparse.urljoin(urlparse.urlunparse(url), rpath)
        kwargs = {'timeout': ARGS.get('timeout')}
        resp = requests.get(_url, headers=headers, stream=stream, **kwargs)
        report.reporter(msg='INFO: %s %s %s' % (resp.status_code,
                                                resp.reason,
                                                resp.request),
                        prt=False)
    except Exception as exp:
        LOG.error('Not able to perform Request ERROR: %s', exp)
    else:
        return resp
def batcher(num_files): """Check the batch size and return it. :param num_files: :return int: """ batch_size = turbo.ARGS.get("batch_size") report.reporter(msg='Job process MAX Batch Size is "%s"' % batch_size, lvl="debug", log=True, prt=False) if num_files > batch_size: ops = num_files / batch_size + 1 report.reporter(msg='This will take "%s" operations to complete.' % ops, lvl="warn", log=True, prt=True) return batch_size
def _compare(resp, obj):
    """Return True if the target object is missing or appears out of date."""
    if resp.status == 404:
        report.reporter(msg='Target Object %s not found' % obj['name'],
                        prt=False)
        return True
    elif resp.getheader('etag') != obj['hash']:
        report.reporter(msg='Checksum Mismatch on Target Object %s'
                            % obj['name'],
                        prt=False,
                        lvl='debug')
        return _time_difference(resp, obj)
    else:
        return False
def object_lister(self, url, container, object_count=None, last_obj=None):
    """Builds a long list of objects found in a container.

    NOTE: This could be millions of Objects.

    :param url:
    :param container:
    :param object_count:
    :param last_obj:
    :return None | list:
    """
    for retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                 obj='Object List'):
        # Open Connection
        conn = http.open_connection(url=url)

        # Open connection and perform operation
        with meth.operation(retry, conn):
            # Determine how many files are in the container
            fpath = http.quoter(url=url.path, cont=container)

            # Make a connection
            resp = self._header_getter(conn=conn,
                                       rpath=fpath,
                                       fheaders=self.payload['headers'],
                                       retry=retry)
            if resp.status == 404:
                report.reporter(
                    msg='Not found. %s | %s' % (resp.status, resp.msg)
                )
                return None, None, None
            else:
                if object_count is None:
                    head_check = dict(resp.getheaders())
                    object_count = head_check.get(
                        'x-container-object-count'
                    )
                    if object_count:
                        object_count = int(object_count)
                        if not object_count > 0:
                            return None, None, None
                    else:
                        return None, None, None

                # Set the number of loops that we are going to do
                return self._list_getter(conn=conn,
                                         count=object_count,
                                         filepath=fpath,
                                         fheaders=self.payload['headers'],
                                         last_obj=last_obj)
def get_request(url, headers, rpath, stream=False):
    """Perform an HTTP GET request.

    :param url:
    :param headers:
    :param rpath:
    :param stream:
    :return resp:
    """
    try:
        _url = urlparse.urljoin(urlparse.urlunparse(url), rpath)
        resp = requests.get(_url, headers=headers, stream=stream)
        report.reporter(
            msg='INFO: %s %s %s' % (resp.status_code,
                                    resp.reason,
                                    resp.request),
            prt=False
        )
    except Exception as exp:
        LOG.error('Not able to perform Request ERROR: %s', exp)
    else:
        return resp
def start(self): """Retrieve a long list of all files in a container.""" # Package up the Payload payload = http.prep_payload( auth=self.auth, container=None, source=None, args=ARGS ) # Prep Actions. self.go = actions.CloudActions(payload=payload) report.reporter( msg='PAYLOAD\t: "%s"' % payload, log=True, lvl='debug', prt=False ) with multi.spinner(): if ARGS.get('cdn_info'): url = payload['cnet'] else: url = payload['url'] message = self.go.detail_show(url=url) if isinstance(message, list): report.reporter(msg='Item Found...') report.reporter(msg=report.print_virt_table(dict(message))) else: report.reporter(msg=message)
def remote_delete(self, payload):
    """Run the remote delete, if the remote delete option was set.

    NOTE: Remote delete will delete ALL Objects in a remote container
    which differ from the objects in the SOURCED LOCAL FILESYSTEM.

    I.e. if this option is used on one directory and then on another
    directory whose files differ, any difference will be deleted, based on
    the index information found in the LOCAL FILE SYSTEM on the LAST
    command run.

    :param payload: ``dict``
    """
    report.reporter(msg='Getting file list for REMOTE DELETE')

    # From the remote system see if we have differences in the local system
    f_indexed = self._index_local_files()

    objects = self.go.object_lister(url=payload['url'],
                                    container=payload['c_name'])
    source = payload['source']
    obj_names = [
        basic.jpath(root=source, inode=obj.get('name'))
        for obj in objects[0]
    ]
    obj_names = set(obj_names)

    # Sort the difference between remote files and local files.
    objects = [obj for obj in obj_names if obj not in f_indexed]
    if objects:
        # Set Basic Data for file delete.
        num_files = len(objects)
        report.reporter(
            msg=('MESSAGE: "%d" Files have been found to be removed'
                 ' from the REMOTE CONTAINER.' % num_files)
        )
        concurrency = multi.set_concurrency(args=ARGS,
                                            file_count=num_files)

        # Delete the difference in Files.
        report.reporter(msg='Performing REMOTE DELETE')

        del_objects = [
            basic.get_sfile(ufile=obj, source=payload['source'])
            for obj in objects if obj is not None
        ]

        kwargs = {
            'url': payload['url'],
            'container': payload['c_name'],
            'cf_job': getattr(self.go, 'object_deleter')
        }
        multi.job_processer(num_jobs=num_files,
                            objects=del_objects,
                            job_action=multi.doerator,
                            concur=concurrency,
                            kwargs=kwargs)
    else:
        report.reporter(
            msg='No Difference between REMOTE and LOCAL Directories.'
        )
def start(self): """Retrieve a long list of all files in a container.""" # Package up the Payload payload = http.prep_payload(auth=self.auth, container=None, source=None, args=ARGS) # Prep Actions. self.go = actions.CloudActions(payload=payload) report.reporter(msg='PAYLOAD\t: "%s"' % payload, log=True, lvl='debug', prt=False) with multi.spinner(): if ARGS.get('cdn_info'): url = payload['cnet'] else: url = payload['url'] message = self.go.detail_show(url=url) if isinstance(message, list): report.reporter(msg='Item Found...') report.reporter(msg=report.print_virt_table(dict(message))) else: report.reporter(msg=message)
def get_local_files(): """Find all files specified in the "source" path. This creates a list for all of files using the full path. """ def not_list(item): """Exclude items. :param item: :return True|False: """ if all([not os.path.islink(item), not os.path.ismount(item)]): if not os.path.getsize(item) > 4831838208: return True else: return False def indexer(location): """Return a list of indexed files. :param location: :return: """ _location = basic.real_full_path(location.encode("utf8")) if os.path.isdir(_location): r_walk = os.walk(_location) indexes = [(root, fls) for root, sfs, fls in r_walk] return [basic.jpath(root=inx[0], inode=inode) for inx in indexes for inode in inx[1]] elif os.path.isfile(_location): return [_location] else: raise turbo.NoFileProvided("No Path was Found for %s" % _location) try: d_paths = ARGS.get("source") if not isinstance(d_paths, list): d_paths = [d_paths] # Local Index Path c_index = [indexer(location=d_path) for d_path in d_paths] # make sure my files are only files, and compare it with the not_list f_index = [item for subl in c_index for item in subl if not_list(item=item)] except Exception as exp: raise turbo.SystemProblem("Died for some reason. MESSAGE:\t%s" % exp) else: report.reporter(msg="FILE LIST:\t%s" % f_index, lvl="debug", prt=False) return f_index
def authenticate(): """Authentication For Openstack API. Pulls the full Openstack Service Catalog Credentials are the Users API Username and Key/Password "osauth" has a Built in Rackspace Method for Authentication Set a DC Endpoint and Authentication URL for the OpenStack environment """ # Setup the request variables a_url = "https://zebra.zerovm.org/auth/v1.0" #a_url = http.parse_url(url=url, auth=True) auth_json = auth.parse_reqtype() print auth_json # remove the prefix for the Authentication URL if Found # LOG.debug('POST == REQUEST DICT > JSON DUMP %s', auth_json) # auth_json_req = json.dumps(auth_json) headers = { 'Content-Type': 'application/json', "X-Auth-User": auth_json['auth']['passwordCredentials']['username'], "X-Auth-Key": auth_json['auth']['passwordCredentials']['password'] } # Send Request try: auth_resp = requests.get(url=a_url, headers=headers) if auth_resp.status_code >= 300: raise SystemExit('Authentication Failure, %s %s' % (auth_resp.status_code, auth_resp.reason)) except ValueError as exp: LOG.error('Authentication Failure %s\n%s', exp, traceback.format_exc()) raise turbo.SystemProblem('JSON Decode Failure. ERROR: %s' % exp) else: LOG.debug('POST Authentication Response %s', auth_resp.json()) #auth_info = auth.parse_auth_response(auth_resp.json()) #token, tenant, user, inet, enet, cnet, acfep = auth_info token = auth_resp.headers['x-auth-token'] tenant, user = auth_json['auth']['passwordCredentials'][ 'username'].split(":") inet = urlparse.urlparse(auth_resp.headers['x-storage-url']) enet = inet cnet = None acfep = inet report.reporter(msg=('API Access Granted. TenantID: %s Username: %s' % (tenant, user)), prt=False, log=True) return token, tenant, user, inet, enet, cnet, urlparse.urlparse( a_url), acfep
def authenticate(): """Authentication For Openstack API. Pulls the full Openstack Service Catalog Credentials are the Users API Username and Key/Password "osauth" has a Built in Rackspace Method for Authentication Set a DC Endpoint and Authentication URL for the OpenStack environment """ # Setup the request variables url = auth.parse_region() LOG.debug('Raw Auth URL: [ %s ]', url) a_url = http.parse_url(url=url, auth=True) headers = { 'Content-Type': 'application/json', 'Accept': 'application/json' } headers.update(auth.get_headers() or {}) auth_json = auth.parse_reqtype() or {} LOG.debug('Parsed Auth URL: [ %s ]', a_url) # remove the prefix for the Authentication URL if Found auth_json_req = json.dumps(auth_json) LOG.debug('Request JSON: [ %s ]', auth_json_req) LOG.debug('Request Headers: [ %s ]', headers) # Send Request try: auth_resp = auth.auth_request(a_url, headers=headers, body=auth_json_req) if auth_resp.status_code >= 300: raise SystemExit( 'Authentication Failure, %s %s' % (auth_resp.status_code, auth_resp.reason) ) except ValueError as exp: LOG.error('Authentication Failure %s\n%s', exp, traceback.format_exc()) raise turbo.SystemProblem('JSON Decode Failure. ERROR: %s' % exp) else: auth_info = auth.parse_auth_response(auth_resp) token, tenant, user, inet, enet, cnet, acfep = auth_info report.reporter( msg=('API Access Granted. TenantID: %s Username: %s' % (tenant, user)), prt=False, log=True ) return token, tenant, user, inet, enet, cnet, a_url, acfep
def head_request(url, headers, rpath):
    """Perform an HTTP HEAD request.

    :param url:
    :param headers:
    :param rpath:
    :return resp:
    """
    try:
        _url = urlparse.urljoin(urlparse.urlunparse(url), rpath)
        kwargs = {'timeout': ARGS.get('timeout')}
        resp = requests.head(_url, headers=headers, **kwargs)
        report.reporter(msg='INFO: %s %s %s' % (resp.status_code,
                                                resp.reason,
                                                resp.request),
                        prt=False)
    except Exception as exp:
        report.reporter('Not able to perform Request ERROR: %s' % exp,
                        lvl='error',
                        log=True)
    else:
        return resp
def start(self): """This is the upload method. Uses file_upload is to simply upload all files and folders to a specified container. """ f_indexed = self._index_local_files() num_files = len(f_indexed) # Get The rate of concurrency concurrency = multi.set_concurrency(args=ARGS, file_count=num_files) # Package up the Payload payload = multi.manager_dict( http.prep_payload( auth=self.auth, container=ARGS.get('container', basic.rand_string()), source=basic.get_local_source(), args=ARGS ) ) report.reporter(msg='MESSAGE : "%s" Files found.' % num_files) report.reporter( msg='PAYLOAD : [ %s ]' % payload, prt=False, lvl='debug' ) # Set the actions class up self.go = actions.CloudActions(payload=payload) kwargs = {'url': payload['url'], 'container': payload['c_name']} # get that the container exists if not create it. self.go.container_create(**kwargs) kwargs['source'] = payload['source'] kwargs['cf_job'] = getattr(self.go, 'object_putter') multi.job_processer( num_jobs=num_files, objects=f_indexed, job_action=multi.doerator, concur=concurrency, kwargs=kwargs ) if ARGS.get('delete_remote') is True: self.remote_delete(payload=payload)
def object_lister(self, url, container, object_count=None, last_obj=None):
    """Builds a long list of objects found in a container.

    NOTE: This could be millions of Objects.

    :param url:
    :param container:
    :param object_count:
    :param last_obj:
    :return None | list:
    """
    for retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                 obj='Object List'):
        # Open Connection
        conn = http.open_connection(url=url)

        # Open connection and perform operation
        with meth.operation(retry, conn):
            # Determine how many files are in the container
            fpath = http.quoter(url=url.path, cont=container)

            # Make a connection
            resp = self._header_getter(conn=conn,
                                       rpath=fpath,
                                       fheaders=self.payload['headers'],
                                       retry=retry)
            if resp.status == 404:
                report.reporter(msg='Not found. %s | %s' % (resp.status,
                                                            resp.msg))
                return None, None, None
            else:
                if object_count is None:
                    head_check = dict(resp.getheaders())
                    object_count = head_check.get(
                        'x-container-object-count')
                    if object_count:
                        object_count = int(object_count)
                        if not object_count > 0:
                            return None, None, None
                    else:
                        return None, None, None

                # Set the number of loops that we are going to do
                return self._list_getter(conn=conn,
                                         count=object_count,
                                         filepath=fpath,
                                         fheaders=self.payload['headers'],
                                         last_obj=last_obj)
def delete_request(url, headers, rpath):
    """Perform an HTTP DELETE request.

    :param url:
    :param headers:
    :param rpath:
    :return resp:
    """
    try:
        _url = urlparse.urljoin(urlparse.urlunparse(url), rpath)
        kwargs = {'timeout': ARGS.get('timeout')}
        resp = requests.delete(_url, headers=headers, **kwargs)
        report.reporter(
            msg='INFO: %s %s %s' % (resp.status_code,
                                    resp.reason,
                                    resp.request),
            prt=False
        )
    except Exception as exp:
        LOG.error('Not able to perform Request ERROR: %s', exp)
    else:
        return resp
def _compare(resp, obj):
    """Return True if the target object is missing or appears out of date."""
    if resp.status == 404:
        report.reporter(
            msg='Target Object %s not found' % obj['name'],
            prt=False
        )
        return True
    elif resp.getheader('etag') != obj['hash']:
        report.reporter(
            msg='Checksum Mismatch on Target Object %s' % obj['name'],
            prt=False,
            lvl='debug'
        )
        return _time_difference(resp, obj)
    else:
        return False
def authenticate(): """Authentication For Openstack API. Pulls the full Openstack Service Catalog Credentials are the Users API Username and Key/Password "osauth" has a Built in Rackspace Method for Authentication Set a DC Endpoint and Authentication URL for the OpenStack environment """ # Setup the request variables url = auth.parse_region() LOG.debug('Raw Auth URL: [ %s ]', url) a_url = http.parse_url(url=url, auth=True) headers = { 'Content-Type': 'application/json', 'Accept': 'application/json' } headers.update(auth.get_headers() or {}) auth_json = auth.parse_reqtype() or {} LOG.debug('Parsed Auth URL: [ %s ]', a_url) # remove the prefix for the Authentication URL if Found auth_json_req = json.dumps(auth_json) LOG.debug('Request JSON: [ %s ]', auth_json_req) LOG.debug('Request Headers: [ %s ]', headers) # Send Request try: auth_resp = auth.auth_request(a_url, headers=headers, body=auth_json_req) if auth_resp.status_code >= 300: raise SystemExit('Authentication Failure, %s %s' % (auth_resp.status_code, auth_resp.reason)) except ValueError as exp: LOG.error('Authentication Failure %s\n%s', exp, traceback.format_exc()) raise turbo.SystemProblem('JSON Decode Failure. ERROR: %s' % exp) else: auth_info = auth.parse_auth_response(auth_resp) token, tenant, user, inet, enet, cnet, acfep = auth_info report.reporter(msg=('API Access Granted. TenantID: %s Username: %s' % (tenant, user)), prt=False, log=True) return token, tenant, user, inet, enet, cnet, a_url, acfep
def _deleter(self, url, rpath, fheaders):
    """Delete a specified object in the container.

    :param url:
    :param rpath:
    :param fheaders:
    """
    # perform Object Delete
    resp = http.delete_request(url=url, headers=fheaders, rpath=rpath)
    self.resp_exception(resp=resp)

    report.reporter(
        msg=("OBJECT %s MESSAGE %s %s %s" % (rpath,
                                             resp.status_code,
                                             resp.reason,
                                             resp.request)),
        prt=False,
        lvl="debug",
    )
def _putter(self, url, fpath, rpath, fheaders, skip=False):
    """Place object into the container.

    :param url:
    :param fpath:
    :param rpath:
    :param fheaders:
    """
    if self._checker(url, rpath, fpath, fheaders, skip) is True:
        report.reporter(msg="OBJECT ORIGIN %s RPATH %s" % (fpath, rpath),
                        prt=False,
                        lvl="debug")

        if basic.file_exists(fpath) is False:
            return None
        else:
            if os.path.islink(fpath):
                link = os.readlink(fpath)
                lpath = os.path.abspath(
                    os.path.join(os.path.dirname(fpath), link)
                )
                rpath_reversed = rpath.split("/")[::-1]
                fpath_reversed = fpath.split("/")[::-1]
                a, b = sorted((rpath_reversed, fpath_reversed), key=len)
                for i, j in enumerate(a):
                    if j != b[i]:
                        index = i
                        break
                container = rpath_reversed[index]
                container_dir = fpath.replace(
                    "/".join(rpath_reversed[:index][::-1]), ""
                )
                if container_dir in lpath:
                    manifest = container + "/" + lpath.replace(container_dir, "")
                    fheaders["X-Object-Manifest"] = manifest
                    resp = http.put_request(url=url,
                                            rpath=rpath,
                                            body=None,
                                            headers=fheaders)
                    self.resp_exception(resp=resp)
                else:
                    report.reporter(
                        msg="symlink %s points to location %s which is"
                            " outside the uploading directory" % (fpath, lpath),
                        lvl="warning",
                    )
            else:
                with open(fpath, "rb") as f_open:
                    resp = http.put_request(url=url,
                                            rpath=rpath,
                                            body=f_open,
                                            headers=fheaders)
                    self.resp_exception(resp=resp)
def _deleter(self, url, rpath, fheaders):
    """Delete a specified object in the container.

    :param url:
    :param rpath:
    :param fheaders:
    """
    # perform Object Delete
    resp = http.delete_request(url=url, headers=fheaders, rpath=rpath)
    self.resp_exception(resp=resp)

    report.reporter(
        msg=('OBJECT %s MESSAGE %s %s %s' % (rpath,
                                             resp.status_code,
                                             resp.reason,
                                             resp.request)),
        prt=False,
        lvl='debug')
def _header_poster(self, url, rpath, fheaders):
    """POST Headers on a specified object in the container.

    :param url:
    :param rpath:
    :param fheaders:
    """
    # perform Object POST request for header update.
    resp = http.post_request(url=url, rpath=rpath, headers=fheaders)
    self.resp_exception(resp=resp)

    report.reporter(msg='STATUS: %s MESSAGE: %s REASON: %s'
                        % (resp.status_code, resp.request, resp.reason),
                    prt=False,
                    lvl='debug')

    return resp.headers
def batcher(num_files): """Check the batch size and return it. :param num_files: :return int: """ batch_size = turbo.ARGS.get('batch_size') report.reporter(msg='Job process MAX Batch Size is "%s"' % batch_size, lvl='debug', log=True, prt=False) ops = num_files / batch_size + 1 report.reporter(msg='This will take "%s" operations to complete.' % ops, lvl='warn', log=True, prt=True) return batch_size
def _putter(self, url, fpath, rpath, fheaders, skip=False):
    """Place object into the container.

    :param url:
    :param fpath:
    :param rpath:
    :param fheaders:
    """
    if self._checker(url, rpath, fpath, fheaders, skip) is True:
        report.reporter(msg="OBJECT ORIGIN %s RPATH %s" % (fpath, rpath),
                        prt=False,
                        lvl="debug")

        if basic.file_exists(fpath) is False:
            return None
        else:
            with open(fpath, "rb") as f_open:
                resp = http.put_request(url=url,
                                        rpath=rpath,
                                        body=f_open,
                                        headers=fheaders)
                self.resp_exception(resp=resp)
def head_request(url, headers, rpath):
    """Perform an HTTP HEAD request.

    :param url:
    :param headers:
    :param rpath:
    :return resp:
    """
    try:
        _url = urlparse.urljoin(urlparse.urlunparse(url), rpath)
        resp = requests.head(_url, headers=headers)
        report.reporter(
            msg='INFO: %s %s %s' % (resp.status_code,
                                    resp.reason,
                                    resp.request),
            prt=False
        )
    except Exception as exp:
        report.reporter(
            'Not able to perform Request ERROR: %s' % exp,
            lvl='error',
            log=True
        )
    else:
        return resp
def _header_getter(self, conn, rpath, fheaders, retry):
    """Perform a HEAD request on a specified object in the container.

    :param conn:
    :param rpath:
    :param fheaders:
    :param retry:
    """
    # perform Object HEAD request
    conn.request('HEAD', rpath, headers=fheaders)
    resp, read = http.response_get(conn=conn, retry=retry)
    self.resp_exception(resp=resp, rty=retry)

    report.reporter(msg='INFO: %s %s %s' % (resp.status,
                                            resp.reason,
                                            resp.msg),
                    prt=False)

    return resp
@contextlib.contextmanager
def operation(retry, conn=None, obj=None, cleanup=None):
    """Wrap an operation in a try/except block.

    If cleanup is provided, the cleanup callable is run should an
    exception happen.

    :param retry:
    :param conn:
    :param obj:
    :param cleanup:
    :return:
    """
    try:
        yield retry
    except turbo.NoSource as exp:
        report.reporter(
            msg=('No Source. Message: %s\nADDITIONAL DATA: %s\nTB: %s'
                 % (traceback.format_exc(), exp, obj)),
            lvl='error'
        )
        retry()
    except turbo.SystemProblem as exp:
        report.reporter(
            msg='System Problems Found %s\nADDITIONAL DATA: %s' % (exp, obj),
            lvl='error'
        )
        retry()
    except KeyboardInterrupt:
        if cleanup is not None:
            cleanup()
        turbo.emergency_kill(reclaim=True)
    except IOError as exp:
        report.reporter(
            msg=('IO ERROR: %s. ADDITIONAL DATA: %s'
                 '\nMESSAGE %s will retry.'
                 '\nSTACKTRACE: %s' % (exp, obj, info.__appname__,
                                       traceback.format_exc())),
            lvl='error'
        )
        retry()
    except Exception:
        report.reporter(
            msg=('Failed Operation. ADDITIONAL DATA: %s\n%s will retry'
                 '\nTB: %s' % (obj, info.__appname__,
                               traceback.format_exc())),
            lvl='error'
        )
        retry()
    finally:
        if cleanup is not None:
            cleanup()
        if conn is not None:
            conn.close()
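# Usage sketch (mirrors how the other actions in this module consume it):
# the generator above is used as a context manager, and calling retry()
# re-runs the surrounding retryloop iteration when a handled exception is
# raised inside the block.
#
#   for retry in basic.retryloop(attempts=5, delay=2, obj='Object List'):
#       with meth.operation(retry, obj='HEAD container'):
#           resp = self._header_getter(url=url, rpath=fpath,
#                                      fheaders=fheaders)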
def _deleter(self, conn, rpath, fheaders, retry):
    """Delete a specified object in the container.

    :param conn:
    :param rpath:
    :param fheaders:
    :param retry:
    """
    # perform Object Delete
    conn.request('DELETE', rpath, headers=fheaders)
    resp, read = http.response_get(conn=conn, retry=retry)
    self.resp_exception(resp=resp, rty=retry)

    report.reporter(msg=('OBJECT %s MESSAGE %s %s %s'
                         % (rpath, resp.status, resp.reason, resp.msg)),
                    prt=False,
                    lvl='debug')
def _header_poster(self, url, rpath, fheaders):
    """POST Headers on a specified object in the container.

    :param url:
    :param rpath:
    :param fheaders:
    """
    # perform Object POST request for header update.
    resp = http.post_request(url=url, rpath=rpath, headers=fheaders)
    self.resp_exception(resp=resp)

    report.reporter(
        msg="STATUS: %s MESSAGE: %s REASON: %s" % (resp.status_code,
                                                   resp.request,
                                                   resp.reason),
        prt=False,
        lvl="debug",
    )

    return resp.headers
def _header_getter(self, conn, rpath, fheaders, retry):
    """Perform a HEAD request on a specified object in the container.

    :param conn:
    :param rpath:
    :param fheaders:
    :param retry:
    """
    # perform Object HEAD request
    conn.request('HEAD', rpath, headers=fheaders)
    resp, read = http.response_get(conn=conn, retry=retry)
    self.resp_exception(resp=resp, rty=retry)

    report.reporter(
        msg='INFO: %s %s %s' % (resp.status, resp.reason, resp.msg),
        prt=False
    )

    return resp
def _header_poster(self, conn, rpath, fheaders, retry):
    """POST Headers on a specified object in the container.

    :param conn:
    :param rpath:
    :param fheaders:
    :param retry:
    """
    # perform Object POST request for header update.
    conn.request('POST', rpath, headers=fheaders)
    resp, read = http.response_get(conn=conn, retry=retry)
    self.resp_exception(resp=resp, rty=retry)

    report.reporter(msg=('STATUS: %s MESSAGE: %s REASON: %s'
                         % (resp.status, resp.msg, resp.reason)),
                    prt=False,
                    lvl='debug')

    return dict(resp.getheaders())
def retryloop(attempts, timeout=None, delay=None, backoff=1, obj=None):
    """Yield a retry handle for the number of attempts you want to perform.

    The timeout allows the application to quit on "X".
    delay allows the loop to wait on fail. Useful for making REST calls.

    ACTIVE STATE retry loop
    http://code.activestate.com/recipes/578163-retry-loop/

    Example:
        Function for retrying an action.
        for retry in retryloop(attempts=10, timeout=30, delay=1, backoff=1):
            something
            if somecondition:
                retry()

    :param attempts:
    :param timeout:
    :param delay:
    :param backoff:
    """
    starttime = time.time()
    success = set()
    for _ in range(attempts):
        success.add(True)
        yield success.clear
        if success:
            return
        duration = time.time() - starttime
        if timeout is not None and duration > timeout:
            break
        if delay:
            time.sleep(delay)
            delay = delay * backoff

    report.reporter(
        msg=('RetryError: FAILED TO PROCESS "%s" after "%s" Attempts'
             % (obj, attempts)),
        lvl='critical',
        log=True
    )
def authenticate(): """Authentication For Openstack API. Pulls the full Openstack Service Catalog Credentials are the Users API Username and Key/Password "osauth" has a Built in Rackspace Method for Authentication Set a DC Endpoint and Authentication URL for the OpenStack environment :param auth_dict: required parameters are auth_url """ # Setup the request variables url, rax = auth.parse_region() a_url = http.parse_url(url=url, auth=True) auth_json = auth.parse_reqtype() # remove the prefix for the Authentication URL if Found LOG.debug('POST == REQUEST DICT > JSON DUMP %s', auth_json) auth_json_req = json.dumps(auth_json) headers = {'Content-Type': 'application/json'} # Send Request request = ('POST', a_url.path, auth_json_req, headers) resp_read = auth.request_process(aurl=a_url, req=request) LOG.debug('POST Authentication Response %s', resp_read) try: auth_resp = json.loads(resp_read) except ValueError as exp: LOG.error('Authentication Failure %s\n%s', exp, traceback.format_exc()) raise turbo.SystemProblem('JSON Decode Failure. ERROR: %s - RESP %s' % (exp, resp_read)) else: auth_info = auth.parse_auth_response(auth_resp) token, tenant, user, inet, enet, cnet, acfep = auth_info report.reporter(msg=('API Access Granted. TenantID: %s Username: %s' % (tenant, user)), prt=False, log=True) return token, tenant, user, inet, enet, cnet, a_url, acfep
def resp_exception(self, resp):
    """If we encounter an exception in our upload, attempt to resolve it.

    :param resp:
    """
    # Check to make sure we have all the bits needed
    if resp is None:
        raise turbo.SystemProblem('No response information.')
    elif not hasattr(resp, 'status_code'):
        raise turbo.SystemProblem('No Status to check.')
    elif resp.status_code == 401:
        report.reporter(
            msg=('Turbolift experienced an Authentication issue.'
                 ' STATUS %s REASON %s REQUEST %s. Turbolift will retry'
                 % (resp.status_code, resp.reason, resp.request)),
            lvl='warn',
            log=True,
            prt=False
        )

        # This was done in this manner due to how manager dicts are proxied
        # related : http://bugs.python.org/issue6766
        headers = self.payload['headers']
        headers['X-Auth-Token'] = get_new_token()
        self.payload['headers'] = headers

        raise turbo.AuthenticationProblem(
            'Attempting to resolve the Authentication issue.'
        )
    elif resp.status_code == 404:
        report.reporter(
            msg=('Not found STATUS: %s, REASON: %s, MESSAGE: %s'
                 % (resp.status_code, resp.reason, resp.request)),
            prt=False,
            lvl='debug'
        )
    elif resp.status_code == 413:
        _di = resp.headers
        basic.stupid_hack(wait=_di.get('retry_after', 10))
        raise turbo.SystemProblem(
            'The System encountered an API limitation and will'
            ' continue in "%s" Seconds' % _di.get('retry_after')
        )
    elif resp.status_code == 502:
        raise turbo.SystemProblem('Failure making Connection')
    elif resp.status_code == 503:
        basic.stupid_hack(wait=10)
        raise turbo.SystemProblem('SWIFT-API FAILURE')
    elif resp.status_code == 504:
        basic.stupid_hack(wait=10)
        raise turbo.SystemProblem('Gateway Time-out')
    elif resp.status_code >= 300:
        raise turbo.SystemProblem(
            'SWIFT-API FAILURE -> REASON %s REQUEST %s'
            % (resp.reason, resp.request)
        )
    else:
        report.reporter(
            msg=('MESSAGE %s %s %s' % (resp.status_code,
                                       resp.reason,
                                       resp.request)),
            prt=False,
            lvl='debug'
        )
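# Usage sketch (illustrative): the exceptions raised above are caught by the
# meth.operation() wrapper, which calls retry() so the surrounding retryloop
# re-runs the request with the refreshed headers.
#
#   for retry in basic.retryloop(attempts=ARGS.get('error_retry'), obj=rpath):
#       with meth.operation(retry, obj=rpath):
#           resp = http.head_request(url=url, rpath=rpath,
#                                    headers=self.payload['headers'])
#           self.resp_exception(resp=resp)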
def object_lister(self, url, container, object_count=None, last_obj=None):
    """Builds a long list of objects found in a container.

    NOTE: This could be millions of Objects.

    :param url:
    :param container:
    :param object_count:
    :param last_obj:
    :return None | list:
    """
    for retry in basic.retryloop(attempts=ARGS.get('error_retry'),
                                 obj='Object List'):
        fheaders = self.payload['headers']
        fpath = http.quoter(url=url.path, cont=container)
        with meth.operation(retry, obj='%s %s' % (fheaders, fpath)):
            resp = self._header_getter(url=url,
                                       rpath=fpath,
                                       fheaders=fheaders)
            if resp.status_code == 404:
                report.reporter(msg='Not found. %s | %s' % (resp.status_code,
                                                            resp.request))
                return None, None, None
            else:
                if object_count is None:
                    object_count = resp.headers.get(
                        'x-container-object-count')
                    if object_count:
                        object_count = int(object_count)
                        if not object_count > 0:
                            return None, None, None
                    else:
                        return None, None, None

                # Set the number of loops that we are going to do
                return self._list_getter(url=url,
                                         filepath=fpath,
                                         fheaders=fheaders,
                                         last_obj=last_obj)
def container_create(self, url, container):
    """Create a container if it is not Found.

    :param url:
    :param container:
    """
    rty_count = ARGS.get('error_retry')
    for retry in basic.retryloop(attempts=rty_count,
                                 delay=5,
                                 obj=container):
        rpath = http.quoter(url=url.path, cont=container)
        fheaders = self.payload['headers']
        with meth.operation(retry, obj='%s %s' % (fheaders, rpath)):
            resp = self._header_getter(url=url,
                                       rpath=rpath,
                                       fheaders=fheaders)

            # Check that the status was a good one
            if resp.status_code == 404:
                report.reporter(msg='Creating Container => %s' % container)
                resp = http.put_request(url=url, rpath=rpath, headers=fheaders)
                self.resp_exception(resp=resp)
                report.reporter(msg='Container "%s" Created' % container)
                return True
            else:
                report.reporter(msg='Container "%s" Found' % container)
                return False