def _resp_exception(self, resp):
    """If we encounter an exception in our upload, look at how we can
    attempt to resolve the exception.

    :param resp: response object returned from the request
    """
    # Guard against a missing response before touching its attributes.
    if resp is None:
        raise exceptions.SystemProblem(
            'No response information. Turbolift will retry...'
        )

    message = [
        'Url: [ %s ] Reason: [ %s ] Request: [ %s ] Status Code: [ %s ]. ',
        resp.url,
        resp.reason,
        resp.request,
        resp.status_code
    ]

    # Check to make sure we have all the bits needed
    if not hasattr(resp, 'status_code'):
        message[0] += 'No Status to check. Turbolift will retry...'
        raise exceptions.SystemProblem(message)
    elif resp.status_code == 401:
        message[0] += (
            'Turbolift experienced an Authentication issue. Turbolift'
            ' will retry...'
        )
        # Re-authenticate, then signal the caller to retry the request.
        self.job_args.update(auth.authenticate(self.job_args))
        raise exceptions.SystemProblem(message)
    elif resp.status_code == 404:
        message[0] += 'Item not found.'
        LOG.debug(*message)
    elif resp.status_code == 409:
        message[0] += 'Request Conflict. Turbolift is abandoning this...'
    elif resp.status_code == 413:
        return_headers = resp.headers
        retry_after = return_headers.get('retry_after', 10)
        cloud_utils.stupid_hack(wait=retry_after)
        message[0] += (
            'The System encountered an API limitation and will'
            ' continue in [ %s ] Seconds' % retry_after
        )
        raise exceptions.SystemProblem(message)
    elif resp.status_code == 502:
        message[0] += 'Failure making Connection. Turbolift will retry...'
        raise exceptions.SystemProblem(message)
    elif resp.status_code == 503:
        cloud_utils.stupid_hack(wait=10)
        message[0] += 'SWIFT-API FAILURE'
        raise exceptions.SystemProblem(message)
    elif resp.status_code == 504:
        cloud_utils.stupid_hack(wait=10)
        message[0] += 'Gateway Failure.'
        raise exceptions.SystemProblem(message)
    elif resp.status_code >= 300:
        message[0] += 'General exception.'
        raise exceptions.SystemProblem(message)
    else:
        LOG.debug(*message)
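# Illustrative sketch, not part of turbolift: one way a caller could drive
# ``_resp_exception``. The method logs and returns for benign statuses and
# raises ``exceptions.SystemProblem`` for retryable failures, so a caller
# can treat that exception as a "retry" signal. The names ``client``,
# ``make_request``, and ``max_retries`` below are hypothetical.
#
#     def request_with_retries(client, make_request, max_retries=5):
#         for _ in range(max_retries):
#             resp = make_request()
#             try:
#                 client._resp_exception(resp)
#             except exceptions.SystemProblem:
#                 continue  # retryable failure, attempt the request again
#             return resp
#         raise exceptions.SystemProblem('Request failed after retries.')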
def _compressor(self, file_list):
    """Create a local gzipped tar archive from a list of local objects.

    :param file_list: deque of dicts, each carrying a ``local_object`` path
    :returns: ``dict`` describing the archive that was created
    """
    # Set the name of the archive.
    tar_name = self.job_args.get('tar_name')
    tar_name = os.path.realpath(os.path.expanduser(tar_name))
    if not os.path.isdir(os.path.dirname(tar_name)):
        raise exceptions.DirectoryFailure(
            'The path to save the archive file does not exist.'
            ' PATH: [ %s ]',
            tar_name
        )

    if not tar_name.endswith('.tgz'):
        tar_name = '%s.tgz' % tar_name

    if self.job_args.get('add_timestamp'):
        # Prefix the archive name with a date and time stamp.
        date_format = '%a%b%d.%H.%M.%S.%Y'
        today = datetime.datetime.today()
        timestamp = today.strftime(date_format)
        _tar_name = os.path.basename(tar_name)
        tar_name = os.path.join(
            os.path.dirname(tar_name),
            '%s-%s' % (timestamp, _tar_name)
        )

    # Begin creating the archive.
    verify = self.job_args.get('verify')
    verify_list = self._return_deque()
    with tarfile.open(tar_name, 'w:gz') as tar:
        while file_list:
            try:
                local_object = file_list.pop()['local_object']
                if verify:
                    verify_list.append(local_object)
                tar.add(local_object)
            except IndexError:
                break

    if verify:
        # Re-open the archive and confirm every file made it in.
        with tarfile.open(tar_name, 'r') as tar:
            verified_items = self._return_deque()
            for member_info in tar.getmembers():
                verified_items.append(member_info.name)

            if len(verified_items) != len(verify_list):
                raise exceptions.SystemProblem(
                    'ARCHIVE NOT VERIFIED: Archive and File List do not'
                    ' Match.'
                )

    return {
        'meta': dict(),
        'local_object': tar_name,
        'container_object': os.path.basename(tar_name)
    }
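# Illustrative sketch, not part of turbolift: the contract ``_compressor``
# expects. Each entry in ``file_list`` is a dict with a ``local_object``
# path, and the returned dict points at the single ``.tgz`` that would then
# be uploaded. ``archiver`` and the paths below are hypothetical.
#
#     file_list = collections.deque([
#         {'local_object': '/tmp/example/a.log'},
#         {'local_object': '/tmp/example/b.log'},
#     ])
#     archive_item = archiver._compressor(file_list)
#     archive_item['local_object']      # e.g. '/home/user/backup.tgz'
#     archive_item['container_object']  # e.g. 'backup.tgz'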
def _show(self, container, container_objects):
    """Return details for a container or for a set of container objects.

    :param container: name of the container to inspect
    :param container_objects: iterable of object names, possibly empty
    :returns: ``list`` of detail responses
    """
    if self.job_args.get('cdn_info'):
        if container_objects:
            raise exceptions.SystemProblem(
                'You can not get CDN information on an object in your'
                ' container.'
            )
        url = self.job_args['cdn_storage_url']
    else:
        url = self.job_args['storage_url']

    if container_objects:
        returned_objects = self._return_deque()
        for container_object in container_objects:
            returned_objects.append(
                self.job.show_details(
                    url=url,
                    container=container,
                    container_object=container_object
                )
            )
        return returned_objects
    else:
        return [self.job.show_details(url=url, container=container)]
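# Illustrative sketch, not part of turbolift: ``_show`` returns a list of
# detail responses, either for the container itself or for each named
# object, and raises ``exceptions.SystemProblem`` when CDN info is
# requested together with object names. ``shower`` and the names below are
# hypothetical.
#
#     container_details = shower._show('backups', [])
#     object_details = shower._show('backups', ['a.log', 'b.log'])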
def parse_auth_response(self, auth_response):
    """Parse the auth response and return the tenant, token, and username.

    :param auth_response: the full object returned from an auth call
    :returns: ``dict``
    """
    auth_dict = dict()
    auth_response = auth_response.json()
    LOG.debug('Authentication Response Body [ %s ]', auth_response)

    access = auth_response.get('access')
    access_token = access.get('token')
    access_tenant = access_token.get('tenant')
    access_user = access.get('user')

    auth_dict['os_token'] = access_token.get('id')
    auth_dict['os_tenant'] = access_tenant.get('name')
    auth_dict['os_user'] = access_user.get('name')

    if not auth_dict['os_token']:
        raise exceptions.AuthenticationProblem(
            'When attempting to grab the tenant or user nothing was'
            ' found. No Token Found to Parse. Here is the DATA: [ %s ]'
            ' Stack Trace [ %s ]',
            auth_response,
            traceback.format_exc()
        )

    region = self.job_args.get('os_region')
    if not region:
        raise exceptions.SystemProblem('No Region Set')

    service_catalog = access.pop('serviceCatalog')

    # Get the storage URL
    object_endpoints = self._service_endpoints(
        service_catalog=service_catalog,
        types_list=turbolift.__srv_types__
    )

    # If the legacy internal flag is set, override the os_endpoint_type
    # TODO(cloudnull) Remove this in future releases
    if 'internal' in self.job_args and self.job_args['internal']:
        LOG.warn(
            'The use of the ``--internal`` flag has been deprecated and'
            ' will be removed in future releases. Please use the'
            ' ``--os-endpoint-type`` flag and set the type name'
            ' instead. In the case of using snet (service net) this is'
            ' generally noted as "internalURL". Example setting:'
            ' ``--os-endpoint-type internalURL``'
        )
        self.job_args['os_endpoint_type'] = 'internalURL'

    auth_dict['storage_url'] = get_service_url(
        region=region,
        endpoint_list=object_endpoints,
        lookup=self.job_args['os_endpoint_type']
    )

    # Get the CDN URL
    cdn_endpoints = self._service_endpoints(
        service_catalog=service_catalog,
        types_list=turbolift.__cdn_types__
    )

    if cdn_endpoints:
        auth_dict['cdn_storage_url'] = get_service_url(
            region=region,
            endpoint_list=cdn_endpoints,
            lookup=self.job_args['cdn_endpoint_type']
        )

    return auth_dict
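# Illustrative sketch, not part of turbolift: the Keystone v2-style body
# that ``parse_auth_response`` reads. Only the keys accessed above matter
# (access.token.id, access.token.tenant.name, access.user.name, and
# access.serviceCatalog); all values here are made up, and the service
# type and endpoint keys are assumptions about a typical catalog.
#
#     {
#         'access': {
#             'token': {
#                 'id': 'example-token-id',
#                 'tenant': {'name': 'example-tenant'}
#             },
#             'user': {'name': 'example-user'},
#             'serviceCatalog': [
#                 {
#                     'type': 'object-store',
#                     'endpoints': [
#                         {
#                             'region': 'EXAMPLE',
#                             'publicURL': 'https://storage.example.com/v1/AUTH_x',
#                             'internalURL': 'https://snet.example.com/v1/AUTH_x'
#                         }
#                     ]
#                 }
#             ]
#         }
#     }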