def paginate(qp, seen=0):
    """Yield v1 images page by page until exhausted or limited.

    Recurses with an updated 'marker' query param whenever a full
    page comes back, stopping once ``absolute_limit`` (from the
    enclosing scope) is reached.
    """
    # Note(flaper87): URL encoding should be moved inside http
    # utils; at the very least it shouldn't live here.
    #
    # Coerce every string param to str before handing the dict
    # to urlencode.
    for key in qp:
        val = qp[key]
        if isinstance(val, basestring):
            qp[key] = utils.ensure_str(val)

    path = '/v1/images/detail?%s' % urllib.urlencode(qp)
    fetched = self._list(path, "images")
    for img in fetched:
        seen += 1
        # Bail out as soon as the caller's absolute limit is hit.
        if absolute_limit is not None and seen > absolute_limit:
            return
        yield img

    limit = qp.get('limit')
    got_full_page = limit and len(fetched) == limit
    want_more = absolute_limit is None or 0 < seen < absolute_limit
    if got_full_page and want_more:
        # Resume after the last image we yielded and recurse.
        qp['marker'] = img.id
        for img in paginate(qp, seen):
            yield img
def paginate(qp, seen=0):
    """Yield images one page at a time, honoring ``absolute_limit``.

    :param qp: dict of query params; mutated in place (string values
        are re-encoded and 'marker' is set between pages).
    :param seen: count of images already yielded by earlier pages.
    """
    # Note(flaper87) Url encoding should
    # be moved inside http utils, at least
    # shouldn't be here.
    #
    # Making sure all params are str before
    # trying to encode them
    for param, value in qp.iteritems():
        if isinstance(value, basestring):
            qp[param] = utils.ensure_str(value)
    url = '/v1/images/detail?%s' % urllib.urlencode(qp)
    images = self._list(url, "images")
    for image in images:
        seen += 1
        # Stop once the enclosing scope's absolute_limit is exceeded.
        if absolute_limit is not None and seen > absolute_limit:
            return
        yield image
    page_size = qp.get('limit')
    # A full page suggests more results may exist; recurse starting
    # after the last image's id unless the limit is already met.
    if (page_size and len(images) == page_size and
            (absolute_limit is None or 0 < seen < absolute_limit)):
        qp['marker'] = image.id
        for image in paginate(qp, seen):
            yield image
def log_curl_request(self, method, url, kwargs):
    """Log the pending request as an equivalent curl command line."""
    pieces = ['curl -i -X %s' % method]

    for hdr_name, hdr_val in kwargs['headers'].items():
        pieces.append("-H '%s: %s'" % (hdr_name, hdr_val))

    # TLS-related connection kwargs map directly onto curl flags.
    for conn_key, flag_fmt in (('key_file', '--key %s'),
                               ('cert_file', '--cert %s'),
                               ('cacert', '--cacert %s')):
        conn_val = self.connection_kwargs.get(conn_key)
        if conn_val:
            pieces.append(flag_fmt % conn_val)

    if self.connection_kwargs.get('insecure'):
        pieces.append('-k')

    if 'body' in kwargs:
        pieces.append("-d '%s'" % kwargs['body'])

    pieces.append('%s%s' % (self.endpoint, url))
    LOG.debug(utils.ensure_str(' '.join(pieces)))
def log_http_response(resp, body=None):
    """Log an HTTP response's status line, headers, and optional body."""
    status_line = '\nHTTP/%.1f %s %s' % (resp.version / 10.0,
                                         resp.status, resp.reason)
    lines = [status_line]
    # getheaders() yields (name, value) pairs; '%' formats the tuple.
    lines.extend('%s: %s' % pair for pair in resp.getheaders())
    lines.append('')
    if body:
        lines.append(body)
        lines.append('')
    LOG.debug(utils.ensure_str('\n'.join(lines)))
def do_image_delete(gc, args): """Delete specified image(s).""" for args_image in args.images: image = utils.find_resource(gc.images, args_image) try: if args.verbose: print 'Requesting image delete for %s ...' % \ utils.ensure_str(args_image), gc.images.delete(image) if args.verbose: print '[Done]' except exc.HTTPException as e: if args.verbose: print '[Fail]' print '%s: Unable to delete image %s' % (e, args_image)
def do_image_delete(gc, args): """Delete specified image(s).""" for args_image in args.images: image = utils.find_resource(gc.images, args_image) try: if args.verbose: print 'Requesting image delete for %s ...' % \ utils.ensure_str(args_image), gc.images.delete(image) if args.verbose: print '[Done]' except exc.HTTPException, e: if args.verbose: print '[Fail]' print '%s: Unable to delete image %s' % (e, args_image)
def list(self, **kwargs):
    """Retrieve a listing of Image objects

    :param page_size: Number of images to request in each
                      paginated request
    :returns generator over list of Images
    """
    def fetch_pages(page_url):
        # Yield one page of results, then follow the server-supplied
        # 'next' link recursively until it is absent.
        resp, body = self.http_client.json_request('GET', page_url)
        for record in body['images']:
            yield record
        if 'next' in body:
            for record in fetch_pages(body['next']):
                yield record

    filters = kwargs.get('filters', {})
    page_size = kwargs.get('page_size')
    filters['limit'] = page_size if page_size else DEFAULT_PAGE_SIZE

    # Coerce string filter values to str so urlencode accepts them.
    for name in filters:
        val = filters[name]
        if isinstance(val, basestring):
            filters[name] = utils.ensure_str(val)

    listing_url = '/v2/images?%s' % urllib.urlencode(filters)
    for record in fetch_pages(listing_url):
        #NOTE(bcwaldon): remove 'self' for now until we have an elegant
        # way to pass it into the model constructor without conflict
        record.pop('self', None)
        yield self.model(**record)
def list(self, **kwargs):
    """Retrieve a listing of Image objects

    :param page_size: Number of images to request in each paginated request
    :returns generator over list of Images
    """
    def paginate(url):
        # Yield one page, then recurse into the server-supplied
        # 'next' link (absent on the final page).
        resp, body = self.http_client.json_request('GET', url)
        for image in body['images']:
            yield image
        try:
            next_url = body['next']
        except KeyError:
            # No 'next' link: this was the last page.
            return
        else:
            for image in paginate(next_url):
                yield image
    filters = kwargs.get('filters', {})
    # Fall back to the module default when no page_size was given.
    if not kwargs.get('page_size'):
        filters['limit'] = DEFAULT_PAGE_SIZE
    else:
        filters['limit'] = kwargs['page_size']
    # Coerce string filter values to str before urlencoding.
    for param, value in filters.iteritems():
        if isinstance(value, basestring):
            filters[param] = utils.ensure_str(value)
    url = '/v2/images?%s' % urllib.urlencode(filters)
    for image in paginate(url):
        #NOTE(bcwaldon): remove 'self' for now until we have an elegant
        # way to pass it into the model constructor without conflict
        image.pop('self', None)
        yield self.model(**image)
def _http_request(self, url, method, **kwargs):
    """
    Send an http request with the specified characteristics.

    Wrapper around httplib.HTTP(S)Connection.request to handle tasks
    such as setting headers and error handling.

    :param url: path (relative to the endpoint) to request.
    :param method: HTTP verb, e.g. 'GET'.
    :param kwargs: passed through to the connection; 'headers' and
        'body' are consumed here.
    :returns: tuple of (httplib response, body iterator).
    :raises exc.InvalidEndpoint: if the host cannot be resolved.
    :raises exc.CommunicationError: on socket errors/timeouts.
    """
    # Copy the kwargs so we can reuse the original in case of redirects
    kwargs['headers'] = copy.deepcopy(kwargs.get('headers', {}))
    kwargs['headers'].setdefault('User-Agent', USER_AGENT)
    if self.auth_token:
        kwargs['headers'].setdefault('X-Auth-Token', self.auth_token)

    self.log_curl_request(method, url, kwargs)
    conn = self.get_connection()

    # Note(flaper87): Before letting headers / url fly,
    # they should be encoded otherwise httplib will
    # complain. If we decide to rely on python-request
    # this wont be necessary anymore.
    kwargs['headers'] = self.encode_headers(kwargs['headers'])

    try:
        conn_url = posixpath.normpath('%s/%s' % (self.endpoint_path, url))
        # Note(flaper87): Ditto, headers / url
        # encoding to make httplib happy.
        conn_url = utils.ensure_str(conn_url)
        if kwargs['headers'].get('Transfer-Encoding') == 'chunked':
            # Hand-roll a chunked upload: httplib's request() would
            # try to compute a Content-Length from the body.
            conn.putrequest(method, conn_url)
            for header, value in kwargs['headers'].items():
                conn.putheader(header, value)
            conn.endheaders()
            chunk = kwargs['body'].read(CHUNKSIZE)
            # Chunk it, baby...
            while chunk:
                conn.send('%x\r\n%s\r\n' % (len(chunk), chunk))
                chunk = kwargs['body'].read(CHUNKSIZE)
            conn.send('0\r\n\r\n')
        else:
            conn.request(method, conn_url, **kwargs)
        resp = conn.getresponse()
    except socket.gaierror as e:
        message = "Error finding address for %s: %s" % (
            self.endpoint_hostname, e)
        raise exc.InvalidEndpoint(message=message)
    except (socket.error, socket.timeout) as e:
        endpoint = self.endpoint
        message = "Error communicating with %(endpoint)s %(e)s" % locals()
        raise exc.CommunicationError(message=message)

    body_iter = ResponseBodyIterator(resp)

    # Read body into string if it isn't obviously image data.
    # Initialize body_str so the error path below never hits a
    # NameError when the response is an octet-stream.
    body_str = None
    if resp.getheader('content-type', None) != 'application/octet-stream':
        body_str = ''.join([chunk for chunk in body_iter])
        self.log_http_response(resp, body_str)
        body_iter = StringIO.StringIO(body_str)
    else:
        self.log_http_response(resp)

    if 400 <= resp.status < 600:
        LOG.error("Request returned failure status.")
        raise exc.from_response(resp, body_str)
    elif resp.status in (301, 302, 305):
        # Redirected. Reissue the request to the new location.
        # httplib responses are not subscriptable; the Location
        # header must be fetched via getheader().
        return self._http_request(resp.getheader('location'),
                                  method, **kwargs)
    elif resp.status == 300:
        raise exc.from_response(resp)

    return resp, body_iter