def find_server(server_endpoint, auth_token, server_config, log=None):
    """
    Given a server config, attempts to find a server created with that config.

    Uses the Nova list server details endpoint to filter out any server that
    does not have the exact server name (the filter is a regex, so can filter
    by ``^<name>$``), image ID, and flavor ID (both of which are exact
    filters).

    :param str server_endpoint: Server endpoint URI.
    :param str auth_token: Keystone Auth Token.
    :param dict server_config: Nova server config.
    :param log: A bound logger

    :return: Deferred that fires with a server (in the format of a server
        detail response) that matches that server config and creation time, or
        None if none matches
    :raises: :class:`ServerCreationRetryError`
    """
    # The image filter may be absent or explicitly None; Nova expects ''.
    image = server_config.get('imageRef', '')
    if image is None:
        image = ''
    params = {
        'image': image,
        'flavor': server_config['flavorRef'],
        # Anchor the regex so only exact name matches are returned.
        'name': '^{0}$'.format(re.escape(server_config['name']))
    }
    url = '{path}?{query}'.format(
        path=append_segments(server_endpoint, 'servers', 'detail'),
        query=urlencode(params))

    def _match_server(list_server_details):
        matches = list_server_details['servers']
        if len(matches) > 1:
            # More than one exact match is ambiguous; give up and retry.
            raise ServerCreationRetryError(
                "Nova returned {0} servers that match the same "
                "image/flavor and name {1}.".format(
                    len(matches), server_config['name']))
        if not matches:
            return None
        candidate = matches[0]
        # Same name/image/flavor but different metadata means it is not ours.
        if candidate['metadata'] != server_config['metadata']:
            raise ServerCreationRetryError(
                "Nova found a server of the right name ({name}) but wrong "
                "metadata. Expected {expected_metadata} and got {nova_metadata}"
                .format(expected_metadata=server_config['metadata'],
                        nova_metadata=candidate['metadata'],
                        name=server_config['name']))
        return {'server': candidate}

    d = treq.get(url, headers=headers(auth_token), log=log)
    d.addCallback(check_success, [200])
    d.addCallback(treq.json_content)
    d.addCallback(_match_server)
    return d
def find_server(server_endpoint, auth_token, server_config, log=None):
    """
    Given a server config, attempts to find a server created with that config.

    Uses the Nova list server details endpoint to filter out any server that
    does not have the exact server name (the filter is a regex, so can filter
    by ``^<name>$``), image ID, and flavor ID (both of which are exact
    filters).

    :param str server_endpoint: Server endpoint URI.
    :param str auth_token: Keystone Auth Token.
    :param dict server_config: Nova server config.
    :param log: A bound logger

    :return: Deferred that fires with a server (in the format of a server
        detail response) that matches that server config and creation time, or
        None if none matches
    :raises: :class:`ServerCreationRetryError`
    """
    query_params = {
        'image': server_config.get('imageRef', ''),
        'flavor': server_config['flavorRef'],
        # Anchor the regex so only exact name matches are returned.
        'name': '^{0}$'.format(re.escape(server_config['name']))
    }

    # imageRef may be present but explicitly None; Nova expects ''.
    if query_params['image'] is None:
        query_params['image'] = ''

    url = '{path}?{query}'.format(
        path=append_segments(server_endpoint, 'servers', 'detail'),
        query=urlencode(query_params))

    def _check_if_server_exists(list_server_details):
        nova_servers = list_server_details['servers']

        if len(nova_servers) > 1:
            # More than one exact match is ambiguous; give up and retry.
            raise ServerCreationRetryError(
                "Nova returned {0} servers that match the same "
                "image/flavor and name {1}.".format(
                    len(nova_servers), server_config['name']))

        elif len(nova_servers) == 1:
            nova_server = list_server_details['servers'][0]

            # Same name/image/flavor but different metadata: not our server.
            # FIX: this message was a broken (unterminated) string literal in
            # the original; reconstructed as one implicitly-joined literal.
            if nova_server['metadata'] != server_config['metadata']:
                raise ServerCreationRetryError(
                    "Nova found a server of the right name ({name}) but wrong "
                    "metadata. Expected {expected_metadata} and got "
                    "{nova_metadata}".format(
                        expected_metadata=server_config['metadata'],
                        nova_metadata=nova_server['metadata'],
                        name=server_config['name']))

            return {'server': nova_server}

        return None

    d = treq.get(url, headers=headers(auth_token), log=log)
    d.addCallback(check_success, [200])
    d.addCallback(treq.json_content)
    d.addCallback(_check_if_server_exists)
    return d
def validate_flavor(log, auth_token, server_endpoint, flavor_ref):
    """
    Validate a flavor reference by fetching its details from Nova.

    :param log: A bound logger.
    :param str auth_token: Keystone auth token.
    :param str server_endpoint: Nova endpoint URI.
    :param str flavor_ref: The flavor ID to validate.

    :return: Deferred that fires with the raw response body on success.
    :raises: :class:`UnknownFlavor` if Nova responds 404 for the flavor.
    """
    flavor_url = append_segments(server_endpoint, 'flavors', flavor_ref)
    deferred = treq.get(flavor_url, headers=headers(auth_token), log=log)
    deferred.addCallback(check_success, [200, 203])
    deferred.addErrback(raise_error_on_code, 404, UnknownFlavor(flavor_ref),
                        flavor_url, 'get_flavor')
    # Consume the response body to work around a strange twisted/treq bug
    # where the next call to nova otherwise hangs indefinitely.
    return deferred.addCallback(treq.content)
def validate_image(log, auth_token, server_endpoint, image_ref):
    """
    Validate an image by fetching its details and checking that it is active.

    :param log: A bound logger.
    :param str auth_token: Keystone auth token.
    :param str server_endpoint: Nova endpoint URI.
    :param str image_ref: The image ID to validate.

    :raises: :class:`UnknownImage` if Nova responds 404 for the image.
    :raises: :class:`InactiveImage` if the image exists but is not ACTIVE.
    """
    image_url = append_segments(server_endpoint, 'images', image_ref)

    def _require_active(image_detail):
        # Only an ACTIVE image can be used to build a server.
        if image_detail['image']['status'] != 'ACTIVE':
            raise InactiveImage(image_ref)

    d = treq.get(image_url, headers=headers(auth_token), log=log)
    d.addCallback(check_success, [200, 203])
    d.addErrback(raise_error_on_code, 404, UnknownImage(image_ref),
                 image_url, 'get_image')
    d.addCallback(treq.json_content)
    d.addCallback(_require_active)
    return d
def validate_image(log, auth_token, server_endpoint, image_ref):
    """
    Validate an image by fetching its information from Nova and ensuring
    the image's status is ACTIVE.

    :param log: A bound logger.
    :param str auth_token: Keystone auth token.
    :param str server_endpoint: Nova endpoint URI.
    :param str image_ref: The image ID to validate.

    :raises: :class:`UnknownImage` if Nova responds 404 for the image.
    :raises: :class:`InactiveImage` if the image is not in ACTIVE status.
    """
    location = append_segments(server_endpoint, 'images', image_ref)
    result = treq.get(location, headers=headers(auth_token), log=log)
    result.addCallback(check_success, [200, 203])
    result.addErrback(raise_error_on_code, 404, UnknownImage(image_ref),
                      location, 'get_image')
    result.addCallback(treq.json_content)

    def check_status(detail):
        # An inactive image cannot be used to build servers.
        if detail['image']['status'] != 'ACTIVE':
            raise InactiveImage(image_ref)

    return result.addCallback(check_status)
def server_details(server_endpoint, auth_token, server_id, log=None):
    """
    Fetch the details of a server as specified by id.

    :param str server_endpoint: A str base URI probably from the service
        catalog.
    :param str auth_token: The auth token.
    :param str server_id: The opaque ID of a server.
    :param log: A bound logger.

    :return: A dict of the server details.
    :raises: :class:`ServerDeleted` if Nova responds 404 for the server.
    """
    detail_url = append_segments(server_endpoint, 'servers', server_id)
    deferred = treq.get(detail_url, headers=headers(auth_token), log=log)
    deferred.addCallback(check_success, [200, 203])
    # A 404 here means the server no longer exists on the Nova side.
    deferred.addErrback(raise_error_on_code, 404, ServerDeleted(server_id),
                        detail_url, 'server_details')
    deferred.addCallback(treq.json_content)
    return deferred
def endpoints_for_token(auth_endpoint, identity_admin_token, user_token,
                        log=None):
    """
    Get the list of endpoints from the service_catalog for the specified
    token.

    :param str auth_endpoint: Identity API endpoint URL.
    :param str identity_admin_token: An Auth token for an identity admin user
        who can get the endpoints for a specified user token.
    :param str user_token: The user token to request endpoints for.
    :param log: A bound logger.

    :return: decoded JSON response as dict.
    """
    endpoints_url = append_segments(auth_endpoint, 'tokens', user_token,
                                    'endpoints')
    # The request is authenticated as the identity admin, not the user.
    d = treq.get(endpoints_url, headers=headers(identity_admin_token), log=log)
    d.addCallback(check_success, [200, 203])
    d.addErrback(wrap_request_error, auth_endpoint, data='token_endpoints')
    return d.addCallback(treq.json_content)
def validate_personality(log, auth_token, server_endpoint, personality): """ Validate personality by checking base64 encoded content and possibly limits """ # Get limits url = append_segments(server_endpoint, 'limits') d = treq.get(url, headers=headers(auth_token), log=log) d.addCallback(check_success, [200, 203]) d.addErrback(wrap_request_error, url, 'get_limits') # Do not invalidate if we don't get limits d.addErrback(lambda f: log.msg( 'Skipping personality size checks due to limits error', reason=f)) # Be optimistic and check base64 encoding anyways encoded_contents = [] for _file in personality: try: if not b64_chars_re.match(_file['contents']): raise TypeError encoded_contents.append( base64.standard_b64decode(str(_file['contents']))) except TypeError: d.cancel() return defer.fail(InvalidBase64Encoding(_file['path'])) def check_sizes(limits): # check max personality max_personality = limits['limits']['absolute']['maxPersonality'] if len(personality) > max_personality: raise InvalidMaxPersonality(max_personality, len(personality)) # check max content size max_file_size = limits['limits']['absolute']['maxPersonalitySize'] for file, encoded_content in itertools.izip(personality, encoded_contents): if len(encoded_content) > max_file_size: raise InvalidFileContentSize(file['path'], max_file_size) d.addCallback(treq.json_content) d.addCallback(check_sizes) return d
def validate_personality(log, auth_token, server_endpoint, personality):
    """
    Validate a server personality (list of injected files): every file's
    content must be valid base64, and — when account limits are available —
    the file count and decoded file sizes must be within those limits.

    :param log: A bound logger.
    :param str auth_token: Keystone auth token.
    :param str server_endpoint: Nova endpoint URI.
    :param list personality: List of dicts with 'path' and base64 'contents'.

    :return: Deferred that fires when validation passes, or fails with
        :class:`InvalidBase64Encoding`, :class:`InvalidMaxPersonality` or
        :class:`InvalidFileContentSize`.
    """
    # Kick off the limits request first so it runs while we decode.
    limits_url = append_segments(server_endpoint, 'limits')
    d = treq.get(limits_url, headers=headers(auth_token), log=log)
    d.addCallback(check_success, [200, 203])
    d.addErrback(wrap_request_error, limits_url, 'get_limits')
    # A limits failure is logged, not treated as an invalid personality.
    d.addErrback(
        lambda f: log.msg('Skipping personality size checks due to limits error',
                          reason=f))

    # Decode everything up front; any bad base64 fails fast.
    decoded_files = []
    for entry in personality:
        try:
            if not b64_chars_re.match(entry['contents']):
                raise TypeError
            decoded_files.append(
                base64.standard_b64decode(str(entry['contents'])))
        except TypeError:
            # Abandon the in-flight limits request before failing.
            d.cancel()
            return defer.fail(InvalidBase64Encoding(entry['path']))

    def check_sizes(limits):
        # Too many personality files?
        max_personality = limits['limits']['absolute']['maxPersonality']
        if len(personality) > max_personality:
            raise InvalidMaxPersonality(max_personality, len(personality))

        # Any single file too large (measured on the decoded bytes)?
        max_file_size = limits['limits']['absolute']['maxPersonalitySize']
        for entry, decoded in itertools.izip(personality, decoded_files):
            if len(decoded) > max_file_size:
                raise InvalidFileContentSize(entry['path'], max_file_size)

    d.addCallback(treq.json_content)
    d.addCallback(check_sizes)
    return d
def user_for_tenant(auth_endpoint, username, password, tenant_id, log=None):
    """
    Use a super secret API to get the special actual username for a tenant id.

    :param str auth_endpoint: Identity Admin API endpoint.
    :param str username: A service username.
    :param str password: A service password.
    :param tenant_id: The tenant ID we wish to find the user for.
    :param log: A bound logger.

    :return: Username of the magical identity:user-admin user for the
        tenantid.
    """
    # The mosso lookup lives on the v1.1 API, hence the endpoint rewrite.
    mosso_url = append_segments(
        auth_endpoint.replace('v2.0', 'v1.1'), 'mosso', str(tenant_id))
    d = treq.get(mosso_url, auth=(username, password),
                 allow_redirects=False, log=log)
    # A 301 redirect is the expected success response here.
    d.addCallback(check_success, [301])
    d.addErrback(wrap_request_error, auth_endpoint, data='mosso')
    d.addCallback(treq.json_content)
    return d.addCallback(lambda body: body['user']['id'])
def user_for_tenant(auth_endpoint, username, password, tenant_id, log=None):
    """
    Use a super secret API to get the special actual username for a tenant id.

    :param str auth_endpoint: Identity Admin API endpoint.
    :param str username: A service username.
    :param str password: A service password.
    :param tenant_id: The tenant ID we wish to find the user for.
    :param log: A bound logger.

    :return: Username of the magical identity:user-admin user for the
        tenantid.
    """
    def extract_user_id(response_body):
        return response_body['user']['id']

    # The mosso lookup lives on the v1.1 API, hence the endpoint rewrite.
    deferred = treq.get(
        append_segments(auth_endpoint.replace('v2.0', 'v1.1'),
                        'mosso', str(tenant_id)),
        auth=(username, password), allow_redirects=False, log=log)
    # A 301 redirect is the expected success response here.
    deferred.addCallback(check_success, [301])
    deferred.addErrback(wrap_upstream_error, 'identity', 'mosso',
                        auth_endpoint)
    deferred.addCallback(treq.json_content)
    deferred.addCallback(extract_user_id)
    return deferred
def endpoints_for_token(auth_endpoint, identity_admin_token, user_token,
                        log=None):
    """
    Get the list of endpoints from the service_catalog for the specified
    token.

    :param str auth_endpoint: Identity API endpoint URL.
    :param str identity_admin_token: An Auth token for an identity admin user
        who can get the endpoints for a specified user token.
    :param str user_token: The user token to request endpoints for.
    :param log: A bound logger.

    :return: decoded JSON response as dict.
    """
    target = append_segments(auth_endpoint, 'tokens', user_token, 'endpoints')
    # Authenticate as the identity admin, not as the user being inspected.
    deferred = treq.get(target, headers=headers(identity_admin_token), log=log)
    deferred.addCallback(check_success, [200, 203])
    deferred.addErrback(wrap_upstream_error, 'identity', 'token_endpoints',
                        auth_endpoint)
    return deferred.addCallback(treq.json_content)
def history(self, request, paginate):
    """
    Return a JSON body listing logged autoscale events for this tenant and
    region, queried from elasticsearch, with pagination links.

    :param request: The HTTP request (its ``uri`` is used for links).
    :param dict paginate: Pagination parameters ('limit', 'marker', ...),
        passed through to the audit log query.

    :raises: NotImplementedError when no elasticsearch host is configured.
    """
    if self.es_host is None:
        raise NotImplementedError(
            "Access to audit log history is not yet implemented")

    query = make_auditlog_query(self.tenant_id, self.region, **paginate)

    def build_response(body):
        events = []
        for hit in body['hits']['hits']:
            source = hit['_source']
            event = {'timestamp': source['@timestamp']}
            # Copy over only the known audit-log fields that are present.
            event.update(
                (name, source[name]) for name in AUDIT_LOG_FIELDS.keys()
                if source.get(name) is not None)
            events.append(event)
        links = get_collection_links(
            events, request.uri, 'self',
            limit=paginate.get('limit'),
            marker=paginate.get('marker'),
            next_marker=next_marker_by_timestamp)
        return json.dumps({'events': events, 'events_links': links})

    d = treq.get(append_segments(self.es_host, '_search'),
                 data=json.dumps(query), log=self.log)
    d.addCallback(check_success, [200])
    d.addCallback(treq.json_content)
    d.addCallback(build_response)
    return d