def validate(value): """Validate and convert the input to a LocalLinkConnectionType. :param value: A dictionary of values to validate, switch_id is a MAC address or an OpenFlow based datapath_id, switch_info is an optional field. For example:: { 'switch_id': mac_or_datapath_id(), 'port_id': 'Ethernet3/1', 'switch_info': 'switch1' } :returns: A dictionary. :raises: Invalid if some of the keys in the dictionary being validated are unknown, invalid, or some required ones are missing. """ wtypes.DictType(wtypes.text, wtypes.text).validate(value) keys = set(value) # This is to workaround an issue when an API object is initialized from # RPC object, in which dictionary fields that are set to None become # empty dictionaries if not keys: return value invalid = keys - LocalLinkConnectionType.valid_fields if invalid: raise exception.Invalid(_('%s are invalid keys') % (invalid)) # Check all mandatory fields are present missing = LocalLinkConnectionType.mandatory_fields - keys if missing: msg = _('Missing mandatory keys: %s') % missing raise exception.Invalid(msg) # Check switch_id is either a valid mac address or # OpenFlow datapath_id and normalize it. try: value['switch_id'] = utils.validate_and_normalize_mac( value['switch_id']) except exception.InvalidMAC: try: value['switch_id'] = utils.validate_and_normalize_datapath_id( value['switch_id']) except exception.InvalidDatapathID: raise exception.InvalidSwitchID(switch_id=value['switch_id']) return value
def validate_for_smart_nic(value):
    """Validate that the Smart NIC fields 'port_id' and 'hostname' are present.

    :param value: local link information of type Dictionary.
    :return: True if both fields 'port_id' and 'hostname' are present in
        'value', False otherwise.
    """
    wtypes.DictType(wtypes.text, wtypes.text).validate(value)
    keys = set(value)
    if LocalLinkConnectionType.smart_nic_mandatory_fields <= keys:
        return True
    return False
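# A minimal usage sketch (not part of the original module): it shows how the
# two validators above might be exercised together. The sample values and the
# surrounding call site are hypothetical.
regular_llc = {
    'switch_id': '0a:1b:2c:3d:4e:5f',
    'port_id': 'Ethernet3/1',
    'switch_info': 'switch1',
}
smart_nic_llc = {
    'port_id': 'rep0-0',
    'hostname': 'host1-bf',
}

normalized = validate(regular_llc)                     # normalizes 'switch_id'
is_smart_nic = validate_for_smart_nic(smart_nic_llc)   # True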
class ListenerPUT(BaseListenerType):
    """Defines attributes that are acceptable of a PUT request."""

    name = wtypes.wsattr(wtypes.StringType(max_length=255))
    description = wtypes.wsattr(wtypes.StringType(max_length=255))
    admin_state_up = wtypes.wsattr(bool)
    connection_limit = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_CONNECTION_LIMIT))
    default_tls_container_ref = wtypes.wsattr(
        wtypes.StringType(max_length=255))
    sni_container_refs = [wtypes.StringType(max_length=255)]
    default_pool_id = wtypes.wsattr(wtypes.UuidType())
    insert_headers = wtypes.wsattr(
        wtypes.DictType(str, wtypes.StringType(max_length=255)))
class ListenerPUT(base.BaseType):
    """Defines attributes that are acceptable of a PUT request."""

    name = wtypes.wsattr(wtypes.StringType(max_length=255))
    description = wtypes.wsattr(wtypes.StringType(max_length=255))
    enabled = wtypes.wsattr(bool)
    protocol = wtypes.wsattr(wtypes.Enum(str, *constants.SUPPORTED_PROTOCOLS))
    protocol_port = wtypes.wsattr(wtypes.IntegerType())
    connection_limit = wtypes.wsattr(wtypes.IntegerType())
    tls_certificate_id = wtypes.wsattr(wtypes.StringType(max_length=255))
    tls_termination = wtypes.wsattr(TLSTermination)
    sni_containers = [wtypes.StringType(max_length=255)]
    default_pool_id = wtypes.wsattr(wtypes.UuidType())
    insert_headers = wtypes.wsattr(wtypes.DictType(str, str))
class ListenerResponse(base.BaseType):
    """Defines which attributes are to be shown on any response."""

    id = wtypes.wsattr(wtypes.UuidType())
    name = wtypes.wsattr(wtypes.StringType())
    description = wtypes.wsattr(wtypes.StringType())
    provisioning_status = wtypes.wsattr(wtypes.StringType())
    operating_status = wtypes.wsattr(wtypes.StringType())
    enabled = wtypes.wsattr(bool)
    protocol = wtypes.wsattr(wtypes.text)
    protocol_port = wtypes.wsattr(wtypes.IntegerType())
    connection_limit = wtypes.wsattr(wtypes.IntegerType())
    tls_certificate_id = wtypes.wsattr(wtypes.StringType(max_length=255))
    sni_containers = [wtypes.StringType(max_length=255)]
    project_id = wtypes.wsattr(wtypes.StringType())
    default_pool_id = wtypes.wsattr(wtypes.UuidType())
    default_pool = wtypes.wsattr(pool.PoolResponse)
    l7policies = wtypes.wsattr([l7policy.L7PolicyResponse])
    insert_headers = wtypes.wsattr(wtypes.DictType(str, str))
    created_at = wtypes.wsattr(wtypes.datetime.datetime)
    updated_at = wtypes.wsattr(wtypes.datetime.datetime)

    @classmethod
    def from_data_model(cls, data_model, children=False):
        listener = super(ListenerResponse, cls).from_data_model(
            data_model, children=children)
        # NOTE(blogan): we should show sni_containers for every call to show
        # a listener
        listener.sni_containers = [
            sni_c.tls_container_id for sni_c in data_model.sni_containers
        ]
        if not children:
            # NOTE(blogan): do not show default_pool if the request does not
            # want to see children
            del listener.default_pool
            del listener.l7policies
            return listener
        if data_model.default_pool:
            listener.default_pool = pool.PoolResponse.from_data_model(
                data_model.default_pool, children=children)
        if data_model.l7policies:
            listener.l7policies = [
                l7policy.L7PolicyResponse.from_data_model(
                    policy, children=children)
                for policy in data_model.l7policies
            ]
        if not listener.default_pool:
            del listener.default_pool
            del listener.default_pool_id
        if not listener.l7policies or len(listener.l7policies) <= 0:
            del listener.l7policies
        return listener
class ListenerPOST(BaseListenerType):
    """Defines mandatory and optional attributes of a POST request."""

    name = wtypes.wsattr(wtypes.StringType(max_length=255))
    description = wtypes.wsattr(wtypes.StringType(max_length=255))
    admin_state_up = wtypes.wsattr(bool, default=True)
    protocol = wtypes.wsattr(wtypes.Enum(str, *constants.SUPPORTED_PROTOCOLS),
                             mandatory=True)
    protocol_port = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_PORT_NUMBER,
                           maximum=constants.MAX_PORT_NUMBER),
        mandatory=True)
    connection_limit = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_CONNECTION_LIMIT),
        default=constants.DEFAULT_CONNECTION_LIMIT)
    default_tls_container_ref = wtypes.wsattr(
        wtypes.StringType(max_length=255))
    sni_container_refs = [wtypes.StringType(max_length=255)]
    # TODO(johnsom) Remove after deprecation (R series)
    project_id = wtypes.wsattr(wtypes.StringType(max_length=36))
    default_pool_id = wtypes.wsattr(wtypes.UuidType())
    default_pool = wtypes.wsattr(pool.PoolSingleCreate)
    l7policies = wtypes.wsattr([l7policy.L7PolicySingleCreate], default=[])
    insert_headers = wtypes.wsattr(
        wtypes.DictType(str, wtypes.StringType(max_length=255)))
    loadbalancer_id = wtypes.wsattr(wtypes.UuidType(), mandatory=True)
    timeout_client_data = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT),
        default=CONF.haproxy_amphora.timeout_client_data)
    timeout_member_connect = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT),
        default=CONF.haproxy_amphora.timeout_member_connect)
    timeout_member_data = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT),
        default=CONF.haproxy_amphora.timeout_member_data)
    timeout_tcp_inspect = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT),
        default=CONF.haproxy_amphora.timeout_tcp_inspect)
    tags = wtypes.wsattr(wtypes.ArrayType(wtypes.StringType(max_length=255)))
    client_ca_tls_container_ref = wtypes.StringType(max_length=255)
    client_authentication = wtypes.wsattr(
        wtypes.Enum(str, *constants.SUPPORTED_CLIENT_AUTH_MODES),
        default=constants.CLIENT_AUTH_NONE)
    client_crl_container_ref = wtypes.StringType(max_length=255)
    allowed_cidrs = wtypes.wsattr([types.CidrType()])
    tls_ciphers = wtypes.StringType(max_length=2048)
    tls_versions = wtypes.wsattr(
        wtypes.ArrayType(wtypes.StringType(max_length=32)))
    alpn_protocols = wtypes.wsattr(wtypes.ArrayType(types.AlpnProtocolType()))
class ListenerResponse(BaseListenerType):
    """Defines which attributes are to be shown on any response."""

    id = wtypes.wsattr(wtypes.UuidType())
    name = wtypes.wsattr(wtypes.StringType())
    description = wtypes.wsattr(wtypes.StringType())
    provisioning_status = wtypes.wsattr(wtypes.StringType())
    operating_status = wtypes.wsattr(wtypes.StringType())
    admin_state_up = wtypes.wsattr(bool)
    protocol = wtypes.wsattr(wtypes.text)
    protocol_port = wtypes.wsattr(wtypes.IntegerType())
    connection_limit = wtypes.wsattr(wtypes.IntegerType())
    default_tls_container_ref = wtypes.wsattr(wtypes.StringType())
    sni_container_refs = [wtypes.StringType()]
    project_id = wtypes.wsattr(wtypes.StringType())
    default_pool_id = wtypes.wsattr(wtypes.UuidType())
    l7policies = wtypes.wsattr([types.IdOnlyType])
    insert_headers = wtypes.wsattr(wtypes.DictType(str, str))
    created_at = wtypes.wsattr(wtypes.datetime.datetime)
    updated_at = wtypes.wsattr(wtypes.datetime.datetime)
    loadbalancers = wtypes.wsattr([types.IdOnlyType])
    timeout_client_data = wtypes.wsattr(wtypes.IntegerType())
    timeout_member_connect = wtypes.wsattr(wtypes.IntegerType())
    timeout_member_data = wtypes.wsattr(wtypes.IntegerType())
    timeout_tcp_inspect = wtypes.wsattr(wtypes.IntegerType())
    tags = wtypes.wsattr(wtypes.ArrayType(wtypes.StringType()))

    @classmethod
    def from_data_model(cls, data_model, children=False):
        listener = super(ListenerResponse, cls).from_data_model(
            data_model, children=children)

        listener.sni_container_refs = [
            sni_c.tls_container_id for sni_c in data_model.sni_containers
        ]
        if cls._full_response():
            del listener.loadbalancers
            l7policy_type = l7policy.L7PolicyFullResponse
        else:
            listener.loadbalancers = [
                types.IdOnlyType.from_data_model(data_model.load_balancer)
            ]
            l7policy_type = types.IdOnlyType

        listener.l7policies = [
            l7policy_type.from_data_model(i) for i in data_model.l7policies
        ]
        return listener
class CommandResult(base.APIBase):
    """Object representing the result of a given command."""

    id = types.text
    command_name = types.text
    command_params = types.DictType(types.text, base.json_type)
    command_status = types.text
    command_error = base.exception_type
    command_result = types.DictType(types.text, base.json_type)

    @classmethod
    def from_result(cls, result):
        """Convert a BaseCommandResult object to a CommandResult object.

        :param result: a :class:`ironic_python_agent.extensions.base.
                       BaseCommandResult` object.
        :returns: a :class:`ironic_python_agent.api.controllers.v1.command.
                  CommandResult` object.
        """
        instance = cls()
        for field in ('id', 'command_name', 'command_params',
                      'command_status', 'command_error', 'command_result'):
            setattr(instance, field, getattr(result, field))
        return instance
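# A minimal usage sketch (assumption): from_result only copies the six listed
# attributes, so any object exposing them works. The stand-in object below is
# hypothetical and only illustrates the call.
from types import SimpleNamespace

fake_result = SimpleNamespace(id='6e3d3b0a',
                              command_name='get_clean_steps',
                              command_params={},
                              command_status='SUCCEEDED',
                              command_error=None,
                              command_result={'clean_steps': []})
api_result = CommandResult.from_result(fake_result)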
class TimePeriod(types.Base):
    timeperiod_name = wsme.wsattr(wtypes.text, mandatory=False)
    exclude = wsme.wsattr(wtypes.ArrayType(wtypes.text), mandatory=False)
    alias = wsme.wsattr(wtypes.text, mandatory=False)
    periods = wsme.wsattr(wtypes.DictType(wtypes.text, wtypes.text),
                          mandatory=False)

    @classmethod
    def sample(cls):
        return cls(timeperiod_name='nonworkhours',
                   periods={"sunday": "0:00-24:00",
                            "february 10": "00:00-24:00"})
class MacroModulation(types.Base):
    macromodulation_name = wsme.wsattr(wtypes.text, mandatory=False)
    modulation_period = wsme.wsattr(wtypes.text, mandatory=False)
    macros = wsme.wsattr(wtypes.DictType(wtypes.text, int), mandatory=False)

    @classmethod
    def sample(cls):
        return cls(macromodulation_name='HighDuringNight',
                   modulation_period='night',
                   macros={"_CRITICAL": 20, "_WARNING": 10})
class ListenerResponse(BaseListenerType):
    """Defines which attributes are to be shown on any response."""

    id = wtypes.wsattr(wtypes.UuidType())
    name = wtypes.wsattr(wtypes.StringType())
    description = wtypes.wsattr(wtypes.StringType())
    provisioning_status = wtypes.wsattr(wtypes.StringType())
    operating_status = wtypes.wsattr(wtypes.StringType())
    admin_state_up = wtypes.wsattr(bool)
    protocol = wtypes.wsattr(wtypes.text)
    protocol_port = wtypes.wsattr(wtypes.IntegerType())
    connection_limit = wtypes.wsattr(wtypes.IntegerType())
    default_tls_container_ref = wtypes.wsattr(wtypes.StringType())
    sni_container_refs = [wtypes.StringType()]
    # TODO(johnsom) Remove after deprecation (R series)
    project_id = wtypes.wsattr(wtypes.StringType())
    # TODO(johnsom) Remove after deprecation (R series)
    tenant_id = wtypes.wsattr(wtypes.StringType())
    default_pool_id = wtypes.wsattr(wtypes.UuidType())
    l7policies = wtypes.wsattr([MinimalL7Policy])
    insert_headers = wtypes.wsattr(wtypes.DictType(str, str))
    created_at = wtypes.wsattr(wtypes.datetime.datetime)
    updated_at = wtypes.wsattr(wtypes.datetime.datetime)
    loadbalancers = wtypes.wsattr([MinimalLoadBalancer])

    @classmethod
    def from_data_model(cls, data_model, children=False):
        listener = super(ListenerResponse, cls).from_data_model(
            data_model, children=children)
        listener.tenant_id = data_model.project_id
        listener.sni_container_refs = [
            sni_c.tls_container_id for sni_c in data_model.sni_containers
        ]
        listener.loadbalancers = [
            MinimalLoadBalancer.from_data_model(data_model.load_balancer)
        ]
        listener.l7policies = [
            MinimalL7Policy.from_data_model(i) for i in data_model.l7policies
        ]
        if not listener.description:
            listener.description = ""
        if not listener.name:
            listener.name = ""
        return listener
class DeployController(rest.RestController):
    """REST controller for deployment."""

    def __init__(self):
        super(DeployController, self).__init__()

    @expose.expose(wtypes.text, wtypes.text)
    def get_one(self, action):
        if action not in SUPPORT_ACTIONS:
            raise exception.ActionNotSupport(action=action)
        log_name = SUPPORT_ACTIONS[action]['log']
        log_file = "{}/{}".format(env_conf.get(key='log_dir'), log_name)
        if not os.path.isfile(log_file):
            return ""
        returncode, output = utils.execute("wc -l {}".format(log_file))
        total_line = output.split(' ')[0]
        if action not in ACTION_LINES:
            ACTION_LINES[action] = total_line
        cur_line = ACTION_LINES[action]
        returncode, output = utils.execute("sed -n '{},{}'p {}"
                                           .format(cur_line, total_line,
                                                   log_file))
        ACTION_LINES[action] = int(total_line) + 1
        return output.replace('\n', '<br>')

    @expose.expose(body=wtypes.DictType(str, str))
    def post(self, actions):
        if 'action' not in actions:
            raise exception.BadRequest(field='action')
        action = actions['action']
        if action not in SUPPORT_ACTIONS:
            raise exception.ActionNotSupport(action=action)
        command = SUPPORT_ACTIONS[action]['command']
        # Execute the deployment in a background thread so the request
        # returns immediately.
        thread = threading.Thread(target=utils.execute, args=(command,))
        thread.daemon = True
        thread.start()
class ServiceSpecification(solum_plan.ServiceReference):
    """CAMP v1.1 ServiceSpecification."""

    description = wtypes.text
    """A description of the specified service."""

    tags = [wtypes.text]
    """Tags for the specified service."""

    href = wtypes.text
    """Reference to a service resource that resolves the service described
    by this ServiceSpecification."""

    characteristics = [wtypes.DictType(wtypes.text, wtypes.text)]
    """An array of dictionaries that express the desired characteristics of
    any service that matches this specification."""

    def __init__(self, **kwargs):
        super(ServiceSpecification, self).__init__(**kwargs)
class ListenerPOST(base.BaseType):
    """Defines mandatory and optional attributes of a POST request."""

    id = wtypes.wsattr(wtypes.UuidType())
    name = wtypes.wsattr(wtypes.StringType(max_length=255))
    description = wtypes.wsattr(wtypes.StringType(max_length=255))
    enabled = wtypes.wsattr(bool, default=True)
    protocol = wtypes.wsattr(wtypes.Enum(str, *constants.SUPPORTED_PROTOCOLS),
                             mandatory=True)
    protocol_port = wtypes.wsattr(wtypes.IntegerType(), mandatory=True)
    connection_limit = wtypes.wsattr(wtypes.IntegerType())
    tls_certificate_id = wtypes.wsattr(wtypes.StringType(max_length=255))
    tls_termination = wtypes.wsattr(TLSTermination)
    sni_containers = [wtypes.StringType(max_length=255)]
    project_id = wtypes.wsattr(wtypes.StringType(max_length=36))
    default_pool_id = wtypes.wsattr(wtypes.UuidType())
    default_pool = wtypes.wsattr(pool.PoolPOST)
    l7policies = wtypes.wsattr([l7policy.L7PolicyPOST], default=[])
    insert_headers = wtypes.wsattr(wtypes.DictType(str, str))
class ListenerSingleCreate(BaseListenerType):
    """Defines mandatory and optional attributes of a POST request."""

    name = wtypes.wsattr(wtypes.StringType(max_length=255))
    description = wtypes.wsattr(wtypes.StringType(max_length=255))
    admin_state_up = wtypes.wsattr(bool, default=True)
    protocol = wtypes.wsattr(wtypes.Enum(str, *constants.SUPPORTED_PROTOCOLS),
                             mandatory=True)
    protocol_port = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_PORT_NUMBER,
                           maximum=constants.MAX_PORT_NUMBER),
        mandatory=True)
    connection_limit = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_CONNECTION_LIMIT),
        default=-1)
    default_tls_container_ref = wtypes.wsattr(
        wtypes.StringType(max_length=255))
    sni_container_refs = [wtypes.StringType(max_length=255)]
    default_pool_id = wtypes.wsattr(wtypes.UuidType())
    default_pool = wtypes.wsattr(pool.PoolSingleCreate)
    l7policies = wtypes.wsattr([l7policy.L7PolicySingleCreate], default=[])
    insert_headers = wtypes.wsattr(
        wtypes.DictType(str, wtypes.StringType(max_length=255)))
    timeout_client_data = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT),
        default=CONF.haproxy_amphora.timeout_client_data)
    timeout_member_connect = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT),
        default=CONF.haproxy_amphora.timeout_member_connect)
    timeout_member_data = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT),
        default=CONF.haproxy_amphora.timeout_member_data)
    timeout_tcp_inspect = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT),
        default=CONF.haproxy_amphora.timeout_tcp_inspect)
    tags = wtypes.wsattr(wtypes.ArrayType(wtypes.StringType(max_length=255)))
    client_ca_tls_container_ref = wtypes.StringType(max_length=255)
    client_authentication = wtypes.wsattr(
        wtypes.Enum(str, *constants.SUPPORTED_CLIENT_AUTH_MODES),
        default=constants.CLIENT_AUTH_NONE)
    client_crl_container_ref = wtypes.StringType(max_length=255)
class ListenerSingleCreate(BaseListenerType):
    """Defines mandatory and optional attributes of a POST request."""

    name = wtypes.wsattr(wtypes.StringType(max_length=255))
    description = wtypes.wsattr(wtypes.StringType(max_length=255))
    admin_state_up = wtypes.wsattr(bool, default=True)
    protocol = wtypes.wsattr(wtypes.Enum(str, *constants.SUPPORTED_PROTOCOLS),
                             mandatory=True)
    protocol_port = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_PORT_NUMBER,
                           maximum=constants.MAX_PORT_NUMBER),
        mandatory=True)
    connection_limit = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_CONNECTION_LIMIT),
        default=-1)
    default_tls_container_ref = wtypes.wsattr(
        wtypes.StringType(max_length=255))
    sni_container_refs = [wtypes.StringType(max_length=255)]
    default_pool_id = wtypes.wsattr(wtypes.UuidType())
    default_pool = wtypes.wsattr(pool.PoolSingleCreate)
    l7policies = wtypes.wsattr([l7policy.L7PolicySingleCreate], default=[])
    insert_headers = wtypes.wsattr(
        wtypes.DictType(str, wtypes.StringType(max_length=255)))
class ListenerPOST(BaseListenerType):
    """Defines mandatory and optional attributes of a POST request."""

    name = wtypes.wsattr(wtypes.StringType(max_length=255))
    description = wtypes.wsattr(wtypes.StringType(max_length=255))
    admin_state_up = wtypes.wsattr(bool, default=True)
    protocol = wtypes.wsattr(wtypes.Enum(str, *constants.SUPPORTED_PROTOCOLS),
                             mandatory=True)
    protocol_port = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_PORT_NUMBER,
                           maximum=constants.MAX_PORT_NUMBER),
        mandatory=True)
    connection_limit = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_CONNECTION_LIMIT),
        default=-1)
    default_tls_container_ref = wtypes.wsattr(
        wtypes.StringType(max_length=255))
    sni_container_refs = [wtypes.StringType(max_length=255)]
    # TODO(johnsom) Remove after deprecation (R series)
    project_id = wtypes.wsattr(wtypes.StringType(max_length=36))
    default_pool_id = wtypes.wsattr(wtypes.UuidType())
    default_pool = wtypes.wsattr(pool.PoolSingleCreate)
    l7policies = wtypes.wsattr([l7policy.L7PolicySingleCreate], default=[])
    insert_headers = wtypes.wsattr(
        wtypes.DictType(str, wtypes.StringType(max_length=255)))
    loadbalancer_id = wtypes.wsattr(wtypes.UuidType(), mandatory=True)
    timeout_client_data = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT),
        default=constants.DEFAULT_TIMEOUT_CLIENT_DATA)
    timeout_member_connect = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT),
        default=constants.DEFAULT_TIMEOUT_MEMBER_CONNECT)
    timeout_member_data = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT),
        default=constants.DEFAULT_TIMEOUT_MEMBER_DATA)
    timeout_tcp_inspect = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT),
        default=constants.DEFAULT_TIMEOUT_TCP_INSPECT)
class TokensController(rest.RestController):
    """REST controller for tokens."""

    def __init__(self):
        super(TokensController, self).__init__()

    @expose.expose(TokenCollection)
    def get_all(self):
        tokens = objects.token_obj.get_all()
        return TokenCollection.convert(tokens)

    @expose.expose(Token, body=wtypes.DictType(str, str), status_code=201)
    def post(self, creds):
        user = creds.get('username')
        password = creds.get('password')
        if not objects.token.is_valid_user(password):
            raise exc.HTTPUnauthorized('401 Unauthorized')
        return Token.convert(objects.token_obj.create())

    @expose.expose(None, wtypes.text)
    def delete(self, token):
        objects.token_obj.delete(token)
def validate(value): """Validate the input :param value: A event dict :returns: value :raises: Invalid if event not in proper format """ wtypes.DictType(wtypes.text, wtypes.text).validate(value) keys = set(value) # Check all mandatory fields are present missing = EventType.mandatory_fields.difference(keys) if missing: raise exception.Invalid(_('Missing mandatory keys: %s') % missing) # Check event is a supported event if value['event'] not in EventType.event_validators: raise exception.Invalid( _('%s is not a valid event.') % value['event']) return EventType.event_validators[value['event']](value)
class NetworksController(rest.RestController):
    """REST controller for networks."""

    def __init__(self):
        super(NetworksController, self).__init__()

    @expose.expose(NetworkCollection)
    def get_all(self):
        networks = objects.network_obj.get_all()
        return NetworkCollection.convert(networks)

    @expose.expose(node.Node, wtypes.text)
    def get_one(self, fqdn):
        network = objects.network_obj.get_by_fqdn(fqdn)
        if network is None:
            raise exception.ResourceNotFound(name='network', id=fqdn)
        return node.Node.convert(network)

    @expose.expose(node.Node, body=node.Node, status_code=201)
    def post(self, network):
        network_dict = network.as_dict()
        for (key, value) in node.options.items():
            if key not in network_dict:
                network_dict[key] = value
        new_network = objects.Network(**network_dict)
        return node.Node.convert(objects.network_obj.create(new_network))

    @expose.expose(None, wtypes.text)
    def delete(self, fqdn):
        objects.network_obj.delete(fqdn)

    @expose.expose(node.Node, wtypes.text, body=wtypes.DictType(str, str))
    def patch(self, fqdn, patch):
        network = objects.network_obj.update(fqdn, patch)
        if network is None:
            raise exception.ResourceNotFound(name='network', id=fqdn)
        return network
class MacroModulation(types.Base):
    macromodulation_name = wsme.wsattr(wtypes.text, mandatory=True)
    modulation_period = wsme.wsattr(wtypes.text, mandatory=True)
    # _CRITICAL = wsme.wsattr(int, mandatory=True)
    # _WARNING = wsme.wsattr(int, mandatory=True)
    macros = wsme.wsattr(wtypes.DictType(wtypes.text, int), mandatory=False)

    def __init__(self, **kwargs):
        super(MacroModulation, self).__init__(**kwargs)
        # Custom fields start with '_'. Detect them and assign them.
        macros = [
            i for i in kwargs.items()
            if isinstance(i[0], str) and i[0].startswith('_')
        ]
        if len(macros) > 0:
            self.macros = {}
            for item in macros:
                self.macros[item[0]] = item[1]

    def as_dict(self):
        mod_dict = super(MacroModulation, self).as_dict()
        macros = mod_dict.pop("macros", None)
        if macros:
            for item in macros.items():
                mod_dict[item[0]] = item[1]
        return mod_dict

    @classmethod
    def sample(cls):
        return cls(macromodulation_name='HighDuringNight',
                   modulation_period='night',
                   macros={"_CRITICAL": 20, "_WARNING": 10})
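# A minimal usage sketch (assumption about the surrounding types.Base
# behaviour): keyword arguments whose names start with '_' are collected into
# the 'macros' dict by __init__, and as_dict() flattens them back to
# top-level keys.
mm = MacroModulation(macromodulation_name='HighDuringNight',
                     modulation_period='night',
                     _CRITICAL=20, _WARNING=10)
# mm.macros == {'_CRITICAL': 20, '_WARNING': 10}
flat = mm.as_dict()
# flat contains '_CRITICAL' and '_WARNING' at the top level, not 'macros'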
class ComputesController(rest.RestController):
    """REST controller for computes."""

    def __init__(self):
        super(ComputesController, self).__init__()

    @expose.expose(ComputeCollection)
    def get_all(self):
        computes = objects.compute_obj.get_all()
        return ComputeCollection.convert(computes)

    @expose.expose(node.Node, wtypes.text)
    def get_one(self, fqdn):
        compute = objects.compute_obj.get_by_fqdn(fqdn)
        if compute is None:
            raise exception.ResourceNotFound(name='compute', id=fqdn)
        return node.Node.convert(compute)

    @expose.expose(node.Node, body=node.Node, status_code=201)
    def post(self, compute):
        compute_dict = compute.as_dict()
        for (key, value) in node.options.items():
            if key not in compute_dict:
                compute_dict[key] = value
        new_compute = objects.Compute(**compute_dict)
        return node.Node.convert(objects.compute_obj.create(new_compute))

    @expose.expose(None, wtypes.text)
    def delete(self, fqdn):
        objects.compute_obj.delete(fqdn)

    @expose.expose(node.Node, wtypes.text, body=wtypes.DictType(str, str))
    def patch(self, fqdn, patch):
        compute = objects.compute_obj.update(fqdn, patch)
        if compute is None:
            raise exception.ResourceNotFound(name='compute', id=fqdn)
        return compute
class Lease(base._Base):

    id = types.UuidType()
    "The UUID of the lease"

    name = wtypes.text
    "The name of the lease"

    start_date = types.Datetime(service.LEASE_DATE_FORMAT)
    "Datetime when the lease should start"

    end_date = types.Datetime(service.LEASE_DATE_FORMAT)
    "Datetime when the lease should end"

    user_id = types.UuidType(without_dashes=True)
    "The ID of the user who creates the lease"

    project_id = types.UuidType(without_dashes=True)
    "The ID of the project or tenant the lease belongs to"

    trust_id = types.UuidType(without_dashes=True)
    "The ID of the trust created for delegating the rights of the user"

    reservations = wtypes.ArrayType(wtypes.DictType(wtypes.text, wtypes.text))
    "The list of reservations belonging to the lease"

    events = wtypes.ArrayType(wtypes.DictType(wtypes.text, wtypes.text))
    "The list of events attached to the lease"

    before_end_notification = types.Datetime(service.LEASE_DATE_FORMAT)
    "Datetime when notifications will be sent before lease ending"

    action = wtypes.text
    "The current action running"

    status = wtypes.text
    "The status of the action running"

    status_reason = wtypes.text
    "A brief description of the status, if any"

    @classmethod
    def sample(cls):
        return cls(id=u'2bb8720a-0873-4d97-babf-0d906851a1eb',
                   name=u'lease_test',
                   start_date=u'2014-01-01 01:23',
                   end_date=u'2014-02-01 13:37',
                   user_id=u'efd8780712d24b389c705f5c2ac427ff',
                   project_id=u'bd9431c18d694ad3803a8d4a6b89fd36',
                   trust_id=u'35b17138b3644e6aa1318f3099c5be68',
                   reservations=[{u'resource_id': u'1234',
                                  u'resource_type': u'virtual:instance'}],
                   events=[],
                   before_end_notification=u'2014-02-01 10:37',
                   action=u'START',
                   status=u'COMPLETE',
                   status_reason=u'Lease currently running')
class NodeGroup(base.APIBase):
    """API representation of a Node group.

    This class enforces type checking and value constraints, and converts
    between the internal object model and the API representation of NodeGroup.
    """

    id = wsme.wsattr(wtypes.IntegerType(minimum=1))
    """unique id"""

    uuid = types.uuid
    """Unique UUID for this nodegroup"""

    name = wsme.wsattr(wtypes.StringType(min_length=1, max_length=255),
                       default=None)
    """Name of this nodegroup"""

    cluster_id = types.uuid
    """Unique UUID for the cluster where the nodegroup belongs to"""

    project_id = wsme.wsattr(wtypes.text, readonly=True)
    """Project UUID for this nodegroup"""

    docker_volume_size = wtypes.IntegerType(minimum=1)
    """The size in GB of the docker volume"""

    labels = wtypes.DictType(wtypes.text,
                             types.MultiType(wtypes.text, six.integer_types,
                                             bool, float))
    """One or more key/value pairs"""

    links = wsme.wsattr([link.Link], readonly=True)
    """A list containing a self link and associated nodegroup links"""

    flavor_id = wtypes.StringType(min_length=1, max_length=255)
    """The flavor of this nodegroup"""

    image_id = wtypes.StringType(min_length=1, max_length=255)
    """The image used for this nodegroup"""

    node_addresses = wsme.wsattr([wtypes.text], readonly=True)
    """IP addresses of nodegroup nodes"""

    node_count = wsme.wsattr(wtypes.IntegerType(minimum=1), default=1)
    """The node count for this nodegroup. Default to 1 if not set"""

    role = wsme.wsattr(wtypes.StringType(min_length=1, max_length=255),
                       default='worker')
    """The role of the nodes included in this nodegroup"""

    min_node_count = wsme.wsattr(wtypes.IntegerType(minimum=1), default=1)
    """The minimum allowed nodes for this nodegroup. Default to 1 if not set"""

    max_node_count = wsme.wsattr(wtypes.IntegerType(minimum=1), default=None)
    """The maximum allowed nodes for this nodegroup. Default to 1 if not set"""

    is_default = types.BooleanType()
    """Specifies is a nodegroup was created by default or not"""

    stack_id = wsme.wsattr(wtypes.text, readonly=True)
    """Stack id of the heat stack"""

    status = wtypes.Enum(wtypes.text, *fields.ClusterStatus.ALL)
    """Status of the nodegroup from the heat stack"""

    status_reason = wtypes.text
    """Status reason of the nodegroup from the heat stack"""

    version = wtypes.text
    """Version of the nodegroup"""

    merge_labels = wsme.wsattr(types.boolean, default=False)
    """Indicates whether the labels will be merged with the cluster labels."""

    labels_overridden = wtypes.DictType(
        wtypes.text, types.MultiType(
            wtypes.text, six.integer_types, bool, float))
    """Contains labels that have a value different than the parent labels."""

    labels_added = wtypes.DictType(
        wtypes.text, types.MultiType(
            wtypes.text, six.integer_types, bool, float))
    """Contains labels that do not exist in the parent."""

    labels_skipped = wtypes.DictType(
        wtypes.text, types.MultiType(
            wtypes.text, six.integer_types, bool, float))
    """Contains labels that exist in the parent but were not inherited."""

    def __init__(self, **kwargs):
        super(NodeGroup, self).__init__()
        self.fields = []
        for field in objects.NodeGroup.fields:
            # Skip fields we do not expose.
            if not hasattr(self, field):
                continue
            self.fields.append(field)
            setattr(self, field, kwargs.get(field, wtypes.Unset))

    @classmethod
    def convert(cls, nodegroup, expand=True):
        url = pecan.request.host_url
        cluster_path = 'clusters/%s' % nodegroup.cluster_id
        nodegroup_path = 'nodegroups/%s' % nodegroup.uuid

        ng = NodeGroup(**nodegroup.as_dict())
        if not expand:
            ng.unset_fields_except(["uuid", "name", "flavor_id", "node_count",
                                    "role", "is_default", "image_id",
                                    "status", "stack_id"])
        else:
            ng.links = [link.Link.make_link('self', url, cluster_path,
                                            nodegroup_path),
                        link.Link.make_link('bookmark', url, cluster_path,
                                            nodegroup_path, bookmark=True)]
            cluster = api_utils.get_resource('Cluster', ng.cluster_id)
            overridden, added, skipped = api_utils.get_labels_diff(
                cluster.labels, ng.labels)
            ng.labels_overridden = overridden
            ng.labels_added = added
            ng.labels_skipped = skipped
        return ng
class LanguagePack(api_types.Base):
    """Representation of a language pack.

    When a user creates an application, he specifies the language pack
    to be used. The language pack is responsible for building the application
    and producing an artifact for deployment.

    For a complete list of language pack attributes please refer:
    https://etherpad.openstack.org/p/Solum-Language-pack-json-format
    """

    def __init__(self, **kwds):
        self.__name = wsme.Unset
        super(LanguagePack, self).__init__(**kwds)

    def get_name(self):
        return self.__name

    def set_name(self, value):
        if len(value) > 100:
            raise ValueError(_('Names must not be longer than 100 '
                               'characters'))
        allowed_chars = string.ascii_lowercase + string.digits + '-_'
        for ch in value:
            if ch not in allowed_chars:
                raise ValueError(_('Names must only contain a-z,0-9,-,_'))
        self.__name = value

    name = wtypes.wsproperty(str, get_name, set_name, mandatory=True)

    language_pack_type = wtypes.text
    """Type of the language pack. Identifies the language supported by the
    language pack. This attribute value will use the org.openstack.solum
    namespace.
    """

    compiler_versions = [wtypes.text]
    """List of all the compiler versions supported by the language pack.
    Example: For a java language pack supporting Java versions 1.4 to 1.7,
    version = ['1.4', '1.6', '1.7']
    """

    runtime_versions = [wtypes.text]
    """List of all runtime versions supported by the language pack.
    Runtime version can be different than compiler version.
    Example: An application can be compiled with java 1.7 but it should
    run in java 1.6 as it is backward compatible.
    """

    language_implementation = wtypes.text
    """Actual language implementation supported by the language pack.
    Example: In case of java it might be 'Sun' or 'openJava'
    In case of C++ it can be 'gcc' or 'icc' or 'microsoft'.
    """

    build_tool_chain = [BuildTool]
    """Toolchain available in the language pack.
    Example: For a java language pack which supports Ant and Maven,
    build_tool_chain = ["{type:ant,version:1.7}","{type:maven,version:1.2}"]
    """

    os_platform = {str: wtypes.text}
    """OS and its version used by the language pack.
    This attribute identifies the base image of the language pack.
    """

    attributes = {str: wtypes.text}
    """Additional section attributes will be used to expose custom
    attributes designed by language pack creator.
    """

    source_uri = wtypes.text
    """The URI of the app/element."""

    source_format = SOURCE_KIND
    """The source repository format."""

    status = STATE_KIND
    """The state of the image.
    """

    base_image_id = wtypes.text
    """The id (in glance) of the image to customize."""

    image_format = IMAGE_KIND
    """The image format."""

    created_image_id = wtypes.text
    """The id of the created image in glance."""

    lp_metadata = wtypes.text
    """The languagepack meta data."""

    """Parameters that can be used as part of lp building process."""
    lp_params = wtypes.DictType(
        wtypes.text,
        wtypes.DictType(wtypes.text,
                        api_types.MultiType(wtypes.text,
                                            six.integer_types,
                                            bool,
                                            float)))

    @classmethod
    def from_image(cls, image, host_url):
        as_dict = {}
        image_id = image['id']
        as_dict['uuid'] = image_id
        as_dict['name'] = image['name']
        as_dict['type'] = 'language_pack'
        as_dict['uri'] = '%s/v1/%s/%s' % (host_url, 'language_packs',
                                          image_id)
        image_tags = image['tags']
        comp_versions = []
        run_versions = []
        build_tools = []
        attrs = {}
        for tag in image_tags:
            if tag.startswith(DESCRIPTION):
                as_dict['description'] = tag[len(DESCRIPTION):]
            if tag.startswith(TYPE):
                as_dict['language_pack_type'] = tag[len(TYPE):]
            if tag.startswith(COMPILER_VERSION):
                comp_versions.append(tag[len(COMPILER_VERSION):])
            if tag.startswith(RUNTIME_VERSION):
                run_versions.append(tag[len(RUNTIME_VERSION):])
            if tag.startswith(IMPLEMENTATION):
                as_dict['language_implementation'] = tag[len(IMPLEMENTATION):]
            if tag.startswith(BUILD_TOOL):
                bt_type, bt_version = tag[len(BUILD_TOOL):].split('::')
                build_tool = BuildTool(type=bt_type, version=bt_version)
                build_tools.append(build_tool)
            if tag.startswith(OS_PLATFORM):
                osp_type, osp_version = tag[len(OS_PLATFORM):].split('::')
                os_platform = {'OS': osp_type, 'version': osp_version}
                as_dict['os_platform'] = os_platform
            if tag.startswith(ATTRIBUTE):
                key, value = tag[len(ATTRIBUTE):].split('::')
                attrs[key] = value
        as_dict['attributes'] = attrs
        as_dict['compiler_versions'] = comp_versions
        as_dict['runtime_versions'] = run_versions
        as_dict['build_tool_chain'] = build_tools
        return cls(**(as_dict))

    def as_image_dict(self):
        tags = ['solum::lp']
        if self.description is not wsme.Unset:
            tags.append(DESCRIPTION + self.description)
        if self.language_pack_type is not wsme.Unset:
            tags.append(TYPE + self.language_pack_type)
        if self.compiler_versions is not wsme.Unset:
            for cv in self.compiler_versions:
                tags.append(COMPILER_VERSION + cv)
        if self.runtime_versions is not wsme.Unset:
            for rv in self.runtime_versions:
                tags.append(RUNTIME_VERSION + rv)
        if self.language_implementation is not wsme.Unset:
            tags.append(IMPLEMENTATION + self.language_implementation)
        if self.build_tool_chain is not wsme.Unset:
            for bt in self.build_tool_chain:
                tags.append(BUILD_TOOL + bt.type + '::' + bt.version)
        ptfm = self.os_platform
        if ptfm is not wsme.Unset and 'OS' in ptfm and 'version' in ptfm:
            tags.append(OS_PLATFORM + ptfm['OS'] + '::' + ptfm['version'])
        if self.build_tool_chain is not wsme.Unset:
            for key, value in self.attributes.items():
                tags.append(ATTRIBUTE + key + '::' + value)
        # TODO(julienvey) parse specific attributes for image creation from
        # self.attributes, such as image_format...
        return {'name': self.name, 'tags': tags}

    @classmethod
    def sample(cls):
        return cls(
            uri='http://example.com/v1/images/b3e0d79',
            source_uri='git://example.com/project/app.git',
            source_format='heroku',
            name='php-web-app',
            type='languagepack',
            description='A php web application',
            tags=['group_xyz'],
            project_id='1dae5a09ef2b4d8cbf3594b0eb4f6b94',
            user_id='55f41cf46df74320b9486a35f5d28a11',
            base_image_id='4dae5a09ef2b4d8cbf3594b0eb4f6b94',
            created_image_id='4afasa09ef2b4d8cbf3594b0ec4f6b94',
            image_format='docker',
            language_pack_name='java-1.4-1.7',
            language_pack_type='org.openstack.solum.Java',
            language_pack_id='123456789abcdef',
            compiler_versions=['1.4', '1.6', '1.7'],
            runtime_versions=['1.4', '1.6', '1.7'],
            language_implementation='Sun',
            build_tool_chain=[BuildTool(type='ant', version='1.7'),
                              BuildTool(type='maven', version='1.2')],
            os_platform={'OS': 'Ubuntu', 'version': '12.04'},
            attributes={'optional_attr1': 'value',
                        'admin_email': '*****@*****.**'},
        )
class Cluster(base.APIBase):
    """API representation of a cluster.

    This class enforces type checking and value constraints, and converts
    between the internal object model and the API representation of a Cluster.
    """

    _cluster_template_id = None

    def _get_cluster_template_id(self):
        return self._cluster_template_id

    def _set_cluster_template_id(self, value):
        if value and self._cluster_template_id != value:
            try:
                cluster_template = api_utils.get_resource('ClusterTemplate',
                                                          value)
                self._cluster_template_id = cluster_template.uuid
            except exception.ClusterTemplateNotFound as e:
                # Change error code because 404 (NotFound) is inappropriate
                # response for a POST request to create a Cluster
                e.code = 400  # BadRequest
                raise
        elif value == wtypes.Unset:
            self._cluster_template_id = wtypes.Unset

    uuid = types.uuid
    """Unique UUID for this cluster"""

    name = wtypes.StringType(min_length=1, max_length=242,
                             pattern='^[a-zA-Z][a-zA-Z0-9_.-]*$')
    """Name of this cluster, max length is limited to 242 because of heat
    stack requires max length limit to 255, and Magnum amend a uuid length"""

    cluster_template_id = wsme.wsproperty(wtypes.text,
                                          _get_cluster_template_id,
                                          _set_cluster_template_id,
                                          mandatory=True)
    """The cluster_template UUID"""

    keypair = wsme.wsattr(wtypes.StringType(min_length=1, max_length=255),
                          default=None)
    """The name of the nova ssh keypair"""

    node_count = wsme.wsattr(wtypes.IntegerType(minimum=1), default=1)
    """The node count for this cluster. Default to 1 if not set"""

    master_count = wsme.wsattr(wtypes.IntegerType(minimum=1), default=1)
    """The number of master nodes for this cluster. Default to 1 if not set"""

    docker_volume_size = wtypes.IntegerType(minimum=1)
    """The size in GB of the docker volume"""

    labels = wtypes.DictType(wtypes.text,
                             types.MultiType(wtypes.text, six.integer_types,
                                             bool, float))
    """One or more key/value pairs"""

    master_flavor_id = wtypes.StringType(min_length=1, max_length=255)
    """The flavor of the master node for this Cluster"""

    flavor_id = wtypes.StringType(min_length=1, max_length=255)
    """The flavor of this Cluster"""

    create_timeout = wsme.wsattr(wtypes.IntegerType(minimum=0), default=60)
    """Timeout for creating the cluster in minutes. Default to 60 if not set"""

    links = wsme.wsattr([link.Link], readonly=True)
    """A list containing a self link and associated cluster links"""

    stack_id = wsme.wsattr(wtypes.text, readonly=True)
    """Stack id of the heat stack"""

    status = wtypes.Enum(wtypes.text, *fields.ClusterStatus.ALL)
    """Status of the cluster from the heat stack"""

    status_reason = wtypes.text
    """Status reason of the cluster from the heat stack"""

    health_status = wtypes.Enum(wtypes.text, *fields.ClusterStatus.ALL)
    """Health status of the cluster from the native COE API"""

    health_status_reason = wtypes.DictType(wtypes.text, wtypes.text)
    """Health status reason of the cluster from the native COE API"""

    discovery_url = wtypes.text
    """Url used for cluster node discovery"""

    api_address = wsme.wsattr(wtypes.text, readonly=True)
    """Api address of cluster master node"""

    coe_version = wsme.wsattr(wtypes.text, readonly=True)
    """Version of the COE software currently running in this cluster.
    Example: swarm version or kubernetes version."""

    container_version = wsme.wsattr(wtypes.text, readonly=True)
    """Version of the container software.
    Example: docker version."""

    project_id = wsme.wsattr(wtypes.text, readonly=True)
    """Project id of the cluster belongs to"""

    user_id = wsme.wsattr(wtypes.text, readonly=True)
    """User id of the cluster belongs to"""

    node_addresses = wsme.wsattr([wtypes.text], readonly=True)
    """IP addresses of cluster slave nodes"""

    master_addresses = wsme.wsattr([wtypes.text], readonly=True)
    """IP addresses of cluster master nodes"""

    faults = wsme.wsattr(wtypes.DictType(wtypes.text, wtypes.text))
    """Fault info collected from the heat resources of this cluster"""

    def __init__(self, **kwargs):
        super(Cluster, self).__init__()
        self.fields = []
        for field in objects.Cluster.fields:
            # Skip fields we do not expose.
            if not hasattr(self, field):
                continue
            self.fields.append(field)
            setattr(self, field, kwargs.get(field, wtypes.Unset))

    @staticmethod
    def _convert_with_links(cluster, url, expand=True):
        if not expand:
            cluster.unset_fields_except(['uuid', 'name', 'cluster_template_id',
                                         'keypair', 'docker_volume_size',
                                         'labels', 'node_count', 'status',
                                         'master_flavor_id', 'flavor_id',
                                         'create_timeout', 'master_count',
                                         'stack_id'])

        cluster.links = [link.Link.make_link('self', url, 'clusters',
                                             cluster.uuid),
                         link.Link.make_link('bookmark', url, 'clusters',
                                             cluster.uuid, bookmark=True)]
        return cluster

    @classmethod
    def convert_with_links(cls, rpc_cluster, expand=True):
        cluster = Cluster(**rpc_cluster.as_dict())
        return cls._convert_with_links(cluster, pecan.request.host_url,
                                       expand)

    @classmethod
    def sample(cls, expand=True):
        temp_id = '4a96ac4b-2447-43f1-8ca6-9fd6f36d146d'
        sample = cls(uuid='27e3153e-d5bf-4b7e-b517-fb518e17f34c',
                     name='example',
                     cluster_template_id=temp_id,
                     keypair=None,
                     node_count=2,
                     master_count=1,
                     docker_volume_size=1,
                     labels={},
                     master_flavor_id='m1.small',
                     flavor_id='m1.small',
                     create_timeout=15,
                     stack_id='49dc23f5-ffc9-40c3-9d34-7be7f9e34d63',
                     status=fields.ClusterStatus.CREATE_COMPLETE,
                     status_reason="CREATE completed successfully",
                     health_status=fields.ClusterHealthStatus.HEALTHY,
                     health_status_reason='{"api_server": "OK"}',
                     api_address='172.24.4.3',
                     node_addresses=['172.24.4.4', '172.24.4.5'],
                     created_at=timeutils.utcnow(),
                     updated_at=timeutils.utcnow(),
                     coe_version=None,
                     container_version=None)
        return cls._convert_with_links(sample, 'http://localhost:9511',
                                       expand)
class BayModel(base.APIBase):
    """API representation of a Baymodel.

    This class enforces type checking and value constraints, and converts
    between the internal object model and the API representation of
    a Baymodel.
    """

    uuid = types.uuid
    """Unique UUID for this Baymodel"""

    name = wtypes.StringType(min_length=1, max_length=255)
    """The name of the Baymodel"""

    coe = wtypes.Enum(str, *fields.ClusterType.ALL, mandatory=True)
    """The Container Orchestration Engine for this bay model"""

    image_id = wsme.wsattr(wtypes.StringType(min_length=1, max_length=255),
                           mandatory=True)
    """The image name or UUID to use as a base image for this Baymodel"""

    flavor_id = wtypes.StringType(min_length=1, max_length=255)
    """The flavor of this Baymodel"""

    master_flavor_id = wtypes.StringType(min_length=1, max_length=255)
    """The flavor of the master node for this Baymodel"""

    dns_nameserver = wtypes.IPv4AddressType()
    """The DNS nameserver address"""

    keypair_id = wsme.wsattr(wtypes.StringType(min_length=1, max_length=255),
                             mandatory=True)
    """The name of the nova ssh keypair"""

    external_network_id = wtypes.StringType(min_length=1, max_length=255)
    """The external network to attach to the Bay"""

    fixed_network = wtypes.StringType(min_length=1, max_length=255)
    """The fixed network name to attach to the Bay"""

    fixed_subnet = wtypes.StringType(min_length=1, max_length=255)
    """The fixed subnet name to attach to the Bay"""

    network_driver = wtypes.StringType(min_length=1, max_length=255)
    """The name of the driver used for instantiating container networks"""

    apiserver_port = wtypes.IntegerType(minimum=1024, maximum=65535)
    """The API server port for k8s"""

    docker_volume_size = wtypes.IntegerType(minimum=1)
    """The size in GB of the docker volume"""

    cluster_distro = wtypes.StringType(min_length=1, max_length=255)
    """The Cluster distro for the bay, e.g. coreos, fedora-atomic, etc."""

    links = wsme.wsattr([link.Link], readonly=True)
    """A list containing a self link and associated Baymodel links"""

    http_proxy = wtypes.StringType(min_length=1, max_length=255)
    """Address of a proxy that will receive all HTTP requests and relay them.
    The format is a URL including a port number.
    """

    https_proxy = wtypes.StringType(min_length=1, max_length=255)
    """Address of a proxy that will receive all HTTPS requests and relay them.
    The format is a URL including a port number.
    """

    no_proxy = wtypes.StringType(min_length=1, max_length=255)
    """A comma separated list of IPs for which proxies should not be
    used in the bay
    """

    volume_driver = wtypes.StringType(min_length=1, max_length=255)
    """The name of the driver used for instantiating container volumes"""

    registry_enabled = wsme.wsattr(types.boolean, default=False)
    """Indicates whether the docker registry is enabled"""

    labels = wtypes.DictType(str, str)
    """One or more key/value pairs"""

    tls_disabled = wsme.wsattr(types.boolean, default=False)
    """Indicates whether TLS should be disabled"""

    public = wsme.wsattr(types.boolean, default=False)
    """Indicates whether the Baymodel is public or not."""

    server_type = wsme.wsattr(wtypes.Enum(str, *fields.ServerType.ALL),
                              default='vm')
    """Server type for this bay model"""

    insecure_registry = wtypes.StringType(min_length=1, max_length=255)
    """Insecure registry URL when creating a Baymodel"""

    docker_storage_driver = wtypes.StringType(min_length=1, max_length=255)
    """Docker storage driver"""

    master_lb_enabled = wsme.wsattr(types.boolean, default=False)
    """Indicates whether created bays should have a load balancer for master
    nodes or not.
    """

    floating_ip_enabled = wsme.wsattr(types.boolean, default=True)
    """Indicates whether created bays should have a floating ip or not."""

    def __init__(self, **kwargs):
        self.fields = []
        for field in objects.ClusterTemplate.fields:
            # Skip fields we do not expose.
            if not hasattr(self, field):
                continue
            self.fields.append(field)
            setattr(self, field, kwargs.get(field, wtypes.Unset))

    @staticmethod
    def _convert_with_links(baymodel, url):
        baymodel.links = [link.Link.make_link('self', url, 'baymodels',
                                              baymodel.uuid),
                          link.Link.make_link('bookmark', url, 'baymodels',
                                              baymodel.uuid, bookmark=True)]
        return baymodel

    @classmethod
    def convert_with_links(cls, rpc_baymodel):
        baymodel = BayModel(**rpc_baymodel.as_dict())
        return cls._convert_with_links(baymodel, pecan.request.host_url)

    @classmethod
    def sample(cls):
        sample = cls(
            uuid='27e3153e-d5bf-4b7e-b517-fb518e17f34c',
            name='example',
            image_id='Fedora-k8s',
            flavor_id='m1.small',
            master_flavor_id='m1.small',
            dns_nameserver='8.8.1.1',
            keypair_id='keypair1',
            external_network_id='ffc44e4a-2319-4062-bce0-9ae1c38b05ba',
            fixed_network='private',
            fixed_subnet='private-subnet',
            network_driver='libnetwork',
            volume_driver='cinder',
            apiserver_port=8080,
            docker_volume_size=25,
            docker_storage_driver='devicemapper',
            cluster_distro='fedora-atomic',
            coe=fields.ClusterType.KUBERNETES,
            http_proxy='http://proxy.com:123',
            https_proxy='https://proxy.com:123',
            no_proxy='192.168.0.1,192.168.0.2,192.168.0.3',
            labels={'key1': 'val1', 'key2': 'val2'},
            server_type='vm',
            insecure_registry='10.238.100.100:5000',
            created_at=timeutils.utcnow(),
            updated_at=timeutils.utcnow(),
            public=False,
            master_lb_enabled=False,
            floating_ip_enabled=True,
        )
        return cls._convert_with_links(sample, 'http://localhost:9511')
class Bay(base.APIBase):
    """API representation of a bay.

    This class enforces type checking and value constraints, and converts
    between the internal object model and the API representation of a bay.
    """

    _baymodel_id = None

    def _get_baymodel_id(self):
        return self._baymodel_id

    def _set_baymodel_id(self, value):
        if value and self._baymodel_id != value:
            try:
                baymodel = api_utils.get_resource('ClusterTemplate', value)
                self._baymodel_id = baymodel.uuid
            except exception.ClusterTemplateNotFound as e:
                # Change error code because 404 (NotFound) is inappropriate
                # response for a POST request to create a Cluster
                e.code = 400  # BadRequest
                raise
        elif value == wtypes.Unset:
            self._baymodel_id = wtypes.Unset

    uuid = types.uuid
    """Unique UUID for this bay"""

    name = wtypes.StringType(min_length=1, max_length=242,
                             pattern='^[a-zA-Z][a-zA-Z0-9_.-]*$')
    """Name of this bay, max length is limited to 242 because of heat stack
    requires max length limit to 255, and Magnum amend a uuid length"""

    baymodel_id = wsme.wsproperty(wtypes.text, _get_baymodel_id,
                                  _set_baymodel_id, mandatory=True)
    """The baymodel UUID"""

    node_count = wsme.wsattr(wtypes.IntegerType(minimum=1), default=1)
    """The node count for this bay. Default to 1 if not set"""

    master_count = wsme.wsattr(wtypes.IntegerType(minimum=1), default=1)
    """The number of master nodes for this bay. Default to 1 if not set"""

    docker_volume_size = wtypes.IntegerType(minimum=1)
    """The size in GB of the docker volume"""

    labels = wtypes.DictType(wtypes.text,
                             types.MultiType(wtypes.text, six.integer_types,
                                             bool, float))
    """One or more key/value pairs"""

    master_flavor_id = wtypes.StringType(min_length=1, max_length=255)
    """The master flavor of this Bay"""

    flavor_id = wtypes.StringType(min_length=1, max_length=255)
    """The flavor of this Bay"""

    bay_create_timeout = wsme.wsattr(wtypes.IntegerType(minimum=0),
                                     default=60)
    """Timeout for creating the bay in minutes. Default to 60 if not set"""

    links = wsme.wsattr([link.Link], readonly=True)
    """A list containing a self link and associated bay links"""

    stack_id = wsme.wsattr(wtypes.text, readonly=True)
    """Stack id of the heat stack"""

    status = wtypes.Enum(wtypes.text, *fields.ClusterStatus.ALL)
    """Status of the bay from the heat stack"""

    status_reason = wtypes.text
    """Status reason of the bay from the heat stack"""

    discovery_url = wtypes.text
    """Url used for bay node discovery"""

    api_address = wsme.wsattr(wtypes.text, readonly=True)
    """Api address of cluster master node"""

    coe_version = wsme.wsattr(wtypes.text, readonly=True)
    """Version of the COE software currently running in this cluster.
    Example: swarm version or kubernetes version."""

    container_version = wsme.wsattr(wtypes.text, readonly=True)
    """Version of the container software. Example: docker version."""

    node_addresses = wsme.wsattr([wtypes.text], readonly=True)
    """IP addresses of cluster slave nodes"""

    master_addresses = wsme.wsattr([wtypes.text], readonly=True)
    """IP addresses of cluster master nodes"""

    bay_faults = wsme.wsattr(wtypes.DictType(wtypes.text, wtypes.text))
    """Fault info collected from the heat resources of this bay"""

    def __init__(self, **kwargs):
        super(Bay, self).__init__()
        self.fields = []
        for field in objects.Cluster.fields:
            # Skip fields we do not expose.
            if not hasattr(self, field):
                continue
            self.fields.append(field)
            setattr(self, field, kwargs.get(field, wtypes.Unset))

        # Set the renamed attributes for bay backwards compatibility
        self.fields.append('baymodel_id')
        if 'baymodel_id' in kwargs.keys():
            setattr(self, 'cluster_template_id',
                    kwargs.get('baymodel_id', None))
            setattr(self, 'baymodel_id', kwargs.get('baymodel_id', None))
        else:
            setattr(self, 'baymodel_id',
                    kwargs.get('cluster_template_id', None))

        self.fields.append('bay_create_timeout')
        if 'bay_create_timeout' in kwargs.keys():
            setattr(self, 'create_timeout',
                    kwargs.get('bay_create_timeout', wtypes.Unset))
            setattr(self, 'bay_create_timeout',
                    kwargs.get('bay_create_timeout', wtypes.Unset))
        else:
            setattr(self, 'bay_create_timeout',
                    kwargs.get('create_timeout', wtypes.Unset))

        self.fields.append('bay_faults')
        if 'bay_faults' in kwargs.keys():
            setattr(self, 'faults', kwargs.get('bay_faults', wtypes.Unset))
            setattr(self, 'bay_faults',
                    kwargs.get('bay_faults', wtypes.Unset))
        else:
            setattr(self, 'bay_faults', kwargs.get('faults', wtypes.Unset))

    @staticmethod
    def _convert_with_links(bay, url, expand=True):
        if not expand:
            bay.unset_fields_except(['uuid', 'name', 'baymodel_id',
                                     'docker_volume_size', 'labels',
                                     'master_flavor_id', 'flavor_id',
                                     'node_count', 'status',
                                     'bay_create_timeout', 'master_count',
                                     'stack_id'])

        bay.links = [link.Link.make_link('self', url, 'bays', bay.uuid),
                     link.Link.make_link('bookmark', url, 'bays', bay.uuid,
                                         bookmark=True)]
        return bay

    @classmethod
    def convert_with_links(cls, rpc_bay, expand=True):
        bay = Bay(**rpc_bay.as_dict())
        return cls._convert_with_links(bay, pecan.request.host_url, expand)

    @classmethod
    def sample(cls, expand=True):
        sample = cls(uuid='27e3153e-d5bf-4b7e-b517-fb518e17f34c',
                     name='example',
                     baymodel_id='4a96ac4b-2447-43f1-8ca6-9fd6f36d146d',
                     node_count=2,
                     master_count=1,
                     docker_volume_size=1,
                     labels={},
                     master_flavor_id=None,
                     flavor_id=None,
                     bay_create_timeout=15,
                     stack_id='49dc23f5-ffc9-40c3-9d34-7be7f9e34d63',
                     status=fields.ClusterStatus.CREATE_COMPLETE,
                     status_reason="CREATE completed successfully",
                     api_address='172.24.4.3',
                     node_addresses=['172.24.4.4', '172.24.4.5'],
                     created_at=timeutils.utcnow(),
                     updated_at=timeutils.utcnow(),
                     coe_version=None,
                     container_version=None)
        return cls._convert_with_links(sample, 'http://localhost:9511',
                                       expand)

    def as_dict(self):
        """Render this object as a dict of its fields."""
        # Override this for old bay values
        d = super(Bay, self).as_dict()
        d['cluster_template_id'] = d['baymodel_id']
        del d['baymodel_id']
        d['create_timeout'] = d['bay_create_timeout']
        del d['bay_create_timeout']
        if 'bay_faults' in d.keys():
            d['faults'] = d['bay_faults']
            del d['bay_faults']
        return d
class Host(base._Base):

    id = types.IntegerType()
    "The ID of the host"

    hypervisor_hostname = wtypes.text
    "The hostname of the host"

    # FIXME(sbauza): API V1 provides 'name', so mapping is necessary until we
    # patch the client
    name = hypervisor_hostname

    hypervisor_type = wtypes.text
    "The type of the hypervisor"

    vcpus = types.IntegerType()
    "The number of VCPUs of the host"

    hypervisor_version = types.IntegerType()
    "The version of the hypervisor"

    memory_mb = types.IntegerType()
    "The memory size (in Mb) of the host"

    local_gb = types.IntegerType()
    "The disk size (in Gb) of the host"

    cpu_info = types.CPUInfo()
    "The CPU info JSON data given by the hypervisor"

    trust_id = types.UuidType()
    "The ID of the trust created for delegating the rights of the user"

    extra_capas = wtypes.DictType(wtypes.text, types.TextOrInteger())
    "Extra capabilities for the host"

    @classmethod
    def convert(cls, rpc_obj):
        extra_keys = [key for key in rpc_obj
                      if key not in
                      [i.key for i in wtypes.list_attributes(Host)]]
        extra_capas = dict((capa, rpc_obj[capa])
                           for capa in extra_keys if capa not in ['status'])
        rpc_obj['extra_capas'] = extra_capas
        obj = cls(**rpc_obj)
        return obj

    def as_dict(self):
        dct = super(Host, self).as_dict()
        extra_capas = dct.pop('extra_capas', None)
        if extra_capas is not None:
            dct.update(extra_capas)
        return dct

    @classmethod
    def sample(cls):
        return cls(id=u'1',
                   hypervisor_hostname=u'host01',
                   hypervisor_type=u'QEMU',
                   vcpus=1,
                   hypervisor_version=1000000,
                   memory_mb=8192,
                   local_gb=50,
                   cpu_info="{\"vendor\": \"Intel\", \"model\": \"qemu32\", "
                            "\"arch\": \"x86_64\", \"features\": [],"
                            " \"topology\": {\"cores\": 1}}",
                   extra_capas={u'vgpus': 2, u'fruits': u'bananas'},
                   )
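# A minimal usage sketch (assumption): keys that are not declared Host
# attributes are folded into 'extra_capas' by convert(), and as_dict()
# flattens them back. The sample payload is hypothetical.
rpc_host = {
    'id': 1,
    'hypervisor_hostname': 'host01',
    'vcpus': 4,
    'memory_mb': 8192,
    'vgpus': 2,  # not a declared attribute -> collected into extra_capas
}
host = Host.convert(rpc_host)
# host.extra_capas == {'vgpus': 2}
flat = host.as_dict()
# flat has 'vgpus' back at the top level and no 'extra_capas' key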
def validate(value): """Validate and convert the input to a LocalLinkConnectionType. :param value: A dictionary of values to validate, switch_id is a MAC address or an OpenFlow based datapath_id, switch_info is an optional field. Required Smart NIC fields are port_id and hostname. For example:: { 'switch_id': mac_or_datapath_id(), 'port_id': 'Ethernet3/1', 'switch_info': 'switch1' } Or for Smart NIC:: { 'port_id': 'rep0-0', 'hostname': 'host1-bf' } :returns: A dictionary. :raises: Invalid if some of the keys in the dictionary being validated are unknown, invalid, or some required ones are missing. """ wtypes.DictType(wtypes.text, wtypes.text).validate(value) keys = set(value) # This is to workaround an issue when an API object is initialized from # RPC object, in which dictionary fields that are set to None become # empty dictionaries if not keys: return value invalid = keys - LocalLinkConnectionType.valid_fields if invalid: raise exception.Invalid(_('%s are invalid keys') % (invalid)) # Check any mandatory fields sets are present for mandatory_set in LocalLinkConnectionType.mandatory_fields_list: if mandatory_set <= keys: break else: msg = _('Missing mandatory keys. Required keys are ' '%(required_fields)s. Or in case of Smart NIC ' '%(smart_nic_required_fields)s. ' 'Submitted keys are %(keys)s .') % { 'required_fields': LocalLinkConnectionType.local_link_mandatory_fields, 'smart_nic_required_fields': LocalLinkConnectionType.smart_nic_mandatory_fields, 'keys': keys } raise exception.Invalid(msg) # Check switch_id is either a valid mac address or # OpenFlow datapath_id and normalize it. try: value['switch_id'] = utils.validate_and_normalize_mac( value['switch_id']) except exception.InvalidMAC: try: value['switch_id'] = utils.validate_and_normalize_datapath_id( value['switch_id']) except exception.InvalidDatapathID: raise exception.InvalidSwitchID(switch_id=value['switch_id']) except KeyError: # In Smart NIC case 'switch_id' is optional. pass return value
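# A minimal usage sketch (assumption): with the Smart NIC form, 'switch_id'
# is absent, so the MAC/datapath_id normalization is skipped via the KeyError
# branch above. Both dictionaries below are illustrative.
regular = validate({'switch_id': '0a:1b:2c:3d:4e:5f',
                    'port_id': 'Ethernet3/1',
                    'switch_info': 'switch1'})
smart_nic = validate({'port_id': 'rep0-0',
                      'hostname': 'host1-bf'})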