class L7PolicyPOST(BaseL7PolicyType):
    """Defines mandatory and optional attributes of a POST request."""
    name = wtypes.wsattr(wtypes.StringType(max_length=255))
    description = wtypes.wsattr(wtypes.StringType(max_length=255))
    admin_state_up = wtypes.wsattr(bool, default=True)
    # TODO(johnsom) Remove after deprecation (R series)
    project_id = wtypes.wsattr(wtypes.StringType(max_length=36))
    # Required: the policy action, limited to the values in
    # constants.SUPPORTED_L7POLICY_ACTIONS.
    action = wtypes.wsattr(
        wtypes.Enum(str, *constants.SUPPORTED_L7POLICY_ACTIONS),
        mandatory=True)
    # Redirect targets; which one applies presumably depends on the chosen
    # action -- cross-field validation is not visible in this class.
    redirect_pool_id = wtypes.wsattr(wtypes.UuidType())
    redirect_url = wtypes.wsattr(types.URLType())
    redirect_prefix = wtypes.wsattr(types.URLType())
    # Evaluation position of the policy; defaults to the maximum (last).
    position = wtypes.wsattr(wtypes.IntegerType(
        minimum=constants.MIN_POLICY_POSITION,
        maximum=constants.MAX_POLICY_POSITION),
        default=constants.MAX_POLICY_POSITION)
    listener_id = wtypes.wsattr(wtypes.UuidType(), mandatory=True)
    # Optional L7 rules created together with the policy.
    rules = wtypes.wsattr([l7rule.L7RuleSingleCreate])
    tags = wtypes.wsattr(wtypes.ArrayType(wtypes.StringType(max_length=255)))
    redirect_http_code = wtypes.wsattr(
        wtypes.Enum(int, *constants.SUPPORTED_L7POLICY_REDIRECT_HTTP_CODES))
class ClusterQuotaBase(base.BaseType):
    """Individual cluster quota definitions."""

    # Every quota value is an integer bounded by the same cluster-quota
    # limits from the constants module.
    cluster_total_loadbalancers = wtypes.wsattr(
        wtypes.IntegerType(minimum=consts.MIN_CLUSTERQUOTA,
                           maximum=consts.MAX_CLUSTERQUOTA))
    max_healthmonitors_per_pool = wtypes.wsattr(
        wtypes.IntegerType(minimum=consts.MIN_CLUSTERQUOTA,
                           maximum=consts.MAX_CLUSTERQUOTA))
    max_listeners_per_loadbalancer = wtypes.wsattr(
        wtypes.IntegerType(minimum=consts.MIN_CLUSTERQUOTA,
                           maximum=consts.MAX_CLUSTERQUOTA))
    max_members_per_pool = wtypes.wsattr(
        wtypes.IntegerType(minimum=consts.MIN_CLUSTERQUOTA,
                           maximum=consts.MAX_CLUSTERQUOTA))
    max_pools_per_loadbalancer = wtypes.wsattr(
        wtypes.IntegerType(minimum=consts.MIN_CLUSTERQUOTA,
                           maximum=consts.MAX_CLUSTERQUOTA))
    max_l7policies_per_listener = wtypes.wsattr(
        wtypes.IntegerType(minimum=consts.MIN_CLUSTERQUOTA,
                           maximum=consts.MAX_CLUSTERQUOTA))
    max_l7rules_per_l7policy = wtypes.wsattr(
        wtypes.IntegerType(minimum=consts.MIN_CLUSTERQUOTA,
                           maximum=consts.MAX_CLUSTERQUOTA))

    def to_dict(self, render_unsets=False):
        """Render the quota attributes as a dict (delegates to the base)."""
        return super(ClusterQuotaBase, self).to_dict(render_unsets)
class ListenerPOST(BaseListenerType):
    """Defines mandatory and optional attributes of a POST request."""
    name = wtypes.wsattr(wtypes.StringType(max_length=255))
    description = wtypes.wsattr(wtypes.StringType(max_length=255))
    admin_state_up = wtypes.wsattr(bool, default=True)
    # Required: the listener protocol, limited to the values in
    # constants.SUPPORTED_PROTOCOLS.
    protocol = wtypes.wsattr(wtypes.Enum(str, *constants.SUPPORTED_PROTOCOLS),
                             mandatory=True)
    protocol_port = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_PORT_NUMBER,
                           maximum=constants.MAX_PORT_NUMBER), mandatory=True)
    # -1 is the default (no explicit limit requested).
    connection_limit = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_CONNECTION_LIMIT),
        default=-1)
    default_tls_container_ref = wtypes.wsattr(
        wtypes.StringType(max_length=255))
    sni_container_refs = [wtypes.StringType(max_length=255)]
    # TODO(johnsom) Remove after deprecation (R series)
    project_id = wtypes.wsattr(wtypes.StringType(max_length=36))
    default_pool_id = wtypes.wsattr(wtypes.UuidType())
    # Inline pool definition, as an alternative to default_pool_id.
    default_pool = wtypes.wsattr(pool.PoolSingleCreate)
    # NOTE(review): mutable default ([]); presumably safe under wsme's
    # default handling -- confirm wsattr copies defaults per request.
    l7policies = wtypes.wsattr([l7policy.L7PolicySingleCreate], default=[])
    insert_headers = wtypes.wsattr(
        wtypes.DictType(str, wtypes.StringType(max_length=255)))
    loadbalancer_id = wtypes.wsattr(wtypes.UuidType(), mandatory=True)
    # Timeouts default to the operator-configured haproxy_amphora values.
    timeout_client_data = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT),
        default=CONF.haproxy_amphora.timeout_client_data)
    timeout_member_connect = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT),
        default=CONF.haproxy_amphora.timeout_member_connect)
    timeout_member_data = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT),
        default=CONF.haproxy_amphora.timeout_member_data)
    timeout_tcp_inspect = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT),
        default=CONF.haproxy_amphora.timeout_tcp_inspect)
    tags = wtypes.wsattr(wtypes.ArrayType(wtypes.StringType(max_length=255)))
    client_ca_tls_container_ref = wtypes.StringType(max_length=255)
    client_authentication = wtypes.wsattr(
        wtypes.Enum(str, *constants.SUPPORTED_CLIENT_AUTH_MODES),
        default=constants.CLIENT_AUTH_NONE)
    client_crl_container_ref = wtypes.StringType(max_length=255)
class CronTrigger(resource.Resource):
    """CronTrigger resource."""

    id = wtypes.text
    name = wtypes.text
    workflow_name = wtypes.text
    workflow_id = wtypes.text
    # JSON-typed attributes are exposed directly as dicts.
    workflow_input = types.jsontype
    workflow_params = types.jsontype
    project_id = wsme.wsattr(wtypes.text, readonly=True)
    scope = SCOPE_TYPES
    # Cron expression controlling when the trigger fires.
    pattern = wtypes.text
    remaining_executions = wtypes.IntegerType(minimum=1)
    first_execution_time = wtypes.text
    next_execution_time = wtypes.text
    created_at = wtypes.text
    updated_at = wtypes.text

    @classmethod
    def sample(cls):
        """Return an example CronTrigger instance (used for API docs)."""
        return cls(
            id='123e4567-e89b-12d3-a456-426655440000',
            name='my_trigger',
            workflow_name='my_wf',
            workflow_id='123e4567-e89b-12d3-a456-426655441111',
            workflow_input={},
            workflow_params={},
            project_id='40a908dbddfe48ad80a87fb30fa70a03',
            scope='private',
            pattern='* * * * *',
            remaining_executions=42,
            created_at='1970-01-01T00:00:00.000000',
            updated_at='1970-01-01T00:00:00.000000'
        )
class ListenerSingleCreate(BaseListenerType):
    """Defines mandatory and optional attributes of a POST request."""
    name = wtypes.wsattr(wtypes.StringType(max_length=255))
    description = wtypes.wsattr(wtypes.StringType(max_length=255))
    admin_state_up = wtypes.wsattr(bool, default=True)
    # Required: protocol and port for the listener.
    protocol = wtypes.wsattr(wtypes.Enum(str, *constants.SUPPORTED_PROTOCOLS),
                             mandatory=True)
    protocol_port = wtypes.wsattr(wtypes.IntegerType(
        minimum=constants.MIN_PORT_NUMBER,
        maximum=constants.MAX_PORT_NUMBER), mandatory=True)
    connection_limit = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_CONNECTION_LIMIT),
        default=constants.DEFAULT_CONNECTION_LIMIT)
    default_tls_container_ref = wtypes.wsattr(
        wtypes.StringType(max_length=255))
    sni_container_refs = [wtypes.StringType(max_length=255)]
    default_pool_id = wtypes.wsattr(wtypes.UuidType())
    default_pool = wtypes.wsattr(pool.PoolSingleCreate)
    l7policies = wtypes.wsattr([l7policy.L7PolicySingleCreate], default=[])
    insert_headers = wtypes.wsattr(
        wtypes.DictType(str, wtypes.StringType(max_length=255)))
    # Timeouts have no defaults here (unlike ListenerPOST); unset values
    # are presumably filled in by the caller -- confirm against controller.
    timeout_client_data = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT))
    timeout_member_connect = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT))
    timeout_member_data = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT))
    timeout_tcp_inspect = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT))
    tags = wtypes.wsattr(wtypes.ArrayType(wtypes.StringType(max_length=255)))
    client_ca_tls_container_ref = wtypes.StringType(max_length=255)
    client_authentication = wtypes.wsattr(wtypes.Enum(
        str, *constants.SUPPORTED_CLIENT_AUTH_MODES),
        default=constants.CLIENT_AUTH_NONE)
    client_crl_container_ref = wtypes.StringType(max_length=255)
    allowed_cidrs = wtypes.wsattr([types.CidrType()])
    tls_ciphers = wtypes.StringType(max_length=2048)
    tls_versions = wtypes.wsattr(
        wtypes.ArrayType(wtypes.StringType(max_length=32)))
    alpn_protocols = wtypes.wsattr(wtypes.ArrayType(types.AlpnProtocolType()))
class ListenerSingleCreate(BaseListenerType):
    """Defines mandatory and optional attributes of a POST request."""
    name = wtypes.wsattr(wtypes.StringType(max_length=255))
    description = wtypes.wsattr(wtypes.StringType(max_length=255))
    admin_state_up = wtypes.wsattr(bool, default=True)
    protocol = wtypes.wsattr(wtypes.Enum(str, *constants.SUPPORTED_PROTOCOLS),
                             mandatory=True)
    protocol_port = wtypes.wsattr(wtypes.IntegerType(
        minimum=constants.MIN_PORT_NUMBER,
        maximum=constants.MAX_PORT_NUMBER), mandatory=True)
    # -1 is the default (no explicit limit requested).
    connection_limit = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_CONNECTION_LIMIT),
        default=-1)
    default_tls_container_ref = wtypes.wsattr(
        wtypes.StringType(max_length=255))
    sni_container_refs = [wtypes.StringType(max_length=255)]
    default_pool_id = wtypes.wsattr(wtypes.UuidType())
    default_pool = wtypes.wsattr(pool.PoolSingleCreate)
    l7policies = wtypes.wsattr([l7policy.L7PolicySingleCreate], default=[])
    insert_headers = wtypes.wsattr(
        wtypes.DictType(str, wtypes.StringType(max_length=255)))
    # Timeouts default to the project-wide constants (not operator config).
    timeout_client_data = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT),
        default=constants.DEFAULT_TIMEOUT_CLIENT_DATA)
    timeout_member_connect = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT),
        default=constants.DEFAULT_TIMEOUT_MEMBER_CONNECT)
    timeout_member_data = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT),
        default=constants.DEFAULT_TIMEOUT_MEMBER_DATA)
    timeout_tcp_inspect = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_TIMEOUT,
                           maximum=constants.MAX_TIMEOUT),
        default=constants.DEFAULT_TIMEOUT_TCP_INSPECT)
class ListenerResponse(BaseListenerType):
    """Defines which attributes are to be shown on any response."""
    id = wtypes.wsattr(wtypes.UuidType())
    name = wtypes.wsattr(wtypes.StringType())
    description = wtypes.wsattr(wtypes.StringType())
    provisioning_status = wtypes.wsattr(wtypes.StringType())
    operating_status = wtypes.wsattr(wtypes.StringType())
    admin_state_up = wtypes.wsattr(bool)
    protocol = wtypes.wsattr(wtypes.text)
    protocol_port = wtypes.wsattr(wtypes.IntegerType())
    connection_limit = wtypes.wsattr(wtypes.IntegerType())
    default_tls_container_ref = wtypes.wsattr(wtypes.StringType())
    sni_container_refs = [wtypes.StringType()]
    # TODO(johnsom) Remove after deprecation (R series)
    project_id = wtypes.wsattr(wtypes.StringType())
    # TODO(johnsom) Remove after deprecation (R series)
    tenant_id = wtypes.wsattr(wtypes.StringType())
    default_pool_id = wtypes.wsattr(wtypes.UuidType())
    default_pool = wtypes.wsattr(pool.PoolResponse)
    l7policies = wtypes.wsattr([l7policy.L7PolicyResponse])
    insert_headers = wtypes.wsattr(wtypes.DictType(str, str))
    created_at = wtypes.wsattr(wtypes.datetime.datetime)
    updated_at = wtypes.wsattr(wtypes.datetime.datetime)
    loadbalancers = wtypes.wsattr([MinimalLoadBalancer])

    @classmethod
    def from_data_model(cls, data_model, children=False):
        """Build a ListenerResponse from an internal listener data model.

        :param data_model: internal listener object; must expose project_id,
            sni_containers, tls_certificate_id, load_balancer, default_pool
            and l7policies.
        :param children: when False, default_pool and l7policies are removed
            from the response entirely.
        :returns: a populated ListenerResponse.
        """
        listener = super(ListenerResponse, cls).from_data_model(
            data_model, children=children)
        # Legacy alias: tenant_id mirrors project_id.
        listener.tenant_id = data_model.project_id
        listener.sni_container_refs = [
            sni_c.tls_container_id for sni_c in data_model.sni_containers]
        if data_model.tls_certificate_id:
            listener.default_tls_container_ref = data_model.tls_certificate_id
        listener.loadbalancers = [
            MinimalLoadBalancer.from_data_model(data_model.load_balancer)]
        # Normalize NULLs from the data model to empty strings for the API.
        if not listener.description:
            listener.description = ""
        if not listener.name:
            listener.name = ""

        if not children:
            # NOTE(blogan): do not show default_pool if the request does not
            # want to see children
            del listener.default_pool
            del listener.l7policies
            return listener
        if data_model.default_pool:
            listener.default_pool = pool.PoolResponse.from_data_model(
                data_model.default_pool, children=children)
        if data_model.l7policies:
            listener.l7policies = [
                l7policy.L7PolicyResponse.from_data_model(
                    policy, children=children)
                for policy in data_model.l7policies]
        # Deleting the wsme attribute leaves it out of the rendered response
        # (as opposed to rendering null).
        if not listener.default_pool:
            del listener.default_pool
            del listener.default_pool_id
        if not listener.l7policies:
            del listener.l7policies
        return listener
class Cluster(base.APIBase):
    """API representation of a cluster.

    This class enforces type checking and value constraints, and converts
    between the internal object model and the API representation of a
    Cluster.
    """

    # Backing storage for the cluster_template_id wsproperty below.
    _cluster_template_id = None

    def _get_cluster_template_id(self):
        return self._cluster_template_id

    def _set_cluster_template_id(self, value):
        # Resolves a template name/id to its UUID via the API layer; only
        # hits the lookup when the value actually changes.
        if value and self._cluster_template_id != value:
            try:
                cluster_template = api_utils.get_resource('ClusterTemplate',
                                                          value)
                self._cluster_template_id = cluster_template.uuid
            except exception.ClusterTemplateNotFound as e:
                # Change error code because 404 (NotFound) is inappropriate
                # response for a POST request to create a Cluster
                e.code = 400  # BadRequest
                raise
        elif value == wtypes.Unset:
            self._cluster_template_id = wtypes.Unset

    uuid = types.uuid
    """Unique UUID for this cluster"""

    name = wtypes.StringType(min_length=1, max_length=242,
                             pattern='^[a-zA-Z][a-zA-Z0-9_.-]*$')
    """Name of this cluster, max length is limited to 242 because of heat
    stack requires max length limit to 255, and Magnum amend a uuid length"""

    cluster_template_id = wsme.wsproperty(wtypes.text,
                                          _get_cluster_template_id,
                                          _set_cluster_template_id,
                                          mandatory=True)
    """The cluster_template UUID"""

    keypair = wsme.wsattr(wtypes.StringType(min_length=1, max_length=255),
                          default=None)
    """The name of the nova ssh keypair"""

    node_count = wsme.wsattr(wtypes.IntegerType(minimum=1), default=1)
    """The node count for this cluster. Default to 1 if not set"""

    master_count = wsme.wsattr(wtypes.IntegerType(minimum=1), default=1)
    """The number of master nodes for this cluster. Default to 1 if not set"""

    docker_volume_size = wtypes.IntegerType(minimum=1)
    """The size in GB of the docker volume"""

    labels = wtypes.DictType(wtypes.text,
                             types.MultiType(wtypes.text, six.integer_types,
                                             bool, float))
    """One or more key/value pairs"""

    master_flavor_id = wtypes.StringType(min_length=1, max_length=255)
    """The flavor of the master node for this Cluster"""

    flavor_id = wtypes.StringType(min_length=1, max_length=255)
    """The flavor of this Cluster"""

    create_timeout = wsme.wsattr(wtypes.IntegerType(minimum=0), default=60)
    """Timeout for creating the cluster in minutes. Default to 60 if not
    set"""

    links = wsme.wsattr([link.Link], readonly=True)
    """A list containing a self link and associated cluster links"""

    stack_id = wsme.wsattr(wtypes.text, readonly=True)
    """Stack id of the heat stack"""

    status = wtypes.Enum(wtypes.text, *fields.ClusterStatus.ALL)
    """Status of the cluster from the heat stack"""

    status_reason = wtypes.text
    """Status reason of the cluster from the heat stack"""

    health_status = wtypes.Enum(wtypes.text, *fields.ClusterStatus.ALL)
    """Health status of the cluster from the native COE API"""

    health_status_reason = wtypes.DictType(wtypes.text, wtypes.text)
    """Health status reason of the cluster from the native COE API"""

    discovery_url = wtypes.text
    """Url used for cluster node discovery"""

    api_address = wsme.wsattr(wtypes.text, readonly=True)
    """Api address of cluster master node"""

    coe_version = wsme.wsattr(wtypes.text, readonly=True)
    """Version of the COE software currently running in this cluster.
    Example: swarm version or kubernetes version."""

    container_version = wsme.wsattr(wtypes.text, readonly=True)
    """Version of the container software. Example: docker version."""

    project_id = wsme.wsattr(wtypes.text, readonly=True)
    """Project id of the cluster belongs to"""

    user_id = wsme.wsattr(wtypes.text, readonly=True)
    """User id of the cluster belongs to"""

    node_addresses = wsme.wsattr([wtypes.text], readonly=True)
    """IP addresses of cluster slave nodes"""

    master_addresses = wsme.wsattr([wtypes.text], readonly=True)
    """IP addresses of cluster master nodes"""

    faults = wsme.wsattr(wtypes.DictType(wtypes.text, wtypes.text))
    """Fault info collected from the heat resources of this cluster"""

    def __init__(self, **kwargs):
        # Copy only the fields the internal Cluster object defines AND this
        # API type exposes; everything else is left Unset.
        super(Cluster, self).__init__()
        self.fields = []
        for field in objects.Cluster.fields:
            # Skip fields we do not expose.
            if not hasattr(self, field):
                continue
            self.fields.append(field)
            setattr(self, field, kwargs.get(field, wtypes.Unset))

    @staticmethod
    def _convert_with_links(cluster, url, expand=True):
        # When not expanded, trim the response down to the summary fields.
        if not expand:
            cluster.unset_fields_except(['uuid', 'name', 'cluster_template_id',
                                         'keypair', 'docker_volume_size',
                                         'labels', 'node_count', 'status',
                                         'master_flavor_id', 'flavor_id',
                                         'create_timeout', 'master_count',
                                         'stack_id'])

        cluster.links = [link.Link.make_link('self', url, 'clusters',
                                             cluster.uuid),
                         link.Link.make_link('bookmark', url, 'clusters',
                                             cluster.uuid, bookmark=True)]
        return cluster

    @classmethod
    def convert_with_links(cls, rpc_cluster, expand=True):
        """Convert an internal cluster object into an API Cluster."""
        cluster = Cluster(**rpc_cluster.as_dict())
        return cls._convert_with_links(cluster, pecan.request.host_url,
                                       expand)

    @classmethod
    def sample(cls, expand=True):
        """Return an example Cluster instance (used for API docs)."""
        temp_id = '4a96ac4b-2447-43f1-8ca6-9fd6f36d146d'
        # NOTE(review): health_status_reason is declared as a DictType above
        # but the sample passes a JSON string -- confirm which is intended.
        sample = cls(uuid='27e3153e-d5bf-4b7e-b517-fb518e17f34c',
                     name='example',
                     cluster_template_id=temp_id,
                     keypair=None,
                     node_count=2,
                     master_count=1,
                     docker_volume_size=1,
                     labels={},
                     master_flavor_id='m1.small',
                     flavor_id='m1.small',
                     create_timeout=15,
                     stack_id='49dc23f5-ffc9-40c3-9d34-7be7f9e34d63',
                     status=fields.ClusterStatus.CREATE_COMPLETE,
                     status_reason="CREATE completed successfully",
                     health_status=fields.ClusterHealthStatus.HEALTHY,
                     health_status_reason='{"api_server": "OK"}',
                     api_address='172.24.4.3',
                     node_addresses=['172.24.4.4', '172.24.4.5'],
                     created_at=timeutils.utcnow(),
                     updated_at=timeutils.utcnow(),
                     coe_version=None,
                     container_version=None)
        return cls._convert_with_links(sample, 'http://localhost:9511',
                                       expand)
class CronTrigger(resource.Resource):
    """CronTrigger resource."""

    id = wtypes.text
    name = wtypes.text
    workflow_name = wtypes.text
    # workflow_input / workflow_params are stored internally as JSON
    # strings; to_dict()/from_dict() translate them to and from dicts.
    workflow_input = wtypes.text
    workflow_params = wtypes.text
    scope = SCOPE_TYPES
    pattern = wtypes.text
    remaining_executions = wtypes.IntegerType(minimum=1)
    first_execution_time = wtypes.text
    next_execution_time = wtypes.text
    created_at = wtypes.text
    updated_at = wtypes.text

    def to_dict(self):
        """Render as a dict, decoding JSON-string fields into dicts."""
        d = super(CronTrigger, self).to_dict()
        self._transform_string_to_dict(
            d, ['workflow_input', 'workflow_params'])
        return d

    def _transform_string_to_dict(self, d, keys):
        """Transforms values of dict by given key list.

        :param d: dict to transform.
        :param keys: list of key names in dict
        """
        for key in keys:
            raw = d.get(key)
            if raw:
                d[key] = json.loads(raw)

    @classmethod
    def from_dict(cls, d):
        """Build a CronTrigger from a dict, encoding dict fields to JSON."""
        trigger = cls()

        for key, val in d.items():
            if not hasattr(trigger, key):
                continue
            # Nonetype check for dictionary must be explicit.
            if (val is not None
                    and key in ['workflow_input', 'workflow_params']):
                val = json.dumps(val)
            setattr(trigger, key, val)

        return trigger

    @classmethod
    def sample(cls):
        """Return an example CronTrigger instance (used for API docs)."""
        return cls(id='123e4567-e89b-12d3-a456-426655440000',
                   name='my_trigger',
                   workflow_name='my_wf',
                   workflow_input={},
                   workflow_params={},
                   scope='private',
                   pattern='* * * * *',
                   remaining_executions=42,
                   created_at='1970-01-01T00:00:00.000000',
                   updated_at='1970-01-01T00:00:00.000000')
class BayModel(base.APIBase):
    """API representation of a baymodel.

    This class enforces type checking and value constraints, and converts
    between the internal object model and the API representation of a
    baymodel.
    """

    uuid = types.uuid
    """Unique UUID for this baymodel"""

    name = wtypes.text
    """The name of the bay model"""

    image_id = wtypes.text
    """The image name or UUID to use as a base image for this baymodel"""

    flavor_id = wtypes.text
    """The flavor of this bay model"""

    master_flavor_id = wtypes.text
    """The flavor of the master node for this bay model"""

    dns_nameserver = wtypes.text
    """The DNS nameserver address"""

    keypair_id = wtypes.text
    """The name or id of the nova ssh keypair"""

    external_network_id = wtypes.text
    """The external network to attach the Bay"""

    fixed_network = wtypes.text
    """The fixed network name to attach the Bay"""

    apiserver_port = wtypes.IntegerType()
    """The API server port for k8s"""

    docker_volume_size = wtypes.IntegerType()
    """The size in GB of the docker volume"""

    ssh_authorized_key = wtypes.text
    """The SSH Authorized Key"""

    links = wsme.wsattr([link.Link], readonly=True)
    """A list containing a self link and associated baymodel links"""

    def __init__(self, **kwargs):
        # NOTE(review): unlike the sibling APIBase subclasses, this
        # __init__ does not call super().__init__() -- confirm intentional.
        self.fields = []
        for field in objects.BayModel.fields:
            # Skip fields we do not expose.
            if not hasattr(self, field):
                continue
            self.fields.append(field)
            setattr(self, field, kwargs.get(field, wtypes.Unset))

    @staticmethod
    def _convert_with_links(baymodel, url, expand=True):
        # When not expanded, trim the response down to the summary fields.
        if not expand:
            baymodel.unset_fields_except(
                ['uuid', 'name', 'image_id', 'apiserver_port'])

        baymodel.links = [
            link.Link.make_link('self', url, 'baymodels', baymodel.uuid),
            link.Link.make_link('bookmark', url, 'baymodels', baymodel.uuid,
                                bookmark=True)
        ]
        return baymodel

    @classmethod
    def convert_with_links(cls, rpc_baymodel, expand=True):
        """Convert an internal baymodel object into an API BayModel."""
        baymodel = BayModel(**rpc_baymodel.as_dict())
        return cls._convert_with_links(baymodel, pecan.request.host_url,
                                       expand)

    @classmethod
    def sample(cls, expand=True):
        """Return an example BayModel instance (used for API docs)."""
        sample = cls(
            uuid='27e3153e-d5bf-4b7e-b517-fb518e17f34c',
            name='example',
            image_id='Fedora-k8s',
            flavor_id='m1.small',
            master_flavor_id='m1.small',
            dns_nameserver='8.8.1.1',
            keypair_id='keypair1',
            external_network_id='ffc44e4a-2319-4062-bce0-9ae1c38b05ba',
            fixed_network='private',
            apiserver_port=8080,
            docker_volume_size=25,
            ssh_authorized_key='ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAB',
            created_at=datetime.datetime.utcnow(),
            updated_at=datetime.datetime.utcnow())
        return cls._convert_with_links(sample, 'http://localhost:9511',
                                       expand)
class SessionPersistenceResponse(types.BaseType):
    """Defines which attributes are to be shown on any response."""
    type = wtypes.wsattr(wtypes.text)
    cookie_name = wtypes.wsattr(wtypes.text)
    persistence_timeout = wtypes.wsattr(wtypes.IntegerType())
    persistence_granularity = wtypes.wsattr(types.IPAddressType())
class Bay(base.APIBase):
    """API representation of a bay.

    This class enforces type checking and value constraints, and converts
    between the internal object model and the API representation of a bay.
    """

    # Backing storage for the baymodel_id wsproperty below.
    _baymodel_id = None

    def _get_baymodel_id(self):
        return self._baymodel_id

    def _set_baymodel_id(self, value):
        # Resolves a template name/id to its UUID via the API layer; only
        # hits the lookup when the value actually changes.
        if value and self._baymodel_id != value:
            try:
                baymodel = api_utils.get_resource('ClusterTemplate', value)
                self._baymodel_id = baymodel.uuid
            except exception.ClusterTemplateNotFound as e:
                # Change error code because 404 (NotFound) is inappropriate
                # response for a POST request to create a Cluster
                e.code = 400  # BadRequest
                raise
        elif value == wtypes.Unset:
            self._baymodel_id = wtypes.Unset

    uuid = types.uuid
    """Unique UUID for this bay"""

    name = wtypes.StringType(min_length=1, max_length=242,
                             pattern='^[a-zA-Z][a-zA-Z0-9_.-]*$')
    """Name of this bay, max length is limited to 242 because of heat stack
    requires max length limit to 255, and Magnum amend a uuid length"""

    baymodel_id = wsme.wsproperty(wtypes.text, _get_baymodel_id,
                                  _set_baymodel_id, mandatory=True)
    """The baymodel UUID"""

    node_count = wsme.wsattr(wtypes.IntegerType(minimum=1), default=1)
    """The node count for this bay. Default to 1 if not set"""

    master_count = wsme.wsattr(wtypes.IntegerType(minimum=1), default=1)
    """The number of master nodes for this bay. Default to 1 if not set"""

    docker_volume_size = wtypes.IntegerType(minimum=1)
    """The size in GB of the docker volume"""

    labels = wtypes.DictType(
        wtypes.text,
        types.MultiType(wtypes.text, six.integer_types, bool, float))
    """One or more key/value pairs"""

    master_flavor_id = wtypes.StringType(min_length=1, max_length=255)
    """The master flavor of this Bay"""

    flavor_id = wtypes.StringType(min_length=1, max_length=255)
    """The flavor of this Bay"""

    bay_create_timeout = wsme.wsattr(wtypes.IntegerType(minimum=0),
                                     default=60)
    """Timeout for creating the bay in minutes. Default to 60 if not set"""

    links = wsme.wsattr([link.Link], readonly=True)
    """A list containing a self link and associated bay links"""

    stack_id = wsme.wsattr(wtypes.text, readonly=True)
    """Stack id of the heat stack"""

    status = wtypes.Enum(wtypes.text, *fields.ClusterStatus.ALL)
    """Status of the bay from the heat stack"""

    status_reason = wtypes.text
    """Status reason of the bay from the heat stack"""

    discovery_url = wtypes.text
    """Url used for bay node discovery"""

    api_address = wsme.wsattr(wtypes.text, readonly=True)
    """Api address of cluster master node"""

    coe_version = wsme.wsattr(wtypes.text, readonly=True)
    """Version of the COE software currently running in this cluster.
    Example: swarm version or kubernetes version."""

    container_version = wsme.wsattr(wtypes.text, readonly=True)
    """Version of the container software. Example: docker version."""

    node_addresses = wsme.wsattr([wtypes.text], readonly=True)
    """IP addresses of cluster slave nodes"""

    master_addresses = wsme.wsattr([wtypes.text], readonly=True)
    """IP addresses of cluster master nodes"""

    bay_faults = wsme.wsattr(wtypes.DictType(wtypes.text, wtypes.text))
    """Fault info collected from the heat resources of this bay"""

    def __init__(self, **kwargs):
        super(Bay, self).__init__()
        self.fields = []
        # Field list comes from objects.Cluster: the bay API is kept in
        # sync with the cluster object it is backed by (see as_dict below).
        for field in objects.Cluster.fields:
            # Skip fields we do not expose.
            if not hasattr(self, field):
                continue
            self.fields.append(field)
            setattr(self, field, kwargs.get(field, wtypes.Unset))

        # Set the renamed attributes for bay backwards compatibility.
        # Each bay-named attribute mirrors its cluster-named counterpart
        # so either spelling can be supplied by the caller.
        self.fields.append('baymodel_id')
        if 'baymodel_id' in kwargs.keys():
            setattr(self, 'cluster_template_id',
                    kwargs.get('baymodel_id', None))
            setattr(self, 'baymodel_id', kwargs.get('baymodel_id', None))
        else:
            setattr(self, 'baymodel_id',
                    kwargs.get('cluster_template_id', None))

        self.fields.append('bay_create_timeout')
        if 'bay_create_timeout' in kwargs.keys():
            setattr(self, 'create_timeout',
                    kwargs.get('bay_create_timeout', wtypes.Unset))
            setattr(self, 'bay_create_timeout',
                    kwargs.get('bay_create_timeout', wtypes.Unset))
        else:
            setattr(self, 'bay_create_timeout',
                    kwargs.get('create_timeout', wtypes.Unset))

        self.fields.append('bay_faults')
        if 'bay_faults' in kwargs.keys():
            setattr(self, 'faults', kwargs.get('bay_faults', wtypes.Unset))
            setattr(self, 'bay_faults',
                    kwargs.get('bay_faults', wtypes.Unset))
        else:
            setattr(self, 'bay_faults', kwargs.get('faults', wtypes.Unset))

    @staticmethod
    def _convert_with_links(bay, url, expand=True):
        # When not expanded, trim the response down to the summary fields.
        if not expand:
            bay.unset_fields_except([
                'uuid', 'name', 'baymodel_id', 'docker_volume_size',
                'labels', 'master_flavor_id', 'flavor_id', 'node_count',
                'status', 'bay_create_timeout', 'master_count', 'stack_id'
            ])

        bay.links = [
            link.Link.make_link('self', url, 'bays', bay.uuid),
            link.Link.make_link('bookmark', url, 'bays', bay.uuid,
                                bookmark=True)
        ]
        return bay

    @classmethod
    def convert_with_links(cls, rpc_bay, expand=True):
        """Convert an internal cluster object into an API Bay."""
        bay = Bay(**rpc_bay.as_dict())
        return cls._convert_with_links(bay, pecan.request.host_url, expand)

    @classmethod
    def sample(cls, expand=True):
        """Return an example Bay instance (used for API docs)."""
        sample = cls(uuid='27e3153e-d5bf-4b7e-b517-fb518e17f34c',
                     name='example',
                     baymodel_id='4a96ac4b-2447-43f1-8ca6-9fd6f36d146d',
                     node_count=2,
                     master_count=1,
                     docker_volume_size=1,
                     labels={},
                     master_flavor_id=None,
                     flavor_id=None,
                     bay_create_timeout=15,
                     stack_id='49dc23f5-ffc9-40c3-9d34-7be7f9e34d63',
                     status=fields.ClusterStatus.CREATE_COMPLETE,
                     status_reason="CREATE completed successfully",
                     api_address='172.24.4.3',
                     node_addresses=['172.24.4.4', '172.24.4.5'],
                     created_at=timeutils.utcnow(),
                     updated_at=timeutils.utcnow(),
                     coe_version=None,
                     container_version=None)
        return cls._convert_with_links(sample, 'http://localhost:9511',
                                       expand)

    def as_dict(self):
        """Render this object as a dict of its fields."""
        # Override this for old bay values: rename the bay-era keys back to
        # their cluster-object names before handing the dict onward.
        d = super(Bay, self).as_dict()
        d['cluster_template_id'] = d['baymodel_id']
        del d['baymodel_id']
        d['create_timeout'] = d['bay_create_timeout']
        del d['bay_create_timeout']
        if 'bay_faults' in d.keys():
            d['faults'] = d['bay_faults']
            del d['bay_faults']
        return d
class ATypeInt(object):
    # Single integer attribute constrained to the inclusive range [1, 5]
    # by types.IntegerType.
    attr = types.IntegerType(minimum=1, maximum=5)
class NodeGroup(base.APIBase):
    """API representation of a Node group.

    This class enforces type checking and value constraints, and converts
    between the internal object model and the API representation of
    NodeGroup.
    """

    id = wsme.wsattr(wtypes.IntegerType(minimum=1))
    """unique id"""

    uuid = types.uuid
    """Unique UUID for this nodegroup"""

    name = wsme.wsattr(wtypes.StringType(min_length=1, max_length=255),
                       default=None)
    """Name of this nodegroup"""

    cluster_id = types.uuid
    """Unique UUID for the cluster where the nodegroup belongs to"""

    project_id = wsme.wsattr(wtypes.text, readonly=True)
    """Project UUID for this nodegroup"""

    docker_volume_size = wtypes.IntegerType(minimum=1)
    """The size in GB of the docker volume"""

    labels = wtypes.DictType(wtypes.text,
                             types.MultiType(wtypes.text, six.integer_types,
                                             bool, float))
    """One or more key/value pairs"""

    links = wsme.wsattr([link.Link], readonly=True)
    """A list containing a self link and associated nodegroup links"""

    flavor_id = wtypes.StringType(min_length=1, max_length=255)
    """The flavor of this nodegroup"""

    image_id = wtypes.StringType(min_length=1, max_length=255)
    """The image used for this nodegroup"""

    node_addresses = wsme.wsattr([wtypes.text], readonly=True)
    """IP addresses of nodegroup nodes"""

    node_count = wsme.wsattr(wtypes.IntegerType(minimum=1), default=1)
    """The node count for this nodegroup. Default to 1 if not set"""

    role = wsme.wsattr(wtypes.StringType(min_length=1, max_length=255),
                       default='worker')
    """The role of the nodes included in this nodegroup"""

    min_node_count = wsme.wsattr(wtypes.IntegerType(minimum=1), default=1)
    """The minimum allowed nodes for this nodegroup. Default to 1 if not
    set"""

    max_node_count = wsme.wsattr(wtypes.IntegerType(minimum=1), default=None)
    """The maximum allowed nodes for this nodegroup. Default to 1 if not
    set"""

    is_default = types.BooleanType()
    """Specifies is a nodegroup was created by default or not"""

    stack_id = wsme.wsattr(wtypes.text, readonly=True)
    """Stack id of the heat stack"""

    status = wtypes.Enum(wtypes.text, *fields.ClusterStatus.ALL)
    """Status of the nodegroup from the heat stack"""

    status_reason = wtypes.text
    """Status reason of the nodegroup from the heat stack"""

    version = wtypes.text
    """Version of the nodegroup"""

    merge_labels = wsme.wsattr(types.boolean, default=False)
    """Indicates whether the labels will be merged with the cluster
    labels."""

    labels_overridden = wtypes.DictType(
        wtypes.text, types.MultiType(
            wtypes.text, six.integer_types, bool, float))
    """Contains labels that have a value different than the parent labels."""

    labels_added = wtypes.DictType(
        wtypes.text, types.MultiType(
            wtypes.text, six.integer_types, bool, float))
    """Contains labels that do not exist in the parent."""

    labels_skipped = wtypes.DictType(
        wtypes.text, types.MultiType(
            wtypes.text, six.integer_types, bool, float))
    """Contains labels that exist in the parent but were not inherited."""

    def __init__(self, **kwargs):
        # Copy only the fields the internal NodeGroup object defines AND
        # this API type exposes; everything else is left Unset.
        super(NodeGroup, self).__init__()
        self.fields = []
        for field in objects.NodeGroup.fields:
            # Skip fields we do not expose.
            if not hasattr(self, field):
                continue
            self.fields.append(field)
            setattr(self, field, kwargs.get(field, wtypes.Unset))

    @classmethod
    def convert(cls, nodegroup, expand=True):
        """Convert an internal nodegroup object into an API NodeGroup.

        When expanded, also computes the label diff against the parent
        cluster and attaches self/bookmark links.
        """
        url = pecan.request.host_url
        cluster_path = 'clusters/%s' % nodegroup.cluster_id
        nodegroup_path = 'nodegroups/%s' % nodegroup.uuid

        ng = NodeGroup(**nodegroup.as_dict())
        if not expand:
            ng.unset_fields_except(["uuid", "name", "flavor_id", "node_count",
                                    "role", "is_default", "image_id",
                                    "status", "stack_id"])
        else:
            ng.links = [link.Link.make_link('self', url, cluster_path,
                                            nodegroup_path),
                        link.Link.make_link('bookmark', url, cluster_path,
                                            nodegroup_path, bookmark=True)]
            cluster = api_utils.get_resource('Cluster', ng.cluster_id)
            overridden, added, skipped = api_utils.get_labels_diff(
                cluster.labels, ng.labels)
            ng.labels_overridden = overridden
            ng.labels_added = added
            ng.labels_skipped = skipped
        return ng
class Service(v1_base.K8sResourceBase):
    """API representation of a Kubernetes service resource.

    Converts between the internal object model (objects.Service) and the
    API representation, and can populate itself from a k8s manifest.
    """

    uuid = types.uuid
    """Unique UUID for this service"""

    selector = wsme.wsattr({wtypes.text: wtypes.text}, readonly=True)
    """Selector of this service"""

    ip = wtypes.text
    """IP of this service"""

    ports = wsme.wsattr([{wtypes.text: wtypes.IntegerType()}], readonly=True)
    """Port of this service"""

    links = wsme.wsattr([link.Link], readonly=True)
    """A list containing a self link and associated service links"""

    def __init__(self, **kwargs):
        super(Service, self).__init__()
        self.fields = []
        for field in objects.Service.fields:
            # Skip fields we do not expose.
            if not hasattr(self, field):
                continue
            self.fields.append(field)
            # Missing keys become wtypes.Unset so they are omitted from output.
            setattr(self, field, kwargs.get(field, wtypes.Unset))

    @staticmethod
    def _convert_with_links(service, url, expand=True):
        # The collapsed (list) view keeps only the identifying fields.
        if not expand:
            service.unset_fields_except([
                'uuid', 'name', 'bay_uuid', 'labels', 'selector', 'ip',
                'ports'
            ])
        service.links = [
            link.Link.make_link('self', url, 'services', service.uuid),
            link.Link.make_link('bookmark', url, 'services', service.uuid,
                                bookmark=True)
        ]
        return service

    @classmethod
    def convert_with_links(cls, rpc_service, expand=True):
        """Build an API Service (with links) from an RPC-layer service."""
        service = Service(**rpc_service.as_dict())
        return cls._convert_with_links(service, pecan.request.host_url,
                                       expand)

    @classmethod
    def sample(cls, expand=True):
        """Return a fully populated example used for API sample generation."""
        sample = cls(uuid='fe78db47-9a37-4e9f-8572-804a10abc0aa',
                     name='MyService',
                     bay_uuid='7ae81bb3-dec3-4289-8d6c-da80bd8001ae',
                     labels={'label1': 'foo'},
                     selector={'label1': 'foo'},
                     ip='172.17.2.2',
                     ports=[{
                         "port": 88,
                         "targetPort": 6379,
                         "protocol": "TCP"
                     }],
                     manifest_url='file:///tmp/rc.yaml',
                     manifest='''{
                         "metadata": {
                             "name": "test",
                             "labels": {
                                 "key": "value"
                             }
                         },
                         "spec": {
                             "ports": [
                                 {
                                     "port": 88,
                                     "targetPort": 6379,
                                     "protocol": "TCP"
                                 }
                             ],
                             "selector": {
                                 "bar": "foo"
                             }
                         }
                     }''',
                     created_at=datetime.datetime.utcnow(),
                     updated_at=datetime.datetime.utcnow())
        return cls._convert_with_links(sample, 'http://localhost:9511',
                                       expand)

    def parse_manifest(self):
        """Populate name/ports/selector/labels from the service manifest.

        :raises exception.InvalidParameterValue: if the manifest cannot be
            parsed or a required field is missing.
        """
        try:
            manifest = k8s_manifest.parse(self._get_manifest())
        except ValueError as e:
            raise exception.InvalidParameterValue(message=str(e))
        try:
            self.name = manifest["metadata"]["name"]
        except (KeyError, TypeError):
            raise exception.InvalidParameterValue(
                "Field metadata['name'] can't be empty in manifest.")
        try:
            # Copy the list so later manifest mutation cannot affect us.
            self.ports = manifest["spec"]["ports"][:]
        except (KeyError, TypeError):
            raise exception.InvalidParameterValue(
                "Field spec['ports'] can't be empty in manifest.")
        # selector and labels are optional in the manifest.
        if "selector" in manifest["spec"]:
            self.selector = manifest["spec"]["selector"]
        if "labels" in manifest["metadata"]:
            self.labels = manifest["metadata"]["labels"]
class MemberPUT(BaseMemberType):
    """Defines attributes that are acceptable of a PUT request."""

    # Display name; optional on update.
    name = wtypes.wsattr(wtypes.StringType(max_length=255))

    # Administrative up/down toggle.
    admin_state_up = wtypes.wsattr(bool)

    # Load-balancing weight, bounded by the project-wide limits.
    weight = wtypes.wsattr(
        wtypes.IntegerType(minimum=constants.MIN_WEIGHT,
                           maximum=constants.MAX_WEIGHT))
class AlarmThresholdRule(base.AlarmRule):
    """Alarm Threshold Rule

    Describe when to trigger the alarm based on computed statistics
    """

    meter_name = wsme.wsattr(wtypes.text, mandatory=True)
    "The name of the meter"

    # FIXME(sileht): default doesn't work
    # workaround: default is set in validate method
    query = wsme.wsattr([base.Query], default=[])
    """The query to find the data for computing statistics.
    Ownership settings are automatically included based on the Alarm owner.
    """

    period = wsme.wsattr(wtypes.IntegerType(minimum=1), default=60)
    "The time range in seconds over which query"

    comparison_operator = base.AdvEnum('comparison_operator', str,
                                       'lt', 'le', 'eq', 'ne', 'ge', 'gt',
                                       default='eq')
    "The comparison against the alarm threshold"

    threshold = wsme.wsattr(float, mandatory=True)
    "The threshold of the alarm"

    statistic = base.AdvEnum('statistic', str, 'max', 'min', 'avg', 'sum',
                             'count', default='avg')
    "The statistic to compare to the threshold"

    evaluation_periods = wsme.wsattr(wtypes.IntegerType(minimum=1), default=1)
    "The number of historical periods to evaluate the threshold"

    exclude_outliers = wsme.wsattr(bool, default=False)
    "Whether datapoints with anomalously low sample counts are excluded"

    def __init__(self, query=None, **kwargs):
        # Convert raw query dicts into Query objects before handing them
        # to the base rule constructor.
        if query:
            query = [base.Query(**q) for q in query]
        super(AlarmThresholdRule, self).__init__(query=query, **kwargs)

    @staticmethod
    def validate(threshold_rule):
        """Normalize the query default and validate its fields."""
        # note(sileht): wsme default doesn't work in some case
        # workaround for https://bugs.launchpad.net/wsme/+bug/1227039
        if not threshold_rule.query:
            threshold_rule.query = []

        # Timestamp is not allowed for AlarmThresholdRule query, as the alarm
        # evaluator will construct timestamp bounds for the sequence of
        # statistics queries as the sliding evaluation window advances
        # over time.
        v2_utils.validate_query(threshold_rule.query,
                                storage.SampleFilter.__init__,
                                allow_timestamps=False)
        return threshold_rule

    @staticmethod
    def validate_alarm(alarm):
        # ensure an implicit constraint on project_id is added to
        # the query if not already present
        alarm.threshold_rule.query = v2_utils.sanitize_query(
            alarm.threshold_rule.query,
            storage.SampleFilter.__init__,
            on_behalf_of=alarm.project_id)

    @property
    def default_description(self):
        # Human-readable summary used when the alarm has no description.
        return (_('Alarm when %(meter_name)s is %(comparison_operator)s a '
                  '%(statistic)s of %(threshold)s over %(period)s seconds') %
                dict(comparison_operator=self.comparison_operator,
                     statistic=self.statistic,
                     threshold=self.threshold,
                     meter_name=self.meter_name,
                     period=self.period))

    def as_dict(self):
        """Serialize scalar fields, expanding Query objects to dicts."""
        rule = self.as_dict_from_keys([
            'period', 'comparison_operator', 'threshold', 'statistic',
            'evaluation_periods', 'meter_name', 'exclude_outliers'
        ])
        rule['query'] = [q.as_dict() for q in self.query]
        return rule

    @classmethod
    def sample(cls):
        """Return an example rule used for API sample generation."""
        return cls(meter_name='cpu_util',
                   period=60,
                   evaluation_periods=1,
                   threshold=300.0,
                   statistic='avg',
                   comparison_operator='gt',
                   query=[{
                       'field': 'resource_id',
                       'value': '2a4d689b-f0b8-49c1-9eef-87cae58d80db',
                       'op': 'eq',
                       'type': 'string'
                   }])
class AlarmThresholdRule(base.AlarmRule):
    """Alarm Threshold Rule

    Describe when to trigger the alarm based on computed statistics
    """

    meter_name = wsme.wsattr(wtypes.text, mandatory=True)
    "The name of the meter"

    # FIXME(sileht): default doesn't work
    # workaround: default is set in validate method
    query = wsme.wsattr([base.Query], default=[])
    """The query to find the data for computing statistics.
    Ownership settings are automatically included based on the Alarm owner.
    """

    period = wsme.wsattr(wtypes.IntegerType(minimum=1), default=60)
    "The time range in seconds over which query"

    comparison_operator = base.AdvEnum('comparison_operator', str,
                                       'lt', 'le', 'eq', 'ne', 'ge', 'gt',
                                       default='eq')
    "The comparison against the alarm threshold"

    threshold = wsme.wsattr(float, mandatory=True)
    "The threshold of the alarm"

    statistic = base.AdvEnum('statistic', str, 'max', 'min', 'avg', 'sum',
                             'count', default='avg')
    "The statistic to compare to the threshold"

    evaluation_periods = wsme.wsattr(wtypes.IntegerType(minimum=1), default=1)
    "The number of historical periods to evaluate the threshold"

    exclude_outliers = wsme.wsattr(bool, default=False)
    "Whether datapoints with anomalously low sample counts are excluded"

    # Cached probe result: None = not checked yet, True/False afterwards.
    ceilometer_sample_api_is_supported = None

    def __init__(self, query=None, **kwargs):
        # Convert raw query dicts into Query objects before handing them
        # to the base rule constructor.
        query = [base.Query(**q) for q in query] if query else []
        super(AlarmThresholdRule, self).__init__(query=query, **kwargs)

    @classmethod
    def _check_ceilometer_sample_api(cls):
        """Probe (once) whether the Ceilometer sample API still exists.

        :raises base.ClientSideError: if the telemetry installation has
            removed the sample API (HTTP 410), so threshold alarms cannot
            be evaluated.
        """
        # Check it only once
        if cls.ceilometer_sample_api_is_supported is None:

            auth_config = pecan.request.cfg.service_credentials
            client = ceiloclient.get_client(
                version=2,
                session=keystone_client.get_session(pecan.request.cfg),
                # ceiloclient adapter options
                region_name=auth_config.region_name,
                interface=auth_config.interface,
            )
            try:
                client.statistics.list(
                    meter_name="idontthinkthatexistsbutwhatever")
            except Exception as e:
                if isinstance(e, ceiloexc.HTTPException):
                    # 410 Gone: the sample API has been removed.
                    if e.code == 410:
                        cls.ceilometer_sample_api_is_supported = False
                    elif e.code < 500:
                        # Any other client-side error still proves the
                        # endpoint exists.
                        cls.ceilometer_sample_api_is_supported = True
                    else:
                        raise
                else:
                    raise
            else:
                # I don't think this meter can exist but who knows
                cls.ceilometer_sample_api_is_supported = True

        if cls.ceilometer_sample_api_is_supported is False:
            # BUGFIX: the original message was missing the separating space
            # between the concatenated literals and the closing quote
            # around 'threshold'.
            raise base.ClientSideError(
                "This telemetry installation is not configured to support "
                "alarm of type 'threshold'")

    @staticmethod
    def validate(threshold_rule):
        """Normalize the query default and validate its fields."""
        # note(sileht): wsme default doesn't work in some case
        # workaround for https://bugs.launchpad.net/wsme/+bug/1227039
        if not threshold_rule.query:
            threshold_rule.query = []

        # Timestamp is not allowed for AlarmThresholdRule query, as the alarm
        # evaluator will construct timestamp bounds for the sequence of
        # statistics queries as the sliding evaluation window advances
        # over time.
        v2_utils.validate_query(threshold_rule.query,
                                storage.SampleFilter.__init__,
                                allow_timestamps=False)
        return threshold_rule

    @classmethod
    def validate_alarm(cls, alarm):
        """Verify the backend supports threshold alarms, then sanitize."""
        cls._check_ceilometer_sample_api()
        # ensure an implicit constraint on project_id is added to
        # the query if not already present
        alarm.threshold_rule.query = v2_utils.sanitize_query(
            alarm.threshold_rule.query,
            storage.SampleFilter.__init__,
            on_behalf_of=alarm.project_id
        )

    @property
    def default_description(self):
        # Human-readable summary used when the alarm has no description.
        return (_('Alarm when %(meter_name)s is %(comparison_operator)s a '
                  '%(statistic)s of %(threshold)s over %(period)s seconds') %
                dict(comparison_operator=self.comparison_operator,
                     statistic=self.statistic,
                     threshold=self.threshold,
                     meter_name=self.meter_name,
                     period=self.period))

    def as_dict(self):
        """Serialize scalar fields, expanding Query objects to dicts."""
        rule = self.as_dict_from_keys(['period', 'comparison_operator',
                                       'threshold', 'statistic',
                                       'evaluation_periods', 'meter_name',
                                       'exclude_outliers'])
        rule['query'] = [q.as_dict() for q in self.query]
        return rule

    @classmethod
    def sample(cls):
        """Return an example rule used for API sample generation."""
        return cls(meter_name='cpu_util',
                   period=60,
                   evaluation_periods=1,
                   threshold=300.0,
                   statistic='avg',
                   comparison_operator='gt',
                   query=[{'field': 'resource_id',
                           'value': '2a4d689b-f0b8-49c1-9eef-87cae58d80db',
                           'op': 'eq',
                           'type': 'string'}])
class Bay(base.APIBase):
    """API representation of a bay.

    This class enforces type checking and value constraints, and converts
    between the internal object model and the API representation of a bay.
    """

    _baymodel_id = None  # backing store for the baymodel_id wsproperty

    def _get_baymodel_id(self):
        return self._baymodel_id

    def _set_baymodel_id(self, value):
        if value and self._baymodel_id != value:
            try:
                # Resolve a user-supplied name or id to the canonical UUID.
                baymodel = api_utils.get_rpc_resource('BayModel', value)
                self._baymodel_id = baymodel.uuid
            except exception.BayModelNotFound as e:
                # Change error code because 404 (NotFound) is inappropriate
                # response for a POST request to create a Bay
                e.code = 400  # BadRequest
                raise e
        elif value == wtypes.Unset:
            self._baymodel_id = wtypes.Unset

    uuid = types.uuid
    """Unique UUID for this bay"""

    name = wtypes.StringType(min_length=1, max_length=255)
    """Name of this bay"""

    baymodel_id = wsme.wsproperty(wtypes.text, _get_baymodel_id,
                                  _set_baymodel_id, mandatory=True)
    """The bay model UUID or id"""

    node_count = wtypes.IntegerType(minimum=1)
    """The node count for this bay"""

    bay_create_timeout = wtypes.IntegerType(minimum=0)
    """Timeout for creating the bay in minutes. Set to 0 for no timeout."""

    links = wsme.wsattr([link.Link], readonly=True)
    """A list containing a self link and associated bay links"""

    status = wtypes.text
    """Status of the bay from the heat stack"""

    status_reason = wtypes.text
    """Status reason of the bay from the heat stack"""

    discovery_url = wtypes.text
    """Url used for bay node discovery"""

    api_address = wsme.wsattr(wtypes.text, readonly=True)
    """Api address of cluster master node"""

    node_addresses = wsme.wsattr([wtypes.text], readonly=True)
    """Ip addresses of cluster slave nodes"""

    def __init__(self, **kwargs):
        super(Bay, self).__init__()
        self.fields = []
        for field in objects.Bay.fields:
            # Skip fields we do not expose.
            if not hasattr(self, field):
                continue
            self.fields.append(field)
            # Missing keys become wtypes.Unset so they are omitted from output.
            setattr(self, field, kwargs.get(field, wtypes.Unset))

    @staticmethod
    def _convert_with_links(bay, url, expand=True):
        # The collapsed (list) view keeps only the identifying fields.
        if not expand:
            bay.unset_fields_except(['uuid', 'name', 'baymodel_id',
                                     'node_count', 'status',
                                     'bay_create_timeout'])

        bay.links = [link.Link.make_link('self', url,
                                         'bays', bay.uuid),
                     link.Link.make_link('bookmark', url,
                                         'bays', bay.uuid,
                                         bookmark=True)]
        return bay

    @classmethod
    def convert_with_links(cls, rpc_bay, expand=True):
        """Build an API Bay (with links) from an RPC-layer bay."""
        bay = Bay(**rpc_bay.as_dict())
        return cls._convert_with_links(bay, pecan.request.host_url, expand)

    @classmethod
    def sample(cls, expand=True):
        """Return a fully populated example used for API sample generation."""
        sample = cls(uuid='27e3153e-d5bf-4b7e-b517-fb518e17f34c',
                     name='example',
                     baymodel_id='4a96ac4b-2447-43f1-8ca6-9fd6f36d146d',
                     node_count=2,
                     bay_create_timeout=15,
                     status="CREATE_COMPLETE",
                     status_reason="CREATE completed successfully",
                     api_address='172.24.4.3',
                     node_addresses=['172.24.4.4', '172.24.4.5'],
                     created_at=datetime.datetime.utcnow(),
                     updated_at=datetime.datetime.utcnow())
        return cls._convert_with_links(sample, 'http://localhost:9511',
                                       expand)
class ScaleInfo(Resource):
    """Request body describing a scale operation."""

    # Target count for the scale action; must be at least one.
    count = wtypes.IntegerType(minimum=1)
class ReplicationController(v1_base.K8sResourceBase):
    """API representation of a ReplicationController.

    This class enforces type checking and value constraints, and converts
    between the internal object model and the API representation of a
    ReplicationController.
    """

    uuid = types.uuid
    """Unique UUID for this ReplicationController"""

    images = [wtypes.text]
    """A list of images used by containers in this ReplicationController."""

    replicas = wsme.wsattr(wtypes.IntegerType(), readonly=True)
    """Replicas of this ReplicationController"""

    links = wsme.wsattr([link.Link], readonly=True)
    """A list containing a self link and associated rc links"""

    def __init__(self, **kwargs):
        super(ReplicationController, self).__init__()
        self.fields = []
        for field in objects.ReplicationController.fields:
            # Skip fields we do not expose.
            if not hasattr(self, field):
                continue
            self.fields.append(field)
            # Missing keys become wtypes.Unset so they are omitted from output.
            setattr(self, field, kwargs.get(field, wtypes.Unset))

    @staticmethod
    def _convert_with_links(rc, url, expand=True):
        # The collapsed (list) view keeps only the identifying fields.
        if not expand:
            rc.unset_fields_except(['uuid', 'name', 'images', 'bay_uuid',
                                    'labels', 'replicas'])

        rc.links = [link.Link.make_link('self', url,
                                        'rcs', rc.uuid),
                    link.Link.make_link('bookmark', url,
                                        'rcs', rc.uuid,
                                        bookmark=True)]
        return rc

    @classmethod
    def convert_with_links(cls, rpc_rc, expand=True):
        """Build an API ReplicationController (with links) from an RPC rc."""
        rc = ReplicationController(**rpc_rc.as_dict())
        return cls._convert_with_links(rc, pecan.request.host_url, expand)

    @classmethod
    def sample(cls, expand=True):
        """Return a fully populated example used for API sample generation."""
        sample = cls(uuid='f978db47-9a37-4e9f-8572-804a10abc0aa',
                     name='MyReplicationController',
                     images=['MyImage'],
                     bay_uuid='f978db47-9a37-4e9f-8572-804a10abc0ab',
                     labels={'name': 'foo'},
                     replicas=2,
                     manifest_url='file:///tmp/rc.yaml',
                     manifest='''{
                         "metadata": {
                             "name": "name_of_rc"
                         },
                         "spec":{
                             "replicas":2,
                             "selector":{
                                 "name":"frontend"
                             },
                             "template":{
                                 "metadata":{
                                     "labels":{
                                         "name":"frontend"
                                     }
                                 },
                                 "spec":{
                                     "containers":[
                                         {
                                             "name":"test-redis",
                                             "image":"steak/for-dinner",
                                             "ports":[
                                                 {
                                                     "containerPort":80,
                                                     "protocol":"TCP"
                                                 }
                                             ]
                                         }
                                     ]
                                 }
                             }
                         }
                     }''',
                     created_at=timeutils.utcnow(),
                     updated_at=timeutils.utcnow())
        return cls._convert_with_links(sample, 'http://localhost:9511',
                                       expand)

    def parse_manifest(self):
        """Populate name/replicas/selector/labels/images from the manifest.

        :raises exception.InvalidParameterValue: if the manifest cannot be
            parsed or a required field is missing.
        """
        try:
            manifest = k8s_manifest.parse(self._get_manifest())
        except ValueError as e:
            raise exception.InvalidParameterValue(message=str(e))
        try:
            self.name = manifest["metadata"]["name"]
        except (KeyError, TypeError):
            raise exception.InvalidParameterValue(
                _("Field metadata['name'] can't be empty in manifest."))
        try:
            self.replicas = manifest["spec"]["replicas"]
        except (KeyError, TypeError):
            # replicas is optional in the manifest; leave the default.
            pass
        try:
            self.selector = manifest["spec"]["selector"]
        except (KeyError, TypeError):
            raise exception.InvalidParameterValue(
                _("Field spec['selector'] can't be empty in manifest."))
        try:
            self.labels = manifest["spec"]["template"]["metadata"]["labels"]
        except (KeyError, TypeError):
            raise exception.InvalidParameterValue(_(
                "Field spec['template']['metadata']['labels'] "
                "can't be empty in manifest."))
        try:
            # Collect the image of every container in the pod template.
            images = []
            for cont in manifest["spec"]["template"]["spec"]["containers"]:
                images.append(cont["image"])
            self.images = images
        except (KeyError, TypeError):
            raise exception.InvalidParameterValue(_(
                "Field spec['template']['spec']['containers'] "
                "can't be empty in manifest."))
class ListenerStatisticsResponse(base.BaseType):
    """Defines which attributes are to be shown on any response."""

    # Traffic counters, in bytes.
    bytes_in = wtypes.wsattr(wtypes.IntegerType())
    bytes_out = wtypes.wsattr(wtypes.IntegerType())

    # Connection counters.
    active_connections = wtypes.wsattr(wtypes.IntegerType())
    total_connections = wtypes.wsattr(wtypes.IntegerType())
class BayModel(base.APIBase):
    """API representation of a Baymodel.

    This class enforces type checking and value constraints, and converts
    between the internal object model and the API representation of
    a Baymodel.
    """

    uuid = types.uuid
    """Unique UUID for this Baymodel"""

    name = wtypes.StringType(min_length=1, max_length=255)
    """The name of the Baymodel"""

    coe = wtypes.Enum(str, *fields.ClusterType.ALL, mandatory=True)
    """The Container Orchestration Engine for this bay model"""

    image_id = wsme.wsattr(wtypes.StringType(min_length=1, max_length=255),
                           mandatory=True)
    """The image name or UUID to use as a base image for this Baymodel"""

    flavor_id = wtypes.StringType(min_length=1, max_length=255)
    """The flavor of this Baymodel"""

    master_flavor_id = wtypes.StringType(min_length=1, max_length=255)
    """The flavor of the master node for this Baymodel"""

    dns_nameserver = wtypes.IPv4AddressType()
    """The DNS nameserver address"""

    keypair_id = wsme.wsattr(wtypes.StringType(min_length=1, max_length=255),
                             mandatory=True)
    """The name of the nova ssh keypair"""

    external_network_id = wtypes.StringType(min_length=1, max_length=255)
    """The external network to attach to the Bay"""

    fixed_network = wtypes.StringType(min_length=1, max_length=255)
    """The fixed network name to attach to the Bay"""

    fixed_subnet = wtypes.StringType(min_length=1, max_length=255)
    """The fixed subnet name to attach to the Bay"""

    network_driver = wtypes.StringType(min_length=1, max_length=255)
    """The name of the driver used for instantiating container networks"""

    apiserver_port = wtypes.IntegerType(minimum=1024, maximum=65535)
    """The API server port for k8s"""

    docker_volume_size = wtypes.IntegerType(minimum=1)
    """The size in GB of the docker volume"""

    cluster_distro = wtypes.StringType(min_length=1, max_length=255)
    """The Cluster distro for the bay, e.g. coreos, fedora-atomic, etc."""

    links = wsme.wsattr([link.Link], readonly=True)
    """A list containing a self link and associated Baymodel links"""

    http_proxy = wtypes.StringType(min_length=1, max_length=255)
    """Address of a proxy that will receive all HTTP requests and relay them.
    The format is a URL including a port number.
    """

    https_proxy = wtypes.StringType(min_length=1, max_length=255)
    """Address of a proxy that will receive all HTTPS requests and relay them.
    The format is a URL including a port number.
    """

    no_proxy = wtypes.StringType(min_length=1, max_length=255)
    """A comma separated list of IPs for which proxies should not be
    used in the bay
    """

    volume_driver = wtypes.StringType(min_length=1, max_length=255)
    """The name of the driver used for instantiating container volumes"""

    registry_enabled = wsme.wsattr(types.boolean, default=False)
    """Indicates whether the docker registry is enabled"""

    labels = wtypes.DictType(str, str)
    """One or more key/value pairs"""

    tls_disabled = wsme.wsattr(types.boolean, default=False)
    """Indicates whether TLS should be disabled"""

    public = wsme.wsattr(types.boolean, default=False)
    """Indicates whether the Baymodel is public or not."""

    server_type = wsme.wsattr(wtypes.Enum(str, *fields.ServerType.ALL),
                              default='vm')
    """Server type for this bay model"""

    insecure_registry = wtypes.StringType(min_length=1, max_length=255)
    """Insecure registry URL when creating a Baymodel"""

    docker_storage_driver = wtypes.StringType(min_length=1, max_length=255)
    """Docker storage driver"""

    master_lb_enabled = wsme.wsattr(types.boolean, default=False)
    """Indicates whether created bays should have a load balancer for master
    nodes or not.
    """

    floating_ip_enabled = wsme.wsattr(types.boolean, default=True)
    """Indicates whether created bays should have a floating ip or not."""

    def __init__(self, **kwargs):
        # NOTE(review): unlike Bay, this does not call super().__init__();
        # confirm base.APIBase requires no initialization here.
        self.fields = []
        for field in objects.ClusterTemplate.fields:
            # Skip fields we do not expose.
            if not hasattr(self, field):
                continue
            self.fields.append(field)
            # Missing keys become wtypes.Unset so they are omitted from output.
            setattr(self, field, kwargs.get(field, wtypes.Unset))

    @staticmethod
    def _convert_with_links(baymodel, url):
        baymodel.links = [link.Link.make_link('self', url,
                                              'baymodels', baymodel.uuid),
                          link.Link.make_link('bookmark', url,
                                              'baymodels', baymodel.uuid,
                                              bookmark=True)]
        return baymodel

    @classmethod
    def convert_with_links(cls, rpc_baymodel):
        """Build an API BayModel (with links) from an RPC-layer baymodel."""
        baymodel = BayModel(**rpc_baymodel.as_dict())
        return cls._convert_with_links(baymodel, pecan.request.host_url)

    @classmethod
    def sample(cls):
        """Return a fully populated example used for API sample generation."""
        sample = cls(
            uuid='27e3153e-d5bf-4b7e-b517-fb518e17f34c',
            name='example',
            image_id='Fedora-k8s',
            flavor_id='m1.small',
            master_flavor_id='m1.small',
            dns_nameserver='8.8.1.1',
            keypair_id='keypair1',
            external_network_id='ffc44e4a-2319-4062-bce0-9ae1c38b05ba',
            fixed_network='private',
            fixed_subnet='private-subnet',
            network_driver='libnetwork',
            volume_driver='cinder',
            apiserver_port=8080,
            docker_volume_size=25,
            docker_storage_driver='devicemapper',
            cluster_distro='fedora-atomic',
            coe=fields.ClusterType.KUBERNETES,
            http_proxy='http://proxy.com:123',
            https_proxy='https://proxy.com:123',
            no_proxy='192.168.0.1,192.168.0.2,192.168.0.3',
            labels={'key1': 'val1', 'key2': 'val2'},
            server_type='vm',
            insecure_registry='10.238.100.100:5000',
            created_at=timeutils.utcnow(),
            updated_at=timeutils.utcnow(),
            public=False,
            master_lb_enabled=False,
            floating_ip_enabled=True,
        )
        return cls._convert_with_links(sample, 'http://localhost:9511')
class AuditTemplate(base.APIBase):
    """API representation of a audit template.

    This class enforces type checking and value constraints, and converts
    between the internal object model and the API representation of
    an audit template.
    """

    uuid = types.uuid
    """Unique UUID for this audit template"""

    name = wtypes.text
    """Name of this audit template"""

    description = wtypes.text
    """Short description of this audit template"""

    deadline = datetime.datetime
    """deadline of the audit template"""

    host_aggregate = wtypes.IntegerType(minimum=1)
    """ID of the Nova host aggregate targeted by the audit template"""

    extra = {wtypes.text: types.jsontype}
    """The metadata of the audit template"""

    goal = wtypes.text
    """Goal type of the audit template"""

    version = wtypes.text
    """Internal version of the audit template"""

    audits = wsme.wsattr([link.Link], readonly=True)
    """Links to the collection of audits contained in this audit template"""

    links = wsme.wsattr([link.Link], readonly=True)
    """A list containing a self link and associated audit template links"""

    def __init__(self, **kwargs):
        super(AuditTemplate, self).__init__()
        self.fields = []
        for field in objects.AuditTemplate.fields:
            # Skip fields we do not expose.
            if not hasattr(self, field):
                continue
            self.fields.append(field)
            # Missing keys become wtypes.Unset so they are omitted from output.
            setattr(self, field, kwargs.get(field, wtypes.Unset))

    @staticmethod
    def _convert_with_links(audit_template, url, expand=True):
        # The collapsed (list) view keeps only the identifying fields.
        if not expand:
            audit_template.unset_fields_except(['uuid', 'name',
                                                'host_aggregate', 'goal'])

        audit_template.links = [link.Link.make_link('self', url,
                                                    'audit_templates',
                                                    audit_template.uuid),
                                link.Link.make_link('bookmark', url,
                                                    'audit_templates',
                                                    audit_template.uuid,
                                                    bookmark=True)
                                ]
        return audit_template

    @classmethod
    def convert_with_links(cls, rpc_audit_template, expand=True):
        """Build an API AuditTemplate (with links) from an RPC template."""
        audit_template = AuditTemplate(**rpc_audit_template.as_dict())
        return cls._convert_with_links(audit_template,
                                       pecan.request.host_url, expand)

    @classmethod
    def sample(cls, expand=True):
        """Return a fully populated example used for API sample generation."""
        sample = cls(uuid='27e3153e-d5bf-4b7e-b517-fb518e17f34c',
                     name='My Audit Template',
                     description='Description of my audit template',
                     host_aggregate=5,
                     goal='DUMMY',
                     extra={'automatic': True},
                     created_at=datetime.datetime.utcnow(),
                     deleted_at=None,
                     updated_at=datetime.datetime.utcnow())
        return cls._convert_with_links(sample, 'http://localhost:9322',
                                       expand)
            return strutils.bool_from_string(value, strict=True)
        except ValueError as e:
            # raise Invalid to return 400 (BadRequest) in the API
            raise exception.Invalid(e)

    @staticmethod
    def frombasetype(value):
        # Pass None through untouched; otherwise validate/coerce to bool.
        if value is None:
            return None
        return BooleanType.validate(value)


# Module-level singletons used as wsme attribute types throughout the API.
uuid = UUIDType()
jsontype = JsonType()
boolean = BooleanType()
integer = wtypes.IntegerType()


class JsonPatchType(wtypes.Base):
    """A complex type that represents a single json-patch operation."""

    # RFC 6902 pointer, restricted to simple /segment/segment paths.
    path = wtypes.wsattr(wtypes.StringType(pattern=r'^(/[\w-]+)+$'),
                         mandatory=True)
    op = wtypes.wsattr(wtypes.Enum(str, 'add', 'replace', 'remove'),
                       mandatory=True)
    # Operand; optional because 'remove' takes no value.
    value = wtypes.wsattr(jsontype, default=wtypes.Unset)

    # The class of the objects being patched. Override this in subclasses.
    # Should probably be a subclass of cyborg.api.controllers.base.APIBase.
    _api_base = None
class ListenerStatisticsResponse(base.BaseType):
    """Statistics counters reported for a single listener."""

    # Traffic counters, in bytes.
    bytes_in = wtypes.wsattr(wtypes.IntegerType())
    bytes_out = wtypes.wsattr(wtypes.IntegerType())

    # Connection counters.
    active_connections = wtypes.wsattr(wtypes.IntegerType())
    total_connections = wtypes.wsattr(wtypes.IntegerType())

    # Number of requests that ended in an error.
    request_errors = wtypes.wsattr(wtypes.IntegerType())
class CronTriggersController(rest.RestController):
    """REST controller for cron trigger resources."""

    @rest_utils.wrap_wsme_controller_exception
    @wsme_pecan.wsexpose(resources.CronTrigger, wtypes.text)
    def get(self, identifier):
        """Returns the named cron_trigger.

        :param identifier: Id or name of cron trigger to retrieve
        """
        acl.enforce('cron_triggers:get', context.ctx())

        LOG.debug('Fetch cron trigger [identifier=%s]', identifier)

        # Use retries to prevent possible failures.
        r = rest_utils.create_db_retry_object()
        db_model = r.call(db_api.get_cron_trigger, identifier)

        return resources.CronTrigger.from_db_model(db_model)

    @rest_utils.wrap_wsme_controller_exception
    @wsme_pecan.wsexpose(resources.CronTrigger, body=resources.CronTrigger,
                         status_code=201)
    def post(self, cron_trigger):
        """Creates a new cron trigger.

        :param cron_trigger: Required. Cron trigger structure.
        """
        acl.enforce('cron_triggers:create', context.ctx())

        LOG.debug('Create cron trigger: %s', cron_trigger)

        values = cron_trigger.to_dict()

        db_model = triggers.create_cron_trigger(
            values['name'],
            values.get('workflow_name'),
            values.get('workflow_input'),
            values.get('workflow_params'),
            values.get('pattern'),
            values.get('first_execution_time'),
            values.get('remaining_executions'),
            workflow_id=values.get('workflow_id'))

        return resources.CronTrigger.from_db_model(db_model)

    @rest_utils.wrap_wsme_controller_exception
    @wsme_pecan.wsexpose(None, wtypes.text, status_code=204)
    def delete(self, identifier):
        """Delete cron trigger.

        :param identifier: Id or name of cron trigger to delete
        """
        acl.enforce('cron_triggers:delete', context.ctx())

        LOG.debug("Delete cron trigger [identifier=%s]", identifier)

        triggers.delete_cron_trigger(identifier)

    @rest_utils.wrap_wsme_controller_exception
    @wsme_pecan.wsexpose(resources.CronTriggers, types.uuid, int,
                         types.uniquelist, types.list, types.uniquelist,
                         wtypes.text, wtypes.text, types.uuid,
                         types.jsontype, types.jsontype,
                         resources.SCOPE_TYPES, wtypes.text,
                         wtypes.IntegerType(minimum=1), wtypes.text,
                         wtypes.text, wtypes.text, wtypes.text, types.uuid,
                         bool)
    def get_all(self, marker=None, limit=None, sort_keys='created_at',
                sort_dirs='asc', fields='', name=None, workflow_name=None,
                workflow_id=None, workflow_input=None, workflow_params=None,
                scope=None, pattern=None, remaining_executions=None,
                first_execution_time=None, next_execution_time=None,
                created_at=None, updated_at=None, project_id=None,
                all_projects=False):
        """Return all cron triggers.

        :param marker: Optional. Pagination marker for large data sets.
        :param limit: Optional. Maximum number of resources to return in a
                      single result. Default value is None for backward
                      compatibility.
        :param sort_keys: Optional. Columns to sort results by.
                          Default: created_at, which is backward compatible.
        :param sort_dirs: Optional. Directions to sort corresponding to
                          sort_keys, "asc" or "desc" can be chosen.
                          Default: asc. The length of sort_dirs can be equal
                          or less than that of sort_keys.
        :param fields: Optional. A specified list of fields of the resource
                       to be returned. 'id' will be included automatically
                       in fields if it's provided, since it will be used when
                       constructing 'next' link.
        :param name: Optional. Keep only resources with a specific name.
        :param workflow_name: Optional. Keep only resources with a specific
                              workflow name.
        :param workflow_id: Optional. Keep only resources with a specific
                            workflow ID.
        :param workflow_input: Optional. Keep only resources with a specific
                               workflow input.
        :param workflow_params: Optional. Keep only resources with specific
                                workflow parameters.
        :param scope: Optional. Keep only resources with a specific scope.
        :param pattern: Optional. Keep only resources with a specific pattern.
        :param remaining_executions: Optional. Keep only resources with a
                                     specific number of remaining executions.
        :param project_id: Optional. Keep only resources with the specific
                           project id.
        :param first_execution_time: Optional. Keep only resources with a
                                     specific time and date of first
                                     execution.
        :param next_execution_time: Optional. Keep only resources with a
                                    specific time and date of next execution.
        :param created_at: Optional. Keep only resources created at a
                           specific time and date.
        :param updated_at: Optional. Keep only resources with specific latest
                           update time and date.
        :param all_projects: Optional. Get resources of all projects.
        """
        acl.enforce('cron_triggers:list', context.ctx())

        # Listing across projects requires its own, stricter policy.
        if all_projects:
            acl.enforce('cron_triggers:list:all_projects', context.ctx())

        filters = filter_utils.create_filters_from_request_params(
            created_at=created_at,
            name=name,
            updated_at=updated_at,
            workflow_name=workflow_name,
            workflow_id=workflow_id,
            workflow_input=workflow_input,
            workflow_params=workflow_params,
            scope=scope,
            pattern=pattern,
            remaining_executions=remaining_executions,
            first_execution_time=first_execution_time,
            next_execution_time=next_execution_time,
            project_id=project_id,
        )

        LOG.debug(
            "Fetch cron triggers. marker=%s, limit=%s, sort_keys=%s, "
            "sort_dirs=%s, filters=%s, all_projects=%s", marker, limit,
            sort_keys, sort_dirs, filters, all_projects)

        return rest_utils.get_all(resources.CronTriggers,
                                  resources.CronTrigger,
                                  db_api.get_cron_triggers,
                                  db_api.get_cron_trigger,
                                  marker=marker,
                                  limit=limit,
                                  sort_keys=sort_keys,
                                  sort_dirs=sort_dirs,
                                  fields=fields,
                                  all_projects=all_projects,
                                  **filters)
class MultiType(wtypes.UserType):
    """A complex type that represents one or more types.

    Used for validating that a value is an instance of one of the types.

    :param *types: Variable-length list of types.
    """

    def __init__(self, *types):
        self.types = types

    def __str__(self):
        return ' | '.join(map(str, self.types))

    def validate(self, value):
        """Return the first successful conversion; raise if none match."""
        for candidate in self.types:
            try:
                return wtypes.validate_value(candidate, value)
            except (ValueError, TypeError):
                continue
        # No candidate type accepted the value.
        raise ValueError(
            _("Wrong type. Expected '%(type)s', got '%(value)s'") % {
                'type': self.types,
                'value': type(value)
            })


# Valid TCP/UDP port number range.
PortType = wtypes.IntegerType(minimum=1, maximum=65535)
class MemberPUT(base.BaseType):
    """Defines attributes that are acceptable of a PUT request."""

    # Port the member accepts traffic on.
    protocol_port = wtypes.wsattr(wtypes.IntegerType())

    # Administrative up/down toggle.
    enabled = wtypes.wsattr(bool)

    # Relative load-balancing weight.
    weight = wtypes.wsattr(wtypes.IntegerType())
class BayModel(base.APIBase):
    """API representation of a baymodel.

    This class enforces type checking and value constraints, and converts
    between the internal object model and the API representation of a
    baymodel.
    """

    _coe = None

    def _get_coe(self):
        return self._coe

    def _set_coe(self, value):
        # Store only a real, changed value; an explicit Unset clears the
        # attribute back to "not provided".
        if value and self._coe != value:
            self._coe = value
        elif value == wtypes.Unset:
            self._coe = wtypes.Unset

    uuid = types.uuid
    """Unique UUID for this baymodel"""

    name = wtypes.StringType(min_length=1, max_length=255)
    """The name of the bay model"""

    coe = wsme.wsproperty(wtypes.text, _get_coe, _set_coe, mandatory=True)
    """The Container Orchestration Engine for this bay model"""

    image_id = wsme.wsattr(wtypes.StringType(min_length=1, max_length=255),
                           mandatory=True)
    """The image name or UUID to use as a base image for this baymodel"""

    flavor_id = wtypes.StringType(min_length=1, max_length=255)
    """The flavor of this bay model"""

    master_flavor_id = wtypes.StringType(min_length=1, max_length=255)
    """The flavor of the master node for this bay model"""

    dns_nameserver = wtypes.IPv4AddressType()
    """The DNS nameserver address"""

    keypair_id = wsme.wsattr(wtypes.StringType(min_length=1, max_length=255),
                             mandatory=True)
    """The name or id of the nova ssh keypair"""

    external_network_id = wtypes.StringType(min_length=1, max_length=255)
    """The external network to attach the Bay"""

    fixed_network = wtypes.StringType(min_length=1, max_length=255)
    """The fixed network name to attach the Bay"""

    network_driver = wtypes.StringType(min_length=1, max_length=255)
    """The name of the driver used for instantiating container networks"""

    apiserver_port = wtypes.IntegerType(minimum=1024, maximum=65535)
    """The API server port for k8s"""

    docker_volume_size = wtypes.IntegerType(minimum=1)
    """The size in GB of the docker volume"""

    ssh_authorized_key = wtypes.StringType(min_length=1)
    """The SSH Authorized Key"""

    cluster_distro = wtypes.StringType(min_length=1, max_length=255)
    """The Cluster distro for the bay, ex - coreos, fedora-atomic."""

    links = wsme.wsattr([link.Link], readonly=True)
    """A list containing a self link and associated baymodel links"""

    http_proxy = wtypes.StringType(min_length=1, max_length=255)
    """http_proxy for the bay """

    https_proxy = wtypes.StringType(min_length=1, max_length=255)
    """https_proxy for the bay """

    no_proxy = wtypes.StringType(min_length=1, max_length=255)
    """Its comma separated list of ip for which proxies should not
    used in the bay"""

    registry_enabled = wsme.wsattr(types.boolean, default=False)
    """Indicates whether the docker registry is enabled"""

    labels = wtypes.DictType(str, str)
    """One or more key/value pairs"""

    tls_disabled = wsme.wsattr(types.boolean, default=False)
    """Indicates whether the TLS should be disabled"""

    public = wsme.wsattr(types.boolean, default=False)
    """Indicates whether the baymodel is public or not."""

    def __init__(self, **kwargs):
        # Mirror only the object-model fields this API type actually
        # declares; anything not supplied by the caller stays Unset.
        self.fields = []
        for field in objects.BayModel.fields:
            # Skip fields we do not expose.
            if not hasattr(self, field):
                continue
            self.fields.append(field)
            setattr(self, field, kwargs.get(field, wtypes.Unset))

    @staticmethod
    def _convert_with_links(baymodel, url, expand=True):
        """Attach self/bookmark links, trimming fields when not expanded."""
        if not expand:
            # Collapsed (list-view) representation keeps only summary fields.
            baymodel.unset_fields_except(
                ['uuid', 'name', 'image_id', 'apiserver_port', 'coe'])
        baymodel.links = [link.Link.make_link('self', url,
                                              'baymodels', baymodel.uuid),
                          link.Link.make_link('bookmark', url,
                                              'baymodels', baymodel.uuid,
                                              bookmark=True)]
        return baymodel

    @classmethod
    def convert_with_links(cls, rpc_baymodel, expand=True):
        """Build an API BayModel from an RPC object, with links attached."""
        baymodel = BayModel(**rpc_baymodel.as_dict())
        return cls._convert_with_links(baymodel, pecan.request.host_url,
                                       expand)

    @classmethod
    def sample(cls, expand=True):
        """Return a fully-populated sample baymodel for API documentation."""
        # NOTE: no trailing comma after this constructor call — the
        # original had ``cls(...),`` which bound ``sample`` to a
        # one-element tuple and would make _convert_with_links blow up
        # on tuple.unset_fields_except.
        sample = cls(
            uuid='27e3153e-d5bf-4b7e-b517-fb518e17f34c',
            name='example',
            image_id='Fedora-k8s',
            flavor_id='m1.small',
            master_flavor_id='m1.small',
            dns_nameserver='8.8.1.1',
            keypair_id='keypair1',
            external_network_id='ffc44e4a-2319-4062-bce0-9ae1c38b05ba',
            fixed_network='private',
            network_driver='libnetwork',
            apiserver_port=8080,
            docker_volume_size=25,
            cluster_distro='fedora-atomic',
            ssh_authorized_key='ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAB',
            coe='kubernetes',
            http_proxy='http://proxy.com:123',
            https_proxy='https://proxy.com:123',
            no_proxy='192.168.0.1,192.168.0.2,192.168.0.3',
            labels={'key1': 'val1', 'key2': 'val2'},
            created_at=datetime.datetime.utcnow(),
            updated_at=datetime.datetime.utcnow(),
            public=False)
        return cls._convert_with_links(sample, 'http://localhost:9511',
                                       expand)