# GitHub user schema expressed with voluptuous validators.
from voluptuous import Required, Url, Range, All

# Validation rules for a GitHub API user object.  Every key is required;
# URL-valued fields are checked with Url() and the numeric id must be >= 1.
# (Url() with no argument is the voluptuous validator-factory idiom.)
USER_SCHEMA = {
    Required("login"): str,
    Required("id"): All(int, Range(min=1)),
    Required("avatar_url"): Url(),
    Required("gravatar_id"): str,
    Required("url"): Url(),
    Required("html_url"): Url(),
    Required("followers_url"): Url(),
    Required("following_url"): Url(),
    Required("gists_url"): Url(),
    Required("starred_url"): Url(),
    Required("subscriptions_url"): Url(),
    Required("organizations_url"): Url(),
    Required("repos_url"): Url(),
    Required("events_url"): Url(),
    Required("received_events_url"): Url(),
    Required("type"): str,
    Required("site_admin"): bool
}
def __init__(self, configfiles):
    """Read INI-style `configfiles` and build per-section validation schemas.

    The parsed configuration is stored in `self.conf` as a nested dict
    mapping section name -> {option: value}.
    """
    self.configfiles = configfiles
    configparser = RawConfigParser()
    # read() returns the list of files successfully parsed; the result is
    # currently unused.
    config_tmp = configparser.read(self.configfiles)
    self.conf = dict()
    for section in configparser.sections():
        self.conf[section] = dict(configparser.items(section))
    #self.conf = ConfigObj(self.configfile, interpolation=False)

    # Custom validator: expand `~` and verify the path exists, returning the
    # expanded path.  NOTE(review): expanduser is applied twice — harmless,
    # but presumably one of the calls was meant to be expandvars; confirm.
    @message("file could not be found")
    def check_file(v):
        f = os.path.expanduser(os.path.expanduser(v))
        if os.path.exists(f):
            return f
        else:
            raise Invalid("file could not be found `%s`" % v)

    # Custom validator: accept only nova API versions supported by the
    # installed novaclient (import is deferred so novaclient stays optional).
    @message("Unsupported nova API version")
    def nova_api_version(version):
        try:
            from novaclient import client, exceptions
            client.get_client_class(version)
            return version
        except exceptions.UnsupportedVersion as ex:
            raise Invalid("Invalid option for `nova_api_version`: %s" % ex)

    # Per-section voluptuous schemas; "cloud" mixes keys of all three
    # supported providers (extra=True keeps unknown options).
    self.schemas = {
        "storage": Schema({
            Optional("storage_path"): All(str),
            Optional("storage_type"): Any('yaml', 'json', 'pickle'),
        }),
        "cloud": Schema(
            {
                "provider": Any('ec2_boto', 'google', 'openstack'),
                "ec2_url": Url(str),
                "ec2_access_key": All(str, Length(min=1)),
                "ec2_secret_key": All(str, Length(min=1)),
                "ec2_region": All(str, Length(min=1)),
                "auth_url": All(str, Length(min=1)),
                "username": All(str, Length(min=1)),
                "password": All(str, Length(min=1)),
                "tenant_name": All(str, Length(min=1)),
                Optional("region_name"): All(str, Length(min=1)),
                "gce_project_id": All(str, Length(min=1)),
                "gce_client_id": All(str, Length(min=1)),
                "gce_client_secret": All(str, Length(min=1)),
                "nova_client_api": nova_api_version()
            }, extra=True),
        "cluster": Schema(
            {
                "cloud": All(str, Length(min=1)),
                "setup_provider": All(str, Length(min=1)),
                "login": All(str, Length(min=1)),
            }, required=True, extra=True),
        "setup": Schema({
            "provider": All(str, Length(min=1)),
        }, required=True, extra=True),
        "login": Schema(
            {
                "image_user": All(str, Length(min=1)),
                "image_user_sudo": All(str, Length(min=1)),
                "image_sudo": Boolean(str),
                "user_key_name": All(str, Length(min=1)),
                "user_key_private": check_file(),
                "user_key_public": check_file()
            }, required=True)
    }
def test_url_validation():
    """A well-formed URL passes Url() validation and is returned unchanged."""
    schema = Schema({"url": Url()})
    out_ = schema({"url": "http://example.com/"})
    # BUG FIX: the original `assert 'http://example.com/', out_.get("url")`
    # asserted a non-empty string literal (always true) and used the
    # validated value as the assert *message*, so the test could never fail.
    # Compare the validated value against the expected URL instead.
    assert out_.get("url") == "http://example.com/"
import yaml
import gzip
import lzma
from voluptuous import Schema, Required, All, Length, Match, Url
from optparse import OptionParser
import multiprocessing as mp

# DEP-11 header document: 'File' must literally equal "DEP-11", 'Version'
# must look like a dotted version number, and 'MediaBaseUrl' must be a URL.
schema_header = Schema({
    Required('File'): All(str, 'DEP-11', msg='Must be "DEP-11"'),
    Required('Origin'): All(str, Length(min=1)),
    Required('Version'): All(str, Match(r'(\d+\.?)+$'), msg='Must be a valid version number'),
    Required('MediaBaseUrl'): All(str, Url()),
    'Time': All(str),
    'Priority': All(int),
})

# Localized string mapping: an unlocalized 'C' key is mandatory; any other
# (locale) key must map to a non-empty string.
schema_translated = Schema(
    {
        Required('C'): All(str, Length(min=1), msg='Must have an unlocalized \'C\' key'),
        dict: All(str, Length(min=1)),
    },
    extra=True)
"""A GitHub user schema.""" from voluptuous import Required from voluptuous import Url # pylint: disable=no-value-for-parameter LICENSE_SCHEMA = { Required("key"): str, Required("name"): str, Required("spdx_id"): str, Required("url"): Url() }
'version': str, 'revision': str, 'build_number': int, 'release_eta': Any(None, Datetime(format='%Y-%m-%dT%H:%M:%S.%f')), Extra: object, }, 'signing': { 'signature': str, }, Extra: object, }, Required('metadata', msg="Required for TaskCluster schema."): { 'name': All(str, Length(max=255)), 'description': All(str, Length(max=32768)), 'owner': All(Email(), Length(max=255)), 'source': All(Url(), Length(max=4096)), }, Required('payload', msg="Required for TaskCluster schema."): { Extra: object, Optional('properties'): { 'version': str, 'build_number': int, 'release_promotion': bool, 'revision': str, 'product': str, Extra: object, } }, Required('provisionerId', msg="Required for TaskCluster schema."): All(Match(r'^([a-zA-Z0-9-_]*)$'), Length(min=1, max=22)), Required('priority', msg="Required for releasetasks schema."): 'high', Required('routes', msg="Required for releasetasks schema."): All(
def validate(self, request): xmlstr = request.saml_request data = saml_to_dict(xmlstr) if self._action == 'login': req_type = 'AuthnRequest' elif self._action == 'logout': req_type = 'LogoutRequest' issuer_name = data.get( '{urn:oasis:names:tc:SAML:2.0:protocol}%s' % (req_type), {}).get('children', {}).get('{urn:oasis:names:tc:SAML:2.0:assertion}Issuer', {}).get('text') if issuer_name is None: raise UnknownEntityIDError( 'Issuer non presente nella {}'.format(req_type)) if issuer_name and issuer_name not in self._registry.service_providers: raise UnknownEntityIDError( 'L\'entity ID "{}" indicato nell\'elemento <Issuer> non corrisponde a nessun Service Provider registrato in questo Identity Provider di test.' .format(issuer_name)) sp_metadata = self._registry.get(issuer_name) if sp_metadata is not None: atcss = sp_metadata.attribute_consuming_services attribute_consuming_service_indexes = [ str(el.get('attrs').get('index')) for el in atcss if 'index' in el.get('attrs', {}) ] ascss = sp_metadata.assertion_consumer_services assertion_consumer_service_indexes = [ str(el.get('index')) for el in ascss ] assertion_consumer_service_urls = [ str(el.get('Location')) for el in ascss ] else: attribute_consuming_service_indexes = [] assertion_consumer_service_indexes = [] assertion_consumer_service_urls = [] entity_id = self._config.entity_id issuer = Schema( { 'attrs': { 'Format': Equal( NAMEID_FORMAT_ENTITY, msg=DEFAULT_VALUE_ERROR.format(NAMEID_FORMAT_ENTITY)), 'NameQualifier': Any(Url(), Match(r'^urn:'), msg="Invalid URI"), }, 'children': {}, 'text': str, }, required=True, ) name_id = Schema( { 'attrs': { 'NameQualifier': str, 'Format': Equal(NAMEID_FORMAT_TRANSIENT, msg=DEFAULT_VALUE_ERROR.format( NAMEID_FORMAT_TRANSIENT)), }, 'children': {}, 'text': str }, required=True, ) name_id_policy = Schema( { 'attrs': { 'Format': Equal(NAMEID_FORMAT_TRANSIENT, msg=DEFAULT_VALUE_ERROR.format( NAMEID_FORMAT_TRANSIENT)), Optional('SPNameQualifier'): str, }, 'children': {}, 'text': 
None, }, required=True, ) conditions = Schema( { 'attrs': { 'NotBefore': All(str, _check_utc_date), 'NotOnOrAfter': All(str, _check_utc_date), }, 'children': {}, 'text': None, }, required=True, ) authn_context_class_ref = Schema( { 'attrs': {}, 'children': {}, 'text': All( str, In(SPID_LEVELS, msg=DEFAULT_LIST_VALUE_ERROR.format( ', '.join(SPID_LEVELS)))) }, required=True, ) requested_authn_context = Schema( { 'attrs': { 'Comparison': str }, 'children': { '{%s}AuthnContextClassRef' % (ASSERTION): authn_context_class_ref }, 'text': None }, required=True, ) scoping = Schema( { 'attrs': { 'ProxyCount': Equal('0', msg=DEFAULT_VALUE_ERROR.format('0')) }, 'children': {}, 'text': None }, required=True, ) signature = Schema( { 'attrs': dict, 'children': dict, 'text': None }, required=True, ) subject = Schema( { 'attrs': { 'Format': Equal( NAMEID_FORMAT_ENTITY, msg=DEFAULT_VALUE_ERROR.format(NAMEID_FORMAT_ENTITY)), 'NameQualifier': str }, 'children': {}, 'text': None }, required=True, ) # LOGIN def check_assertion_consumer_service(attrs): keys = attrs.keys() if ('AssertionConsumerServiceURL' in keys and 'ProtocolBinding' in keys and 'AssertionConsumerServiceIndex' not in keys): _errors = [] if attrs['ProtocolBinding'] != BINDING_HTTP_POST: _errors.append( Invalid(DEFAULT_VALUE_ERROR.format(BINDING_HTTP_POST), path=['ProtocolBinding'])) if attrs[ 'AssertionConsumerServiceURL'] not in assertion_consumer_service_urls: _errors.append( Invalid(DEFAULT_VALUE_ERROR.format( assertion_consumer_service_urls), path=['AssertionConsumerServiceURL'])) if _errors: raise MultipleInvalid(errors=_errors) return attrs elif ('AssertionConsumerServiceURL' not in keys and 'ProtocolBinding' not in keys and 'AssertionConsumerServiceIndex' in keys): if attrs[ 'AssertionConsumerServiceIndex'] not in assertion_consumer_service_indexes: raise Invalid(DEFAULT_LIST_VALUE_ERROR.format( ', '.join(assertion_consumer_service_indexes)), path=['AssertionConsumerServiceIndex']) return attrs else: raise 
Invalid( 'Uno e uno solo uno tra gli attributi o gruppi di attributi devono essere presenti: ' '[AssertionConsumerServiceIndex, [AssertionConsumerServiceUrl, ProtocolBinding]]' ) authnrequest_attr_schema = Schema(All( { 'ID': str, 'Version': Equal('2.0', msg=DEFAULT_VALUE_ERROR.format('2.0')), 'IssueInstant': All(str, _check_utc_date, self._check_date_in_range), 'Destination': Equal(entity_id, msg=DEFAULT_VALUE_ERROR.format(entity_id)), Optional('ForceAuthn'): str, Optional('AttributeConsumingServiceIndex'): In(attribute_consuming_service_indexes, msg=DEFAULT_LIST_VALUE_ERROR.format( ', '.join(attribute_consuming_service_indexes))), Optional('AssertionConsumerServiceIndex'): str, Optional('AssertionConsumerServiceURL'): str, Optional('ProtocolBinding'): str, }, check_assertion_consumer_service, ), required=True) AUTHNREQUEST_TAG = '{%s}AuthnRequest' % (PROTOCOL) authnrequest_schema = { AUTHNREQUEST_TAG: { 'attrs': authnrequest_attr_schema, 'children': Schema( { Optional('{%s}Subject' % (ASSERTION)): subject, '{%s}Issuer' % (ASSERTION): issuer, '{%s}NameIDPolicy' % (PROTOCOL): name_id_policy, Optional('{%s}Conditions' % (ASSERTION)): conditions, '{%s}RequestedAuthnContext' % (PROTOCOL): requested_authn_context, Optional('{%s}Scoping' % (PROTOCOL)): scoping, }, required=True, ), 'text': None } } # LOGOUT LOGOUTREQUEST_TAG = '{%s}LogoutRequest' % (PROTOCOL) logoutrequest_attr_schema = Schema(All({ 'ID': str, 'Version': Equal('2.0', msg=DEFAULT_VALUE_ERROR.format('2.0')), 'IssueInstant': All(str, _check_utc_date, self._check_date_in_range), 'Destination': Equal(entity_id, msg=DEFAULT_VALUE_ERROR.format(entity_id)), Optional('NotOnOrAfter'): All(str, _check_utc_date, self._check_date_not_expired), Optional('Reason'): str, }), required=True) logoutrequest_schema = { LOGOUTREQUEST_TAG: { 'attrs': logoutrequest_attr_schema, 'children': Schema( { '{%s}Issuer' % (ASSERTION): issuer, '{%s}NameID' % (ASSERTION): name_id, '{%s}SessionIndex' % (PROTOCOL): dict, }, 
required=True), 'text': None } } if self._binding == BINDING_HTTP_POST: if self._action == 'login': # Add signature schema _new_sub_schema = authnrequest_schema[AUTHNREQUEST_TAG][ 'children'].extend( {'{%s}Signature' % (SIGNATURE): signature}) authnrequest_schema[AUTHNREQUEST_TAG][ 'children'] = _new_sub_schema if self._action == 'logout': _new_sub_schema = logoutrequest_schema[LOGOUTREQUEST_TAG][ 'children'].extend( {'{%s}Signature' % (SIGNATURE): signature}) logoutrequest_schema[LOGOUTREQUEST_TAG][ 'children'] = _new_sub_schema authn_request = Schema( authnrequest_schema, required=True, ) logout_request = Schema( logoutrequest_schema, required=True, ) saml_schema = None if self._action == 'login': saml_schema = authn_request elif self._action == 'logout': saml_schema = logout_request errors = [] try: saml_schema(data) except MultipleInvalid as e: for err in e.errors: _paths = [] _attr = None for idx, _path in enumerate(err.path): if _path != 'children': if _path == 'attrs': try: _attr = err.path[(idx + 1)] except IndexError: _attr = '' break # strip namespaces for better readability _paths.append(_strip_namespaces(str(_path))) path = '/'.join(_paths) if _attr is not None: path += " - attribute: " + _attr # find value to show (iterate multiple times inside data # until we find the sub-element or attribute) _val = data for _ in err.path: try: _val = _val[_] except KeyError: _val = None except ValueError: _val = None # no need to show value if the error is the presence of the element _msg = err.msg if "extra keys not allowed" in _msg: _val = None _msg = "item not allowed" errors.append( ValidationDetail(_val, None, None, None, None, _msg, path)) raise SPIDValidationError(details=errors)
else: return None #: Schema utilizado para validar os atributos da classe Payment da versão 2 da API #: ..todo:: Verificar porque a validação de URLs não está funcionando payment_v2_schema = Schema( Object( { Required('email'): All(Email(), Length(max=60)), 'token': All(str, Length(min=32, max=32)), 'receiver_email': All(Email(), Length(max=60)), 'currency': 'BRL', 'reference': All(str, Length(max=200)), 'extra_amount': All(float, Range(min=-9999999.01, max=9999999)), 'redirect_url': Url(), 'notification_url': Url(), 'max_uses': All(int, Range(min=1, max=999)), 'max_age': All(int, Range(min=30, max=999999999)), 'client': client_schema, 'items': [ item_schema, ], 'shipping': shipping_schema, 'response': str, 'sandbox': bool, 'PAGSEGURO_API_URL': str, 'PAGSEGURO_PAYMENT_URL': str }, cls=Payment))
def get_schema():
    """Build and return the full voluptuous Schema for an LXDock config file.

    The same option set is valid both at the top level (as defaults) and
    inside each entry of the `containers` list.
    """
    _top_level_and_containers_common_options = {
        'environment': {
            Extra: Coerce(str)
        },
        'hostnames': [
            Hostname(),
        ],
        'image': str,
        'lxc_config': {
            Extra: str
        },
        'mode': In([
            'local',
            'pull',
        ]),
        'privileged': bool,
        'profiles': [
            str,
        ],
        'protocol': In([
            'lxd',
            'simplestreams',
        ]),
        'provisioning': [],  # will be set dynamically using provisioner classes...
        'server': Url(),
        'shares': [{
            'source': ExpandUserIfExists,
            'dest': str,
            'set_host_acl': bool,  # TODO: need a way to deprecate this
            'share_properties': {
                Extra: Coerce(str)
            },
        }],
        'shell': {
            'user': str,
            'home': str,  # TODO: deprecated
        },
        'users': [{
            # Usernames max length is set 32 characters according to useradd's man page.
            Required('name'): All(str, Length(max=32)),
            'home': str,
            'password': str,
            'shell': str,
            'sudoer': bool,
        }],
        'x11': {
            'enabled': bool,
            'xsocket_path': PathExists(),
            'xauthority_path': PathExists(),
            'extra_driver_paths': [str],
            'setup_guest_profile_d': bool,
            'gpu_properties': {
                Extra: Coerce(str)
            }
        },
        'extras': {
            'network_wait_timeout': int
        }
    }

    def _check_provisioner_config(config):
        # Validate one `provisioning` entry: first that its 'type' names a
        # registered provisioner, then the full entry against that
        # provisioner's own schema.
        provisioners = Provisioner.provisioners.values()
        # Check if 'type' is correctly defined
        Schema(
            {
                Required('type'):
                Any(*[provisioner.name for provisioner in provisioners])
            },
            extra=ALLOW_EXTRA)(config)
        # Check if the detected provisioner's schema is fully satisfied
        c = config.copy()
        name = c.pop('type')
        detected_provisioner = next(provisioner for provisioner in provisioners
                                    if provisioner.name == name)
        validated = Schema(detected_provisioner.schema)(c)
        validated['type'] = name
        return validated

    # Inserts provisioner specific schema rules in the global schema dict.
    _top_level_and_containers_common_options['provisioning'] = [
        All(_check_provisioner_config)
    ]

    _container_options = {
        Required('name'): LXDIdentifier(),
    }
    _container_options.update(_top_level_and_containers_common_options)

    _lxdock_options = {
        Required('name'): LXDIdentifier(),
        'containers': [
            _container_options,
        ],
    }
    _lxdock_options.update(_top_level_and_containers_common_options)

    return Schema(_lxdock_options)
"license_files": posint_zero_p, "licensed_files": posint_p, "source_files": posint_p, "distinct_licenses": [DISTINCT_LICENSE], "sure_licenses": [str] }) # list of sure licenses SUMMARY_3_0_0 = S({"sure_licenses": [str]}) # TODO: add all remaining categories CATEGORY = Any("Permissive", "Copyleft", "Copyleft Limited") LICENSE = S({ "category": CATEGORY, Optional("dejacode_url"): Url(), "homepage_url": Url(), "owner": str, "paths": [str], Optional("reference_url"): Url(), "spdx_license_key": str, "spdx_url": Any("", Url()), "text_url": Any("", Url()) }) # a dictionary with licenses LICENSES = S({str: LICENSE}) # TODO: posint_p or posin_zero_p for empty project? OSLC_STATS = S({ "All files": posint_p,
"""Demo: validate a bare hostname with voluptuous's Url validator."""
from voluptuous import Schema, Required, All, Length, Range, Url

# A schema whose single rule is the Url validator.
url_rule = Url()
schema = Schema(url_rule)

# 'w3.org' carries no scheme, yet Url() accepts it; the validated value is
# echoed back unchanged.
result = schema('w3.org')
print(result)
json_url = attr.ib(type=str) #: video_path: The path of the video file on disk. video_path = attr.ib(type=Path) #: video_url: The URl of the video file. video_url = attr.ib(type=str) # NB: Keep in sync with try_task_config_schema in # taskcluster/taskgraph.decision.py #: The schema for validating jobs. JOB_SCHEMA = Schema( { Required("jobs"): [ {Required("browsertime_json_url"): Url(), Required("video_url"): Url()} ] } ) def run_command(log, cmd): """Run a command using subprocess.check_output Args: log: The structlog logger instance. cmd: the command to run as a list of strings. Returns: A tuple of the process' exit status and standard output. """
# NOTE(review): this is Python 2 code — it uses the `unicode` built-in,
# which does not exist in Python 3.
from voluptuous import Schema, Required, All, Length, Range, Url

# Form payload schema for creating a model resource: a downloadable URL plus
# a target filename and resource type; all three keys are required and must
# be unicode strings.
modelresource_form_schema = Schema({
    Required('url'): All(unicode,Url()),
    Required('filename'):unicode,
    Required('resource_type'):unicode
})
from voluptuous import Schema, Required, Optional, Any, Url, Range, All from githubcap.enums import State from .user import USER_SCHEMA from .label import LABEL_SCHEMA from .milestone import MILESTONE_SCHEMA ISSUE_SCHEMA = Schema({ Required("id"): All(Range(min=1)), Required("url"): Url(), Required("repository_url"): Url(), Required("labels_url"): Url(), Required("comments_url"): Url(), Required("events_url"): Url(), Required("html_url"): Url(), Required("number"): int, Required("state"): Schema(Any(*State.all_values())), Required("title"): str, Required("body"): Schema(Any(str, None)), Required("user"): USER_SCHEMA, Required("locked"): bool, Required("comments"): All(Range(min=0)), Required("labels"): [ LABEL_SCHEMA ], Required("author_association"): Schema(Any('OWNER', 'CONTRIBUTOR', 'NONE')), Optional("assignee"): object, Required("assignees"): [ USER_SCHEMA ],
}, 'mode': In([ 'local', 'pull', ]), 'privileged': bool, 'protocol': In([ 'lxd', 'simplestreams', ]), 'provisioning': [], # will be set dynamically using provisioner classes... 'server': Url(), 'shares': [{ # The existence of the source directory will be checked! 'source': IsDir(), 'dest': str, }], 'shell': { 'user': str, 'home': str, }, 'users': [{ # Usernames max length is set 32 characters according to useradd's man page. Required('name'): All(str, Length(max=32)), 'home': str,
class Service:
    """Representation of DID Document Services."""

    # Schema for one service entry: "id" must be a valid DID URL and
    # "serviceEndpoint" may be a DID URL, a plain URL, or the empty string.
    # Unknown keys are allowed and preserved (extra=ALLOW_EXTRA).
    _validator = Schema(
        {
            "id": All(str, DIDUrl.validate),
            "type": str,
            "serviceEndpoint": Switch(DIDUrl.validate, Url(), ""),
        },
        extra=ALLOW_EXTRA,
        required=True,
    )

    def __init__(self, id_: DIDUrl, type_: str, endpoint: str, **extra):
        """Initialize Service."""
        self._id = id_
        self._type = type_
        self._endpoint = endpoint
        self._extra = extra

    @property
    def id(self):
        """Return id."""
        return self._id

    @property
    def type(self):
        """Return type."""
        return self._type

    @property
    def endpoint(self):
        """Return endpoint."""
        return self._endpoint

    @property
    def extra(self):
        """Return extra."""
        return self._extra

    def serialize(self):
        """Return serialized representation of Service."""
        return {
            "id": str(self.id),
            "type": self.type,
            "serviceEndpoint": self.endpoint,
            **self.extra,
        }

    @classmethod
    @wrap_validation_error(ServiceValidationError,
                           message="Failed to validate service")
    def validate(cls, value: dict):
        """Validate object against service."""
        return cls._validator(value)

    @classmethod
    @wrap_validation_error(ServiceValidationError,
                           message="Failed to deserialize service")
    def deserialize(cls, value: dict):
        """Deserialize into Service.

        Validation runs first; Into(...) then renames the JSON keys to the
        constructor's keyword-argument names before instantiation.
        """
        value = cls.validate(value)
        deserializer = Schema(
            {
                Into("id", "id_"): DIDUrl.parse,
                Into("type", "type_"): str,
                Into("serviceEndpoint", "endpoint"): str,
            },
            extra=ALLOW_EXTRA,
        )
        value = deserializer(value)
        return cls(**value)
def validate(self):
    """Validates the given configuration :py:attr:`self.config` to comply
    with elasticluster. As well all types are converted to the expected
    format if possible.

    :raises: :py:class:`voluptuous.MultipleInvalid` if multiple
        properties are not compliant
    :raises: :py:class:`voluptuous.Invalid` if one property is invalid
    """
    # NOTE(review): this method uses dict.iteritems(), i.e. Python 2.
    self._pre_validate()

    # custom validators
    @message("file could not be found")
    def check_file(v):
        # Expand `~` and verify the path exists; returns the expanded path.
        # NOTE(review): expanduser is applied twice — presumably one call
        # was meant to be expandvars; confirm.
        f = os.path.expanduser(os.path.expanduser(v))
        if os.path.exists(f):
            return f
        else:
            raise Invalid("file could not be found `%s`" % v)

    # schema to validate all cluster properties
    schema = {"cluster": {"cloud": All(str, Length(min=1)),
                          "setup_provider": All(str, Length(min=1)),
                          "login": All(str, Length(min=1))},
              "setup": {"provider": All(str, Length(min=1)),
                        Optional("playbook_path"): check_file()},
              "login": {"image_user": All(str, Length(min=1)),
                        "image_user_sudo": All(str, Length(min=1)),
                        "image_sudo": Boolean(str),
                        "user_key_name": All(str, Length(min=1)),
                        "user_key_private": check_file(),
                        "user_key_public": check_file()}}

    # Provider-specific "cloud" section schemas (extra=False below, so no
    # unknown keys are tolerated in the cloud section).
    cloud_schema_ec2 = {"provider": 'ec2_boto',
                        "ec2_url": Url(str),
                        "ec2_access_key": All(str, Length(min=1)),
                        "ec2_secret_key": All(str, Length(min=1)),
                        "ec2_region": All(str, Length(min=1)),
                        Optional("request_floating_ip"): Boolean(str)}
    cloud_schema_gce = {"provider": 'google',
                        "gce_client_id": All(str, Length(min=1)),
                        "gce_client_secret": All(str, Length(min=1)),
                        "gce_project_id": All(str, Length(min=1))}
    cloud_schema_openstack = {"provider": 'openstack',
                              "auth_url": All(str, Length(min=1)),
                              "username": All(str, Length(min=1)),
                              "password": All(str, Length(min=1)),
                              "project_name": All(str, Length(min=1)),
                              Optional("request_floating_ip"): Boolean(str),
                              Optional("region_name"): All(str, Length(min=1))}

    node_schema = {
        "flavor": All(str, Length(min=1)),
        "image_id": All(str, Length(min=1)),
        "security_group": All(str, Length(min=1))
    }

    # validation
    validator = Schema(schema, required=True, extra=True)
    validator_node = Schema(node_schema, required=True, extra=True)
    ec2_validator = Schema(cloud_schema_ec2, required=True, extra=False)
    gce_validator = Schema(cloud_schema_gce, required=True, extra=False)
    openstack_validator = Schema(cloud_schema_openstack, required=True, extra=False)

    if not self.config:
        raise Invalid("No clusters found in configuration.")

    for cluster, properties in self.config.iteritems():
        # General validation first; the validated (type-coerced) result
        # replaces the raw properties.
        self.config[cluster] = validator(properties)

        if 'provider' not in properties['cloud']:
            raise Invalid(
                "Missing `provider` option in cluster `%s`" % cluster)
        cloud_props = properties['cloud']
        # Re-validate the cloud section with the provider-specific schema.
        if properties['cloud']['provider'] == "ec2_boto":
            self.config[cluster]['cloud'] = ec2_validator(cloud_props)
        elif properties['cloud']['provider'] == "google":
            self.config[cluster]['cloud'] = gce_validator(cloud_props)
        elif properties['cloud']['provider'] == "openstack":
            self.config[cluster]['cloud'] = openstack_validator(cloud_props)

        if 'nodes' not in properties or len(properties['nodes']) == 0:
            raise Invalid(
                "No nodes configured for cluster `%s`" % cluster)

        for node, props in properties['nodes'].iteritems():
            # check name pattern to conform hostnames
            match = re.search(r'^[a-zA-Z0-9-]*$', node)
            if not match:
                raise Invalid(
                    "Invalid name `%s` for node group. A valid node group "
                    "can only consist of letters, digits or the hyphens "
                    "character (`-`)" % node)
            validator_node(props)

    self._post_validate()
# License along with this program. import os import sys import yaml import gzip import lzma from voluptuous import Schema, Required, All, Any, Length, Range, Match, Url from optparse import OptionParser import multiprocessing as mp schema_header = Schema({ Required('File'): All(str, 'DEP-11', msg="Must be \"DEP-11\""), Required('Origin'): All(str, Length(min=1)), Required('Version'): All(str, Match(r'(\d+\.?)+$'), msg="Must be a valid version number"), Required('MediaBaseUrl'): All(str, Url()), 'Time': All(str), 'Priority': All(int), }) schema_translated = Schema({ Required('C'): All(str, Length(min=1), msg="Must have an unlocalized 'C' key"), dict: All(str, Length(min=1)), }, extra = True) schema_component = Schema({ Required('Type'): All(str, Length(min=1)), Required('ID'): All(str, Length(min=1)), Required('Name'): All(dict, Length(min=1), schema_translated), Required('Package'): All(str, Length(min=1)), }, extra = True)
"""Issue milestone schema.""" from voluptuous import Any from voluptuous import Required from voluptuous import Schema from voluptuous import Url from githubcap.enums import State from .user import USER_SCHEMA # pylint: disable=no-value-for-parameter MILESTONE_SCHEMA = Schema({ Required("url"): Url(), Required("html_url"): Url(), Required("labels_url"): Url(), Required("id"): int, Required("number"): int, Required("state"): Schema(Any(*State.all_values())), Required("title"): str, Required("description"): Schema(Any(str, None)), Required("creator"): USER_SCHEMA, Required("open_issues"): int, Required("closed_issues"): int, Required("created_at"): str, Required("updated_at"): Schema(Any(str, None)), Required("closed_at"): Schema(Any(str, None)), Required("due_on"): Schema(Any(str, None)) })
class DIDDocument: """Representation of DID Document.""" properties = Properties(extra=ALLOW_EXTRA) def __init__(self, id: Union[str, DID], context: List[Any], *, also_known_as: List[str] = None, controller: List[str] = None, verification_method: List[VerificationMethod] = None, authentication: VerificationRelationship = None, assertion_method: VerificationRelationship = None, key_agreement: VerificationRelationship = None, capability_invocation: VerificationRelationship = None, capability_delegation: VerificationRelationship = None, service: List[Service] = None, **extra): """Create DIDDocument.""" self._id = id self._context = context self._also_known_as = also_known_as self._controller = controller self._verification_method = verification_method self._authentication = authentication self._assertion_method = assertion_method self._key_agreement = key_agreement self._capability_invocation = capability_invocation self._capability_delegation = capability_delegation self._service = service self.extra = extra self._index = {} self._index_resources() def _index_resources(self): """Index resources by ID. IDs are not guaranteed to be unique within the document. The first instance is stored in the index and subsequent id collisions are checked against the original. If they do not match, an error will be thrown. """ def _indexer(item): if not item: # Attribute isn't set return if isinstance(item, DIDUrl): # We don't index references return if isinstance(item, list): for subitem in item: _indexer(subitem) return if isinstance(item, VerificationRelationship): for subitem in item.items: _indexer(subitem) return assert isinstance(item, (VerificationMethod, Service)) if item.id in self._index and item != self._index[item.id]: raise IdentifiedResourceMismatch( "ID {} already found in Index and Items do not match". 
format(item.id)) self._index[item.id] = item for item in ( self.verification_method, self.authentication, self.assertion_method, self.key_agreement, self.capability_invocation, self.capability_delegation, self.service, ): _indexer(item) @property @properties.add( data_key="@context", required=True, validate=Switch(Url(), [Url()], dict, [dict]), serialize=unwrap_if_list_of_one, deserialize=single_to_list, ) def context(self): """Return context.""" return self._context @property @properties.add( required=True, validate=All(str, DID.validate), serialize=Coerce(str), deserialize=Coerce(DID), ) def id(self): """Return id.""" return self._id @property @properties.add(data_key="alsoKnownAs", validate=[str]) def also_known_as(self): """Return also_known_as.""" return self._also_known_as @property @properties.add( validate=Switch(All(str, DID.validate), [DID.validate]), serialize=All([Coerce(str)], unwrap_if_list_of_one), deserialize=All(single_to_list, [Coerce(DID)]), ) def controller(self): """Return controller.""" return self._controller @property @properties.add( data_key="verificationMethod", validate=[VerificationMethod.validate], serialize=[serialize], deserialize=[VerificationMethod.deserialize], ) def verification_method(self): """Return verification_method.""" return self._verification_method @property @properties.add( validate=VerificationRelationship.validate, serialize=serialize, deserialize=VerificationRelationship.deserialize, ) def authentication(self): """Return authentication.""" return self._authentication @property @properties.add( data_key="assertionMethod", validate=VerificationRelationship.validate, serialize=serialize, deserialize=VerificationRelationship.deserialize, ) def assertion_method(self): """Return assertion_method.""" return self._assertion_method @property @properties.add( data_key="keyAgreement", validate=VerificationRelationship.validate, serialize=serialize, deserialize=VerificationRelationship.deserialize, ) def key_agreement(self): 
"""Return key_agreement.""" return self._key_agreement @property @properties.add( data_key="capabilityInvocation", validate=VerificationRelationship.validate, serialize=serialize, deserialize=VerificationRelationship.deserialize, ) def capability_invocation(self): """Return capability_invocation.""" return self._capability_invocation @property @properties.add( data_key="capabilityDelegation", validate=VerificationRelationship.validate, serialize=serialize, deserialize=VerificationRelationship.deserialize, ) def capability_delegation(self): """Return capability_delegation.""" return self._capability_delegation @property @properties.add( validate=[Service.validate], serialize=[serialize], deserialize=[Service.deserialize], ) def service(self): """Return service.""" return self._service def dereference(self, reference: Union[str, DIDUrl]): """Dereference a DID URL to a document resource.""" if isinstance(reference, str): reference = DIDUrl.parse(reference) if reference not in self._index: raise ResourceIDNotFound( "ID {} not found in document".format(reference)) return self._index[reference] @classmethod @wrap_validation_error(DIDDocumentValidationError, message="Failed to validate DID Document") def validate(cls, value): """Validate against expected schema.""" return cls.properties.validate(value) @wrap_validation_error(DIDDocumentError, message="Failed to serialize DID Document") def serialize(self): """Serialize DID Document.""" value = self.properties.serialize(self) return {**value, **self.extra} @classmethod @wrap_validation_error(DIDDocumentValidationError, message="Failed to deserialize DID Document") def deserialize(cls, value: dict, options: Set[Option] = None): """Deserialize DID Document.""" if options: value = DIDDocumentOption.apply(value, options) value = cls.validate(value) value = cls.properties.deserialize(value) return cls(**value)
Optional("os"): ["windows", "linux", "macos", "freebsd", "posix", "nix"], Optional("python"): Version, } ) SCHEMA = Schema( { Required("layout"): "plugin", Required("id"): NonEmptyString, Required("title"): NonEmptyString, Required("description"): NonEmptyString, Optional("author"): NonEmptyString, Optional("authors"): list, Required("license"): NonEmptyString, Required("date"): datetime.date, Required("homepage"): Url(), Required("source"): Url(), Required("archive"): Url(), Optional("follow_dependency_links"): bool, Optional("tags"): list, Optional("screenshots"): All([ScreenshotDef]), Optional("featuredimage"): ImageLocation, Optional("compatibility"): Compatibility, Optional("disabled"): NonEmptyString, Optional("abandoned"): NonEmptyString, Optional("up_for_adoption"): Url(), Optional("redirect_from"): NonEmptyString, } )
Optional("following"): int, Optional("hireable"): bool, Optional("location"): str, Optional("name"): str, Optional("public_gists"): int, Optional("public_repos"): int, Optional("updated_at"): str, Required("avatar_url"): Url(), Required("events_url"): Url(), Required("followers_url"): Url(), Required("following_url"): Url(), Required("gists_url"): Url(), Required("gravatar_id"): str, Required("html_url"): Url(), Required("id"): int, Required("login"):
def get_schema():
    """Build and return the voluptuous schema for LXDock config files."""

    def _validate_provisioner(entry):
        """Validate one 'provisioning' entry against its provisioner's schema."""
        provisioners = Provisioner.provisioners.values()
        # Step 1: 'type' must name a registered provisioner.
        Schema(
            {
                Required('type'): Any(
                    *[provisioner.name for provisioner in provisioners])
            },
            extra=ALLOW_EXTRA)(entry)
        # Step 2: the remaining keys must satisfy that provisioner's own schema.
        remaining = entry.copy()
        provisioner_name = remaining.pop('type')
        selected = next(provisioner for provisioner in provisioners
                        if provisioner.name == provisioner_name)
        checked = Schema(selected.schema)(remaining)
        checked['type'] = provisioner_name
        return checked

    # Options accepted both at the top level and on individual containers.
    common_options = {
        'environment': {
            Extra: Coerce(str)
        },
        'hostnames': [
            Hostname(),
        ],
        'image': str,
        'lxc_config': {
            Extra: str
        },
        'mode': In([
            'local',
            'pull',
        ]),
        'privileged': bool,
        'profiles': [
            str,
        ],
        'protocol': In([
            'lxd',
            'simplestreams',
        ]),
        # Provisioner entries are checked dynamically via the registered
        # provisioner classes.
        'provisioning': [
            All(_validate_provisioner),
        ],
        'server': Url(),
        'shares': [{
            # The existence of the source directory will be checked!
            'source': IsDir(),
            'dest': str,
            'set_host_acl': bool,
        }],
        'shell': {
            'user': str,
            'home': str,
        },
        'users': [{
            # Usernames max length is set 32 characters according to
            # useradd's man page.
            Required('name'): All(str, Length(max=32)),
            'home': str,
            'password': str,
        }],
    }

    # A container definition: a mandatory name plus the common options.
    container_options = {
        Required('name'): LXDIdentifier(),
    }
    container_options.update(common_options)

    # The top-level document: project name, optional container list, and
    # the common options (usable as defaults for all containers).
    top_level_options = {
        Required('name'): LXDIdentifier(),
        'containers': [
            container_options,
        ],
    }
    top_level_options.update(common_options)

    return Schema(top_level_options)
"""GitHub repository schema.""" from voluptuous import Any from voluptuous import Url from .license import LICENSE_SCHEMA from .user import USER_SCHEMA # pylint: disable=no-value-for-parameter REPOSITORY_SCHEMA = { "archive_url": Url(), "archived": bool, "assignees_url": Url(), "blobs_url": Url(), "branches_url": Url(), "clone_url": Url(), "collaborators_url": Url(), "comments_url": Url(), "commits_url": Url(), "compare_url": Url(), "contents_url": Url(), "contributors_url": Url(), "created_at": str, "default_branch": str, "deployments_url": Url(), "description": Any(str, None), "downloads_url": Url(), "events_url": Url(), "fork": bool, "forks": int,
def validate(self): """Validates the given configuration :py:attr:`self.config` to comply with elasticluster. As well all types are converted to the expected format if possible. :raises: :py:class:`voluptuous.MultipleInvalid` if multiple properties are not compliant :raises: :py:class:`voluptuous.Invalid` if one property is invalid """ self._pre_validate() # custom validators @message("file could not be found") def check_file(v): f = os.path.expanduser(os.path.expanduser(v)) if os.path.exists(f): return f else: raise Invalid("file could not be found `%s`" % v) @message("Unsupported nova API version") def nova_api_version(version): try: from novaclient import client, exceptions client.get_client_class(version) return version except exceptions.UnsupportedVersion as ex: raise Invalid("Invalid option for `nova_api_version`: %s" % ex) # schema to validate all cluster properties schema = { "cluster": { "cloud": All(str, Length(min=1)), "setup_provider": All(str, Length(min=1)), "login": All(str, Length(min=1)), }, "setup": { "provider": All(str, Length(min=1)), Optional("playbook_path"): check_file(), Optional("ssh_pipelining"): Boolean(str), }, "login": { "image_user": All(str, Length(min=1)), "image_user_sudo": All(str, Length(min=1)), "image_sudo": Boolean(str), "user_key_name": All(str, Length(min=1)), "user_key_private": check_file(), "user_key_public": check_file(), }, } cloud_schema_ec2 = { "provider": 'ec2_boto', "ec2_url": Url(str), "ec2_access_key": All(str, Length(min=1)), "ec2_secret_key": All(str, Length(min=1)), "ec2_region": All(str, Length(min=1)), Optional("request_floating_ip"): Boolean(str), Optional("vpc"): All(str, Length(min=1)), } cloud_schema_gce = { "provider": 'google', "gce_client_id": All(str, Length(min=1)), "gce_client_secret": All(str, Length(min=1)), "gce_project_id": All(str, Length(min=1)), Optional("noauth_local_webserver"): Boolean(str), Optional("zone"): All(str, Length(min=1)), } cloud_schema_openstack = { "provider": 'openstack', 
"auth_url": All(str, Length(min=1)), "username": All(str, Length(min=1)), "password": All(str, Length(min=1)), "project_name": All(str, Length(min=1)), Optional("request_floating_ip"): Boolean(str), Optional("region_name"): All(str, Length(min=1)), Optional("nova_api_version"): nova_api_version(), } node_schema = { "flavor": All(str, Length(min=1)), "image_id": All(str, Length(min=1)), "security_group": All(str, Length(min=1)), Optional("network_ids"): All(str, Length(min=1)), } # validation validator = Schema(schema, required=True, extra=True) node_validator = Schema(node_schema, required=True, extra=True) ec2_validator = Schema(cloud_schema_ec2, required=True, extra=False) gce_validator = Schema(cloud_schema_gce, required=True, extra=False) openstack_validator = Schema(cloud_schema_openstack, required=True, extra=False) if not self.config: raise Invalid("No clusters found in configuration.") for cluster, properties in self.config.items(): self.config[cluster] = validator(properties) if 'provider' not in properties['cloud']: raise Invalid("Missing `provider` option in cluster `%s`" % cluster) try: cloud_props = properties['cloud'] if properties['cloud']['provider'] == "ec2_boto": self.config[cluster]['cloud'] = ec2_validator(cloud_props) elif properties['cloud']['provider'] == "google": self.config[cluster]['cloud'] = gce_validator(cloud_props) elif properties['cloud']['provider'] == "openstack": self.config[cluster]['cloud'] = openstack_validator( cloud_props) except MultipleInvalid as ex: raise Invalid( "Invalid configuration for cloud section `cloud/%s`: %s" % (properties['cluster']['cloud'], str.join(", ", [str(i) for i in ex.errors]))) if 'nodes' not in properties or len(properties['nodes']) == 0: raise Invalid("No nodes configured for cluster `%s`" % cluster) for node, props in properties['nodes'].items(): # check name pattern to conform hostnames match = re.search(r'^[a-zA-Z0-9-]*$', node) if not match: raise Invalid( "Invalid name `%s` for node group. 
A valid node group" " can only consist of letters, digits or the hyphen" " character (`-`)" % (node, )) node_validator(props) if (properties['cloud']['provider'] == 'ec2_boto' and 'vpc' in self.config[cluster]['cloud'] and 'network_ids' not in props): raise Invalid("Node group `%s/%s` is being used in" " a VPC, so it must specify network_ids." % (cluster, node)) if (properties['cloud']['provider'] == 'ec2_boto' and 'network_ids' in props and 'vpc' not in self.config[cluster]['cloud']): raise Invalid("Cluster `%s` must specify a VPC to place" " `%s` instances in %s" % (cluster, node, props['network_ids'])) self._post_validate()
"""A connector for Webex Teams.""" import json import logging import os import uuid import aiohttp from voluptuous import Required, Url from webexteamssdk import WebexTeamsAPI from opsdroid.connector import Connector, register_event from opsdroid.events import Message _LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = {Required("webhook-url"): Url(), Required("token"): str} class ConnectorWebexTeams(Connector): """A connector for Webex Teams.""" def __init__(self, config, opsdroid=None): """Create a connector.""" _LOGGER.debug(_("Loaded WebEx Teams Connector.")) super().__init__(config, opsdroid=opsdroid) self.name = config.get("name", "webexteams") self.config = config self.opsdroid = opsdroid self.default_target = None self.bot_name = config.get("bot-name", "opsdroid") self.bot_webex_id = None self.secret = uuid.uuid4().hex self.people = {}
class Warp10Bucket(Bucket):
    """
    Warp10 bucket
    """

    # Extend the base bucket schema with Warp10 connection settings.
    SCHEMA = Bucket.SCHEMA.extend({
        Optional('url', default='http://localhost:8080'): Url(),
        Required('read_token'): str,
        Required('write_token'): str,
        # Optional prefix prepended to every metric/selector name.
        Optional('global_prefix', default=None): Any(None, str),
    })

    def __init__(self, cfg):
        # Force the type so the base class records this bucket as warp10.
        cfg['type'] = 'warp10'
        super().__init__(cfg)
        self.read_token = cfg['read_token']
        self.write_token = cfg['write_token']
        self.global_prefix = cfg.get('global_prefix')
        self.warp10 = warp10client.Warp10Client(
            warp10_api_url=cfg['url'],
            read_token=self.read_token,
            write_token=self.write_token,
        )

    def build_name(self, name):
        """Return `name` prefixed with the global prefix, if one is set."""
        return "{}.{}".format(self.global_prefix, name) if self.global_prefix \
            else name

    def build_selector(self, selector, is_regexp=False):
        """Build a Warp10 class selector; `~` marks a regexp selector."""
        selector = self.build_name(selector)
        if is_regexp:
            selector = "~" + selector
        return selector

    @catch_query_error
    def drop(self, tags=None, **kwargs):
        """
        Delete database
        """
        # Match every series under the (optional) global prefix.
        self.warp10.delete({
            'name': self.build_selector(".*", is_regexp=True),
            'tags': tags or {},
        })

    def insert_data(self, data):
        raise NotImplementedError("Warp10 is a pure time-series database")

    def insert_times_data(self, ts, data, tags=None, *args, **kwargs):
        """
        Insert data
        """
        # Warp10 timestamps are in microseconds.
        ts_us = make_ts(ts) * 1e6

        if tags:
            check_tags(tags)

        # One metric entry per (key, value) pair; actual transmission is
        # deferred to `send_bulk` via `enqueue`.
        for key, value in data.items():
            metric = {
                'name': self.build_selector(key),
                'value': value,
                'position': {
                    'longitude': None,
                    'latitude': None,
                    'elevation': None,
                    'timestamp': ts_us,
                },
                'tags': tags or {},
            }
            self.enqueue(metric)

    @catch_query_error
    def send_bulk(self, metrics):
        """
        Send data to Warp10
        """
        self.warp10.set(metrics)

    def build_fetch(self, feature, from_str, to_str, tags=None):
        """Build a WarpScript FETCH snippet for one feature."""
        tags = {} if tags is None else dict(tags)

        # Fold the feature's `match_all` constraints into the tag selector.
        if feature.match_all:
            for tag in feature.match_all:
                k, v = tag['tag'], tag['value']
                check_tag(k, v)
                tags[k] = v

        tags_str = build_tags(tags)

        return "[\n'{}'\n'{}'\n{}\n'{}'\n'{}'\n]\nFETCH".format(
            self.read_token,
            self.build_selector(feature.field),
            tags_str,
            from_str,
            to_str,
        )

    def build_multi_fetch(self,
                          bucket_interval, features, from_str, to_str,
                          tags=None):
        """Build one WarpScript combining a BUCKETIZE'd FETCH per feature."""
        # BUCKETIZE span is expressed in microseconds.
        bucket_span = int(bucket_interval * 1e6)
        scripts = [
            "[\n{}\n{}\n0\n{}\n0\n]\nBUCKETIZE".format(
                self.build_fetch(
                    feature,
                    from_str,
                    to_str,
                    tags,
                ),
                metric_to_bucketizer(feature.metric),
                bucket_span,
            )
            for feature in features
        ]
        return "[\n{}\n]".format("\n".join(scripts))

    @catch_query_error
    def get_times_data(self, bucket_interval, features, from_date, to_date,
                       tags=None, **kwargs):
        """Fetch bucketized data for `features` over the given date range.

        Returns a list of `(offset_seconds, [value per feature], timestamp)`
        tuples, one per bucket; missing buckets hold NaN.
        """
        period = DateRange.build_date_range(from_date, to_date,
                                            bucket_interval)

        nb_buckets = int((period.to_ts - period.from_ts) / bucket_interval)
        # Rows are buckets, columns are features; NaN marks "no data".
        buckets = np.full((nb_buckets, len(features)), np.nan, dtype=float)

        script = self.build_multi_fetch(
            bucket_interval, features, period.from_str, period.to_str,
            tags=tags,
        )
        raw = self.warp10.exec(script)
        data = json.loads(raw)

        from_us = period.from_ts * 1e6
        to_us = period.to_ts * 1e6
        bucket_interval_us = int(bucket_interval * 1e6)

        has_data = False

        for i, item in enumerate(data[0]):
            if len(item) == 0:
                continue
            item = item[0]
            # 'v' holds the [timestamp_us, value] pairs of the series.
            values = item['v']
            for ts_us, value in values:
                # XXX: Warp10 buckets are labeled with the right timestamp
                # but Loud ML uses the left one.
                ts_us -= bucket_interval_us
                if ts_us < from_us or ts_us >= to_us:
                    # XXX Sometimes, Warp10 returns extra buckets, skip them
                    continue
                j = math.floor((ts_us - from_us) / bucket_interval_us)
                buckets[j][i] = value
                has_data = True

        if not has_data:
            raise errors.NoData()

        result = []
        from_ts = ts = from_us / 1e6
        for bucket in buckets:
            result.append(((ts - from_ts), list(bucket), ts))
            ts += bucket_interval
        return result

    def save_timeseries_prediction(self, prediction, tags=None):
        """Persist a model's bucketized prediction back into Warp10."""
        prefix = prediction.model.name
        logging.info("saving prediction to '%s'", self.build_name(prefix))
        for bucket in prediction.format_buckets():
            data = bucket['predicted']
            bucket_tags = tags or {}
            stats = bucket.get('stats', None)
            if stats is not None:
                data['score'] = float(stats.get('score'))
                bucket_tags['is_anomaly'] = stats.get('anomaly', False)
            # XXX As Warp10 uses the end date to identify buckets, use the
            # same convention
            ts = bucket['timestamp'] + prediction.model.bucket_interval
            self.insert_times_data(
                ts=ts,
                tags=bucket_tags,
                data={"{}.{}".format(prefix, k): v
                      for k, v in data.items()},
            )
        self.commit()
# Free-form summary of the analysis results.
SUMMARY = S(list)

# Source-code repository record for a component.
CODE_REPOSITORY = S({
    "type": str,
    "url": str  # plain str, not Url()
})  # Url()}) # Url does not seem to support git@...

# Metadata details extracted for a single component.
DETAIL = S({
    Optional("code_repository"): CODE_REPOSITORY,
    # License may appear as a single string or a list of strings.
    Optional("declared_license"): str,
    Optional("declared_licenses"): [str],
    Optional("dependencies"): [str],
    Optional("description"): Any(None, str),
    Optional("devel_dependencies"): [str],
    Optional("ecosystem"): str,
    Optional("homepage"): Url(),
    Optional("name"): str,
    Optional("version"): str
})

# One DETAIL entry per analyzed component.
DETAILS = S([DETAIL])

# metadata schema for component (not package)
COMPONENT_METADATA_SCHEMA = S({
    "_audit": Any(None, AUDIT),
    Optional("_release"): str,
    "schema": SCHEMA,
    "status": STATUS,
    "summary": SUMMARY,
    "details": DETAILS
})
'post': { str: str }, 'headers': { str: str }, 'nocache': bool, # crates_io fetcher 'fetch_delay': int, }], 'shadow': bool, 'repolinks': [{ Required('desc'): str, Required('url'): Url(), }], 'packagelinks': [{ Required('desc'): str, Required('url'): Url(), }], 'tags': [str] }], 'rules': [{ 'name': Any(str, [str]), 'namepat': str, 'ver': Any(str, [str]), 'verpat': str, 'wwwpart': Any(str, [str]), 'wwwpat': str, 'family':
def validate(self): """ Validate the given configuration, converting properties to native Python types. The configuration to check must have been given to the constructor and stored in :py:attr:`self.config`. :raises: :py:class:`voluptuous.Invalid` if one property is invalid :raises: :py:class:`voluptuous.MultipleInvalid` if multiple properties are not compliant """ self._pre_validate() # schema to validate all cluster properties schema = {"cluster": {"cloud": All(str, Length(min=1)), "setup_provider": All(str, Length(min=1)), "login": All(str, Length(min=1)), }, "setup": {"provider": All(str, Length(min=1)), Optional("playbook_path"): can_read_file(), Optional("ansible_command"): All(can_read_file(), can_execute_file()), Optional("ansible_extra_args"): All(str, Length(min=1)), Optional("ssh_pipelining"): Boolean(str), }, "login": {"image_user": All(str, Length(min=1)), "image_user_sudo": All(str, Length(min=1)), "image_sudo": Boolean(str), "user_key_name": All(str, Length(min=1)), "user_key_private": can_read_file(), "user_key_public": can_read_file(), }, } cloud_schema_ec2 = {"provider": 'ec2_boto', "ec2_url": Url(str), Optional("ec2_access_key"): All(str, Length(min=1)), Optional("ec2_secret_key"): All(str, Length(min=1)), "ec2_region": All(str, Length(min=1)), Optional("request_floating_ip"): Boolean(str), Optional("vpc"): All(str, Length(min=1)), Optional("instance_profile"): All(str, Length(min=1)), } cloud_schema_gce = {"provider": 'google', "gce_client_id": All(str, Length(min=1)), "gce_client_secret": All(str, Length(min=1)), "gce_project_id": All(str, Length(min=1)), Optional("noauth_local_webserver"): Boolean(str), Optional("zone"): All(str, Length(min=1)), Optional("network"): All(str, Length(min=1)), } cloud_schema_openstack = {"provider": 'openstack', "auth_url": All(str, Length(min=1)), "username": All(str, Length(min=1)), "password": All(str, Length(min=1)), "project_name": All(str, Length(min=1)), Optional("request_floating_ip"): Boolean(str), 
Optional("region_name"): All(str, Length(min=1)), Optional("nova_api_version"): nova_api_version(), } cloud_schema_azure = {"provider": 'azure', "subscription_id": All(str, Length(min=1)), "certificate": All(str, Length(min=1)), } node_schema = { "flavor": All(str, Length(min=1)), "image_id": All(str, Length(min=1)), "security_group": All(str, Length(min=1)), Optional("network_ids"): All(str, Length(min=1)), } # validation validator = Schema(schema, required=True, extra=True) node_validator = Schema(node_schema, required=True, extra=True) ec2_validator = Schema(cloud_schema_ec2, required=True, extra=False) gce_validator = Schema(cloud_schema_gce, required=True, extra=False) openstack_validator = Schema(cloud_schema_openstack, required=True, extra=False) azure_validator = Schema(cloud_schema_azure, required=True, extra=False) if not self.config: raise Invalid("No clusters found in configuration.") for cluster, properties in self.config.iteritems(): self.config[cluster] = validator(properties) if 'provider' not in properties['cloud']: raise Invalid( "Missing `provider` option in cluster `%s`" % cluster) try: cloud_props = properties['cloud'] if properties['cloud']['provider'] == "ec2_boto": self.config[cluster]['cloud'] = ec2_validator(cloud_props) elif properties['cloud']['provider'] == "google": self.config[cluster]['cloud'] = gce_validator(cloud_props) elif properties['cloud']['provider'] == "openstack": self.config[cluster]['cloud'] = openstack_validator(cloud_props) elif properties['cloud']['provider'] == "azure": self.config[cluster]['cloud'] = azure_validator(cloud_props) except MultipleInvalid as ex: raise Invalid("Invalid configuration for cloud section `cloud/%s`: %s" % (properties['cluster']['cloud'], str.join(", ", [str(i) for i in ex.errors]))) if 'nodes' not in properties or len(properties['nodes']) == 0: raise Invalid( "No nodes configured for cluster `%s`" % cluster) for node, props in properties['nodes'].iteritems(): # check name pattern to conform 
hostnames match = re.search(r'^[a-zA-Z0-9-]*$', node) if not match: raise Invalid( "Invalid name `%s` for node group. A valid node group" " can only consist of letters, digits or the hyphen" " character (`-`)" % (node,)) node_validator(props) if (properties['cloud']['provider'] == 'ec2_boto' and 'vpc' in self.config[cluster]['cloud'] and 'network_ids' not in props): raise Invalid( "Node group `%s/%s` is being used in" " a VPC, so it must specify network_ids." % (cluster, node)) if (properties['cloud']['provider'] == 'ec2_boto' and 'network_ids' in props and 'vpc' not in self.config[cluster]['cloud']): raise Invalid( "Cluster `%s` must specify a VPC to place" " `%s` instances in %s" % (cluster, node, props['network_ids'])) self._post_validate()