def _init_schema(self):
    """Build the voluptuous schema dict used to validate the configuration."""
    # One entry of the `metadata.remote` list: a mandatory URL plus an
    # optional certificate path.
    remote_metadata_entry = {
        Required('url'): Url(),
        'cert': str,
    }
    self._schema = {
        Required('key_file'): str,
        Required('cert_file'): str,
        Required('base_url'): Url(),
        'host': str,
        'port': Any(int, str),
        'debug': bool,
        'https': bool,
        'https_cert_file': str,
        'https_key_file': str,
        'users_file': str,
        'behind_reverse_proxy': bool,
        'can_add_user': bool,
        'endpoints': {
            'single_logout_service': str,
            'single_sign_on_service': str,
        },
        'metadata': {
            'local': All([str], Length(min=0)),
            'remote': All([remote_metadata_entry], Length(min=0)),
        },
    }
def __init__(self):
    """Pre-build voluptuous validators for the primitive shapes used here."""
    self.string_validator = Schema(str)
    self.integer_validator = Schema(int)
    self.list_string_validator = Schema([str])
    self.list_integer_validator = Schema([int])
    # Url()/Email() accept an optional custom error *message* argument.
    # The original passed the `str` type as that message, so validation
    # failures would read "<class 'str'>" instead of a meaningful error.
    self.list_url_validator = Schema([Url()])
    self.url_validator = Schema(Url())
    self.email_validator = Schema(Email())
def __init__(self, configfiles):
    """Read the given INI configuration files and build section schemas.

    :param configfiles: iterable of configuration file paths
    """
    self.configfiles = configfiles
    configparser = RawConfigParser()
    # read() silently skips unreadable files; its return value (the list of
    # parsed files) was previously bound to an unused local.
    configparser.read(self.configfiles)
    self.conf = dict()
    for section in configparser.sections():
        self.conf[section] = dict(configparser.items(section))

    @message("file could not be found")
    def check_file(v):
        # expanduser is idempotent; the original applied it twice.
        f = os.path.expanduser(v)
        if os.path.exists(f):
            return f
        raise Invalid("file could not be found `%s`" % v)

    @message("Unsupported nova API version")
    def nova_api_version(version):
        try:
            # Imported lazily so novaclient is only required when used.
            from novaclient import client, exceptions
            client.get_client_class(version)
            return version
        except exceptions.UnsupportedVersion as ex:
            raise Invalid(
                "Invalid option for `nova_api_version`: %s" % ex)

    # Per-section validation schemas.
    self.schemas = {
        "cloud": Schema(
            {"provider": Any('ec2_boto', 'google', 'openstack'),
             "ec2_url": Url(str),
             "ec2_access_key": All(str, Length(min=1)),
             "ec2_secret_key": All(str, Length(min=1)),
             "ec2_region": All(str, Length(min=1)),
             "auth_url": All(str, Length(min=1)),
             "username": All(str, Length(min=1)),
             "password": All(str, Length(min=1)),
             "tenant_name": All(str, Length(min=1)),
             Optional("region_name"): All(str, Length(min=1)),
             "gce_project_id": All(str, Length(min=1)),
             "gce_client_id": All(str, Length(min=1)),
             "gce_client_secret": All(str, Length(min=1)),
             "nova_client_api": nova_api_version()},
            extra=True),
        "cluster": Schema(
            {"cloud": All(str, Length(min=1)),
             "setup_provider": All(str, Length(min=1)),
             "login": All(str, Length(min=1))},
            required=True, extra=True),
        "setup": Schema(
            {"provider": All(str, Length(min=1)),
             }, required=True, extra=True),
        "login": Schema(
            {"image_user": All(str, Length(min=1)),
             "image_user_sudo": All(str, Length(min=1)),
             "image_sudo": Boolean(str),
             "user_key_name": All(str, Length(min=1)),
             "user_key_private": check_file(),
             "user_key_public": check_file()},
            required=True)
    }
def test_contrib_schema():
    """Contributor payloads expose login, html_url and contribution count."""
    payload = repos.get_contributor_json('bmregner', 'finance_prediction')
    schema = Schema(
        [{
            'login': str,
            'html_url': Url(),
            'contributions': int,
        }],
        extra=REMOVE_EXTRA,
    )
    assert schema(payload)
def test_url_validation_without_host():
    """A scheme-only URL such as 'http://' must be rejected."""
    schema = Schema({"url": Url()})
    try:
        schema({"url": 'http://'})
    except MultipleInvalid as exc:
        assert str(exc) == "expected a URL for dictionary value @ data['url']"
    else:
        assert False, "Did not raise Invalid for empty string url"
def test_url_validation_with_none():
    """None is not accepted by the Url validator."""
    schema = Schema({"url": Url()})
    try:
        schema({"url": None})
    except MultipleInvalid as exc:
        assert str(exc) == "expected a URL for dictionary value @ data['url']"
    else:
        assert False, "Did not raise Invalid for None url"
def check_url(url):
    """Validate *url*: it must be a well-formed URL or a path under *src*.

    :raises Invalid: when the value is neither a URL nor an existing file
    """
    try:
        # Url() returns the validator, which must then be *called* with the
        # value.  The original `Url(url)` only built a validator (treating
        # the url as a custom error message) and never validated anything.
        Url()(url)
    except Invalid:
        # image url is a path
        image_path = os.path.abspath(os.path.join(src, url[1:]))
        if not os.path.exists(image_path):
            raise Invalid(
                "image location '{}' doesn't exist on disk ({})".format(
                    url, image_path))
def test_url_validation_with_empty_string():
    """An empty string is not a valid URL."""
    schema = Schema({"url": Url()})
    try:
        schema({"url": ''})
    except MultipleInvalid as exc:
        assert_equal(str(exc),
                     "expected a URL for dictionary value @ data['url']")
    else:
        assert False, "Did not raise Invalid for empty string url"
def get_data_schema():
    """Build the validation schema for a scores-encoding data payload."""
    # Innermost shape: one student enrollment inside a program.
    enrollment = {
        Required("registration_id"): str,
        Required("first_name", default=''): Any(None, str),
        Required("last_name", default=''): Any(None, str),
        Required("justification"): str,
        Required("score"): str,
        Required("deadline"): str,
    }
    program = {
        Required("deliberation_date"): str,
        Required("acronym"): str,
        Required("address"): {},
        Required("enrollments"): [enrollment],
    }
    scores_responsible = {
        Required("address"): {
            Required("city"): str,
            Required("postal_code"): str,
            Required("location"): str,
        },
        Required("first_name", default=''): Any(None, str),
        Required("last_name", default=''): Any(None, str),
    }
    learning_unit_year = {
        Required("session_number"): int,
        Required("title"): str,
        Required("academic_year"): str,
        Required("acronym"): str,
        Required("decimal_scores"): bool,
        Required("programs"): [program],
        Required("scores_responsible"): scores_responsible,
    }
    return Schema(
        {
            Required("institution"): str,
            Required("link_to_regulation"): Url(),
            Required("publication_date"): str,
            Required("justification_legend"): str,
            Required("tutor_global_id"): str,
            # At least one learning unit year must be present.
            Required("learning_unit_years"): All(
                [learning_unit_year], Length(min=1), extra=True),
        },
        extra=True)
def ImageLocation(v):
    """Validator: accept an URL or a path under '/assets/img/plugins/'."""
    if not isinstance(v, str):
        raise Invalid("image location {!r} is not a string".format(v))
    if not v:
        raise Invalid("image location must be a non empty string")
    if v.startswith("/assets/img/plugins/"):
        # Plugin-relative paths are accepted as-is.
        return
    try:
        Url()(v)
    except Invalid:
        raise Invalid(
            "image location '{}' must either be an URL or a path starting with '/assets/img/plugins/'"
            .format(v))
def test_the_anything_endpoint_3():
    """Test the endpoint that returns a JSON payload."""
    expected_shape = S({
        "args": dict,
        "data": str,
        "files": dict,
        "form": dict,
        "headers": S({str: str}),
        "json": Any(None, str),
        "method": Any("GET", "POST", "PUT", "DELETE"),
        "origin": origin,
        "url": Url(),
    })
    payload = requests.get("https://httpbin.org/anything").json()
    assert payload == expected_shape
def __init__(self, paths):
    """Read configuration from *paths* and build per-section schemas.

    :param paths: locations searched for configuration files
    """
    self.configfiles = self._list_config_files(paths)
    configparser = RawConfigParser()
    # read() silently skips unreadable files; its return value (the list of
    # parsed files) was previously bound to an unused local.
    configparser.read(self.configfiles)
    self.conf = dict()
    for section in configparser.sections():
        self.conf[section] = dict(configparser.items(section))

    self.schemas = {
        "storage": Schema(
            {Optional("storage_path"): All(str),
             Optional("storage_type"): Any('yaml', 'json', 'pickle'),
             }),
        "cloud": Schema(
            {"provider": Any('ec2_boto', 'google', 'openstack', 'azure'),
             "ec2_url": Url(str),
             Optional("ec2_access_key"): All(str, Length(min=1)),
             Optional("ec2_secret_key"): All(str, Length(min=1)),
             "ec2_region": All(str, Length(min=1)),
             "auth_url": All(str, Length(min=1)),
             "username": All(str, Length(min=1)),
             "password": All(str, Length(min=1)),
             "tenant_name": All(str, Length(min=1)),
             Optional("region_name"): All(str, Length(min=1)),
             "gce_project_id": All(str, Length(min=1)),
             "gce_client_id": All(str, Length(min=1)),
             "gce_client_secret": All(str, Length(min=1)),
             "nova_client_api": nova_api_version()},
            extra=True),
        # NOTE(review): the two entries below sit beside the *section*
        # schemas rather than inside the cloud schema; presumably they
        # belong to the azure provider — confirm before moving them, as
        # callers may look them up here.
        "subscription_id": All(str, Length(min=1)),
        "certificate": All(str, Length(min=1)),
        "cluster": Schema(
            {"cloud": All(str, Length(min=1)),
             "setup_provider": All(str, Length(min=1)),
             "login": All(str, Length(min=1)),
             }, required=True, extra=True),
        "setup": Schema(
            {"provider": All(str, Length(min=1)),
             }, required=True, extra=True),
        "login": Schema(
            {"image_user": All(str, Length(min=1)),
             "image_user_sudo": All(str, Length(min=1)),
             "image_sudo": Boolean(str),
             "user_key_name": All(str, Length(min=1)),
             "user_key_private": can_read_file(),
             "user_key_public": can_read_file()},
            required=True)
    }
def test_repo_schema():
    """Repository payloads carry the expected field types."""
    payload = repos.get_user_json('bmregner')

    def date_format(fmt='%Y-%m-%dT%XZ'):
        # Build a validator that parses timestamps with the given format.
        return lambda value: datetime.strptime(value, fmt)

    schema = Schema(
        [{
            'full_name': str,
            'updated_at': date_format(),
            'url': Url(),
            'size': int,
            'private': bool,
        }],
        extra=REMOVE_EXTRA,
    )
    assert schema(payload)
class HTTPHook(Hook):
    """Hook that forwards anomaly start/end events to an HTTP endpoint."""

    CONFIG_SCHEMA = Schema({
        Required('url'): Url(),
        Optional('method', default='POST'): Any('POST', 'PUT', 'GET'),
    })

    def send_request(self, data):
        """Send *data* as JSON to the configured URL; log failures."""
        method = self.config['method']
        target = self.config['url']
        try:
            requests.request(method, target, timeout=1, json=data)
        except requests.exceptions.RequestException as exn:
            logging.error("cannot notify %s: %s", target, exn)

    def on_anomaly_start(self, model, dt, score, predicted, observed,
                         anomalies, *args, **kwargs):
        """Notify the endpoint that an anomaly started."""
        self.send_request({
            'type': 'anomaly_start',
            'model': model,
            'timestamp': dt.timestamp(),
            'score': score,
            'predicted': predicted,
            'observed': observed,
            'anomalies': anomalies,
        })

    def on_anomaly_end(self, model, dt, score, *args, **kwargs):
        """Notify the endpoint that an anomaly ended."""
        self.send_request({
            'type': 'anomaly_end',
            'model': model,
            'timestamp': dt.timestamp(),
            'score': score,
        })
def validate(self):
    """Validates the given configuration :py:attr:`self.config` to comply
    with elasticluster. As well all types are converted to the expected
    format if possible.

    :raises: :py:class:`voluptuous.MultipleInvalid` if multiple properties
             are not compliant
    :raises: :py:class:`voluptuous.Invalid` if one property is invalid
    """
    self._pre_validate()

    # custom validators
    @message("file could not be found")
    def check_file(v):
        # expanduser is idempotent; the original applied it twice.
        f = os.path.expanduser(v)
        if os.path.exists(f):
            return f
        raise Invalid("file could not be found `%s`" % v)

    # schema to validate all cluster properties
    schema = {"cluster": {"cloud": All(str, Length(min=1)),
                          "setup_provider": All(str, Length(min=1)),
                          "login": All(str, Length(min=1))},
              "setup": {"provider": All(str, Length(min=1)),
                        Optional("playbook_path"): check_file()},
              "login": {"image_user": All(str, Length(min=1)),
                        "image_user_sudo": All(str, Length(min=1)),
                        "image_sudo": Boolean(str),
                        "user_key_name": All(str, Length(min=1)),
                        "user_key_private": check_file(),
                        "user_key_public": check_file()}}

    # provider-specific cloud schemas
    cloud_schema_ec2 = {"provider": 'ec2_boto',
                        "ec2_url": Url(str),
                        "ec2_access_key": All(str, Length(min=1)),
                        "ec2_secret_key": All(str, Length(min=1)),
                        "ec2_region": All(str, Length(min=1)),
                        Optional("request_floating_ip"): Boolean(str)}
    cloud_schema_gce = {"provider": 'google',
                        "gce_client_id": All(str, Length(min=1)),
                        "gce_client_secret": All(str, Length(min=1)),
                        "gce_project_id": All(str, Length(min=1))}
    cloud_schema_openstack = {"provider": 'openstack',
                              "auth_url": All(str, Length(min=1)),
                              "username": All(str, Length(min=1)),
                              "password": All(str, Length(min=1)),
                              "project_name": All(str, Length(min=1)),
                              Optional("request_floating_ip"): Boolean(str),
                              Optional("region_name"): All(str,
                                                           Length(min=1))}
    node_schema = {
        "flavor": All(str, Length(min=1)),
        "image_id": All(str, Length(min=1)),
        "security_group": All(str, Length(min=1))
    }

    # validation
    validator = Schema(schema, required=True, extra=True)
    validator_node = Schema(node_schema, required=True, extra=True)
    ec2_validator = Schema(cloud_schema_ec2, required=True, extra=False)
    gce_validator = Schema(cloud_schema_gce, required=True, extra=False)
    openstack_validator = Schema(cloud_schema_openstack,
                                 required=True, extra=False)

    if not self.config:
        raise Invalid("No clusters found in configuration.")

    # .items() instead of the Python-2-only .iteritems(); behavior is the
    # same and the code also runs on Python 3.
    for cluster, properties in self.config.items():
        self.config[cluster] = validator(properties)

        if 'provider' not in properties['cloud']:
            raise Invalid(
                "Missing `provider` option in cluster `%s`" % cluster)

        cloud_props = properties['cloud']
        if properties['cloud']['provider'] == "ec2_boto":
            self.config[cluster]['cloud'] = ec2_validator(cloud_props)
        elif properties['cloud']['provider'] == "google":
            self.config[cluster]['cloud'] = gce_validator(cloud_props)
        elif properties['cloud']['provider'] == "openstack":
            self.config[cluster]['cloud'] = openstack_validator(cloud_props)

        if 'nodes' not in properties or len(properties['nodes']) == 0:
            raise Invalid(
                "No nodes configured for cluster `%s`" % cluster)

        for node, props in properties['nodes'].items():
            # check name pattern to conform hostnames
            match = re.search(r'^[a-zA-Z0-9-]*$', node)
            if not match:
                raise Invalid(
                    "Invalid name `%s` for node group. A valid node group "
                    "can only consist of letters, digits or the hyphens "
                    "character (`-`)" % node)
            validator_node(props)

    self._post_validate()
def test_url_validation():
    """ test with valid URL """
    schema = Schema({"url": Url()})
    out_ = schema({"url": "http://example.com/"})
    # The original `assert 'http://example.com/', out_.get("url")` asserted
    # a non-empty string literal (always true) with the value as the assert
    # message, so the test could never fail.  Compare the values instead.
    assert out_.get("url") == 'http://example.com/'
class Warp10Bucket(Bucket):
    """
    Warp10 bucket
    """

    # Extend the generic bucket schema with Warp10 connection settings.
    SCHEMA = Bucket.SCHEMA.extend({
        Optional('url', default='http://localhost:8080'): Url(),
        Required('read_token'): str,
        Required('write_token'): str,
        Optional('global_prefix', default=None): Any(None, str),
    })

    def __init__(self, cfg):
        """Create the bucket and its underlying Warp10 client."""
        # Force the bucket type before delegating to the base class.
        cfg['type'] = 'warp10'
        super().__init__(cfg)
        self.read_token = cfg['read_token']
        self.write_token = cfg['write_token']
        self.global_prefix = cfg.get('global_prefix')
        self.warp10 = warp10client.Warp10Client(
            warp10_api_url=cfg['url'],
            read_token=self.read_token,
            write_token=self.write_token,
        )

    def build_name(self, name):
        """Return *name* prefixed with the configured global prefix, if any."""
        return "{}.{}".format(self.global_prefix, name) if self.global_prefix \
            else name

    def build_selector(self, selector, is_regexp=False):
        """Build a Warp10 class selector ('~' marks a regexp selector)."""
        selector = self.build_name(selector)
        if is_regexp:
            selector = "~" + selector
        return selector

    @catch_query_error
    def drop(self, tags=None, **kwargs):
        """
        Delete database
        """
        self.warp10.delete({
            'name': self.build_selector(".*", is_regexp=True),
            'tags': tags or {},
        })

    def insert_data(self, data):
        # Non-timestamped inserts are unsupported by design.
        raise NotImplementedError("Warp10 is a pure time-series database")

    def insert_times_data(self, ts, data, tags=None, *args, **kwargs):
        """
        Insert data
        """
        # Warp10 expects timestamps in microseconds.
        ts_us = make_ts(ts) * 1e6
        if tags:
            check_tags(tags)
        # One metric is enqueued per (key, value) pair in `data`.
        for key, value in data.items():
            metric = {
                'name': self.build_selector(key),
                'value': value,
                'position': {
                    'longitude': None,
                    'latitude': None,
                    'elevation': None,
                    'timestamp': ts_us,
                },
                'tags': tags or {},
            }
            self.enqueue(metric)

    @catch_query_error
    def send_bulk(self, metrics):
        """
        Send data to Warp10
        """
        self.warp10.set(metrics)

    def build_fetch(self, feature, from_str, to_str, tags=None):
        """Build the WarpScript FETCH statement for one feature."""
        tags = {} if tags is None else dict(tags)
        if feature.match_all:
            # Fold the feature's match_all constraints into the tag set.
            for tag in feature.match_all:
                k, v = tag['tag'], tag['value']
                check_tag(k, v)
                tags[k] = v
        tags_str = build_tags(tags)
        return "[\n'{}'\n'{}'\n{}\n'{}'\n'{}'\n]\nFETCH".format(
            self.read_token,
            self.build_selector(feature.field),
            tags_str,
            from_str,
            to_str,
        )

    def build_multi_fetch(self, bucket_interval, features, from_str, to_str,
                          tags=None):
        """Build one BUCKETIZE-wrapped FETCH per feature, joined in a list."""
        bucket_span = int(bucket_interval * 1e6)
        scripts = [
            "[\n{}\n{}\n0\n{}\n0\n]\nBUCKETIZE".format(
                self.build_fetch(
                    feature,
                    from_str,
                    to_str,
                    tags,
                ),
                metric_to_bucketizer(feature.metric),
                bucket_span,
            )
            for feature in features
        ]
        return "[\n{}\n]".format("\n".join(scripts))

    @catch_query_error
    def get_times_data(self, bucket_interval, features, from_date, to_date,
                       tags=None, **kwargs):
        """Fetch bucketized values for *features* over the given date range.

        Returns a list of (offset_seconds, values_per_feature, timestamp)
        tuples; raises errors.NoData when nothing came back.
        """
        period = DateRange.build_date_range(from_date, to_date,
                                            bucket_interval)
        nb_buckets = int((period.to_ts - period.from_ts) / bucket_interval)
        # One row per bucket, one column per feature, NaN where no value.
        buckets = np.full((nb_buckets, len(features)), np.nan, dtype=float)
        script = self.build_multi_fetch(
            bucket_interval,
            features,
            period.from_str,
            period.to_str,
            tags=tags,
        )
        raw = self.warp10.exec(script)
        data = json.loads(raw)
        from_us = period.from_ts * 1e6
        to_us = period.to_ts * 1e6
        bucket_interval_us = int(bucket_interval * 1e6)
        has_data = False
        for i, item in enumerate(data[0]):
            if len(item) == 0:
                continue
            item = item[0]
            values = item['v']
            for ts_us, value in values:
                # XXX: Warp10 buckets are labeled with the right timestamp
                # but Loud ML uses the left one.
                ts_us -= bucket_interval_us
                if ts_us < from_us or ts_us >= to_us:
                    # XXX Sometimes, Warp10 returns extra buckets, skip them
                    continue
                j = math.floor((ts_us - from_us) / bucket_interval_us)
                buckets[j][i] = value
                has_data = True
        if not has_data:
            raise errors.NoData()
        result = []
        from_ts = ts = from_us / 1e6
        for bucket in buckets:
            result.append(((ts - from_ts), list(bucket), ts))
            ts += bucket_interval
        return result

    def save_timeseries_prediction(self, prediction, tags=None):
        """Write the buckets of *prediction* back into Warp10."""
        prefix = prediction.model.name
        logging.info("saving prediction to '%s'", self.build_name(prefix))
        for bucket in prediction.format_buckets():
            data = bucket['predicted']
            bucket_tags = tags or {}
            stats = bucket.get('stats', None)
            if stats is not None:
                data['score'] = float(stats.get('score'))
                bucket_tags['is_anomaly'] = stats.get('anomaly', False)
            # XXX As Warp10 uses the end date to identify buckets, use the
            # same convention
            ts = bucket['timestamp'] + prediction.model.bucket_interval
            self.insert_times_data(
                ts=ts,
                tags=bucket_tags,
                data={"{}.{}".format(prefix, k): v
                      for k, v in data.items()},
            )
        self.commit()
def validate(self):
    """Validates the given configuration :py:attr:`self.config` to comply
    with elasticluster. As well all types are converted to the expected
    format if possible.

    :raises: :py:class:`voluptuous.MultipleInvalid` if multiple properties
             are not compliant
    :raises: :py:class:`voluptuous.Invalid` if one property is invalid
    """
    self._pre_validate()

    # custom validators
    @message("file could not be found")
    def check_file(v):
        # expanduser is idempotent; the original applied it twice.
        f = os.path.expanduser(v)
        if os.path.exists(f):
            return f
        raise Invalid("file could not be found `%s`" % v)

    @message("Unsupported nova API version")
    def nova_api_version(version):
        try:
            # Imported lazily so novaclient is only required when used.
            from novaclient import client, exceptions
            client.get_client_class(version)
            return version
        except exceptions.UnsupportedVersion as ex:
            raise Invalid("Invalid option for `nova_api_version`: %s" % ex)

    # schema to validate all cluster properties
    schema = {
        "cluster": {
            "cloud": All(str, Length(min=1)),
            "setup_provider": All(str, Length(min=1)),
            "login": All(str, Length(min=1)),
        },
        "setup": {
            "provider": All(str, Length(min=1)),
            Optional("playbook_path"): check_file(),
            Optional("ssh_pipelining"): Boolean(str),
        },
        "login": {
            "image_user": All(str, Length(min=1)),
            "image_user_sudo": All(str, Length(min=1)),
            "image_sudo": Boolean(str),
            "user_key_name": All(str, Length(min=1)),
            "user_key_private": check_file(),
            "user_key_public": check_file(),
        },
    }

    # provider-specific cloud schemas
    cloud_schema_ec2 = {
        "provider": 'ec2_boto',
        "ec2_url": Url(str),
        "ec2_access_key": All(str, Length(min=1)),
        "ec2_secret_key": All(str, Length(min=1)),
        "ec2_region": All(str, Length(min=1)),
        Optional("request_floating_ip"): Boolean(str),
        Optional("vpc"): All(str, Length(min=1)),
    }
    cloud_schema_gce = {
        "provider": 'google',
        "gce_client_id": All(str, Length(min=1)),
        "gce_client_secret": All(str, Length(min=1)),
        "gce_project_id": All(str, Length(min=1)),
        Optional("noauth_local_webserver"): Boolean(str),
        Optional("zone"): All(str, Length(min=1)),
    }
    cloud_schema_openstack = {
        "provider": 'openstack',
        "auth_url": All(str, Length(min=1)),
        "username": All(str, Length(min=1)),
        "password": All(str, Length(min=1)),
        "project_name": All(str, Length(min=1)),
        Optional("request_floating_ip"): Boolean(str),
        Optional("region_name"): All(str, Length(min=1)),
        Optional("nova_api_version"): nova_api_version(),
    }
    node_schema = {
        "flavor": All(str, Length(min=1)),
        "image_id": All(str, Length(min=1)),
        "security_group": All(str, Length(min=1)),
        Optional("network_ids"): All(str, Length(min=1)),
    }

    # validation
    validator = Schema(schema, required=True, extra=True)
    node_validator = Schema(node_schema, required=True, extra=True)
    ec2_validator = Schema(cloud_schema_ec2, required=True, extra=False)
    gce_validator = Schema(cloud_schema_gce, required=True, extra=False)
    openstack_validator = Schema(cloud_schema_openstack, required=True,
                                 extra=False)

    if not self.config:
        raise Invalid("No clusters found in configuration.")

    for cluster, properties in self.config.items():
        self.config[cluster] = validator(properties)

        if 'provider' not in properties['cloud']:
            raise Invalid("Missing `provider` option in cluster `%s`" %
                          cluster)
        try:
            cloud_props = properties['cloud']
            if properties['cloud']['provider'] == "ec2_boto":
                self.config[cluster]['cloud'] = ec2_validator(cloud_props)
            elif properties['cloud']['provider'] == "google":
                self.config[cluster]['cloud'] = gce_validator(cloud_props)
            elif properties['cloud']['provider'] == "openstack":
                self.config[cluster]['cloud'] = openstack_validator(
                    cloud_props)
        except MultipleInvalid as ex:
            raise Invalid(
                "Invalid configuration for cloud section `cloud/%s`: %s" % (
                    properties['cluster']['cloud'],
                    str.join(", ", [str(i) for i in ex.errors])))

        if 'nodes' not in properties or len(properties['nodes']) == 0:
            raise Invalid("No nodes configured for cluster `%s`" % cluster)

        for node, props in properties['nodes'].items():
            # check name pattern to conform hostnames
            match = re.search(r'^[a-zA-Z0-9-]*$', node)
            if not match:
                raise Invalid(
                    "Invalid name `%s` for node group. A valid node group"
                    " can only consist of letters, digits or the hyphen"
                    " character (`-`)" % (node, ))
            node_validator(props)

            # EC2 VPC usage and per-node network_ids must be consistent.
            if (properties['cloud']['provider'] == 'ec2_boto'
                    and 'vpc' in self.config[cluster]['cloud']
                    and 'network_ids' not in props):
                raise Invalid("Node group `%s/%s` is being used in"
                              " a VPC, so it must specify network_ids."
                              % (cluster, node))

            if (properties['cloud']['provider'] == 'ec2_boto'
                    and 'network_ids' in props
                    and 'vpc' not in self.config[cluster]['cloud']):
                raise Invalid("Cluster `%s` must specify a VPC to place"
                              " `%s` instances in %s"
                              % (cluster, node, props['network_ids']))

    self._post_validate()
"""A GitHub user schema.""" from voluptuous import Required from voluptuous import Url # pylint: disable=no-value-for-parameter LICENSE_SCHEMA = { Required("key"): str, Required("name"): str, Required("spdx_id"): str, Required("url"): Url() }
def get_schema():
    """Build the voluptuous schema used to validate the configuration.

    Provisioner-specific rules are injected dynamically from the registered
    Provisioner classes.
    """
    _top_level_and_containers_common_options = {
        'environment': {
            Extra: Coerce(str)
        },
        'hostnames': [
            Hostname(),
        ],
        'image': str,
        'lxc_config': {
            Extra: str
        },
        'mode': In([
            'local',
            'pull',
        ]),
        'privileged': bool,
        'profiles': [
            str,
        ],
        'protocol': In([
            'lxd',
            'simplestreams',
        ]),
        'provisioning': [],  # will be set dynamically using provisioner classes...
        'server': Url(),
        'shares': [{
            # The existence of the source directory will be checked!
            'source': IsDir(),
            'dest': str,
            'set_host_acl': bool,
        }],
        'shell': {
            'user': str,
            'home': str,
        },
        'users': [{
            # Usernames max length is set 32 characters according to
            # useradd's man page.
            Required('name'): All(str, Length(max=32)),
            'home': str,
            'password': str,
        }],
    }

    def _check_provisioner_config(config):
        """Validate one provisioning entry against its provisioner schema."""
        provisioners = Provisioner.provisioners.values()

        # Check if 'type' is correctly defined
        Schema(
            {
                Required('type'): Any(*[provisioner.name
                                        for provisioner in provisioners])
            },
            extra=ALLOW_EXTRA)(config)

        # Check if the detected provisioner's schema is fully satisfied
        c = config.copy()
        name = c.pop('type')
        detected_provisioner = next(
            provisioner for provisioner in provisioners
            if provisioner.name == name)
        validated = Schema(detected_provisioner.schema)(c)
        validated['type'] = name
        return validated

    # Inserts provisioner specific schema rules in the global schema dict.
    _top_level_and_containers_common_options['provisioning'] = [
        All(_check_provisioner_config)
    ]

    _container_options = {
        Required('name'): LXDIdentifier(),
    }
    _container_options.update(_top_level_and_containers_common_options)

    _lxdock_options = {
        Required('name'): LXDIdentifier(),
        'containers': [
            _container_options,
        ],
    }
    _lxdock_options.update(_top_level_and_containers_common_options)

    return Schema(_lxdock_options)
from voluptuous import Url,Schema,MultipleInvalid

# Validator that accepts any well-formed URL.
schema = Schema(Url())
try:
    # A valid URL passes cleanly, so no exception is expected here.
    schema('https://www.baidu.com/')
    #raise AssertionError('MultipleInvalid not raised')
except MultipleInvalid as e:
    print(e.errors)
class Service:
    """Representation of DID Document Services."""

    # A service must carry an id (DID URL), a type, and an endpoint that is
    # either a DID URL or a plain URL; unknown extra keys are preserved.
    _validator = Schema(
        {
            "id": All(str, DIDUrl.validate),
            "type": str,
            "serviceEndpoint": Switch(DIDUrl.validate, Url()),
        },
        extra=ALLOW_EXTRA,
        required=True,
    )

    def __init__(self, id_: DIDUrl, type_: str, endpoint: str, **extra):
        """Initialize Service."""
        self._id = id_
        self._type = type_
        self._endpoint = endpoint
        self._extra = extra

    @property
    def id(self):
        """Return id."""
        return self._id

    @property
    def type(self):
        """Return type."""
        return self._type

    @property
    def endpoint(self):
        """Return endpoint."""
        return self._endpoint

    @property
    def extra(self):
        """Return extra."""
        return self._extra

    def serialize(self):
        """Return serialized representation of Service."""
        return {
            "id": str(self.id),
            "type": self.type,
            "serviceEndpoint": self.endpoint,
            **self.extra,
        }

    @classmethod
    @wrap_validation_error(ServiceValidationError,
                           message="Failed to validate service")
    def validate(cls, value: dict):
        """Validate object against service."""
        return cls._validator(value)

    @classmethod
    @wrap_validation_error(
        ServiceValidationError, message="Failed to deserialize service"
    )
    def deserialize(cls, value: dict):
        """Deserialize into Service."""
        value = cls.validate(value)
        # Map serialized key names onto constructor parameter names.
        deserializer = Schema(
            {
                Into("id", "id_"): DIDUrl.parse,
                Into("type", "type_"): str,
                Into("serviceEndpoint", "endpoint"): str,
            },
            extra=ALLOW_EXTRA,
        )
        value = deserializer(value)
        return cls(**value)
'rpm', 'rpmsphere', 'siduction', 'slitaz_next', 'stackage', 'trisquel', 'ubuntu', 'unitedrpms', 'whonix', 'windows', ] packagelinks = [ { Required('type'): str, Required('url'): Url(), } ] schemas = { 'repos': [ { Required('name'): str, 'sortname': str, 'singular': str, Required('type'): Any('repository', 'site', 'modules'), Required('desc'): str, 'statsgroup': str, Required('family'): Any(*families), 'ruleset': Any(Any(*rulesets), [Any(*rulesets)]), # XXX: make required 'color': str,
SUMMARY = S(list)

CODE_REPOSITORY = S({
    "type": str,
    "url": str
})  # Url()})  # Url does not seem to support git@...

# One package/component detail record; every field is optional.
DETAIL = S({
    Optional("code_repository"): CODE_REPOSITORY,
    Optional("declared_license"): str,
    Optional("declared_licenses"): [str],
    Optional("dependencies"): [str],
    Optional("description"): Any(None, str),
    Optional("devel_dependencies"): [str],
    Optional("ecosystem"): str,
    Optional("homepage"): Url(),
    Optional("name"): str,
    Optional("version"): str
})

DETAILS = S([DETAIL])

# metadata schema for component (not package)
COMPONENT_METADATA_SCHEMA = S({
    "_audit": Any(None, AUDIT),
    Optional("_release"): str,
    "schema": SCHEMA,
    "status": STATUS,
    "summary": SUMMARY,
    "details": DETAILS
})
'local', 'pull', ]), 'privileged': bool, 'profiles': [ str, ], 'protocol': In([ 'lxd', 'simplestreams', ]), 'provisioning': [], # will be set dynamically using provisioner classes... 'server': Url(), 'shares': [{ # The existence of the source directory will be checked! 'source': IsDir(), 'dest': str, 'set_host_acl': bool, }], 'shell': { 'user': str, 'home': str, }, 'users': [{ # Usernames max length is set 32 characters according to useradd's man page. Required('name'): All(str, Length(max=32)), 'home':
"""Issue milestone schema.""" from voluptuous import Any from voluptuous import Required from voluptuous import Schema from voluptuous import Url from githubcap.enums import State from .user import USER_SCHEMA # pylint: disable=no-value-for-parameter MILESTONE_SCHEMA = Schema({ Required("url"): Url(), Required("html_url"): Url(), Required("labels_url"): Url(), Required("id"): int, Required("number"): int, Required("state"): Schema(Any(*State.all_values())), Required("title"): str, Required("description"): Schema(Any(str, None)), Required("creator"): USER_SCHEMA, Required("open_issues"): int, Required("closed_issues"): int, Required("created_at"): str, Required("updated_at"): Schema(Any(str, None)), Required("closed_at"): Schema(Any(str, None)), Required("due_on"): Schema(Any(str, None)) })
import yaml
import gzip
import lzma
from voluptuous import Schema, Required, All, Any, Length, Range, Match, Url
from optparse import OptionParser
import multiprocessing as mp

# Validation schema for the header document of a DEP-11 metadata file.
schema_header = Schema({
    Required('File'): All(str, 'DEP-11', msg="Must be \"DEP-11\""),
    Required('Origin'): All(str, Length(min=1)),
    Required('Version'): All(str, Match(r'(\d+\.?)+$'),
                             msg="Must be a valid version number"),
    Required('MediaBaseUrl'): All(str, Url()),
    'Time': All(str),
    'Priority': All(int),
})

# Localized string mapping: must at least contain the unlocalized 'C' key;
# any other key maps a locale name to its translation.
schema_translated = Schema(
    {
        Required('C'): All(str, Length(min=1),
                           msg="Must have an unlocalized 'C' key"),
        dict: All(str, Length(min=1)),
    },
    extra=True)
def validate(self, request):
    """Validate a SAML AuthnRequest or LogoutRequest against SPID rules.

    Deserializes the raw XML (via ``saml_to_dict``) into a nested
    ``{'attrs': ..., 'children': ..., 'text': ...}`` structure, builds a
    voluptuous schema tree mirroring the expected request layout, and
    validates the data against it.

    :param request: object exposing the raw XML in ``request.saml_request``.
    :raises UnknownEntityIDError: if the <Issuer> element is missing or
        names a Service Provider not present in the registry.
    :raises SPIDValidationError: on schema violations; carries one
        ``ValidationDetail`` per individual error.
    """
    xmlstr = request.saml_request
    data = saml_to_dict(xmlstr)
    # The root tag (and thus the schema applied) depends on the action
    # this validator was configured for.
    if self._action == 'login':
        req_type = 'AuthnRequest'
    elif self._action == 'logout':
        req_type = 'LogoutRequest'
    # Extract the <Issuer> text from the deserialized tree.
    issuer_name = data.get(
        '{urn:oasis:names:tc:SAML:2.0:protocol}%s' % (req_type),
        {}).get('children', {}).get(
        '{urn:oasis:names:tc:SAML:2.0:assertion}Issuer',
        {}).get('text')
    if issuer_name is None:
        raise UnknownEntityIDError(
            'Issuer non presente nella {}'.format(req_type))
    if issuer_name and issuer_name not in self._registry.service_providers:
        raise UnknownEntityIDError(
            'L\'entity ID "{}" indicato nell\'elemento <Issuer> non corrisponde a nessun Service Provider registrato in questo Identity Provider di test.'
            .format(issuer_name))
    sp_metadata = self._registry.get(issuer_name)
    # Collect the service indexes / URLs declared in the SP metadata so
    # the request's references can be checked against them below.
    if sp_metadata is not None:
        atcss = sp_metadata.attribute_consuming_services
        attribute_consuming_service_indexes = [
            str(el.get('attrs').get('index')) for el in atcss
            if 'index' in el.get('attrs', {})
        ]
        ascss = sp_metadata.assertion_consumer_services
        assertion_consumer_service_indexes = [
            str(el.get('index')) for el in ascss
        ]
        assertion_consumer_service_urls = [
            str(el.get('Location')) for el in ascss
        ]
    else:
        attribute_consuming_service_indexes = []
        assertion_consumer_service_indexes = []
        assertion_consumer_service_urls = []
    entity_id = self._config.entity_id

    # --- sub-schemas for individual SAML elements -----------------------
    # <Issuer>: must carry the entity NameID format and a URI qualifier.
    issuer = Schema(
        {
            'attrs': {
                'Format': Equal(
                    NAMEID_FORMAT_ENTITY,
                    msg=DEFAULT_VALUE_ERROR.format(NAMEID_FORMAT_ENTITY)),
                'NameQualifier': Any(Url(), Match(r'^urn:'),
                                     msg="Invalid URI"),
            },
            'children': {},
            'text': str,
        },
        required=True,
    )
    # <NameID> (logout): transient format is mandatory per SPID rules.
    name_id = Schema(
        {
            'attrs': {
                'NameQualifier': str,
                'Format': Equal(NAMEID_FORMAT_TRANSIENT,
                                msg=DEFAULT_VALUE_ERROR.format(
                                    NAMEID_FORMAT_TRANSIENT)),
            },
            'children': {},
            'text': str
        },
        required=True,
    )
    # <NameIDPolicy> (login): transient format, optional SPNameQualifier.
    name_id_policy = Schema(
        {
            'attrs': {
                'Format': Equal(NAMEID_FORMAT_TRANSIENT,
                                msg=DEFAULT_VALUE_ERROR.format(
                                    NAMEID_FORMAT_TRANSIENT)),
                Optional('SPNameQualifier'): str,
            },
            'children': {},
            'text': None,
        },
        required=True,
    )
    # <Conditions>: both bounds must be well-formed UTC timestamps.
    conditions = Schema(
        {
            'attrs': {
                'NotBefore': All(str, _check_utc_date),
                'NotOnOrAfter': All(str, _check_utc_date),
            },
            'children': {},
            'text': None,
        },
        required=True,
    )
    # <AuthnContextClassRef>: text must be one of the SPID levels.
    authn_context_class_ref = Schema(
        {
            'attrs': {},
            'children': {},
            'text': All(
                str,
                In(SPID_LEVELS,
                   msg=DEFAULT_LIST_VALUE_ERROR.format(
                       ', '.join(SPID_LEVELS))))
        },
        required=True,
    )
    requested_authn_context = Schema(
        {
            'attrs': {
                'Comparison': str
            },
            'children': {
                '{%s}AuthnContextClassRef' % (ASSERTION):
                    authn_context_class_ref
            },
            'text': None
        },
        required=True,
    )
    # <Scoping>: SPID requires ProxyCount to be exactly '0'.
    scoping = Schema(
        {
            'attrs': {
                'ProxyCount': Equal('0', msg=DEFAULT_VALUE_ERROR.format('0'))
            },
            'children': {},
            'text': None
        },
        required=True,
    )
    # <Signature>: structure only; cryptographic checks happen elsewhere.
    signature = Schema(
        {
            'attrs': dict,
            'children': dict,
            'text': None
        },
        required=True,
    )
    subject = Schema(
        {
            'attrs': {
                'Format': Equal(
                    NAMEID_FORMAT_ENTITY,
                    msg=DEFAULT_VALUE_ERROR.format(NAMEID_FORMAT_ENTITY)),
                'NameQualifier': str
            },
            'children': {},
            'text': None
        },
        required=True,
    )

    # LOGIN
    def check_assertion_consumer_service(attrs):
        # Cross-field rule: exactly one of
        #   [AssertionConsumerServiceURL + ProtocolBinding]  or
        #   [AssertionConsumerServiceIndex]
        # must be present, and its value(s) must match the SP metadata.
        keys = attrs.keys()
        if ('AssertionConsumerServiceURL' in keys and
                'ProtocolBinding' in keys and
                'AssertionConsumerServiceIndex' not in keys):
            _errors = []
            if attrs['ProtocolBinding'] != BINDING_HTTP_POST:
                _errors.append(
                    Invalid(DEFAULT_VALUE_ERROR.format(BINDING_HTTP_POST),
                            path=['ProtocolBinding']))
            if attrs[
                    'AssertionConsumerServiceURL'] not in assertion_consumer_service_urls:
                _errors.append(
                    Invalid(DEFAULT_VALUE_ERROR.format(
                        assertion_consumer_service_urls),
                        path=['AssertionConsumerServiceURL']))
            if _errors:
                raise MultipleInvalid(errors=_errors)
            return attrs
        elif ('AssertionConsumerServiceURL' not in keys and
                'ProtocolBinding' not in keys and
                'AssertionConsumerServiceIndex' in keys):
            if attrs[
                    'AssertionConsumerServiceIndex'] not in assertion_consumer_service_indexes:
                raise Invalid(DEFAULT_LIST_VALUE_ERROR.format(
                    ', '.join(assertion_consumer_service_indexes)),
                    path=['AssertionConsumerServiceIndex'])
            return attrs
        else:
            raise Invalid(
                'Uno e uno solo uno tra gli attributi o gruppi di attributi devono essere presenti: '
                '[AssertionConsumerServiceIndex, [AssertionConsumerServiceUrl, ProtocolBinding]]'
            )

    # AuthnRequest attributes, plus the cross-field check above.
    authnrequest_attr_schema = Schema(All(
        {
            'ID': str,
            'Version': Equal('2.0', msg=DEFAULT_VALUE_ERROR.format('2.0')),
            'IssueInstant': All(str, _check_utc_date,
                                self._check_date_in_range),
            'Destination': Equal(
                entity_id, msg=DEFAULT_VALUE_ERROR.format(entity_id)),
            Optional('ForceAuthn'): str,
            Optional('AttributeConsumingServiceIndex'):
                In(attribute_consuming_service_indexes,
                   msg=DEFAULT_LIST_VALUE_ERROR.format(
                       ', '.join(attribute_consuming_service_indexes))),
            Optional('AssertionConsumerServiceIndex'): str,
            Optional('AssertionConsumerServiceURL'): str,
            Optional('ProtocolBinding'): str,
        },
        check_assertion_consumer_service,
    ), required=True)

    AUTHNREQUEST_TAG = '{%s}AuthnRequest' % (PROTOCOL)
    authnrequest_schema = {
        AUTHNREQUEST_TAG: {
            'attrs': authnrequest_attr_schema,
            'children': Schema(
                {
                    Optional('{%s}Subject' % (ASSERTION)): subject,
                    '{%s}Issuer' % (ASSERTION): issuer,
                    '{%s}NameIDPolicy' % (PROTOCOL): name_id_policy,
                    Optional('{%s}Conditions' % (ASSERTION)): conditions,
                    '{%s}RequestedAuthnContext' % (PROTOCOL):
                        requested_authn_context,
                    Optional('{%s}Scoping' % (PROTOCOL)): scoping,
                },
                required=True,
            ),
            'text': None
        }
    }

    # LOGOUT
    LOGOUTREQUEST_TAG = '{%s}LogoutRequest' % (PROTOCOL)
    logoutrequest_attr_schema = Schema(All({
        'ID': str,
        'Version': Equal('2.0', msg=DEFAULT_VALUE_ERROR.format('2.0')),
        'IssueInstant': All(str, _check_utc_date, self._check_date_in_range),
        'Destination': Equal(
            entity_id, msg=DEFAULT_VALUE_ERROR.format(entity_id)),
        Optional('NotOnOrAfter'): All(str, _check_utc_date,
                                      self._check_date_not_expired),
        Optional('Reason'): str,
    }), required=True)
    logoutrequest_schema = {
        LOGOUTREQUEST_TAG: {
            'attrs': logoutrequest_attr_schema,
            'children': Schema(
                {
                    '{%s}Issuer' % (ASSERTION): issuer,
                    '{%s}NameID' % (ASSERTION): name_id,
                    '{%s}SessionIndex' % (PROTOCOL): dict,
                },
                required=True),
            'text': None
        }
    }

    # HTTP-POST requests must be signed: graft the signature sub-schema
    # onto the relevant 'children' schema (Schema.extend returns a new
    # Schema, hence the reassignment).
    if self._binding == BINDING_HTTP_POST:
        if self._action == 'login':
            # Add signature schema
            _new_sub_schema = authnrequest_schema[AUTHNREQUEST_TAG][
                'children'].extend(
                {'{%s}Signature' % (SIGNATURE): signature})
            authnrequest_schema[AUTHNREQUEST_TAG][
                'children'] = _new_sub_schema
        if self._action == 'logout':
            _new_sub_schema = logoutrequest_schema[LOGOUTREQUEST_TAG][
                'children'].extend(
                {'{%s}Signature' % (SIGNATURE): signature})
            logoutrequest_schema[LOGOUTREQUEST_TAG][
                'children'] = _new_sub_schema

    authn_request = Schema(
        authnrequest_schema,
        required=True,
    )
    logout_request = Schema(
        logoutrequest_schema,
        required=True,
    )

    saml_schema = None
    if self._action == 'login':
        saml_schema = authn_request
    elif self._action == 'logout':
        saml_schema = logout_request
    errors = []
    try:
        saml_schema(data)
    except MultipleInvalid as e:
        # Translate each voluptuous error into a ValidationDetail with a
        # human-readable path and, when available, the offending value.
        for err in e.errors:
            _paths = []
            _attr = None
            for idx, _path in enumerate(err.path):
                if _path != 'children':
                    if _path == 'attrs':
                        # the component after 'attrs' is the attribute name
                        try:
                            _attr = err.path[(idx + 1)]
                        except IndexError:
                            _attr = ''
                        break
                    # strip namespaces for better readability
                    _paths.append(_strip_namespaces(str(_path)))
            path = '/'.join(_paths)
            if _attr is not None:
                path += " - attribute: " + _attr
            # find value to show (iterate multiple times inside data
            # until we find the sub-element or attribute)
            _val = data
            for _ in err.path:
                try:
                    _val = _val[_]
                except KeyError:
                    _val = None
                except ValueError:
                    _val = None
            # no need to show value if the error is the presence of the element
            _msg = err.msg
            if "extra keys not allowed" in _msg:
                _val = None
                _msg = "item not allowed"
            errors.append(
                ValidationDetail(_val, None, None, None, None, _msg, path))
        raise SPIDValidationError(details=errors)
Any(None, Datetime(format='%Y-%m-%dT%H:%M:%S.%f')), Extra: object, }, 'signing': { 'signature': str, }, Extra: object, }, Required('metadata', msg="Required for TaskCluster schema."): { 'name': All(str, Length(max=255)), 'description': All(str, Length(max=32768)), 'owner': All(Email(), Length(max=255)), 'source': All(Url(), Length(max=4096)), }, Required('payload', msg="Required for TaskCluster schema."): { Extra: object, Optional('properties'): { 'version': str, 'build_number': int, 'release_promotion': bool, 'revision': str, 'product': str, Extra: object, } }, Required('provisionerId', msg="Required for TaskCluster schema."):
Optional("os"): ["windows", "linux", "macos", "freebsd", "posix", "nix"], Optional("python"): Version, } ) SCHEMA = Schema( { Required("layout"): "plugin", Required("id"): NonEmptyString, Required("title"): NonEmptyString, Required("description"): NonEmptyString, Optional("author"): NonEmptyString, Optional("authors"): list, Required("license"): NonEmptyString, Required("date"): datetime.date, Required("homepage"): Url(), Required("source"): Url(), Required("archive"): Url(), Optional("follow_dependency_links"): bool, Optional("tags"): list, Optional("screenshots"): All([ScreenshotDef]), Optional("featuredimage"): ImageLocation, Optional("compatibility"): Compatibility, Optional("disabled"): NonEmptyString, Optional("abandoned"): NonEmptyString, Optional("up_for_adoption"): Url(), Optional("redirect_from"): NonEmptyString, } )