def validate(self, request):
    """Validate an incoming SAML AuthnRequest/LogoutRequest against the
    metadata of the Service Provider held by the local registry.

    Builds a voluptuous schema matching the SPID profile for the current
    action ('login' / 'logout') and binding, then runs the request
    (converted to a dict by ``saml_to_dict``) through it.

    Raises:
        UnknownEntityIDError: <Issuer> missing, or its entity ID is not a
            registered Service Provider.
        SPIDValidationError: schema violations, carrying one
            ValidationDetail per voluptuous error.

    Returns None when the request is valid.
    """
    xmlstr = request.saml_request
    data = saml_to_dict(xmlstr)
    # Map the action onto the root element expected in the request.
    # NOTE(review): any other action leaves req_type unbound and would
    # raise NameError below -- presumably callers only ever use these two;
    # confirm against the call sites.
    if self._action == 'login':
        req_type = 'AuthnRequest'
    elif self._action == 'logout':
        req_type = 'LogoutRequest'
    # The SP entity ID is the text of <Issuer> under the request root.
    issuer_name = data.get(
        '{urn:oasis:names:tc:SAML:2.0:protocol}%s' % (req_type),
        {}).get('children', {}).get(
            '{urn:oasis:names:tc:SAML:2.0:assertion}Issuer', {}).get('text')
    if issuer_name is None:
        raise UnknownEntityIDError(
            'Issuer non presente nella {}'.format(req_type))
    try:
        sp_metadata = self._registry.load(issuer_name)
    except MetadataNotFoundError:
        raise UnknownEntityIDError(
            'L\'entity ID "{}" indicato nell\'elemento <Issuer> non corrisponde a nessun Service Provider registrato in questo Identity Provider di test.'
            .format(issuer_name))
    # Allowed AttributeConsumingService / AssertionConsumerService indexes
    # and ACS URLs come from the SP's own metadata: the request may only
    # reference what the SP declared there.
    atcss = sp_metadata.attribute_consuming_services
    attribute_consuming_service_indexes = [
        str(el.get('attrs').get('index')) for el in atcss
        if 'index' in el.get('attrs', {})
    ]
    ascss = sp_metadata.assertion_consumer_services
    assertion_consumer_service_indexes = [
        str(el.get('index')) for el in ascss
    ]
    assertion_consumer_service_urls = [
        str(el.get('Location')) for el in ascss
    ]
    entity_id = self._config.entity_id

    # ---- reusable per-element sub-schemas ------------------------------
    # <Issuer>: entity name-id format; NameQualifier must be a URL or a
    # urn: URI.
    issuer = Schema(
        {
            'attrs': {
                'Format':
                Equal(
                    NAMEID_FORMAT_ENTITY,
                    msg=DEFAULT_VALUE_ERROR.format(NAMEID_FORMAT_ENTITY)),
                'NameQualifier':
                Any(Url(), Match(r'^urn:'), msg="Invalid URI"),
            },
            'children': {},
            'text': str,
        },
        required=True,
    )
    # <NameID> (logout request): transient format is mandatory.
    name_id = Schema(
        {
            'attrs': {
                'NameQualifier': str,
                'Format':
                Equal(NAMEID_FORMAT_TRANSIENT,
                      msg=DEFAULT_VALUE_ERROR.format(
                          NAMEID_FORMAT_TRANSIENT)),
            },
            'children': {},
            'text': str
        },
        required=True,
    )
    # <NameIDPolicy> (authn request): transient format is mandatory.
    name_id_policy = Schema(
        {
            'attrs': {
                'Format':
                Equal(NAMEID_FORMAT_TRANSIENT,
                      msg=DEFAULT_VALUE_ERROR.format(
                          NAMEID_FORMAT_TRANSIENT)),
                Optional('SPNameQualifier'): str,
            },
            'children': {},
            'text': None,
        },
        required=True,
    )
    # <Conditions>: both bounds must parse as UTC timestamps.
    conditions = Schema(
        {
            'attrs': {
                'NotBefore': All(str, _check_utc_date),
                'NotOnOrAfter': All(str, _check_utc_date),
            },
            'children': {},
            'text': None,
        },
        required=True,
    )
    # <AuthnContextClassRef>: must be one of the SPID levels.
    authn_context_class_ref = Schema(
        {
            'attrs': {},
            'children': {},
            'text':
            All(
                str,
                In(SPID_LEVELS,
                   msg=DEFAULT_LIST_VALUE_ERROR.format(
                       ', '.join(SPID_LEVELS))))
        },
        required=True,
    )
    requested_authn_context = Schema(
        {
            'attrs': {
                'Comparison': str
            },
            'children': {
                '{%s}AuthnContextClassRef' % (ASSERTION):
                authn_context_class_ref
            },
            'text': None
        },
        required=True,
    )
    # <Scoping>: SPID requires ProxyCount="0".
    scoping = Schema(
        {
            'attrs': {
                'ProxyCount': Equal('0', msg=DEFAULT_VALUE_ERROR.format('0'))
            },
            'children': {},
            'text': None
        },
        required=True,
    )
    # <Signature>: only structural presence is checked here; the actual
    # cryptographic verification happens elsewhere.
    signature = Schema(
        {
            'attrs': dict,
            'children': dict,
            'text': None
        },
        required=True,
    )
    subject = Schema(
        {
            'attrs': {
                'Format':
                Equal(
                    NAMEID_FORMAT_ENTITY,
                    msg=DEFAULT_VALUE_ERROR.format(NAMEID_FORMAT_ENTITY)),
                'NameQualifier': str
            },
            'children': {},
            'text': None
        },
        required=True,
    )

    # LOGIN
    def check_assertion_consumer_service(attrs):
        # Cross-attribute rule: exactly one of
        # [AssertionConsumerServiceIndex] or
        # [AssertionConsumerServiceURL + ProtocolBinding] must be present,
        # and each must match the SP metadata.
        keys = list(attrs.keys())
        if ('AssertionConsumerServiceURL' in keys
                and 'ProtocolBinding' in keys
                and 'AssertionConsumerServiceIndex' not in keys):
            _errors = []
            if attrs['ProtocolBinding'] != BINDING_HTTP_POST:
                _errors.append(
                    Invalid(DEFAULT_VALUE_ERROR.format(BINDING_HTTP_POST),
                            path=['ProtocolBinding']))
            if attrs[
                    'AssertionConsumerServiceURL'] not in assertion_consumer_service_urls:
                _errors.append(
                    Invalid(DEFAULT_VALUE_ERROR.format(
                        assertion_consumer_service_urls),
                            path=['AssertionConsumerServiceURL']))
            if _errors:
                raise MultipleInvalid(errors=_errors)
            return attrs
        elif ('AssertionConsumerServiceURL' not in keys
              and 'ProtocolBinding' not in keys
              and 'AssertionConsumerServiceIndex' in keys):
            if attrs[
                    'AssertionConsumerServiceIndex'] not in assertion_consumer_service_indexes:
                raise Invalid(DEFAULT_LIST_VALUE_ERROR.format(
                    ', '.join(assertion_consumer_service_indexes)),
                              path=['AssertionConsumerServiceIndex'])
            return attrs
        else:
            raise Invalid(
                'Uno e uno solo uno tra gli attributi o gruppi di attributi devono essere presenti: '
                '[AssertionConsumerServiceIndex, [AssertionConsumerServiceUrl, ProtocolBinding]]'
            )

    authnrequest_attr_schema = Schema(
        All(
            {
                'ID': str,
                'Version':
                Equal('2.0', msg=DEFAULT_VALUE_ERROR.format('2.0')),
                'IssueInstant':
                All(str, _check_utc_date, self._check_date_in_range),
                'Destination':
                In([entity_id, self._config.absolute_sso_url],
                   msg=DEFAULT_VALUE_ERROR.format(entity_id)),
                Optional('ForceAuthn'): str,
                Optional('AttributeConsumingServiceIndex'):
                In(attribute_consuming_service_indexes,
                   msg=DEFAULT_LIST_VALUE_ERROR.format(
                       ', '.join(attribute_consuming_service_indexes))),
                Optional('AssertionConsumerServiceIndex'): str,
                Optional('AssertionConsumerServiceURL'): str,
                Optional('ProtocolBinding'): str,
            },
            check_assertion_consumer_service,
        ),
        required=True)
    AUTHNREQUEST_TAG = '{%s}AuthnRequest' % (PROTOCOL)
    authnrequest_schema = {
        AUTHNREQUEST_TAG: {
            'attrs': authnrequest_attr_schema,
            'children':
            Schema(
                {
                    Optional('{%s}Subject' % (ASSERTION)): subject,
                    '{%s}Issuer' % (ASSERTION): issuer,
                    '{%s}NameIDPolicy' % (PROTOCOL): name_id_policy,
                    Optional('{%s}Conditions' % (ASSERTION)): conditions,
                    '{%s}RequestedAuthnContext' % (PROTOCOL):
                    requested_authn_context,
                    Optional('{%s}Scoping' % (PROTOCOL)): scoping,
                },
                required=True,
            ),
            'text': None
        }
    }

    # LOGOUT
    LOGOUTREQUEST_TAG = '{%s}LogoutRequest' % (PROTOCOL)
    logoutrequest_attr_schema = Schema(
        All({
            'ID': str,
            'Version': Equal('2.0', msg=DEFAULT_VALUE_ERROR.format('2.0')),
            'IssueInstant':
            All(str, _check_utc_date, self._check_date_in_range),
            'Destination':
            In([entity_id, self._config.absolute_slo_url],
               msg=DEFAULT_VALUE_ERROR.format(entity_id)),
            Optional('NotOnOrAfter'):
            All(str, _check_utc_date, self._check_date_not_expired),
            Optional('Reason'): str,
        }),
        required=True)
    logoutrequest_schema = {
        LOGOUTREQUEST_TAG: {
            'attrs': logoutrequest_attr_schema,
            'children':
            Schema(
                {
                    '{%s}Issuer' % (ASSERTION): issuer,
                    '{%s}NameID' % (ASSERTION): name_id,
                    '{%s}SessionIndex' % (PROTOCOL): dict,
                },
                required=True),
            'text': None
        }
    }

    # With the HTTP-POST binding the XML signature is embedded in the
    # request itself, so the matching schema must also require it.
    if self._binding == BINDING_HTTP_POST:
        if self._action == 'login':
            # Add signature schema
            _new_sub_schema = authnrequest_schema[AUTHNREQUEST_TAG][
                'children'].extend(
                    {'{%s}Signature' % (SIGNATURE): signature})
            authnrequest_schema[AUTHNREQUEST_TAG][
                'children'] = _new_sub_schema
        if self._action == 'logout':
            _new_sub_schema = logoutrequest_schema[LOGOUTREQUEST_TAG][
                'children'].extend(
                    {'{%s}Signature' % (SIGNATURE): signature})
            logoutrequest_schema[LOGOUTREQUEST_TAG][
                'children'] = _new_sub_schema

    authn_request = Schema(
        authnrequest_schema,
        required=True,
    )
    logout_request = Schema(
        logoutrequest_schema,
        required=True,
    )

    saml_schema = None
    if self._action == 'login':
        saml_schema = authn_request
    elif self._action == 'logout':
        saml_schema = logout_request
    errors = []
    try:
        saml_schema(data)
    except MultipleInvalid as e:
        # Translate each voluptuous error into a ValidationDetail with a
        # readable "element/element - attribute: name" path and, when
        # possible, the offending value.
        for err in e.errors:
            _paths = []
            _attr = None
            for idx, _path in enumerate(err.path):
                if _path != 'children':
                    if _path == 'attrs':
                        # The component after 'attrs' is the attribute name.
                        try:
                            _attr = err.path[(idx + 1)]
                        except IndexError:
                            _attr = ''
                        break
                    # strip namespaces for better readability
                    _paths.append(_strip_namespaces(str(_path)))
            path = '/'.join(_paths)
            if _attr is not None:
                path += " - attribute: " + _attr
            # find value to show (iterate multiple times inside data
            # until we find the sub-element or attribute)
            # NOTE(review): once _val becomes None (or any non-subscriptable
            # leaf) the next _val[_] raises TypeError, which is not caught
            # here -- confirm err.path never extends past the failing key.
            _val = data
            for _ in err.path:
                try:
                    _val = _val[_]
                except KeyError:
                    _val = None
                except ValueError:
                    _val = None
            # no need to show value if the error is the presence of the element
            _msg = err.msg
            if "extra keys not allowed" in _msg:
                _val = None
                _msg = "item not allowed"
            errors.append(
                ValidationDetail(_val, None, None, None, None, _msg, path))
        raise SPIDValidationError(details=errors)
def _schema_1():
    """Returns Voluptuous Schema object."""
    return Schema({
        # Manifest format version understood by this validator.
        Required("schema"): 1,
        # Where issues against this vendored library are tracked.
        Required("bugzilla"): {
            Required("product"): All(str, Length(min=1)),
            Required("component"): All(str, Length(min=1)),
        },
        # Describes the upstream project the code originates from.
        "origin": {
            Required("name"): All(str, Length(min=1)),
            Required("description"): All(str, Length(min=1)),
            Required("url"): FqdnUrl(),
            Required("license"): Msg(License(), msg="Unsupported License"),
            "license-file": All(str, Length(min=1)),
            Required("release"): All(str, Length(min=1)),
            # Upstream revision: a 12-40 character hex commit hash.
            Required("revision"): Match(r"^[a-fA-F0-9]{12,40}$"),
        },
        # Settings driving the Updatebot automation.
        "updatebot": {
            Required("maintainer-phab"): All(str, Length(min=1)),
            Required("maintainer-bz"): All(str, Length(min=1)),
            # List-level invariants are enforced by UpdatebotTasks().
            "tasks": All(
                UpdatebotTasks(),
                [{
                    Required("type"): In(
                        [
                            "vendoring",
                            "commit-alert",
                        ],
                        msg="Invalid type specified in tasks",
                    ),
                    "branch": All(str, Length(min=1)),
                    "enabled": Boolean(),
                    "cc": Unique([str]),
                    "filter": In(
                        [
                            "none",
                            "security",
                            "source-extensions",
                        ],
                        msg="Invalid filter value specified in tasks",
                    ),
                    "source-extensions": Unique([str]),
                }],
            ),
        },
        # How to (re-)vendor the upstream source into the tree.
        "vendoring": {
            Required("url"): FqdnUrl(),
            Required("source-hosting"): All(
                str,
                Length(min=1),
                In(VALID_SOURCE_HOSTS, msg="Unsupported Source Hosting"),
            ),
            "vendor-directory": All(str, Length(min=1)),
            "patches": Unique([str]),
            "keep": Unique([str]),
            "exclude": Unique([str]),
            "include": Unique([str]),
            # Post-vendor transformations, validated by UpdateActions().
            "update-actions": All(
                UpdateActions(),
                [{
                    Required("action"): In(
                        [
                            "copy-file",
                            "replace-in-file",
                            "run-script",
                            "delete-path",
                        ],
                        msg="Invalid action specified in update-actions",
                    ),
                    "from": All(str, Length(min=1)),
                    "to": All(str, Length(min=1)),
                    "pattern": All(str, Length(min=1)),
                    "with": All(str, Length(min=1)),
                    "file": All(str, Length(min=1)),
                    "script": All(str, Length(min=1)),
                    "cwd": All(str, Length(min=1)),
                    "path": All(str, Length(min=1)),
                }],
            ),
        },
    })
def validate(self, metadata):
    """Validate a Service Provider metadata document against the SPID
    profile.

    Converts the XML to a dict and checks the whole <EntityDescriptor>
    tree: key descriptors, SLO/ACS endpoints, attribute consuming
    services, NameIDFormat and SPSSODescriptor attributes.

    Raises SPIDValidationError with one ValidationDetail per schema
    violation; returns None when the metadata is valid.
    """
    data = saml_to_dict(metadata)
    # <KeyDescriptor>: each entry must carry a valid 'use' and an X.509
    # certificate accepted by _check_certificate; the whole list is then
    # cross-checked by self._check_keydescriptor.
    key_descriptor = Schema(
        All([{
            'attrs':
            Schema(
                {
                    'use':
                    All(
                        str,
                        In(KEYDESCRIPTOR_USES,
                           msg=DEFAULT_LIST_VALUE_ERROR.format(
                               ', '.join(KEYDESCRIPTOR_USES)))),
                },
                required=True),
            'children': {
                '{%s}KeyInfo' % (SIGNATURE): {
                    'attrs': {},
                    'children': {
                        '{%s}X509Data' % (SIGNATURE): {
                            'attrs': {},
                            'children': {
                                '{%s}X509Certificate' % (SIGNATURE): {
                                    'attrs': {},
                                    'children': {},
                                    'text': All(str, _check_certificate)
                                }
                            },
                            'text': None
                        }
                    },
                    'text': None
                }
            },
            'text': None
        }], self._check_keydescriptor),
        required=True,
    )
    # <SingleLogoutService>: at least one endpoint; attributes free-form.
    slo = Schema(
        All([{
            'attrs': dict,
            'children': dict,
            'text': None
        }], Length(min=1)),
        required=True,
    )
    # <AssertionConsumerService>: at least one endpoint.
    acs = Schema(
        All([{
            'attrs': dict,
            'children': dict,
            'text': None
        }], Length(min=1)),
        required=True,
    )
    # <AttributeConsumingService>: indexed services, each requesting at
    # least one known SPID attribute (BASIC name format when given).
    atcs = Schema(
        All([{
            'attrs': {
                'index': str
            },
            'children': {
                '{%s}ServiceName' % (METADATA): {
                    'attrs': dict,
                    'children': {},
                    'text': str
                },
                Optional('{%s}ServiceDescription' % (METADATA)): {
                    'attrs': dict,
                    'children': {},
                    'text': str
                },
                '{%s}RequestedAttribute' % (METADATA):
                All([{
                    'attrs': {
                        'Name':
                        All(
                            str,
                            In(SPID_ATTRIBUTES_NAMES,
                               msg=DEFAULT_LIST_VALUE_ERROR.format(
                                   ', '.join(SPID_ATTRIBUTES_NAMES)))),
                        Optional('NameFormat'):
                        Equal(NAME_FORMAT_BASIC,
                              msg=DEFAULT_VALUE_ERROR.format(
                                  NAME_FORMAT_BASIC)),
                        Optional('FriendlyName'): str,
                        Optional('isRequired'): str
                    },
                    'children': {},
                    'text': None
                }], Length(min=1)),
            },
            'text': None
        }], Length(min=1)),
        required=True,
    )
    # <NameIDFormat>: SPID mandates the transient format.
    name_id_format = Schema(
        {
            'attrs': {},
            'children': {},
            'text':
            Equal(NAMEID_FORMAT_TRANSIENT,
                  msg=DEFAULT_VALUE_ERROR.format(NAMEID_FORMAT_TRANSIENT)),
        },
        required=True,
    )
    # SPSSODescriptor attributes: SAML 2.0 protocol support and signed
    # authn requests are mandatory.
    spsso_descriptor_attr_schema = Schema(
        All({
            'protocolSupportEnumeration':
            Equal(PROTOCOL, msg=DEFAULT_VALUE_ERROR.format(PROTOCOL)),
            'AuthnRequestsSigned':
            Equal('true', msg=DEFAULT_VALUE_ERROR.format('true')),
            Optional('WantAssertionsSigned'): str,
        }),
        required=True)
    spsso = Schema(
        {
            'attrs': spsso_descriptor_attr_schema,
            'children': {
                '{%s}KeyDescriptor' % (METADATA): key_descriptor,
                '{%s}SingleLogoutService' % (METADATA): slo,
                '{%s}AssertionConsumerService' % (METADATA): acs,
                '{%s}AttributeConsumingService' % (METADATA): atcs,
                '{%s}NameIDFormat' % (METADATA): name_id_format,
            },
            'text': None
        },
        required=True)
    # Root element: optional Signature/Organization/ContactPerson around
    # the mandatory SPSSODescriptor.
    entity_descriptor_schema = Schema(
        {
            '{%s}EntityDescriptor' % (METADATA): {
                'attrs':
                Schema(
                    {
                        'entityID': str,
                        Optional('ID'): str,
                        Optional('validUntil'): All(str, _check_utc_date),
                        Optional('cacheDuration'): str,
                        Optional('Name'): str,
                    },
                    required=True),
                'children':
                Schema(
                    {
                        Optional('{%s}Signature' % (SIGNATURE)):
                        Schema(
                            {
                                'attrs': dict,
                                'children': dict,
                                'text': None
                            },
                            required=True,
                        ),
                        '{%s}SPSSODescriptor' % (METADATA): spsso,
                        Optional('{%s}Organization' % (METADATA)): dict,
                        Optional('{%s}ContactPerson' % (METADATA)): list
                    },
                    required=True),
                'text': None
            }
        },
        required=True)
    errors = []
    try:
        entity_descriptor_schema(data)
    except MultipleInvalid as e:
        # Translate each voluptuous error into a ValidationDetail with a
        # readable "element/element - attribute: name" path and, when
        # possible, the offending value.
        for err in e.errors:
            _val = data
            _paths = []
            _attr = None
            for idx, _path in enumerate(err.path):
                if _path != 'children':
                    if _path == 'attrs':
                        # The component after 'attrs' is the attribute name.
                        try:
                            _attr = err.path[(idx + 1)]
                        except IndexError:
                            _attr = ''
                        break
                    # strip namespaces for better readability
                    _paths.append(_strip_namespaces(str(_path)))
            path = '/'.join(_paths)
            if _attr is not None:
                path = '{} - attribute: {}'.format(path, _attr)
            # Walk err.path inside the data dict to recover the value.
            # NOTE(review): if _val becomes None mid-walk, the next
            # subscript raises TypeError, which is not caught here --
            # confirm err.path never extends past the failing key.
            for _ in err.path:
                try:
                    _val = _val[_]
                except IndexError:
                    _val = None
                except KeyError:
                    _val = None
            errors.append(
                ValidationDetail(_val, None, None, None, None, err.msg,
                                 path))
        raise SPIDValidationError(details=errors)
EXTRA_AGGREGATION_METHODS_FOR_ARCHIVE_POLICY.add("rate:%s" % agg) # The aggregation method that one can use to configure the archive # policies also supports the 'pct' (percentile) operation. Therefore, # we also expose this as a configuration. VALID_AGGREGATION_METHODS_FOR_METRICS = BASIC_AGGREGATION_METHODS.union( EXTRA_AGGREGATION_METHODS_FOR_ARCHIVE_POLICY) GNOCCHI_EXTRA_SCHEMA = { Required('extra_args'): { Required('resource_type'): All(str, Length(min=1)), # Due to Gnocchi model, metric are grouped by resource. # This parameter permits to adapt the key of the resource identifier Required('resource_key', default='id'): All(str, Length(min=1)), Required('aggregation_method', default='max'): In(VALID_AGGREGATION_METHODS_FOR_METRICS), Required('re_aggregation_method', default='max'): In(BASIC_AGGREGATION_METHODS), Required('force_granularity', default=3600): All(int, Range(min=0)), }, } class AssociatedResourceNotFound(Exception): """Exception raised when no resource can be associated with a metric.""" def __init__(self, resource_key, resource_id): super(AssociatedResourceNotFound, self).__init__( 'Resource with {}={} could not be found'.format( resource_key, resource_id), )
'initializationFeature': int, }, }, { 'transforms': [ Any({ 'Rigid': ANTs_parameter_transforms, }, { 'Affine': ANTs_parameter_transforms, }, { 'SyN': ANTs_parameter_transforms, }) ], }, { 'verbose': Any(bool, In({0, 1})), }, { 'float': Any(bool, In({0, 1})), }, { 'masks': { 'fixed_image_mask': bool, 'moving_image_mask': bool, }, }, dict # TODO: specify other valid ANTs parameters ) ] _url_version = 'nightly' if __version__.endswith( '-dev') else f'v{__version__.lstrip("v")}'
def _context_schema():
    """Schema for a job "context" block: only known context variable names
    are accepted as keys; values are ints, strings, or lists of either."""
    allowed_keys = CONTEXT_VARIABLES + settings.EXTRA_CONTEXT_VARIABLES
    value_validator = Any(int, str, [int, str])
    return Schema({In(allowed_keys): value_validator}, extra=False)
def test_in():
    """Verify that In works."""
    colors = frozenset(("blue", "red", "yellow"))
    validator = Schema({"color": In(colors)})
    validator({"color": "blue"})
from voluptuous import All, In, Schema


def replace_boolean(value):
    """Map the literal 'boolean' to 'boolean_special'; pass anything else
    through unchanged (runs after the In() check in the schema below)."""
    if value == 'boolean':
        return 'boolean_special'
    return value


# Both keys are mandatory (required=True); 'type' is first validated
# against the allowed set, then rewritten by replace_boolean.
validate_params = Schema(
    {
        'gender': In(['male', 'female', 'other']),
        'type': All(In(['boolean', 'string', 'unicode']), replace_boolean),
    },
    required=True)

params1 = {
    'gender': 'female',
    'type': 'boolean',
}
params1 = validate_params(params1)
# BUGFIX: the original used Python-2-only print statements
# ("print params1"), a SyntaxError on Python 3; print() calls behave the
# same on both interpreters for a single argument.
print(params1)
print('\n')

params2 = {
    'gender': 'female',
    'type': 'string',
}
params2 = validate_params(params2)
print(params2)
        # NOTE(review): tail of a validator whose definition starts above
        # this chunk; indentation reconstructed -- confirm against the
        # original file.
        return s
    raise ValueError


# based off of:
# http://stackoverflow.com/questions/2532053/validate-a-hostname-string
def _hostname(hostname):
    """Return *hostname* if it is a valid host name; raise ValueError."""
    if len(hostname) > 255:
        raise ValueError
    # Each dot-separated label: 1-63 chars, letters/digits/hyphens, and
    # not starting or ending with a hyphen.
    allowed = re.compile(r"(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
    if all(allowed.match(x) for x in hostname.split(".")):
        return hostname
    raise ValueError


# Accepted values for the order's delivery speed field.
_delivery_speed = In(['same_day', 'overnight', 'expedited', 'standard'])

# Per-field validators shared by address-like input objects.
_address = {
    'address': _unicode_or_printable_ascii,
    'address_2': _unicode_or_printable_ascii,
    'city': _unicode_or_printable_ascii,
    'company': _unicode_or_printable_ascii,
    'country': _country_code,
    'first_name': _unicode_or_printable_ascii,
    'last_name': _unicode_or_printable_ascii,
    'phone_country_code': _telephone_country_code,
    'phone_number': _unicode_or_printable_ascii,
    'postal': _unicode_or_printable_ascii,
    'region': _subdivision_iso_code,
}
""" def validator(csv: str) -> str: if not csv: return [] split = csv.split(',') for val in split: if val not in values: raise Invalid(f"'{val}' could not be found in {values}") return split return validator _shared = { Required('format', default='json'): All(str, In(('json', 'xml', 'yaml'))), Required('options', default=''): All(str, SplitIn(('info', 'translate', 'summary', 'speech'))), Required('report_type'): All(str, In(('metar', 'taf', 'pirep'))) } _report = { **_shared, Required('onfail', default='error'): All(str, In(('error', 'cache'))), Required('station'): All(str, Location), } report = Schema(_report, extra=REMOVE_EXTRA)
] VALID_UPLOAD_FILE_TYPES = [ 'image', 'pdf-and-image', 'custom' ] # Schema definition for an update from the Studio JavaScript editor. EDITOR_UPDATE_SCHEMA = Schema({ Required('prompts'): [ Schema({ Required('description'): utf8_validator, }) ], Required('prompts_type', default='text'): Any(All(utf8_validator, In(PROMPTS_TYPES)), None), Required('title'): utf8_validator, Required('feedback_prompt'): utf8_validator, Required('feedback_default_text'): utf8_validator, Required('submission_start'): Any(datetime_validator, None), Required('submission_due'): Any(datetime_validator, None), Required('text_response', default='required'): Any(All(utf8_validator, In(NECESSITY_OPTIONS)), None), Required('file_upload_response', default=None): Any(All(utf8_validator, In(NECESSITY_OPTIONS)), None), 'allow_file_upload': bool, # Backwards compatibility. Required('file_upload_type', default=None): Any(All(utf8_validator, In(VALID_UPLOAD_FILE_TYPES)), None), 'white_listed_file_types': utf8_validator, Required('allow_multiple_files'): bool, Required('allow_latex'): bool, Required('leaderboard_show'): int, Optional('teams_enabled'): bool, Optional('selected_teamset_id'): utf8_validator,
class BuildStatusPayload(PayloadBuilder):
    """
    A builder object to help create payloads
    for creating and updating build statuses.

    The builder is immutable: every ``add_*`` method returns a new
    BuildStatusPayload rather than mutating this one.
    """

    # Minimum shape of a valid build-status payload.
    schema = Schema({
        Required('key'): str,
        Required('state'): In(list(BuildStatusStates)),
        Required('url'): Url(),
        Optional('name'): str,
        Optional('description'): str
    })

    def __init__(self, payload=None, owner=None, repository_name=None,
                 revision=None):
        # BUGFIX: the original called super(self.__class__, self), which
        # recurses infinitely if this class is ever subclassed; name the
        # class explicitly instead.
        super(BuildStatusPayload, self).__init__(payload=payload)
        self._owner = owner
        self._repository_name = repository_name
        self._revision = revision

    @property
    def owner(self):
        return self._owner

    @property
    def repository_name(self):
        return self._repository_name

    @property
    def revision(self):
        return self._revision

    def _with(self, **overrides):
        # Return a copy of this builder with the given constructor
        # arguments replaced (payload copied defensively).
        kwargs = {
            'payload': self._payload.copy(),
            'owner': self.owner,
            'repository_name': self.repository_name,
            'revision': self.revision,
        }
        kwargs.update(overrides)
        return BuildStatusPayload(**kwargs)

    def _with_value(self, key, value):
        # Return a copy of this builder whose payload maps key -> value.
        new = self._payload.copy()
        new[key] = value
        return self._with(payload=new)

    def add_owner(self, owner):
        """Return a copy bound to the given repository owner."""
        return self._with(owner=owner)

    def add_repository_name(self, repository_name):
        """Return a copy bound to the given repository name."""
        return self._with(repository_name=repository_name)

    def add_revision(self, revision):
        """Return a copy bound to the given commit revision."""
        return self._with(revision=revision)

    def add_name(self, name):
        """Return a copy whose payload carries the given display name."""
        return self._with_value('name', name)

    def add_description(self, description):
        """Return a copy whose payload carries the given description."""
        return self._with_value('description', description)

    def add_key(self, key):
        """Return a copy whose payload carries the given status key."""
        return self._with_value('key', key)

    def add_state(self, state):
        """Return a copy whose payload carries the given build state."""
        return self._with_value('state', state)

    def add_url(self, url):
        """Return a copy whose payload carries the given build URL."""
        return self._with_value('url', url)
@truth def payload_validation(value): try: return isinstance(json.loads(value), dict) except Exception: raise Invalid("Invalid payload") @truth def filename_validation(value): if not value[-3:] == "csv": return False return True base_model = Schema({ Required('name'): All(Strip, str, Match("[a-zA-Z0-9 '-]+$"), Length(min=0, max=30), msg="Invalid contactId"), Required('role'): All(Strip, str, In(['Dev', 'Test', 'Support'], msg ="Invalid role")), Required('referenceId'): All(Strip, str, Match("[a-zA-Z0-9 '-]+$"), Length(min=0,max=30), msg = "Invalid referenceId"), Required('postalcode'): All(Strip, str, Match("^\d{5}\-\d{4}$|^\d{5}$"), Length(min=5,max=10), msg = "Invalid postalcode"), 'payload': All(Strip, payload_validation, Length(min=0,max=1000), msg = "Invalid payload"), }, extra = True ) user_model = base_model.extend({ Required('fileName'): All(Strip, str, Match("^[a-zA-Z0-9 '-_.]+$"), filename_validation, Length(min=0,max=100), msg = "Invalid fileName"), }) request = { "name": "John", "role": "Dev",
''' schema = Schema( { Optional('digest'): All(basestring, Length(min=1)), 'metadata': { 'author': basestring, 'author_email': Any(All(basestring, email), None), 'extras': [{ 'key': basestring, 'value': basestring }], 'frequency': All(Lower, In(FREQUENCIES.keys())), 'groups': Any(None, All(Lower, 'agriculture et alimentation')), 'id': basestring, 'license_id': Any('fr-lo'), 'maintainer': Any(basestring, None), 'maintainer_email': Any(All(basestring, email), None), 'notes': All(basestring, normalize_string), 'organization': basestring, 'private':
), ] cfg.CONF.register_opts(keystone_opts, COLLECTOR_GNOCCHI_OPTS) cfg.CONF.register_opts(collector_gnocchi_opts, COLLECTOR_GNOCCHI_OPTS) CONF = cfg.CONF GNOCCHI_EXTRA_SCHEMA = { Required('extra_args'): { Required('resource_type'): All(str, Length(min=1)), # Due to Gnocchi model, metric are grouped by resource. # This parameter permits to adapt the key of the resource identifier Required('resource_key', default='id'): All(str, Length(min=1)), Required('aggregation_method', default='max'): In(['max', 'mean', 'min']), }, } class GnocchiCollector(collector.BaseCollector): collector_name = 'gnocchi' def __init__(self, transformers, **kwargs): super(GnocchiCollector, self).__init__(transformers, **kwargs) adapter_options = {'connect_retries': 3} if CONF.collector_gnocchi.gnocchi_auth_type == 'keystone': auth_plugin = ks_loading.load_auth_from_conf_options( CONF,
) from voluptuous import All, In DOMAIN = 'tion' DEFAULT_NAME = "Tion Breezer" CONF_TARGET_TEMP = "target_temp" CONF_KEEP_ALIVE = "keep_alive" CONF_INITIAL_HVAC_MODE = "initial_hvac_mode" CONF_AWAY_TEMP = "away_temp" CONF_MAC = "mac" SUPPORTED_DEVICES = ['S3', 'Lite'] SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_FAN_MODE TION_SCHEMA = { 'model': { 'type': All(str, In(SUPPORTED_DEVICES)), 'required': True }, 'name': { 'type': str, 'default': DEFAULT_NAME, 'required': True }, CONF_MAC: { 'type': str, 'required': True }, CONF_KEEP_ALIVE: { 'type': int, 'default': 60, 'required': False
VALID_ASSESSMENT_TYPES = [ 'peer-assessment', 'self-assessment', 'student-training', 'staff-assessment', ] VALID_UPLOAD_FILE_TYPES = ['image', 'pdf-and-image', 'custom'] # Schema definition for an update from the Studio JavaScript editor. EDITOR_UPDATE_SCHEMA = Schema({ Required('prompts'): [Schema({ Required('description'): utf8_validator, })], Required('prompts_type', default='text'): Any(All(utf8_validator, In(PROMPTS_TYPES)), None), Required('title'): utf8_validator, Required('feedback_prompt'): utf8_validator, Required('feedback_default_text'): utf8_validator, Required('submission_start'): Any(datetime_validator, None), Required('submission_due'): Any(datetime_validator, None), Required('text_response', default='required'): Any(All(utf8_validator, In(NECESSITY_OPTIONS)), None), Required('text_response_editor', default='text'): Any(All(utf8_validator, In(AVAILABLE_EDITOR_OPTIONS)), None), Required('file_upload_response', default=None):
""" Returns a validator to check for given values in a comma-separated string """ def validator(csv: str) -> str: if not csv: return [] split = csv.split(",") for val in split: if val not in values: raise Invalid(f"'{val}' could not be found in {values}") return split return validator _required = {Required("format", default="json"): In(FORMATS)} _report_shared = { Required("options", default=""): SplitIn(OPTIONS), Required("report_type"): In(REPORT_TYPES), } _uses_cache = {Required("onfail", default="cache"): In(ONFAIL)} _station_search = { Required("airport", default=True): Boolean(None), Required("reporting", default=True): Boolean(None), } def _coord_search_validator(param_name: str, coerce_station: bool) -> Callable: """ Returns a validator the pre-validates nearest station parameters """
def get_schema():
    """Build and return the voluptuous Schema validating an LXDock config.

    Options common to the top level and to each container entry are
    declared once and merged into both the per-container schema and the
    global schema.
    """
    _top_level_and_containers_common_options = {
        'environment': {
            Extra: Coerce(str)
        },
        'hostnames': [
            Hostname(),
        ],
        'image': str,
        'lxc_config': {
            Extra: str
        },
        'mode': In([
            'local',
            'pull',
        ]),
        'privileged': bool,
        'profiles': [
            str,
        ],
        'protocol': In([
            'lxd',
            'simplestreams',
        ]),
        'provisioning': [],  # will be set dynamically using provisioner classes...
        'server': Url(),
        'shares': [{
            'source': ExpandUserIfExists,
            'dest': str,
            'set_host_acl': bool,  # TODO: need a way to deprecate this
            'share_properties': {
                Extra: Coerce(str)
            },
        }],
        'shell': {
            'user': str,
            'home': str,  # TODO: deprecated
        },
        'users': [{
            # Usernames max length is set 32 characters according to
            # useradd's man page.
            Required('name'): All(str, Length(max=32)),
            'home': str,
            'password': str,
            'shell': str,
        }],
        'extras': {
            'network_wait_timeout': int
        }
    }

    def _check_provisioner_config(config):
        # Validates one 'provisioning' entry against the schema declared
        # by the provisioner class named in its 'type' key.
        provisioners = Provisioner.provisioners.values()

        # Check if 'type' is correctly defined
        Schema(
            {
                Required('type'):
                Any(*[provisioner.name for provisioner in provisioners])
            },
            extra=ALLOW_EXTRA)(config)

        # Check if the detected provisioner's schema is fully satisfied
        c = config.copy()
        name = c.pop('type')
        detected_provisioner = next(
            provisioner for provisioner in provisioners
            if provisioner.name == name)
        validated = Schema(detected_provisioner.schema)(c)
        validated['type'] = name
        return validated

    # Inserts provisioner specific schema rules in the global schema dict.
    _top_level_and_containers_common_options['provisioning'] = [
        All(_check_provisioner_config)
    ]

    # Each container entry must be named and may set any common option.
    _container_options = {
        Required('name'): LXDIdentifier(),
    }
    _container_options.update(_top_level_and_containers_common_options)

    # Top level: project name, container list, plus the common options.
    _lxdock_options = {
        Required('name'): LXDIdentifier(),
        'containers': [
            _container_options,
        ],
    }
    _lxdock_options.update(_top_level_and_containers_common_options)

    return Schema(_lxdock_options)
def valid_action():
    """Schema fragment requiring 'action' to be one of the configured
    curator actions."""
    allowed = In(settings.all_actions())
    message = 'action must be one of {0}'.format(settings.all_actions())
    return {Required('action'): Any(allowed, msg=message)}
'resolution_for_anat': All(str, Match(r'^[0-9]+mm$')), 'template_brain_only_for_anat': str, 'template_skull_for_anat': str, 'template_symmetric_brain_only': str, 'template_symmetric_skull': str, 'dilated_symmetric_brain_mask': str, 'already_skullstripped': bool, 'skullstrip_option': In(['AFNI', 'BET']), 'skullstrip_shrink_factor': float, 'skullstrip_var_shrink_fac': bool, 'skullstrip_shrink_factor_bot_lim': float, 'skullstrip_avoid_vent': bool, 'skullstrip_n_iterations': int, 'skullstrip_pushout': bool, 'skullstrip_touchup': bool, 'skullstrip_fill_hole':
def job(extra_context_variables=None):
    """Return the voluptuous schema validating a LAVA job definition.

    :param extra_context_variables: extra context variable names accepted
        in "context" blocks on top of CONTEXT_VARIABLES; defaults to none.
        (BUGFIX: previously a mutable default argument ``[]``; ``None``
        sentinel is backward compatible with every existing caller.)
    """
    # ``or []`` keeps the old default behaviour; list() also accepts any
    # iterable, not just lists.
    context_variables = CONTEXT_VARIABLES + list(extra_context_variables or [])
    # Schema for a lava-lxc protocol block.
    lava_lxc = {
        Required("name"): str,
        Required("distribution"): str,
        Required("release"): str,
        Optional("arch"): str,
        Optional("mirror"): str,
        Optional("persist"): bool,
        Optional("security_mirror"): str,
        Optional("template"): str,
        Optional("timeout"): timeout(),
        Optional("verbose"): bool,
    }
    # One shared validator for every "context" block (top level and per
    # multinode role) -- previously written out three times.
    context_schema = Schema(
        {In(context_variables): Any(int, str, [int, str])}, extra=False)
    return All(
        {
            Required("job_name"): All(str, Length(min=1, max=200)),
            Optional("device_type"): All(str, Length(min=1, max=200)),
            Required("timeouts"): {
                Required("job"): timeout(),
                Optional("action"): timeout(),
                Optional("actions"): {
                    str: timeout()
                },
                Optional("connection"): timeout(),
                Optional("connections"): {
                    str: timeout()
                },
            },
            Required("visibility"): Any("public", "personal",
                                        {"group": [str]}),
            Optional("context"): context_schema,
            Optional("metadata"): {
                str: object
            },
            Optional("priority"): Any("high", "medium", "low",
                                      Range(min=0, max=100)),
            Optional("tags"): [str],
            Optional("secrets"): dict,
            Optional("environment"): dict,
            Optional("protocols"): {
                Optional("lava-lxc"): Any(lava_lxc, {str: lava_lxc}),
                Optional("lava-multinode"): {
                    Required("roles"): {
                        str: Any(
                            # Role backed by a real device.
                            {
                                Required("device_type"): str,
                                Required("count"): Range(min=0),
                                Optional("context"): context_schema,
                                Optional("tags"): [str],
                                Optional("environment"): dict,
                                Optional("essential"): bool,
                                Optional("timeout"): timeout(),
                            },
                            # Role reached over a connection to a host role.
                            {
                                Required("connection"): str,
                                Required("count"): Range(min=0),
                                Required("expect_role"): str,
                                Required("host_role"): str,
                                Optional("essential"): bool,
                                Optional("request"): str,
                                Optional("tags"): [str],
                                Optional("timeout"): timeout(),
                                Optional("context"): context_schema,
                            },
                        )
                    },
                    Optional("timeout"): timeout(),
                },
                Optional("lava-vland"): Any(
                    {str: {
                        str: {
                            Required("tags"): [str]
                        }
                    }},
                    {str: {
                        Required("tags"): [str]
                    }},
                ),
                Optional("lava-xnbd"): {
                    Required("port"): Any("auto", int),
                    Optional("timeout"): timeout(),
                },
            },
            Optional("notify"): notify(),
            Optional("reboot_to_fastboot"): bool,
            Required("actions"): [{
                Any("boot", "command", "deploy", "test"): dict
            }],
        },
        extra_checks,
    )
def validate(self, request):
    """Validate a SPID SAML request (AuthnRequest or LogoutRequest).

    Parses ``request.saml_request``, builds a voluptuous schema from the
    Service Provider metadata registered for the request's <Issuer>, and
    validates the parsed request against it.

    :param request: object exposing ``saml_request`` (raw SAML XML string).
    :raises UnknownEntityIDError: the <Issuer> entity ID is not registered.
    :raises SPIDValidationError: the request violates the schema; its
        ``details`` carries one ``ValidationDetail`` per voluptuous error.
    :raises ValueError: ``self._action`` is neither 'login' nor 'logout'.
    """
    xmlstr = request.saml_request
    data = saml_to_dict(xmlstr)
    atcss = []
    if self._action == 'login':
        req_type = 'AuthnRequest'
        service = 'single_sign_on_service'
    elif self._action == 'logout':
        req_type = 'LogoutRequest'
        service = 'single_logout_service'
    else:
        # BUG FIX: an unknown action previously left req_type/service
        # unbound and crashed later with UnboundLocalError; fail fast.
        raise ValueError('unknown action: {}'.format(self._action))
    issuer_name = data.get(
        '{urn:oasis:names:tc:SAML:2.0:protocol}%s' % (req_type),
        {}).get('children', {}).get(
            '{urn:oasis:names:tc:SAML:2.0:assertion}Issuer',
            {}).get('text')
    if issuer_name and issuer_name not in self._metadata.service_providers():
        raise UnknownEntityIDError(
            'entity ID {} non registrato'.format(issuer_name))
    # Collect the attribute consuming services declared in the SP metadata.
    for k, _md in self._metadata.items():
        if k == issuer_name:
            _srvs = _md.get('spsso_descriptor', [])
            for _srv in _srvs:
                for _acs in _srv.get('attribute_consuming_service', []):
                    atcss.append(_acs)
    # Best effort: missing/malformed metadata simply yields no assertion
    # consumer services. (BUG FIX: a duplicate, unreachable second
    # ``except Exception`` clause was removed here.)
    try:
        ascss = self._metadata.assertion_consumer_service(issuer_name)
    except Exception:
        ascss = []
    attribute_consuming_service_indexes = [
        str(el.get('index')) for el in atcss
    ]
    assertion_consumer_service_indexes = [
        str(el.get('index')) for el in ascss
    ]
    receivers = self._config.receivers(service)

    # --- voluptuous sub-schemas shared by both request types ---
    issuer = Schema(
        {
            'attrs': {
                'Format': Equal(
                    NAMEID_FORMAT_ENTITY,
                    msg=DEFAULT_VALUE_ERROR.format(NAMEID_FORMAT_ENTITY)),
                'NameQualifier':
                    Equal(issuer_name,
                          msg=DEFAULT_VALUE_ERROR.format(issuer_name)),
            },
            'children': {},
            'text': str,
        },
        required=True,
    )
    name_id = Schema(
        {
            'attrs': {
                'NameQualifier': str,
                'Format': Equal(NAMEID_FORMAT_TRANSIENT,
                                msg=DEFAULT_VALUE_ERROR.format(
                                    NAMEID_FORMAT_TRANSIENT)),
            },
            'children': {},
            'text': str
        },
        required=True,
    )
    name_id_policy = Schema(
        {
            'attrs': {
                'Format': Equal(NAMEID_FORMAT_TRANSIENT,
                                msg=DEFAULT_VALUE_ERROR.format(
                                    NAMEID_FORMAT_TRANSIENT)),
            },
            'children': {},
            'text': None,
        },
        required=True,
    )
    conditions = Schema(
        {
            'attrs': {
                'NotBefore': All(str, self._check_utc_date),
                'NotOnOrAfter': All(str, self._check_utc_date),
            },
            'children': {},
            'text': None,
        },
        required=True,
    )
    authn_context_class_ref = Schema(
        {
            'attrs': {},
            'children': {},
            'text': All(
                str,
                In(SPID_LEVELS,
                   msg=DEFAULT_LIST_VALUE_ERROR.format(SPID_LEVELS)))
        },
        required=True,
    )
    requested_authn_context = Schema(
        {
            'attrs': {
                'Comparison': str
            },
            'children': {
                '{%s}AuthnContextClassRef' % (ASSERTION):
                    authn_context_class_ref
            },
            'text': None
        },
        required=True,
    )
    scoping = Schema(
        {
            'attrs': {
                'ProxyCount': Equal('0', msg=DEFAULT_VALUE_ERROR.format('0'))
            },
            'children': {},
            'text': None
        },
        required=True,
    )
    signature = Schema(
        {
            'attrs': dict,
            'children': dict,
            'text': None
        },
        required=True,
    )
    subject = Schema(
        {
            'attrs': {
                'Format': Equal(
                    NAMEID_FORMAT_ENTITY,
                    msg=DEFAULT_VALUE_ERROR.format(NAMEID_FORMAT_ENTITY)),
                'NameQualifier': str
            },
            'children': {},
            'text': None
        },
        required=True,
    )

    # LOGIN
    def check_assertion_consumer_service(attrs):
        # Exactly one of [AssertionConsumerServiceIndex] or
        # [AssertionConsumerServiceURL + ProtocolBinding] must be present.
        keys = attrs.keys()
        if ('AssertionConsumerServiceURL' in keys
                and 'ProtocolBinding' in keys
                and 'AssertionConsumerServiceIndex' not in keys):
            if attrs['ProtocolBinding'] != BINDING_HTTP_POST:
                raise Invalid(
                    DEFAULT_VALUE_ERROR.format(BINDING_HTTP_POST),
                    path=['ProtocolBinding'])
            return attrs
        elif ('AssertionConsumerServiceURL' not in keys
                and 'ProtocolBinding' not in keys
                and 'AssertionConsumerServiceIndex' in keys):
            if attrs['AssertionConsumerServiceIndex'] \
                    not in assertion_consumer_service_indexes:
                raise Invalid(DEFAULT_LIST_VALUE_ERROR.format(
                    assertion_consumer_service_indexes),
                    path=['AssertionConsumerServiceIndex'])
            return attrs
        else:
            raise Invalid(
                'Uno e uno solo uno tra gli attributi o gruppi di attributi devono essere presenti: '
                '[AssertionConsumerServiceIndex, [AssertionConsumerServiceUrl, ProtocolBinding]]'
            )

    authnrequest_attr_schema = Schema(All(
        {
            'ID': str,
            'Version': Equal('2.0', msg=DEFAULT_VALUE_ERROR.format('2.0')),
            'IssueInstant': All(str, self._check_utc_date,
                                self._check_date_in_range),
            'Destination': In(receivers,
                              msg=DEFAULT_LIST_VALUE_ERROR.format(receivers)),
            Optional('ForceAuthn'): str,
            Optional('AttributeConsumingServiceIndex'):
                In(attribute_consuming_service_indexes,
                   msg=DEFAULT_LIST_VALUE_ERROR.format(
                       attribute_consuming_service_indexes)),
            Optional('AssertionConsumerServiceIndex'): str,
            Optional('AssertionConsumerServiceURL'): str,
            Optional('ProtocolBinding'): str,
        },
        check_assertion_consumer_service,
    ), required=True)

    authnrequest_schema = {
        '{%s}AuthnRequest' % (PROTOCOL): {
            'attrs': authnrequest_attr_schema,
            'children': Schema(
                {
                    Optional('{%s}Subject' % (ASSERTION)): subject,
                    '{%s}Issuer' % (ASSERTION): issuer,
                    '{%s}NameIDPolicy' % (PROTOCOL): name_id_policy,
                    Optional('{%s}Conditions' % (ASSERTION)): conditions,
                    '{%s}RequestedAuthnContext' % (PROTOCOL):
                        requested_authn_context,
                    Optional('{%s}Scoping' % (PROTOCOL)): scoping,
                },
                required=True,
            ),
            'text': None
        }
    }

    if self._binding == BINDING_HTTP_POST:
        # BUG FIX: the original did ``...['children'].extend = {...}``,
        # which assigned a dict over the bound Schema.extend method and
        # never added the <Signature> requirement. Schema.extend() must be
        # called; it returns a new, merged Schema.
        _req_key = '{%s}AuthnRequest' % (PROTOCOL)
        authnrequest_schema[_req_key]['children'] = \
            authnrequest_schema[_req_key]['children'].extend(
                {'{%s}Signature' % (SIGNATURE): signature})

    authn_request = Schema(
        authnrequest_schema,
        required=True,
    )

    # LOGOUT
    logout_request = Schema(
        {
            '{%s}LogoutRequest' % (PROTOCOL): {
                'attrs': {
                    'ID': str,
                    'Version': Equal('2.0',
                                     msg=DEFAULT_VALUE_ERROR.format('2.0')),
                    'IssueInstant': All(str, self._check_utc_date,
                                        self._check_date_in_range),
                    'Destination': In(
                        receivers,
                        msg=DEFAULT_LIST_VALUE_ERROR.format(receivers)),
                },
                'children': {
                    '{%s}Issuer' % (ASSERTION): issuer,
                    '{%s}NameID' % (ASSERTION): name_id,
                    '{%s}SessionIndex' % (PROTOCOL): dict,
                },
                'text': None
            }
        },
        required=True,
    )

    # self._action is guaranteed to be one of the two by the guard above.
    if self._action == 'login':
        saml_schema = authn_request
    else:
        saml_schema = logout_request
    errors = []
    try:
        saml_schema(data)
    except MultipleInvalid as e:
        # Translate each voluptuous error into a ValidationDetail with an
        # xpath-like location string.
        for err in e.errors:
            _val = data
            _paths = []
            _attr = None
            for idx, _path in enumerate(err.path):
                if _path != 'children':
                    if _path == 'attrs':
                        try:
                            _attr = err.path[(idx + 1)]
                        except IndexError:
                            _attr = ''
                        break
                    _paths.append(_path)
            path = '/'.join(_paths)
            path = 'xpath: {}'.format(path)
            if _attr is not None:
                path = '{} - attribute: {}'.format(path, _attr)
            # NOTE(review): this walk assumes every component of err.path
            # indexes a dict in ``data`` — verify for attribute errors.
            for _ in err.path:
                _val = _val.get(_)
            errors.append(
                ValidationDetail(_val, None, None, None, None, err.msg, path))
        raise SPIDValidationError(details=errors)
def job():
    """Return a voluptuous validator for a LAVA job definition.

    The validator combines the job dictionary schema with the module's
    ``extra_checks`` callable.
    """
    # Keys accepted inside the job-level "context" dictionary.
    known_context_keys = [
        # qemu variables
        "arch",
        "boot_console",
        "boot_root",
        "cpu",
        "extra_options",
        "guestfs_driveid",
        "guestfs_interface",
        "guestfs_size",
        "machine",
        "memory",
        "model",
        "monitor",
        "netdevice",
        "serial",
        "vga",
        # others
        "bootloader_prompt",
        "console_device",
        "extra_kernel_args",
        "extra_nfsroot_args",
        "kernel_loglevel",
        "kernel_start_message",
        "lava_test_results_dir",
        "menu_interrupt_prompt",
        "mustang_menu_list",
        "test_character_delay",
        "tftp_mac_address",
    ]
    context_schema = Schema(
        {In(known_context_keys): Any(int, str, [int, str])}, extra=False
    )

    # Schema for a single lava-lxc protocol entry.
    lxc_container = {
        Required("name"): str,
        Required("distribution"): str,
        Required("release"): str,
        Optional("arch"): str,
        Optional("mirror"): str,
        Optional("persist"): bool,
        Optional("security_mirror"): str,
        Optional("template"): str,
        Optional("timeout"): timeout(),
        Optional("verbose"): bool,
    }

    # A multinode role is either device-backed or connection-backed.
    device_role = {
        Required("device_type"): str,
        Required("count"): Range(min=0),
        Optional("context"): dict,
        Optional("tags"): [str],
        Optional("timeout"): timeout(),
    }
    connection_role = {
        Required("connection"): str,
        Required("count"): Range(min=0),
        Required("expect_role"): str,
        Required("host_role"): str,
        Optional("request"): str,
        Optional("tags"): [str],
        Optional("timeout"): timeout(),
    }

    job_schema = {
        Required("job_name"): All(str, Length(min=1, max=200)),
        Optional("device_type"): All(str, Length(min=1, max=200)),
        Required("timeouts"): {
            Required("job"): timeout(),
            Optional("action"): timeout(),
            Optional("actions"): {str: timeout()},
            Optional("connection"): timeout(),
            Optional("connections"): {str: timeout()},
        },
        Optional("context"): context_schema,
        Optional("metadata"): {str: object},
        Optional("priority"): Any("high", "medium", "low", Range(min=0, max=100)),
        Optional("tags"): [str],
        Optional("secrets"): dict,
        Optional("visibility"): Any("public", "personal", {"group": [str]}),
        Optional("protocols"): {
            Optional("lava-lxc"): Any(lxc_container, {str: lxc_container}),
            Optional("lava-multinode"): {
                Required("roles"): {str: Any(device_role, connection_role)},
                Optional("timeout"): timeout(),
            },
            Optional("lava-vland"): Any(
                {str: {str: {Required("tags"): [str]}}},
                {str: {Required("tags"): [str]}},
            ),
            Optional("lava-xnbd"): {
                Required("port"): Any("auto", int),
                Optional("timeout"): timeout(),
            },
        },
        Optional("notify"): notify(),
        Optional("reboot_to_fastboot"): bool,
        Required("actions"): [{Any("boot", "command", "deploy", "test"): dict}],
    }

    return All(job_schema, extra_checks)