Example #1
from voluptuous import Schema, Required, All, Any, Length, Range, Match, In

schema = Schema({
    Required('pipelineName'):
    All(str, Length(min=1)),
    Required('workingDirectory'):
    str,
    Required('crashLogDirectory'):
    str,
    Required('logDirectory'):
    str,
    Required('outputDirectory'):
    str,
    'FSLDIR':
    str,
    'runOnGrid':
    bool,
    'resourceManager':
    str,
    'parallelEnvironment':
    str,
    'queue':
    str,
    'awsOutputBucketCredentials':
    str,
    's3Encryption':
    bool,  # check/normalize
    'maximumMemoryPerParticipant':
    float,
    'maxCoresPerParticipant':
    All(int, Range(min=1)),
Example #2
def test_fix_157():
    # Length(min=1) belongs inside the All() chain, not as Schema's second argument.
    s = Schema(All([Any('one', 'two', 'three')], Length(min=1)))
    assert_equal(['one'], s(['one']))
    assert_raises(MultipleInvalid, s, ['four'])
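A minimal standalone sketch of the same composition, with the imports spelled out and an empty-list case added for illustration:

from voluptuous import All, Any, Length, MultipleInvalid, Schema

# Non-empty list whose items must each be one of three literals.
s = Schema(All([Any('one', 'two', 'three')], Length(min=1)))

assert s(['one', 'two']) == ['one', 'two']
try:
    s([])  # passes the per-item check trivially, then fails Length(min=1)
except MultipleInvalid as exc:
    print(exc)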
Example #3
LOGGER = logging.getLogger(__name__)


def get_lwt_topic(mqtt: dict) -> dict:
    if not mqtt["last_will_topic"]:
        mqtt["last_will_topic"] = f"{mqtt['client_id']}/lwt"
    return mqtt


SCHEMA = Schema(
    All(
        {
            Required("broker"): str,
            Required("port", default=1883): int,
            Optional("username", default=None): Any(str, None),
            Optional("password", default=None): Any(str, None),
            Optional("client_id", default="viseron"): Any(str, None),
            Optional("discovery_prefix", default="homeassistant"): str,
            Optional("last_will_topic", default=None): Any(str, None),
        },
        get_lwt_topic,
    ))


class MQTTConfig:
    schema = SCHEMA

    def __init__(self, mqtt):
        self._broker = mqtt.broker
        self._port = mqtt.port
        self._username = mqtt.username
        self._password = mqtt.password
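A rough check of how SCHEMA above behaves, with made-up input values: the dict schema fills the defaults first, then get_lwt_topic derives the topic.

config = SCHEMA({"broker": "192.168.1.10"})
print(config["port"])             # 1883, filled from the default
print(config["client_id"])        # "viseron", filled from the default
print(config["last_will_topic"])  # "viseron/lwt", derived by get_lwt_topic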
Example #4
]

schemas = {
    'repos': [
        {
            Required('name'): str,
            'sortname': str,
            'singular': str,
            Required('type'): Any('repository', 'site', 'modules'),
            Required('desc'): str,
            'statsgroup': str,
            Required('family'): Any(*families),
            'ruleset': Any(Any(*rulesets), [Any(*rulesets)]),  # XXX: make required
            'color': str,
            'valid_till': date,
            'default_maintainer': All(str, Contains('@')),
            'update_period': Any(int, str),
            Required('minpackages'): int,
            Required('sources'): [
                {
                    Required('name'): Any(str, [str]),
                    'disabled': bool,
                    Required('fetcher'): str,
                    Required('parser'): str,
                    'url': str,  # not Url(), as there may be rsync or cvs addresses

                    # git fetcher args
                    'branch': str,
                    'subrepo': str,
                    'sparse_checkout': [str],
                    'depth': Any(int, None),
Example #5
def LSValues(validator=None, msg=None):
    def f(v):
        if not isinstance(v, list):
            return [v]
        return v
    return All(f, All([LSValue(validator)], Length(min=1)))
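A sketch of the scalar-to-list normalisation this helper builds on, with a placeholder item validator standing in for LSValue(validator):

from voluptuous import All, Any, Length, Schema

def ensure_list(v):
    # Same idea as f() above: wrap a bare scalar into a one-item list.
    return v if isinstance(v, list) else [v]

item = Any(str, int)  # placeholder for LSValue(validator)
values = Schema(All(ensure_list, [item], Length(min=1)))

print(values("foo"))       # -> ['foo']
print(values(["a", "b"]))  # -> ['a', 'b']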
Example #6
from voluptuous import (Schema, All, Any, Lower, Coerce, DefaultTo, Optional)
from udata.harvest.filters import (boolean, email, to_date, slug,
                                   normalize_tag, normalize_string, is_url,
                                   empty_none, hash)

RESOURCE_TYPES = ('file', 'file.upload', 'api', 'documentation', 'image',
                  'visualization')

resource = {
    'id':
    basestring,
    'position':
    int,
    'name':
    All(DefaultTo(''), basestring),
    'description':
    All(basestring, normalize_string),
    'format':
    All(basestring, Lower),
    'mimetype':
    Any(All(basestring, Lower), None),
    'size':
    Any(Coerce(int), None),
    'hash':
    Any(All(basestring, hash), None),
    'created':
    All(basestring, to_date),
    'last_modified':
    Any(All(basestring, to_date), None),
    'url':
    All(basestring),
Example #7
# -*- coding: utf-8 -*-

from voluptuous import Schema, All, Required, Match, Length, Range, In, Coerce, Datetime

from tlutil.validator import to_strip, to_split
from tlutil.validator import email_validator, ipv4_validator, tfsUrl_validator, nickname_validator

user_id_schema = Schema(
    {Required('user_id'): All(Coerce(int), Range(min=10001))})

user_ids_schema = Schema(
    {Required('user_ids'): All(Coerce(to_split), Range(min=10001))})

mobile_register_schema = Schema({
    Required('mobile'):
    All(unicode, Length(min=11, max=15)),
    Required('nickname'):
    All(Coerce(unicode), Length(min=2, max=20), Match(nickname_validator)),
    Required('password'):
    All(unicode, Length(min=6, max=20)),
    Required('platform'):
    All(Coerce(int), Range(min=0, max=3)),
    Required('user_ip'):
    All(unicode, Match(ipv4_validator)),
    # gender: f = female, m = male, u = unknown
    Required('gender', default='u'):
    In(frozenset(['f', 'm', 'u'])),
    Required('cover', default=''):
    All(unicode),
    Required('avatar', default=''):
    All(unicode),
Example #8
MAP_SCHEMA = Schema(
    {
        "version": Any(int, None),
        Required("id"): str,
        Required("url"): Url,
        "url_valid_for_seconds": Any(int, None),
        Optional("run_id"): str,  # documented, but not present
        "status": Any(str, None),
        "launched_from": Any(str, None),
        "error": Any(str, None),
        "category": Any(int, None),
        "mode": Any(int, None),
        "modifier": Any(int, None),
        "start_at": Any(str, None),
        "end_at": Any(str, None),
        "end_orientation_relative_degrees": All(int, Range(min=0, max=360)),
        "run_charge_at_start": All(int, Range(min=0, max=100)),
        "run_charge_at_end": All(int, Range(min=0, max=100)),
        "suspended_cleaning_charging_count": Any(int, None),
        "time_in_suspended_cleaning": Any(int, None),
        "time_in_error": Any(int, None),
        "time_in_pause": Any(int, None),
        "cleaned_area": Any(float, None),
        "base_count": Any(int, None),
        "is_docked": Any(bool, None),
        "delocalized": Any(bool, None),
        # Everything below this line is not documented, but still present
        "generated_at": Any(str, None),
        "persistent_map_id": Any(int, str, None),
        "cleaned_with_persistent_map_id": Any(int, str, None),
        "valid_as_persistent_map": Any(bool, None),
Example #9
        return schema(v)
    return v


return_contains_schema = Any(
    All(
        Schema({
            Required('description'):
            Any(list_string_types, *string_types),
            'returned':
            Any(*string_types),  # only returned on top level
            Required('type'):
            Any('bool', 'complex', 'dict', 'float', 'int', 'list', 'str'),
            'version_added':
            Any(float, *string_types),
            'sample':
            Any(None, list, dict, int, float, *string_types),
            'example':
            Any(None, list, dict, int, float, *string_types),
            'contains':
            Any(None, *list({str_type: Self} for str_type in string_types)),
            # in case of type='list' elements define type of individual item in list
            'elements':
            Any(None, 'bits', 'bool', 'bytes', 'dict', 'float', 'int', 'json',
                'jsonarg', 'list', 'path', 'raw', 'sid', 'str'),
        }),
        Schema(return_contains)),
    Schema(type(None)),
)

# This generates list of dicts with keys from string_types and return_contains_schema value
# for example in Python 3: {str: return_contains_schema}
Example #10
    def __init__(self, paths):
        self.configfiles = self._list_config_files(paths)

        configparser = RawConfigParser()
        config_tmp = configparser.read(self.configfiles)
        self.conf = dict()
        for section in configparser.sections():
            self.conf[section] = dict(configparser.items(section))

        #self.conf = ConfigObj(self.configfile, interpolation=False)

        self.schemas = {
            "storage":
            Schema({
                Optional("storage_path"): All(str),
                Optional("storage_type"): Any('yaml', 'json', 'pickle'),
            }),
            "cloud":
            Schema(
                {
                    "provider": Any('ec2_boto', 'google', 'openstack'),
                    "ec2_url": Url(str),
                    Optional("ec2_access_key"): All(str, Length(min=1)),
                    Optional("ec2_secret_key"): All(str, Length(min=1)),
                    "ec2_region": All(str, Length(min=1)),
                    "auth_url": All(str, Length(min=1)),
                    "username": All(str, Length(min=1)),
                    "password": All(str, Length(min=1)),
                    "tenant_name": All(str, Length(min=1)),
                    Optional("region_name"): All(str, Length(min=1)),
                    "gce_project_id": All(str, Length(min=1)),
                    "gce_client_id": All(str, Length(min=1)),
                    "gce_client_secret": All(str, Length(min=1)),
                    "nova_client_api": nova_api_version()
                },
                extra=True),
            "cluster":
            Schema(
                {
                    "cloud": All(str, Length(min=1)),
                    "setup_provider": All(str, Length(min=1)),
                    "login": All(str, Length(min=1)),
                },
                required=True,
                extra=True),
            "setup":
            Schema({
                "provider": All(str, Length(min=1)),
            },
                   required=True,
                   extra=True),
            "login":
            Schema(
                {
                    "image_user": All(str, Length(min=1)),
                    "image_user_sudo": All(str, Length(min=1)),
                    "image_sudo": Boolean(str),
                    "user_key_name": All(str, Length(min=1)),
                    "user_key_private": can_read_file(),
                    "user_key_public": can_read_file()
                },
                required=True)
        }
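For reference, the simplest of the per-section schemas above can be exercised on its own (the data values here are invented):

from voluptuous import Any, MultipleInvalid, Optional, Schema

storage_schema = Schema({
    Optional("storage_path"): str,
    Optional("storage_type"): Any('yaml', 'json', 'pickle'),
})

print(storage_schema({"storage_path": "/var/lib/app", "storage_type": "json"}))
try:
    storage_schema({"storage_type": "xml"})  # not one of the allowed backends
except MultipleInvalid as exc:
    print(exc)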
Example #11
    Length,
    Match,
    message,
    Optional,
    Range,
    Schema,
)

from .misc import (
    make_ts,
    parse_timedelta,
)

key = All(
    str,
    Length(min=1),
    Match("^[a-zA-Z0-9-_@]+$"),
)

dotted_key = All(
    str,
    Length(min=1),
    Match("^[a-zA-Z0-9-_@.]+$"),
)

seasonality = Schema({
    Optional('daytime', default=False): Boolean(),
    Optional('weekday', default=False): Boolean(),
})

score = Any(All(Any(int, float), Range(min=0, max=100)), None)
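Run inside the same module, the validators above are plain callables and can be exercised directly (a quick, informal check):

from voluptuous import Invalid

print(key("sensor-01"))   # matches ^[a-zA-Z0-9-_@]+$
print(score(99.5))        # a float within 0..100
print(score(None))        # None is explicitly allowed

try:
    key("not valid!")     # space and '!' fall outside the allowed charset
except Invalid as exc:
    print(exc)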
Example #12
    def validate(self):
        """
        Validate the given configuration,
        converting properties to native Python types.

        The configuration to check must have been given to the
        constructor and stored in :py:attr:`self.config`.

        :raises: :py:class:`voluptuous.Invalid` if one property is invalid
        :raises: :py:class:`voluptuous.MultipleInvalid` if multiple
                 properties are not compliant
        """
        self._pre_validate()

        # schema to validate all cluster properties
        schema = {
            "cluster": {
                "cloud": All(str, Length(min=1)),
                "setup_provider": All(str, Length(min=1)),
                "login": All(str, Length(min=1)),
            },
            "setup": {
                "provider":
                All(str, Length(min=1)),
                Optional("playbook_path"):
                can_read_file(),
                Optional("ansible_command"):
                All(can_read_file(), can_execute_file()),
                Optional("ansible_extra_args"):
                All(str, Length(min=1)),
                Optional("ssh_pipelining"):
                Boolean(str),
            },
            "login": {
                "image_user": All(str, Length(min=1)),
                "image_user_sudo": All(str, Length(min=1)),
                "image_sudo": Boolean(str),
                "user_key_name": All(str, Length(min=1)),
                "user_key_private": can_read_file(),
                "user_key_public": can_read_file(),
            },
        }

        cloud_schema_ec2 = {
            "provider": 'ec2_boto',
            "ec2_url": Url(str),
            Optional("ec2_access_key"): All(str, Length(min=1)),
            Optional("ec2_secret_key"): All(str, Length(min=1)),
            "ec2_region": All(str, Length(min=1)),
            Optional("request_floating_ip"): Boolean(str),
            Optional("vpc"): All(str, Length(min=1)),
            Optional("instance_profile"): All(str, Length(min=1)),
        }
        cloud_schema_gce = {
            "provider": 'google',
            "gce_client_id": All(str, Length(min=1)),
            "gce_client_secret": All(str, Length(min=1)),
            "gce_project_id": All(str, Length(min=1)),
            Optional("noauth_local_webserver"): Boolean(str),
            Optional("zone"): All(str, Length(min=1)),
            Optional("network"): All(str, Length(min=1)),
        }

        cloud_schema_openstack = {
            "provider": 'openstack',
            "auth_url": All(str, Length(min=1)),
            "username": All(str, Length(min=1)),
            "password": All(str, Length(min=1)),
            "project_name": All(str, Length(min=1)),
            Optional("request_floating_ip"): Boolean(str),
            Optional("region_name"): All(str, Length(min=1)),
            Optional("nova_api_version"): nova_api_version(),
        }

        node_schema = {
            "flavor": All(str, Length(min=1)),
            "image_id": All(str, Length(min=1)),
            "security_group": All(str, Length(min=1)),
            Optional("network_ids"): All(str, Length(min=1)),
        }

        # validation
        validator = Schema(schema, required=True, extra=True)
        node_validator = Schema(node_schema, required=True, extra=True)
        ec2_validator = Schema(cloud_schema_ec2, required=True, extra=False)
        gce_validator = Schema(cloud_schema_gce, required=True, extra=False)
        openstack_validator = Schema(cloud_schema_openstack,
                                     required=True,
                                     extra=False)

        if not self.config:
            raise Invalid("No clusters found in configuration.")

        for cluster, properties in self.config.iteritems():
            self.config[cluster] = validator(properties)

            if 'provider' not in properties['cloud']:
                raise Invalid("Missing `provider` option in cluster `%s`" %
                              cluster)
            try:
                cloud_props = properties['cloud']
                if properties['cloud']['provider'] == "ec2_boto":
                    self.config[cluster]['cloud'] = ec2_validator(cloud_props)
                elif properties['cloud']['provider'] == "google":
                    self.config[cluster]['cloud'] = gce_validator(cloud_props)
                elif properties['cloud']['provider'] == "openstack":
                    self.config[cluster]['cloud'] = openstack_validator(
                        cloud_props)
            except MultipleInvalid as ex:
                raise Invalid(
                    "Invalid configuration for cloud section `cloud/%s`: %s" %
                    (properties['cluster']['cloud'],
                     str.join(", ", [str(i) for i in ex.errors])))

            if 'nodes' not in properties or len(properties['nodes']) == 0:
                raise Invalid("No nodes configured for cluster `%s`" % cluster)

            for node, props in properties['nodes'].iteritems():
                # check name pattern to conform hostnames
                match = re.search(r'^[a-zA-Z0-9-]*$', node)
                if not match:
                    raise Invalid(
                        "Invalid name `%s` for node group. A valid node group"
                        " can only consist of letters, digits or the hyphen"
                        " character (`-`)" % (node, ))

                node_validator(props)

                if (properties['cloud']['provider'] == 'ec2_boto'
                        and 'vpc' in self.config[cluster]['cloud']
                        and 'network_ids' not in props):
                    raise Invalid("Node group `%s/%s` is being used in"
                                  " a VPC, so it must specify network_ids." %
                                  (cluster, node))

                if (properties['cloud']['provider'] == 'ec2_boto'
                        and 'network_ids' in props
                        and 'vpc' not in self.config[cluster]['cloud']):
                    raise Invalid("Cluster `%s` must specify a VPC to place"
                                  " `%s` instances in %s" %
                                  (cluster, node, props['network_ids']))

        self._post_validate()
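A cut-down reproduction of the nested "cluster" check above, showing how required=True and extra=True interact (the data is invented):

from voluptuous import All, Length, MultipleInvalid, Schema

cluster_schema = Schema(
    {"cluster": {
        "cloud": All(str, Length(min=1)),
        "setup_provider": All(str, Length(min=1)),
        "login": All(str, Length(min=1)),
    }},
    required=True,
    extra=True,
)

ok = {
    "cluster": {"cloud": "ec2", "setup_provider": "ansible", "login": "default"},
    "nodes": {"frontend": {}},   # unknown section, allowed by extra=True
}
print(cluster_schema(ok))

try:
    cluster_schema({"cluster": {"cloud": ""}})  # empty value, missing required keys
except MultipleInvalid as exc:
    print([str(e) for e in exc.errors])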
Example #13
        extra=ALLOW_EXTRA
    )
    if v.get('type') == 'complex':
        return schema(v)
    return v


return_schema = Any(
    All(
        Schema(
            {
                any_string_types: {
                    Required('description'): Any(list_string_types, *string_types),
                    Required('returned'): Any(*string_types),
                    Required('type'): Any('string', 'list', 'boolean', 'dict', 'complex', 'bool', 'float', 'int', 'dictionary', 'str'),
                    'version_added': Any(float, *string_types),
                    'sample': Any(None, list, dict, int, float, *string_types),
                    'example': Any(None, list, dict, int, float, *string_types),
                    'contains': object,
                }
            }
        ),
        Schema({any_string_types: return_contains})
    ),
    Schema(type(None)),
)


deprecation_schema = Schema(
    {
        # Only list branches that are deprecated or may have docs stubs in
        # Deprecation cycle changed at 2.4 (though not retroactively)
Example #14
            carbon2_batch = [carbon2(*sample, scrape_ts) for sample in batch]
            body = "\n".join(carbon2_batch).encode("utf-8")
            resp = self._sumo_session.post(
                self._config["sumo_http_url"],
                data=zlib.compress(body, level=1),
                headers={
                    "Content-Type": "application/vnd.sumologic.carbon2",
                    "Content-Encoding": "gzip",
                },
            )
            resp.raise_for_status()


global_config_schema = Schema({
    Required("run_interval_seconds", default=60):
    All(int, Range(min=1)),
    Required("target_threads", default=10):
    All(int, Range(min=1, max=50)),
    Required("batch_size", default=1000):
    All(int, Range(min=1)),
    Required("retries", default=5):
    All(int, Range(min=1, max=20)),
    Required("backoff_factor", default=0.2):
    All(float, Range(min=0)),
    "source_category":
    str,
    "source_host":
    str,
    "source_name":
    str,
    "dimensions":
Example #15
from yombo.constants import SENTINEL
from yombo.core.entity import Entity
from yombo.core.log import get_logger
from yombo.core.library import YomboLibrary
from yombo.core.schemas import StateSchema
from yombo.mixins.library_db_child_mixin import LibraryDBChildMixin
from yombo.mixins.library_db_parent_mixin import LibraryDBParentMixin
from yombo.mixins.library_search_mixin import LibrarySearchMixin
from yombo.mixins.systemdata_mixin import SystemDataParentMixin, SystemDataChildMixin
from yombo.utils import random_int

logger = get_logger("library.states")

ACTION_STATE_SCHEMA = Schema({
    Required('gateway_id'):
    All(str, Length(min=2)),
    Required('state_name'):
    All(str, Length(min=2)),
    Required('value'):
    All(str, Length(min=2)),
    Required('human_value'):
    All(str, Length(min=2)),
    Required('value_type'): ['str', 'dict', 'list', 'int', 'float', 'epoch'],
    Required('weight'):
    All(int, Range(min=0)),
})


class State(Entity, LibraryDBChildMixin, SystemDataChildMixin):
Example #16
config_contract = Schema(
    {
        Required('enabled'): bool,
        Required('materialized'): basestring,
        Required('post-hook'): [hook_contract],
        Required('pre-hook'): [hook_contract],
        Required('vars'): dict,
        Required('quoting'): dict,
        Required('column_types'): dict,
    },
    extra=ALLOW_EXTRA)

parsed_node_contract = unparsed_node_contract.extend({
    # identifiers
    Required('unique_id'):
    All(basestring, Length(min=1, max=255)),
    Required('fqn'):
    All(list, [All(basestring)]),
    Required('schema'):
    basestring,
    Required('refs'): [All(tuple)],

    # parsed fields
    Required('depends_on'): {
        Required('nodes'): [All(basestring, Length(min=1, max=255))],
        Required('macros'): [All(basestring, Length(min=1, max=255))],
    },
    Required('empty'):
    bool,
    Required('config'):
    config_contract,
Example #17
    ),
]

CONF = cfg.CONF
CONF.register_opts(keystone_opts, COLLECTOR_MONASCA_OPTS)
CONF.register_opts(collector_monasca_opts, COLLECTOR_MONASCA_OPTS)

METRICS_CONF = ck_utils.load_conf(CONF.collect.metrics_conf)

MONASCA_EXTRA_SCHEMA = {
    Required('extra_args'): {
        # Key corresponding to the resource id in a metric's dimensions
        # Allows to adapt the resource identifier. Should not need to be
        # modified in a standard OpenStack installation
        Required('resource_key', default='resource_id'):
        All(str, Length(min=1)),
        Required('aggregation_method', default='max'):
        In(['max', 'mean', 'min']),
        # In case the metrics in Monasca do not belong to the project
        # cloudkitty is identified in
        Required('forced_project_id', default=''):
        str,
    },
}


class EndpointNotFound(Exception):
    """Exception raised if the Monasca endpoint is not found"""


class MonascaCollector(collector.BaseCollector):
Example #18

def custom_schema():
    custom_dict = {}

    for tag, settings in ops_settings['column_mappings'].items():
        custom_dict[tag] = Required(list, msg='Must be a list')

    return custom_dict


geom_schema = {
    Optional('geojson'):
    Required(object, msg='Must be a geojson object'),
    Optional('bbox'):
    Required(All(list, Length(min=2, max=2)),
             msg='Must be length of {}'.format(2)),
    Optional('buffer'):
    Required(All(
        Coerce(int),
        Range(min=0, max=ops_settings['maximum_search_radius_for_points'])),
             msg='Must be between 1 and {}'.format(
                 ops_settings['maximum_search_radius_for_points']))
}

filters_schema = {
    Optional('category_group_ids'):
    Required(All(categories_tools.category_group_ids,
                 Length(max=ops_settings['maximum_categories'])),
             msg='Must be one of {} and have a maximum amount of {}'.format(
                 categories_tools.category_group_ids,
Example #19
connection_contract = Schema({
    Required('type'): Any('postgres', 'redshift', 'snowflake'),
    Required('name'): Any(None, basestring),
    Required('state'): Any('init', 'open', 'closed', 'fail'),
    Required('transaction_open'): bool,
    Required('handle'): Any(None, object),
    Required('credentials'): object,
})

postgres_credentials_contract = Schema({
    Required('dbname'): basestring,
    Required('host'): basestring,
    Required('user'): basestring,
    Required('pass'): basestring,
    Required('port'): All(int, Range(min=0, max=65535)),
    Required('schema'): basestring,
})

snowflake_credentials_contract = Schema({
    Required('account'): basestring,
    Required('user'): basestring,
    Required('password'): basestring,
    Required('database'): basestring,
    Required('schema'): basestring,
    Required('warehouse'): basestring,
    Optional('role'): basestring,
})

credentials_mapping = {
    'postgres': postgres_credentials_contract,
Example #20
    def validate(self, request):
        xmlstr = request.saml_request
        data = saml_to_dict(xmlstr)
        atcss = []
        if self._action == 'login':
            req_type = 'AuthnRequest'
            service = 'single_sign_on_service'
        elif self._action == 'logout':
            req_type = 'LogoutRequest'
            service = 'single_logout_service'
        issuer_name = data.get(
            '{urn:oasis:names:tc:SAML:2.0:protocol}%s' % (req_type),
            {}).get('children',
                    {}).get('{urn:oasis:names:tc:SAML:2.0:assertion}Issuer',
                            {}).get('text')
        if issuer_name and issuer_name not in self._metadata.service_providers(
        ):
            raise UnknownEntityIDError(
                'entity ID {} non registrato'.format(issuer_name))
        for k, _md in self._metadata.items():
            if k == issuer_name:
                _srvs = _md.get('spsso_descriptor', [])
                for _srv in _srvs:
                    for _acs in _srv.get('attribute_consuming_service', []):
                        atcss.append(_acs)
        try:
            ascss = self._metadata.assertion_consumer_service(issuer_name)
        except Exception:
            ascss = []
        attribute_consuming_service_indexes = [
            str(el.get('index')) for el in atcss
        ]
        assertion_consumer_service_indexes = [
            str(el.get('index')) for el in ascss
        ]
        receivers = self._config.receivers(service)

        issuer = Schema(
            {
                'attrs': {
                    'Format':
                    Equal(
                        NAMEID_FORMAT_ENTITY,
                        msg=DEFAULT_VALUE_ERROR.format(NAMEID_FORMAT_ENTITY)),
                    'NameQualifier':
                    Equal(issuer_name,
                          msg=DEFAULT_VALUE_ERROR.format(issuer_name)),
                },
                'children': {},
                'text': str,
            },
            required=True,
        )

        name_id = Schema(
            {
                'attrs': {
                    'NameQualifier':
                    str,
                    'Format':
                    Equal(NAMEID_FORMAT_TRANSIENT,
                          msg=DEFAULT_VALUE_ERROR.format(
                              NAMEID_FORMAT_TRANSIENT)),
                },
                'children': {},
                'text': str
            },
            required=True,
        )

        name_id_policy = Schema(
            {
                'attrs': {
                    'Format':
                    Equal(NAMEID_FORMAT_TRANSIENT,
                          msg=DEFAULT_VALUE_ERROR.format(
                              NAMEID_FORMAT_TRANSIENT)),
                },
                'children': {},
                'text': None,
            },
            required=True,
        )

        conditions = Schema(
            {
                'attrs': {
                    'NotBefore': All(str, self._check_utc_date),
                    'NotOnOrAfter': All(str, self._check_utc_date),
                },
                'children': {},
                'text': None,
            },
            required=True,
        )

        authn_context_class_ref = Schema(
            {
                'attrs': {},
                'children': {},
                'text':
                All(
                    str,
                    In(SPID_LEVELS,
                       msg=DEFAULT_LIST_VALUE_ERROR.format(SPID_LEVELS)))
            },
            required=True,
        )

        requested_authn_context = Schema(
            {
                'attrs': {
                    'Comparison': str
                },
                'children': {
                    '{%s}AuthnContextClassRef' % (ASSERTION):
                    authn_context_class_ref
                },
                'text': None
            },
            required=True,
        )

        scoping = Schema(
            {
                'attrs': {
                    'ProxyCount': Equal('0',
                                        msg=DEFAULT_VALUE_ERROR.format('0'))
                },
                'children': {},
                'text': None
            },
            required=True,
        )

        signature = Schema(
            {
                'attrs': dict,
                'children': dict,
                'text': None
            },
            required=True,
        )

        subject = Schema(
            {
                'attrs': {
                    'Format':
                    Equal(
                        NAMEID_FORMAT_ENTITY,
                        msg=DEFAULT_VALUE_ERROR.format(NAMEID_FORMAT_ENTITY)),
                    'NameQualifier':
                    str
                },
                'children': {},
                'text': None
            },
            required=True,
        )

        # LOGIN

        def check_assertion_consumer_service(attrs):
            keys = attrs.keys()
            if ('AssertionConsumerServiceURL' in keys
                    and 'ProtocolBinding' in keys
                    and 'AssertionConsumerServiceIndex' not in keys):
                if attrs['ProtocolBinding'] != BINDING_HTTP_POST:
                    raise Invalid(
                        DEFAULT_VALUE_ERROR.format(BINDING_HTTP_POST),
                        path=['ProtocolBinding'])
                return attrs

            elif ('AssertionConsumerServiceURL' not in keys
                  and 'ProtocolBinding' not in keys
                  and 'AssertionConsumerServiceIndex' in keys):
                if attrs[
                        'AssertionConsumerServiceIndex'] not in assertion_consumer_service_indexes:
                    raise Invalid(DEFAULT_LIST_VALUE_ERROR.format(
                        assertion_consumer_service_indexes),
                                  path=['AssertionConsumerServiceIndex'])
                return attrs

            else:
                raise Invalid(
                    'Uno e uno solo uno tra gli attributi o gruppi di attributi devono essere presenti: '
                    '[AssertionConsumerServiceIndex, [AssertionConsumerServiceUrl, ProtocolBinding]]'
                )

        authnrequest_attr_schema = Schema(All(
            {
                'ID':
                str,
                'Version':
                Equal('2.0', msg=DEFAULT_VALUE_ERROR.format('2.0')),
                'IssueInstant':
                All(str, self._check_utc_date, self._check_date_in_range),
                'Destination':
                In(receivers, msg=DEFAULT_LIST_VALUE_ERROR.format(receivers)),
                Optional('ForceAuthn'):
                str,
                Optional('AttributeConsumingServiceIndex'):
                In(attribute_consuming_service_indexes,
                   msg=DEFAULT_LIST_VALUE_ERROR.format(
                       attribute_consuming_service_indexes)),
                Optional('AssertionConsumerServiceIndex'):
                str,
                Optional('AssertionConsumerServiceURL'):
                str,
                Optional('ProtocolBinding'):
                str,
            },
            check_assertion_consumer_service,
        ),
                                          required=True)

        authnrequest_schema = {
            '{%s}AuthnRequest' % (PROTOCOL): {
                'attrs':
                authnrequest_attr_schema,
                'children':
                Schema(
                    {
                        Optional('{%s}Subject' % (ASSERTION)): subject,
                        '{%s}Issuer' % (ASSERTION): issuer,
                        '{%s}NameIDPolicy' % (PROTOCOL): name_id_policy,
                        Optional('{%s}Conditions' % (ASSERTION)): conditions,
                        '{%s}RequestedAuthnContext' % (PROTOCOL):
                        requested_authn_context,
                        Optional('{%s}Scoping' % (PROTOCOL)): scoping,
                    },
                    required=True,
                ),
                'text':
                None
            }
        }

        if self._binding == BINDING_HTTP_POST:
            # Schema.extend() returns a new schema; assign the result back
            # (setting an .extend attribute on the schema object has no effect).
            _key = '{%s}AuthnRequest' % (PROTOCOL)
            authnrequest_schema[_key]['children'] = authnrequest_schema[_key][
                'children'].extend({'{%s}Signature' % (SIGNATURE): signature})

        authn_request = Schema(
            authnrequest_schema,
            required=True,
        )

        # LOGOUT

        logout_request = Schema(
            {
                '{%s}LogoutRequest' % (PROTOCOL): {
                    'attrs': {
                        'ID':
                        str,
                        'Version':
                        Equal('2.0', msg=DEFAULT_VALUE_ERROR.format('2.0')),
                        'IssueInstant':
                        All(str, self._check_utc_date,
                            self._check_date_in_range),
                        'Destination':
                        In(receivers,
                           msg=DEFAULT_LIST_VALUE_ERROR.format(receivers)),
                    },
                    'children': {
                        '{%s}Issuer' % (ASSERTION): issuer,
                        '{%s}NameID' % (ASSERTION): name_id,
                        '{%s}SessionIndex' % (PROTOCOL): dict,
                    },
                    'text': None
                }
            },
            required=True,
        )

        saml_schema = None
        if self._action == 'login':
            saml_schema = authn_request
        elif self._action == 'logout':
            saml_schema = logout_request
        errors = []
        try:
            saml_schema(data)
        except MultipleInvalid as e:
            for err in e.errors:
                _val = data
                _paths = []
                _attr = None
                for idx, _path in enumerate(err.path):
                    if _path != 'children':
                        if _path == 'attrs':
                            try:
                                _attr = err.path[(idx + 1)]
                            except IndexError:
                                _attr = ''
                            break
                        _paths.append(_path)
                path = '/'.join(_paths)
                path = 'xpath: {}'.format(path)
                if _attr is not None:
                    path = '{} - attribute: {}'.format(path, _attr)
                for _ in err.path:
                    _val = _val.get(_)
                errors.append(
                    ValidationDetail(_val, None, None, None, None, err.msg,
                                     path))
            raise SPIDValidationError(details=errors)
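The error handling above walks MultipleInvalid.errors and err.path; a minimal illustration of that pattern with the Equal validator, using made-up data:

from voluptuous import Equal, MultipleInvalid, Schema

version_schema = Schema(
    {'attrs': {'Version': Equal('2.0', msg='expected 2.0')}},
    required=True,
)

try:
    version_schema({'attrs': {'Version': '1.1'}})
except MultipleInvalid as e:
    for err in e.errors:
        print(err.path, err.msg)   # ['attrs', 'Version'] expected 2.0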
Example #21
"""
Configuration data and functions
"""

from socket import getfqdn
from six import string_types
from voluptuous import All, Any, Boolean, Coerce, Optional, Range, Schema

# pylint: disable=no-value-for-parameter
DEFAULTS = {
    # Configuration file: logging
    'logging': {
        Optional('loglevel', default='INFO'):
        Any(None, 'NOTSET', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL',
            All(Coerce(int), Any(0, 10, 20, 30, 40, 50))),
        Optional('logfile', default=None):
        Any(None, *string_types),
        Optional('logformat', default='default'):
        Any('default', 'json', 'logstash'),
        Optional('blacklist', default=['elasticsearch', 'urllib3']):
        Any(None, list),
    },
    # Configuration file: backend
    'backend': {
        Optional('host', default='127.0.0.1'):
        Any(None, *string_types),
        Optional('port', default=7600):
        All(Coerce(int), Range(min=1025, max=65534)),
        Optional('debug', default=False):
        Boolean(),
        Optional('cache_timeout', default=60):
Example #22
                        "DependentSamplers depend on undefined quantities: " +
                        bad_symbols)

                # 2: Circular dependencies
                bad_symbols = ", ".join(sorted(unevaluated_dependents.keys()))
                raise ConfigError(
                    "Circularly dependent DependentSamplers detected: " +
                    bad_symbols)

        sample_list.append(sample_dict)
    return sample_list


# Used by NumericalGrader
schema_user_functions_no_random = All(has_keys_of_type(six.string_types),
                                      coerce_string_keys_to_text_type,
                                      {Extra: is_callable})
# Used by FormulaGrader and friends
schema_user_functions = All(
    has_keys_of_type(six.string_types),
    coerce_string_keys_to_text_type,
    {
        Extra:
        Any(is_callable, All([is_callable], Coerce(SpecificFunctions)),
            FunctionSamplingSet)
    },
)


def construct_functions(default_functions, user_funcs):
    """
Example #23
from voluptuous import Required, All
from datatypes.core import DictionaryValidator

from datatypes.validators import deed_validator

entry_schema = {
    Required("template"): All(unicode),
    Required("full_text"): All(unicode),
    Required("fields"): {unicode: object},
    Required("deeds"): [deed_validator.deed_schema],
    Required("notes"): []
}


class Entry(DictionaryValidator):

    def define_schema(self):
        return entry_schema

    def define_error_dictionary(self):
        return {
            "template": "template is a required field",
            "full_text": "full_text is a required field",
            "fields": "fields are required",
            "deeds": "deeds are required",
            "notes": "notes are required"
        }
Example #24
def validate_user_constants(*allow_types):
    return All(
        has_keys_of_type(six.string_types),
        coerce_string_keys_to_text_type,
        {Extra: Any(*allow_types)},
    )
Example #25
            'values(%s, %s, %s, NOW(), %s, %s)', const.seq.PRE_PAYMENT, id,
            supplier_shop_account_id, external_money.amount,
            "减去给商户的预付款(前期销售额不能提现)")

        self.redirect('/external-money')


list_schema = Schema({
    'supplier': str,
    'source': str,
}, extra=True)

add_schema = Schema(
    {
        'supplier': str,
        'amount': All(Coerce(Decimal), Range(min=Decimal(0.1))),
        'source': str,
        'flag': str,
        'id': str,
        'type': str,
        'expire_at': str,
        'remark': str,
        'short_name': str,
        'action': Any('add_external_money', 'edit_external_money'),
        'fee': str,
        'received_at': str,
        'fee_remark': str,
        'received_type': Any('OPERATE_FEE', 'ADS_FEE', 'ONLINE_FEE')
    },
    extra=True)
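The Coerce(Decimal) + Range pair from add_schema, pulled out on its own; note that Decimal('0.1') sidesteps the float artefact of Decimal(0.1):

from decimal import Decimal
from voluptuous import All, Coerce, MultipleInvalid, Range, Schema

amount_schema = Schema(
    {'amount': All(Coerce(Decimal), Range(min=Decimal('0.1')))},
    extra=True,
)

print(amount_schema({'amount': '12.50', 'supplier': 'acme'}))  # string coerced to Decimal
try:
    amount_schema({'amount': '0.01'})  # below the minimum
except MultipleInvalid as exc:
    print(exc)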
Example #26
EXTRA_AGGREGATION_METHODS_FOR_ARCHIVE_POLICY = set(
    (str(i) + 'pct' for i in six.moves.range(1, 100)))

for agg in list(EXTRA_AGGREGATION_METHODS_FOR_ARCHIVE_POLICY):
    EXTRA_AGGREGATION_METHODS_FOR_ARCHIVE_POLICY.add("rate:%s" % agg)

# The aggregation method that one can use to configure the archive
# policies also supports the 'pct' (percentile) operation. Therefore,
# we also expose this as a configuration.
VALID_AGGREGATION_METHODS_FOR_METRICS = BASIC_AGGREGATION_METHODS.union(
    EXTRA_AGGREGATION_METHODS_FOR_ARCHIVE_POLICY)

GNOCCHI_EXTRA_SCHEMA = {
    Required('extra_args'): {
        Required('resource_type'): All(str, Length(min=1)),
        # Due to Gnocchi model, metric are grouped by resource.
        # This parameter permits to adapt the key of the resource identifier
        Required('resource_key', default='id'): All(str, Length(min=1)),
        Required('aggregation_method', default='max'):
            In(VALID_AGGREGATION_METHODS_FOR_METRICS),
        Required('re_aggregation_method', default='max'):
            In(BASIC_AGGREGATION_METHODS),
        Required('force_granularity', default=3600): All(int, Range(min=0)),
    },
}


class AssociatedResourceNotFound(Exception):
    """Exception raised when no resource can be associated with a metric."""
Example #27
        return None
    if isinstance(types, str):
        types = [typ.strip() for typ in types.split(",")]

    unsupported = set(types) - {"reflink", "hardlink", "symlink", "copy"}
    if unsupported:
        raise Invalid("Unsupported cache type(s): {}".format(
            ", ".join(unsupported)))

    return types


# Checks that value is either true or false and converts it to bool
to_bool = Bool = All(
    Lower,
    Any("true", "false"),
    lambda v: v == "true",
    msg="expected true or false",
)


def Choices(*choices):
    """Checks that value belongs to the specified set of values

    Args:
        *choices: pass allowed values as arguments, or pass a list or
            tuple as a single argument
    """
    return Any(*choices, msg="expected one of {}".format(", ".join(choices)))


def ByUrl(mapping):
Example #28
def validate_metadata_file(path, is_ansible, check_deprecation_dates=False):
    """Validate explicit runtime metadata file"""
    try:
        with open(path, 'r') as f_path:
            routing = yaml.safe_load(f_path)
    except yaml.error.MarkedYAMLError as ex:
        print('%s:%d:%d: YAML load failed: %s' %
              (path, ex.context_mark.line + 1, ex.context_mark.column + 1,
               re.sub(r'\s+', ' ', str(ex))))
        return
    except Exception as ex:  # pylint: disable=broad-except
        print('%s:%d:%d: YAML load failed: %s' %
              (path, 0, 0, re.sub(r'\s+', ' ', str(ex))))
        return

    if is_ansible:
        current_version = get_ansible_version()
    else:
        current_version = get_collection_version()

    # Updates to schema MUST also be reflected in the documentation
    # ~https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html

    # plugin_routing schema

    avoid_additional_data = Schema(Any(
        {
            Required('removal_version'): any_value,
            'warning_text': any_value,
        }, {
            Required('removal_date'): any_value,
            'warning_text': any_value,
        }),
                                   extra=PREVENT_EXTRA)

    deprecation_schema = All(
        # The first schema validates the input, and the second makes sure no extra keys are specified
        Schema({
            'removal_version':
            partial(removal_version,
                    is_ansible=is_ansible,
                    current_version=current_version),
            'removal_date':
            partial(isodate, check_deprecation_date=check_deprecation_dates),
            'warning_text':
            Any(*string_types),
        }),
        avoid_additional_data)

    tombstoning_schema = All(
        # The first schema validates the input, and the second makes sure no extra keys are specified
        Schema({
            'removal_version':
            partial(removal_version,
                    is_ansible=is_ansible,
                    current_version=current_version,
                    is_tombstone=True),
            'removal_date':
            partial(isodate, is_tombstone=True),
            'warning_text':
            Any(*string_types),
        }),
        avoid_additional_data)

    plugin_routing_schema = Any(
        Schema(
            {
                ('deprecation'): Any(deprecation_schema),
                ('tombstone'): Any(tombstoning_schema),
                ('redirect'): Any(*string_types),
            },
            extra=PREVENT_EXTRA), )

    list_dict_plugin_routing_schema = [{
        str_type: plugin_routing_schema
    } for str_type in string_types]

    plugin_schema = Schema(
        {
            ('action'): Any(None, *list_dict_plugin_routing_schema),
            ('become'): Any(None, *list_dict_plugin_routing_schema),
            ('cache'): Any(None, *list_dict_plugin_routing_schema),
            ('callback'): Any(None, *list_dict_plugin_routing_schema),
            ('cliconf'): Any(None, *list_dict_plugin_routing_schema),
            ('connection'): Any(None, *list_dict_plugin_routing_schema),
            ('doc_fragments'): Any(None, *list_dict_plugin_routing_schema),
            ('filter'): Any(None, *list_dict_plugin_routing_schema),
            ('httpapi'): Any(None, *list_dict_plugin_routing_schema),
            ('inventory'): Any(None, *list_dict_plugin_routing_schema),
            ('lookup'): Any(None, *list_dict_plugin_routing_schema),
            ('module_utils'): Any(None, *list_dict_plugin_routing_schema),
            ('modules'): Any(None, *list_dict_plugin_routing_schema),
            ('netconf'): Any(None, *list_dict_plugin_routing_schema),
            ('shell'): Any(None, *list_dict_plugin_routing_schema),
            ('strategy'): Any(None, *list_dict_plugin_routing_schema),
            ('terminal'): Any(None, *list_dict_plugin_routing_schema),
            ('test'): Any(None, *list_dict_plugin_routing_schema),
            ('vars'): Any(None, *list_dict_plugin_routing_schema),
        },
        extra=PREVENT_EXTRA)

    # import_redirection schema

    import_redirection_schema = Any(
        Schema(
            {
                ('redirect'): Any(*string_types),
                # import_redirect doesn't currently support deprecation
            },
            extra=PREVENT_EXTRA))

    list_dict_import_redirection_schema = [{
        str_type: import_redirection_schema
    } for str_type in string_types]

    # top level schema

    schema = Schema(
        {
            # All of these are optional
            ('plugin_routing'):
            Any(plugin_schema),
            ('import_redirection'):
            Any(None, *list_dict_import_redirection_schema),
            # requires_ansible: In the future we should validate this with SpecifierSet
            ('requires_ansible'):
            Any(*string_types),
            ('action_groups'):
            dict,
        },
        extra=PREVENT_EXTRA)

    # Ensure schema is valid

    try:
        schema(routing)
    except MultipleInvalid as ex:
        for error in ex.errors:
            # No way to get line/column numbers
            print('%s:%d:%d: %s' %
                  (path, 0, 0, humanize_error(routing, error)))
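The extra=PREVENT_EXTRA behaviour relied on throughout, together with humanize_error, in a tiny self-contained form (the data and key names are invented):

from voluptuous import MultipleInvalid, PREVENT_EXTRA, Schema
from voluptuous.humanize import humanize_error

routing_stub = Schema({'redirect': str}, extra=PREVENT_EXTRA)

data = {'redirect': 'ns.col.plugin', 'unexpected': True}
try:
    routing_stub(data)
except MultipleInvalid as ex:
    for error in ex.errors:
        print(humanize_error(data, error))  # e.g. "extra keys not allowed @ data['unexpected']"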
Example #29
            Required('type'): 'decision-task',

            # Treeherder symbol for the cron task
            Required('treeherder-symbol'): basestring,

            # --target-tasks-method './mach taskgraph decision' argument
            'target-tasks-method': basestring,
        }),

        # when to run it

        # Optional set of projects on which this job should run; if omitted, this will
        # run on all projects for which cron tasks are set up.  This works just like the
        # `run_on_projects` attribute, where strings like "release" and "integration" are
        # expanded to cover multiple repositories.  (taskcluster/docs/attributes.rst)
        'run-on-projects': [basestring],

        # Array of times at which this task should run.  These *must* be a
        # multiple of 15 minutes, the minimum scheduling interval.  This field
        # can be keyed by project so that each project has a different schedule
        # for the same job.
        'when': optionally_keyed_by(
            'project',
            [{'hour': int, 'minute': All(int, even_15_minutes)}]),
    }],
})


def validate(cron_yml):
    validate_schema(cron_yml_schema, cron_yml, "Invalid .cron.yml:")
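even_15_minutes is not shown above; a plausible stand-in (assumed behaviour) makes the 'when' schema runnable:

from voluptuous import All, Invalid, Schema

def even_15_minutes(minute):
    # Assumed behaviour of the validator referenced above.
    if minute % 15 != 0:
        raise Invalid("minute must be a multiple of 15")
    return minute

when_schema = Schema([{'hour': int, 'minute': All(int, even_15_minutes)}])
print(when_schema([{'hour': 7, 'minute': 30}]))   # ok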
Example #30
    def get_schema(cls, pin, version):
        return Schema({
            Required('type'): cls.descriptor_name,
            Required('pin'): pin,
            Required('name'): All(str, NotEmpty()),
        })