Example no. 1
def test_and():
    assert And(int, lambda n: 0 < n < 5).validate(3) == 3
    with SE: And(int, lambda n: 0 < n < 5).validate(3.33)
    assert And(Use(int), lambda n: 0 < n < 5).validate(3.33) == 3
    with SE: And(Use(int), lambda n: 0 < n < 5).validate('3.33')
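
The test above turns on the difference between And(int, ...), which only type-checks, and And(Use(int), ...), which converts the value before applying the predicate. A minimal sketch, assuming only the schema package is installed:

from schema import And, Use, SchemaError

type_checked = And(int, lambda n: 0 < n < 5)    # rejects anything that is not already an int
converted = And(Use(int), lambda n: 0 < n < 5)  # calls int() on the value first

assert converted.validate(3.33) == 3            # 3.33 is truncated to 3 by Use(int)
try:
    type_checked.validate(3.33)                 # a plain int check fails for a float
except SchemaError:
    pass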
Example no. 2
class Config(SkeleYaml):
    """
    Root Config Class for Skelebot YAML File

    Built on top of the SkeleYaml parent Object in order to inherit and extend the functionality
    of yaml file generation and parsing
    """

    schema = Schema(
        {
            'name':
            And(str, error='\'name\' must be a String'),
            Optional('env'):
            And(str, error='\'env\' must be a String'),
            Optional('description'):
            And(str, error='\'description\' must be a String'),
            Optional('maintainer'):
            And(str, error='\'maintainer\' must be a String'),
            Optional('contact'):
            And(str, error='\'contact\' must be a String'),
            Optional('host'):
            And(str, error='\'host\' must be a String'),
            'language':
            And(str, error='\'language\' must be a String'),
            Optional('baseImage'):
            And(str, error='\'baseImage\' must be a String'),
            Optional('timezone'):
            And(str, error='\'timezone\' must be a String'),
            Optional('primaryJob'):
            And(str, error='\'primaryJob\' must be a String'),
            Optional('primaryExe'):
            And(str,
                Use(str.upper),
                lambda s: s in ('CMD', 'ENTRYPOINT'),
                error='\'primaryExe\' must be CMD or ENTRYPOINT'),
            Optional('ephemeral'):
            And(bool, error='\'ephemeral\' must be a Boolean'),
            Optional('dependencies'):
            Or(dict, list, error='\'dependencies\' must be a Dict or List'),
            Optional('ignores'):
            And(list, error='\'ignores\' must be a List'),
            Optional('jobs'):
            And(list, error='\'jobs\' must be a List'),
            Optional('ports'):
            And(list, error='\'ports\' must be a List'),
            Optional('components'):
            And(dict, error='\'components\' must be a Dictionary'),
            Optional('params'):
            And(list, error='\'params\' must be a List'),
            Optional('commands'):
            And(list, error='\'commands\' must be a List'),
            Optional('pythonVersion'):
            And(str,
                Or(*PYTHON_VERSIONS),
                error='\'pythonVersion\' must be one of:' +
                ', '.join(PYTHON_VERSIONS))
        },
        ignore_extra_keys=True)
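    # For reference (hypothetical values): 'name' and 'language' are the only
    # required keys, so a dict as small as
    #   Config.schema.validate({'name': 'my-project', 'language': 'Python'})
    # passes, and unknown keys are ignored thanks to ignore_extra_keys=True.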

    name = None
    env = None
    description = None
    version = None
    maintainer = None
    contact = None
    host = None
    language = None
    baseImage = None
    timezone = None
    primaryJob = None
    primaryExe = None
    ephemeral = None
    dependencies = None
    ignores = None
    jobs = None
    ports = None
    components = None
    params = None
    commands = None
    pythonVersion = '3.6'

    def __init__(self,
                 name=None,
                 env=None,
                 description=None,
                 version=None,
                 maintainer=None,
                 contact=None,
                 host=None,
                 language=None,
                 baseImage=None,
                 timezone=None,
                 primaryJob=None,
                 primaryExe=None,
                 ephemeral=None,
                 dependencies=None,
                 ignores=None,
                 jobs=None,
                 ports=None,
                 components=None,
                 params=None,
                 commands=None,
                 pythonVersion='3.6'):
        """Initialize the config object with all provided optional attributes"""

        self.name = name
        self.env = env
        self.description = description
        self.version = version
        self.maintainer = maintainer
        self.contact = contact
        self.host = host
        self.language = language
        self.baseImage = baseImage
        self.timezone = timezone
        self.primaryJob = primaryJob
        self.primaryExe = primaryExe.upper() if primaryExe is not None else "CMD"
        self.ephemeral = ephemeral
        self.dependencies = dependencies if dependencies is not None else []
        self.ignores = ignores if ignores is not None else []
        self.jobs = jobs if jobs is not None else []
        self.ports = ports if ports is not None else []
        self.components = components if components is not None else []
        self.params = params if params is not None else []
        self.commands = commands if commands is not None else []
        self.pythonVersion = pythonVersion

    def toDict(self):
        """
        Extends the parent function to add logic for handling the conversion of
        components, since the class structure and yaml structure do not match
        """

        components_dict = {}
        for component in self.components:
            component_dict = component.toDict()
            if component_dict != {}:
                components_dict[component.__class__.__name__.lower()] = component_dict

        dct = super().toDict()
        dct["components"] = components_dict
        return dct

    def getBaseImage(self):
        """
        Returns the proper base image based on the values for language and kerberos,
        or returns the user defined base image if provided in the config yaml
        """

        if self.baseImage:
            image = self.baseImage
        else:
            language = self.language if self.language is not None else "NA"
            image = LANGUAGE_IMAGE[language]
            if language == 'Python':
                image['base'] = image['base'].format(
                    pythonVersion=self.pythonVersion)

            variant = "base"
            for component in self.components:
                if component.__class__.__name__.lower() == "kerberos":
                    variant = "krb"

            image = image[variant]

        return image

    def getImageName(self):
        """Construct and return the name for the docker image based on the project name"""
        image_name = self.name.lower().replace(" ", "-")
        if self.env:
            image_name += "-{env}".format(env=self.env)
        return image_name

    def getHost(self, job=None, args=None):
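        """Return the host from args, the job, or the config, in that order of preference"""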
        host = self.host if self.host is not None else None
        if job is not None:
            host = job.host if job.host is not None else host
        if args is not None:
            host = args.host if getattr(args, 'host', None) is not None else host
        return host

    def loadComponents(self, config):
        """
        Parses the components section of the skelebot.yaml config to generate the complete list
        of components for the project, based on the active component list and each component's
        activation attribute
        """

        componentFactory = ComponentFactory()
        if (config is None):
            # Build the default components for a non-skelebot project
            defaultActivations = [Activation.EMPTY, Activation.ALWAYS]
            self.components = componentFactory.buildComponents(
                defaultActivations)
        else:
            # Build the components that are defined in the config yaml data
            compNames = []
            components = []
            if ("components" in config):
                configComps = config["components"]
                for compName in configComps:
                    # Ensures that the artifactory component is not loaded if a repository is present
                    if ("repository"
                            not in configComps) or (compName != "artifactory"):
                        component = componentFactory.buildComponent(
                            compName, configComps[compName])
                        if (component is not None):
                            components.append(component)
                            compNames.append(component.__class__.__name__)

            # Build the additional components that are active without configuration data
            activations = [Activation.PROJECT, Activation.ALWAYS]
            components.extend(
                componentFactory.buildComponents(activations,
                                                 ignores=compNames))
            self.components = components

    @classmethod
    def load(cls, config):
        """Load the config Dict from the yaml file into the Config object"""

        cfg = cls()
        if config is not None:

            cls.validate(config)
            values = {}
            for attr, value in config.items():
                if (attr in vars(Config)) and (attr != "components") and (
                        attr != "version"):
                    if (attr == "jobs"):
                        values[attr] = Job.loadList(value)
                    elif (attr == "params"):
                        values[attr] = Param.loadList(value)
                    else:
                        values[attr] = value

            cfg = cls(**values)

        cfg.loadComponents(config)

        return cfg
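
A typical way to use this class is to load a parsed skelebot.yaml file; a minimal sketch, assuming PyYAML is available (an assumption, not part of the snippet above):

import yaml

with open("skelebot.yaml") as yaml_file:
    cfg = Config.load(yaml.safe_load(yaml_file))

print(cfg.getImageName(), cfg.getBaseImage())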
Example no. 3
    def get(self):
        params = self.validate_argument(
            {Optional("page", default=None): Use(int)})
        posts = PostService.get_posts(**params)
        self.success(posts)

    print('Sent %s messages in %.3fs' % (count, time.time() - start))

    # Close all of those connections.
    for client in clients:
        client.close()


if __name__ == '__main__':
    options = docopt(main.__doc__)

    try:
        options = Schema({
            '--client-id':
            And(str, error='--client-id is required'),
            '--token-file':
            Or(None, Use(str, error='--token-file should be path')),
            '--host':
            str,
            '--port':
            Use(int),
            '--count':
            Use(int),
            '--sleep':
            Use(float),
            '--connections':
            Use(int),
            '--word-count':
            Use(int),
            object:
            object,
        }).validate(options)
Example no. 5
ActionsApi = Blueprint(
    'ActionsApi',
    __name__,
    description='Endpoints for API methods not associated with `arn`.')


class NextflowConfigParseArgs(m.Schema):
    class Meta:
        unknown = m.INCLUDE

    nextflow_config = m.fields.String(location="json")


valid_schema = Schema({
    'workDir': And(Use(str), lambda s: s.startswith("s3://")),
    'process': {
        'scratch': '/docker_scratch',
        'queue': And(Use(str)),
        'executor': 'awsbatch',
        Optional('container'): And(Use(str))
    },
    'aws': {
        'region': 'us-west-2',
        'batch': {
            'volumes': '/docker_scratch',
            'cliPath': '/home/ec2-user/miniconda/bin/aws'
        }
    }
})
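
For illustration, a config dict that satisfies valid_schema could look like the following (the bucket and queue names are hypothetical):

valid_schema.validate({
    'workDir': 's3://my-bucket/work',   # must start with s3://
    'process': {
        'scratch': '/docker_scratch',   # exact literal required by the schema
        'queue': 'my-batch-queue',
        'executor': 'awsbatch',         # exact literal required by the schema
    },
    'aws': {
        'region': 'us-west-2',
        'batch': {
            'volumes': '/docker_scratch',
            'cliPath': '/home/ec2-user/miniconda/bin/aws',
        },
    },
})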
Example no. 6
    def __init__(self, name, unvalidated_tree, sections, config):
        """Fix up settings that depend on the [DXR] section or have
        inter-setting dependencies. (schema can't do multi-setting validation
        yet, and configobj can't do cross-section interpolation.)

        Add a ``config`` attr to trees as a shortcut back to the [DXR] section
        and a ``name`` attr to save cumbersome tuple unpacks in callers.

        """
        self.config = config
        self.name = name

        schema = Schema({
            Optional('build_command', default='make -j {workers}'): basestring,
            Optional('clean_command', default='make clean'): basestring,
            Optional('description', default=''): basestring,
            Optional('disabled_plugins', default=plugin_list('')): Plugins,
            Optional('enabled_plugins', default=plugin_list('*')): Plugins,
            Optional('es_index', default=config.es_index): basestring,
            Optional('es_shards', default=5):
                Use(int, error='"es_shards" must be an integer.'),
            Optional('ignore_patterns',
                     default=['.hg', '.git', 'CVS', '.svn', '.bzr',
                              '.deps', '.libs', '.DS_Store', '.nfs*', '*~',
                              '._*']): WhitespaceList,
            Optional('object_folder', default=None): AbsPath,
            'source_folder': AbsPath,
            Optional('source_encoding', default='utf-8'): basestring,
            Optional('temp_folder', default=None): AbsPath,
            Optional('p4web_url', default='http://p4web/'): basestring,
            Optional('workers', default=None): WORKERS_VALIDATOR,
            Optional(basestring): dict})
        tree = schema.validate(unvalidated_tree)

        if tree['temp_folder'] is None:
            tree['temp_folder'] = config.temp_folder
        if tree['object_folder'] is None:
            tree['object_folder'] = tree['source_folder']
        if tree['workers'] is None:
            tree['workers'] = config.workers

        # Convert enabled_plugins to a list of plugins:
        if tree['disabled_plugins'].is_all:
            # * doesn't really mean "all" in a tree. It means "everything the
            # [DXR] section enabled".
            tree['disabled_plugins'] = config.enabled_plugins
        else:
            # Add anything globally disabled to our local disabled list:
            tree['disabled_plugins'].extend(p for p in config.disabled_plugins
                                            if p not in
                                            tree['disabled_plugins'])

        if tree['enabled_plugins'].is_all:
            tree['enabled_plugins'] = [p for p in config.enabled_plugins
                                       if p not in tree['disabled_plugins']]
        tree['enabled_plugins'].insert(0, core_plugin())

        # Split ignores into paths and filenames:
        tree['ignore_paths'] = [i for i in tree['ignore_patterns']
                                if i.startswith('/')]
        tree['ignore_filenames'] = [i for i in tree['ignore_patterns']
                                    if not i.startswith('/')]

        # Delete misleading, useless, or raw values people shouldn't use:
        del tree['ignore_patterns']
        del tree['disabled_plugins']

        # Validate plugin config:
        enableds_with_all_optional_config = set(
            p for p in tree['enabled_plugins']
            if all(isinstance(k, Optional) for k in p.config_schema.iterkeys()))
        plugin_schema = Schema(merge(
            dict((Optional(name) if plugin in enableds_with_all_optional_config
                                    or plugin not in tree['enabled_plugins']
                                 else name,
                  plugin.config_schema)
                 for name, plugin in all_plugins_but_core().iteritems()),
            # And whatever isn't a plugin section, that we don't care about:
            {object: object}))
        # Insert empty missing sections for enabled plugins with entirely
        # optional config so their defaults get filled in. (Don't insert them
        # if the plugin has any required options; then we wouldn't produce the
        # proper error message about the section being absent.)
        for plugin in enableds_with_all_optional_config:
            tree.setdefault(plugin.name, {})
        tree = plugin_schema.validate(tree)

        super(TreeConfig, self).__init__(tree)
Example no. 7
def main(argv, session=None):
    args = docopt(__doc__, argv=argv)

    # Validate args.
    s = Schema({
        six.text_type:
        Use(bool),
        '<query>':
        Use(lambda x: ' '.join(x)),
        '--parameters':
        Use(lambda x: get_args_dict(x, query_string=True)),
        '--header':
        Or(None,
           And(Use(get_args_dict), dict),
           error='--header must be formatted as --header="key:value"'),
        '--sort':
        list,
        '--field':
        list,
        '--timeout':
        Use(lambda x: float(x[0]), error='--timeout must be integer or float.')
    })
    try:
        args = s.validate(args)
    except SchemaError as exc:
        print('{0}\n{1}'.format(str(exc), printable_usage(__doc__)),
              file=sys.stderr)
        sys.exit(1)

    # Support comma separated values.
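    # e.g. --field=date,title --field=creator yields ['date', 'title', 'creator']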
    fields = list(chain.from_iterable([x.split(',') for x in args['--field']]))
    sorts = list(chain.from_iterable([x.split(',') for x in args['--sort']]))

    r_kwargs = dict(
        headers=args['--header'],
        timeout=args['--timeout'],
    )

    search = session.search_items(args['<query>'],
                                  fields=fields,
                                  sorts=sorts,
                                  params=args['--parameters'],
                                  request_kwargs=r_kwargs)

    try:
        if args['--num-found']:
            print('{0}'.format(search.num_found))
            sys.exit(0)

        for result in search:
            if args['--itemlist']:
                print(result.get('identifier', ''))
            else:
                j = json.dumps(result)
                print(j)
    except ValueError as e:
        print('error: {0}'.format(e), file=sys.stderr)
    except ConnectTimeout as exc:
        print(
            'error: Request timed out. Increase the --timeout and try again.',
            file=sys.stderr)
        sys.exit(1)
    except AuthenticationError as exc:
        print('error: {}'.format(exc), file=sys.stderr)
        sys.exit(1)
Example no. 8
def validation_setup():
    # There's a mix of GET, PUT and POST in these; mixing it up may give us
    # better coverage. The particulars of which PUT and POST calls get which
    # methods are arbitrary, though GET calls have some specificity.

    # We have several kinds of calls we want to validate here:

    # 1. No arguments in the URL or body (no_args)
    @rest.rest_call(['GET', 'POST'], '/no/args', Schema({}))
    # pylint: disable=unused-variable
    def no_args():
        pass

    # 2. Argument in the URL and not in the body (url_args)
    @rest.rest_call(['GET', 'POST'], '/url/args/<arg1>/<arg2>',
                    Schema({
                        'arg1': basestring,
                        'arg2': basestring,
                    }))
    # pylint: disable=unused-variable
    def url_args(arg1, arg2):
        return json.dumps([arg1, arg2])

    # 3. Arguments in both the URL and body (mixed_args)
    @rest.rest_call(['GET', 'PUT'], '/mixed/args/<arg1>',
                    Schema({
                        'arg1': basestring,
                        'arg2': basestring,
                    }))
    # pylint: disable=unused-variable
    def mixed_args(arg1, arg2):
        return json.dumps([arg1, arg2])

    # 4. Arguments in body (query parameters for GET) and not in the URL.
    @rest.rest_call(['GET', 'POST'], '/just/bodyargs',
                    Schema({
                        'arg1': basestring,
                        'arg2': basestring,
                    }))
    # pylint: disable=unused-variable
    def just_bodyargs(arg1, arg2):
        return json.dumps([arg1, arg2])

    # 5. One optional argument.
    @rest.rest_call(['GET', 'POST'], '/just/bodyargs/optional_arg2_int',
                    Schema({
                        'arg1': basestring,
                        Optional('arg2'): Use(int),
                    }))
    # pylint: disable=unused-variable
    def bodyargs_optional_arg2_int(arg1, arg2=-42):
        return json.dumps([arg1, arg2])

    # Let's also make sure we're testing something with a schema that isn't
    # just basestring:
    @rest.rest_call('PUT',
                    '/put-int-body-arg',
                    schema=Schema({"the_value": int}))
    # pylint: disable=unused-variable
    def put_int_body_arg(the_value):
        return json.dumps(the_value)

    @rest.rest_call('GET',
                    '/get-int-body-arg-with-use',
                    schema=Schema({"the_value": Use(int)}))
    # pylint: disable=unused-variable
    def get_int_body_arg_with_use(the_value):
        return json.dumps(the_value)
Example no. 9
# coding=utf-8
# __author__ = 'Mio'

import re
from schema import And, Use, Or
from tornado.escape import utf8


def power_split(value, separator=',', schema=str):
    assert callable(schema)
    value = utf8(value)
    value = value.strip()
    l = re.split(r"\s*" + separator + r"\s*", value)  # this split also strips the whitespace around each separator
    return [v for v in l if v != '']


schema_utf8 = And(Or(Use(utf8), Use(str)), len)  # non-empty
schema_utf8_none = Or(Use(utf8), Use(str))
schema_int = Use(int)
schema_float = Use(float)

# MongoDB object_id
schema_object_id = And(schema_utf8, lambda x: len(x) == 24)
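
A sketch of how these shared validators might be combined into a request schema (the field names are hypothetical):

from schema import Schema, Optional

user_schema = Schema({
    'name': schema_utf8,          # must be a non-empty string
    '_id': schema_object_id,      # must be 24 characters long
    Optional('age'): schema_int,  # coerced with int()
    Optional('score'): schema_float,
})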
Example no. 10
    def play_quizzes():
        body = request.get_json()

        if not body:
            abort(400, 'no json body was found')

        schema = Schema({
            'previous_questions': [
                And(Use(int),
                    lambda question: Question.query.get(question) is not None)
            ],
            Optional('quiz_category'):
            And(Use(int),
                lambda category: Category.query.get(category) is not None)
        })
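        # e.g. a valid body: {'previous_questions': [1, 5], 'quiz_category': 2}
        # (the ids must exist in the database for the lambdas above to pass)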

        # validate quiz input
        quiz_data = {}
        try:
            quiz_data = schema.validate(body)
        except:
            abort(400, 'quiz input was bad or not formatted correctly')

        prev_questions = [
            Question.query.get(question)
            for question in quiz_data['previous_questions']
        ]
        if 'quiz_category' in quiz_data:
            category = Category.query.get(quiz_data['quiz_category'])

            for question in prev_questions:
                if question.category_id != category.id:
                    abort(400, 'a question does not belong to category')

            total_questions = len(category.questions) - len(prev_questions)

            if total_questions == 0:
                return jsonify({
                    'success': True,
                    'total_questions': total_questions,
                    'category': category.id
                })

            question = list(
                filter(lambda question: question not in prev_questions,
                       category.questions))[random.randint(
                           0, total_questions - 1)]

            return jsonify({
                'success': True,
                'question': question.format(),
                'total_questions': total_questions,
                'category': category.id
            })
        else:
            questions = Question.query.all()
            total_questions = len(questions) - len(prev_questions)

            if total_questions == 0:
                return jsonify({
                    'success': True,
                    'total_questions': total_questions,
                })

            question = list(
                filter(lambda question: question not in prev_questions,
                       questions))[random.randint(0, total_questions - 1)]

            return jsonify({
                'success': True,
                'question': question.format(),
                'total_questions': total_questions,
            })
Example no. 11
__ver__ = "0.4.0"

if __name__ == "__main__":
    arguments = docopt(__doc__, version=__ver__)

    schema = Schema({
        '<script>':
        And(
            And(path.isfile, error='Script should exist!'),
            And(lambda f: f.endswith('.py'),
                error='Script should be a python file')),
        '-o':
        str,
        '--props':
        Or(None, Use(json.loads, error='Invalid JSON')),
        '--verbose':
        bool,
        '--help':
        False,
        '--version':
        False
    })

    try:
        arguments = schema.validate(arguments)
    except SchemaError as e:
        sys.exit(e)

    if arguments['--verbose']:
        logging.basicConfig(level=logging.DEBUG)
def main(argv, session):
    args = docopt(__doc__, argv=argv)

    # Validation error messages.
    destdir_msg = '--destdir must be a valid path to a directory.'
    itemlist_msg = '--itemlist must be a valid path to an existing file.'

    # Validate args.
    s = Schema({
        str: Use(bool),
        '--destdir': Or([], And(Use(lambda d: d[0]), dir_exists), error=destdir_msg),
        '--format': list,
        '--glob': Use(lambda l: l[0] if l else None),
        '<file>': list,
        '--search': Or(str, None),
        '--itemlist': Or(None, And(lambda f: os.path.isfile(f)), error=itemlist_msg),
        '<identifier>': Or(str, None),
        '--retries': Use(lambda x: x[0]),
        '--search-parameters': Use(lambda x: get_args_dict(x, query_string=True)),
        '--on-the-fly': Use(bool),
        '--no-change-timestamp': Use(bool),
        '--download-history': Use(bool),
        '--parameters': Use(lambda x: get_args_dict(x, query_string=True)),
    })

    # Filenames should be unicode literals. Support PY2 and PY3.
    if six.PY2:
        args['<file>'] = [f.decode('utf-8') for f in args['<file>']]

    try:
        args = s.validate(args)
        if args['--glob']:
            if args['--format']:
                raise(SchemaError(None, '--glob and --format cannot be used together.'))

    except SchemaError as exc:
        print('{0}\n{1}'.format(
            str(exc), printable_usage(__doc__)), file=sys.stderr)
        sys.exit(1)

    retries = int(args['--retries'])

    if args['--itemlist']:
        with open(args['--itemlist']) as fp:
            ids = [x.strip() for x in fp]
        total_ids = len(ids)
    elif args['--search']:
        try:
            _search = session.search_items(args['--search'],
                                           params=args['--search-parameters'])
            total_ids = _search.num_found
            if total_ids == 0:
                print('error: the query "{0}" returned no results'.format(
                    args['--search']), file=sys.stderr)
                sys.exit(1)
            ids = _search
        except ValueError as e:
            print('error: {0}'.format(e), file=sys.stderr)
            sys.exit(1)

    # Download specific files.
    if args['<identifier>'] and args['<identifier>'] != '-':
        if '/' in args['<identifier>']:
            identifier = args['<identifier>'].split('/')[0]
            files = ['/'.join(args['<identifier>'].split('/')[1:])]
        else:
            identifier = args['<identifier>']
            files = args['<file>']
        total_ids = 1
        ids = [identifier]
    elif args['<identifier>'] == '-':
        total_ids = 1
        ids = sys.stdin
        files = None
    else:
        files = None

    errors = list()
    for i, identifier in enumerate(ids):
        if args['--stdout']:
            item = session.get_item(identifier)
            f = list(item.get_files(args['<file>']))
            try:
                assert len(f) == 1
            except AssertionError:
                print('error: {0}/{1} does not exist!'.format(
                    identifier, args['<file>'][0]), file=sys.stderr)
                sys.exit(1)
            if six.PY2:
                stdout_buf = sys.stdout
            else:
                stdout_buf = sys.stdout.buffer
            f[0].download(retries=args['--retries'],
                          fileobj=stdout_buf,
                          params=args['--parameters'])
            sys.exit(0)
        try:
            identifier = identifier.strip()
        except AttributeError:
            identifier = identifier.get('identifier')
        if total_ids > 1:
            item_index = '{0}/{1}'.format((i + 1), total_ids)
        else:
            item_index = None

        try:
            item = session.get_item(identifier)
        except Exception as exc:
            print('{0}: failed to retrieve item metadata - errors'.format(identifier),
                  file=sys.stderr)
            if 'You are attempting to make an HTTPS' in str(exc):
                print('\n{0}'.format(exc), file=sys.stderr)
                sys.exit(1)
            else:
                continue

        # Otherwise, download the entire item.
        ignore_history_dir = not args['--download-history']
        _errors = item.download(
            files=files,
            formats=args['--format'],
            glob_pattern=args['--glob'],
            dry_run=args['--dry-run'],
            verbose=args['--verbose'],
            silent=args['--silent'],
            ignore_existing=args['--ignore-existing'],
            checksum=args['--checksum'],
            destdir=args['--destdir'],
            no_directory=args['--no-directories'],
            retries=retries,
            item_index=item_index,
            ignore_errors=True,
            on_the_fly=args['--on-the-fly'],
            no_change_timestamp=args['--no-change-timestamp'],
            params=args['--parameters'],
            ignore_history_dir=ignore_history_dir,
        )
        if _errors:
            errors.append(_errors)
    if errors:
        # TODO: add option for a summary/report.
        sys.exit(1)
    else:
        sys.exit(0)
 def validate_args(args):
     """Validate the input parameters."""
     schema = Schema({
         '--ip': And(str, Regex(r'^(?:[0-9]{1,3}\.){3}[0-9]{1,3}$'), error="Invalid input for `--ip`. Please enter a valid IP address. Example: `127.0.0.1`."),
         '--port': And(Use(int), lambda x: 1024 <= x <= 65535, error="Invalid input for `--port`. Please enter a number between 1024 and 65535."),
         '--date': Or(None, And(str, Regex(r'^\s*(3[01]|[12][0-9]|0?[1-9])\.(1[012]|0?[1-9])\.((?:19|20)\d{2})\s*$')), error="Invalid input for `--date`. Please enter a valid date. Example: `01.01.2020`."),
         '--order-date': Or(None, And(str, Regex(r'^\s*(3[01]|[12][0-9]|0?[1-9])\.(1[012]|0?[1-9])\.((?:19|20)\d{2})\s*$')), error="Invalid input for `--order-date`. Please enter a valid date. Example: `01.01.2020`."),
         '--event-date': Or(None, And(str, Regex(r'^\s*(3[01]|[12][0-9]|0?[1-9])\.(1[012]|0?[1-9])\.((?:19|20)\d{2})\s*$')), error="Invalid input for `--event-date`. Please enter a valid date. Example: `01.01.2020`."),
         '--quantity': Or(None, And(Use(int)), error="Invalid input for `--quantity`. Please enter an integer number. Example: `3`."),
         '--name': Or(None, And(Use(str))),
         '--location': Or(None, And(str, Regex(r'^([^0-9]+) ([0-9]+.*?), ([0-9]{5}) (.*)$')), error="Invalid input for `--location`. Please enter a valid address. Example: `Friedrich Ebert Straße 30, 78054 Villingen-Schwenningen`."),
         '--address': Or(None, And(str, Regex(r'^([^0-9]+) ([0-9]+.*?), ([0-9]{5}) (.*)$')), error="Invalid input for `--address`. Please enter a valid address. Example: `Friedrich Ebert Straße 30, 78054 Villingen-Schwenningen`."),
         '--ticket-price': Or(None, And(Use(str), Regex(r'^\s*(?=.*[1-9])\d*(?:\.\d{1,2})?\s*$')), error="Invalid input for `--ticket-price`. Please enter a valid amount of money. Example: `5.60`."),
         '--max-tickets': Or(None, And(Use(int)), error="Invalid input for `--max-tickets`. Please enter an integer number. Example: `3`."),
         '--max-tickets-per-customer': Or(None, And(Use(int)), error="Invalid input for `--max-tickets-per-customer`. Please enter an integer number. Example: `3`."),
         '--sale-start-date': Or(None, And(str, Regex(r'^\s*(3[01]|[12][0-9]|0?[1-9])\.(1[012]|0?[1-9])\.((?:19|20)\d{2})\s*$')), error="Invalid input for `--sale-start-date`. Please enter a valid date. Example: `01.01.2020`."),
         '--sale-period': Or(None, And(Use(int)), error="Invalid input for `--sale-period`. Please enter the number of days as an integer number. Example: `3`."),
         '--budget': Or(None, And(Use(str), Regex(r'^\s*(?=.*[1-9])\d*(?:\.\d{1,2})?\s*$')), error="Invalid input for `--budget`. Please enter a valid amount of money. Example: `5.60`."),
         '<event-id>': Or(None, And(Use(int)), error="Invalid input for `<event-id>`. The event ID can only be an integer number. Example: `3`."),
         '<customer-id>': Or(None, And(Use(int)), error="Invalid input for `<customer-id>`. The customer ID can only be an integer number. Example: `3`."),
         '--year': Or(None, And(Use(int)), error="Invalid input for `--year`. Please enter an integer number. Example: `2020`.")
     })
     if args.get('--date') is not None and args.get('--sale-start-date') is not None:
         event_date = DateHelper.date_to_timestamp(args.get('--date'))
         sale_start_date = DateHelper.date_to_timestamp(
             args.get('--sale-start-date'))
         if sale_start_date > event_date:
             raise SchemaError(
                 None, errors="Invalid input for `--date` or `--sale-start-date`. The event date cannot be earlier than the start date of the sale. Please check your input.")
     try:
         schema.validate(args)
     except SchemaWrongKeyError as ex:
         pass
     except SchemaError as ex:
         sys.exit(ex)
def main(argv, session):
    args = docopt(__doc__, argv=argv)

    # Validate args.
    s = Schema({
        six.text_type: bool,
        '<identifier>': list,
        '--modify': list,
        '--header': Or(None, And(Use(get_args_header_dict), dict),
               error='--header must be formatted as --header="key:value"'),
        '--append': list,
        '--append-list': list,
        '--remove': list,
        '--spreadsheet': Or(None, And(lambda f: os.path.exists(f),
                            error='<file> should be a readable file or directory.')),
        '--target': Or(None, str),
        '--priority': Or(None, Use(int, error='<priority> should be an integer.')),
    })
    try:
        args = s.validate(args)
    except SchemaError as exc:
        print('{0}\n{1}'.format(str(exc), printable_usage(__doc__)), file=sys.stderr)
        sys.exit(1)

    formats = set()
    responses = []

    for i, identifier in enumerate(args['<identifier>']):
        item = session.get_item(identifier)

        # Check existence of item.
        if args['--exists']:
            if item.exists:
                responses.append(True)
                print('{0} exists'.format(identifier))
            else:
                responses.append(False)
                print('{0} does not exist'.format(identifier), file=sys.stderr)
            if (i + 1) == len(args['<identifier>']):
                if all(r is True for r in responses):
                    sys.exit(0)
                else:
                    sys.exit(1)

        # Modify metadata.
        elif (args['--modify'] or args['--append'] or args['--append-list']
              or args['--remove']):
            if args['--modify']:
                metadata_args = args['--modify']
            elif args['--append']:
                metadata_args = args['--append']
            elif args['--append-list']:
                metadata_args = args['--append-list']
            if args['--remove']:
                metadata_args = args['--remove']
            try:
                metadata = get_args_dict(metadata_args)
                if any('/' in k for k in metadata):
                    metadata = get_args_dict_many_write(metadata)
            except ValueError:
                print("error: The value of --modify, --remove, --append or --append-list "
                      "is invalid. It must be formatted as: --modify=key:value",
                      file=sys.stderr)
                sys.exit(1)

            if args['--remove']:
                responses.append(remove_metadata(item, metadata, args))
            else:
                responses.append(modify_metadata(item, metadata, args))
            if (i + 1) == len(args['<identifier>']):
                if all(r.status_code == 200 for r in responses):
                    sys.exit(0)
                else:
                    for r in responses:
                        if r.status_code == 200:
                            continue
                        # We still want to exit 0 if the non-200 is a
                        # "no changes to xml" error.
                        elif 'no changes' in r.content.decode('utf-8'):
                            continue
                        else:
                            sys.exit(1)

        # Get metadata.
        elif args['--formats']:
            for f in item.get_files():
                formats.add(f.format)
            if (i + 1) == len(args['<identifier>']):
                print('\n'.join(formats))

        # Dump JSON to stdout.
        else:
            metadata = json.dumps(item.item_metadata)
            print(metadata)

    # Edit metadata for items in bulk, using a spreadsheet as input.
    if args['--spreadsheet']:
        if not args['--priority']:
            args['--priority'] = -5
        with io.open(args['--spreadsheet'], 'rU', newline='', encoding='utf-8') as csvfp:
            spreadsheet = csv.DictReader(csvfp)
            responses = []
            for row in spreadsheet:
                if not row['identifier']:
                    continue
                item = session.get_item(row['identifier'])
                if row.get('file'):
                    del row['file']
                metadata = dict((k.lower(), v) for (k, v) in row.items() if v)
                responses.append(modify_metadata(item, metadata, args))

            if all(r.status_code == 200 for r in responses):
                sys.exit(0)
            else:
                for r in responses:
                    if r.status_code == 200:
                        continue
                    # We still want to exit 0 if the non-200 is a
                    # "no changes to xml" error.
                    elif 'no changes' in r.content.decode('utf-8'):
                        continue
                    else:
                        sys.exit(1)
Example no. 15
 Optional("accessRights"): Or(str, [str]),
 Optional("accessRightsComments"): Or(str, [str]),
 Optional("bucket"): str,
 Optional("byteSize"): Or(str, int),
 Optional("category"): str,
 Optional("creator"): Or(str, {
     "name": str,
     "email": str,
 }),
 Optional("contactpoint"): [],
 Optional("code"): str,
 Optional("description"): str,
 Optional("distribution"): [],
 Optional("format"): str,
 Optional("identifier"): str,
 Optional("issued"): And(Use(is_datetime_string)),
 Optional("keyword"): Or(str, [str]),
 Optional("keywords"): Or(str, [str]),
 Optional("landingPage"): str,
 Optional("language"): str,
 Optional("license"): Or(str, {
     "name": str,
     "url": str,
 }),
 Optional("modified"): And(Use(is_datetime_string)),
 Optional("notebook"): str,
 Optional("provenance"): Or(str, object),
 Optional("publisher"): str,
 Optional("repo"): str,
 Optional("rights"): str,
 Optional("sample"): [str],
Example no. 16
from functools import partial
from os.path import abspath

from schema import Optional, Use, And

from dxr.config import AbsPath
from dxr.plugins import Plugin, AdHocTreeToIndex, filters_from_namespace, refs_from_namespace
from dxr.plugins.xpidl import filters, refs
from dxr.plugins.xpidl.mappings import mappings
from dxr.plugins.xpidl.indexers import FileToIndex


def split_on_space_into_abspaths(value):
    return map(abspath, value.strip().split())


ColonPathList = And(basestring,
                    Use(split_on_space_into_abspaths),
                    error='This should be a space-separated list of paths.')
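# e.g. ColonPathList.validate('idl xpcom/idl') returns the two entries as a
# list of absolute paths, resolved against the current working directory.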

plugin = Plugin(tree_to_index=partial(AdHocTreeToIndex,
                                      file_to_index_class=FileToIndex),
                refs=refs_from_namespace(refs.__dict__),
                filters=filters_from_namespace(filters.__dict__),
                badge_colors={'xpidl': '#DAF6B9'},
                mappings=mappings,
                config_schema={
                    'header_path': AbsPath,
                    Optional('include_folders', default=[]): ColonPathList
                })
Example no. 17
from funcy import merge
from hashlib import sha1
from more_itertools import first
from pkg_resources import resource_string
from schema import Optional, Use, And, Schema, SchemaError

from dxr.exceptions import ConfigError
from dxr.plugins import all_plugins_but_core, core_plugin
from dxr.utils import cd, if_raises


# Format version, signifying the instance format this web frontend code is
# able to serve. Must match exactly; deploy will do nothing until it does.
FORMAT = resource_string('dxr', 'format').strip()

WORKERS_VALIDATOR = And(Use(int),
                        lambda v: v >= 0,
                        error='"workers" must be a non-negative integer.')
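# e.g. WORKERS_VALIDATOR.validate('8') returns 8, while '-1' or 'lots' raise
# SchemaError with the message above.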


class DotSection(object):
    """In the absence of an actual attribute, let attr lookup fall through to
    ``self._section[attr]``."""

    def __getattr__(self, attr):
        if not hasattr(self, '_section'):  # Happens during unpickling
            raise AttributeError(attr)
        try:
            val = self._section[attr]
        except KeyError:
            raise AttributeError(attr)
Example no. 18
from schema import Schema, And, Use, Optional, Or, Regex
import re
import sys


inp_spec_schema = Schema({
    'format': And(str, Use(str.lower), lambda s: s in ('mxf', 'ts', 'mov', 'mp4', 'mpg')),
    'vcodec': And(str, Use(str.lower), lambda s: s in ('h264', 'mpeg2video', 'copy')),
    'acodec': And(str, Use(str.lower), lambda s: s in ('mp2', 'aac', 'pcm_s24le', 'ac3', 'copy')),
    'n_out_aud_tracks': And(Use(int), lambda n: 1 <= n <= 16),
    'aud_ch': And(Use(int), lambda n: 1 <= n <= 2),
    'gop_length': And(Use(int)),
    'vid_out_resolution': And(str, Use(str.lower),
                              lambda s: s in ('1920x1080', '1280x720', '720x576', '720x480', '640x480')),
    'vid_out_bitrate': Or(And(Use(str)),
                          And(Regex(r'[0-9\.]*[kK]'), Use(str)),
                          And(Regex(r'[0-9\.]*[M]'), Use(str))),
    'vid_out_fps': And(str, Use(float), lambda f: f in [25.0, 29.97, 30.0]),
    # Optional(Regex('aud_map')): Or(((Use(int), Use(int), Use(int))), Use(tuple(Use(int), Use(int)))),
    Optional(Regex('aud_map')): Or(
        And(str, Use(eval), lambda a: 1 <= a[0] < 16 and 1 <= a[1] <= 16),
        And(str, Use(eval),
            lambda a: 1 <= a[0][0] < 16 and 1 <= a[0][1] <= 16 and 1 <= a[1] <= 16)),
    'vid_inp_scan_type': And(str, Use(str.lower), lambda s: s in ('progressive', 'interlaced')),
    'n_inp_aud_tracks': And(Use(int), lambda n: 1 <= n <= 16),
    'streamid_vid': And(Use(int)),
    'streamid_aud_start': And(Use(int)),
})
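
For illustration, an input spec that passes this schema might look like the following (all values are hypothetical):

sample_spec = {
    'format': 'MXF',                    # lower-cased by Use(str.lower) before the membership check
    'vcodec': 'h264',
    'acodec': 'aac',
    'n_out_aud_tracks': '8',            # Use(int) also accepts numeric strings
    'aud_ch': 2,
    'gop_length': 12,
    'vid_out_resolution': '1920x1080',
    'vid_out_bitrate': '15M',
    'vid_out_fps': '25',                # must be a str; converted with float() and then checked
    'vid_inp_scan_type': 'progressive',
    'n_inp_aud_tracks': 8,
    'streamid_vid': 0,
    'streamid_aud_start': 1,
}
inp_spec_schema.validate(sample_spec)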



def _get_vid_bitrate_(s, d):
    if s[-1] in 'kK':
        o_bitrate = int(s[:-1]) * 1000
    elif d['vid_out_bitrate'][-1] in 'M':
Example no. 19
    def __init__(self, input, relative_to=None):
        """Pull in and validate a config file.

        :arg input: A string or dict from which to populate the config
        :arg relative_to: The dir relative to which to interpret relative paths

        Raise ConfigError if the configuration is invalid.

        """
        if not relative_to:
            relative_to = getcwd()
        self.path = relative_to
        with cd(relative_to):
            schema = Schema({
                'DXR': {
                    Optional('temp_folder', default=abspath('dxr-temp-{tree}')):
                        AbsPath,
                    Optional('default_tree', default=None): basestring,
                    Optional('disabled_plugins', default=plugin_list('')): Plugins,
                    Optional('enabled_plugins', default=plugin_list('*')): Plugins,
                    Optional('generated_date',
                             default=datetime.utcnow()
                                             .strftime("%a, %d %b %Y %H:%M:%S +0000")):
                        basestring,
                    Optional('log_folder', default=abspath('dxr-logs-{tree}')):
                        AbsPath,
                    Optional('workers', default=if_raises(NotImplementedError,
                                                          cpu_count,
                                                          1)):
                        WORKERS_VALIDATOR,
                    Optional('skip_stages', default=[]): WhitespaceList,
                    Optional('www_root', default=''): Use(lambda v: v.rstrip('/')),
                    Optional('google_analytics_key', default=''): basestring,
                    Optional('es_hosts', default='http://127.0.0.1:9200/'):
                        WhitespaceList,
                    # A semi-random name, having the tree name and format version in it.
                    Optional('es_index', default='dxr_{format}_{tree}_{unique}'):
                        basestring,
                    Optional('es_alias', default='dxr_{format}_{tree}'):
                        basestring,
                    Optional('es_catalog_index', default='dxr_catalog'):
                        basestring,
                    Optional('es_catalog_replicas', default=1):
                        Use(int, error='"es_catalog_replicas" must be an integer.'),
                    Optional('max_thumbnail_size', default=20000):
                        And(Use(int),
                            lambda v: v >= 0,
                            error='"max_thumbnail_size" must be a non-negative '
                                  'integer.'),
                    Optional('es_indexing_timeout', default=60):
                        And(Use(int),
                            lambda v: v >= 0,
                            error='"es_indexing_timeout" must be a non-negative '
                                  'integer.'),
                    Optional('es_indexing_retries', default=0):
                        And(Use(int),
                            lambda v: v >= 0,
                            error='"es_indexing_retries" must be a non-negative '
                                  'integer.'),
                    Optional('es_refresh_interval', default=60):
                        Use(int, error='"es_refresh_interval" must be an integer.')
                },
                basestring: dict
            })

            # Parse the ini into nested dicts:
            config_obj = ConfigObj(input.splitlines() if isinstance(input,
                                                                    basestring)
                                   else input,
                                   list_values=False)

            try:
                config = schema.validate(config_obj.dict())
            except SchemaError as exc:
                raise ConfigError(exc.code, ['DXR'])

            self._section = config['DXR']

            # Normalize enabled_plugins:
            if self.enabled_plugins.is_all:
                # Then explicitly enable anything that isn't explicitly
                # disabled:
                self._section['enabled_plugins'] = [
                        p for p in all_plugins_but_core().values()
                        if p not in self.disabled_plugins]

            # Now that enabled_plugins and the other keys that TreeConfig
            # depends on are filled out, make some TreeConfigs:
            self.trees = OrderedDict()  # name -> TreeConfig
            for section in config_obj.sections:
                if section != 'DXR':
                    try:
                        self.trees[section] = TreeConfig(section,
                                                         config[section],
                                                         config_obj[section].sections,
                                                         self)
                    except SchemaError as exc:
                        raise ConfigError(exc.code, [section])

        self._section['es_catalog_index'] = self._section['es_catalog_index'].format(
            config_path_hash=self.path_hash())

        # Make sure default_tree is defined:
        if not self.default_tree:
            self._section['default_tree'] = first(self.trees.iterkeys())

        # These aren't intended for actual use; they're just to influence
        # enabled_plugins of trees, and now we're done with them:
        del self._section['enabled_plugins']
        del self._section['disabled_plugins']
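
For context, the ini file parsed here consists of a [DXR] section (whose keys are all optional) plus one section per tree; a minimal sketch that would satisfy this schema and the per-tree schema from Example no. 6 (the tree name and path are hypothetical):

[DXR]

[mozilla-central]
source_folder = /code/mozilla-central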
Example no. 20
import asyncio
import time
import var as V
from knlAgentIntr import KnlAgentIntr
from knlAgentWithoutIpq import KnlAgentWithoutIpq
from devAgent import DevAgent
from appAgent import AppAgent
from sndAgent import SndAgent

if __name__ == "__main__":

    args = docopt(__doc__)
    schema = Schema({
        '--kernel-type':
        And(Use(int),
            lambda n: 0 <= n <= 1,
            error='--kernel-type should be 0 or 1'),
        '--infering-type':
        Or(None,
           And(Use(int), lambda n: 0 <= n <= 1),
           error='--infering-type should be 0 or 1'),
        '--sender-type':
        And(Use(int),
            lambda n: 0 <= n <= 4,
            error='--sender-type should between 0 and 4'),
        '--sender-period':
        And(Use(float),
            lambda n: 0.1 <= n <= 1.0,
            error='--sender-period should be 0 or 1'),
        '--senders':
def validate_method(method, is_challenge):
    '''Validate method configuration passed as a JSON file.'''

    # Define a dictionary schema
    # TODO would be nice to not copy-paste for multiple datasets
    schema = Schema({
        Optional('metadata'): {
            'publish_anonymously':
            bool,
            'authors':
            str,
            'contact_email':
            str,
            'method_name':
            str,
            'method_description':
            str,
            # 'descriptor_type': str,
            # 'descriptor_size': And(int, lambda v: v >= 1),
            Optional('link_to_website'):
            str,
            Optional('link_to_pdf'):
            str,
            Optional('under_review'):
            bool,
            Optional('under_review_override'):
            str,
        },
        'config_common': {
            'json_label': str,
            'keypoint': And(Use(str), lambda v: '_' not in v),
            'descriptor': And(Use(str), lambda v: '_' not in v),
            'num_keypoints': And(int, lambda v: v > 1),
        },
        Optional('config_phototourism_stereo'): {
            Optional('use_custom_matches'): bool,
            Optional('custom_matches_name'): str,
            Optional('matcher'): {
                'method':
                And(str, lambda v: v in ['nn']),
                'distance':
                And(str, lambda v: v.lower() in ['l1', 'l2', 'hamming']),
                'flann':
                bool,
                'num_nn':
                And(int, lambda v: v >= 1),
                'filtering': {
                    'type':
                    And(
                        str, lambda v: v.lower() in [
                            'none', 'snn_ratio_pairwise', 'snn_ratio_vs_last',
                            'fginn_ratio_pairwise'
                        ]),
                    Optional('threshold'):
                    And(Use(float), lambda v: 0 < v <= 1),
                    Optional('fginn_radius'):
                    And(Use(float), lambda v: 0 < v < 100.),
                },
                Optional('descriptor_distance_filter'): {
                    'threshold': And(Use(float), lambda v: v > 0),
                },
                'symmetric': {
                    'enabled':
                    And(bool),
                    Optional('reduce'):
                    And(str, lambda v: v.lower() in ['both', 'any']),
                },
            },
            Optional('outlier_filter'): {
                'method':
                And(Use(str), lambda v: v.lower() in ['none', 'cne-bp-nd']),
            },
            Optional('geom'): {
                'method':
                And(
                    str, lambda v: v.lower() in [
                        'cv2-ransac-f', 'cv2-ransac-e', 'cv2-lmeds-f',
                        'cv2-lmeds-e', 'cv2-7pt', 'cv2-8pt',
                        'cv2-patched-ransac-f', 'cmp-degensac-f',
                        'cmp-degensac-f-laf', 'cmp-gc-ransac-f',
                        'cmp-magsac-f', 'cmp-gc-ransac-e', 'skimage-ransac-f',
                        'intel-dfe-f'
                    ]),
                Optional('threshold'):
                And(Use(float), lambda v: v > 0),
                Optional('confidence'):
                And(Use(float), lambda v: v > 0),
                Optional('max_iter'):
                And(Use(int), lambda v: v > 0),
                Optional('postprocess'):
                And(Use(bool), lambda v: v is not None),
                Optional('error_type'):
                And(Use(str),
                    lambda v: v.lower() in ['sampson', 'symm_epipolar']),
                Optional('degeneracy_check'):
                bool,
            }
        },
        Optional('config_phototourism_multiview'): {
            Optional('use_custom_matches'): bool,
            Optional('custom_matches_name'): str,
            Optional('matcher'): {
                'method':
                And(str, lambda v: v in ['nn']),
                'distance':
                And(str, lambda v: v.lower() in ['l1', 'l2', 'hamming']),
                'flann':
                bool,
                'num_nn':
                And(int, lambda v: v >= 1),
                'filtering': {
                    'type':
                    And(
                        str, lambda v: v.lower() in [
                            'none', 'snn_ratio_pairwise', 'snn_ratio_vs_last',
                            'fginn_ratio_pairwise'
                        ]),
                    Optional('threshold'):
                    And(Use(float), lambda v: 0 < v <= 1),
                    Optional('fginn_radius'):
                    And(Use(float), lambda v: 0 < v < 100.),
                },
                Optional('descriptor_distance_filter'): {
                    'threshold': And(Use(float), lambda v: v > 0),
                },
                'symmetric': {
                    'enabled':
                    And(bool),
                    Optional('reduce'):
                    And(str, lambda v: v.lower() in ['both', 'any']),
                },
            },
            Optional('outlier_filter'): {
                'method':
                And(Use(str), lambda v: v.lower() in ['none', 'cne-bp-nd']),
            },
            Optional('colmap'): {},
        },
        Optional('config_phototourism_relocalization'): {},
    })
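    # e.g. a minimal dict that validates: {'config_common': {'json_label': 'my-method',
    #   'keypoint': 'sift', 'descriptor': 'sift', 'num_keypoints': 2048},
    #   'config_phototourism_stereo': {...}}; metadata and the per-task
    #   sections are optional as far as the schema is concerned.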

    schema.validate(method)

    # Check for metadata for challenge entries
    if is_challenge and not method['metadata']:
        raise ValueError('Must specify metadata')

    # Check what we are running
    do_pt_stereo = False if 'config_phototourism_stereo' not in method \
            else bool(method['config_phototourism_stereo'])
    do_pt_multiview = False if 'config_phototourism_multiview' not in method \
            else bool(method['config_phototourism_multiview'])
    do_pt_relocalization = False if 'config_phototourism_relocalization' not \
            in method else bool(method['config_phototourism_relocalization'])

    if do_pt_stereo:
        print('Running: Phototourism, stereo track')
    if do_pt_multiview:
        print('Running: Phototourism, multiview track')
    if do_pt_relocalization:
        print('Running: Phototourism, relocalization track')
    if not any([do_pt_stereo, do_pt_multiview, do_pt_relocalization]):
        raise ValueError('No tasks were specified')

    # Check for incorrect, missing, or redundant options
    for dataset in ['phototourism']:
        for task in ['stereo', 'multiview', 'relocalization']:
            cur_key = 'config_{}_{}'.format(dataset, task)
            if cur_key not in method:
                print('Key "{}" is empty -> skipping check'.format(cur_key))
                continue
            else:
                print('Validating key "{}"'.format(cur_key))

            # If dict is not empty, use_custom_matches should exist
            if method[cur_key] and ('use_custom_matches'
                                    not in method[cur_key]):
                raise ValueError('Must specify use_custom_matches')

            # If using custom matches, we cannot specify further options
            if method[cur_key] and ('use_custom_matches' in method[cur_key]) \
                    and method[cur_key]['use_custom_matches']:

                if 'matcher' in method[cur_key] or 'outlier_filter' in method[
                        cur_key]:
                    raise ValueError(
                        'Cannot specify a matcher or outlier filter with '
                        'use_custom_matches=True')

            # Matcher and filter
            if 'matcher' in method[cur_key]:
                matcher = method[cur_key]['matcher']
                if matcher['symmetric']['enabled'] and 'reduce' not in \
                        matcher['symmetric']:
                    raise ValueError(
                        '[{}/{}] Must specify "reduce" if "symmetric" is enabled'
                        .format(dataset, task))

                # Check for redundant settings with custom matches
                if 'config_{}_stereo'.format(dataset) in method:
                    cur_config = method['config_{}_stereo'.format(dataset)]
                    if cur_config['use_custom_matches']:
                        if 'matcher' in cur_config or 'outlier_filter' in cur_config \
                                or 'geom' in cur_config:
                            raise ValueError(
                                '[{}/stereo] Found redundant settings with use_custom_matches=True'
                                .format(dataset))
                    else:
                        if 'matcher' not in cur_config or 'outlier_filter' not in \
                                cur_config or 'geom' not in cur_config:
                            raise ValueError(
                                '[{}/stereo] Missing required settings with use_custom_matches=False'
                                .format(dataset))

            # For stereo, also check the geom settings (a passing example sketch follows this snippet)
            if task == 'stereo' and \
                    'config_{}_stereo'.format(dataset) in method and \
                    'geom' in method['config_{}_stereo'.format(dataset)]:
                geom = method['config_{}_stereo'.format(dataset)]['geom']

                # Threshold for RANSAC
                if geom['method'].lower() in [
                        'cv2-ransac-f', 'cv2-ransac-e', 'cv2-patched-ransac-f',
                        'cmp-degensac-f', 'cmp-gc-ransac-f', 'cmp-gc-ransac-e',
                        'cmp-degensac-f-laf', 'cmp-magsac-f',
                        'skimage-ransac-f', 'intel-dfe-f'
                ]:
                    if 'threshold' not in geom:
                        raise ValueError(
                            '[{}] Must specify a threshold for this method'.
                            format(dataset))
                else:
                    if 'threshold' in geom:
                        raise ValueError(
                            '[{}] Cannot specify a threshold for this method'.
                            format(dataset))

                # Degeneracy check for RANSAC
                if geom['method'].lower() in [
                        'cmp-degensac-f', 'cmp-degensac-f-laf'
                ]:
                    if 'degeneracy_check' not in geom:
                        raise ValueError(
                            '[{}] Must indicate degeneracy check for this method'
                            .format(dataset))
                    if 'error_type' not in geom:
                        raise ValueError(
                            '[{}] Must indicate error type for this method'.
                            format(dataset))
                else:
                    if 'degeneracy_check' in geom:
                        raise ValueError(
                            '[{}] Cannot apply degeneracy check to this method'
                            .format(dataset))
                    if 'error_type' in geom:
                        raise ValueError(
                            '[{}] Cannot indicate error type for this method'.
                            format(dataset))

                # Confidence for RANSAC/LMEDS
                if geom['method'].lower() in [
                        'cv2-ransac-f',
                        'cv2-patched-ransac-f',
                        'cv2-ransac-e',
                        'cv2-lmeds-f',
                        'cv2-lmeds-e',
                        'cmp-degensac-f',
                        'cmp-degensac-f-laf',
                        'cmp-gc-ransac-f',
                        'cmp-gc-ransac-e',
                        'skimage-ransac-f',
                        'cmp-magsac-f',
                ]:
                    if 'confidence' not in geom:
                        raise ValueError(
                            '[{}] Must specify a confidence value for OpenCV or DEGENSAC'
                            .format(dataset))
                else:
                    if 'confidence' in geom:
                        raise ValueError(
                            '[{}] Cannot specify a confidence value for this method'
                            .format(dataset))

                # Maximum number of RANSAC iterations
                if geom['method'].lower() in [
                        'cv2-patched-ransac-f',
                        'cmp-degensac-f',
                        'cmp-degensac-f-laf',
                        'cmp-gc-ransac-f',
                        'cmp-gc-ransac-e',
                        'skimage-ransac-f',
                        'cmp-magsac-f',
                ]:
                    if 'max_iter' not in geom:
                        raise ValueError(
                            '[{}] Must indicate max_iter for this method'.
                            format(dataset))
                else:
                    if 'max_iter' in geom:
                        raise ValueError(
                            '[{}] Cannot indicate max_iter for this method'.
                            format(dataset))

                # DFE-specific
                if geom['method'].lower() in ['intel-dfe-f']:
                    if 'postprocess' not in geom:
                        raise ValueError(
                            '[{}] Must specify a postprocess flag for DFE'.
                            format(dataset))
                else:
                    if 'postprocess' in geom:
                        raise ValueError(
                            '[{}] Cannot specify a postprocess flag for this method'
                            .format(dataset))
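

# A hedged sketch (values hypothetical) of a stereo 'geom' block that would
# satisfy the checks above for 'cmp-degensac-f': this method requires a RANSAC
# threshold, a confidence value, max_iter, an error_type and a degeneracy
# check, while 'postprocess' is reserved for 'intel-dfe-f' and must be omitted.
example_geom = {
    'method': 'cmp-degensac-f',
    'threshold': 0.75,
    'confidence': 0.999999,
    'max_iter': 100000,
    'error_type': 'sampson',
    'degeneracy_check': True,
}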
Example no. 22
0
user = User('username', 'password', 'access')
integrity_error = IntegrityError('Mock', 'mock', Exception('mock', 'mock'))
aleatory_errors = [
    DataError('Mock', 'mock', Exception('mock', 'mock')),
    OperationalError('Mock', 'mock', Exception('mock', 'mock')),
    InternalError('Mock', 'mock', Exception('mock', 'mock')),
    ProgrammingError('Mock', 'mock', Exception('mock', 'mock')),
    NotSupportedError('Mock', 'mock', Exception('mock', 'mock'))
]

user_validate = Schema({
    'id': Or(int, None),
    'updated': Or(str, None),
    'created': Or(str, None),
    'access': Or(str, None),
    'username': Use(str),
    'removed': Or(str, None)
})

user_list_validate = Schema([{
    'id': Or(int, None),
    'updated': Or(str, None),
    'created': Or(str, None),
    'access': Or(str, None),
    'username': Use(str),
    'removed': Or(str, None)
}])


class TestFind(unittest.TestCase):
    @patch('app.views.user.m_User')
Example no. 23
0
commandRenameRoomSchema = dictSchema({
    "command": str,
    "server": str,
    "data": {
        str: {
            "roomUID": str
        }
    }
})

commandShareRoomSchema = dictSchema(
    {
        "command": str,
        "server": str,
        "data": {
            Use(str, error='specify roomUID'): {
                Use(is_email, error="please specify a valid mail address"):
                Optional(str, error='specify the name of the user')
            }
        }
    },
    error="please specify a room name and at least one valid mail address")

commandCreateRoomSchema = dictSchema(
    {
        "command": str,
        "server": str,
        "data": {
            Use(str, error='specify room name'): {
                "email":
                Use(is_email, error="please specify a valid mail address"),
Example no. 24
0
                              request_json_must_have_one,
                              request_json_must_have, token_admin_required,
                              token_required, auth)
import unittest


class User:
    def __init__(self, access):
        self.id = 1
        self.access = access
        self.username = '******'
        self.password = '******'


user_dict_validate = Schema({
    'id': Use(int),
    'access': Use(str),
    'username': Use(str)
})

user_dict = {'id': 1, 'access': 'access', 'username': '******'}


def inject_kwargs(f):
    def wrapper(*args, **kwargs):
        kwargs['id'] = 1
        return f(*args, **kwargs)

    return wrapper

Example no. 25
0
def main(argv, session, cmd='copy'):
    args = docopt(__doc__, argv=argv)
    src_path = args['<src-identifier>/<src-file>']
    dest_path = args['<dest-identifier>/<dest-file>']

    # If src == dest, the file gets deleted!
    try:
        assert src_path != dest_path
    except AssertionError:
        print('error: The source and destination files cannot be the same!',
              file=sys.stderr)
        sys.exit(1)

    global SRC_ITEM
    SRC_ITEM = session.get_item(src_path.split('/')[0])

    # Validate args.
    s = Schema({
        str:
        Use(bool),
        '<src-identifier>/<src-file>':
        And(
            str,
            And(And(
                str,
                lambda x: '/' in x,
                error='Destination not formatted correctly. See usage example.'
            ),
                assert_src_file_exists,
                error=('https://archive.org/download/{} does not exist. '
                       'Please check the identifier and filepath and retry.'.
                       format(src_path)))),
        '<dest-identifier>/<dest-file>':
        And(str,
            lambda x: '/' in x,
            error='Destination not formatted correctly. See usage example.'),
        '--metadata':
        Or(None,
           And(Use(get_args_dict), dict),
           error='--metadata must be formatted as --metadata="key:value"'),
        '--header':
        Or(None,
           And(Use(get_args_dict), dict),
           error='--header must be formatted as --header="key:value"'),
    })
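    # For example (identifiers hypothetical): 'nasa/frame.jpg' contains a '/'
    # and passes the format check above, while a bare 'nasa' fails with the
    # usage error; assert_src_file_exists additionally verifies that the
    # source file actually exists on archive.org before copying.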

    try:
        args = s.validate(args)
    except SchemaError as exc:
        # This module is sometimes called by other modules.
        # Replace references to 'ia copy' in __doc__ with 'ia {cmd}' for clarity.
        usage = printable_usage(__doc__.replace('ia copy',
                                                'ia {}'.format(cmd)))
        print('{0}\n{1}'.format(str(exc), usage), file=sys.stderr)
        sys.exit(1)

    args['--header']['x-amz-copy-source'] = '/{}'.format(src_path)
    args['--header']['x-amz-metadata-directive'] = 'COPY'
    # Add keep-old-version by default.
    if 'x-archive-keep-old-version' not in args['--header']:
        args['--header']['x-archive-keep-old-version'] = '1'

    url = '{}//s3.us.archive.org/{}'.format(session.protocol, dest_path)
    req = ia.iarequest.S3Request(url=url,
                                 method='PUT',
                                 metadata=args['--metadata'],
                                 headers=args['--header'],
                                 access_key=session.access_key,
                                 secret_key=session.secret_key)
    p = req.prepare()
    r = session.send(p)
    if r.status_code != 200:
        try:
            msg = get_s3_xml_text(r.text)
        except Exception as e:
            msg = r.text
        print('error: failed to {} "{}" to "{}" - {}'.format(
            cmd, src_path, dest_path, msg))
        sys.exit(1)
    elif cmd == 'copy':
        print('success: copied "{}" to "{}".'.format(src_path, dest_path))
    else:
        return (r, SRC_FILE)
Example no. 26
0
def main(*argv):
    # Gather command line arguments
    argv = list(*argv) if len(argv) == 1 else [i for i in argv]
    args = docopt(__doc__,
                  argv=argv,
                  help=True,
                  version=__version__,
                  options_first=False)

    # Choose an appropriate log level
    if args['--verbose']:
        log_level = logging.DEBUG
    elif args['--quiet']:
        log_level = logging.INFO
    else:
        # Default to debugging (complete I/O output)
        log_level = logging.DEBUG
    logging.basicConfig(
        level=log_level,
        format='%(levelname)5s:%(asctime)s.%(msecs)03d %(message)s',
        datefmt='%d/%m/%Y %H:%M:%S')
    """
    Create input schemata - options that accept user input as a value are checked for plausibility (a usage sketch follows these schema definitions)
    """
    input_schema = Schema(
        {"IN_FILE": And(os.path.exists, error="IN_FILE should exist")},
        ignore_extra_keys=True,
    )
    config_schema = Schema(
        {"CONFIG_FILE": And(os.path.exists, error="CONFIG_FILE should exist")},
        ignore_extra_keys=True,
    )
    gcode_schema = Schema(
        {"--f": And(os.path.exists, error="G-Code file should exist")},
        ignore_extra_keys=True,
    )
    connection_schema = Schema(
        {
            "--ip":
            And(
                validate_ip,
                error=
                'IPv4 needs to be in format "[0-255].[0-255].[0-255].[0-255]"',
            ),
            "--port":
            And(
                Use(int),
                validate_port,
                error="Port needs to be unsigned short (16 bits): 0..65535)",
            ),
        },
        ignore_extra_keys=True,
    )
    usb_schema = Schema(
        {
            "--vid":
            And(Use(lambda x: int(x, 16)),
                validate_id,
                error=
                "Vendor ID needs to be unsigned short (16 bits): 0..65535)"),
            "--pid":
            And(Use(lambda x: int(x, 16)),
                validate_id,
                error=
                "Product ID needs to be unsigned short (16 bits): 0..65535)")
        },
        ignore_extra_keys=True,
    )
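    # A hedged usage sketch (values hypothetical): with ignore_extra_keys=True
    # only '--ip' and '--port' are validated here, and Use(int) coerces the
    # docopt string to an integer, e.g.
    #   connection_schema.validate({'--ip': '192.168.0.1', '--port': '10002'})
    #   -> {'--ip': '192.168.0.1', '--port': 10002}
    # which is why the dispatch code below merges the result via args.update().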
    """
    Dispatch sub-functions - new sub-commands are called here
    """
    # noinspection PyBroadException
    try:
        if args["--gi"]:
            usb_present = args["--vid"] is not None and args[
                "--pid"] is not None
            tcp_present = args["--ip"] is not None and args[
                "--port"] is not None
            if tcp_present:
                args.update(connection_schema.validate(args))
                ip, port, safe = args["--ip"], args["--port"], args["--safe"]

                if usb_present:
                    # Robot and PCB
                    args.update(usb_schema.validate(args))
                    if args["--f"]:
                        gcode_schema.validate(args)
                        execute_gcode(ip, port, (args["--vid"], args["--pid"]),
                                      args["--f"])
                    else:
                        interactive_gcode(ip, port,
                                          (args["--vid"], args["--pid"]))
                else:
                    # Robot only
                    interactive_gcode_robot_only(ip, port, safe_return=safe)
            elif usb_present:
                # PCB only
                args.update(usb_schema.validate(args))
                interactive_gcode_printer_only((args["--vid"], args["--pid"]))
            else:
                # None present
                raise ValueError
        elif args["--validate"]:
            input_schema.validate(args)
            config_schema.validate(args)
            check_trajectory(config_f=args["CONFIG_FILE"],
                             gcode_f=args["IN_FILE"])
        else:
            args.update(connection_schema.validate(args))
            ip, port, safe = (
                args["--ip"],
                args["--port"],
                args["--safe"],
            )

            if args["--mi"]:
                interactive_melfa(ip, port, safe_return=safe)
            elif args["--demo"]:
                demo_mode(ip, port, safe_return=safe)
            else:
                raise ApiException(
                    "Unknown option passed. Type --help for more info.")
    except SchemaError:
        # Input validation error
        logging.exception("Input data invalid.")
        sys.exit(EXIT_BAD_INPUT)
    except (ApiException, Singularity, OutOfReachError, TrajectoryError):
        # Intentionally thrown exception by functions of this module
        logging.exception("Internal error.")
        sys.exit(EXIT_INTERNAL_ERROR)
    except KeyError:
        # The args dictionary was accessed with keys other than those given in the docstring
        logging.exception(
            "This may happen when the CLI documentation and the argument parsing are out of sync."
        )
        sys.exit(EXIT_UNEXPECTED_ERROR)
    except NotImplementedError:
        # Some functions may raise this intentionally for unfinished features
        logging.exception("Encountered a feature that is not implemented yet.")
        sys.exit(EXIT_UNEXPECTED_ERROR)
    except Exception:
        # Exception that has not been caught and rethrown as a proper ApiException (= Bug)
        logging.exception("External or unexpected exception!")
        sys.exit(EXIT_UNEXPECTED_ERROR)
    else:
        # Everything okay, no exception occurred
        sys.exit(EXIT_SUCCESS)
Example no. 27
0
    'description':
    And(
        str,
        lambda s: len(s) > 30,
        error='Description length must be > 30 characters',
    ),
    'doi':
    And(
        validators.url,
        str,
        error='DOI must be a URL linking to the original paper',
    ),
    'variables':
    list,
    'remote': {
        'url': Use(URL),
        Optional('username'): str,
        Optional(Or('service', 'password', only_one=True)): str,
        Optional('port'): int,
    },
    'local_store':
    Use(Path),
    Optional('pipelines'): {
        str: {
            'data_path': Use(Path, error='data_path must be a valid path'),
            'functions': Use(get_modules_from_list),
        }
    },
})
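# Note on the 'remote' block above: Or('service', 'password', only_one=True)
# is intended to make the two keys mutually exclusive, i.e. a config may
# provide either 'service' or 'password', but not both.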

if __name__ == '__main__':
Example no. 28
0
     - `expression`: the <expression> passed in by the user
     - `regex`: Boolean set by --regex

    :return: expression if not None, otherwise '*' or '.*' depending on regex (worked examples follow the function)
    """
    if expression is None:
        if not regex:
            return '*'
        return '.*'
    return expression
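

# Worked examples of the defaulting above (the function's signature is not
# shown in this snippet):
#   expression=None, regex=False  -> '*'
#   expression=None, regex=True   -> '.*'
#   expression='*.txt'            -> '*.txt' (returned unchanged)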


finder_schema_dict = {
    Arguments.expression: Or(None, str),
    Arguments.root: Or(None, os.path.exists),
    Arguments.shallow: Use(bool),
    Arguments.regex: Use(bool)
}
schema = Schema(finder_schema_dict)


def setup_finder(arguments):
    """
    Chooses the find function and expression based on the arguments

    :param:

     - `arguments`: validated dictionary of arguments

    :return: <find function> <glob-expression>
    """
Example no. 29
0
from schema import And, Use, Schema


def is_json_serializable(data):
    if data is None:
        return True
    try:
        json.dumps(data)
        return True
    except Exception:
        return False
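

# Hedged sanity checks for the helper above (example values):
assert is_json_serializable({'a': 1}) is True   # plain dicts serialize fine
assert is_json_serializable(set()) is False     # json.dumps raises TypeError on sets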


doc_schema = Schema({
    'method': And(Use(str)),
    'data': And(Use(is_json_serializable)),
    'url': And(Use(str)),
    'url_kwargs': And(Use(is_json_serializable)),
    'format': And(Use(str)),
    'headers': And(Use(is_json_serializable)),
    'success': And(Use(bool)),
    'meta': {
        'docs': And(Use(str)),
        'method_name': And(Use(str)),
        'class_name': And(Use(str)),
        'app_name': And(Use(str))
    },
    'response': {
        'data': And(Use(is_json_serializable)),
        'content_type': And(Use(str)),
Example no. 30
0
def test_issue_add_dict_key_in_error():
    try:
        Schema({'attr1': Use(int)}).validate({'attr1': 'str'})
    except SchemaError as e:
        assert 'attr1' in e.autos[-1]
        assert e.errors == [None, None]