Example #1
    'qos_node_messages': 1,
    'base_topic_prefix': '',  # e.g. 'home-'
    'automatic_remove_kit_from_names': True,
    'automatic_rename_kit_nodes': True,
    'automatic_rename_generic_nodes': True,
    'automatic_rename_nodes': True,
    'rename': {}
}


def port_range(port):
    return 0 <= port <= 65535


schema = Schema({
    Optional('device'):
    And(str, len),
    Optional('name'):
    And(str, len),
    Optional('mqtt'): {
        Optional('host'): And(str, len),
        Optional('port'): And(int, port_range),
        Optional('username'): And(str, len),
        Optional('password'): And(str, len),
        Optional('cafile'): And(str, len, os.path.exists),
        Optional('certfile'): And(str, len, os.path.exists),
        Optional('keyfile'): And(str, len, os.path.exists),
    },
    Optional('retain_node_messages'):
    Use(bool),
    Optional('qos_node_messages'):
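
# Both the defaults dict and the Schema above are truncated, but the pattern
# they support is the usual one for the schema library: merge the user's
# config over the defaults, then validate. A minimal sketch (DEFAULT_CONFIG
# and user_config are assumed names, not from the original source):
config = {**DEFAULT_CONFIG, **user_config}
config = schema.validate(config)  # raises SchemaError on invalid input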
Example #2
def get_linkset_spec_schema(ets_ids):
    matching_methods_info = get_matching_methods()
    transformers_info = get_transformers()

    return Schema({
        'id': Use(int),
        'label': And(str, len),
        Optional('description', default=None): Or(str, None),
        Optional('use_counter', default=True): bool,
        'sources': [EntityTypeSelection(ets_ids)],
        'targets': [EntityTypeSelection(ets_ids)],
        'methods': And(LogicBox(Schema({
            'method': {
                'name': And(str, Use(str.lower), lambda m: m in matching_methods_info.keys()),
                'config': And(dict, MatchingMethodConfig(ets_ids)),
            },
            Optional('sim_method', default={'name': None, 'config': {}, 'normalized': False}): {
                Optional('name', default=None):
                    Or(None, And(str, Use(str.lower), lambda m: m in matching_methods_info.keys())),
                Optional('config', default={}): And(dict, MatchingMethodConfig(ets_ids)),
                Optional('normalized', default=False): bool,
            },
            Optional('fuzzy', default={'t_norm': 'minimum_t_norm', 's_norm': 'maximum_s_norm', 'threshold': 0}): {
                Optional('t_norm', default='minimum_t_norm'):
                    lambda s: s in ('minimum_t_norm', 'product_t_norm', 'lukasiewicz_t_norm',
                                    'drastic_t_norm', 'nilpotent_minimum', 'hamacher_product'),
                Optional('s_norm', default='maximum_s_norm'):
                    lambda s: s in ('maximum_s_norm', 'probabilistic_sum', 'bounded_sum',
                                    'drastic_s_norm', 'nilpotent_maximum', 'einstein_sum'),
                Optional('threshold', default=0): Or(float, Use(lambda t: 0)),
            },
            Optional('list_matching', default={'threshold': 0, 'is_percentage': False}): {
                Optional('threshold', default=0): int,
                Optional('is_percentage', default=False): bool,
            },
            'sources': {
                'properties': {
                    EntityTypeSelection(ets_ids): [{
                        'property': And(Use(filter_property), len),
                        Optional('property_transformer_first', default=False): bool,
                        Optional('transformers', default=list): [{
                            'name': And(str, Use(str.lower), lambda n: n in transformers_info.keys()),
                            'parameters': dict
                        }],
                    }]
                },
                Optional('transformers', default=list): [{
                    'name': And(str, Use(str.lower), lambda n: n in transformers_info.keys()),
                    'parameters': dict
                }],
            },
            'targets': {
                'properties': {
                    EntityTypeSelection(ets_ids): [{
                        'property': And(Use(filter_property), len),
                        Optional('property_transformer_first', default=False): bool,
                        Optional('transformers', default=list): [{
                            'name': And(str, Use(str.lower), lambda n: n in transformers_info.keys()),
                            'parameters': dict
                        }],
                    }]
                },
                Optional('transformers', default=list): [{
                    'name': And(str, Use(str.lower), lambda n: n in transformers_info.keys()),
                    'parameters': dict
                }],
            }
        }, ignore_extra_keys=True), name='conditions', types=(
            'and', 'or', 'minimum_t_norm', 'product_t_norm', 'lukasiewicz_t_norm', 'drastic_t_norm',
            'nilpotent_minimum', 'hamacher_product', 'maximum_s_norm', 'probabilistic_sum',
            'bounded_sum', 'drastic_s_norm', 'nilpotent_maximum', 'einstein_sum'
        ), elements_schema=Schema({
            'type': str,
            'conditions': list,
            Optional('threshold', default=0): Or(float, Use(lambda t: 0)),
        }, ignore_extra_keys=True)), dict),
    }, ignore_extra_keys=True)
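
# A minimal usage sketch (ets_ids and request_json are hypothetical values;
# LogicBox, EntityTypeSelection and the *_info helpers come from the
# surrounding project):
linkset_schema = get_linkset_spec_schema(ets_ids=[1, 2])
spec = linkset_schema.validate(request_json)  # fills in the Optional defaults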
Example #3
def setNumberRange(key, keyType, start, end):
    '''check number range'''
    return And(
        And(keyType, error=SCHEMA_TYPE_ERROR % (key, keyType.__name__)),
        And(lambda n: start <= n <= end, error=SCHEMA_RANGE_ERROR % (key, '(%s,%s)' % (start, end))),
    )

def setPathCheck(key):
    '''check if path exist'''
    return And(os.path.exists, error=SCHEMA_PATH_ERROR % key)

common_schema = {
    'authorName': setType('authorName', str),
    'experimentName': setType('experimentName', str),
    Optional('description'): setType('description', str),
    'trialConcurrency': setNumberRange('trialConcurrency', int, 1, 99999),
    Optional('maxExecDuration'): And(Regex(r'^[1-9][0-9]*[smhd]$', error='ERROR: maxExecDuration format is [digit]{s,m,h,d}')),
    Optional('maxTrialNum'): setNumberRange('maxTrialNum', int, 1, 99999),
    'trainingServicePlatform': setChoice('trainingServicePlatform', 'remote', 'local', 'pai', 'kubeflow', 'frameworkcontroller', 'paiYarn'),
    Optional('searchSpacePath'): And(os.path.exists, error=SCHEMA_PATH_ERROR % 'searchSpacePath'),
    Optional('multiPhase'): setType('multiPhase', bool),
    Optional('multiThread'): setType('multiThread', bool),
    Optional('nniManagerIp'): setType('nniManagerIp', str),
    Optional('logDir'): And(os.path.isdir, error=SCHEMA_PATH_ERROR % 'logDir'),
    Optional('debug'): setType('debug', bool),
    Optional('versionCheck'): setType('versionCheck', bool),
    Optional('logLevel'): setChoice('logLevel', 'trace', 'debug', 'info', 'warning', 'error', 'fatal'),
    Optional('logCollection'): setChoice('logCollection', 'http', 'none'),
    'useAnnotation': setType('useAnnotation', bool),
    Optional('tuner'): dict,
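
# common_schema is truncated above; a dict like this is typically wrapped in
# a Schema before use. A minimal sketch (experiment_config is a hypothetical
# dict loaded from the experiment YAML):
Schema(common_schema).validate(experiment_config)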
Example #4
class Stage(object):
    STAGE_FILE = 'Dvcfile'
    STAGE_FILE_SUFFIX = '.dvc'

    PARAM_MD5 = 'md5'
    PARAM_CMD = 'cmd'
    PARAM_DEPS = 'deps'
    PARAM_OUTS = 'outs'
    PARAM_LOCKED = 'locked'

    SCHEMA = {
        Optional(PARAM_MD5): Or(str, None),
        Optional(PARAM_CMD): Or(str, None),
        Optional(PARAM_DEPS): Or(And(list, Schema([dependency.SCHEMA])), None),
        Optional(PARAM_OUTS): Or(And(list, Schema([output.SCHEMA])), None),
        Optional(PARAM_LOCKED): bool,
    }

    def __init__(self,
                 project,
                 path=None,
                 cmd=None,
                 cwd=os.curdir,
                 deps=None,   # avoid shared mutable default arguments
                 outs=None,
                 md5=None,
                 locked=False):
        self.project = project
        self.path = path
        self.cmd = cmd
        self.cwd = cwd
        self.outs = outs if outs is not None else []
        self.deps = deps if deps is not None else []
        self.md5 = md5
        self.locked = locked

    @property
    def relpath(self):
        return os.path.relpath(self.path)

    @property
    def is_data_source(self):
        return self.cmd is None

    @staticmethod
    def is_stage_file(path):
        if not os.path.isfile(path):
            return False

        if not path.endswith(Stage.STAGE_FILE_SUFFIX) \
           and os.path.basename(path) != Stage.STAGE_FILE:
            return False

        return True

    def changed_md5(self):
        md5 = self.dumpd().get(self.PARAM_MD5, None)

        # backward compatibility
        if self.md5 is None:
            return False

        if self.md5 and md5 and self.md5 == md5:
            return False

        msg = "Dvc file '{}' md5 changed(expected '{}', actual '{}')"
        self.project.logger.debug(msg.format(self.relpath, self.md5, md5))
        return True

    @property
    def is_callback(self):
        return not self.is_data_source and len(self.deps) == 0

    @property
    def is_import(self):
        return not self.cmd and \
               len(self.deps) == 1 and \
               len(self.outs) == 1

    def changed(self):
        ret = False

        if self.is_callback:
            ret = True

        if self.locked:
            entries = self.outs
        else:
            entries = itertools.chain(self.outs, self.deps)

        for entry in entries:
            if entry.changed():
                ret = True

        if self.changed_md5():
            ret = True

        if ret:
            msg = u'Dvc file \'{}\' changed'.format(self.relpath)
        else:
            msg = u'Dvc file \'{}\' didn\'t change'.format(self.relpath)

        self.project.logger.debug(msg)

        return ret

    def remove_outs(self, ignore_remove=False):
        for out in self.outs:
            out.remove(ignore_remove=ignore_remove)

    def remove(self):
        self.remove_outs(ignore_remove=True)
        os.unlink(self.path)

    def reproduce(self, force=False, dry=False, interactive=False):
        if not self.changed() and not force:
            return None

        if (self.cmd or self.is_import) and not self.locked and not dry:
            # Removing outputs only if we actually have command to reproduce
            self.remove_outs(ignore_remove=False)

        msg = "Going to reproduce '{}'. Are you sure you want to continue?"
        msg = msg.format(self.relpath)
        if interactive and not prompt(msg):
            raise DvcException('Reproduction aborted by the user')

        self.project.logger.info(u'Reproducing \'{}\''.format(self.relpath))

        self.run(dry=dry)

        msg = u'\'{}\' was reproduced'.format(self.relpath)
        self.project.logger.debug(msg)

        return self

    @staticmethod
    def validate(d):
        try:
            Schema(Stage.SCHEMA).validate(d)
        except SchemaError as exc:
            Logger.debug(str(exc))
            raise StageFileFormatError()

    @staticmethod
    def loadd(project, d, path):
        Stage.validate(d)

        path = os.path.abspath(path)
        cwd = os.path.dirname(path)
        cmd = d.get(Stage.PARAM_CMD, None)
        md5 = d.get(Stage.PARAM_MD5, None)
        locked = d.get(Stage.PARAM_LOCKED, False)

        stage = Stage(project=project,
                      path=path,
                      cmd=cmd,
                      cwd=cwd,
                      md5=md5,
                      locked=locked)

        stage.deps = dependency.loadd_from(stage, d.get(Stage.PARAM_DEPS, []))
        stage.outs = output.loadd_from(stage, d.get(Stage.PARAM_OUTS, []))

        return stage

    @classmethod
    def _stage_fname_cwd(cls, fname, cwd, outs, add):
        if fname and cwd:
            return (fname, cwd)

        if not outs:
            return (cls.STAGE_FILE, cwd if cwd else os.getcwd())

        out = outs[0]
        if out.path_info['scheme'] == 'local':
            path = os.path
        else:
            path = posixpath

        if not fname:
            fname = path.basename(out.path) + cls.STAGE_FILE_SUFFIX

        if not cwd or (add and out.is_local):
            cwd = path.dirname(out.path)

        return (fname, cwd)

    @staticmethod
    def loads(project=None,
              cmd=None,
              deps=[],
              outs=[],
              outs_no_cache=[],
              metrics_no_cache=[],
              fname=None,
              cwd=os.curdir,
              locked=False,
              add=False,
              overwrite=True):
        stage = Stage(project=project, cwd=cwd, cmd=cmd, locked=locked)

        stage.outs = output.loads_from(stage, outs, use_cache=True)
        stage.outs += output.loads_from(stage, outs_no_cache, use_cache=False)
        stage.outs += output.loads_from(stage,
                                        metrics_no_cache,
                                        use_cache=False,
                                        metric=True)
        stage.deps = dependency.loads_from(stage, deps)

        fname, cwd = Stage._stage_fname_cwd(fname, cwd, stage.outs, add=add)

        cwd = os.path.abspath(cwd)
        path = os.path.join(cwd, fname)

        if os.path.exists(path):
            relpath = os.path.relpath(path)
            msg = "'{}' already exists. " \
                  "Do you wish to run the command and overwrite it?"
            if not overwrite and not prompt(msg.format(relpath), False):
                raise DvcException("'{}' already exists".format(relpath))

        stage.cwd = cwd
        stage.path = path

        return stage

    @staticmethod
    def _check_dvc_file(fname):
        sname = fname + Stage.STAGE_FILE_SUFFIX
        if Stage.is_stage_file(sname):
            Logger.info("Do you mean '{}'?".format(sname))

    @staticmethod
    def load(project, fname):
        if not os.path.exists(fname):
            Stage._check_dvc_file(fname)
            raise StageFileDoesNotExistError(fname)

        if not Stage.is_stage_file(fname):
            Stage._check_dvc_file(fname)
            raise StageFileIsNotDvcFileError(fname)

        with open(fname, 'r') as fd:
            return Stage.loadd(project, yaml.safe_load(fd), fname)

    def dumpd(self):
        deps = [x.dumpd() for x in self.deps]
        outs = [x.dumpd() for x in self.outs]

        ret = {}
        if self.cmd is not None:
            ret[Stage.PARAM_CMD] = self.cmd

        if len(deps):
            ret[Stage.PARAM_DEPS] = deps

        if len(outs):
            ret[Stage.PARAM_OUTS] = outs

        if self.locked:
            ret[Stage.PARAM_LOCKED] = self.locked

        ret[Stage.PARAM_MD5] = dict_md5(ret)

        return ret

    def dump(self, fname=None):
        if not fname:
            fname = self.path

        msg = "Saving information to '{}'.".format(os.path.relpath(fname))
        Logger.info(msg)

        with open(fname, 'w') as fd:
            yaml.safe_dump(self.dumpd(), fd, default_flow_style=False)

    def save(self):
        for dep in self.deps:
            dep.save()

        for out in self.outs:
            out.save()

    def run(self, dry=False):
        if self.locked:
            msg = u'Verifying outputs in locked stage \'{}\''
            self.project.logger.info(msg.format(self.relpath))
            if not dry:
                self.check_missing_outputs()
        elif self.is_import:
            msg = u'Importing \'{}\' -> \'{}\''
            self.project.logger.info(
                msg.format(self.deps[0].path, self.outs[0].path))

            if not dry:
                self.deps[0].download(self.outs[0].path_info)
        elif self.is_data_source:
            msg = u'Verifying data sources in \'{}\''.format(self.relpath)
            self.project.logger.info(msg)
            if not dry:
                self.check_missing_outputs()
        else:
            msg = u'Running command:\n\t{}'.format(self.cmd)
            self.project.logger.info(msg)

            if not dry:
                p = subprocess.Popen(self.cmd,
                                     cwd=self.cwd,
                                     shell=True,
                                     env=fix_env(os.environ),
                                     executable=os.getenv('SHELL'))
                p.communicate()
                if p.returncode != 0:
                    raise StageCmdFailedError(self)

        if not dry:
            self.save()

    def check_missing_outputs(self):
        outs = [out for out in self.outs if not out.exists]
        paths = [
            out.path if out.path_info['scheme'] != 'local' else out.rel_path
            for out in outs
        ]
        if paths:
            raise MissingDataSource(paths)

    def checkout(self):
        for out in self.outs:
            out.checkout()

    def _status(self, entries, name):
        ret = {}

        for entry in entries:
            ret.update(entry.status())

        if ret:
            return {name: ret}

        return {}

    def status(self):
        ret = {}

        if not self.locked:
            ret.update(self._status(self.deps, 'deps'))

        ret.update(self._status(self.outs, 'outs'))

        if ret or self.changed_md5() or self.is_callback:
            return {self.relpath: ret}

        return {}
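
# A minimal end-to-end sketch using the methods defined above (the project
# object and the .dvc filename are hypothetical):
stage = Stage.load(project, 'model.pkl.dvc')
if stage.changed():
    stage.reproduce()  # re-runs the command (or verifies outputs) and saves checksums
    stage.dump()       # writes the updated stage file back to disk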
Example #5
"""
Migration from V2.8.5 to V3.0.0
"""
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2021 Dominik Gedon <*****@*****.**>
# SPDX-FileCopyrightText: 2021 Enno Gotthold <*****@*****.**>
# SPDX-FileCopyrightText: Copyright SUSE LLC

from schema import Optional, Or, Schema, SchemaError
from cobbler.settings.migrations import helper

schema = Schema(
    {
        Optional("auto_migrate_settings", default=True):
        bool,
        "allow_duplicate_hostnames":
        int,
        "allow_duplicate_ips":
        int,
        "allow_duplicate_macs":
        int,
        "allow_dynamic_settings":
        int,
        "always_write_dhcp_entries":
        int,
        "anamon_enabled":
        int,
        "authn_pam_service":
        str,
        "auth_token_expiration":
        int,
Example #6
"""
Migration from V2.x.x to V2.8.5
"""
# SPDX-License-Identifier: GPL-2.0-or-later
# SPDX-FileCopyrightText: 2021 Dominik Gedon <*****@*****.**>
# SPDX-FileCopyrightText: 2021 Enno Gotthold <*****@*****.**>
# SPDX-FileCopyrightText: Copyright SUSE LLC

from schema import Optional, Schema, SchemaError

schema = Schema(
    {
        Optional("auto_migrate_settings", default=True):
        int,
        "allow_duplicate_hostnames":
        int,
        "allow_duplicate_ips":
        int,
        "allow_duplicate_macs":
        int,
        "allow_dynamic_settings":
        int,
        "always_write_dhcp_entries":
        int,
        "anamon_enabled":
        int,
        "authn_pam_service":
        str,
        "auth_token_expiration":
        int,
        "bind_chroot_path":
Example #7
            raise errors.BlockedError(
                "Project still has headnode(s) attached to the network")

    network.access.remove(project)
    db.session.commit()


# Node Code #
#############
@rest_call('PUT',
           '/node/<node>',
           schema=Schema({
               'node': basestring,
               'obm': {
                   'type': basestring,
                   Optional(object): object,
               },
               'obmd': {
                   'uri':
                   And(basestring, lambda s: urlparse(s).scheme in
                       ('http', 'https')),
                   'admin_token':
                   basestring,
               },
               Optional('metadata'): {
                   basestring: object
               },
           }))
def node_register(node, obmd, **kwargs):
    """Create node.
Example #8
        And(lambda polygon: len(polygon[0]) >= 4,
            error='Polygon should contain at least 4 points.'),
        And([[POINT_SCHEMA]])))

POLYGON_SCHEMA = Schema({'type': 'Polygon', 'coordinates': COORDINATES_SCHEMA})

PROPERTIES_SCHEMA = Schema({
    'category':
    And(Use(list),
        lambda categories: len(categories) >= 2,
        error='Feature should have at least two categories'),
    'confidence':
    And(Use(float),
        lambda confidence: 0 <= confidence <= 1,
        error='Confidence should be a float between 0 and 1'),
    Optional(str):
    Or(bool, str, int, float, list, dict)
})

FEATURE_SCHEMA = Schema({
    'type': 'Feature',
    'properties': PROPERTIES_SCHEMA,
    'geometry': POLYGON_SCHEMA
})

FEATURE_COLLECTION_SCHEMA = Schema({
    'type': 'FeatureCollection',
    'features': [FEATURE_SCHEMA],
    Optional('id'): Use(str),
    Optional('bbox'): Use(list)
})
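
# A minimal usage sketch: an empty feature list is the smallest document that
# satisfies FEATURE_COLLECTION_SCHEMA (every entry would have to satisfy
# FEATURE_SCHEMA). validate() raises SchemaError on failure.
FEATURE_COLLECTION_SCHEMA.validate({
    'type': 'FeatureCollection',
    'features': [],
})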
Example #9
def validate_settings(settings_content: dict) -> dict:
    """
    This function performs logical validation of our loaded YAML files.
    This function will:
    - Perform type validation on all values of all keys.
    - Provide defaults for optional settings.
    :param settings_content: The dictionary content from the YAML file.
    :raises SchemaError: In case the data given is invalid.
    :return: The Settings of Cobbler which can be safely used inside this instance.
    """
    schema = Schema(
        {
            "allow_duplicate_hostnames":
            bool,
            "allow_duplicate_ips":
            bool,
            "allow_duplicate_macs":
            bool,
            "allow_dynamic_settings":
            bool,
            "always_write_dhcp_entries":
            bool,
            "anamon_enabled":
            bool,
            "auth_token_expiration":
            int,
            "authn_pam_service":
            str,
            "autoinstall_snippets_dir":
            str,
            "autoinstall_templates_dir":
            str,
            "bind_chroot_path":
            str,
            "bind_zonefile_path":
            str,
            "bind_master":
            str,
            "boot_loader_conf_template_dir":
            str,
            Optional("bootloaders_dir", default="/var/lib/cobbler/loaders"):
            str,
            Optional("grubconfig_dir", default="/var/lib/cobbler/grub_config"):
            str,
            "build_reporting_enabled":
            bool,
            "build_reporting_email": [str],
            "build_reporting_ignorelist": [str],
            "build_reporting_sender":
            str,
            "build_reporting_smtp_server":
            str,
            "build_reporting_subject":
            str,
            Optional("buildisodir", default="/var/cache/cobbler/buildiso"):
            str,
            "cheetah_import_whitelist": [str],
            "client_use_https":
            bool,
            "client_use_localhost":
            bool,
            Optional("cobbler_master", default=""):
            str,
            Optional("convert_server_to_ip", default=False):
            bool,
            "createrepo_flags":
            str,
            "autoinstall":
            str,
            "default_name_servers": [str],
            "default_name_servers_search": [str],
            "default_ownership": [str],
            "default_password_crypted":
            str,
            "default_template_type":
            str,
            "default_virt_bridge":
            str,
            Optional("default_virt_disk_driver", default="raw"):
            str,
            "default_virt_file_size":
            int,
            "default_virt_ram":
            int,
            "default_virt_type":
            str,
            "enable_ipxe":
            bool,
            "enable_menu":
            bool,
            "http_port":
            int,
            "include": [str],
            Optional("iso_template_dir", default="/etc/cobbler/iso"):
            str,
            Optional("jinja2_includedir", default="/var/lib/cobbler/jinja2"):
            str,
            "kernel_options":
            dict,
            "ldap_anonymous_bind":
            bool,
            "ldap_base_dn":
            str,
            "ldap_port":
            int,
            "ldap_search_bind_dn":
            str,
            "ldap_search_passwd":
            str,
            "ldap_search_prefix":
            str,
            "ldap_server":
            str,
            "ldap_tls":
            bool,
            "ldap_tls_cacertfile":
            str,
            "ldap_tls_certfile":
            str,
            "ldap_tls_keyfile":
            str,
            Optional("bind_manage_ipmi", default=False):
            bool,
            # TODO: Remove following line
            "manage_dhcp":
            bool,
            "manage_dhcp_v4":
            bool,
            "manage_dhcp_v6":
            bool,
            "manage_dns":
            bool,
            "manage_forward_zones": [str],
            "manage_reverse_zones": [str],
            Optional("manage_genders", False):
            bool,
            "manage_rsync":
            bool,
            "manage_tftpd":
            bool,
            "mgmt_classes": [str],
            # TODO: Validate Subdict
            "mgmt_parameters":
            dict,
            "next_server_v4":
            str,
            "next_server_v6":
            str,
            Optional("nsupdate_enabled", False):
            bool,
            Optional("nsupdate_log", default="/var/log/cobbler/nsupdate.log"):
            str,
            Optional("nsupdate_tsig_algorithm", default="hmac-sha512"):
            str,
            Optional("nsupdate_tsig_key", default=[]): [str],
            "power_management_default_type":
            str,
            "proxy_url_ext":
            str,
            "proxy_url_int":
            str,
            "puppet_auto_setup":
            bool,
            Optional("puppet_parameterized_classes", default=True):
            bool,
            Optional("puppet_server", default="puppet"):
            str,
            Optional("puppet_version", default=2):
            int,
            "puppetca_path":
            str,
            "pxe_just_once":
            bool,
            "nopxe_with_triggers":
            bool,
            "redhat_management_permissive":
            bool,
            "redhat_management_server":
            str,
            "redhat_management_key":
            str,
            "register_new_installs":
            bool,
            "remove_old_puppet_certs_automatically":
            bool,
            "replicate_repo_rsync_options":
            str,
            "replicate_rsync_options":
            str,
            "reposync_flags":
            str,
            "reposync_rsync_flags":
            str,
            "restart_dhcp":
            bool,
            "restart_dns":
            bool,
            "run_install_triggers":
            bool,
            "scm_track_enabled":
            bool,
            "scm_track_mode":
            str,
            "scm_track_author":
            str,
            "scm_push_script":
            str,
            "serializer_pretty_json":
            bool,
            "server":
            str,
            "sign_puppet_certs_automatically":
            bool,
            Optional("signature_path",
                     default="/var/lib/cobbler/distro_signatures.json"):
            str,
            Optional("signature_url",
                     default="https://cobbler.github.io/signatures/3.0.x/latest.json"):
            str,
            "tftpboot_location":
            str,
            "virt_auto_boot":
            bool,
            "webdir":
            str,
            "webdir_whitelist": [str],
            "xmlrpc_port":
            int,
            "yum_distro_priority":
            int,
            "yum_post_install_mirror":
            bool,
            "yumdownloader_flags":
            str,
            Optional("windows_enabled", default=False):
            bool,
            Optional("windows_template_dir", default="/etc/cobbler/windows"):
            str,
            Optional("samba_distro_share", default="DISTRO"):
            str,
        },
        ignore_extra_keys=False)
    return schema.validate(settings_content)
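
# A minimal sketch of calling the validator (the settings path is an
# assumption; any missing, extra, or mistyped key raises SchemaError):
import yaml
with open("/etc/cobbler/settings.yaml") as fd:
    settings = validate_settings(yaml.safe_load(fd))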
Example #10
File: schemas.py  Project: Josca/vmaas
"""Schemas of responses."""
# pylint: disable=C0103
from schema import Or, Optional, Schema

_cves = {
    "cve_list": {
        str: {
            "impact": str,
            "public_date": str,
            "synopsis": str,
            "description": str,
            "modified_date": str,
            Optional("redhat_url"): str,
            "cvss3_score": str,
            "cvss2_score": str,
            Optional("secondary_url"): str,
            "cwe_list": [str],
            "errata_list": [str],
            "package_list": [str],
            "source_package_list": [str],
            "cvss3_metrics": str,
            "cvss2_metrics": str,
        }
    },
    Optional("modified_since"): str,
    "page": int,
    "page_size": int,
    "pages": int,
}

_pkgs_top = {"package_list": {str: dict}}
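
# _cves and _pkgs_top are plain dicts describing response shapes; wrapping one
# in Schema turns it into a reusable validator. A minimal sketch (response_json
# is a hypothetical API response body):
cves_schema = Schema(_cves)
cves_schema.validate(response_json)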
Example #11
def define_data_schema(read=True):
    """
    Define data schema.

    :param read:
        Schema for reading?
    :type read: bool

    :return:
        Data schema.
    :rtype: schema.Schema
    """
    cmv = _cmv(read=read)
    dtc = _dtc(read=read)
    cvt = _cvt(read=read)
    gspv = _gspv(read=read)
    gsch = _gsch(read=read)
    string = _string(read=read)
    positive = _positive(read=read)
    greater_than_zero = _positive(
        read=read,
        error='should be as <float> and greater than zero!',
        check=lambda x: x > 0)
    between_zero_and_one = _positive(
        read=read,
        error='should be as <float> and between zero and one!',
        check=lambda x: 0 <= x <= 1)
    greater_than_one = _positive(
        read=read,
        error='should be as <float> and greater than one!',
        check=lambda x: x >= 1)
    positive_int = _positive(type=int, read=read)
    greater_than_one_int = _positive(
        type=int,
        read=read,
        error='should be as <int> and greater than one!',
        check=lambda x: x >= 1)
    limits = _limits(read=read)
    index_dict = _index_dict(read=read)
    np_array = _np_array(read=read)
    np_array_sorted = _np_array_positive(
        read=read,
        error='cannot be parsed because it should be an '
        'np.array dtype=<float> with ascending order!',
        check=_is_sorted)
    np_array_greater_than_minus_one = _np_array_positive(
        read=read,
        error='cannot be parsed because it should be an '
        'np.array dtype=<float> and all values >= -1!',
        check=lambda x: (x >= -1).all())
    np_array_bool = _np_array(dtype=bool, read=read)
    np_array_int = _np_array(dtype=int, read=read)
    _bool = _type(type=bool, read=read)
    function = _function(read=read)
    tuplefloat2 = _type(type=And(Use(tuple), (_type(float), )),
                        length=2,
                        read=read)
    tuplefloat = _type(type=And(Use(tuple), (_type(float), )), read=read)
    dictstrdict = _dict(format={str: dict}, read=read)
    ordictstrdict = _ordict(format={str: dict}, read=read)
    parameters = _parameters(read=read)
    dictstrfloat = _dict(format={str: Use(float)}, read=read)
    dictarray = _dict(format={str: np_array}, read=read)
    tyre_code = _tyre_code(read=read)
    tyre_dimensions = _tyre_dimensions(read=read)

    schema = {
        _compare_str('CVT'):
        cvt,
        _compare_str('CMV'):
        cmv,
        _compare_str('CMV_Cold_Hot'):
        gsch,
        _compare_str('DTGS'):
        dtc,
        _compare_str('GSPV'):
        gspv,
        _compare_str('GSPV_Cold_Hot'):
        gsch,
        _compare_str('MVL'):
        _mvl(read=read),
        'engine_n_cylinders':
        positive_int,
        'lock_up_tc_limits':
        tuplefloat2,
        _convert_str('ki_factor', 'ki_multiplicative'):
        greater_than_one,
        'ki_additive':
        positive,
        'drive_battery_technology':
        string,
        'drive_battery_n_cells':
        greater_than_one_int,
        'drive_battery_n_series_cells':
        greater_than_one_int,
        'drive_battery_n_parallel_cells':
        greater_than_one_int,
        'tyre_dimensions':
        tyre_dimensions,
        'tyre_code':
        tyre_code,
        'wltp_base_model':
        _dict(format=dict, read=read),
        'fuel_type':
        _select(types=('gasoline', 'diesel', 'LPG', 'NG', 'ethanol',
                       'biodiesel', 'methanol', 'propane'),
                read=read),
        'obd_fuel_type_code':
        positive_int,
        'vehicle_category':
        _select(types='ABCDEFSMJ', read=read),
        'vehicle_body':
        _select(types=('cabriolet', 'sedan', 'hatchback', 'stationwagon',
                       'suv/crossover', 'mpv', 'coupé', 'bus', 'bestelwagen',
                       'pick-up'),
                read=read),
        'tyre_class':
        _select(types=('C1', 'C2', 'C3'), read=read),
        'tyre_category':
        _select(types='ABCDEFG', read=read),
        'engine_fuel_lower_heating_value':
        positive,
        'fuel_carbon_content':
        positive,
        'engine_capacity':
        positive,
        'engine_stroke':
        positive,
        'engine_max_power':
        positive,
        _convert_str('engine_max_speed_at_max_power', 'engine_speed_at_max_power'):
        positive,
        'engine_max_speed':
        positive,
        'engine_max_torque':
        positive,
        'idle_engine_speed_median':
        positive,
        'engine_idle_fuel_consumption':
        greater_than_zero,
        'final_drive_ratio':
        positive,
        'r_dynamic':
        positive,
        'n_wheel':
        positive_int,
        'wheel_drive_load_fraction':
        between_zero_and_one,
        'static_friction':
        greater_than_zero,
        'tyre_state':
        _select(types=('new', 'worn'), read=read),
        'road_state':
        _select(types=('dry', 'wet', 'rainfall', 'puddles', 'ice'), read=read),
        'wltp_class':
        _select(types=('class1', 'class2', 'class3a', 'class3b'), read=read),
        'downscale_phases':
        tuplefloat,
        'electrical_hybridization_degree':
        _select(types=('mild', 'full', 'plugin', 'electric'), read=read),
        'gear_box_type':
        _select(types=('manual', 'automatic', 'cvt', 'planetary'), read=read),
        'ignition_type':
        _select(types=('positive', 'compression'), read=read),
        'start_stop_activation_time':
        positive,
        'alternator_nominal_voltage':
        positive,
        _convert_str('battery_voltage', 'service_battery_nominal_voltage'):
        positive,
        _convert_str('battery_capacity', 'service_battery_capacity'):
        positive,
        _convert_str('state_of_charge_balance', 'service_battery_state_of_charge_balance'):
        limits,
        _convert_str('state_of_charge_balance_window', 'service_battery_state_of_charge_balance_window'):
        limits,
        _convert_str('initial_state_of_charge', 'initial_service_battery_state_of_charge'):
        limits,
        'idle_engine_speed_std':
        positive,
        'alternator_nominal_power':
        positive,
        'alternator_efficiency':
        _limits(limits=(0, 1), read=read),
        'time_cold_hot_transition':
        positive,
        'co2_params':
        dictstrfloat,
        'willans_factors':
        dictstrfloat,
        'phases_willans_factors':
        _type(type=And(Use(tuple), (dictstrfloat, )), read=read),
        'optimal_efficiency':
        dictarray,
        'velocity_speed_ratios':
        index_dict,
        'gear_box_ratios':
        index_dict,
        'final_drive_ratios':
        index_dict,
        'speed_velocity_ratios':
        index_dict,
        'full_load_speeds':
        np_array_sorted,
        'full_load_torques':
        np_array,
        'full_load_powers':
        np_array,
        'vehicle_mass':
        positive,
        'f0_uncorrected':
        positive,
        'f2':
        positive,
        'f0':
        positive,
        'correct_f0':
        _bool,
        'co2_emission_low':
        positive,
        'co2_emission_medium':
        positive,
        'co2_emission_high':
        positive,
        'co2_emission_extra_high':
        positive,
        _compare_str('co2_emission_UDC'):
        positive,
        _compare_str('co2_emission_EUDC'):
        positive,
        'co2_emission_value':
        positive,
        'declared_co2_emission_value':
        positive,
        'n_dyno_axes':
        positive_int,
        'n_wheel_drive':
        positive_int,
        'rcb_correction':
        _bool,
        'speed_distance_correction':
        _bool,
        'engine_is_turbo':
        _bool,
        'has_start_stop':
        _bool,
        'has_gear_box_thermal_management':
        _bool,
        'has_energy_recuperation':
        _bool,
        'is_hybrid':
        _bool,
        'has_roof_box':
        _bool,
        'has_periodically_regenerating_systems':
        _bool,
        'engine_has_variable_valve_actuation':
        _bool,
        'has_thermal_management':
        _bool,
        'engine_has_direct_injection':
        _bool,
        'has_lean_burn':
        _bool,
        'engine_has_cylinder_deactivation':
        _bool,
        'has_exhausted_gas_recirculation':
        _bool,
        'has_particle_filter':
        _bool,
        'has_selective_catalytic_reduction':
        _bool,
        'has_nox_storage_catalyst':
        _bool,
        'has_torque_converter':
        _bool,
        'is_cycle_hot':
        _bool,
        'is_serial':
        _bool,
        'use_dt_gear_shifting':
        _bool,
        _convert_str('eco_mode', 'fuel_saving_at_strategy'):
        _bool,
        'correct_start_stop_with_gears':
        _bool,
        'enable_phases_willans':
        _bool,
        'enable_willans':
        _bool,
        'has_engine_idle_coasting':
        _bool,
        'has_engine_off_coasting':
        _bool,
        'fuel_map':
        dictarray,
        'transition_cycle_index':
        positive_int,
        'alternator_charging_currents':
        tuplefloat2,
        'relative_electric_energy_change':
        tuplefloat,
        'alternator_current_model':
        _alternator_current_model(read=read),
        'dcdc_current_model':
        _alternator_current_model(read=read),
        'service_battery_status_model':
        _service_battery_status_model(read=read),
        'clutch_speed_model':
        function,
        'co2_emissions_model':
        function,
        'co2_error_function_on_emissions':
        function,
        'co2_error_function_on_phases':
        function,
        'motor_p0_electric_power_loss_function':
        function,
        'motor_p1_electric_power_loss_function':
        function,
        'motor_p2_electric_power_loss_function':
        function,
        'motor_p3_front_electric_power_loss_function':
        function,
        'motor_p3_rear_electric_power_loss_function':
        function,
        'motor_p4_front_electric_power_loss_function':
        function,
        'motor_p4_rear_electric_power_loss_function':
        function,
        'after_treatment_speed_model':
        function,
        'after_treatment_power_model':
        function,
        'clutch_window':
        tuplefloat2,
        'co2_params_calibrated':
        parameters,
        'co2_params_initial_guess':
        parameters,
        'drive_battery_technology_type':
        string,
        'cycle_type':
        string,
        'cycle_name':
        string,
        'specific_gear_shifting':
        string,
        'calibration_status':
        _type(type=And(Use(list), [(bool, Or(parameters, None))]),
              length=4,
              read=read),
        _convert_str('electric_load', 'service_battery_load'):
        tuplefloat2,
        'engine_thermostat_temperature_window':
        tuplefloat2,
        'engine_temperature_regression_model':
        _engine_temperature_regression_model(read=read),
        'engine_type':
        string,
        'starter_model':
        function,
        'drive_battery_model':
        function,
        'motor_p0_maximum_power_function':
        function,
        'motor_p1_maximum_power_function':
        function,
        'motor_p2_planetary_maximum_power_function':
        function,
        'start_stop_hybrid_params':
        dictstrfloat,
        'full_load_curve':
        function,
        'fmep_model':
        _fmep_model(read=read),
        'gear_box_efficiency_constants':
        dictstrdict,
        'gear_box_efficiency_parameters_cold_hot':
        dictstrdict,
        'scores':
        dictstrdict,
        'param_selections':
        dictstrdict,
        'model_selections':
        dictstrdict,
        'score_by_model':
        dictstrdict,
        'at_scores':
        ordictstrdict,
        'fuel_density':
        positive,
        'idle_engine_speed':
        tuplefloat2,
        'k1':
        positive_int,
        'k2':
        positive_int,
        'k5':
        positive_int,
        'max_gear':
        positive_int,
        'hybrid_modes':
        np_array_int,
        'road_loads':
        _type(type=And(Use(tuple), (_type(float), )), length=3, read=read),
        'start_stop_model':
        function,
        'gear_box_temperature_references':
        tuplefloat2,
        'torque_converter_speed_model':
        function,
        'phases_co2_emissions':
        tuplefloat,
        'bag_phases':
        _bag_phases(read=read),
        'phases_integration_times':
        _type(type=And(Use(tuple), (And(Use(tuple), (_type(float), )), )),
              read=read),
        'active_cylinder_ratios':
        tuplefloat,
        'extended_phases_co2_emissions':
        tuplefloat,
        'extended_phases_integration_times':
        _type(type=And(Use(tuple), (And(Use(tuple), (_type(float), )), )),
              read=read),
        'extended_integration_times':
        tuplefloat,
        'phases_fuel_consumptions':
        tuplefloat,
        'co2_rescaling_scores':
        tuplefloat,
        'accelerations':
        np_array,
        'alternator_currents':
        np_array,
        'active_cylinders':
        np_array,
        'alternator_powers':
        np_array,
        _convert_str('alternator_statuses', 'service_battery_charging_statuses'):
        np_array_int,
        'auxiliaries_power_losses':
        np_array,
        'auxiliaries_torque_loss_factors':
        tuplefloat,
        'auxiliaries_torque_losses':
        np_array,
        _convert_str('battery_currents', 'service_battery_currents'):
        np_array,
        'clutch_tc_powers':
        np_array,
        'clutch_tc_speeds_delta':
        np_array,
        'co2_emissions':
        np_array,
        'after_treatment_speeds_delta':
        np_array,
        'engine_coolant_temperatures':
        np_array,
        'engine_powers_out':
        np_array,
        'engine_speeds_out':
        np_array,
        'engine_speeds_out_hot':
        np_array,
        'engine_starts':
        np_array_bool,
        'co2_normalization_references':
        np_array,
        'final_drive_powers_in':
        np_array,
        'final_drive_speeds_in':
        np_array,
        'final_drive_torques_in':
        np_array,
        'fuel_consumptions':
        np_array,
        'gear_box_efficiencies':
        np_array,
        'gear_box_powers_in':
        np_array,
        'gear_box_speeds_in':
        np_array,
        'gear_box_temperatures':
        np_array,
        'gear_box_torque_losses':
        np_array,
        'gear_box_torques_in':
        np_array,
        'gear_shifts':
        np_array_bool,
        'gears':
        np_array_int,
        'identified_co2_emissions':
        np_array,
        'motive_powers':
        np_array,
        'on_engine':
        np_array_bool,
        'clutch_phases':
        np_array_bool,
        'after_treatment_warm_up_phases':
        np_array_bool,
        'on_idle':
        np_array_bool,
        _convert_str('state_of_charges', 'service_battery_state_of_charges'):
        np_array,
        'times':
        np_array_sorted,
        'velocities':
        np_array_greater_than_minus_one,
        _compare_str('obd_velocities'):
        np_array_greater_than_minus_one,
        'wheel_powers':
        np_array,
        'wheel_speeds':
        np_array,
        'wheel_torques':
        np_array,
    }
    try:
        from co2mpas_driver.co2mpas import plugin_schema

        schema = plugin_schema(schema)
    except ImportError:
        pass

    schema = {Optional(k): Or(Empty(), v) for k, v in schema.items()}
    schema[Optional(str)] = Or(_type(type=float, read=read), np_array)

    if not read:

        def _f(x):
            return x is sh.NONE

        schema = {k: And(v, Or(_f, Use(str))) for k, v in schema.items()}

    return Schema(schema)
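
# A minimal usage sketch (input_data is a hypothetical dict of vehicle
# parameters; every key is Optional, and unknown keys fall back to the
# Optional(str) catch-all added above):
data_schema = define_data_schema(read=True)
validated = data_schema.validate(input_data)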
Example #12
"""This module manages all the validations of the JSON models."""
from schema import Schema, And, Use, Optional, Regex

validator_memberATE = Schema({
    Optional('idAccount'):
    And(int),
    'username':
    And(str, Regex(r'^[A-Za-z0-9]{3,20}$')),
    'password':
    And(Use(str), lambda e: 254 > len(e) > 5),
    'name':
    And(
        str,
        Regex(
            r'^[a-zA-ZÀ-ÿ\u00f1\u00d1]+(\s*[a-zA-ZÀ-ÿ\u00f1\u00d1]*)*[a-zA-ZÀ-ÿ\u00f1\u00d1]{2,150}'
        )),
    'lastname':
    And(
        str,
        Regex(
            r'^[a-zA-ZÀ-ÿ\u00f1\u00d1]+(\s*[a-zA-ZÀ-ÿ\u00f1\u00d1]*)*[a-zA-ZÀ-ÿ\u00f1\u00d1]{2,150}'
        )),
    'dateBirth':
    And(str,
        Regex(r'^((19|20)\d\d)/(0?[1-9]|1[012])/(0?[1-9]|[12][0-9]|3[01])$')),
    'email':
    And(Use(str), Regex(r'\b[\w.%+-]+@[\w.-]+\.[a-zA-Z]{2,6}\b'),
        lambda e: 255 > len(e) > 4),
    'idCity':
    And(int),
    Optional('memberATEStatus'):
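
# The schema dict is truncated above; once it is closed, validation follows
# the usual pattern. A minimal sketch (member is a hypothetical request body;
# is_valid() returns a bool instead of raising SchemaError):
if not validator_memberATE.is_valid(member):
    ...  # reject the request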
Example #13
    def schema(cls) -> Schema:
        """
        [$name]: str
            Name as specified in the key of the parent app.

            Added by system. DO NOT specify this yourself in the YAML files.

        [roles]: List[str]
            A list of roles for this service. You can use arbitrary strings and get services by their
            assigned roles using :func:`~riptide.config.document.app.App.get_service_by_role`.

            Some roles are pre-defined and have a special meaning:

                *main*:
                    This service is the main service for the app.

                    Some commands will default to this service and the proxy URL for this service is shorter.
                    Usually services are accessible via ``http://<project_name>--<service_name>.<proxy_url>``,
                    however the main service is accessible via ``http://<project_name>.<proxy_url>``.

                    Only one service is allowed to have this role.
                *src*:
                    The container of this service will have access to the source code of the application.

                    Its working directory will be set accordingly.
                *db*:
                    This service is the primary database. A database driver has to be set (see key ``driver``).

                    This service is then used by Riptide for `database management </user_docs/db.html>`.

        image: str
            Docker Image to use

        [command]: str
            Command to run inside the container. Defaults to the command defined in the image.

            .. warning:: Avoid quotes (", ') inside of the command, as those may lead to strange side effects.

        [port]: int
            HTTP port that the web service is accessible under. This port will be used by the proxy server to redirect
            the traffic.

            If the port is not specified, the service is not accessible via proxy server.

        [logging]
            Logging settings. All logs will be placed inside the "_riptide/logs" directory.

            [stdout]: bool
                Whether or not to log the stdout stream of the container's main command. Default: false
            [stderr]: bool
                Whether or not to log the stderr stream of the container's main command. Default: false
            [paths]
                {key}: str
                    Additional text files to mount into the logging directory. Keys are filenames on the host (without .log)
                    and values are the paths inside the containers.
            [commands]
                {key}: str
                    Additional commands to start inside the container. Their stdout and stderr will be logged to the file
                    specified by the key.

        [pre_start]: List[str]
            List of commands to run before the container starts. They are run sequentially.
            The startup will wait for the commands to finish. Exit codes (failures) are ignored.

            Each of these commands is run in a separate container based on the service specification. Each command
            is run in a "sh" shell.

        [post_start]: List[str]
            List of commands to run after the container starts. They are run sequentially.
            The startup will wait for the commands to finish. Exit codes (failures) are ignored.

            Each of these commands is run inside the service container (equivalent of ``docker exec``).
            Each command is run in a "sh" shell.

        [environment]
            Additional environment variables

            {key}: str
                Key is the name of the variable, value is the value.

        [working_directory]: str
            Working directory for the service, either

            - absolute, if an absolute path is given
            - relative to the src specified in the project, if the role "src" is set.
            - relative to the default working directory from the image, if the role is not set.

            Defaults to ``.``.

        [config]
            Additional configuration files to mount. These files are NOT directly mounted.
            Instead they are processed and the resulting file is mounted.

            All variables and variable helpers inside the configuration file are processed.

            Example configuration file (demo.ini)::

                [demo]
                domain={{domain()}}
                project_name={{parent().parent().name}}

            Resulting file that will be mounted::

                [demo]
                domain=projectname.riptide.local
                project_name=projectname

            {key}
                from: str
                    Path to the configuration file, relative to any YAML file that was used
                    to load the project (including "riptide.yml" and all yaml files used inside the repository;
                    all are searched). Absolute paths are not allowed.

                to: str
                    Path to store the configuration file at, relative to working directory of container or absolute.

        [additional_ports]
            Additional TCP and/or UDP ports that will be made available on the host system.
            For details see section in
            `user guide </user_docs/7_working_with_riptide.html#access-other-tcp-udp-ports>`_.

            {key}
                title: str
                    Title for this port, will be displayed in ``riptide status``

                container: int
                    Port number inside the container

                host_start: int
                    First port number on host that Riptide will try to reserve, if the
                    port is already occupied, the next one will be used. This port
                    will be reserved and permanently used for this service after that.

        [additional_volumes]
            Additional volumes to mount into the container for this command.

            {key}
                host: str
                    Path on the host system to the volume. Avoid hardcoded absolute paths.
                container: str
                    Path inside the container (relative to src of Project or absolute).
                [mode]: str
                    Whether to mount the volume read-write ("rw", default) or read-only ("ro").
                [type]: str
                    Whether this volume is a "directory" (default) or a "file". Only checked if the file/dir does
                    not exist yet on the host system. Riptide will then create it with the appropriate type.

        [driver]
            The database driver configuration, set this only if the role "db" is set.

            Detailed documentation can be found in a `separate section </config_docs/database_drivers.html>`_.

            name: str
                Name of the database driver, must be installed.
            config: ???
                Specification depends on the database driver.

        [run_as_current_user]: bool
            Whether to run as the user using riptide (True)
            or image default (False).

            Default: True

            Riptide will always create the user and group, matching the host user and group,
            inside the container on startup, regardless of this setting.

            Some images don't support switching the user; set this to false in that case.
            Please note that, if you set this to false and also specify the role 'src', you may run
            into permission issues.

        [allow_full_memlock]: bool
            Whether to set memlock ulimit to -1:-1 (soft:hard).
            This is required for some database services, such as Elasticsearch.
            Note that engines might ignore this setting, if they don't support it.

            Default: False

        **Example Document:**

        .. code-block:: yaml

            service:
              image: node:10
              roles:
                - main
                - src
              command: 'node server.js'
              port: 1234
              logging:
                stdout: true
                stderr: false
                paths:
                  one: '/foo/bar'
                commands:
                  two: 'varnishlog'
              pre_start:
                - "echo 'command 1'"
                - "echo 'command 2'"
              post_start:
                - "echo 'command 3'"
                - "echo 'command 4'"
              environment:
                SOMETHING_IMPORTANT: foo
              config:
                one:
                  from: ci/config.yml
                  to: app_config/config.yml
              working_directory: www
              additional_ports:
                one:
                  title: MySQL Port
                  container: 3306
                  host_start: 3006
              additional_volumes:
                temporary_files:
                  host: '{{ get_tempdir() }}'
                  container: /tmp

        """
        return Schema(
            {
                Optional('$ref'): str,  # reference to other Service documents
                Optional('$name'): str,  # Added by system during processing parent app.
                Optional('roles'): [str],
                'image': str,
                Optional('command'): str,
                Optional('port'): int,
                Optional('logging'): {
                    Optional('stdout'): bool,
                    Optional('stderr'): bool,
                    Optional('paths'): {str: str},
                    Optional('commands'): {str: str}
                },
                Optional('pre_start'): [str],
                Optional('post_start'): [str],
                Optional('environment'): {str: str},
                Optional('config'): {
                    str: {
                        'from': str,
                        '$source': str,  # Path to the document that "from" references. Added during loading of the service
                        'to': str
                    }
                },
                # Whether to run as the user using riptide (True) or image default (False). Default: True
                # Limitation: If false and the image USER is not root,
                #             then a user with the id of the image USER must exist in /etc/passwd of the image.
                Optional('run_as_current_user'): bool,
                # DEPRECATED. Inverse of run_as_current_user if set
                Optional('run_as_root'): bool,
                # Whether to create the riptide user and group, mapped to current user. Default: False
                Optional('dont_create_user'): bool,
                Optional('working_directory'): str,
                Optional('additional_ports'): {
                    str: {
                        'title': str,
                        'container': int,
                        'host_start': int
                    }
                },
                Optional('additional_volumes'): {
                    str: {
                        'host': str,
                        'container': str,
                        Optional('mode'): Or('rw', 'ro'),  # default: rw - can be rw/ro.
                        Optional('type'): Or('directory', 'file')  # default: directory
                    }
                },
                Optional('allow_full_memlock'): bool,
                # db only
                Optional('driver'): {
                    'name': str,
                    'config': any  # defined by driver
                }
            }
        )
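
Usage sketch (not part of the original source): a document like the one in the
docstring can be parsed with PyYAML and handed to the returned Schema object.
The name service_schema below is an assumption for wherever the caller binds
the return value.

import yaml

# Sketch: validate the docstring's example document (assumed names).
with open('service.yml') as f:
    document = yaml.safe_load(f)

service_schema.validate(document['service'])  # raises SchemaError on mismatch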
Example #14
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
"""
Schema Validation for Deployment map files
"""

from schema import Schema, And, Use, Or, Optional, Regex
from logger import configure_logger

LOGGER = configure_logger(__name__)

# Pipeline Params
PARAM_SCHEMA = {
    Optional("notification_endpoint"): str,
    Optional("schedule"): str,
    Optional("restart_execution_on_update"): bool,
}

AWS_ACCOUNT_ID_REGEX_STR = r"\A[0-9]{12}\Z"
AWS_ACCOUNT_ID_SCHEMA = Schema(
    And(
        Or(int, str), Use(str),
        Regex(
            AWS_ACCOUNT_ID_REGEX_STR,
            error=(
                "The specified account id is incorrect. "
                "This typically happens when you specify the account id as a "
                "number, while the account id starts with a zero. If this is "
                "the case, please wrap the account id in quotes to make it a "
                "string. An AWS Account Id is a number of 12 digits, which "
                "should start with a zero if the Account Id has a zero at "
Example #15
class OutputLOCAL(DependencyLOCAL):
    PARAM_CACHE = 'cache'
    PARAM_METRIC = 'metric'
    PARAM_METRIC_TYPE = 'type'
    PARAM_METRIC_XPATH = 'xpath'

    METRIC_SCHEMA = Or(None, bool,
                       {Optional(PARAM_METRIC_TYPE): Or(str, None),
                        Optional(PARAM_METRIC_XPATH): Or(str, None)})

    DoesNotExistError = OutputDoesNotExistError
    IsNotFileOrDirError = OutputIsNotFileOrDirError

    def __init__(self,
                 stage,
                 path,
                 info=None,
                 remote=None,
                 cache=True,
                 metric=False):
        super(OutputLOCAL, self).__init__(stage, path, info, remote=remote)
        self.use_cache = cache
        self.metric = metric

    @property
    def md5(self):
        return self.info.get(self.project.cache.local.PARAM_MD5, None)

    @property
    def cache(self):
        return self.project.cache.local.get(self.md5)

    def dumpd(self):
        ret = super(OutputLOCAL, self).dumpd()
        ret[self.PARAM_CACHE] = self.use_cache

        if isinstance(self.metric, dict):
            if self.PARAM_METRIC_XPATH in self.metric and \
               not self.metric[self.PARAM_METRIC_XPATH]:
                del self.metric[self.PARAM_METRIC_XPATH]

        if self.metric:
            ret[self.PARAM_METRIC] = self.metric

        return ret

    def changed(self):
        if not self.use_cache:
            return super(OutputLOCAL, self).changed()

        return self.project.cache.local.changed(self.path_info, self.info)

    def checkout(self):
        if not self.use_cache:
            return

        self.project.cache.local.checkout(self.path_info, self.info)

    def _verify_metric(self):
        if not self.metric:
            return

        if os.path.isdir(self.path):
            msg = 'Directory \'{}\' cannot be used as metrics.'
            raise DvcException(msg.format(self.rel_path))

        if not istextfile(self.path):
            msg = 'Binary file \'{}\' cannot be used as metrics.'
            raise DvcException(msg.format(self.rel_path))

    def save(self):
        if not self.use_cache:
            super(OutputLOCAL, self).save()
            self._verify_metric()
            msg = 'Output \'{}\' doesn\'t use cache. Skipping saving.'
            self.project.logger.info(msg.format(self.rel_path))
            return

        if not os.path.exists(self.path):
            raise self.DoesNotExistError(self.rel_path)

        if not os.path.isfile(self.path) \
           and not os.path.isdir(self.path):  # pragma: no cover
            raise self.IsNotFileOrDirError(self.rel_path)

        if (os.path.isfile(self.path) and os.path.getsize(self.path) == 0) or \
           (os.path.isdir(self.path) and len(os.listdir(self.path)) == 0):
            msg = "File/directory '{}' is empty.".format(self.rel_path)
            self.project.logger.warn(msg)

        if not self.changed():
            msg = 'Output \'{}\' didn\'t change. Skipping saving.'
            self.project.logger.info(msg.format(self.rel_path))
            return

        if self.is_local:
            if self.project.scm.is_tracked(self.path):
                raise OutputAlreadyTrackedError(self.rel_path)

            if self.use_cache:
                self.project.scm.ignore(self.path)

        self.info = self.project.cache.local.save(self.path_info)

    def remove(self, ignore_remove=False):
        self.remote.remove(self.path_info)
        if ignore_remove and self.use_cache and self.is_local:
            self.project.scm.ignore_remove(self.path)

    def move(self, out):
        if self.use_cache and self.is_local:
            self.project.scm.ignore_remove(self.path)

        self.remote.move(self.path_info, out.path_info)
        self.path = out.path
        self.path_info = out.path_info
        self.save()

        if self.use_cache and self.is_local:
            self.project.scm.ignore(self.path)
Example #16
    Or(None, Use(float, error="pixels-per-um does not appear to be a number")),
    '--bar-microns':
    And(Use(int),
        lambda n: 0 < n,
        error="--bar-microns must be an integer greater than 0"),
    '--padding':
    And(Use(int),
        lambda n: 0 <= n,
        error="--padding must be greater or equal to 0"),
    '--bar-padding':
    And(Use(int),
        lambda n: 0 <= n,
        error="--bar-padding must be greater or equal to 0"),
    '--font-size':
    And(Use(int), lambda n: 1 < n, error="--font-size must be greater than 1"),
    Optional('--skip-merge'):
    bool,
    Optional('--invert'):
    bool,
    '--ortho': [
        Or(None,
           And(Use(literal_eval),
               lambda x: len(x) == 3 and all(v >= 0 for v in x)))
    ]
}

schema = Schema(schema_def)
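
This fragment follows the usual docopt pattern: the raw option dict returned by
docopt(__doc__) is passed through the schema, which coerces the string flags
and range-checks them. A hedged sketch of the call site:

from docopt import docopt
from schema import SchemaError

# Sketch of a typical call site for the schema defined above.
arguments = docopt(__doc__)
try:
    arguments = schema.validate(arguments)  # coerced values replace raw strings
except SchemaError as error:
    exit(error)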

Example #17
File: dellnos9.py  Project: mikelyy/hil

import logging
from hil.model import db, Switch
from hil.errors import BadArgumentError
from hil.model import BigIntegerType
from hil.ext.switches.common import check_native_networks, parse_vlans
from hil.config import core_schema, string_is_bool
from hil.ext.switches import _vlan_http


logger = logging.getLogger(__name__)

CONFIG = 'config-commands'
SHOW = 'show-command'
EXEC = 'exec-command'

core_schema[__name__] = {
    Optional('save'): string_is_bool
}


class DellNOS9(Switch, _vlan_http.Session):
    """Dell S3048-ON running Dell NOS9"""
    api_name = 'http://schema.massopencloud.org/haas/v0/switches/dellnos9'

    __mapper_args__ = {
        'polymorphic_identity': api_name,
    }

    id = db.Column(BigIntegerType,
                   db.ForeignKey('switch.id'), primary_key=True)
    hostname = db.Column(db.String, nullable=False)
    username = db.Column(db.String, nullable=False)
Example #18
def main():
    ch = logging.StreamHandler()
    ch.setLevel(logging.INFO)
    fh = logging.FileHandler('dht.log')
    fh.setLevel(logging.DEBUG)
    logging.basicConfig(
        handlers=[ch, fh],
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        level=logging.DEBUG)
    logger = logging.getLogger(__name__)
    logger.info('dht')

    parser = argparse.ArgumentParser(
        description=
        'Publishes humidity and temperature data from Adafruit DHT series sensors'
    )
    parser.add_argument('-c',
                        '--config',
                        help='path to config file',
                        default='config.ini')
    args = parser.parse_args()

    config = configparser.ConfigParser()
    if not config.read(args.config):
        logger.warning('failed to read config file, path={}'.format(args.config))

    config_schema = Schema(
        {
            'dht': {
                'type': Use(int),
                'pin': Use(int),
                Optional('read_interval', default=60): Use(int)
            },
            Optional('mqtt'): {
                Optional('broker_host', default='localhost'): str,
                Optional('broker_port', default=1883): Use(int)
            }
        },
        ignore_extra_keys=True)
    config_schema.validate(config._sections)

    location = config.get('mqtt', 'location', fallback='unknown')
    humidity_topic = '/'.join((location, 'humidity'))
    temperature_topic = '/'.join((location, 'temperature'))

    # Create sensor
    sensor_type = config.getint('dht', 'type')
    pin = config.getint('dht', 'pin')
    sensor = DhtSensor(sensor_type, pin)

    def on_connect(client, userdata, flags, rc):
        logger.info('connected to mqtt broker')

    # Connect to MQTT broker
    client = mqtt.Client()
    client.on_connect = on_connect
    host = config.get('mqtt', 'broker_host', fallback='localhost')
    port = config.getint('mqtt', 'broker_port', fallback=1883)
    client.connect(host, port)
    client.loop_start()

    # Publish sensor readings
    read_interval = config.getint('dht', 'read_interval', fallback=60)
    logger.debug('read_interval={}'.format(read_interval))
    scheduler = sched.scheduler(time.time, time.sleep)

    def read():
        scheduler.enter(read_interval, 1, read)
        reading = sensor.read_retry()
        if not reading.is_valid():
            return

        logger.debug(
            'publishing humidity={} and temperature={} for location={}'.format(
                reading.humidity, reading.temperature, location))

        client.publish(humidity_topic, reading.humidity)
        client.publish(temperature_topic, reading.temperature)

    read()
    scheduler.run()
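
Because configparser exposes every value as a string, the numeric fields above
go through Use(int); a standalone sketch of that coercion, including how the
read_interval default is filled in:

from schema import Optional, Schema, Use

coercion_sketch = Schema(
    {'dht': {'type': Use(int), 'pin': Use(int),
             Optional('read_interval', default=60): Use(int)}},
    ignore_extra_keys=True)

print(coercion_sketch.validate({'dht': {'type': '22', 'pin': '4'}}))
# -> {'dht': {'type': 22, 'pin': 4, 'read_interval': 60}}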
Example #19
                raise SchemaError('classFileName {} not found.'.format(class_file_name))

        builtin_name = data.get(builtin_key)
        class_args = data.get('classArgs')
        self.validate_class_args(class_args, algo_type, builtin_name)

    def validate(self, data):
        self.algo_schema.update(self.builtin_name_schema[self.algo_type])
        Schema(self.algo_schema).validate(data)
        self.validate_extras(data, self.algo_type)


common_schema = {
    'authorName': setType('authorName', str),
    'experimentName': setType('experimentName', str),
    Optional('description'): setType('description', str),
    'trialConcurrency': setNumberRange('trialConcurrency', int, 1, 99999),
    Optional('maxExecDuration'): And(Regex(r'^[1-9][0-9]*[s|m|h|d]$', error='ERROR: maxExecDuration format is [digit]{s,m,h,d}')),
    Optional('maxTrialNum'): setNumberRange('maxTrialNum', int, 1, 99999),
    'trainingServicePlatform': setChoice(
        'trainingServicePlatform', 'adl', 'remote', 'local', 'pai', 'kubeflow', 'frameworkcontroller', 'paiYarn', 'dlts', 'aml'),
    Optional('searchSpacePath'): And(os.path.exists, error=SCHEMA_PATH_ERROR % 'searchSpacePath'),
    Optional('multiPhase'): setType('multiPhase', bool),
    Optional('multiThread'): setType('multiThread', bool),
    Optional('nniManagerIp'): setType('nniManagerIp', str),
    Optional('logDir'): And(os.path.isdir, error=SCHEMA_PATH_ERROR % 'logDir'),
    Optional('debug'): setType('debug', bool),
    Optional('versionCheck'): setType('versionCheck', bool),
    Optional('logLevel'): setChoice('logLevel', 'trace', 'debug', 'info', 'warning', 'error', 'fatal'),
    Optional('logCollection'): setChoice('logCollection', 'http', 'none'),
    'useAnnotation': setType('useAnnotation', bool),
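
setType, setChoice and setNumberRange are not part of the schema library; they
are project-local helpers that wrap And with a formatted error message. Roughly
(a sketch, not the project's exact source):

from schema import And

def setType(key, valueType):
    return And(valueType, error='%s should be a %s' % (key, valueType.__name__))

def setChoice(key, *choices):
    return And(lambda v: v in choices,
               error='%s should be one of %s' % (key, list(choices)))

def setNumberRange(key, keyType, start, end):
    return And(keyType, lambda v: start <= v <= end,
               error='%s should be a %s in range [%s, %s]'
                     % (key, keyType.__name__, start, end))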
Example #20
class OutputBase(object):
    IS_DEPENDENCY = False

    REMOTE = RemoteBASE

    PARAM_PATH = "path"
    PARAM_CACHE = "cache"
    PARAM_METRIC = "metric"
    PARAM_METRIC_TYPE = "type"
    PARAM_METRIC_XPATH = "xpath"
    PARAM_PERSIST = "persist"

    METRIC_SCHEMA = Or(
        None,
        bool,
        {
            Optional(PARAM_METRIC_TYPE): Or(str, None),
            Optional(PARAM_METRIC_XPATH): Or(str, None),
        },
    )

    PARAM_TAGS = "tags"

    DoesNotExistError = OutputDoesNotExistError
    IsNotFileOrDirError = OutputIsNotFileOrDirError

    sep = "/"

    def __init__(
        self,
        stage,
        path,
        info=None,
        remote=None,
        cache=True,
        metric=False,
        persist=False,
        tags=None,
    ):
        # These output (and dependency) objects carry several paths/urls;
        # here is a list with comments:
        #
        #   .def_path - path from definition in stage file
        #   .path_info - PathInfo/URLInfo structured resolved path
        #   .fspath - local only, resolved
        #   .__str__ - for presentation purposes, def_path/relpath
        #
        # A resolved path contains the actual location; it should be
        # absolute and must not contain remote:// refs.
        self.stage = stage
        self.repo = stage.repo if stage else None
        self.def_path = path
        self.info = info
        self.remote = remote or self.REMOTE(self.repo, {})
        self.use_cache = False if self.IS_DEPENDENCY else cache
        self.metric = False if self.IS_DEPENDENCY else metric
        self.persist = persist
        self.tags = None if self.IS_DEPENDENCY else (tags or {})

        if self.use_cache and self.cache is None:
            raise DvcException(
                "no cache location setup for '{}' outputs.".format(
                    self.REMOTE.scheme))

        self.path_info = self._parse_path(remote, path)

    def _parse_path(self, remote, path):
        if remote:
            parsed = urlparse(path)
            return remote.path_info / parsed.path.lstrip("/")
        else:
            return self.REMOTE.path_cls(path)

    def __repr__(self):
        return "{class_name}: '{def_path}'".format(
            class_name=type(self).__name__, def_path=self.def_path)

    def __str__(self):
        return self.def_path

    @property
    def scheme(self):
        return self.REMOTE.scheme

    @property
    def is_in_repo(self):
        return False

    @property
    def cache(self):
        return getattr(self.repo.cache, self.scheme)

    @property
    def dir_cache(self):
        return self.cache.get_dir_cache(self.checksum)

    def assign_to_stage_file(self, target_repo):
        raise DvcException("change repo is not supported for {}".format(
            self.scheme))

    @classmethod
    def supported(cls, url):
        return cls.REMOTE.supported(url)

    @property
    def cache_path(self):
        return self.cache.checksum_to_path_info(self.checksum).url

    @property
    def checksum(self):
        return self.info.get(self.remote.PARAM_CHECKSUM)

    @property
    def is_dir_checksum(self):
        return self.remote.is_dir_checksum(self.checksum)

    @property
    def exists(self):
        return self.remote.exists(self.path_info)

    def changed_checksum(self):
        return (self.checksum != self.remote.save_info(
            self.path_info)[self.remote.PARAM_CHECKSUM])

    def changed_cache(self):
        if not self.use_cache or not self.checksum:
            return True

        return self.cache.changed_cache(self.checksum)

    def status(self):
        if self.checksum and self.use_cache and self.changed_cache():
            return {str(self): "not in cache"}

        if not self.exists:
            return {str(self): "deleted"}

        if self.changed_checksum():
            return {str(self): "modified"}

        if not self.checksum:
            return {str(self): "new"}

        return {}

    def changed(self):
        status = self.status()
        logger.debug(str(status))
        return bool(status)

    @property
    def is_empty(self):
        return self.remote.is_empty(self.path_info)

    def isdir(self):
        return self.remote.isdir(self.path_info)

    def isfile(self):
        return self.remote.isfile(self.path_info)

    def save(self):
        if not self.exists:
            raise self.DoesNotExistError(self)

        if not self.isfile and not self.isdir:
            raise self.IsNotFileOrDirError(self)

        if self.is_empty:
            logger.warning("'{}' is empty.".format(self))

        if not self.use_cache:
            self.info = self.remote.save_info(self.path_info)
            if self.metric:
                self.verify_metric()
            if not self.IS_DEPENDENCY:
                logger.info(
                    "Output '{}' doesn't use cache. Skipping saving.".format(
                        self))
            return

        assert not self.IS_DEPENDENCY

        if not self.changed():
            logger.info(
                "Output '{}' didn't change. Skipping saving.".format(self))
            return

        if self.is_in_repo:
            if self.repo.scm.is_tracked(self.fspath):
                raise OutputAlreadyTrackedError(self)

            if self.use_cache:
                self.repo.scm.ignore(self.fspath)

        self.info = self.remote.save_info(self.path_info)

    def commit(self):
        if self.use_cache:
            self.cache.save(self.path_info, self.info)

    def dumpd(self):
        ret = copy(self.info)
        ret[self.PARAM_PATH] = self.def_path

        if self.IS_DEPENDENCY:
            return ret

        ret[self.PARAM_CACHE] = self.use_cache

        if isinstance(self.metric, dict):
            if (self.PARAM_METRIC_XPATH in self.metric
                    and not self.metric[self.PARAM_METRIC_XPATH]):
                del self.metric[self.PARAM_METRIC_XPATH]

        ret[self.PARAM_METRIC] = self.metric
        ret[self.PARAM_PERSIST] = self.persist

        if self.tags:
            ret[self.PARAM_TAGS] = self.tags

        return ret

    def verify_metric(self):
        raise DvcException("verify metric is not supported for {}".format(
            self.scheme))

    def download(self, to, resume=False):
        self.remote.download([self.path_info], [to.path_info], resume=resume)

    def checkout(self, force=False, progress_callback=None, tag=None):
        if not self.use_cache:
            return

        if tag:
            info = self.tags[tag]
        else:
            info = self.info

        self.cache.checkout(
            self.path_info,
            info,
            force=force,
            progress_callback=progress_callback,
        )

    def remove(self, ignore_remove=False):
        self.remote.remove(self.path_info)
        if self.scheme != "local":
            return

        if ignore_remove and self.use_cache and self.is_in_repo:
            self.repo.scm.ignore_remove(self.fspath)

    def move(self, out):
        if self.scheme == "local" and self.use_cache and self.is_in_repo:
            self.repo.scm.ignore_remove(self.fspath)

        self.remote.move(self.path_info, out.path_info)
        self.def_path = out.def_path
        self.path_info = out.path_info
        self.save()
        self.commit()

        if self.scheme == "local" and self.use_cache and self.is_in_repo:
            self.repo.scm.ignore(self.fspath)

    def get_files_number(self):
        if not self.use_cache or not self.checksum:
            return 0

        if self.is_dir_checksum:
            return len(self.dir_cache)

        return 1

    def unprotect(self):
        if self.exists:
            self.remote.unprotect(self.path_info)

    def _collect_used_dir_cache(self, remote=None, force=False, jobs=None):
        """Get a list of `info`s retaled to the given directory.

        - Pull the directory entry from the remote cache if it was changed.

        Example:

            Given the following commands:

            $ echo "foo" > directory/foo
            $ echo "bar" > directory/bar
            $ dvc add directory

            It will return something similar to the following list:

            [
                { 'path': 'directory/foo', 'md5': 'c157a79031e1', ... },
                { 'path': 'directory/bar', 'md5': 'd3b07384d113', ... },
            ]
        """

        ret = []

        if self.cache.changed_cache_file(self.checksum):
            try:
                self.repo.cloud.pull(
                    [{
                        self.remote.PARAM_CHECKSUM: self.checksum,
                        "name": str(self),
                    }],
                    jobs=jobs,
                    remote=remote,
                    show_checksums=False,
                )
            except DvcException:
                logger.debug("failed to pull cache for '{}'".format(self))

        if self.cache.changed_cache_file(self.checksum):
            msg = ("Missing cache for directory '{}'. "
                   "Cache for files inside will be lost. "
                   "Would you like to continue? Use '-f' to force.")
            if not force and not prompt.confirm(msg.format(self.path_info)):
                raise DvcException(
                    "unable to fully collect used cache"
                    " without cache for directory '{}'".format(self))
            else:
                return ret

        for entry in self.dir_cache:
            info = copy(entry)
            path_info = self.path_info / entry[self.remote.PARAM_RELPATH]
            info["name"] = str(path_info)
            ret.append(info)

        return ret

    def get_used_cache(self, **kwargs):
        """Get a dumpd of the given `out`, with an entry including the branch.

        The `used_cache` of an output is no more than its `info`.

        In case that the given output is a directory, it will also
        include the `info` of its files.
        """

        if self.stage.is_repo_import:
            return []

        if not self.use_cache:
            return []

        if not self.info:
            logger.warning(
                "Output '{}'({}) is missing version info. Cache for it will "
                "not be collected. Use dvc repro to get your pipeline up to "
                "date.".format(self, self.stage))
            return []

        ret = [{self.remote.PARAM_CHECKSUM: self.checksum, "name": str(self)}]

        if not self.is_dir_checksum:
            return ret

        ret.extend(self._collect_used_dir_cache(**kwargs))

        return ret
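
METRIC_SCHEMA above admits three shapes: None, a bare bool, or a small dict
with optional type/xpath keys. A quick self-contained sketch of values that
pass:

from schema import Optional, Or, Schema

metric_schema = Schema(Or(None, bool, {Optional('type'): Or(str, None),
                                       Optional('xpath'): Or(str, None)}))

for value in (None, False, True, {}, {'type': 'json', 'xpath': 'train.acc'}):
    metric_schema.validate(value)  # all of these pass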
Example #21
def schema():
    """Provide schema for shell configuration."""
    return Schema({
        'script':
        And(Or(type(' '), type(u' ')), len),
        Optional('title', default=''):
        str,
        Optional('model', default={}): {
            Optional(And(str, len)): object
        },
        Optional('env', default={}): {
            Optional(And(str, len)): And(str, len)
        },
        Optional('item', default=None):
        object,
        Optional('dry_run', default=False):
        bool,
        Optional('debug', default=False):
        bool,
        Optional('strict', default=False):
        bool,
        Optional('variables', default={}): {
            Optional(
                And(Or(type(' '), type(u' ')), len,
                    Regex(r'([a-zA-Z][_a-zA-Z]*)'))):
            Or(type(' '), type(u' '))
        },
        Optional('temporary_scripts_path', default=''):
        Or(type(''), type(u'')),
        Optional('internal', default=False):
        bool
    })
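
Since every key except 'script' is Optional with a default, validating a
minimal document fills in the rest; a usage sketch:

cfg = schema().validate({'script': 'echo hello'})
assert cfg['title'] == '' and cfg['dry_run'] is False and cfg['env'] == {}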
Example #22
    def __init__(self,
                 input_file=None,
                 output_file=None,
                 dfxml_file=None,
                 report_file=None,
                 commit=False,
                 ignore_patterns=[],
                 key=None,
                 rules=[]):
        #  Validate configuration
        from schema import Schema, Optional, Or, Use, And, SchemaError
        schema = Schema({
            'input_file':
            Use(lambda f: open(f, 'r'), error='Cannot read the input file'),
            Optional('output_file'):
            Or(
                None,
                Use(lambda f: open(f, 'w'),
                    error='Cannot write to the output file')),
            Optional('dfxml_file'):
            Or(None, Use(lambda f: open(f, 'r'),
                         error='Cannot read DFXML file')),
            Optional('report_file'):
            Or(None,
               lambda f: open(f, 'w'),
               error='Cannot write to the report file'),
            'commit':
            Or(True, False),
            'ignore_patterns':
            Use(lambda f: re.compile(convert_fileglob_to_re('|'.join(f))),
                error='Cannot compile unified ignore regex'),
            'key':
            Or(None, str),
            'rules':
            And([(redact_rule, redact_action)], lambda f: len(f) > 0)
        })
        try:
            kwargs = {
                'input_file': input_file,
                'output_file': output_file,
                'dfxml_file': dfxml_file,
                'report_file': report_file,
                'commit': commit,
                'ignore_patterns': ignore_patterns,
                'key': key,
                'rules': rules
            }
            self.conf = schema.validate(kwargs)
        except SchemaError as e:
            logging.warning('The redact configuration did not validate:')
            exit(e)
        if self.conf['commit'] and not self.conf['output_file']:
            logging.error('An output file is required when COMMIT is on.')
            exit(1)
        # TODO Check input and output are not same file

        logging.debug('Configuration:\n%s' % self.conf)

        # Print rules (each rule is a (redact_rule, redact_action) pair)
        logging.debug(
            json.dumps([(rule.line, rule.__class__.__name__,
                         action.__class__.__name__,
                         rule.lgpattern if hasattr(rule, 'lgpattern') else '')
                        for rule, action in self.conf['rules']],
                       indent=4))

        self.input_file = self.conf['input_file']
        from os import path
        self.image_size = path.getsize(self.input_file.name)
        self.output_file = self.conf['output_file']
        self.report_file = self.conf['report_file']
        self.dfxml_file = self.conf['dfxml_file']
        self.commit = self.conf['commit']
        self.configure_report_logger()
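
A notable trick above is using Use to open files during validation, so
self.conf ends up holding ready file handles rather than paths. A minimal
sketch of the pattern:

import tempfile
from schema import Schema, Use

opener = Schema(Use(lambda f: open(f, 'r'), error='Cannot read the input file'))

with tempfile.NamedTemporaryFile('w', delete=False) as tmp:
    tmp.write('raw image bytes')
handle = opener.validate(tmp.name)  # validation returns the opened file object
handle.close()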
Example #23
from schema import Schema, And, Or, Use, Optional, Literal
from .tools import HasKey, Nullable, Url, List

SiteConfigSchemaObject = {
    "server_name": Use(str),
    Optional("web_upstream", default="localhost:8069"): str,
    Optional("poll_upstream", default="localhost:8072"): str,
    Optional("proxy_pass"): Use(str),  # Or default=None ?
    Optional("disable_longpolling", default=False): bool,
    Optional("httpaccess"): bool,
    Optional("httpsaccess"): bool,
    Optional("posbox", default=False): bool,
    Optional("proxy_http_11"): bool,
    Optional("cache_statics"): bool,
    Optional("disable_cache_zone"): bool,
    Optional("header_upgrade"): bool,
    Optional("header_connection"): bool,
    Optional("header_host"): bool,
    Optional("masquerade"): str,
    Optional("redirect"): str,
    Optional("allow_tls_v1"): Nullable(bool),
    Optional("enable_hsts"): bool,
    Optional("certificate_folder"): str,
    Optional("ssl_certificate"): str,
    Optional("ssl_certificate_key"): str,
    Optional("ssl_trusted_certificate"): bool,
    Optional("disable_stapling"): bool,
    Optional("ip_allow", default=[]): List(str),
    Optional("ip_deny", default=[]): List(str),
}
Example #24
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

import os
from schema import Schema, And, Use, Optional, Regex, Or

common_schema = {
    'authorName':
    str,
    'experimentName':
    str,
    'trialConcurrency':
    And(int, lambda n: 1 <= n <= 999999),
    Optional('maxExecDuration'):
    Regex(r'^[1-9][0-9]*[s|m|h|d]$'),
    Optional('maxTrialNum'):
    And(int, lambda x: 1 <= x <= 99999),
    'trainingServicePlatform':
    And(str, lambda x: x in ['remote', 'local', 'pai']),
    Optional('searchSpacePath'):
    os.path.exists,
    'useAnnotation':
    bool,
    'tuner':
    Or(
        {
            'builtinTunerName':
            Or('TPE', 'Random', 'Anneal', 'Evolution', 'SMAC', 'BatchTuner'),
            'classArgs': {
Example #25
from schema import Schema, Optional, And, Or

from msa.plugins.notifications import handlers

handler_factories = [handlers.SendNotificationEventHandler]

entities_list = []

config_schema = Schema({
    Optional("preferred_provider"):
    Or("pushbullet", "email", "slack"),
    "providers": {
        Optional("pushbullet"): {
            "token": And(str, len)
        },
        Optional("email"): {
            "host": And(str, len),
            "port": int,
            "from": And(str, len),
            Optional("password"): And(str, len),
            Optional("username"): And(str, len),
            Optional("ssl"): bool,
            Optional("tls"): bool,
        },
        Optional("slack"): {
            "webhook_url": And(str, len),
            Optional("username"): And(str, len),
            Optional("icon_url"): And(str, len),
            Optional("icon_emoji"): And(str, len),
        },
    },
Example #26
class Stage(object):
    STAGE_FILE = "Dvcfile"
    STAGE_FILE_SUFFIX = ".dvc"

    PARAM_MD5 = "md5"
    PARAM_CMD = "cmd"
    PARAM_WDIR = "wdir"
    PARAM_DEPS = "deps"
    PARAM_OUTS = "outs"
    PARAM_LOCKED = "locked"

    SCHEMA = {
        Optional(PARAM_MD5): Or(str, None),
        Optional(PARAM_CMD): Or(str, None),
        Optional(PARAM_WDIR): Or(str, None),
        Optional(PARAM_DEPS): Or(And(list, Schema([dependency.SCHEMA])), None),
        Optional(PARAM_OUTS): Or(And(list, Schema([output.SCHEMA])), None),
        Optional(PARAM_LOCKED): bool,
    }

    TAG_REGEX = r"^(?P<path>.*)@(?P<tag>[^\\/@:]*)$"

    def __init__(
        self,
        repo,
        path=None,
        cmd=None,
        wdir=os.curdir,
        deps=None,
        outs=None,
        md5=None,
        locked=False,
        tag=None,
        state=None,
    ):
        if deps is None:
            deps = []
        if outs is None:
            outs = []

        self.repo = repo
        self.path = path
        self.cmd = cmd
        self.wdir = wdir
        self.outs = outs
        self.deps = deps
        self.md5 = md5
        self.locked = locked
        self.tag = tag
        self._state = state or {}

    def __repr__(self):
        return "Stage: '{path}'".format(
            path=self.relpath if self.path else "No path")

    @property
    def relpath(self):
        return os.path.relpath(self.path)

    @property
    def is_data_source(self):
        """Whether the stage file was created with `dvc add` or `dvc import`"""
        return self.cmd is None

    @staticmethod
    def is_valid_filename(path):
        return (
            # path.endswith doesn't work for encoded unicode filenames on
            # Python 2 and since Stage.STAGE_FILE_SUFFIX is ascii then it is
            # not needed to decode the path from py2's str
            path[-len(Stage.STAGE_FILE_SUFFIX):] == Stage.STAGE_FILE_SUFFIX
            or os.path.basename(path) == Stage.STAGE_FILE)

    @staticmethod
    def is_stage_file(path):
        return os.path.isfile(path) and Stage.is_valid_filename(path)

    def changed_md5(self):
        return self.md5 != self._compute_md5()

    @property
    def is_callback(self):
        """
        A callback stage is always considered as changed,
        so it runs on every `dvc repro` call.
        """
        return not self.is_data_source and len(self.deps) == 0

    @property
    def is_import(self):
        """Whether the stage file was created with `dvc import`."""
        return not self.cmd and len(self.deps) == 1 and len(self.outs) == 1

    def _changed_deps(self):
        if self.locked:
            return False

        if self.is_callback:
            logger.warning(
                "Dvc file '{fname}' is a 'callback' stage "
                "(has a command and no dependencies) and thus always "
                "considered as changed.".format(fname=self.relpath))
            return True

        for dep in self.deps:
            status = dep.status()
            if status:
                logger.warning(
                    "Dependency '{dep}' of '{stage}' changed because it is "
                    "'{status}'.".format(dep=dep,
                                         stage=self.relpath,
                                         status=status[str(dep)]))
                return True

        return False

    def _changed_outs(self):
        for out in self.outs:
            status = out.status()
            if status:
                logger.warning(
                    "Output '{out}' of '{stage}' changed because it is "
                    "'{status}'".format(out=out,
                                        stage=self.relpath,
                                        status=status[str(out)]))
                return True

        return False

    def _changed_md5(self):
        if self.changed_md5():
            logger.warning("Dvc file '{}' changed.".format(self.relpath))
            return True
        return False

    def changed(self):
        ret = any(
            [self._changed_deps(),
             self._changed_outs(),
             self._changed_md5()])

        if ret:
            logger.warning("Stage '{}' changed.".format(self.relpath))
        else:
            logger.info("Stage '{}' didn't change.".format(self.relpath))

        return ret

    def remove_outs(self, ignore_remove=False, force=False):
        """Used mainly for `dvc remove --outs` and :func:`Stage.reproduce`."""
        for out in self.outs:
            if out.persist and not force:
                out.unprotect()
            else:
                logger.debug("Removing output '{out}' of '{stage}'.".format(
                    out=out, stage=self.relpath))
                out.remove(ignore_remove=ignore_remove)

    def unprotect_outs(self):
        for out in self.outs:
            out.unprotect()

    def remove(self, force=False):
        self.remove_outs(ignore_remove=True, force=force)
        os.unlink(self.path)

    def reproduce(self,
                  force=False,
                  dry=False,
                  interactive=False,
                  no_commit=False):
        if not self.changed() and not force:
            return None

        msg = ("Going to reproduce '{stage}'. "
               "Are you sure you want to continue?".format(stage=self.relpath))

        if interactive and not prompt.confirm(msg):
            raise DvcException("reproduction aborted by the user")

        logger.info("Reproducing '{stage}'".format(stage=self.relpath))

        self.run(dry=dry, no_commit=no_commit, force=force)

        logger.debug("'{stage}' was reproduced".format(stage=self.relpath))

        return self

    @staticmethod
    def validate(d, fname=None):
        from dvc.utils import convert_to_unicode

        try:
            Schema(Stage.SCHEMA).validate(convert_to_unicode(d))
        except SchemaError as exc:
            raise StageFileFormatError(fname, exc)

    @classmethod
    def _stage_fname(cls, fname, outs, add):
        if fname:
            return fname

        if not outs:
            return cls.STAGE_FILE

        out = outs[0]
        path_handler = out.remote.ospath

        fname = path_handler.basename(out.path) + cls.STAGE_FILE_SUFFIX

        fname = Stage._expand_to_path_on_add_local(add, fname, out,
                                                   path_handler)

        return fname

    @staticmethod
    def _expand_to_path_on_add_local(add, fname, out, path_handler):
        if (add and out.is_in_repo
                and not contains_symlink_up_to(out.path, out.repo.root_dir)):
            fname = path_handler.join(path_handler.dirname(out.path), fname)
        return fname

    @staticmethod
    def _check_stage_path(repo, path):
        assert repo is not None

        real_path = os.path.realpath(path)
        if not os.path.exists(real_path):
            raise StagePathNotFoundError(path)

        if not os.path.isdir(real_path):
            raise StagePathNotDirectoryError(path)

        proj_dir = os.path.realpath(repo.root_dir) + os.path.sep
        if not (real_path + os.path.sep).startswith(proj_dir):
            raise StagePathOutsideError(path)

    @property
    def is_cached(self):
        """
        Checks whether this stage has already been run and stored
        """
        from dvc.remote.local import RemoteLOCAL
        from dvc.remote.s3 import RemoteS3

        old = Stage.load(self.repo, self.path)
        if old._changed_outs():
            return False

        # NOTE: need to save checksums for deps in order to compare them
        # with what is written in the old stage.
        for dep in self.deps:
            dep.save()

        old_d = old.dumpd()
        new_d = self.dumpd()

        # NOTE: need to remove checksums from old dict in order to compare
        # it to the new one, since the new one doesn't have checksums yet.
        old_d.pop(self.PARAM_MD5, None)
        new_d.pop(self.PARAM_MD5, None)
        outs = old_d.get(self.PARAM_OUTS, [])
        for out in outs:
            out.pop(RemoteLOCAL.PARAM_CHECKSUM, None)
            out.pop(RemoteS3.PARAM_CHECKSUM, None)

        return old_d == new_d

    @staticmethod
    def create(
        repo=None,
        cmd=None,
        deps=None,
        outs=None,
        outs_no_cache=None,
        metrics=None,
        metrics_no_cache=None,
        fname=None,
        cwd=None,
        wdir=None,
        locked=False,
        add=False,
        overwrite=True,
        ignore_build_cache=False,
        remove_outs=False,
        validate_state=True,
        outs_persist=None,
        outs_persist_no_cache=None,
    ):
        if outs is None:
            outs = []
        if deps is None:
            deps = []
        if outs_no_cache is None:
            outs_no_cache = []
        if metrics is None:
            metrics = []
        if metrics_no_cache is None:
            metrics_no_cache = []
        if outs_persist is None:
            outs_persist = []
        if outs_persist_no_cache is None:
            outs_persist_no_cache = []

        # Backward compatibility for `cwd` option
        if wdir is None and cwd is not None:
            if fname is not None and os.path.basename(fname) != fname:
                raise StageFileBadNameError(
                    "stage file name '{fname}' may not contain subdirectories"
                    " if '-c|--cwd' (deprecated) is specified. Use '-w|--wdir'"
                    " along with '-f' to specify stage file path and working"
                    " directory.".format(fname=fname))
            wdir = cwd
        else:
            wdir = os.curdir if wdir is None else wdir

        stage = Stage(repo=repo, wdir=wdir, cmd=cmd, locked=locked)

        Stage._fill_stage_outputs(
            stage,
            outs,
            outs_no_cache,
            metrics,
            metrics_no_cache,
            outs_persist,
            outs_persist_no_cache,
        )
        stage.deps = dependency.loads_from(stage, deps)

        stage._check_circular_dependency()
        stage._check_duplicated_arguments()

        fname = Stage._stage_fname(fname, stage.outs, add=add)
        wdir = os.path.abspath(wdir)

        if cwd is not None:
            path = os.path.join(wdir, fname)
        else:
            path = os.path.abspath(fname)

        Stage._check_stage_path(repo, wdir)
        Stage._check_stage_path(repo, os.path.dirname(path))

        stage.wdir = wdir
        stage.path = path

        # NOTE: remove outs before we check build cache
        if remove_outs:
            logger.warning("--remove-outs is deprecated."
                           " It is now the default behavior,"
                           " so there's no need to use this option anymore.")
            stage.remove_outs(ignore_remove=False)
            logger.warning("Build cache is ignored when using --remove-outs.")
            ignore_build_cache = True
        else:
            stage.unprotect_outs()

        if validate_state:
            if os.path.exists(path):
                if not ignore_build_cache and stage.is_cached:
                    logger.info("Stage is cached, skipping.")
                    return None

                msg = (
                    "'{}' already exists. Do you wish to run the command and "
                    "overwrite it?".format(stage.relpath))

                if not overwrite and not prompt.confirm(msg):
                    raise StageFileAlreadyExistsError(stage.relpath)

                os.unlink(path)

        return stage

    @staticmethod
    def _fill_stage_outputs(
        stage,
        outs,
        outs_no_cache,
        metrics,
        metrics_no_cache,
        outs_persist,
        outs_persist_no_cache,
    ):
        stage.outs = output.loads_from(stage, outs, use_cache=True)
        stage.outs += output.loads_from(stage,
                                        metrics,
                                        use_cache=True,
                                        metric=True)
        stage.outs += output.loads_from(stage,
                                        outs_persist,
                                        use_cache=True,
                                        persist=True)
        stage.outs += output.loads_from(stage, outs_no_cache, use_cache=False)
        stage.outs += output.loads_from(stage,
                                        metrics_no_cache,
                                        use_cache=False,
                                        metric=True)
        stage.outs += output.loads_from(stage,
                                        outs_persist_no_cache,
                                        use_cache=False,
                                        persist=True)

    @staticmethod
    def _check_dvc_filename(fname):
        if not Stage.is_valid_filename(fname):
            raise StageFileBadNameError(
                "bad stage filename '{}'. Stage files should be named"
                " 'Dvcfile' or have a '.dvc' suffix (e.g. '{}.dvc').".format(
                    os.path.relpath(fname), os.path.basename(fname)))

    @staticmethod
    def _check_file_exists(repo, fname):
        if not repo.tree.exists(fname):
            raise StageFileDoesNotExistError(fname)

    @staticmethod
    def _check_isfile(repo, fname):
        if not repo.tree.isfile(fname):
            raise StageFileIsNotDvcFileError(fname)

    @classmethod
    def _get_path_tag(cls, s):
        regex = re.compile(cls.TAG_REGEX)
        match = regex.match(s)
        if not match:
            return s, None
        return match.group("path"), match.group("tag")

    @staticmethod
    def load(repo, fname):
        fname, tag = Stage._get_path_tag(fname)

        # it raises the proper exceptions by priority:
        # 1. when the file doesn't exist
        # 2. filename is not a dvc filename
        # 3. path doesn't represent a regular file
        Stage._check_file_exists(repo, fname)
        Stage._check_dvc_filename(fname)
        Stage._check_isfile(repo, fname)

        with repo.tree.open(fname) as fd:
            d = load_stage_fd(fd, fname)
        # Making a deepcopy since the original structure
        # loses keys in deps and outs load
        state = copy.deepcopy(d)

        Stage.validate(d, fname=os.path.relpath(fname))
        path = os.path.abspath(fname)

        stage = Stage(
            repo=repo,
            path=path,
            wdir=os.path.abspath(
                os.path.join(os.path.dirname(path),
                             d.get(Stage.PARAM_WDIR, "."))),
            cmd=d.get(Stage.PARAM_CMD),
            md5=d.get(Stage.PARAM_MD5),
            locked=d.get(Stage.PARAM_LOCKED, False),
            tag=tag,
            state=state,
        )

        stage.deps = dependency.loadd_from(stage, d.get(Stage.PARAM_DEPS, []))
        stage.outs = output.loadd_from(stage, d.get(Stage.PARAM_OUTS, []))

        return stage

    def dumpd(self):
        from dvc.remote.local import RemoteLOCAL

        return {
            key: value
            for key, value in {
                Stage.PARAM_MD5:
                self.md5,
                Stage.PARAM_CMD:
                self.cmd,
                Stage.PARAM_WDIR:
                RemoteLOCAL.unixpath(
                    os.path.relpath(self.wdir, os.path.dirname(self.path))),
                Stage.PARAM_LOCKED:
                self.locked,
                Stage.PARAM_DEPS: [d.dumpd() for d in self.deps],
                Stage.PARAM_OUTS: [o.dumpd() for o in self.outs],
            }.items() if value
        }

    def dump(self):
        fname = self.path

        self._check_dvc_filename(fname)

        logger.info("Saving information to '{file}'.".format(
            file=os.path.relpath(fname)))
        d = self.dumpd()
        apply_diff(d, self._state)
        dump_stage_file(fname, self._state)

        self.repo.scm.track_file(os.path.relpath(fname))

    def _compute_md5(self):
        from dvc.output.base import OutputBase

        d = self.dumpd()

        # NOTE: removing md5 manually in order to not affect md5s in deps/outs
        if self.PARAM_MD5 in d.keys():
            del d[self.PARAM_MD5]

        # Ignore the wdir default value. In this case stage file w/o
        # wdir has the same md5 as a file with the default value specified.
        # It's important for backward compatibility with pipelines that
        # didn't have WDIR in their stage files.
        if d.get(self.PARAM_WDIR) == ".":
            del d[self.PARAM_WDIR]

        # NOTE: excluding parameters that don't affect the state of the
        # pipeline. Not excluding `OutputLOCAL.PARAM_CACHE`, because if
        # it has changed, we might not have that output in our cache.
        m = dict_md5(
            d,
            exclude=[
                self.PARAM_LOCKED,
                OutputBase.PARAM_METRIC,
                OutputBase.PARAM_TAGS,
                OutputBase.PARAM_PERSIST,
            ],
        )
        logger.debug("Computed stage '{}' md5: '{}'".format(self.relpath, m))
        return m

    def save(self):
        for dep in self.deps:
            dep.save()

        for out in self.outs:
            out.save()

        self.md5 = self._compute_md5()

    @staticmethod
    def _changed_entries(entries):
        ret = []
        for entry in entries:
            if entry.checksum and entry.changed_checksum():
                ret.append(entry.rel_path)
        return ret

    def check_can_commit(self, force):
        changed_deps = self._changed_entries(self.deps)
        changed_outs = self._changed_entries(self.outs)

        if changed_deps or changed_outs or self.changed_md5():
            msg = ("dependencies {}".format(changed_deps)
                   if changed_deps else "")
            msg += " and " if (changed_deps and changed_outs) else ""
            msg += "outputs {}".format(changed_outs) if changed_outs else ""
            msg += "md5" if not (changed_deps or changed_outs) else ""
            msg += " of '{}' changed. Are you sure you commit it?".format(
                self.relpath)
            if not force and not prompt.confirm(msg):
                raise StageCommitError(
                    "unable to commit changed '{}'. Use `-f|--force` to "
                    "force.`".format(self.relpath))
            self.save()

    def commit(self):
        for out in self.outs:
            out.commit()

    def _check_missing_deps(self):
        missing = [dep for dep in self.deps if not dep.exists]

        if any(missing):
            raise MissingDep(missing)

    @staticmethod
    def _warn_if_fish(executable):  # pragma: no cover
        if (executable is None
                or os.path.basename(os.path.realpath(executable)) != "fish"):
            return

        logger.warning(
            "DVC detected that you are using fish as your default "
            "shell. Be aware that it might cause problems by overwriting "
            "your current environment variables with values defined "
            "in '.fishrc', which might affect your command. See "
            "https://github.com/iterative/dvc/issues/1307. ")

    def _check_circular_dependency(self):
        from dvc.exceptions import CircularDependencyError

        circular_dependencies = set(d.path for d in self.deps) & set(
            o.path for o in self.outs)

        if circular_dependencies:
            raise CircularDependencyError(circular_dependencies.pop())

    def _check_duplicated_arguments(self):
        from dvc.exceptions import ArgumentDuplicationError
        from collections import Counter

        path_counts = Counter(edge.path for edge in self.deps + self.outs)

        for path, occurrence in path_counts.items():
            if occurrence > 1:
                raise ArgumentDuplicationError(path)

    def _run(self):
        self._check_missing_deps()
        executable = os.getenv("SHELL") if os.name != "nt" else None
        self._warn_if_fish(executable)

        p = subprocess.Popen(
            self.cmd,
            cwd=self.wdir,
            shell=True,
            env=fix_env(os.environ),
            executable=executable,
        )
        p.communicate()

        if p.returncode != 0:
            raise StageCmdFailedError(self)

    def run(self, dry=False, resume=False, no_commit=False, force=False):
        if (self.cmd or self.is_import) and not self.locked and not dry:
            self.remove_outs(ignore_remove=False, force=False)

        if self.locked:
            logger.info("Verifying outputs in locked stage '{stage}'".format(
                stage=self.relpath))
            if not dry:
                self.check_missing_outputs()

        elif self.is_import:
            logger.info("Importing '{dep}' -> '{out}'".format(
                dep=self.deps[0].path, out=self.outs[0].path))
            if not dry:
                if self._already_cached() and not force:
                    self.outs[0].checkout()
                else:
                    self.deps[0].download(self.outs[0].path_info,
                                          resume=resume)

        elif self.is_data_source:
            msg = "Verifying data sources in '{}'".format(self.relpath)
            logger.info(msg)
            if not dry:
                self.check_missing_outputs()

        else:
            logger.info("Running command:\n\t{}".format(self.cmd))
            if not dry:
                if (not force and not self.is_callback
                        and self._already_cached()):
                    self.checkout()
                else:
                    self._run()

        if not dry:
            self.save()
            if not no_commit:
                self.commit()

    def check_missing_outputs(self):
        paths = [
            out.path if out.scheme != "local" else out.rel_path
            for out in self.outs if not out.exists
        ]

        if paths:
            raise MissingDataSource(paths)

    def checkout(self, force=False, progress_callback=None):
        for out in self.outs:
            out.checkout(force=force,
                         tag=self.tag,
                         progress_callback=progress_callback)

    @staticmethod
    def _status(entries):
        ret = {}

        for entry in entries:
            ret.update(entry.status())

        return ret

    def status(self):
        ret = []

        if not self.locked:
            deps_status = self._status(self.deps)
            if deps_status:
                ret.append({"changed deps": deps_status})

        outs_status = self._status(self.outs)
        if outs_status:
            ret.append({"changed outs": outs_status})

        if self.changed_md5():
            ret.append("changed checksum")

        if self.is_callback:
            ret.append("always changed")

        if ret:
            return {self.relpath: ret}

        return {}

    def _already_cached(self):
        return (not self.changed_md5()
                and all(not dep.changed() for dep in self.deps)
                and all(not out.changed_cache() if out.use_cache
                        else not out.changed() for out in self.outs))

    def get_all_files_number(self):
        return sum(out.get_files_number() for out in self.outs)
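
As a small illustration of _get_path_tag above, TAG_REGEX splits an optional
trailing '@tag' suffix off a stage path:

import re

match = re.match(Stage.TAG_REGEX, 'model.pkl.dvc@v1')
print(match.group('path'), match.group('tag'))  # -> model.pkl.dvc v1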
Example #27
    def __init__(self):
        self.version = Schema(int, ignore_extra_keys=True)
        self.script = Schema(Or(str, [str]))

        self.v1_app_info = {
            "id": str,
            "name": str,
            "icon": str,
            "version": str,
            "exec": str,
            Optional("exec_args"): str,
        }

        self.v1_files = {
            Optional("include"): [str],
            Optional("exclude"): [str],
        }

        self.v1_runtime = {
            Optional("debug"): bool,
            Optional("version"): str,
            Optional("path_mappings"): [str],
            Optional("env"): {
                str: Or(str, int, bool)
            },
        }

        self.v1_tests = {
            str: {
                "image": str,
                "command": str,
                Optional("use_host_x"): bool,
                Optional("env"): {
                    str: Or(str, int, bool)
                },
            }
        }

        self.v1_apt = Schema({
            "arch":
            Or(str, [str]),
            "sources": [{
                "sourceline": str,
                Optional("key_url"): str
            }],
            "include": [str],
            Optional("exclude"): [str],
            Optional("allow_unauthenticated"):
            bool,
        })
        self.v1_pacman = Schema({
            Optional("Architecture"):
            Or("auto", "x86_64", "i686", "aarch64"),
            Optional("repositories"): {
                str: [str]
            },
            Optional("keyrings"): [str],
            Optional("options"): {
                str: str
            },
            "include": [str],
            Optional("exclude"): [str],
        })

        self.v1_appdir = Schema({
            "path": str,
            "app_info": self.v1_app_info,
            Optional("files"): self.v1_files,
            Optional("apt"): self.v1_apt,
            Optional("pacman"): self.v1_pacman,
            Optional("runtime"): self.v1_runtime,
            Optional("test"): self.v1_tests,
            Optional("before_bundle"): self.script,
            Optional("after_bundle"): self.script,
            Optional("before_runtime"): self.script,
            Optional("after_runtime"): self.script,
        })

        self.v1_appimage = Schema({
            "arch": str,
            Optional("update-information"): str,
            Optional("sign-key"): str,
            Optional("file_name"): str,
        })

        self.v1 = Schema({
            "version": int,
            Optional("script"): self.script,
            "AppDir": self.v1_appdir,
            "AppImage": self.v1_appimage,
        })
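
A minimal recipe that satisfies the v1 schema above would look like this
(a sketch with placeholder values; the excerpt does not show the surrounding
class name, so the validation call is only indicated):

recipe = {
    'version': 1,
    'AppDir': {
        'path': 'AppDir',
        'app_info': {'id': 'org.example.app', 'name': 'app',
                     'icon': 'utilities-terminal', 'version': '1.0.0',
                     'exec': 'usr/bin/app'},
    },
    'AppImage': {'arch': 'x86_64'},
}
# e.g. RecipeSchemas().v1.validate(recipe), assuming the class above is
# instantiated as RecipeSchemas (hypothetical name).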
Example #28
try:
    from urllib.parse import urlparse
except ImportError:
    from urlparse import urlparse

import json

from kafka import KafkaProducer
from schema import Schema, Optional

login_schema = Schema({
    "type": "LOGIN_SUCCESSFUL",
    "payload": {
        "user_email": str,
        "app_name": str,
        "time": str,
        Optional("environment"): str
    }
})


class Notification:
    def __init__(self, broker_url):
        self.schema_list = {"LOGIN_SUCCESSFUL": login_schema}
        self.topic = "arc-rte-notifications"

        self.__producer = KafkaProducer(
            bootstrap_servers=self.get_kafka_brokers(broker_url),
            value_serializer=lambda v: json.dumps(v).encode("utf-8"),
        )

    def get_kafka_brokers(self, brokers):
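
A quick check of login_schema defined above (sketch): the literal 'type' value
and all three payload keys are required, while 'environment' may be omitted.

login_schema.validate({
    'type': 'LOGIN_SUCCESSFUL',
    'payload': {'user_email': 'user@example.com',
                'app_name': 'arc-rte',
                'time': '2020-01-01T00:00:00Z'},
})  # passes; adding 'environment': 'prod' would also pass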
Example #29
from schema import And, Optional, Schema

point = Schema({
    "type": "point",
    "position": And(int, lambda n: n >= 0),
    Optional("uncertain"): And(bool, True),
})

location = Schema({
    "type": str,
    "start": point,
    "end": point,
    Optional("strand"): int
})

feature = Schema({
    "type": str,
    "id": str,
    Optional("location"): location,
    Optional("features"): list,
    Optional("qualifiers"): dict,
})


def validate(current):
    feature.validate(current)
    if current.get("features"):
        for sub_feature in current["features"]:
            validate(sub_feature)
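
Since validate recurses through nested features, a document like the following
passes end to end (a sketch):

validate({
    'type': 'gene',
    'id': 'gene-1',
    'location': {'type': 'range',
                 'start': {'type': 'point', 'position': 0},
                 'end': {'type': 'point', 'position': 42}},
    'features': [{'type': 'exon', 'id': 'exon-1'}],
})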
Example #30
def main():
    options = docopt(__doc__, version=VERSION)

    earlyoptionSchema = Schema({
        Optional('--log'): Or(None, And(
                                str,
                                Use(str.upper),
                                validateLogLevel,
                                error="Log level not recognized")
                              ),
        Optional('--quiet', default=False): bool,
        Optional('--debug'): bool,
        Optional('--url-only', default=False): bool,
        Optional('--batch', default=False): bool,
        Optional('--withNFO', default=False): bool,
        Optional('--withThumbnail', default=False): bool,
        Optional('--withName', default=False): bool,
        Optional('--withDescription', default=False): bool,
        Optional('--withTags', default=False): bool,
        Optional('--withPlaylist', default=False): bool,
        Optional('--withPublishAt', default=False): bool,
        Optional('--withPlatform', default=False): bool,
        Optional('--withCategory', default=False): bool,
        Optional('--withLanguage', default=False): bool,
        Optional('--withChannel', default=False): bool,
        # This allows all other options to pass through for further use: https://github.com/keleshev/schema#extra-keys
        object: object
    })

    schema = Schema({
        '--file': And(str, os.path.exists, validateVideo, error='file is not supported, please use mp4'),
        # Strict option checks - at the moment, Schema needs to check Hook and Optional separately
        # (see the standalone Hook sketch after this example) #
        Hook('--name', handler=_optionnalOrStrict): object,
        Hook('--description', handler=_optionnalOrStrict): object,
        Hook('--tags', handler=_optionnalOrStrict): object,
        Hook('--category', handler=_optionnalOrStrict): object,
        Hook('--language', handler=_optionnalOrStrict): object,
        Hook('--platform', handler=_optionnalOrStrict): object,
        Hook('--publishAt', handler=_optionnalOrStrict): object,
        Hook('--thumbnail', handler=_optionnalOrStrict): object,
        Hook('--channel', handler=_optionnalOrStrict): object,
        Hook('--playlist', handler=_optionnalOrStrict): object,
        # Validate checks #
        Optional('--name'): Or(None, And(
                                str,
                                lambda x: not x.isdigit(),
                                error="The video name should be a string")
                               ),
        Optional('--description'): Or(None, And(
                                        str,
                                        lambda x: not x.isdigit(),
                                        error="The video description should be a string")
                                      ),
        Optional('--tags'): Or(None, And(
                                    str,
                                    lambda x: not x.isdigit(),
                                    error="Tags should be a string")
                               ),
        Optional('--category'): Or(None, And(
                                    str,
                                    validateCategory,
                                    error="Category not recognized, please see --help")
                                   ),
        Optional('--language'): Or(None, And(
                                    str,
                                    validateLanguage,
                                    error="Language not recognized, please see --help")
                                   ),
        Optional('--privacy'): Or(None, And(
                                    str,
                                    validatePrivacy,
                                    error="Please use recognized privacy between public, unlisted or private")
                                  ),
        Optional('--nfo'): Or(None, str),
        Optional('--platform'): Or(None, And(str, validatePlatform, error="Sorry, upload platform not supported")),
        Optional('--publishAt'): Or(None, And(
                                    str,
                                    validatePublish,
                                    error="DATE should be the form YYYY-MM-DDThh:mm:ss and has to be in the future")
                                    ),
        Optional('--peertubeAt'): Or(None, And(
                                    str,
                                    validatePublish,
                                    error="DATE should be the form YYYY-MM-DDThh:mm:ss and has to be in the future")
                                    ),
        Optional('--youtubeAt'): Or(None, And(
                                    str,
                                    validatePublish,
                                    error="DATE should be the form YYYY-MM-DDThh:mm:ss and has to be in the future")
                                    ),
        Optional('--cca'): bool,
        Optional('--disable-comments'): bool,
        Optional('--nsfw'): bool,
        Optional('--thumbnail'): Or(None, And(
                                    str, validateThumbnail, error='thumbnail is not supported, please use jpg/jpeg'),
                                    ),
        Optional('--channel'): Or(None, str),
        Optional('--channelCreate'): bool,
        Optional('--playlist'): Or(None, str),
        Optional('--playlistCreate'): bool,
        '--help': bool,
        '--version': bool,
        # This allows all other options to pass through for further use: https://github.com/keleshev/schema#extra-keys
        object: object
    })
    # We need to validate the early options first, as the withNFO and log options take priority
    try:
        options = earlyoptionSchema.validate(options)
        configureLogs(options)
    except SchemaError as e:
        logger.critical(e)
        exit(1)

    if options.get('--url-only') or options.get('--batch'):
        configureStdoutLogs()

    options = utils.parseNFO(options)

    # Once the NFO is loaded, we need to revalidate the strict options in case some values came from the NFO
    try:
        options = earlyoptionSchema.validate(options)
    except SchemaError as e:
        logger.critical(e)
        exit(1)

    if not options.get('--thumbnail'):
        options = utils.searchThumbnail(options)

    try:
        options = schema.validate(options)
    except SchemaError as e:
        logger.critical(e)
        exit(1)

    logger.debug("Python " + sys.version)
    logger.debug(options)

    if options.get('--platform') is None or "peertube" in options.get('--platform'):
        pt_upload.run(options)
    if options.get('--platform') is None or "youtube" in options.get('--platform'):
        yt_upload.run(options)
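
# A minimal, self-contained sketch (not from the original project) of the
# Hook-plus-Optional pattern used above. A Hook key is checked before the
# regular keys, so its handler fires whenever the key is present, and the
# matching Optional then validates the value as usual (handler and option
# names are invented for illustration).
from schema import Hook, Optional, Schema

def _warn_deprecated(key, data, error):
    print("warning: option {0} is deprecated".format(key))

deprecation_schema = Schema({
    Hook('--old-flag', handler=_warn_deprecated): object,
    Optional('--old-flag'): bool,
    object: object,  # let all other options pass through
})

deprecation_schema.validate({'--old-flag': True})  # prints the warning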