Example #1
class Config(object):  # pylint: disable=too-many-instance-attributes
    """Class that manages configuration files for a dvc repo.

    Args:
        dvc_dir (str): optional path to the `.dvc` directory, which is used to
            access repo-specific configs like .dvc/config and
            .dvc/config.local.
        validate (bool): optional flag to tell dvc whether it should validate
            the config or just load it as is. 'True' by default.


    Raises:
        ConfigError: thrown when config has an invalid format.
    """

    APPNAME = "dvc"
    APPAUTHOR = "iterative"

    # NOTE: used internally in RemoteLOCAL to know the config
    # location that the url should be resolved relative to.
    PRIVATE_CWD = "_cwd"

    CONFIG = "config"
    CONFIG_LOCAL = "config.local"

    CREDENTIALPATH = "credentialpath"

    LEVEL_LOCAL = 0
    LEVEL_REPO = 1
    LEVEL_GLOBAL = 2
    LEVEL_SYSTEM = 3

    SECTION_CORE = "core"
    SECTION_CORE_LOGLEVEL = "loglevel"
    SECTION_CORE_LOGLEVEL_SCHEMA = All(
        Lower, Choices("info", "debug", "warning", "error")
    )
    SECTION_CORE_REMOTE = "remote"
    SECTION_CORE_INTERACTIVE = "interactive"
    SECTION_CORE_ANALYTICS = "analytics"
    SECTION_CORE_CHECKSUM_JOBS = "checksum_jobs"
    SECTION_CORE_HARDLINK_LOCK = "hardlink_lock"

    SECTION_CACHE = "cache"
    SECTION_CACHE_DIR = "dir"
    SECTION_CACHE_TYPE = "type"
    SECTION_CACHE_PROTECTED = "protected"
    SECTION_CACHE_SHARED = "shared"
    SECTION_CACHE_SHARED_SCHEMA = All(Lower, Choices("group"))
    SECTION_CACHE_LOCAL = "local"
    SECTION_CACHE_S3 = "s3"
    SECTION_CACHE_GS = "gs"
    SECTION_CACHE_SSH = "ssh"
    SECTION_CACHE_HDFS = "hdfs"
    SECTION_CACHE_AZURE = "azure"
    SECTION_CACHE_SLOW_LINK_WARNING = "slow_link_warning"
    SECTION_CACHE_SCHEMA = {
        SECTION_CACHE_LOCAL: str,
        SECTION_CACHE_S3: str,
        SECTION_CACHE_GS: str,
        SECTION_CACHE_HDFS: str,
        SECTION_CACHE_SSH: str,
        SECTION_CACHE_AZURE: str,
        SECTION_CACHE_DIR: str,
        SECTION_CACHE_TYPE: supported_cache_type,
        Optional(SECTION_CACHE_PROTECTED, default=False): Bool,
        SECTION_CACHE_SHARED: SECTION_CACHE_SHARED_SCHEMA,
        PRIVATE_CWD: str,
        Optional(SECTION_CACHE_SLOW_LINK_WARNING, default=True): Bool,
    }

    SECTION_CORE_SCHEMA = {
        SECTION_CORE_LOGLEVEL: SECTION_CORE_LOGLEVEL_SCHEMA,
        SECTION_CORE_REMOTE: Lower,
        Optional(SECTION_CORE_INTERACTIVE, default=False): Bool,
        Optional(SECTION_CORE_ANALYTICS, default=True): Bool,
        SECTION_CORE_CHECKSUM_JOBS: All(Coerce(int), Range(1)),
        Optional(SECTION_CORE_HARDLINK_LOCK, default=False): Bool,
    }

    # aws specific options
    SECTION_AWS_CREDENTIALPATH = CREDENTIALPATH
    SECTION_AWS_ENDPOINT_URL = "endpointurl"
    SECTION_AWS_LIST_OBJECTS = "listobjects"
    SECTION_AWS_REGION = "region"
    SECTION_AWS_PROFILE = "profile"
    SECTION_AWS_USE_SSL = "use_ssl"
    SECTION_AWS_SSE = "sse"
    SECTION_AWS_ACL = "acl"
    SECTION_AWS_GRANT_READ = "grant_read"
    SECTION_AWS_GRANT_READ_ACP = "grant_read_acp"
    SECTION_AWS_GRANT_WRITE_ACP = "grant_write_acp"
    SECTION_AWS_GRANT_FULL_CONTROL = "grant_full_control"

    # gcp specific options
    SECTION_GCP_CREDENTIALPATH = CREDENTIALPATH
    SECTION_GCP_PROJECTNAME = "projectname"

    # azure specific option
    SECTION_AZURE_CONNECTION_STRING = "connection_string"

    # Alibabacloud oss options
    SECTION_OSS_ACCESS_KEY_ID = "oss_key_id"
    SECTION_OSS_ACCESS_KEY_SECRET = "oss_key_secret"
    SECTION_OSS_ENDPOINT = "oss_endpoint"

    # GDrive options
    SECTION_GDRIVE_CLIENT_ID = "gdrive_client_id"
    SECTION_GDRIVE_CLIENT_SECRET = "gdrive_client_secret"
    SECTION_GDRIVE_USER_CREDENTIALS_FILE = "gdrive_user_credentials_file"

    SECTION_REMOTE_REGEX = r'^\s*remote\s*"(?P<name>.*)"\s*$'
    SECTION_REMOTE_FMT = 'remote "{}"'
    SECTION_REMOTE_URL = "url"
    SECTION_REMOTE_USER = "user"
    SECTION_REMOTE_PORT = "port"
    SECTION_REMOTE_KEY_FILE = "keyfile"
    SECTION_REMOTE_TIMEOUT = "timeout"
    SECTION_REMOTE_PASSWORD = "password"
    SECTION_REMOTE_ASK_PASSWORD = "ask_password"
    SECTION_REMOTE_GSS_AUTH = "gss_auth"
    SECTION_REMOTE_NO_TRAVERSE = "no_traverse"
    SECTION_REMOTE_SCHEMA = {
        Required(SECTION_REMOTE_URL): str,
        SECTION_AWS_REGION: str,
        SECTION_AWS_PROFILE: str,
        SECTION_AWS_CREDENTIALPATH: str,
        SECTION_AWS_ENDPOINT_URL: str,
        Optional(SECTION_AWS_LIST_OBJECTS, default=False): Bool,
        Optional(SECTION_AWS_USE_SSL, default=True): Bool,
        SECTION_AWS_SSE: str,
        SECTION_AWS_ACL: str,
        SECTION_AWS_GRANT_READ: str,
        SECTION_AWS_GRANT_READ_ACP: str,
        SECTION_AWS_GRANT_WRITE_ACP: str,
        SECTION_AWS_GRANT_FULL_CONTROL: str,
        SECTION_GCP_PROJECTNAME: str,
        SECTION_CACHE_TYPE: supported_cache_type,
        Optional(SECTION_CACHE_PROTECTED, default=False): Bool,
        SECTION_REMOTE_USER: str,
        SECTION_REMOTE_PORT: Coerce(int),
        SECTION_REMOTE_KEY_FILE: str,
        SECTION_REMOTE_TIMEOUT: Coerce(int),
        SECTION_REMOTE_PASSWORD: str,
        SECTION_REMOTE_ASK_PASSWORD: Bool,
        SECTION_REMOTE_GSS_AUTH: Bool,
        SECTION_AZURE_CONNECTION_STRING: str,
        SECTION_OSS_ACCESS_KEY_ID: str,
        SECTION_OSS_ACCESS_KEY_SECRET: str,
        SECTION_OSS_ENDPOINT: str,
        SECTION_GDRIVE_CLIENT_ID: str,
        SECTION_GDRIVE_CLIENT_SECRET: str,
        SECTION_GDRIVE_USER_CREDENTIALS_FILE: str,
        PRIVATE_CWD: str,
        Optional(SECTION_REMOTE_NO_TRAVERSE, default=True): Bool,
    }

    SECTION_STATE = "state"
    SECTION_STATE_ROW_LIMIT = "row_limit"
    SECTION_STATE_ROW_CLEANUP_QUOTA = "row_cleanup_quota"
    SECTION_STATE_SCHEMA = {
        SECTION_STATE_ROW_LIMIT: All(Coerce(int), Range(1)),
        SECTION_STATE_ROW_CLEANUP_QUOTA: All(Coerce(int), Range(0, 100)),
    }

    SCHEMA = {
        Optional(SECTION_CORE, default={}): SECTION_CORE_SCHEMA,
        Match(SECTION_REMOTE_REGEX): SECTION_REMOTE_SCHEMA,
        Optional(SECTION_CACHE, default={}): SECTION_CACHE_SCHEMA,
        Optional(SECTION_STATE, default={}): SECTION_STATE_SCHEMA,
    }
    COMPILED_SCHEMA = Schema(SCHEMA)

    def __init__(self, dvc_dir=None, validate=True):
        self.dvc_dir = dvc_dir
        self.validate = validate

        if not dvc_dir:
            try:
                from dvc.repo import Repo

                self.dvc_dir = Repo.find_dvc_dir()
            except NotDvcRepoError:
                self.dvc_dir = None
        else:
            self.dvc_dir = os.path.abspath(os.path.realpath(dvc_dir))

        self.load()

    @staticmethod
    def get_global_config_dir():
        """Returns global config location. E.g. ~/.config/dvc/config.

        Returns:
            str: path to the global config directory.
        """
        from appdirs import user_config_dir

        return user_config_dir(
            appname=Config.APPNAME, appauthor=Config.APPAUTHOR
        )

    @staticmethod
    def get_system_config_dir():
        """Returns system config location. E.g. /etc/dvc.conf.

        Returns:
            str: path to the system config directory.
        """
        from appdirs import site_config_dir

        return site_config_dir(
            appname=Config.APPNAME, appauthor=Config.APPAUTHOR
        )

    @staticmethod
    def init(dvc_dir):
        """Initializes dvc config.

        Args:
            dvc_dir (str): path to .dvc directory.

        Returns:
            dvc.config.Config: config object.
        """
        config_file = os.path.join(dvc_dir, Config.CONFIG)
        open(config_file, "w+").close()
        return Config(dvc_dir)

    def _resolve_cache_path(self, config):
        cache = config.get(self.SECTION_CACHE)
        if cache is None:
            return

        cache_dir = cache.get(self.SECTION_CACHE_DIR)
        if cache_dir is None:
            return

        cache[self.PRIVATE_CWD] = os.path.dirname(config.filename)

    def _resolve_paths(self, config):
        if config.filename is None:
            return config

        ret = copy.deepcopy(config)
        self._resolve_cache_path(ret)

        for section in ret.values():
            if self.SECTION_REMOTE_URL not in section.keys():
                continue

            section[self.PRIVATE_CWD] = os.path.dirname(ret.filename)

        return ret

    def _load_configs(self):
        system_config_file = os.path.join(
            self.get_system_config_dir(), self.CONFIG
        )

        global_config_file = os.path.join(
            self.get_global_config_dir(), self.CONFIG
        )

        self._system_config = configobj.ConfigObj(system_config_file)
        self._global_config = configobj.ConfigObj(global_config_file)
        self._repo_config = configobj.ConfigObj()
        self._local_config = configobj.ConfigObj()

        if not self.dvc_dir:
            return

        config_file = os.path.join(self.dvc_dir, self.CONFIG)
        config_local_file = os.path.join(self.dvc_dir, self.CONFIG_LOCAL)

        self._repo_config = configobj.ConfigObj(config_file)
        self._local_config = configobj.ConfigObj(config_local_file)

    @property
    def config_local_file(self):
        return self._local_config.filename

    @property
    def config_file(self):
        return self._repo_config.filename

    def load(self):
        """Loads config from all the config files.

        Raises:
            dvc.config.ConfigError: thrown if config has invalid format.
        """
        self._load_configs()

        self.config = configobj.ConfigObj()
        for c in [
            self._system_config,
            self._global_config,
            self._repo_config,
            self._local_config,
        ]:
            c = self._resolve_paths(c)
            c = self._lower(c)
            self.config.merge(c)

        if not self.validate:
            return

        d = self.config.dict()
        try:
            d = self.COMPILED_SCHEMA(d)
        except Invalid as exc:
            raise ConfigError(str(exc), cause=exc)
        self.config = configobj.ConfigObj(d, write_empty_values=True)

    def save(self, config=None):
        """Saves config to config files.

        Raises:
            dvc.config.ConfigError: thrown if failed to write config file.
        """
        if config is not None:
            clist = [config]
        else:
            clist = [
                self._system_config,
                self._global_config,
                self._repo_config,
                self._local_config,
            ]

        for conf in clist:
            self._save(conf)

        self.load()

    @staticmethod
    def _save(config):
        if config.filename is None:
            return

        logger.debug("Writing '{}'.".format(config.filename))
        dname = os.path.dirname(os.path.abspath(config.filename))
        try:
            os.makedirs(dname)
        except OSError as exc:
            if exc.errno != errno.EEXIST:
                raise
        config.write()

    def unset(self, section, opt=None, level=None, force=False):
        """Unsets specified option and/or section in the config.

        Args:
            section (str): section name.
            opt (str): optional option name.
            level (int): config level to use.
            force (bool): don't error-out even if section doesn't exist. False
                by default.

        Raises:
            dvc.config.ConfigError: thrown if section doesn't exist and
                `force != True`.
        """
        config = self.get_configobj(level)

        if section not in config.keys():
            if force:
                return
            raise ConfigError("section '{}' doesn't exist".format(section))

        if opt:
            if opt not in config[section].keys():
                if force:
                    return
                raise ConfigError(
                    "option '{}.{}' doesn't exist".format(section, opt)
                )
            del config[section][opt]

            if not config[section]:
                del config[section]
        else:
            del config[section]

        self.save(config)

    def set(self, section, opt, value, level=None, force=True):
        """Sets specified option in the config.

        Args:
            section (str): section name.
            opt (str): option name.
            value: value to set option to.
            level (int): config level to use.
            force (bool): set option even if section already exists. True by
                default.

        Raises:
            dvc.config.ConfigError: thrown if section already exists and
                `force != True`.

        """
        config = self.get_configobj(level)

        if section not in config.keys():
            config[section] = {}
        elif not force:
            raise ConfigError(
                "Section '{}' already exists. Use `-f|--force` to overwrite "
                "section with new value.".format(section)
            )

        config[section][opt] = value
        self.save(config)

    def get(self, section, opt=None, level=None):
        """Return option value from the config.

        Args:
            section (str): section name.
            opt (str): option name.
            level (int): config level to use.

        Returns:
            value (str, int): option value.
        """
        config = self.get_configobj(level)

        if section not in config.keys():
            raise ConfigError("section '{}' doesn't exist".format(section))

        if opt not in config[section].keys():
            raise ConfigError(
                "option '{}.{}' doesn't exist".format(section, opt)
            )

        return config[section][opt]

    @staticmethod
    def _lower(config):
        new_config = configobj.ConfigObj()
        for s_key, s_value in config.items():
            new_s = {}
            for key, value in s_value.items():
                new_s[key.lower()] = str(value)
            new_config[s_key.lower()] = new_s
        return new_config

    def get_configobj(self, level):
        configs = {
            self.LEVEL_LOCAL: self._local_config,
            self.LEVEL_REPO: self._repo_config,
            self.LEVEL_GLOBAL: self._global_config,
            self.LEVEL_SYSTEM: self._system_config,
        }

        return configs.get(level, self._repo_config)

    def list_options(self, section_regex, option, level=None):
        ret = {}
        config = self.get_configobj(level)
        for section in config.keys():
            r = re.match(section_regex, section)
            if r:
                name = r.group("name")
                value = config[section].get(option, "")
                ret[name] = value
        return ret
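
A minimal usage sketch for the Config class above (hypothetical paths and values, not part of the original example; assumes the `.dvc` directory exists):

config = Config(dvc_dir=".dvc")  # loads and validates all config levels

# Write an option into the repo-level config file (.dvc/config).
config.set("core", "analytics", "false", level=Config.LEVEL_REPO)

# Read it back from the same level; a missing section raises ConfigError.
assert config.get("core", "analytics", level=Config.LEVEL_REPO) == "false"

# Remove it again; force=True suppresses the "doesn't exist" error.
config.unset("core", opt="analytics", level=Config.LEVEL_REPO, force=True)
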
Example #2
from six import text_type
from taskgraph.loader.single_dep import schema
from taskgraph.transforms.base import TransformSequence
from taskgraph.util.attributes import copy_attributes_from_dependent_job
from taskgraph.util.scriptworker import (
    get_signing_cert_scope_per_platform, )
from taskgraph.util.treeherder import inherit_treeherder_from_dep
from taskgraph.transforms.task import task_description_schema
from voluptuous import Required, Optional

transforms = TransformSequence()

signing_description_schema = schema.extend({
    Required('depname', default='repackage'):
    text_type,
    Optional('label'):
    text_type,
    Optional('extra'):
    object,
    Optional('shipping-product'):
    task_description_schema['shipping-product'],
    Optional('shipping-phase'):
    task_description_schema['shipping-phase'],
})

transforms.add_validate(signing_description_schema)


@transforms.add
def make_signing_description(config, jobs):
    for job in jobs:
Example #3
)
from taskgraph.util.taskcluster import get_artifact_path
from taskgraph.transforms.task import task_description_schema
from voluptuous import Required, Optional

# Voluptuous uses marker objects as dictionary *keys*, but they are not
# comparable, so we cast all of the keys back to regular strings
task_description_schema = {
    str(k): v
    for k, v in task_description_schema.schema.iteritems()
}

release_generate_checksums_signing_schema = schema.extend({
    Required('depname', default='release-generate-checksums'):
    basestring,
    Optional('label'):
    basestring,
    Optional('treeherder'):
    task_description_schema['treeherder'],
    Optional('shipping-product'):
    task_description_schema['shipping-product'],
    Optional('shipping-phase'):
    task_description_schema['shipping-phase'],
})

transforms = TransformSequence()
transforms.add_validate(release_generate_checksums_signing_schema)


@transforms.add
def make_release_generate_checksums_signing_description(config, jobs):
Example #4
import gecko_taskgraph

from .base import TransformSequence
from ..util.cached_tasks import add_optimization
from ..util.schema import Schema, validate_schema
from ..util.treeherder import join_symbol

CACHE_TYPE = "content.v1"

FETCH_SCHEMA = Schema({
    # Name of the task.
    Required("name"):
    str,
    # Relative path (from config.path) to the file the task was defined
    # in.
    Optional("job-from"):
    str,
    # Description of the task.
    Required("description"):
    str,
    Optional(
        "fetch-alias",
        description="An alias that can be used instead of the real fetch job name in "
        "fetch stanzas for jobs.",
    ):
    str,
    Optional(
        "artifact-prefix",
        description="The prefix of the taskcluster artifact being uploaded. "
        "Defaults to `public/`; if it starts with something other than "
        "`public/` the artifact will require scopes to access.",
    ):
    str,
Example #5
job_description_schema = {
    str(k): v
    for k, v in job_description_schema.schema.iteritems()
}

transforms = TransformSequence()

taskref_or_string = Any(basestring, {Required('task-reference'): basestring})

beetmover_cdns_description_schema = Schema({
    Required('name'):
    basestring,
    Required('product'):
    basestring,
    Required('treeherder-platform'):
    basestring,
    Optional('attributes'): {
        basestring: object
    },
    Optional('job-from'):
    task_description_schema['job-from'],
    Optional('run'): {
        basestring: object
    },
    Optional('run-on-projects'):
    task_description_schema['run-on-projects'],
    Required('worker-type'):
    optionally_keyed_by('project', basestring),
    Optional('dependencies'): {
        basestring: taskref_or_string
    },
    Optional('index'): {
Example #6
logger = logging.getLogger(__name__)

# Voluptuous uses marker objects as dictionary *keys*, but they are not
# comparable, so we cast all of the keys back to regular strings
task_description_schema = {
    str(k): v
    for k, v in task_description_schema.schema.iteritems()
}

# Schema for a build description
job_description_schema = Schema({
    # The name of the job and the job's label.  At least one must be specified,
    # and the label will be generated from the name if necessary, by prepending
    # the kind.
    Optional('name'):
    basestring,
    Optional('label'):
    basestring,

    # the following fields are passed directly through to the task description,
    # possibly modified by the run implementation.  See
    # taskcluster/taskgraph/transforms/task.py for the schema details.
    Required('description'):
    task_description_schema['description'],
    Optional('attributes'):
    task_description_schema['attributes'],
    Optional('dependencies'):
    task_description_schema['dependencies'],
    Optional('expires-after'):
    task_description_schema['expires-after'],
Example #7
    def validate(self, metadata):
        data = saml_to_dict(metadata)
        key_descriptor = Schema(
            All([{
                'attrs':
                Schema(
                    {
                        'use':
                        All(
                            str,
                            In(KEYDESCRIPTOR_USES,
                               msg=DEFAULT_LIST_VALUE_ERROR.format(
                                   ', '.join(KEYDESCRIPTOR_USES)))),
                    },
                    required=True),
                'children': {
                    '{%s}KeyInfo' % (SIGNATURE): {
                        'attrs': {},
                        'children': {
                            '{%s}X509Data' % (SIGNATURE): {
                                'attrs': {},
                                'children': {
                                    '{%s}X509Certificate' % (SIGNATURE): {
                                        'attrs': {},
                                        'children': {},
                                        'text': All(str, _check_certificate)
                                    }
                                },
                                'text': None
                            }
                        },
                        'text': None
                    }
                },
                'text':
                None
            }], self._check_keydescriptor),
            required=True,
        )
        slo = Schema(
            All([{
                'attrs': dict,
                'children': dict,
                'text': None
            }], Length(min=1)),
            required=True,
        )
        acs = Schema(
            All([{
                'attrs': dict,
                'children': dict,
                'text': None
            }], Length(min=1)),
            required=True,
        )
        atcs = Schema(
            All([{
                'attrs': {
                    'index': str
                },
                'children': {
                    '{%s}ServiceName' % (METADATA): {
                        'attrs': dict,
                        'children': {},
                        'text': str
                    },
                    Optional('{%s}ServiceDescription' % (METADATA)): {
                        'attrs': dict,
                        'children': {},
                        'text': str
                    },
                    '{%s}RequestedAttribute' % (METADATA):
                    All([{
                        'attrs': {
                            'Name':
                            All(
                                str,
                                In(SPID_ATTRIBUTES_NAMES,
                                   msg=DEFAULT_LIST_VALUE_ERROR.format(
                                       ', '.join(SPID_ATTRIBUTES_NAMES)))),
                            Optional('NameFormat'):
                            Equal(NAME_FORMAT_BASIC,
                                  msg=DEFAULT_VALUE_ERROR.format(
                                      NAME_FORMAT_BASIC)),
                            Optional('FriendlyName'):
                            str,
                            Optional('isRequired'):
                            str
                        },
                        'children': {},
                        'text': None
                    }], Length(min=1)),
                },
                'text': None
            }], Length(min=1)),
            required=True,
        )
        name_id_format = Schema(
            {
                'attrs': {},
                'children': {},
                'text':
                Equal(NAMEID_FORMAT_TRANSIENT,
                      msg=DEFAULT_VALUE_ERROR.format(NAMEID_FORMAT_TRANSIENT)),
            },
            required=True,
        )
        spsso_descriptor_attr_schema = Schema(All({
            'protocolSupportEnumeration':
            Equal(PROTOCOL, msg=DEFAULT_VALUE_ERROR.format(PROTOCOL)),
            'AuthnRequestsSigned':
            Equal('true', msg=DEFAULT_VALUE_ERROR.format('true')),
            Optional('WantAssertionsSigned'):
            str,
        }),
                                              required=True)
        spsso = Schema(
            {
                'attrs': spsso_descriptor_attr_schema,
                'children': {
                    '{%s}KeyDescriptor' % (METADATA): key_descriptor,
                    '{%s}SingleLogoutService' % (METADATA): slo,
                    '{%s}AssertionConsumerService' % (METADATA): acs,
                    '{%s}AttributeConsumingService' % (METADATA): atcs,
                    '{%s}NameIDFormat' % (METADATA): name_id_format,
                },
                'text': None
            },
            required=True)
        entity_descriptor_schema = Schema(
            {
                '{%s}EntityDescriptor' % (METADATA): {
                    'attrs':
                    Schema(
                        {
                            'entityID': str,
                            Optional('ID'): str,
                            Optional('validUntil'): All(str, _check_utc_date),
                            Optional('cacheDuration'): str,
                            Optional('Name'): str,
                        },
                        required=True),
                    'children':
                    Schema(
                        {
                            Optional('{%s}Signature' % (SIGNATURE)):
                            Schema(
                                {
                                    'attrs': dict,
                                    'children': dict,
                                    'text': None
                                },
                                required=True,
                            ),
                            '{%s}SPSSODescriptor' % (METADATA):
                            spsso,
                            Optional('{%s}Organization' % (METADATA)):
                            dict,
                            Optional('{%s}ContactPerson' % (METADATA)):
                            list
                        },
                        required=True),
                    'text':
                    None
                }
            },
            required=True)
        errors = []
        try:
            entity_descriptor_schema(data)
        except MultipleInvalid as e:
            for err in e.errors:
                _val = data
                _paths = []
                _attr = None
                for idx, _path in enumerate(err.path):
                    if _path != 'children':
                        if _path == 'attrs':
                            try:
                                _attr = err.path[(idx + 1)]
                            except IndexError:
                                _attr = ''
                            break

                        # strip namespaces for better readability
                        _paths.append(_strip_namespaces(str(_path)))
                path = '/'.join(_paths)
                if _attr is not None:
                    path = '{} - attribute: {}'.format(path, _attr)
                for _ in err.path:
                    try:
                        _val = _val[_]
                    except IndexError:
                        _val = None
                    except KeyError:
                        _val = None
                errors.append(
                    ValidationDetail(_val, None, None, None, None, err.msg,
                                     path))
            raise SPIDValidationError(details=errors)
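
The except-block above flattens voluptuous's MultipleInvalid into readable path/message pairs. A reduced standalone sketch of the same traversal (toy schema and data, unrelated to the SPID metadata):

from voluptuous import MultipleInvalid, Required, Schema

demo = Schema({Required('attrs'): {Required('use'): str}, 'text': str})
try:
    demo({'attrs': {'use': 1}, 'text': None})
except MultipleInvalid as e:
    for err in e.errors:
        # err.path lists the keys leading to the failing value;
        # err.msg carries the validator's message.
        print('/'.join(str(p) for p in err.path), '-', err.msg)
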
Example #8
emborg_exe = "../bu".split()
emborg_overdue_exe = "../od".split()
# uses installed code
#emborg_exe = "emborg".split()
#emborg_overdue_exe = "emborg-overdue".split()
set_prefs(use_inform=True)
tests_dir = to_path(__file__).parent
emborg_dir = str(tests_dir.parent)
emborg_dir_wo_slash = emborg_dir.strip('/')
# remove the leading slash; it is added back in tests if needed

# schema for test cases {{{2
emborg_schema = Schema(
    {
        Required('name'): str,
        Optional('args', default='<PASS>'): Any(str, list),
        Optional('expected', default=""): str,
        Optional('expected_type', default=""): str,
        Optional('cmp_dirs', default=""): str,
        Optional('remove', default=""): str,
        Optional('dependencies', default=""): str,
    },
    required=True)
emborg_overdue_schema = Schema(
    {
        Required('name'): str,
        Optional('conf', default=""): str,
        Optional('args', default=[]): Any(str, list),
        Required('expected', default=""): str,
        Required('expected_type', default=""): str,
        Optional('dependencies', default=""): str,
Example #9
    'pine': {
        'target_tasks_method': 'pine_tasks',
    },
    'kaios': {
        'target_tasks_method': 'kaios_tasks',
    },

    # the default parameters are used for projects that do not match above.
    'default': {
        'target_tasks_method': 'default',
    }
}

try_task_config_schema = Schema({
    Required('tasks'): [text_type],
    Optional('browsertime'):
    bool,
    Optional('chemspill-prio'):
    bool,
    Optional('disable-pgo'):
    bool,
    Optional('env'): {
        text_type: text_type
    },
    Optional('gecko-profile'):
    bool,
    Optional("optimize-strategies",
             description="Alternative optimization strategies to use instead of the default. "
             "A module path pointing to a dict to be use as the `strategy_override` "
             "argument in `taskgraph.optimize.optimize_task_graph`."):
    text_type,
Example #10
    for broadcast_name, details in value.items():
        try:
            broadcast_value = getattr(BroadcastTypes, broadcast_name).value
        except AttributeError:
            raise Invalid("Unknown Broadcast Name: {}".format(broadcast_name))
        new_value[broadcast_value] = details
    return new_value


BROADCAST_SCHEMA = All(
    Schema({
        Required('name'): Coerce(str),
        Required('base'): Coerce(int),
        Required('min'): Coerce(int),
        Required('max'): Coerce(int),
        Optional('unit', default=""): Coerce(str),
        Required('converter', default=None): validate_converter,
    }), update_converter_if_none)

ATTRIBUTE_SCHEMA = All(Schema({str: [BROADCAST_SCHEMA]}), validate_broadcast)

MODULE_INFO_SCHEMA = All(
    Schema({
        Required('code'): Coerce(int),
        Required('protocol'): All(Lower, Any('any', Coerce(int))),
        Required('software_versions'): All([Coerce(int)]),
        Required('description'): Coerce(str),
        Required('attributes'): ATTRIBUTE_SCHEMA,
    }))

SCHEMA = All(Schema({
Example #11
# comparable, so we cast all of the keys back to regular strings
task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()}

transforms = TransformSequence()

# shortcut for a string where task references are allowed
taskref_or_string = Any(
    basestring,
    {Required('task-reference'): basestring})

balrog_description_schema = Schema({
    # the dependent task (object) for this balrog job, used to inform balrogworker.
    Required('dependent-task'): object,

    # unique label to describe this balrog task, defaults to balrog-{dep.label}
    Optional('label'): basestring,

    # treeherder is allowed here to override any defaults we use for beetmover.  See
    # taskcluster/taskgraph/transforms/task.py for the schema details, and the
    # below transforms for defaults of various values.
    Optional('treeherder'): task_description_schema['treeherder'],
})


@transforms.add
def validate(config, jobs):
    for job in jobs:
        label = job.get('dependent-task', object).__dict__.get('label', '?no-label?')
        yield validate_schema(
            balrog_description_schema, job,
            "In balrog ({!r} kind) task for {!r}:".format(config.kind, label))
Example #12
    get_branch_repo,
    get_branch_rev,
)

mozharness_run_schema = Schema({
    Required('using'):
    'mozharness',

    # the mozharness script used to run this task, relative to the testing/
    # directory and using forward slashes even on Windows
    Required('script'):
    basestring,

    # Additional paths to look for mozharness configs in. These should be
    # relative to the base of the source checkout
    Optional('config-paths'): [basestring],

    # the config files required for the task, relative to
    # testing/mozharness/configs or one of the paths specified in
    # `config-paths` and using forward slashes even on Windows
    Required('config'): [basestring],

    # any additional actions to pass to the mozharness command
    Optional('actions'): [
        Match('^[a-z0-9-]+$',
              "actions must be `-` seperated alphanumeric strings")
    ],

    # any additional options (without leading --) to be passed to mozharness
    Optional('options'): [
        Match(
Example #13
Data types provided by plugin for Qeq calculation.

Register data types via the "aiida.data" entry point in setup.json.
"""

# You can directly use or subclass aiida.orm.data.Data
# or any other data type listed under 'verdi data'
from __future__ import absolute_import
import os
from aiida.orm import Dict
from voluptuous import Schema, Optional, Any, ExactSequence
from collections import OrderedDict

# key : [ accepted values, label ]
cmdline_options = OrderedDict([
    (Optional('build_grid', default=False), bool),
    # true/false input_grid_file dh1sz dh2sz dh3sz vdw_factor_e vdw_factor_f use_vdw_factor offset
    # e.g. build_grid_from_scratch 1 none 0.25 0.25 0.25 1.0 2.0 0 3.0
    (Optional('build_grid_from_scratch',
              default=[True, 'none']), ExactSequence([bool, str])),
    (Optional('grid_spacing',
              default=[0.25, 0.25, 0.25]), ExactSequence([float, float,
                                                          float])),
    # where to print the potential (e.g. between 1x vdw radius and 2x vdw radius)
    (Optional('vdw_factors',
              default=[False, 1.0, 2.0]), ExactSequence([bool, float, float])),
    (Optional('offset', default=3.0), float),
    (Optional('save_grid', default=[False,
                                    'grid.cube']), ExactSequence([bool, str])),
    (Optional('calculate_pot_diff', default=False), bool),
    (Optional('calculate_pot',
Example #14
    'target.bz2.complete.mar',
]

beetmover_description_schema = schema.extend({
    # depname is used in taskref's to identify the taskID of the unsigned things
    Required('depname', default='build'):
    basestring,

    # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
    Required('label'):
    basestring,

    # treeherder is allowed here to override any defaults we use for beetmover.  See
    # taskcluster/taskgraph/transforms/task.py for the schema details, and the
    # below transforms for defaults of various values.
    Optional('treeherder'):
    task_description_schema['treeherder'],
    Optional('attributes'):
    task_description_schema['attributes'],

    # locale is passed only for l10n beetmoving
    Optional('locale'):
    basestring,
    Required('shipping-phase'):
    task_description_schema['shipping-phase'],
    # Optional until we fix asan (run_on_projects?)
    Optional('shipping-product'):
    task_description_schema['shipping-product'],
})

transforms = TransformSequence()
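
Several of these snippets derive their schema from a shared base via voluptuous's Schema.extend, which merges key sets into a new Schema. A minimal sketch of the mechanism with toy keys (not the actual taskgraph base schema):

from voluptuous import Optional, Required, Schema

base = Schema({Required('depname', default='build'): str})
extended = base.extend({Optional('locale'): str})
extended({'locale': 'de'})  # -> {'depname': 'build', 'locale': 'de'}
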
Example #15
    Required,
    Extra,
)

from taskgraph.transforms.base import TransformSequence
from taskgraph.transforms.tests import test_description_schema
from taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by, Schema
from taskgraph.util.treeherder import split_symbol, join_symbol

transforms = TransformSequence()


raptor_description_schema = Schema(
    {
        # Raptor specific configs.
        Optional("apps"): optionally_keyed_by("test-platform", "subtest", [text_type]),
        Optional("raptor-test"): text_type,
        Optional("raptor-subtests"): optionally_keyed_by("app", "test-platform", list),
        Optional("activity"): optionally_keyed_by("app", text_type),
        Optional("binary-path"): optionally_keyed_by("app", text_type),
        # Configs defined in the 'test_description_schema'.
        Optional("max-run-time"): optionally_keyed_by(
            "app", test_description_schema["max-run-time"]
        ),
        Optional("run-on-projects"): optionally_keyed_by(
            "app",
            "test-name",
            "raptor-test",
            "subtest",
            test_description_schema["run-on-projects"],
        ),
Example #16
         ]
       }

    :>json list response: A forum poll

    :statuscode 200: View successful
    :statuscode 403: User does not have permission to view thread
    :statuscode 404: Thread does not exist
    """
    return flask.jsonify(ForumPoll.from_pk(id, error=True, _404=True))


MODIFY_FORUM_POLL_SCHEMA = Schema({
    'choices':
    Schema({
        Optional('add', default=[]): [str],
        Optional('delete', default=[]): [int],
    }),
    'closed':
    BoolGET,
    'featured':
    BoolGET,
})


@bp.route('/polls/<int:id>', methods=['PUT'])
@require_permission('modify_forum_polls')
@validate_data(MODIFY_FORUM_POLL_SCHEMA)
def modify_poll(
    id: int,
    choices: Dict[str, list] = None,
Example #17
    docker_worker_add_artifacts,
    get_vcsdir_name,
)
from taskgraph.util.hash import hash_paths
import taskgraph

CACHE_TYPE = "toolchains.v3"

toolchain_run_schema = Schema({
    Required("using"):
    "toolchain-script",
    # The script (in taskcluster/scripts/misc) to run.
    Required("script"):
    str,
    # Arguments to pass to the script.
    Optional("arguments"): [str],
    # Sparse profile to give to checkout using `run-task`.  If given,
    # a filename in `build/sparse-profiles`.  Defaults to
    # "toolchain-build", i.e., to
    # `build/sparse-profiles/toolchain-build`.  If `None`, instructs
    # `run-task` to not use a sparse profile at all.
    Required("sparse-profile"):
    Any(str, None),
    # Paths/patterns pointing to files that influence the outcome of a
    # toolchain build.
    Optional("resources"): [str],
    # Path to the artifact produced by the toolchain job
    Required("toolchain-artifact"):
    str,
    Optional(
        "toolchain-alias",
Example #18
}

# shortcut for a string where task references are allowed
taskref_or_string = Any(basestring, {Required('task-reference'): basestring})

packaging_description_schema = Schema({
    # the dependent task (object) for this job, used to inform repackaging.
    Required('dependent-task'):
    object,

    # depname is used in taskref's to identify the taskID of the signed things
    Required('depname', default='build'):
    basestring,

    # unique label to describe this repackaging task
    Optional('label'):
    basestring,

    # treeherder is allowed here to override any defaults we use for repackaging.  See
    # taskcluster/taskgraph/transforms/task.py for the schema details, and the
    # below transforms for defaults of various values.
    Optional('treeherder'):
    task_description_schema['treeherder'],

    # If a l10n task, the corresponding locale
    Optional('locale'):
    basestring,

    # Routes specific to this task, if defined
    Optional('routes'): [basestring],
Example #19
from taskgraph.transforms.job import run_job_using
from taskgraph.transforms.job.common import (
    docker_worker_add_artifacts,
    generic_worker_add_artifacts,
    generic_worker_hg_commands,
    docker_worker_add_tooltool,
    support_vcs_checkout,
)

sm_run_schema = Schema({
    Required('using'): Any('spidermonkey', 'spidermonkey-package', 'spidermonkey-mozjs-crate',
                           'spidermonkey-rust-bindings'),

    # SPIDERMONKEY_VARIANT and SPIDERMONKEY_PLATFORM
    Required('spidermonkey-variant'): basestring,
    Optional('spidermonkey-platform'): basestring,

    # Base work directory used to set up the task.
    Required('workdir'): basestring,
})


@run_job_using("docker-worker", "spidermonkey", schema=sm_run_schema)
@run_job_using("docker-worker", "spidermonkey-package", schema=sm_run_schema)
@run_job_using("docker-worker", "spidermonkey-mozjs-crate",
               schema=sm_run_schema)
@run_job_using("docker-worker", "spidermonkey-rust-bindings",
               schema=sm_run_schema)
def docker_worker_spidermonkey(config, job, taskdesc):
    run = job['run']
Example #20
    Required('run-time'): _by_platform(int),

    # Locales not to repack for
    Required('ignore-locales'): _by_platform([text_type]),

    # All l10n jobs use mozharness
    Required('mozharness'): {
        # Script to invoke for mozharness
        Required('script'): _by_platform(text_type),

        # Config files passed to the mozharness script
        Required('config'): _by_platform([text_type]),

        # Additional paths to look for mozharness configs in. These should be
        # relative to the base of the source checkout
        Optional('config-paths'): [text_type],

        # Options to pass to the mozharness script
        Optional('options'): _by_platform([text_type]),

        # Action commands to provide to mozharness script
        Required('actions'): _by_platform([text_type]),

        # if true, perform a checkout of a comm-central based branch inside the
        # gecko checkout
        Required('comm-checkout', default=False): bool,
    },
    # Items for the taskcluster index
    Optional('index'): {
        # Product to identify as in the taskcluster index
        Required('product'): _by_platform(text_type),
Example #21
    def validate(self, request):
        xmlstr = request.saml_request
        data = saml_to_dict(xmlstr)
        if self._action == 'login':
            req_type = 'AuthnRequest'
        elif self._action == 'logout':
            req_type = 'LogoutRequest'
        issuer_name = data.get(
            '{urn:oasis:names:tc:SAML:2.0:protocol}%s' % (req_type),
            {}).get('children',
                    {}).get('{urn:oasis:names:tc:SAML:2.0:assertion}Issuer',
                            {}).get('text')
        if issuer_name is None:
            raise UnknownEntityIDError(
                'Issuer non presente nella {}'.format(req_type))
        try:
            sp_metadata = self._registry.load(issuer_name)
        except MetadataNotFoundError:
            raise UnknownEntityIDError(
                'L\'entity ID "{}" indicato nell\'elemento <Issuer> non corrisponde a nessun Service Provider registrato in questo Identity Provider di test.'
                .format(issuer_name))

        atcss = sp_metadata.attribute_consuming_services
        attribute_consuming_service_indexes = [
            str(el.get('attrs').get('index')) for el in atcss
            if 'index' in el.get('attrs', {})
        ]
        ascss = sp_metadata.assertion_consumer_services
        assertion_consumer_service_indexes = [
            str(el.get('index')) for el in ascss
        ]
        assertion_consumer_service_urls = [
            str(el.get('Location')) for el in ascss
        ]

        entity_id = self._config.entity_id

        issuer = Schema(
            {
                'attrs': {
                    'Format':
                    Equal(
                        NAMEID_FORMAT_ENTITY,
                        msg=DEFAULT_VALUE_ERROR.format(NAMEID_FORMAT_ENTITY)),
                    'NameQualifier':
                    Any(Url(), Match(r'^urn:'), msg="Invalid URI"),
                },
                'children': {},
                'text': str,
            },
            required=True,
        )

        name_id = Schema(
            {
                'attrs': {
                    'NameQualifier':
                    str,
                    'Format':
                    Equal(NAMEID_FORMAT_TRANSIENT,
                          msg=DEFAULT_VALUE_ERROR.format(
                              NAMEID_FORMAT_TRANSIENT)),
                },
                'children': {},
                'text': str
            },
            required=True,
        )

        name_id_policy = Schema(
            {
                'attrs': {
                    'Format':
                    Equal(NAMEID_FORMAT_TRANSIENT,
                          msg=DEFAULT_VALUE_ERROR.format(
                              NAMEID_FORMAT_TRANSIENT)),
                    Optional('SPNameQualifier'):
                    str,
                },
                'children': {},
                'text': None,
            },
            required=True,
        )

        conditions = Schema(
            {
                'attrs': {
                    'NotBefore': All(str, _check_utc_date),
                    'NotOnOrAfter': All(str, _check_utc_date),
                },
                'children': {},
                'text': None,
            },
            required=True,
        )

        authn_context_class_ref = Schema(
            {
                'attrs': {},
                'children': {},
                'text':
                All(
                    str,
                    In(SPID_LEVELS,
                       msg=DEFAULT_LIST_VALUE_ERROR.format(
                           ', '.join(SPID_LEVELS))))
            },
            required=True,
        )

        requested_authn_context = Schema(
            {
                'attrs': {
                    'Comparison': str
                },
                'children': {
                    '{%s}AuthnContextClassRef' % (ASSERTION):
                    authn_context_class_ref
                },
                'text': None
            },
            required=True,
        )

        scoping = Schema(
            {
                'attrs': {
                    'ProxyCount': Equal('0',
                                        msg=DEFAULT_VALUE_ERROR.format('0'))
                },
                'children': {},
                'text': None
            },
            required=True,
        )

        signature = Schema(
            {
                'attrs': dict,
                'children': dict,
                'text': None
            },
            required=True,
        )

        subject = Schema(
            {
                'attrs': {
                    'Format':
                    Equal(
                        NAMEID_FORMAT_ENTITY,
                        msg=DEFAULT_VALUE_ERROR.format(NAMEID_FORMAT_ENTITY)),
                    'NameQualifier':
                    str
                },
                'children': {},
                'text': None
            },
            required=True,
        )

        # LOGIN

        def check_assertion_consumer_service(attrs):
            keys = list(attrs.keys())
            if ('AssertionConsumerServiceURL' in keys
                    and 'ProtocolBinding' in keys
                    and 'AssertionConsumerServiceIndex' not in keys):
                _errors = []
                if attrs['ProtocolBinding'] != BINDING_HTTP_POST:
                    _errors.append(
                        Invalid(DEFAULT_VALUE_ERROR.format(BINDING_HTTP_POST),
                                path=['ProtocolBinding']))
                if attrs[
                        'AssertionConsumerServiceURL'] not in assertion_consumer_service_urls:
                    _errors.append(
                        Invalid(DEFAULT_VALUE_ERROR.format(
                            assertion_consumer_service_urls),
                                path=['AssertionConsumerServiceURL']))
                if _errors:
                    raise MultipleInvalid(errors=_errors)
                return attrs

            elif ('AssertionConsumerServiceURL' not in keys
                  and 'ProtocolBinding' not in keys
                  and 'AssertionConsumerServiceIndex' in keys):
                if attrs[
                        'AssertionConsumerServiceIndex'] not in assertion_consumer_service_indexes:
                    raise Invalid(DEFAULT_LIST_VALUE_ERROR.format(
                        ', '.join(assertion_consumer_service_indexes)),
                                  path=['AssertionConsumerServiceIndex'])
                return attrs

            else:
                raise Invalid(
                    'Uno e uno solo uno tra gli attributi o gruppi di attributi devono essere presenti: '
                    '[AssertionConsumerServiceIndex, [AssertionConsumerServiceUrl, ProtocolBinding]]'
                )

        authnrequest_attr_schema = Schema(All(
            {
                'ID':
                str,
                'Version':
                Equal('2.0', msg=DEFAULT_VALUE_ERROR.format('2.0')),
                'IssueInstant':
                All(str, _check_utc_date, self._check_date_in_range),
                'Destination':
                In([entity_id, self._config.absolute_sso_url],
                   msg=DEFAULT_VALUE_ERROR.format(entity_id)),
                Optional('ForceAuthn'):
                str,
                Optional('AttributeConsumingServiceIndex'):
                In(attribute_consuming_service_indexes,
                   msg=DEFAULT_LIST_VALUE_ERROR.format(
                       ', '.join(attribute_consuming_service_indexes))),
                Optional('AssertionConsumerServiceIndex'):
                str,
                Optional('AssertionConsumerServiceURL'):
                str,
                Optional('ProtocolBinding'):
                str,
            },
            check_assertion_consumer_service,
        ),
                                          required=True)

        AUTHNREQUEST_TAG = '{%s}AuthnRequest' % (PROTOCOL)

        authnrequest_schema = {
            AUTHNREQUEST_TAG: {
                'attrs':
                authnrequest_attr_schema,
                'children':
                Schema(
                    {
                        Optional('{%s}Subject' % (ASSERTION)): subject,
                        '{%s}Issuer' % (ASSERTION): issuer,
                        '{%s}NameIDPolicy' % (PROTOCOL): name_id_policy,
                        Optional('{%s}Conditions' % (ASSERTION)): conditions,
                        '{%s}RequestedAuthnContext' % (PROTOCOL):
                        requested_authn_context,
                        Optional('{%s}Scoping' % (PROTOCOL)): scoping,
                    },
                    required=True,
                ),
                'text':
                None
            }
        }

        # LOGOUT

        LOGOUTREQUEST_TAG = '{%s}LogoutRequest' % (PROTOCOL)

        logoutrequest_attr_schema = Schema(All({
            'ID':
            str,
            'Version':
            Equal('2.0', msg=DEFAULT_VALUE_ERROR.format('2.0')),
            'IssueInstant':
            All(str, _check_utc_date, self._check_date_in_range),
            'Destination':
            In([entity_id, self._config.absolute_slo_url],
               msg=DEFAULT_VALUE_ERROR.format(entity_id)),
            Optional('NotOnOrAfter'):
            All(str, _check_utc_date, self._check_date_not_expired),
            Optional('Reason'):
            str,
        }),
                                           required=True)

        logoutrequest_schema = {
            LOGOUTREQUEST_TAG: {
                'attrs':
                logoutrequest_attr_schema,
                'children':
                Schema(
                    {
                        '{%s}Issuer' % (ASSERTION): issuer,
                        '{%s}NameID' % (ASSERTION): name_id,
                        '{%s}SessionIndex' % (PROTOCOL): dict,
                    },
                    required=True),
                'text':
                None
            }
        }

        if self._binding == BINDING_HTTP_POST:
            if self._action == 'login':
                # Add signature schema
                _new_sub_schema = authnrequest_schema[AUTHNREQUEST_TAG][
                    'children'].extend(
                        {'{%s}Signature' % (SIGNATURE): signature})
                authnrequest_schema[AUTHNREQUEST_TAG][
                    'children'] = _new_sub_schema
            if self._action == 'logout':
                _new_sub_schema = logoutrequest_schema[LOGOUTREQUEST_TAG][
                    'children'].extend(
                        {'{%s}Signature' % (SIGNATURE): signature})
                logoutrequest_schema[LOGOUTREQUEST_TAG][
                    'children'] = _new_sub_schema

        authn_request = Schema(
            authnrequest_schema,
            required=True,
        )

        logout_request = Schema(
            logoutrequest_schema,
            required=True,
        )

        saml_schema = None
        if self._action == 'login':
            saml_schema = authn_request
        elif self._action == 'logout':
            saml_schema = logout_request
        errors = []
        try:
            saml_schema(data)
        except MultipleInvalid as e:
            for err in e.errors:
                _paths = []
                _attr = None
                for idx, _path in enumerate(err.path):
                    if _path != 'children':
                        if _path == 'attrs':
                            try:
                                _attr = err.path[(idx + 1)]
                            except IndexError:
                                _attr = ''
                            break

                        # strip namespaces for better readability
                        _paths.append(_strip_namespaces(str(_path)))
                path = '/'.join(_paths)
                if _attr is not None:
                    path += " - attribute: " + _attr

                # find value to show (iterate multiple times inside data
                # until we find the sub-element or attribute)
                _val = data
                for _ in err.path:
                    try:
                        _val = _val[_]
                    except KeyError:
                        _val = None
                    except ValueError:
                        _val = None

                # no need to show value if the error is the presence of the element
                _msg = err.msg
                if "extra keys not allowed" in _msg:
                    _val = None
                    _msg = "item not allowed"

                errors.append(
                    ValidationDetail(_val, None, None, None, None, _msg, path))
            raise SPIDValidationError(details=errors)
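
check_assertion_consumer_service above enforces a cross-field rule by combining a plain key schema with a callable inside All. A reduced sketch of that pattern (toy field names):

from voluptuous import All, Invalid, Schema

def exactly_one(attrs):
    # Mutually exclusive: exactly one of 'url' or 'index' must be set.
    if ('url' in attrs) == ('index' in attrs):
        raise Invalid("provide exactly one of 'url' or 'index'")
    return attrs

attr_schema = Schema(All({'url': str, 'index': str}, exactly_one))
attr_schema({'url': 'https://sp.example.org/acs'})  # passes
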
Example #22
        },
        "upstreamArtifacts": worker["upstream-artifacts"],
        "version": worker["version"]
    }

    scope_prefix = config.graph_config["scriptworker"]["scope-prefix"]
    task_def["scopes"].extend([
        "{}:beetmover:action:{}".format(scope_prefix, worker["action"]),
        "{}:beetmover:bucket:{}".format(scope_prefix, worker["bucket"]),
    ])


@payload_builder(
    "scriptworker-github",
    schema={
        Optional("upstream-artifacts"): [{
            Required("taskId"): taskref_or_string,
            Required("taskType"): str,
            Required("paths"): [str],
        }],
        Required("artifact-map"): [object],
        Required("action"):
        str,
        Required("git-tag"):
        str,
        Required("git-revision"):
        str,
        Required("github-project"):
        str,
        Required("is-prerelease"):
        bool,
Example #23
from voluptuous import (
    Extra,
    Any,
    Optional,
    Required,
    Exclusive,
)

logger = logging.getLogger(__name__)

# Schema for a build description
job_description_schema = Schema({
    # The name of the job and the job's label.  At least one must be specified,
    # and the label will be generated from the name if necessary, by prepending
    # the kind.
    Optional("name"):
    str,
    Optional("label"):
    str,
    # the following fields are passed directly through to the task description,
    # possibly modified by the run implementation.  See
    # taskcluster/taskgraph/transforms/task.py for the schema details.
    Required("description"):
    task_description_schema["description"],
    Optional("attributes"):
    task_description_schema["attributes"],
    Optional("job-from"):
    task_description_schema["job-from"],
    Optional("dependencies"):
    task_description_schema["dependencies"],
    Optional("soft-dependencies"):
Example #24
from taskgraph.transforms.job.common import (docker_worker_add_tooltool,
                                             support_vcs_checkout)
from voluptuous import Any, Optional, Required

run_task_schema = Schema({
    Required('using'):
    'run-task',

    # if true, add a cache at ~worker/.cache, which is where things like pip
    # tend to hide their caches.  This cache is never added for level-1 jobs.
    # TODO Once bug 1526028 is fixed, this and 'use-caches' should be merged.
    Required('cache-dotcache'):
    bool,

    # Whether or not to use caches.
    Optional('use-caches'):
    bool,

    # if true (the default), perform a checkout of gecko on the worker
    Required('checkout'):
    bool,
    Optional(
        "cwd",
        description="Path to run command in. If a checkout is present, the path "
        "to the checkout will be interpolated with the key `checkout`",
    ):
    text_type,

    # The sparse checkout profile to use. Value is the filename relative to the
    # directory where sparse profiles are defined (build/sparse-profiles/).
    Required('sparse-profile'):
Example #25
    Required('refs'): [All(tuple)],

    # parsed fields
    Required('depends_on'): {
        Required('nodes'): [All(basestring, Length(min=1, max=255))],
        Required('macros'): [All(basestring, Length(min=1, max=255))],
    },
    Required('empty'):
    bool,
    Required('config'):
    config_contract,
    Required('tags'):
    All(set),

    # For csv files
    Optional('agate_table'):
    object,
})

parsed_nodes_contract = Schema({
    str: parsed_node_contract,
})

parsed_macro_contract = unparsed_base_contract.extend({
    # identifiers
    Required('resource_type'):
    Any(NodeType.Macro),
    Required('unique_id'):
    All(basestring, Length(min=1, max=255)),
    Required('tags'):
    All(set),
Example #26
PHONE = r'^\+[0-9]+ \([0-9]{3}\) [0-9]{3}-[0-9]{4}$'


def _Skills(skills):
    if not isinstance(skills, list):
        raise TypeError('Invalid Skills, expected a list')
    for skill in skills:
        if not isinstance(skill['rating'], int):
            raise TypeError('Invalid Skills Rating, expected an INT')


_skills = Schema(_Skills)

user_put_schema = Schema(
    {
        Optional('name'): All(str, Match(SAFE_STRING, msg="Invalid Name")),
        Optional('company'): All(str, Match(SAFE_STRING,
                                            msg="Invalid Company")),
        Optional('email'): All(Email(msg="Invalid Email")),
        Optional('latitude'): All(float, msg="Invalid Latitude"),
        Optional('longitude'): All(float, msg="Invalid Longitude"),
        Optional('picture'): All(str, Match(URL, msg="Invalid URL")),
        Optional('skills'): _skills,
        Optional('phone'): All(str, Match(PHONE, msg="Invalid Phone"))
    },
    extra=REMOVE_EXTRA)
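
Because user_put_schema is constructed with extra=REMOVE_EXTRA, unknown keys are silently dropped rather than rejected. A small sketch of that behavior (hypothetical payload):

from voluptuous import REMOVE_EXTRA, Optional, Schema

sketch = Schema({Optional('name'): str}, extra=REMOVE_EXTRA)
assert sketch({'name': 'Ada', 'unexpected': 1}) == {'name': 'Ada'}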

skills_get_schema = Schema({
    Optional('skill'):
    All(str, Match(SKILLS_STRING, msg="Invalid Skill")),
    Optional('rating'):
Example #27
run_schema = Schema(
    {
        Required("using"): "debian-package",
        # Debian distribution
        Required("dist"): text_type,
        # Date of the snapshot (from snapshot.debian.org) to use, in the format
        # YYYYMMDDTHHMMSSZ. The same date is used for the base docker-image name
        # (only the YYYYMMDD part).
        Required("snapshot"): text_type,
        # URL/SHA256 of a source file to build, which can either be a source
        # control (.dsc), or a tarball.
        Required(Any("dsc", "tarball")): source_definition,
        # Package name. Normally derived from the source control or tarball file
        # name. Use in case the name doesn't match DSC_PACKAGE_RE or
        # SOURCE_PACKAGE_RE.
        Optional("name"): text_type,
        # Patch to apply to the extracted source.
        Optional("patch"): text_type,
        # Command to run before dpkg-buildpackage.
        Optional("pre-build-command"): text_type,
        # Architecture to build the package for.
        Optional("arch"): text_type,
        # List of package tasks to get build dependencies from.
        Optional("packages"): [text_type],
        # What resolver to use to install build dependencies. The default
        # (apt-get) is good in most cases, but in subtle cases involving
        # a *-backports archive, its solver might not be able to find a
        # solution that satisfies the build dependencies.
        Optional("resolver"): Any("apt-get", "aptitude"),
        # Base work directory used to set up the task.
        Required("workdir"): text_type,
Example #28
            raise Invalid(f"Unsupported URL type {parsed.scheme}://")

        return schemas[parsed.scheme](data)

    return validate


class RelPath(str):
    pass


REMOTE_COMMON = {
    "url": str,
    "checksum_jobs": All(Coerce(int), Range(1)),
    "jobs": All(Coerce(int), Range(1)),
    Optional("no_traverse"): Bool,  # obsoleted
    "verify": Bool,
}
LOCAL_COMMON = {
    "type": supported_cache_type,
    Optional("protected", default=False): Bool,  # obsoleted
    "shared": All(Lower, Choices("group")),
    Optional("slow_link_warning", default=True): Bool,
}
HTTP_COMMON = {
    "auth": All(Lower, Choices("basic", "digest", "custom")),
    "custom_auth_header": str,
    "user": str,
    "password": str,
    "ask_password": Bool,
    "ssl_verify": Any(Bool, str),
Example #29
        raise Invalid(f"Value {n!r} is not a valid package name")

    try:
        normalized = PackageVersion.normalize_python_package_name(n)
    except Exception as exc:
        raise Invalid(f"Failed to parse Python package name {n!r}: {str(exc)}")
    else:
        if normalized != n:
            raise Invalid(
                f"Python package name {n!r} is not in a normalized form, normalized: {normalized!r}"
            )


_RPM_PACKAGE_VERSION_SCHEMA = Schema({
    Required("package_name"): _NONEMPTY_STRING,
    Optional("arch"): _NONEMPTY_STRING,
    Optional("epoch"): _NONEMPTY_STRING,
    Optional("package_identifier"): _NONEMPTY_STRING,
    Optional("package_version"): _NONEMPTY_STRING,
    Optional("release"): _NONEMPTY_STRING,
    Optional("src"): bool,
})

_PYTHON_PACKAGE_VERSION_SCHEMA = Schema({
    Required("name"):
    _python_package_name,
    Optional("version"):
    _specifier_set,
    Optional("location"):
    Any(_NONEMPTY_STRING, None),
})
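
A short usage sketch for the RPM schema above (hypothetical package data; assumes the module's _NONEMPTY_STRING validator, which the truncated snippet references but does not show):

_RPM_PACKAGE_VERSION_SCHEMA({
    "package_name": "glibc",
    "arch": "x86_64",
    "package_version": "2.28",
})  # unknown keys or empty strings would fail validation
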
Example #30
transforms = TransformSequence()

# shortcut for a string where task references are allowed
taskref_or_string = Any(basestring, {Required('task-reference'): basestring})

beetmover_description_schema = Schema({
    # the dependent task (object) for this beetmover job, used to inform beetmover.
    Required('dependent-task'):
    object,

    # depname is used in taskref's to identify the taskID of the unsigned things
    Required('depname', default='build'):
    basestring,

    # unique label to describe this beetmover task, defaults to {dep.label}-beetmover
    Optional('label'):
    basestring,

    # treeherder is allowed here to override any defaults we use for beetmover.  See
    # taskcluster/taskgraph/transforms/task.py for the schema details, and the
    # below transforms for defaults of various values.
    Optional('treeherder'):
    task_description_schema['treeherder'],

    # locale is passed only for l10n beetmoving
    Optional('locale'):
    basestring,
})


@transforms.add