def notify():
    """Return the voluptuous schema fragment for a job's "notify" block.

    Describes notification criteria, verbosity, recipients (email or IRC)
    and HTTP callback(s); "callback" and "callbacks" are mutually
    exclusive forms of the same setting.
    """
    # Shape shared by a single callback and each entry of "callbacks".
    callback = {
        Required("url"): str,
        Optional("method"): Any("GET", "POST"),
        Optional("token"): str,
        Optional("dataset"): Any("minimal", "logs", "results", "all"),
        Optional("content-type"): Any("json", "urlencoded"),
    }

    # Either a plain status criterion, or a complete/incomplete status
    # optionally qualified by a regression/progression type.
    criteria = Any(
        {
            Required("status"): Any(
                "finished", "running", "complete", "canceled", "incomplete"
            )
        },
        {
            Required("status"): Any("complete", "incomplete"),
            Optional("type"): Any("regression", "progression"),
        },
    )

    recipient = {
        Required("to"): {
            Required("method"): Any("email", "irc"),
            Optional("user"): str,
            Optional("email"): str,
            Optional("handle"): str,
            Optional("server"): str,
        }
    }

    compare = {
        Optional("blacklist"): [str],
        Optional("query"): Any(
            {Required("username"): str, Required("name"): str},
            {Required("entity"): str, Optional("conditions"): {str: str}},
        ),
    }

    return {
        Required("criteria"): criteria,
        Optional("verbosity"): Any("verbose", "quiet", "status-only"),
        Optional("recipients"): [recipient],
        # Only one of "callback" / "callbacks" may appear in a submission.
        Exclusive("callback", "callback"): callback,
        Exclusive("callbacks", "callback"): [callback],
        Optional("compare"): compare,
    }
def _device_deploy_schema():
    """Schema for the "deploy" section of a device dictionary."""
    deploy = {
        "connections": dict,
        Required("methods"): dict,
        Optional("parameters"): _simple_params(),
    }
    return Schema(deploy)
def _device_connections_commands():
    """Schema mapping each connection name to its connect command and tags."""
    connection = {"connect": str, Optional("tags"): list}
    return Schema({All(str): connection})
def _by_platform(arg): return optionally_keyed_by('build-platform', arg) # When repacking the stub installer we need to pass a zip file and package name to the # repackage task. This is not needed for vanilla stub but analogous to the full installer. PACKAGE_FORMATS = copy.deepcopy(PACKAGE_FORMATS_VANILLA) PACKAGE_FORMATS['installer-stub']['inputs']['package'] = 'target-stub{archive_format}' PACKAGE_FORMATS['installer-stub']['args'].extend(["--package-name", "{package-name}"]) packaging_description_schema = schema.extend({ # depname is used in taskref's to identify the taskID of the signed things Required('depname', default='build'): text_type, # unique label to describe this repackaging task Optional('label'): text_type, # Routes specific to this task, if defined Optional('routes'): [text_type], # passed through directly to the job description Optional('extra'): task_description_schema['extra'], # Shipping product and phase Optional('shipping-product'): task_description_schema['shipping-product'], Optional('shipping-phase'): task_description_schema['shipping-phase'], Required('package-formats'): _by_platform([text_type]), # All l10n jobs use mozharness Required('mozharness'): {
Match, MultipleInvalid, Optional, Required, Schema, ) from lava_common.schemas import CONTEXT_VARIABLES from django.conf import settings INVALID_CHARACTER_ERROR_MSG = "Invalid character" CALLBACK_SCHEMA = { Required("url"): str, Optional("method"): Any("GET", "POST"), Optional("token"): str, Optional("dataset"): Any("minimal", "logs", "results", "all"), Optional("content-type"): Any("json", "urlencoded"), } class SubmissionException(UserWarning): """ Error raised if the submission is itself invalid. """ def _timeout_schema(): return Schema({ Exclusive("days", "timeout_unit"): int, Exclusive("hours", "timeout_unit"): int, Exclusive("minutes", "timeout_unit"): int,
Schema, taskref_or_string, ) from voluptuous import Required, Optional, Any mach_schema = Schema({ Required('using'): 'mach', # The mach command (omitting `./mach`) to run Required('mach'): taskref_or_string, # The sparse checkout profile to use. Value is the filename relative to the # directory where sparse profiles are defined (build/sparse-profiles/). Optional('sparse-profile'): Any(basestring, None), # if true, perform a checkout of a comm-central based branch inside the # gecko checkout Required('comm-checkout'): bool, # Base work directory used to set up the task. Required('workdir'): basestring, }) defaults = { 'comm-checkout': False, }
mozharness_run_schema = Schema({ Required('using'): 'mozharness', # the mozharness script used to run this task, relative to the testing/ # directory and using forward slashes even on Windows Required('script'): basestring, # the config files required for the task, relative to # testing/mozharness/configs and using forward slashes even on Windows Required('config'): [basestring], # any additional actions to pass to the mozharness command; not supported # on Windows Optional('actions'): [basestring], # any additional options (without leading --) to be passed to mozharness; # not supported on Windows Optional('options'): [basestring], # --custom-build-variant-cfg value (not supported on Windows) Optional('custom-build-variant-cfg'): basestring, # Extra metadata to use toward the workspace caching. # Only supported on docker-worker Optional('extra-workspace-cache-key'): basestring, # If not false, tooltool downloads will be enabled via relengAPIProxy
transforms = TransformSequence() # shortcut for a string where task references are allowed taskref_or_string = Any(basestring, {Required('task-reference'): basestring}) beetmover_description_schema = Schema({ # the dependent task (object) for this beetmover job, used to inform beetmover. Required('dependent-task'): object, # depname is used in taskref's to identify the taskID of the unsigned things Required('depname', default='build'): basestring, # unique label to describe this beetmover task, defaults to {dep.label}-beetmover Optional('label'): basestring, # treeherder is allowed here to override any defaults we use for beetmover. See # taskcluster/taskgraph/transforms/task.py for the schema details, and the # below transforms for defaults of various values. Optional('treeherder'): task_description_schema['treeherder'], # locale is passed only for l10n beetmoving Optional('locale'): basestring, Optional('shipping-phase'): task_description_schema['shipping-phase'], Optional('shipping-product'): task_description_schema['shipping-product'],
Required('run-time'): _by_platform(int), # Locales not to repack for Required('ignore-locales'): _by_platform([basestring]), # All l10n jobs use mozharness Required('mozharness'): { # Script to invoke for mozharness Required('script'): _by_platform(basestring), # Config files passed to the mozharness script Required('config'): _by_platform([basestring]), # Additional paths to look for mozharness configs in. These should be # relative to the base of the source checkout Optional('config-paths'): _by_platform([basestring]), # Options to pass to the mozharness script Required('options'): _by_platform([basestring]), # Action commands to provide to mozharness script Required('actions'): _by_platform([basestring]), # if true, perform a checkout of a comm-central based branch inside the # gecko checkout Required('comm-checkout', default=False): bool, }, # Items for the taskcluster index Optional('index'): { # Product to identify as in the taskcluster index Required('product'): _by_platform(basestring),
# treeherder environments (defaults to both staging and production) Required('environments', default=['production', 'staging']): ['production', 'staging'], }, # the provisioner-id/worker-type for the task 'worker-type': basestring, # information specific to the worker implementation that will run this task 'worker': Any({ 'implementation': Any('docker-worker', 'docker-engine'), # the docker image (in docker's `host/repo/image:tag` format) in which # to run the task; if omitted, this will be a reference to the image # generated by the 'docker-image' dependency, which must be defined in # 'dependencies' Optional('docker-image'): basestring, # worker features that should be enabled Required('relengapi-proxy', default=False): bool, Required('allow-ptrace', default=False): bool, Required('loopback-video', default=False): bool, Required('loopback-audio', default=False): bool, # caches to set up for the task 'caches': [{ # only one type is supported by any of the workers right now 'type': 'persistent', # name of the cache, allowing re-use by subsequent tasks naming the # same cache 'name': basestring,
text_type, Required("job-from"): task_description_schema["job-from"], Required("dependencies"): task_description_schema["dependencies"], Required("description"): task_description_schema["description"], Required("treeherder"): task_description_schema["treeherder"], Required("run-on-projects"): task_description_schema["run-on-projects"], Required("worker-type"): optionally_keyed_by("release-level", text_type), Required("worker"): object, Optional("scopes"): [text_type], Required("shipping-phase"): task_description_schema["shipping-phase"], Required("shipping-product"): task_description_schema["shipping-product"], Optional("extra"): task_description_schema["extra"], Optional("attributes"): task_description_schema["attributes"], }) transforms = TransformSequence() transforms.add_validate(push_snap_description_schema) @transforms.add
# "schema": { # "name": "digests", # "version": "1-0-0" # }, # "status": "success", # "summary": [] # } SCHEMA = S({"name": "digests", "version": Any("1-0-0")}) SUMMARY = S(list) DETAIL = S({Optional("artifact"): bool, "md5": md5_p, "path": str, "sha1": str, "sha256": str, "ssdeep": str}) DETAILS = S([DETAIL]) # digests schema for component (not package) COMPONENT_DIGESTS_SCHEMA = S({"_audit": Any(None, AUDIT), "_release": str, "details": DETAILS, "schema": SCHEMA,
Required, Exclusive, ) logger = logging.getLogger(__name__) # Voluptuous uses marker objects as dictionary *keys*, but they are not # comparable, so we cast all of the keys back to regular strings task_description_schema = {str(k): v for k, v in task_description_schema.schema.iteritems()} # Schema for a build description job_description_schema = Schema({ # The name of the job and the job's label. At least one must be specified, # and the label will be generated from the name if necessary, by prepending # the kind. Optional('name'): basestring, Optional('label'): basestring, # the following fields are passed directly through to the task description, # possibly modified by the run implementation. See # taskcluster/taskgraph/transforms/task.py for the schema details. Required('description'): task_description_schema['description'], Optional('attributes'): task_description_schema['attributes'], Optional('job-from'): task_description_schema['job-from'], Optional('dependencies'): task_description_schema['dependencies'], Optional('expires-after'): task_description_schema['expires-after'], Optional('routes'): task_description_schema['routes'], Optional('scopes'): task_description_schema['scopes'], Optional('tags'): task_description_schema['tags'], Optional('extra'): task_description_schema['extra'], Optional('notifications'): task_description_schema['notifications'],
def job(extra_context_variables=None):
    """Return the voluptuous validator for a whole LAVA job submission.

    :param extra_context_variables: additional context-variable names to
        accept in the job "context" dictionary, merged with the global
        CONTEXT_VARIABLES list. Defaults to no extra variables.
    :return: an ``All(...)`` validator combining the schema dictionary
        with the ``extra_checks`` callable.
    """
    # Fixed: the original signature used a mutable default argument
    # (``extra_context_variables=[]``); use a None sentinel instead.
    if extra_context_variables is None:
        extra_context_variables = []
    context_variables = CONTEXT_VARIABLES + extra_context_variables

    # Shape of a single lava-lxc protocol configuration.
    lava_lxc = {
        Required("name"): str,
        Required("distribution"): str,
        Required("release"): str,
        Optional("arch"): str,
        Optional("mirror"): str,
        Optional("persist"): bool,
        Optional("security_mirror"): str,
        Optional("template"): str,
        Optional("timeout"): timeout(),
        Optional("verbose"): bool,
    }
    return All(
        {
            Required("job_name"): All(str, Length(min=1, max=200)),
            Optional("device_type"): All(str, Length(min=1, max=200)),
            Required("timeouts"): {
                Required("job"): timeout(),
                Optional("action"): timeout(),
                Optional("actions"): {str: timeout()},
                Optional("connection"): timeout(),
                Optional("connections"): {str: timeout()},
            },
            Required("visibility"): Any("public", "personal", {"group": [str]}),
            # Only variables listed in context_variables are accepted.
            Optional("context"): Schema(
                {In(context_variables): Any(int, str, [int, str])}, extra=False
            ),
            Optional("metadata"): {str: object},
            Optional("priority"): Any(
                "high", "medium", "low", Range(min=0, max=100)
            ),
            Optional("tags"): [str],
            Optional("secrets"): dict,
            Optional("protocols"): {
                Optional("lava-lxc"): Any(lava_lxc, {str: lava_lxc}),
                Optional("lava-multinode"): {
                    Required("roles"): {
                        # A role is either a device_type role or a
                        # connection role (expect_role/host_role pair).
                        str: Any(
                            {
                                Required("device_type"): str,
                                Required("count"): Range(min=0),
                                Optional("context"): Schema(
                                    {
                                        In(context_variables): Any(
                                            int, str, [int, str]
                                        )
                                    },
                                    extra=False,
                                ),
                                Optional("tags"): [str],
                                Optional("timeout"): timeout(),
                            },
                            {
                                Required("connection"): str,
                                Required("count"): Range(min=0),
                                Required("expect_role"): str,
                                Required("host_role"): str,
                                Optional("request"): str,
                                Optional("tags"): [str],
                                Optional("timeout"): timeout(),
                                Optional("context"): Schema(
                                    {
                                        In(context_variables): Any(
                                            int, str, [int, str]
                                        )
                                    },
                                    extra=False,
                                ),
                            },
                        )
                    },
                    Optional("timeout"): timeout(),
                },
                Optional("lava-vland"): Any(
                    {str: {str: {Required("tags"): [str]}}},
                    {str: {Required("tags"): [str]}},
                ),
                Optional("lava-xnbd"): {
                    Required("port"): Any("auto", int),
                    Optional("timeout"): timeout(),
                },
            },
            Optional("notify"): notify(),
            Optional("reboot_to_fastboot"): bool,
            Required("actions"): [
                {Any("boot", "command", "deploy", "test"): dict}
            ],
        },
        extra_checks,
    )
add_scope_prefix, get_beetmover_bucket_scope, ) from gecko_taskgraph.util.taskcluster import get_artifact_prefix from gecko_taskgraph.transforms.task import task_description_schema from voluptuous import Any, Required, Optional from collections import defaultdict from copy import deepcopy beetmover_description_schema = schema.extend({ # depname is used in taskref's to identify the taskID of the unsigned things Required("depname", default="build"): str, # unique label to describe this beetmover task, defaults to {dep.label}-beetmover Optional("label"): str, Required("partner-bucket-scope"): optionally_keyed_by("release-level", str), Required("partner-public-path"): Any(None, str), Required("partner-private-path"): Any(None, str), Optional("extra"): object, Required("shipping-phase"): task_description_schema["shipping-phase"], Optional("shipping-product"): task_description_schema["shipping-product"], Optional("priority"): task_description_schema["priority"],
def _device_deploy_schema():
    # Validates the "deploy" section of a device dictionary: free-form
    # connections, mandatory methods, optional deploy parameters.
    deploy = {
        'connections': dict,
        Required('methods'): dict,
        Optional('parameters'): _simple_params(),
    }
    return Schema(deploy)
raise Invalid(f"Unsupported URL type {parsed.scheme}://") return schemas[parsed.scheme](data) return validate class RelPath(str): pass REMOTE_COMMON = { "url": str, "checksum_jobs": All(Coerce(int), Range(1)), "jobs": All(Coerce(int), Range(1)), Optional("no_traverse"): Bool, # obsoleted "verify": Bool, } LOCAL_COMMON = { "type": supported_cache_type, Optional("protected", default=False): Bool, # obsoleted "shared": All(Lower, Choices("group")), Optional("slow_link_warning", default=True): Bool, } HTTP_COMMON = { "auth": All(Lower, Choices("basic", "digest", "custom")), "custom_auth_header": str, "user": str, "password": str, "ask_password": Bool, "ssl_verify": Any(Bool, str),
from taskgraph.transforms.job import run_job_using from taskgraph.transforms.job.common import ( docker_worker_add_workspace_cache, docker_worker_setup_secrets, docker_worker_add_public_artifacts, docker_worker_support_vcs_checkout, ) haz_run_schema = Schema({ Required('using'): 'hazard', # The command to run within the task image (passed through to the worker) Required('command'): basestring, # The tooltool manifest to use; default in the script is used if omitted Optional('tooltool-manifest'): basestring, # The mozconfig to use; default in the script is used if omitted Optional('mozconfig'): basestring, # The set of secret names to which the task has access; these are prefixed # with `project/releng/goanna/{treeherder.kind}/level-{level}/`. Setting # this will enable any worker features required and set the task's scopes # appropriately. `true` here means ['*'], all secrets. Not supported on # Windows Required('secrets', default=False): Any(bool, [basestring]), }) @run_job_using("docker-worker", "hazard", schema=haz_run_schema) def docker_worker_hazard(config, job, taskdesc):
from voluptuous import All, Any, Maybe, Coerce, Length, Range, Match, Required, Optional, Schema SERVER_CONF_VALIDATOR = Schema({ Required('status'): Coerce(str), Required('message'): Coerce(str), Optional('gateway_uid'): All(Coerce(str), Match(r'^0x[0-9A-F]{8}$')), Optional('mqtt_topic', default='ota/global'): Coerce(str), Optional('mqtt_broker', default='broker.hivemq.com'): Coerce(str), Optional('end_device_multicast_addr', default='230.6.6.1:7777'): All( Coerce(str), Match( r'^(22[4-9]|230)(\.([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])){3}:\d{3,4}$' )), Optional('max_log_size', default='2'): All(Coerce(str), Match(r'^\d{1,2}$')), Optional('max_log_count', default='5'): All(Coerce(str), Match(r'^\d{1,2}$')), }) END_DEVICE_CONF_VALIDATOR = Schema({ Required('code'): Coerce(str), Required('id'): All(Coerce(str, msg='Invalid variable type, expected str'), Length(min=8, max=30, msg='Invalid Length, expected 8-30 char'),
# Allow ValueError to percolate up. unsupported = False try: value = int(v) except ValueError: unsupported = True else: unsupported = (value != 0) if unsupported: raise GadgetSpecificationError( 'Unsupported gadget.yaml format version: {}'.format(v)) return value GadgetYAML = Schema({ Optional('defaults'): { str: { str: object } }, Optional('connections'): [Schema({ Required('plug'): str, Optional('slot'): str, })], Optional('device-tree-origin', default='gadget'): str, Optional('device-tree'): str, Optional('format'): YAMLFormat,
def get_variant(test_platform):
    """Return the first VARIANTS entry embedded in *test_platform*, or ""."""
    for v in VARIANTS:
        if "-{}/".format(v) in test_platform:
            return v
    return ""


mozharness_test_run_schema = Schema({
    Required("using"): "mozharness-test",

    # test description, as validated by the test transforms
    Required("test"): test_description_schema,

    # Base work directory used to set up the task.
    Optional("workdir"): text_type,
})


def test_packages_url(taskdesc):
    """Account for different platforms that name their test packages
    differently.

    :param taskdesc: task description whose run.test entry describes the
        test platform.
    :return: the artifact URL for target.test_packages.json, with "en-US"
        spliced in for android shippable builds.
    """
    artifact_url = get_artifact_url(
        "<build>", get_artifact_path(taskdesc, "target.test_packages.json"))
    # for android shippable we need to add 'en-US' to the artifact url
    test = taskdesc["run"]["test"]
    if "android" in test["test-platform"] and (get_variant(
            test["test-platform"]) in ("shippable", "shippable-qr")):
        # Splice "en-US" in as the second-to-last URL path segment.
        # This is a URL, not a filesystem path, so always use "/";
        # os.path.join would emit backslashes on Windows, producing an
        # invalid URL.
        head, sep, tail = artifact_url.rpartition("/")
        artifact_url = head + sep + "en-US/" + tail
    return artifact_url
def schema():
    """Return the schema for the kexec boot method: the generic boot
    schema extended with kexec-specific keys."""
    extra = {
        Required("method"): Msg("kexec", "'method' should be 'kexec'"),
        Required("boot_message"): str,
        Optional("prompts"): boot.prompts(),
        # TODO: if auto_login => prompt is required
        Optional("auto_login"): boot.auto_login(),
        Optional("deploy"): bool,
        Optional("command"): str,
        Optional("kernel"): str,
        Optional("dtb"): str,
        Optional("initrd"): str,
        Optional("options"): [str],
        Optional("kernel-config"): str,
        Optional("transfer_overlay"): boot.transfer_overlay(),
        Optional("on_panic"): bool,
    }
    # Merge: kexec-specific keys override the generic boot schema.
    merged = dict(boot.schema())
    merged.update(extra)
    return merged
import taskgraph CACHE_TYPE = 'toolchains.v2' toolchain_run_schema = Schema({ Required('using'): 'toolchain-script', # The script (in taskcluster/scripts/misc) to run. # Python scripts are invoked with `mach python` so vendored libraries # are available. Required('script'): basestring, # Arguments to pass to the script. Optional('arguments'): [basestring], # If not false, tooltool downloads will be enabled via relengAPIProxy # for either just public files, or all files. Not supported on Windows Required('tooltool-downloads'): Any( False, 'public', 'internal', ), # Sparse profile to give to checkout using `run-task`. If given, # a filename in `build/sparse-profiles`. Defaults to # "toolchain-build", i.e., to # `build/sparse-profiles/toolchain-build`. If `None`, instructs # `run-task` to not use a sparse profile at all.
from taskgraph.transforms.task import task_description_schema from voluptuous import ( Extra, Optional, Required, Exclusive, ) logger = logging.getLogger(__name__) # Schema for a build description job_description_schema = Schema({ # The name of the job and the job's label. At least one must be specified, # and the label will be generated from the name if necessary, by prepending # the kind. Optional('name'): basestring, Optional('label'): basestring, # the following fields are passed directly through to the task description, # possibly modified by the run implementation. See # taskcluster/taskgraph/transforms/task.py for the schema details. Required('description'): task_description_schema['description'], Optional('attributes'): task_description_schema['attributes'], Optional('job-from'): task_description_schema['job-from'], Optional('dependencies'): task_description_schema['dependencies'],
def _interactive_script_schema():
    """Schema for the list of interactive test script definitions."""
    failure = {
        Required("message"): str,
        Optional("exception"): Any(
            "InfrastructureError", "JobError", "TestError"
        ),
        Optional("error"): str,
    }
    step = {
        Optional("name"): Match(
            r"^[a-zA-Z0-9-_]+$", msg=INVALID_CHARACTER_ERROR_MSG
        ),
        Optional("command"): Any(None, str),
        Optional("delay"): int,
        Optional("lava-send"): str,
        Optional("lava-sync"): str,
        Optional("lava-wait"): str,
        Optional("lava-wait-all"): str,
        Optional("wait_for_prompt"): bool,
        Optional("failures"): [failure],
        Optional("successes"): [{Required("message"): str}],
    }
    return Schema([step])
# Dont enable VA-API if CUDA is available if (os.getenv(ENV_VAAPI_SUPPORTED) == "true" and os.getenv(ENV_CUDA_SUPPORTED) != "true"): return HWACCEL_VAAPI return hwaccel_args STREAM_SCEHMA = Schema({ Required("stream_format", default="rtsp"): Any("rtsp", "rtmp", "mjpeg"), Required("path"): All(str, Length(min=1)), Required("port"): All(int, Range(min=1)), Optional("width", default=None): Maybe(int), Optional("height", default=None): Maybe(int), Optional("fps", default=None): Maybe(All(int, Range(min=1))), Optional("input_args", default=None): Maybe(list), Optional("hwaccel_args", default=CAMERA_HWACCEL_ARGS): check_for_hwaccels, Optional("codec", default=""): str, Optional("audio_codec", default="unset"): Maybe(str), Optional("rtsp_transport", default="tcp"): Any("tcp", "udp", "udp_multicast", "http"),
def _device_user_commands():
    """Schema for user commands: each named command has a "do" string and
    an optional "undo" string."""
    command = {Required("do"): str, Optional("undo"): str}
    return Schema({All(str): command})
text_type, Required("filesize"): int, Required("private-artifact"): bool, Required("signing-formats"): [Any(*SUPPORTED_SIGNING_FORMATS)], Required("requestor"): basestring, Required("reason"): basestring, Required("artifact-name"): basestring, Required("fetch"): Any( { Optional("gpg-signature"): basestring, Optional('type'): 'static-url', Required('url'): basestring, }, { Required('type'): 'bmo-attachment', Required('attachment-id'): Any(basestring, int) }), Required("manifest_name"): basestring, }) def check_manifest(manifest): # XXX add any manifest checks we want. # XXX sha256 is a valid sha256? # XXX url is a reachable url?
def _device_commands_schema():
    """Schema for the "commands" section of a device dictionary."""
    commands = {
        # Arbitrary command entries may be a list, dict or plain string.
        All(str): Any(list, dict, str),
        Optional("connections"): _device_connections_commands(),
        Optional("users"): _device_user_commands(),
    }
    return Schema(commands)
from taskgraph.transforms.task import task_description_schema
from voluptuous import Any, Required, Optional

# Voluptuous uses marker objects as dictionary *keys*, but they are not
# comparable, so we cast all of the keys back to regular strings
task_description_schema = {
    str(k): v for k, v in task_description_schema.schema.iteritems()
}

transforms = TransformSequence()

# A string, or a task-reference wrapper around a string.
taskref_or_string = Any(
    basestring,
    {Required('task-reference'): basestring},
)

beetmover_checksums_description_schema = schema.extend({
    Required('depname', default='build'): basestring,
    Optional('label'): basestring,
    Optional('treeherder'): task_description_schema['treeherder'],
    Optional('locale'): basestring,
    Optional('shipping-phase'): task_description_schema['shipping-phase'],
    Optional('shipping-product'): task_description_schema['shipping-product'],
})


@transforms.add
def validate(config, jobs):
    """Check each job against the beetmover-checksums schema, passing it
    through unchanged."""
    for job in jobs:
        # Use the primary dependency's label for error messages when one
        # is present; fall back to a placeholder otherwise.
        dep = job.get('primary-dependency', object)
        label = dep.__dict__.get('label', '?no-label?')
        validate_schema(
            beetmover_checksums_description_schema, job,
            "In checksums-signing ({!r} kind) task for {!r}:".format(
                config.kind, label))
        yield job