def test_extend_schema_twice(self):
    "Extending a schema twice applies taskgraph checks."
    with self.assertRaises(Exception):
        Schema({"kebab-case": int}).extend({"more-kebab": int}).extend(
            {"camelCase": int}
        )
def test_worker_caches(task, transform):
    config, job, taskdesc, impl = transform(task)
    add_cache(job, taskdesc, "cache1", "/cache1")
    add_cache(job, taskdesc, "cache2", "/cache2", skip_untrusted=True)

    if impl not in ("docker-worker", "generic-worker"):
        pytest.xfail(f"caches not implemented for '{impl}'")

    key = "caches" if impl == "docker-worker" else "mounts"
    assert key in taskdesc["worker"]
    assert len(taskdesc["worker"][key]) == 2

    # Create a new schema object with just the part relevant to caches.
    partial_schema = Schema(payload_builders[impl].schema.schema[key])
    validate_schema(partial_schema, taskdesc["worker"][key], "validation error")
    configure_taskdesc_for_run,
)
from gecko_taskgraph.transforms.job.common import (
    docker_worker_add_artifacts,
    generic_worker_add_artifacts,
)

sm_run_schema = Schema(
    {
        Required("using"): Any(
            "spidermonkey",
            "spidermonkey-package",
        ),
        # SPIDERMONKEY_VARIANT and SPIDERMONKEY_PLATFORM
        Required("spidermonkey-variant"): str,
        Optional("spidermonkey-platform"): str,
        # Base work directory used to set up the task.
        Optional("workdir"): str,
        Required("tooltool-downloads"): Any(
            False,
            "public",
            "internal",
        ),
    }
)


@run_job_using("docker-worker", "spidermonkey", schema=sm_run_schema)
@run_job_using("docker-worker", "spidermonkey-package", schema=sm_run_schema)
def docker_worker_spidermonkey(config, job, taskdesc):
    run = job["run"]
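    # For reference, a hypothetical `run` stanza accepted by sm_run_schema
    # above (values are illustrative, not taken from the tree):
    #     {
    #         "using": "spidermonkey",
    #         "spidermonkey-variant": "plain",
    #         "tooltool-downloads": False,
    #     }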
toolchain_run_schema = Schema(
    {
        Required("using"): "toolchain-script",
        # The script (in taskcluster/scripts/misc) to run.
        # Python scripts are invoked with `mach python` so vendored libraries
        # are available.
        Required("script"): str,
        # Arguments to pass to the script.
        Optional("arguments"): [str],
        # If not false, tooltool downloads will be enabled via relengAPIProxy
        # for either just public files, or all files. Not supported on Windows.
        Required("tooltool-downloads"): Any(
            False,
            "public",
            "internal",
        ),
        # Sparse profile to give to checkout using `run-task`. Defaults to
        # "toolchain-build". The value is relative to "sparse-profile-prefix",
        # optionally defined below, which defaults to "build/sparse-profiles",
        # i.e. `build/sparse-profiles/toolchain-build`. If `None`, instructs
        # `run-task` not to use a sparse profile at all.
        Required("sparse-profile"): Any(str, None),
        # The relative path to the sparse profile.
        Optional("sparse-profile-prefix"): str,
        # Paths/patterns pointing to files that influence the outcome of a
        # toolchain build.
        Optional("resources"): [str],
        # Path to the artifact produced by the toolchain job.
        Required("toolchain-artifact"): str,
        Optional(
            "toolchain-alias",
            description="An alias that can be used instead of the real toolchain job name in "
            "fetch stanzas for jobs.",
        ): Any(str, [str]),
        # Base work directory used to set up the task.
        Optional("workdir"): str,
    }
)
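# A minimal sketch (hypothetical script name and artifact path) of a `run`
# stanza that toolchain_run_schema would accept.
example_toolchain_run = {
    "using": "toolchain-script",
    "script": "build-example.sh",
    "tooltool-downloads": False,
    "sparse-profile": "toolchain-build",
    "toolchain-artifact": "public/build/example.tar.zst",
}
# toolchain_run_schema(example_toolchain_run) returns the validated dict, or
# raises voluptuous.MultipleInvalid if a required key is missing or mistyped.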
docker_image_schema = Schema(
    {
        # Name of the docker image.
        Required("name"): str,
        # Name of the parent docker image.
        Optional("parent"): str,
        # Treeherder symbol.
        Required("symbol"): str,
        # Relative path (from config.path) to the file the docker image was
        # defined in.
        Optional("job-from"): str,
        # Arguments to use for the Dockerfile.
        Optional("args"): {str: str},
        # Name of the docker image definition under taskcluster/docker, when
        # different from the docker image name.
        Optional("definition"): str,
        # List of package tasks this docker image depends on.
        Optional("packages"): [str],
        Optional(
            "index",
            description="information for indexing this build so its artifacts can be discovered",
        ): task_description_schema["index"],
        Optional(
            "cache",
            description="Whether this image should be cached based on inputs.",
        ): bool,
    }
)
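# A hypothetical image definition that docker_image_schema would accept; the
# image, parent, and package names are illustrative, not real in-tree values.
example_image = {
    "name": "example-build",
    "symbol": "I(eb)",
    "parent": "debian12-base",
    "packages": ["deb12-mercurial"],
}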
source_test_description_schema = Schema(
    {
        # most fields are passed directly through as job fields, and are not
        # repeated here
        Extra: object,
        # The platform on which this task runs. This will be used to set up
        # attributes (for try selection) and treeherder metadata (for display).
        # If given as a list, the job will be "split" into multiple tasks, one
        # with each platform.
        Required("platform"): Any(str, [str]),
        # Build labels required for the task. If this key is provided it must
        # contain a build label for the task platform.
        # The task will then depend on a build task, and the installer url will
        # be saved to the GECKO_INSTALLER_URL environment variable.
        Optional("require-build"): optionally_keyed_by("project", {str: str}),
        # These fields can be keyed by "platform", and are otherwise identical
        # to job descriptions.
        Required("worker-type"): optionally_keyed_by(
            "platform", job_description_schema["worker-type"]
        ),
        Required("worker"): optionally_keyed_by(
            "platform", job_description_schema["worker"]
        ),
        Optional("python-version"): [int],
        Optional("dependencies"): {
            k: optionally_keyed_by("platform", v)
            for k, v in job_description_schema["dependencies"].items()
        },
        # A list of artifacts to install from 'fetch' tasks.
        Optional("fetches"): {
            str: optionally_keyed_by(
                "platform", job_description_schema["fetches"][str]
            ),
        },
    }
)
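# A hypothetical source-test entry this schema would accept; the platform and
# worker-type values are illustrative. Extra fields (like "description") pass
# through unchecked via `Extra: object`.
example_source_test = {
    "platform": "linux1804-64/opt",
    "worker-type": "t-linux-xlarge",
    "worker": {"max-run-time": 3600},
    "description": "run an example source check",
}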
mozharness_run_schema = Schema(
    {
        Required("using"): "mozharness",
        # the mozharness script used to run this task, relative to the testing/
        # directory and using forward slashes even on Windows
        Required("script"): str,
        # Additional paths to look for mozharness configs in. These should be
        # relative to the base of the source checkout.
        Optional("config-paths"): [str],
        # the config files required for the task, relative to
        # testing/mozharness/configs or one of the paths specified in
        # `config-paths` and using forward slashes even on Windows
        Required("config"): [str],
        # any additional actions to pass to the mozharness command
        Optional("actions"): [
            Match("^[a-z0-9-]+$", "actions must be `-` separated alphanumeric strings")
        ],
        # any additional options (without leading --) to be passed to mozharness
        Optional("options"): [
            Match(
                "^[a-z0-9-]+(=[^ ]+)?$",
                "options must be `-` separated alphanumeric strings (with optional argument)",
            )
        ],
        # --custom-build-variant-cfg value
        Optional("custom-build-variant-cfg"): str,
        # Extra configuration options to pass to mozharness.
        Optional("extra-config"): dict,
        # If not false, tooltool downloads will be enabled via relengAPIProxy
        # for either just public files, or all files. Not supported on Windows.
        Required("tooltool-downloads"): Any(
            False,
            "public",
            "internal",
        ),
        # The set of secret names to which the task has access; these are
        # prefixed with `project/releng/gecko/{treeherder.kind}/level-{level}/`.
        # Setting this will enable any worker features required and set the
        # task's scopes appropriately. `true` here means ['*'], all secrets.
        # Not supported on Windows.
        Required("secrets"): Any(bool, [str]),
        # If true, taskcluster proxy will be enabled; note that it may also be
        # enabled automatically, e.g. for secrets support. Not supported on
        # Windows.
        Required("taskcluster-proxy"): bool,
        # If true, the build scripts will start Xvfb. Not supported on Windows.
        Required("need-xvfb"): bool,
        # If false, indicate that builds should skip producing artifacts. Not
        # supported on Windows.
        Required("keep-artifacts"): bool,
        # If specified, use the in-tree job script specified.
        Optional("job-script"): str,
        Required("requires-signed-builds"): bool,
        # Whether or not to use caches.
        Optional("use-caches"): bool,
        # If false, don't set MOZ_SIMPLE_PACKAGE_NAME.
        # Only disableable on Windows.
        Required("use-simple-package"): bool,
        # If false, don't pass --branch to the mozharness script.
        # Only disableable on Windows.
        Required("use-magic-mh-args"): bool,
        # if true, perform a checkout of a comm-central based branch inside the
        # gecko checkout
        Required("comm-checkout"): bool,
        # Base work directory used to set up the task.
        Optional("workdir"): str,
    }
)
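# A minimal sketch (hypothetical script and config names) of a `run` stanza
# accepted by mozharness_run_schema. In practice most of these Required keys
# are filled in by per-kind defaults before validation.
example_mozharness_run = {
    "using": "mozharness",
    "script": "mozharness/scripts/example_build.py",
    "config": ["builds/example_config.py"],
    "tooltool-downloads": False,
    "secrets": False,
    "taskcluster-proxy": False,
    "need-xvfb": False,
    "keep-artifacts": True,
    "requires-signed-builds": False,
    "use-simple-package": True,
    "use-magic-mh-args": True,
    "comm-checkout": False,
}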
from gecko_taskgraph.transforms.base import TransformSequence
from gecko_taskgraph.transforms.task import task_description_schema
from gecko_taskgraph.util.schema import optionally_keyed_by, resolve_keyed_by, Schema
from gecko_taskgraph.util.scriptworker import add_scope_prefix
from voluptuous import Optional, Required

push_flatpak_description_schema = Schema(
    {
        Required("name"): str,
        Required("job-from"): task_description_schema["job-from"],
        Required("dependencies"): task_description_schema["dependencies"],
        Required("description"): task_description_schema["description"],
        Required("treeherder"): task_description_schema["treeherder"],
        Required("run-on-projects"): task_description_schema["run-on-projects"],
        Required("worker-type"): optionally_keyed_by("release-level", str),
        Required("worker"): object,
        Optional("scopes"): [str],
        Required("shipping-phase"): task_description_schema["shipping-phase"],
        Required("shipping-product"): task_description_schema["shipping-product"],
        Optional("extra"): task_description_schema["extra"],
        Optional("attributes"): task_description_schema["attributes"],
    }
)

transforms = TransformSequence()
transforms.add_validate(push_flatpak_description_schema)


@transforms.add
def make_task_description(config, jobs):
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Support for running mach python-test tasks (via run-task)
"""

from gecko_taskgraph.transforms.job import run_job_using, configure_taskdesc_for_run
from gecko_taskgraph.util.schema import Schema
from voluptuous import Required, Optional

python_test_schema = Schema(
    {
        Required("using"): "python-test",
        # Python version to use
        Required("python-version"): int,
        # The subsuite to run
        Required("subsuite"): str,
        # Base work directory used to set up the task.
        Optional("workdir"): str,
    }
)

defaults = {
    "python-version": 3,
    "subsuite": "default",
}


@run_job_using("docker-worker", "python-test",
run_task_schema = Schema(
    {
        Required("using"): "run-task",
        # if true, add a cache at ~worker/.cache, which is where things like pip
        # tend to hide their caches. This cache is never added for level-1 jobs.
        # TODO Once bug 1526028 is fixed, this and 'use-caches' should be merged.
        Required("cache-dotcache"): bool,
        # Whether or not to use caches.
        Optional("use-caches"): bool,
        # if true (the default), perform a checkout of gecko on the worker
        Required("checkout"): bool,
        Optional(
            "cwd",
            description="Path to run command in. If a checkout is present, the path "
            "to the checkout will be interpolated with the key `checkout`",
        ): str,
        # The sparse checkout profile to use. Value is the filename relative to
        # "sparse-profile-prefix", which defaults to "build/sparse-profiles/".
        Required("sparse-profile"): Any(str, None),
        # The relative path to the sparse profile.
        Optional("sparse-profile-prefix"): str,
        # if true, perform a checkout of a comm-central based branch inside the
        # gecko checkout
        Required("comm-checkout"): bool,
        # The command arguments to pass to the `run-task` script, after the
        # checkout arguments. If a list, it will be passed directly; otherwise
        # it will be included in a single argument to `bash -cx`.
        Required("command"): Any([taskref_or_string], taskref_or_string),
        # Context to substitute into the command using format string
        # substitution (e.g. {value}). This is useful if certain aspects of the
        # command need to be generated in transforms.
        Optional("command-context"): dict,
        # Base work directory used to set up the task.
        Optional("workdir"): str,
        # If not false, tooltool downloads will be enabled via relengAPIProxy
        # for either just public files, or all files. Only supported on
        # docker-worker.
        Required("tooltool-downloads"): Any(
            False,
            "public",
            "internal",
        ),
        # Whether to run as root. (defaults to False)
        Optional("run-as-root"): bool,
    }
)
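# A minimal sketch (hypothetical command) of a `run` stanza that
# run_task_schema would accept once defaults are applied.
example_run_task = {
    "using": "run-task",
    "cache-dotcache": False,
    "checkout": True,
    "sparse-profile": None,
    "comm-checkout": False,
    "command": ["echo", "hello"],
    "tooltool-downloads": False,
}
# run_task_schema(example_run_task) validates and returns the dict; a plain
# string command such as "echo hello" would also validate, and per the schema
# comment above it would be wrapped in a single `bash -cx` argument.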
def test_schema(self):
    "Creating a schema applies taskgraph checks."
    with self.assertRaises(Exception):
        Schema({"camelCase": int})
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import unittest

from mozunit import main

from gecko_taskgraph.util.schema import (
    validate_schema,
    resolve_keyed_by,
    Schema,
)

schema = Schema(
    {
        "x": int,
        "y": str,
    }
)


class TestValidateSchema(unittest.TestCase):
    def test_valid(self):
        validate_schema(schema, {"x": 10, "y": "foo"}, "pfx")

    def test_invalid(self):
        try:
            validate_schema(schema, {"x": "not-int"}, "pfx")
            self.fail("no exception raised")
        except Exception as e:
            self.assertTrue(str(e).startswith("pfx\n"))


class TestCheckSchema(unittest.TestCase):
common_schema = Schema(
    {
        # URL/SHA256 of a source file to build, which can be either a source
        # control file (.dsc) or a tarball.
        Required(Any("dsc", "tarball")): source_definition,
        # Package name. Normally derived from the source control or tarball
        # file name. Use in case the name doesn't match DSC_PACKAGE_RE or
        # SOURCE_PACKAGE_RE.
        Optional("name"): str,
        # Patch to apply to the extracted source.
        Optional("patch"): str,
        # Command to run before dpkg-buildpackage.
        Optional("pre-build-command"): str,
        # Architecture to build the package for.
        Optional("arch"): str,
        # List of package tasks to get build dependencies from.
        Optional("packages"): [str],
        # What resolver to use to install build dependencies. The default
        # (apt-get) is good in most cases, but in subtle cases involving
        # a *-backports archive, its solver might not be able to find a
        # solution that satisfies the build dependencies.
        Optional("resolver"): Any("apt-get", "aptitude"),
        # Base work directory used to set up the task.
        Required("workdir"): str,
    }
)
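# A hypothetical package definition accepted by common_schema; the URL is
# illustrative, and `source_definition` (defined elsewhere in this file) is
# assumed here to take a url/sha256 pair.
example_package = {
    "dsc": {
        "url": "https://example.com/pool/main/e/example/example_1.0-1.dsc",
        "sha256": "0" * 64,
    },
    "workdir": "/builds/worker",
}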
perftest_description_schema = Schema(
    {
        # The test names and the symbols to use for them: [test-symbol, test-path]
        Optional("perftest"): [[str]],
        # Metrics to gather for the test. These will be merged
        # with options specified through perftest-perfherder-global.
        Optional("perftest-metrics"): optionally_keyed_by(
            "perftest",
            Any(
                [str],
                {str: Any(None, {str: Any(None, str, [str])})},
            ),
        ),
        # Perfherder data options that will be applied to
        # all metrics gathered.
        Optional("perftest-perfherder-global"): optionally_keyed_by(
            "perftest", {str: Any(None, str, [str])}
        ),
        # Extra options to add to the test's command.
        Optional("perftest-extra-options"): optionally_keyed_by("perftest", [str]),
        # Variants of the test to make based on extra browsertime
        # arguments. Expecting:
        #     [variant-suffix, options-to-use]
        # If variant-suffix is `null` then the options will be added
        # to the existing task. Otherwise, a new variant is created
        # with the given suffix and with its options replaced.
        Optional("perftest-btime-variants"): optionally_keyed_by(
            "perftest", [[Any(None, str)]]
        ),
        # These options will be parsed in the next schemas.
        Extra: object,
    }
)
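# A hypothetical perftest entry this schema would accept; the symbol, test
# path, and options are illustrative.
example_perftest = {
    "perftest": [["view", "testing/performance/perftest_example.js"]],
    "perftest-metrics": ["speedIndex"],
    "perftest-extra-options": ["--hooks", "hooks.py"],
}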
beetmover_push_to_release_description_schema = Schema(
    {
        Required("name"): str,
        Required("product"): str,
        Required("treeherder-platform"): str,
        Optional("attributes"): {str: object},
        Optional("job-from"): task_description_schema["job-from"],
        Optional("run"): {str: object},
        Optional("run-on-projects"): task_description_schema["run-on-projects"],
        Optional("dependencies"): {str: taskref_or_string},
        Optional("index"): {str: str},
        Optional("routes"): [str],
        Required("shipping-phase"): task_description_schema["shipping-phase"],
        Required("shipping-product"): task_description_schema["shipping-product"],
        Optional("extra"): task_description_schema["extra"],
    }
)
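# A hypothetical job definition accepted by this schema; the name and phase
# values are illustrative, with "shipping-phase" validated against
# task_description_schema.
example_push_to_release = {
    "name": "push-firefox-to-release",
    "product": "firefox",
    "treeherder-platform": "linux64/opt",
    "shipping-phase": "push",
    "shipping-product": "firefox",
}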
raptor_description_schema = Schema(
    {
        # Raptor specific configs.
        Optional("apps"): optionally_keyed_by("test-platform", "subtest", [str]),
        Optional("raptor-test"): str,
        Optional("raptor-subtests"): optionally_keyed_by("app", "test-platform", list),
        Optional("activity"): optionally_keyed_by("app", str),
        Optional("binary-path"): optionally_keyed_by("app", str),
        # Configs defined in the 'test_description_schema'.
        Optional("max-run-time"): optionally_keyed_by(
            "app", "subtest", "test-platform", test_description_schema["max-run-time"]
        ),
        Optional("run-on-projects"): optionally_keyed_by(
            "app",
            "test-name",
            "raptor-test",
            "subtest",
            "variant",
            test_description_schema["run-on-projects"],
        ),
        Optional("variants"): optionally_keyed_by(
            "app", "subtest", test_description_schema["variants"]
        ),
        Optional("target"): optionally_keyed_by("app", test_description_schema["target"]),
        Optional("tier"): optionally_keyed_by(
            "app", "raptor-test", "subtest", "variant", test_description_schema["tier"]
        ),
        Optional("test-url-param"): optionally_keyed_by("subtest", "test-platform", str),
        Optional("run-visual-metrics"): optionally_keyed_by("app", bool),
        Required("test-name"): test_description_schema["test-name"],
        Required("test-platform"): test_description_schema["test-platform"],
        Required("require-signed-extensions"): test_description_schema[
            "require-signed-extensions"
        ],
        Required("treeherder-symbol"): test_description_schema["treeherder-symbol"],
        # Any unrecognized keys will be validated against the test_description_schema.
        Extra: object,
    }
)
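# A hypothetical raptor test entry; the test and platform names are
# illustrative, and the required field types come from test_description_schema.
example_raptor_test = {
    "test-name": "browsertime-example",
    "test-platform": "linux1804-64-shippable/opt",
    "require-signed-extensions": False,
    "treeherder-symbol": "Btime(example)",
    "apps": ["firefox"],
}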
"ccov", ] def get_variant(test_platform): for v in VARIANTS: if f"-{v}/" in test_platform: return v return "" mozharness_test_run_schema = Schema({ Required("using"): "mozharness-test", Required("test"): test_description_schema, # Base work directory used to set up the task. Optional("workdir"): str, }) def test_packages_url(taskdesc): """Account for different platforms that name their test packages differently""" artifact_url = get_artifact_url( "<build>", get_artifact_path(taskdesc, "target.test_packages.json")) # for android shippable we need to add 'en-US' to the artifact url test = taskdesc["run"]["test"] if "android" in test["test-platform"] and (get_variant( test["test-platform"]) in ("shippable", "shippable-qr", "shippable-lite", "shippable-lite-qr")):
job_description_schema = Schema(
    {
        # The name of the job and the job's label. At least one must be
        # specified, and the label will be generated from the name if
        # necessary, by prepending the kind.
        Optional("name"): str,
        Optional("label"): str,
        # the following fields are passed directly through to the task
        # description, possibly modified by the run implementation. See
        # taskcluster/gecko_taskgraph/transforms/task.py for the schema details.
        Required("description"): task_description_schema["description"],
        Optional("attributes"): task_description_schema["attributes"],
        Optional("job-from"): task_description_schema["job-from"],
        Optional("dependencies"): task_description_schema["dependencies"],
        Optional("if-dependencies"): task_description_schema["if-dependencies"],
        Optional("soft-dependencies"): task_description_schema["soft-dependencies"],
        Optional("requires"): task_description_schema["requires"],
        Optional("expires-after"): task_description_schema["expires-after"],
        Optional("routes"): task_description_schema["routes"],
        Optional("scopes"): task_description_schema["scopes"],
        Optional("tags"): task_description_schema["tags"],
        Optional("extra"): task_description_schema["extra"],
        Optional("treeherder"): task_description_schema["treeherder"],
        Optional("index"): task_description_schema["index"],
        Optional("run-on-projects"): task_description_schema["run-on-projects"],
        Optional("shipping-phase"): task_description_schema["shipping-phase"],
        Optional("shipping-product"): task_description_schema["shipping-product"],
        Optional("always-target"): task_description_schema["always-target"],
        Exclusive("optimization", "optimization"): task_description_schema[
            "optimization"
        ],
        Optional("use-sccache"): task_description_schema["use-sccache"],
        Optional("release-artifacts"): task_description_schema["release-artifacts"],
        Optional("priority"): task_description_schema["priority"],
        # The "when" section contains descriptions of the circumstances under
        # which this task should be included in the task graph. This will be
        # converted into an optimization, so it cannot be specified in a job
        # description that also gives 'optimization'.
        Exclusive("when", "optimization"): {
            # This task only needs to be run if a file matching one of the given
            # patterns has changed in the push. The patterns use the mozpack
            # match function (python/mozbuild/mozpack/path.py).
            Optional("files-changed"): [str],
        },
        # A list of artifacts to install from 'fetch' tasks.
        Optional("fetches"): {
            str: [
                str,
                {
                    Required("artifact"): str,
                    Optional("dest"): str,
                    Optional("extract"): bool,
                    Optional("verify-hash"): bool,
                },
            ],
        },
        # A description of how to run this job.
        "run": {
            # The key to a job implementation in a peer module to this one.
            "using": str,
            # Base work directory used to set up the task.
            Optional("workdir"): str,
            # Any remaining content is verified against that job
            # implementation's own schema.
            Extra: object,
        },
        Required("worker-type"): task_description_schema["worker-type"],
        # This object will be passed through to the task description, with
        # additions provided by the job's run-using function.
        Optional("worker"): dict,
    }
)
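# A minimal sketch (hypothetical values) of a job description accepted by
# job_description_schema; the worker-type is illustrative. The contents of
# `run` beyond "using" are validated later by the run-using implementation.
example_job = {
    "description": "build an example artifact",
    "worker-type": "b-linux-gcp",
    "worker": {"max-run-time": 3600},
    "run": {
        "using": "run-task",
        "command": "echo hello",
    },
}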
from voluptuous import Required, Optional, Any

mach_schema = Schema(
    {
        Required("using"): "mach",
        # The mach command (omitting `./mach`) to run.
        Required("mach"): taskref_or_string,
        # The version of Python to run with. Either an absolute path to the
        # binary on the worker, or a version identifier (e.g. python2.7 or
        # 3.8). No validation is performed to ensure the specified binaries
        # actually exist.
        Optional("python-version"): Any(str, int, float),
        # The sparse checkout profile to use. Value is the filename relative
        # to the directory where sparse profiles are defined
        # (build/sparse-profiles/).
        Optional("sparse-profile"): Any(str, None),
        # if true, perform a checkout of a comm-central based branch inside the
        # gecko checkout
        Required("comm-checkout"): bool,
        # Base work directory used to set up the task.
        Optional("workdir"): str,
        # Context to substitute into the command using format string
        # substitution (e.g. {value}). This is useful if certain aspects of the
        # command need to be generated in transforms.
        Optional("command-context"): dict,
    }
)

defaults = {
from gecko_taskgraph.transforms.job.common import (
    setup_secrets,
    docker_worker_add_artifacts,
    add_tooltool,
)

haz_run_schema = Schema(
    {
        Required("using"): "hazard",
        # The command to run within the task image (passed through to the worker).
        Required("command"): str,
        # The mozconfig to use; default in the script is used if omitted.
        Optional("mozconfig"): str,
        # The set of secret names to which the task has access; these are
        # prefixed with `project/releng/gecko/{treeherder.kind}/level-{level}/`.
        # Setting this will enable any worker features required and set the
        # task's scopes appropriately. `true` here means ['*'], all secrets.
        # Not supported on Windows.
        Optional("secrets"): Any(bool, [str]),
        # Base work directory used to set up the task.
        Optional("workdir"): str,
    }
)


@run_job_using("docker-worker", "hazard", schema=haz_run_schema)
def docker_worker_hazard(config, job, taskdesc):
    run = job["run"]
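    # For reference, a hypothetical `run` stanza accepted by haz_run_schema
    # above (values are illustrative, not taken from the tree):
    #     {
    #         "using": "hazard",
    #         "command": "build-haz-linux.sh",
    #         "secrets": False,
    #     }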