Example #1
def make_task_description(config, jobs):
    for job in jobs:
        if len(job["dependencies"]) != 1:
            raise Exception("Exactly 1 dependency is required")

        job["worker"]["upstream-artifacts"] = generate_upstream_artifacts(
            job["dependencies"]
        )

        resolve_keyed_by(
            job,
            "worker.channel",
            item_name=job["name"],
            **{"release-type": config.params["release_type"]},
        )
        resolve_keyed_by(
            job,
            "worker-type",
            item_name=job["name"],
            **{"release-level": config.params.release_level()},
        )
        if config.params.release_level() == "production":
            job.setdefault("scopes", []).append(
                add_scope_prefix(
                    config,
                    "flathub:firefox:{}".format(job["worker"]["channel"]),
                )
            )

        yield job
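
For reference, a minimal sketch of the kind of job definition the transform above expects, with the keyed-by structure spelled out. The field names mirror Example #1; the concrete channels, worker-type names, and dependency label are invented for illustration.

# Hypothetical input fragment for make_task_description above; only the
# structure is meaningful, the values are made up.
job = {
    "name": "firefox-flatpak",
    "dependencies": {"build-signing": "<build-signing-task>"},  # exactly one
    "worker": {
        "channel": {
            "by-release-type": {
                "beta": "beta",
                "release": "stable",
                "default": "nightly",
            },
        },
    },
    "worker-type": {
        "by-release-level": {
            "production": "scriptworker-prod",  # invented worker-type names
            "default": "scriptworker-dev",
        },
    },
}

# With release_type == "beta" and release_level() == "staging", the two
# resolve_keyed_by calls rewrite the job in place, roughly to:
#   job["worker"]["channel"] == "beta"
#   job["worker-type"] == "scriptworker-dev"
# and, because the level is not "production", no flathub scope is added.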
Example #2
def populate_repack_manifests_url(config, tasks):
    for task in tasks:
        partner_url_config = get_partner_url_config(config.params,
                                                    config.graph_config)

        for k in partner_url_config:
            if config.kind.startswith(k):
                task["worker"].setdefault(
                    "env", {})["REPACK_MANIFESTS_URL"] = partner_url_config[k]
                break
        else:
            raise Exception("Can't find partner REPACK_MANIFESTS_URL")

        for property in ("limit-locales", ):
            property = f"extra.{property}"
            resolve_keyed_by(
                task,
                property,
                property,
                **{"release-level": config.params.release_level()},
            )

        if task["worker"]["env"]["REPACK_MANIFESTS_URL"].startswith("git@"):
            task.setdefault("scopes", []).append(
                "secrets:get:project/releng/gecko/build/level-{level}/partner-github-ssh"
                .format(**config.params))

        yield task
Example #3
def handle_keyed_by(config, jobs):
    """Resolve fields that can be keyed by platform, etc."""
    fields = [
        "locales-file",
        "locales-per-chunk",
        "worker-type",
        "description",
        "run-time",
        "docker-image",
        "secrets",
        "fetches.toolchain",
        "fetches.fetch",
        "tooltool",
        "env",
        "ignore-locales",
        "mozharness.config",
        "mozharness.options",
        "mozharness.actions",
        "mozharness.script",
        "treeherder.tier",
        "treeherder.platform",
        "index.type",
        "index.product",
        "index.job-name",
        "when.files-changed",
    ]
    for job in jobs:
        job = copy.deepcopy(job)  # don't overwrite dict values here
        for field in fields:
            resolve_keyed_by(item=job, field=field, item_name=job["name"])
        yield job
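
A note on the deepcopy in Example #3: resolve_keyed_by writes the resolved value back into the containing dict, so when several jobs share nested dicts (for instance fields merged in from job-defaults), resolving one job without a copy would rewrite the others as well. Below is a minimal sketch of that aliasing hazard with invented fetch names; the resolve_keyed_by call is left in a comment since the import path differs between in-tree and standalone taskgraph.

import copy

# Two jobs sharing one "fetches" dict, as happens when defaults are merged
# by reference.
shared_fetches = {
    "toolchain": {
        "by-platform": {
            "linux64.*": ["linux64-clang"],
            "default": [],
        },
    },
}
job_a = {"name": "a-linux64", "platform": "linux64/opt", "fetches": shared_fetches}
job_b = {"name": "b-win64", "platform": "win64/opt", "fetches": shared_fetches}

safe = copy.deepcopy(job_a)
# resolve_keyed_by(item=safe, field="fetches.toolchain", item_name=safe["name"])
# would set safe["fetches"]["toolchain"] = ["linux64-clang"] while leaving
# job_b["fetches"]["toolchain"] still keyed by platform; calling it on job_a
# directly would have replaced the value for both jobs.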
Example #4
def fill_email_data(config, tasks):
    format_kwargs = {
        "head_rev": config.params["head_rev"],
        "project": config.params["project"],
        "th_root": "https://treeherder.mozilla.org/#/",
        "tiers": "&tier=1%2C2%2C3",
    }

    for task in tasks:
        format_kwargs["task_name"] = task["name"]
        format_kwargs["filterstring"] = "&searchStr=iris%20{}".format(
            task["name"])
        format_kwargs["chunk"] = task["worker"]["env"]["CURRENT_TEST_DIR"]

        resolve_keyed_by(
            task,
            "notify.email",
            item_name=task["name"],
            **{
                "project": config.params["project"],
            },
        )

        email = task["notify"].get("email")
        if email:
            email["link"]["href"] = email["link"]["href"].format(
                **format_kwargs)
            email["subject"] = email["subject"].format(**format_kwargs)

        yield task
Example #5
def format(config, tasks):
    """Apply format substitution to worker.env and worker.command."""

    format_params = {
        "release_config": get_release_config(config),
        "config_params": config.params,
    }

    for task in tasks:
        format_params["task"] = task

        command = task.get("worker", {}).get("command", [])
        task["worker"]["command"] = [x.format(**format_params) for x in command]

        env = task.get("worker", {}).get("env", {})
        for k in env.keys():
            resolve_keyed_by(
                env,
                k,
                "flatpak envs",
                **{
                    "release-level": config.params.release_level(),
                    "project": config.params["project"],
                }
            )
            task["worker"]["env"][k] = env[k].format(**format_params)

        yield task
Example #6
def resolve_keys(config, jobs):
    for job in jobs:
        resolve_keyed_by(
            job,
            "partner-bucket-scope",
            item_name=job["label"],
            **{"release-level": config.params.release_level()},
        )
        yield job
Example #7
def resolve_keys(config, jobs):
    for job in jobs:
        resolve_keyed_by(
            job,
            "use-sccache",
            item_name=job["name"],
            **{"release-level": config.params.release_level()},
        )
        yield job
Example #8
def handle_keyed_by(config, jobs):
    """Resolve fields that can be keyed by platform, etc."""
    fields = ["fetches.toolchain"]
    for job in jobs:
        job = copy.deepcopy(job)  # don't overwrite dict values here
        for field in fields:
            resolve_keyed_by(item=job, field=field, item_name=job["name"])

        yield job
Example #9
def handle_keyed_by_prereqs(config, tests):
    """
    Only resolve keys for prerequisite fields here since the
    these keyed-by options might have keyed-by fields
    as well.
    """
    for test in tests:
        resolve_keyed_by(test, "raptor-subtests", item_name=test["test-name"])
        yield test
Example #10
def make_task_description(config, jobs):
    for job in jobs:
        for key in ["worker-type", "scopes"]:
            resolve_keyed_by(
                job,
                key,
                item_name=job["name"],
                **{"release-level": config.params.release_level()})
        yield job
Example #11
def resolve_shipping_product(config, jobs):
    for job in jobs:
        resolve_keyed_by(
            job,
            "shipping-product",
            item_name=job["name"],
            **{
                "release-type": config.params["release_type"],
            },
        )
        yield job
Example #12
def handle_keyed_by(config, jobs):
    """Resolve fields that can be keyed by platform, etc."""
    fields = [
        "mozharness.config",
        "package-formats",
    ]
    for job in jobs:
        job = copy.deepcopy(job)  # don't overwrite dict values here
        for field in fields:
            resolve_keyed_by(item=job, field=field, item_name="?")
        yield job
Example #13
def resolve_keys(config, tasks):
    for task in tasks:
        env = task["worker"].setdefault("env", {})
        env["BRANCH"] = config.params["project"]
        for envvar in env:
            resolve_keyed_by(env, envvar, envvar, **config.params)

        for envvar in list(env.keys()):
            if not env.get(envvar):
                del env[envvar]
        yield task
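
To make the **config.params call in Example #13 concrete: every graph parameter (project, level, release_type, and so on) becomes a candidate by-* key for the env values, and anything that resolves to an empty string is dropped by the cleanup loop. An invented fragment:

# Invented task fragment for resolve_keys above.
task = {
    "worker": {
        "env": {
            "UPDATE_CHANNEL": {
                "by-project": {
                    "mozilla-beta": "beta",
                    "mozilla-release": "release",
                    "default": "",  # empty on other projects
                },
            },
        },
    },
}

# With project == "try": BRANCH is set to "try", UPDATE_CHANNEL resolves to ""
# via the default branch, and the second loop then deletes it from env.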
Example #14
def handle_keyed_by_perftest(config, jobs):
    fields = [
        "perftest-metrics", "perftest-extra-options", "perftest-btime-variants"
    ]
    for job in jobs:
        if job.get("perftest") is None:
            yield job
            continue

        for field in fields:
            resolve_keyed_by(job, field, item_name=job["name"])

        job.pop("perftest")
        yield job
Example #15
def resolve_keys(config, jobs):
    for job in jobs:
        for field in ("worker-type", "attributes.artifact_map"):
            resolve_keyed_by(
                job,
                field,
                item_name=job["label"],
                **{
                    "release-level": config.params.release_level(),
                    "release-type": config.params["release_type"],
                    "project": config.params["project"],
                },
            )
        yield job
Example #16
def make_iris_tasks(config, jobs):
    # Each platform will get a copy of the test categories
    platforms = config.config.get("iris-build-platforms")

    # The fields needing to be resolve_keyed_by'd
    fields = [
        "dependencies.build",
        "fetches.build",
        "run.command",
        "run-on-projects",
        "treeherder.platform",
        "worker.docker-image",
        "worker.artifacts",
        "worker.env.PATH",
        "worker.max-run-time",
        "worker-type",
    ]

    for job in jobs:
        for platform in platforms:
            # Make platform-specific clones of each iris task
            clone = deepcopy(job)

            basename = clone["name"]
            clone["description"] = clone["description"].format(basename)
            clone["name"] = clone["name"] + "-" + platform

            # resolve_keyed_by picks the correct values based on
            # the `by-platform` keys in the task definitions
            for field in fields:
                resolve_keyed_by(
                    clone,
                    field,
                    clone["name"],
                    **{
                        "platform": platform,
                    },
                )

            # iris uses this to select the tests to run in this chunk
            clone["worker"]["env"]["CURRENT_TEST_DIR"] = basename

            # Clean up some entries when they aren't needed
            if clone["worker"]["docker-image"] is None:
                del clone["worker"]["docker-image"]
            if clone["worker"]["env"]["PATH"] is None:
                del clone["worker"]["env"]["PATH"]

            yield clone
Example #17
def mozconfig(config, jobs):
    for job in jobs:
        resolve_keyed_by(
            job,
            "run.mozconfig-variant",
            item_name=job["name"],
            **{
                "release-type": config.params["release_type"],
            },
        )
        mozconfig_variant = job["run"].pop("mozconfig-variant", None)
        if mozconfig_variant:
            job["run"].setdefault("extra-config",
                                  {})["mozconfig_variant"] = mozconfig_variant
        yield job
Example #18
def handle_keyed_by(config, jobs):
    """Resolve fields that can be keyed by project, etc."""
    fields = [
        "run.config",
        "run.extra-config",
    ]
    for job in jobs:
        job = copy.deepcopy(job)
        for field in fields:
            resolve_keyed_by(
                item=job,
                field=field,
                item_name=job["name"],
                **{"release-level": config.params.release_level()})
        yield job
Example #19
def split_raptor_subtests(config, tests):
    for test in tests:
        # For tests that have 'raptor-subtests' listed, we want to create a separate
        # test job for every subtest (i.e. split out each page-load URL into its own job)
        subtests = test.pop("raptor-subtests", None)
        if not subtests:
            yield test
            continue

        chunk_number = 0

        for subtest in subtests:
            chunk_number += 1

            # Create new test job
            chunked = deepcopy(test)
            chunked["chunk-number"] = chunk_number
            chunked["subtest"] = subtest
            chunked["subtest-symbol"] = subtest
            if isinstance(chunked["subtest"], list):
                chunked["subtest"] = subtest[0]
                chunked["subtest-symbol"] = subtest[1]
            chunked = resolve_keyed_by(chunked,
                                       "tier",
                                       chunked["subtest"],
                                       defer=["variant"])
            yield chunked
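
For context on defer=["variant"] above: the outer by-subtest level of tier is collapsed in this pass, while any by-variant level is left untouched for a later transform to resolve once the variant is known. A sketch with invented subtest names:

# Invented `tier` value as it might look before this transform runs.
chunked = {
    "subtest": "amazon",
    "tier": {
        "by-subtest": {
            "amazon": {"by-variant": {"fission": 2, "default": 1}},
            "default": 3,
        },
    },
}

# After resolve_keyed_by(chunked, "tier", chunked["subtest"], defer=["variant"]):
#   chunked["tier"] == {"by-variant": {"fission": 2, "default": 1}}
# i.e. by-subtest is resolved from the job's own "subtest" key and by-variant
# is deferred (compare the defer case in test_nested at the end of this list).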
Example #20
def handle_keyed_by(config, jobs):
    """Resolve fields that can be keyed by project, etc."""
    fields = [
        "run.config",
        "run.product-field",
        "run.extra-config",
    ]

    release_config = get_release_config(config)
    version = release_config["version"]

    for job in jobs:
        for field in fields:
            resolve_keyed_by(
                item=job,
                field=field,
                item_name=job["name"],
                **{
                    "project": config.params["project"],
                    "release-level": config.params.release_level(),
                    "release-type": config.params["release_type"],
                },
            )

        for cfg in job["run"]["config"]:
            job["run"]["mach"].extend(["--config", cfg])

        if config.kind == "cron-bouncer-check":
            job["run"]["mach"].extend([
                "--product-field={}".format(job["run"]["product-field"]),
                "--products-url={}".format(job["run"]["products-url"]),
            ])
            del job["run"]["product-field"]
            del job["run"]["products-url"]
        elif config.kind == "release-bouncer-check":
            job["run"]["mach"].append(f"--version={version}")

        del job["run"]["config"]

        if "extra-config" in job["run"]:
            env = job["worker"].setdefault("env", {})
            env["EXTRA_MOZHARNESS_CONFIG"] = json.dumps(
                job["run"]["extra-config"], sort_keys=True)
            del job["run"]["extra-config"]

        yield job
Example #21
def stub_installer(config, jobs):
    for job in jobs:
        resolve_keyed_by(
            job,
            "stub-installer",
            item_name=job["name"],
            project=config.params["project"],
            **{
                "release-type": config.params["release_type"],
            },
        )
        job.setdefault("attributes", {})
        if job.get("stub-installer"):
            job["attributes"]["stub-installer"] = job["stub-installer"]
            job["worker"]["env"].update({"USE_STUB_INSTALLER": "1"})
        if "stub-installer" in job:
            del job["stub-installer"]
        yield job
Example #22
def add_notifications(config, jobs):
    for job in jobs:
        label = "{}-{}".format(config.kind, job["name"])

        resolve_keyed_by(job,
                         "emails",
                         label,
                         project=config.params["project"])
        emails = [
            email.format(config=config.__dict__) for email in job.pop("emails")
        ]

        command = [
            "release",
            "send-buglist-email",
            "--version",
            config.params["version"],
            "--product",
            job["shipping-product"],
            "--revision",
            config.params["head_rev"],
            "--build-number",
            str(config.params["build_number"]),
            "--repo",
            config.params["head_repository"],
        ]
        for address in emails:
            command += ["--address", address]
        command += [
            # We wrap this in `{'task-reference': ...}` below
            "--task-group-id",
            "<decision>",
        ]

        job["scopes"] = [f"notify:email:{address}" for address in emails]
        job["run"] = {
            "using": "mach",
            "sparse-profile": "mach",
            "mach": {
                "task-reference": " ".join(map(shell_quote, command))
            },
        }

        yield job
Example #23
def handle_keyed_by(config, jobs):
    """Resolve fields that can be keyed by platform, etc."""
    fields = [
        "update-no-wnp",
    ]
    for job in jobs:
        label = job.get("dependent-task",
                        object).__dict__.get("label", "?no-label?")
        for field in fields:
            resolve_keyed_by(
                item=job,
                field=field,
                item_name=label,
                **{
                    "project": config.params["project"],
                    "release-type": config.params["release_type"],
                },
            )
        yield job
Example #24
def handle_shell(config, jobs):
    """
    Handle the 'shell' property.
    """
    fields = [
        "run-on-projects",
        "worker.env",
    ]

    for job in jobs:
        if not job.get("shell"):
            yield job
            continue

        for field in fields:
            resolve_keyed_by(job, field, item_name=job["name"])

        del job["shell"]
        yield job
Example #25
def add_notifications(config, jobs):
    release_config = get_release_config(config)

    for job in jobs:
        label = "{}-{}".format(config.kind, job["name"])

        notifications = job.pop("notifications", None)
        if notifications:
            resolve_keyed_by(notifications,
                             "emails",
                             label,
                             project=config.params["project"])
            emails = notifications["emails"]
            format_kwargs = dict(
                task=job,
                config=config.__dict__,
                release_config=release_config,
            )
            subject = titleformatter.format(notifications["subject"],
                                            **format_kwargs)
            message = titleformatter.format(notifications["message"],
                                            **format_kwargs)
            emails = [email.format(**format_kwargs) for email in emails]

            # By default, we only send mail on success to avoid messages like 'blah is in the
            # candidates dir' when cancelling graphs, dummy job failure, etc
            status_types = notifications.get("status-types", ["on-completed"])
            for s in status_types:
                job.setdefault("routes", []).extend(
                    [f"notify.email.{email}.{s}" for email in emails])

            # Customize the email subject to include release name and build number
            job.setdefault("extra", {}).update(
                {"notify": {
                    "email": {
                        "subject": subject,
                    }
                }})
            if message:
                job["extra"]["notify"]["email"]["content"] = message

        yield job
Example #26
    def test_no_by_not_only_by(self):
        self.assertEqual(
            resolve_keyed_by({"x": {
                "by-y": True,
                "a": 10
            }}, "x", "n"),
            {"x": {
                "by-y": True,
                "a": 10
            }},
        )
Example #27
def make_task_description(config, jobs):
    release_config = get_release_config(config)
    for job in jobs:
        resolve_keyed_by(job,
                         "worker-type",
                         item_name=job["name"],
                         **{"release-level": config.params.release_level()})
        resolve_keyed_by(job,
                         "scopes",
                         item_name=job["name"],
                         **{"release-level": config.params.release_level()})

        job["worker"][
            "release-name"] = "{product}-{version}-build{build_number}".format(
                product=job["shipping-product"].capitalize(),
                version=release_config["version"],
                build_number=release_config["build_number"],
            )

        yield job
Example #28
def handle_keyed_by(config, jobs):
    """Resolve fields that can be keyed by platform, etc, but not `msix.*` fields that can be keyed by
    `package-format`.  Such fields are handled specially below.
    """
    fields = [
        "mozharness.config",
        "package-formats",
    ]
    for job in jobs:
        job = copy.deepcopy(job)  # don't overwrite dict values here
        for field in fields:
            resolve_keyed_by(
                item=job,
                field=field,
                item_name="?",
                **{
                    "release-type": config.params["release_type"],
                    "level": config.params["level"],
                },
            )
        yield job
Example #29
def handle_keyed_by(config, tests):
    fields = [
        "test-url-param",
        "variants",
        "limit-platforms",
        "activity",
        "binary-path",
        "fetches.fetch",
        "max-run-time",
        "run-on-projects",
        "target",
        "tier",
        "run-visual-metrics",
    ]
    for test in tests:
        for field in fields:
            resolve_keyed_by(test,
                             field,
                             item_name=test["test-name"],
                             defer=["variant"])
        yield test
Example #30
    def test_nested(self):
        x = {
            "by-foo": {
                "F1": {
                    "by-bar": {
                        "B1": 11,
                        "B2": 12,
                    },
                },
                "F2": 20,
                "default": 0,
            },
        }
        self.assertEqual(
            resolve_keyed_by({"x": x}, "x", "x", foo="F1", bar="B1"),
            {"x": 11})
        self.assertEqual(
            resolve_keyed_by({"x": x}, "x", "x", foo="F1", bar="B2"),
            {"x": 12})
        self.assertEqual(resolve_keyed_by({"x": x}, "x", "x", foo="F2"),
                         {"x": 20})
        self.assertEqual(
            resolve_keyed_by({"x": x}, "x", "x", foo="F99", bar="B1"),
            {"x": 0})

        # bar is deferred
        self.assertEqual(
            resolve_keyed_by({"x": x},
                             "x",
                             "x",
                             defer=["bar"],
                             foo="F1",
                             bar="B1"),
            {"x": {
                "by-bar": {
                    "B1": 11,
                    "B2": 12
                }
            }},
        )