Example #1
def set_fetches_and_locations(config, jobs):
    """Set defaults, including those that differ per worker implementation"""
    for job in jobs:
        # Remember the full per-platform dependency map; job["dependencies"] is
        # temporarily narrowed to a single "build" entry for the helpers below,
        # then restored before the job is yielded.
        dependencies = copy.deepcopy(job["dependencies"])

        for platform, label in dependencies.items():
            job["dependencies"] = {"build": label}

            aar_location = _get_aar_location(config, job, platform)
            prefix = get_artifact_prefix(job)
            if not prefix.endswith("/"):
                prefix = prefix + "/"
            if aar_location.startswith(prefix):
                aar_location = aar_location[len(prefix):]

            job.setdefault("fetches", {}).setdefault(platform, []).append({
                "artifact":
                aar_location,
                "extract":
                False,
            })

            aar_file_name = aar_location.split("/")[-1]
            env_var = MOZ_ANDROID_FAT_AAR_ENV_MAP[platform]
            job["worker"]["env"][env_var] = aar_file_name

        job["dependencies"] = dependencies

        yield job
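A minimal usage sketch for the transform above. The stubs for MOZ_ANDROID_FAT_AAR_ENV_MAP, _get_aar_location, and get_artifact_prefix are hypothetical stand-ins for the real module-level definitions:

import copy

# Hypothetical stubs, for illustration only.
MOZ_ANDROID_FAT_AAR_ENV_MAP = {"android-x86": "MOZ_ANDROID_FAT_AAR_X86"}

def _get_aar_location(config, job, platform):
    return "public/build/geckoview-x86.aar"

def get_artifact_prefix(job):
    return "public/build"

job = {"dependencies": {"android-x86": "build-android-x86/opt"},
       "worker": {"env": {}}}
job = next(set_fetches_and_locations(None, [job]))
# job["fetches"] == {"android-x86": [{"artifact": "geckoview-x86.aar", "extract": False}]}
# job["worker"]["env"]["MOZ_ANDROID_FAT_AAR_X86"] == "geckoview-x86.aar"
# job["dependencies"] is restored to the original per-platform mapping.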
Example #2
def _generate_task_output_files(task, worker_implementation, repackage_config, partner):
    """We carefully generate an explicit list here, but there's an artifacts directory
    too, courtesy of generic_worker_add_artifacts() (windows) or docker_worker_add_artifacts().
    Any errors here are likely masked by that.
    """
    partner_output_path = f"{partner}/"
    artifact_prefix = get_artifact_prefix(task)

    if worker_implementation == ("docker-worker", "linux"):
        local_prefix = "/builds/worker/workspace/"
    elif worker_implementation == ("generic-worker", "windows"):
        local_prefix = "workspace/"
    else:
        raise NotImplementedError(
            f'Unsupported worker implementation: "{worker_implementation}"'
        )

    output_files = []
    for config in repackage_config:
        output_files.append(
            {
                "type": "file",
                "path": "{}outputs/{}{}".format(
                    local_prefix, partner_output_path, config["output"]
                ),
                "name": "{}/{}{}".format(
                    artifact_prefix, partner_output_path, config["output"]
                ),
            }
        )
    return output_files
Example #3
def _generate_task_output_files(
    task, worker_implementation, repackage_config, locale=None
):
    locale_output_path = f"{locale}/" if locale else ""
    artifact_prefix = get_artifact_prefix(task)

    if worker_implementation == ("docker-worker", "linux"):
        local_prefix = "/builds/worker/workspace/"
    elif worker_implementation == ("generic-worker", "windows"):
        local_prefix = "workspace/"
    else:
        raise NotImplementedError(
            f'Unsupported worker implementation: "{worker_implementation}"'
        )

    output_files = []
    for config in repackage_config:
        output_files.append(
            {
                "type": "file",
                "path": "{}outputs/{}{}".format(
                    local_prefix, locale_output_path, config["output"]
                ),
                "name": "{}/{}{}".format(
                    artifact_prefix, locale_output_path, config["output"]
                ),
            }
        )
    return output_files
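A hedged illustration of the path/name mapping above, with get_artifact_prefix stubbed out:

def get_artifact_prefix(task):  # hypothetical stub, for illustration only
    return "public/build"

files = _generate_task_output_files(
    task={},
    worker_implementation=("docker-worker", "linux"),
    repackage_config=[{"output": "target.dmg"}],
    locale="de",
)
# files == [{
#     "type": "file",
#     "path": "/builds/worker/workspace/outputs/de/target.dmg",
#     "name": "public/build/de/target.dmg",
# }]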
Example #4
def generic_worker_add_artifacts(config, job, taskdesc):
    """ Adds an artifact directory to the task """
    # The path is the location on disk; it doesn't necessarily
    # mean the artifacts will be public or private; that is set via the name
    # attribute in add_artifacts.
    path = get_artifact_prefix(taskdesc)
    taskdesc["worker"].setdefault("env", {})["UPLOAD_DIR"] = path
    add_artifacts(config, job, taskdesc, path=path)
Example #5
def add_artifacts(config, job, taskdesc, path):
    taskdesc["worker"].setdefault("artifacts", []).append({
        "name":
        get_artifact_prefix(taskdesc),
        "path":
        path,
        "type":
        "directory",
    })
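A sketch of the shape this helper appends (get_artifact_prefix stubbed; config and job are unused by the body):

def get_artifact_prefix(taskdesc):  # hypothetical stub, for illustration only
    return "public/build"

taskdesc = {"worker": {}}
add_artifacts(config=None, job=None, taskdesc=taskdesc, path="artifacts/")
# taskdesc["worker"]["artifacts"] == [
#     {"name": "public/build", "path": "artifacts/", "type": "directory"}
# ]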
Example #6
def handle_artifact_prefix(config, jobs):
    """Resolve ``artifact_prefix`` in env vars"""
    for job in jobs:
        artifact_prefix = get_artifact_prefix(job)
        for k1, v1 in job.get("env", {}).items():
            if isinstance(v1, str):
                job["env"][k1] = v1.format(artifact_prefix=artifact_prefix)
            elif isinstance(v1, dict):
                for k2, v2 in v1.items():
                    job["env"][k1][k2] = v2.format(
                        artifact_prefix=artifact_prefix)
        yield job
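For example (a sketch with get_artifact_prefix stubbed), {artifact_prefix} placeholders are expanded in both flat and nested env values:

def get_artifact_prefix(job):  # hypothetical stub, for illustration only
    return "releng/partner"

job = {"env": {
    "UPLOAD_DIR": "{artifact_prefix}/uploads",
    "EXTRA": {"PREFIX": "{artifact_prefix}"},
}}
job = next(handle_artifact_prefix(None, [job]))
# job["env"]["UPLOAD_DIR"] == "releng/partner/uploads"
# job["env"]["EXTRA"]["PREFIX"] == "releng/partner"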
Example #7
def generate_partials_upstream_artifacts(job, artifacts, platform, locale=None):
    artifact_prefix = get_artifact_prefix(job)
    if locale and locale != "en-US":
        artifact_prefix = f"{artifact_prefix}/{locale}"

    upstream_artifacts = [{
        "taskId": {"task-reference": "<partials-signing>"},
        "taskType": "signing",
        "paths": [f"{artifact_prefix}/{path}" for path, _ in artifacts],
        "locale": locale or "en-US",
    }]

    return upstream_artifacts
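A sketch of the payload this builds for a non-en-US locale (get_artifact_prefix stubbed):

def get_artifact_prefix(job):  # hypothetical stub, for illustration only
    return "public/build"

result = generate_partials_upstream_artifacts(
    job={}, artifacts=[("target.partial-1.mar", None)], platform="win64", locale="de"
)
# result == [{
#     "taskId": {"task-reference": "<partials-signing>"},
#     "taskType": "signing",
#     "paths": ["public/build/de/target.partial-1.mar"],
#     "locale": "de",
# }]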
Example #8
def _generate_task_output_files(job, filenames, locale=None):
    locale_output_path = f"{locale}/" if locale else ""
    artifact_prefix = get_artifact_prefix(job)

    data = []
    for filename in filenames:
        data.append({
            "type": "file",
            "path": f"/home/worker/artifacts/{filename}",
            "name": f"{artifact_prefix}/{locale_output_path}{filename}",
        })
    data.append({
        "type": "file",
        "path": "/home/worker/artifacts/manifest.json",
        "name": f"{artifact_prefix}/{locale_output_path}manifest.json",
    })
    return data
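Illustration (get_artifact_prefix stubbed): a manifest.json entry is always appended after the per-file entries:

def get_artifact_prefix(job):  # hypothetical stub, for illustration only
    return "public/build"

data = _generate_task_output_files({}, ["target.partial-1.mar"], locale="de")
# data == [
#     {"type": "file",
#      "path": "/home/worker/artifacts/target.partial-1.mar",
#      "name": "public/build/de/target.partial-1.mar"},
#     {"type": "file",
#      "path": "/home/worker/artifacts/manifest.json",
#      "name": "public/build/de/manifest.json"},
# ]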
Example #9
def generate_partials_artifacts(job, release_history, platform, locale=None):
    artifact_prefix = get_artifact_prefix(job)
    if locale:
        artifact_prefix = f"{artifact_prefix}/{locale}"
    else:
        locale = "en-US"

    artifacts = get_partials_artifacts_from_params(release_history, platform, locale)

    upstream_artifacts = [
        {
            "taskId": {"task-reference": "<partials>"},
            "taskType": "partials",
            "paths": [
                f"{artifact_prefix}/{path}"
                for path, version in artifacts
                # TODO Use mozilla-version to avoid comparing strings. Otherwise Firefox 100 will
                # be considered smaller than Firefox 56
                if version is None or version >= "56"
            ],
            "formats": ["autograph_hash_only_mar384"],
        }
    ]

    old_mar_upstream_artifacts = {
        "taskId": {"task-reference": "<partials>"},
        "taskType": "partials",
        "paths": [
            f"{artifact_prefix}/{path}"
            for path, version in artifacts
            # TODO Use mozilla-version to avoid comparing strings. Otherwise Firefox 100 will be
            # considered smaller than Firefox 56
            if version is not None and version < "56"
        ],
        "formats": ["mar"],
    }

    if old_mar_upstream_artifacts["paths"]:
        upstream_artifacts.append(old_mar_upstream_artifacts)

    return upstream_artifacts
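The TODO comments above are worth demonstrating: lexicographic string comparison misorders versions once the major version reaches three digits.

# "1" < "5" character-wise, so Firefox 100 sorts below Firefox 56.
assert "99.0" >= "56"
assert not ("100.0" >= "56")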
Example #10
def make_job_description(config, jobs):
    for job in jobs:
        dep_job = job["primary-dependency"]
        attributes = copy_attributes_from_dependent_job(dep_job)
        build_platform = attributes["build_platform"]

        if job["build-platform"].startswith("win"):
            if dep_job.kind.endswith("signing"):
                continue
        if job["build-platform"].startswith("macosx"):
            if dep_job.kind.endswith("repack"):
                continue
        dependencies = {dep_job.attributes.get("kind"): dep_job.label}
        dependencies.update(dep_job.dependencies)

        signing_task = None
        for dependency in dependencies.keys():
            if build_platform.startswith("macosx") and dependency.endswith("signing"):
                signing_task = dependency
            elif build_platform.startswith("win") and dependency.endswith("repack"):
                signing_task = dependency

        attributes["repackage_type"] = "repackage"

        repack_id = job["extra"]["repack_id"]

        partner_config = get_partner_config_by_kind(config, config.kind)
        partner, subpartner, _ = repack_id.split("/")
        repack_stub_installer = partner_config[partner][subpartner].get(
            "repack_stub_installer"
        )
        if build_platform.startswith("win32") and repack_stub_installer:
            job["package-formats"].append("installer-stub")

        repackage_config = []
        for format in job.get("package-formats"):
            command = copy.deepcopy(PACKAGE_FORMATS[format])
            substs = {
                "archive_format": archive_format(build_platform),
                "executable_extension": executable_extension(build_platform),
            }
            command["inputs"] = {
                name: filename.format(**substs)
                for name, filename in command["inputs"].items()
            }
            repackage_config.append(command)

        run = job.get("mozharness", {})
        run.update(
            {
                "using": "mozharness",
                "script": "mozharness/scripts/repackage.py",
                "job-script": "taskcluster/scripts/builder/repackage.sh",
                "actions": ["setup", "repackage"],
                "extra-config": {
                    "repackage_config": repackage_config,
                },
            }
        )

        worker = {
            "chain-of-trust": True,
            "max-run-time": 7200 if build_platform.startswith("win") else 3600,
            "taskcluster-proxy": True if get_artifact_prefix(dep_job) else False,
            "env": {
                "REPACK_ID": repack_id,
            },
            # Don't add generic artifact directory.
            "skip-artifacts": True,
        }

        worker_type = "b-linux"
        worker["docker-image"] = {"in-tree": "debian11-amd64-build"}

        worker["artifacts"] = _generate_task_output_files(
            dep_job,
            worker_type_implementation(config.graph_config, worker_type),
            repackage_config,
            partner=repack_id,
        )

        description = (
            "Repackaging for repack_id '{repack_id}' for build '"
            "{build_platform}/{build_type}'".format(
                repack_id=job["extra"]["repack_id"],
                build_platform=attributes.get("build_platform"),
                build_type=attributes.get("build_type"),
            )
        )

        task = {
            "label": job["label"],
            "description": description,
            "worker-type": worker_type,
            "dependencies": dependencies,
            "attributes": attributes,
            "scopes": ["queue:get-artifact:releng/partner/*"],
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "routes": job.get("routes", []),
            "extra": job.get("extra", {}),
            "worker": worker,
            "run": run,
            "fetches": _generate_download_config(
                dep_job,
                build_platform,
                signing_task,
                partner=repack_id,
                project=config.params["project"],
                repack_stub_installer=repack_stub_installer,
            ),
        }

        # we may have reduced the priority for partner jobs, otherwise task.py will set it
        if job.get("priority"):
            task["priority"] = job["priority"]
        if build_platform.startswith("macosx"):
            task.setdefault("fetches", {}).setdefault("toolchain", []).extend(
                [
                    "linux64-libdmg",
                    "linux64-hfsplus",
                    "linux64-node",
                ]
            )
        yield task
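The package-formats loop above templates each command's inputs with platform-specific file names. A standalone sketch of that substitution step, using a hypothetical PACKAGE_FORMATS entry:

command = {"inputs": {"input": "target{archive_format}",
                      "mar": "mar{executable_extension}"}}
substs = {"archive_format": ".tar.bz2", "executable_extension": ""}
command["inputs"] = {
    name: filename.format(**substs) for name, filename in command["inputs"].items()
}
# command["inputs"] == {"input": "target.tar.bz2", "mar": "mar"}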
Example #11
def use_fetches(config, jobs):
    artifact_names = {}
    aliases = {}

    if config.kind in ("toolchain", "fetch"):
        jobs = list(jobs)
        for job in jobs:
            run = job.get("run", {})
            label = job["label"]
            get_attribute(artifact_names, label, run, "toolchain-artifact")
            value = run.get(f"{config.kind}-alias")
            if not value:
                value = []
            elif isinstance(value, str):
                value = [value]
            for alias in value:
                aliases[f"{config.kind}-{alias}"] = label

    for task in config.kind_dependencies_tasks.values():
        if task.kind in ("fetch", "toolchain"):
            get_attribute(
                artifact_names,
                task.label,
                task.attributes,
                f"{task.kind}-artifact",
            )
            value = task.attributes.get(f"{task.kind}-alias")
            if not value:
                value = []
            elif isinstance(value, str):
                value = [value]
            for alias in value:
                aliases[f"{task.kind}-{alias}"] = task.label

    artifact_prefixes = {}
    for job in order_tasks(config, jobs):
        artifact_prefixes[job["label"]] = get_artifact_prefix(job)

        fetches = job.pop("fetches", None)
        if not fetches:
            yield job
            continue

        job_fetches = []
        name = job.get("name", job.get("label"))
        dependencies = job.setdefault("dependencies", {})
        worker = job.setdefault("worker", {})
        prefix = get_artifact_prefix(job)
        has_sccache = False
        for kind, artifacts in fetches.items():
            if kind in ("fetch", "toolchain"):
                for fetch_name in artifacts:
                    label = f"{kind}-{fetch_name}"
                    label = aliases.get(label, label)
                    if label not in artifact_names:
                        raise Exception(
                            "Missing fetch job for {kind}-{name}: {fetch}".format(
                                kind=config.kind, name=name, fetch=fetch_name
                            )
                        )

                    path = artifact_names[label]

                    dependencies[label] = label
                    job_fetches.append({
                        "artifact": path,
                        "task": f"<{label}>",
                        "extract": True,
                    })

                    if kind == "toolchain" and fetch_name.endswith("-sccache"):
                        has_sccache = True
            else:
                if kind not in dependencies:
                    raise Exception(
                        "{name} can't fetch {kind} artifacts because "
                        "it has no {kind} dependencies!".format(name=name, kind=kind)
                    )
                dep_label = dependencies[kind]
                if dep_label in artifact_prefixes:
                    prefix = artifact_prefixes[dep_label]
                else:
                    if dep_label not in config.kind_dependencies_tasks:
                        raise Exception(
                            "{name} can't fetch {kind} artifacts because there are "
                            "no tasks with label {label} in kind dependencies!".format(
                                name=name, kind=kind, label=dep_label
                            )
                        )

                    prefix = get_artifact_prefix(
                        config.kind_dependencies_tasks[dep_label])

                for artifact in artifacts:
                    if isinstance(artifact, str):
                        path = artifact
                        dest = None
                        extract = True
                        verify_hash = False
                    else:
                        path = artifact["artifact"]
                        dest = artifact.get("dest")
                        extract = artifact.get("extract", True)
                        verify_hash = artifact.get("verify-hash", False)

                    fetch = {
                        "artifact": (
                            path[1:] if path.startswith("/") else f"{prefix}/{path}"
                        ),
                        "task": f"<{kind}>",
                        "extract": extract,
                    }
                    if dest is not None:
                        fetch["dest"] = dest
                    if verify_hash:
                        fetch["verify-hash"] = verify_hash
                    job_fetches.append(fetch)

        if job.get("use-sccache") and not has_sccache:
            raise Exception("Must provide an sccache toolchain if using sccache.")

        job_artifact_prefixes = {
            mozpath.dirname(fetch["artifact"])
            for fetch in job_fetches
            if not fetch["artifact"].startswith("public/")
        }
        if job_artifact_prefixes:
            # Use taskcluster-proxy and request appropriate scope.  For example, add
            # 'scopes: [queue:get-artifact:path/to/*]' for 'path/to/artifact.tar.xz'.
            worker["taskcluster-proxy"] = True
            for prefix in sorted(job_artifact_prefixes):
                scope = f"queue:get-artifact:{prefix}/*"
                if scope not in job.setdefault("scopes", []):
                    job["scopes"].append(scope)

        artifacts = {}
        for f in job_fetches:
            _, __, artifact = f["artifact"].rpartition("/")
            if "dest" in f:
                artifact = f"{f['dest']}/{artifact}"
            task = f["task"][1:-1]
            if artifact in artifacts:
                raise Exception(
                    f"Task {name} depends on {artifacts[artifact]} and {task} "
                    f"that both provide {artifact}")
            artifacts[artifact] = task

        env = worker.setdefault("env", {})
        env["MOZ_FETCHES"] = {
            "task-reference":
            json.dumps(sorted(job_fetches, key=lambda x: sorted(x.items())),
                       sort_keys=True)
        }
        # The path is normalized to an absolute path in run-task
        env.setdefault("MOZ_FETCHES_DIR", "fetches")

        yield job
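A sketch of the scope derivation above, with posixpath.dirname standing in for mozpath.dirname (they agree on slash-separated artifact names):

import posixpath

artifact = "releng/partner/firefox/target.tar.xz"  # not under public/, so it needs a scope
scope = f"queue:get-artifact:{posixpath.dirname(artifact)}/*"
# scope == "queue:get-artifact:releng/partner/firefox/*"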
Example #12
def make_task_description(config, jobs):
    # If there is no Balrog release history, don't generate partials.
    if not config.params.get("release_history"):
        return
    for job in jobs:
        dep_job = job["primary-dependency"]

        treeherder = inherit_treeherder_from_dep(job, dep_job)
        treeherder.setdefault("symbol", "p(N)")

        label = job.get("label", f"partials-{dep_job.label}")

        dependencies = {dep_job.kind: dep_job.label}

        attributes = copy_attributes_from_dependent_job(dep_job)
        locale = dep_job.attributes.get("locale")
        if locale:
            attributes["locale"] = locale
            treeherder["symbol"] = f"p({locale})"
        attributes["shipping_phase"] = job["shipping-phase"]

        build_locale = locale or "en-US"

        build_platform = attributes["build_platform"]
        builds = get_builds(config.params["release_history"], build_platform,
                            build_locale)

        # If the list is empty there's no available history for this platform
        # and locale combination, so we can't build any partials.
        if not builds:
            continue

        extra = {"funsize": {"partials": list()}}
        update_number = 1

        locale_suffix = ""
        if locale:
            locale_suffix = f"{locale}/"
        artifact_path = "<{}/{}/{}target.complete.mar>".format(
            dep_job.kind,
            get_artifact_prefix(dep_job),
            locale_suffix,
        )
        for build in sorted(builds):
            partial_info = {
                "locale": build_locale,
                "from_mar": builds[build]["mar_url"],
                "to_mar": {
                    "artifact-reference": artifact_path
                },
                "branch": config.params["project"],
                "update_number": update_number,
                "dest_mar": build,
            }
            if "product" in builds[build]:
                partial_info["product"] = builds[build]["product"]
            if "previousVersion" in builds[build]:
                partial_info["previousVersion"] = builds[build][
                    "previousVersion"]
            if "previousBuildNumber" in builds[build]:
                partial_info["previousBuildNumber"] = builds[build][
                    "previousBuildNumber"]
            extra["funsize"]["partials"].append(partial_info)
            update_number += 1

        level = config.params["level"]

        worker = {
            "artifacts": _generate_task_output_files(dep_job, builds.keys(), locale),
            "implementation": "docker-worker",
            "docker-image": {"in-tree": "funsize-update-generator"},
            "os": "linux",
            "max-run-time": 3600 if "asan" in dep_job.label else 900,
            "chain-of-trust": True,
            "taskcluster-proxy": True,
            "env": {
                "SIGNING_CERT": identify_desired_signing_keys(
                    config.params["project"], config.params["release_product"]
                ),
                "EXTRA_PARAMS": f"--arch={architecture(build_platform)}",
                "MAR_CHANNEL_ID": attributes["mar-channel-id"],
            },
        }
        if config.params.release_level() == "staging":
            worker["env"]["FUNSIZE_ALLOW_STAGING_PREFIXES"] = "true"

        task = {
            "label": label,
            "description": f"{dep_job.description} Partials",
            "worker-type": "b-linux",
            "dependencies": dependencies,
            "scopes": [],
            "attributes": attributes,
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "treeherder": treeherder,
            "extra": extra,
            "worker": worker,
        }

        # We only want caching on linux/windows due to bug 1436977
        if int(level) == 3 and any(
            build_platform.startswith(prefix) for prefix in ("linux", "win")
        ):
            task["scopes"].append(
                "auth:aws-s3:read-write:tc-gp-private-1d-us-east-1/releng/mbsdiff-cache/"
            )

        yield task
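The artifact reference built above is resolved against the dependency's task id at runtime. With hypothetical values (dep kind "build", artifact prefix "public/build", locale "de"):

artifact_path = "<{}/{}/{}target.complete.mar>".format("build", "public/build", "de/")
# artifact_path == "<build/public/build/de/target.complete.mar>"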
Example #13
def split_public_and_private(config, jobs):
    # We need to separate private and public destinations, because beetmover
    # only supports one of them per task. Still, use a single task per type.
    partner_config = get_partner_config_by_kind(config, config.kind)
    for job in jobs:
        upstream_artifacts = job["primary-dependency"].release_artifacts
        attribution_task_ref = "<{}>".format(job["primary-dependency"].label)
        prefix = get_artifact_prefix(job["primary-dependency"])
        artifacts = defaultdict(list)
        for artifact in upstream_artifacts:
            partner, sub_partner, platform, locale, _ = artifact.replace(
                prefix + "/", "").split("/", 4)
            destination = "private"
            this_config = [
                p for p in partner_config["configs"]
                if (p["campaign"] == partner and p["content"] == sub_partner)
            ]
            if this_config[0].get("upload_to_candidates"):
                destination = "public"
            artifacts[destination].append(
                (artifact, partner, sub_partner, platform, locale))

        action_scope = add_scope_prefix(config, "beetmover:action:push-to-partner")
        public_bucket_scope = get_beetmover_bucket_scope(config)
        partner_bucket_scope = add_scope_prefix(config, job["partner-bucket-scope"])
        repl_dict = {
            "build_number": config.params["build_number"],
            "release_partner_build_number": config.params[
                "release_partner_build_number"
            ],
            "version": config.params["version"],
            # these are replaced later, per artifact
            "partner": "{partner}",
            "subpartner": "{subpartner}",
            "platform": "{platform}",
            "locale": "{locale}",
        }
        for destination, destination_artifacts in artifacts.items():
            this_job = deepcopy(job)

            if destination == "public":
                this_job["scopes"] = [public_bucket_scope, action_scope]
                this_job["partner_public"] = True
            else:
                this_job["scopes"] = [partner_bucket_scope, action_scope]
                this_job["partner_public"] = False

            partner_path_key = "partner-{destination}-path".format(
                destination=destination)
            partner_path = this_job[partner_path_key].format(**repl_dict)
            this_job.setdefault(
                "worker",
                {})["upstream-artifacts"] = generate_upstream_artifacts(
                    attribution_task_ref, destination_artifacts, partner_path)

            yield this_job
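Because repl_dict maps "partner", "subpartner", "platform", and "locale" to their own literal placeholders, the first .format() pass fills in release values while leaving the per-artifact fields for a later substitution. A standalone illustration with a hypothetical path template:

template = "pub/{partner}/{subpartner}/v{version}-b{build_number}/{platform}/{locale}"
repl_dict = {
    "build_number": 3,
    "release_partner_build_number": 1,  # extra keys are fine for str.format
    "version": "123.0",
    "partner": "{partner}",
    "subpartner": "{subpartner}",
    "platform": "{platform}",
    "locale": "{locale}",
}
partner_path = template.format(**repl_dict)
# partner_path == "pub/{partner}/{subpartner}/v123.0-b3/{platform}/{locale}"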