Exemple #1
0
        def __init__(self, task_id, artifact_name):
            # Build a download record for `artifact_name` produced by task
            # `task_id`, verified against the task's chain-of-trust data.
            # NOTE(review): `retry`, `redo`, `cache` and `get_artifact_url`
            # come from the enclosing scope (not visible in this chunk).
            for _ in redo.retrier(attempts=retry + 1, sleeptime=60):
                cot = cache._download_manager.session.get(
                    get_artifact_url(task_id, "public/chain-of-trust.json"))
                if cot.status_code >= 500:
                    # Server-side error: retry after the sleep interval.
                    continue
                cot.raise_for_status()
                break
            else:
                # Every attempt returned a 5xx status (loop finished without
                # `break`); raise from the last response.
                cot.raise_for_status()

            # Extract the (algorithm, digest) pair recorded for this artifact.
            # The loop keeps the last entry; presumably there is at most one
            # per artifact -- TODO confirm against the chain-of-trust schema.
            digest = algorithm = None
            data = json.loads(cot.text)
            for algorithm, digest in (data.get("artifacts",
                                               {}).get(artifact_name,
                                                       {}).items()):
                pass

            name = os.path.basename(artifact_name)
            # Non-public artifacts must be fetched through the taskcluster
            # proxy, hence use_proxy for anything outside "public/".
            artifact_url = get_artifact_url(
                task_id,
                artifact_name,
                use_proxy=not artifact_name.startswith("public/"),
            )
            super(ArtifactRecord, self).__init__(artifact_url,
                                                 name,
                                                 None,
                                                 digest,
                                                 algorithm,
                                                 unpack=True)
Exemple #2
0
def load_image_by_task_id(task_id, tag=None):
    """Load the docker image artifact of the given task into the local daemon.

    Prints the loaded image name, the re-tag (if any) and a usage hint,
    then returns True.
    """
    image_url = get_artifact_url(task_id, "public/image.tar.zst")
    result = load_image(image_url, tag)
    loaded_name = "{}:{}".format(result["image"], result["tag"])
    print(f"Found docker image: {loaded_name}")
    if not tag:
        # No explicit tag requested: refer to the image by its loaded name.
        tag = loaded_name
    else:
        print(f"Re-tagged as: {tag}")
    print(f"Try: docker run -ti --rm {tag} bash")
    return True
Exemple #3
0
def load_parameters_file(spec, strict=True, overrides=None, trust_domain=None):
    """
    Load parameters from a path, url, decision task-id or project.

    Examples:
        task-id=fdtgsD5DQUmAQZEaGMvQ4Q
        project=mozilla-central

    Args:
        spec (str): local file path, URL, ``task-id=<id>`` or ``project=<name>``.
        strict (bool): passed through to ``Parameters``.
        overrides (dict): values that override the loaded parameters.
        trust_domain (str): required when using the ``project=`` form.

    Returns:
        Parameters: the loaded parameters with overrides applied.

    Raises:
        ValueError: ``project=`` used without a trust domain.
        TypeError: the parameters file is neither ``.yml`` nor ``.json``.
    """
    import requests
    from taskgraph.util import yaml
    from gecko_taskgraph.util.taskcluster import get_artifact_url, find_task_id

    if overrides is None:
        overrides = {}

    if not spec:
        return Parameters(strict=strict, **overrides)

    try:
        # reading parameters from a local parameters.yml file
        f = open(spec)
    except OSError:
        # fetching parameters.yml using task task-id, project or supplied url
        task_id = None
        if spec.startswith("task-id="):
            # maxsplit=1 so a value containing '=' is not truncated
            task_id = spec.split("=", 1)[1]
        elif spec.startswith("project="):
            if trust_domain is None:
                raise ValueError(
                    "Can't specify parameters by project "
                    "if trust domain isn't supplied.", )
            index = "{trust_domain}.v2.{project}.latest.taskgraph.decision".format(
                trust_domain=trust_domain,
                project=spec.split("=", 1)[1],
            )
            task_id = find_task_id(index)

        if task_id:
            spec = get_artifact_url(task_id, "public/parameters.yml")
        logger.info(f"Loading parameters from {spec}")
        resp = requests.get(spec, stream=True)
        resp.raise_for_status()
        # resp.raw is the underlying urllib3 response; it supports close().
        f = resp.raw

    try:
        if spec.endswith(".yml"):
            kwargs = yaml.load_stream(f)
        elif spec.endswith(".json"):
            kwargs = json.load(f)
        else:
            raise TypeError(f"Parameters file `{spec}` is not JSON or YAML")
    finally:
        # Close the file/response handle; the original leaked it.
        f.close()

    kwargs.update(overrides)

    return Parameters(strict=strict, **kwargs)
Exemple #4
0
def test_packages_url(taskdesc):
    """Account for different platforms that name their test packages differently.

    Returns the artifact URL of ``target.test_packages.json`` for the
    upstream ``<build>`` task, inserting an ``en-US`` path component for
    android shippable variants.
    """
    artifact_url = get_artifact_url(
        "<build>", get_artifact_path(taskdesc, "target.test_packages.json"))
    # for android shippable we need to add 'en-US' to the artifact url
    test = taskdesc["run"]["test"]
    if "android" in test["test-platform"] and (get_variant(
            test["test-platform"]) in ("shippable", "shippable-qr",
                                       "shippable-lite", "shippable-lite-qr")):
        # URLs always use '/', so split on it explicitly instead of using
        # os.path.split/join, which would produce '\\' separators on Windows
        # and corrupt the URL.
        head, _, tail = artifact_url.rpartition("/")
        artifact_url = f"{head}/en-US/{tail}"
    return artifact_url
Exemple #5
0
def installer_url(taskdesc):
    """Return the URL of the build installer this test task should fetch.

    An explicit ``installer-url`` in the mozharness config wins; otherwise
    the URL is derived from the (possibly signed) upstream build artifact.
    """
    mozharness = taskdesc["run"]["test"]["mozharness"]

    if "installer-url" in mozharness:
        return mozharness["installer-url"]

    if mozharness["requires-signed-builds"]:
        upstream = "<build-signing>"
    else:
        upstream = "<build>"
    return get_artifact_url(upstream, mozharness["build-artifact-name"])
Exemple #6
0
        def repl(match):
            # Regex-substitution callback: resolve a <dependency/artifact>
            # reference to a concrete artifact URL.
            # NOTE(review): `label`, `decision_task_id` and `dependencies`
            # are closure variables from the enclosing function.
            dependency, artifact_name = match.group(1, 2)

            if dependency == "self":
                raise KeyError(
                    f"task '{label}' can't reference artifacts of self")
            elif dependency == "decision":
                # The decision task is addressable without being a declared
                # dependency.
                task_id = decision_task_id
            else:
                try:
                    task_id = dependencies[dependency]
                except KeyError:
                    raise KeyError(
                        "task '{}' has no dependency named '{}'".format(
                            label, dependency))

            # Private artifacts would require credentials the consumer of
            # the substituted URL may not have.
            assert artifact_name.startswith(
                "public/"
            ), "artifact-reference only supports public artifacts, not `{}`".format(
                artifact_name)
            return get_artifact_url(task_id, artifact_name)
Exemple #7
0
def test_is_backstop(responses, params, response_args, extra_params, expected):
    """Register mocked taskcluster responses, then assert is_backstop()."""
    index_url = get_index_url(
        BACKSTOP_INDEX.format(**{
            "trust-domain": "gecko",
            "project": params["project"],
        }))
    urls = {
        "index": index_url,
        "artifact": get_artifact_url(LAST_BACKSTOP_ID, "public/parameters.yml"),
        "status": get_task_url(LAST_BACKSTOP_ID) + "/status",
    }

    # Only mock the endpoints this parametrized case supplies args for.
    for key in ("index", "status", "artifact"):
        if key not in response_args:
            continue
        print(urls[key])
        responses.add(responses.GET, urls[key], **response_args[key])

    params.update(extra_params)
    assert is_backstop(params) is expected
Exemple #8
0
def mozharness_test_on_docker(config, job, taskdesc):
    """Configure `taskdesc` to run a mozharness test suite under docker-worker.

    Fills in worker capabilities, upload artifacts, the mozharness
    environment, and the test-linux.sh command line, then hands off to
    configure_taskdesc_for_run().
    """
    run = job["run"]
    test = taskdesc["run"]["test"]
    mozharness = test["mozharness"]
    worker = taskdesc["worker"] = job["worker"]

    # apply some defaults
    worker["docker-image"] = test["docker-image"]
    worker["allow-ptrace"] = True  # required for all tests, for crashreporter
    worker["loopback-video"] = test["loopback-video"]
    worker["loopback-audio"] = test["loopback-audio"]
    worker["max-run-time"] = test["max-run-time"]
    worker["retry-exit-status"] = test["retry-exit-status"]
    # Emulator platforms run in a privileged container -- presumably for
    # KVM/device access; confirm against the docker-worker docs.
    if ("android-em-7.0-x86" in test["test-platform"]
            or "gonk-b2g-em-x86_64" in test["test-platform"]):
        worker["privileged"] = True

    artifacts = [
        # (artifact name prefix, in-image path)
        ("public/logs", "{workdir}/workspace/logs/".format(**run)),
        ("public/test", "{workdir}/artifacts/".format(**run)),
        (
            "public/test_info",
            "{workdir}/workspace/build/blobber_upload_dir/".format(**run),
        ),
    ]

    installer = installer_url(taskdesc)

    mozharness_url = get_artifact_url(
        "<build>", get_artifact_path(taskdesc, "mozharness.zip"))

    # Declare the in-image paths above as directory artifacts to upload.
    worker.setdefault("artifacts", [])
    worker["artifacts"].extend([{
        "name":
        prefix,
        "path":
        os.path.join("{workdir}/workspace".format(**run), path),
        "type":
        "directory",
    } for (prefix, path) in artifacts])

    env = worker.setdefault("env", {})
    env.update({
        "MOZHARNESS_CONFIG": " ".join(mozharness["config"]),
        "MOZHARNESS_SCRIPT": mozharness["script"],
        # task-reference wrapper: "<build>" placeholders are resolved to
        # concrete task ids later in the pipeline.
        "MOZILLA_BUILD_URL": {
            "task-reference": installer
        },
        "NEED_PULSEAUDIO": "true",
        "NEED_WINDOW_MANAGER": "true",
        "ENABLE_E10S": str(bool(test.get("e10s"))).lower(),
        "WORKING_DIR": "/builds/worker",
    })

    if test.get("python-3"):
        env["PYTHON"] = "python3"

    # Legacy linux64 tests rely on compiz.
    if test.get("docker-image", {}).get("in-tree") == "desktop1604-test":
        env.update({"NEED_COMPIZ": "true"})

    # Bug 1602701/1601828 - use compiz on ubuntu1804 due to GTK asynchiness
    # when manipulating windows.
    if test.get("docker-image", {}).get("in-tree") == "ubuntu1804-test":
        if "wdspec" in job["run"]["test"]["suite"] or (
                "marionette" in job["run"]["test"]["suite"]
                and "headless" not in job["label"]):
            env.update({"NEED_COMPIZ": "true"})

    # Set MOZ_ENABLE_WAYLAND env variables to enable Wayland backend.
    if "wayland" in job["label"]:
        env["MOZ_ENABLE_WAYLAND"] = "1"

    if mozharness.get("mochitest-flavor"):
        env["MOCHITEST_FLAVOR"] = mozharness["mochitest-flavor"]

    if mozharness["set-moz-node-path"]:
        env["MOZ_NODE_PATH"] = "/usr/local/bin/node"

    if "actions" in mozharness:
        env["MOZHARNESS_ACTIONS"] = " ".join(mozharness["actions"])

    if config.params.is_try():
        env["TRY_COMMIT_MSG"] = config.params["message"]

    # handle some of the mozharness-specific options
    # NOTE(review): the message says "generic-worker" but this is the docker
    # transform -- looks like a copy-paste; confirm the intended wording.
    if test["reboot"]:
        raise Exception("reboot: {} not supported on generic-worker".format(
            test["reboot"]))

    # Support vcs checkouts regardless of whether the task runs from
    # source or not in case it is needed on an interactive loaner.
    support_vcs_checkout(config, job, taskdesc)

    # If we have a source checkout, run mozharness from it instead of
    # downloading a zip file with the same content.
    if test["checkout"]:
        env["MOZHARNESS_PATH"] = "{workdir}/checkouts/gecko/testing/mozharness".format(
            **run)
    else:
        env["MOZHARNESS_URL"] = {"task-reference": mozharness_url}

    extra_config = {
        "installer_url": installer,
        "test_packages_url": test_packages_url(taskdesc),
    }
    # sort_keys keeps the JSON stable so task definitions do not churn.
    env["EXTRA_MOZHARNESS_CONFIG"] = {
        "task-reference": json.dumps(extra_config, sort_keys=True)
    }

    # Bug 1634554 - pass in decision task artifact URL to mozharness for WPT.
    # Bug 1645974 - test-verify-wpt and test-coverage-wpt need artifact URL.
    if "web-platform-tests" in test["suite"] or re.match(
            "test-(coverage|verify)-wpt", test["suite"]):
        env["TESTS_BY_MANIFEST_URL"] = {
            "artifact-reference": "<decision/public/tests-by-manifest.json.gz>"
        }

    command = [
        "{workdir}/bin/test-linux.sh".format(**run),
    ]
    command.extend(mozharness.get("extra-options", []))

    # Explicit test manifests take precedence over chunking arguments.
    if test.get("test-manifests"):
        env["MOZHARNESS_TEST_PATHS"] = json.dumps(
            {test["suite"]: test["test-manifests"]}, sort_keys=True)

    # TODO: remove the need for run['chunked']
    elif mozharness.get("chunked") or test["chunks"] > 1:
        command.append("--total-chunk={}".format(test["chunks"]))
        command.append("--this-chunk={}".format(test["this-chunk"]))

    if "download-symbols" in mozharness:
        download_symbols = mozharness["download-symbols"]
        # Normalize booleans to the "true"/"false" strings the CLI expects;
        # pass any other value (e.g. "ondemand") through unchanged.
        download_symbols = {
            True: "true",
            False: "false"
        }.get(download_symbols, download_symbols)
        command.append("--download-symbols=" + download_symbols)

    # Replace job["run"] with a run-task stanza executing the built command.
    job["run"] = {
        "workdir": run["workdir"],
        "tooltool-downloads": mozharness["tooltool-downloads"],
        "checkout": test["checkout"],
        "command": command,
        "using": "run-task",
    }
    configure_taskdesc_for_run(config, job, taskdesc, worker["implementation"])
def make_task_description(config, jobs):
    """Transform generator: for each job, yield a mac-notarization-poller
    task description derived from the job's primary dependency.
    """
    for job in jobs:
        dep_job = job["primary-dependency"]
        # NOTE: `attributes` is rebound further down (after `description`
        # is built); here it still refers to the dependency's attributes.
        attributes = dep_job.attributes

        build_platform = dep_job.attributes.get("build_platform")
        treeherder = None
        # partner / eme-free kinds are not reported to treeherder
        if "partner" not in config.kind and "eme-free" not in config.kind:
            treeherder = job.get("treeherder", {})

            dep_th_platform = (dep_job.task.get("extra", {}).get(
                "treeherder", {}).get("machine", {}).get("platform", ""))
            build_type = dep_job.attributes.get("build_type")
            treeherder.setdefault("platform",
                                  f"{dep_th_platform}/{build_type}")

            # Inherit tier/symbol from the dependency unless the job
            # supplied its own values.
            dep_treeherder = dep_job.task.get("extra",
                                              {}).get("treeherder", {})
            treeherder.setdefault("tier", dep_treeherder.get("tier", 1))
            treeherder.setdefault(
                "symbol",
                _generate_treeherder_symbol(
                    dep_treeherder.get("groupSymbol", "?"),
                    dep_treeherder.get("symbol")),
            )
            treeherder.setdefault("kind", "build")

        label = dep_job.label.replace("part-1", "poller")
        description = ("Mac Notarization Poller for build '"
                       "{build_platform}/{build_type}'".format(
                           build_platform=build_platform,
                           build_type=attributes.get("build_type")))

        # Rebind: explicit job attributes win; otherwise copy from the
        # dependency.
        attributes = (job["attributes"] if job.get("attributes") else
                      copy_attributes_from_dependent_job(dep_job))
        attributes["signed"] = True

        if dep_job.attributes.get("chunk_locales"):
            # Used for l10n attribute passthrough
            attributes["chunk_locales"] = dep_job.attributes.get(
                "chunk_locales")

        # task-reference: "<part1>" is resolved to the dependency's task id
        # when the task is finalized.
        uuid_manifest_url = get_artifact_url("<part1>",
                                             "public/uuid_manifest.json")
        task = {
            "label": label,
            "description": description,
            "worker": {
                "implementation": "notarization-poller",
                "uuid-manifest": {
                    "task-reference": uuid_manifest_url
                },
                "max-run-time": 3600,
            },
            "worker-type": "mac-notarization-poller",
            "dependencies": {
                "part1": dep_job.label
            },
            "attributes": attributes,
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "optimization": dep_job.optimization,
            "routes": job.get("routes", []),
            "shipping-product": job.get("shipping-product"),
            "shipping-phase": job.get("shipping-phase"),
        }

        if treeherder:
            task["treeherder"] = treeherder
        if job.get("extra"):
            task["extra"] = job["extra"]
        # we may have reduced the priority for partner jobs, otherwise task.py will set it
        if job.get("priority"):
            task["priority"] = job["priority"]

        yield task