def tasks_from_manifest(config, jobs):
    """Generate one build task per XPI entry in the manifest.

    For each input job and each manifest entry, yield a deep copy of the
    job customized with the XPI's name, working directory, and artifact
    configuration.
    """
    manifest = get_manifest()
    for job in jobs:
        for xpi_config in manifest:
            task = deepcopy(job)
            env = task.setdefault("worker", {}).setdefault("env", {})
            run = task.setdefault("run", {})
            if "directory" in xpi_config:
                # Run the build from the XPI's subdirectory of the checkout.
                run["cwd"] = "{checkout}/%s" % xpi_config["directory"]
                extra = task.setdefault("extra", {})
                extra["directory"] = xpi_config["directory"]
            task["label"] = "build-{}".format(xpi_config["name"])
            env["XPI_NAME"] = xpi_config["name"]
            task.setdefault("extra", {})["xpi-name"] = xpi_config["name"]
            # BUG FIX: the original assigned to an undefined `checkout_config`
            # here, raising a NameError that the `except KeyError` clause did
            # not catch.  The intent was to probe the graph config for a clone
            # secret; look it up directly instead.
            try:
                config.graph_config["github_clone_secret"]
            except KeyError:
                artifact_prefix = "public/build"
            else:
                # A clone secret implies a private repo; don't publish the
                # build artifacts under public/.
                artifact_prefix = "xpi/build"
            env["ARTIFACT_PREFIX"] = artifact_prefix
            artifacts = task["worker"].setdefault("artifacts", [])
            artifacts.append({
                "type": "directory",
                "name": artifact_prefix,
                "path": "/builds/worker/artifacts",
            })
            if xpi_config.get("install-type"):
                env["XPI_INSTALL_TYPE"] = xpi_config["install-type"]
            yield task
def test_tasks_from_manifest(config, jobs):
    """Emit one test task per (XPI, test target) pair in the manifest."""
    manifest = get_manifest()
    for base_job in jobs:
        for xpi_config in manifest:
            xpi_name = xpi_config["name"]
            for target in sorted(xpi_config["tests"]):
                task = deepcopy(base_job)
                # Keep a running tally of test tasks per XPI
                # (module-level dict).
                test_count[xpi_name] = test_count.get(xpi_name, 0) + 1
                task.setdefault("extra", {})["xpi-name"] = xpi_name
                env = task.setdefault("worker", {}).setdefault("env", {})
                run = task["run"]
                if "directory" in xpi_config:
                    directory = xpi_config["directory"]
                    run["cwd"] = "{checkout}/%s" % directory
                    task.setdefault("extra", {})["directory"] = directory
                else:
                    run["cwd"] = "{checkout}"
                run["command"] = run["command"].format(target=target)
                task["label"] = "t-{}-{}".format(target, xpi_name)
                task["worker"]["docker-image"]["indexed"] = xpi_config["docker-image"]
                # An ssh secret in the environment implies a private repo.
                artifact_prefix = (
                    "xpi/build"
                    if os.environ.get("XPI_SSH_SECRET_NAME", "")
                    else "public/build"
                )
                env["ARTIFACT_PREFIX"] = artifact_prefix
                yield task
def test_tasks_from_manifest(config, jobs):
    """Generate one test task per (XPI, test target) pair in the manifest."""
    manifest = get_manifest()
    for job in jobs:
        for xpi_config in manifest:
            xpi_name = xpi_config["name"]
            for target in sorted(xpi_config["tests"]):
                task = deepcopy(job)
                # Track how many test tasks exist per XPI (module-level dict).
                test_count.setdefault(xpi_name, 0)
                test_count[xpi_name] += 1
                task.setdefault("extra", {})["xpi-name"] = xpi_name
                env = task.setdefault("worker", {}).setdefault("env", {})
                run = task["run"]
                if "directory" in xpi_config:
                    run["cwd"] = "{checkout}/%s" % xpi_config["directory"]
                    extra = task.setdefault("extra", {})
                    extra["directory"] = xpi_config["directory"]
                else:
                    run["cwd"] = "{checkout}"
                run["command"] = run["command"].format(target=target)
                task["label"] = "t-{}-{}".format(target, xpi_name)
                # BUG FIX: the original wrote to an undefined `checkout_config`
                # inside the try block, raising a NameError that
                # `except KeyError` did not catch.  Probe the graph config
                # directly instead.
                try:
                    config.graph_config["github_clone_secret"]
                except KeyError:
                    artifact_prefix = "public/build"
                else:
                    # A clone secret implies a private repo; keep artifacts
                    # out of public/.
                    artifact_prefix = "xpi/build"
                env["ARTIFACT_PREFIX"] = artifact_prefix
                yield task
def tasks_from_manifest(config, jobs):
    """Emit one build task per XPI entry in the manifest."""
    manifest = get_manifest()
    for base_job in jobs:
        for xpi_config in manifest:
            name = xpi_config["name"]
            task = deepcopy(base_job)
            worker = task.setdefault("worker", {})
            env = worker.setdefault("env", {})
            run = task.setdefault("run", {})
            if "directory" in xpi_config:
                directory = xpi_config["directory"]
                # Run the build from the XPI's subdirectory of the checkout.
                run["cwd"] = "{checkout}/%s" % directory
                task.setdefault("extra", {})["directory"] = directory
            task["label"] = "build-{}".format(name)
            env["XPI_NAME"] = name
            task.setdefault("extra", {})["xpi-name"] = name
            # An ssh secret in the environment implies a private repo, whose
            # build artifacts must not be published under public/.
            if os.environ.get("XPI_SSH_SECRET_NAME"):
                artifact_prefix = "xpi/build"
            else:
                artifact_prefix = "public/build"
            attributes = task.setdefault("attributes", {})
            attributes["artifact_prefix"] = artifact_prefix
            env["ARTIFACT_PREFIX"] = artifact_prefix
            worker.setdefault("artifacts", []).append({
                "type": "directory",
                "name": artifact_prefix,
                "path": "/builds/worker/artifacts",
            })
            worker["docker-image"]["indexed"] = xpi_config["docker-image"]
            install_type = xpi_config.get("install-type")
            if install_type:
                env["XPI_INSTALL_TYPE"] = install_type
            yield task
def add_notifications(config, jobs):
    """Attach email notification routes and config to release jobs.

    Requires the ``xpi_name``, ``xpi_revision`` and ``shipping_phase``
    parameters to all be set; otherwise no jobs are emitted.  Jobs whose
    ``shipping-phase`` attribute does not match the current phase are
    skipped.
    """
    xpi_name = config.params.get("xpi_name")
    xpi_revision = config.params.get("xpi_revision")
    shipping_phase = config.params.get("shipping_phase")
    additional_shipit_emails = config.params.get("additional_shipit_emails", [])
    # Only notify for a fully-specified release promotion.
    if not all([xpi_name, xpi_revision, shipping_phase]):
        return
    manifest = get_manifest()
    for job in jobs:
        if "primary-dependency" in job:
            dep = job.pop("primary-dependency")
            # Only consider dependencies built for this XPI.
            if dep.task.get("extra", {}).get("xpi-name") != xpi_name:
                continue
            # Merge dependency attributes under the job's own overrides.
            attributes = dep.attributes.copy()
            if job.get("attributes"):
                attributes.update(job["attributes"])
            job["attributes"] = attributes
            job.setdefault("dependencies", {}).update({"signing": dep.label})
        if job.get("attributes", {}).get("shipping-phase") != shipping_phase:
            continue
        job["label"] = f"{config.kind}-{shipping_phase}"
        xpi_config = manifest[xpi_name]
        xpi_type = xpi_config["addon-type"]
        # Resolve the recipient list keyed by phase and level.
        emails = evaluate_keyed_by(
            config.graph_config["release-promotion"]["notifications"]
            [xpi_type], "email",
            dict(phase=shipping_phase, level=config.params["level"]),
        )
        if not emails:
            continue
        emails = (emails + additional_shipit_emails +
                  xpi_config.get("additional-emails", []))
        notifications = evaluate_keyed_by(job.pop("notifications"),
                                          "notification config",
                                          dict(phase=shipping_phase))
        format_kwargs = dict(config=config.__dict__)
        subject = notifications["subject"].format(**format_kwargs)
        message = notifications["message"].format(**format_kwargs)
        # We only send mail on success to avoid messages like 'blah is in the
        # candidates dir' when cancelling graphs, dummy job failure, etc
        job.setdefault("routes", []).extend(
            [f"notify.email.{email}.on-completed" for email in emails])
        job.setdefault("extra", {}).update({"notify": {
            "email": {
                "subject": subject
            }
        }})
        if message:
            job["extra"]["notify"]["email"]["content"] = message
        yield job
def tasks_from_manifest(config, jobs):
    """Generate per-XPI tasks from the manifest.

    Honors the ``xpi_name``/``xpi_revision`` parameters to restrict and pin
    release builds, wires up the repository checkout (including the ssh
    secret for private repos), and records the expected XPI artifacts in
    the task's attributes.
    """
    manifest = get_manifest()
    xpi_name = config.params.get("xpi_name")
    xpi_revision = None
    if xpi_name:
        xpi_revision = config.params.get("xpi_revision")
    for job in jobs:
        for xpi_config in manifest.values():
            if not xpi_config.get("active"):
                continue
            if xpi_name and xpi_config["manifest_name"] != xpi_name:
                continue
            task = deepcopy(job)
            env = task.setdefault("worker", {}).setdefault("env", {})
            run = task.setdefault("run", {})
            checkout = run.setdefault("checkout", {})
            checkout_config = checkout.setdefault(xpi_config["repo-prefix"], {})
            env["REPO_PREFIX"] = xpi_config["repo-prefix"]
            checkout_config["path"] = "/builds/worker/checkouts/src"
            if "branch" in xpi_config:
                checkout_config["head_ref"] = xpi_config["branch"]
            if "directory" in xpi_config:
                run["cwd"] = "{checkout}/%s" % xpi_config["directory"]
            if xpi_revision:
                checkout_config["head_rev"] = xpi_revision
            task["label"] = "{}-{}".format(config.kind,
                                           xpi_config["manifest_name"])
            env["XPI_NAME"] = xpi_config["manifest_name"]
            task.setdefault("extra", {})["xpi-name"] = xpi_config["manifest_name"]
            env["XPI_TYPE"] = xpi_config["addon-type"]
            if xpi_config.get("private-repo"):
                # Private repos are cloned via ssh and keep artifacts
                # out of public/.
                checkout_config["ssh_secret_name"] = config.graph_config[
                    "github_clone_secret"]
                artifact_prefix = "xpi/build"
            else:
                artifact_prefix = "public/build"
            env["ARTIFACT_PREFIX"] = artifact_prefix
            if xpi_config.get("install-type"):
                env["XPI_INSTALL_TYPE"] = xpi_config["install-type"]
            attributes = task.setdefault("attributes", {})
            attributes["addon-type"] = xpi_config["addon-type"]
            attributes["xpis"] = {}
            artifacts = task.setdefault("worker", {}).setdefault("artifacts", [])
            if xpi_config["artifacts"]:
                # BUG FIX: this identical directory-artifact entry was
                # previously appended once per XPI artifact, producing
                # duplicate entries in the worker's artifact list.
                artifacts.append({
                    "type": "directory",
                    "name": artifact_prefix,
                    "path": "/builds/worker/artifacts",
                })
            # Map each expected artifact path to its published name.
            for artifact in xpi_config["artifacts"]:
                artifact_name = "{}/{}".format(artifact_prefix,
                                               os.path.basename(artifact))
                attributes["xpis"][artifact] = artifact_name
            env["XPI_ARTIFACTS"] = ";".join(xpi_config["artifacts"])
            yield task
def test_tasks_from_manifest(config, tasks):
    """Attach each test task to its upstream build task and checkout."""
    manifest = get_manifest()
    for task in tasks:
        dep = task.pop("primary-dependency")
        task["attributes"] = dep.attributes.copy()
        task["dependencies"] = {"build": dep.label}
        xpi_name = dep.task["extra"]["xpi-name"]
        xpi_revision = config.params.get('xpi_revision')
        task.setdefault("extra", {})["xpi-name"] = xpi_name
        # Locate the active manifest entry that produced the upstream build.
        matched = None
        for candidate in manifest.get("xpis", []):
            if not candidate.get("active"):
                continue
            if candidate["name"] == xpi_name:
                matched = candidate
                break
        if matched is None:
            raise Exception(
                "Can't determine the upstream xpi_config for {}!".format(
                    xpi_name))
        xpi_config = matched
        env = task.setdefault("worker", {}).setdefault("env", {})
        run = task.setdefault("run", {})
        checkout_config = run.setdefault("checkout", {}).setdefault(
            xpi_config['repo-prefix'], {})
        env['REPO_PREFIX'] = xpi_config['repo-prefix']
        checkout_config['path'] = '/builds/worker/checkouts/src'
        if 'branch' in xpi_config:
            checkout_config['head_ref'] = xpi_config['branch']
        if 'directory' in xpi_config:
            run['cwd'] = '{checkout}/%s' % xpi_config['directory']
        if xpi_revision:
            checkout_config['head_rev'] = xpi_revision
        task["label"] = "test-{}".format(xpi_name)
        if xpi_config.get("private-repo"):
            # Private repos need the clone secret, the taskcluster proxy,
            # and non-public artifacts.
            checkout_config['ssh_secret_name'] = config.graph_config[
                "github_clone_secret"]
            artifact_prefix = "xpi/build"
            task["worker"]["taskcluster-proxy"] = True
        else:
            artifact_prefix = "public/build"
        env["ARTIFACT_PREFIX"] = artifact_prefix
        paths = [
            "{}/{}".format(artifact_prefix, os.path.basename(artifact))
            for artifact in xpi_config["artifacts"]
        ]
        upstream_artifacts = [
            {
                "taskId": "<build>",
                "paths": paths
            },
        ]
        env["XPI_UPSTREAM_URLS"] = json.dumps(upstream_artifacts)
        yield task
def add_notifications(config, jobs):
    """Add email notification routes and config to release-promotion jobs."""
    xpi_name = config.params.get('xpi_name')
    xpi_revision = config.params.get('xpi_revision')
    shipping_phase = config.params.get('shipping_phase')
    # Notifications only make sense for a fully-specified promotion.
    if not (xpi_name and xpi_revision and shipping_phase):
        return
    manifest = get_manifest()
    for job in jobs:
        if "primary-dependency" in job:
            dep = job.pop("primary-dependency")
            # Only consider dependencies built for this XPI.
            if dep.task.get("extra", {}).get("xpi-name") != xpi_name:
                continue
            merged_attributes = dep.attributes.copy()
            if job.get("attributes"):
                merged_attributes.update(job["attributes"])
            job["attributes"] = merged_attributes
            job.setdefault("dependencies", {})["signing"] = dep.label
        if job.get("attributes", {}).get("shipping-phase") != shipping_phase:
            continue
        job['label'] = f'{config.kind}-{shipping_phase}'
        xpi_config = manifest[xpi_name]
        xpi_type = xpi_config['addon-type']
        # Resolve recipients keyed by phase, plus any per-XPI extras.
        emails = evaluate_keyed_by(
            config.graph_config['release-promotion']['notifications']
            [xpi_type],
            'email',
            {'phase': shipping_phase},
        ) + xpi_config.get("additional-emails", [])
        notifications = evaluate_keyed_by(
            job.pop('notifications'),
            'notification config',
            {'phase': shipping_phase},
        )
        format_kwargs = {'config': config.__dict__}
        subject = notifications['subject'].format(**format_kwargs)
        message = notifications['message'].format(**format_kwargs)
        # We only send mail on success to avoid messages like 'blah is in the
        # candidates dir' when cancelling graphs, dummy job failure, etc
        routes = job.setdefault('routes', [])
        routes.extend(
            f'notify.email.{email}.on-completed' for email in emails)
        job.setdefault('extra', {})['notify'] = {
            'email': {
                'subject': subject,
            }
        }
        if message:
            job['extra']['notify']['email']['content'] = message
        yield job
def test_tasks_from_manifest(config, tasks):
    """Wire each test task to its upstream build and repository checkout."""
    manifest = get_manifest()
    for task in tasks:
        dep = task.pop("primary-dependency")
        task["attributes"] = dep.attributes.copy()
        task["dependencies"] = {"build": dep.label}
        xpi_name = dep.task["extra"]["xpi-name"]
        xpi_revision = config.params.get("xpi_revision")
        task.setdefault("extra", {})["xpi-name"] = xpi_name
        xpi_config = manifest[xpi_name]
        # Inactive XPIs get no test tasks.
        if not xpi_config.get("active"):
            continue
        repo_prefix = xpi_config["repo-prefix"]
        worker = task.setdefault("worker", {})
        env = worker.setdefault("env", {})
        run = task.setdefault("run", {})
        checkout_config = run.setdefault("checkout", {}).setdefault(
            repo_prefix, {})
        env["REPO_PREFIX"] = repo_prefix
        checkout_config["path"] = "/builds/worker/checkouts/vcs"
        if "branch" in xpi_config:
            checkout_config["head_ref"] = xpi_config["branch"]
        if "directory" in xpi_config:
            run["cwd"] = "{checkout}/%s" % xpi_config["directory"]
        if xpi_revision:
            checkout_config["head_rev"] = xpi_revision
        if "docker-image" in xpi_config:
            worker["docker-image"]["in-tree"] = xpi_config["docker-image"]
        task["label"] = f"test-{xpi_name}"
        if xpi_config.get("private-repo"):
            # Private repos need the clone secret, the taskcluster proxy,
            # and non-public artifacts.
            checkout_config["ssh_secret_name"] = config.graph_config[
                "github_clone_secret"]
            artifact_prefix = "xpi/build"
            worker["taskcluster-proxy"] = True
        else:
            artifact_prefix = "public/build"
        env["ARTIFACT_PREFIX"] = artifact_prefix
        if xpi_config.get("install-type"):
            env["XPI_INSTALL_TYPE"] = xpi_config["install-type"]
        paths = [
            f"{artifact_prefix}/{os.path.basename(artifact)}"
            for artifact in xpi_config["artifacts"]
        ]
        upstream_artifacts = [{"taskId": "<build>", "paths": paths}]
        env["XPI_UPSTREAM_URLS"] = json.dumps(upstream_artifacts)
        yield task
def tasks_from_manifest(config, jobs):
    """Generate per-XPI tasks from the manifest.

    Honors the ``xpi_name``/``xpi_revision`` parameters to restrict and pin
    release builds, wires up the repository checkout (including the ssh
    secret for private repos), validates that private repos use an ssh url,
    and records the expected XPI artifacts in the task's attributes.
    """
    manifest = get_manifest()
    xpi_name = config.params.get("xpi_name")
    xpi_revision = None
    if xpi_name:
        xpi_revision = config.params.get("xpi_revision")
    for job in jobs:
        for xpi_config in manifest.values():
            if not xpi_config.get("active"):
                continue
            if xpi_name and xpi_config["manifest_name"] != xpi_name:
                continue
            task = deepcopy(job)
            env = task.setdefault("worker", {}).setdefault("env", {})
            run = task.setdefault("run", {})
            checkout = run.setdefault("checkout", {})
            checkout_config = checkout.setdefault(xpi_config["repo-prefix"], {})
            env["REPO_PREFIX"] = xpi_config["repo-prefix"]
            checkout_config["path"] = "/builds/worker/checkouts/vcs"
            if "branch" in xpi_config:
                checkout_config["head_ref"] = xpi_config["branch"]
            if "directory" in xpi_config:
                run["cwd"] = "{checkout}/%s" % xpi_config["directory"]
            if xpi_revision:
                checkout_config["head_rev"] = xpi_revision
            task["label"] = "{}-{}".format(config.kind,
                                           xpi_config["manifest_name"])
            env["XPI_NAME"] = xpi_config["manifest_name"]
            task.setdefault("extra", {})["xpi-name"] = xpi_config["manifest_name"]
            env["XPI_TYPE"] = xpi_config["addon-type"]
            if xpi_config.get("private-repo"):
                checkout_config["ssh_secret_name"] = config.graph_config[
                    "github_clone_secret"]
                artifact_prefix = "xpi/build"
                repo_url = config.graph_config["taskgraph"]["repositories"][
                    xpi_config["repo-prefix"]]["default-repository"]
                # Private repos must be cloned over ssh; a https url would
                # fail once the clone secret is required.
                if repo_url.startswith("https"):
                    raise Exception(
                        f"{xpi_config['manifest_name']} is a private repo but {repo_url} is a public url!\n"
                        "Use the [email protected]:ORG/REPO url format.\n"
                        "(See https://github.com/mozilla-extensions/xpi-manifest/blob/master/docs/adding-a-new-xpi.md#enabling-releases)"
                    )
            else:
                artifact_prefix = "public/build"
            env["ARTIFACT_PREFIX"] = artifact_prefix
            if xpi_config.get("install-type"):
                env["XPI_INSTALL_TYPE"] = xpi_config["install-type"]
            attributes = task.setdefault("attributes", {})
            attributes["addon-type"] = xpi_config["addon-type"]
            attributes["xpis"] = {}
            if "docker-image" in xpi_config:
                task["worker"]["docker-image"]["in-tree"] = xpi_config[
                    "docker-image"]
            artifacts = task.setdefault("worker", {}).setdefault("artifacts", [])
            if xpi_config["artifacts"]:
                # BUG FIX: this identical directory-artifact entry was
                # previously appended once per XPI artifact, producing
                # duplicate entries in the worker's artifact list.
                artifacts.append({
                    "type": "directory",
                    "name": artifact_prefix,
                    "path": "/builds/worker/artifacts",
                })
            # Map each expected artifact path to its published name.
            for artifact in xpi_config["artifacts"]:
                artifact_name = "{}/{}".format(artifact_prefix,
                                               os.path.basename(artifact))
                attributes["xpis"][artifact] = artifact_name
            env["XPI_ARTIFACTS"] = ";".join(xpi_config["artifacts"])
            yield task
from taskgraph.actions.registry import register_callback_action from taskgraph.util.taskcluster import get_artifact from taskgraph.taskgraph import TaskGraph from taskgraph.decision import taskgraph_decision from taskgraph.parameters import Parameters from taskgraph.util.taskgraph import find_decision_task, find_existing_tasks_from_previous_kinds from xpi_taskgraph.xpi_manifest import get_manifest RELEASE_PROMOTION_PROJECTS = ( "https://github.com/mozilla-extensions/xpi-manifest", "https://github.com/escapewindow/xpi-manifest", ) XPI_MANIFEST = get_manifest() def is_release_promotion_available(parameters): return parameters['head_repository'] in RELEASE_PROMOTION_PROJECTS @register_callback_action( name='release-promotion', title='Promote a XPI', symbol='${input.release_promotion_flavor}_${input.xpi_name}', description="Promote a XPI.", generic=False, order=500, context=[], available=is_release_promotion_available,
def add_beetmover_worker_config(config, tasks):
    """Build the beetmover worker payload for uploading signed XPIs.

    Skips tasks unless the release parameters (version, xpi_name, head_ref,
    build_number, level) are all present.  Emits a new task definition whose
    worker payload maps the signed artifacts from the release-signing task
    to pub/system-addons/ destinations.
    """
    manifest = get_manifest()
    for task in tasks:
        # Beetmover only runs for a fully-specified release.
        if not (config.params.get("version") and config.params.get("xpi_name")
                and config.params.get("head_ref")
                and config.params.get("build_number")
                and config.params.get("level")):
            continue
        xpi_name = config.params["xpi_name"]
        xpi_manifest = manifest[xpi_name]
        xpi_addon_type = xpi_manifest["addon-type"]
        build_number = config.params["build_number"]
        xpi_version = config.params["version"]
        release_name = ("{xpi_name}-{xpi_version}-build{build_number}").format(
            xpi_name=xpi_name,
            xpi_version=xpi_version,
            build_number=build_number,
        )
        # One upload destination per artifact listed in the manifest.
        xpi_destinations = []
        for artifact in xpi_manifest["artifacts"]:
            artifact_name = basename(artifact)
            xpi_destination = (
                "pub/system-addons/{xpi_name}/{release_name}/{artifact_name}"
            ).format(
                xpi_name=xpi_name,
                artifact_name=artifact_name,
                release_name=release_name,
            )
            xpi_destinations.append(xpi_destination)
        task_label = f"beetmover-{xpi_name}"
        task_description = (
            "Upload signed XPI artifacts to "
            "pub/system-addons/{xpi_name}/{release_name}").format(
                xpi_name=xpi_name, release_name=release_name)
        # Resolve the level-keyed bucket scope in place.
        resolve_keyed_by(
            task,
            "bucket-scope",
            item_name=task_label,
            **{"level": config.params["level"]},
        )
        dep = task["primary-dependency"]
        task_ref = {"task-reference": "<release-signing>"}
        branch = basename(config.params["head_ref"])
        # Signed artifact paths recorded by the upstream build transform.
        paths = list(dep.attributes["xpis"].values())
        artifact_map_paths = {
            path: {
                "destinations": xpi_destinations
            }
            for path in paths
        }
        worker = {
            "upstream-artifacts": [
                {
                    "taskId": task_ref,
                    "taskType": "signing",
                    "paths": paths,
                    "locale": "multi",
                },
            ],
            "action-scope": "push-to-system-addons",
            "bucket-scope": task["bucket-scope"],
            "release-properties": {
                "app-name": "xpi",
                "app-version": xpi_version,
                "branch": branch,
                "build-id": release_name,
            },
            "artifact-map": [
                {
                    "taskId": task_ref,
                    "paths": artifact_map_paths,
                },
            ],
        }
        task.setdefault("attributes", {})["addon-type"] = xpi_addon_type
        # Replace the incoming task with a minimal beetmover definition.
        task = {
            "label": task_label,
            "name": task_label,
            "description": task_description,
            "dependencies": {
                "release-signing": dep.label
            },
            "worker-type": task["worker-type"],
            "worker": worker,
            "attributes": task["attributes"],
            "run-on-tasks-for": task["run-on-tasks-for"],
        }
        yield task
def add_balrog_worker_config(config, tasks):
    """Build the balrog worker payload for submitting a system-addon release.

    Skips tasks unless the release parameters (version, xpi_name, head_ref,
    build_number, level) are all present.  Emits a new task definition that
    submits the beetmover-uploaded manifest/checksums to Balrog.
    """
    manifest = get_manifest()
    for task in tasks:
        # Balrog submission only runs for a fully-specified release.
        if not (config.params.get("version") and config.params.get("xpi_name")
                and config.params.get("head_ref")
                and config.params.get("build_number")
                and config.params.get("level")):
            continue
        xpi_name = config.params["xpi_name"]
        xpi_manifest = manifest[xpi_name]
        xpi_addon_type = xpi_manifest["addon-type"]
        xpi_version = config.params["version"]
        build_number = config.params["build_number"]
        release_name = "{xpi_name}-{xpi_version}-build{build_number}".format(
            xpi_name=xpi_name,
            xpi_version=xpi_version,
            build_number=build_number,
        )
        task_label = f"balrog-{xpi_name}"
        task_description = (
            "Create a Balrog release for the signed "
            "XPI artifacts uploaded to "
            "pub/system-addons/{xpi_name}/{release_name}/").format(
                xpi_name=xpi_name, release_name=release_name)
        dep = task["primary-dependency"]
        task_ref = {"task-reference": "<beetmover>"}
        # Balrog consumes the manifest and checksums beetmover produced.
        paths = [
            "public/manifest.json",
            "public/target.checksums",
        ]
        worker = {
            "action": "submit-system-addons",
            "server": task["balrog"]["server"],
            "upstream-artifacts": [
                {
                    "taskId": task_ref,
                    "taskType": "beetmover",
                    "paths": paths,
                },
            ],
        }
        # Resolve the level-keyed balrog server in place.
        resolve_keyed_by(
            worker,
            "server",
            item_name=task_label,
            **{"level": config.params["level"]},
        )
        task.setdefault("attributes", {})["addon-type"] = xpi_addon_type
        # Replace the incoming task with a minimal balrog definition.
        task = {
            "label": task_label,
            "name": task_label,
            "description": task_description,
            "dependencies": {
                "beetmover": dep.label
            },
            "worker-type": task["worker-type"],
            "worker": worker,
            "attributes": task["attributes"],
            "run-on-tasks-for": task["run-on-tasks-for"],
        }
        yield task
def build_worker_definition(config, jobs):
    """Build the github-release worker definition for a release job.

    Skips jobs unless version, xpi_name, and build_number parameters are
    set, or when the manifest disables github releases for the XPI.
    """
    for job in jobs:
        if not (config.params.get("version") and config.params.get("xpi_name")
                and config.params.get("build_number")):
            continue
        resolve_keyed_by(job, "scopes", item_name=job["name"],
                         **{"level": config.params["level"]})
        # translate input xpi_name to get manifest and graph info
        manifest = get_manifest()
        manifest_config = manifest[config.params["xpi_name"]]
        # if this is false in the manifest, no need to create a
        # github-release task (checked early, before any further work).
        if not manifest_config.get("enable-github-release", False):
            continue
        repo_prefix = manifest_config["repo-prefix"]
        graph_config = load_graph_config(ROOT)
        repo_url = graph_config["taskgraph"]["repositories"][repo_prefix][
            "default-repository"]
        # repo_url: https://github.com/mozilla-releng/staging-xpi-public
        # repo_url: [email protected]:mozilla-extensions/https-upgrade-study-v2
        # repo: mozilla-releng/staging-xpi-public
        repo = repo_url.split("github.com")[-1]
        repo = repo.strip(":/")
        release_variables = {
            "xpi_name": config.params["xpi_name"],
            "version": config.params["version"],
            "build_number": config.params["build_number"],
        }
        # FIX: the original set "git-tag" from head_tag and then
        # unconditionally overwrote it with tag_name; compute the tag once.
        tag_name = manifest_config.get("release-tag",
                                       "{version}").format(**release_variables)
        worker_definition = {
            "artifact-map": _build_artifact_map(job),
            "git-tag": tag_name,
            "git-revision": config.params["xpi_revision"],
            "github-project": repo,
            "is-prerelease": False,
        }
        release_name = manifest_config.get(
            "release-name", "{xpi_name}-{version}-build{build_number}").format(
                **release_variables)
        job["worker"]["release-name"] = release_name
        dep = job["primary-dependency"]
        worker_definition["upstream-artifacts"] = [{
            "taskId": {
                "task-reference": "<release-signing>"
            },
            "taskType": "signing",
            "paths": list(dep.attributes["xpis"].values()),
        }]
        # TODO: test this once we can test on shipit
        # Non-public artifacts require an explicit get-artifact scope.
        if ("env" in dep.task.get("payload", {})
                and "ARTIFACT_PREFIX" in dep.task["payload"]["env"]):
            if not dep.task["payload"]["env"]["ARTIFACT_PREFIX"].startswith(
                    "public"):
                scopes = job.setdefault("scopes", [])
                scopes.append("queue:get-artifact:{}/*".format(
                    dep.task["payload"]["env"]["ARTIFACT_PREFIX"].rstrip("/")))
        job["worker"].update(worker_definition)
        job["dependencies"] = {"release-signing": dep.label}
        del job["primary-dependency"]
        yield job