def get_manifest():
    """Load, validate, and freeze every ``*.yml`` manifest in MANIFEST_DIR.

    Each manifest gets a ``manifest_name`` key (its basename without the
    ``.yml`` extension), is validated against ``base_schema`` and
    ``check_manifest``, and has its ``artifacts`` list frozen to a tuple.

    Returns:
        ReadOnlyDict: mapping of manifest name to the frozen (ReadOnlyDict)
        manifest contents.

    Raises:
        AssertionError: if two manifest files share the same basename.
    """
    manifest_paths = glob.glob(os.path.join(MANIFEST_DIR, "*.yml"))
    all_manifests = {}
    graph_config = load_graph_config(ROOT)
    for path in manifest_paths:
        rw_manifest = yaml.load_yaml(path)
        # Strip only the trailing extension. The previous
        # ``.replace(".yml", "")`` removed *every* ".yml" occurrence, so a
        # file like "foo.ymlbar.yml" would have been mangled to "foobar".
        manifest_name = os.path.splitext(os.path.basename(path))[0]
        rw_manifest["manifest_name"] = manifest_name
        # Validate/check against deepcopies so these helpers cannot mutate
        # the manifest we are about to freeze.
        validate_schema(base_schema, deepcopy(rw_manifest), "Invalid manifest:")
        check_manifest(deepcopy(rw_manifest), graph_config)
        # Freeze the artifact list so the read-only manifest is fully immutable.
        rw_manifest["artifacts"] = tuple(rw_manifest["artifacts"])
        assert manifest_name not in all_manifests
        all_manifests[manifest_name] = ReadOnlyDict(rw_manifest)
    return ReadOnlyDict(all_manifests)
def try_config(self, worker_overrides, worker_suffixes, **kwargs):
    """Build the try-config ``worker-overrides`` mapping.

    ``worker_overrides`` entries look like ``alias=worker-pool`` and pin an
    alias to an explicit pool; ``worker_suffixes`` entries look like
    ``alias=suffix`` and append the suffix to the alias's resolved worker
    type. An alias may only be set once across both lists; a duplicate
    prints an error and exits.

    Returns ``{"worker-overrides": {...}}`` when any override was supplied,
    otherwise ``None`` (implicitly).
    """
    from taskgraph.config import load_graph_config
    from taskgraph.util.workertypes import get_worker_type

    alias_map = {}

    # Explicit alias=worker-pool overrides.
    for override in worker_overrides or ():
        alias, worker_pool = override.split("=", 1)
        if alias in alias_map:
            print(
                "Can't override worker alias {alias} more than once. "
                "Already set to use {previous}, but also asked to use {new}."
                .format(alias=alias, previous=alias_map[alias],
                        new=worker_pool))
            sys.exit(1)
        alias_map[alias] = worker_pool

    # alias=suffix entries: resolve the alias through the graph config and
    # append the suffix to the resolved worker type.
    if worker_suffixes:
        ci_root = os.path.join(build.topsrcdir, "taskcluster", "ci")
        graph_config = load_graph_config(ci_root)
        for worker_suffix in worker_suffixes:
            alias, suffix = worker_suffix.split("=", 1)
            if alias in alias_map:
                print(
                    "Can't override worker alias {alias} more than once. "
                    "Already set to use {previous}, but also asked "
                    "to add suffix {suffix}.".format(
                        alias=alias, previous=alias_map[alias],
                        suffix=suffix))
                sys.exit(1)
            provisioner, worker_type = get_worker_type(
                graph_config,
                alias,
                level="1",
                release_level="staging",
            )
            alias_map[alias] = "{provisioner}/{worker_type}{suffix}".format(
                provisioner=provisioner,
                worker_type=worker_type,
                suffix=suffix)

    if alias_map:
        return {"worker-overrides": alias_map}
def test_action_callback(options):
    """Locally trigger an action callback for testing.

    Loads the graph config and parameters named in ``options``, then calls
    ``taskgraph.actions.trigger_action_callback`` with ``test=True``. Any
    failure prints a traceback and exits with status 1.

    Args:
        options (dict): expects ``task_id``, ``task_group_id``, ``input``
            (path to a .yml/.json file, or falsy for no input), ``root``,
            ``parameters`` and ``callback`` keys.
    """
    import taskgraph.parameters
    import taskgraph.actions
    from taskgraph.util import yaml
    from taskgraph.config import load_graph_config

    def load_data(filename):
        # Dispatch on file extension; only YAML and JSON are supported.
        with open(filename) as f:
            if filename.endswith(".yml"):
                return yaml.load_stream(f)
            elif filename.endswith(".json"):
                return json.load(f)
            else:
                # Bug fix: the message previously contained no placeholder,
                # so the offending filename was never reported.
                raise Exception(f"unknown filename {filename}")

    try:
        task_id = options["task_id"]
        if options["input"]:
            input = load_data(options["input"])
        else:
            input = None
        root = options["root"]
        graph_config = load_graph_config(root)
        trust_domain = graph_config["trust-domain"]
        graph_config.register()
        parameters = taskgraph.parameters.load_parameters_file(
            options["parameters"], strict=False, trust_domain=trust_domain)
        parameters.check()
        return taskgraph.actions.trigger_action_callback(
            task_group_id=options["task_group_id"],
            task_id=task_id,
            input=input,
            callback=options["callback"],
            parameters=parameters,
            root=root,
            test=True,
        )
    except Exception:
        # Top-level CLI boundary: report the full traceback and exit non-zero.
        traceback.print_exc()
        sys.exit(1)
def graph_config():
    """Load and return the graph config from GECKO's taskcluster/ci tree."""
    ci_dir = os.path.join(GECKO, 'taskcluster', 'ci')
    return load_graph_config(ci_dir)
def config():
    """Build a TransformConfig for job tests, with artifact writing disabled."""
    gcfg = load_graph_config(os.path.join(GECKO, 'taskcluster', 'ci'))
    return TransformConfig(
        'job_test', here, {}, {}, {}, gcfg, write_artifacts=False)
def config():
    """Build a TransformConfig for job tests (empty kind dependencies)."""
    loaded_graph_config = load_graph_config(
        os.path.join(GECKO, 'taskcluster', 'ci'))
    return TransformConfig('job_test', here, {}, {}, [], loaded_graph_config)
def graph_config():
    """Return the graph config loaded from the GECKO taskcluster/ci directory."""
    return load_graph_config(os.path.join(GECKO, "taskcluster", "ci"))
def build_worker_definition(config, jobs):
    """Transform: attach a github-release worker definition to each job.

    Jobs are dropped (not yielded) when the release parameters
    (``version``, ``xpi_name``, ``build_number``) are missing or when the
    xpi's manifest does not opt in to github releases. Surviving jobs get
    worker release metadata, upstream signing artifacts, and have their
    ``primary-dependency`` converted into a ``release-signing`` dependency.
    """
    required_params = ("version", "xpi_name", "build_number")
    for job in jobs:
        # Guard: only run for release-style graphs where all three
        # release parameters are set.
        if not all(config.params.get(key) for key in required_params):
            continue
        resolve_keyed_by(job, "scopes", item_name=job["name"],
                         **{"level": config.params["level"]})
        # translate input xpi_name to get manifest and graph info
        manifests = get_manifest()
        xpi_manifest = manifests[config.params["xpi_name"]]
        repo_prefix = xpi_manifest["repo-prefix"]
        graph_config = load_graph_config(ROOT)
        repo_url = graph_config["taskgraph"]["repositories"][repo_prefix][
            "default-repository"]
        # repo_url: https://github.com/mozilla-releng/staging-xpi-public
        # repo_url: [email protected]:mozilla-extensions/https-upgrade-study-v2
        # repo: mozilla-releng/staging-xpi-public
        repo = repo_url.split("github.com")[-1].strip(":/")
        # if this is false in the manifest, no need to create github-release task
        if not xpi_manifest.get("enable-github-release", False):
            continue
        worker_def = {
            "artifact-map": _build_artifact_map(job),
            "git-tag": config.params["head_tag"],
            "git-revision": config.params["xpi_revision"],
            "github-project": repo,
            "is-prerelease": False,
        }
        release_variables = {
            "xpi_name": config.params["xpi_name"],
            "version": config.params["version"],
            "build_number": config.params["build_number"],
        }
        # The manifest may supply its own tag/name templates; fall back to
        # the default formats. This overwrites the head_tag default above.
        worker_def["git-tag"] = xpi_manifest.get(
            "release-tag", "{version}").format(**release_variables)
        job["worker"]["release-name"] = xpi_manifest.get(
            "release-name",
            "{xpi_name}-{version}-build{build_number}",
        ).format(**release_variables)
        signing_dep = job["primary-dependency"]
        worker_def["upstream-artifacts"] = [{
            "taskId": {"task-reference": "<release-signing>"},
            "taskType": "signing",
            "paths": list(signing_dep.attributes["xpis"].values()),
        }]
        # TODO: test this once we can test on shipit
        # Non-public artifact prefixes need an explicit get-artifact scope.
        if "env" in signing_dep.task.get("payload", {}):
            payload_env = signing_dep.task["payload"]["env"]
            if ("ARTIFACT_PREFIX" in payload_env
                    and not payload_env["ARTIFACT_PREFIX"].startswith("public")):
                scopes = job.setdefault("scopes", [])
                scopes.append("queue:get-artifact:{}/*".format(
                    payload_env["ARTIFACT_PREFIX"].rstrip("/")))
        job["worker"].update(worker_def)
        job["dependencies"] = {"release-signing": signing_dep.label}
        del job["primary-dependency"]
        yield job
def config():
    """Build a TransformConfig for job tests; artifact writing is disabled."""
    ci_path = os.path.join(GECKO, "taskcluster", "ci")
    return TransformConfig(
        "job_test", here, {}, {}, {},
        load_graph_config(ci_path),
        write_artifacts=False,
    )