def use_fetches(config, jobs):
    """Transform generator: expand each job's ``fetches`` key.

    Collects the artifact names and aliases published by fetch/toolchain
    jobs (both from this kind's own jobs and from kind dependencies), then
    rewrites every job that declares ``fetches`` so that:

    * the fetched tasks become entries in the job's ``dependencies``,
    * the fetch list is serialized into the ``MOZ_FETCHES`` worker env var
      (as a ``task-reference`` payload),
    * non-public artifact paths get ``queue:get-artifact`` scopes and
      enable the taskcluster proxy on the worker.

    Jobs are yielded one at a time (mutated in place); jobs without a
    ``fetches`` key pass through unchanged.
    """
    # Maps task label -> artifact path, for every fetch/toolchain task we
    # know about.
    artifact_names = {}
    # Maps "<kind>-<alias>" -> real task label, so fetches may refer to a
    # toolchain/fetch by its declared alias instead of its job name.
    aliases = {}

    if config.kind in ("toolchain", "fetch"):
        # We may iterate `jobs` twice (once here, once below), so force the
        # generator into a list first.
        jobs = list(jobs)
        for job in jobs:
            run = job.get("run", {})
            label = job["label"]
            get_attribute(artifact_names, label, run, "toolchain-artifact")
            value = run.get("{}-alias".format(config.kind))
            if value:
                aliases["{}-{}".format(config.kind, value)] = label

    # kind_dependencies_tasks is a mapping label -> task here (see the
    # .values() iteration and the [dep_label] lookup below).
    for task in config.kind_dependencies_tasks.values():
        if task.kind in ("fetch", "toolchain"):
            get_attribute(
                artifact_names,
                task.label,
                task.attributes,
                "{kind}-artifact".format(kind=task.kind),
            )
            value = task.attributes.get("{}-alias".format(task.kind))
            if value:
                aliases["{}-{}".format(task.kind, value)] = task.label

    # Maps job label -> artifact prefix, filled in dependency order so a
    # later job can look up the prefix of an earlier job in this kind.
    artifact_prefixes = {}
    for job in order_tasks(config, jobs):
        artifact_prefixes[job["label"]] = get_artifact_prefix(job)

        fetches = job.pop("fetches", None)
        if not fetches:
            yield job
            continue

        job_fetches = []
        name = job.get("name", job.get("label"))
        dependencies = job.setdefault("dependencies", {})
        worker = job.setdefault("worker", {})
        prefix = get_artifact_prefix(job)
        has_sccache = False
        for kind, artifacts in fetches.items():
            if kind in ("fetch", "toolchain"):
                # Fetching from a fetch/toolchain task: resolve the alias
                # and take the artifact path recorded above.
                for fetch_name in artifacts:
                    label = "{kind}-{name}".format(kind=kind, name=fetch_name)
                    label = aliases.get(label, label)
                    if label not in artifact_names:
                        # NOTE(review): the message formats kind=config.kind,
                        # but the missing label was built from the fetches
                        # key `kind` — presumably this should be kind=kind;
                        # confirm before changing the message text.
                        raise Exception(
                            "Missing fetch job for {kind}-{name}: {fetch}".format(
                                kind=config.kind, name=name, fetch=fetch_name
                            )
                        )

                    path = artifact_names[label]
                    dependencies[label] = label
                    job_fetches.append(
                        {
                            "artifact": path,
                            "task": "<{label}>".format(label=label),
                            "extract": True,
                        }
                    )

                    if kind == "toolchain" and fetch_name.endswith("-sccache"):
                        has_sccache = True
            else:
                # Fetching from an arbitrary dependency kind: the job must
                # already depend on exactly one task of that kind.
                if kind not in dependencies:
                    raise Exception(
                        "{name} can't fetch {kind} artifacts because "
                        "it has no {kind} dependencies!".format(name=name, kind=kind)
                    )
                dep_label = dependencies[kind]
                if dep_label in artifact_prefixes:
                    # Dependency is in the current kind; prefix was recorded
                    # when it was yielded earlier in this loop.
                    prefix = artifact_prefixes[dep_label]
                else:
                    if dep_label not in config.kind_dependencies_tasks:
                        raise Exception(
                            "{name} can't fetch {kind} artifacts because "
                            "there are no tasks with label {label} in kind dependencies!".format(
                                name=name,
                                kind=kind,
                                label=dependencies[kind],
                            )
                        )

                    prefix = get_artifact_prefix(
                        config.kind_dependencies_tasks[dep_label]
                    )

                for artifact in artifacts:
                    # Artifacts may be a bare path string or a dict with
                    # optional dest/extract/verify-hash keys.
                    if isinstance(artifact, text_type):
                        path = artifact
                        dest = None
                        extract = True
                        verify_hash = False
                    else:
                        path = artifact["artifact"]
                        dest = artifact.get("dest")
                        extract = artifact.get("extract", True)
                        verify_hash = artifact.get("verify-hash", False)

                    fetch = {
                        # A leading "/" marks an already-absolute artifact
                        # path: strip the slash instead of prepending prefix.
                        "artifact": "{prefix}/{path}".format(prefix=prefix, path=path)
                        if not path.startswith("/")
                        else path[1:],
                        "task": "<{dep}>".format(dep=kind),
                        "extract": extract,
                    }
                    if dest is not None:
                        fetch["dest"] = dest
                    if verify_hash:
                        fetch["verify-hash"] = verify_hash
                    job_fetches.append(fetch)

        if job.get("use-sccache") and not has_sccache:
            raise Exception("Must provide an sccache toolchain if using sccache.")

        # Any non-public artifact requires an explicit get-artifact scope.
        job_artifact_prefixes = {
            mozpath.dirname(fetch["artifact"])
            for fetch in job_fetches
            if not fetch["artifact"].startswith("public/")
        }
        if job_artifact_prefixes:
            # Use taskcluster-proxy and request appropriate scope. For example, add
            # 'scopes: [queue:get-artifact:path/to/*]' for 'path/to/artifact.tar.xz'.
            worker["taskcluster-proxy"] = True
            for prefix in sorted(job_artifact_prefixes):
                scope = "queue:get-artifact:{}/*".format(prefix)
                if scope not in job.setdefault("scopes", []):
                    job["scopes"].append(scope)

        env = worker.setdefault("env", {})
        # Sort the fetches (and dict keys) so the serialized value is stable
        # across runs; six.ensure_text keeps the py2/py3 type consistent.
        env["MOZ_FETCHES"] = {
            "task-reference": six.ensure_text(
                json.dumps(
                    sorted(job_fetches, key=lambda x: sorted(x.items())), sort_keys=True
                )
            )
        }
        # The path is normalized to an absolute path in run-task
        env.setdefault("MOZ_FETCHES_DIR", "fetches")

        yield job
def use_fetches(config, jobs):
    """Transform generator: expand each job's ``fetches`` key.

    Same transform as the other copies of this function in this file, at a
    different revision: here ``config.kind_dependencies_tasks`` is a plain
    iterable of tasks (it is scanned linearly for a matching label), a
    ``-sccache`` toolchain fetch sets ``needs-sccache`` on the job, and
    artifact dicts have no ``verify-hash`` support.

    Jobs are yielded one at a time (mutated in place); jobs without a
    ``fetches`` key pass through unchanged.
    """
    # Maps task label -> artifact path for known fetch/toolchain tasks.
    artifact_names = {}
    # Maps "<kind>-<alias>" -> real task label.
    aliases = {}

    if config.kind in ('toolchain', 'fetch'):
        # We iterate `jobs` again below, so force the generator into a list.
        jobs = list(jobs)
        for job in jobs:
            run = job.get('run', {})
            label = job['label']
            get_attribute(
                artifact_names, label, run, 'toolchain-artifact')
            value = run.get('{}-alias'.format(config.kind))
            if value:
                aliases['{}-{}'.format(config.kind, value)] = label

    # kind_dependencies_tasks is iterated directly here (not .values()),
    # so in this revision it is a sequence of task objects.
    for task in config.kind_dependencies_tasks:
        if task.kind in ('fetch', 'toolchain'):
            get_attribute(
                artifact_names, task.label, task.attributes,
                '{kind}-artifact'.format(kind=task.kind),
            )
            value = task.attributes.get('{}-alias'.format(task.kind))
            if value:
                aliases['{}-{}'.format(task.kind, value)] = task.label

    # Maps job label -> artifact prefix, filled in dependency order so a
    # later job can reuse the prefix of an earlier job in this kind.
    artifact_prefixes = {}
    for job in order_tasks(config, jobs):
        artifact_prefixes[job["label"]] = get_artifact_prefix(job)

        fetches = job.pop("fetches", None)
        if not fetches:
            yield job
            continue

        job_fetches = []
        name = job.get('name', job.get('label'))
        dependencies = job.setdefault('dependencies', {})
        worker = job.setdefault('worker', {})
        prefix = get_artifact_prefix(job)
        for kind, artifacts in fetches.items():
            if kind in ('fetch', 'toolchain'):
                # Fetching from a fetch/toolchain task: resolve the alias
                # and take the artifact path recorded above.
                for fetch_name in artifacts:
                    label = '{kind}-{name}'.format(kind=kind, name=fetch_name)
                    label = aliases.get(label, label)
                    if label not in artifact_names:
                        raise Exception('Missing fetch job for {kind}-{name}: {fetch}'.format(
                            kind=config.kind, name=name, fetch=fetch_name))

                    path = artifact_names[label]
                    dependencies[label] = label
                    job_fetches.append({
                        'artifact': path,
                        'task': '<{label}>'.format(label=label),
                        'extract': True,
                    })

                    if kind == 'toolchain' and fetch_name.endswith('-sccache'):
                        job['needs-sccache'] = True
            else:
                # Fetching from an arbitrary dependency kind: the job must
                # already depend on a task of that kind.
                if kind not in dependencies:
                    raise Exception("{name} can't fetch {kind} artifacts because "
                                    "it has no {kind} dependencies!".format(name=name, kind=kind))
                dep_label = dependencies[kind]
                if dep_label in artifact_prefixes:
                    # Dependency is in the current kind; prefix was recorded
                    # earlier in this loop.
                    prefix = artifact_prefixes[dep_label]
                else:
                    # Linear scan over kind dependencies for the label; the
                    # match must be unique.
                    dep_tasks = [
                        task
                        for task in config.kind_dependencies_tasks
                        if task.label == dep_label
                    ]
                    if len(dep_tasks) != 1:
                        raise Exception(
                            "{name} can't fetch {kind} artifacts because "
                            "there are {tasks} with label {label} in kind dependencies!".format(
                                name=name,
                                kind=kind,
                                label=dependencies[kind],
                                tasks="no tasks"
                                if len(dep_tasks) == 0
                                else "multiple tasks",
                            )
                        )

                    prefix = get_artifact_prefix(dep_tasks[0])

                for artifact in artifacts:
                    # Artifacts may be a bare path string or a dict with
                    # optional dest/extract keys.
                    if isinstance(artifact, text_type):
                        path = artifact
                        dest = None
                        extract = True
                    else:
                        path = artifact['artifact']
                        dest = artifact.get('dest')
                        extract = artifact.get('extract', True)

                    fetch = {
                        # A leading "/" marks an already-absolute artifact
                        # path: strip the slash instead of prepending prefix.
                        'artifact': '{prefix}/{path}'.format(prefix=prefix, path=path)
                                    if not path.startswith('/') else path[1:],
                        'task': '<{dep}>'.format(dep=kind),
                        'extract': extract,
                    }
                    if dest is not None:
                        fetch['dest'] = dest
                    job_fetches.append(fetch)

        # Any non-public artifact requires an explicit get-artifact scope.
        job_artifact_prefixes = {
            mozpath.dirname(fetch["artifact"])
            for fetch in job_fetches
            if not fetch["artifact"].startswith("public/")
        }
        if job_artifact_prefixes:
            # Use taskcluster-proxy and request appropriate scope. For example, add
            # 'scopes: [queue:get-artifact:path/to/*]' for 'path/to/artifact.tar.xz'.
            worker["taskcluster-proxy"] = True
            for prefix in sorted(job_artifact_prefixes):
                scope = "queue:get-artifact:{}/*".format(prefix)
                if scope not in job.setdefault("scopes", []):
                    job["scopes"].append(scope)

        env = worker.setdefault('env', {})
        # six.ensure_text keeps the serialized value's type consistent
        # between py2 and py3.
        env['MOZ_FETCHES'] = {
            'task-reference': six.ensure_text(json.dumps(job_fetches, sort_keys=True))
        }
        # The path is normalized to an absolute path in run-task
        env.setdefault('MOZ_FETCHES_DIR', 'fetches')
        yield job
def use_fetches(config, jobs):
    """Transform generator: expand each job's ``fetches`` key.

    Same transform as the other copies of this function in this file, at a
    different revision: f-strings throughout, ``isinstance(artifact, str)``
    instead of ``text_type``, no sccache handling, no ``verify-hash``
    support, and plain ``json.dumps`` (no ``six``).

    Jobs are yielded one at a time (mutated in place); jobs without a
    ``fetches`` key pass through unchanged.
    """
    # Maps task label -> artifact path for known fetch/toolchain tasks.
    artifact_names = {}
    # Maps "<kind>-<alias>" -> real task label.
    aliases = {}

    if config.kind in ("toolchain", "fetch"):
        # We iterate `jobs` again below, so force the generator into a list.
        jobs = list(jobs)
        for job in jobs:
            run = job.get("run", {})
            label = job["label"]
            get_attribute(artifact_names, label, run, "toolchain-artifact")
            value = run.get(f"{config.kind}-alias")
            if value:
                aliases[f"{config.kind}-{value}"] = label

    # kind_dependencies_tasks is iterated directly here (not .values()),
    # so in this revision it is a sequence of task objects.
    for task in config.kind_dependencies_tasks:
        if task.kind in ("fetch", "toolchain"):
            get_attribute(
                artifact_names,
                task.label,
                task.attributes,
                f"{task.kind}-artifact",
            )
            value = task.attributes.get(f"{task.kind}-alias")
            if value:
                aliases[f"{task.kind}-{value}"] = task.label

    # Maps job label -> artifact prefix, filled in dependency order so a
    # later job can reuse the prefix of an earlier job in this kind.
    artifact_prefixes = {}
    for job in order_tasks(config, jobs):
        artifact_prefixes[job["label"]] = get_artifact_prefix(job)

        fetches = job.pop("fetches", None)
        if not fetches:
            yield job
            continue

        job_fetches = []
        name = job.get("name", job.get("label"))
        dependencies = job.setdefault("dependencies", {})
        worker = job.setdefault("worker", {})
        prefix = get_artifact_prefix(job)
        for kind, artifacts in fetches.items():
            if kind in ("fetch", "toolchain"):
                # Fetching from a fetch/toolchain task: resolve the alias
                # and take the artifact path recorded above.
                for fetch_name in artifacts:
                    label = f"{kind}-{fetch_name}"
                    label = aliases.get(label, label)
                    if label not in artifact_names:
                        # NOTE(review): the message formats kind=config.kind,
                        # but the missing label was built from the fetches
                        # key `kind` — presumably this should be kind=kind;
                        # confirm before changing the message text.
                        raise Exception(
                            "Missing fetch job for {kind}-{name}: {fetch}".format(
                                kind=config.kind, name=name, fetch=fetch_name))

                    path = artifact_names[label]
                    dependencies[label] = label
                    job_fetches.append({
                        "artifact": path,
                        "task": f"<{label}>",
                        "extract": True,
                    })
            else:
                # Fetching from an arbitrary dependency kind: the job must
                # already depend on a task of that kind.
                if kind not in dependencies:
                    raise Exception(
                        "{name} can't fetch {kind} artifacts because "
                        "it has no {kind} dependencies!".format(name=name, kind=kind))
                dep_label = dependencies[kind]
                if dep_label in artifact_prefixes:
                    # Dependency is in the current kind; prefix was recorded
                    # earlier in this loop.
                    prefix = artifact_prefixes[dep_label]
                else:
                    # Linear scan over kind dependencies for the label; the
                    # match must be unique.
                    dep_tasks = [
                        task
                        for task in config.kind_dependencies_tasks
                        if task.label == dep_label
                    ]
                    if len(dep_tasks) != 1:
                        raise Exception(
                            "{name} can't fetch {kind} artifacts because "
                            "there are {tasks} with label {label} in kind dependencies!"
                            .format(
                                name=name,
                                kind=kind,
                                label=dependencies[kind],
                                tasks="no tasks"
                                if len(dep_tasks) == 0
                                else "multiple tasks",
                            ))

                    prefix = get_artifact_prefix(dep_tasks[0])

                for artifact in artifacts:
                    # Artifacts may be a bare path string or a dict with
                    # optional dest/extract keys.
                    if isinstance(artifact, str):
                        path = artifact
                        dest = None
                        extract = True
                    else:
                        path = artifact["artifact"]
                        dest = artifact.get("dest")
                        extract = artifact.get("extract", True)

                    # NOTE(review): unlike the other two copies of this
                    # function in this file, this revision does not strip a
                    # leading "/" from absolute artifact paths — confirm
                    # whether that feature is expected at this revision.
                    fetch = {
                        "artifact": f"{prefix}/{path}",
                        "task": f"<{kind}>",
                        "extract": extract,
                    }
                    if dest is not None:
                        fetch["dest"] = dest
                    job_fetches.append(fetch)

        # Any non-public artifact requires an explicit get-artifact scope.
        job_artifact_prefixes = {
            mozpath.dirname(fetch["artifact"])
            for fetch in job_fetches
            if not fetch["artifact"].startswith("public/")
        }
        if job_artifact_prefixes:
            # Use taskcluster-proxy and request appropriate scope. For example, add
            # 'scopes: [queue:get-artifact:path/to/*]' for 'path/to/artifact.tar.xz'.
            worker["taskcluster-proxy"] = True
            for prefix in sorted(job_artifact_prefixes):
                scope = f"queue:get-artifact:{prefix}/*"
                if scope not in job.setdefault("scopes", []):
                    job["scopes"].append(scope)

        env = worker.setdefault("env", {})
        env["MOZ_FETCHES"] = {
            "task-reference": json.dumps(job_fetches, sort_keys=True)
        }
        env.setdefault("MOZ_FETCHES_DIR", "fetches")
        yield job