Example 1
def _root_url(self, artifactdir=None, objdir=None):
    """Generate a publicly-accessible URL for the task's artifacts, or an objdir path"""
    if 'TASK_ID' in os.environ and 'RUN_ID' in os.environ:
        import taskcluster_urls
        from taskgraph.util.taskcluster import get_root_url
        return taskcluster_urls.api(
            get_root_url(False), 'queue', 'v1',
            'task/{}/runs/{}/artifacts/{}'.format(
                os.environ['TASK_ID'], os.environ['RUN_ID'], artifactdir))
    else:
        return os.path.join(self.topobjdir, objdir)
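
The method branches on whether it is running inside a Taskcluster task: when TASK_ID and RUN_ID are set it builds a queue artifact URL, otherwise it falls back to a path under the local objdir. A minimal sketch of the URL composition, using a hypothetical root URL and task/run IDs (taskcluster_urls.api joins the root URL, service name, version and path):

import taskcluster_urls

# Hypothetical values; in the method above these come from os.environ.
url = taskcluster_urls.api(
    'https://taskcluster.example.com', 'queue', 'v1',
    'task/{}/runs/{}/artifacts/{}'.format('Abc123DefGh', '0', 'public/build'))
# -> https://taskcluster.example.com/api/queue/v1/task/Abc123DefGh/runs/0/artifacts/public/build
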
Example 2
def docker_worker_debian_package(config, job, taskdesc):
    run = job['run']

    name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)

    arch = run.get('arch', 'amd64')

    worker = taskdesc['worker']
    worker['artifacts'] = []
    version = {
        'wheezy': 7,
        'jessie': 8,
        'stretch': 9,
        'buster': 10,
    }[run['dist']]
    image = 'debian%d' % version
    if arch != 'amd64':
        image += '-' + arch
    image += '-packages'
    worker['docker-image'] = {'in-tree': image}

    add_artifacts(config, job, taskdesc, path='/tmp/artifacts')

    env = worker.setdefault('env', {})
    env['DEBFULLNAME'] = 'Mozilla build team'
    env['DEBEMAIL'] = '*****@*****.**'

    if 'dsc' in run:
        src = run['dsc']
        unpack = 'dpkg-source -x {src_file} {package}'
        package_re = DSC_PACKAGE_RE
    elif 'tarball' in run:
        src = run['tarball']
        unpack = ('mkdir {package} && '
                  'tar -C {package} -axf {src_file} --strip-components=1')
        package_re = SOURCE_PACKAGE_RE
    else:
        raise RuntimeError('Unreachable')
    src_url = src['url']
    src_file = os.path.basename(src_url)
    src_sha256 = src['sha256']
    package = run.get('name')
    if not package:
        package = package_re.match(src_file).group(0)
    unpack = unpack.format(src_file=src_file, package=package)

    resolver = run.get('resolver', 'apt-get')
    if resolver == 'apt-get':
        resolver = 'apt-get -yyq --no-install-recommends'
    elif resolver == 'aptitude':
        resolver = ('aptitude -y --without-recommends -o '
                    'Aptitude::ProblemResolver::Hints::KeepBuildDeps='
                    '"reject {}-build-deps :UNINST"').format(package)
    else:
        raise RuntimeError('Unreachable')

    adjust = ''
    if 'patch' in run:
        # We don't use robustcheckout or run-task to get a checkout, so
        # download the one file we would otherwise need from a checkout.
        env["PATCH_URL"] = config.params.file_url(
            "build/debian-packages/{patch}".format(patch=run["patch"]),
        )
        adjust += 'curl -sL $PATCH_URL | patch -p1 && '
    if 'pre-build-command' in run:
        adjust += run['pre-build-command'] + ' && '
    if 'tarball' in run:
        adjust += 'mv ../{src_file} ../{package}_{ver}.orig.tar.gz && '.format(
            src_file=src_file,
            package=package,
            ver='$(dpkg-parsechangelog | awk \'$1=="Version:"{print $2}\' | cut -f 1 -d -)',
        )
    if 'patch' not in run and 'pre-build-command' not in run:
        adjust += ('debchange -l ".{prefix}moz" --distribution "{dist}"'
                   ' "Mozilla backport for {dist}." < /dev/null && ').format(
            prefix=name.split('-', 1)[0],
            dist=run['dist'],
        )

    worker['command'] = [
        'sh',
        '-x',
        '-c',
        # Add sources for packages coming from other package tasks.
        '/usr/local/sbin/setup_packages.sh {root_url} $PACKAGES && '
        'apt-get update && '
        # Upgrade packages that might have new versions in package tasks.
        'apt-get dist-upgrade && '
        'cd /tmp && '
        # Get, validate and extract the package source.
        '(dget -d -u {src_url} || exit 100) && '
        'echo "{src_sha256}  {src_file}" | sha256sum -c && '
        '{unpack} && '
        'cd {package} && '
        # Optionally apply patch and/or pre-build command.
        '{adjust}'
        # Install the necessary build dependencies.
        '(mk-build-deps -i -r debian/control -t \'{resolver}\' || exit 100) && '
        # Build the package
        'DEB_BUILD_OPTIONS="parallel=$(nproc) nocheck" dpkg-buildpackage && '
        # Copy the artifacts
        'mkdir -p {artifacts}/debian && '
        'dcmd cp ../{package}_*.changes {artifacts}/debian/ && '
        'cd {artifacts} && '
        # Make the artifacts directory usable as an APT repository.
        'apt-ftparchive sources debian | gzip -c9 > debian/Sources.gz && '
        'apt-ftparchive packages debian | gzip -c9 > debian/Packages.gz'
        .format(
            root_url=get_root_url(False),
            package=package,
            src_url=src_url,
            src_file=src_file,
            src_sha256=src_sha256,
            unpack=unpack,
            adjust=adjust,
            artifacts='/tmp/artifacts',
            resolver=resolver,
        )
    ]

    if run.get('packages'):
        env = worker.setdefault('env', {})
        env['PACKAGES'] = {
            'task-reference': ' '.join('<{}>'.format(p)
                                       for p in run['packages'])
        }
        deps = taskdesc.setdefault('dependencies', {})
        for p in run['packages']:
            deps[p] = 'packages-{}'.format(p)

    # Use the command generated above as the base for the index hash.
    # We rely on it not varying depending on the head_repository or head_rev.
    digest_data = list(worker['command'])
    if 'patch' in run:
        digest_data.append(
            hash_path(os.path.join(GECKO, 'build', 'debian-packages', run['patch'])))

    if not taskgraph.fast:
        taskdesc['cache'] = {
            'type': 'packages.v1',
            'name': name,
            'digest-data': digest_data
        }
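
The transform reads all of its inputs from the job's run section. As a rough sketch of the expected shape, inferred only from the keys the function reads above (every concrete value here is hypothetical), a run dict exercising the tarball path could look like:

run = {
    'dist': 'stretch',                     # selects the debian9-packages image
    'arch': 'amd64',                       # optional; amd64 is the default
    'tarball': {
        'url': 'https://example.com/source/foo-1.0.tar.gz',
        'sha256': '0' * 64,                # placeholder digest
    },
    'name': 'foo',                         # optional; else derived via SOURCE_PACKAGE_RE
    'resolver': 'apt-get',                 # or 'aptitude'
    'pre-build-command': './autogen.sh',   # optional step before dpkg-buildpackage
    'packages': ['deb9-cmake'],            # other package tasks to install from
}
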
Example 3
def create_interactive_action(parameters, graph_config, input, task_group_id,
                              task_id):
    # fetch the original task definition from the taskgraph, to avoid
    # creating interactive copies of unexpected tasks.  Note that this only applies
    # to docker-worker tasks, so we can assume the docker-worker payload format.
    decision_task_id, full_task_graph, label_to_taskid = fetch_graph_and_labels(
        parameters, graph_config)
    task = taskcluster.get_task_definition(task_id)
    label = task["metadata"]["name"]

    def edit(task):
        if task.label != label:
            return task
        task_def = task.task

        # drop task routes (don't index this!)
        task_def["routes"] = []

        # only try this once
        task_def["retries"] = 0

        # short expirations, at least 3 hour maxRunTime
        task_def["deadline"] = {"relative-datestamp": "12 hours"}
        task_def["created"] = {"relative-datestamp": "0 hours"}
        task_def["expires"] = {"relative-datestamp": "1 day"}

        # filter scopes with the SCOPE_WHITELIST
        task.task["scopes"] = [
            s for s in task.task.get("scopes", []) if any(
                p.match(s) for p in SCOPE_WHITELIST)
        ]

        payload = task_def["payload"]

        # make sure the task runs long enough
        payload["maxRunTime"] = max(3600 * 3, payload.get("maxRunTime", 0))

        # no caches or artifacts
        payload["cache"] = {}
        payload["artifacts"] = {}

        # enable interactive mode
        payload.setdefault("features", {})["interactive"] = True
        payload.setdefault("env", {})["TASKCLUSTER_INTERACTIVE"] = "true"

        return task

    # Create the task and any of its dependencies. This uses a new taskGroupId to avoid
    # polluting the existing taskGroup with interactive tasks.
    action_task_id = os.environ.get("TASK_ID")
    label_to_taskid = create_tasks(
        graph_config,
        [label],
        full_task_graph,
        label_to_taskid,
        parameters,
        decision_task_id=action_task_id,
        modifier=edit,
    )

    taskId = label_to_taskid[label]
    logger.info(
        "Created interactive task {}; sending notification".format(taskId))

    if input and "notify" in input:
        email = input["notify"]
        # no point sending to a noreply address!
        if email == "*****@*****.**":
            return

        info = {
            "url": taskcluster_urls.ui(
                get_root_url(False), "tasks/{}/connect".format(taskId)),
            "label": label,
            "revision": parameters["head_rev"],
            "repo": parameters["head_repository"],
        }
        send_email(
            email,
            subject=EMAIL_SUBJECT.format(**info),
            content=EMAIL_CONTENT.format(**info),
            link={
                "text": "Connect",
                "href": info["url"],
            },
            use_proxy=True,
        )
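
SCOPE_WHITELIST is consulted with p.match(s), so it is presumably a list of compiled regular expressions. A small sketch of the filtering step, with hypothetical patterns and scopes:

import re

SCOPE_WHITELIST = [  # hypothetical patterns, for illustration only
    re.compile(r'docker-worker:cache:.*'),
    re.compile(r'queue:get-artifact:.*'),
]
scopes = ['docker-worker:cache:level-1-checkouts',
          'secrets:get:project/releng/signing-key']
kept = [s for s in scopes if any(p.match(s) for p in SCOPE_WHITELIST)]
# kept == ['docker-worker:cache:level-1-checkouts']
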
Example 4
def fill_template(config, tasks):
    available_packages = set()
    for task in config.kind_dependencies_tasks:
        if task.kind != 'packages':
            continue
        name = task.label.replace('packages-', '')
        available_packages.add(name)

    context_hashes = {}

    for task in order_image_tasks(config, tasks):
        image_name = task.pop('name')
        job_symbol = task.pop('symbol')
        args = task.pop('args', {})
        definition = task.pop('definition', image_name)
        packages = task.pop('packages', [])
        parent = task.pop('parent', None)

        for p in packages:
            if p not in available_packages:
                raise Exception('Missing package job for {}-{}: {}'.format(
                    config.kind, image_name, p))

        # Generating the context hash relies on arguments being set, so we
        # set this now, although it's not the final value (it's a
        # task-reference value, see further below). We add the package routes
        # containing a hash to get the overall docker image hash, so changes
        # to packages will be reflected in the docker image hash.
        args['DOCKER_IMAGE_PACKAGES'] = ' '.join('<{}>'.format(p)
                                                 for p in packages)
        if parent:
            args['DOCKER_IMAGE_PARENT'] = '{}:{}'.format(
                parent, context_hashes[parent])

        args['TASKCLUSTER_ROOT_URL'] = get_root_url(False)

        if not taskgraph.fast:
            context_path = os.path.join('taskcluster', 'docker', definition)
            context_hash = generate_context_hash(GECKO, context_path,
                                                 image_name, args)
        else:
            context_hash = '0' * 40
        digest_data = [context_hash]
        context_hashes[image_name] = context_hash

        description = 'Build the docker image {} for use by dependent tasks'.format(
            image_name)

        # Adjust the zstandard compression level based on the execution level.
        # We use faster compression for level 1 because we care more about
        # end-to-end times. We use slower/better compression for other levels
        # because images are read more often and it is worth the trade-off to
        # burn more CPU once to reduce image size.
        zstd_level = '3' if int(config.params['level']) == 1 else '10'

        # include some information that is useful in reconstructing this task
        # from JSON
        taskdesc = {
            'label': 'build-docker-image-' + image_name,
            'description': description,
            'attributes': {'image_name': image_name},
            'expires-after': '28 days' if config.params.is_try() else '1 year',
            'scopes': [
                'secrets:get:project/taskcluster/gecko/hgfingerprint',
                'secrets:get:project/taskcluster/gecko/hgmointernal',
            ],
            'treeherder': {
                'symbol': job_symbol,
                'platform': 'taskcluster-images/opt',
                'kind': 'other',
                'tier': 1,
            },
            'run-on-projects': [],
            'worker-type': 'images',
            'worker': {
                'implementation': 'docker-worker',
                'os': 'linux',
                'artifacts': [{
                    'type': 'file',
                    'path': '/builds/worker/workspace/artifacts/image.tar.zst',
                    'name': 'public/image.tar.zst',
                }],
                'env': {
                    'HG_STORE_PATH': '/builds/worker/checkouts/hg-store',
                    'HASH': context_hash,
                    'PROJECT': config.params['project'],
                    'IMAGE_NAME': image_name,
                    'DOCKER_IMAGE_ZSTD_LEVEL': zstd_level,
                    'GECKO_BASE_REPOSITORY': config.params['base_repository'],
                    'GECKO_HEAD_REPOSITORY': config.params['head_repository'],
                    'GECKO_HEAD_REV': config.params['head_rev'],
                },
                'chain-of-trust': True,
                'docker-in-docker': True,
                'taskcluster-proxy': True,
                'max-run-time': 7200,
            },
        }
        # Retry for 'funsize-update-generator' if exit status code is -1
        if image_name in ['funsize-update-generator']:
            taskdesc['worker']['retry-exit-status'] = [-1]

        worker = taskdesc['worker']

        # We use the in-tree image_builder image to build docker images, but
        # that can't be used to build the image_builder image itself,
        # obviously. So we fall back to an image on docker hub, identified
        # by hash.  After the image-builder image is updated, it's best to push
        # and update this hash as well, to keep image-builder builds up to date.
        if image_name == 'image_builder':
            hash = 'sha256:c6622fd3e5794842ad83d129850330b26e6ba671e39c58ee288a616a3a1c4c73'
            worker['docker-image'] = 'taskcluster/image_builder@' + hash
            # Keep in sync with the Dockerfile used to generate the
            # docker image whose digest is referenced above.
            worker['volumes'] = [
                '/builds/worker/checkouts',
                '/builds/worker/workspace',
            ]
            cache_name = 'imagebuilder-v1'
        else:
            worker['docker-image'] = {'in-tree': 'image_builder'}
            cache_name = 'imagebuilder-sparse-{}'.format(_run_task_suffix())
            # Force images built against the in-tree image builder to
            # have a different digest by adding a fixed string to the
            # hashed data.
            # Append to this data whenever the image builder's output behavior
            # is changed, in order to force all downstream images to be rebuilt and
            # cached distinctly.
            digest_data.append('image_builder')
            # Updated for squashing images in Bug 1527394
            digest_data.append('squashing layers')

        worker['caches'] = [{
            'type': 'persistent',
            'name': cache_name,
            'mount-point': '/builds/worker/checkouts',
        }]

        for k, v in args.items():
            if k == 'DOCKER_IMAGE_PACKAGES':
                worker['env'][k] = {'task-reference': v}
            else:
                worker['env'][k] = v

        if packages:
            deps = taskdesc.setdefault('dependencies', {})
            for p in sorted(packages):
                deps[p] = 'packages-{}'.format(p)

        if parent:
            deps = taskdesc.setdefault('dependencies', {})
            deps['parent'] = 'build-docker-image-{}'.format(parent)
            worker['env']['DOCKER_IMAGE_PARENT_TASK'] = {
                'task-reference': '<parent>',
            }
        if 'index' in task:
            taskdesc['index'] = task['index']

        if task.get('cache', True) and not taskgraph.fast:
            taskdesc['cache'] = {
                'type': 'docker-images.v2',
                'name': image_name,
                'digest-data': digest_data,
            }

        yield taskdesc
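
fill_template pops a fixed set of keys from each image task before building the task description. As a sketch, a hypothetical input in that shape (a parent must be ordered earlier by order_image_tasks so that its context hash is already present in context_hashes):

image_task = {
    'name': 'custom-build',                  # label becomes build-docker-image-custom-build
    'symbol': 'I(cst)',                      # Treeherder symbol
    'definition': 'custom-build',            # context dir taskcluster/docker/custom-build
    'parent': 'debian10-base',               # hypothetical parent image, built first
    'packages': ['deb10-python-zstandard'],  # requires a matching packages-* task
    'args': {'ARCH': 'amd64'},               # extra docker build arguments
}
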
Example 5
def create_interactive_action(parameters, graph_config, input, task_group_id,
                              task_id):
    # fetch the original task definition from the taskgraph, to avoid
    # creating interactive copies of unexpected tasks.  Note that this only applies
    # to docker-worker tasks, so we can assume the docker-worker payload format.
    decision_task_id, full_task_graph, label_to_taskid = fetch_graph_and_labels(
        parameters, graph_config)
    task = taskcluster.get_task_definition(task_id)
    label = task['metadata']['name']

    def edit(task):
        if task.label != label:
            return task
        task_def = task.task

        # drop task routes (don't index this!)
        task_def['routes'] = []

        # only try this once
        task_def['retries'] = 0

        # short expirations, at least 3 hour maxRunTime
        task_def['deadline'] = {'relative-datestamp': '12 hours'}
        task_def['created'] = {'relative-datestamp': '0 hours'}
        task_def['expires'] = {'relative-datestamp': '1 day'}

        # filter scopes with the SCOPE_WHITELIST
        task.task['scopes'] = [
            s for s in task.task.get('scopes', []) if any(
                p.match(s) for p in SCOPE_WHITELIST)
        ]

        payload = task_def['payload']

        # make sure the task runs long enough
        payload['maxRunTime'] = max(3600 * 3, payload.get('maxRunTime', 0))

        # no caches or artifacts
        payload['cache'] = {}
        payload['artifacts'] = {}

        # enable interactive mode
        payload.setdefault('features', {})['interactive'] = True
        payload.setdefault('env', {})['TASKCLUSTER_INTERACTIVE'] = 'true'

        return task

    # Create the task and any of its dependencies. This uses a new taskGroupId to avoid
    # polluting the existing taskGroup with interactive tasks.
    label_to_taskid = create_tasks(graph_config, [label],
                                   full_task_graph,
                                   label_to_taskid,
                                   parameters,
                                   modifier=edit)

    taskId = label_to_taskid[label]
    logger.info(
        'Created interactive task {}; sending notification'.format(taskId))

    if input and 'notify' in input:
        email = input['notify']
        # no point sending to a noreply address!
        if email == '*****@*****.**':
            return

        info = {
            'url': taskcluster_urls.ui(
                get_root_url(False), 'tasks/{}/connect'.format(taskId)),
            'label': label,
            'revision': parameters['head_rev'],
            'repo': parameters['head_repository'],
        }
        send_email(email,
                   subject=EMAIL_SUBJECT.format(**info),
                   content=EMAIL_CONTENT.format(**info),
                   link={
                       'text': 'Connect',
                       'href': info['url'],
                   },
                   use_proxy=True)
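
The deadline, created and expires values are left as relative-datestamp placeholders; taskgraph resolves them to absolute timestamps when the task is actually created. A rough sketch of the intended arithmetic (illustrative only, not the resolver itself):

from datetime import datetime, timedelta

created = datetime.utcnow()
deadline = created + timedelta(hours=12)  # {'relative-datestamp': '12 hours'}
expires = created + timedelta(days=1)     # {'relative-datestamp': '1 day'}
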
Example 6
def docker_worker_debian_package(config, job, taskdesc):
    run = job["run"]

    name = taskdesc["label"].replace("{}-".format(config.kind), "", 1)

    arch = run.get("arch", "amd64")

    worker = taskdesc["worker"]
    worker.setdefault("artifacts", [])
    version = {
        "wheezy": 7,
        "jessie": 8,
        "stretch": 9,
        "buster": 10,
    }[run["dist"]]
    image = "debian%d" % version
    if arch != "amd64":
        image += "-" + arch
    image += "-packages"
    worker["docker-image"] = {"in-tree": image}

    add_artifacts(config, job, taskdesc, path="/tmp/artifacts")

    env = worker.setdefault("env", {})
    env["DEBFULLNAME"] = "Mozilla build team"
    env["DEBEMAIL"] = "*****@*****.**"

    if "dsc" in run:
        src = run["dsc"]
        unpack = "dpkg-source -x {src_file} {package}"
        package_re = DSC_PACKAGE_RE
    elif "tarball" in run:
        src = run["tarball"]
        unpack = (
            "mkdir {package} && "
            "tar -C {package} -axf {src_file} --strip-components=1"
        )
        package_re = SOURCE_PACKAGE_RE
    else:
        raise RuntimeError("Unreachable")
    src_url = src["url"]
    src_file = os.path.basename(src_url)
    src_sha256 = src["sha256"]
    package = run.get("name")
    if not package:
        package = package_re.match(src_file).group(0)
    unpack = unpack.format(src_file=src_file, package=package)

    resolver = run.get("resolver", "apt-get")
    if resolver == "apt-get":
        resolver = "apt-get -yyq --no-install-recommends"
    elif resolver == "aptitude":
        resolver = (
            "aptitude -y --without-recommends -o "
            "Aptitude::ProblemResolver::Hints::KeepBuildDeps="
            '"reject {}-build-deps :UNINST"'
        ).format(package)
    else:
        raise RuntimeError("Unreachable")

    adjust = ""
    if "patch" in run:
        # We don't use robustcheckout or run-task to get a checkout, so
        # download the one file we would otherwise need from a checkout.
        env["PATCH_URL"] = config.params.file_url(
            "build/debian-packages/{patch}".format(patch=run["patch"]),
        )
        adjust += "curl -sL $PATCH_URL | patch -p1 && "
    if "pre-build-command" in run:
        adjust += run["pre-build-command"] + " && "
    if "tarball" in run:
        adjust += "mv ../{src_file} ../{package}_{ver}.orig.tar.gz && ".format(
            src_file=src_file,
            package=package,
            ver="$(dpkg-parsechangelog | awk '$1==\"Version:\"{print $2}' | cut -f 1 -d -)",
        )
    if "patch" not in run and "pre-build-command" not in run:
        adjust += (
            'debchange -l ".{prefix}moz" --distribution "{dist}"'
            ' "Mozilla backport for {dist}." < /dev/null && '
        ).format(
            prefix=name.split("-", 1)[0],
            dist=run["dist"],
        )

    worker["command"] = [
        "sh",
        "-x",
        "-c",
        # Add sources for packages coming from other package tasks.
        "/usr/local/sbin/setup_packages.sh {root_url} $PACKAGES && "
        "apt-get update && "
        # Upgrade packages that might have new versions in package tasks.
        "apt-get dist-upgrade && " "cd /tmp && "
        # Get, validate and extract the package source.
        "(dget -d -u {src_url} || exit 100) && "
        'echo "{src_sha256}  {src_file}" | sha256sum -c && '
        "{unpack} && "
        "cd {package} && "
        # Optionally apply patch and/or pre-build command.
        "{adjust}"
        # Install the necessary build dependencies.
        "(mk-build-deps -i -r debian/control -t '{resolver}' || exit 100) && "
        # Build the package
        'DEB_BUILD_OPTIONS="parallel=$(nproc) nocheck" dpkg-buildpackage && '
        # Copy the artifacts
        "mkdir -p {artifacts}/debian && "
        "dcmd cp ../{package}_*.changes {artifacts}/debian/ && "
        "cd {artifacts} && "
        # Make the artifacts directory usable as an APT repository.
        "apt-ftparchive sources debian | gzip -c9 > debian/Sources.gz && "
        "apt-ftparchive packages debian | gzip -c9 > debian/Packages.gz".format(
            root_url=get_root_url(False),
            package=package,
            src_url=src_url,
            src_file=src_file,
            src_sha256=src_sha256,
            unpack=unpack,
            adjust=adjust,
            artifacts="/tmp/artifacts",
            resolver=resolver,
        ),
    ]

    if run.get("packages"):
        env = worker.setdefault("env", {})
        env["PACKAGES"] = {
            "task-reference": " ".join("<{}>".format(p) for p in run["packages"])
        }
        deps = taskdesc.setdefault("dependencies", {})
        for p in run["packages"]:
            deps[p] = "packages-{}".format(p)

    # Use the command generated above as the base for the index hash.
    # We rely on it not varying depending on the head_repository or head_rev.
    digest_data = list(worker["command"])
    if "patch" in run:
        digest_data.append(
            hash_path(os.path.join(GECKO, "build", "debian-packages", run["patch"]))
        )

    if not taskgraph.fast:
        taskdesc["cache"] = {
            "type": "packages.v1",
            "name": name,
            "digest-data": digest_data,
        }
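
The PACKAGES value is a task-reference: at task-creation time each <key> placeholder is replaced with the task ID of the dependency registered under that key. A sketch of how the placeholders and dependencies line up, with hypothetical package names:

env = {}
deps = {}
packages = ["deb9-cmake", "deb9-ninja"]
env["PACKAGES"] = {"task-reference": " ".join("<{}>".format(p) for p in packages)}
# -> {'task-reference': '<deb9-cmake> <deb9-ninja>'}
for p in packages:
    deps[p] = "packages-{}".format(p)
# After resolution, $PACKAGES in the worker command expands to the two task IDs.
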
Example 7
def docker_worker_debian_package(config, job, taskdesc):
    run = job['run']

    name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)

    docker_repo = 'debian'
    arch = run.get('arch', 'amd64')
    if arch != 'amd64':
        docker_repo = '{}/{}'.format(arch, docker_repo)

    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker['docker-image'] = '{repo}:{dist}-{date}'.format(
        repo=docker_repo, dist=run['dist'], date=run['snapshot'][:8])
    # Retry on apt-get errors.
    worker['retry-exit-status'] = [100]

    add_artifacts(config, job, taskdesc, path='/tmp/artifacts')

    env = worker.setdefault('env', {})
    env['DEBFULLNAME'] = 'Mozilla build team'
    env['DEBEMAIL'] = '*****@*****.**'

    if 'dsc' in run:
        src = run['dsc']
        unpack = 'dpkg-source -x {src_file} {package}'
        package_re = DSC_PACKAGE_RE
    elif 'tarball' in run:
        src = run['tarball']
        unpack = ('mkdir {package} && '
                  'tar -C {package} -axf {src_file} --strip-components=1')
        package_re = SOURCE_PACKAGE_RE
    else:
        raise RuntimeError('Unreachable')
    src_url = src['url']
    src_file = os.path.basename(src_url)
    src_sha256 = src['sha256']
    package = package_re.match(src_file).group(0)
    unpack = unpack.format(src_file=src_file, package=package)

    base_deps = [
        'apt-utils',
        'build-essential',
        'devscripts',
        'fakeroot',
    ]

    resolver = run.get('resolver', 'apt-get')
    if resolver == 'apt-get':
        resolver = 'apt-get -yyq --no-install-recommends'
    elif resolver == 'aptitude':
        resolver = ('aptitude -y --without-recommends -o '
                    'Aptitude::ProblemResolver::Hints::KeepBuildDeps='
                    '"reject {}-build-deps :UNINST"').format(package)
        base_deps.append('aptitude')
    else:
        raise RuntimeError('Unreachable')

    adjust = ''
    if 'patch' in run:
        # We can't depend on docker images, so we don't have robustcheckout
        # or run-task to get a checkout. Instead, download the one file we
        # would otherwise need from a checkout.
        env['PATCH_URL'] = '{head_repo}/raw-file/{head_rev}/build/debian-packages/{patch}'.format(
            head_repo=config.params['head_repository'],
            head_rev=config.params['head_rev'],
            patch=run['patch'],
        )
        adjust += 'curl -sL $PATCH_URL | patch -p1 && '
    if 'pre-build-command' in run:
        adjust += run['pre-build-command'] + ' && '
    if 'tarball' in run:
        adjust += 'mv ../{src_file} ../{package}_{ver}.orig.tar.gz && '.format(
            src_file=src_file,
            package=package,
            ver='$(dpkg-parsechangelog | awk \'$1=="Version:"{print $2}\' | cut -f 1 -d -)',
        )
    if 'patch' not in run and 'pre-build-command' not in run:
        adjust += ('debchange -l ".{prefix}moz" --distribution "{dist}"'
                   ' "Mozilla backport for {dist}." < /dev/null && ').format(
                       prefix=name.split('-', 1)[0],
                       dist=run['dist'],
                   )

    queue_url = taskcluster_urls.api(get_root_url(), 'queue', 'v1', '')

    # We can't depend on docker images (since docker images depend on packages),
    # so we inline the whole script here.
    worker['command'] = [
        'sh',
        '-x',
        '-c',
        # Fill /etc/apt/sources.list with the relevant snapshot repository.
        'echo "deb http://snapshot.debian.org/archive/debian'
        '/{snapshot}/ {dist} main" > /etc/apt/sources.list && '
        'echo "deb http://snapshot.debian.org/archive/debian'
        '/{snapshot}/ {dist}-updates main" >> /etc/apt/sources.list && '
        'echo "deb http://snapshot.debian.org/archive/debian'
        '/{snapshot}/ {dist}-backports main" >> /etc/apt/sources.list && '
        'echo "deb http://snapshot.debian.org/archive/debian-security'
        '/{snapshot}/ {dist}/updates main" >> /etc/apt/sources.list && '
        'apt-get update -o Acquire::Check-Valid-Until=false -q && '
        # Add sources for packages coming from other package tasks.
        'apt-get install -yyq apt-transport-https ca-certificates && '
        'for task in $PACKAGES; do '
        '  echo "deb [trusted=yes] {queue_url}task/$task/artifacts/public/build/ debian/" '
        '>> /etc/apt/sources.list; '
        'done && '
        # Install the base utilities required to build debian packages.
        'apt-get update -o Acquire::Check-Valid-Until=false -q && '
        'apt-get install -yyq {base_deps} && '
        'cd /tmp && '
        # Get, validate and extract the package source.
        'dget -d -u {src_url} && '
        'echo "{src_sha256}  {src_file}" | sha256sum -c && '
        '{unpack} && '
        'cd {package} && '
        # Optionally apply patch and/or pre-build command.
        '{adjust}'
        # Install the necessary build dependencies.
        'mk-build-deps -i -r debian/control -t \'{resolver}\' && '
        # Build the package
        'DEB_BUILD_OPTIONS="parallel=$(nproc) nocheck" dpkg-buildpackage && '
        # Copy the artifacts
        'mkdir -p {artifacts}/debian && '
        'dcmd cp ../{package}_*.changes {artifacts}/debian/ && '
        'cd {artifacts} && '
        # Make the artifacts directory usable as an APT repository.
        'apt-ftparchive sources debian | gzip -c9 > debian/Sources.gz && '
        'apt-ftparchive packages debian | gzip -c9 > debian/Packages.gz'
        .format(
            queue_url=queue_url,
            package=package,
            snapshot=run['snapshot'],
            dist=run['dist'],
            src_url=src_url,
            src_file=src_file,
            src_sha256=src_sha256,
            unpack=unpack,
            adjust=adjust,
            artifacts='/tmp/artifacts',
            base_deps=' '.join(base_deps),
            resolver=resolver,
        )
    ]

    if run.get('packages'):
        env = worker.setdefault('env', {})
        env['PACKAGES'] = {
            'task-reference':
            ' '.join('<{}>'.format(p) for p in run['packages'])
        }
        deps = taskdesc.setdefault('dependencies', {})
        for p in run['packages']:
            deps[p] = 'packages-{}'.format(p)

    # Use the command generated above as the base for the index hash.
    # We rely on it not varying depending on the head_repository or head_rev.
    digest_data = list(worker['command'])
    if 'patch' in run:
        digest_data.append(
            hash_path(
                os.path.join(GECKO, 'build', 'debian-packages', run['patch'])))

    if docker_repo != 'debian':
        digest_data.append(docker_repo)

    if not taskgraph.fast:
        taskdesc['cache'] = {
            'type': 'packages.v1',
            'name': name,
            'digest-data': digest_data
        }
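
For every task ID in $PACKAGES, the inlined script appends an apt source pointing at that task's public/build artifacts on the queue. A sketch of one rendered sources.list line, with a hypothetical root URL and task ID:

# queue_url as produced by taskcluster_urls.api(get_root_url(), 'queue', 'v1', '')
queue_url = 'https://taskcluster.example.com/api/queue/v1/'
line = ('deb [trusted=yes] {queue_url}task/{task}/artifacts/public/build/ '
        'debian/').format(queue_url=queue_url, task='Abc123DefGh')
# -> deb [trusted=yes] https://taskcluster.example.com/api/queue/v1/task/Abc123DefGh/artifacts/public/build/ debian/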