Code example #1
def build_cache(config, tasks):
    repo_name = subprocess.check_output(
        ["git", "remote", "get-url", "origin"], text=True
    ).rstrip()
    # str.replace(".git", "") could match elsewhere in the URL; strip suffixes.
    repo_name = repo_name.rstrip("/")
    if repo_name.endswith(".git"):
        repo_name = repo_name[: -len(".git")]
    repo_name = repo_name.split("/")[-1]

    for task in tasks:
        if task.get("cache", True) and not taskgraph.fast:
            directory = task.get("extra", {}).get("directory", BASE_DIR)
            directory = os.path.join(BASE_DIR, directory)
            files = list_files(directory)
            files.update(list_files(os.path.join(BASE_DIR, "taskcluster")))
            for path in ADDITIONAL_FILES:
                if os.path.exists(path):
                    files.update({path})
            h = hashlib.sha256()
            for path in sorted(files):
                # hashlib requires bytes, so encode the "digest path" line.
                h.update(
                    "{} {}\n".format(
                        hash_path(os.path.realpath(os.path.join(BASE_DIR, path))), path
                    ).encode("utf-8")
                )
            task.setdefault("attributes", {}).setdefault("cached_task", {})
            cache_name = task["label"].replace(":", "-")
            task["cache"] = {
                "type": "{}.v2".format(repo_name),
                "name": cache_name,
                "digest-data": [h.hexdigest()],
            }

        yield task
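Example #1 leans on module-level helpers that are not shown: BASE_DIR, ADDITIONAL_FILES, list_files and hash_path. A minimal sketch of what they might look like, purely for illustration (the names match the example, but the bodies below are assumptions, not the upstream implementations):

import hashlib
import os

# Hypothetical stand-ins for the helpers example #1 relies on.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
ADDITIONAL_FILES = []  # extra files folded into every digest, if any


def list_files(directory):
    """Return the set of file paths under `directory`, relative to BASE_DIR."""
    found = set()
    for root, _dirs, names in os.walk(directory):
        for name in names:
            found.add(os.path.relpath(os.path.join(root, name), BASE_DIR))
    return found


def hash_path(path):
    """Hex sha256 of a single file's contents."""
    h = hashlib.sha256()
    with open(path, "rb") as fh:
        for chunk in iter(lambda: fh.read(1024 * 1024), b""):
            h.update(chunk)
    return h.hexdigest()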
Code example #2
def hash_taskcluster_yml(filename):
    '''
    Generate a hash of the given .taskcluster.yml.  This is the first 10 hex
    characters of the sha256 of the file's content, and is used by
    administrative scripts to create a hook based on this content.
    '''
    return hash.hash_path(filename)[:10]
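A quick usage sketch (the hook naming below is made up for illustration):

# Illustrative only: use the short digest to derive a stable hook identifier,
# so the hook changes exactly when .taskcluster.yml does.
short_digest = hash_taskcluster_yml(".taskcluster.yml")
hook_id = "in/my-repo/deploy-{}".format(short_digest)  # hypothetical scheme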
Code example #3
def build_cache(config, tasks):
    for task in tasks:
        # Only cache tasks on PRs and push. Ignore actions.
        if config.params["tasks_for"] not in ("github-pull-request", "github-push"):
            yield task
            continue
        if task.get("cache", True) and not taskgraph.fast:
            digest_data = []
            digest_data.append(
                json.dumps(
                    task.get("attributes", {}).get("digest-extra", {}),
                    indent=2,
                    sort_keys=True,
                )
            )
            resources = task["attributes"]["resources"]
            for resource in resources:
                path = os.path.join(BASE_DIR, resource)
                if os.path.isdir(path):
                    digest_data.append(hash_paths(path, [""]))
                elif os.path.isfile(path):
                    digest_data.append(hash_path(path))
                else:
                    raise Exception(f"Unknown resource {resource}")
            cache_name = task["name"].replace(":", "-")
            task["cache"] = {
                "type": f"xpi-manifest.v1.{config.kind}",
                "name": cache_name,
                "digest-data": digest_data,
            }

        yield task
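For context, a sketch of the task shape example #3 expects: attributes.resources lists paths relative to BASE_DIR that feed the digest, and digest-extra lets callers force cache invalidation. All values below are made up:

# Hypothetical input task, shaped the way build_cache() consumes it.
task = {
    "name": "build:addon",
    "cache": True,
    "attributes": {
        "resources": ["taskcluster", "requirements.txt"],
        "digest-extra": {"schema-version": 1},
    },
}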
Code example #4
def hash_paths_extended(base_path, patterns):
    """
    Works like taskgraph.util.hash.hash_paths, except it is able to account for Thunderbird source
    code being part of a separate repository.
    Two file finders are created if necessary.
    """
    gecko_patterns, comm_patterns = split_patterns_list(patterns)
    gecko_finder = get_file_finder(base_path)
    comm_finder = get_file_finder(mozpath.join(base_path, "comm"))

    h = hashlib.sha256()
    files = []
    for pattern_list, finder, prefix in [
        (gecko_patterns, gecko_finder, None),
        (comm_patterns, comm_finder, "comm/"),
    ]:
        for pattern in pattern_list:
            if prefix and pattern.startswith(prefix):
                # str.lstrip() strips a character set, not a prefix, so slice
                # the prefix off explicitly.
                pattern = pattern[len(prefix):]
            found = list(process_found(finder.find(pattern), prefix))
            if found:
                files.extend(found)
            else:
                raise Exception("%s did not match anything" % pattern)
    for path in sorted(files):
        if path.endswith((".pyc", ".pyd", ".pyo")):
            continue
        h.update(
            six.ensure_binary("{} {}\n".format(
                hash_path(mozpath.abspath(mozpath.join(base_path, path))),
                mozpath.normsep(path),
            )))
    return h.hexdigest()
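Example #4 also calls split_patterns_list and get_file_finder, which are defined elsewhere. A plausible sketch of the splitting step, assuming comm patterns are distinguished by a "comm/" prefix (an assumption consistent with how the prefix is used above, not the upstream code):

def split_patterns_list(patterns):
    """Partition patterns into (gecko, comm) lists by a "comm/" prefix.

    Hypothetical helper: the real implementation may differ.
    """
    comm_patterns = [p for p in patterns if p.startswith("comm/")]
    gecko_patterns = [p for p in patterns if not p.startswith("comm/")]
    return gecko_patterns, comm_patterns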
Code example #5
def docker_worker_debian_package(config, job, taskdesc):
    run = job['run']

    name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)

    arch = run.get('arch', 'amd64')

    worker = taskdesc['worker']
    worker['artifacts'] = []
    version = {
        'wheezy': 7,
        'jessie': 8,
        'stretch': 9,
        'buster': 10,
    }[run['dist']]
    image = 'debian%d' % version
    if arch != 'amd64':
        image += '-' + arch
    image += '-packages'
    worker['docker-image'] = {'in-tree': image}

    add_artifacts(config, job, taskdesc, path='/tmp/artifacts')

    env = worker.setdefault('env', {})
    env['DEBFULLNAME'] = 'Mozilla build team'
    env['DEBEMAIL'] = '*****@*****.**'

    if 'dsc' in run:
        src = run['dsc']
        unpack = 'dpkg-source -x {src_file} {package}'
        package_re = DSC_PACKAGE_RE
    elif 'tarball' in run:
        src = run['tarball']
        unpack = ('mkdir {package} && '
                  'tar -C {package} -axf {src_file} --strip-components=1')
        package_re = SOURCE_PACKAGE_RE
    else:
        raise RuntimeError('Unreachable')
    src_url = src['url']
    src_file = os.path.basename(src_url)
    src_sha256 = src['sha256']
    package = run.get('name')
    if not package:
        package = package_re.match(src_file).group(0)
    unpack = unpack.format(src_file=src_file, package=package)

    resolver = run.get('resolver', 'apt-get')
    if resolver == 'apt-get':
        resolver = 'apt-get -yyq --no-install-recommends'
    elif resolver == 'aptitude':
        resolver = ('aptitude -y --without-recommends -o '
                    'Aptitude::ProblemResolver::Hints::KeepBuildDeps='
                    '"reject {}-build-deps :UNINST"').format(package)
    else:
        raise RuntimeError('Unreachable')

    adjust = ''
    if 'patch' in run:
        # We don't use robustcheckout or run-task to get a checkout, so
        # download the one file we'd otherwise need a checkout for.
        env["PATCH_URL"] = config.params.file_url(
            "build/debian-packages/{patch}".format(patch=run["patch"]),
        )
        adjust += 'curl -sL $PATCH_URL | patch -p1 && '
    if 'pre-build-command' in run:
        adjust += run['pre-build-command'] + ' && '
    if 'tarball' in run:
        adjust += 'mv ../{src_file} ../{package}_{ver}.orig.tar.gz && '.format(
            src_file=src_file,
            package=package,
            ver='$(dpkg-parsechangelog | awk \'$1=="Version:"{print $2}\' | cut -f 1 -d -)',
        )
    if 'patch' not in run and 'pre-build-command' not in run:
        adjust += ('debchange -l ".{prefix}moz" --distribution "{dist}"'
                   ' "Mozilla backport for {dist}." < /dev/null && ').format(
            prefix=name.split('-', 1)[0],
            dist=run['dist'],
        )

    worker['command'] = [
        'sh',
        '-x',
        '-c',
        # Add sources for packages coming from other package tasks.
        '/usr/local/sbin/setup_packages.sh {root_url} $PACKAGES && '
        'apt-get update && '
        # Upgrade packages that might have new versions in package tasks.
        'apt-get dist-upgrade && '
        'cd /tmp && '
        # Get, validate and extract the package source.
        '(dget -d -u {src_url} || exit 100) && '
        'echo "{src_sha256}  {src_file}" | sha256sum -c && '
        '{unpack} && '
        'cd {package} && '
        # Optionally apply patch and/or pre-build command.
        '{adjust}'
        # Install the necessary build dependencies.
        '(mk-build-deps -i -r debian/control -t \'{resolver}\' || exit 100) && '
        # Build the package
        'DEB_BUILD_OPTIONS="parallel=$(nproc) nocheck" dpkg-buildpackage && '
        # Copy the artifacts
        'mkdir -p {artifacts}/debian && '
        'dcmd cp ../{package}_*.changes {artifacts}/debian/ && '
        'cd {artifacts} && '
        # Make the artifacts directory usable as an APT repository.
        'apt-ftparchive sources debian | gzip -c9 > debian/Sources.gz && '
        'apt-ftparchive packages debian | gzip -c9 > debian/Packages.gz'
        .format(
            root_url=get_root_url(False),
            package=package,
            src_url=src_url,
            src_file=src_file,
            src_sha256=src_sha256,
            unpack=unpack,
            adjust=adjust,
            artifacts='/tmp/artifacts',
            resolver=resolver,
        )
    ]

    if run.get('packages'):
        env = worker.setdefault('env', {})
        env['PACKAGES'] = {
            'task-reference': ' '.join('<{}>'.format(p)
                                       for p in run['packages'])
        }
        deps = taskdesc.setdefault('dependencies', {})
        for p in run['packages']:
            deps[p] = 'packages-{}'.format(p)

    # Use the command generated above as the base for the index hash.
    # We rely on it not varying depending on the head_repository or head_rev.
    digest_data = list(worker['command'])
    if 'patch' in run:
        digest_data.append(
            hash_path(os.path.join(GECKO, 'build', 'debian-packages', run['patch'])))

    if not taskgraph.fast:
        taskdesc['cache'] = {
            'type': 'packages.v1',
            'name': name,
            'digest-data': digest_data
        }
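Examples #5 through #7 and #9 extract the package name from the source file name via DSC_PACKAGE_RE or SOURCE_PACKAGE_RE, which are defined elsewhere in the module. Hedged stand-ins that capture the intent (everything before the version separator); these approximate, and may not exactly match, the upstream definitions:

import re

# Hypothetical approximations. A .dsc download is named like
# "zstd_1.3.8.dsc"; a tarball like "cmake-3.14.5.tar.gz".
DSC_PACKAGE_RE = re.compile(r".*(?=_)")          # up to the last "_"
SOURCE_PACKAGE_RE = re.compile(r".*(?=[-_]\d)")  # up to "-<digit>" or "_<digit>"

assert DSC_PACKAGE_RE.match("zstd_1.3.8.dsc").group(0) == "zstd"
assert SOURCE_PACKAGE_RE.match("cmake-3.14.5.tar.gz").group(0) == "cmake"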
Code example #6
File: debian_package.py  Project: gsnedders/gecko
def docker_worker_debian_package(config, job, taskdesc):
    run = job['run']

    name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)

    docker_repo = 'debian'
    arch = run.get('arch', 'amd64')
    if arch != 'amd64':
        docker_repo = '{}/{}'.format(arch, docker_repo)

    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker['docker-image'] = '{repo}:{dist}-{date}'.format(
        repo=docker_repo, dist=run['dist'], date=run['snapshot'][:8])

    add_artifacts(config, job, taskdesc, path='/tmp/artifacts')

    env = worker.setdefault('env', {})
    env['DEBFULLNAME'] = 'Mozilla build team'
    env['DEBEMAIL'] = '*****@*****.**'

    if 'dsc' in run:
        src = run['dsc']
        unpack = 'dpkg-source -x {src_file} {package}'
        package_re = DSC_PACKAGE_RE
    elif 'tarball' in run:
        src = run['tarball']
        unpack = ('mkdir {package} && '
                  'tar -C {package} -axf {src_file} --strip-components=1')
        package_re = SOURCE_PACKAGE_RE
    else:
        raise RuntimeError('Unreachable')
    src_url = src['url']
    src_file = os.path.basename(src_url)
    src_sha256 = src['sha256']
    package = package_re.match(src_file).group(0)
    unpack = unpack.format(src_file=src_file, package=package)

    base_deps = [
        'apt-utils',
        'build-essential',
        'devscripts',
        'fakeroot',
    ]

    resolver = run.get('resolver', 'apt-get')
    if resolver == 'apt-get':
        resolver = 'apt-get -yyq --no-install-recommends'
    elif resolver == 'aptitude':
        resolver = ('aptitude -y --without-recommends -o '
                    'Aptitude::ProblemResolver::Hints::KeepBuildDeps='
                    '"reject {}-build-deps :UNINST"').format(package)
        base_deps.append('aptitude')
    else:
        raise RuntimeError('Unreachable')

    adjust = ''
    if 'patch' in run:
        # We can't depend on docker images, so we don't have robustcheckout
        # or run-task to get a checkout. Download the one file we'd otherwise
        # need a checkout for.
        env['PATCH_URL'] = '{head_repo}/raw-file/{head_rev}/build/debian-packages/{patch}'.format(
            head_repo=config.params['head_repository'],
            head_rev=config.params['head_rev'],
            patch=run['patch'],
        )
        adjust += 'curl -sL $PATCH_URL | patch -p1 && '
    if 'pre-build-command' in run:
        adjust += run['pre-build-command'] + ' && '
    if 'tarball' in run:
        adjust += 'mv ../{src_file} ../{package}_{ver}.orig.tar.gz && '.format(
            src_file=src_file,
            package=package,
            ver='$(dpkg-parsechangelog | awk \'$1=="Version:"{print $2}\' | cut -f 1 -d -)',
        )
    if 'patch' not in run and 'pre-build-command' not in run:
        adjust += ('debchange -l ".{prefix}moz" --distribution "{dist}"'
                   ' "Mozilla backport for {dist}." < /dev/null && ').format(
                       prefix=name.split('-', 1)[0],
                       dist=run['dist'],
                   )

    # We can't depend on docker images (since docker images depend on packages),
    # so we inline the whole script here.
    worker['command'] = [
        'sh',
        '-x',
        '-c',
        # Fill /etc/apt/sources.list with the relevant snapshot repository.
        'echo "deb http://snapshot.debian.org/archive/debian'
        '/{snapshot}/ {dist} main" > /etc/apt/sources.list && '
        'echo "deb http://snapshot.debian.org/archive/debian'
        '/{snapshot}/ {dist}-updates main" >> /etc/apt/sources.list && '
        'echo "deb http://snapshot.debian.org/archive/debian'
        '/{snapshot}/ {dist}-backports main" >> /etc/apt/sources.list && '
        'echo "deb http://snapshot.debian.org/archive/debian-security'
        '/{snapshot}/ {dist}/updates main" >> /etc/apt/sources.list && '
        'apt-get update -o Acquire::Check-Valid-Until=false -q && '
        # Add sources for packages coming from other package tasks.
        'apt-get install -yyq apt-transport-https ca-certificates && '
        'for task in $PACKAGES; do '
        '  echo "deb [trusted=yes] https://queue.taskcluster.net/v1/task'
        '/$task/artifacts/public/build/ debian/" '
        '>> /etc/apt/sources.list; '
        'done && '
        # Install the base utilities required to build debian packages.
        'apt-get update -o Acquire::Check-Valid-Until=false -q && '
        'apt-get install -yyq {base_deps} && '
        'cd /tmp && '
        # Get, validate and extract the package source.
        'dget -d -u {src_url} && '
        'echo "{src_sha256}  {src_file}" | sha256sum -c && '
        '{unpack} && '
        'cd {package} && '
        # Optionally apply patch and/or pre-build command.
        '{adjust}'
        # Install the necessary build dependencies.
        'mk-build-deps -i -r debian/control -t \'{resolver}\' && '
        # Build the package
        'DEB_BUILD_OPTIONS="parallel=$(nproc) nocheck" dpkg-buildpackage && '
        # Copy the artifacts
        'mkdir -p {artifacts}/debian && '
        'dcmd cp ../{package}_*.changes {artifacts}/debian/ && '
        'cd {artifacts} && '
        # Make the artifacts directory usable as an APT repository.
        'apt-ftparchive sources debian | gzip -c9 > debian/Sources.gz && '
        'apt-ftparchive packages debian | gzip -c9 > debian/Packages.gz'
        .format(
            package=package,
            snapshot=run['snapshot'],
            dist=run['dist'],
            src_url=src_url,
            src_file=src_file,
            src_sha256=src_sha256,
            unpack=unpack,
            adjust=adjust,
            artifacts='/tmp/artifacts',
            base_deps=' '.join(base_deps),
            resolver=resolver,
        )
    ]

    # Use the command generated above as the base for the index hash.
    # We rely on it not varying depending on the head_repository or head_rev.
    data = list(worker['command'])
    if 'patch' in run:
        data.append(
            hash_path(
                os.path.join(GECKO, 'build', 'debian-packages', run['patch'])))

    if docker_repo != 'debian':
        data.append(docker_repo)

    if run.get('packages'):
        env = worker.setdefault('env', {})
        env['PACKAGES'] = {
            'task-reference':
            ' '.join('<{}>'.format(p) for p in run['packages'])
        }
        deps = taskdesc.setdefault('dependencies', {})
        for p in run['packages']:
            deps[p] = 'packages-{}'.format(p)
            data.append(p)

    add_optimization(config,
                     taskdesc,
                     cache_type='packages.v1',
                     cache_name=name,
                     digest_data=data)
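For concreteness, with run['dist'] = 'jessie' and run['snapshot'] = '20171210T214726Z' (illustrative values), the echo commands above render /etc/apt/sources.list as:

deb http://snapshot.debian.org/archive/debian/20171210T214726Z/ jessie main
deb http://snapshot.debian.org/archive/debian/20171210T214726Z/ jessie-updates main
deb http://snapshot.debian.org/archive/debian/20171210T214726Z/ jessie-backports main
deb http://snapshot.debian.org/archive/debian-security/20171210T214726Z/ jessie/updates main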
Code example #7
File: debian_package.py  Project: luke-chang/gecko-1
def docker_worker_debian_package(config, job, taskdesc):
    run = job['run']

    name = taskdesc['label'].replace('{}-'.format(config.kind), '', 1)

    worker = taskdesc['worker']
    worker['artifacts'] = []
    worker['docker-image'] = 'debian:{dist}-{date}'.format(
        dist=run['dist'],
        date=run['snapshot'][:8])

    add_public_artifacts(config, job, taskdesc, path='/tmp/artifacts')

    env = worker.setdefault('env', {})
    env['DEBFULLNAME'] = 'Mozilla build team'
    env['DEBEMAIL'] = '*****@*****.**'

    if 'dsc' in run:
        src = run['dsc']
        unpack = 'dpkg-source -x {src_file} {package}'
        package_re = DSC_PACKAGE_RE
    elif 'tarball' in run:
        src = run['tarball']
        unpack = ('mkdir {package} && '
                  'tar -C {package} -axf {src_file} --strip-components=1')
        package_re = SOURCE_PACKAGE_RE
    else:
        raise RuntimeError('Unreachable')
    src_url = src['url']
    src_file = os.path.basename(src_url)
    src_sha256 = src['sha256']
    package = package_re.match(src_file).group(0)
    unpack = unpack.format(src_file=src_file, package=package)

    base_deps = [
        'apt-utils',
        'build-essential',
        'devscripts',
        'fakeroot',
    ]

    resolver = run.get('resolver', 'apt-get')
    if resolver == 'apt-get':
        resolver = 'apt-get -yyq --no-install-recommends'
    elif resolver == 'aptitude':
        resolver = ('aptitude -y --without-recommends -o '
                    'Aptitude::ProblemResolver::Hints::KeepBuildDeps='
                    '"reject {}-build-deps :UNINST"').format(package)
        base_deps.append('aptitude')
    else:
        raise RuntimeError('Unreachable')

    adjust = ''
    if 'patch' in run:
        # We can't depend on docker images, so we don't have robustcheckout
        # or run-task to get a checkout. Download the one file we'd otherwise
        # need a checkout for.
        env['PATCH_URL'] = '{head_repo}/raw-file/{head_rev}/build/debian-packages/{patch}'.format(
            head_repo=config.params['head_repository'],
            head_rev=config.params['head_rev'],
            patch=run['patch'],
        )
        adjust += 'curl -sL $PATCH_URL | patch -p1 && '
    if 'pre-build-command' in run:
        adjust += run['pre-build-command'] + ' && '
    if 'tarball' in run:
        adjust += 'mv ../{src_file} ../{package}_{ver}.orig.tar.gz && '.format(
            src_file=src_file,
            package=package,
            ver='$(dpkg-parsechangelog | awk \'$1=="Version:"{print $2}\' | cut -f 1 -d -)',
        )
    if 'patch' not in run and 'pre-build-command' not in run:
        adjust += ('debchange -l ".{prefix}moz" --distribution "{dist}"'
                   ' "Mozilla backport for {dist}." < /dev/null && ').format(
            prefix=name.split('-', 1)[0],
            dist=run['dist'],
        )

    # We can't depend on docker images (since docker images depend on packages),
    # so we inline the whole script here.
    worker['command'] = [
        'sh',
        '-x',
        '-c',
        # Fill /etc/apt/sources.list with the relevant snapshot repository.
        'echo "deb http://snapshot.debian.org/archive/debian'
        '/{snapshot}/ {dist} main" > /etc/apt/sources.list && '
        'echo "deb http://snapshot.debian.org/archive/debian'
        '/{snapshot}/ {dist}-updates main" >> /etc/apt/sources.list && '
        'echo "deb http://snapshot.debian.org/archive/debian'
        '/{snapshot}/ {dist}-backports main" >> /etc/apt/sources.list && '
        'echo "deb http://snapshot.debian.org/archive/debian-security'
        '/{snapshot}/ {dist}/updates main" >> /etc/apt/sources.list && '
        'apt-get update -o Acquire::Check-Valid-Until=false -q && '
        # Add sources for packages coming from other package tasks.
        'apt-get install -yyq apt-transport-https ca-certificates && '
        'for task in $PACKAGES; do '
        '  echo "deb [trusted=yes] https://queue.taskcluster.net/v1/task'
        '/$task/runs/0/artifacts/public/build/ debian/" '
        '>> /etc/apt/sources.list; '
        'done && '
        # Install the base utilities required to build debian packages.
        'apt-get update -o Acquire::Check-Valid-Until=false -q && '
        'apt-get install -yyq {base_deps} && '
        'cd /tmp && '
        # Get, validate and extract the package source.
        'dget -d -u {src_url} && '
        'echo "{src_sha256}  {src_file}" | sha256sum -c && '
        '{unpack} && '
        'cd {package} && '
        # Optionally apply patch and/or pre-build command.
        '{adjust}'
        # Install the necessary build dependencies.
        'mk-build-deps -i -r debian/control -t \'{resolver}\' && '
        # Build the package
        'DEB_BUILD_OPTIONS="parallel=$(nproc) nocheck" dpkg-buildpackage && '
        # Copy the artifacts
        'mkdir -p {artifacts}/debian && '
        'dcmd cp ../{package}_*.changes {artifacts}/debian/ && '
        'cd {artifacts} && '
        # Make the artifacts directory usable as an APT repository.
        'apt-ftparchive sources debian | gzip -c9 > debian/Sources.gz && '
        'apt-ftparchive packages debian | gzip -c9 > debian/Packages.gz'
        .format(
            package=package,
            snapshot=run['snapshot'],
            dist=run['dist'],
            src_url=src_url,
            src_file=src_file,
            src_sha256=src_sha256,
            unpack=unpack,
            adjust=adjust,
            artifacts='/tmp/artifacts',
            base_deps=' '.join(base_deps),
            resolver=resolver,
        )
    ]

    # Use the command generated above as the base for the index hash.
    # We rely on it not varying depending on the head_repository or head_rev.
    data = list(worker['command'])
    if 'patch' in run:
        data.append(hash_path(os.path.join(GECKO, 'build', 'debian-packages', run['patch'])))

    if run.get('packages'):
        env = worker.setdefault('env', {})
        env['PACKAGES'] = {
            'task-reference': ' '.join('<{}>'.format(p)
                                       for p in run['packages'])
        }
        deps = taskdesc.setdefault('dependencies', {})
        for p in run['packages']:
            deps[p] = 'packages-{}'.format(p)
            data.append(p)

    add_optimization(config, taskdesc, cache_type='packages.v1',
                     cache_name=name, digest_data=data)
Code example #8
File: task.py  Project: wenshiqi0/gecko-dev
def _run_task_suffix():
    """String to append to cache names under control of run-task."""
    return hash_path(RUN_TASK)[0:20]
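Usage sketch: the suffix namespaces worker caches by the content of the run-task script, so any change to run-task invalidates them (the cache name below is made up):

# Hypothetical: derive a versioned cache name from the run-task hash.
cache_name = "level-1-checkouts-{}".format(_run_task_suffix())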
Code example #9
def docker_worker_debian_package(config, job, taskdesc):
    run = job["run"]

    name = taskdesc["label"].replace("{}-".format(config.kind), "", 1)

    arch = run.get("arch", "amd64")

    worker = taskdesc["worker"]
    worker.setdefault("artifacts", [])
    version = {
        "wheezy": 7,
        "jessie": 8,
        "stretch": 9,
        "buster": 10,
    }[run["dist"]]
    image = "debian%d" % version
    if arch != "amd64":
        image += "-" + arch
    image += "-packages"
    worker["docker-image"] = {"in-tree": image}

    add_artifacts(config, job, taskdesc, path="/tmp/artifacts")

    env = worker.setdefault("env", {})
    env["DEBFULLNAME"] = "Mozilla build team"
    env["DEBEMAIL"] = "*****@*****.**"

    if "dsc" in run:
        src = run["dsc"]
        unpack = "dpkg-source -x {src_file} {package}"
        package_re = DSC_PACKAGE_RE
    elif "tarball" in run:
        src = run["tarball"]
        unpack = (
            "mkdir {package} && "
            "tar -C {package} -axf {src_file} --strip-components=1"
        )
        package_re = SOURCE_PACKAGE_RE
    else:
        raise RuntimeError("Unreachable")
    src_url = src["url"]
    src_file = os.path.basename(src_url)
    src_sha256 = src["sha256"]
    package = run.get("name")
    if not package:
        package = package_re.match(src_file).group(0)
    unpack = unpack.format(src_file=src_file, package=package)

    resolver = run.get("resolver", "apt-get")
    if resolver == "apt-get":
        resolver = "apt-get -yyq --no-install-recommends"
    elif resolver == "aptitude":
        resolver = (
            "aptitude -y --without-recommends -o "
            "Aptitude::ProblemResolver::Hints::KeepBuildDeps="
            '"reject {}-build-deps :UNINST"'
        ).format(package)
    else:
        raise RuntimeError("Unreachable")

    adjust = ""
    if "patch" in run:
        # We don't use robustcheckout or run-task to get a checkout, so
        # download the one file we'd otherwise need a checkout for.
        env["PATCH_URL"] = config.params.file_url(
            "build/debian-packages/{patch}".format(patch=run["patch"]),
        )
        adjust += "curl -sL $PATCH_URL | patch -p1 && "
    if "pre-build-command" in run:
        adjust += run["pre-build-command"] + " && "
    if "tarball" in run:
        adjust += "mv ../{src_file} ../{package}_{ver}.orig.tar.gz && ".format(
            src_file=src_file,
            package=package,
            ver="$(dpkg-parsechangelog | awk '$1==\"Version:\"{print $2}' | cut -f 1 -d -)",
        )
    if "patch" not in run and "pre-build-command" not in run:
        adjust += (
            'debchange -l ".{prefix}moz" --distribution "{dist}"'
            ' "Mozilla backport for {dist}." < /dev/null && '
        ).format(
            prefix=name.split("-", 1)[0],
            dist=run["dist"],
        )

    worker["command"] = [
        "sh",
        "-x",
        "-c",
        # Add sources for packages coming from other package tasks.
        "/usr/local/sbin/setup_packages.sh {root_url} $PACKAGES && "
        "apt-get update && "
        # Upgrade packages that might have new versions in package tasks.
        "apt-get dist-upgrade && " "cd /tmp && "
        # Get, validate and extract the package source.
        "(dget -d -u {src_url} || exit 100) && "
        'echo "{src_sha256}  {src_file}" | sha256sum -c && '
        "{unpack} && "
        "cd {package} && "
        # Optionally apply patch and/or pre-build command.
        "{adjust}"
        # Install the necessary build dependencies.
        "(mk-build-deps -i -r debian/control -t '{resolver}' || exit 100) && "
        # Build the package
        'DEB_BUILD_OPTIONS="parallel=$(nproc) nocheck" dpkg-buildpackage && '
        # Copy the artifacts
        "mkdir -p {artifacts}/debian && "
        "dcmd cp ../{package}_*.changes {artifacts}/debian/ && "
        "cd {artifacts} && "
        # Make the artifacts directory usable as an APT repository.
        "apt-ftparchive sources debian | gzip -c9 > debian/Sources.gz && "
        "apt-ftparchive packages debian | gzip -c9 > debian/Packages.gz".format(
            root_url=get_root_url(False),
            package=package,
            src_url=src_url,
            src_file=src_file,
            src_sha256=src_sha256,
            unpack=unpack,
            adjust=adjust,
            artifacts="/tmp/artifacts",
            resolver=resolver,
        ),
    ]

    if run.get("packages"):
        env = worker.setdefault("env", {})
        env["PACKAGES"] = {
            "task-reference": " ".join("<{}>".format(p) for p in run["packages"])
        }
        deps = taskdesc.setdefault("dependencies", {})
        for p in run["packages"]:
            deps[p] = "packages-{}".format(p)

    # Use the command generated above as the base for the index hash.
    # We rely on it not varying depending on the head_repository or head_rev.
    digest_data = list(worker["command"])
    if "patch" in run:
        digest_data.append(
            hash_path(os.path.join(GECKO, "build", "debian-packages", run["patch"]))
        )

    if not taskgraph.fast:
        taskdesc["cache"] = {
            "type": "packages.v1",
            "name": name,
            "digest-data": digest_data,
        }
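Downstream, the digest-data list is reduced to a single digest for the cached-task index route. A minimal sketch of one way that reduction could work; the real taskgraph transform may encode the pieces differently before hashing:

import hashlib

def fold_digest_data(digest_data):
    """Collapse a list of digest strings into one hex sha256 (hypothetical)."""
    h = hashlib.sha256()
    for piece in digest_data:
        h.update(piece.encode("utf-8"))
        h.update(b"\n")
    return h.hexdigest()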