Example #1
def get_digest_data(config, run, taskdesc):
    files = list(run.get('resources', []))
    # This file
    files.append('taskcluster/taskgraph/transforms/job/toolchain.py')
    # The script
    files.append('taskcluster/scripts/misc/{}'.format(run['script']))
    # Tooltool manifest if any is defined:
    tooltool_manifest = taskdesc['worker']['env'].get('TOOLTOOL_MANIFEST')
    if tooltool_manifest:
        files.append(tooltool_manifest)

    # Accumulate dependency hashes for index generation.
    data = [hash_paths(GECKO, files)]

    # If the task has dependencies, we need those dependencies to influence
    # the index path. So take the digest from the files above, add the list
    # of its dependencies, and hash the aggregate.
    # If the task has no dependencies, just use the digest from above.
    deps = taskdesc['dependencies']
    if deps:
        data.extend(sorted(deps.values()))

    # Likewise script arguments should influence the index.
    args = run.get('arguments')
    if args:
        data.extend(args)
    return data
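
hash_paths(GECKO, files) above comes from taskgraph's hashing utilities: GECKO is the repository root, and the result is a single hex digest over the listed files. A minimal stand-in that illustrates the idea (not taskgraph's exact implementation, which also supports glob patterns):

import hashlib
import os

def hash_paths_sketch(base, paths):
    # Hash file contents together with their relative paths, in a stable
    # order, so that both content edits and renames change the digest.
    h = hashlib.sha256()
    for rel in sorted(paths):
        with open(os.path.join(base, rel), 'rb') as f:
            h.update(rel.encode('utf-8'))
            h.update(f.read())
    return h.hexdigest()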
Example #2
def get_digest_data(config, run, taskdesc):
    files = list(run.pop('resources', []))
    # The script
    files.append('taskcluster/scripts/misc/{}'.format(run['script']))
    # Tooltool manifest if any is defined:
    tooltool_manifest = taskdesc['worker']['env'].get('TOOLTOOL_MANIFEST')
    if tooltool_manifest:
        files.append(tooltool_manifest)

    # Accumulate dependency hashes for index generation.
    data = [hash_paths(GECKO, files)]

    data.append(taskdesc['attributes']['toolchain-artifact'])

    # If the task uses an in-tree docker image, we want it to influence
    # the index path as well. Ideally, the content of the docker image itself
    # should have an influence, but at the moment, we can't get that
    # information here. So use the docker image name as a proxy. Not a lot of
    # changes to docker images actually have an impact on the resulting
    # toolchain artifact, so we'll just rely on such important changes to be
    # accompanied with a docker image name change.
    image = taskdesc['worker'].get('docker-image', {}).get('in-tree')
    if image:
        data.append(image)

    # Likewise script arguments should influence the index.
    args = run.get('arguments')
    if args:
        data.extend(args)

    if taskdesc['attributes'].get('rebuild-on-release'):
        # Add whether this is a release branch or not
        data.append(str(config.params['project'] in RELEASE_PROJECTS))
    return data
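
The rebuild-on-release branch above records only whether the current project is a release branch, not which one, so the digest splits caches into a release and a non-release side. RELEASE_PROJECTS is a module-level set in the original file whose contents are not shown here; the values below are placeholders:

RELEASE_PROJECTS = {'mozilla-beta', 'mozilla-release'}  # placeholder values

# On a try push the digest gains "False"; on a release branch, "True".
print(str('try' in RELEASE_PROJECTS))              # -> False
print(str('mozilla-release' in RELEASE_PROJECTS))  # -> True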
Example #3
def build_cache(config, tasks):
    for task in tasks:
        # Only cache tasks on PRs and push. Ignore actions.
        if config.params["tasks_for"] not in ("github-pull-request", "github-push"):
            yield task
            continue
        if task.get("cache", True) and not taskgraph.fast:
            digest_data = []
            digest_data.append(
                json.dumps(
                    task.get("attributes", {}).get("digest-extra", {}),
                    indent=2,
                    sort_keys=True,
                )
            )
            resources = task["attributes"]["resources"]
            for resource in resources:
                path = os.path.join(BASE_DIR, resource)
                # Check the joined path, not the bare resource name, so the
                # lookup works regardless of the current working directory.
                if os.path.isdir(path):
                    digest_data.append(hash_paths(path, [""]))
                elif os.path.isfile(path):
                    digest_data.append(hash_path(path))
                else:
                    raise Exception(f"Unknown resource {resource}")
            cache_name = task["name"].replace(":", "-")
            task["cache"] = {
                "type": f"xpi-manifest.v1.{config.kind}",
                "name": cache_name,
                "digest-data": digest_data,
            }

        yield task
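
digest-data is an ordered list; taskgraph later folds it into the actual cache key. A minimal sketch of that reduction, under the assumption that order matters and every entry is text (the real folding happens inside taskgraph's cache machinery):

import hashlib

def fold_digest_data(digest_data):
    # Reduce the ordered digest-data list to one hex digest.
    h = hashlib.sha256()
    for item in digest_data:
        h.update(item.encode("utf-8"))
        h.update(b"\x00")  # separator so ["ab", "c"] != ["a", "bc"]
    return h.hexdigest()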
Example #4
def get_digest_data(config, run, taskdesc):
    """
    Copied from taskgraph.transforms.job.toolchain, with minor
    modifications to support the required script path.
    """
    files = list(run.pop('resources', []))
    # This file
    files.append('comm/taskcluster/comm_taskgraph/transforms/job/toolchain.py')
    # The script
    files.append('{}/{}'.format(TOOLCHAIN_SCRIPT_PATH, run['script']))
    # Tooltool manifest if any is defined:
    tooltool_manifest = taskdesc['worker']['env'].get('TOOLTOOL_MANIFEST')
    if tooltool_manifest:
        files.append(tooltool_manifest)

    # Accumulate dependency hashes for index generation.
    data = [hash_paths(GECKO, files)]

    # If the task uses an in-tree docker image, we want it to influence
    # the index path as well. Ideally, the content of the docker image itself
    # should have an influence, but at the moment, we can't get that
    # information here. So use the docker image name as a proxy. Not a lot of
    # changes to docker images actually have an impact on the resulting
    # toolchain artifact, so we'll just rely on such important changes to be
    # accompanied with a docker image name change.
    image = taskdesc['worker'].get('docker-image', {}).get('in-tree')
    if image:
        data.append(image)

    # Likewise script arguments should influence the index.
    args = run.get('arguments')
    if args:
        data.extend(args)
    return data
Example #5
def add_optimization(config, run, taskdesc):
    files = list(run.get('resources', []))
    # This file
    files.append('taskcluster/taskgraph/transforms/job/toolchain.py')
    # The script
    files.append('taskcluster/scripts/misc/{}'.format(run['script']))
    # Tooltool manifest if any is defined:
    tooltool_manifest = taskdesc['worker']['env'].get('TOOLTOOL_MANIFEST')
    if tooltool_manifest:
        files.append(tooltool_manifest)

    # Accumulate dependency hashes for index generation.
    data = [hash_paths(GECKO, files)]

    # If the task has dependencies, we need those dependencies to influence
    # the index path. So take the digest from the files above, add the list
    # of its dependencies, and hash the aggregate.
    # If the task has no dependencies, just use the digest from above.
    deps = taskdesc['dependencies']
    if deps:
        data.extend(sorted(deps.values()))

    # Likewise script arguments should influence the index.
    args = run.get('arguments')
    if args:
        data.extend(args)

    label = taskdesc['label']
    subs = {
        'name': label.replace('%s-' % config.kind, ''),
        'digest': hashlib.sha256('\n'.join(data).encode('utf-8')).hexdigest()
    }

    # We'll try to find a cached version of the toolchain at levels above
    # and including the current level, starting at the highest level.
    index_routes = []
    for level in reversed(range(int(config.params['level']), 4)):
        subs['level'] = level
        index_routes.append(TOOLCHAIN_INDEX.format(**subs))
    taskdesc['optimization'] = {'index-search': index_routes}

    # ... and cache at the lowest level.
    taskdesc.setdefault('routes', []).append('index.{}'.format(
        TOOLCHAIN_INDEX.format(**subs)))
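
TOOLCHAIN_INDEX is a module-level template in the original file whose exact value is not shown here. With a representative template, the loop above expands to one route per level, highest first:

# Representative template (assumption; see the original module for the
# real one):
TOOLCHAIN_INDEX = 'gecko.cache.level-{level}.toolchains.v1.{name}.{digest}'

subs = {'name': 'linux64-clang', 'digest': 'abc123'}  # illustrative values
index_routes = []
for level in reversed(range(1, 4)):  # at level 1: tries 3, 2, then 1
    subs['level'] = level
    index_routes.append(TOOLCHAIN_INDEX.format(**subs))
# index_routes == [
#     'gecko.cache.level-3.toolchains.v1.linux64-clang.abc123',
#     'gecko.cache.level-2.toolchains.v1.linux64-clang.abc123',
#     'gecko.cache.level-1.toolchains.v1.linux64-clang.abc123',
# ]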
Example #6
def build_cache(config, tasks):
    for task in tasks:
        if task.get("cache", True) and not taskgraph.fast:
            digest_data = []
            digest_data.append(
                json.dumps(task.get("attributes", {}).get("digest-extra", {}),
                           indent=2,
                           sort_keys=True))
            resources = task["attributes"]["resources"]
            for resource in resources:
                digest_data.append(
                    hash_paths(os.path.join(BASE_DIR, resource), ['']))
            cache_name = task["name"].replace(":", "-")
            task["cache"] = {
                "type": "scriptworker-scripts.v1.{}".format(config.kind),
                "name": cache_name,
                "digest-data": digest_data,
            }

        yield task
Example #7
def get_digest_data(config, run, taskdesc):
    files = list(run.get('resources', []))
    # This file
    files.append('taskcluster/taskgraph/transforms/job/toolchain.py')
    # The script
    files.append('taskcluster/scripts/misc/{}'.format(run['script']))
    # Tooltool manifest if any is defined:
    tooltool_manifest = taskdesc['worker']['env'].get('TOOLTOOL_MANIFEST')
    if tooltool_manifest:
        files.append(tooltool_manifest)

    # Accumulate dependency hashes for index generation.
    data = [hash_paths(GECKO, files)]

    # If the task has dependencies, we need those dependencies to influence
    # the index path. So take the digest from the files above, add the list
    # of its dependencies, and hash the aggregate.
    # If the task has no dependencies, just use the digest from above.
    deps = taskdesc['dependencies']
    if deps:
        data.extend(sorted(deps.values()))

    # If the task uses an in-tree docker image, we want it to influence
    # the index path as well. Ideally, the content of the docker image itself
    # should have an influence, but at the moment, we can't get that
    # information here. So use the docker image name as a proxy. Not a lot of
    # changes to docker images actually have an impact on the resulting
    # toolchain artifact, so we'll just rely on such important changes to be
    # accompanied with a docker image name change.
    image = taskdesc['worker'].get('docker-image', {}).get('in-tree')
    if image:
        data.append(image)

    # Likewise script arguments should influence the index.
    args = run.get('arguments')
    if args:
        data.extend(args)
    return data
Example #8
def add_optimizations(config, run, taskdesc):
    files = list(run.get('resources', []))
    # This file
    files.append('taskcluster/taskgraph/transforms/job/toolchain.py')
    # The script
    files.append('taskcluster/scripts/misc/{}'.format(run['script']))

    label = taskdesc['label']
    subs = {
        'name': label.replace('toolchain-', '').split('/')[0],
        'digest': hash_paths(GECKO, files),
    }

    optimizations = taskdesc.setdefault('optimizations', [])

    # We'll try to find a cached version of the toolchain at levels above
    # and including the current level, starting at the highest level.
    for level in reversed(range(int(config.params['level']), 4)):
        subs['level'] = level
        optimizations.append(['index-search', TOOLCHAIN_INDEX.format(**subs)])

    # ... and cache at the lowest level.
    taskdesc.setdefault('routes', []).append(
        'index.{}'.format(TOOLCHAIN_INDEX.format(**subs)))
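
For a level-1 push, the result is a taskdesc with fields shaped like the following (route strings use the same representative template as above and are illustrative):

taskdesc_after = {
    'optimizations': [
        ['index-search', 'gecko.cache.level-3.toolchains.v1.linux64-clang.<digest>'],
        ['index-search', 'gecko.cache.level-2.toolchains.v1.linux64-clang.<digest>'],
        ['index-search', 'gecko.cache.level-1.toolchains.v1.linux64-clang.<digest>'],
    ],
    'routes': ['index.gecko.cache.level-1.toolchains.v1.linux64-clang.<digest>'],
}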
Example #9
def get_digest_data(config, run, taskdesc):
    files = list(run.pop("resources", []))
    # The script
    files.append("taskcluster/scripts/toolchain/{}".format(run["script"]))

    # Accumulate dependency hashes for index generation.
    data = [hash_paths(config.graph_config.vcs_root, files)]

    # If the task uses an in-tree docker image, we want it to influence
    # the index path as well. Ideally, the content of the docker image itself
    # should have an influence, but at the moment, we can't get that
    # information here. So use the docker image name as a proxy. Not a lot of
    # changes to docker images actually have an impact on the resulting
    # toolchain artifact, so we'll just rely on such important changes to be
    # accompanied with a docker image name change.
    image = taskdesc["worker"].get("docker-image", {}).get("in-tree")
    if image:
        data.append(image)

    # Likewise script arguments should influence the index.
    args = run.get("arguments")
    if args:
        data.extend(args)
    return data
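
To make the data flow concrete, here is an illustrative run/taskdesc pair for this last variant (script name, resource path, arguments, and image name are made up):

run = {
    "script": "build-clang.sh",                             # hypothetical
    "resources": ["taskcluster/scripts/toolchain/common.sh"],
    "arguments": ["--target", "x86_64"],
}
taskdesc = {"worker": {"docker-image": {"in-tree": "toolchain-build"}}}

# get_digest_data(config, run, taskdesc) would then return, in order:
#   [<hash_paths digest of common.sh and build-clang.sh>,
#    "toolchain-build",           # in-tree docker image name
#    "--target", "x86_64"]        # script arguments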