Example #1
def make_task_description(config, jobs):
    """Given a build description, create a task description"""
    # import plugin modules first, before iterating over jobs
    import_all()
    for job in jobs:
        if 'label' not in job:
            if 'name' not in job:
                raise Exception("job has neither a name nor a label")
            job['label'] = '{}-{}'.format(config.kind, job['name'])
        if job.get('name'):
            del job['name']

        impl, os = worker_type_implementation(job['worker-type'])
        worker = job.setdefault('worker', {})
        assert 'implementation' not in worker
        worker['implementation'] = impl
        if os:
            worker['os'] = os

        taskdesc = copy.deepcopy(job)

        # fill in some empty defaults to make run implementations easier
        taskdesc.setdefault('attributes', {})
        taskdesc.setdefault('dependencies', {})
        taskdesc.setdefault('routes', [])
        taskdesc.setdefault('scopes', [])
        taskdesc.setdefault('extra', {})

        # give the function for job.run.using on this worker implementation a
        # chance to set up the task description.
        configure_taskdesc_for_run(config, job, taskdesc, impl)
        del taskdesc['run']

        # yield only the task description, discarding the job description
        yield taskdesc
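
Across these examples, worker_type_implementation resolves a worker-type alias into an (implementation, os) pair; older call sites pass only the worker type, while newer ones also pass config.graph_config. Below is a minimal hypothetical stub along those lines, useful only for exercising the transforms in isolation; the alias table is invented for illustration and is not the real taskgraph mapping.

# Hypothetical stand-in for taskgraph's worker_type_implementation, handy for
# running the transforms in these examples outside a real task graph.
# The alias table below is illustrative only.
_WORKER_ALIASES = {
    "b-linux": ("docker-worker", "linux"),
    "b-win2012": ("generic-worker", "windows"),
    "t-osx-1015": ("generic-worker", "macosx"),
}


def worker_type_implementation(graph_config, worker_type=None):
    # Support both calling conventions seen in these examples:
    #   worker_type_implementation(worker_type)                       (older)
    #   worker_type_implementation(config.graph_config, worker_type)  (newer)
    if worker_type is None:
        worker_type = graph_config
    return _WORKER_ALIASES.get(worker_type, (None, None))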
Example #2
def use_profile_data(config, jobs):
    for job in jobs:
        use_pgo = job.pop('use-pgo', False)
        disable_pgo = config.params['try_task_config'].get(
            'disable-pgo', False)
        artifact_build = job['attributes'].get('artifact-build')
        if not use_pgo or disable_pgo or artifact_build:
            yield job
            continue

        # If use_pgo is True, the task uses the generate-profile task of the
        # same name. Otherwise a task can specify a specific generate-profile
        # task to use in the use_pgo field.
        if use_pgo is True:
            name = job['name']
        else:
            name = use_pgo
        dependencies = 'generate-profile-{}'.format(name)
        job.setdefault('dependencies', {})['generate-profile'] = dependencies
        job.setdefault('fetches', {})['generate-profile'] = ['profdata.tar.xz']
        job['worker']['env'].update({"TASKCLUSTER_PGO_PROFILE_USE": "1"})

        _, worker_os = worker_type_implementation(config.graph_config,
                                                  job['worker-type'])
        if worker_os == "linux":
            # LTO linkage needs more open files than the default from run-task.
            job['worker']['env'].update({"MOZ_LIMIT_NOFILE": "8192"})

        yield job
Example #3
def use_profile_data(config, jobs):
    for job in jobs:
        use_pgo = job.pop("use-pgo", False)
        disable_pgo = config.params["try_task_config"].get(
            "disable-pgo", False)
        artifact_build = job["attributes"].get("artifact-build")
        if not use_pgo or disable_pgo or artifact_build:
            yield job
            continue

        # If use_pgo is True, the task uses the generate-profile task of the
        # same name. Otherwise a task can specify a specific generate-profile
        # task to use in the use_pgo field.
        if use_pgo is True:
            name = job["name"]
        else:
            name = use_pgo
        dependencies = "generate-profile-{}".format(name)
        job.setdefault("dependencies", {})["generate-profile"] = dependencies
        job.setdefault("fetches", {})["generate-profile"] = ["profdata.tar.xz"]
        job["worker"]["env"].update({"TASKCLUSTER_PGO_PROFILE_USE": "1"})

        _, worker_os = worker_type_implementation(config.graph_config,
                                                  job["worker-type"])
        if worker_os == "linux":
            # LTO linkage needs more open files than the default from run-task.
            job["worker"]["env"].update({"MOZ_LIMIT_NOFILE": "8192"})

        yield job
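
To see what this transform does to a job, here is a hedged driver sketch. It assumes the hypothetical worker_type_implementation stub shown after Example #1 is in scope; the SimpleNamespace config and the job fields below are invented for illustration.

from types import SimpleNamespace

# Minimal fake config: only the fields the transform actually reads.
config = SimpleNamespace(
    params={"try_task_config": {}},
    graph_config={},
)
jobs = [{
    "name": "linux64/opt",        # illustrative job name
    "use-pgo": True,              # use the generate-profile task of the same name
    "attributes": {},
    "worker-type": "b-linux",     # resolves to ("docker-worker", "linux") in the stub
    "worker": {"env": {}},
}]

(job,) = use_profile_data(config, jobs)
assert job["dependencies"]["generate-profile"] == "generate-profile-linux64/opt"
assert job["fetches"]["generate-profile"] == ["profdata.tar.xz"]
assert job["worker"]["env"]["TASKCLUSTER_PGO_PROFILE_USE"] == "1"
assert job["worker"]["env"]["MOZ_LIMIT_NOFILE"] == "8192"  # linux-only tweak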
Example #4
def add_resource_monitor(config, jobs):
    for job in jobs:
        if job.get("attributes", {}).get("resource-monitor"):
            worker_implementation, worker_os = worker_type_implementation(
                config.graph_config, job["worker-type"])
            # Normalise worker os so that linux-bitbar and similar use linux tools.
            worker_os = worker_os.split("-")[0]
            if "win7" in job["worker-type"]:
                arch = "32"
            else:
                arch = "64"
            job.setdefault("fetches", {})
            job["fetches"].setdefault("toolchain", [])
            job["fetches"]["toolchain"].append(
                f"{worker_os}{arch}-resource-monitor")

            if worker_implementation == "docker-worker":
                artifact_source = "/builds/worker/monitoring/resource-monitor.json"
            else:
                artifact_source = "monitoring/resource-monitor.json"
            job["worker"].setdefault("artifacts", [])
            job["worker"]["artifacts"].append({
                "name": "public/monitoring/resource-monitor.json",
                "type": "file",
                "path": artifact_source,
            })
            # Set env for output file
            job["worker"].setdefault("env", {})
            job["worker"]["env"]["RESOURCE_MONITOR_OUTPUT"] = artifact_source

        yield job
Example #5
def disable_non_linux_workers(config, jobs):
    """
    Never try to run tasks on macosx or windows workers.
    """
    for job in jobs:
        impl, os = worker_type_implementation(config.graph_config,
                                              job["worker-type"])
        if os in ("macosx", "windows"):
            job["optimization"] = {"always": None}
        yield job
Example #6
def set_implementation(config, jobs):
    for job in jobs:
        impl, os = worker_type_implementation(config.graph_config, job["worker-type"])
        if os:
            job.setdefault("tags", {})["os"] = os
        if impl:
            job.setdefault("tags", {})["worker-implementation"] = impl
        worker = job.setdefault("worker", {})
        assert "implementation" not in worker
        worker["implementation"] = impl
        if os:
            worker["os"] = os
        yield job
Example #7
def set_implementation(config, jobs):
    for job in jobs:
        impl, os = worker_type_implementation(config.graph_config, job['worker-type'])
        if os:
            job.setdefault('tags', {})['os'] = os
        if impl:
            job.setdefault('tags', {})['worker-implementation'] = impl
        worker = job.setdefault('worker', {})
        assert 'implementation' not in worker
        worker['implementation'] = impl
        if os:
            worker['os'] = os
        yield job
Example #8
def set_defaults(config, jobs):
    """Set defaults, including those that differ per worker implementation"""
    for job in jobs:
        job['treeherder'].setdefault('kind', 'build')
        job['treeherder'].setdefault('tier', 1)
        _, worker_os = worker_type_implementation(config.graph_config, job['worker-type'])
        worker = job.setdefault('worker', {})
        worker.setdefault('env', {})
        if worker_os == "linux":
            worker.setdefault('docker-image', {'in-tree': 'debian7-amd64-build'})
            worker['chain-of-trust'] = True
        elif worker_os == "windows":
            worker['chain-of-trust'] = True

        yield job
Example #9
def set_defaults(config, jobs):
    """Set defaults, including those that differ per worker implementation"""
    for job in jobs:
        job['treeherder'].setdefault('kind', 'build')
        job['treeherder'].setdefault('tier', 1)
        _, worker_os = worker_type_implementation(job['worker-type'])
        worker = job.setdefault('worker', {})
        worker.setdefault('env', {})
        if worker_os == "linux":
            worker.setdefault('docker-image', {'in-tree': 'debian7-amd64-build'})
            worker['chain-of-trust'] = True
        elif worker_os == "windows":
            worker['chain-of-trust'] = True

        yield job
Example #10
def set_defaults(config, jobs):
    """Set defaults, including those that differ per worker implementation"""
    for job in jobs:
        job["treeherder"].setdefault("kind", "build")
        job["treeherder"].setdefault("tier", 1)
        _, worker_os = worker_type_implementation(config.graph_config,
                                                  job["worker-type"])
        worker = job.setdefault("worker", {})
        worker.setdefault("env", {})
        worker["chain-of-trust"] = True
        if worker_os == "linux":
            worker.setdefault("docker-image",
                              {"in-tree": "debian8-amd64-build"})

        yield job
Example #11
def set_defaults(config, jobs):
    """Set defaults, including those that differ per worker implementation"""
    for job in jobs:
        job['treeherder'].setdefault('kind', 'build')
        job['treeherder'].setdefault('tier', 1)
        job.setdefault('needs-sccache', True)
        _, worker_os = worker_type_implementation(job['worker-type'])
        if worker_os == "linux":
            worker = job.setdefault('worker', {})
            worker.setdefault('docker-image', {'in-tree': 'desktop-build'})
            worker['chain-of-trust'] = True
            extra = job.setdefault('extra', {})
            extra.setdefault('chainOfTrust', {})
            extra['chainOfTrust'].setdefault('inputs', {})
            extra['chainOfTrust']['inputs']['docker-image'] = {
                "task-reference": "<docker-image>"
            }
        elif worker_os in set(["macosx", "windows"]):
            job['worker'].setdefault('env', {})
        yield job
Example #12
def set_implementation(config, tasks):
    """
    Set the worker implementation based on the worker-type alias.
    """
    for task in tasks:
        worker = task.setdefault("worker", {})
        if "implementation" in task["worker"]:
            yield task
            continue

        impl, os = worker_type_implementation(config.graph_config, task["worker-type"])

        tags = task.setdefault("tags", {})
        tags["worker-implementation"] = impl
        if os:
            task["tags"]["os"] = os
        worker["implementation"] = impl
        if os:
            worker["os"] = os

        yield task
Example #13
def add_resource_monitor(config, jobs):
    for job in jobs:
        if job.get("attributes", {}).get("resource-monitor"):
            worker_implementation, worker_os = worker_type_implementation(
                config.graph_config, job["worker-type"]
            )
            # Normalise worker os so that linux-bitbar and similar use linux tools.
            worker_os = worker_os.split("-")[0]
            # We don't currently support an Arm worker, due to gopsutil's indirect
            # dependencies (go-ole)
            if "aarch64" in job["worker-type"]:
                yield job
                continue
            elif "win7" in job["worker-type"]:
                arch = "32"
            else:
                arch = "64"
            job.setdefault("fetches", {})
            job["fetches"].setdefault("toolchain", [])
            job["fetches"]["toolchain"].append(
                "{}{}-resource-monitor".format(worker_os, arch)
            )

            if worker_implementation == "docker-worker":
                artifact_source = "/builds/worker/monitoring/resource-monitor.json"
            else:
                artifact_source = "monitoring/resource-monitor.json"
            job["worker"].setdefault("artifacts", [])
            job["worker"]["artifacts"].append(
                {
                    "name": "public/monitoring/resource-monitor.json",
                    "type": "file",
                    "path": artifact_source,
                }
            )
            # Set env for output file
            job["worker"].setdefault("env", {})
            job["worker"]["env"]["RESOURCE_MONITOR_OUTPUT"] = artifact_source

        yield job
Example #14
def make_task_description(config, jobs):
    """Given a build description, create a task description"""
    # import plugin modules first, before iterating over jobs
    import_all()
    for job in jobs:
        if 'label' not in job:
            if 'name' not in job:
                raise Exception("job has neither a name nor a label")
            job['label'] = '{}-{}'.format(config.kind, job['name'])
        if job.get('name'):
            del job['name']

        impl, os = worker_type_implementation(job['worker-type'])
        if os:
            job.setdefault('tags', {})['os'] = os
        if impl:
            job.setdefault('tags', {})['worker-implementation'] = impl
        worker = job.setdefault('worker', {})
        assert 'implementation' not in worker
        worker['implementation'] = impl
        if os:
            worker['os'] = os

        taskdesc = copy.deepcopy(job)

        # fill in some empty defaults to make run implementations easier
        taskdesc.setdefault('attributes', {})
        taskdesc.setdefault('dependencies', {})
        taskdesc.setdefault('routes', [])
        taskdesc.setdefault('scopes', [])
        taskdesc.setdefault('extra', {})

        # give the function for job.run.using on this worker implementation a
        # chance to set up the task description.
        configure_taskdesc_for_run(config, job, taskdesc, impl)
        del taskdesc['run']

        # yield only the task description, discarding the job description
        yield taskdesc
Example #15
def make_job_description(config, jobs):
    for job in jobs:
        dep_job = job['primary-dependency']
        attributes = copy_attributes_from_dependent_job(dep_job)
        build_platform = attributes['build_platform']

        if job['build-platform'].startswith('win'):
            if dep_job.kind.endswith('signing'):
                continue
        if job['build-platform'].startswith('macosx'):
            if dep_job.kind.endswith('repack'):
                continue
        dependencies = {dep_job.attributes.get('kind'): dep_job.label}
        dependencies.update(dep_job.dependencies)

        signing_task = None
        for dependency in dependencies.keys():
            if build_platform.startswith('macosx') and dependency.endswith(
                    'signing'):
                signing_task = dependency
            elif build_platform.startswith('win') and dependency.endswith(
                    'repack'):
                signing_task = dependency

        attributes['repackage_type'] = 'repackage'

        level = config.params['level']
        repack_id = job['extra']['repack_id']

        repackage_config = []
        for format in job.get('package-formats'):
            command = copy.deepcopy(PACKAGE_FORMATS[format])
            substs = {
                'archive_format': archive_format(build_platform),
                'executable_extension': executable_extension(build_platform),
            }
            command['inputs'] = {
                name: filename.format(**substs)
                for name, filename in command['inputs'].items()
            }
            repackage_config.append(command)

        run = job.get('mozharness', {})
        run.update({
            'using': 'mozharness',
            'script': 'mozharness/scripts/repackage.py',
            'job-script': 'taskcluster/scripts/builder/repackage.sh',
            'actions': ['setup', 'repackage'],
            'extra-workspace-cache-key': 'repackage',
            'extra-config': {
                'repackage_config': repackage_config,
            },
        })

        worker = {
            'chain-of-trust': True,
            'max-run-time': 7200 if build_platform.startswith('win') else 3600,
            'taskcluster-proxy': True if get_artifact_prefix(dep_job) else False,
            'env': {
                'REPACK_ID': repack_id,
            },
            # Don't add generic artifact directory.
            'skip-artifacts': True,
        }

        if build_platform.startswith('win'):
            worker_type = 'aws-provisioner-v1/gecko-%s-b-win2012' % level
            run['use-magic-mh-args'] = False
        else:
            if build_platform.startswith('macosx'):
                worker_type = 'aws-provisioner-v1/gecko-%s-b-linux' % level
            else:
                raise NotImplementedError(
                    'Unsupported build_platform: "{}"'.format(build_platform))

            run['tooltool-downloads'] = 'internal'
            worker['docker-image'] = {"in-tree": "debian7-amd64-build"}

        worker['artifacts'] = _generate_task_output_files(
            dep_job,
            worker_type_implementation(worker_type),
            repackage_config,
            partner=repack_id,
        )

        description = ("Repackaging for repack_id '{repack_id}' for build '"
                       "{build_platform}/{build_type}'".format(
                           repack_id=job['extra']['repack_id'],
                           build_platform=attributes.get('build_platform'),
                           build_type=attributes.get('build_type')))

        task = {
            'label': job['label'],
            'description': description,
            'worker-type': worker_type,
            'dependencies': dependencies,
            'attributes': attributes,
            'scopes': ['queue:get-artifact:releng/partner/*'],
            'run-on-projects': dep_job.attributes.get('run_on_projects'),
            'routes': job.get('routes', []),
            'extra': job.get('extra', {}),
            'worker': worker,
            'run': run,
            'fetches': _generate_download_config(dep_job,
                                                 build_platform,
                                                 signing_task,
                                                 partner=repack_id,
                                                 project=config.params["project"]),
        }

        if build_platform.startswith('macosx'):
            task['toolchains'] = [
                'linux64-libdmg',
                'linux64-hfsplus',
                'linux64-node',
            ]
        yield task
Example #16
        return frozen_args

    return inner


@pytest.mark.parametrize(
    'task', [
        {
            'worker-type': 'aws-provisioner-v1/gecko-1-b-linux'
        },
        {
            'worker-type': 'releng-hardware/gecko-t-win10-64-hw'
        },
    ],
    ids=lambda t: worker_type_implementation(t['worker-type'])[0])
def test_worker_caches(task, transform):
    config, job, taskdesc, impl = transform(task)
    add_cache(job, taskdesc, 'cache1', '/cache1')
    add_cache(job, taskdesc, 'cache2', '/cache2', skip_untrusted=True)

    if impl not in ('docker-worker', 'generic-worker'):
        pytest.xfail("caches not implemented for '{}'".format(impl))

    key = 'caches' if impl == 'docker-worker' else 'mounts'
    assert key in taskdesc['worker']
    assert len(taskdesc['worker'][key]) == 2

    # Create a new schema object with just the part relevant to caches.
    partial_schema = Schema(payload_builders[impl].schema.schema.schema[key])
    validate_schema(partial_schema, taskdesc['worker'][key],
Example #17
def make_job_description(config, jobs):
    for job in jobs:
        dep_job = job['primary-dependency']
        dependencies = {dep_job.attributes.get('kind'): dep_job.label}
        if len(dep_job.dependencies) > 1 and not config.kind == 'repackage-msi':
            # repackage-signing can end up with multiple deps...
            raise NotImplementedError(
                "Can't repackage a signing task with multiple dependencies")
        signing_dependencies = dep_job.dependencies
        # This is so we get the build task in our dependencies to
        # have better beetmover support.
        dependencies.update(signing_dependencies)

        attributes = copy_attributes_from_dependent_job(dep_job)
        attributes['repackage_type'] = 'repackage'

        locale = attributes.get('locale', job.get('locale'))
        if locale:
            attributes['locale'] = locale

        treeherder = job.get('treeherder', {})
        if attributes.get('nightly'):
            treeherder.setdefault('symbol', 'Nr')
        else:
            treeherder.setdefault('symbol', 'Rpk')
        dep_th_platform = dep_job.task.get('extra', {}).get(
            'treeherder', {}).get('machine', {}).get('platform', '')
        treeherder.setdefault('platform', "{}/opt".format(dep_th_platform))
        treeherder.setdefault('tier', 1)
        treeherder.setdefault('kind', 'build')

        if config.kind == 'repackage-msi':
            treeherder['symbol'] = 'MSI({})'.format(locale or 'N')

        build_task = None
        signing_task = None
        repackage_signing_task = None
        for dependency in dependencies.keys():
            if 'repackage-signing' in dependency:
                repackage_signing_task = dependency
            elif 'signing' in dependency:
                signing_task = dependency
            else:
                build_task = dependency

        _fetch_subst_locale = 'en-US'
        if locale:
            # XXXCallek: todo: rewrite dependency finding
            # Use string slicing to strip out 'nightly-l10n-' .. '-<chunk>/opt'
            # We need this additional dependency to support finding the mar binary
            # Which is needed in order to generate a new complete.mar
            dependencies['build'] = "build-{}/opt".format(
                dependencies[build_task][13:dependencies[build_task].rfind('-')])
            build_task = 'build'
            _fetch_subst_locale = locale

        level = config.params['level']
        build_platform = attributes['build_platform']

        use_stub = attributes.get('stub-installer')

        repackage_config = []
        package_formats = job.get('package-formats')
        if use_stub and not repackage_signing_task:
            # if repackage_signing_task doesn't exist, generate the stub installer
            package_formats += ['installer-stub']
        for format in package_formats:
            command = copy.deepcopy(PACKAGE_FORMATS[format])
            substs = {
                'archive_format': archive_format(build_platform),
                'executable_extension': executable_extension(build_platform),
                '_locale': _fetch_subst_locale,
                'architecture': architecture(build_platform),
                'version_display': config.params['version'],
            }
            # Allow us to replace args as well, while keeping the names that
            # mozharness expands as placeholders, without breaking .format and
            # without letting unknown names through
            substs.update({name: '{{{}}}'.format(name)
                           for name in MOZHARNESS_EXPANSIONS})
            command['inputs'] = {
                name: filename.format(**substs)
                for name, filename in command['inputs'].items()
            }
            command['args'] = [
                arg.format(**substs) for arg in command['args']
            ]
            if 'installer' in format and 'aarch64' not in build_platform:
                command['args'].append('--use-upx')
            repackage_config.append(command)

        run = job.get('mozharness', {})
        run.update({
            'using': 'mozharness',
            'script': 'mozharness/scripts/repackage.py',
            'job-script': 'taskcluster/scripts/builder/repackage.sh',
            'actions': ['setup', 'repackage'],
            'extra-workspace-cache-key': 'repackage',
            'extra-config': {
                'repackage_config': repackage_config,
            },
        })

        worker = {
            'chain-of-trust': True,
            'max-run-time': 7200 if build_platform.startswith('win') else 3600,
            # Don't add generic artifact directory.
            'skip-artifacts': True,
        }

        if locale:
            # Make sure we specify the locale-specific upload dir
            worker.setdefault('env', {}).update(LOCALE=locale)

        if build_platform.startswith('win'):
            worker_type = 'aws-provisioner-v1/gecko-%s-b-win2012' % level
            run['use-magic-mh-args'] = False
        else:
            if build_platform.startswith(('linux', 'macosx')):
                worker_type = 'aws-provisioner-v1/gecko-%s-b-linux' % level
            else:
                raise NotImplementedError(
                    'Unsupported build_platform: "{}"'.format(build_platform)
                )

            run['tooltool-downloads'] = 'internal'
            worker['docker-image'] = {"in-tree": "debian7-amd64-build"}

        worker['artifacts'] = _generate_task_output_files(
            dep_job, worker_type_implementation(worker_type),
            repackage_config=repackage_config,
            locale=locale,
        )

        description = (
            "Repackaging for locale '{locale}' for build '"
            "{build_platform}/{build_type}'".format(
                locale=attributes.get('locale', 'en-US'),
                build_platform=attributes.get('build_platform'),
                build_type=attributes.get('build_type')
            )
        )

        task = {
            'label': job['label'],
            'description': description,
            'worker-type': worker_type,
            'dependencies': dependencies,
            'attributes': attributes,
            'run-on-projects': dep_job.attributes.get('run_on_projects'),
            'treeherder': treeherder,
            'routes': job.get('routes', []),
            'extra': job.get('extra', {}),
            'worker': worker,
            'run': run,
            'fetches': _generate_download_config(dep_job, build_platform, build_task,
                                                 signing_task, repackage_signing_task,
                                                 locale=locale,
                                                 project=config.params["project"],
                                                 existing_fetch=job.get('fetches')),
            'release-artifacts': [artifact['name'] for artifact in worker['artifacts']]
        }

        if build_platform.startswith('macosx'):
            task['toolchains'] = [
                'linux64-libdmg',
                'linux64-hfsplus',
                'linux64-node',
            ]
        yield task
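
One detail in the package-formats loop above is easy to miss: after substs is filled with concrete values, substs.update({name: '{{{}}}'.format(name) for name in MOZHARNESS_EXPANSIONS}) maps each mozharness-expanded name back to its own placeholder, so .format(**substs) fills in the task-side values while passing the mozharness-side placeholders through untouched. A small self-contained demo of the idea; the placeholder name "output" is made up and does not come from the real MOZHARNESS_EXPANSIONS.

# Illustrative only: "output" stands in for a name that mozharness, not the
# task transform, is responsible for expanding later.
mozharness_expansions = ["output"]

substs = {"architecture": "x86_64", "version_display": "99.0"}
# Map each mozharness-expanded name to its own placeholder, e.g. "output" -> "{output}".
substs.update({name: "{{{}}}".format(name) for name in mozharness_expansions})

arg = "--arch {architecture} --version {version_display} --out {output}"
print(arg.format(**substs))
# --arch x86_64 --version 99.0 --out {output}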
Example #18
def use_fetches(config, jobs):
    artifact_names = {}

    for task in config.kind_dependencies_tasks:
        if task.kind in ('fetch', 'toolchain'):
            get_attribute(
                artifact_names,
                task.label,
                task.attributes,
                '{kind}-artifact'.format(kind=task.kind),
            )

    for job in jobs:
        fetches = job.pop('fetches', None)
        if not fetches:
            yield job
            continue

        # Hack added for `mach artifact toolchain` to support reading toolchain
        # kinds in isolation.
        if 'fetch' in fetches and config.params.get('ignore_fetches'):
            fetches['fetch'][:] = []

        job_fetches = []
        name = job.get('name', job.get('label'))
        dependencies = job.setdefault('dependencies', {})
        prefix = get_artifact_prefix(job)
        for kind, artifacts in fetches.items():
            if kind in ('fetch', 'toolchain'):
                for fetch_name in artifacts:
                    label = '{kind}-{name}'.format(kind=kind, name=fetch_name)
                    if label not in artifact_names:
                        raise Exception(
                            'Missing fetch job for {kind}-{name}: {fetch}'.
                            format(kind=config.kind,
                                   name=name,
                                   fetch=fetch_name))

                    path = artifact_names[label]
                    if not path.startswith('public/'):
                        raise Exception(
                            'Non-public artifacts not supported for {kind}-{name}: '
                            '{fetch}'.format(kind=config.kind,
                                             name=name,
                                             fetch=fetch_name))

                    dependencies[label] = label
                    job_fetches.append({
                        'artifact': path,
                        'task': '<{label}>'.format(label=label),
                        'extract': True,
                    })
            else:
                if kind not in dependencies:
                    raise Exception(
                        "{name} can't fetch {kind} artifacts because "
                        "it has no {kind} dependencies!".format(name=name,
                                                                kind=kind))

                for artifact in artifacts:
                    if isinstance(artifact, basestring):
                        path = artifact
                        dest = None
                        extract = True
                    else:
                        path = artifact['artifact']
                        dest = artifact.get('dest')
                        extract = artifact.get('extract', True)

                    fetch = {
                        'artifact': '{prefix}/{path}'.format(prefix=prefix, path=path),
                        'task': '<{dep}>'.format(dep=kind),
                        'extract': extract,
                    }
                    if dest is not None:
                        fetch['dest'] = dest
                    job_fetches.append(fetch)

        env = job.setdefault('worker', {}).setdefault('env', {})
        env['MOZ_FETCHES'] = {
            'task-reference': json.dumps(job_fetches, sort_keys=True)
        }

        impl, os = worker_type_implementation(job['worker-type'])
        if os == 'windows':
            env.setdefault('MOZ_FETCHES_DIR', 'fetches')
        else:
            workdir = job['run'].get('workdir', '/builds/worker')
            env.setdefault('MOZ_FETCHES_DIR', '{}/fetches'.format(workdir))

        yield job
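
The net effect of the transform above is a MOZ_FETCHES environment variable whose value is a task-reference wrapping a JSON list of fetch entries. A rough sketch for a single toolchain fetch, where the dependency label and artifact path are made up for illustration:

import json

job_fetches = [{
    "artifact": "public/build/clang.tar.xz",    # illustrative artifact path
    "task": "<toolchain-linux64-clang>",         # illustrative dependency label
    "extract": True,
}]
env = {"MOZ_FETCHES": {"task-reference": json.dumps(job_fetches, sort_keys=True)}}
print(env["MOZ_FETCHES"]["task-reference"])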
Example #19
def make_job_description(config, jobs):
    for job in jobs:
        dep_job = job["primary-dependency"]
        dependencies = {dep_job.kind: dep_job.label}

        attributes = copy_attributes_from_dependent_job(dep_job)
        attributes["repackage_type"] = "repackage"

        locale = attributes.get("locale", job.get("locale"))
        if locale:
            attributes["locale"] = locale

        treeherder = job.get("treeherder", {})
        treeherder.setdefault("symbol", "Rpk")
        dep_th_platform = dep_job.task.get("extra",
                                           {}).get("treeherder-platform")
        treeherder.setdefault("platform", dep_th_platform)
        treeherder.setdefault("tier", 1)
        treeherder.setdefault("kind", "build")

        if config.kind == "repackage-msi":
            treeherder["symbol"] = "MSI({})".format(locale or "N")

        signing_task = None
        repackage_signing_task = None
        for dependency in dependencies.keys():
            if "repackage-signing" in dependency:
                repackage_signing_task = dependency
            elif "signing" in dependency:
                signing_task = dependency

        _fetch_subst_locale = "en-US"
        if locale:
            _fetch_subst_locale = locale

        worker_type = job["worker-type"]
        build_platform = attributes["build_platform"]

        use_stub = attributes.get("stub-installer")

        repackage_config = []
        package_formats = job.get("package-formats")
        if use_stub and not repackage_signing_task:
            # if repackage_signing_task doesn't exist, generate the stub installer
            package_formats += ["installer-stub"]
        for format in package_formats:
            command = copy.deepcopy(PACKAGE_FORMATS[format])
            substs = {
                "archive_format": archive_format(build_platform),
                "_locale": _fetch_subst_locale,
                "architecture": architecture(build_platform),
                "version_display": config.params["version"],
                "mar-channel-id": attributes["mar-channel-id"],
            }
            # Allow us to replace args as well, while keeping the names that
            # mozharness expands as placeholders, without breaking .format and
            # without letting unknown names through
            substs.update({
                name: "{{{}}}".format(name)
                for name in MOZHARNESS_EXPANSIONS
            })
            command["inputs"] = {
                name: filename.format(**substs)
                for name, filename in command["inputs"].items()
            }
            command["args"] = [arg.format(**substs) for arg in command["args"]]
            if "installer" in format and "aarch64" not in build_platform:
                command["args"].append("--use-upx")
            repackage_config.append(command)

        run = job.get("mozharness", {})
        run.update({
            "using": "mozharness",
            "script": "mozharness/scripts/repackage.py",
            "job-script": "taskcluster/scripts/builder/repackage.sh",
            "actions": ["setup", "repackage"],
            "extra-config": {
                "repackage_config": repackage_config,
            },
        })

        worker = job.get("worker", {})
        worker.update({
            "chain-of-trust": True,
            "max-run-time": 7200 if build_platform.startswith("win") else 3600,
            # Don't add generic artifact directory.
            "skip-artifacts": True,
        })

        if locale:
            # Make sure we specify the locale-specific upload dir
            worker.setdefault("env", {})["LOCALE"] = locale

        worker["artifacts"] = _generate_task_output_files(
            dep_job,
            worker_type_implementation(config.graph_config, worker_type),
            repackage_config=repackage_config,
            locale=locale,
        )

        description = ("Repackaging for locale '{locale}' for build '"
                       "{build_platform}/{build_type}'".format(
                           locale=attributes.get("locale", "en-US"),
                           build_platform=attributes.get("build_platform"),
                           build_type=attributes.get("build_type"),
                       ))

        task = {
            "label": job["label"],
            "description": description,
            "worker-type": worker_type,
            "dependencies": dependencies,
            "if-dependencies": [dep_job.kind],
            "attributes": attributes,
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "optimization": dep_job.optimization,
            "treeherder": treeherder,
            "routes": job.get("routes", []),
            "extra": job.get("extra", {}),
            "worker": worker,
            "run": run,
            "fetches": _generate_download_config(
                dep_job,
                build_platform,
                signing_task,
                repackage_signing_task,
                locale=locale,
                project=config.params["project"],
                existing_fetch=job.get("fetches"),
            ),
            "release-artifacts": [artifact["name"] for artifact in worker["artifacts"]],
        }

        if build_platform.startswith("macosx"):
            task.setdefault("fetches", {}).setdefault("toolchain", []).extend([
                "linux64-libdmg",
                "linux64-hfsplus",
                "linux64-node",
            ])
        yield task
Example #20
def make_job_description(config, jobs):
    for job in jobs:
        dep_job = job['primary-dependency']
        attributes = copy_attributes_from_dependent_job(dep_job)
        build_platform = attributes['build_platform']

        if job['build-platform'].startswith('win'):
            if dep_job.kind.endswith('signing'):
                continue
        if job['build-platform'].startswith('macosx'):
            if dep_job.kind.endswith('repack'):
                continue
        dependencies = {dep_job.attributes.get('kind'): dep_job.label}
        dependencies.update(dep_job.dependencies)

        signing_task = None
        for dependency in dependencies.keys():
            if build_platform.startswith('macosx') and dependency.endswith(
                    'signing'):
                signing_task = dependency
            elif build_platform.startswith('win') and dependency.endswith(
                    'repack'):
                signing_task = dependency

        attributes['repackage_type'] = 'repackage'

        repack_id = job['extra']['repack_id']

        partner_config = get_partner_config_by_kind(config, config.kind)
        partner, subpartner, _ = repack_id.split('/')
        repack_stub_installer = partner_config[partner][subpartner].get(
            'repack_stub_installer')
        if build_platform.startswith('win32') and repack_stub_installer:
            job['package-formats'].append('installer-stub')

        repackage_config = []
        for format in job.get('package-formats'):
            command = copy.deepcopy(PACKAGE_FORMATS[format])
            substs = {
                'archive_format': archive_format(build_platform),
                'executable_extension': executable_extension(build_platform),
            }
            command['inputs'] = {
                name: filename.format(**substs)
                for name, filename in command['inputs'].items()
            }
            repackage_config.append(command)

        run = job.get('mozharness', {})
        run.update({
            'using': 'mozharness',
            'script': 'mozharness/scripts/repackage.py',
            'job-script': 'taskcluster/scripts/builder/repackage.sh',
            'actions': ['setup', 'repackage'],
            'extra-config': {
                'repackage_config': repackage_config,
            },
        })

        worker = {
            'chain-of-trust': True,
            'max-run-time': 7200 if build_platform.startswith('win') else 3600,
            'taskcluster-proxy': True if get_artifact_prefix(dep_job) else False,
            'env': {
                'REPACK_ID': repack_id,
            },
            # Don't add generic artifact directory.
            'skip-artifacts': True,
        }

        worker_type = 'b-linux'
        worker['docker-image'] = {"in-tree": "debian8-amd64-build"}

        worker['artifacts'] = _generate_task_output_files(
            dep_job,
            worker_type_implementation(config.graph_config, worker_type),
            repackage_config,
            partner=repack_id,
        )

        description = ("Repackaging for repack_id '{repack_id}' for build '"
                       "{build_platform}/{build_type}'".format(
                           repack_id=job['extra']['repack_id'],
                           build_platform=attributes.get('build_platform'),
                           build_type=attributes.get('build_type')))

        task = {
            'label': job['label'],
            'description': description,
            'worker-type': worker_type,
            'dependencies': dependencies,
            'attributes': attributes,
            'scopes': ['queue:get-artifact:releng/partner/*'],
            'run-on-projects': dep_job.attributes.get('run_on_projects'),
            'routes': job.get('routes', []),
            'extra': job.get('extra', {}),
            'worker': worker,
            'run': run,
            'fetches': _generate_download_config(
                dep_job,
                build_platform,
                signing_task,
                partner=repack_id,
                project=config.params["project"],
                repack_stub_installer=repack_stub_installer),
        }

        # we may have reduced the priority for partner jobs, otherwise task.py will set it
        if job.get('priority'):
            task['priority'] = job['priority']
        if build_platform.startswith('macosx'):
            task.setdefault('fetches', {}).setdefault('toolchain', []).extend([
                'linux64-libdmg',
                'linux64-hfsplus',
                'linux64-node',
            ])
        yield task
Example #21
def make_job_description(config, jobs):
    for job in jobs:
        dep_job = job["primary-dependency"]
        attributes = copy_attributes_from_dependent_job(dep_job)
        build_platform = attributes["build_platform"]

        if job["build-platform"].startswith("win"):
            if dep_job.kind.endswith("signing"):
                continue
        if job["build-platform"].startswith("macosx"):
            if dep_job.kind.endswith("repack"):
                continue
        dependencies = {dep_job.attributes.get("kind"): dep_job.label}
        dependencies.update(dep_job.dependencies)

        signing_task = None
        for dependency in dependencies.keys():
            if build_platform.startswith("macosx") and dependency.endswith(
                    "signing"):
                signing_task = dependency
            elif build_platform.startswith("win") and dependency.endswith(
                    "repack"):
                signing_task = dependency

        attributes["repackage_type"] = "repackage"

        repack_id = job["extra"]["repack_id"]

        partner_config = get_partner_config_by_kind(config, config.kind)
        partner, subpartner, _ = repack_id.split("/")
        repack_stub_installer = partner_config[partner][subpartner].get(
            "repack_stub_installer")
        if build_platform.startswith("win32") and repack_stub_installer:
            job["package-formats"].append("installer-stub")

        repackage_config = []
        for format in job.get("package-formats"):
            command = copy.deepcopy(PACKAGE_FORMATS[format])
            substs = {
                "archive_format": archive_format(build_platform),
                "executable_extension": executable_extension(build_platform),
            }
            command["inputs"] = {
                name: filename.format(**substs)
                for name, filename in command["inputs"].items()
            }
            repackage_config.append(command)

        run = job.get("mozharness", {})
        run.update({
            "using": "mozharness",
            "script": "mozharness/scripts/repackage.py",
            "job-script": "taskcluster/scripts/builder/repackage.sh",
            "actions": ["setup", "repackage"],
            "extra-config": {
                "repackage_config": repackage_config,
            },
        })

        worker = {
            "chain-of-trust": True,
            "max-run-time": 7200 if build_platform.startswith("win") else 3600,
            "taskcluster-proxy":
            True if get_artifact_prefix(dep_job) else False,
            "env": {
                "REPACK_ID": repack_id,
            },
            # Don't add generic artifact directory.
            "skip-artifacts": True,
        }

        worker_type = "b-linux"
        worker["docker-image"] = {"in-tree": "debian10-amd64-build"}

        worker["artifacts"] = _generate_task_output_files(
            dep_job,
            worker_type_implementation(config.graph_config, worker_type),
            repackage_config,
            partner=repack_id,
        )

        description = ("Repackaging for repack_id '{repack_id}' for build '"
                       "{build_platform}/{build_type}'".format(
                           repack_id=job["extra"]["repack_id"],
                           build_platform=attributes.get("build_platform"),
                           build_type=attributes.get("build_type"),
                       ))

        task = {
            "label": job["label"],
            "description": description,
            "worker-type": worker_type,
            "dependencies": dependencies,
            "attributes": attributes,
            "scopes": ["queue:get-artifact:releng/partner/*"],
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "routes": job.get("routes", []),
            "extra": job.get("extra", {}),
            "worker": worker,
            "run": run,
            "fetches": _generate_download_config(
                dep_job,
                build_platform,
                signing_task,
                partner=repack_id,
                project=config.params["project"],
                repack_stub_installer=repack_stub_installer,
            ),
        }

        # we may have reduced the priority for partner jobs, otherwise task.py will set it
        if job.get("priority"):
            task["priority"] = job["priority"]
        if build_platform.startswith("macosx"):
            task.setdefault("fetches", {}).setdefault("toolchain", []).extend([
                "linux64-libdmg",
                "linux64-hfsplus",
                "linux64-node",
            ])
        yield task
Example #22
def make_job_description(config, jobs):
    for job in jobs:
        dep_job = job['primary-dependency']
        dependencies = {dep_job.attributes.get('kind'): dep_job.label}
        if len(dep_job.dependencies) > 1 and not config.kind == 'repackage-msi':
            # repackage-signing can end up with multiple deps...
            raise NotImplementedError(
                "Can't repackage a signing task with multiple dependencies")

        attributes = copy_attributes_from_dependent_job(dep_job)
        attributes['repackage_type'] = 'repackage'

        locale = attributes.get('locale', job.get('locale'))
        if locale:
            attributes['locale'] = locale

        treeherder = job.get('treeherder', {})
        if attributes.get('nightly'):
            treeherder.setdefault('symbol', 'Nr')
        else:
            treeherder.setdefault('symbol', 'Rpk')
        dep_th_platform = dep_job.task.get('extra', {}).get('treeherder-platform')
        treeherder.setdefault('platform', dep_th_platform)
        treeherder.setdefault('tier', 1)
        treeherder.setdefault('kind', 'build')

        if config.kind == 'repackage-msi':
            treeherder['symbol'] = 'MSI({})'.format(locale or 'N')

        signing_task = None
        repackage_signing_task = None
        for dependency in dependencies.keys():
            if 'repackage-signing' in dependency:
                repackage_signing_task = dependency
            elif 'signing' in dependency:
                signing_task = dependency

        _fetch_subst_locale = 'en-US'
        if locale:
            _fetch_subst_locale = locale

        worker_type = job['worker-type']
        build_platform = attributes['build_platform']

        use_stub = attributes.get('stub-installer')

        repackage_config = []
        package_formats = job.get('package-formats')
        if use_stub and not repackage_signing_task:
            # if repackage_signing_task doesn't exist, generate the stub installer
            package_formats += ['installer-stub']
        for format in package_formats:
            command = copy.deepcopy(PACKAGE_FORMATS[format])
            substs = {
                'archive_format': archive_format(build_platform),
                '_locale': _fetch_subst_locale,
                'architecture': architecture(build_platform),
                'version_display': config.params['version'],
                'mar-channel-id': attributes['mar-channel-id'],
            }
            # Allow us to replace args as well, while keeping the names that
            # mozharness expands as placeholders, without breaking .format and
            # without letting unknown names through
            substs.update({name: '{{{}}}'.format(name)
                           for name in MOZHARNESS_EXPANSIONS})
            command['inputs'] = {
                name: filename.format(**substs)
                for name, filename in command['inputs'].items()
            }
            command['args'] = [
                arg.format(**substs) for arg in command['args']
            ]
            if 'installer' in format and 'aarch64' not in build_platform:
                command['args'].append('--use-upx')
            repackage_config.append(command)

        run = job.get('mozharness', {})
        run.update({
            'using': 'mozharness',
            'script': 'mozharness/scripts/repackage.py',
            'job-script': 'taskcluster/scripts/builder/repackage.sh',
            'actions': ['setup', 'repackage'],
            'extra-workspace-cache-key': 'repackage',
            'extra-config': {
                'repackage_config': repackage_config,
            },
        })

        worker = job.get('worker', {})
        worker.update({
            'chain-of-trust': True,
            'max-run-time': 7200 if build_platform.startswith('win') else 3600,
            # Don't add generic artifact directory.
            'skip-artifacts': True,
        })

        if locale:
            # Make sure we specify the locale-specific upload dir
            worker.setdefault('env', {}).update(LOCALE=locale)

        worker['artifacts'] = _generate_task_output_files(
            dep_job, worker_type_implementation(config.graph_config, worker_type),
            repackage_config=repackage_config,
            locale=locale,
        )

        description = (
            "Repackaging for locale '{locale}' for build '"
            "{build_platform}/{build_type}'".format(
                locale=attributes.get('locale', 'en-US'),
                build_platform=attributes.get('build_platform'),
                build_type=attributes.get('build_type')
            )
        )

        task = {
            'label': job['label'],
            'description': description,
            'worker-type': worker_type,
            'dependencies': dependencies,
            'attributes': attributes,
            'run-on-projects': dep_job.attributes.get('run_on_projects'),
            'treeherder': treeherder,
            'routes': job.get('routes', []),
            'extra': job.get('extra', {}),
            'worker': worker,
            'run': run,
            'fetches': _generate_download_config(dep_job, build_platform,
                                                 signing_task, repackage_signing_task,
                                                 locale=locale,
                                                 project=config.params["project"],
                                                 existing_fetch=job.get('fetches')),
            'release-artifacts': [artifact['name'] for artifact in worker['artifacts']]
        }

        if build_platform.startswith('macosx'):
            task.setdefault('fetches', {}).setdefault('toolchain', []).extend([
                'linux64-libdmg',
                'linux64-hfsplus',
                'linux64-node',
            ])
        yield task