Example 1
def generate_upstream_artifacts(job, signing_task_ref, build_task_ref):
    build_mapping = CHECKSUMS_BUILD_ARTIFACTS
    signing_mapping = CHECKSUMS_SIGNING_ARTIFACTS

    artifact_prefix = get_artifact_prefix(job)

    upstream_artifacts = [{
        "taskId": {"task-reference": build_task_ref},
        "taskType": "build",
        "paths": ["{}/{}".format(artifact_prefix, p)
                  for p in build_mapping],
        "locale": "en-US",
    }, {
        "taskId": {"task-reference": signing_task_ref},
        "taskType": "signing",
        "paths": ["{}/{}".format(artifact_prefix, p)
                  for p in signing_mapping],
        "locale": "en-US",
    }]

    return upstream_artifacts
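
For reference, a minimal sketch of how this helper might be invoked. The constant values and the get_artifact_prefix stub below are illustrative stand-ins, not the real module definitions:

# Sketch only: illustrative constants and a stubbed helper.
CHECKSUMS_BUILD_ARTIFACTS = ["target.checksums"]
CHECKSUMS_SIGNING_ARTIFACTS = ["target.checksums.asc"]

def get_artifact_prefix(job):
    # Stub: the real helper reads an artifact-prefix attribute off the
    # task, typically defaulting to "public/build".
    return "public/build"

artifacts = generate_upstream_artifacts({}, "<checksums-signing>", "<build>")
print(artifacts[0]["paths"])  # ['public/build/target.checksums']
print(artifacts[1]["paths"])  # ['public/build/target.checksums.asc']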
Example 2
def docker_worker_use_artifacts(config, job, taskdesc, use_artifacts):
    """Set a JSON object of artifact URLs in an environment variable.

    This will tell the run-task script to download the artifacts.
    """
    urls = {}
    prefix = get_artifact_prefix(taskdesc)
    for kind, artifacts in use_artifacts.items():
        if kind not in taskdesc['dependencies']:
            raise Exception("{label} can't use '{kind}' artifacts because it has no '{kind}' "
                            "dependency!".format(label=job['label'], kind=kind))
        task_id = '<{}>'.format(kind)
        urls[kind] = []

        for artifact in artifacts:
            path = '/'.join([prefix, artifact])
            urls[kind].append(get_artifact_url(task_id, path))

    env = taskdesc['worker'].setdefault('env', {})
    env['USE_ARTIFACT_URLS'] = {'task-reference': json.dumps(urls)}
    env['USE_ARTIFACT_PATH'] = '/builds/worker/use-artifacts'
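
A rough usage sketch follows; the get_artifact_prefix and get_artifact_url stubs are assumptions standing in for the real taskgraph helpers:

import json

def get_artifact_prefix(taskdesc):
    return "public/build"  # stub

def get_artifact_url(task_id, path):
    # Stub for the real queue artifact URL builder.
    return "{}/artifacts/{}".format(task_id, path)

taskdesc = {"dependencies": {"build": "build-linux64/opt"}, "worker": {}}
docker_worker_use_artifacts(None, {"label": "example"}, taskdesc,
                            {"build": ["target.tar.bz2"]})
print(taskdesc["worker"]["env"]["USE_ARTIFACT_URLS"])
# {'task-reference': '{"build": ["<build>/artifacts/public/build/target.tar.bz2"]}'}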
Example 3
def generate_partials_upstream_artifacts(job,
                                         artifacts,
                                         platform,
                                         locale=None):
    artifact_prefix = get_artifact_prefix(job)
    if locale and locale != "en-US":
        artifact_prefix = "{}/{}".format(artifact_prefix, locale)

    upstream_artifacts = [{
        "taskId": {"task-reference": "<partials-signing>"},
        "taskType": "signing",
        "paths": ["{}/{}".format(artifact_prefix, path)
                  for path, _ in artifacts],
        "locale": locale or "en-US",
    }]

    return upstream_artifacts
Example 4
def _generate_task_output_files(task, worker_implementation, repackage_config, locale=None):
    locale_output_path = '{}/'.format(locale) if locale else ''
    artifact_prefix = get_artifact_prefix(task)

    if worker_implementation == ('docker-worker', 'linux'):
        local_prefix = '/builds/worker/workspace/'
    elif worker_implementation == ('generic-worker', 'windows'):
        local_prefix = ''
    else:
        raise NotImplementedError(
            'Unsupported worker implementation: "{}"'.format(worker_implementation))

    output_files = []
    for config in repackage_config:
        output_files.append({
            'type': 'file',
            'path': '{}build/outputs/{}{}'
                    .format(local_prefix, locale_output_path, config['output']),
            'name': '{}/{}{}'.format(artifact_prefix, locale_output_path, config['output']),
        })
    return output_files
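
To make the dispatch concrete, a small sketch (get_artifact_prefix is stubbed; the worker-implementation tuples mirror what worker_type_implementation() is assumed to return):

def get_artifact_prefix(task):
    return "public/build"  # stub

files = _generate_task_output_files(
    task={},
    worker_implementation=('generic-worker', 'windows'),
    repackage_config=[{'output': 'target.installer.exe'}],
    locale='de',
)
print(files[0]['path'])  # build/outputs/de/target.installer.exe
print(files[0]['name'])  # public/build/de/target.installer.exe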
Example 5
def generate_partials_upstream_artifacts(job,
                                         artifacts,
                                         platform,
                                         locale=None):
    artifact_prefix = get_artifact_prefix(job)
    if locale and locale != 'en-US':
        artifact_prefix = '{}/{}'.format(artifact_prefix, locale)

    upstream_artifacts = [{
        'taskId': {'task-reference': '<partials-signing>'},
        'taskType': 'signing',
        'paths': ["{}/{}".format(artifact_prefix, path)
                  for path, _ in artifacts],
        'locale': locale or 'en-US',
    }]

    return upstream_artifacts
Example 6
def generate_upstream_artifacts(job, dependencies):
    artifact_prefix = get_artifact_prefix(job)
    apks = [{
        'taskId': {'task-reference': '<{}>'.format(task_kind)},
        'taskType': 'signing',
        'paths': ['{}/target.apk'.format(artifact_prefix)],
    } for task_kind in dependencies.keys()
      if 'google-play-strings' not in task_kind]

    google_play_strings = [{
        'taskId': {'task-reference': '<{}>'.format(task_kind)},
        'taskType': 'build',
        'paths': ['public/google_play_strings.json'],
        'optional': True,
    } for task_kind in dependencies.keys()
      if 'google-play-strings' in task_kind]

    return apks + google_play_strings
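
A quick sketch of the split above: every dependency kind yields a signed-APK entry except google-play-strings kinds, which contribute an optional build artifact instead (dependency labels are made up, get_artifact_prefix is stubbed):

def get_artifact_prefix(job):
    return "public/build"  # stub

deps = {
    "signing-x86": "signing-fenix-x86",
    "google-play-strings": "google-play-strings-fenix",
}
for entry in generate_upstream_artifacts({}, deps):
    print(entry["taskType"], entry["paths"], entry.get("optional", False))
# signing ['public/build/target.apk'] False
# build ['public/google_play_strings.json'] True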
Example 7
def generate_upstream_artifacts(job, release_history, platform, locale=None):
    artifact_prefix = get_artifact_prefix(job)
    if locale:
        artifact_prefix = '{}/{}'.format(artifact_prefix, locale)
    else:
        locale = 'en-US'

    artifacts = get_partials_artifacts(release_history, platform, locale)

    upstream_artifacts = [{
        "taskId": {"task-reference": '<partials>'},
        "taskType": 'partials',
        "paths": [
            "{}/{}".format(artifact_prefix, path)
            for path, version in artifacts
            # TODO Use mozilla-version to avoid comparing strings. Otherwise Firefox 100 will be
            # considered smaller than Firefox 56
            if version is None or version >= '56'
        ],
        "formats": ["autograph_hash_only_mar384"],
    }]

    old_mar_upstream_artifacts = {
        "taskId": {"task-reference": '<partials>'},
        "taskType": 'partials',
        "paths": [
            "{}/{}".format(artifact_prefix, path)
            for path, version in artifacts
            # TODO Use mozilla-version to avoid comparing strings. Otherwise Firefox 100 will be
            # considered smaller than Firefox 56
            if version is not None and version < '56'
        ],
        "formats": ["mar"],
    }

    if old_mar_upstream_artifacts["paths"]:
        upstream_artifacts.append(old_mar_upstream_artifacts)

    return upstream_artifacts
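
The TODO above is worth spelling out: the version guard compares strings lexicographically, which misorders multi-digit versions.

print("100" >= "56")  # False -- '1' sorts before '5' character-wise
print(100 >= 56)      # True

# A parsed-version type fixes this; with mozilla-version (usage here is
# a sketch of the TODO's suggestion, not verified against its API):
# from mozilla_version.gecko import GeckoVersion
# GeckoVersion.parse("100.0") >= GeckoVersion.parse("56.0")  # True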
Example 8
def _generate_task_output_files(job, filenames, locale=None):
    locale_output_path = "{}/".format(locale) if locale else ""
    artifact_prefix = get_artifact_prefix(job)

    data = list()
    for filename in filenames:
        data.append({
            "type": "file",
            "path": "/home/worker/artifacts/{}".format(filename),
            "name": "{}/{}{}".format(artifact_prefix, locale_output_path,
                                     filename),
        })
    data.append({
        "type": "file",
        "path": "/home/worker/artifacts/manifest.json",
        "name": "{}/{}manifest.json".format(artifact_prefix,
                                            locale_output_path),
    })
    return data
Example 9
def _generate_task_output_files(job, filenames, locale=None):
    locale_output_path = '{}/'.format(locale) if locale else ''
    artifact_prefix = get_artifact_prefix(job)

    data = list()
    for filename in filenames:
        data.append({
            'type': 'file',
            'path': '/home/worker/artifacts/{}'.format(filename),
            'name': '{}/{}{}'.format(artifact_prefix, locale_output_path,
                                     filename),
        })
    data.append({
        'type': 'file',
        'path': '/home/worker/artifacts/manifest.json',
        'name': '{}/{}manifest.json'.format(artifact_prefix,
                                            locale_output_path),
    })
    return data
Example 10
def _generate_task_output_files(task, build_platform, partner):
    """We carefully generate an explicit list here, but there's an artifacts directory
    too, courtesy of generic_worker_add_artifacts() (windows) or docker_worker_add_artifacts().
    Any errors here are likely masked by that.
    """
    partner_output_path = '{}/'.format(partner)
    artifact_prefix = get_artifact_prefix(task)
    # Initialized so the fall-through check below raises the intended
    # NotImplementedError instead of a NameError on unknown platforms.
    output_files = []

    if build_platform.startswith('macosx'):
        output_files = [{
            'type': 'file',
            'path': '/builds/worker/workspace/build/artifacts/{}target.dmg'.format(
                partner_output_path),
            'name': '{}/{}target.dmg'.format(artifact_prefix,
                                             partner_output_path),
        }]

    elif build_platform.startswith('win'):
        output_files = [{
            'type': 'file',
            'path': '{}/{}target.installer.exe'.format(artifact_prefix,
                                                       partner_output_path),
            'name': '{}/{}target.installer.exe'.format(artifact_prefix,
                                                       partner_output_path),
        }]

    if output_files:
        return output_files

    raise NotImplementedError(
        'Unsupported build_platform: "{}"'.format(build_platform))
Example 11
def make_job_description(config, jobs):
    for job in jobs:
        dep_job = job["primary-dependency"]
        attributes = copy_attributes_from_dependent_job(dep_job)
        build_platform = attributes["build_platform"]

        if job["build-platform"].startswith("win"):
            if dep_job.kind.endswith("signing"):
                continue
        if job["build-platform"].startswith("macosx"):
            if dep_job.kind.endswith("repack"):
                continue
        dependencies = {dep_job.attributes.get("kind"): dep_job.label}
        dependencies.update(dep_job.dependencies)

        signing_task = None
        for dependency in dependencies.keys():
            if build_platform.startswith("macosx") and dependency.endswith(
                    "signing"):
                signing_task = dependency
            elif build_platform.startswith("win") and dependency.endswith(
                    "repack"):
                signing_task = dependency

        attributes["repackage_type"] = "repackage"

        repack_id = job["extra"]["repack_id"]

        partner_config = get_partner_config_by_kind(config, config.kind)
        partner, subpartner, _ = repack_id.split("/")
        repack_stub_installer = partner_config[partner][subpartner].get(
            "repack_stub_installer")
        if build_platform.startswith("win32") and repack_stub_installer:
            job["package-formats"].append("installer-stub")

        repackage_config = []
        for format in job.get("package-formats"):
            command = copy.deepcopy(PACKAGE_FORMATS[format])
            substs = {
                "archive_format": archive_format(build_platform),
                "executable_extension": executable_extension(build_platform),
            }
            command["inputs"] = {
                name: filename.format(**substs)
                for name, filename in command["inputs"].items()
            }
            repackage_config.append(command)

        run = job.get("mozharness", {})
        run.update({
            "using": "mozharness",
            "script": "mozharness/scripts/repackage.py",
            "job-script": "taskcluster/scripts/builder/repackage.sh",
            "actions": ["setup", "repackage"],
            "extra-config": {
                "repackage_config": repackage_config,
            },
        })

        worker = {
            "chain-of-trust": True,
            "max-run-time": 7200 if build_platform.startswith("win") else 3600,
            "taskcluster-proxy": bool(get_artifact_prefix(dep_job)),
            "env": {
                "REPACK_ID": repack_id,
            },
            # Don't add generic artifact directory.
            "skip-artifacts": True,
        }

        worker_type = "b-linux"
        worker["docker-image"] = {"in-tree": "debian10-amd64-build"}

        worker["artifacts"] = _generate_task_output_files(
            dep_job,
            worker_type_implementation(config.graph_config, worker_type),
            repackage_config,
            partner=repack_id,
        )

        description = ("Repackaging for repack_id '{repack_id}' for build '"
                       "{build_platform}/{build_type}'".format(
                           repack_id=job["extra"]["repack_id"],
                           build_platform=attributes.get("build_platform"),
                           build_type=attributes.get("build_type"),
                       ))

        task = {
            "label": job["label"],
            "description": description,
            "worker-type": worker_type,
            "dependencies": dependencies,
            "attributes": attributes,
            "scopes": ["queue:get-artifact:releng/partner/*"],
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "routes": job.get("routes", []),
            "extra": job.get("extra", {}),
            "worker": worker,
            "run": run,
            "fetches": _generate_download_config(
                dep_job,
                build_platform,
                signing_task,
                partner=repack_id,
                project=config.params["project"],
                repack_stub_installer=repack_stub_installer,
            ),
        }

        # we may have reduced the priority for partner jobs, otherwise task.py will set it
        if job.get("priority"):
            task["priority"] = job["priority"]
        if build_platform.startswith("macosx"):
            task.setdefault("fetches", {}).setdefault("toolchain", []).extend([
                "linux64-libdmg",
                "linux64-hfsplus",
                "linux64-node",
            ])
        yield task
Example 12
def make_job_description(config, jobs):
    for job in jobs:
        dep_job = job['dependent-task']
        attributes = copy_attributes_from_dependent_job(dep_job)
        build_platform = attributes['build_platform']

        if job['build-platform'].startswith('win'):
            if dep_job.kind.endswith('signing'):
                continue
        if job['build-platform'].startswith('macosx'):
            if dep_job.kind.endswith('repack'):
                continue
        dependencies = {dep_job.attributes.get('kind'): dep_job.label}
        dependencies.update(dep_job.dependencies)

        signing_task = None
        for dependency in dependencies.keys():
            if build_platform.startswith('macosx') and dependency.endswith(
                    'signing'):
                signing_task = dependency
            elif build_platform.startswith('win') and dependency.endswith(
                    'repack'):
                signing_task = dependency
        signing_task_ref = "<{}>".format(signing_task)

        attributes['repackage_type'] = 'repackage'

        level = config.params['level']
        repack_id = job['extra']['repack_id']

        run = job.get('mozharness', {})
        run.update({
            'using': 'mozharness',
            'script': 'mozharness/scripts/repackage.py',
            'job-script': 'taskcluster/scripts/builder/repackage.sh',
            'actions': ['download_input', 'setup', 'repackage'],
            'extra-workspace-cache-key': 'repackage',
        })

        worker = {
            'env': _generate_task_env(build_platform,
                                      signing_task,
                                      signing_task_ref,
                                      partner=repack_id),
            'artifacts': _generate_task_output_files(dep_job,
                                                     build_platform,
                                                     partner=repack_id),
            'chain-of-trust': True,
            'max-run-time': 7200 if build_platform.startswith('win') else 3600,
            'taskcluster-proxy': bool(get_artifact_prefix(dep_job)),
        }

        worker['env'].update(REPACK_ID=repack_id)

        if build_platform.startswith('win'):
            worker_type = 'aws-provisioner-v1/gecko-%s-b-win2012' % level
            run['use-magic-mh-args'] = False
        else:
            if build_platform.startswith('macosx'):
                worker_type = 'aws-provisioner-v1/gecko-%s-b-linux' % level
            else:
                raise NotImplementedError(
                    'Unsupported build_platform: "{}"'.format(build_platform))

            run['tooltool-downloads'] = 'internal'
            worker['docker-image'] = {"in-tree": "debian7-amd64-build"}

        description = ("Repackaging for repack_id '{repack_id}' for build '"
                       "{build_platform}/{build_type}'".format(
                           repack_id=job['extra']['repack_id'],
                           build_platform=attributes.get('build_platform'),
                           build_type=attributes.get('build_type')))

        task = {
            'label': job['label'],
            'description': description,
            'worker-type': worker_type,
            'dependencies': dependencies,
            'attributes': attributes,
            'scopes': ['queue:get-artifact:releng/partner/*'],
            'run-on-projects': dep_job.attributes.get('run_on_projects'),
            'routes': job.get('routes', []),
            'extra': job.get('extra', {}),
            'worker': worker,
            'run': run,
        }

        if build_platform.startswith('macosx'):
            task['toolchains'] = [
                'linux64-libdmg',
                'linux64-hfsplus',
            ]
        yield task
Example 13
def make_job_description(config, jobs):
    for job in jobs:
        dep_job = job['primary-dependency']
        attributes = copy_attributes_from_dependent_job(dep_job)
        build_platform = attributes['build_platform']

        if job['build-platform'].startswith('win'):
            if dep_job.kind.endswith('signing'):
                continue
        if job['build-platform'].startswith('macosx'):
            if dep_job.kind.endswith('repack'):
                continue
        dependencies = {dep_job.attributes.get('kind'): dep_job.label}
        dependencies.update(dep_job.dependencies)

        signing_task = None
        for dependency in dependencies.keys():
            if build_platform.startswith('macosx') and dependency.endswith(
                    'signing'):
                signing_task = dependency
            elif build_platform.startswith('win') and dependency.endswith(
                    'repack'):
                signing_task = dependency

        attributes['repackage_type'] = 'repackage'

        repack_id = job['extra']['repack_id']

        partner_config = get_partner_config_by_kind(config, config.kind)
        partner, subpartner, _ = repack_id.split('/')
        repack_stub_installer = partner_config[partner][subpartner].get(
            'repack_stub_installer')
        if build_platform.startswith('win32') and repack_stub_installer:
            job['package-formats'].append('installer-stub')

        repackage_config = []
        for format in job.get('package-formats'):
            command = copy.deepcopy(PACKAGE_FORMATS[format])
            substs = {
                'archive_format': archive_format(build_platform),
                'executable_extension': executable_extension(build_platform),
            }
            command['inputs'] = {
                name: filename.format(**substs)
                for name, filename in command['inputs'].items()
            }
            repackage_config.append(command)

        run = job.get('mozharness', {})
        run.update({
            'using': 'mozharness',
            'script': 'mozharness/scripts/repackage.py',
            'job-script': 'taskcluster/scripts/builder/repackage.sh',
            'actions': ['setup', 'repackage'],
            'extra-config': {
                'repackage_config': repackage_config,
            },
        })

        worker = {
            'chain-of-trust': True,
            'max-run-time': 7200 if build_platform.startswith('win') else 3600,
            'taskcluster-proxy': bool(get_artifact_prefix(dep_job)),
            'env': {
                'REPACK_ID': repack_id,
            },
            # Don't add generic artifact directory.
            'skip-artifacts': True,
        }

        worker_type = 'b-linux'
        worker['docker-image'] = {"in-tree": "debian8-amd64-build"}

        worker['artifacts'] = _generate_task_output_files(
            dep_job,
            worker_type_implementation(config.graph_config, worker_type),
            repackage_config,
            partner=repack_id,
        )

        description = ("Repackaging for repack_id '{repack_id}' for build '"
                       "{build_platform}/{build_type}'".format(
                           repack_id=job['extra']['repack_id'],
                           build_platform=attributes.get('build_platform'),
                           build_type=attributes.get('build_type')))

        task = {
            'label': job['label'],
            'description': description,
            'worker-type': worker_type,
            'dependencies': dependencies,
            'attributes': attributes,
            'scopes': ['queue:get-artifact:releng/partner/*'],
            'run-on-projects': dep_job.attributes.get('run_on_projects'),
            'routes': job.get('routes', []),
            'extra': job.get('extra', {}),
            'worker': worker,
            'run': run,
            'fetches': _generate_download_config(
                dep_job,
                build_platform,
                signing_task,
                partner=repack_id,
                project=config.params["project"],
                repack_stub_installer=repack_stub_installer),
        }

        # we may have reduced the priority for partner jobs, otherwise task.py will set it
        if job.get('priority'):
            task['priority'] = job['priority']
        if build_platform.startswith('macosx'):
            task.setdefault('fetches', {}).setdefault('toolchain', []).extend([
                'linux64-libdmg',
                'linux64-hfsplus',
                'linux64-node',
            ])
        yield task
Example 14
def use_fetches(config, jobs):
    all_fetches = {}

    for task in config.kind_dependencies_tasks:
        if task.kind != 'fetch':
            continue

        name = task.label.replace('%s-' % task.kind, '')
        get_attribute(all_fetches, name, task.attributes, 'fetch-artifact')

    for job in jobs:
        fetches = job.pop('fetches', None)
        if not fetches:
            yield job
            continue

        # Hack added for `mach artifact toolchain` to support reading toolchain
        # kinds in isolation.
        if 'fetch' in fetches and config.params.get('ignore_fetches'):
            fetches['fetch'][:] = []

        job_fetches = []
        name = job.get('name', job.get('label'))
        dependencies = job.setdefault('dependencies', {})
        prefix = get_artifact_prefix(job)
        for kind, artifacts in fetches.items():
            if kind == 'fetch':
                for fetch in artifacts:
                    if fetch not in all_fetches:
                        raise Exception(
                            'Missing fetch job for {kind}-{name}: {fetch}'.
                            format(kind=config.kind, name=name, fetch=fetch))

                    path = all_fetches[fetch]
                    if not path.startswith('public/'):
                        raise Exception(
                            'Non-public artifacts not supported for {kind}-{name}: '
                            '{fetch}'.format(kind=config.kind,
                                             name=name,
                                             fetch=fetch))

                    dep = 'fetch-{}'.format(fetch)
                    dependencies[dep] = dep
                    job_fetches.append('{path}@<{dep}>'.format(path=path,
                                                               dep=dep))

            else:
                if kind not in dependencies:
                    raise Exception(
                        "{name} can't fetch {kind} artifacts because "
                        "it has no {kind} dependencies!".format(name=name,
                                                                kind=kind))

                for path in artifacts:
                    job_fetches.append('{prefix}/{path}@<{dep}>'.format(
                        prefix=prefix, path=path, dep=kind))

        env = job.setdefault('worker', {}).setdefault('env', {})
        env['MOZ_FETCHES'] = {'task-reference': ' '.join(job_fetches)}

        workdir = job['run'].get('workdir', '/builds/worker')
        env.setdefault('MOZ_FETCHES_DIR', '{}/fetches'.format(workdir))
        yield job
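
The resulting environment is easy to picture: in this variant each fetch is encoded as "path@<task-reference>" and the entries are space-joined (values below are made up):

env_value = ' '.join([
    'public/clang.tar.xz@<fetch-clang>',
    'public/build/target.tar.bz2@<build>',
])
# MOZ_FETCHES == {'task-reference': env_value}
# MOZ_FETCHES_DIR defaults to '<workdir>/fetches'.
print(env_value)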
Example 15
def use_fetches(config, jobs):
    artifact_names = {}

    for task in config.kind_dependencies_tasks:
        if task.kind in ('fetch', 'toolchain'):
            get_attribute(
                artifact_names,
                task.label,
                task.attributes,
                '{kind}-artifact'.format(kind=task.kind),
            )

    for job in jobs:
        fetches = job.pop('fetches', None)
        if not fetches:
            yield job
            continue

        # Hack added for `mach artifact toolchain` to support reading toolchain
        # kinds in isolation.
        if 'fetch' in fetches and config.params.get('ignore_fetches'):
            fetches['fetch'][:] = []

        job_fetches = []
        name = job.get('name', job.get('label'))
        dependencies = job.setdefault('dependencies', {})
        prefix = get_artifact_prefix(job)
        for kind, artifacts in fetches.items():
            if kind in ('fetch', 'toolchain'):
                for fetch_name in artifacts:
                    label = '{kind}-{name}'.format(kind=kind, name=fetch_name)
                    if label not in artifact_names:
                        raise Exception(
                            'Missing fetch job for {kind}-{name}: {fetch}'.
                            format(kind=config.kind,
                                   name=name,
                                   fetch=fetch_name))

                    path = artifact_names[label]
                    if not path.startswith('public/'):
                        raise Exception(
                            'Non-public artifacts not supported for {kind}-{name}: '
                            '{fetch}'.format(kind=config.kind,
                                             name=name,
                                             fetch=fetch_name))

                    dependencies[label] = label
                    job_fetches.append({
                        'artifact': path,
                        'task': '<{label}>'.format(label=label),
                        'extract': True,
                    })
            else:
                if kind not in dependencies:
                    raise Exception(
                        "{name} can't fetch {kind} artifacts because "
                        "it has no {kind} dependencies!".format(name=name,
                                                                kind=kind))

                for artifact in artifacts:
                    # `basestring` marks this snippet as Python 2; on
                    # Python 3 the equivalent check is isinstance(artifact, str).
                    if isinstance(artifact, basestring):
                        path = artifact
                        dest = None
                        extract = True
                    else:
                        path = artifact['artifact']
                        dest = artifact.get('dest')
                        extract = artifact.get('extract', True)

                    fetch = {
                        'artifact': '{prefix}/{path}'.format(prefix=prefix,
                                                             path=path),
                        'task': '<{dep}>'.format(dep=kind),
                        'extract': extract,
                    }
                    if dest is not None:
                        fetch['dest'] = dest
                    job_fetches.append(fetch)

        env = job.setdefault('worker', {}).setdefault('env', {})
        env['MOZ_FETCHES'] = {
            'task-reference': json.dumps(job_fetches, sort_keys=True)
        }

        impl, os = worker_type_implementation(job['worker-type'])
        if os == 'windows':
            env.setdefault('MOZ_FETCHES_DIR', 'fetches')
        else:
            workdir = job['run'].get('workdir', '/builds/worker')
            env.setdefault('MOZ_FETCHES_DIR', '{}/fetches'.format(workdir))

        yield job
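
Unlike the space-joined form in the previous example, this variant encodes MOZ_FETCHES as JSON. A hand-built illustration of the encoded value (artifact names are made up):

import json

job_fetches = [
    {'artifact': 'public/toolchains/clang.tar.xz',
     'task': '<toolchain-linux64-clang>', 'extract': True},
    {'artifact': 'public/build/target.zip', 'task': '<build>',
     'extract': False, 'dest': 'inputs'},
]
print(json.dumps(job_fetches, sort_keys=True))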
Example 16
def generate_upstream_artifacts(job,
                                signing_task_ref,
                                build_task_ref,
                                platform,
                                locale=None):
    build_mapping = UPSTREAM_ARTIFACT_UNSIGNED_PATHS
    signing_mapping = UPSTREAM_ARTIFACT_SIGNED_PATHS

    artifact_prefix = get_artifact_prefix(job)
    if locale:
        artifact_prefix = '{}/{}'.format(artifact_prefix, locale)
        platform = "{}-l10n".format(platform)

    if platform.endswith("-source"):
        return [{
            "taskId": {"task-reference": signing_task_ref},
            "taskType": "signing",
            "paths": ["{}/{}".format(artifact_prefix, p)
                      for p in UPSTREAM_SOURCE_ARTIFACTS],
            "locale": locale or "en-US",
        }]

    upstream_artifacts = []

    # Some platforms (like android-api-16-nightly-l10n) may not depend on any unsigned artifact
    if build_mapping[platform]:
        upstream_artifacts.append({
            "taskId": {"task-reference": build_task_ref},
            "taskType": "build",
            "paths": ["{}/{}".format(artifact_prefix, p)
                      for p in build_mapping[platform]],
            "locale": locale or "en-US",
        })

    upstream_artifacts.append({
        "taskId": {"task-reference": signing_task_ref},
        "taskType": "signing",
        "paths": ["{}/{}".format(artifact_prefix, p)
                  for p in signing_mapping[platform]],
        "locale": locale or "en-US",
    })

    if not locale and "android" in platform:
        # edge case to support 'multi' locale paths
        multi_platform = "{}-multi".format(platform)
        upstream_artifacts.extend([{
            "taskId": {"task-reference": build_task_ref},
            "taskType": "build",
            "paths": ["{}/{}".format(artifact_prefix, p)
                      for p in build_mapping[multi_platform]],
            "locale": "multi",
        }, {
            "taskId": {"task-reference": signing_task_ref},
            "taskType": "signing",
            "paths": ["{}/{}".format(artifact_prefix, p)
                      for p in signing_mapping[multi_platform]],
            "locale": "multi",
        }])

    return upstream_artifacts
Example 17
def generate_beetmover_artifact_map(config, job, **kwargs):
    """Generate the beetmover artifact map.

    Currently only applies to beetmover tasks.

    Args:
        config (TransformConfig): The current taskgraph configuration.
        job (dict): The current job being generated.
    Common kwargs:
        platform (str): The current build platform
        locale (str): The current locale being beetmoved.

    Returns:
        list: A list of dictionaries containing source->destination
            maps for beetmover.
    """
    platform = kwargs.get("platform", "")
    resolve_keyed_by(
        job,
        "attributes.artifact_map",
        job["label"],
        **{
            "release-type": config.params["release_type"],
            "platform": platform,
        },
    )
    map_config = deepcopy(cached_load_yaml(job["attributes"]["artifact_map"]))
    base_artifact_prefix = map_config.get(
        "base_artifact_prefix", get_artifact_prefix(job)
    )

    artifacts = list()

    dependencies = job["dependencies"].keys()

    if kwargs.get("locale"):
        if isinstance(kwargs["locale"], list):
            locales = kwargs["locale"]
        else:
            locales = [kwargs["locale"]]
    else:
        locales = map_config["default_locales"]

    resolve_keyed_by(
        map_config,
        "s3_bucket_paths",
        job["label"],
        **{"build-type": job["attributes"]["build-type"]},
    )

    for locale, dep in sorted(itertools.product(locales, dependencies)):
        paths = dict()
        for filename in map_config["mapping"]:
            # Relevancy checks
            if dep not in map_config["mapping"][filename]["from"]:
                # We don't get this file from this dependency.
                continue
            if locale != "multi" and not map_config["mapping"][filename]["all_locales"]:
                # This locale either doesn't produce or shouldn't upload this file.
                continue
            if (
                "only_for_platforms" in map_config["mapping"][filename]
                and platform
                not in map_config["mapping"][filename]["only_for_platforms"]
            ):
                # This platform either doesn't produce or shouldn't upload this file.
                continue
            if (
                "not_for_platforms" in map_config["mapping"][filename]
                and platform in map_config["mapping"][filename]["not_for_platforms"]
            ):
                # This platform either doesn't produce or shouldn't upload this file.
                continue
            if "partials_only" in map_config["mapping"][filename]:
                continue

            # deepcopy because the next time we look at this file the locale will differ.
            file_config = deepcopy(map_config["mapping"][filename])

            for field in [
                "destinations",
                "locale_prefix",
                "source_path_modifier",
                "update_balrog_manifest",
                "pretty_name",
                "checksums_path",
            ]:
                resolve_keyed_by(file_config, field, job["label"], locale=locale)

            # This format string should ideally be in the configuration file,
            # but this would mean keeping variable names in sync between code + config.
            destinations = [
                "{s3_bucket_path}/{dest_path}/{filename}".format(
                    s3_bucket_path=bucket_path,
                    dest_path=dest_path,
                    locale_prefix=file_config["locale_prefix"],
                    filename=file_config.get("pretty_name", filename),
                )
                for dest_path, bucket_path in itertools.product(
                    file_config["destinations"], map_config["s3_bucket_paths"]
                )
            ]
            # Creating map entries
            # Key must be artifact path, to avoid trampling duplicates, such
            # as public/build/target.apk and public/build/multi/target.apk
            key = os.path.join(
                base_artifact_prefix,
                file_config["source_path_modifier"],
                filename,
            )

            paths[key] = {
                "destinations": destinations,
            }
            if file_config.get("checksums_path"):
                paths[key]["checksums_path"] = file_config["checksums_path"]

            # optional flag: balrog manifest
            if file_config.get("update_balrog_manifest"):
                paths[key]["update_balrog_manifest"] = True
                if file_config.get("balrog_format"):
                    paths[key]["balrog_format"] = file_config["balrog_format"]

        if not paths:
            # No files for this dependency/locale combination.
            continue

        # Render all variables for the artifact map
        platforms = deepcopy(map_config.get("platform_names", {}))
        if platform:
            for key in platforms.keys():
                resolve_keyed_by(platforms, key, job["label"], platform=platform)

        version = read_version_file()
        upload_date = datetime.fromtimestamp(config.params["build_date"])

        if job["attributes"]["build-type"] == "nightly":
            folder_prefix = upload_date.strftime("%Y/%m/%Y-%m-%d-%H-%M-%S-")
            # TODO: Remove this when version.txt has versioning fixed
            version = version.split("-")[0]
        else:
            folder_prefix = f"{version}/android/"

        kwargs.update(
            {"locale": locale, "version": version, "folder_prefix": folder_prefix}
        )
        kwargs.update(**platforms)
        paths = jsone.render(paths, kwargs)
        artifacts.append(
            {
                "taskId": {"task-reference": "<{}>".format(dep)},
                "locale": locale,
                "paths": paths,
            }
        )

    return artifacts
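
For orientation, an illustrative map_config covering just the keys this function reads; real artifact maps are YAML files loaded by cached_load_yaml, and every value below is made up:

map_config = {
    "base_artifact_prefix": "public/build",
    "default_locales": ["en-US"],
    "s3_bucket_paths": ["pub/mobile/nightly"],
    "platform_names": {"path_platform": "android"},
    "mapping": {
        "target.apk": {
            "from": ["signing"],  # dependency kinds that produce this file
            "all_locales": True,
            "only_for_platforms": ["android-nightly"],
            "source_path_modifier": "",
            "locale_prefix": "",
            "pretty_name": "fenix-{version}.multi.android.apk",
            "checksums_path": "fenix-{version}.multi.android.apk.checksums",
            "update_balrog_manifest": False,
            "destinations": ["{folder_prefix}fenix-{version}"],
        },
    },
}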
Example 18
def _generate_task_output_files(task, build_platform, locale=None):
    locale_output_path = '{}/'.format(locale) if locale else ''
    artifact_prefix = get_artifact_prefix(task)
    # Initialized so the fall-through check below raises the intended
    # NotImplementedError instead of a NameError on unknown platforms.
    output_files = []

    if build_platform.startswith('linux') or build_platform.startswith('macosx'):
        output_files = [{
            'type': 'file',
            'path': '/builds/worker/workspace/build/artifacts/{}target.complete.mar'.format(
                locale_output_path),
            'name': '{}/{}target.complete.mar'.format(artifact_prefix,
                                                      locale_output_path),
        }]

        if build_platform.startswith('macosx'):
            output_files.append({
                'type': 'file',
                'path': '/builds/worker/workspace/build/artifacts/{}target.dmg'.format(
                    locale_output_path),
                'name': '{}/{}target.dmg'.format(artifact_prefix,
                                                 locale_output_path),
            })

    elif build_platform.startswith('win'):
        output_files = [{
            'type': 'file',
            'path': '{}/{}target.installer.exe'.format(artifact_prefix,
                                                       locale_output_path),
            'name': '{}/{}target.installer.exe'.format(artifact_prefix,
                                                       locale_output_path),
        }, {
            'type': 'file',
            'path': '{}/{}target.complete.mar'.format(artifact_prefix,
                                                      locale_output_path),
            'name': '{}/{}target.complete.mar'.format(artifact_prefix,
                                                      locale_output_path),
        }]

        # Stub installer is only generated on win32
        if '32' in build_platform:
            output_files.append({
                'type': 'file',
                'path': '{}/{}target.stub-installer.exe'.format(
                    artifact_prefix, locale_output_path),
                'name': '{}/{}target.stub-installer.exe'.format(
                    artifact_prefix, locale_output_path),
            })

    if output_files:
        return output_files

    raise NotImplementedError(
        'Unsupported build_platform: "{}"'.format(build_platform))
Example 19
def generate_upstream_artifacts(job,
                                build_task_ref,
                                build_signing_task_ref,
                                repackage_task_ref,
                                repackage_signing_task_ref,
                                platform,
                                locale=None,
                                project=None):

    build_mapping = UPSTREAM_ARTIFACT_UNSIGNED_PATHS
    build_signing_mapping = UPSTREAM_ARTIFACT_SIGNED_PATHS
    repackage_mapping = UPSTREAM_ARTIFACT_REPACKAGE_PATHS
    repackage_signing_mapping = UPSTREAM_ARTIFACT_SIGNED_REPACKAGE_PATHS

    artifact_prefix = get_artifact_prefix(job)
    if locale:
        artifact_prefix = '{}/{}'.format(artifact_prefix, locale)
        platform = "{}-l10n".format(platform)

    upstream_artifacts = []

    task_refs = [
        build_task_ref, build_signing_task_ref, repackage_task_ref,
        repackage_signing_task_ref
    ]
    tasktypes = ['build', 'signing', 'repackage', 'repackage']
    mappings = [
        build_mapping, build_signing_mapping, repackage_mapping,
        repackage_signing_mapping
    ]

    for ref, tasktype, mapping in zip(task_refs, tasktypes, mappings):
        platform_was_previously_matched_by_regex = None
        # Each mapping is keyed by compiled platform regexes.
        for platform_regex, paths in mapping.items():
            if platform_regex.match(platform) is not None:
                _check_platform_matched_only_one_regex(
                    tasktype, platform,
                    platform_was_previously_matched_by_regex, platform_regex)
                if paths:
                    usable_paths = paths[:]

                    use_stub = job["attributes"].get('stub-installer')
                    if not use_stub:
                        if 'target.stub-installer.exe' in usable_paths:
                            usable_paths.remove('target.stub-installer.exe')
                    upstream_artifacts.append({
                        "taskId": {"task-reference": ref},
                        "taskType": tasktype,
                        "paths": [
                            "{}/{}".format(artifact_prefix, path)
                            for path in usable_paths
                        ],
                        "locale": locale or "en-US",
                    })
                platform_was_previously_matched_by_regex = platform_regex

    return upstream_artifacts
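
Note that the *_PATHS mappings iterated above are keyed by compiled platform regexes, hence platform_regex.match(platform). A minimal illustration of that structure (patterns and paths made up):

import re

UPSTREAM_ARTIFACT_SIGNED_PATHS = {
    re.compile(r'^win(32|64)$'): ['target.zip'],
    re.compile(r'^linux(64)?$'): ['target.tar.bz2'],
}

for platform_regex, paths in UPSTREAM_ARTIFACT_SIGNED_PATHS.items():
    if platform_regex.match('win64'):
        print(paths)  # ['target.zip']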
Example 20
def generate_beetmover_upstream_artifacts(
    config, job, platform, locale=None, dependencies=None, **kwargs
):
    """Generate the upstream artifacts for beetmover, using the artifact map.

    Currently only applies to beetmover tasks.

    Args:
        config (TransformConfig): The current taskgraph configuration.
        job (dict): The current job being generated.
        platform (str): The current build platform.
        locale (str): The current locale being beetmoved.
        dependencies (list): A list of the job's dependency labels.

    Returns:
        list: A list of dictionaries conforming to the upstream_artifacts spec.
    """
    base_artifact_prefix = get_artifact_prefix(job)
    resolve_keyed_by(
        job,
        "attributes.artifact_map",
        "artifact map",
        **{
            "release-type": config.params["release_type"],
            "platform": platform,
        },
    )
    map_config = deepcopy(cached_load_yaml(job["attributes"]["artifact_map"]))
    upstream_artifacts = list()

    if not locale:
        locales = map_config["default_locales"]
    elif isinstance(locale, list):
        locales = locale
    else:
        locales = [locale]

    if not dependencies:
        if job.get("dependencies"):
            dependencies = job["dependencies"].keys()
        elif job.get("primary-dependency"):
            dependencies = [job["primary-dependency"].kind]
        else:
            raise Exception("Unsupported type of dependency. Got job: {}".format(job))

    for locale, dep in itertools.product(locales, dependencies):
        paths = list()

        for filename in map_config["mapping"]:
            if dep not in map_config["mapping"][filename]["from"]:
                continue
            if locale != "multi" and not map_config["mapping"][filename]["all_locales"]:
                continue
            if (
                "only_for_platforms" in map_config["mapping"][filename]
                and platform
                not in map_config["mapping"][filename]["only_for_platforms"]
            ):
                continue
            if (
                "not_for_platforms" in map_config["mapping"][filename]
                and platform in map_config["mapping"][filename]["not_for_platforms"]
            ):
                continue
            if "partials_only" in map_config["mapping"][filename]:
                continue
            # The next time we look at this file it might be a different locale.
            file_config = deepcopy(map_config["mapping"][filename])
            resolve_keyed_by(
                file_config,
                "source_path_modifier",
                "source path modifier",
                locale=locale,
            )

            kwargs["locale"] = locale

            paths.append(
                os.path.join(
                    base_artifact_prefix,
                    jsone.render(file_config["source_path_modifier"], kwargs),
                    jsone.render(filename, kwargs),
                )
            )

        if job.get("dependencies") and getattr(
            job["dependencies"][dep], "release_artifacts", None
        ):
            paths = [
                path
                for path in paths
                if path in job["dependencies"][dep].release_artifacts
            ]

        if not paths:
            continue

        upstream_artifacts.append(
            {
                "taskId": {"task-reference": "<{}>".format(dep)},
                "taskType": map_config["tasktype_map"].get(dep),
                "paths": sorted(paths),
                "locale": locale,
            }
        )

    upstream_artifacts.sort(key=lambda u: u["paths"])
    return upstream_artifacts
Example 21
def use_fetches(config, jobs):
    artifact_names = {}
    aliases = {}

    if config.kind in ("toolchain", "fetch"):
        jobs = list(jobs)
        for job in jobs:
            run = job.get("run", {})
            label = job["label"]
            get_attribute(artifact_names, label, run, "toolchain-artifact")
            value = run.get("{}-alias".format(config.kind))
            if value:
                aliases["{}-{}".format(config.kind, value)] = label

    for task in config.kind_dependencies_tasks.values():
        if task.kind in ("fetch", "toolchain"):
            get_attribute(
                artifact_names,
                task.label,
                task.attributes,
                "{kind}-artifact".format(kind=task.kind),
            )
            value = task.attributes.get("{}-alias".format(task.kind))
            if value:
                aliases["{}-{}".format(task.kind, value)] = task.label

    artifact_prefixes = {}
    for job in order_tasks(config, jobs):
        artifact_prefixes[job["label"]] = get_artifact_prefix(job)

        fetches = job.pop("fetches", None)
        if not fetches:
            yield job
            continue

        job_fetches = []
        name = job.get("name", job.get("label"))
        dependencies = job.setdefault("dependencies", {})
        worker = job.setdefault("worker", {})
        prefix = get_artifact_prefix(job)
        has_sccache = False
        for kind, artifacts in fetches.items():
            if kind in ("fetch", "toolchain"):
                for fetch_name in artifacts:
                    label = "{kind}-{name}".format(kind=kind, name=fetch_name)
                    label = aliases.get(label, label)
                    if label not in artifact_names:
                        raise Exception(
                            "Missing fetch job for {kind}-{name}: {fetch}".format(
                                kind=config.kind, name=name, fetch=fetch_name
                            )
                        )

                    path = artifact_names[label]

                    dependencies[label] = label
                    job_fetches.append(
                        {
                            "artifact": path,
                            "task": "<{label}>".format(label=label),
                            "extract": True,
                        }
                    )

                    if kind == "toolchain" and fetch_name.endswith("-sccache"):
                        has_sccache = True
            else:
                if kind not in dependencies:
                    raise Exception(
                        "{name} can't fetch {kind} artifacts because "
                        "it has no {kind} dependencies!".format(name=name, kind=kind)
                    )
                dep_label = dependencies[kind]
                if dep_label in artifact_prefixes:
                    prefix = artifact_prefixes[dep_label]
                else:
                    if dep_label not in config.kind_dependencies_tasks:
                        raise Exception(
                            "{name} can't fetch {kind} artifacts because "
                            "there are no tasks with label {label} in kind dependencies!".format(
                                name=name,
                                kind=kind,
                                label=dependencies[kind],
                            )
                        )

                    prefix = get_artifact_prefix(
                        config.kind_dependencies_tasks[dep_label]
                    )

                for artifact in artifacts:
                    if isinstance(artifact, text_type):
                        path = artifact
                        dest = None
                        extract = True
                        verify_hash = False
                    else:
                        path = artifact["artifact"]
                        dest = artifact.get("dest")
                        extract = artifact.get("extract", True)
                        verify_hash = artifact.get("verify-hash", False)

                    fetch = {
                        "artifact": "{prefix}/{path}".format(prefix=prefix, path=path)
                        if not path.startswith("/")
                        else path[1:],
                        "task": "<{dep}>".format(dep=kind),
                        "extract": extract,
                    }
                    if dest is not None:
                        fetch["dest"] = dest
                    if verify_hash:
                        fetch["verify-hash"] = verify_hash
                    job_fetches.append(fetch)

        if job.get("use-sccache") and not has_sccache:
            raise Exception("Must provide an sccache toolchain if using sccache.")

        job_artifact_prefixes = {
            mozpath.dirname(fetch["artifact"])
            for fetch in job_fetches
            if not fetch["artifact"].startswith("public/")
        }
        if job_artifact_prefixes:
            # Use taskcluster-proxy and request appropriate scope.  For example, add
            # 'scopes: [queue:get-artifact:path/to/*]' for 'path/to/artifact.tar.xz'.
            worker["taskcluster-proxy"] = True
            for prefix in sorted(job_artifact_prefixes):
                scope = "queue:get-artifact:{}/*".format(prefix)
                if scope not in job.setdefault("scopes", []):
                    job["scopes"].append(scope)

        env = worker.setdefault("env", {})
        env["MOZ_FETCHES"] = {
            "task-reference": six.ensure_text(
                json.dumps(
                    sorted(job_fetches, key=lambda x: sorted(x.items())), sort_keys=True
                )
            )
        }
        # The path is normalized to an absolute path in run-task
        env.setdefault("MOZ_FETCHES_DIR", "fetches")

        yield job
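
One subtlety above: an artifact path beginning with "/" bypasses the dependency's artifact prefix (the leading slash is stripped and the rest is used verbatim). A toy rendering of that branch:

prefix = 'public/build'
for path in ['target.tar.bz2', '/releng/partner/target.zip']:
    artifact = ('{prefix}/{path}'.format(prefix=prefix, path=path)
                if not path.startswith('/') else path[1:])
    print(artifact)
# public/build/target.tar.bz2
# releng/partner/target.zip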
Example 22
def make_task_description(config, jobs):
    # If no balrog release history, then don't generate partials
    if not config.params.get('release_history'):
        return
    for job in jobs:
        dep_job = job['primary-dependency']

        treeherder = inherit_treeherder_from_dep(job, dep_job)
        treeherder.setdefault('symbol', 'p(N)')

        label = job.get('label', "partials-{}".format(dep_job.label))

        dependencies = {dep_job.kind: dep_job.label}

        attributes = copy_attributes_from_dependent_job(dep_job)
        locale = dep_job.attributes.get('locale')
        if locale:
            attributes['locale'] = locale
            treeherder['symbol'] = "p({})".format(locale)
        attributes['shipping_phase'] = job['shipping-phase']

        build_locale = locale or 'en-US'

        build_platform = attributes['build_platform']
        builds = get_builds(config.params['release_history'], build_platform,
                            build_locale)

        # If the list is empty there's no available history for this platform
        # and locale combination, so we can't build any partials.
        if not builds:
            continue

        extra = {'funsize': {'partials': list()}}
        update_number = 1

        locale_suffix = ''
        if locale:
            locale_suffix = '{}/'.format(locale)
        artifact_path = "<{}/{}/{}target.complete.mar>".format(
            dep_job.kind,
            get_artifact_prefix(dep_job),
            locale_suffix,
        )
        for build in sorted(builds):
            partial_info = {
                'locale': build_locale,
                'from_mar': builds[build]['mar_url'],
                'to_mar': {
                    'artifact-reference': artifact_path
                },
                'branch': config.params['project'],
                'update_number': update_number,
                'dest_mar': build,
            }
            if 'product' in builds[build]:
                partial_info['product'] = builds[build]['product']
            if 'previousVersion' in builds[build]:
                partial_info['previousVersion'] = builds[build][
                    'previousVersion']
            if 'previousBuildNumber' in builds[build]:
                partial_info['previousBuildNumber'] = builds[build][
                    'previousBuildNumber']
            extra['funsize']['partials'].append(partial_info)
            update_number += 1

        level = config.params['level']

        worker = {
            'artifacts': _generate_task_output_files(dep_job, builds.keys(),
                                                     locale),
            'implementation': 'docker-worker',
            'docker-image': {'in-tree': 'funsize-update-generator'},
            'os': 'linux',
            'max-run-time': 3600 if 'asan' in dep_job.label else 900,
            'chain-of-trust': True,
            'taskcluster-proxy': True,
            'env': {
                'SIGNING_CERT': identify_desired_signing_keys(
                    config.params["project"],
                    config.params['release_product']),
                'EXTRA_PARAMS': '--arch={}'.format(architecture(build_platform)),
                'MAR_CHANNEL_ID': attributes['mar-channel-id'],
            },
        }
        if config.params.release_level() == 'staging':
            worker['env']['FUNSIZE_ALLOW_STAGING_PREFIXES'] = 'true'

        task = {
            'label': label,
            'description': "{} Partials".format(
                dep_job.task["metadata"]["description"]),
            'worker-type': 'b-linux',
            'dependencies': dependencies,
            'scopes': [],
            'attributes': attributes,
            'run-on-projects': dep_job.attributes.get('run_on_projects'),
            'treeherder': treeherder,
            'extra': extra,
            'worker': worker,
        }

        # We only want caching on linux/windows due to bug 1436977
        if int(level) == 3 \
                and any([build_platform.startswith(prefix) for prefix in ['linux', 'win']]):
            task['scopes'].append(
                'auth:aws-s3:read-write:tc-gp-private-1d-us-east-1/releng/mbsdiff-cache/'
            )

        yield task
Example 23
def generic_worker_add_artifacts(config, job, taskdesc):
    """ Adds an artifact directory to the task """
    # The path is the location on disk; it doesn't necessarily
    # mean the artifacts will be public or private; that is set via the name
    # attribute in add_artifacts.
    add_artifacts(config, job, taskdesc, path=get_artifact_prefix(taskdesc))
Example 24
def make_task_description(config, jobs):
    # If no balrog release history, then don't generate partials
    if not config.params.get('release_history'):
        return
    for job in jobs:
        dep_job = job['primary-dependency']

        treeherder = job.get('treeherder', {})
        treeherder.setdefault('symbol', 'p(N)')

        label = job.get('label', "partials-{}".format(dep_job.label))
        dep_th_platform = dep_job.task.get('extra', {}).get(
            'treeherder', {}).get('machine', {}).get('platform', '')

        treeherder.setdefault('platform', "{}/opt".format(dep_th_platform))
        treeherder.setdefault('kind', 'build')
        treeherder.setdefault('tier', 1)

        dependencies = {dep_job.kind: dep_job.label}

        attributes = copy_attributes_from_dependent_job(dep_job)
        locale = dep_job.attributes.get('locale')
        if locale:
            attributes['locale'] = locale
            treeherder['symbol'] = "p({})".format(locale)
        attributes['shipping_phase'] = job['shipping-phase']

        build_locale = locale or 'en-US'

        builds = get_builds(config.params['release_history'], dep_th_platform,
                            build_locale)

        # If the list is empty there's no available history for this platform
        # and locale combination, so we can't build any partials.
        if not builds:
            continue

        extra = {'funsize': {'partials': list()}}
        update_number = 1

        locale_suffix = ''
        if locale:
            locale_suffix = '{}/'.format(locale)
        artifact_path = "<{}/{}/{}target.complete.mar>".format(
            dep_job.kind,
            get_artifact_prefix(dep_job),
            locale_suffix,
        )
        for build in sorted(builds):
            partial_info = {
                'locale': build_locale,
                'from_mar': builds[build]['mar_url'],
                'to_mar': {
                    'artifact-reference': artifact_path
                },
                'platform': get_balrog_platform_name(dep_th_platform),
                'branch': config.params['project'],
                'update_number': update_number,
                'dest_mar': build,
            }
            if 'product' in builds[build]:
                partial_info['product'] = builds[build]['product']
            if 'previousVersion' in builds[build]:
                partial_info['previousVersion'] = builds[build]['previousVersion']
            if 'previousBuildNumber' in builds[build]:
                partial_info['previousBuildNumber'] = builds[build]['previousBuildNumber']
            extra['funsize']['partials'].append(partial_info)
            update_number += 1

        mar_channel_id = None
        if config.params['project'] == 'mozilla-beta':
            if 'devedition' in label:
                mar_channel_id = 'firefox-mozilla-aurora'
            else:
                mar_channel_id = 'firefox-mozilla-beta'
        elif config.params['project'] == 'mozilla-release':
            mar_channel_id = 'firefox-mozilla-release'
        elif 'esr' in config.params['project']:
            mar_channel_id = 'firefox-mozilla-esr'

        level = config.params['level']

        worker = {
            'artifacts': _generate_task_output_files(dep_job, builds.keys(), locale),
            'implementation': 'docker-worker',
            'docker-image': {'in-tree': 'funsize-update-generator'},
            'os': 'linux',
            'max-run-time': 3600 if 'asan' in dep_job.label else 600,
            'chain-of-trust': True,
            'taskcluster-proxy': True,
            'env': {
                'SHA1_SIGNING_CERT': 'nightly_sha1',
                'SHA384_SIGNING_CERT': 'nightly_sha384',
                'DATADOG_API_SECRET':
                    'project/releng/gecko/build/level-{}/datadog-api-key'.format(level),
                'EXTRA_PARAMS': '--arch={}'.format(
                    architecture(attributes['build_platform'])),
            },
        }
        if mar_channel_id:
            worker['env']['ACCEPTED_MAR_CHANNEL_IDS'] = mar_channel_id
        if config.params.release_level() == 'staging':
            worker['env']['FUNSIZE_ALLOW_STAGING_PREFIXES'] = 'true'

        task = {
            'label': label,
            'description': "{} Partials".format(dep_job.task["metadata"]["description"]),
            'worker-type': 'aws-provisioner-v1/gecko-%s-b-linux' % level,
            'dependencies': dependencies,
            'scopes': [
                'secrets:get:project/releng/gecko/build/level-%s/datadog-api-key' % level,
            ],
            'attributes': attributes,
            'run-on-projects': dep_job.attributes.get('run_on_projects'),
            'treeherder': treeherder,
            'extra': extra,
            'worker': worker,
        }

        # We only want caching on linux/windows due to bug 1436977
        if int(level) == 3 \
                and any(p in dep_th_platform for p in ('linux', 'windows')):
            task['scopes'].append(
                'auth:aws-s3:read-write:tc-gp-private-1d-us-east-1/releng/mbsdiff-cache/'
            )

        yield task
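
The project-to-channel branching above could also be table-driven; a sketch of an equivalent helper (same mapping, no behavior change intended):

MAR_CHANNELS = {
    'mozilla-beta': 'firefox-mozilla-beta',
    'mozilla-release': 'firefox-mozilla-release',
}

def guess_mar_channel_id(project, label):
    # Devedition builds on beta use the aurora MAR channel.
    if project == 'mozilla-beta' and 'devedition' in label:
        return 'firefox-mozilla-aurora'
    if 'esr' in project:
        return 'firefox-mozilla-esr'
    return MAR_CHANNELS.get(project)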
Example #25
def generate_upstream_artifacts(job, build_task_ref, repackage_task_ref,
                                repackage_signing_task_ref, platform,
                                repack_id, partner_path):

    upstream_artifacts = []
    artifact_prefix = get_artifact_prefix(job)

    if "linux" in platform:
        upstream_artifacts.append({
            "taskId": {"task-reference": build_task_ref},
            "taskType": "build",
            "paths": ["{}/{}/target.tar.bz2".format(artifact_prefix, repack_id)],
            "locale": partner_path,
        })
        upstream_artifacts.append({
            "taskId": {"task-reference": repackage_signing_task_ref},
            "taskType": "repackage",
            "paths": ["{}/{}/target.tar.bz2.asc".format(artifact_prefix, repack_id)],
            "locale": partner_path,
        })
    elif "macosx" in platform:
        upstream_artifacts.append({
            "taskId": {"task-reference": repackage_task_ref},
            "taskType": "repackage",
            "paths": ["{}/{}/target.dmg".format(artifact_prefix, repack_id)],
            "locale": partner_path,
        })
        upstream_artifacts.append({
            "taskId": {"task-reference": repackage_signing_task_ref},
            "taskType": "repackage",
            "paths": ["{}/{}/target.dmg.asc".format(artifact_prefix, repack_id)],
            "locale": partner_path,
        })
    elif "win" in platform:
        upstream_artifacts.append({
            "taskId": {"task-reference": repackage_signing_task_ref},
            "taskType": "repackage",
            "paths": ["{}/{}/target.installer.exe".format(artifact_prefix, repack_id)],
            "locale": partner_path,
        })
        upstream_artifacts.append({
            "taskId": {"task-reference": repackage_signing_task_ref},
            "taskType": "repackage",
            "paths": ["{}/{}/target.installer.exe.asc".format(
                artifact_prefix, repack_id)],
            "locale": partner_path,
        })

    if not upstream_artifacts:
        raise Exception("Couldn't find any upstream artifacts.")

    return upstream_artifacts
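
For a Linux platform the function returns the unsigned tarball plus its detached signature, shaped roughly like this -- a sketch assuming artifact_prefix == 'releng/partner', with invented task references and repack_id:

[
    {
        "taskId": {"task-reference": "<build>"},
        "taskType": "build",
        "paths": ["releng/partner/partner1/sub1/target.tar.bz2"],
        "locale": "partner1/sub1",
    },
    {
        "taskId": {"task-reference": "<repackage-signing>"},
        "taskType": "repackage",
        "paths": ["releng/partner/partner1/sub1/target.tar.bz2.asc"],
        "locale": "partner1/sub1",
    },
]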
Example #26
def make_job_description(config, jobs):
    for job in jobs:
        dep_job = job['primary-dependency']
        attributes = copy_attributes_from_dependent_job(dep_job)
        build_platform = attributes['build_platform']

        if job['build-platform'].startswith('win'):
            if dep_job.kind.endswith('signing'):
                continue
        if job['build-platform'].startswith('macosx'):
            if dep_job.kind.endswith('repack'):
                continue
        dependencies = {dep_job.attributes.get('kind'): dep_job.label}
        dependencies.update(dep_job.dependencies)

        signing_task = None
        for dependency in dependencies.keys():
            if build_platform.startswith('macosx') and dependency.endswith(
                    'signing'):
                signing_task = dependency
            elif build_platform.startswith('win') and dependency.endswith(
                    'repack'):
                signing_task = dependency

        attributes['repackage_type'] = 'repackage'

        level = config.params['level']
        repack_id = job['extra']['repack_id']

        repackage_config = []
        for format in job.get('package-formats'):
            command = copy.deepcopy(PACKAGE_FORMATS[format])
            substs = {
                'archive_format': archive_format(build_platform),
                'executable_extension': executable_extension(build_platform),
            }
            command['inputs'] = {
                name: filename.format(**substs)
                for name, filename in command['inputs'].items()
            }
            repackage_config.append(command)

        run = job.get('mozharness', {})
        run.update({
            'using': 'mozharness',
            'script': 'mozharness/scripts/repackage.py',
            'job-script': 'taskcluster/scripts/builder/repackage.sh',
            'actions': ['setup', 'repackage'],
            'extra-workspace-cache-key': 'repackage',
            'extra-config': {
                'repackage_config': repackage_config,
            },
        })

        worker = {
            'chain-of-trust': True,
            'max-run-time': 7200 if build_platform.startswith('win') else 3600,
            'taskcluster-proxy': bool(get_artifact_prefix(dep_job)),
            'env': {
                'REPACK_ID': repack_id,
            },
            # Don't add generic artifact directory.
            'skip-artifacts': True,
        }

        if build_platform.startswith('win'):
            worker_type = 'aws-provisioner-v1/gecko-%s-b-win2012' % level
            run['use-magic-mh-args'] = False
        else:
            if build_platform.startswith('macosx'):
                worker_type = 'aws-provisioner-v1/gecko-%s-b-linux' % level
            else:
                raise NotImplementedError(
                    'Unsupported build_platform: "{}"'.format(build_platform))

            run['tooltool-downloads'] = 'internal'
            worker['docker-image'] = {"in-tree": "debian7-amd64-build"}

        worker['artifacts'] = _generate_task_output_files(
            dep_job,
            worker_type_implementation(worker_type),
            repackage_config,
            partner=repack_id,
        )

        description = ("Repackaging for repack_id '{repack_id}' for build '"
                       "{build_platform}/{build_type}'".format(
                           repack_id=job['extra']['repack_id'],
                           build_platform=attributes.get('build_platform'),
                           build_type=attributes.get('build_type')))

        task = {
            'label': job['label'],
            'description': description,
            'worker-type': worker_type,
            'dependencies': dependencies,
            'attributes': attributes,
            'scopes': ['queue:get-artifact:releng/partner/*'],
            'run-on-projects': dep_job.attributes.get('run_on_projects'),
            'routes': job.get('routes', []),
            'extra': job.get('extra', {}),
            'worker': worker,
            'run': run,
            'fetches': _generate_download_config(
                dep_job,
                build_platform,
                signing_task,
                partner=repack_id,
                project=config.params["project"],
            ),
        }

        if build_platform.startswith('macosx'):
            task['toolchains'] = [
                'linux64-libdmg',
                'linux64-hfsplus',
                'linux64-node',
            ]
        yield task
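
PACKAGE_FORMATS itself is not shown in this example; the deepcopy-and-substitute loop above only relies on each entry carrying an 'inputs' mapping (whose filenames may use the substitution keys) and an 'output' name. A hypothetical entry, to illustrate the shape only:

# Hypothetical -- the real table lives with the repackage transform.
PACKAGE_FORMATS = {
    'dmg': {
        'inputs': {
            # '{archive_format}' is filled in per build_platform above.
            'input': 'target{archive_format}',
        },
        'output': 'target.dmg',
    },
}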
Example #27
def use_fetches(config, jobs):
    artifact_names = {}
    aliases = {}

    if config.kind in ('toolchain', 'fetch'):
        jobs = list(jobs)
        for job in jobs:
            run = job.get('run', {})
            label = job['label']
            get_attribute(
                artifact_names, label, run, 'toolchain-artifact')
            value = run.get('{}-alias'.format(config.kind))
            if value:
                aliases['{}-{}'.format(config.kind, value)] = label

    for task in config.kind_dependencies_tasks:
        if task.kind in ('fetch', 'toolchain'):
            get_attribute(
                artifact_names, task.label, task.attributes,
                '{kind}-artifact'.format(kind=task.kind),
            )
            value = task.attributes.get('{}-alias'.format(task.kind))
            if value:
                aliases['{}-{}'.format(task.kind, value)] = task.label

    artifact_prefixes = {}
    for job in order_tasks(config, jobs):
        artifact_prefixes[job["label"]] = get_artifact_prefix(job)

        fetches = job.pop("fetches", None)
        if not fetches:
            yield job
            continue

        job_fetches = []
        name = job.get('name', job.get('label'))
        dependencies = job.setdefault('dependencies', {})
        worker = job.setdefault('worker', {})
        prefix = get_artifact_prefix(job)
        for kind, artifacts in fetches.items():
            if kind in ('fetch', 'toolchain'):
                for fetch_name in artifacts:
                    label = '{kind}-{name}'.format(kind=kind, name=fetch_name)
                    label = aliases.get(label, label)
                    if label not in artifact_names:
                        raise Exception('Missing fetch job for {kind}-{name}: {fetch}'.format(
                            kind=config.kind, name=name, fetch=fetch_name))

                    path = artifact_names[label]

                    dependencies[label] = label
                    job_fetches.append({
                        'artifact': path,
                        'task': '<{label}>'.format(label=label),
                        'extract': True,
                    })

                    if kind == 'toolchain' and fetch_name.endswith('-sccache'):
                        job['needs-sccache'] = True
            else:
                if kind not in dependencies:
                    raise Exception("{name} can't fetch {kind} artifacts because "
                                    "it has no {kind} dependencies!".format(name=name, kind=kind))
                dep_label = dependencies[kind]
                if dep_label in artifact_prefixes:
                    prefix = artifact_prefixes[dep_label]
                else:
                    dep_tasks = [
                        task
                        for task in config.kind_dependencies_tasks
                        if task.label == dep_label
                    ]
                    if len(dep_tasks) != 1:
                        raise Exception(
                            "{name} can't fetch {kind} artifacts because "
                            "there are {tasks} with label {label} in kind dependencies!".format(
                                name=name,
                                kind=kind,
                                label=dependencies[kind],
                                tasks="no tasks"
                                if len(dep_tasks) == 0
                                else "multiple tasks",
                            )
                        )

                    prefix = get_artifact_prefix(dep_tasks[0])

                for artifact in artifacts:
                    if isinstance(artifact, text_type):
                        path = artifact
                        dest = None
                        extract = True
                    else:
                        path = artifact['artifact']
                        dest = artifact.get('dest')
                        extract = artifact.get('extract', True)

                    fetch = {
                        'artifact': '{prefix}/{path}'.format(prefix=prefix, path=path)
                                    if not path.startswith('/') else path[1:],
                        'task': '<{dep}>'.format(dep=kind),
                        'extract': extract,
                    }
                    if dest is not None:
                        fetch['dest'] = dest
                    job_fetches.append(fetch)

        job_artifact_prefixes = {
            mozpath.dirname(fetch["artifact"])
            for fetch in job_fetches
            if not fetch["artifact"].startswith("public/")
        }
        if job_artifact_prefixes:
            # Use taskcluster-proxy and request appropriate scope.  For example, add
            # 'scopes: [queue:get-artifact:path/to/*]' for 'path/to/artifact.tar.xz'.
            worker["taskcluster-proxy"] = True
            for prefix in sorted(job_artifact_prefixes):
                scope = "queue:get-artifact:{}/*".format(prefix)
                if scope not in job.setdefault("scopes", []):
                    job["scopes"].append(scope)

        env = worker.setdefault('env', {})
        env['MOZ_FETCHES'] = {
            'task-reference': six.ensure_text(json.dumps(job_fetches,
                                                         sort_keys=True))
        }
        # The path is normalized to an absolute path in run-task
        env.setdefault('MOZ_FETCHES_DIR', 'fetches')

        yield job
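
A minimal before/after sketch of this transform, assuming a toolchain job labelled 'toolchain-linux64-clang' that advertises the toolchain-artifact 'public/build/clang.tar.xz' (names invented):

# In the consuming job's definition (as parsed):
fetches = {
    'toolchain': ['linux64-clang'],
}
# After the transform, the worker env contains (reformatted here for
# readability; the real value is one compact JSON string):
# MOZ_FETCHES = {'task-reference':
#     '[{"artifact": "public/build/clang.tar.xz", '
#     '"extract": true, "task": "<toolchain-linux64-clang>"}]'}
# MOZ_FETCHES_DIR defaults to 'fetches'.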
Example #28
def use_fetches(config, jobs):
    artifact_names = {}
    aliases = {}

    if config.kind == 'toolchain':
        jobs = list(jobs)
        for job in jobs:
            run = job.get('run', {})
            label = 'toolchain-{}'.format(job['name'])
            get_attribute(
                artifact_names, label, run, 'toolchain-artifact')
            value = run.get('toolchain-alias')
            if value:
                aliases['toolchain-{}'.format(value)] = label

    for task in config.kind_dependencies_tasks:
        if task.kind in ('fetch', 'toolchain'):
            get_attribute(
                artifact_names, task.label, task.attributes,
                '{kind}-artifact'.format(kind=task.kind),
            )
            value = task.attributes.get('{}-alias'.format(task.kind))
            if value:
                aliases['{}-{}'.format(task.kind, value)] = task.label

    for job in jobs:
        fetches = job.pop('fetches', None)
        if not fetches:
            yield job
            continue

        job_fetches = []
        name = job.get('name', job.get('label'))
        dependencies = job.setdefault('dependencies', {})
        worker = job.setdefault('worker', {})
        prefix = get_artifact_prefix(job)
        for kind, artifacts in fetches.items():
            if kind in ('fetch', 'toolchain'):
                for fetch_name in artifacts:
                    label = '{kind}-{name}'.format(kind=kind, name=fetch_name)
                    label = aliases.get(label, label)
                    if label not in artifact_names:
                        raise Exception('Missing fetch job for {kind}-{name}: {fetch}'.format(
                            kind=config.kind, name=name, fetch=fetch_name))

                    path = artifact_names[label]
                    if not path.startswith('public/'):
                        # Use taskcluster-proxy and request appropriate scope.  For example, add
                        # 'scopes: [queue:get-artifact:path/to/*]' for 'path/to/artifact.tar.xz'.
                        worker['taskcluster-proxy'] = True
                        dirname = mozpath.dirname(path)
                        scope = 'queue:get-artifact:{}/*'.format(dirname)
                        if scope not in job.setdefault('scopes', []):
                            job['scopes'].append(scope)

                    dependencies[label] = label
                    job_fetches.append({
                        'artifact': path,
                        'task': '<{label}>'.format(label=label),
                        'extract': True,
                    })

                    if kind == 'toolchain' and fetch_name.endswith('-sccache'):
                        job['needs-sccache'] = True
            else:
                if kind not in dependencies:
                    raise Exception("{name} can't fetch {kind} artifacts because "
                                    "it has no {kind} dependencies!".format(name=name, kind=kind))

                for artifact in artifacts:
                    # six.string_types keeps this working on Python 3
                    # ('basestring' exists only on Python 2).
                    if isinstance(artifact, six.string_types):
                        path = artifact
                        dest = None
                        extract = True
                    else:
                        path = artifact['artifact']
                        dest = artifact.get('dest')
                        extract = artifact.get('extract', True)

                    fetch = {
                        'artifact': '{prefix}/{path}'.format(prefix=prefix, path=path)
                                    if not path.startswith('/') else path[1:],
                        'task': '<{dep}>'.format(dep=kind),
                        'extract': extract,
                    }
                    if dest is not None:
                        fetch['dest'] = dest
                    job_fetches.append(fetch)

        env = worker.setdefault('env', {})
        env['MOZ_FETCHES'] = {'task-reference': json.dumps(job_fetches, sort_keys=True)}
        # The path is normalized to an absolute path in run-task
        env.setdefault('MOZ_FETCHES_DIR', 'fetches')

        yield job
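
For a private artifact, the scope added above is simply the artifact's directory plus a wildcard:

import mozpack.path as mozpath  # assumed import behind 'mozpath' above

path = 'releng/partner/build/target.tar.gz'  # hypothetical private artifact
scope = 'queue:get-artifact:{}/*'.format(mozpath.dirname(path))
assert scope == 'queue:get-artifact:releng/partner/build/*'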
Example #29
def generate_upstream_artifacts(
    config, job, dependencies, platform, locale=None, project=None
):

    build_mapping = UPSTREAM_ARTIFACT_UNSIGNED_PATHS
    build_signing_mapping = UPSTREAM_ARTIFACT_SIGNED_PATHS
    repackage_mapping = UPSTREAM_ARTIFACT_REPACKAGE_PATHS
    repackage_signing_mapping = UPSTREAM_ARTIFACT_SIGNED_REPACKAGE_PATHS
    msi_signing_mapping = UPSTREAM_ARTIFACT_SIGNED_MSI_PATHS
    mar_signing_mapping = UPSTREAM_ARTIFACT_SIGNED_MAR_PATHS

    artifact_prefix = get_artifact_prefix(job)
    if locale:
        artifact_prefix = '{}/{}'.format(artifact_prefix, locale)
        platform = "{}-l10n".format(platform)

    upstream_artifacts = []

    for task_type, mapping in [
        ("build", build_mapping),
        ("signing", build_signing_mapping),
    ]:
        platform_was_previously_matched_by_regex = None
        # .items() works on both Python 2 and 3 (iteritems() is Python 2 only).
        for platform_regex, paths in mapping.items():
            if platform_regex.match(platform) is not None:
                _check_platform_matched_only_one_regex(
                    task_type, platform, platform_was_previously_matched_by_regex, platform_regex
                )
                platform_was_previously_matched_by_regex = platform_regex
                if paths:
                    usable_paths = paths[:]

                    if 'target.langpack.xpi' in usable_paths and \
                            not project == "mozilla-central":
                        # XXX This is only beetmoved for m-c nightlies.
                        # we should determine this better
                        usable_paths.remove('target.langpack.xpi')

                        if not len(usable_paths):
                            # We may have removed our only path.
                            continue

                    upstream_artifacts.append({
                        "taskId": {"task-reference": "<{}>".format(task_type)},
                        "taskType": task_type,
                        "paths": ["{}/{}".format(artifact_prefix, path) for path in usable_paths],
                        "locale": locale or "en-US",
                    })

    for task_type, cot_type, paths in [
        ('repackage', 'repackage', repackage_mapping),
        ('repackage-signing', 'repackage', repackage_signing_mapping),
        ('repackage-signing-msi', 'repackage', msi_signing_mapping),
        ('mar-signing', 'signing', mar_signing_mapping),
    ]:
        if task_type not in dependencies:
            continue

        paths = ["{}/{}".format(artifact_prefix, path) for path in paths]
        paths = [
            path for path in paths
            if path in dependencies[task_type].release_artifacts]

        if not paths:
            continue

        upstream_artifacts.append({
            "taskId": {"task-reference": "<{}>".format(task_type)},
            "taskType": cot_type,
            "paths": paths,
            "locale": locale or "en-US",
        })

    return upstream_artifacts
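
The *_PATHS tables are keyed by compiled platform regexes (hence platform_regex.match(platform) above). A hypothetical entry, to show the shape only:

import re

# Hypothetical -- the real patterns and paths live with the beetmover transform.
UPSTREAM_ARTIFACT_UNSIGNED_PATHS = {
    re.compile(r'^linux(64)?-nightly$'): ['target.tar.bz2', 'target.langpack.xpi'],
    re.compile(r'^win64-nightly$'): ['target.zip'],
}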
Example #30
def add_artifacts(config, job, taskdesc, path):
    taskdesc['worker'].setdefault('artifacts', []).append({
        'name': get_artifact_prefix(taskdesc),
        'path': path,
        'type': 'directory',
    })
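
A minimal usage sketch with a docker-worker style on-disk path, again assuming the default 'public/build' prefix:

taskdesc = {'worker': {}}
add_artifacts(config, job, taskdesc, path='/builds/worker/artifacts')
# taskdesc['worker']['artifacts'] == [{
#     'name': 'public/build',              # upload name, from get_artifact_prefix()
#     'path': '/builds/worker/artifacts',  # location on disk inside the worker
#     'type': 'directory',
# }]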