Example #1
0
def define_upstream_artifacts(config, jobs):
    """Attach 'upstream-artifacts' to each partner-repack signing job.

    Skips the whole kind when there is no partner config. For each job we
    collect the "partner/sub_partner/locale" repack ids that apply to the
    dependent task's build platform, then emit one upstream-artifacts entry
    per artifact specification, expanding every path template once per
    repack id.
    """
    partner_configs = get_partner_config_by_kind(config, config.kind)
    if not partner_configs:
        return

    for job in jobs:
        dep_job = job['dependent-task']
        build_platform = dep_job.attributes.get('build_platform')

        repack_ids = []
        # dict.items() rather than the Python-2-only dict.iteritems(), so
        # this transform also runs under Python 3.
        for partner, partner_config in partner_configs.items():
            for sub_partner, cfg in partner_config.items():
                # skip disabled sub-configs and platforms this config doesn't cover
                if not cfg or build_platform not in cfg["platforms"]:
                    continue
                for locale in locales_per_build_platform(build_platform, cfg.get('locales', [])):
                    repack_ids.append("{}/{}/{}".format(partner, sub_partner, locale))

        artifacts_specifications = generate_specifications_of_artifacts_to_sign(
            dep_job,
            keep_locale_template=True,
            kind=config.kind,
        )
        job['upstream-artifacts'] = [{
            'taskId': {'task-reference': '<{}>'.format(job['depname'])},
            'taskType': 'build',
            'paths': [
                # the repack id fills the {locale} slot in each path template
                path_template.format(locale=repack_id)
                for repack_id in repack_ids
                for path_template in spec['artifacts']
            ],
            'formats': spec['formats'],
        } for spec in artifacts_specifications]

        yield job
Example #2
0
def chunk_partners(config, jobs):
    """Fan partner-repack jobs out to one job per partner/sub_partner/locale.

    If the dependency's task already carries a repack_id (it was chunked
    upstream), propagate that id. Otherwise enumerate every applicable
    partner config entry for the build platform. In both paths
    _check_repack_ids_by_platform() filters out repack ids that do not
    apply to this platform.
    """
    partner_configs = get_partner_config_by_kind(config, config.kind)

    for job in jobs:
        dep_job = job['primary-dependency']
        build_platform = dep_job.attributes["build_platform"]
        # already chunked
        if dep_job.task.get('extra', {}).get('repack_id'):
            repack_id = dep_job.task['extra']['repack_id']
            if _check_repack_ids_by_platform(build_platform, repack_id):
                continue
            partner_job = copy.deepcopy(job)
            partner_job.setdefault('extra', {}).setdefault('repack_id', repack_id)
            yield partner_job
            continue
        # not already chunked
        # dict.items() rather than the Python-2-only dict.iteritems(), so
        # this transform also runs under Python 3.
        for partner, partner_config in partner_configs.items():
            for sub_partner, cfg in partner_config.items():
                if build_platform not in cfg.get("platforms", []):
                    continue
                locales = locales_per_build_platform(build_platform, cfg.get('locales', []))
                for locale in locales:
                    repack_id = "{}/{}/{}".format(partner, sub_partner, locale)
                    if _check_repack_ids_by_platform(build_platform, repack_id):
                        continue
                    partner_job = copy.deepcopy(job)  # don't overwrite dict values here
                    partner_job.setdefault('extra', {})
                    partner_job['extra']['repack_id'] = repack_id

                    yield partner_job
Example #3
0
def define_upstream_artifacts(config, jobs):
    """Attach 'upstream-artifacts' entries for signing to each job.

    The repack ids to expand come pre-computed on the job's extras; each
    artifact specification becomes one upstream-artifacts entry whose path
    templates are expanded once per repack id.
    """
    partner_configs = get_partner_config_by_kind(config, config.kind)
    if not partner_configs:
        return

    for job in jobs:
        dep_job = job['primary-dependency']
        repack_ids = job['extra']['repack_ids']
        specs = generate_specifications_of_artifacts_to_sign(
            dep_job,
            keep_locale_template=True,
            kind=config.kind,
        )
        upstream = []
        for spec in specs:
            # expand each path template once per repack id; the repack id
            # fills the {locale} slot
            paths = []
            for path_template in spec['artifacts']:
                for repack_id in repack_ids:
                    paths.append(path_template.format(locale=repack_id))
            upstream.append({
                'taskId': {'task-reference': '<{}>'.format(job['depname'])},
                'taskType': 'build',
                'paths': paths,
                'formats': spec['formats'],
            })
        job['upstream-artifacts'] = upstream

        yield job
Example #4
0
def define_upstream_artifacts(config, jobs):
    """Attach 'upstream-artifacts' entries for signing to each job.

    Also records the dependency's label and attributes on the job. The
    upstream taskType switches to 'scriptworker' when the dependency is a
    notarization task.
    """
    partner_configs = get_partner_config_by_kind(config, config.kind)
    if not partner_configs:
        return

    for job in jobs:
        dep_job = job['primary-dependency']
        job['depname'] = dep_job.label
        job['attributes'] = copy_attributes_from_dependent_job(dep_job)

        repack_ids = job['extra']['repack_ids']
        specs = generate_specifications_of_artifacts_to_sign(
            config,
            job,
            keep_locale_template=True,
            kind=config.kind,
        )
        # notarization tasks run on scriptworker rather than a build worker
        task_type = 'scriptworker' if 'notarization' in job['depname'] else 'build'
        upstream_artifacts = []
        for spec in specs:
            upstream_artifacts.append({
                'taskId': {'task-reference': '<{}>'.format(dep_job.kind)},
                'taskType': task_type,
                'paths': [
                    # the repack id fills the {locale} slot in the template
                    tmpl.format(locale=rid)
                    for tmpl in spec['artifacts']
                    for rid in repack_ids
                ],
                'formats': spec['formats'],
            })
        job['upstream-artifacts'] = upstream_artifacts

        yield job
Example #5
0
def chunk_partners(config, jobs):
    """Fan partner-repack jobs out into per-chunk or per-repack-id jobs.

    Four mutually exclusive cases, decided by what the dependency's task
    already carries:
    - 'copy-repack-ids' on the job: propagate the dependency's repack_ids
      verbatim (the dependency must already have them).
    - neither repack_id nor repack_ids yet: this is the first downstream of
      the repack task; either chunk (the mac signing/notarization kinds
      listed below) or fan out one job per repack id.
    - repack_ids present: a chunked upstream; fan out one job per repack id.
    - repack_id present: already fully fanned out; pass the id along.
    """
    partner_configs = get_partner_config_by_kind(config, config.kind)

    for job in jobs:
        dep_job = job['primary-dependency']
        build_platform = dep_job.attributes["build_platform"]
        # what the upstream task has already decided, if anything
        repack_id = dep_job.task.get('extra', {}).get('repack_id')
        repack_ids = dep_job.task.get('extra', {}).get('repack_ids')
        copy_repack_ids = job.pop('copy-repack-ids', False)

        if copy_repack_ids:
            assert repack_ids, "dep_job {} doesn't have repack_ids!".format(
                dep_job.label)
            job.setdefault('extra', {})['repack_ids'] = repack_ids
            yield job
        # first downstream of the repack task, no chunking or fanout has been done yet
        elif not any([repack_id, repack_ids]):
            platform_repack_ids = _get_repack_ids_by_platform(
                partner_configs, build_platform)
            # we chunk mac signing
            if config.kind in ("release-partner-repack-signing",
                               "release-eme-free-repack-signing",
                               "release-partner-repack-notarization-part-1",
                               "release-eme-free-repack-notarization-part-1"):
                # NOTE(review): assumes 'repacks-per-chunk' is set for these
                # kinds; a missing value would make divmod raise — confirm
                # against the kind configs.
                repacks_per_chunk = job.get('repacks-per-chunk')
                # round up so a remainder gets its own, smaller chunk
                chunks, remainder = divmod(len(platform_repack_ids),
                                           repacks_per_chunk)
                if remainder:
                    chunks = int(chunks + 1)
                for this_chunk in range(1, chunks + 1):
                    chunk = chunkify(platform_repack_ids, this_chunk, chunks)
                    partner_job = copy.deepcopy(job)
                    partner_job.setdefault('extra',
                                           {}).setdefault('repack_ids', chunk)
                    # suffix distinguishes the chunked task labels
                    partner_job['extra']['repack_suffix'] = str(this_chunk)
                    yield partner_job
            # linux and windows we fan out immediately to one task per partner-sub_partner-locale
            else:
                for repack_id in platform_repack_ids:
                    partner_job = copy.deepcopy(
                        job)  # don't overwrite dict values here
                    partner_job.setdefault('extra', {})
                    partner_job['extra']['repack_id'] = repack_id
                    yield partner_job
        # fan out chunked mac signing for repackage
        elif repack_ids:
            for repack_id in repack_ids:
                partner_job = copy.deepcopy(job)
                partner_job.setdefault('extra',
                                       {}).setdefault('repack_id', repack_id)
                yield partner_job
        # otherwise we've fully fanned out already, continue by passing repack_id on
        else:
            partner_job = copy.deepcopy(job)
            partner_job.setdefault('extra',
                                   {}).setdefault('repack_id', repack_id)
            yield partner_job
def split_public_and_private(config, jobs):
    """Route each beetmover job to the public or the private bucket.

    Partners whose sub-config sets 'upload_to_candidates' go to the public
    candidates bucket scope; everyone else gets the partner's private
    bucket scope.
    """
    public_bucket_scope = get_beetmover_bucket_scope(config)
    partner_config = get_partner_config_by_kind(config, config.kind)

    for job in jobs:
        partner_bucket_scope = add_scope_prefix(config, job["partner-bucket-scope"])
        # repack_id is "partner/subpartner/locale"
        partner, subpartner, _ = job["extra"]["repack_id"].split("/")

        is_public = bool(
            partner_config[partner][subpartner].get("upload_to_candidates"))
        scope = public_bucket_scope if is_public else partner_bucket_scope
        yield populate_scopes_and_worker_type(
            config, job, scope, partner_public=is_public
        )
Example #7
0
def add_command_arguments(config, tasks):
    """Fill in the repack script's command arguments for each task.

    Computes the union of all partner locales once (used to limit staging
    repacks), then per task sets version/build-number/platform options,
    optional limit-locale and partner filters, and exports the upstream
    task ids and the release type through the worker environment.
    """
    release_config = get_release_config(config)

    # staging releases - pass reduced set of locales to the repacking script
    all_locales = set()
    partner_config = get_partner_config_by_kind(config, config.kind)
    for partner in partner_config.values():
        for sub_partner in partner.values():
            all_locales.update(sub_partner.get("locales", []))

    for task in tasks:
        # add the MOZHARNESS_OPTIONS, eg version=61.0, build-number=1, platform=win64
        build_platform = task["attributes"]["build_platform"]
        if not build_platform.endswith("-shippable"):
            raise Exception(
                "Unexpected partner repack platform: {}".format(build_platform))
        platform = build_platform.partition("-shippable")[0]
        options = [
            "version={}".format(release_config["version"]),
            "build-number={}".format(release_config["build_number"]),
            "platform={}".format(platform),
        ]
        if task["extra"]["limit-locales"]:
            options.extend(
                "limit-locale={}".format(locale) for locale in all_locales)
        if "partner" in config.kind and config.params["release_partners"]:
            options.extend(
                "partner={}".format(p)
                for p in config.params["release_partners"])
        task["run"]["options"] = options

        # The upstream taskIds are stored a special environment variable, because we want to use
        # task-reference's to resolve dependencies, but the string handling of MOZHARNESS_OPTIONS
        # blocks that. It's space-separated string of ids in the end.
        task["worker"]["env"]["UPSTREAM_TASKIDS"] = {
            "task-reference":
            " ".join("<{}>".format(dep) for dep in task["dependencies"])
        }

        # Forward the release type for bouncer product construction
        task["worker"]["env"]["RELEASE_TYPE"] = config.params["release_type"]

        yield task
Example #8
0
def add_command_arguments(config, tasks):
    """Fill in the repack script's command arguments for each task.

    Computes the union of all partner locales once (used to limit staging
    repacks), then per task sets version/build-number/platform options,
    optional limit-locale and partner filters, and exports the upstream
    task ids and the release type through the worker environment.
    """
    release_config = get_release_config(config)

    # staging releases - pass reduced set of locales to the repacking script
    all_locales = set()
    partner_config = get_partner_config_by_kind(config, config.kind)
    for partner in partner_config.values():
        for sub_partner in partner.values():
            all_locales.update(sub_partner.get('locales', []))

    for task in tasks:
        # add the MOZHARNESS_OPTIONS, eg version=61.0, build-number=1, platform=win64
        if not task['attributes']['build_platform'].endswith('-shippable'):
            raise Exception(
                "Unexpected partner repack platform: {}".format(
                    task['attributes']['build_platform'], ), )
        # strip the "-shippable" suffix to get the bare platform name
        platform = task['attributes']['build_platform'].partition(
            '-shippable')[0]
        task['run']['options'] = [
            'version={}'.format(release_config['version']),
            'build-number={}'.format(release_config['build_number']),
            'platform={}'.format(platform),
        ]
        if task['extra']['limit-locales']:
            for locale in all_locales:
                task['run']['options'].append('limit-locale={}'.format(locale))
        if 'partner' in config.kind and config.params['release_partners']:
            for partner in config.params['release_partners']:
                task['run']['options'].append('partner={}'.format(partner))

        # The upstream taskIds are stored a special environment variable, because we want to use
        # task-reference's to resolve dependencies, but the string handling of MOZHARNESS_OPTIONS
        # blocks that. It's space-separated string of ids in the end.
        task['worker']['env']['UPSTREAM_TASKIDS'] = {
            'task-reference':
            ' '.join(['<{}>'.format(dep) for dep in task['dependencies']])
        }

        # Forward the release type for bouncer product construction
        task['worker']['env']['RELEASE_TYPE'] = config.params['release_type']

        yield task
def split_public_and_private(config, jobs):
    """Emit beetmover jobs for the private partner bucket, plus a public
    copy for partners configured with 'upload_to_candidates'.

    NOTE(review): the private yield is unconditional, so a public-eligible
    partner produces TWO jobs here (one public, one private) — this differs
    from an either/or split; confirm that is the intended behavior.
    """
    public_bucket_scope = get_beetmover_bucket_scope(config)
    partner_config = get_partner_config_by_kind(config, config.kind)

    for job in jobs:
        partner_bucket_scope = add_scope_prefix(config,
                                                job['partner-bucket-scope'])
        # repack_id is "partner/subpartner/locale"
        partner, subpartner, _ = job['extra']['repack_id'].split('/')

        # public
        if partner_config[partner][subpartner].get('upload_to_candidates'):
            yield populate_scopes_and_worker_type(config,
                                                  job,
                                                  public_bucket_scope,
                                                  partner_public=True)
        # private
        yield populate_scopes_and_worker_type(config,
                                              job,
                                              partner_bucket_scope,
                                              partner_public=False)
def chunk_partners(config, jobs):
    """Fan partner-repack jobs out to one job per partner/sub_partner/locale.

    Every partner config entry that covers the dependency's build platform
    contributes one deep-copied job per locale, tagged with its
    "partner/sub_partner/locale" repack_id.
    """
    partner_configs = get_partner_config_by_kind(config, config.kind)

    for job in jobs:
        dep_job = job['dependent-task']
        build_platform = dep_job.attributes["build_platform"]
        # dict.items() rather than the Python-2-only dict.iteritems(), so
        # this transform also runs under Python 3.
        for partner, partner_config in partner_configs.items():
            for sub_partner, cfg in partner_config.items():
                if build_platform not in cfg.get("platforms", []):
                    continue
                locales = locales_per_build_platform(build_platform,
                                                     cfg.get('locales', []))
                for locale in locales:
                    repack_id = "{}/{}/{}".format(partner, sub_partner, locale)

                    partner_job = copy.deepcopy(
                        job)  # don't overwrite dict values here
                    partner_job.setdefault('extra', {})
                    partner_job['extra']['repack_id'] = repack_id

                    yield partner_job
Example #11
0
def define_upstream_artifacts(config, jobs):
    """Attach 'upstream-artifacts' entries for signing to each job.

    Also records the dependency's label and attributes on the job. The
    repack ids to expand come pre-computed on the job's extras; the
    upstream taskType switches to 'scriptworker' when the dependency is a
    notarization task.
    """
    partner_configs = get_partner_config_by_kind(config, config.kind)
    if not partner_configs:
        return

    for job in jobs:
        dep_job = job["primary-dependency"]
        job["depname"] = dep_job.label
        job["attributes"] = copy_attributes_from_dependent_job(dep_job)

        repack_ids = job["extra"]["repack_ids"]
        artifacts_specifications = generate_specifications_of_artifacts_to_sign(
            config,
            job,
            keep_locale_template=True,
            kind=config.kind,
        )
        # notarization tasks run on scriptworker rather than a build worker
        task_type = "build"
        if "notarization" in job["depname"]:
            task_type = "scriptworker"
        job["upstream-artifacts"] = [{
            "taskId": {
                "task-reference": "<{}>".format(dep_job.kind)
            },
            "taskType":
            task_type,
            "paths": [
                # the repack id fills the {locale} slot in each template
                path_template.format(locale=repack_id)
                for path_template in spec["artifacts"]
                for repack_id in repack_ids
            ],
            "formats":
            spec["formats"],
        } for spec in artifacts_specifications]

        yield job
Example #12
0
def make_repackage_signing_description(config, jobs):
    """Build repackage-signing task descriptions for partner repacks.

    Per platform the upstream artifacts differ: Windows signs the
    installer (plus the stub installer for win32 partners that opt in),
    mac signs the dmg, and linux signs the repack tarball via the repack
    job found through the chunking-dummy's dependencies.
    """
    for job in jobs:
        dep_job = job['primary-dependency']
        repack_id = dep_job.task['extra']['repack_id']
        attributes = dep_job.attributes
        build_platform = dep_job.attributes.get('build_platform')
        # prefer the explicit 'nightly' flag; fall back to 'shippable'
        is_nightly = dep_job.attributes.get(
            'nightly', dep_job.attributes.get('shippable'))

        # Mac & windows
        label = dep_job.label.replace("repackage-", "repackage-signing-")
        # Linux
        label = label.replace("chunking-dummy-", "repackage-signing-")
        description = (
            "Signing of repackaged artifacts for partner repack id '{repack_id}' for build '"
            "{build_platform}/{build_type}'".format(
                repack_id=repack_id,
                build_platform=attributes.get('build_platform'),
                build_type=attributes.get('build_type')))

        if 'linux' in build_platform:
            # we want the repack job, via the dependencies for the chunking-dummy dep_job
            # NOTE(review): if no dependency starts with
            # 'release-partner-repack', `dependencies` stays unbound and the
            # task dict below raises NameError — confirm that can't happen.
            for dep in dep_job.dependencies.values():
                if dep.startswith('release-partner-repack'):
                    dependencies = {"repack": dep}
                    break
        else:
            # we have a genuine repackage job as our parent
            dependencies = {"repackage": dep_job.label}

        # replace the raw dep attributes with a filtered copy before tagging
        attributes = copy_attributes_from_dependent_job(dep_job)
        attributes['repackage_type'] = 'repackage-signing'

        signing_cert_scope = get_signing_cert_scope_per_platform(
            build_platform, is_nightly, config)
        scopes = [signing_cert_scope]

        if 'win' in build_platform:
            upstream_artifacts = [{
                "taskId": {
                    "task-reference": "<repackage>"
                },
                "taskType":
                "repackage",
                "paths": [
                    get_artifact_path(
                        dep_job, "{}/target.installer.exe".format(repack_id)),
                ],
                "formats": ["autograph_authenticode", "autograph_gpg"]
            }]

            # win32 partners may also ship a stub installer
            partner_config = get_partner_config_by_kind(config, config.kind)
            partner, subpartner, _ = repack_id.split('/')
            repack_stub_installer = partner_config[partner][subpartner].get(
                'repack_stub_installer')
            if build_platform.startswith('win32') and repack_stub_installer:
                upstream_artifacts.append({
                    "taskId": {
                        "task-reference": "<repackage>"
                    },
                    "taskType":
                    "repackage",
                    "paths": [
                        get_artifact_path(
                            dep_job,
                            "{}/target.stub-installer.exe".format(repack_id)),
                    ],
                    "formats": ["autograph_authenticode", "autograph_gpg"]
                })
        elif 'mac' in build_platform:
            upstream_artifacts = [{
                "taskId": {
                    "task-reference": "<repackage>"
                },
                "taskType":
                "repackage",
                "paths": [
                    get_artifact_path(dep_job,
                                      "{}/target.dmg".format(repack_id)),
                ],
                "formats": ["autograph_gpg"]
            }]
        elif 'linux' in build_platform:
            upstream_artifacts = [{
                "taskId": {
                    "task-reference": "<repack>"
                },
                "taskType":
                "repackage",
                "paths": [
                    get_artifact_path(dep_job,
                                      "{}/target.tar.bz2".format(repack_id)),
                ],
                "formats": ["autograph_gpg"]
            }]

        task = {
            'label': label,
            'description': description,
            'worker-type': 'linux-signing',
            'worker': {
                'implementation': 'scriptworker-signing',
                'upstream-artifacts': upstream_artifacts,
                'max-run-time': 3600
            },
            'scopes': scopes,
            'dependencies': dependencies,
            'attributes': attributes,
            'run-on-projects': dep_job.attributes.get('run_on_projects'),
            'extra': {
                'repack_id': repack_id,
            }
        }
        # we may have reduced the priority for partner jobs, otherwise task.py will set it
        if job.get('priority'):
            task['priority'] = job['priority']

        yield task
def make_job_description(config, jobs):
    """Build repackage task descriptions for partner repacks.

    Windows repackages depend on the signed build (signing deps are
    skipped); mac repackages depend on signing (repack deps are skipped).
    Per package format the PACKAGE_FORMATS command template is expanded
    with platform-specific substitutions, and the worker/run sections are
    assembled per platform (win native vs mac-on-linux docker).
    """
    for job in jobs:
        dep_job = job['primary-dependency']
        attributes = copy_attributes_from_dependent_job(dep_job)
        build_platform = attributes['build_platform']

        # only keep the dependency kind relevant to each platform
        if job['build-platform'].startswith('win'):
            if dep_job.kind.endswith('signing'):
                continue
        if job['build-platform'].startswith('macosx'):
            if dep_job.kind.endswith('repack'):
                continue
        dependencies = {dep_job.attributes.get('kind'): dep_job.label}
        dependencies.update(dep_job.dependencies)

        # locate the upstream task whose artifacts we repackage
        signing_task = None
        for dependency in dependencies.keys():
            if build_platform.startswith('macosx') and dependency.endswith(
                    'signing'):
                signing_task = dependency
            elif build_platform.startswith('win') and dependency.endswith(
                    'repack'):
                signing_task = dependency

        attributes['repackage_type'] = 'repackage'

        repack_id = job['extra']['repack_id']

        # win32 partners may also ship a stub installer
        partner_config = get_partner_config_by_kind(config, config.kind)
        partner, subpartner, _ = repack_id.split('/')
        repack_stub_installer = partner_config[partner][subpartner].get(
            'repack_stub_installer')
        if build_platform.startswith('win32') and repack_stub_installer:
            job['package-formats'].append('installer-stub')

        # expand each package-format command template with platform substitutions
        repackage_config = []
        for format in job.get('package-formats'):
            command = copy.deepcopy(PACKAGE_FORMATS[format])
            substs = {
                'archive_format': archive_format(build_platform),
                'executable_extension': executable_extension(build_platform),
            }
            command['inputs'] = {
                name: filename.format(**substs)
                for name, filename in command['inputs'].items()
            }
            repackage_config.append(command)

        run = job.get('mozharness', {})
        run.update({
            'using': 'mozharness',
            'script': 'mozharness/scripts/repackage.py',
            'job-script': 'taskcluster/scripts/builder/repackage.sh',
            'actions': ['setup', 'repackage'],
            'extra-workspace-cache-key': 'repackage',
            'extra-config': {
                'repackage_config': repackage_config,
            },
        })

        worker = {
            'chain-of-trust': True,
            # windows repackaging is slower, so gets double the time budget
            'max-run-time': 7200 if build_platform.startswith('win') else 3600,
            'taskcluster-proxy':
            True if get_artifact_prefix(dep_job) else False,
            'env': {
                'REPACK_ID': repack_id,
            },
            # Don't add generic artifact directory.
            'skip-artifacts': True,
        }

        # mac repackaging runs on linux docker workers; anything else is unsupported
        if build_platform.startswith('win'):
            worker_type = 'b-win2012'
            run['use-magic-mh-args'] = False
        else:
            if build_platform.startswith('macosx'):
                worker_type = 'b-linux'
            else:
                raise NotImplementedError(
                    'Unsupported build_platform: "{}"'.format(build_platform))

            run['tooltool-downloads'] = 'internal'
            worker['docker-image'] = {"in-tree": "debian7-amd64-build"}

        worker['artifacts'] = _generate_task_output_files(
            dep_job,
            worker_type_implementation(config.graph_config, worker_type),
            repackage_config,
            partner=repack_id,
        )

        description = ("Repackaging for repack_id '{repack_id}' for build '"
                       "{build_platform}/{build_type}'".format(
                           repack_id=job['extra']['repack_id'],
                           build_platform=attributes.get('build_platform'),
                           build_type=attributes.get('build_type')))

        task = {
            'label':
            job['label'],
            'description':
            description,
            'worker-type':
            worker_type,
            'dependencies':
            dependencies,
            'attributes':
            attributes,
            'scopes': ['queue:get-artifact:releng/partner/*'],
            'run-on-projects':
            dep_job.attributes.get('run_on_projects'),
            'routes':
            job.get('routes', []),
            'extra':
            job.get('extra', {}),
            'worker':
            worker,
            'run':
            run,
            'fetches':
            _generate_download_config(
                dep_job,
                build_platform,
                signing_task,
                partner=repack_id,
                project=config.params["project"],
                repack_stub_installer=repack_stub_installer),
        }

        # we may have reduced the priority for partner jobs, otherwise task.py will set it
        if job.get('priority'):
            task['priority'] = job['priority']
        if build_platform.startswith('macosx'):
            # tooling needed to (re)build the dmg on linux
            task['toolchains'] = [
                'linux64-libdmg',
                'linux64-hfsplus',
                'linux64-node',
            ]
        yield task
def make_task_worker(config, jobs):
    """Assemble the beetmover worker definition for partner repack jobs.

    Resolves the build/repackage/repackage-signing dependency references,
    renders the partner destination path from the job's public or private
    path template (jobs whose template is None are dropped), and attaches
    the beetmover worker with its upstream artifacts.
    """
    for job in jobs:
        platform = job["attributes"]["build_platform"]
        repack_id = job["extra"]["repack_id"]
        # repack_id is "partner/subpartner/locale"
        partner, subpartner, locale = job['extra']['repack_id'].split('/')
        partner_config = get_partner_config_by_kind(config, config.kind)
        repack_stub_installer = partner_config[partner][subpartner].get(
            'repack_stub_installer')
        build_task = None
        repackage_task = None
        repackage_signing_task = None

        # classify deps; anything that isn't repackage(-signing) counts as the build
        for dependency in job["dependencies"].keys():
            if 'repackage-signing' in dependency:
                repackage_signing_task = dependency
            elif 'repackage' in dependency:
                repackage_task = dependency
            else:
                build_task = "build"

        # task-reference placeholders resolved by taskgraph later
        build_task_ref = "<" + str(build_task) + ">"
        repackage_task_ref = "<" + str(repackage_task) + ">"
        repackage_signing_task_ref = "<" + str(repackage_signing_task) + ">"

        # generate the partner path; we'll send this to beetmover as the "locale"
        ftp_platform = get_ftp_platform(platform)
        repl_dict = {
            "build_number":
            config.params['build_number'],
            "locale":
            locale,
            "partner":
            partner,
            "platform":
            ftp_platform,
            "release_partner_build_number":
            config.params['release_partner_build_number'],
            "subpartner":
            subpartner,
            "version":
            config.params['version'],
        }
        partner_public = job['partner_public']
        if partner_public:
            partner_path_key = 'partner-public-path'
        else:
            partner_path_key = 'partner-private-path'
        # Kinds can set these to None
        if not job[partner_path_key]:
            continue
        partner_path = job[partner_path_key].format(**repl_dict)
        # strip the transform-only keys before the job becomes a task
        del (job['partner_public'])
        del (job['partner-private-path'])
        del (job['partner-public-path'])
        del (job['partner-bucket-scope'])

        worker = {
            'implementation':
            'beetmover',
            'release-properties':
            craft_release_properties(config, job),
            'upstream-artifacts':
            generate_upstream_artifacts(job, build_task_ref,
                                        repackage_task_ref,
                                        repackage_signing_task_ref, platform,
                                        repack_id, partner_path,
                                        repack_stub_installer),
            'partner-public':
            partner_public,
        }
        job["worker"] = worker

        yield job
Example #15
0
def make_job_description(config, jobs):
    """Build repackage task descriptions for partner repacks.

    Windows repackages depend on the signed build (signing deps are
    skipped); mac repackages depend on signing (repack deps are skipped).
    Per package format the PACKAGE_FORMATS command template is expanded
    with platform-specific substitutions; all repackaging runs on linux
    docker workers in this version.
    """
    for job in jobs:
        dep_job = job["primary-dependency"]
        attributes = copy_attributes_from_dependent_job(dep_job)
        build_platform = attributes["build_platform"]

        # only keep the dependency kind relevant to each platform
        if job["build-platform"].startswith("win"):
            if dep_job.kind.endswith("signing"):
                continue
        if job["build-platform"].startswith("macosx"):
            if dep_job.kind.endswith("repack"):
                continue
        dependencies = {dep_job.attributes.get("kind"): dep_job.label}
        dependencies.update(dep_job.dependencies)

        # locate the upstream task whose artifacts we repackage
        signing_task = None
        for dependency in dependencies.keys():
            if build_platform.startswith("macosx") and dependency.endswith(
                    "signing"):
                signing_task = dependency
            elif build_platform.startswith("win") and dependency.endswith(
                    "repack"):
                signing_task = dependency

        attributes["repackage_type"] = "repackage"

        repack_id = job["extra"]["repack_id"]

        # win32 partners may also ship a stub installer
        partner_config = get_partner_config_by_kind(config, config.kind)
        partner, subpartner, _ = repack_id.split("/")
        repack_stub_installer = partner_config[partner][subpartner].get(
            "repack_stub_installer")
        if build_platform.startswith("win32") and repack_stub_installer:
            job["package-formats"].append("installer-stub")

        # expand each package-format command template with platform substitutions
        repackage_config = []
        for format in job.get("package-formats"):
            command = copy.deepcopy(PACKAGE_FORMATS[format])
            substs = {
                "archive_format": archive_format(build_platform),
                "executable_extension": executable_extension(build_platform),
            }
            command["inputs"] = {
                name: filename.format(**substs)
                for name, filename in command["inputs"].items()
            }
            repackage_config.append(command)

        run = job.get("mozharness", {})
        run.update({
            "using": "mozharness",
            "script": "mozharness/scripts/repackage.py",
            "job-script": "taskcluster/scripts/builder/repackage.sh",
            "actions": ["setup", "repackage"],
            "extra-config": {
                "repackage_config": repackage_config,
            },
        })

        worker = {
            "chain-of-trust": True,
            # windows repackaging is slower, so gets double the time budget
            "max-run-time": 7200 if build_platform.startswith("win") else 3600,
            "taskcluster-proxy":
            True if get_artifact_prefix(dep_job) else False,
            "env": {
                "REPACK_ID": repack_id,
            },
            # Don't add generic artifact directory.
            "skip-artifacts": True,
        }

        # all partner repackaging runs in docker on linux workers
        worker_type = "b-linux"
        worker["docker-image"] = {"in-tree": "debian10-amd64-build"}

        worker["artifacts"] = _generate_task_output_files(
            dep_job,
            worker_type_implementation(config.graph_config, worker_type),
            repackage_config,
            partner=repack_id,
        )

        description = ("Repackaging for repack_id '{repack_id}' for build '"
                       "{build_platform}/{build_type}'".format(
                           repack_id=job["extra"]["repack_id"],
                           build_platform=attributes.get("build_platform"),
                           build_type=attributes.get("build_type"),
                       ))

        task = {
            "label":
            job["label"],
            "description":
            description,
            "worker-type":
            worker_type,
            "dependencies":
            dependencies,
            "attributes":
            attributes,
            "scopes": ["queue:get-artifact:releng/partner/*"],
            "run-on-projects":
            dep_job.attributes.get("run_on_projects"),
            "routes":
            job.get("routes", []),
            "extra":
            job.get("extra", {}),
            "worker":
            worker,
            "run":
            run,
            "fetches":
            _generate_download_config(
                dep_job,
                build_platform,
                signing_task,
                partner=repack_id,
                project=config.params["project"],
                repack_stub_installer=repack_stub_installer,
            ),
        }

        # we may have reduced the priority for partner jobs, otherwise task.py will set it
        if job.get("priority"):
            task["priority"] = job["priority"]
        if build_platform.startswith("macosx"):
            # tooling needed to (re)build the dmg on linux
            task.setdefault("fetches", {}).setdefault("toolchain", []).extend([
                "linux64-libdmg",
                "linux64-hfsplus",
                "linux64-node",
            ])
        yield task
def make_repackage_signing_description(config, jobs):
    """Generate signing task descriptions for partner repackage artifacts.

    For each job, derive the signing label, dependencies and
    upstream-artifacts from the primary dependency (a repackage task on
    mac/windows, a chunking-dummy task on linux) based on the build
    platform, then yield a scriptworker-signing task description.

    Raises an Exception when a linux dep_job has no
    release-partner-repack dependency, or when the build platform is not
    one of win/mac/linux (previously both cases surfaced as a confusing
    NameError on an unbound local).
    """
    for job in jobs:
        dep_job = job["primary-dependency"]
        repack_id = dep_job.task["extra"]["repack_id"]
        attributes = dep_job.attributes
        build_platform = dep_job.attributes.get("build_platform")
        is_shippable = dep_job.attributes.get("shippable")

        # Mac & windows
        label = dep_job.label.replace("repackage-", "repackage-signing-")
        # Linux
        label = label.replace("chunking-dummy-", "repackage-signing-")
        description = "Signing of repackaged artifacts for partner repack id '{repack_id}' for build '" "{build_platform}/{build_type}'".format(  # NOQA: E501
            repack_id=repack_id,
            build_platform=attributes.get("build_platform"),
            build_type=attributes.get("build_type"),
        )

        if "linux" in build_platform:
            # we want the repack job, via the dependencies for the chunking-dummy dep_job
            for dep in dep_job.dependencies.values():
                if dep.startswith("release-partner-repack"):
                    dependencies = {"repack": dep}
                    break
            else:
                # fail loudly instead of hitting a NameError on `dependencies` below
                raise Exception(
                    "Can't find release-partner-repack dependency for {}".format(
                        dep_job.label))
        else:
            # we have a genuine repackage job as our parent
            dependencies = {"repackage": dep_job.label}

        attributes = copy_attributes_from_dependent_job(dep_job)
        attributes["repackage_type"] = "repackage-signing"

        signing_cert_scope = get_signing_cert_scope_per_platform(
            build_platform, is_shippable, config)
        scopes = [signing_cert_scope]

        if "win" in build_platform:
            upstream_artifacts = [{
                "taskId": {
                    "task-reference": "<repackage>"
                },
                "taskType":
                "repackage",
                "paths": [
                    get_artifact_path(
                        dep_job, "{}/target.installer.exe".format(repack_id)),
                ],
                "formats": ["autograph_authenticode", "autograph_gpg"],
            }]

            partner_config = get_partner_config_by_kind(config, config.kind)
            partner, subpartner, _ = repack_id.split("/")
            repack_stub_installer = partner_config[partner][subpartner].get(
                "repack_stub_installer")
            # win32 partners may additionally ship a stub installer
            if build_platform.startswith("win32") and repack_stub_installer:
                upstream_artifacts.append({
                    "taskId": {
                        "task-reference": "<repackage>"
                    },
                    "taskType":
                    "repackage",
                    "paths": [
                        get_artifact_path(
                            dep_job,
                            "{}/target.stub-installer.exe".format(repack_id),
                        ),
                    ],
                    "formats": ["autograph_authenticode", "autograph_gpg"],
                })
        elif "mac" in build_platform:
            upstream_artifacts = [{
                "taskId": {
                    "task-reference": "<repackage>"
                },
                "taskType":
                "repackage",
                "paths": [
                    get_artifact_path(dep_job,
                                      "{}/target.dmg".format(repack_id)),
                ],
                "formats": ["autograph_gpg"],
            }]
        elif "linux" in build_platform:
            upstream_artifacts = [{
                "taskId": {
                    "task-reference": "<repack>"
                },
                "taskType":
                "repackage",
                "paths": [
                    get_artifact_path(dep_job,
                                      "{}/target.tar.bz2".format(repack_id)),
                ],
                "formats": ["autograph_gpg"],
            }]
        else:
            # fail loudly instead of hitting a NameError on `upstream_artifacts` below
            raise Exception(
                "Unsupported build_platform {} for repackage-signing".format(
                    build_platform))

        task = {
            "label": label,
            "description": description,
            "worker-type": "linux-signing",
            "worker": {
                "implementation": "scriptworker-signing",
                "upstream-artifacts": upstream_artifacts,
                "max-run-time": 3600,
            },
            "scopes": scopes,
            "dependencies": dependencies,
            "attributes": attributes,
            "run-on-projects": dep_job.attributes.get("run_on_projects"),
            "extra": {
                "repack_id": repack_id,
            },
        }
        # we may have reduced the priority for partner jobs, otherwise task.py will set it
        if job.get("priority"):
            task["priority"] = job["priority"]

        yield task
示例#17
0
def add_command_arguments(config, tasks):
    """Resolve attribution inputs and wire them into the attribution tasks.

    Walks the partner attribution config, locates the upstream
    repackage-signing task for every (partner, platform, locale)
    combination, and accumulates dependencies, fetches, attribution
    script config and release artifacts. Each incoming task is then
    yielded with those accumulated values applied. Yields nothing when
    no attributions are configured.
    """
    wanted_partners = config.params.get("release_partners")
    attribution_config = get_partner_config_by_kind(config, config.kind)

    dependencies = {}
    fetches = defaultdict(set)
    attributions = []
    release_artifacts = []

    for cfg in attribution_config.get("configs", []):
        # we might only be interested in a subset of all partners, eg for a respin
        if wanted_partners and cfg["campaign"] not in wanted_partners:
            continue
        code = generate_attribution_code(attribution_config["defaults"], cfg)
        for platform in cfg["platforms"]:
            stage_platform = platform.replace("-shippable", "")
            for locale in cfg["locales"]:
                # find the upstream, throw away locales we don't have, somehow. Skip ?
                if locale == "en-US":
                    upstream_label = "repackage-signing-{platform}/opt".format(
                        platform=platform)
                    upstream_artifact = "target.installer.exe"
                else:
                    upstream_label = "repackage-signing-l10n-{locale}-{platform}/opt".format(
                        locale=locale, platform=platform)
                    upstream_artifact = "{locale}/target.installer.exe".format(
                        locale=locale)
                if upstream_label not in config.kind_dependencies_tasks:
                    raise Exception(
                        "Can't find upstream task for {} {}".format(
                            platform, locale))
                upstream = config.kind_dependencies_tasks[upstream_label]

                # set the dependencies to just what we need rather than all of l10n
                dependencies[upstream.label] = upstream.label

                fetches[upstream_label].add(
                    (upstream_artifact, stage_platform, locale))

                artifact_part = "{platform}/{locale}/target.installer.exe".format(
                    platform=stage_platform, locale=locale)
                artifact = "releng/partner/{partner}/{sub_partner}/{artifact_part}".format(
                    partner=cfg["campaign"],
                    sub_partner=cfg["content"],
                    artifact_part=artifact_part,
                )
                # config for script
                # TODO - generalise input & output ??
                #  add releng/partner prefix via get_artifact_prefix..()
                attributions.append({
                    "input":
                    "/builds/worker/fetches/{}".format(artifact_part),
                    "output":
                    "/builds/worker/artifacts/{}".format(artifact),
                    "attribution":
                    code,
                })
                release_artifacts.append(artifact)

    # bail-out early if we don't have any attributions to do
    if not attributions:
        return

    for task in tasks:
        worker = task.get("worker", {})
        worker["chain-of-trust"] = True

        task.setdefault("dependencies", {}).update(dependencies)
        task.setdefault("fetches", {})
        for upstream_label, fetch_entries in fetches.items():
            entries = []
            for upstream_artifact, platform, locale in fetch_entries:
                entries.append({
                    "artifact":
                    upstream_artifact,
                    "dest":
                    "{platform}/{locale}".format(platform=platform, locale=locale),
                    "extract":
                    False,
                    "verify-hash":
                    True,
                })
            task["fetches"][upstream_label] = entries
        worker.setdefault("env", {})["ATTRIBUTION_CONFIG"] = six.ensure_text(
            json.dumps(attributions, sort_keys=True))
        worker["artifacts"] = [{
            "name": "releng/partner",
            "path": "/builds/worker/artifacts/releng/partner",
            "type": "directory",
        }]
        task["release-artifacts"] = release_artifacts
        task["label"] = config.kind

        yield task
示例#18
0
def split_public_and_private(config, jobs):
    # we need to separate private vs public destinations because beetmover supports one
    # in a single task. Only use a single task for each type though.
    partner_config = get_partner_config_by_kind(config, config.kind)
    for job in jobs:
        primary_dep = job["primary-dependency"]
        upstream_artifacts = primary_dep.release_artifacts
        attribution_task_ref = "<{}>".format(primary_dep.label)
        prefix = get_artifact_prefix(primary_dep)

        # bucket each artifact as public or private, keyed by whether the
        # matching partner config opts into candidates uploads
        artifacts = defaultdict(list)
        for artifact in upstream_artifacts:
            relative = artifact.replace(prefix + "/", "")
            partner, sub_partner, platform, locale, _ = relative.split("/", 4)
            matching_configs = [
                p for p in partner_config["configs"]
                if (p["campaign"] == partner and p["content"] == sub_partner)
            ]
            if matching_configs[0].get("upload_to_candidates"):
                destination = "public"
            else:
                destination = "private"
            artifacts[destination].append(
                (artifact, partner, sub_partner, platform, locale))

        action_scope = add_scope_prefix(config,
                                        "beetmover:action:push-to-partner")
        public_bucket_scope = get_beetmover_bucket_scope(config)
        partner_bucket_scope = add_scope_prefix(config,
                                                job["partner-bucket-scope"])
        repl_dict = {
            "build_number":
            config.params["build_number"],
            "release_partner_build_number":
            config.params["release_partner_build_number"],
            "version":
            config.params["version"],
            "partner":
            "{partner}",  # we'll replace these later, per artifact
            "subpartner":
            "{subpartner}",
            "platform":
            "{platform}",
            "locale":
            "{locale}",
        }

        # emit at most one job per destination type
        for destination, destination_artifacts in artifacts.items():
            split_job = deepcopy(job)

            is_public = destination == "public"
            split_job["partner_public"] = is_public
            if is_public:
                split_job["scopes"] = [public_bucket_scope, action_scope]
            else:
                split_job["scopes"] = [partner_bucket_scope, action_scope]

            partner_path_key = "partner-{destination}-path".format(
                destination=destination)
            partner_path = split_job[partner_path_key].format(**repl_dict)
            worker = split_job.setdefault("worker", {})
            worker["upstream-artifacts"] = generate_upstream_artifacts(
                attribution_task_ref, destination_artifacts, partner_path)

            yield split_job