def add_l10n_dependencies(config, jobs):
    """Attach per-locale repackaged-build fetches for multilingual Lightning.

    Every locale gets a Linux 64-bit build artifact, except ja-JP-mac,
    which gets an OS X (dmg) build artifact instead.
    """
    for job in jobs:
        # Linux64: one fetch entry per locale, spread over the nightly-l10n chunks.
        linux_locales = sorted(
            parse_locales_file(job["locales-file"], platform="linux64").keys())
        num_chunks, leftover = divmod(
            len(linux_locales), job["locales-per-chunk"])
        if leftover:
            num_chunks = int(num_chunks + 1)
        for chunk_index in range(1, num_chunks + 1):
            label = "unsigned-repack-%d" % chunk_index
            job["dependencies"][label] = (
                "nightly-l10n-linux64-shippable-%d/opt" % chunk_index)
            job["fetches"][label] = [
                {"artifact": "%s/target.tar.bz2" % locale, "dest": locale}
                for locale in chunkify(linux_locales, chunk_index, num_chunks)
            ]

        # macOS: only ja-JP-mac is fetched; find which chunk contains it.
        mac_locales = sorted(
            parse_locales_file(job["locales-file"], platform="macosx64").keys())
        num_chunks, leftover = divmod(
            len(mac_locales), job["locales-per-chunk"])
        if leftover:
            num_chunks = int(num_chunks + 1)
        for chunk_index in range(1, num_chunks + 1):
            if "ja-JP-mac" in chunkify(mac_locales, chunk_index, num_chunks):
                label = "unsigned-repack-mac"
                job["dependencies"][label] = (
                    "nightly-l10n-macosx64-shippable-%d/opt" % chunk_index)
                job["fetches"][label] = [
                    {"artifact": "ja-JP-mac/target.dmg", "dest": "ja-JP-mac"}
                ]

        del job["locales-file"]
        del job["locales-per-chunk"]
        yield job
def chunk_locales(config, jobs):
    """Utilizes chunking for l10n stuff.

    When a job specifies 'chunks', yield one copy of the job per chunk,
    each covering an even slice of the job's locales; otherwise yield the
    job with every locale appended to its mozharness options.
    """
    for job in jobs:
        chunks = job.get('chunks')
        all_locales = job['attributes']['all_locales']
        if chunks:
            for this_chunk in range(1, chunks + 1):
                chunked = copy.deepcopy(job)
                # Make the per-chunk task name unique by inserting the
                # chunk number before the first '/'.
                chunked['name'] = chunked['name'].replace(
                    '/', '-{}/'.format(this_chunk), 1
                )
                chunked['mozharness']['options'] = chunked['mozharness'].get(
                    'options', [])
                # (was: a dead "my_locales = []" assignment, immediately
                # overwritten — removed)
                my_locales = chunkify(all_locales, this_chunk, chunks)
                chunked['mozharness']['options'].extend([
                    "locale={}".format(locale) for locale in my_locales
                ])
                chunked['attributes']['l10n_chunk'] = str(this_chunk)
                chunked['attributes']['chunk_locales'] = my_locales

                # add the chunk number to the TH symbol
                group, symbol = split_symbol(
                    chunked.get('treeherder', {}).get('symbol', ''))
                symbol += str(this_chunk)
                # setdefault guards jobs without a treeherder section, which
                # would otherwise raise KeyError on this write.
                chunked.setdefault('treeherder', {})['symbol'] = join_symbol(
                    group, symbol)
                yield chunked
        else:
            job['mozharness']['options'] = job['mozharness'].get('options', [])
            job['mozharness']['options'].extend([
                "locale={}".format(locale) for locale in all_locales
            ])
            yield job
def chunk_locales(config, jobs):
    """Utilizes chunking for l10n stuff.

    When a job specifies 'chunks', yield one copy of the job per chunk,
    each covering an even slice of the job's locales (capping the chunk
    count at the number of locales); otherwise yield the job with every
    locale appended to its mozharness options.
    """
    for job in jobs:
        chunks = job.get('chunks')
        all_locales = job['attributes']['all_locales']
        if chunks:
            if chunks > len(all_locales):
                # Reduce chunks down to the number of locales
                chunks = len(all_locales)
            for this_chunk in range(1, chunks + 1):
                chunked = copy.deepcopy(job)
                # Make the per-chunk task name unique by inserting the
                # chunk number before the first '/'.
                chunked['name'] = chunked['name'].replace(
                    '/', '-{}/'.format(this_chunk), 1)
                chunked['mozharness']['options'] = chunked['mozharness'].get(
                    'options', [])
                # (was: a dead "my_locales = []" assignment, immediately
                # overwritten — removed)
                my_locales = chunkify(all_locales, this_chunk, chunks)
                chunked['mozharness']['options'].extend(
                    ["locale={}".format(locale) for locale in my_locales])
                chunked['attributes']['l10n_chunk'] = str(this_chunk)
                chunked['attributes']['chunk_locales'] = my_locales

                # add the chunk number to the TH symbol
                group, symbol = split_symbol(
                    chunked.get('treeherder', {}).get('symbol', ''))
                symbol += str(this_chunk)
                # setdefault guards jobs without a treeherder section, which
                # would otherwise raise KeyError on this write.
                chunked.setdefault('treeherder', {})['symbol'] = join_symbol(
                    group, symbol)
                yield chunked
        else:
            job['mozharness']['options'] = job['mozharness'].get('options', [])
            job['mozharness']['options'].extend(
                ["locale={}".format(locale) for locale in all_locales])
            yield job
def chunk_partners(config, jobs):
    """Fan partner-repack jobs out across partner repack ids.

    Depending on what the primary dependency's task carries in
    ``extra.repack_id`` / ``extra.repack_ids``, each incoming job is either
    copied through, chunked (mac signing/notarization kinds), fanned out to
    one job per repack id, or passed along with its single repack_id.
    """
    for job in jobs:
        dep_job = job["primary-dependency"]
        build_platform = dep_job.attributes["build_platform"]
        # repack_id / repack_ids are mutually exclusive markers of how far
        # upstream fanout has already progressed.
        repack_id = dep_job.task.get("extra", {}).get("repack_id")
        repack_ids = dep_job.task.get("extra", {}).get("repack_ids")
        copy_repack_ids = job.pop("copy-repack-ids", False)

        if copy_repack_ids:
            # Propagate the dependency's repack_ids verbatim onto this job.
            assert repack_ids, "dep_job {} doesn't have repack_ids!".format(
                dep_job.label)
            job.setdefault("extra", {})["repack_ids"] = repack_ids
            yield job
        # first downstream of the repack task, no chunking or fanout has been done yet
        elif not any([repack_id, repack_ids]):
            platform_repack_ids = get_repack_ids_by_platform(
                config, build_platform)
            # we chunk mac signing
            if config.kind in (
                "release-partner-repack-signing",
                "release-eme-free-repack-signing",
                "release-partner-repack-notarization-part-1",
                "release-eme-free-repack-notarization-part-1",
            ):
                repacks_per_chunk = job.get("repacks-per-chunk")
                # Round the chunk count up so a remainder gets its own chunk.
                chunks, remainder = divmod(len(platform_repack_ids),
                                           repacks_per_chunk)
                if remainder:
                    chunks = int(chunks + 1)
                for this_chunk in range(1, chunks + 1):
                    chunk = chunkify(platform_repack_ids, this_chunk, chunks)
                    partner_job = copy.deepcopy(job)
                    partner_job.setdefault("extra", {}).setdefault(
                        "repack_ids", chunk)
                    partner_job["extra"]["repack_suffix"] = str(this_chunk)
                    yield partner_job
            # linux and windows we fan out immediately to one task per partner-sub_partner-locale
            else:
                for repack_id in platform_repack_ids:
                    partner_job = copy.deepcopy(
                        job)  # don't overwrite dict values here
                    partner_job.setdefault("extra", {})
                    partner_job["extra"]["repack_id"] = repack_id
                    yield partner_job
        # fan out chunked mac signing for repackage
        elif repack_ids:
            for repack_id in repack_ids:
                partner_job = copy.deepcopy(job)
                partner_job.setdefault("extra", {}).setdefault("repack_id",
                                                               repack_id)
                yield partner_job
        # otherwise we've fully fanned out already, continue by passing repack_id on
        else:
            partner_job = copy.deepcopy(job)
            partner_job.setdefault("extra", {}).setdefault("repack_id",
                                                           repack_id)
            yield partner_job
def chunk_partners(config, jobs):
    """Fan partner-repack jobs out across partner repack ids.

    Depending on what the primary dependency's task carries in
    ``extra.repack_id`` / ``extra.repack_ids``, each incoming job is either
    chunked (mac signing kinds), fanned out to one job per repack id, or
    passed along with its single repack_id.
    """
    partner_configs = get_partner_config_by_kind(config, config.kind)
    for job in jobs:
        dep_job = job['primary-dependency']
        build_platform = dep_job.attributes["build_platform"]
        # repack_id / repack_ids mark how far upstream fanout has progressed.
        repack_id = dep_job.task.get('extra', {}).get('repack_id')
        repack_ids = dep_job.task.get('extra', {}).get('repack_ids')
        # first downstream of the repack task, no chunking or fanout has been done yet
        if not any([repack_id, repack_ids]):
            platform_repack_ids = _get_repack_ids_by_platform(
                partner_configs, build_platform)
            # we chunk mac signing
            if config.kind in ("release-partner-repack-signing",
                               "release-eme-free-repack-signing"):
                repacks_per_chunk = job.get('repacks-per-chunk')
                # Round the chunk count up so a remainder gets its own chunk.
                chunks, remainder = divmod(len(platform_repack_ids),
                                           repacks_per_chunk)
                if remainder:
                    chunks = int(chunks + 1)
                for this_chunk in range(1, chunks + 1):
                    chunk = chunkify(platform_repack_ids, this_chunk, chunks)
                    partner_job = copy.deepcopy(job)
                    partner_job.setdefault('extra', {}).setdefault(
                        'repack_ids', chunk)
                    partner_job['extra']['repack_suffix'] = str(this_chunk)
                    yield partner_job
            # linux and windows we fan out immediately to one task per partner-sub_partner-locale
            else:
                for repack_id in platform_repack_ids:
                    # _check_repack_ids_by_platform presumably filters out
                    # ids not valid for this platform — TODO confirm.
                    if _check_repack_ids_by_platform(build_platform,
                                                    repack_id):
                        continue
                    partner_job = copy.deepcopy(
                        job)  # don't overwrite dict values here
                    partner_job.setdefault('extra', {})
                    partner_job['extra']['repack_id'] = repack_id
                    yield partner_job
        # fan out chunked mac signing for repackage
        elif repack_ids:
            for repack_id in repack_ids:
                if _check_repack_ids_by_platform(build_platform, repack_id):
                    continue
                partner_job = copy.deepcopy(job)
                partner_job.setdefault('extra', {}).setdefault('repack_id',
                                                               repack_id)
                yield partner_job
        # otherwise we've fully fanned out already, continue by passing repack_id on
        else:
            if _check_repack_ids_by_platform(build_platform, repack_id):
                continue
            partner_job = copy.deepcopy(job)
            partner_job.setdefault('extra', {}).setdefault('repack_id',
                                                           repack_id)
            yield partner_job
def chunk_locales(config, jobs):
    """Split l10n jobs into chunks of at most 'locales-per-chunk' locales.

    Jobs without 'locales-per-chunk' pass through unchanged except that
    every locale:changeset pair is appended to their mozharness options.
    """
    for job in jobs:
        per_chunk = job.get('locales-per-chunk')
        changesets_by_locale = job['attributes'][
            'all_locales_with_changesets']
        if not per_chunk:
            opts = job['mozharness'].get('options', [])
            opts.extend(
                'locale={}:{}'.format(locale, changeset)
                for locale, changeset in sorted(changesets_by_locale.items()))
            job['mozharness']['options'] = opts
            yield job
            continue
        # Round up so a remainder of locales gets its own chunk.
        n_chunks, leftover = divmod(len(changesets_by_locale), per_chunk)
        if leftover:
            n_chunks = int(n_chunks + 1)
        # chunkify needs a sequence, not a dict; sort for determinism.
        ordered_pairs = sorted(changesets_by_locale.items())
        for index in range(1, n_chunks + 1):
            chunk_job = copy.deepcopy(job)
            # Insert the chunk number before the first '/' so names stay unique.
            chunk_job['name'] = chunk_job['name'].replace(
                '/', '-{}/'.format(index), 1)
            chunk_job['mozharness']['options'] = chunk_job['mozharness'].get(
                'options', [])
            pairs = chunkify(ordered_pairs, index, n_chunks)
            chunk_job['mozharness']['options'].extend(
                'locale={}:{}'.format(locale, changeset)
                for locale, changeset in pairs)
            chunk_job['attributes']['l10n_chunk'] = str(index)
            # strip revision — attributes carry bare locale names only
            chunk_job['attributes']['chunk_locales'] = [
                locale for locale, _ in pairs]
            # add the chunk number to the TH symbol
            group, symbol = split_symbol(
                chunk_job.get('treeherder', {}).get('symbol', ''))
            chunk_job['treeherder']['symbol'] = join_symbol(
                group, symbol + str(index))
            yield chunk_job
def chunk_locales(config, jobs):
    """Split l10n jobs into chunks of at most "locales-per-chunk" locales.

    Jobs without "locales-per-chunk" pass through unchanged except that
    every locale:changeset pair is appended to their mozharness options.
    """
    for job in jobs:
        per_chunk = job.get("locales-per-chunk")
        changesets_by_locale = job["attributes"][
            "all_locales_with_changesets"]
        if not per_chunk:
            options = job["mozharness"].get("options", [])
            options.extend(
                "locale={}:{}".format(locale, changeset)
                for locale, changeset in sorted(changesets_by_locale.items()))
            job["mozharness"]["options"] = options
            yield job
            continue
        # Round up so a remainder of locales gets its own chunk.
        n_chunks, leftover = divmod(len(changesets_by_locale), per_chunk)
        if leftover:
            n_chunks = int(n_chunks + 1)
        # chunkify needs a sequence, not a dict; sort for determinism.
        ordered_pairs = sorted(changesets_by_locale.items())
        for index in range(1, n_chunks + 1):
            chunk_job = copy.deepcopy(job)
            # Insert the chunk number before the first '/' so names stay unique.
            chunk_job["name"] = chunk_job["name"].replace(
                "/", "-{}/".format(index), 1)
            chunk_job["mozharness"]["options"] = chunk_job["mozharness"].get(
                "options", [])
            pairs = chunkify(ordered_pairs, index, n_chunks)
            chunk_job["mozharness"]["options"].extend(
                "locale={}:{}".format(locale, changeset)
                for locale, changeset in pairs)
            chunk_job["attributes"]["l10n_chunk"] = str(index)
            # strip revision — attributes carry bare locale names only
            chunk_job["attributes"]["chunk_locales"] = [
                locale for locale, _ in pairs]
            # add the chunk number to the TH symbol
            chunk_job["treeherder"]["symbol"] = add_suffix(
                chunk_job["treeherder"]["symbol"], index)
            yield chunk_job
def chunk_locales(config, jobs):
    """Utilizes chunking for l10n stuff.

    When a job specifies 'chunks', yield one copy of the job per chunk,
    each covering an even slice of the job's locale:changeset pairs
    (capping the chunk count at the number of locales); otherwise yield
    the job with every locale:changeset pair appended to its mozharness
    options.
    """
    for job in jobs:
        chunks = job.get('chunks')
        locales_with_changesets = job['attributes']['all_locales_with_changesets']
        if chunks:
            if chunks > len(locales_with_changesets):
                # Reduce chunks down to the number of locales
                chunks = len(locales_with_changesets)
            for this_chunk in range(1, chunks + 1):
                chunked = copy.deepcopy(job)
                # Make the per-chunk task name unique by inserting the
                # chunk number before the first '/'.
                chunked['name'] = chunked['name'].replace(
                    '/', '-{}/'.format(this_chunk), 1
                )
                chunked['mozharness']['options'] = chunked['mozharness'].get('options', [])
                # chunkify doesn't work with dicts — and on Python 3 a bare
                # .items() view isn't indexable either, so materialize a
                # list; sorting makes the locale-to-chunk assignment
                # deterministic.
                locales_with_changesets_as_list = sorted(
                    locales_with_changesets.items())
                chunked_locales = chunkify(locales_with_changesets_as_list,
                                           this_chunk, chunks)
                chunked['mozharness']['options'].extend([
                    'locale={}:{}'.format(locale, changeset)
                    for locale, changeset in chunked_locales
                ])
                chunked['attributes']['l10n_chunk'] = str(this_chunk)
                # strip revision
                chunked['attributes']['chunk_locales'] = [locale for locale, _ in chunked_locales]

                # add the chunk number to the TH symbol
                group, symbol = split_symbol(
                    chunked.get('treeherder', {}).get('symbol', ''))
                symbol += str(this_chunk)
                chunked['treeherder']['symbol'] = join_symbol(group, symbol)
                yield chunked
        else:
            job['mozharness']['options'] = job['mozharness'].get('options', [])
            job['mozharness']['options'].extend([
                'locale={}:{}'.format(locale, changeset)
                for locale, changeset in sorted(locales_with_changesets.items())
            ])
            yield job