async def move_partner_beets(context, manifest):
    """Upload every partner-repack artifact to S3 concurrently.

    Walks ``context.artifacts_to_beetmove`` (a locale -> {full_path: source}
    mapping), schedules one S3 upload per artifact, and — for public partner
    tasks — records per-artifact checksums/sizes in ``context.checksums``.
    Raises if any of the scheduled uploads failed.
    """
    upload_futures = []
    for locale, artifacts in context.artifacts_to_beetmove.items():
        for full_path_artifact, source in artifacts.items():
            destination = get_destination_for_partner_repack_path(
                context, manifest, full_path_artifact, locale)
            upload_futures.append(asyncio.ensure_future(
                upload_to_s3(context=context, s3_key=destination, path=source)))

            if is_partner_public_task(context):
                # we trim the full destination to the part after
                # candidates/{version}-candidates/build{build_number}/
                artifact_pretty_name = destination[destination.find(locale):]
                if context.checksums.get(artifact_pretty_name) is None:
                    digests = {
                        algo: get_hash(source, algo)
                        for algo in context.config["checksums_digests"]
                    }
                    digests["size"] = get_size(source)
                    context.checksums[artifact_pretty_name] = digests

    await raise_future_exceptions(upload_futures)
async def move_beet(context, source, destinations, locale,
                    update_balrog_manifest, balrog_format, from_buildid,
                    artifact_pretty_name):
    """Upload one artifact and record its checksums and balrog metadata.

    Uploads ``source`` to every destination (with retries), memoizes the
    artifact's digests and size in ``context.checksums``, and — when
    ``update_balrog_manifest`` is set — files the generated balrog info under
    ``partialInfo`` (partial update, ``from_buildid`` given) or
    ``completeInfo[balrog_format]`` in ``context.raw_balrog_manifest``.
    """
    await retry_upload(context=context, destinations=destinations, path=source)

    # Only compute digests once per artifact name.
    if context.checksums.get(artifact_pretty_name) is None:
        entry = {
            algo: get_hash(source, algo)
            for algo in context.config['checksums_digests']
        }
        entry['size'] = get_size(source)
        context.checksums[artifact_pretty_name] = entry

    if not update_balrog_manifest:
        return

    locale_manifest = context.raw_balrog_manifest.setdefault(locale, {})
    balrog_info = generate_balrog_info(
        context, artifact_pretty_name, locale, destinations, from_buildid,
    )
    if from_buildid:
        locale_manifest.setdefault('partialInfo', []).append(balrog_info)
    else:
        locale_manifest.setdefault('completeInfo', {})[balrog_format] = balrog_info
def enrich_balrog_manifest(context, path, locale, destinations):
    """Build the balrog submission record for one uploaded artifact.

    Args:
        context: script context; ``context.properties`` supplies the build
            metadata (appName, appVersion, branch, buildid, hashType,
            stage_platform).
        path: local file path of the artifact (hashed/sized here).
        locale: artifact locale; 'multi' is normalized to 'en-US'.
        destinations: S3 keys the artifact was uploaded to; the first one is
            used to build the public archive.mozilla.org URL.

    Returns:
        dict ready to be appended to the balrog manifest.
    """
    props = context.properties

    if props["branch"] == 'date':
        # nightlies from dev branches don't usually upload to archive.m.o but
        # in this particular case we're gradually rolling out in the
        # archive.m.o under the latest-date corresponding bucket subfolder
        url = "{prefix}/{s3_key}".format(prefix="https://archive.mozilla.org",
                                        s3_key=destinations[0])
        url_replacements = []
    else:
        # we extract the dated destination as the 'latest' is useless
        url = "{prefix}/{s3_key}".format(prefix="https://archive.mozilla.org",
                                        s3_key=destinations[0])
        # FIX: each replacement must be a [from, to] pair; the original code
        # had a single string containing a comma, so consumers iterating
        # (src, dst) pairs would break or silently do nothing.
        url_replacements = [['http://archive.mozilla.org/pub',
                             'http://download.cdn.mozilla.net/pub']]

    return {
        "tc_nightly": True,
        "completeInfo": [{
            "hash": get_hash(path, hash_type=props["hashType"]),
            "size": os.path.getsize(path),
            "url": url
        }],
        "appName": props["appName"],
        "appVersion": props["appVersion"],
        "branch": props["branch"],
        "buildid": props["buildid"],
        "extVersion": props["appVersion"],
        "hashType": props["hashType"],
        # 'multi' locale repacks are served to balrog as en-US
        "locale": locale if not locale == 'multi' else 'en-US',
        "platform": props["stage_platform"],
        "url_replacements": url_replacements
    }
def test_get_hash():
    """get_hash must produce the known SHA-1 of the generated file.

    The original test accepted two digests, the second of which
    ('da39a3ee...') is the SHA-1 of the EMPTY byte string — i.e. it passed
    even when the tempfile's buffered writes never reached disk. We now
    flush explicitly and assert the single real digest.
    """
    correct_sha1 = "cb8aa4802996ac8de0436160e7bc0c79b600c222"
    text = b"Hello world from beetmoverscript!"
    with tempfile.NamedTemporaryFile(delete=True) as fp:
        # we generate a file by repeatedly appending the `text` to make sure we
        # overcome the HASH_BLOCK_SIZE chunk digest update line
        count = int(HASH_BLOCK_SIZE / len(text)) * 2
        for _ in range(count):
            fp.write(text)
        # get_hash reopens the file by name, so the buffered writes must be
        # flushed to disk first — otherwise it may hash a partial/empty file.
        fp.flush()
        sha1digest = get_hash(fp.name, hash_type="sha1")
    assert sha1digest == correct_sha1
def test_get_hash():
    """get_hash must produce the known SHA-1 of the generated file.

    FIX: flush the NamedTemporaryFile before hashing it by name — get_hash
    reads the file from disk, and without a flush the Python-level write
    buffer may not have been written yet, yielding a wrong (partial-file)
    digest.
    """
    correct_sha1 = 'cb8aa4802996ac8de0436160e7bc0c79b600c222'
    text = b'Hello world from beetmoverscript!'
    with tempfile.NamedTemporaryFile(delete=True) as fp:
        # we generate a file by repeatedly appending the `text` to make sure we
        # overcome the HASH_BLOCK_SIZE chunk digest update line
        count = int(HASH_BLOCK_SIZE / len(text)) * 2
        for _ in range(count):
            fp.write(text)
        fp.flush()  # make the buffered bytes visible to the reopened file
        sha1digest = get_hash(fp.name, hash_type="sha1")
    assert correct_sha1 == sha1digest
async def move_beet(context, source, destinations, locale,
                    update_balrog_manifest, artifact_pretty_name):
    """Upload one artifact, record its checksums, and extend the balrog manifest.

    Uploads ``source`` to each destination (with retries). The first time an
    ``artifact_pretty_name`` is seen, its digests (one per configured algorithm)
    and size are cached in ``context.checksums``. When
    ``update_balrog_manifest`` is truthy, an enriched balrog record is appended
    to ``context.balrog_manifest``.
    """
    await retry_upload(context=context, destinations=destinations, path=source)

    # Digests are computed at most once per artifact name.
    if context.checksums.get(artifact_pretty_name) is None:
        entry = {
            algo: get_hash(source, algo)
            for algo in context.config['checksums_digests']
        }
        entry['size'] = get_size(source)
        context.checksums[artifact_pretty_name] = entry

    if update_balrog_manifest:
        record = enrich_balrog_manifest(
            context, artifact_pretty_name, locale, destinations)
        context.balrog_manifest.append(record)