def test_alter_unpretty_contents(context, mocker):
    """Exercise alter_unpretty_contents with stubbed JSON I/O.

    Both locales advertise the same test-packages blob; only loc1 has a
    mapping entry, so the call must tolerate the missing loc2 mapping.
    """
    context.artifacts_to_beetmove = {
        'loc1': {'target.test_packages.json': 'mobile'},
        'loc2': {'target.test_packages.json': 'mobile'},
    }
    mappings = {
        'mapping': {
            'loc1': {'bar': {'s3_key': 'x'}},
            'loc2': {},
        },
    }

    def stub_json(*args, **kwargs):
        # Stands in for both load_json and write_json; the return value
        # only matters for the load side.
        return {'mobile': ['bar']}

    mocker.patch.object(butils, 'load_json', new=stub_json)
    mocker.patch.object(butils, 'write_json', new=stub_json)

    butils.alter_unpretty_contents(context, ['target.test_packages.json'], mappings)
async def push_to_nightly(context):
    """Push artifacts to a certain location (e.g. nightly/ or candidates/).

    Determine the list of artifacts to be transferred, generate the mapping
    manifest, run some data validations, and upload the bits.

    Upon successful transfer, generate checksums files and manifests to be
    consumed downstream by balrogworkers.
    """
    context.release_props = get_release_props(context)

    # Written and uploaded as an artifact consumed by a later balrogworker
    # task, which uses it to submit release blob info (mar filename, size, …).
    context.balrog_manifest = list()
    # Staging area for building balrog_manifest: completes and partials for a
    # release are accumulated in one structure before being flattened.
    context.raw_balrog_manifest = dict()
    # Checksums manifest: uploaded as an artifact, consumed by a follow-up
    # signing task and then re-uploaded to S3 by another beetmover task.
    context.checksums = dict()

    # TODO: if artifactMap passes schema validation
    if context.task['payload'].get('artifactMap'):
        # Task supplies an explicit artifact map — no generated manifest needed.
        context.artifacts_to_beetmove = get_upstream_artifacts(
            context, preserve_full_paths=True)
        await move_beets(context, context.artifacts_to_beetmove,
                         artifact_map=context.task['payload']['artifactMap'])
    else:
        context.artifacts_to_beetmove = get_upstream_artifacts(context)
        # Generate the beetmover mapping manifest and double-check that it
        # points at the bucket this worker is allowed to write to.
        manifest = generate_beetmover_manifest(context)
        validate_bucket_paths(context.bucket, manifest['s3_bucket_path'])
        # Some configured blobs need their contents pretty-named before any
        # beetmoving happens.
        blob_names = context.config.get('blobs_needing_prettynaming_contents', [])
        alter_unpretty_contents(context, blob_names, manifest)
        # For each artifact: map it to its pretty-named release-bucket form,
        # then upload it to the corresponding S3 location.
        await move_beets(context, context.artifacts_to_beetmove, manifest=manifest)

    # Persist balrog_manifest as a task artifact.
    add_balrog_manifest_to_artifacts(context)
    # Determine the correct checksum filename, generate it, and attach it to
    # the artifact list.
    add_checksums_to_artifacts(context)
def test_alter_unpretty_contents(context, mocker):
    """Smoke-test alter_unpretty_contents with JSON read/write patched out.

    loc2 deliberately has no mapping entry so the function must cope with a
    locale that maps nowhere.
    """
    context.artifacts_to_beetmove = {
        "loc1": {"target.test_packages.json": "mobile"},
        "loc2": {"target.test_packages.json": "mobile"},
    }
    mappings = {
        "mapping": {
            "loc1": {"bar": {"s3_key": "x"}},
            "loc2": {},
        },
    }

    def stub_json(*args, **kwargs):
        # Single stub covers load_json and write_json alike.
        return {"mobile": ["bar"]}

    mocker.patch.object(butils, "load_json", new=stub_json)
    mocker.patch.object(butils, "write_json", new=stub_json)

    butils.alter_unpretty_contents(context, ["target.test_packages.json"], mappings)
async def async_main(context):
    """Run a full beetmover job: validate the task, then move the beets.

    Side effects: populates several manifests on *context*, uploads artifacts,
    and writes the balrog/checksums/release-props artifacts for downstream
    tasks.
    """
    # Uploaded as an artifact for a later balrogworker task, which submits
    # release blob info (mar filename, size, etc.) from it.
    context.balrog_manifest = list()
    # Uploaded as an artifact for a follow-up signing task, then re-uploaded
    # alongside the other artifacts by another beetmover task.
    context.checksums = dict()

    # Load and schema-validate the task (e.g. $cfg['work_dir']/task.json),
    # including its scopes.
    context.task = get_task(context.config)
    validate_task_schema(context)

    context.artifacts_to_beetmove = get_upstream_artifacts(context)

    # Find the release properties and keep a copy in the artifacts directory.
    props_file = get_initial_release_props_file(context)
    context.release_props = get_release_props(props_file)

    # Build the beetmover mapping manifest, then check scopes so we never
    # beetmove into the wrong place.
    manifest = generate_beetmover_manifest(context.config, context.task,
                                           context.release_props)
    validate_task_scopes(context, manifest)

    # Some configured blobs need their contents pretty-named before any
    # beetmoving starts.
    blob_names = context.config.get('blobs_needing_prettynaming_contents', [])
    alter_unpretty_contents(context, blob_names, manifest)

    # For each artifact: map it to its pretty-named release-bucket form and
    # upload it to the candidates/dated location.
    await move_beets(context, context.artifacts_to_beetmove, manifest)

    # Persist the balrog manifest and the generated checksums file as
    # task artifacts.
    add_balrog_manifest_to_artifacts(context)
    add_checksums_to_artifacts(context)
    # Expose the release props file for later beetmover jobs that upload the
    # checksums file.
    add_release_props_to_artifacts(context, props_file)
    log.info('Success!')
async def push_to_nightly(context):
    """Move upstream artifacts to their nightly S3 destinations.

    Builds the mapping manifest, validates the target bucket, uploads the
    beets, and emits the balrog/checksums/release-props artifacts consumed by
    downstream tasks.
    """
    context.artifacts_to_beetmove = get_upstream_artifacts(context)

    # Find the release properties and keep a copy in the artifacts directory.
    props_file = get_initial_release_props_file(context)
    context.release_props = get_release_props(props_file)

    # Build the beetmover mapping manifest and double-check it targets a
    # bucket path this worker may write to.
    manifest = generate_beetmover_manifest(context)
    validate_bucket_paths(context.bucket, manifest['s3_bucket_path'])

    # Some configured blobs need their contents pretty-named before any
    # beetmoving starts.
    blob_names = context.config.get('blobs_needing_prettynaming_contents', [])
    alter_unpretty_contents(context, blob_names, manifest)

    # Uploaded as an artifact for a later balrogworker task, which submits
    # release blob info (e.g. mar filename, size) from it.
    context.balrog_manifest = list()
    # Uploaded as an artifact for a follow-up signing task, then re-uploaded
    # to S3 by another beetmover task.
    context.checksums = dict()

    # For each artifact: map it to its pretty-named release-bucket form and
    # upload it to the corresponding S3 location.
    await move_beets(context, context.artifacts_to_beetmove, manifest)

    # Persist the balrog manifest and the generated checksums file as
    # task artifacts.
    add_balrog_manifest_to_artifacts(context)
    add_checksums_to_artifacts(context)
    # Expose the release props file for later beetmover jobs that upload the
    # checksums file.
    add_release_props_to_artifacts(context, props_file)