async def push_to_maven(context):
    """Publish artifacts to the locations maven clients (mvn, gradle) expect."""
    # Zip-packed upstream artifacts are extracted before being beetmoved.
    zip_artifacts = task.get_upstream_artifacts_with_zip_extract_param(context)
    context.release_props = get_release_props(context)
    # Downstream calls expect these attributes to exist, even when unused here.
    context.checksums = dict()
    context.raw_balrog_manifest = dict()

    payload = context.task['payload']
    max_zip_size = context.config.get(
        'zip_max_file_size_in_mb', DEFAULT_ZIP_MAX_FILE_SIZE_IN_MB)
    artifact_map = payload.get('artifactMap')

    if artifact_map:
        # Declarative artifact map drives the upload destinations.
        context.artifacts_to_beetmove = _extract_and_check_maven_artifacts_to_beetmove(
            zip_artifacts, max_zip_size, artifact_map=artifact_map)
        await move_beets(context, context.artifacts_to_beetmove,
                         artifact_map=payload['artifactMap'])
    else:
        # Legacy path: build a mapping manifest and vet the bucket path first.
        manifest = generate_beetmover_manifest(context)
        validate_bucket_paths(context.bucket, manifest['s3_bucket_path'])
        context.artifacts_to_beetmove = _extract_and_check_maven_artifacts_to_beetmove(
            zip_artifacts, max_zip_size, mapping_manifest=manifest)
        await move_beets(context, context.artifacts_to_beetmove, manifest=manifest)
async def push_to_nightly(context):
    """Push artifacts to a certain location (e.g. nightly/ or candidates/).

    Determine the list of artifacts to be transferred, generate the mapping
    manifest, run some data validations, and upload the bits. Upon successful
    transfer, generate checksums files and manifests to be consumed downstream
    by balrogworkers.
    """
    context.release_props = get_release_props(context)

    # balrog_manifest is written and uploaded as an artifact that a subsequent
    # balrogworker task in the release graph uses to submit release blob info
    # (e.g. mar filename, size, etc).
    context.balrog_manifest = list()
    # Staging area for balrog_manifest so all completes and partials for a
    # release end up in the same data structure.
    context.raw_balrog_manifest = dict()
    # The checksums manifest is uploaded as an artifact, consumed by a later
    # signing task and then re-uploaded to S3 by another beetmover task.
    context.checksums = dict()

    payload = context.task['payload']
    # TODO: if artifactMap passes schema validation
    artifact_map = payload.get('artifactMap')

    if artifact_map:
        # Declarative path: the artifact map says where everything goes.
        context.artifacts_to_beetmove = get_upstream_artifacts(
            context, preserve_full_paths=True)
        await move_beets(context, context.artifacts_to_beetmove,
                         artifact_map=artifact_map)
    else:
        context.artifacts_to_beetmove = get_upstream_artifacts(context)
        manifest = generate_beetmover_manifest(context)
        # Extra validation check against the destination bucket path.
        validate_bucket_paths(context.bucket, manifest['s3_bucket_path'])
        # Some files configured via script configs need their contents pretty
        # named before any beetmoving begins.
        pretty_blobs = context.config.get('blobs_needing_prettynaming_contents', [])
        alter_unpretty_contents(context, pretty_blobs, manifest)
        # For each artifact in the manifest: map it to its pretty release
        # bucket name and upload it to the corresponding S3 location.
        await move_beets(context, context.artifacts_to_beetmove, manifest=manifest)

    # Write balrog_manifest to a file and add it to the list of artifacts.
    add_balrog_manifest_to_artifacts(context)
    # Determine the correct checksum filename, generate it, and add it too.
    add_checksums_to_artifacts(context)
async def push_to_maven(context):
    """Push artifacts to locations expected by maven clients (like mvn or gradle).

    Artifacts are either described by a declarative ``artifactMap`` in the task
    payload or by a legacy generated mapping manifest. Upstream artifacts
    flagged ``zipExtract`` are extracted (and size-checked) before upload.
    """
    context.release_props = get_release_props(context)
    context.checksums = dict()  # Needed by downstream calls
    context.raw_balrog_manifest = dict()  # Needed by downstream calls

    payload = context.task['payload']
    max_zip_size = context.config.get(
        'zip_max_file_size_in_mb', DEFAULT_ZIP_MAX_FILE_SIZE_IN_MB)

    # XXX this is needed in order to avoid the need to land the in-tree
    # corresponding patches across all trees altogether. Similarly for github
    # projects. This allows us a gradual rollout of this across all projects.
    # (generator instead of a temporary list so any() can short-circuit)
    is_zip_archive = any(
        d.get('zipExtract') for d in payload['upstreamArtifacts'])

    artifact_map = payload.get('artifactMap')
    if artifact_map:
        if is_zip_archive:
            zip_artifacts = task.get_upstream_artifacts_with_zip_extract_param(
                context)
            context.artifacts_to_beetmove = _extract_and_check_maven_artifacts_to_beetmove(
                zip_artifacts, max_zip_size, artifact_map=artifact_map)
        else:
            # overwrite artifacts_to_beetmove with the declarative artifacts ones
            context.artifacts_to_beetmove = task.get_upstream_artifacts(
                context, preserve_full_paths=True)
        # Both sub-branches upload identically once artifacts_to_beetmove is set.
        await move_beets(context, context.artifacts_to_beetmove,
                         artifact_map=payload['artifactMap'])
    else:
        # TODO: remove this once we're done with migrating from maven.zip
        zip_artifacts = task.get_upstream_artifacts_with_zip_extract_param(context)
        mapping_manifest = generate_beetmover_manifest(context)
        validate_bucket_paths(context.bucket, mapping_manifest['s3_bucket_path'])
        context.artifacts_to_beetmove = _extract_and_check_maven_artifacts_to_beetmove(
            zip_artifacts, max_zip_size, mapping_manifest=mapping_manifest)
        await move_beets(context, context.artifacts_to_beetmove,
                         manifest=mapping_manifest)
async def push_to_nightly(context):
    """Upload nightly/candidate artifacts and emit manifests for downstream tasks."""
    # Figure out which upstream artifacts need to be moved.
    context.artifacts_to_beetmove = get_upstream_artifacts(context)

    # Find the release properties and keep a copy in the artifacts directory.
    props_file = get_initial_release_props_file(context)
    context.release_props = get_release_props(props_file)

    # Build the beetmover mapping manifest and re-validate the bucket path.
    manifest = generate_beetmover_manifest(context)
    validate_bucket_paths(context.bucket, manifest['s3_bucket_path'])

    # Some files configured via script configs need their contents pretty
    # named before any beetmoving begins.
    pretty_blobs = context.config.get('blobs_needing_prettynaming_contents', [])
    alter_unpretty_contents(context, pretty_blobs, manifest)

    # balrog_manifest is uploaded as an artifact that a subsequent balrogworker
    # task uses to submit release blob info (e.g. mar filename, size, etc).
    context.balrog_manifest = list()
    # The checksums manifest is consumed by a later signing task and then
    # re-uploaded to S3 by another beetmover task.
    context.checksums = dict()

    # For each artifact in the manifest: map it to its pretty release bucket
    # name and upload it to the corresponding S3 location.
    await move_beets(context, context.artifacts_to_beetmove, manifest)

    # Publish the generated manifests as artifacts for downstream consumers.
    add_balrog_manifest_to_artifacts(context)
    add_checksums_to_artifacts(context)
    # Later beetmover jobs that upload the checksums file need the props file.
    add_release_props_to_artifacts(context, props_file)
def test_validate_bucket_paths(bucket, path, raises):
    """Check validate_bucket_paths accepts valid paths and rejects invalid ones."""
    if not raises:
        # Valid combination: must pass without raising.
        validate_bucket_paths(bucket, path)
        return
    # Invalid combination: must raise the scriptworker task exception.
    with pytest.raises(ScriptWorkerTaskException):
        validate_bucket_paths(bucket, path)