async def push_to_releases(context):
    """Copy artifacts from the candidates S3 prefix to the releases prefix.

    Determine the list of artifacts to be copied and transfer them. These
    copies happen in S3 without downloading/reuploading.

    Args:
        context: task context. ``context.task["payload"]`` must provide
            ``product``, ``build_number`` and ``version``, and may provide
            ``partners`` (partner names allowed through the exclusion
            filter). Sets ``context.artifacts_to_beetmove`` (source key ->
            destination key) and ``context.bucket_name`` as side effects.

    Raises:
        ScriptWorkerTaskException: if the candidates prefix contains no
            artifacts to copy.
    """
    context.artifacts_to_beetmove = {}
    product = context.task["payload"]["product"]
    build_number = context.task["payload"]["build_number"]
    version = context.task["payload"]["version"]

    context.bucket_name = get_bucket_name(context, product)
    candidates_prefix = get_candidates_prefix(product, version, build_number)
    releases_prefix = get_releases_prefix(product, version)

    creds = get_creds(context)
    s3_resource = boto3.resource(
        "s3",
        aws_access_key_id=creds["id"],
        aws_secret_access_key=creds["key"],
    )

    candidates_keys_checksums = list_bucket_objects(context, s3_resource, candidates_prefix)
    releases_keys_checksums = list_bucket_objects(context, s3_resource, releases_prefix)

    if not candidates_keys_checksums:
        raise ScriptWorkerTaskException(
            "No artifacts to copy from {} so there is no reason to continue.".format(candidates_prefix))

    if releases_keys_checksums:
        # Copying on top of an existing destination is allowed but suspicious;
        # warn so a partial previous push is visible in the logs.
        # Lazy %-args: the message is only rendered if the record is emitted.
        log.warning("Destination %s already exists with %s keys",
                    releases_prefix, len(releases_keys_checksums))

    # Weed out RELEASE_EXCLUDE matches, but allow partners specified in the payload
    push_partners = context.task["payload"].get("partners", [])
    for k in candidates_keys_checksums:  # dicts iterate keys; .keys() is redundant
        if "/partner-repacks/" in k:
            partner_match = get_partner_match(k, candidates_prefix, push_partners)
            if partner_match:
                context.artifacts_to_beetmove[k] = k.replace(
                    get_partner_candidates_prefix(candidates_prefix, partner_match),
                    get_partner_releases_prefix(product, version, partner_match))
            else:
                log.debug("Excluding partner repack %s", k)
        elif not matches_exclude(k, RELEASE_EXCLUDE):
            context.artifacts_to_beetmove[k] = k.replace(candidates_prefix, releases_prefix)
        else:
            log.debug("Excluding %s", k)

    copy_beets(context, candidates_keys_checksums, releases_keys_checksums)
async def push_to_releases(context):
    """Copy artifacts from one S3 location to another.

    Determine the list of artifacts to be copied and transfer them. These
    copies happen in S3 without downloading/reuploading.
    """
    context.artifacts_to_beetmove = {}
    payload = context.task['payload']
    product = payload['product']
    build_number = payload['build_number']
    version = payload['version']

    context.bucket_name = get_bucket_name(context, product)
    candidates_prefix = get_candidates_prefix(product, version, build_number)
    releases_prefix = get_releases_prefix(product, version)

    creds = get_creds(context)
    s3_resource = boto3.resource(
        's3',
        aws_access_key_id=creds['id'],
        aws_secret_access_key=creds['key'],
    )

    candidates_keys_checksums = list_bucket_objects(
        context, s3_resource, candidates_prefix)
    releases_keys_checksums = list_bucket_objects(
        context, s3_resource, releases_prefix)

    # Nothing under the candidates prefix means there is nothing to push.
    if not candidates_keys_checksums:
        raise ScriptWorkerTaskException(
            "No artifacts to copy from {} so there is no reason to continue.".format(candidates_prefix))

    # A non-empty destination is allowed, but worth flagging in the logs.
    if releases_keys_checksums:
        log.warning("Destination {} already exists with {} keys".format(
            releases_prefix, len(releases_keys_checksums)))

    # Weed out RELEASE_EXCLUDE matches
    for key in candidates_keys_checksums.keys():
        if matches_exclude(key, RELEASE_EXCLUDE):
            log.debug("Excluding {}".format(key))
            continue
        context.artifacts_to_beetmove[key] = key.replace(
            candidates_prefix, releases_prefix)

    copy_beets(context, candidates_keys_checksums, releases_keys_checksums)
def test_get_releases_prefix(product, version, expected):
    """Verify that get_releases_prefix builds the expected S3 key prefix."""
    actual = get_releases_prefix(product, version)
    assert actual == expected