Code example #1
async def upload_to_s3(context, s3_key, path):
    product = get_product_name(context.release_props["appName"].lower(),
                               context.release_props["stage_platform"])
    mime_type = mimetypes.guess_type(path)[0]
    if not mime_type:
        raise ScriptWorkerTaskException(
            "Unable to discover valid mime-type for path ({}), "
            "mimetypes.guess_type() returned {}".format(path, mime_type))
    api_kwargs = {
        "Bucket": get_bucket_name(context, product),
        "Key": s3_key,
        "ContentType": mime_type
    }
    headers = {
        "Content-Type": mime_type,
        "Cache-Control": "public, max-age=%d" % CACHE_CONTROL_MAXAGE
    }
    creds = context.config["bucket_config"][context.bucket]["credentials"]
    s3 = boto3.client("s3",
                      aws_access_key_id=creds["id"],
                      aws_secret_access_key=creds["key"])
    url = s3.generate_presigned_url("put_object",
                                    api_kwargs,
                                    ExpiresIn=1800,
                                    HttpMethod="PUT")

    log.info("upload_to_s3: %s -> s3://%s/%s", path, api_kwargs.get("Bucket"),
             s3_key)
    await retry_async(put,
                      args=(context, url, headers, path),
                      retry_exceptions=(Exception,),
                      kwargs={"session": context.session})
Code example #2
async def upload_to_s3(context, s3_key, path):
    product = get_product_name(context.release_props['appName'].lower(),
                               context.release_props['stage_platform'])
    mime_type = mimetypes.guess_type(path)[0]
    if not mime_type:
        raise ScriptWorkerTaskException("Unable to discover valid mime-type for path ({}), "
                                        "mimetypes.guess_type() returned {}".format(
                                            path, mime_type
                                        ))
    api_kwargs = {
        'Bucket': get_bucket_name(context, product),
        'Key': s3_key,
        'ContentType': mime_type,
    }
    headers = {
        'Content-Type': mime_type,
        'Cache-Control': 'public, max-age=%d' % CACHE_CONTROL_MAXAGE,
    }
    creds = context.config['bucket_config'][context.bucket]['credentials']
    s3 = boto3.client('s3', aws_access_key_id=creds['id'], aws_secret_access_key=creds['key'])
    url = s3.generate_presigned_url('put_object', api_kwargs, ExpiresIn=1800, HttpMethod='PUT')

    log.info("upload_to_s3: %s -> s3://%s/%s", path, api_kwargs.get('Bucket'), s3_key)
    await retry_async(put, args=(context, url, headers, path),
                      retry_exceptions=(Exception,),
                      kwargs={'session': context.session})
Code example #3
async def push_to_releases(context):
    """Copy artifacts from one S3 location to another.

    Determine the list of artifacts to be copied and transfer them. These
    copies happen in S3 without downloading/reuploading."""
    context.artifacts_to_beetmove = {}
    product = context.task["payload"]["product"]
    build_number = context.task["payload"]["build_number"]
    version = context.task["payload"]["version"]
    context.bucket_name = get_bucket_name(context, product)

    candidates_prefix = get_candidates_prefix(product, version, build_number)
    releases_prefix = get_releases_prefix(product, version)

    creds = get_creds(context)
    s3_resource = boto3.resource("s3",
                                 aws_access_key_id=creds["id"],
                                 aws_secret_access_key=creds["key"])

    candidates_keys_checksums = list_bucket_objects(context, s3_resource,
                                                    candidates_prefix)
    releases_keys_checksums = list_bucket_objects(context, s3_resource,
                                                  releases_prefix)

    if not candidates_keys_checksums:
        raise ScriptWorkerTaskException(
            "No artifacts to copy from {}, so there is no reason to continue."
            .format(candidates_prefix))

    if releases_keys_checksums:
        log.warning("Destination {} already exists with {} keys".format(
            releases_prefix, len(releases_keys_checksums)))

    # Weed out RELEASE_EXCLUDE matches, but allow partners specified in the payload
    push_partners = context.task["payload"].get("partners", [])
    for k in candidates_keys_checksums.keys():
        if "/partner-repacks/" in k:
            partner_match = get_partner_match(k, candidates_prefix,
                                              push_partners)
            if partner_match:
                context.artifacts_to_beetmove[k] = k.replace(
                    get_partner_candidates_prefix(candidates_prefix,
                                                  partner_match),
                    get_partner_releases_prefix(product, version,
                                                partner_match))
            else:
                log.debug("Excluding partner repack {}".format(k))
        elif not matches_exclude(k, RELEASE_EXCLUDE):
            context.artifacts_to_beetmove[k] = k.replace(
                candidates_prefix, releases_prefix)
        else:
            log.debug("Excluding {}".format(k))

    copy_beets(context, candidates_keys_checksums, releases_keys_checksums)
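The docstring's point that these copies happen in S3 without downloading or reuploading rests on S3's server-side copy. A minimal sketch of what copy_beets could look like under that assumption, with illustrative checksum handling (not the project's actual code):

def copy_beets(context, from_keys_checksums, to_keys_checksums):
    creds = get_creds(context)
    boto_client = boto3.client("s3",
                               aws_access_key_id=creds["id"],
                               aws_secret_access_key=creds["key"])
    for source, destination in context.artifacts_to_beetmove.items():
        if destination in to_keys_checksums:
            # Destination already exists: only accept an exact duplicate.
            if to_keys_checksums[destination] != from_keys_checksums[source]:
                raise ScriptWorkerTaskException(
                    "{} already exists with a different checksum".format(
                        destination))
            log.info("Skipping %s, already present", destination)
            continue
        log.info("Copying %s to %s", source, destination)
        # copy_object performs a server-side copy, so the artifact bytes
        # never transit the worker.
        boto_client.copy_object(Bucket=context.bucket_name,
                                CopySource={"Bucket": context.bucket_name,
                                            "Key": source},
                                Key=destination)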
Code example #4
async def push_to_releases(context):
    """Copy artifacts from one S3 location to another.

    Determine the list of artifacts to be copied and transfer them. These
    copies happen in S3 without downloading/reuploading."""
    context.artifacts_to_beetmove = {}
    product = context.task['payload']['product']
    build_number = context.task['payload']['build_number']
    version = context.task['payload']['version']
    context.bucket_name = get_bucket_name(context, product)

    candidates_prefix = get_candidates_prefix(product, version, build_number)
    releases_prefix = get_releases_prefix(product, version)

    creds = get_creds(context)
    s3_resource = boto3.resource('s3',
                                 aws_access_key_id=creds['id'],
                                 aws_secret_access_key=creds['key'])

    candidates_keys_checksums = list_bucket_objects(context, s3_resource,
                                                    candidates_prefix)
    releases_keys_checksums = list_bucket_objects(context, s3_resource,
                                                  releases_prefix)

    if not candidates_keys_checksums:
        raise ScriptWorkerTaskException(
            "No artifacts to copy from {}, so there is no reason to continue."
            .format(candidates_prefix))

    if releases_keys_checksums:
        log.warning("Destination {} already exists with {} keys".format(
            releases_prefix, len(releases_keys_checksums)))

    # Weed out RELEASE_EXCLUDE matches
    for k in candidates_keys_checksums.keys():
        if not matches_exclude(k, RELEASE_EXCLUDE):
            context.artifacts_to_beetmove[k] = k.replace(
                candidates_prefix, releases_prefix)
        else:
            log.debug("Excluding {}".format(k))

    copy_beets(context, candidates_keys_checksums, releases_keys_checksums)
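Both variants filter candidate keys through matches_exclude against the RELEASE_EXCLUDE patterns. Assuming those patterns are regular expressions, a plausible sketch (an assumption, not necessarily the real helper):

import re

def matches_exclude(keyname, excludes):
    # True if any exclude pattern (treated as a regex) matches the key.
    return any(re.search(exclude, keyname) for exclude in excludes)

If RELEASE_EXCLUDE covers partner-repack paths, that would also explain why code example #3 special-cases "/partner-repacks/" keys before consulting the exclude list.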