Example #1
async def handle_event(event, lambda_context):
    logger.info("Event: {}".format(json.dumps(event, indent=2)))

    # Process every S3 object referenced by the event concurrently,
    # sharing one DynamoDB batch writer across all of them.
    s3_objects = __get_s3_objects_from(event)

    async with aioaws.resource("s3") as s3_resource, \
        aioaws.resource("dynamodb") as dynamodb_resource, \
        await items_table.new_batch_writer(dynamodb_resource) as batch_writer:
        await asyncio.gather(*[
            __process(s3_object, s3_resource, batch_writer)
            for s3_object in s3_objects
        ])
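The __get_s3_objects_from helper is not shown in any of these examples. A minimal sketch, assuming a standard S3 notification event and the (bucket, key) tuples the handlers index into:

from urllib.parse import unquote_plus

def __get_s3_objects_from(event):
    # Object keys arrive URL-encoded in S3 notification events.
    return [
        (record["s3"]["bucket"]["name"],
         unquote_plus(record["s3"]["object"]["key"]))
        for record in event.get("Records", [])
    ]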
Example #2
async def handle_event(event, lambda_context):
    logger.info("Event: {}".format(json.dumps(event, indent=2)))

    async with aioaws.client("sqs") as sqs_client, \
        aioaws.resource("s3") as s3_resource, \
        aioaws.resource("dynamodb") as dynamodb_resource, \
        await items_table.new_batch_writer(dynamodb_resource) as batch_writer:
        # Each SQS record body is one chunk of a larger batch.
        chunks = [json.loads(record["body"]) for record in event["Records"]]
        await asyncio.gather(*[__process(chunk, s3_resource, batch_writer) for chunk in chunks])

        # One poll may deliver chunks from several batches; check each one
        # for completion after its chunks have been processed.
        batch_ids = {chunk["batchId"] for chunk in chunks}
        await asyncio.gather(*[__check_if_complete(batch_id, s3_resource, sqs_client) for batch_id in batch_ids])
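The chunk message format is implied rather than shown. Based on the batchId lookup above, a hypothetical body might look like this (the records field is an assumption):

example_chunk_body = {
    "batchId": "batch-123",  # consumed by __check_if_complete above
    "records": [{"id": "a"}, {"id": "b"}],  # assumed: this chunk's slice of the batch
}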
Example #3
async def handle_event(event, lambda_context):
    logger.info("Event: {}".format(json.dumps(event, indent=2)))

    async with aioaws.resource("s3") as s3_resource:
        # Fan out: gather the result for every record in the SQS event.
        records = [json.loads(record["body"]) for record in event["Records"]]
        await asyncio.gather(
            *[__gather(record, s3_resource) for record in records])
Example #4
async def handle_event(event, lambda_context):
    logger.info("Event: {}".format(json.dumps(event, indent=2)))

    s3_objects = __get_s3_objects_from(event)
    batch_ids = {__extract_batch_id(key[1]) for key in s3_objects}

    async with aioaws.client("sqs") as sqs_client, \
        aioaws.resource("s3") as s3_resource:
        await asyncio.gather(*[
            __check_if_complete(batch_id, s3_resource, sqs_client)
            for batch_id in batch_ids
        ])
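__extract_batch_id is likewise left to the reader. A hypothetical sketch, assuming object keys such as "input/<batch_id>.json" (the real key layout is not shown):

import os

def __extract_batch_id(key):
    # "input/batch-123.json" -> "batch-123"
    return os.path.splitext(os.path.basename(key))[0]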
Example #5
async def handle_event(event, lambda_context):
    logger.info("Event: {}".format(json.dumps(event, indent=2)))

    s3_object = __get_s3_object_from(event)
    if s3_object is None:
        # Ignore S3 test events (see Example #6).
        return
    batch_id = __extract_batch_id(s3_object[1])
    async with trace("Scattering {}", batch_id):
        async with aioaws.resource("s3") as s3_resource, \
            aioaws.client("sqs") as sqs_client, \
            aioaws.resource("dynamodb") as dynamodb_resource:
            batch_doc = await input_bucket.read_batch_input(
                s3_object[0], s3_object[1], s3_resource)
            validate_input(batch_doc)
            records = batch_doc.get("records", [])
            record_batch_started(batch_id)
            # Record the expected record count in DynamoDB, fan the records
            # out as chunk messages, then delete the consumed input object.
            await batch_status_table.put_batch_status(batch_id, len(records),
                                                      dynamodb_resource)
            await __write_chunks_and_send_messages(batch_id, records,
                                                   dynamodb_resource,
                                                   sqs_client)
            await input_bucket.delete_batch_input(s3_object[0], s3_object[1],
                                                  s3_resource)
    record_scatter_finished(batch_id, len(records))
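trace is used as an async context manager that wraps the whole scatter step. A minimal hypothetical sketch (only the call shape is taken from the example above):

from contextlib import asynccontextmanager

@asynccontextmanager
async def trace(message, *args):
    logger.info("Started: " + message.format(*args))
    try:
        yield
    finally:
        logger.info("Finished: " + message.format(*args))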
Example #6
async def handle_event(event, lambda_context):
    logger.info("Event: {}".format(json.dumps(event, indent=2)))

    s3_object = __get_s3_object_from(event)
    if s3_object is None:
        logger.info("Is s3 test event. Skipping.")
        return

    batch_id = __extract_batch_id(s3_object[1])
    async with trace("Scattering {}", batch_id):
        async with aioaws.resource("s3") as s3_resource, aioaws.client("sqs") as sqs_client:
            batch_doc = await input_bucket.read_batch_input(s3_object[0], s3_object[1], s3_resource)
            validate_input(batch_doc)
            records = batch_doc.get("records", [])
            record_batch_started(batch_id)
            # Unlike Example #5, batch status lives in the work bucket in S3
            # rather than in DynamoDB.
            await work_bucket.write_batch_status(batch_id, len(records), CHUNK_SIZE, s3_resource)
            await __write_chunks(batch_id, records, s3_resource, sqs_client)
            await input_bucket.delete_batch_input(s3_object[0], s3_object[1], s3_resource)
    record_scatter_finished(batch_id, len(records))
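__write_chunks and CHUNK_SIZE suggest the records are split into fixed-size slices before being written and enqueued. A sketch of that splitting step, under that assumption (__split_into_chunks is a hypothetical name):

def __split_into_chunks(records, chunk_size):
    # e.g. 10 records with chunk_size=4 -> slices of 4, 4 and 2
    return [records[i:i + chunk_size]
            for i in range(0, len(records), chunk_size)]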
Example #7
async def run(batch_id, batch):
    async with aioaws.resource("s3") as s3_resource:
        # Write the input document, then block until the gathered output
        # JSON appears in S3.
        await input_bucket.write_batch_input(batch_id, batch, s3_resource)
        await __wait_for_output_json_available_in_s3(batch_id, s3_resource)
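__wait_for_output_json_available_in_s3 is not shown either. A hypothetical polling sketch; output_bucket.exists and the delay_seconds parameter are assumptions, not part of the code above:

import asyncio

async def __wait_for_output_json_available_in_s3(batch_id, s3_resource,
                                                 delay_seconds=1.0):
    # Poll until the gather step has written the batch output object.
    while not await output_bucket.exists(batch_id, s3_resource):
        await asyncio.sleep(delay_seconds)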