Exemplo n.º 1
0
def handle_event(event, lambda_context):
    """Scatter a batch input object from S3 into individual tasks.

    Reads the batch document named in the S3 event, validates it, writes a
    batch-status record sized to the record count, fans the records out as
    tasks/messages, then deletes the consumed input object.
    """
    logger.info("Event: {}".format(json.dumps(event, indent=2)))

    s3_object = __get_s3_object_from(event)
    if s3_object is None:
        # Not a real batch notification (e.g. an S3 test event) — nothing to do.
        return

    bucket_name, object_key = s3_object
    batch_id = __extract_batch_id(object_key)
    record_batch_started(batch_id)
    with trace("Scattering {}", batch_id):
        batch_doc = input_bucket.read_batch_input(bucket_name, object_key)
        validate_input(batch_doc)
        records = batch_doc.get("records", [])
        work_bucket.write_batch_status(batch_id, len(records))
        __write_tasks_and_send_messages(batch_id, records)

    # Input is only removed after the scatter completed without raising.
    input_bucket.delete_batch_input(bucket_name, object_key)
    record_scatter_finished(batch_id, len(records))
Exemplo n.º 2
0
async def handle_event(event, lambda_context):
    """Asynchronously scatter a batch input object from S3 into work chunks.

    Opens aio S3/SQS handles, reads and validates the batch document, writes
    the batch status (chunked by CHUNK_SIZE), emits the chunks, and deletes
    the consumed input object before recording completion.
    """
    logger.info("Event: {}".format(json.dumps(event, indent=2)))

    s3_object = __get_s3_object_from(event)
    if s3_object is None:
        # S3 sends a synthetic test event on notification setup; ignore it.
        logger.info("Is s3 test event. Skipping.")
        return

    bucket_name, object_key = s3_object
    batch_id = __extract_batch_id(object_key)
    async with trace("Scattering {}", batch_id):
        async with aioaws.resource("s3") as s3_resource:
            async with aioaws.client("sqs") as sqs_client:
                batch_doc = await input_bucket.read_batch_input(
                    bucket_name, object_key, s3_resource)
                validate_input(batch_doc)
                records = batch_doc.get("records", [])
                record_batch_started(batch_id)
                await work_bucket.write_batch_status(
                    batch_id, len(records), CHUNK_SIZE, s3_resource)
                await __write_chunks(batch_id, records, s3_resource, sqs_client)
                # Delete only after all chunks were written and queued.
                await input_bucket.delete_batch_input(
                    bucket_name, object_key, s3_resource)
    record_scatter_finished(batch_id, len(records))
Exemplo n.º 3
0
async def handle_event(event, lambda_context):
    """Asynchronously scatter a batch input object from S3, tracking status in DynamoDB.

    Opens aio S3/SQS/DynamoDB handles, reads and validates the batch document,
    records the batch status row, writes the chunks and their queue messages,
    then deletes the consumed input object before recording completion.
    """
    logger.info("Event: {}".format(json.dumps(event, indent=2)))

    s3_object = __get_s3_object_from(event)
    if s3_object is None:
        # Nothing usable in the event (e.g. S3 test notification).
        return

    bucket_name, object_key = s3_object
    batch_id = __extract_batch_id(object_key)
    async with trace("Scattering {}", batch_id):
        async with aioaws.resource("s3") as s3_resource:
            async with aioaws.client("sqs") as sqs_client:
                async with aioaws.resource("dynamodb") as dynamodb_resource:
                    batch_doc = await input_bucket.read_batch_input(
                        bucket_name, object_key, s3_resource)
                    validate_input(batch_doc)
                    records = batch_doc.get("records", [])
                    record_batch_started(batch_id)
                    await batch_status_table.put_batch_status(
                        batch_id, len(records), dynamodb_resource)
                    await __write_chunks_and_send_messages(
                        batch_id, records, dynamodb_resource, sqs_client)
                    # Remove the input only once every chunk is persisted and queued.
                    await input_bucket.delete_batch_input(
                        bucket_name, object_key, s3_resource)
    record_scatter_finished(batch_id, len(records))
Exemplo n.º 4
0
 def test_missing_records_invalid(self):
     """A document without a "records" key must be rejected."""
     self.assertRaises(ValidationError, validate_input, {})
Exemplo n.º 5
0
 def test_missing_item_no_invalid(self):
     """A record lacking "itemNo" must be rejected."""
     document = {"records": [{"price": 100}]}
     self.assertRaises(ValidationError, validate_input, document)
Exemplo n.º 6
0
 def test_missing_price_invalid(self):
     """A record lacking "price" must be rejected."""
     document = {"records": [{"itemNo": "1"}]}
     self.assertRaises(ValidationError, validate_input, document)
Exemplo n.º 7
0
 def test_happy_doc(self):
     """A record carrying both "itemNo" and "price" passes validation."""
     document = {"records": [{"itemNo": "1", "price": 100}]}
     validate_input(document)  # must not raise
Exemplo n.º 8
0
 def test_empty_is_valid(self):
     """An empty "records" list is an acceptable batch."""
     validate_input({"records": []})  # must not raise