Example #1
async def __process(record, s3_resource, batch_writer):
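    """Process a single pending task record.

    Records the item in items_table through the shared batch writer, then
    writes the validated task result to work_bucket and deletes the
    pending task object.
    """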
    async with trace("Processing {}", json.dumps(record)):
        validate_pending_task(record)
        index = record["index"]
        batch_id = record["batchId"]
        request = record["request"]
        item_no = request["itemNo"]
        await items_table.put_item(
            {
                "itemNo": str(item_no),
                "updateTimestamp": now_epoch_millis()
            }, batch_writer)
        processed_task = {
            "batchId": batch_id,
            "index": index,
            "request": request,
            "response": {
                "success": True,
                "message": "Ok"
            }
        }
        validate_processed_task(processed_task)
        await work_bucket.write_task_result(batch_id, index, processed_task,
                                            s3_resource)
        await work_bucket.delete_pending_task(batch_id, index, s3_resource)
Example #2
async def __process(message, s3_resource, batch_writer):
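    """Process a pending chunk of records.

    Reads the chunk from work_bucket, marks each record as successfully
    processed, writes every item to items_table, then stores the chunk
    result and deletes the pending chunk.
    """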
    async with trace("Processing {}", json.dumps(message)):
        batch_id = message["batchId"]
        index = message["index"]
        chunk = await work_bucket.read_pending_chunk(batch_id, index, s3_resource)
        for record in chunk["records"]:
            request = record["request"]
            item_no = request["itemNo"]
            record["response"] = {"success": True,
                                  "message": "Ok"}
        await items_table.put_item({"itemNo": str(item_no),
                                    "updateTimestamp": now_epoch_millis()},
                                   batch_writer)
        await work_bucket.write_chunk_result(batch_id, index, chunk, s3_resource)
        await work_bucket.delete_pending_chunk(batch_id, index, s3_resource)
Example #3
def handle_event(event, lambda_context):
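    """SQS-triggered entry point.

    For each queued task: writes the item to items_table with a batch
    writer, stores the task result in work_bucket, deletes the pending
    task, and notifies gather_queue once the batch has no pending tasks
    left.
    """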
    logger.info("Event: {}".format(json.dumps(event, indent=2)))
    records = event["Records"]
    with items_table.new_batch_writer() as batch_writer:
        for record in records:
            record = json.loads(record["body"])
            with trace("Processing {}", json.dumps(record)):
                index = record["index"]
                batch_id = record["batchId"]
                request = record["request"]
                item_no = request["itemNo"]
                items_table.put_item(
                    {
                        "itemNo": str(item_no),
                        "updateTimestamp": now_epoch_millis()
                    }, batch_writer)
                work_bucket.write_task_result(batch_id, index, request, {
                    "success": True,
                    "message": "Ok"
                })
                work_bucket.delete_pending_task(batch_id, index)
                if not work_bucket.exists_pending_task(batch_id):
                    gather_queue.send_batch_complete_message(batch_id)


async def __process(message, dynamodb_resource, batch_writer):
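    """Process a message referencing batch tasks stored in DynamoDB.

    Loads the referenced tasks concurrently, writes each item (including
    its price) to items_table, and stores the processed result back to
    batch_tasks_table.
    """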
    async with trace("Processing {}", json.dumps(message)):
        batch_id = message["batchId"]
        records = message["records"]
        tasks = await asyncio.gather(*[
            batch_tasks_table.get_batch_task(batch_id, record["index"],
                                             dynamodb_resource)
            for record in records
        ])
        for task in tasks:
            index = task["index"]
            request = task["request"]
            item_no = request["itemNo"]
            price = request["price"]
            response = {"success": True, "message": "Ok"}
            await items_table.put_item(
                {
                    "itemNo": str(item_no),
                    "price": price,
                    "updateTimestamp": now_epoch_millis()
                }, batch_writer)
            await batch_tasks_table.put_processed_batch_task(
                batch_id, index, request, response, dynamodb_resource)