from aws_scatter_gather.util import json
import os

from aws_scatter_gather.util import aws
from aws_scatter_gather.util.jsontime import now
from aws_scatter_gather.util.trace import trace

# Deployment scope prefix (e.g. "dev-"); empty when the SCOPE env var is unset.
SCOPE = os.environ.get("SCOPE", "")

# S3 bucket holding per-batch work objects: "<batch_id>/pending/..." task
# markers and "<batch_id>/status.json" (see functions below).
WORK_BUCKET = "{SCOPE}s3-sqs-lambda-sync-work".format(SCOPE=SCOPE)
s3_resource = aws.resource("s3")  # boto3-style S3 resource via the project aws wrapper


def exists_pending_task(batch_id):
    """Return True iff at least one pending-task object exists for *batch_id*.

    A batch is complete exactly when no objects remain under its
    "<batch_id>/pending/" prefix in the work bucket; MaxKeys=1 keeps the
    listing request minimal.
    """
    with trace("Checking if batch batch_id={} is complete", batch_id):
        prefix = "{}/pending/".format(batch_id)
        listing = s3_resource.Bucket(name=WORK_BUCKET).objects.filter(Prefix=prefix, MaxKeys=1)
        # Pull at most one summary from the collection; any hit means pending work.
        found = next(iter(listing), None) is not None
    return found


def write_batch_status(batch_id, record_count):
    """Write a private "<batch_id>/status.json" document to the work bucket.

    The document records the variant name, the batch id, the number of tasks
    in the batch, and the batch start timestamp.
    """
    with trace("Writing status for {}", batch_id):
        status = {
            "variant": "s3-sqs-lambda-sync",
            "batchId": batch_id,
            "taskCount": record_count,
            "startTime": now(),
        }
        key = "{}/status.json".format(batch_id)
        s3_resource.Object(WORK_BUCKET, key).put(ACL='private', Body=json.dumps(status))

import os

from aws_scatter_gather.util import aws
from aws_scatter_gather.util.trace import trace

# Deployment scope prefix (e.g. "dev-"); empty when the SCOPE env var is unset.
SCOPE = os.environ.get("SCOPE", "")

# DynamoDB table of items, keyed by "itemNo" (see get_item / new_batch_writer).
ITEMS_TABLE = "{SCOPE}s3-sqs-lambda-sync-items".format(SCOPE=SCOPE)
dynamodb_resource = aws.resource("dynamodb")  # boto3-style DynamoDB resource via the project aws wrapper


def get_item(item_no):
    """Fetch the item with primary key "itemNo" == *item_no*.

    Returns the item dict, or None when the key is absent from the table.
    """
    with trace("Get item {}", item_no):
        result = dynamodb_resource.Table(ITEMS_TABLE).get_item(Key={"itemNo": item_no})
        # get_item responses omit "Item" entirely on a miss; .get() maps that to None.
        return result.get('Item')


def new_batch_writer():
    """Return a DynamoDB batch-writer context manager for the items table.

    overwrite_by_pkeys de-duplicates buffered puts that share the same
    "itemNo" key, keeping only the most recent one per flush.
    """
    return dynamodb_resource.Table(ITEMS_TABLE).batch_writer(overwrite_by_pkeys=["itemNo"])


def put_item(item, batch_writer):
    """Queue *item* for writing through the given DynamoDB batch writer."""
    item_no = item.get("itemNo", None)
    with trace("Put item {}", item_no):
        batch_writer.put_item(Item=item)