Example #1
def test_it_fetches_userinfo_from_lambda_event():
    result = get_user_info({
        "requestContext": {
            "authorizer": {
                "claims": {
                    "sub": "12345678-1234-1234-1234-123456123456",
                    "aud": "12345678901234567890123456",
                    "email_verified": "true",
                    "event_id": "12345678-1234-1234-1234-654321654321",
                    "token_use": "id",
                    "auth_time": "1581433394",
                    "iss":
                    "https://cognito-idp.eu-west-1.amazonaws.com/eu-west-1_123456789",
                    "cognito:username": "******",
                    "exp": "Tue Feb 18 18:41:50 UTC 2020",
                    "iat": "Tue Feb 18 17:41:50 UTC 2020",
                    "email": "*****@*****.**",
                }
            }
        }
    })

    assert result == {
        "Username": "******",
        "Sub": "12345678-1234-1234-1234-123456123456",
    }
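
All of the handlers below record the caller with get_user_info, whose implementation is not part of this listing. A minimal sketch consistent with this test and with the failover test in Example #10 follows; the real fallback for Username is masked in the listing, so the "N/A" default here is an assumption.

# Minimal sketch of get_user_info, inferred from the tests in this listing.
def get_user_info(event):
    claims = (
        event.get("requestContext", {})
        .get("authorizer", {})
        .get("claims", {})
    )
    return {
        "Username": claims.get("cognito:username", "N/A"),  # assumed fallback
        "Sub": claims.get("sub", "N/A"),
    }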
Example #2
def enqueue_handler(event, context):
    body = event["body"]
    validate_queue_items([body])
    user_info = get_user_info(event)
    item = enqueue_items([body], user_info)[0]
    deletion_queue_table.put_item(Item=item)
    return {"statusCode": 201, "body": json.dumps(item, cls=DecimalEncoder)}
Example #3
def create_data_mapper_handler(event, context):
    path_params = event["pathParameters"]
    body = event["body"]
    validate_mapper(body)
    deletion_db = body["QueryExecutorParameters"]["Database"]
    deletion_table = "deletion_queue_{}".format(
        camel_to_snake_case(path_params["data_mapper_id"]))
    deletion_queue_prefix = "data_mappers/{}/deletion_queue/".format(
        path_params["data_mapper_id"])
    generate_athena_table_for_mapper(body, deletion_db, deletion_table,
                                     s3f2_flow_bucket, deletion_queue_prefix)
    item = {
        "DataMapperId": path_params["data_mapper_id"],
        "Columns": body["Columns"],
        "QueryExecutor": body["QueryExecutor"],
        "QueryExecutorParameters": body["QueryExecutorParameters"],
        "CreatedBy": get_user_info(event),
        "RoleArn": body["RoleArn"],
        "Format": body.get("Format", "parquet"),
        "DeletionQueueDb": deletion_db,
        "DeletionQueueTableName": deletion_table,
        "DeletionQueueBucket": s3f2_flow_bucket,
        "DeletionQueuePrefix": deletion_queue_prefix,
        "DeleteOldVersions": body.get("DeleteOldVersions", True),
    }
    table.put_item(Item=item)

    return {"statusCode": 201, "body": json.dumps(item)}
Example #4
def enqueue_batch_handler(event, context):
    body = event["body"]
    matches = body["Matches"]
    validate_queue_items(matches)
    user_info = get_user_info(event)
    items = enqueue_items(matches, user_info)
    return {
        "statusCode": 201,
        "body": json.dumps({"Matches": items}, cls=DecimalEncoder),
    }
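
A hypothetical invocation of the batch handler; note that the handlers index event["body"] as an already-deserialized dict, so some upstream middleware is assumed to have parsed the JSON request body.

# Illustrative only: the MatchId value and event shape are made up.
response = enqueue_batch_handler(
    {
        "body": {"Matches": [{"MatchId": "jane@example.com", "DataMappers": []}]},
        "requestContext": {},
    },
    context=None,
)
assert response["statusCode"] == 201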
Example #5
def enqueue_handler(event, context):
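    # Unlike Example #2, this variant builds the queue item inline instead of
    # delegating to enqueue_items.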
    body = event["body"]
    match_id = body["MatchId"]
    data_mappers = body.get("DataMappers", [])
    item = {
        "DeletionQueueItemId": str(uuid.uuid4()),
        "MatchId": match_id,
        "CreatedAt": utc_timestamp(),
        "DataMappers": data_mappers,
        "CreatedBy": get_user_info(event)
    }
    deletion_queue_table.put_item(Item=item)

    return {"statusCode": 201, "body": json.dumps(item, cls=DecimalEncoder)}
Example #6
def process_handler(event, context):
    if running_job_exists():
        raise ValueError("There is already a job in progress")

    job_id = str(uuid.uuid4())
    config = get_config()
    deletion_queue_key = "jobs/{}/deletion_queue/data.json".format(job_id)
    item = {
        "Id": job_id,
        "Sk": job_id,
        "Type": "Job",
        "JobStatus": "QUEUED",
        "GSIBucket": str(random.randint(0, bucket_count - 1)),
        "CreatedAt": utc_timestamp(),
        "DeletionQueueBucket": deletion_queue_bucket,
        "DeletionQueueKey": deletion_queue_key,
        "DeletionQueueItemsSkipped": False,
        "CreatedBy": get_user_info(event),
        **{
            k: v
            for k, v in config.items() if k not in ["JobDetailsRetentionDays"]
        }
    }

    if int(config.get("JobDetailsRetentionDays", 0)) > 0:
        item["Expires"] = utc_timestamp(days=config["JobDetailsRetentionDays"])

    deletion_queue_items = {"DeletionQueueItems": []}
    for extended_deletion_queue_item in get_deletion_queue():
        deletion_item = {
            "DeletionQueueItemId": extended_deletion_queue_item["DeletionQueueItemId"],
            "MatchId": extended_deletion_queue_item["MatchId"],
            "DataMappers": extended_deletion_queue_item["DataMappers"],
        }
        deletion_queue_items["DeletionQueueItems"].append(deletion_item)

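    # Store the full queue snapshot in S3 so the DynamoDB job item stays small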
    obj = s3.Object(deletion_queue_bucket, deletion_queue_key)
    obj.put(Body=json.dumps(deletion_queue_items))
    jobs_table.put_item(Item=item)

    # After persisting the job to DynamoDB, add the queued MatchIds to the response
    item["DeletionQueueItems"] = [
        i["MatchId"] for i in deletion_queue_items["DeletionQueueItems"]
    ]

    return {"statusCode": 202, "body": json.dumps(item, cls=DecimalEncoder)}
Example #7
def put_data_mapper_handler(event, context):
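    # Same shape as Example #3, minus the Athena deletion-queue table generation.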
    path_params = event["pathParameters"]
    body = event["body"]
    validate_mapper(body)
    item = {
        "DataMapperId": path_params["data_mapper_id"],
        "Columns": body["Columns"],
        "QueryExecutor": body["QueryExecutor"],
        "QueryExecutorParameters": body["QueryExecutorParameters"],
        "CreatedBy": get_user_info(event),
        "RoleArn": body["RoleArn"],
        "Format": body.get("Format", "parquet"),
        "DeleteOldVersions": body.get("DeleteOldVersions", True),
    }
    table.put_item(Item=item)

    return {"statusCode": 201, "body": json.dumps(item)}
Example #8
def process_handler(event, context):
    if running_job_exists():
        raise ValueError("There is already a job in progress")

    job_id = str(uuid.uuid4())
    config = get_config()
    item = {
        "Id": job_id,
        "Sk": job_id,
        "Type": "Job",
        "JobStatus": "QUEUED",
        "GSIBucket": str(random.randint(0, bucket_count - 1)),
        "CreatedAt": utc_timestamp(),
        "DeletionQueueItems": [],
        "DeletionQueueItemsSkipped": False,
        "CreatedBy": get_user_info(event),
        **{
            k: v
            for k, v in config.items() if k not in ["JobDetailsRetentionDays"]
        }
    }

    if int(config.get("JobDetailsRetentionDays", 0)) > 0:
        item["Expires"] = utc_timestamp(days=config["JobDetailsRetentionDays"])

    item_size_bytes = calculate_ddb_item_bytes(item)

    for deletion_queue_item in get_deletion_queue():
        current_size_bytes = calculate_ddb_item_bytes(deletion_queue_item)
        if item_size_bytes + current_size_bytes < max_size_bytes:
            item["DeletionQueueItems"].append(deletion_queue_item)
            item_size_bytes += current_size_bytes
        else:
            item["DeletionQueueItemsSkipped"] = True
            break

    jobs_table.put_item(Item=item)

    return {"statusCode": 202, "body": json.dumps(item, cls=DecimalEncoder)}
Example #9
def process_handler(event, context):
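    # Simplest variant: records the job only; the deletion queue is not snapshotted.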
    if running_job_exists():
        raise ValueError("There is already a job in progress")

    job_id = str(uuid.uuid4())
    config = get_config()
    item = {
        "Id": job_id,
        "Sk": job_id,
        "Type": "Job",
        "JobStatus": "QUEUED",
        "GSIBucket": str(random.randint(0, bucket_count - 1)),
        "CreatedAt": utc_timestamp(),
        "CreatedBy": get_user_info(event),
        **{
            k: v
            for k, v in config.items() if k not in ["JobDetailsRetentionDays"]
        },
    }
    if int(config.get("JobDetailsRetentionDays", 0)) > 0:
        item["Expires"] = utc_timestamp(days=config["JobDetailsRetentionDays"])
    jobs_table.put_item(Item=item)
    return {"statusCode": 202, "body": json.dumps(item, cls=DecimalEncoder)}
Example #10
def test_it_fetches_userinfo_from_lambda_event_with_failover_in_place():
    result = get_user_info({"requestContext": {}})
    assert result == {"Username": "******", "Sub": "N/A"}