import json
import random
import uuid

import boto3
import pytest
from botocore.exceptions import ClientError

# The helpers and module-level resources used below (get_config,
# get_deletion_queue, running_job_exists, utc_timestamp, get_user_info,
# calculate_ddb_item_bytes, DecimalEncoder, s3, jobs_table, bucket_count,
# max_size_bytes, deletion_queue_bucket) are assumed to come from the
# surrounding module; mock_client is a pytest fixture patching the SSM client.


def test_it_handles_invalid_config(mock_client):
    mock_client.get_parameter.return_value = {
        "Parameter": {
            "Value": ""
        }
    }
    with pytest.raises(ValueError):
        get_config()
def process_handler(event, context):
    if running_job_exists():
        raise ValueError("There is already a job in progress")

    job_id = str(uuid.uuid4())
    config = get_config()
    deletion_queue_key = "jobs/{}/deletion_queue/data.json".format(job_id)
    item = {
        "Id": job_id,
        "Sk": job_id,
        "Type": "Job",
        "JobStatus": "QUEUED",
        "GSIBucket": str(random.randint(0, bucket_count - 1)),
        "CreatedAt": utc_timestamp(),
        "DeletionQueueBucket": deletion_queue_bucket,
        "DeletionQueueKey": deletion_queue_key,
        "DeletionQueueItemsSkipped": False,
        "CreatedBy": get_user_info(event),
        **{
            k: v
            for k, v in config.items() if k not in ["JobDetailsRetentionDays"]
        }
    }

    if int(config.get("JobDetailsRetentionDays", 0)) > 0:
        item["Expires"] = utc_timestamp(days=config["JobDetailsRetentionDays"])

    deletion_queue_items = {"DeletionQueueItems": []}
    for extended_deletion_queue_item in get_deletion_queue():
        deletion_item = {
            "DeletionQueueItemId": extended_deletion_queue_item["DeletionQueueItemId"],
            "MatchId": extended_deletion_queue_item["MatchId"],
            "DataMappers": extended_deletion_queue_item["DataMappers"],
        }
        deletion_queue_items["DeletionQueueItems"].append(deletion_item)

    obj = s3.Object(deletion_queue_bucket, deletion_queue_key)
    obj.put(Body=json.dumps(deletion_queue_items))
    jobs_table.put_item(Item=item)

    # After persisting the job to DynamoDB, expose only the match IDs of the
    # queued items in the API response.
    item["DeletionQueueItems"] = [
        queue_item["MatchId"]
        for queue_item in deletion_queue_items["DeletionQueueItems"]
    ]

    return {"statusCode": 202, "body": json.dumps(item, cls=DecimalEncoder)}
def test_it_retrieves_config(mock_client):
    mock_client.get_parameter.return_value = {
        "Parameter": {
            "Value": json.dumps({
                "AthenaConcurrencyLimit": 1,
                "DeletionTasksMaxNumber": 1,
                "QueryExecutionWaitSeconds": 1,
                "QueryQueueWaitSeconds": 1,
                "ForgetQueueWaitSeconds": 1,
            })
        }
    }
    resp = get_config()

    assert resp == {
        "AthenaConcurrencyLimit": 1,
        "DeletionTasksMaxNumber": 1,
        "QueryExecutionWaitSeconds": 1,
        "QueryQueueWaitSeconds": 1,
        "ForgetQueueWaitSeconds": 1,
    }
def process_handler(event, context):
    if running_job_exists():
        raise ValueError("There is already a job in progress")

    job_id = str(uuid.uuid4())
    config = get_config()
    item = {
        "Id": job_id,
        "Sk": job_id,
        "Type": "Job",
        "JobStatus": "QUEUED",
        "GSIBucket": str(random.randint(0, bucket_count - 1)),
        "CreatedAt": utc_timestamp(),
        "DeletionQueueItems": [],
        "DeletionQueueItemsSkipped": False,
        "CreatedBy": get_user_info(event),
        **{
            k: v
            for k, v in config.items() if k not in ["JobDetailsRetentionDays"]
        }
    }

    if int(config.get("JobDetailsRetentionDays", 0)) > 0:
        item["Expires"] = utc_timestamp(days=config["JobDetailsRetentionDays"])

    # DynamoDB caps items at 400 KB, so inline queue items only while the job
    # item stays under max_size_bytes; flag any overflow as skipped.
    item_size_bytes = calculate_ddb_item_bytes(item)

    for deletion_queue_item in get_deletion_queue():
        current_size_bytes = calculate_ddb_item_bytes(deletion_queue_item)
        if item_size_bytes + current_size_bytes < max_size_bytes:
            item["DeletionQueueItems"].append(deletion_queue_item)
            item_size_bytes += current_size_bytes
        else:
            item["DeletionQueueItemsSkipped"] = True
            break

    jobs_table.put_item(Item=item)

    return {"statusCode": 202, "body": json.dumps(item, cls=DecimalEncoder)}
def process_handler(event, context):
    if running_job_exists():
        raise ValueError("There is already a job in progress")

    job_id = str(uuid.uuid4())
    config = get_config()
    item = {
        "Id": job_id,
        "Sk": job_id,
        "Type": "Job",
        "JobStatus": "QUEUED",
        "GSIBucket": str(random.randint(0, bucket_count - 1)),
        "CreatedAt": utc_timestamp(),
        "CreatedBy": get_user_info(event),
        **{
            k: v
            for k, v in config.items() if k not in ["JobDetailsRetentionDays"]
        },
    }
    if int(config.get("JobDetailsRetentionDays", 0)) > 0:
        item["Expires"] = utc_timestamp(days=config["JobDetailsRetentionDays"])

    jobs_table.put_item(Item=item)

    return {"statusCode": 202, "body": json.dumps(item, cls=DecimalEncoder)}
def list_settings_handler(event, context):
    config = get_config()
    return {
        "statusCode": 200,
        "body": json.dumps({"Settings": config}, cls=DecimalEncoder)
    }
def test_it_handles_other_config_errors(mock_client):
    mock_client.get_parameter.side_effect = RuntimeError("oops!")
    with pytest.raises(RuntimeError):
        get_config()
def test_it_handles_config_not_found(mock_client):
    mock_client.get_parameter.side_effect = ClientError({}, "get_parameter")
    with pytest.raises(ClientError):
        get_config()
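
The config tests mock mock_client.get_parameter, which points at get_config reading a JSON document from SSM Parameter Store. A minimal sketch consistent with those tests; the client wiring and the parameter name are assumptions, not the project's actual values:

ssm_client = boto3.client("ssm")  # hypothetical wiring; patched in the tests


def get_config():
    resp = ssm_client.get_parameter(Name="/configuration")  # name is assumed
    value = resp["Parameter"]["Value"]
    if not value:
        raise ValueError("Invalid configuration: empty parameter value")
    return json.loads(value)

ClientError and RuntimeError raised by get_parameter propagate unchanged, which is exactly what the last two tests assert.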