Example 1
def test_write_json_s3():
    bucket = 'test-bucket'
    prefix = 'test-prefix/'
    base_filename = 'test'

    content = {
        "it-IT": ["*****@*****.**"]
    }

    conn = boto3.resource('s3', region_name='us-west-2')
    bucket_obj = conn.create_bucket(Bucket=bucket)

    # Store the data in the mocked bucket.
    taar_utils.store_json_to_s3(json.dumps(content), base_filename,
                                '20171106', prefix, bucket)

    # Get the content of the bucket.
    available_objects = list(bucket_obj.objects.filter(Prefix=prefix))
    assert len(available_objects) == 2

    # Get the list of keys.
    keys = [o.key for o in available_objects]
    assert "{}{}.json".format(prefix, base_filename) in keys
    date_filename = "{}{}20171106.json".format(prefix, base_filename)
    assert date_filename in keys
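The test above assumes moto's S3 mock is already active (and that json, boto3 and taar_utils are imported elsewhere in the module). A minimal, self-contained sketch of the same pattern, with moto's mock_s3 applied as a decorator, is shown below; the bucket and key names are copied from the test and the direct call at the end is only for illustration:

import boto3
from moto import mock_s3


@mock_s3  # every boto3 S3 call inside runs against an in-memory fake
def smoke_test_mocked_s3():
    conn = boto3.resource('s3', region_name='us-west-2')
    conn.create_bucket(
        Bucket='test-bucket',
        CreateBucketConfiguration={'LocationConstraint': 'us-west-2'},
    )
    # Upload one object and check it is visible under the prefix.
    conn.Object('test-bucket', 'test-prefix/test.json').put(Body=b'{}')
    keys = [o.key for o in conn.Bucket('test-bucket').objects.filter(Prefix='test-prefix/')]
    assert keys == ['test-prefix/test.json']


smoke_test_mocked_s3()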
Example 2
def test_write_json_s3():
    bucket = "test-bucket"
    prefix = "test-prefix/"
    base_filename = "test"

    content = {"it-IT": ["*****@*****.**"]}

    conn = boto3.resource("s3", region_name="us-west-2")
    bucket_obj = conn.create_bucket(
        Bucket=bucket,
        CreateBucketConfiguration={
            "LocationConstraint": "us-west-2",
        },
    )

    # Store the data in the mocked bucket.
    taar_utils.store_json_to_s3(
        json.dumps(content), base_filename, "20171106", prefix, bucket
    )

    # Get the content of the bucket.
    available_objects = list(bucket_obj.objects.filter(Prefix=prefix))
    assert len(available_objects) == 2

    # Get the list of keys.
    keys = [o.key for o in available_objects]
    assert "{}{}.json".format(prefix, base_filename) in keys
    date_filename = "{}{}20171106.json".format(prefix, base_filename)
    assert date_filename in keys
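The two key assertions imply that store_json_to_s3 uploads the payload twice: once under a stable "<prefix><base>.json" key and once under a date-stamped "<prefix><base><date>.json" key. A hypothetical implementation consistent with those assertions (a sketch, not the actual taar_utils code) could look like this:

import boto3


def store_json_to_s3_sketch(json_data, base_filename, date, prefix, bucket):
    # Sketch only: upload the same JSON blob under a dated key and a
    # "latest" key, matching the keys asserted in the tests above.
    s3 = boto3.resource("s3")
    body = json_data.encode("utf-8")
    # e.g. test-prefix/test20171106.json
    s3.Object(bucket, "{}{}{}.json".format(prefix, base_filename, date)).put(Body=body)
    # e.g. test-prefix/test.json
    s3.Object(bucket, "{}{}.json".format(prefix, base_filename)).put(Body=body)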
Example 3
def s3_fixture():
    mock_s3().start()

    conn = boto3.resource('s3', region_name='us-west-2')
    conn.create_bucket(Bucket=taar_amowhitelist.AMO_DUMP_BUCKET)
    taar_utils.store_json_to_s3(json.dumps(SAMPLE_DATA),
                                taar_amowhitelist.AMO_DUMP_BASE_FILENAME,
                                '20171106', taar_amowhitelist.AMO_DUMP_PREFIX,
                                taar_amowhitelist.AMO_DUMP_BUCKET)
    yield conn, SAMPLE_DATA
    mock_s3().stop()
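The fixture above is a plain generator; in the original test module it is presumably registered with pytest so that tests can request it by name. A hedged sketch of how such a yield fixture is typically declared and consumed (SAMPLE_DATA, the bucket name and the test are illustrative stand-ins):

import boto3
import pytest
from moto import mock_s3

SAMPLE_DATA = {'guid-1': {'name': 'sample add-on'}}  # illustrative stand-in


@pytest.fixture
def s3_fixture():
    mock = mock_s3()
    mock.start()
    conn = boto3.resource('s3', region_name='us-east-1')
    conn.create_bucket(Bucket='example-bucket')  # hypothetical bucket name
    yield conn, SAMPLE_DATA
    mock.stop()


def test_fixture_yields_connection_and_data(s3_fixture):
    conn, sample_data = s3_fixture
    # The bucket created in the fixture is visible through the mocked resource.
    assert [b.name for b in conn.buckets.all()] == ['example-bucket']
    assert sample_data == SAMPLE_DATA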
Example 4
def load_s3(result_df, date, prefix, bucket):
    result_list = result_df.collect()
    result_json = {}

    for row in result_list:
        key_addon = row.key_addon
        coinstalls = row.coinstallation_counts
        value_json = {}
        for _id, n in coinstalls:
            value_json[_id] = n
        result_json[key_addon] = value_json

    taar_utils.store_json_to_s3(json.dumps(result_json, indent=2),
                                OUTPUT_BASE_FILENAME, date, prefix, bucket)
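load_s3 expects rows carrying a key_addon field and a coinstallation_counts field of (addon_id, count) pairs, and the loop flattens them into a dict of dicts before serializing. A small illustration of that reshaping, with plain namedtuples standing in for Spark rows and made-up data:

import json
from collections import namedtuple

Row = namedtuple("Row", ["key_addon", "coinstallation_counts"])

result_list = [
    Row("addon-a", [("addon-b", 3), ("addon-c", 1)]),
    Row("addon-b", [("addon-a", 3)]),
]

result_json = {}
for row in result_list:
    # {coinstalled addon id -> coinstall count} for each key addon
    result_json[row.key_addon] = {_id: n for _id, n in row.coinstallation_counts}

# result_json == {"addon-a": {"addon-b": 3, "addon-c": 1}, "addon-b": {"addon-a": 3}}
print(json.dumps(result_json, indent=2))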
Example 5
def s3_fixture():
    s3 = mock_s3()
    s3.start()

    conn = boto3.resource("s3", region_name="us-west-2")
    conn.create_bucket(
        Bucket=taar_amowhitelist.AMO_DUMP_BUCKET,
        CreateBucketConfiguration={
            "LocationConstraint": "us-west-2",
        },
    )
    taar_utils.store_json_to_s3(
        json.dumps(SAMPLE_DATA),
        taar_amowhitelist.AMO_DUMP_BASE_FILENAME,
        "20171106",
        taar_amowhitelist.AMO_DUMP_PREFIX,
        taar_amowhitelist.AMO_DUMP_BUCKET,
    )
    yield conn, SAMPLE_DATA
    s3.stop()
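Compared with the earlier fixture, this version keeps a single mock_s3() instance, so stop() is called on the same object that was started, and it passes CreateBucketConfiguration, which S3 requires when a bucket is created outside us-east-1. The same pattern, reduced to its essentials (bucket name is illustrative):

import boto3
from moto import mock_s3

mock = mock_s3()
mock.start()
try:
    s3 = boto3.resource("s3", region_name="us-west-2")
    # Outside us-east-1, bucket creation needs an explicit location constraint.
    s3.create_bucket(
        Bucket="example-bucket",
        CreateBucketConfiguration={"LocationConstraint": "us-west-2"},
    )
    print([b.name for b in s3.buckets.all()])
finally:
    mock.stop()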
Example 6
def load_s3(result_data, date, prefix, bucket):
    taar_utils.store_json_to_s3(json.dumps(result_data, indent=2),
                                OUTPUT_BASE_FILENAME, date, prefix, bucket)