Example 1
import os
import uuid

import boto3

# copy_directory, delete_directory, and _upload_files are assumed to come
# from the module under test.


def test_copy_directory(request):
    session = boto3.session.Session(
        aws_access_key_id=os.getenv('LTD_KEEPER_TEST_AWS_ID'),
        aws_secret_access_key=os.getenv('LTD_KEEPER_TEST_AWS_SECRET'))
    s3 = session.resource('s3')
    bucket = s3.Bucket(os.getenv('LTD_KEEPER_TEST_BUCKET'))

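    # A fresh UUID prefix isolates this test run's objects in the shared bucket.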
    bucket_root = str(uuid.uuid4()) + '/'

    def cleanup():
        print("Cleaning up the bucket")
        delete_directory(os.getenv('LTD_KEEPER_TEST_BUCKET'), bucket_root,
                         os.getenv('LTD_KEEPER_TEST_AWS_ID'),
                         os.getenv('LTD_KEEPER_TEST_AWS_SECRET'))

    request.addfinalizer(cleanup)

    initial_paths = ['test1.txt', 'test2.txt', 'aa/test3.txt']
    new_paths = ['test4.txt', 'bb/test5.txt']

    # add old and new file sets
    _upload_files(initial_paths, bucket, bucket_root + 'a/', 'sample-key',
                  'max-age=3600', 'text/plain')
    _upload_files(new_paths, bucket, bucket_root + 'b/', 'sample-key',
                  'max-age=3600', 'text/plain')

    # copy files
    copy_directory(
        bucket_name=os.getenv('LTD_KEEPER_TEST_BUCKET'),
        src_path=bucket_root + 'b/',
        dest_path=bucket_root + 'a/',
        aws_access_key_id=os.getenv('LTD_KEEPER_TEST_AWS_ID'),
        aws_secret_access_key=os.getenv('LTD_KEEPER_TEST_AWS_SECRET'),
        surrogate_key='new-key',
        surrogate_control='max-age=31536000',
        cache_control='no-cache')

    # Test files in the a/ directory are from b/
    for obj in bucket.objects.filter(Prefix=bucket_root + 'a/'):
        bucket_path = os.path.relpath(obj.key, start=bucket_root + 'a/')
        assert bucket_path in new_paths
        # ensure correct metadata
        head = s3.meta.client.head_object(
            Bucket=os.getenv('LTD_KEEPER_TEST_BUCKET'), Key=obj.key)
        assert head['CacheControl'] == 'no-cache'
        assert head['ContentType'] == 'text/plain'
        assert head['Metadata']['surrogate-key'] == 'new-key'
        assert head['Metadata']['surrogate-control'] == 'max-age=31536000'

    # Test that a directory object exists
    bucket_paths = [
        obj.key for obj in bucket.objects.filter(Prefix=bucket_root + 'a')
    ]
    assert os.path.join(bucket_root, 'a') in bucket_paths
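The copy_directory helper itself is not shown in these examples. Below is a minimal sketch of what a compatible implementation might look like, inferred from the calls and assertions above; the function name and signature match the test, but the body is an illustration, not LTD Keeper's actual code. It assumes string-prefix overlap checks (the behavior exercised in Examples 2 and 3) and that the destination prefix is cleared before copying, since the test asserts that only copied files remain under a/.

import os

import boto3


def copy_directory(bucket_name, src_path, dest_path,
                   aws_access_key_id, aws_secret_access_key,
                   surrogate_key=None, cache_control=None,
                   surrogate_control=None):
    # Overlapping prefixes would make the copy self-referential;
    # fail fast before any AWS request is made.
    assert not dest_path.startswith(src_path)
    assert not src_path.startswith(dest_path)

    session = boto3.session.Session(
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key)
    s3 = session.resource('s3')
    bucket = s3.Bucket(bucket_name)

    # Clear the destination so only copied objects remain under it.
    bucket.objects.filter(Prefix=dest_path).delete()

    for obj in bucket.objects.filter(Prefix=src_path):
        dest_key = dest_path + os.path.relpath(obj.key, start=src_path)
        # Preserve the content type while replacing the other metadata.
        head = s3.meta.client.head_object(Bucket=bucket_name, Key=obj.key)
        s3.meta.client.copy_object(
            Bucket=bucket_name,
            Key=dest_key,
            CopySource={'Bucket': bucket_name, 'Key': obj.key},
            MetadataDirective='REPLACE',
            Metadata={'surrogate-key': surrogate_key or '',
                      'surrogate-control': surrogate_control or ''},
            CacheControl=cache_control or '',
            ContentType=head['ContentType'])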
Example 2
import pytest


def test_copy_dir_dest_in_src():
    """Test that copy_directory raises an AssertionError if the destination
    is inside the source.
    """
    with pytest.raises(AssertionError):
        copy_directory('example', 'src', 'src/dest', 'id', 'key')
Example 3
def test_copy_dir_src_in_dest():
    """Test that copy_directory fails raises an assertion error if source in
    destination.
    """
    with pytest.raises(AssertionError):
        copy_directory('example', 'dest/src', 'dest', 'id', 'key')
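Both guard tests pass the dummy credentials 'id' and 'key'; this only works because the overlap check runs before any network request. Under the string-prefix assumption in the sketch above, both calls trip the guard:

>>> 'src/dest'.startswith('src')    # destination inside source
True
>>> 'dest/src'.startswith('dest')   # source inside destination
True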