Example #1
def test_empty_chunks_queued_for_deletion():
    chunk_cleanup_queue = FakeQueue()
    args = dict(base_args)
    args['context'] = StorageContext('nyc', None, chunk_cleanup_queue, None,
                                     None)

    swift = FakeSwiftStorage(**args)
    uuid, metadata = swift.initiate_chunked_upload()

    chunks = ['this', '', 'is', 'some', '', 'chunked', 'data', '']
    offset = 0
    for chunk in chunks:
        length = len(chunk)
        if length == 0:
            length = 1

        bytes_written, metadata, error = swift.stream_upload_chunk(
            uuid, offset, length, io.BytesIO(chunk), metadata)
        assert error is None
        assert len(chunk) == bytes_written
        offset += len(chunk)

    swift.complete_chunked_upload(uuid, 'somepath', metadata)
    assert ''.join(chunks) == swift.get_content('somepath')

    # Check the chunk deletion queue and ensure we have the last chunk queued.
    found = chunk_cleanup_queue.get()
    assert found is not None

    found2 = chunk_cleanup_queue.get()
    assert found2 is None
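
The FakeQueue used above is a test double defined elsewhere in the suite. A minimal in-memory sketch, assuming put/get semantics consistent with the assertions in the test (get() yields a queued message, then None once the queue is drained), might look like this; the put() signature here is a guess, not the suite's actual API:

class FakeQueue(object):
    def __init__(self):
        self._items = []

    def put(self, canonical_name_list, message, available_after=0):
        # Record the queued message; a real work queue would persist it.
        self._items.append(message)

    def get(self):
        # Return the next queued message, or None when the queue is empty.
        return self._items.pop(0) if self._items else None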
Example #2
def test_empty_chunks_queued_for_deletion():
    chunk_cleanup_queue = FakeQueue()
    args = dict(base_args)
    args["context"] = StorageContext("nyc", chunk_cleanup_queue, None, None)

    swift = FakeSwiftStorage(**args)
    uuid, metadata = swift.initiate_chunked_upload()

    chunks = [b"this", b"", b"is", b"some", b"", b"chunked", b"data", b""]
    offset = 0
    for chunk in chunks:
        length = len(chunk)
        if length == 0:
            length = 1

        bytes_written, metadata, error = swift.stream_upload_chunk(
            uuid, offset, length, io.BytesIO(chunk), metadata)
        assert error is None
        assert len(chunk) == bytes_written
        offset += len(chunk)

    swift.complete_chunked_upload(uuid, "somepath", metadata)
    assert b"".join(chunks) == swift.get_content("somepath")

    # Check the chunk deletion queue and ensure we have the last chunk queued.
    found = chunk_cleanup_queue.get()
    assert found is not None

    found2 = chunk_cleanup_queue.get()
    assert found2 is None
Example #3
def test_direct_download_no_ip(test_aws_ip, aws_ip_range_data,
                               ipranges_populated, app):
    ipresolver = IPResolver(app)
    context = StorageContext('nyc', None, None, config_provider, ipresolver)

    # Create a test bucket and put some test content.
    boto.connect_s3().create_bucket(_TEST_BUCKET)

    engine = CloudFrontedS3Storage(context, 'cloudfrontdomain', 'keyid',
                                   'test/data/test.pem', 'some/path',
                                   _TEST_BUCKET, _TEST_USER, _TEST_PASSWORD)
    engine.put_content(_TEST_PATH, _TEST_CONTENT)
    assert engine.exists(_TEST_PATH)
    assert 's3.amazonaws.com' in engine.get_direct_download_url(_TEST_PATH)
Example #4
def test_direct_download(
    test_aws_ip,
    test_empty_ip_range_cache,
    test_ip_range_cache,
    aws_ip_range_data,
    ipranges_populated,
    app,
):
    ipresolver = IPResolver(app)
    if ipranges_populated:
        ipresolver.sync_token = (
            test_ip_range_cache["sync_token"]
            if ipranges_populated
            else test_empty_ip_range_cache["sync_token"]
        )
        ipresolver.amazon_ranges = (
            test_ip_range_cache["all_amazon"]
            if ipranges_populated
            else test_empty_ip_range_cache["all_amazon"]
        )
        context = StorageContext("nyc", None, config_provider, ipresolver)

        # Create a test bucket and put some test content.
        boto3.client("s3").create_bucket(Bucket=_TEST_BUCKET)

        engine = CloudFrontedS3Storage(
            context,
            "cloudfrontdomain",
            "keyid",
            "test/data/test.pem",
            "some/path",
            _TEST_BUCKET,
            _TEST_USER,
            _TEST_PASSWORD,
        )
        engine.put_content(_TEST_PATH, _TEST_CONTENT)
        assert engine.exists(_TEST_PATH)

        # Request a direct download URL for a request from a known AWS IP, and ensure we are returned an S3 URL.
        assert "s3.amazonaws.com" in engine.get_direct_download_url(_TEST_PATH, test_aws_ip)

        if ipranges_populated:
            # Request a direct download URL for a request from a non-AWS IP, and ensure we are returned a CloudFront URL.
            assert "cloudfrontdomain" in engine.get_direct_download_url(_TEST_PATH, "1.2.3.4")
        else:
            # Request a direct download URL for a request from a non-AWS IP, but since IP Ranges isn't populated, we still
            # get back an S3 URL.
            assert "s3.amazonaws.com" in engine.get_direct_download_url(_TEST_PATH, "1.2.3.4")
Example #5
def test_direct_download_no_ip(test_aws_ip, aws_ip_range_data, ipranges_populated, app):
    ipresolver = IPResolver(app)
    context = StorageContext("nyc", None, config_provider, ipresolver)

    # Create a test bucket and put some test content.
    boto3.client("s3").create_bucket(Bucket=_TEST_BUCKET)

    engine = CloudFrontedS3Storage(
        context,
        "cloudfrontdomain",
        "keyid",
        "test/data/test.pem",
        "some/path",
        _TEST_BUCKET,
        _TEST_USER,
        _TEST_PASSWORD,
    )
    engine.put_content(_TEST_PATH, _TEST_CONTENT)
    assert engine.exists(_TEST_PATH)
    assert "s3.amazonaws.com" in engine.get_direct_download_url(_TEST_PATH)
Example #6
def test_cancel_chunked_upload():
    chunk_cleanup_queue = FakeQueue()

    args = dict(base_args)
    args["context"] = StorageContext("nyc", chunk_cleanup_queue, None, None)

    swift = FakeSwiftStorage(**args)
    uuid, metadata = swift.initiate_chunked_upload()

    chunks = [b"this", b"is", b"some", b"chunked", b"data", b""]
    offset = 0
    for chunk in chunks:
        bytes_written, metadata, error = swift.stream_upload_chunk(
            uuid, offset, len(chunk), io.BytesIO(chunk), metadata)
        assert error is None
        assert len(chunk) == bytes_written
        offset += len(chunk)

    swift.cancel_chunked_upload(uuid, metadata)

    found = chunk_cleanup_queue.get()
    assert found is not None
Example #7
def test_cancel_chunked_upload():
    chunk_cleanup_queue = FakeQueue()

    args = dict(base_args)
    args['context'] = StorageContext('nyc', None, chunk_cleanup_queue, None,
                                     None)

    swift = FakeSwiftStorage(**args)
    uuid, metadata = swift.initiate_chunked_upload()

    chunks = ['this', 'is', 'some', 'chunked', 'data', '']
    offset = 0
    for chunk in chunks:
        bytes_written, metadata, error = swift.stream_upload_chunk(
            uuid, offset, len(chunk), io.BytesIO(chunk), metadata)
        assert error is None
        assert len(chunk) == bytes_written
        offset += len(chunk)

    swift.cancel_chunked_upload(uuid, metadata)

    found = chunk_cleanup_queue.get()
    assert found is not None
Example #8
def test_direct_download(test_aws_ip, test_empty_ip_range_cache,
                         test_ip_range_cache, aws_ip_range_data,
                         ipranges_populated, app):
    ipresolver = IPResolver(app)
    if ipranges_populated:
        ipresolver.sync_token = (test_ip_range_cache['sync_token']
                                 if ipranges_populated
                                 else test_empty_ip_range_cache['sync_token'])
        ipresolver.amazon_ranges = (test_ip_range_cache['all_amazon']
                                    if ipranges_populated
                                    else test_empty_ip_range_cache['all_amazon'])
        context = StorageContext('nyc', None, None, config_provider,
                                 ipresolver)

        # Create a test bucket and put some test content.
        boto.connect_s3().create_bucket(_TEST_BUCKET)

        engine = CloudFrontedS3Storage(context, 'cloudfrontdomain', 'keyid',
                                       'test/data/test.pem', 'some/path',
                                       _TEST_BUCKET, _TEST_USER,
                                       _TEST_PASSWORD)
        engine.put_content(_TEST_PATH, _TEST_CONTENT)
        assert engine.exists(_TEST_PATH)

        # Request a direct download URL for a request from a known AWS IP, and ensure we are returned an S3 URL.
        assert 's3.amazonaws.com' in engine.get_direct_download_url(
            _TEST_PATH, test_aws_ip)

        if ipranges_populated:
            # Request a direct download URL for a request from a non-AWS IP, and ensure we are returned a CloudFront URL.
            assert 'cloudfrontdomain' in engine.get_direct_download_url(
                _TEST_PATH, '1.2.3.4')
        else:
            # Request a direct download URL for a request from a non-AWS IP, but since IP Ranges isn't populated, we still
            # get back an S3 URL.
            assert 's3.amazonaws.com' in engine.get_direct_download_url(
                _TEST_PATH, '1.2.3.4')
Example #9
import os

import boto
import pytest

from moto import mock_s3_deprecated as mock_s3

from app import storage as test_storage
from data import model, database
from data.logs_model import logs_model
from storage import S3Storage, StorageContext, DistributedStorage
from workers.exportactionlogsworker import ExportActionLogsWorker, POLL_PERIOD_SECONDS

from test.fixtures import *

_TEST_CONTENT = os.urandom(1024)
_TEST_BUCKET = "some_bucket"
_TEST_USER = "******"
_TEST_PASSWORD = "******"
_TEST_PATH = "some/cool/path"
_TEST_CONTEXT = StorageContext("nyc", None, None, None)


@pytest.fixture(params=["test", "mock_s3"])
def storage_engine(request):
    if request.param == "test":
        yield test_storage
    else:
        with mock_s3():
            # Create a test bucket and put some test content.
            boto.connect_s3().create_bucket(_TEST_BUCKET)
            engine = DistributedStorage(
                {
                    "foo":
                    S3Storage(_TEST_CONTEXT, "some/path", _TEST_BUCKET,
                              _TEST_USER, _TEST_PASSWORD)
Example #10
import _pyio as io
import pytest
import hashlib
import copy

from collections import defaultdict
from mock import MagicMock, patch

from swiftclient.client import ClientException, ReadableToIterable

from storage import StorageContext
from storage.swift import SwiftStorage, _EMPTY_SEGMENTS_KEY

base_args = {
    "context": StorageContext("nyc", None, None, None),
    "swift_container": "container-name",
    "storage_path": "/basepath",
    "auth_url": "https://auth.com",
    "swift_user": "******",
    "swift_password": "******",
}


class MockSwiftStorage(SwiftStorage):
    def __init__(self, *args, **kwargs):
        super(MockSwiftStorage, self).__init__(*args, **kwargs)
        self._connection = MagicMock()

    def _get_connection(self):
        return self._connection
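
Because MockSwiftStorage replaces the connection with a MagicMock, tests can assert on Swift API calls without a live cluster. A hypothetical usage, assuming SwiftStorage ultimately drives python-swiftclient's Connection.put_object():

def test_put_content_records_swift_call():
    swift = MockSwiftStorage(**base_args)
    swift.put_content("some/path", b"data")
    # The MagicMock connection records the underlying swiftclient call.
    assert swift._get_connection().put_object.called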
Example #11
import io
import pytest
import hashlib
import copy

from collections import defaultdict
from mock import MagicMock, patch

from storage import StorageContext
from storage.swift import SwiftStorage, _EMPTY_SEGMENTS_KEY
from swiftclient.client import ClientException

base_args = {
    'context': StorageContext('nyc', None, None, None, None),
    'swift_container': 'container-name',
    'storage_path': '/basepath',
    'auth_url': 'https://auth.com',
    'swift_user': '******',
    'swift_password': '******',
}


class MockSwiftStorage(SwiftStorage):
    def __init__(self, *args, **kwargs):
        super(MockSwiftStorage, self).__init__(*args, **kwargs)
        self._connection = MagicMock()

    def _get_connection(self):
        return self._connection

Example #12
import os

import boto
import pytest

from moto import mock_s3_deprecated as mock_s3

from app import storage as test_storage
from data import model, database
from data.logs_model import logs_model
from storage import S3Storage, StorageContext, DistributedStorage
from workers.exportactionlogsworker import ExportActionLogsWorker, POLL_PERIOD_SECONDS

from test.fixtures import *

_TEST_CONTENT = os.urandom(1024)
_TEST_BUCKET = 'some_bucket'
_TEST_USER = '******'
_TEST_PASSWORD = '******'
_TEST_PATH = 'some/cool/path'
_TEST_CONTEXT = StorageContext('nyc', None, None, None, None)


@pytest.fixture(params=['test', 'mock_s3'])
def storage_engine(request):
    if request.param == 'test':
        yield test_storage
    else:
        with mock_s3():
            # Create a test bucket and put some test content.
            boto.connect_s3().create_bucket(_TEST_BUCKET)
            engine = DistributedStorage(
                {
                    'foo': S3Storage(_TEST_CONTEXT, 'some/path', _TEST_BUCKET,
                                     _TEST_USER, _TEST_PASSWORD)