import os
import uuid

from pytest import raises

# TestingImage / ExampleImage (image entity classes), utcnow,
# sample_images_dir, remove_query, and the AuthMechanismError exception
# are assumed to be provided by the surrounding test suite and store
# implementation; they are not defined in these examples.


def test_s3_store(prefix, public_base_url, s3_store_getter):
    s3 = s3_store_getter(prefix=prefix, public_base_url=public_base_url)
    thing_id = uuid.uuid1().int
    image = TestingImage(thing_id=thing_id,
                         width=405,
                         height=640,
                         mimetype='image/jpeg',
                         original=True,
                         created_at=utcnow())
    image_path = os.path.join(sample_images_dir, 'iu.jpg')
    with open(image_path, 'rb') as image_file:
        expected_data = image_file.read()
        image_file.seek(0)  # rewind so the store receives the whole stream
        s3.store(image, image_file)
    # The stored object must round-trip byte-for-byte
    with s3.open(image) as actual:
        actual_data = actual.read()
    assert expected_data == actual_data
    key_args = 'testing', thing_id, 405, 640, 'image/jpeg'
    if public_base_url:
        expected_url = (public_base_url.rstrip('/') + '/' +
                        s3.get_key(*key_args))
    else:
        expected_url = s3.get_url(*key_args)
    actual_url = s3.locate(image)
    # Compare without query strings; presigned URLs carry auth parameters
    assert remove_query(expected_url) == remove_query(actual_url)
    if prefix:
        # A store built without the prefix must not see the prefixed image
        no_prefix = s3_store_getter()
        with raises(IOError):
            no_prefix.open(image)
    s3.delete(image)
    with raises(IOError):
        s3.open(image)
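remove_query is not defined in these examples; it is assumed to be a small test helper that strips the query string so that presigned URLs, whose signature parameters vary per request, can be compared by scheme, host, and path alone. A minimal sketch of such a helper:

from urllib.parse import urlsplit, urlunsplit


def remove_query(url):
    # Drop the query string (and fragment) so that a presigned URL and a
    # public URL for the same object compare equal by path.
    scheme, netloc, path, _, _ = urlsplit(url)
    return urlunsplit((scheme, netloc, path, '', ''))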
A variant of the same test that additionally exercises regions which only support the AWS4Auth signing mechanism:
def test_s3_store(prefix, public_base_url, s3_store_getter):
    s3 = s3_store_getter(prefix=prefix, public_base_url=public_base_url)
    thing_id = uuid.uuid1().int
    image = ExampleImage(thing_id=thing_id,
                         width=405,
                         height=640,
                         mimetype='image/jpeg',
                         original=True,
                         created_at=utcnow())
    image_path = os.path.join(sample_images_dir, 'iu.jpg')
    with open(image_path, 'rb') as image_file:
        expected_data = image_file.read()
        image_file.seek(0)
        s3.store(image, image_file)
    with s3.open(image) as actual:
        actual_data = actual.read()
    assert expected_data == actual_data
    key_args = 'testing', thing_id, 405, 640, 'image/jpeg'
    if public_base_url:
        expected_url = (public_base_url.rstrip('/') + '/' +
                        s3.get_key(*key_args))
    else:
        expected_url = s3.get_url(*key_args)
    actual_url = s3.locate(image)
    assert remove_query(expected_url) == remove_query(actual_url)
    if prefix:
        no_prefix = s3_store_getter()
        with raises(IOError):
            no_prefix.open(image)
    if s3.region not in (None, 'us-east-1', 'us-west-1', 'us-west-2',
                         'ap-southeast-1', 'ap-southeast-2', 'ap-northeast-1',
                         'eu-west-1', 'sa-east-1'):
        # Case when the region only supports AWS4Auth
        # https://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
        s3_aws2auth = s3_store_getter(prefix=prefix,
                                      public_base_url=public_base_url)
        s3_aws2auth.region = None  # Use AWS2Auth
        with raises(AuthMechanismError):
            with s3_aws2auth.open(image):
                pass
    s3.delete(image)
    with raises(IOError):
        s3.open(image)
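The prefix, public_base_url, and s3_store_getter arguments (and the s3_sandbox_store_getter used below) are pytest fixtures provided by the surrounding test suite and are not shown here. A rough conftest-style sketch of what they might look like; the environment variable, the parameter values, and the make_s3_store factory are hypothetical placeholders rather than the project's actual fixtures:

import os

import pytest


@pytest.fixture(params=['', 'some-prefix/'])
def prefix(request):
    # Run every test once without and once with a key prefix.
    return request.param


@pytest.fixture(params=[None, 'https://cdn.example.com'])
def public_base_url(request):
    # Run every test with and without a CDN-style public base URL.
    return request.param


@pytest.fixture
def s3_store_getter():
    bucket = os.environ.get('S3_TEST_BUCKET')  # hypothetical env var
    if bucket is None:
        pytest.skip('set S3_TEST_BUCKET to run the live S3 store tests')

    def getter(**kwargs):
        # make_s3_store is a hypothetical stand-in for constructing the
        # S3-backed store class that these tests actually exercise.
        return make_s3_store(bucket, **kwargs)

    return getter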
def test_s3_sandbox_store(underlying_prefix, overriding_prefix,
                          s3_sandbox_store_getter):
    s3 = s3_sandbox_store_getter(underlying_prefix=underlying_prefix,
                                 overriding_prefix=overriding_prefix)
    under = s3.underlying
    over = s3.overriding
    id_offset = uuid.uuid1().int
    if id_offset % 2:  # make the offset even
        id_offset -= 1
    if not underlying_prefix:
        # Negate the offset so runs with and without a prefix use disjoint ids
        id_offset *= -1
    # Store a fixture image directly in the underlying store
    under_id = id_offset + 1
    under_image = TestingImage(thing_id=under_id,
                               width=405,
                               height=640,
                               mimetype='image/jpeg',
                               original=True,
                               created_at=utcnow())
    image_path = os.path.join(sample_images_dir, 'iu.jpg')
    with open(image_path, 'rb') as image_file:
        expected_data = image_file.read()
        image_file.seek(0)
        under.store(under_image, image_file)
    # Underlying images must be visible through the sandbox
    with s3.open(under_image) as actual:
        actual_data = actual.read()
    assert expected_data == actual_data
    expected_url = under.get_url('testing', under_id, 405, 640, 'image/jpeg')
    actual_url = s3.locate(under_image)
    assert remove_query(expected_url) == remove_query(actual_url)
    # Store an image to sandbox store
    over_id = id_offset + 2
    image = TestingImage(thing_id=over_id,
                         width=405,
                         height=640,
                         mimetype='image/jpeg',
                         original=True,
                         created_at=utcnow())
    image_path = os.path.join(sample_images_dir, 'iu.jpg')
    with open(image_path, 'rb') as image_file:
        s3.store(image, image_file)
    # The image must be readable back through the sandbox
    with s3.open(image) as actual:
        actual_data = actual.read()
    assert expected_data == actual_data
    expected_url = over.get_url('testing', over_id, 405, 640, 'image/jpeg')
    actual_url = s3.locate(image)
    assert remove_query(expected_url) == remove_query(actual_url)
    # The image must be physically stored in the overriding store
    with over.open(image) as actual:
        actual_data = actual.read()
    assert expected_data == actual_data
    expected_url = over.get_url('testing', over_id, 405, 640, 'image/jpeg')
    actual_url = s3.locate(image)
    assert remove_query(expected_url) == remove_query(actual_url)
    # The image must not be physically stored in the underlying store
    with raises(IOError):
        under.open(image)
    # Deletion through the sandbox must not touch the underlying store
    s3.delete(under_image)
    with raises(IOError):
        s3.open(under_image)
    with under.open(under_image) as actual:
        actual_data = actual.read()
    assert expected_data == actual_data
    # After a sandbox deletion, locate() resolves to the overriding store
    # even though the underlying object still exists
    expected_url = over.get_url('testing', under_id, 405, 640, 'image/jpeg')
    actual_url = s3.locate(under_image)
    assert remove_query(expected_url) == remove_query(actual_url)
    # A sandbox built without the prefixes must not see the sandboxed image
    if underlying_prefix and overriding_prefix:
        no_prefix = s3_sandbox_store_getter()
        with raises(IOError):
            no_prefix.open(image)
    # Clean up the fixture stored directly in the underlying store
    under.delete(under_image)
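The assertions above pin down the sandbox contract: reads fall back from the overriding store to the underlying one, writes go only to the overriding store, and a deletion masks an underlying image without removing it. A simplified, in-memory sketch of that contract, not the actual sandbox store implementation:

class SandboxStoreSketch:
    """Illustrative two-layer store: reads fall through to the underlying
    store, while writes and deletions only ever touch the overriding one.
    """

    def __init__(self, underlying, overriding):
        self.underlying = underlying
        self.overriding = overriding
        # Simplification: track sandbox deletions in memory so a masked
        # underlying image stops being readable through the sandbox.
        self._deleted = set()

    def _key(self, image):
        return image.thing_id, image.width, image.height, image.mimetype

    def store(self, image, data):
        # New images are written only to the overriding (sandbox) store.
        self.overriding.store(image, data)
        self._deleted.discard(self._key(image))

    def open(self, image):
        if self._key(image) in self._deleted:
            raise IOError('image was deleted in the sandbox')
        try:
            return self.overriding.open(image)
        except IOError:
            # Fall back to the read-only underlying store.
            return self.underlying.open(image)

    def locate(self, image):
        if self._key(image) in self._deleted:
            # Masked images still resolve to the overriding store.
            return self.overriding.locate(image)
        try:
            with self.overriding.open(image):
                pass
        except IOError:
            return self.underlying.locate(image)
        return self.overriding.locate(image)

    def delete(self, image):
        # Mask the image; the underlying copy is never removed.
        self._deleted.add(self._key(image))
        try:
            self.overriding.delete(image)
        except IOError:
            pass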