コード例 #1
0
def get_test_session(config=None):
    """Return an AWS session for the integration tests.

    If *config* is None, fall back to the configuration stashed on the
    release module and use its ``testing.aws`` section.

    Raises:
        RuntimeError: if the stashed configuration lacks a ``testing`` or
            ``testing.aws`` section.
    """
    if config is None:
        assert release._config is not None
        # TODO(cmaloney): HACK. Stashing and pulling the config from release/__init__.py
        # is definitely not the right way to do this.

        if 'testing' not in release._config:
            raise RuntimeError("No testing section in configuration")

        if 'aws' not in release._config['testing']:
            raise RuntimeError("No testing.aws section in configuration")

        # BUG FIX: pass only the testing.aws section we just validated, not the
        # whole configuration object. The previous code checked for
        # testing.aws and then ignored it, handing the full config to
        # get_session.
        config = release._config['testing']['aws']

    # TODO(cmaloney): get_session shouldn't live in release.storage
    return release.call_matching_arguments(release.storage.aws.get_session, config, True)
コード例 #2
0
ファイル: aws.py プロジェクト: malnick/dcos
def get_test_session(config=None):
    """Return an AWS session for the integration tests.

    When no *config* is supplied, the configuration stashed on the release
    module is consulted and its ``testing.aws`` section is used.

    Raises:
        RuntimeError: if the stashed configuration has no ``testing`` or
            ``testing.aws`` section.
    """
    if config is not None:
        # Caller provided an explicit config; hand it straight through.
        # TODO(cmaloney): get_session shouldn't live in release.storage
        return release.call_matching_arguments(release.storage.aws.get_session, config, True)

    # TODO(cmaloney): HACK. Stashing and pulling the config from release/__init__.py
    # is definitely not the right way to do this.
    assert release._config is not None

    if 'testing' not in release._config:
        raise RuntimeError("No testing section in configuration")

    if 'aws' not in release._config['testing']:
        raise RuntimeError("No testing.aws section in configuration")

    config = release._config['testing']['aws']
    # TODO(cmaloney): get_session shouldn't live in release.storage
    return release.call_matching_arguments(release.storage.aws.get_session, config, True)
コード例 #3
0
def exercise_storage_provider(tmpdir, name, config):
    """Round-trip exercise of a storage provider: upload, copy, list, remove.

    Builds the provider via release.get_storage_provider_factory(name),
    creates a uniquely-named scratch path inside it, uploads bytes and a
    local file, copies them (including into new subdirectories), verifies
    listing and fetching (both via the provider API and via curl against the
    provider's public URL), then removes the scratch path.

    Args:
        tmpdir: pytest tmpdir fixture; used to stage a local file for upload.
        name: storage provider name handed to the factory.
        config: provider configuration forwarded to the factory via
            call_matching_arguments.
    """
    store = release.call_matching_arguments(release.get_storage_provider_factory(name), config)

    # Make a uniquely named test storage location, and try to upload / copy files
    # inside that location.
    test_id = uuid.uuid4().hex
    test_base_path = 'dcos-image-test-tmp/{}'.format(test_id)

    # We want to always disable caching and set content-type so that things work
    # right when debugging the tests.
    upload_extra_args = {
        'no_cache': True,
        'content_type': 'text/plain; charset=utf-8'
    }

    # Test we're starting with an empty test_base_path
    assert store.list_recursive(test_base_path) == set()

    # TODO(cmaloney): Add a test that uses different caching, content-type,
    # and checks that the caching of the url download location works properly
    # as well as the properties get carried through copies.

    # curl_fetch below concatenates store.url + path, so the URL must end in
    # a slash for the joined URL to be well-formed.
    assert store.url.endswith('/')

    def curl_fetch(path):
        # Fetch through the provider's public URL with curl, so we validate
        # that an external HTTP client sees the uploaded content.
        # NOTE(review): assumes the stored objects are publicly readable at
        # store.url + path — confirm against each provider's config.
        return subprocess.check_output([
            'curl',
            '--fail',
            '--location',
            '--silent',
            '--show-error',
            '--verbose',
            store.url + path])

    def get_path(path):
        # Scope every test object under the unique scratch directory.
        assert not path.startswith('/')
        return test_base_path + '/' + path

    def check_file(path, contents):
        # The store should be internally consistent / API return it exists now.
        assert store.exists(path)

        # We should be able to use the native fetching method.
        assert store.fetch(path) == contents

        # Other programs should be able to fetch with curl.
        assert curl_fetch(path) == contents

    def make_content(name):
        # Unique bytes per call so a stale object from a previous run can
        # never satisfy an assertion.
        return (name + " " + uuid.uuid4().hex).encode()

    try:
        # Test uploading bytes.
        upload_bytes = make_content("upload_bytes")
        upload_bytes_path = get_path('upload_bytes.txt')

        # Check if exists on non-existent object works
        assert not store.exists(upload_bytes_path)

        store.upload(
            upload_bytes_path,
            blob=upload_bytes,
            **upload_extra_args)
        check_file(upload_bytes_path, upload_bytes)

        # Test uploading the same bytes to a non-existent subdirectory of a subdirectory
        upload_bytes_dir_path = get_path("dir1/bar/upload_bytes2.txt")
        store.upload(
            upload_bytes_dir_path,
            blob=upload_bytes,
            **upload_extra_args)

        # Test uploading a local file.
        upload_file = make_content("upload_file")
        upload_file_path = get_path('upload_file.txt')
        upload_file_local = tmpdir.join('upload_file.txt')
        upload_file_local.write(upload_file)
        store.upload(
            upload_file_path,
            local_path=str(upload_file_local),
            **upload_extra_args)
        check_file(upload_file_path, upload_file)

        # Test copying uploaded bytes.
        copy_dest_path = get_path('copy_file.txt')
        store.copy(upload_bytes_path, copy_dest_path)
        check_file(copy_dest_path, upload_bytes)

        # Test copying an uploaded file to a subdirectory.
        copy_dest_path = get_path('new_dir/copy_path.txt')
        store.copy(upload_file_path, copy_dest_path)
        check_file(copy_dest_path, upload_file)

        # Check that listing all the files in the storage provider gives the list of
        # files we've uploaded / checked and only that list of files.
        assert store.list_recursive(test_base_path) == {
            get_path('upload_file.txt'),
            get_path('upload_bytes.txt'),
            get_path('dir1/bar/upload_bytes2.txt'),
            get_path('new_dir/copy_path.txt'),
            get_path('copy_file.txt')
        }

        # Check that cleanup removes everything
        store.remove_recursive(test_base_path)
        assert store.list_recursive(test_base_path) == set()
    finally:
        # Cleanup temp directory in storage provider as best as possible.
        # Runs even on the success path (a second, redundant removal) so a
        # failure anywhere above still leaves the bucket clean.
        store.remove_recursive(test_base_path)