Example #1
def test_log_with_various_messages(api):
    '''
    Test :py:meth:`~datafs.core.data_archive.DataArchive.log` stability

    Addresses :issue:`232` - log fails on versions with no message
    '''

    arch = api.create('test/archive1.txt')

    arch.log()

    with arch.open('w+') as f:
        f.write(u('hello 1'))

    arch.log()

    with arch.open('w+', message='hello') as f:
        f.write(u('hello 2'))

    arch.log()

    with arch.open('w+', message=4) as f:
        f.write(u('hello 3'))

    arch.log()

    with arch.open('w+', message=lambda x: x**2) as f:
        f.write(u('hello 4'))

    arch.log()

    with arch.open('w+', message=None) as f:
        f.write(u('hello 5'))

    arch.log()
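
Every snippet on this page relies on a u() text helper that is defined outside the excerpts shown here. A minimal sketch of the behaviour the calls above assume (an illustration, not the project's actual compat code):

def u(value):
    # Hypothetical stand-in for the u() helper these snippets assume.
    # Return value as a text (unicode) string; bytes are decoded as UTF-8.
    if isinstance(value, bytes):
        return value.decode('utf-8')
    return '{}'.format(value)
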
Example #2
def test_archive_creation_with_multiple_authorities(api_dual_auth):
    '''
    Addresses :issue:`220`
    '''

    arch = api_dual_auth.create('auth1://my/new/archive.txt')

    try:
        # Make sure we can get the archive:
        arch1 = api_dual_auth.get_archive('my/new/archive.txt')
        assert arch1 == arch

        # Make sure str/unicode isn't a problem:
        arch2 = api_dual_auth.get_archive(u('my/new/archive.txt'))
        assert arch2 == arch

        # Make sure leading slash isn't a problem:
        arch3 = api_dual_auth.get_archive('/my/new/archive.txt')
        assert arch3 == arch

        # Make sure authority name matters
        with pytest.raises(ValueError):
            api_dual_auth.get_archive('auth2://my/new/archive.txt')

        # Make sure authority name prefix isn't a problem:
        arch4 = api_dual_auth.get_archive('auth1://my/new/archive.txt')
        assert arch4 == arch

        # Make sure bad authority names are a problem:
        with pytest.raises(ValueError):
            api_dual_auth.get_archive('non_authority://my/new/archive.txt')

    finally:
        arch.delete()
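
The api_dual_auth fixture is not shown in these excerpts. A plausible sketch of how such a fixture could be built, assuming a base api fixture and pyfilesystem temporary filesystems; the fixture body, the TempFS choice, and the authority names are assumptions inferred from the other snippets on this page:

import pytest
from fs.tempfs import TempFS


@pytest.fixture
def api_dual_auth(api):
    # Illustrative sketch only; not the project's conftest code.
    # Attach two temporary-filesystem authorities so that both
    # 'auth1://...' and 'auth2://...' paths resolve.
    api.attach_authority('auth1', TempFS())
    api.attach_authority('auth2', TempFS())
    return api
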
Example #3
def test_archive_creation_with_authority(api):
    '''
    Addresses :issue:`220`
    '''

    arch = api.create('filesys://my/new/archive.txt')

    try:
        # Make sure we can get the archive:
        arch1 = api.get_archive('filesys://my/new/archive.txt')
        assert arch1 == arch

        # Make sure str/unicode isn't a problem:
        arch2 = api.get_archive(u('my/new/archive.txt'))
        assert arch2 == arch

        # Make sure leading slash isn't a problem:
        arch3 = api.get_archive('/my/new/archive.txt')
        assert arch3 == arch

        # Make sure authority name isn't a problem:
        arch4 = api.get_archive('filesys://my/new/archive.txt')
        assert arch4 == arch

        # Make sure the wrong authority name is a problem:
        with pytest.raises(ValueError):
            api.get_archive('localhost://my/new/archive.txt')

    finally:
        arch.delete()
Example #4
def test_archive_creation_naming_conventions(api):
    '''
    Addresses :issue:`220`
    '''

    arch = api.create('my/new/archive.txt')

    try:
        # Make sure we can get the archive:
        arch1 = api.get_archive('my/new/archive.txt')
        assert arch1 == arch

        # Make sure str/unicode isn't a problem:
        arch2 = api.get_archive(u('my/new/archive.txt'))
        assert arch2 == arch

        # Make sure leading slash isn't a problem:
        arch3 = api.get_archive('/my/new/archive.txt')
        assert arch3 == arch

        # Make sure authority name isn't a problem:
        arch4 = api.get_archive('filesys://my/new/archive.txt')
        assert arch4 == arch

    finally:
        arch.delete()
Example #5
def test_versioned_fs_functions(api1, auth2, opener):

    api1.attach_authority('auth', auth2)

    archive = api1.create('fs_funcs_test_archive')

    assert not archive.isfile()
    assert not archive.hasmeta()
    assert not archive.exists()
    assert archive.desc() is None

    with pytest.raises(ResourceNotFoundError):
        assert archive.getinfo()

    with pytest.raises(NoMetaError):
        assert archive.getmeta()

    with opener(archive, 'w+') as f:
        f.write(u('test content v0.0.1'))
Example #6
def update(
        ctx,
        archive_name,
        bumpversion='patch',
        prerelease=None,
        dependency=None,
        string=False,
        file=None):
    '''
    Update an archive with new contents
    '''

    _generate_api(ctx)

    args, kwargs = _parse_args_and_kwargs(ctx.args)
    assert len(args) == 0, 'Unrecognized arguments: "{}"'.format(args)

    dependencies_dict = _parse_dependencies(dependency)

    var = ctx.obj.api.get_archive(archive_name)
    latest_version = var.get_latest_version()

    if string:

        with var.open(
                'w+',
                bumpversion=bumpversion,
                prerelease=prerelease,
                dependencies=dependencies_dict,
                metadata=kwargs) as f:

            if file is None:
                for line in sys.stdin:
                    f.write(u(line))
            else:
                f.write(u(file))

    else:
        if file is None:
            file = click.prompt('enter filepath')

        var.update(
            file,
            bumpversion=bumpversion,
            prerelease=prerelease,
            dependencies=dependencies_dict,
            metadata=kwargs)

    new_version = var.get_latest_version()

    if latest_version is None and new_version is not None:
        bumpmsg = ' new version {} created.'.format(
            new_version)

    elif new_version != latest_version:
        bumpmsg = ' version bumped {} --> {}.'.format(
            latest_version, new_version)

    elif var.versioned:
        bumpmsg = ' version remains {}.'.format(latest_version)
    else:
        bumpmsg = ''

    click.echo('uploaded data to {}.{}'.format(var, bumpmsg))
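
For reference, this command can be exercised the same way the CLI log test at the bottom of this page does, through click's test runner. The subcommand name and option spellings ('update', '--bumpversion', '--string') are inferred from the function signature and are assumptions, as is the config path; cli is the top-level click group used in that later test.

from click.testing import CliRunner

# Option spellings and the config path below are assumptions, not the
# project's documented CLI.
runner = CliRunner()
result = runner.invoke(
    cli,
    ['--config-file', 'datafs.yml', '--profile', 'myapi',
     'update', 'my/new/archive.txt', '--bumpversion', 'minor', '--string'],
    input='new contents\n')

assert result.exit_code == 0
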
Example #7
def test_multi_api(api1, api2, auth1, cache1, cache2, opener):
    '''
    Test upload/download/cache operations with two users
    '''

    # Create two separate users. Each user connects to the
    # same authority and the same manager table (apis are
    # initialized with the same manager table but different
    # manager instance objects). Each user has its own
    # separate cache.

    api1.attach_authority('auth1', auth1)
    api1.attach_cache(cache1)

    api2.attach_authority('auth1', auth1)
    api2.attach_cache(cache2)

    archive1 = api1.create('myArchive', versioned=False)

    # Turn on caching in archive 1 and assert creation

    archive1.cache()
    assert archive1.is_cached() is True
    assert archive1.api.cache.fs is cache1
    assert cache1.isfile('myArchive')

    with cache1.open('myArchive', 'r') as f1:
        assert u(f1.read()) == u('')

    with opener(archive1, 'w+') as f1:
        f1.write(u('test1'))

    assert auth1.isfile('myArchive')
    assert cache1.isfile('myArchive')

    archive2 = api2.get_archive('myArchive')

    # Turn on caching in archive 2 and assert creation

    archive2.cache()
    assert archive2.is_cached() is True
    assert archive2.api.cache.fs is cache2
    assert cache2.isfile('myArchive')

    # Since we have not yet read from the authority, the
    # cache version has been 'touched' but is not up to date
    # with the archive's contents.

    with cache2.open('myArchive', 'r') as f2:
        assert u(f2.read()) == u('')

    # When we open the archive, the data is correct:

    with opener(archive2, 'r') as f2:
        assert u(f2.read()) == u('test1')

    # Furthermore, the cache has been updated:

    with cache2.open('myArchive', 'r') as f2:
        assert u(f2.read()) == u('test1')

    # If cached, the file is downloaded on open, then read
    # from the cache. Therefore, if the file is modified
    # by another user after the first user has downloaded
    # to the cache, a stale copy will be served.
    # No good way to guard against this since the file has
    # already been opened for reading. A lock on new writes
    # would not solve the problem, since that would have an
    # identical result.

    with opener(archive1, 'r') as f1:
        with opener(archive2, 'w+') as f2:
            f2.write(u('test2'))

        assert u(f1.read()) == u('test1')

    with opener(archive1, 'r') as f1:
        assert u(f1.read()) == u('test2')

    # Turn off caching in archive 2, and do the same test.
    # We expect the same result because the cached file is
    # already open in archive 1

    archive2.remove_from_cache()

    assert archive1.api.cache.fs.isfile('myArchive')
    assert not archive2.api.cache.fs.isfile('myArchive')

    with opener(archive1, 'r') as f1:
        with opener(archive2, 'w+') as f2:
            f2.write(u('test3'))

        assert u(f1.read()) == u('test2')

    with opener(archive1, 'r') as f1:
        assert u(f1.read()) == u('test3')

    # Turn off caching in archive 1, and do the same test.
    # This time, we expect the change made in archive 2 to
    # be reflected in archive 1 because both are reading
    # and writing from the same authority.

    archive1.remove_from_cache()

    assert not archive1.api.cache.fs.isfile('myArchive')
    assert not archive2.api.cache.fs.isfile('myArchive')

    # NOTE: Here, archive 1 uses the method
    #       `archive.open('r')` explicitly. This test would
    #       not pass on
    #       `open(archive.get_local_path(), 'r')`, which is
    #       tested below.

    with archive1.open('r') as f1:
        with opener(archive2, 'w+') as f2:
            f2.write(u('test4'))

        assert u(f1.read()) == u('test4')

    with opener(archive1, 'r') as f1:
        assert u(f1.read()) == u('test4')

    # NOTE: Here, archive 1 uses the method
    #       `archive.get_local_path('r')` explicitly. This
    #       test would not pass on
    #       `archive.open('r')`, which is tested above.

    with archive1.get_local_path() as fp1:
        with open(fp1, 'r') as f1:
            with opener(archive2, 'w+') as f2:
                f2.write(u('test5'))

            assert u(f1.read()) == u('test4')

    with opener(archive1, 'r') as f1:
        assert u(f1.read()) == u('test5')

    # If we begin reading from the file, the file is locked,
    # and changes are not made until after the file has been
    # closed.

    first_char = u('t').encode('utf-8')

    with opener(archive1, 'r') as f1:
        f1.read(len(first_char))

        with opener(archive2, 'w+') as f2:
            f2.write(u('test6'))

        assert u(f1.read()) == u('est5')

    with opener(archive1, 'r') as f1:
        assert u(f1.read()) == u('test6')

    # This prevents problems in simultaneous read/write by
    # different parties. If someone is in the middle of
    # reading a file that is currently being written to,
    # they will not get garbage.

    test_str_1 = u('1234567890')
    test_str_2 = u('abcdefghij')

    str_length = len(test_str_1.encode('utf-8'))
    assert str_length == len(test_str_2.encode('utf-8'))

    with opener(archive1, 'w+') as f1:
        f1.write(test_str_1)

    with opener(archive1, 'r') as f1:

        assert len(archive1.get_history()) == 7
        assert u('12345') == u(f1.read(str_length // 2))

        with opener(archive2, 'w+') as f2:
            f2.write(test_str_2)

        assert len(archive1.get_history()) == 8
        assert u('67890') == u(f1.read())
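
The NOTE comments in this test distinguish between archive.open and open(archive.get_local_path()); the opener fixture used throughout these tests presumably parametrizes over both access paths. A rough sketch of that idea; the fixture name comes from the snippets, but the parametrization and kwarg handling are assumptions, not the project's conftest code:

import contextlib
import pytest


@pytest.fixture(params=['open', 'get_local_path'])
def opener(request):
    # Sketch only; the real fixture may differ.

    if request.param == 'open':

        @contextlib.contextmanager
        def _opener(archive, mode, *args, **kwargs):
            with archive.open(mode, *args, **kwargs) as f:
                yield f

    else:

        @contextlib.contextmanager
        def _opener(archive, mode, *args, **kwargs):
            with archive.get_local_path(*args, **kwargs) as fp:
                with open(fp, mode) as f:
                    yield f

    return _opener
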
Example #8
def test_version_tracking(api1, auth1, opener):

    api1.attach_authority('auth', auth1)

    archive = api1.create('test_versioned_archive')

    assert archive.versioned, "Archive not versioned, but should be by default"

    assert archive.get_latest_version() is None
    assert len(archive.get_versions()) == 0
    assert archive.get_latest_hash() is None
    assert archive.get_version_hash() is None

    with opener(archive, 'w+', prerelease='alpha') as f:
        f.write(u('test content v0.0.1a1'))

    assert archive.get_latest_version() == '0.0.1a1'
    assert len(archive.get_versions()) == 1
    assert archive.get_latest_hash() is not None
    assert archive.get_version_hash('0.0.1a1') == archive.get_latest_hash()

    with opener(archive, 'r') as f:
        assert u(f.read()) == u('test content v0.0.1a1')

    with opener(archive, 'w+', bumpversion='minor', prerelease='beta') as f:
        f.write(u('test content v0.1b1'))

    assert archive.get_latest_version() == '0.1b1'
    assert len(archive.get_versions()) == 2
    assert archive.get_latest_hash() is not None
    assert archive.get_version_hash('0.0.1a1') != archive.get_latest_hash()
    assert archive.get_version_hash('0.1b1') == archive.get_latest_hash()

    with opener(archive, 'r') as f:
        assert u(f.read()) == u('test content v0.1b1')

    with opener(archive, 'r', version='0.0.1a1') as f:
        assert u(f.read()) == u('test content v0.0.1a1')

    with opener(archive, 'a', version='0.0.1a1') as f:
        f.write(u(' --> v0.1.1'))

    assert archive.get_latest_version() == '0.1.1'
    assert len(archive.get_versions()) == 3
    assert archive.get_latest_hash() is not None
    assert archive.get_version_hash('0.0.1a1') != archive.get_latest_hash()
    assert archive.get_version_hash('0.1.1') == archive.get_latest_hash()

    with opener(archive, 'r', version='0.0.1a1') as f:
        assert u(f.read()) == u('test content v0.0.1a1')

    with opener(archive, 'r') as f:
        assert u(f.read()) == u('test content v0.0.1a1 --> v0.1.1')

    with pytest.raises(ValueError) as excinfo:
        archive.get_version_hash('2.0')

    assert 'not found in archive history' in str(excinfo.value)
Example #9
def test_file_io(local_auth, cache, datafile_opener):

    a1 = DataService(local_auth)
    csh = DataService(cache)

    # SUBTEST 1

    # Write data to a new system path. No files currently in csh.
    with datafile_opener(a1,
                         csh,
                         updater,
                         get_checker(a1, p),
                         hasher,
                         p,
                         mode='w+') as f:

        f.write(u('test data 1'))

    assert a1.fs.isfile(p)

    # We expect the contents to be written to the authority
    with open(a1.fs.getsyspath(p), 'r') as f:
        assert u('test data 1') == f.read()

    # We expect that the csh was left empty, because no file was present on
    # update.
    assert not os.path.isfile(csh.fs.getsyspath(p))

    # SUBTEST 2

    # Read from file and check contents multiple times

    for _ in range(5):

        with datafile_opener(a1,
                             csh,
                             updater,
                             get_checker(a1, p),
                             hasher,
                             p,
                             mode='r') as f:

            assert u('test data 1') == f.read()

        # We expect the contents to be left unchanged
        with open(a1.fs.getsyspath(p), 'r') as f:
            assert u('test data 1') == f.read()

        # We expect that the csh was left empty, because no file was present on
        # update.
        assert not os.path.isfile(csh.fs.getsyspath(p))

    # SUBTEST 3

    # Overwrite file and check contents again, this time with no csh

    with datafile_opener(a1,
                         None,
                         updater,
                         get_checker(a1, p),
                         hasher,
                         p,
                         mode='w+') as f:

        f.write(u('test data 2'))

    # We expect the contents to be written to the authority
    with open(a1.fs.getsyspath(p), 'r') as f:
        assert u('test data 2') == f.read()

    # We expect the csh to still be empty -- no cache service was passed
    # this time.
    assert not os.path.isfile(csh.fs.getsyspath(p))

    # SUBTEST 4

    # Append to the file and test file contents

    for i in range(5):
        with datafile_opener(a1,
                             csh,
                             updater,
                             get_checker(a1, p),
                             hasher,
                             p,
                             mode='a') as f:

            f.write(u('appended data'))

        # We expect the contents to be left unchanged
        with open(a1.fs.getsyspath(p), 'r') as f:
            assert u('test data 2' + 'appended data' * (i + 1)) == f.read()

        # We expect that the csh was left empty, because no file was present on
        # update.
        assert not os.path.isfile(csh.fs.getsyspath(p))
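
These lower-level tests work on DataService wrappers around pyfilesystem objects rather than on archives, together with a shared test path p and updater/checker/hasher helpers defined elsewhere. A minimal setup sketch, assuming temporary filesystems; the DataService import path and the value of p are assumptions, since the excerpts only show the class name:

from fs.tempfs import TempFS

# DataService is part of datafs; the exact module path is not shown in
# these excerpts, so this import is a guess:
from datafs.services.service import DataService

p = 'path/to/file.txt'          # arbitrary illustrative path

a1 = DataService(TempFS())      # stands in for the authority
csh = DataService(TempFS())     # stands in for the cache

assert not a1.fs.isfile(p)
assert not csh.fs.isfile(p)
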
Example #10
def test_delete_handling(local_auth, cache):

    a1 = DataService(local_auth)
    csh = DataService(cache)

    # "touch" the csh file
    csh.fs.makedir(fs.path.dirname(p), recursive=True, allow_recreate=True)
    with open(csh.fs.getsyspath(p), 'w+') as f:
        f.write('')

    with data_file.get_local_path(a1, csh, updater, get_checker(a1, p), hasher,
                                  p) as fp:

        with open(fp, 'w+') as f:
            f.write(u('test data 1'))

    assert a1.fs.isfile(p)

    # We expect authority to contain the new data
    with open(a1.fs.getsyspath(p), 'r') as f:
        assert u('test data 1') == f.read()

    # We expect csh to contain the new data
    with open(csh.fs.getsyspath(p), 'r') as f:
        assert u('test data 1') == f.read()

    # Test error handling of file deletion within a
    # get_local_path context manager

    with pytest.raises(OSError) as excinfo:

        with data_file.get_local_path(a1, csh, updater, get_checker(a1, p),
                                      hasher, p) as fp:

            with open(fp, 'r') as f:
                assert u('test data 1') == f.read()

            with open(fp, 'w+') as f:
                f.write(u('test data 2'))

            with open(fp, 'r') as f:
                assert u('test data 2') == f.read()

            os.remove(fp)

    assert "removed during execution" in str(excinfo.value)

    # We expect authority to be unchanged
    with open(a1.fs.getsyspath(p), 'r') as f:
        assert u('test data 1') == f.read()

    # Unexpected things may have happened to the csh. But we expect it to be
    # back to normal after another read:
    with data_file.open_file(a1,
                             csh,
                             updater,
                             get_checker(a1, p),
                             hasher,
                             p,
                             mode='r') as f:

        assert u('test data 1') == f.read()

    with open(a1.fs.getsyspath(p), 'r') as f:
        assert u('test data 1') == f.read()

    with open(csh.fs.getsyspath(p), 'r') as f:
        assert u('test data 1') == f.read()
Example #11
def test_file_caching(local_auth, cache, datafile_opener):

    a1 = DataService(local_auth)
    csh = DataService(cache)

    # SUBTEST 1

    # Write data to a new system path. No files currently in csh.
    with datafile_opener(a1,
                         csh,
                         updater,
                         get_checker(a1, p),
                         hasher,
                         p,
                         mode='w+') as f:

        f.write(u('test data 1'))

    assert a1.fs.isfile(p)

    # We expect the contents to be written to the authority
    with open(a1.fs.getsyspath(p), 'r') as f:
        assert u('test data 1') == f.read()

    # We expect that the csh was left empty, because no file was present on
    # update.
    assert not os.path.isfile(csh.fs.getsyspath(p))

    # SUBTEST 2

    # Create a blank file in the csh location. Then test reading from
    # authority.

    # "touch" the csh file
    csh.fs.makedir(fs.path.dirname(p), recursive=True, allow_recreate=True)
    with open(csh.fs.getsyspath(p), 'w+') as f:
        f.write('')

    # Read file, and ensure we read from the authority
    with datafile_opener(a1,
                         csh,
                         updater,
                         get_checker(a1, p),
                         hasher,
                         p,
                         mode='r') as f:

        assert u('test data 1') == f.read()

    # We expect the contents to be left unchanged
    with open(a1.fs.getsyspath(p), 'r') as f:
        assert u('test data 1') == f.read()

    # We expect the csh to have been updated by read
    with open(csh.fs.getsyspath(p), 'r') as f:
        assert u('test data 1') == f.read()

    # SUBTEST 3

    # Write, and check consistency across authority and csh

    with datafile_opener(a1,
                         csh,
                         updater,
                         get_checker(a1, p),
                         hasher,
                         p,
                         mode='w+') as f:

        f.write(u('test data 2'))

    # We expect the contents to be written to the authority
    with open(a1.fs.getsyspath(p), 'r') as f:
        assert u('test data 2') == f.read()

    # We expect the contents to be written to the csh
    with open(csh.fs.getsyspath(p), 'r') as f:
        assert u('test data 2') == f.read()

    # SUBTEST 4

    # Manually modify the csh, then ensure data was replaced

    with open(csh.fs.getsyspath(p), 'w+') as f:
        f.write(u('erroneous csh data'))

    with datafile_opener(a1,
                         csh,
                         updater,
                         get_checker(a1, p),
                         hasher,
                         p,
                         mode='r') as f:

        assert u('test data 2') == f.read()

    # We expect authority to be unaffected
    with open(a1.fs.getsyspath(p), 'r') as f:
        assert u('test data 2') == f.read()

    # We expect the csh to be overwritten on read
    with open(csh.fs.getsyspath(p), 'r') as f:
        assert u('test data 2') == f.read()

    # SUBTEST 5

    # During read, manually modify the authority. Ensure updated data not
    # overwritten
    with datafile_opener(a1,
                         csh,
                         updater,
                         get_checker(a1, p),
                         hasher,
                         p,
                         mode='r') as f:

        # Overwrite a1
        with open(a1.fs.getsyspath(p), 'w+') as f2:
            f2.write(u('test data 3'))

        assert u('test data 2') == f.read()

    # We expect authority to reflect changed made
    with open(a1.fs.getsyspath(p), 'r') as f:
        assert u('test data 3') == f.read()

    # We expect the csh to be unaffected by write to authority
    with open(csh.fs.getsyspath(p), 'r') as f:
        assert u('test data 2') == f.read()

    # On second read, we expect the data to be updated
    with datafile_opener(a1,
                         csh,
                         updater,
                         get_checker(a1, p),
                         hasher,
                         p,
                         mode='r') as f:

        assert u('test data 3') == f.read()

    with open(a1.fs.getsyspath(p), 'r') as f:
        assert u('test data 3') == f.read()

    with open(csh.fs.getsyspath(p), 'r') as f:
        assert u('test data 3') == f.read()

    # SUBTEST 6

    # During write, manually modify the authority. Overwrite the authority
    # with new version.
    with datafile_opener(a1,
                         csh,
                         updater,
                         get_checker(a1, p),
                         hasher,
                         p,
                         mode='w+') as f:

        # Overwrite a1
        with open(a1.fs.getsyspath(p), 'w+') as f2:
            f2.write(u('test data 4'))

        f.write(u('test data 5'))

    # We expect authority to reflect the local change
    with open(a1.fs.getsyspath(p), 'r') as f:
        assert u('test data 5') == f.read()

    # We expect the csh to reflect the local change
    with open(csh.fs.getsyspath(p), 'r') as f:
        assert u('test data 5') == f.read()
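
At the archive level, the opt-in caching behaviour exercised above is driven through cache() and remove_from_cache(), as in the multi-user test earlier on this page. A compressed sketch, assuming an existing api object with a cache attached and the u() helper sketched above:

# Assumes an api with an attached cache (see the multi-user test) and u().
archive = api.create('cached/archive.txt')

archive.cache()                   # opt in: "touch" a copy in the cache
assert archive.is_cached() is True

with archive.open('w+') as f:     # writes land on the authority and cache
    f.write(u('cached contents'))

with archive.open('r') as f:      # reads refresh the cache copy
    assert u(f.read()) == u('cached contents')

archive.remove_from_cache()       # opt back out; the cache copy is dropped
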
Example #12
def test_hashfuncs(archive, contents):
    '''
    Run through a number of iterations of the hash functions
    '''

    contents = u(contents)

    direct = hashlib.md5(contents.encode('utf-8')).hexdigest()
    apihash = update_and_hash(archive, contents)

    assert direct == apihash, 'Manual hash "{}" != api hash "{}"'.format(
        direct, apihash)

    msg = (
        'Manual hash "{}"'.format(direct) +
        ' != archive hash "{}"'.format(archive.get_latest_hash()))
    assert direct == archive.get_latest_hash(), msg

    # Try uploading the same file
    apihash = update_and_hash(archive, contents)

    assert direct == apihash, 'Manual hash "{}" != api hash "{}"'.format(
        direct, apihash)

    msg = (
        'Manual hash "{}"'.format(direct) +
        ' != archive hash "{}"'.format(archive.get_latest_hash()))
    assert direct == archive.get_latest_hash(), msg

    # Update and test again!

    contents = u((os.linesep).join(
        [contents, contents, 'line 3!' + contents]))

    direct = hashlib.md5(contents.encode('utf-8')).hexdigest()
    apihash = update_and_hash(archive, contents)

    with archive.open('rb') as f:
        current = f.read()

    msg = 'Latest updates "{}" != archive contents "{}"'.format(
        contents, current)
    assert contents == current, msg

    assert direct == apihash, 'Manual hash "{}" != api hash "{}"'.format(
        direct, apihash)

    msg = (
        'Manual hash "{}"'.format(direct) +
        ' != archive hash "{}"'.format(archive.get_latest_hash()))
    assert direct == archive.get_latest_hash(), msg

    # Update and test a different way!

    contents = u((os.linesep).join([contents, 'more!!!', contents]))

    direct = hashlib.md5(contents.encode('utf-8')).hexdigest()

    with archive.open('wb+') as f:
        f.write(b(contents))

    with archive.open('rb') as f2:
        current = f2.read()

    msg = 'Latest updates "{}" != archive contents "{}"'.format(
        contents, current)
    assert contents == current, msg

    msg = (
        'Manual hash "{}"'.format(direct) +
        ' != archive hash "{}"'.format(archive.get_latest_hash()))
    assert direct == archive.get_latest_hash(), msg
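
The "manual" hash in this test is a plain MD5 of the UTF-8 encoded text, and b() is presumably the bytes counterpart of the u() helper sketched above. A standalone illustration of both assumptions:

import hashlib
import os


def b(value):
    # Hypothetical bytes helper mirroring u(): encode text as UTF-8.
    if isinstance(value, bytes):
        return value
    return value.encode('utf-8')


contents = os.linesep.join(['line one', 'line two'])
direct = hashlib.md5(contents.encode('utf-8')).hexdigest()
print(direct)  # 32-character hex digest compared against the archive hash
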
Example #13
def test_multi_api(api1, api2, auth1, cache1, cache2, opener):
    '''
    Test upload/download/cache operations with two users
    '''
    # Create two separate users. Each user connects to the
    # same authority and the same manager table (apis are
    # initialized with the same manager table but different
    # manager instance objects). Each user has its own
    # separate cache.

    api1.attach_authority('auth1', auth1)
    api1.attach_cache(cache1)

    api2.attach_authority('auth1', auth1)
    api2.attach_cache(cache2)

    with open('text_file.txt', 'w') as f:
        f.write(u('Stay Stoked'))

    archive1 = api1.create('myArchive', versioned=False)
    archive1.update('text_file.txt')

    # Turn on caching in archive 1 and assert creation

    archive1.cache()
    assert archive1.is_cached() is True
    assert archive1.api.cache.fs is cache1

    with opener(archive1, 'w+') as f1:
        f1.write(u('Be happy and Stay Stoked'))

    assert auth1.isfile('myArchive')
    assert cache1.isfile('myArchive')

    archive2 = api2.get_archive('myArchive')

    # Turn on caching in archive 2 and assert creation
    archive2.cache()
    assert archive2.is_cached() is True
    assert archive2.api.cache.fs is cache2

    with archive2.open('r') as f:
        assert u(f.read()) == u('Be happy and Stay Stoked')

    archive2.delete()

    with pytest.raises(ResourceNotFoundError):
        api2._authorities['auth1'].fs.open('myArchive', 'r')

    with pytest.raises(ResourceNotFoundError):
        api2.cache.fs.open('myArchive', 'r')

    assert cache1.isfile('myArchive')

    with pytest.raises(ResourceNotFoundError):
        api1._authorities['auth1'].fs.open('myArchive', 'r')

    # using remove_from_cache because this archive is no longer in the manager
    archive1.remove_from_cache()

    with pytest.raises(ResourceNotFoundError):
        api1.cache.fs.open('myArchive', 'r')
Example #14
def test_cli_log_with_various_messages(sample_config):
    '''
    Test CLI command ``log`` stability

    Addresses :issue:`232` - log fails on versions with no message
    '''

    profile, temp_file = sample_config

    api = get_api(profile=profile, config_file=temp_file)

    runner = CliRunner()

    prefix = ['--config-file', '{}'.format(temp_file), '--profile', 'myapi']

    arch = api.create('test/archive1.txt')

    def run_test():
        # Run the CLI log command and return its stripped output
        result = runner.invoke(cli, prefix + ['log', 'test/archive1.txt'])

        if result.exit_code != 0:
            traceback.print_exception(*result.exc_info)
            raise OSError('Errors encountered during execution')

        return result.output.strip()

    try:
        # Test log on an empty archive
        log = run_test()
        assert log == ''

        # Test log with no message -- should still have info

        with arch.open('w+') as f:
            f.write(u('hello 1'))

        log = run_test()
        assert len(log.split('\n')) > 0

        # Test log with message -- should appear in log

        with arch.open('w+', message='a message') as f:
            f.write(u('hello 2'))

        log = run_test()
        assert 'a message' in log

        # Test log with numeric message -- should appear in log

        with arch.open('w+', message=4.3) as f:
            f.write(u('hello 3'))

        log = run_test()
        assert '4.3' in log
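
        # Test log with a lambda message -- its repr should appear in log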

        with arch.open('w+', message=lambda x: x**2) as f:
            f.write(u('hello 4'))

        log = run_test()
        assert 'lambda' in log

        # Test log with explicit None as message -- should not appear in log,
        # but the version should be present

        with arch.open('w+', message=None) as f:
            f.write(u('hello 5'))

        log = run_test()
        assert str(arch.get_latest_version()) in log
        assert 'None' not in log

    finally:
        arch.delete()