# Imports used by the tests below. TEST_NUMBER, create_volume_from_image,
# and delete_layer are assumed to be shared helpers defined elsewhere in the
# test module.
import copy
import gzip
import json
import os

import numpy as np

from cloudvolume import CloudVolume, chunks
from cloudvolume.lib import Bbox


def test_cache_compression_setting():
    image = np.zeros(shape=(128,128,128,1), dtype=np.uint8)
    dirpath = '/tmp/cloudvolume/caching-validity-' + str(TEST_NUMBER)
    layer_path = 'file://' + dirpath

    vol = create_volume_from_image(
        image=image, 
        offset=(1,1,1), 
        layer_path=layer_path, 
        layer_type='image', 
        resolution=(1,1,1), 
        encoding='raw'
    )
    vol.cache.enabled = True
    vol.cache.flush()
    vol.commit_info()

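    # cache.compress = None defers to the source's own compression; raw chunks
    # are written gzip-compressed by default on the file backend, so the cached
    # copies keep a .gz suffix.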
    vol.cache.compress = None
    vol[:] = image
    assert all([ os.path.splitext(x)[1] == '.gz' for x in vol.cache.list() ])
    vol.cache.flush()

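    # cache.compress = True forces gzip compression of cached chunks.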
    vol.cache.compress = True
    vol[:] = image
    assert all([ os.path.splitext(x)[1] == '.gz' for x in vol.cache.list() ])
    vol.cache.flush()

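    # cache.compress = False stores cached chunks uncompressed (no extension).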
    vol.cache.compress = False
    vol[:] = image
    assert all([ os.path.splitext(x)[1] == '' for x in vol.cache.list() ])
    vol.cache.flush()

    delete_layer(dirpath)


def test_info_provenance_cache():
    image = np.zeros(shape=(128, 128, 128, 1), dtype=np.uint8)
    vol = create_volume_from_image(
        image=image,
        offset=(0, 0, 0),
        layer_path='gs://seunglab-test/cloudvolume/caching',
        layer_type='image',
        resolution=(1, 1, 1),
        encoding='raw')
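    # Note: unlike the file:// tests, this layer path points at a GCS bucket,
    # so running it needs network access to that bucket (and write credentials).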

    # Test Info
    vol.cache.enabled = True
    vol.cache.flush()
    info = vol.refresh_info()
    assert info is not None

    with open(os.path.join(vol.cache.path, 'info'), 'r') as infof:
        info = infof.read()
        info = json.loads(info)

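    # Overwrite the cached info with a bogus payload; with the cache enabled,
    # refresh_info should return the cached copy instead of re-fetching.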
    with open(os.path.join(vol.cache.path, 'info'), 'w') as infof:
        infof.write(json.dumps({'wow': 'amaze'}))

    info = vol.refresh_info()
    assert info == {'wow': 'amaze'}
    vol.cache.enabled = False
    info = vol.refresh_info()
    assert info != {'wow': 'amaze'}

    infopath = os.path.join(vol.cache.path, 'info')
    assert os.path.exists(infopath)

    vol.cache.flush_info()
    assert not os.path.exists(infopath)
    vol.cache.flush_info()  # assert no error by double delete

    # Test Provenance
    vol.cache.enabled = True
    vol.cache.flush()
    prov = vol.refresh_provenance()
    assert prov is not None

    with open(os.path.join(vol.cache.path, 'provenance'), 'r') as provf:
        prov = provf.read()
        prov = json.loads(prov)

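    # Same pattern for provenance: edit the cached copy and verify it is
    # returned only while the cache is enabled.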
    with open(os.path.join(vol.cache.path, 'provenance'), 'w') as provf:
        prov['description'] = 'wow'
        provf.write(json.dumps(prov))

    prov = vol.refresh_provenance()
    assert prov['description'] == 'wow'
    vol.cache.enabled = False
    prov = vol.refresh_provenance()
    assert prov['description'] == ''

    provpath = os.path.join(vol.cache.path, 'provenance')
    vol.cache.flush_provenance()
    assert not os.path.exists(provpath)
    vol.cache.flush_provenance()  # assert no error by double delete


def test_cache_validity():
    image = np.zeros(shape=(128,128,128,1), dtype=np.uint8)
    dirpath = '/tmp/cloudvolume/caching-validity-' + str(TEST_NUMBER)
    layer_path = 'file://' + dirpath

    vol = create_volume_from_image(
        image=image, 
        offset=(1,1,1), 
        layer_path=layer_path, 
        layer_type='image', 
        resolution=(1,1,1), 
        encoding='raw'
    )
    vol.cache.enabled = True
    vol.cache.flush()
    vol.commit_info()

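    # Write a mock 'info' file into the cache directory, then check whether
    # constructing a CloudVolume with cache=True raises ValueError when the
    # cached info conflicts with the source info.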
    def test_with_mock_cache_info(info, shoulderror):
        finfo = os.path.join(vol.cache.path, 'info')
        with open(finfo, 'w') as f:
            f.write(json.dumps(info))

        if shoulderror:
            try:
                CloudVolume(vol.layer_cloudpath, cache=True)
            except ValueError:
                pass
            else:
                assert False
        else:
            CloudVolume(vol.layer_cloudpath, cache=True)

    test_with_mock_cache_info(vol.info, shoulderror=False)

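    # A mismatched dataset size is considered benign and should not error.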
    # Deep copy so that mutating the nested scale does not alter vol.info itself.
    info = copy.deepcopy(vol.info)
    info['scales'][0]['size'][0] = 666
    test_with_mock_cache_info(info, shoulderror=False)

    test_with_mock_cache_info({ 'zomg': 'wow' }, shoulderror=True)

    def tiny_change(key, val):
        info = vol.info.copy()
        info[key] = val
        test_with_mock_cache_info(info, shoulderror=True)

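    # Top-level fields that must match between the cached and source info.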
    tiny_change('type', 'zoolander')
    tiny_change('data_type', 'uint32')
    tiny_change('num_channels', 2)
    tiny_change('mesh', 'mesh')

    def scale_change(key, val, mip=0):
        # A shallow copy() would share the nested 'scales' list and mutate
        # vol.info in place, so deep copy before editing.
        info = copy.deepcopy(vol.info)
        info['scales'][mip][key] = val
        test_with_mock_cache_info(info, shoulderror=True)

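    # Per-scale fields that must also match.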
    scale_change('voxel_offset', [ 1, 2, 3 ])
    scale_change('resolution', [ 1, 2, 3 ])
    scale_change('encoding', 'npz')

    vol.cache.flush()

    # Test that construction succeeds when no info file is cached at all
    CloudVolume(vol.layer_cloudpath, cache=True)

    vol.cache.flush()


def test_caching():
    image = np.zeros(shape=(128,128,128,1), dtype=np.uint8)
    image[0:64,0:64,0:64] = 1
    image[64:128,0:64,0:64] = 2
    image[0:64,64:128,0:64] = 3
    image[0:64,0:64,64:128] = 4
    image[64:128,64:128,0:64] = 5
    image[64:128,0:64,64:128] = 6
    image[0:64,64:128,64:128] = 7
    image[64:128,64:128,64:128] = 8

    dirpath = '/tmp/cloudvolume/caching-volume-' + str(TEST_NUMBER)
    layer_path = 'file://' + dirpath

    vol = create_volume_from_image(
        image=image, 
        offset=(0,0,0), 
        layer_path=layer_path, 
        layer_type='image', 
        resolution=(1,1,1), 
        encoding='raw'
    )

    vol.cache.enabled = True
    vol.cache.flush()

    # Test that reading populates the cache
    read1 = vol[:,:,:]
    assert np.all(read1 == image)

    read2 = vol[:,:,:]
    assert np.all(read2 == image)

    assert len(vol.cache.list()) > 0

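    # The 128^3 volume is chunked into eight 64^3 chunks; cached files are
    # named after their x_y_z voxel bounds.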
    files = vol.cache.list()
    validation_set = [
        '0-64_0-64_0-64',
        '64-128_0-64_0-64',
        '0-64_64-128_0-64',
        '0-64_0-64_64-128',
        '64-128_64-128_0-64',
        '64-128_0-64_64-128',
        '0-64_64-128_64-128',
        '64-128_64-128_64-128'
    ]
    assert set([ os.path.splitext(fname)[0] for fname in files ]) == set(validation_set)

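    # Each cached chunk should decompress and decode back to its constant label.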
    for i in range(8):
        fname = os.path.join(vol.cache.path, vol.key, validation_set[i]) + '.gz'
        with gzip.GzipFile(fname, mode='rb') as gfile:
            chunk = gfile.read()
        img3d = chunks.decode(
          chunk, 'raw', (64,64,64,1), np.uint8
        )
        assert np.all(img3d == (i+1))

    vol.cache.flush()
    assert not os.path.exists(vol.cache.path)

    # Test that writing populates the cache
    vol[:,:,:] = image

    assert os.path.exists(vol.cache.path)
    assert np.all(vol[:,:,:] == image)

    vol.cache.flush()

    # Test that partial reads work too
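    # A 64x64x128 read covers only two of the 64^3 chunks, so only two files
    # should be cached.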
    result = vol[0:64,0:64,:]
    assert np.all(result == image[0:64,0:64,:])
    files = vol.cache.list()
    assert len(files) == 2
    result = vol[:,:,:]
    assert np.all(result == image)
    files = vol.cache.list()
    assert len(files) == 8

    vol.cache.flush()

    # Test Non-standard Cache Destination
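    # Assigning a path string to cache.enabled both turns caching on and
    # redirects the cache to that directory.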
    dirpath = '/tmp/cloudvolume/caching-cache-' + str(TEST_NUMBER)
    vol.cache.enabled = dirpath
    vol[:,:,:] = image

    assert len(os.listdir(os.path.join(dirpath, vol.key))) == 8

    vol.cache.flush()

    # Test that caching doesn't occur when the cache is disabled
    vol.cache.enabled = False
    result = vol[:,:,:]
    if os.path.exists(vol.cache.path):
        files = vol.cache.list()
        assert len(files) == 0

    vol[:,:,:] = image
    if os.path.exists(vol.cache.path):
        files = vol.cache.list()
        assert len(files) == 0

    vol.cache.flush()

    # Test that deleting data also evicts the corresponding cached chunks
    vol.cache.enabled = True
    vol[:,:,:] = image
    files = vol.cache.list()
    assert len(files) == 8
    vol.delete( np.s_[:,:,:] )
    files = vol.cache.list()
    assert len(files) == 0

    vol.cache.flush()    

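    # flush(preserve=...) evicts everything except chunks intersecting the
    # preserved region.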
    vol[:,:,:] = image
    files = vol.cache.list()
    assert len(files) == 8
    vol.cache.flush(preserve=np.s_[:,:,:])
    files = vol.cache.list()
    assert len(files) == 8
    vol.cache.flush(preserve=np.s_[:64,:64,:])
    files = vol.cache.list()
    assert len(files) == 2

    vol.cache.flush()

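    # flush_region evicts only chunks intersecting the given region; this bbox
    # touches four of the eight 64^3 chunks.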
    vol[:,:,:] = image
    files = vol.cache.list()
    assert len(files) == 8
    vol.cache.flush_region(Bbox( (50, 50, 0), (100, 100, 10) ))
    files = vol.cache.list()
    assert len(files) == 4

    vol.cache.flush()

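    # flush_region also accepts numpy slice notation.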
    vol[:,:,:] = image
    files = vol.cache.list()
    assert len(files) == 8
    vol.cache.flush_region(np.s_[50:100, 50:100, 0:10])
    files = vol.cache.list()
    assert len(files) == 4

    vol.cache.flush()