def test_ellipsis_read():
    """Reads indexed with Ellipsis (...) expand to full slices on the
    remaining axes; more than one ellipsis must raise ValueError."""
    delete_layer()
    cv, data = create_layer(size=(50, 50, 50, 1), offset=(0, 0, 0))

    # a bare ellipsis selects the entire volume
    img = cv[...]
    assert np.all(img == data)

    # ellipsis fills in the trailing axes
    img = cv[5:10, ...]
    assert np.all(img == data[5:10, :, :, :])

    img = cv[5:10, 7, 8, ...]
    assert np.all(np.squeeze(img) == data[5:10, 7, 8, 0])

    # ellipsis may also appear between explicit indices
    img = cv[5:10, ..., 8, 0]
    assert np.all(np.squeeze(img) == data[5:10, :, 8, 0])

    # multiple ellipses are ambiguous and must be rejected
    try:
        img = cv[..., 5, ..., 0]
    except ValueError:
        pass
    else:
        # bug fix: previously the test passed silently if no error was raised
        assert False, "expected ValueError for multiple ellipses"
# Beispiel #2
def test_writer_last_chunk_smaller():
    """generate_chunks splits a 100-wide volume into one full 64^3 chunk
    plus a trailing 36-wide boundary chunk."""
    delete_layer()
    cv, data = create_layer(size=(100, 64, 64, 1), offset=(0, 0, 0))
    cv.info['scales'][0]['chunk_sizes'] = [[64, 64, 64]]

    # idiom fix: a bare identity comprehension is just list()
    chunks = list(txrx.generate_chunks(cv, data[:, :, :, :], (0, 0, 0)))

    assert len(chunks) == 2

    # first chunk: full 64^3
    startpt, endpt, spt, ept = chunks[0]
    assert np.array_equal(spt, (0, 0, 0))
    assert np.array_equal(ept, (64, 64, 64))
    assert np.all((endpt - startpt) == Vec(64, 64, 64))

    # last chunk: truncated to the 100-voxel volume boundary (36 wide in x)
    startpt, endpt, spt, ept = chunks[1]
    assert np.array_equal(spt, (64, 0, 0))
    assert np.array_equal(ept, (100, 64, 64))
    assert np.all((endpt - startpt) == Vec(36, 64, 64))
def test_aligned_read(green, encoding):
    """Chunk-aligned cutouts must exactly reproduce the uploaded data."""
    # single-chunk volume: read the whole thing back
    delete_layer()
    cv, data = create_layer(size=(50, 50, 50, 1),
                            offset=(0, 0, 0),
                            encoding=encoding)
    cv.green_threads = green
    # the trailing dimension is the channel count
    assert cv[0:50, 0:50, 0:50].shape == (50, 50, 50, 1)
    assert image_equal(cv[0:50, 0:50, 0:50], data, encoding)

    # two-chunk volume at the origin: read only the first chunk
    delete_layer()
    cv, data = create_layer(size=(128, 64, 64, 1),
                            offset=(0, 0, 0),
                            encoding=encoding)
    cv.green_threads = green
    assert cv[0:64, 0:64, 0:64].shape == (64, 64, 64, 1)
    assert image_equal(cv[0:64, 0:64, 0:64], data[:64, :64, :64, :], encoding)

    # two-chunk volume with a nonzero voxel offset
    delete_layer()
    cv, data = create_layer(size=(128, 64, 64, 1),
                            offset=(10, 20, 0),
                            encoding=encoding)
    cv.green_threads = green

    first_chunk = cv[10:74, 20:84, 0:64]
    assert first_chunk.shape == (64, 64, 64, 1)
    assert image_equal(first_chunk, data[:64, :64, :64, :], encoding)

    second_chunk = cv[74:138, 20:84, 0:64]
    assert second_chunk.shape == (64, 64, 64, 1)
    assert image_equal(second_chunk, data[64:128, :64, :64, :], encoding)

    # a point index still yields a rank-4 result
    assert cv[25, 25, 25].shape == (1, 1, 1, 1)
# Beispiel #4
def test_aligned_read():
    """Aligned cutouts round-trip data with and without green threads."""
    for green in (False, True):
        print("green", green)

        # whole-volume read of a single-chunk layer
        delete_layer()
        cv, data = create_layer(size=(50, 50, 50, 1), offset=(0, 0, 0))
        cv.green_threads = green
        # trailing axis is the channel axis
        assert cv[0:50, 0:50, 0:50].shape == (50, 50, 50, 1)
        assert np.array_equal(cv[0:50, 0:50, 0:50], data)

        # first chunk of a two-chunk layer
        delete_layer()
        cv, data = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))
        cv.green_threads = green
        assert cv[0:64, 0:64, 0:64].shape == (64, 64, 64, 1)
        assert np.array_equal(cv[0:64, 0:64, 0:64], data[:64, :64, :64, :])

        # offset volume: both chunks, read individually
        delete_layer()
        cv, data = create_layer(size=(128, 64, 64, 1), offset=(10, 20, 0))
        cv.green_threads = green

        chunk_a = cv[10:74, 20:84, 0:64]
        assert chunk_a.shape == (64, 64, 64, 1)
        assert np.array_equal(chunk_a, data[:64, :64, :64, :])

        chunk_b = cv[74:138, 20:84, 0:64]
        assert chunk_b.shape == (64, 64, 64, 1)
        assert np.array_equal(chunk_b, data[64:128, :64, :64, :])

        # single-voxel index keeps rank 4
        assert cv[25, 25, 25].shape == (1, 1, 1, 1)
# Beispiel #5
def test_non_aligned_read():
    """Cutouts that cross or fall inside chunk boundaries still match."""
    # read spanning a chunk boundary
    delete_layer()
    cv, data = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))
    # trailing axis is the channel axis
    assert cv[31:65, 0:64, 0:64].shape == (34, 64, 64, 1)
    assert np.array_equal(cv[31:65, 0:64, 0:64], data[31:65, :64, :64, :])

    # single-voxel read
    delete_layer()
    cv, data = create_layer(size=(64, 64, 64, 1), offset=(0, 0, 0))
    assert cv[22:23, 22:23, 22:23].shape == (1, 1, 1, 1)
    assert np.array_equal(cv[22:23, 22:23, 22:23],
                          data[22:23, 22:23, 22:23, :])

    # positive step sizes must match striding a full read
    # (negative steps are not supported)
    strided = cv[::2, ::2, ::2, :]
    full_then_strided = cv[:, :, :, :][::2, ::2, ::2, :]
    assert np.array_equal(strided, full_then_strided)

    # strided read on an offset volume
    delete_layer()
    cv, data = create_layer(size=(256, 256, 64, 1), offset=(3, 7, 11))
    expected = data[19:74:2, 15:190:3, 11:21, :]
    assert cv[22:77:2, 22:197:3, 22:32].shape == (28, 59, 10, 1)
    assert expected.shape == (28, 59, 10, 1)
    assert np.array_equal(cv[22:77:2, 22:197:3, 22:32], expected)
def test_exists():
    """cv.exists reports per-chunk file presence for Bbox and slice queries."""

    # Bbox version
    delete_layer()
    cv, data = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))

    defexists = Bbox((0, 0, 0), (128, 64, 64))
    results = cv.exists(defexists)
    assert len(results) == 2
    # idiom fix: truthiness asserts instead of `== True` (E712)
    assert results['1_1_1/0-64_0-64_0-64']
    assert results['1_1_1/64-128_0-64_0-64']

    # delete the second chunk's file directly from disk
    fpath = os.path.join(cv.layer_cloudpath, cv.key, '64-128_0-64_0-64')
    fpath = fpath.replace('file://', '') + '.gz'
    os.remove(fpath)

    results = cv.exists(defexists)
    assert len(results) == 2
    assert results['1_1_1/0-64_0-64_0-64']
    assert not results['1_1_1/64-128_0-64_0-64']

    # Slice version
    delete_layer()
    cv, data = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))

    defexists = np.s_[0:128, :, :]

    results = cv.exists(defexists)
    assert len(results) == 2
    assert results['1_1_1/0-64_0-64_0-64']
    assert results['1_1_1/64-128_0-64_0-64']

    fpath = os.path.join(cv.layer_cloudpath, cv.key, '64-128_0-64_0-64')
    fpath = fpath.replace('file://', '') + '.gz'
    os.remove(fpath)

    results = cv.exists(defexists)
    assert len(results) == 2
    assert results['1_1_1/0-64_0-64_0-64']
    assert not results['1_1_1/64-128_0-64_0-64']
def test_background_color():
    """Missing chunks read back as background_color when fill_missing is on."""
    info = CloudVolume.create_new_info(
        num_channels=1,
        layer_type='image',
        data_type='uint8',
        encoding='raw',
        resolution=[1, 1, 1],
        voxel_offset=[0, 0, 0],
        volume_size=[128, 128, 1],
        mesh='mesh',
        chunk_size=[64, 64, 1],
    )

    path = 'file:///tmp/cloudvolume/empty_volume'

    # create the layer but never upload any image data
    vol = CloudVolume(path, mip=0, info=info)
    vol.commit_info()
    vol.cache.flush()

    # in-bounds read: every voxel should equal the background color
    vol = CloudVolume(path, mip=0, background_color=1, fill_missing=True)
    assert np.count_nonzero(vol[:] - 1) == 0

    # unbounded read past the volume edge
    vol = CloudVolume(path,
                      mip=0,
                      background_color=1,
                      fill_missing=True,
                      bounded=False)
    assert np.count_nonzero(vol[0:129, 0:129, 0:1] - 1) == 0

    # same, using the parallel download path
    vol = CloudVolume(path,
                      mip=0,
                      background_color=1,
                      fill_missing=True,
                      bounded=False,
                      parallel=2)
    assert np.count_nonzero(vol[0:129, 0:129, 0:1] - 1) == 0

    vol.cache.flush()
    delete_layer('/tmp/cloudvolume/empty_volume')
def test_read_write():
    """Round-trip put/get of files and JSON across file, gs, and s3 backends
    at several thread counts (0, 5, 10)."""
    urls = [
        "file:///tmp/removeme/read_write",
        "gs://seunglab-test/cloudvolume/read_write",
        "s3://seunglab-test/cloudvolume/read_write"
    ]

    for num_threads in range(0, 11, 5):
        for url in urls:
            url = url + '-' + str(TEST_NUMBER)
            with Storage(url, n_threads=num_threads) as s:
                content = b'some_string'
                s.put_file('info',
                           content,
                           compress=None,
                           cache_control='no-cache')
                s.wait()
                assert s.get_file('info') == content
                assert s.get_file('nonexistentfile') is None

                # n_threads == 0 still performs one synchronous fetch
                num_infos = max(num_threads, 1)

                # idiom fix: repeat-list instead of a constant comprehension
                results = s.get_files(['info'] * num_infos)

                assert len(results) == num_infos
                assert results[0]['filename'] == 'info'
                assert results[0]['content'] == content
                # idiom fix: generator expression instead of map+lambda
                assert all(r['error'] is None for r in results)
                assert s.get_files(['nonexistentfile'])[0]['content'] is None

                s.delete_file('info')
                s.wait()

                s.put_json('info', {'omg': 'wow'}, cache_control='no-cache')
                s.wait()
                results = s.get_json('info')
                assert results == {'omg': 'wow'}

    delete_layer("/tmp/removeme/read_write")
# Beispiel #9
def test_provenance():
    """Provenance serializes, commits, refreshes, and validates its schema."""
    delete_layer()
    cv, _ = create_layer(size=(64, 64, 64, 1), offset=(0, 0, 0))

    provobj = json.loads(cv.provenance.serialize())
    provobj['processing'] = []  # from_numpy
    assert provobj == {
        "sources": [],
        "owners": [],
        "processing": [],
        "description": ""
    }

    cv.provenance.sources.append('*****@*****.**')
    cv.commit_provenance()
    cv.refresh_provenance()

    assert cv.provenance.sources == ['*****@*****.**']

    # should not die
    cv = CloudVolume(cv.cloudpath, provenance={})
    cv = CloudVolume(cv.cloudpath, provenance={'sources': []})
    cv = CloudVolume(cv.cloudpath, provenance={'owners': []})
    cv = CloudVolume(cv.cloudpath, provenance={'processing': []})
    cv = CloudVolume(cv.cloudpath, provenance={'description': ''})

    # should die: sources must be a list, not an int
    try:
        cv = CloudVolume(cv.cloudpath, provenance={'sources': 3})
        assert False
    # bug fix: bare `except:` also swallowed the assert-False's
    # AssertionError's siblings (SystemExit, KeyboardInterrupt);
    # catch only ordinary exceptions from the validator
    except Exception:
        pass

    # provenance may also be supplied as a JSON string
    cv = CloudVolume(cv.cloudpath,
                     provenance="""{
    "sources": [ "wow" ]
  }""")

    assert cv.provenance.sources[0] == 'wow'
# Beispiel #10
def test_write_compressed_segmentation():
    """compressed_segmentation encoding round-trips uint32 and uint64 labels."""
    delete_layer()
    cv, data = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))

    cv.info['num_channels'] = 1
    cv.scale['encoding'] = 'compressed_segmentation'
    cv.scale['compressed_segmentation_block_size'] = (8, 8, 8)

    # write and read back under each supported label width
    for dtype in (np.uint32, np.uint64):
        cv.info['data_type'] = np.dtype(dtype).name
        cv.commit_info()

        cv[:] = data.astype(dtype)
        roundtrip = cv[:]

        assert np.all(data == roundtrip)
def test_parallel_shared_memory_write():
    """Upload from a shared-memory buffer in serial, parallel, partial-cutout,
    autocrop, and C-order modes; each stage asserts on the stored voxels.

    NOTE: the stages build on each other's writes, so statement order matters.
    """
    delete_layer()
    cv, data = create_layer(size=(256,256,128,1), offset=(0,0,0))

    # back the upload with a named shared memory segment
    shm_location = 'cloudvolume-test-shm-parallel-write'
    mmapfh, shareddata = shm.ndarray(shape=(256,256,128), dtype=np.uint8, location=shm_location)
    shareddata[:] = 1

    # serial upload of the whole buffer
    cv.parallel = 1
    cv.upload_from_shared_memory(shm_location, Bbox((0,0,0), (256,256,128)))
    assert np.all(cv[:] == 1)

    # parallel upload of the whole buffer
    shareddata[:] = 2
    cv.parallel = 2
    cv.upload_from_shared_memory(shm_location, Bbox((0,0,0), (256,256,128)))
    assert np.all(cv[:] == 2)

    # cutout_bbox limits the upload to the front half in z;
    # the back half must keep its previous value (2)
    shareddata[:,:,:64] = 3
    cv.upload_from_shared_memory(shm_location, bbox=Bbox((0,0,0), (256,256,128)), 
        cutout_bbox=Bbox((0,0,0), (256,256,64)))
    assert np.all(cv[:,:,:64] == 3)    
    assert np.all(cv[:,:,64:128] == 2)    

    # negative-origin bbox with autocrop: the out-of-bounds margin is
    # discarded, so only the in-bounds corner receives the new value (4)
    shareddata[:,:,:69] = 4
    cv.autocrop = True
    cv.upload_from_shared_memory(shm_location, bbox=Bbox((-5,-5,-5), (251,251,123)), 
        cutout_bbox=Bbox((-5,-5,-5), (128,128,64)))
    assert np.all(cv[:128,:128,:63] == 4)    
    assert np.all(cv[128:,128:,:64] == 3)    
    assert np.all(cv[:,:,64:128] == 2)    

    # C-order upload: mark a single row and check it lands on the z axis
    shareddata[:] = 0
    shareddata[:,0,0] = 1
    cv.upload_from_shared_memory(shm_location, bbox=Bbox((0,0,0), (256,256,128)), order='C')
    assert np.all(cv[0,0,:] == 1)
    assert np.all(cv[1,0,:] == 0)

    # release the shared memory segment
    mmapfh.close()
    shm.unlink(shm_location)
# Beispiel #12
def test_transfer():
    """transfer_to copies chunks, info, and provenance, and a repeat transfer
    must not clobber the destination's customized info."""
    delete_layer()
    cv, _ = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))

    dest = 'file:///tmp/removeme/transfer/'
    cv.transfer_to(dest, cv.bounds)

    chunk_files = os.listdir('/tmp/removeme/transfer/1_1_1/')
    assert '0-64_0-64_0-64.gz' in chunk_files
    assert len(chunk_files) == 2

    # metadata files travel with the image data
    assert os.path.exists('/tmp/removeme/transfer/info')
    assert os.path.exists('/tmp/removeme/transfer/provenance')

    # customize the destination's info, then transfer again
    dcv = CloudVolume("file:///tmp/removeme/transfer")
    dcv.info["dont_touch_me_bro"] = True
    dcv.commit_info()

    cv.transfer_to(dest, cv.bounds)
    dcv.refresh_info()

    assert 'dont_touch_me_bro' in dcv.info
def test_parallel_write():
    """Writes with cv.parallel = 2 succeed whether or not they align to chunks."""
    delete_layer()
    cv, data = create_layer(size=(512, 512, 128, 1), offset=(0, 0, 0))

    # chunk-aligned write
    cv.parallel = 2
    cv[:] = np.full((512, 512, 128, 1), 5, dtype=cv.dtype)
    assert np.all(cv[:] == 5)

    # non-aligned write
    cv.parallel = 2
    cv.non_aligned_writes = True
    cv[1:, 1:, 1:] = np.full((511, 511, 127, 1), 7, dtype=cv.dtype)
    assert np.all(cv[1:, 1:, 1:] == 7)

    # write thin enough that there is no fully-aligned core chunk
    cv.parallel = 2
    cv.non_aligned_writes = True
    cv[25:75, 25:75, 25:75] = np.full((50, 50, 50, 1), 8, dtype=cv.dtype)
    assert np.all(cv[25:75, 25:75, 25:75] == 8)
# Beispiel #14
def test_write():
    """setitem uploads data and enforces bounds and alignment restrictions,
    for both threading modes."""
    for green in (False, True):
        print("green:", green)

        delete_layer()
        cv, _ = create_layer(size=(50, 50, 50, 1), offset=(0, 0, 0))
        cv.green_threads = green

        # overwrite with zeros via slice indexing
        zeros = np.zeros(shape=(50, 50, 50, 1), dtype=np.uint8)
        cv[0:50, 0:50, 0:50] = zeros
        assert np.all(cv[0:50, 0:50, 0:50] == zeros)

        # overwrite with random data via slice indexing
        rand_block = np.random.randint(255,
                                       size=(50, 50, 50, 1),
                                       dtype=np.uint8)
        cv[0:50, 0:50, 0:50] = rand_block
        assert np.all(cv[0:50, 0:50, 0:50] == rand_block)

        # Bbox indexing behaves the same as slices
        rand_block = np.random.randint(255,
                                       size=(50, 50, 50, 1),
                                       dtype=np.uint8)
        bbx = Bbox((0, 0, 0), (50, 50, 50))
        cv[bbx] = rand_block
        assert np.all(cv[bbx] == rand_block)

        # writing past the volume bounds must raise
        delete_layer()
        cv, _ = create_layer(size=(128, 64, 64, 1), offset=(10, 20, 0))
        cv.green_threads = green
        with pytest.raises(ValueError):
            cv[74:150, 20:84, 0:64] = np.ones(shape=(64, 64, 64, 1),
                                              dtype=np.uint8)

        # chunk-misaligned writes raise by default
        delete_layer()
        cv, _ = create_layer(size=(128, 64, 64, 1), offset=(10, 20, 0))
        cv.green_threads = green
        with pytest.raises(ValueError):
            cv[21:85, 0:64, 0:64] = np.ones(shape=(64, 64, 64, 1),
                                            dtype=np.uint8)

        # short boundary chunks must pass the bounds check
        delete_layer()
        cv, _ = create_layer(size=(25, 25, 25, 1), offset=(1, 3, 5))
        cv.green_threads = green
        cv.info['scales'][0]['chunk_sizes'] = [[11, 11, 11]]
        cv[:] = np.ones(shape=(25, 25, 25, 1), dtype=np.uint8)
def test_compress_level(compression_method):
    """Files stored at each compress_level decompress back to the original
    content and carry the expected compression tag."""
    filepath = "/tmp/removeme/compress_level-" + str(TEST_NUMBER)
    url = "file://" + filepath

    payload = b'some_string' * 1000

    # odd levels 1, 3, 5, 7
    for level in range(1, 9, 2):
        with Storage(url, n_threads=5) as stor:
            stor.put_file('info',
                          payload,
                          compress=compression_method,
                          compress_level=level)
            stor.wait()

            # content round-trips regardless of level
            assert stor.get_file('info') == payload

            # the raw object is tagged with the compression method used
            _, enc = stor._interface.get_file("info")
            assert enc == compression_method

            assert stor.get_file('nonexistentfile') is None

        delete_layer(filepath)
def test_delete():
    """cv.delete removes chunk files for Bbox and slice selectors, and
    out-of-bounds selectors raise OutOfBoundsError."""

    # Bbox version
    delete_layer()
    cv, _ = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))

    defexists = Bbox((0, 0, 0), (128, 64, 64))
    results = cv.exists(defexists)
    assert len(results) == 2
    # idiom fix: truthiness asserts instead of `== True` / `== False` (E712)
    assert results['1_1_1/0-64_0-64_0-64']
    assert results['1_1_1/64-128_0-64_0-64']

    cv.delete(defexists)
    results = cv.exists(defexists)
    assert len(results) == 2
    assert not results['1_1_1/0-64_0-64_0-64']
    assert not results['1_1_1/64-128_0-64_0-64']

    # Slice version
    delete_layer()
    cv, _ = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))

    defexists = np.s_[0:128, :, :]

    results = cv.exists(defexists)
    assert len(results) == 2
    assert results['1_1_1/0-64_0-64_0-64']
    assert results['1_1_1/64-128_0-64_0-64']

    cv.delete(defexists)
    results = cv.exists(defexists)
    assert len(results) == 2
    assert not results['1_1_1/0-64_0-64_0-64']
    assert not results['1_1_1/64-128_0-64_0-64']

    # Check errors: selector extends past the 128-voxel x bound
    delete_layer()
    cv, _ = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))

    try:
        results = cv.exists(np.s_[1:129, :, :])
        print(results)
    except exceptions.OutOfBoundsError:
        pass
    else:
        assert False
def test_aligned_read():
    """Chunk-aligned reads return exactly the uploaded voxels."""
    # single-chunk volume
    delete_layer()
    cv, data = create_layer(size=(50, 50, 50, 1), offset=(0, 0, 0))
    # trailing axis is the channel axis
    assert cv[0:50, 0:50, 0:50].shape == (50, 50, 50, 1)
    assert np.array_equal(cv[0:50, 0:50, 0:50], data)

    # first chunk of a two-chunk volume at the origin
    delete_layer()
    cv, data = create_layer(size=(128, 64, 64, 1), offset=(0, 0, 0))
    assert cv[0:64, 0:64, 0:64].shape == (64, 64, 64, 1)
    assert np.array_equal(cv[0:64, 0:64, 0:64], data[:64, :64, :64, :])

    # both chunks of an offset volume
    delete_layer()
    cv, data = create_layer(size=(128, 64, 64, 1), offset=(10, 20, 0))

    first = cv[10:74, 20:84, 0:64]
    assert first.shape == (64, 64, 64, 1)
    assert np.array_equal(first, data[:64, :64, :64, :])

    second = cv[74:138, 20:84, 0:64]
    assert second.shape == (64, 64, 64, 1)
    assert np.array_equal(second, data[64:128, :64, :64, :])
# Beispiel #18
def test_setitem_mismatch():
    """Assigning a block whose shape differs from the target slice raises."""
    delete_layer()
    cv, _ = create_layer(size=(64, 64, 64, 1), offset=(0, 0, 0))

    too_small = np.zeros(shape=(5, 5, 5, 1), dtype=np.uint8)
    with pytest.raises(ValueError):
        cv[0:64, 0:64, 0:64] = too_small
# Beispiel #19
def test_redirects():
    """Redirect chains resolve to the terminal volume; over-long chains,
    cycles, and writes through a redirect all raise."""
    info = CloudVolume.create_new_info(
        num_channels=1,  # Increase this number when we add more tests for RGB
        layer_type='image',
        data_type='uint8',
        encoding='raw',
        resolution=[1, 1, 1],
        voxel_offset=[0, 0, 0],
        volume_size=[128, 128, 64],
        mesh='mesh',
        chunk_size=[64, 64, 64],
    )

    vol = CloudVolume('file:///tmp/cloudvolume/redirects_0', mip=0, info=info)
    vol.commit_info()
    vol.refresh_info()

    # a self-redirect is tolerated on refresh
    vol.info['redirect'] = 'file:///tmp/cloudvolume/redirects_0'
    vol.commit_info()
    vol.refresh_info()

    del vol.info['redirect']

    # build a 10-link chain: redirects_0 -> redirects_1 -> ... -> redirects_9
    for i in range(0, 10):
        info['redirect'] = 'file:///tmp/cloudvolume/redirects_' + str(i + 1)
        vol = CloudVolume('file:///tmp/cloudvolume/redirects_' + str(i),
                          mip=0,
                          info=info)
        vol.commit_info()
    # bug fix: the original used `for...else`, but with no `break` the else
    # clause always runs — plain post-loop code says the same thing honestly.
    # The terminal volume must not redirect anywhere.
    del vol.info['redirect']
    vol.commit_info()

    vol = CloudVolume('file:///tmp/cloudvolume/redirects_0', mip=0)

    assert vol.cloudpath == 'file:///tmp/cloudvolume/redirects_9'

    # extending the chain past the redirect limit must raise
    info['redirect'] = 'file:///tmp/cloudvolume/redirects_10'
    vol = CloudVolume('file:///tmp/cloudvolume/redirects_9', mip=0, info=info)
    vol.commit_info()

    try:
        CloudVolume('file:///tmp/cloudvolume/redirects_0', mip=0)
        assert False
    except exceptions.TooManyRedirects:
        pass

    vol = CloudVolume('file:///tmp/cloudvolume/redirects_9', max_redirects=0)
    del vol.info['redirect']
    vol.commit_info()

    # introduce a cycle: redirects_5 -> redirects_1 -> ... -> redirects_5
    vol = CloudVolume('file:///tmp/cloudvolume/redirects_5', max_redirects=0)
    vol.info['redirect'] = 'file:///tmp/cloudvolume/redirects_1'
    vol.commit_info()

    try:
        vol = CloudVolume('file:///tmp/cloudvolume/redirects_5')
        assert False
    except exceptions.CyclicRedirect:
        pass

    # repair the chain
    vol.info['redirect'] = 'file:///tmp/cloudvolume/redirects_6'
    vol.commit_info()

    vol = CloudVolume('file:///tmp/cloudvolume/redirects_1')

    # volumes opened through a redirect are read-only
    try:
        vol[:, :, :] = 1
        assert False
    except exceptions.ReadOnlyException:
        pass

    for i in range(0, 10):
        delete_layer('/tmp/cloudvolume/redirects_' + str(i))