def _test():
    """Smoke test for ClearMap.ParallelProcessing.ParallelIO.

    Saves/loads a random array with both numpy and ParallelIO, prints the
    elapsed times for comparison, prints whether the round-tripped data
    matches, and removes the temporary files afterwards.

    Intended to be run manually; it writes 'test.npy' / 'test2.npy' in the
    current working directory.
    """
    import numpy as np
    # 'reload' is not a builtin in Python 3 -- must come from importlib.
    from importlib import reload

    from ClearMap.Utils.Timer import Timer
    import ClearMap.ParallelProcessing.ParallelIO as pio
    reload(pio)

    # Larger benchmark sizes, kept for reference:
    # dat = np.random.rand(2000, 2000, 1000) > 0.5
    # dat = np.random.rand(1000, 1000, 500) > 0.5
    # dat = np.random.rand(1000, 1000, 500)
    dat = np.random.rand(200, 300, 400)

    # --- writing: numpy vs ParallelIO -----------------------------------
    filename = 'test.npy'
    timer = Timer()
    np.save(filename, dat)
    timer.print_elapsed_time('Numpy saving data of size %d' % dat.size)

    filename2 = 'test2.npy'
    timer = Timer()
    pio.write(filename2, dat, processes=4, blocks=4, verbose=False)
    timer.print_elapsed_time('ParallelIO writing data of size %d' % dat.size)

    # --- reading: numpy vs ParallelIO -----------------------------------
    timer = Timer()
    dat2 = np.load(filename)
    timer.print_elapsed_time('Numpy loading data of size %d' % dat.size)
    print(np.all(dat == dat2))

    timer = Timer()
    dat3 = pio.read(filename, verbose=True)
    timer.print_elapsed_time('ParallelIO reading data of size %d' % dat2.size)
    print(np.all(dat3 == dat))

    # clean up the temporary files
    pio.io.fu.delete_file(filename)
    pio.io.fu.delete_file(filename2)
def _test():
    """Smoke tests for ClearMap LargeData routines (sum, where, load/save,
    setValue/take, neighbours, findNeighbours, setArray, match).

    A collection of manual benchmark/consistency snippets; prints True/False
    comparison results and timing information.  Relies on pre-existing data
    files ('data.npy', 'points.npy', hard-coded raid paths) -- run manually
    in an environment where those exist.
    """
    import numpy as np
    from ClearMap.Utils.Timer import Timer
    import ClearMap.ParallelProcessing.DataProcessing.LargeData as ld
    from importlib import reload
    reload(ld)

    # Larger benchmark sizes, kept for reference:
    # dat = np.random.rand(2000, 2000, 1000) > 0.5
    # dat = np.random.rand(1000, 1000, 500) > 0.5
    dat = np.random.rand(200, 300, 400) > 0.5
    # datan = io.MMP.writeData('test.npy', dat)

    # NOTE(review): the following overwrite 'dat' with data files that must
    # already exist on disk -- confirm before running.
    dat = np.load('data.npy')
    xyz1 = np.load('points.npy')

    # --- sum ------------------------------------------------------------
    s = ld.sum(dat)
    # fix: compare against np.sum(dat); the original compared s to
    # np.sum(s), which is trivially always True.
    print(s == np.sum(dat))

    # --- where: parallel vs numpy ---------------------------------------
    timer = Timer()
    xyz = ld.where(dat)
    timer.print_elapsed_time('parallel')
    # parallel: elapsed time: 0:00:25.807

    timer = Timer()
    xyz1 = np.vstack(np.where(dat)).T
    timer.print_elapsed_time('numpy')
    # numpy: elapsed time: 0:05:45.590

    # consistency check: both coordinate lists mark the same voxels
    d0 = np.zeros(dat.shape, dtype=bool)
    d1 = np.zeros(dat.shape, dtype=bool)
    d0[xyz[:, 0], xyz[:, 1], xyz[:, 2]] = True
    d1[xyz1[:, 0], xyz1[:, 1], xyz1[:, 2]] = True
    np.all(d0 == d1)

    # --- load: parallel vs numpy ----------------------------------------
    dat2 = np.array(np.random.rand(1000, 1000, 1000) > 0, dtype='bool')
    filename = 'test.npy'
    np.save(filename, dat2)

    # NOTE(review): hard-coded absolute path replaces the file just saved
    # above -- only valid on the original benchmark machine.
    filename = '/disque/raid/vasculature/4X-test2/170824_IgG_2/170824_IgG_16-23-46/rank_threshold.npy'

    timer = Timer()
    ldat = ld.load(filename, verbose=True)
    timer.print_elapsed_time('load')
    # load: elapsed time: 0:00:04.867

    timer = Timer()
    ldat2 = np.load(filename)
    timer.print_elapsed_time('numpy')
    # numpy: elapsed time: 0:00:27.982
    np.all(ldat == ldat2)

    # --- where on large data: 3d vs flattened vs numpy ------------------
    timer = Timer()
    xyz = ld.where(ldat)
    # fix: Timer method is print_elapsed_time (snake_case), consistent with
    # the calls above; printElapsedTime was a Python-2-era spelling.
    timer.print_elapsed_time('parallel')
    # parallel: elapsed time: 0:07:25.698

    lldat = ldat.reshape(-1, order='A')
    timer = Timer()
    xyz = ld.where(lldat)
    timer.print_elapsed_time('parallel 1d')
    # parallel 1d: elapsed time: 0:00:49.034

    timer = Timer()
    xyz = np.where(ldat)
    timer.print_elapsed_time('numpy')

    import os
    # os.remove(filename)

    # --- shared-memory load ---------------------------------------------
    filename = './ClearMap/Test/Skeletonization/test_bin.npy'
    timer = Timer()
    ldat = ld.load(filename, shared=True, verbose=True)
    timer.print_elapsed_time('load')
    ld.shm.isShared(ldat)

    # --- save / load round trip -----------------------------------------
    import numpy as np
    from ClearMap.Utils.Timer import Timer
    import ClearMap.DataProcessing.LargeData as ld
    reload(ld)

    filename = 'test_save.npy'
    dat = np.random.rand(100, 200, 100)
    ld.save(filename, dat)
    dat2 = ld.load(filename)
    np.all(dat == dat2)
    os.remove(filename)

    # --- setValue / take -------------------------------------------------
    import numpy as np
    from ClearMap.Utils.Timer import Timer
    import ClearMap.DataProcessing.LargeData as ld
    reload(ld)

    dat = np.zeros(100, dtype=bool)
    dat2 = dat.copy()
    indices = np.array([5, 6, 7, 8, 13, 42])
    ld.setValue(dat, indices, True, cutoff=0)
    dat2[indices] = True
    np.all(dat2 == dat)

    d = ld.take(dat, indices, cutoff=0)
    np.all(d)

    # --- neighbours ------------------------------------------------------
    import numpy as np
    from ClearMap.Utils.Timer import Timer
    import ClearMap.DataProcessing.LargeData as ld
    reload(ld)

    pts = np.array([0, 1, 5, 6, 10, 11], dtype=int)
    ld.neighbours(pts, -10)

    # --- findNeighbours with a structure-element mask --------------------
    import numpy as np
    from ClearMap.Utils.Timer import Timer
    import ClearMap.DataProcessing.LargeData as ld
    import ClearMap.ImageProcessing.Filter.StructureElement as sel
    reload(ld)

    dat = np.random.rand(30, 40, 50) > 0.5
    mask = sel.structureElement('Disk', (5, 5, 5))

    indices = np.where(dat.reshape(-1))[0]
    # fix: integer division -- a float index raises under Python 3
    c_id = len(indices) // 2
    c = indices[c_id]
    xyz = np.unravel_index(c, dat.shape)
    # fix: integer division -- slice bounds must be integers in Python 3
    l = np.array(mask.shape) // 2
    r = np.array(mask.shape) - l

    # clip the neighbourhood (and the mask) to the data boundaries
    dlo = [max(0, xx - ll) for xx, ll in zip(xyz, l)]
    dhi = [min(xx + rr, ss) for xx, rr, ss in zip(xyz, r, dat.shape)]
    mlo = [-min(0, xx - ll) for xx, ll in zip(xyz, l)]
    mhi = [
        mm + min(0, ss - xx - rr)
        for xx, rr, ss, mm in zip(xyz, r, dat.shape, mask.shape)
    ]

    nbh = dat[dlo[0]:dhi[0], dlo[1]:dhi[1], dlo[2]:dhi[2]]
    nbhm = np.logical_and(
        nbh, mask[mlo[0]:mhi[0], mlo[1]:mhi[1], mlo[2]:mhi[2]] > 0)
    nxyz = np.where(nbhm)
    nxyz = [nn + dl for nn, dl in zip(nxyz, dlo)]
    nbi = np.ravel_multi_index(nxyz, dat.shape)

    nbs = ld.findNeighbours(indices, c_id, dat.shape, dat.strides, mask)
    nbs.sort()
    print(np.all(nbs == nbi))

    # --- findNeighbours with a box radius --------------------------------
    dat = np.random.rand(30, 40, 50) > 0.5

    indices = np.where(dat.reshape(-1))[0]
    # fix: integer division, as above
    c_id = len(indices) // 2
    c = indices[c_id]
    xyz = np.unravel_index(c, dat.shape)
    l = np.array([2, 2, 2])
    r = l + 1

    dlo = [max(0, xx - ll) for xx, ll in zip(xyz, l)]
    dhi = [min(xx + rr, ss) for xx, rr, ss in zip(xyz, r, dat.shape)]

    nbh = dat[dlo[0]:dhi[0], dlo[1]:dhi[1], dlo[2]:dhi[2]]
    nxyz = np.where(nbh)
    nxyz = [nn + dl for nn, dl in zip(nxyz, dlo)]
    nbi = np.ravel_multi_index(nxyz, dat.shape)

    nbs = ld.findNeighbours(indices, c_id, dat.shape, dat.strides, tuple(l))
    nbs.sort()
    print(np.all(nbs == nbi))
    print(nbs)
    print(nbi)

    # --- setArray ---------------------------------------------------------
    import numpy as np
    from ClearMap.Utils.Timer import Timer
    import ClearMap.DataProcessing.LargeData as ld
    reload(ld)

    data = np.random.rand(100)
    values = np.random.rand(50)
    indices = np.arange(50)
    ld.setArray(data, indices, values, cutoff=1)
    print(np.all(data[:50] == values))

    # --- match ------------------------------------------------------------
    import numpy as np
    from ClearMap.Utils.Timer import Timer
    import ClearMap.DataProcessing.LargeData as ld
    reload(ld)

    m = np.array([1, 3, 6, 7, 10])
    i = np.array([1, 2, 3, 4, 6, 7, 8, 9])
    o = ld.match(m, i)
    # reference result via numpy (renamed loop var from 'l' for legibility)
    o2 = [np.where(i == v)[0][0] for v in m]