def create_sparsetensor2d_file(file_name, rand_num_events, n_projections):
    # Helper: generate random sparse tensors and write them to file_name as 2D data.
    voxel_set_list = data_generator.build_sparse_tensor(
        rand_num_events, n_projections=n_projections)
    data_generator.write_sparse_tensors(
        file_name, voxel_set_list, dimension=2, n_projections=n_projections)
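
# Usage sketch (hypothetical, not part of the original module): write a small
# randomly generated 2D sparse-tensor file from a pytest test using the
# built-in tmp_path fixture.  The file name, event count, and projection
# count below are illustrative assumptions.
def _example_write_sparse_file(tmp_path):
    file_name = str(tmp_path / "sparse_tensor_2d.h5")
    create_sparsetensor2d_file(file_name, rand_num_events=5, n_projections=3)
    return file_name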
def test_sparse_tensor_downsample(dimension, pooling):

    # Create image Meta:
    meta = image_meta_factory(dimension)
    meta.set_projection_id(0)
    for dim in range(dimension):
        L = 10.
        N = 128
        meta.set_dimension(dim, L, N)

    if dimension == 2:
        st = larcv.SparseTensor2D()
    if dimension == 3:
        st = larcv.SparseTensor3D()
    st.meta(meta)

    # Get a set of voxels:
    voxel_set_list = data_generator.build_sparse_tensor(1, n_projections=1)
    indexes = voxel_set_list[0][0]['indexes']
    values = voxel_set_list[0][0]['values']
    n_voxels = voxel_set_list[0][0]['n_voxels']
    for j in range(n_voxels):
        if pooling == larcv.kPoolMax:
            # Only use positive values for max pooling.
            # Negative values have different behavior in sparse vs dense
            # max pooling.
            st.emplace(larcv.Voxel(indexes[j], numpy.abs(values[j])), False)
        else:
            st.emplace(larcv.Voxel(indexes[j], values[j]), False)

    # Dense downsampling is tested against skimage elsewhere.
    # Here, test sparse downsampling against dense downsampling.
    compression = 2

    st_dense = st.to_tensor()
    st_dense_compressed = st_dense.compress(compression, pooling).as_array()

    st_compressed = st.compress(compression, pooling)
    st_compressed_dense = st_compressed.dense()

    print(st_dense.as_array())

    # Do some checks: the totals should agree to within floating-point tolerance.
    assert numpy.abs(
        (st_compressed_dense.sum() - st_dense_compressed.sum())
        / st_dense_compressed.sum()) < 1e-6

    # Spot-check individual entries at random flat indexes.
    max_index = numpy.prod(st_compressed_dense.shape)
    for i in range(50):
        index = numpy.random.randint(0, max_index)
        assert numpy.abs(
            st_compressed_dense.take(index)
            - st_dense_compressed.take(index)) < 1e-4
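
# Reference sketch (not part of the test above): the comment in the test notes
# that dense downsampling is validated against skimage elsewhere.  For a 2D
# array, the dense equivalent of compress(2, larcv.kPoolMax) can be reproduced
# with skimage.measure.block_reduce as below; the function name and the 2D
# assumption are illustrative (a (2, 2, 2) block size would cover the 3D case).
import numpy
import skimage.measure

def dense_max_downsample_2d(dense_array, compression=2):
    # Max-pool over non-overlapping compression x compression blocks.
    return skimage.measure.block_reduce(
        dense_array, (compression, compression), numpy.max)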