def construct_from_parameters_add_tree():
    """Build a skeleton from explicit parameters, attach a single tree, and save it."""
    params = Skeleton.define_parameters('2017-01-12_FD0156-2', (11.24, 11.24, 32))
    skeleton = Skeleton(parameters=params)
    tree_nodes = skeleton.define_nodes(
        [40000, 40100, 40200],
        [45000, 45100, 45200],
        [1000, 1100, 1200],
        [1, 2, 3])
    # Two edges connecting node ids 1 and 2 to node id 3
    tree_edges = [(1, 3), (2, 3)]
    skeleton.add_tree(tree_nodes, tree_edges)
    skeleton.write_nml('testdata/PA_gen.nml')
def add_nodes_as_trees():
    """Merge bare nodes into an existing skeleton as single-node trees and save."""
    skeleton = Skeleton('testdata/02_ref.nml')
    new_nodes = skeleton.define_nodes(
        [40000, 40100, 40200],
        [45000, 45100, 45200],
        [1000, 1100, 1200])
    skeleton.add_nodes_as_trees(new_nodes)
    skeleton.write_nml('testdata/01_NT_merged_gen.nml')
def add_tree_from_skel():
    """Merge a single tree from another skeleton and verify id uniqueness.

    Tests merging a skeleton that has multiple trees with one tree taken
    from a second skeleton, then checks node/tree ids and writes the result.
    """
    skel = Skeleton('testdata/01_ref.nml')
    skel.add_tree_from_skel(Skeleton('testdata/02_ref.nml'), tree_idx=1, group_id=10)
    # Node ids across all trees must remain unique after the merge
    all_node_ids = np.concatenate([tree_nodes.id.values for tree_nodes in skel.nodes])
    _, node_counts = np.unique(all_node_ids, return_counts=True)
    assert all(node_counts < 2)
    # Tree ids must remain unique as well
    _, tree_counts = np.unique(skel.tree_ids, return_counts=True)
    assert all(tree_counts < 2)
    # Test writing
    skel.write_nml('testdata/01_02_single_merged_gen.nml')
# Script fragment: load the annotated NML + WKW dataset and prepare per-plane
# prediction accumulation.
# NOTE(review): `path_in` is defined earlier in the file (not visible here).
cache_HDD_root = os.path.join(path_in, '.cache/')
path_datasources = os.path.join(path_in, 'datasources.json')
path_nml_in = os.path.join(path_in, 'bbox_annotated.nml')
input_shape = (140, 140, 1)
target_shape = (1, 1, 1)
stride = (35, 35, 1)
datasources = WkwData.datasources_from_json(path_datasources)
dataset = WkwData(input_shape=input_shape, target_shape=target_shape,
                  data_sources=datasources, stride=stride, cache_HDD=False,
                  cache_RAM=True)
skel = Skeleton(path_nml_in)
# Accumulator for per-node prediction rows
pred_df = pd.DataFrame(columns=[
    'tree_idx', 'tree_id', 'x', 'y', 'z', 'xi', 'yi', 'class', 'explicit',
    'cluster_id', 'prob'
])
group_ids = np.array(skel.group_ids)
input_path = datasources[0].input_path
input_bbox = datasources[0].input_bbox
# Bugfix: np.int and np.bool were deprecated in NumPy 1.20 and removed in
# 1.24; the builtin types are the documented drop-in replacements.
structure = np.ones((3, 3), dtype=int)
cluster_id = 0
for plane_group in skel.groups:
    plane_group_id = plane_group.id
    # Trailing digit of the group name encodes the binary class label
    plane_group_class = bool(int(plane_group.name[-1]))
    plane_tree_inds = np.where(group_ids == plane_group_id)[0]
    plane_matrix = np.zeros((5, 5), dtype=bool)
def get_distances_to_node():
    """Query node distances both by absolute node id and by (tree, node) index."""
    skeleton = Skeleton('testdata/01_ref.nml')
    query_positions = skeleton.nodes[5].position
    skeleton.get_distances_to_node(positions=query_positions, node_id=35370)
    skeleton.get_distances_to_node(positions=query_positions, tree_idx=0, node_idx=5)
def get_distance_to_nodes():
    """Compute distances from one fixed position to all nodes of a single tree."""
    skeleton = Skeleton('testdata/01_ref.nml')
    skeleton.get_distance_to_nodes(position=(40000, 38000, 1000), tree_idx=6)
from genEM3.util.image import bboxesFromArray
from genEM3.data.skeleton import get_volume_df

# Script fragment: read three annotated test skeletons and build datasources
# from their node coordinates and class labels.
path_in_stub = os.path.join(get_runs_dir(), 'inference/ae_classify_11_parallel')
test_dirs = ['test_center_filt', 'test_bottom_filt', 'test_top_filt']
skel_dirs = [os.path.join(path_in_stub, d, 'bbox_annotated.nml') for d in test_dirs]
# Check that all of the files exist before doing any slow parsing
assert all(os.path.exists(skel_dir) for skel_dir in skel_dirs)
# Create skeleton objects (timed — NML parsing can be slow)
start = time.time()
skeletons = [Skeleton(skel_dir) for skel_dir in skel_dirs]
print(f'Time to read skeleton: {time.time() - start}')
# Read the coordinates and target class of all three skeletons into one data frame
volume_df = get_volume_df(skeletons=skeletons)
# Get the ingredients for making the datasources
bboxes = bboxesFromArray(volume_df[['x', 'y', 'z']].values)
input_dir = '/tmpscratch/webknossos/Connectomics_Department/2018-11-13_scMS109_1to7199_v01_l4_06_24_fixed_mag8_artifact_pred/color/1'
# Bugfix: np.float was deprecated in NumPy 1.20 and removed in 1.24; the
# builtin float is the documented replacement (same float64 dtype).
target_class = volume_df['class'].values.astype(float)
target_binary = 1
target_dir = input_dir
input_mean = 148.0
input_std = 36.0
# Create a list of data sources
source_list = []
for i, cur_bbox in enumerate(bboxes):
    cur_target = target_class[i]
def add_trees_from_skel():
    """Merge skeletons containing both root-level and (nested) group trees."""
    skel = Skeleton('testdata/01_ref.nml')
    # Sanity check: every tree is one connected component before merging
    assert all(
        Skeleton._num_conn_comp(Skeleton._get_graph(tree_nodes, tree_edges)) == 1
        for tree_nodes, tree_edges in zip(skel.nodes, skel.edges))
    skel.add_trees_from_skel(Skeleton('testdata/02_ref.nml'))
    # Node ids and tree ids must stay unique in the merged nml
    _, node_counts = np.unique(
        np.concatenate([tree_nodes.id.values for tree_nodes in skel.nodes]),
        return_counts=True)
    assert all(node_counts < 2)
    _, tree_counts = np.unique(skel.tree_ids, return_counts=True)
    assert all(tree_counts < 2)
    # Connectivity must survive the merge; then test writing
    assert all(
        Skeleton._num_conn_comp(Skeleton._get_graph(tree_nodes, tree_edges)) == 1
        for tree_nodes, tree_edges in zip(skel.nodes, skel.edges))
    skel.write_nml('testdata/01_02_merged_gen.nml')
    # Test reading the generated file back, compared against the reference
    skel_gen = Skeleton('testdata/01_02_merged_gen.nml')
    skel_ref = Skeleton('testdata/01_02_merged_ref.nml')
    # Equal sets of per-tree node and edge counts in generated vs reference merge
    assert {len(tree_nodes) for tree_nodes in skel_gen.nodes} == \
        {len(tree_nodes) for tree_nodes in skel_ref.nodes}
    assert {len(tree_edges) for tree_edges in skel_gen.edges} == \
        {len(tree_edges) for tree_edges in skel_ref.edges}
    # Merge a skeleton having root and group trees with one having only root trees
    skel = Skeleton('testdata/02_ref.nml')
    skel.add_trees_from_skel(Skeleton('testdata/03_ref.nml'))
    skel.write_nml('testdata/02_03_merged_gen.nml')
    # ...and the reverse direction: root-only skeleton absorbing a grouped one
    skel = Skeleton('testdata/03_ref.nml')
    skel.add_trees_from_skel(Skeleton('testdata/02_ref.nml'))
    skel.write_nml('testdata/03_02_merged_gen.nml')
# NOTE(review): this fragment is incomplete at both ends — it opens by closing
# a WkwData(...) call whose start is not visible here, and it ends inside the
# `positions` array literal (the closing bracket and remainder of the loop body
# are not visible). Left byte-identical; confirm against the full file before
# restructuring. It appears to lay out an evaluation grid of mesh-center
# positions and add marker trees to an (initially empty) skeleton — TODO confirm.
data_sources=datasources, stride=stride, cache_HDD=False, cache_RAM=False) bbox_val_dims_vx = np.array(bbox_val_dims_um) * 1000 / np.array(scale) n_fits = np.ceil((bbox_val_dims_vx - np.array(input_shape)) / np.array(stride)).astype(int) meshes = dataset.data_meshes[0]['input'] meshes_shape = np.array(meshes['x'].shape) meshes_center = np.floor(meshes_shape / 2) meshes_min = np.floor(meshes_center - n_fits/2).astype(int) meshes_max = np.floor(meshes_center + n_fits/2).astype(int) meshes_val = {key: meshes[key][meshes_min[0]:meshes_max[0], meshes_min[1]:meshes_max[1], meshes_min[2]:meshes_max[2]] for key in meshes} skel = Skeleton(os.path.join(get_runs_dir(), 'inference/ae_classify_11_parallel/empty.nml')) min_id = 1 for idx in range(np.prod(n_fits)): print('adding trees {}/{}'.format(idx, np.prod(n_fits))) xi, yi, zi = np.unravel_index(idx, shape=n_fits) cx = meshes_val['x'][xi, yi, zi] cy = meshes_val['y'][xi, yi, zi] cz = meshes_val['z'][xi, yi, zi] positions = np.array([ [cx, cy, cz], [cx - input_shape[0]/2, cy - input_shape[1]/2, cz], [cx - input_shape[0]/2, cy + input_shape[1]/2, cz], [cx + input_shape[0]/2, cy - input_shape[1]/2, cz], [cx + input_shape[0]/2, cy + input_shape[1]/2, cz], [cx + input_shape[0]/2, cy + input_shape[1]/2 - 1, cz]
def construct_from_parameters_empty():
    """Create an empty skeleton from explicit parameters and write it out."""
    params = Skeleton.define_parameters('2017-01-12_FD0156-2', (11.24, 11.24, 32))
    empty_skeleton = Skeleton(parameters=params)
    empty_skeleton.write_nml('testdata/PE_gen.nml')
def construct_from_nml():
    """Round-trip: parse a reference NML file and write it back out."""
    Skeleton('testdata/02_ref.nml').write_nml('testdata/02_gen.nml')
def plot():
    """Plot a merged reference skeleton in micrometres using the Dark2 palette."""
    merged = Skeleton('testdata/01_02_merged_ref.nml')
    merged.plot(unit='um', view=None, colors='Dark2')