Code example #1
def ensemble2pointcloud(
        ensemble: CloudEnsemble) -> Optional[Union[PointCloud, HybridCloud]]:
    """ Merges vertices and labels from all clouds in the ensemble into a single PointCloud with the respective
        object boundary information saved in obj_bounds. There can only be one HybridCloud per CloudEnsemble, if
        there is one, the nodes and edges get transferred as well.

    Args:
        ensemble: The CloudEnsemble whose clouds should be merged.
    """
    parts = list(ensemble.clouds.values())
    names = list(ensemble.clouds.keys())
    merged_clouds = clouds.merge_clouds(parts, names, ignore_hybrids=True)
    if merged_clouds is not None:
        merged_clouds.add_no_pred(ensemble.no_pred)
    if ensemble.hc is None:
        return merged_clouds
    else:
        if merged_clouds is None:
            # no additional clouds are present
            return ensemble.hc
        return clouds.merge_clouds([ensemble.hc, merged_clouds],
                                   ['hybrid', 'clouds'])
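
A minimal usage sketch for the function above. The MorphX import paths and the CloudEnsemble/HybridCloud constructor keywords are assumptions inferred from the attribute access in the snippet, as is the small synthetic ensemble:

import numpy as np
from morphx.classes.pointcloud import PointCloud          # assumed module path
from morphx.classes.hybridcloud import HybridCloud        # assumed module path
from morphx.classes.cloudensemble import CloudEnsemble    # assumed module path

# A trivial backbone HybridCloud plus two additional organelle clouds.
hc = HybridCloud(nodes=np.zeros((1, 3)), edges=np.zeros((0, 2), dtype=int),
                 vertices=np.random.rand(10, 3))
ensemble = CloudEnsemble({'mi': PointCloud(np.random.rand(5, 3)),
                          'sy': PointCloud(np.random.rand(5, 3))}, hybrid=hc)
merged = ensemble2pointcloud(ensemble)
print(merged.obj_bounds)  # per-object index ranges, e.g. for 'mi' and 'sy'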
Code example #2
def build_pcd(cloud_list: list, random_seed: Optional[int] = None) -> o3d.geometry.PointCloud:
    """ Builds an Open3d point cloud object out of the given list of morphx PointClouds.

    Args:
        cloud_list: List of MorphX PointCloud objects which should be visualized.
        random_seed: Seed for the NumPy RNG so that randomly generated label colors are reproducible.
    """
    if random_seed is not None:
        np.random.seed(random_seed)

    # merge all clouds in cloud_list
    merged = None
    for cloud in cloud_list:
        if isinstance(cloud, CloudEnsemble):
            cloud = cloud.flattened
        if merged is None:
            merged = cloud
        else:
            merged = clouds.merge_clouds([merged, cloud])

    # an empty input list leaves nothing to visualize
    if merged is None:
        raise ValueError("cloud_list must contain at least one cloud")

    labels = merged.labels
    vertices = merged.vertices

    # add 3D points
    pcd = o3d.geometry.PointCloud()
    pcd.points = o3d.utility.Vector3dVector(vertices)

    # assign colors if labels exist
    if labels is not None and len(labels) != 0:
        labels = labels.reshape(len(labels))
        label_num = int(max(np.unique(labels)) + 1)

        # generate colors (either fixed or randomly)
        if label_num <= 10:
            colors = np.array([[122, 174, 183], [197, 129, 104], [87, 200, 50],
                               [137, 58, 252], [133, 1, 1], [107, 114, 219],
                               [4, 52, 124], [46, 41, 78], [46, 41, 78],
                               [46, 41, 78]]) / 255
        else:
            colors = np.random.choice(range(256), size=(label_num, 3)) / 255
        colors = colors[labels.astype(int)]

        pcd.colors = o3d.utility.Vector3dVector(colors)

    return pcd
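
The helper can be driven as below; `draw_geometries` is a standard Open3D call, while the MorphX import path and PointCloud keywords are assumptions:

import numpy as np
import open3d as o3d
from morphx.classes.pointcloud import PointCloud  # assumed module path

# Three label classes on a random cloud; the seed fixes the color assignment.
pc = PointCloud(np.random.rand(1000, 3),
                labels=np.random.randint(0, 3, (1000, 1)))
pcd = build_pcd([pc], random_seed=42)
o3d.visualization.draw_geometries([pcd])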
Code example #3
def sample_ensemble(ensemble: CloudEnsemble, vertex_number: int, random_seed: Optional[int] = None) \
        -> Tuple[Optional[PointCloud], np.ndarray]:
    """ Samples ensemble parts with respect to their vertex number. Each cloud in the ensemble gets
        len(cloud.vertices)/len(ensemble.vertices)*vertex_number points (ceiled if possible), where
        len(ensemble.vertices) is just the total number of vertices from all ensemble clouds. The
        samples from the different clouds are merged into one PointCloud, the cloud information
        gets saved in the obj_bounds dict of that PointCloud.

    Args:
        ensemble: The ensemble from whose objects the samples should be drawn.
        vertex_number: The number of requested sample points.
        random_seed: A random seed to make sampling deterministic.

    Returns:
        PointCloud object with ensemble cloud information in obj_bounds, and a np.ndarray of shape
        (vertex_number, 1) holding the indices of the drawn vertices, stored contiguously per object.
    """
    total = 0
    for key in ensemble.clouds:
        total += len(ensemble.clouds[key].vertices)
    if total == 0:
        return None, np.zeros(0)
    current = 0
    result_ixs = np.zeros((vertex_number, 1))
    samples = []
    names = []
    for key in ensemble.clouds:
        verts = len(ensemble.clouds[key].vertices) / total * vertex_number
        if current + ceil(verts) <= vertex_number:
            verts = ceil(verts)
        else:
            # no budget left to round up; round down so that verts is an int
            # and the index slice below stays valid
            verts = floor(verts)
        sample, ixs = clouds.sample_objectwise(ensemble.clouds[key],
                                               verts,
                                               random_seed=random_seed)
        result_ixs[current:current + verts] = ixs
        current += verts
        samples.append(sample)
        names.append(key)
    result = clouds.merge_clouds(samples, names)
    return result, result_ixs
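
A hedged example of drawing a fixed-size sample; import paths and constructor signatures are again assumptions based on the attribute access above:

import numpy as np
from morphx.classes.pointcloud import PointCloud        # assumed module path
from morphx.classes.cloudensemble import CloudEnsemble  # assumed module path

# Parts of unequal size: 'a' should contribute roughly two thirds of the sample.
ensemble = CloudEnsemble({'a': PointCloud(np.random.rand(200, 3)),
                          'b': PointCloud(np.random.rand(100, 3))})
sample, ixs = sample_ensemble(ensemble, vertex_number=30, random_seed=0)
print(len(sample.vertices), sample.obj_bounds)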
Code example #4
    def load_cloudset(self, idx: int):
        """ Gets executed when cloudset flag is set and visualizes the results from cloudset chunking performed by
            the morphx.data.analyser save_cloudset method.

        Args:
            idx: Index of the file at which viewing should start.
        """

        while idx < len(self.files1):
            file = self.files1[idx]
            filename = os.path.basename(file)[:-4]
            print("Viewing: " + filename)

            with open(file, 'rb') as f:
                content = pickle.load(f)

            hybrid_idx = content[0]
            hybrid_file = [
                file for file in self.files2
                if 'cloud_{}'.format(hybrid_idx) in file
            ]
            hybrid = basics.load_pkl(hybrid_file[0])

            local_bfs = content[1]
            sample = content[2]
            bfs_cloud = visualize.prepare_bfs(hybrid, local_bfs)

            hybrid_bfs = clouds.merge_clouds([hybrid, bfs_cloud])
            res = self.core_next(hybrid_bfs, sample,
                                 'sample_h{}_i{}'.format(hybrid_idx, idx))

            if res is None:
                return
            else:
                idx += res
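
A calling sketch for the method; the owning viewer class is not shown above, so `CloudsetViewer` and its constructor are hypothetical stand-ins:

import os
import glob

# Hypothetical file lists matching what files1/files2 are indexed for above.
chunk_files = sorted(glob.glob(os.path.expanduser('~/cloudset/chunks/*.pkl')))
hybrid_files = sorted(glob.glob(os.path.expanduser('~/cloudset/hybrids/*.pkl')))
viewer = CloudsetViewer(files1=chunk_files, files2=hybrid_files)  # hypothetical class
viewer.load_cloudset(idx=0)  # start viewing at the first chunk file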
Code example #5
File: analyse_data.py    Project: PhylomatX/NeuronX
def compare_transforms(chunk_size: int, sample_num: int):
    """ Create and save all resulting chunks of an dataset with different transforms """
    # features = {'hc': np.array([1, 0, 0, 0]),
    #             'mi': np.array([0, 1, 0, 0]),
    #             'vc': np.array([0, 0, 1, 0]),
    #             'sy': np.array([0, 0, 0, 1])}
    features = {'hc': np.array([1])}
    identity = clouds.Compose([clouds.Identity()])
    center = clouds.Compose([clouds.Center()])
    path = os.path.expanduser('~/thesis/gt/cmn/dnh/voxeled/')
    save_path = f'{path}examples/'
    ch = ChunkHandler(path,
                      sample_num=sample_num,
                      density_mode=False,
                      tech_density=100,
                      bio_density=100,
                      specific=True,
                      ctx_size=chunk_size,
                      obj_feats=features,
                      transform=identity,
                      splitting_redundancy=2,
                      label_mappings=[(5, 3), (6, 4)],
                      label_remove=None,
                      sampling=True,
                      verbose=True)
    ch_transform = ChunkHandler(path,
                                sample_num=sample_num,  # match the first handler for comparability
                                density_mode=False,
                                tech_density=100,
                                bio_density=100,
                                specific=True,
                                ctx_size=chunk_size,
                                obj_feats=features,
                                transform=center,
                                splitting_redundancy=2,
                                label_mappings=[(5, 3), (6, 4)],
                                label_remove=None,
                                sampling=True,
                                verbose=True)
    vert_nums = []
    counter = 0
    chunk_num = 0
    total = None
    for item in ch.obj_names:
        total_cell = None
        chunk_num += ch.get_obj_length(item)
        for i in range(ch.get_obj_length(item)):
            sample, idcs, vert_num = ch[(item, i)]
            sample_t, _, _ = ch_transform[(item, i)]
            vert_nums.append(vert_num)
            if not os.path.exists(save_path + f'{item}/'):
                os.makedirs(save_path + f'{item}/')
            if vert_num < ch.sample_num:
                counter += 1
            with open(f'{save_path}{item}/{i}.pkl', 'wb') as f:
                pickle.dump([sample, sample_t], f)
            if total_cell is None:
                total_cell = sample
            else:
                total_cell = clouds.merge_clouds([total_cell, sample])
        if total is None:
            total = total_cell
        else:
            total = clouds.merge_clouds([total, total_cell])
        with open(f'{save_path}{item}/total.pkl', 'wb') as f:
            pickle.dump(total_cell, f)
    with open(f'{save_path}total.pkl', 'wb') as f:
        pickle.dump(total, f)
    vert_nums = np.array(vert_nums)
    print(f"Min: {vert_nums.min()}")
    print(f"Max: {vert_nums.max()}")
    print(f"Mean: {vert_nums.mean()}")
    print(f"Chunks with less points than requested: {counter}/{chunk_num}")
    with open(f'{save_path}{chunk_size}_vertnums.pkl', 'wb') as f:
        pickle.dump(vert_nums, f)
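
Called as below (illustrative values), the function writes one pickle per chunk plus per-cell and global totals under the hardcoded `examples/` directory:

import os
import pickle

compare_transforms(chunk_size=20, sample_num=5000)

# Each per-chunk file holds [identity_sample, centered_sample]; the totals hold
# the merged identity samples only.
with open(os.path.expanduser('~/thesis/gt/cmn/dnh/voxeled/examples/total.pkl'), 'rb') as f:
    total = pickle.load(f)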
Code example #6
File: analyse_model.py    Project: PhylomatX/NeuronX
def analyse_features(m_path: str, args_path: str, out_path: str, val_path: str, context_list: List[Tuple[str, int]],
                     label_mappings: List[Tuple[int, int]] = None, label_remove: List[int] = None,
                     splitting_redundancy: int = 1, test: bool = False):
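    """ Loads a trained segmentation model, hooks its intermediate layers and saves their feature maps
        (raw and activated) for the given validation contexts as PointClouds for inspection. With
        test=True, a single random input is pushed through the model instead of validation data. """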
    device = torch.device('cuda')
    m_path = os.path.expanduser(m_path)
    out_path = os.path.expanduser(out_path)
    args_path = os.path.expanduser(args_path)
    val_path = os.path.expanduser(val_path)

    # load model specifications
    argscont = ArgsContainer().load_from_pkl(args_path)

    lcp_flag = False
    # load model
    if argscont.architecture == 'lcp' or argscont.model == 'ConvAdaptSeg':
        kwargs = {}
        if argscont.model == 'ConvAdaptSeg':
            kwargs = dict(f_map_num=argscont.pl, architecture=argscont.architecture, act=argscont.act, norm=argscont.norm_type)
        conv = dict(layer=argscont.conv[0], kernel_separation=argscont.conv[1])
        model = get_network(argscont.model, argscont.input_channels, argscont.class_num, conv, argscont.search, **kwargs)
        lcp_flag = True
    elif argscont.use_big:
        model = SegBig(argscont.input_channels, argscont.class_num, trs=argscont.track_running_stats, dropout=0,
                       use_bias=argscont.use_bias, norm_type=argscont.norm_type, use_norm=argscont.use_norm,
                       kernel_size=argscont.kernel_size, neighbor_nums=argscont.neighbor_nums,
                       reductions=argscont.reductions, first_layer=argscont.first_layer,
                       padding=argscont.padding, nn_center=argscont.nn_center, centroids=argscont.centroids,
                       pl=argscont.pl, normalize=argscont.cp_norm)
    else:
        print("Adaptable model was found!")
        model = SegAdapt(argscont.input_channels, argscont.class_num, architecture=argscont.architecture,
                         trs=argscont.track_running_stats, dropout=argscont.dropout, use_bias=argscont.use_bias,
                         norm_type=argscont.norm_type, kernel_size=argscont.kernel_size, padding=argscont.padding,
                         nn_center=argscont.nn_center, centroids=argscont.centroids, kernel_num=argscont.pl,
                         normalize=argscont.cp_norm, act=argscont.act)
    full = torch.load(m_path)
    try:
        model.load_state_dict(full)
    except RuntimeError:
        # training checkpoints wrap the weights in a 'model_state_dict' entry
        model.load_state_dict(full['model_state_dict'])
    model.to(device)
    model.eval()

    pts = torch.rand(1, argscont.sample_num, 3, device=device)
    feats = torch.rand(1, argscont.sample_num, argscont.input_channels, device=device)
    contexts = []
    th = None

    if not test:
        # prepare data loader
        if label_mappings is None:
            label_mappings = argscont.label_mappings
        if label_remove is None:
            label_remove = argscont.label_remove
        transforms = clouds.Compose(argscont.val_transforms)
        th = TorchHandler(val_path, argscont.sample_num, argscont.class_num, density_mode=argscont.density_mode,
                          bio_density=argscont.bio_density, tech_density=argscont.tech_density, transform=transforms,
                          specific=True, obj_feats=argscont.features, ctx_size=argscont.chunk_size,
                          label_mappings=label_mappings, hybrid_mode=argscont.hybrid_mode,
                          feat_dim=argscont.input_channels, splitting_redundancy=splitting_redundancy,
                          label_remove=label_remove, sampling=argscont.sampling,
                          force_split=False, padding=argscont.padding, exclude_borders=0)
        for context in context_list:
            pts = torch.zeros((1, argscont.sample_num, 3))
            feats = torch.ones((1, argscont.sample_num, argscont.input_channels))
            sample = th[context]
            pts[0] = sample['pts']
            feats[0] = sample['features']
            o_mask = sample['o_mask'].numpy().astype(bool)
            l_mask = sample['l_mask'].numpy().astype(bool)
            target = sample['target'].numpy()
            target = target[l_mask].astype(int)
            contexts.append((feats, pts, o_mask, l_mask, target))
    else:
        contexts.append((feats, pts))

    for c_ix, context in enumerate(contexts):
        # set hooks

        if lcp_flag:
            layer_outs = SaveFeatures(list(model.children())[0][1:])
            act_outs = SaveFeatures([layer.activation for layer in list(model.children())[0][1:]])
        else:
            layer_outs = SaveFeatures(list(model.children())[1])
            act_outs = SaveFeatures([list(model.children())[0]])
        feats = context[0].to(device, non_blocking=True)
        pts = context[1].to(device, non_blocking=True)

        if lcp_flag:
            pts = pts.transpose(1, 2)
            feats = feats.transpose(1, 2)

        output = model(feats, pts).cpu().detach()

        if lcp_flag:
            output = output.transpose(1, 2).numpy()

        if not test:
            output = np.argmax(output[0][context[2]].reshape(-1, th.num_classes), axis=1)
            pts = context[1][0].numpy()
            identifier = f'{context_list[c_ix][0]}_{context_list[c_ix][1]}'
            target = PointCloud(pts, context[4])
            x_offset = (pts[:, 0].max() - pts[:, 0].min()) * 1.5 * 3
            pred = PointCloud(pts[context[3]], output)
            pred.move(np.array([x_offset / 2, 0, 0]))
            clouds.merge([target, pred]).save2pkl(out_path + identifier + '_0io_r_a.pkl')
        for ix, layer in enumerate(layer_outs.features):
            if len(layer) < 2:
                continue
            feats = layer[0].detach().cpu()[0]
            feats_act = act_outs.features[ix].detach().cpu()[0]
            pts = layer[1].detach().cpu()[0]
            if lcp_flag:
                feats = feats.transpose(0, 1).numpy()
                feats_act = feats_act.transpose(0, 1).numpy()
                pts = pts.transpose(0, 1).numpy()
            else:
                feats = feats.numpy()
                feats_act = feats_act.numpy()
                pts = pts.numpy()
            x_offset = (pts[:, 0].max() - pts[:, 0].min()) * 1.5 * 3
            x_offset_act = x_offset / 3
            y_size = (pts[:, 1].max() - pts[:, 1].min()) * 1.5
            y_offset = 0
            row_num = feats.shape[1] / 8
            total_pc = None
            total_pc_act = None
            for i in range(feats.shape[1]):
                if i % 8 == 0 and i != 0:
                    y_offset += y_size
                pc = PointCloud(vertices=pts, features=feats[:, i].reshape(-1, 1))
                pc_act = PointCloud(vertices=pts, features=feats_act[:, i].reshape(-1, 1))
                pc.move(np.array([(i % 8) * x_offset, y_offset, 0]))
                pc_act.move(np.array([(i % 8) * x_offset + x_offset / 2.8, y_offset, 0]))
                pc = clouds.merge_clouds([pc, pc_act])
                pc_act = PointCloud(vertices=pts, features=feats_act[:, i].reshape(-1, 1))
                pc_act.move(np.array([(i % 8) * x_offset_act, y_offset, 0]))
                if total_pc is None:
                    total_pc = pc
                    total_pc_act = pc_act
                else:
                    total_pc = clouds.merge_clouds([total_pc, pc])
                    total_pc_act = clouds.merge_clouds([total_pc_act, pc_act])
            total_pc.move(np.array([-4 * x_offset - x_offset / 2, -row_num / 2 * y_size - y_size / 2, 0]))
            total_pc_act.move(np.array([-4 * x_offset_act - x_offset_act / 2, -row_num / 2 * y_size - y_size / 2, 0]))
            total_pc.save2pkl(out_path + f'{context_list[c_ix][0]}_{context_list[c_ix][1]}_l{ix}_r.pkl')
            total_pc_act.save2pkl(out_path + f'{context_list[c_ix][0]}_{context_list[c_ix][1]}_l{ix}_a.pkl')
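
An invocation sketch with test=True, which skips the data loader and pushes a random sample through the hooked model; all paths are placeholders:

analyse_features(m_path='~/models/state_dict.pth',      # placeholder paths
                 args_path='~/models/argscont.pkl',
                 out_path='~/analysis/',
                 val_path='~/gt/validation/',            # only used for the data loader
                 context_list=[('example_cell', 0)],     # (cell name, chunk index)
                 test=True)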
Code example #7
def hybridmesh2poisson(hm: HybridMesh, tech_density: int, obj_factor: float) -> PointCloud:
    """ If there is a skeleton, it gets split into chunks of approximately equal size. For each chunk
        the corresponding mesh piece gets extracted and gets sampled according to its area. If there
        is no skeleton, the mesh is split into multiple parts, depending on its area. Each part is
        then again sampled based on its area.

    Args:
        hm: HybridMesh which should be transformed into a HybridCloud with poisson disk sampled points.
        tech_density: poisson sampling density in points/µm². With tech_density = -1, the number of sampled
            points equals the number of vertices in the given HybridMesh.
        obj_factor: multiplier applied to the number of sampled points.
    """
    if len(hm.nodes) == 0:
        offset = 0
        mesh = trimesh.Trimesh(vertices=hm.vertices, faces=hm.faces)
        area = mesh.area * 1e-06
        # number of chunks should be relative to area
        chunk_number = round(area / 6)
        if area == 0 or chunk_number == 0:
            return PointCloud()
        total = None
        for i in tqdm(range(int(chunk_number))):
            # process all faces left with last chunk
            if i == chunk_number-1:
                chunk_faces = hm.faces[offset:]
            else:
                chunk_faces = hm.faces[offset:offset + floor(len(hm.faces) // chunk_number)]
                offset += floor(len(hm.faces) // chunk_number)
            chunk_hm = HybridMesh(vertices=hm.vertices, faces=chunk_faces, labels=hm.labels, types=hm.types)
            mesh = trimesh.Trimesh(vertices=chunk_hm.vertices, faces=chunk_hm.faces)
            area = mesh.area * 1e-06
            if tech_density == -1:
                pc = meshes.sample_mesh_poisson_disk(chunk_hm, int(len(chunk_hm.vertices) * obj_factor))
            else:
                pc = meshes.sample_mesh_poisson_disk(chunk_hm, tech_density * area * obj_factor)
            if total is None:
                total = pc
            else:
                total = clouds.merge_clouds([total, pc])
        result = PointCloud(vertices=total.vertices, labels=total.labels, encoding=hm.encoding, no_pred=hm.no_pred,
                            types=total.types)
    else:
        total = None
        intermediate = None
        context_size = 5
        skel2node_mapping = True
        counter = 0
        chunks = graphs.bfs_iterative(hm.graph(), 0, context_size)
        for chunk in tqdm(chunks):
            chunk = np.array(chunk)
            # At the first iteration the face2node mapping must be done
            if skel2node_mapping:
                print("Mapping faces to node for further processing. This might take a while...")
                skel2node_mapping = False
            extract = hybrids.extract_mesh_subset(hm, chunk)
            # skip chunks whose extracted mesh piece has no faces
            if len(extract.faces) == 0:
                continue
            # get the mesh area in trimesh units and use it to determine how many points should be sampled
            mesh = trimesh.Trimesh(vertices=extract.vertices, faces=extract.faces)
            area = mesh.area * 1e-06
            if area == 0:
                continue
            else:
                if tech_density == -1:
                    pc = meshes.sample_mesh_poisson_disk(extract, len(extract.vertices))
                else:
                    pc = meshes.sample_mesh_poisson_disk(extract, tech_density * area)
            if intermediate is None:
                intermediate = pc
            else:
                intermediate = clouds.merge_clouds([intermediate, pc])
            # merging slows down process => hold speed constant by reducing merging operations
            counter += 1
            if counter % 50 == 0:
                if total is None:
                    total = intermediate
                else:
                    total = clouds.merge_clouds([total, intermediate])
                intermediate = None
        # flush the remainder; either buffer may still be None at this point
        if intermediate is not None:
            total = intermediate if total is None else clouds.merge_clouds([total, intermediate])
        result = HybridCloud(nodes=hm.nodes, edges=hm.edges, vertices=total.vertices, labels=total.labels,
                             encoding=hm.encoding, no_pred=hm.no_pred, types=total.types)
    return result
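
A sketch for the skeleton-free branch; the HybridMesh constructor keywords mirror the attribute access above, and the module path is an assumption:

import numpy as np
from morphx.classes.hybridmesh import HybridMesh  # assumed module path

# vertices/faces would normally come from a real cell mesh; random data here
# only illustrates the call (a tiny random mesh may yield an empty PointCloud).
hm = HybridMesh(vertices=np.random.rand(1000, 3) * 1e4,
                faces=np.random.randint(0, 1000, (2000, 3)),
                nodes=np.zeros((0, 3)), edges=np.zeros((0, 2), dtype=int))
# tech_density=-1 samples roughly one point per mesh vertex (see docstring).
pc = hybridmesh2poisson(hm, tech_density=-1, obj_factor=1.0)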