def test_stc_to_label():
    """Test stc_to_label."""
    src = read_source_spaces(fwd_fname)
    src_bad = read_source_spaces(src_bad_fname)
    stc = read_source_estimate(stc_fname, 'sample')
    os.environ['SUBJECTS_DIR'] = op.join(data_path, 'subjects')
    labels1 = _stc_to_label(stc, src='sample', smooth=3)
    labels2 = _stc_to_label(stc, src=src, smooth=3)
    assert_equal(len(labels1), len(labels2))
    for l1, l2 in zip(labels1, labels2):
        assert_labels_equal(l1, l2, decimal=4)

    with pytest.warns(RuntimeWarning, match='have holes'):
        labels_lh, labels_rh = stc_to_label(stc, src=src, smooth=True,
                                            connected=True)

    pytest.raises(ValueError, stc_to_label, stc, 'sample', smooth=True,
                  connected=True)
    pytest.raises(RuntimeError, stc_to_label, stc, smooth=True, src=src_bad,
                  connected=True)
    assert_equal(len(labels_lh), 1)
    assert_equal(len(labels_rh), 1)

    # test getting tris
    tris = labels_lh[0].get_tris(src[0]['use_tris'], vertices=stc.vertices[0])
    pytest.raises(ValueError, spatial_tris_adjacency, tris,
                  remap_vertices=False)
    adjacency = spatial_tris_adjacency(tris, remap_vertices=True)
    assert adjacency.shape[0] == len(stc.vertices[0])

    # "src" as a subject name
    pytest.raises(TypeError, stc_to_label, stc, src=1, smooth=False,
                  connected=False, subjects_dir=subjects_dir)
    pytest.raises(ValueError, stc_to_label, stc, src=SourceSpaces([src[0]]),
                  smooth=False, connected=False, subjects_dir=subjects_dir)
    pytest.raises(ValueError, stc_to_label, stc, src='sample', smooth=False,
                  connected=True, subjects_dir=subjects_dir)
    pytest.raises(ValueError, stc_to_label, stc, src='sample', smooth=True,
                  connected=False, subjects_dir=subjects_dir)
    labels_lh, labels_rh = stc_to_label(stc, src='sample', smooth=False,
                                        connected=False,
                                        subjects_dir=subjects_dir)
    assert len(labels_lh) > 1
    assert len(labels_rh) > 1

    # with smooth='patch'
    with pytest.warns(RuntimeWarning, match='have holes'):
        labels_patch = stc_to_label(stc, src=src, smooth=True)
    assert len(labels_patch) == len(labels1)
    for l1, l2 in zip(labels1, labels2):
        assert_labels_equal(l1, l2, decimal=4)
def get_volume_sources(volume, space=5, remains=None):
    """Get sources in a volume.

    Parameters
    ----------
    volume : Volume object
    space : float
        The distance between sources.
    remains : None | int
        The number of sources to keep.

    Returns
    -------
    src : SourceSpaces object

    Author : Alexandre Fabre
    """
    if remains is None:
        remains, removes = get_number_sources(volume, space=space,
                                              surface=False)
    else:
        # avoid keeping an incorrect number of sources
        remains = max(0, min(volume.pos_length, remains))
        removes = volume.pos_length - remains

    if remains == 0:
        raise ValueError('Error, 0 source created')

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        # create clusters
        km = MiniBatchKMeans(n_clusters=remains, n_init=10)
        # get cluster labels
        cluster_id = km.fit(volume.pos).labels_

    # get the centroids of the clusters
    centroids, _ = Pycluster.clustercentroids(volume.pos,
                                              clusterid=cluster_id)

    dist = euclidean_distances(centroids, volume.pos)

    # get the indices of the points closest to the centroids
    arg_min = np.argmin(dist, axis=1)

    inuse = np.zeros(volume.pos_length)
    inuse[arg_min] = 1
    inuse = inuse.astype(int)  # needs to be int

    # must be converted to meters; pos is in voxel coords, not mm
    rr = volume.pos * 1e-3

    if volume.hemi == 'lh':
        Id = 101
    elif volume.hemi == 'rh':
        Id = 102

    src = [{'rr': rr,
            'coord_frame': np.array((FIFF.FIFFV_COORD_MRI,), np.int32),
            'type': 'surf', 'id': Id, 'np': volume.pos_length,
            'nn': volume.normals, 'inuse': inuse, 'nuse': remains,
            'dist': None, 'nearest': None, 'use_tris': None, 'nuse_tris': 0,
            'vertno': arg_min, 'patch_inds': None, 'tris': None,
            'dist_limit': None, 'pinfo': None, 'ntri': 0,
            'nearest_dist': None, 'removes': removes}]

    src = SourceSpaces(src)

    return src
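# Usage sketch for get_volume_sources (illustrative only). The `Volume`
# object is assumed to expose `pos` (n x 3 voxel coordinates), `pos_length`,
# `normals` (n x 3) and `hemi`; a SimpleNamespace stands in for it here:
#
#     import numpy as np
#     from types import SimpleNamespace
#
#     rng = np.random.RandomState(0)
#     pts = rng.rand(200, 3) * 50.
#     vol = SimpleNamespace(pos=pts, pos_length=len(pts),
#                           normals=np.tile([0., 0., 1.], (len(pts), 1)),
#                           hemi='lh')
#     src = get_volume_sources(vol, space=5, remains=20)
#     assert src[0]['nuse'] == 20  # 20 cluster centroids kept as sources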
def get_brain_sources(obj, space=5, distance=None, remains=None, pack=True,
                      master=None):
    """Get brain sources for each structure.

    Parameters
    ----------
    obj : list of Surface object | list of Volume object
    space : float
        Distance between sources.
    distance : 'euclidean' | 'dijkstra' | 'continuous' | None
        The distance used on the surface. If distance is not None, obj is
        treated as a list of Surface objects.
    remains : int | None
        The number of sources to keep.
    pack : bool
        If True, pack sources.
    master : Surface object | Volume object | array | None
        The reference structure used to pack sources or positions.

    Returns
    -------
    src : SourceSpaces object
    index_pack_src : list(2)
        Source indices in the source list for each parcel. Only returned
        if pack is True.

    Author : Alexandre Fabre
    """
    remain_nb = 0
    remove_nb = 0
    src = []
    for i, cour_obj in enumerate(obj):
        if distance is not None:
            cour_src = get_surface_sources(cour_obj, space, distance, remains)
        else:
            cour_src = get_volume_sources(cour_obj, space, remains)
        remove_nb += cour_src[0]['removes']
        remain_nb += cour_src[0]['nuse']
        src.append(cour_src)

    if pack:
        pos = None
        normals = None
        triangles = None
        # get attributes from master
        if master is not None:
            if isinstance(master, (list, np.ndarray)):
                pos = master
            else:
                if hasattr(master, 'pos'):
                    pos = master.pos
                elif hasattr(master, 'rr'):
                    pos = master.rr
                if hasattr(master, 'normals'):
                    normals = master.normals
                elif hasattr(master, 'nn'):
                    normals = master.nn
                if hasattr(master, 'triangles'):
                    triangles = master.triangles
                elif hasattr(master, 'tris'):
                    triangles = master.tris
        # pack sources
        src, index_pack_src = sources_pack(src, pos, normals, triangles)
        for i in range(len(obj)):
            if hasattr(obj[i], 'index_pack_src'):
                obj[i].index_pack_src = index_pack_src[i]
    else:
        src = SourceSpaces(src)

    print("\n%d points have been removed, %d points kept after downsampling"
          % (remove_nb, remain_nb))

    if pack:
        return src, index_pack_src
    else:
        return src
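# Usage sketch for get_brain_sources (illustrative only), assuming `volumes`
# is a list of Volume-like objects as described above:
#
#     src, index_pack_src = get_brain_sources(volumes, space=5, pack=True)
#     # index_pack_src[i] gives, for structure i, the indices of its
#     # sources within the packed SourceSpaces
#     src_unpacked = get_brain_sources(volumes, space=5, pack=False)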
def sources_pack(src, pos=None, normals=None, triangles=None, unit='mm'):
    """Pack sources.

    Parameters
    ----------
    src : list of SourceSpaces object
    pos : array(2) | None
        Master positions.
    normals : array(2) | None
        Master normals.
    triangles : array(2) | None
        Master triangles.
    unit : 'mm' | 'm'
        The unit of positions.

    Returns
    -------
    src : SourceSpaces object

    Author : Alexandre Fabre
    """
    ind = []
    ntri = 0
    index_pack_src = []

    if isinstance(src, SourceSpaces) or isinstance(src[0], dict):
        src = [s for s in src]
    elif isinstance(src[0], SourceSpaces):
        src = [s[0] for s in src]
    else:
        raise TypeError('src is incorrect')

    if not len(src):
        raise ValueError('src is empty')

    get_normals = True
    get_triangles = True

    if normals is None:
        normals = []
        get_normals = False

    if triangles is None:
        triangles = None if src[0]['tris'] is None else []
        get_triangles = False

    # pack sources
    if pos is None:
        pos_nb = 0
        src_nb = 0
        rr = []
        for s in src:
            p = np.add(s['vertno'], pos_nb)
            ind += list(p)
            rr += list(s['rr'])
            index_pack_src.append(np.arange(src_nb,
                                            src_nb + s['nuse']).tolist())
            if not get_normals:
                normals += list(s['nn'])
            if triangles is not None:
                triangles += np.add(s['tris'], pos_nb).tolist()
            pos_nb += s['np']
            src_nb += s['nuse']
            ntri += s['ntri']
        remains = src_nb
        surf_length = pos_nb
        rr = np.array(rr)
    # pack sources with a master surface
    else:
        if unit == 'mm':
            pos = pos * 1e-3  # avoid mutating the caller's array
        surf_length = len(pos)
        if not get_normals:
            normals = np.zeros((len(pos), 3))  # one 3-vector per position
        rr = pos.tolist()
        for s in src:
            normals_cond = (not get_normals and not get_triangles and
                            triangles is None)
            triangles_cond = (not get_triangles and triangles is not None)
            if triangles_cond or normals_cond:
                pos_cour = s['rr'].tolist()
                pos_ind = list(map(rr.index, pos_cour))
                if normals_cond:
                    normals[pos_ind] = s['nn']
                if triangles_cond:
                    triangles += np.take(pos_ind, s['tris']).tolist()
            pos_src_cour = s['rr'][s['vertno']].tolist()
            ind += list(map(rr.index, pos_src_cour))
        remains = len(ind)
        index = np.sort(ind, axis=0).tolist()
        ntri = len(triangles) if triangles is not None else 0
        if not get_normals and triangles is not None:
            _dict = dict(rr=pos, tris=triangles, ntri=ntri, np=surf_length)
            normals = _complete_surface_info(_dict)['nn']
        cour = 0
        for s in src:
            sup = cour + s['nuse']
            select = ind[cour:sup]
            index_pack_src.append(list(map(index.index, select)))
            cour = sup
        ind = index

    src_dict = src[0].copy()

    inuse = np.zeros(surf_length)
    ind = np.array(ind).tolist()
    inuse[ind] = 1

    rr = np.array(rr)  # needs to become an array, not a list

    src_dict.update(dict(rr=rr, inuse=inuse, np=surf_length, nuse=remains,
                         nn=normals, tris=triangles, ntri=ntri, vertno=ind))

    src = SourceSpaces([src_dict])

    return src, index_pack_src
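# Note on sources_pack: when `pos` is None, the per-structure source spaces
# are concatenated, with `vertno` and `tris` indices shifted by the running
# point count so they stay valid in the merged point array. When a master
# `pos` is given, each structure's points are located in the master array by
# value (list.index), which assumes every structure's points appear exactly
# in `pos`.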
def get_surface_sources(surface, space=5, distance='euclidean', remains=None):
    """Get sources on a surface.

    Parameters
    ----------
    surface : Surface object
    space : float
        The distance between sources.
    distance : 'euclidean' | 'dijkstra' | 'continuous'
        The distance used to compute distances on the surface.
    remains : None | int
        The number of sources to keep.

    Returns
    -------
    src : SourceSpaces object

    Author : Alexandre Fabre
    """
    if remains is None:
        remains, removes = get_number_sources(surface, space=space,
                                              surface=True)
    else:
        # avoid keeping an incorrect number of sources
        remains = max(0, min(surface.pos_length, remains))
        removes = surface.pos_length - remains

    if remains == 0:
        raise ValueError('Error, 0 source created')

    if removes == 0:
        # all points are sources
        # logger.info('all points are remained')
        centroids_id = np.arange(remains)
        inuse = np.ones(surface.pos_length, dtype=int)
    else:
        # connectivity of neighboring points
        n_neighbors = min(50, surface.pos_length)
        # get the matrix that identifies neighboring points
        knn_graph = kneighbors_graph(surface.pos, n_neighbors,
                                     include_self=False)
        # the ward criterion is well suited to surface clustering
        model = AgglomerativeClustering(linkage='ward',
                                        connectivity=knn_graph,
                                        n_clusters=remains)
        # compute clusters
        model.fit(surface.pos)
        # get cluster labels
        cluster_id = model.labels_
        # get the distance between points on the surface with Dijkstra or
        # continuous distances; if distance is 'euclidean', it just computes
        # euclidean distances between points
        distance = surf_m.get_surf_distance(surface.pos, surface.triangles,
                                            distance=distance)
        # the clusters given by AgglomerativeClustering are the initial
        # clusters for k-medoids; for k-medoids, the centroid is a point of
        # its cluster, and the method returns clusters identified by the
        # index of their centroid point
        cluster_id, _, _ = kmedoids(distance, nclusters=remains, npass=1,
                                    initialid=cluster_id)
        # get the indices of the centroids
        centroids_id = np.unique(cluster_id)
        inuse = np.zeros(surface.pos_length)
        inuse[centroids_id] = 1
        inuse = inuse.astype(int)  # needs to be int

    # must be converted to meters and transformed to a numpy array
    rr = surface.pos * 1e-3

    # change the id depending on the hemisphere
    if surface.hemi == 'lh':
        Id = 101
    elif surface.hemi == 'rh':
        Id = 102

    src = [{'rr': rr,
            'coord_frame': np.array((FIFF.FIFFV_COORD_MRI,), np.int32),
            'type': 'surf', 'id': Id, 'np': surface.pos_length,
            'nn': surface.normals, 'inuse': inuse, 'nuse': remains,
            'dist': None, 'ntri': surface.triangles_length, 'nearest': None,
            'use_tris': None, 'nuse_tris': 0, 'vertno': centroids_id,
            'patch_inds': None, 'tris': surface.triangles,
            'dist_limit': None, 'pinfo': None, 'nearest_dist': None,
            'removes': removes}]

    src = SourceSpaces(src)

    return src
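# Usage sketch for get_surface_sources (illustrative only). A Surface-like
# object is assumed to expose `pos`, `pos_length`, `normals`, `triangles`,
# `triangles_length` and `hemi`:
#
#     src = get_surface_sources(surface, space=5, distance='dijkstra',
#                               remains=100)
#     centroids = src[0]['vertno']   # indices of the k-medoid centroids
#     assert src[0]['nuse'] == len(centroids)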
def make_pert_forward_dipole(dipole, bem, info, trans=None, n_jobs=1,
                             verbose=None):
    """Convert dipole object to source estimate and calculate forward operator.

    The instance of Dipole is converted to a discrete source space, which is
    then combined with a BEM or a sphere model and the sensor information in
    info to form a forward operator.

    The source estimate object (with the forward operator) can be projected
    to sensor-space using :func:`mne.simulation.simulate_evoked`.

    .. note:: If the (unique) time points of the dipole object are unevenly
              spaced, the first output will be a list of single-timepoint
              source estimates.

    Parameters
    ----------
    dipole : instance of Dipole
        Dipole object containing position, orientation and amplitude of one
        or more dipoles. Multiple simultaneous dipoles may be defined by
        assigning them identical times.
    bem : str | dict
        The BEM filename (str) or a loaded sphere model (dict).
    info : instance of Info
        The measurement information dictionary. It is sensor-information
        etc., e.g., from a real data file.
    trans : str | None
        The head<->MRI transform filename. Must be provided unless BEM is a
        sphere model.
    n_jobs : int
        Number of jobs to run in parallel (used in making forward solution).
    verbose : bool, str, int, or None
        If not None, override default verbose level (see :func:`mne.verbose`
        and :ref:`Logging documentation <tut_logging>` for more).

    Returns
    -------
    fwd : instance of Forward
        The forward solution corresponding to the source estimate(s).
    stc : instance of VolSourceEstimate | list of VolSourceEstimate
        The dipoles converted to a discrete set of points and associated time
        courses. If the time points of the dipole are unevenly spaced, a list
        of single-timepoint source estimates is returned.

    See Also
    --------
    mne.simulation.simulate_evoked

    Notes
    -----
    .. versionadded:: 0.12.0
    """
    # Make copies to avoid mangling original dipole
    times = dipole.times.copy()
    pos = dipole.pos.copy()
    amplitude = dipole.amplitude.copy()
    ori = dipole.ori.copy()

    # Convert positions to discrete source space (allows duplicate rr & nn)
    # NB information about dipole orientation enters here, then no more
    sources = dict(rr=pos, nn=ori)
    # Dipole objects must be in the head frame
    sp = _make_discrete_source_space(sources, coord_frame='head')
    src = SourceSpaces([sp])  # dict with working_dir, command_line not nec

    # Forward operator created for channels in info (use pick_info to
    # restrict); use defaults for most params, including min_dist
    fwd = make_pert_forward_solution(info, trans, src, bem, n_jobs=n_jobs,
                                     verbose=verbose, eeg=False)
    # Convert from free orientations to fixed (in-place)
    convert_forward_solution(fwd, surf_ori=False, force_fixed=True,
                             copy=False, use_cps=False, verbose=None)

    # Check for omissions due to proximity to inner skull in
    # make_forward_solution, which will result in an exception
    if fwd['src'][0]['nuse'] != len(pos):
        inuse = fwd['src'][0]['inuse'].astype(bool)
        head = 'The following dipoles are outside the inner skull boundary'
        msg = len(head) * '#' + '\n' + head + '\n'
        for (t, pos) in zip(times[np.logical_not(inuse)],
                            pos[np.logical_not(inuse)]):
            msg += '    t={:.0f} ms, pos=({:.0f}, {:.0f}, {:.0f}) mm\n'.\
                format(t * 1000., pos[0] * 1000.,
                       pos[1] * 1000., pos[2] * 1000.)
        msg += len(head) * '#'
        logger.error(msg)
        raise ValueError('One or more dipoles outside the inner skull.')

    # multiple dipoles (rr and nn) per time instant allowed
    # uneven sampling in time returns list
    timepoints = np.unique(times)
    if len(timepoints) > 1:
        tdiff = np.diff(timepoints)
        if not np.allclose(tdiff, tdiff[0]):
            warn('Unique time points of dipoles unevenly spaced: returned '
                 'stc will be a list, one for each time point.')
            tstep = -1.0
        else:
            tstep = tdiff[0]
    elif len(timepoints) == 1:
        tstep = 0.001

    # Build the data matrix, essentially a block-diagonal with
    # n_rows: number of dipoles in total (dipole.amplitudes)
    # n_cols: number of unique time points in dipole.times
    # amplitudes with identical values of times go together in one col
    # (others=0)
    data = np.zeros((len(amplitude), len(timepoints)))  # (n_d, n_t)
    row = 0
    for tpind, tp in enumerate(timepoints):
        amp = amplitude[np.in1d(times, tp)]
        data[row:row + len(amp), tpind] = amp
        row += len(amp)

    if tstep > 0:
        stc = VolSourceEstimate(data, vertices=fwd['src'][0]['vertno'],
                                tmin=timepoints[0], tstep=tstep,
                                subject=None)
    else:
        # Must return a list of stc, one for each time point
        stc = []
        for col, tp in enumerate(timepoints):
            stc += [VolSourceEstimate(data[:, col][:, np.newaxis],
                                      vertices=fwd['src'][0]['vertno'],
                                      tmin=tp, tstep=0.001, subject=None)]
    return fwd, stc
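# Usage sketch for make_pert_forward_dipole (illustrative only; the file
# names are hypothetical). A Dipole read from disk is turned into a forward
# operator and a discrete source estimate, which can then be projected to
# sensor space:
#
#     dip = mne.read_dipole('sample.dip')
#     info = mne.io.read_info('sample_raw.fif')
#     fwd, stc = make_pert_forward_dipole(dip, 'sample-bem-sol.fif', info,
#                                         trans='sample-trans.fif')
#     evoked = mne.simulation.simulate_evoked(fwd, stc, info, cov=None,
#                                             nave=np.inf)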