os.mkdir(_RESULTS) ############################################################################## # Get root path ############################################################################## coh_sig_file = f'thr_{metric}_at_{at}_surr.nc' wt = None ############################################################################## # Load the supertensor and convert to adjacency matrix ############################################################################## net = temporal_network(coh_file=f'{metric}_at_{at}.nc', coh_sig_file=coh_sig_file, wt=wt, date=sessions[s_id], trial_type=[1], behavioral_response=[1]) net.convert_to_adjacency() # If the metric is pec take the absolute value of weigths only if metric == "pec": net.super_tensor.values = np.abs(net.super_tensor.values) ############################################################################## # 1. Strength ############################################################################## degree = [] for f in tqdm(range(net.A.sizes["freqs"])):
import os
from tqdm import tqdm
import numpy as np
import xarray as xr
from config import sessions
from GDa.util import average_stages
from GDa.temporal_network import temporal_network

coh_file = 'coh_at_cue.nc'
coh_sig_file = 'thr_coh_at_cue_surr.nc'

data = []
for session in tqdm(sessions):
    # Build the session's temporal network (correct trials only:
    # trial_type == 1 and behavioral_response == 1)
    net = temporal_network(coh_file=coh_file,
                           coh_sig_file=coh_sig_file,
                           wt=None,
                           date=session,
                           trial_type=[1],
                           behavioral_response=[1])
    # Stage-average the supertensor
    averaged = average_stages(net.super_tensor, 1)
    # Drop the network as soon as possible to keep peak memory low
    del net
    # One trial-averaged DataArray per channel, the layout the MI workflow expects
    for idx in range(len(averaged['roi'])):
        data.append(averaged.isel(roi=[idx]).mean("trials"))
# Stack every channel along the roi dimension
data = xr.concat(data, dim="roi")
# Count how many channels each unique roi label has
urois, counts = np.unique(data.roi.data, return_counts=True)
# Keep only the rois sampled by at least 10 channels
urois = urois[counts >= 10]
################################################################## # Loading temporal network ################################################################## _ROOT = os.path.expanduser("~/storage1/projects/GrayData-Analysis") # Path in which to save coherence data _RESULTS = os.path.join("Results", "lucy", "141017", "session01") coh_sig_file = "coh_k_0.3_multitaper_at_cue_surr.nc" wt = None net = temporal_network( coh_file="coh_k_0.3_multitaper_at_cue.nc", coh_sig_file=coh_sig_file, wt=wt, date="141017", trial_type=[1], behavioral_response=[1], ) ################################################################## # Compute static coherence networks ################################################################## # Stage masks net.create_stage_masks(flatten=True) coh = [] # Save attributes attrs = net.super_tensor.attrs # Stack trials and times
# Path in which to save coherence data
_RESULTS = os.path.join("Results", "lucy", session, "session01")

# Pick the input files: surrogate data ignores the significance threshold,
# real data optionally loads the surrogate-derived threshold file.
coh_sig_file = None
if not surr:  # idiomatic truthiness test (was `bool(surr) is False`)
    coh_file = f'{metric}_at_cue.nc'
    if thr:
        coh_sig_file = f'thr_{metric}_at_cue_surr.nc'
else:
    coh_file = f'{metric}_at_cue_surr.nc'

wt = None  # forwarded unchanged to temporal_network; None presumably disables it — TODO confirm

# Load the (possibly thresholded) supertensor for the selected session,
# keeping only trials with trial_type == 1 and behavioral_response == 1.
net = temporal_network(
    coh_file=coh_file,
    coh_sig_file=coh_sig_file,
    wt=wt,
    date=session,
    trial_type=[1],
    behavioral_response=[1],
)

# Masks for each stage
net.create_stage_masks(flatten=True)

# Stack trials AND times into a single observation axis
FC = net.super_tensor.stack(obs=("trials", "times")).data

##############################################################################
# Compute meta-connectivity
##############################################################################
# NOTE(review): the "roi" dim of the supertensor indexes edges here — confirm.
n_edges = net.super_tensor.sizes["roi"]
n_freqs = net.super_tensor.sizes["freqs"]
############################################################################### # Path in which to save burst stats data ############################################################################### path_st = os.path.join(_ROOT, f"Results/lucy/{session}/session01") path_st = os.path.join(path_st, f"bs_stats_k_{_KS}_numba_{mode}.nc") ############################################################################### # Instantiate temp net ############################################################################### # Instantiating a temporal network object without thresholding the data net = temporal_network(coh_file=_COH_FILE, coh_sig_file=_COH_FILE_SIG, date=session, trial_type=[1], behavioral_response=[1], q=None, relative=False) ############################################################################### # Compute burstness statistics for different thresholds ############################################################################### bs_stats = np.zeros((2, net.super_tensor.sizes["freqs"], net.super_tensor.shape[0], len(stages), 4)) # Set to one all values about siginificance level net.super_tensor.values = (net.super_tensor.values > 0) # coh = net.super_tensor # Wheter to compute the burst stats for sequences of siliences