sns.color_palette('colorblind', as_cmap=True)[3])  # tail of a call that starts before this view — do not edit in isolation

# Query session list
# NOTE(review): query_sessions / INCL_SESSIONS come from earlier in the file (not visible here)
eids, probes = query_sessions(selection=INCL_SESSIONS)
results_df = pd.DataFrame()

# Loop over all queried sessions; each iteration loads spike sorting + trials
for i in range(len(eids)):
    print('\nProcessing session %d of %d' % (i+1, len(eids)))

    # Load in data
    eid = eids[i]
    try:
        # Spike sorting aligned to histology; `one` is the ONE database connection
        spikes, clusters, channels = bbone.load_spike_sorting_with_channel(
            eid, aligned=True, one=one)
        ses_path = one.path_from_eid(eid)
        trials = load_trials(eid)
    except Exception as error_message:
        # Skip sessions whose data cannot be loaded (best-effort batch processing)
        print(error_message)
        continue

    # Check data integrity: skip sessions with malformed trial data
    if check_trials(trials) is False:
        continue

    # Extract session metadata (subject name and session date)
    ses_info = one.get_details(eid)
    subject = ses_info['subject']
    date = ses_info['start_time'][:10]  # ISO timestamp -> YYYY-MM-DD
    probes_to_use = probes[i]

    # Process per probe
    # NOTE(review): loop body continues past the end of this view
"""Collect behavioral trials from all aligned-behavior sessions into one
DataFrame (biased blocks only) and pickle it for later decoding analyses."""
from os.path import join

import pandas as pd

from my_functions import paths, query_sessions, check_trials, load_trials
from oneibl.one import ONE
one = ONE()

# Settings
INCL_SESSIONS = 'aligned-behavior'

# Query sessions
eids, _ = query_sessions(selection=INCL_SESSIONS)

# Loop over sessions
all_trials = pd.DataFrame()
for i, eid in enumerate(eids):
    print(f'Loading trials of session {i+1} of {len(eids)}')
    try:
        trials = load_trials(eid, invert_stimside=True)
    except Exception as err:
        # Best-effort batch job: skip sessions that fail to load, but say why
        # (the original bare `except:` hid the failure reason entirely)
        print(f'Could not load trials of {eid}: {err}')
        continue
    if check_trials(trials):
        # Attach session metadata
        ses_info = one.get_details(eid)
        trials['subject'] = ses_info['subject']
        trials['date'] = ses_info['start_time'][:10]  # YYYY-MM-DD
        # Drop per-trial columns not needed downstream
        trials = trials.drop(columns=[
            'stimOn_times', 'feedback_times', 'contrastLeft', 'contrastRight',
            'goCue_times', 'right_choice', 'correct', 'firstMovement_times'])
        # Keep only biased blocks (exclude 50/50 probability trials)
        incl_trials = trials[trials['probabilityLeft'] != 0.5]
        # DataFrame.append was removed in pandas 2.0; pd.concat is the
        # drop-in replacement with identical result
        all_trials = pd.concat([all_trials, incl_trials])
        # Report the number actually added (the included subset), not the
        # full session trial count
        print(f'Added {len(incl_trials)} trials (total {len(all_trials)})')

print('Saving results..')
all_trials.to_pickle(join(paths()[2], 'Ephys', 'Decoding', 'all_trials.p'))
            ATLAS, int(PRE_TIME * 1000), int(POST_TIME * 1000)))))  # tail of an expression whose start is above this view
else:
    # No previous results to resume from: start with an empty results table
    decoding_result = pd.DataFrame(
        columns=['subject', 'date', 'eid', 'probe', 'region'])

# Loop over subjects
for i, subject in enumerate(np.unique(subjects)):
    print('\nStarting subject %s [%d of %d]\n' % (
        subject, i + 1, len(np.unique(subjects))))

    # Generate stimulus vectors for all sessions of this subject
    stimuli_arr, actions_arr, stim_sides_arr, session_uuids = [], [], [], []
    for j, eid in enumerate(eids[subjects == subject]):
        try:
            # Load in trials vectors
            trials = load_trials(eid, invert_stimside=True, one=one)
            # Trial-inclusion mask based on target, 50/50 exclusion and
            # minimum reaction time (helper defined elsewhere in the file)
            incl_trials = get_incl_trials(trials, TARGET, EXCL_5050, MIN_RT)
            stimuli_arr.append(trials['signed_contrast'][incl_trials].values)
            actions_arr.append(trials['choice'][incl_trials].values)
            stim_sides_arr.append(trials['stim_side'][incl_trials].values)
            session_uuids.append(eid)
        except:
            # NOTE(review): bare except swallows ALL errors (including
            # KeyboardInterrupt) without reporting the cause — should be
            # `except Exception as err` with the error printed
            print(f'Could not load trials for {eid}')
    print(f'\nLoaded data from {len(session_uuids)} sessions')
    if len(session_uuids) == 0:
        # Nothing usable for this subject; move on
        continue

    # Get maximum number of trials across sessions
    max_len = np.array([len(stimuli_arr[k])
                        for k in range(len(stimuli_arr))]).max()
    # NOTE(review): loop body continues past the end of this view
    # Branch body of an `if TARGET == ...:` whose header is above this view.
    # Exponential-smoothing model over previous actions; the three
    # np.array([np.array(None)] * n) placeholders stand in for per-session
    # action/stim/side arrays that the model object loads itself.
    model = exp_prev_action(
        join(SAVE_PATH, 'Behavior', 'exp_smoothing_model_fits/'),
        all_eids, SUBJECT,
        np.array([np.array(None)] * all_eids.shape[0]),
        np.array([np.array(None)] * all_eids.shape[0]),
        np.array([np.array(None)] * all_eids.shape[0]))
elif TARGET == 'prior-stimside':
    # Same model class but driven by stimulus side instead of previous action
    model = exp_stimside(
        join(SAVE_PATH, 'Behavior', 'exp_smoothing_model_fits/'),
        all_eids, SUBJECT,
        np.array([np.array(None)] * all_eids.shape[0]),
        np.array([np.array(None)] * all_eids.shape[0]),
        np.array([np.array(None)] * all_eids.shape[0]))

# Reuse a cached fit when available, otherwise train for 2000 steps
model.load_or_train(nb_steps=2000, remove_old=False)
params = model.get_parameters(parameter_type='posterior_mean')

# Get priors per trial
trials = load_trials(EID, invert_stimside=True)
priors = model.compute_signal(signal='prior',
                              act=np.array(trials['choice']),
                              stim=np.array(trials['signed_contrast']),
                              side=np.array(trials['stim_side']),
                              parameter_type='posterior_mean')['prior']

# Get clusters in this brain region
# Histology-aligned spike sorting for the single session EID
spikes, clusters, channels = bbone.load_spike_sorting_with_channel(
    EID, aligned=True, one=one)

# Get list of neurons that pass QC
if INCL_NEURONS == 'pass-QC':
    # label == 1 marks units that passed all quality metrics
    clusters_pass = np.where(clusters[PROBE]['metrics']['label'] == 1)[0]
elif INCL_NEURONS == 'all':
    clusters_pass = np.arange(clusters[PROBE]['metrics'].shape[0])