def vl_look():
    """Inspect which cells spike on each video frame in a small window.

    Debug utility: loads animal 66 / session 60, maps spikes from all 16
    tetrodes onto video-tracking frames 59-69, prints a per-frame list of
    spiking cells with position and velocity, then drops into pdb.
    """
    mpl.rcParams['font.size'] = 26
    animal=66
    session=60
    fn, trigger_tm = load_mux(animal, session)
    vl = load_vl(animal,fn)
    print 'npw'  # progress marker (debug)
    cls = {tetrode:load_cl(animal,fn,tetrode) for tetrode in range(1,17)}
    _, good_clusters = get_good_clusters(0)
    print 'kw'  # progress marker (debug)
    t_cells = count_cells(vl,cls,trigger_tm,good_clusters)
    print 'w'  # progress marker (debug)
    # Frame window [L1, L2) of the video-location arrays to inspect.
    L1 = 59
    L2 = 70
    # clusters[frame - L1] = list of cells that spiked on that frame.
    clusters = [[] for _ in range(L2-L1)]
    for cell, spk_i in t_cells.items():
        # spk_i presumably holds the tracking-frame index of each spike —
        # TODO confirm against count_cells.
        spk_i = np.unique(spk_i)
        for spk in np.nonzero((spk_i<L2) & (spk_i>=L1))[0]:
            clusters[spk_i[spk]-L1].append(cell)
    # One row per frame: (cells, x, y, vx, vy).
    out = zip(clusters, vl['xs'][L1:L2], vl['ys'][L1:L2],
              vl['vxs'][L1:L2], vl['vys'][L1:L2])
    for tt in out:
        print '%s, (%.1f,%.1f), (%.1f,%.1f)'%( (str(tt[0]),) + tt[1:])
    # Drop into the debugger so the loaded data can be explored by hand.
    import pdb; pdb.set_trace()
def rate_graph():
    """Show occupancy-normalized firing-rate maps for one session.

    Loads animal 66, session 60 (the August 7, 2013 run), converts the
    per-bin spike fractions from get_fracs into rates by dividing by the
    mean tracking-sample interval, and hands them to plot_rates.
    """
    animal, session = 66, 60  # This is August 7, 2013 run
    room_shape = [[-55, 55], [-55, 55]]
    tetrodes = range(1, 17)
    cluster_profile = 0
    bin_size = 5

    _, good_clusters = get_good_clusters(cluster_profile)
    fn, trigger_tm = load_mux(animal, session)
    vl = load_vl(animal, fn)
    cls = dict((tet, load_cl(animal, fn, tet)) for tet in tetrodes)

    # Average time between successive tracking samples (units as stored).
    tpp = 1.0 * np.mean(vl['Time'][1:] - vl['Time'][:-1])

    t_cells = count_cells(vl, cls, trigger_tm, good_clusters)
    label_l = vl['Task']

    # rates[cell, lbl, xbin, ybin] = firing rate
    rates = get_fracs(vl['xs'], vl['ys'], label_l, room_shape, bin_size,
                      t_cells)
    rates /= tpp

    plot_rates(rates, label_l, t_cells)
    plt.show()
def rate_graph():
    """Show occupancy-normalized firing-rate maps for one session."""
    animal = 66
    session = 60  # This is August 7, 2013 run
    room_shape = [[-55, 55], [-55, 55]]
    tetrodes = range(1, 17)
    cluster_profile = 0
    bin_size = 5
    _, good_clusters = get_good_clusters(cluster_profile)
    fn, trigger_tm = load_mux(animal, session)
    vl = load_vl(animal, fn)
    cls = {tetrode: load_cl(animal, fn, tetrode) for tetrode in tetrodes}
    # Mean time between successive tracking samples (units as stored).
    tpp = 1.0 * np.mean(vl["Time"][1:] - vl["Time"][:-1])
    t_cells = count_cells(vl, cls, trigger_tm, good_clusters)
    label_l = vl["Task"]
    # rates[cell, lbl, xbin, ybin] = firing rate
    rates = get_fracs(vl["xs"], vl["ys"], label_l, room_shape, bin_size,
                      t_cells)
    # Convert per-sample fractions into rates.
    rates /= tpp
    plot_rates(rates, label_l, t_cells)
    plt.show()
def vl_look(): mpl.rcParams['font.size'] = 26 animal = 66 session = 60 fn, trigger_tm = load_mux(animal, session) vl = load_vl(animal, fn) print 'npw' cls = {tetrode: load_cl(animal, fn, tetrode) for tetrode in range(1, 17)} _, good_clusters = get_good_clusters(0) print 'kw' t_cells = count_cells(vl, cls, trigger_tm, good_clusters) print 'w' L1 = 59 L2 = 70 clusters = [[] for _ in range(L2 - L1)] for cell, spk_i in t_cells.items(): spk_i = np.unique(spk_i) for spk in np.nonzero((spk_i < L2) & (spk_i >= L1))[0]: clusters[spk_i[spk] - L1].append(cell) out = zip(clusters, vl['xs'][L1:L2], vl['ys'][L1:L2], vl['vxs'][L1:L2], vl['vys'][L1:L2]) for tt in out: print '%s, (%.1f,%.1f), (%.1f,%.1f)' % ((str(tt[0]), ) + tt[1:]) import pdb pdb.set_trace()
def rate_graph():
    """Plot per-label (clockwise/counterclockwise) rate maps for cell 3."""
    #mpl.rcParams['axes.titlesize'] = 18
    #mpl.rcParams['axes.labelsize'] = 18
    mpl.rcParams['font.size'] = 26
    animal = 66
    session = 60  # This is August 7, 2013 run
    room_shape = [[-55,55],[-55,55]]
    tetrodes = [1]
    cluster_profile = 0
    bin_size = 5
    _, good_clusters = get_good_clusters(cluster_profile)
    fn, trigger_tm = load_mux(animal, session)
    vl = load_vl(animal,fn)
    cls = {tetrode:load_cl(animal,fn,tetrode) for tetrode in tetrodes}
    t_cells = count_cells(vl,cls,trigger_tm,good_clusters)
    label_l = vl['Task']
    # rates[cell, lbl, xbin, ybin] = firing rate
    rates = get_fracs(vl['xs'], vl['ys'], label_l, room_shape, bin_size,
                      t_cells)
    # One figure per task label; titles below imply 0=clockwise,
    # 1=counterclockwise.
    for lbl in range(len(np.unique(label_l))):
        plt.figure(figsize=(10,10))
        # Bin edges: pcolor wants one more edge than there are bins, so
        # append the right/top room boundary.
        x = np.concatenate([np.arange(room_shape[0][0],room_shape[0][1],
                                      bin_size),[room_shape[0][1]]])
        y = np.concatenate([np.arange(room_shape[1][0],room_shape[1][1],
                                      bin_size),[room_shape[1][1]]])
        Xs, Ys = np.meshgrid(x, y)
        # NOTE(review): cell index 3 is hard-coded — presumably the cell
        # of interest for this figure; confirm before reuse.
        cntr = plt.pcolor(Ys,Xs,rates[3,lbl])
        t=plt.colorbar(cntr, extend='both')
        # NOTE(review): rates here are raw get_fracs output, not divided
        # by the sample period, yet the label says Hz — confirm units.
        t.set_label('Frequency (Hz)')
        plt.xlabel('Position (in)')
        plt.ylabel('Position (in)')
        if lbl == 0:
            plt.title('Clockwise')
        else:
            plt.title('Counterclockwise')
        #plt.axis('equal')
        plt.xlim(room_shape[0])
        plt.ylim(room_shape[1])
    # Dead code kept as a string literal (alternative contourf plot).
    ''' plt.figure()
    x = np.arange(room_shape[0][0],room_shape[0][1],bin_size)
    y = np.arange(room_shape[1][0],room_shape[1][1],bin_size)
    Xs, Ys = np.meshgrid(x, y)
    cntr = plt.contourf(Ys,Xs,rate_dict[contxt][2])
    t = plt.colorbar(cntr, extend='both')
    t.set_label('Frequency (Hz)')
    plt.xlabel('Position (in)')
    plt.ylabel('Position (in)')'''
    plt.show()
def dp_accuracy(): logging.basicConfig(level=10) # 5 for more stuff CL = CL3 animal = 66 session = 60 room =[[-55,55],[-55,55]] bin_size = 5 K = 50 # Segment length used to calculate firing rates CL.delt_t = K*.02 cluster_profile = 0 label = 'Task' cl_prof_name, good_clusters = get_good_clusters(cluster_profile) try: adat = try_cache('One big data structure') correct_dp = adat[CL.name][animal][session][cl_prof_name][bin_size][label][K] logging.info('Got data from Cache.cache.') except: logging.info('Calculating classifications...') CL.delt_t=K fn, trigger_tm = load_mux(animal, session) vl = load_vl(animal,fn) cls = {tetrode:load_cl(animal,fn,tetrode) for tetrode in range(1,17)} label_l = vl['Task'] t_cells = count_cells(vl,cls,trigger_tm,good_clusters) logging.info('About to generate population vector.') #X, Y = gpv(vl, t_cells, label_l, K) s=time() X, Y = gpv(vl, t_cells, label_l, K, bin_size, room) logging.info('%i population vectors generated in %.3f.',X.shape[0],time()-s) Y = Y.reshape([-1]) correct_dp = check_classifier(range(X.shape[0]),range(X.shape[0]), X, Y, CL, room, bin_size) # Accuracy meter plt.figure() plt.hist(correct_dp,normed=True) plt.xlabel('Accuracy') tt = '%s, K: %i, ClPr: %s, Label:%s'%(CL.name,K,cl_prof_name, label) plt.title(tt) msg = [] for i in [1,50,75,90,95,99]: perc = 1.0*np.sum(correct_dp > i/100.0)/len(correct_dp)*100.0 msg.append('>%i%%: %.1f%%'%(i,perc)) msg = '\n'.join(msg) plt.xlim([0,1]) xcoord = plt.xlim()[0] + (plt.xlim()[1]-plt.xlim()[0])*.1 ycoord = plt.ylim()[0] + (plt.ylim()[1]-plt.ylim()[0])*.5 plt.text(xcoord,ycoord,msg) plt.show()
def smoothing():
    """Compare smoothed and unsmoothed occupancy-normalized rate maps.

    For animal 66 / session 60, computes firing-rate maps twice (with
    and without smoothing) and shows both, side by side, for the first
    five cells.
    """
    logging.basicConfig(level=logging.INFO)

    room = [[-55, 55], [-55, 55]]
    bin_size = 5
    xs = range(room[0][0], room[0][1], bin_size)
    ys = range(room[1][0], room[1][1], bin_size)
    X, Y = np.meshgrid(xs, ys)

    animal, session = 66, 60
    _, good_clusters = get_good_clusters(0)
    fn, trigger_tm = load_mux(animal, session)
    vl = load_vl(animal, fn)
    cls = dict((tet, load_cl(animal, fn, tet)) for tet in range(1, 17))
    t_cells = count_cells(vl, cls, trigger_tm, good_clusters)

    x, y = vl['xs'], vl['ys']
    # Mean sample interval; the *24*60*60 factor suggests the timestamps
    # are stored in days and this converts to seconds — confirm.
    tpp = np.mean(vl['Time'][1:] - vl['Time'][:-1]) * 24 * 60 * 60
    label_l = vl['Task']

    # rates[cell id, lbl, xbin, ybin] = rate
    rates1 = get_fracs(x, y, label_l, room, bin_size, t_cells,
                       smooth_flag=True) / tpp
    logging.info('Got smoothed rates')
    rates2 = get_fracs(x, y, label_l, room, bin_size, t_cells,
                       smooth_flag=False) / tpp
    logging.info('Got unsmoothed rates')

    for i in range(5):  # or rates1.shape[0]
        logging.info('Cell %i', i)

        # Smoothed map on room coordinates, with a colorbar.
        plt.figure()
        plt.pcolor(X, Y, rates1[i, 0])
        plt.colorbar()
        plt.autoscale(tight=True)
        plt.xlabel('Position (in)')
        plt.ylabel('Position (in)')

        # Unsmoothed map; no X/Y grid here, so axes are bin indices.
        plt.figure()
        plt.pcolor(rates2[i, 0])
        plt.autoscale(tight=True)
        plt.xlabel('Position (in)')
        plt.ylabel('Position (in)')
    plt.show()
def run():
    """Find and cache the list of usable ('good') trials per animal.

    A session counts as good when its mux and video files load, it is a
    task trial, and count_cells succeeds with the default cluster
    profile. Results accumulate in the 'Good trials' cache across runs.
    """
    logging.basicConfig(level=logging.INFO)
    cache_key = 'Good trials'
    animals = [66, 73]
    sessions = range(100)  # candidate session ids; scan stops early below
    _, good_clusters = goodClusters.get_good_clusters(0)
    good_trials = try_cache(cache_key)
    if good_trials is None:
        good_trials = {}
    for animal in animals:
        if animal not in good_trials:
            good_trials[animal] = []
        for session in sessions:
            if session in good_trials[animal]:
                continue  # already verified in a previous run
            try:
                fn, trigger_tm = load_mux(animal, session)
            except:
                # First missing session ends the scan for this animal.
                logging.info('Animal %i has no sessions greater than %i',
                             animal, session + 1)
                break
            try:
                vl = load_vl(animal, fn)
            except:
                logging.info('Animal %i session %i is not a task trial',
                             animal, session + 1)
                continue
            cls = {
                tetrode: load_cl(animal, fn, tetrode)
                for tetrode in range(1, 17)
            }
            try:
                count_cells(vl, cls, trigger_tm, good_clusters)
            except:
                # No cells found
                continue
            if session not in good_trials[animal]:
                good_trials[animal].append(session)
    store_in_cache(cache_key, good_trials)
def run():
    """Scan sessions for each animal and cache the usable ('good') trials.

    A session qualifies when its mux/video files load, it is a task
    trial, and count_cells finds cells under the default cluster
    profile. The result is merged into the 'Good trials' cache.
    """
    logging.basicConfig(level=logging.INFO)

    cache_key = 'Good trials'
    animals = [66, 73]
    sessions = range(100)
    _, good_clusters = goodClusters.get_good_clusters(0)

    good_trials = try_cache(cache_key)
    if good_trials is None:
        good_trials = {}

    for animal in animals:
        trials = good_trials.setdefault(animal, [])
        for session in sessions:
            if session in trials:
                continue  # already verified on a previous run
            try:
                fn, trigger_tm = load_mux(animal, session)
            except:
                # First missing session ends the scan for this animal.
                logging.info('Animal %i has no sessions greater than %i',animal,session+1)
                break
            try:
                vl = load_vl(animal,fn)
            except:
                logging.info('Animal %i session %i is not a task trial',animal,session+1)
                continue
            cls = dict((tet, load_cl(animal, fn, tet)) for tet in range(1, 17))
            try:
                count_cells(vl, cls, trigger_tm, good_clusters)
            except:
                continue  # No cells found
            if session not in trials:
                trials.append(session)

    store_in_cache(cache_key, good_trials)
def gpv_rates():
    """Cross-check fracs_from_pv (rates from population vectors) against
    get_fracs (rates computed directly); warn on mismatch, plot both.
    """
    logging.basicConfig(level=logging.INFO)
    animal = 66
    session = 60
    room_shape = [[-55, 55], [-55, 55]]
    tetrodes = [1]
    cells = range(2, 10)  # hand-picked cluster ids for tetrode 1
    bin_size = 5
    K = 50  # Segment length used to calculate firing rates
    #xbins = ybins = (room_shape[0][1]-room_shape[0][0])/bin_size
    good_clusters = {tetrode: cells for tetrode in tetrodes}
    #good_clusters = get_good_clusters(0)
    fn, trigger_tm = load_mux(animal, session)
    vl = load_vl(animal, fn)
    cls = {tetrode: load_cl(animal, fn, tetrode) for tetrode in tetrodes}
    label_l = vl['Task']
    t_cells = count_cells(vl, cls, trigger_tm, good_clusters)
    logging.info('About to generate population vector.')
    X, Y = gpv(vl, t_cells, label_l, K, bin_size, room_shape)
    logging.info('%i population vectors generated.', X.shape[0])
    Y = Y.reshape([-1])
    # GPV rates
    rates = fracs_from_pv(X, Y, bin_size, room_shape, smooth_flag=True)
    # Now get normally calculate rates
    real_rates = get_fracs(vl['xs'], vl['ys'], label_l, room_shape, bin_size,
                           t_cells, smooth_flag=True)
    # Exact equality is expected; a mismatch only warns (debug aid).
    try:
        assert np.all(rates == real_rates)
    except:
        print 'DOESNT WORK!!'
    plot_rates(rates, Y, t_cells)
    plot_rates(real_rates, Y, t_cells)
    plt.show()
def smoothing():
    """Plot smoothed vs. unsmoothed rate maps for the first five cells."""
    logging.basicConfig(level=logging.INFO)
    room = [[-55,55],[-55,55]]
    bin_size = 5
    xs = range(room[0][0],room[0][1],bin_size)
    ys = range(room[1][0],room[1][1],bin_size)
    X,Y = np.meshgrid(xs,ys)
    session = 60
    animal=66
    _, good_clusters = get_good_clusters(0)
    fn, trigger_tm = load_mux(animal, session)
    vl = load_vl(animal,fn)
    cls = {tetrode:load_cl(animal,fn,tetrode) for tetrode in range(1,17)}
    t_cells = count_cells(vl,cls,trigger_tm,good_clusters)
    x = vl['xs']
    y = vl['ys']
    # Mean tracking-sample interval; the *24*60*60 factor suggests the
    # timestamps are stored in days and this converts to seconds — confirm.
    tpp = np.mean(vl['Time'][1:]-vl['Time'][:-1])*24*60*60
    label_l = vl['Task']
    # rates[cell id, lbl, xbin, ybin] = rate
    rates1 = get_fracs(x,y,label_l, room, bin_size, t_cells, smooth_flag=True)
    rates1 /= tpp
    logging.info('Got smoothed rates')
    rates2 = get_fracs(x,y,label_l, room, bin_size, t_cells, smooth_flag=False)
    rates2 /= tpp
    logging.info('Got unsmoothed rates')
    for i in range(5): # or rates1.shape[0]
        logging.info('Cell %i',i)
        # Smoothed map on room coordinates, with a colorbar.
        plt.figure()
        plt.pcolor(X,Y,rates1[i,0])
        plt.colorbar()
        plt.autoscale(tight=True)
        plt.xlabel('Position (in)')
        plt.ylabel('Position (in)')
        # Unsmoothed map; no X/Y grid here, so axes are bin indices.
        plt.figure()
        plt.pcolor(rates2[i,0])
        plt.autoscale(tight=True)
        plt.xlabel('Position (in)')
        plt.ylabel('Position (in)')
    plt.show()
def gpv_rates():
    """Cross-check fracs_from_pv (rates from population vectors) against
    get_fracs (rates computed directly); warn on mismatch, plot both.
    """
    logging.basicConfig(level=logging.INFO)
    animal = 66
    session = 60
    room_shape = [[-55,55],[-55,55]]
    tetrodes = [1]
    cells = range(2,10)  # hand-picked cluster ids for tetrode 1
    bin_size = 5
    K = 50 # Segment length used to calculate firing rates
    #xbins = ybins = (room_shape[0][1]-room_shape[0][0])/bin_size
    good_clusters = {tetrode:cells for tetrode in tetrodes}
    #good_clusters = get_good_clusters(0)
    fn, trigger_tm = load_mux(animal, session)
    vl = load_vl(animal,fn)
    cls = {tetrode:load_cl(animal,fn,tetrode) for tetrode in tetrodes}
    label_l = vl['Task']
    t_cells = count_cells(vl,cls,trigger_tm, good_clusters)
    logging.info('About to generate population vector.')
    X, Y = gpv(vl, t_cells, label_l, K, bin_size, room_shape)
    logging.info('%i population vectors generated.',X.shape[0])
    Y = Y.reshape([-1])
    # GPV rates
    rates = fracs_from_pv(X,Y,bin_size,room_shape,smooth_flag=True)
    # Now get normally calculate rates
    real_rates = get_fracs(vl['xs'],vl['ys'],label_l,room_shape,bin_size,
                           t_cells,smooth_flag=True)
    # Exact equality is expected; a mismatch only warns (debug aid).
    try:
        assert np.all(rates == real_rates)
    except:
        print 'DOESNT WORK!!'
    plot_rates(rates,Y,t_cells)
    plot_rates(real_rates,Y,t_cells)
    plt.show()
def view_PCA(): animal = 66 session = 60 bin_size = 5 K = 50 # Segment length used to calculate firing rates label = 'Task' room = [[-55, 55], [-55, 55]] _, good_clusters = get_good_clusters(0) xbins = (room[0][1] - [0][0]) / bin_size ybins = (room[1][1] - [1][0]) / bin_size fn, trigger_tm = load_mux(animal, session) vl = load_vl(animal, fn) cls = {tetrode: load_cl(animal, fn, tetrode) for tetrode in range(1, 17)} if label == 'Task': label_l = vl['Task'] else: raise Exception('Not implemented yet.') t_cells = count_cells(vl, cls, trigger_tm, good_clusters) logging.info('About to generate population vector.') #X, Y = gpv(vl, t_cells, label_l, K) X, Y = gpv(vl, t_cells, label_l, K, bin_size, room) pcas = np.zeros([xbins, ybins]) for xbin, ybin in product(xbins, ybins): pca = PCA() Xtmp = np.zeros([ X.shape[0], ]) X = pca.fit_transform(X[:, :len(t_cells)]) pcas[xbin, ybin] = pca plt.plot(pca.explained_variance_ratio_) print pca.components_ plt.show()
def view_PCA(): animal = 66 session = 60 bin_size = 5 K = 50 # Segment length used to calculate firing rates label = "Task" room = [[-55, 55], [-55, 55]] _, good_clusters = get_good_clusters(0) xbins = (room[0][1] - [0][0]) / bin_size ybins = (room[1][1] - [1][0]) / bin_size fn, trigger_tm = load_mux(animal, session) vl = load_vl(animal, fn) cls = {tetrode: load_cl(animal, fn, tetrode) for tetrode in range(1, 17)} if label == "Task": label_l = vl["Task"] else: raise Exception("Not implemented yet.") t_cells = count_cells(vl, cls, trigger_tm, good_clusters) logging.info("About to generate population vector.") # X, Y = gpv(vl, t_cells, label_l, K) X, Y = gpv(vl, t_cells, label_l, K, bin_size, room) pcas = np.zeros([xbins, ybins]) for xbin, ybin in product(xbins, ybins): pca = PCA() Xtmp = np.zeros([X.shape[0]]) X = pca.fit_transform(X[:, : len(t_cells)]) pcas[xbin, ybin] = pca plt.plot(pca.explained_variance_ratio_) print pca.components_ plt.show()
def checkGPV():
    """Consistency check: rates derived from population vectors must
    exactly match rates computed directly from positions and spikes.

    Optionally truncates the data (maxs samples) to keep the debug run
    small, then compares fracs_from_pv against get_fracs and plots both.
    """
    logging.basicConfig(level=5)
    animal = 66
    session = 60
    room_shape = [[-55,55],[-55,55]]
    tetrodes = [1]
    cells = range(2,10)  # hand-picked cluster ids for tetrode 1
    bin_size = 5
    K = 1 # Segment length used to calculate firing rates
    maxs = 10000  # cap on samples kept for the debug comparison
    # Truncation below assumes segments tile the sample count evenly.
    assert maxs%K==0
    #xbins = ybins = (room_shape[0][1]-room_shape[0][0])/bin_size
    good_clusters = {tetrode:cells for tetrode in tetrodes}
    #good_clusters = get_good_clusters(0)
    fn, trigger_tm = load_mux(animal, session)
    vl = load_vl(animal,fn)
    cls = {tetrode:load_cl(animal,fn,tetrode) for tetrode in tetrodes}
    label_l = vl['Task']
    t_cells = count_cells(vl,cls,trigger_tm, good_clusters)
    ''''''
    # For debugging purposes, make sure it's not too big
    if len(label_l) > maxs:
        # Keep every 10th sample up to maxs samples, and drop spikes
        # beyond the cutoff so indices stay in range.
        vl['xs'] = vl['xs'][:maxs*10:10]
        vl['ys'] = vl['ys'][:maxs*10:10]
        label_l = label_l[:maxs*10:10]
        for key in t_cells:
            tmp = np.array(t_cells[key])
            tmp = tmp[tmp<maxs]
            t_cells[key] = tmp
    for gpv in [gpv1]:
        logging.info('About to generate population vector.')
        X, Y = gpv(vl, t_cells, label_l, K, bin_size, room_shape)
        logging.info('%i population vectors generated.',X.shape[0])
        Y = Y.reshape([-1])
        # Debug code
        # This is only true if no points are thrown away
        if X.shape[0]*K == maxs:
            tot_spks = np.sum([len(np.unique(spki)) for spki in t_cells.values()])
            assert tot_spks == np.sum(X[:,:len(t_cells)])*K
        # GPV rates
        rates = fracs_from_pv(X,Y,bin_size,room_shape,smooth_flag=False)
        # Now get normally calculate rates
        real_rates = get_fracs(vl['xs'],vl['ys'],label_l,room_shape,bin_size,
                               t_cells,smooth_flag=False)
        # Exact equality is expected; a mismatch only warns (debug aid).
        try:
            assert np.all(rates == real_rates)
        except:
            print 'DOESNT WORK!!'
        plot_rates(rates,Y,t_cells)
        plot_rates(real_rates,Y,t_cells)
    plt.show()
def run(Folds):
    """Cross-validate the configured classifiers over parameter grids.

    For each (animal, session), cluster profile, bin size and segment
    length K, generates population vectors, runs Folds-fold
    cross-validation for each classifier in CLs, and accumulates the
    per-segment accuracies into the big cached data structure.

    Folds <= 0 means no split: train and test on the full index set.
    """
    # Toggle-able parameters
    #CLs = [CL2,CL6,CL5]
    #CLs = [CL6, CL7]
    CLs = [CL10]
    Ks = np.arange(10,200,20) # Segment length used to calculate firing rates

    # Sort of toggle-able parameters
    #animal_sess_combs = [(66,60),(70,8),(70,10),(66,61)]
    animal_sess_combs = [(66,60)]
    #good_trials = try_cache('Good trials')
    #animal_sess_combs = [(animal,session) for animal in range(65,74)
    #                     for session in good_trials[animal]]
    bin_sizes = [5]
    label = 'Task'
    exceptions = []  # (CL, cluster-profile) pairs to skip
    cl_profs = [0]

    # Not really toggle-able parameters
    room = [[-55,55],[-55,55]]

    cache = try_cache('One big data structure for %i folds'%(Folds,))
    adat = ({} if cache is None else cache)

    for animal, session in animal_sess_combs:
        fn, trigger_tm = load_mux(animal, session)
        vl = load_vl(animal,fn)
        cls = {tetrode:load_cl(animal,fn,tetrode) for tetrode in range(1,17)}

        if label == 'Task':
            label_l = vl['Task']
        else:
            raise Exception('Not implemented yet.')

        for clust_prof in cl_profs:
            cl_prof_name, good_clusters = get_good_clusters(clust_prof)
            t_cells = count_cells(vl,cls,trigger_tm,good_clusters)
            for bin_size, K in product(bin_sizes,Ks):
                # cached[i] flags whether CLs[i]'s result is already in adat.
                cached = np.zeros(len(CLs))
                for CL in CLs:
                    i = CLs.index(CL)
                    try:
                        # NOTE(review): this unconditional raise skips the
                        # cache lookup below, forcing recomputation every
                        # time — looks like a temporary cache-busting hack;
                        # remove it to re-enable the cache.
                        raise Exception
                        adat[CL.name][animal][session][cl_prof_name][bin_size][label][K]
                        cached[i] = True
                    except:
                        cached[i] = False
                if np.sum(cached) == len(CLs):
                    print 'Everything already cached'
                    continue # Everything is already cached!

                logging.info('About to generate population vector.')
                X, Y = gpv(vl, t_cells, label_l, K, bin_size, room)

                # The main data structure
                # NOTE(review): 'CL not in cached' tests membership in a
                # float flag array, so it is effectively always True; the
                # real filtering happens via cached[CLs.index(CL)] below.
                dps = {CL:[] for CL in CLs if CL not in cached}

                if Folds >0:
                    kf = cross_validation.KFold(len(Y),n_folds=Folds,shuffle=True)
                else:
                    # No split: train and test on the full index set.
                    kf = [(range(len(Y)),range(len(Y)))]

                for train_index, test_index in kf:
                    logging.warning('Training/testing: %i/%i',len(train_index),len(test_index))
                    for CL in CLs:
                        if cached[CLs.index(CL)]: continue
                        logging.warning('%s, %i seg, (%i, %i)',CL.name, K, animal, session)
                        if (CL,clust_prof) in exceptions: continue
                        CL.delt_t = K
                        correct_dp = check_classifier(train_index,test_index,X,Y,CL,
                                                      room, bin_size)
                        dps[CL].extend(correct_dp.tolist())

                # Fold the per-CL accuracies into the big cached structure.
                for CL in CLs:
                    if cached[CLs.index(CL)]: continue
                    to_add = np.array(dps[CL]).reshape([-1])
                    add(adat, CL.name, animal, session, cl_prof_name, bin_size,
                        label, K, to_add)

    store_in_cache('One big data structure for %i folds'%(Folds,),adat)
def checkGPV():
    """Consistency check: rates derived from population vectors must
    exactly match rates computed directly from positions and spikes.

    Optionally truncates the data (maxs samples) to keep the debug run
    small, then compares fracs_from_pv against get_fracs and plots both.
    """
    logging.basicConfig(level=5)
    animal = 66
    session = 60
    room_shape = [[-55, 55], [-55, 55]]
    tetrodes = [1]
    cells = range(2, 10)  # hand-picked cluster ids for tetrode 1
    bin_size = 5
    K = 1  # Segment length used to calculate firing rates
    maxs = 10000  # cap on samples kept for the debug comparison
    # Truncation below assumes segments tile the sample count evenly.
    assert maxs % K == 0
    #xbins = ybins = (room_shape[0][1]-room_shape[0][0])/bin_size
    good_clusters = {tetrode: cells for tetrode in tetrodes}
    #good_clusters = get_good_clusters(0)
    fn, trigger_tm = load_mux(animal, session)
    vl = load_vl(animal, fn)
    cls = {tetrode: load_cl(animal, fn, tetrode) for tetrode in tetrodes}
    label_l = vl['Task']
    t_cells = count_cells(vl, cls, trigger_tm, good_clusters)
    ''''''
    # For debugging purposes, make sure it's not too big
    if len(label_l) > maxs:
        # Keep every 10th sample up to maxs samples, and drop spikes
        # beyond the cutoff so indices stay in range.
        vl['xs'] = vl['xs'][:maxs * 10:10]
        vl['ys'] = vl['ys'][:maxs * 10:10]
        label_l = label_l[:maxs * 10:10]
        for key in t_cells:
            tmp = np.array(t_cells[key])
            tmp = tmp[tmp < maxs]
            t_cells[key] = tmp
    for gpv in [gpv1]:
        logging.info('About to generate population vector.')
        X, Y = gpv(vl, t_cells, label_l, K, bin_size, room_shape)
        logging.info('%i population vectors generated.', X.shape[0])
        Y = Y.reshape([-1])

        # Debug code
        # This is only true if no points are thrown away
        if X.shape[0] * K == maxs:
            tot_spks = np.sum(
                [len(np.unique(spki)) for spki in t_cells.values()])
            assert tot_spks == np.sum(X[:, :len(t_cells)]) * K

        # GPV rates
        rates = fracs_from_pv(X, Y, bin_size, room_shape, smooth_flag=False)

        # Now get normally calculate rates
        real_rates = get_fracs(vl['xs'], vl['ys'], label_l, room_shape,
                               bin_size, t_cells, smooth_flag=False)
        # Exact equality is expected; a mismatch only warns (debug aid).
        try:
            assert np.all(rates == real_rates)
        except:
            print 'DOESNT WORK!!'
        plot_rates(rates, Y, t_cells)
        plot_rates(real_rates, Y, t_cells)
    plt.show()
def run(Folds):
    """Cross-validate the configured classifiers over parameter grids.

    For each (animal, session), cluster profile, bin size and segment
    length K, generates population vectors, runs Folds-fold
    cross-validation for each classifier in CLs, and accumulates the
    per-segment accuracies into the big cached data structure.

    Folds <= 0 means no split: train and test on the full index set.
    """
    # Toggle-able parameters
    #CLs = [CL2,CL6,CL5]
    #CLs = [CL6, CL7]
    CLs = [CL10]
    Ks = np.arange(10, 200, 20)  # Segment length used to calculate firing rates

    # Sort of toggle-able parameters
    #animal_sess_combs = [(66,60),(70,8),(70,10),(66,61)]
    animal_sess_combs = [(66, 60)]
    #good_trials = try_cache('Good trials')
    #animal_sess_combs = [(animal,session) for animal in range(65,74)
    #                     for session in good_trials[animal]]
    bin_sizes = [5]
    label = 'Task'
    exceptions = []  # (CL, cluster-profile) pairs to skip
    cl_profs = [0]

    # Not really toggle-able parameters
    room = [[-55, 55], [-55, 55]]

    cache = try_cache('One big data structure for %i folds' % (Folds, ))
    adat = ({} if cache is None else cache)

    for animal, session in animal_sess_combs:
        fn, trigger_tm = load_mux(animal, session)
        vl = load_vl(animal, fn)
        cls = {
            tetrode: load_cl(animal, fn, tetrode)
            for tetrode in range(1, 17)
        }

        if label == 'Task':
            label_l = vl['Task']
        else:
            raise Exception('Not implemented yet.')

        for clust_prof in cl_profs:
            cl_prof_name, good_clusters = get_good_clusters(clust_prof)
            t_cells = count_cells(vl, cls, trigger_tm, good_clusters)
            for bin_size, K in product(bin_sizes, Ks):
                # cached[i] flags whether CLs[i]'s result is already in adat.
                cached = np.zeros(len(CLs))
                for CL in CLs:
                    i = CLs.index(CL)
                    try:
                        # NOTE(review): this unconditional raise skips the
                        # cache lookup below, forcing recomputation every
                        # time — looks like a temporary cache-busting hack;
                        # remove it to re-enable the cache.
                        raise Exception
                        adat[CL.name][animal][session][cl_prof_name][bin_size][
                            label][K]
                        cached[i] = True
                    except:
                        cached[i] = False
                if np.sum(cached) == len(CLs):
                    print 'Everything already cached'
                    continue  # Everything is already cached!

                logging.info('About to generate population vector.')
                X, Y = gpv(vl, t_cells, label_l, K, bin_size, room)

                # The main data structure
                # NOTE(review): 'CL not in cached' tests membership in a
                # float flag array, so it is effectively always True; the
                # real filtering happens via cached[CLs.index(CL)] below.
                dps = {CL: [] for CL in CLs if CL not in cached}

                if Folds > 0:
                    kf = cross_validation.KFold(len(Y),
                                                n_folds=Folds,
                                                shuffle=True)
                else:
                    # No split: train and test on the full index set.
                    kf = [(range(len(Y)), range(len(Y)))]

                for train_index, test_index in kf:
                    logging.warning('Training/testing: %i/%i', len(train_index),
                                    len(test_index))
                    for CL in CLs:
                        if cached[CLs.index(CL)]:
                            continue
                        logging.warning('%s, %i seg, (%i, %i)', CL.name, K,
                                        animal, session)
                        if (CL, clust_prof) in exceptions:
                            continue
                        CL.delt_t = K
                        correct_dp = check_classifier(train_index, test_index,
                                                      X, Y, CL, room, bin_size)
                        dps[CL].extend(correct_dp.tolist())

                # Fold the per-CL accuracies into the big cached structure.
                for CL in CLs:
                    if cached[CLs.index(CL)]:
                        continue
                    to_add = np.array(dps[CL]).reshape([-1])
                    add(adat, CL.name, animal, session, cl_prof_name, bin_size,
                        label, K, to_add)

    store_in_cache('One big data structure for %i folds' % (Folds, ), adat)
def rate_graph():
    """Plot per-label (clockwise/counterclockwise) rate maps for cell 3."""
    #mpl.rcParams['axes.titlesize'] = 18
    #mpl.rcParams['axes.labelsize'] = 18
    mpl.rcParams['font.size'] = 26
    animal = 66
    session = 60  # This is August 7, 2013 run
    room_shape = [[-55, 55], [-55, 55]]
    tetrodes = [1]
    cluster_profile = 0
    bin_size = 5
    _, good_clusters = get_good_clusters(cluster_profile)
    fn, trigger_tm = load_mux(animal, session)
    vl = load_vl(animal, fn)
    cls = {tetrode: load_cl(animal, fn, tetrode) for tetrode in tetrodes}
    t_cells = count_cells(vl, cls, trigger_tm, good_clusters)
    label_l = vl['Task']
    # rates[cell, lbl, xbin, ybin] = firing rate
    rates = get_fracs(vl['xs'], vl['ys'], label_l, room_shape, bin_size,
                      t_cells)
    # One figure per task label; titles below imply 0=clockwise,
    # 1=counterclockwise.
    for lbl in range(len(np.unique(label_l))):
        plt.figure(figsize=(10, 10))
        # Bin edges: pcolor wants one more edge than there are bins, so
        # append the right/top room boundary.
        x = np.concatenate([
            np.arange(room_shape[0][0], room_shape[0][1], bin_size),
            [room_shape[0][1]]
        ])
        y = np.concatenate([
            np.arange(room_shape[1][0], room_shape[1][1], bin_size),
            [room_shape[1][1]]
        ])
        Xs, Ys = np.meshgrid(x, y)
        # NOTE(review): cell index 3 is hard-coded — presumably the cell
        # of interest for this figure; confirm before reuse.
        cntr = plt.pcolor(Ys, Xs, rates[3, lbl])
        t = plt.colorbar(cntr, extend='both')
        # NOTE(review): rates here are raw get_fracs output, not divided
        # by the sample period, yet the label says Hz — confirm units.
        t.set_label('Frequency (Hz)')
        plt.xlabel('Position (in)')
        plt.ylabel('Position (in)')
        if lbl == 0:
            plt.title('Clockwise')
        else:
            plt.title('Counterclockwise')
        #plt.axis('equal')
        plt.xlim(room_shape[0])
        plt.ylim(room_shape[1])
    # Dead code kept as a string literal (alternative contourf plot).
    ''' plt.figure()
    x = np.arange(room_shape[0][0],room_shape[0][1],bin_size)
    y = np.arange(room_shape[1][0],room_shape[1][1],bin_size)
    Xs, Ys = np.meshgrid(x, y)
    cntr = plt.contourf(Ys,Xs,rate_dict[contxt][2])
    t = plt.colorbar(cntr, extend='both')
    t.set_label('Frequency (Hz)')
    plt.xlabel('Position (in)')
    plt.ylabel('Position (in)')'''
    plt.show()