def __init__(self, *args, **kwargs):
    Tk.__init__(self, *args, **kwargs)
    self.title("Article Searcher")  # title of the window
    # set the frame dimensions and pack the parent window
    container = Frame(self)
    menu = makemenu.MenuMaker2000(self).createMenu()
    self.analyzer = analysis.Analyzer()
    container.pack(side="top", fill="both", expand=True)
    container.grid_rowconfigure(0, weight=1)
    container.grid_columnconfigure(0, weight=1)
    self.resizable(width=False, height=False)
    # get screen dimensions and center the window
    width, height = 700, 450
    xoffset = int(self.winfo_screenwidth() / 2 - width / 2)
    yoffset = int(self.winfo_screenheight() / 2 - height / 2)
    self.geometry("%dx%d+%d+%d" % (width, height, xoffset, yoffset))  # set window geometry
    self.frames = {}
    for F in (SearchFrame, StartFrame):  # the two pages used in the program
        page_name = F.__name__
        frame = F(parent=container, controller=self)
        self.frames[page_name] = frame
        frame.grid(row=0, column=0, sticky="nsew")
    # self.show_frame("Welcome")  # call show_frame to display the welcome window
    self.show_frame('StartFrame')
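# show_frame() is called above but is not part of this fragment. Below is a minimal sketch of
# the usual tkraise-based implementation, assuming self.frames is keyed by the page class name
# as in the loop above (illustrative only, not necessarily the original implementation).
def show_frame(self, page_name):
    """Raise the frame registered under page_name above its sibling pages."""
    frame = self.frames[page_name]
    frame.tkraise()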
def __init__(self, parent, controller):
    Frame.__init__(self, parent)
    self.analyzer = analysis.Analyzer()
    self.controller = controller  # set the controller
    self.title = "Article Search"  # title of the window
    path = 'cyspider.jpg'
    self.img = ImageTk.PhotoImage(Image.open(path))
    self.panel = Label(self, image=self.img)
    self.panel.pack()
    self.searchwindow()
    right = all_triggered_units[3]['t']
    units_turn = [l + r for (l, r) in zip(left, right)]
    return units_straight, units_turn


if __name__ == "__main__":
    save_folder = "./DataFigures/FigureS2/"
    if not os.path.exists(save_folder):
        os.makedirs(save_folder)
    sns.reset_orig()
    mpl.rcParams['pdf.fonttype'] = 42
    std_05Hz = c.GradientData.load_standards("gd_05Hz_training_data.hdf5")
    std_2Hz = c.GradientData.load_standards("gd_2Hz_training_Data.hdf5")
    std_zf = c.GradientData.load_standards("gd_training_data.hdf5")
    ana_zf = a.Analyzer(MoTypes(False), std_zf, "sim_store.hdf5", "activity_store.hdf5")
    # load cluster data from file
    clfile = h5py.File("cluster_info.hdf5", "r")
    clust_ids_zf = np.array(clfile["clust_ids"])
    clfile.close()
    # load and interpolate temperature stimulus
    dfile = h5py.File("stimFile.hdf5", 'r')
    tsin = np.array(dfile['sine_L_H_temp'])
    x = np.arange(tsin.size)  # stored at 20 Hz!
    xinterp = np.linspace(0, tsin.size, tsin.size * GlobalDefs.frame_rate // 20)
    temperature = np.interp(xinterp, x, tsin)
    dfile.close()
    # get activity data
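# The stimulus-loading block above interpolates a temperature trace stored at 20 Hz up to the
# simulation frame rate via np.interp. A self-contained sketch of that resampling step, using a
# synthetic sine wave and an assumed frame rate of 100 Hz in place of GlobalDefs.frame_rate:
import numpy as np

frame_rate = 100                                   # assumed target rate (Hz)
stored_rate = 20                                   # rate at which the stimulus is stored (Hz)
tsin = np.sin(np.linspace(0, 2 * np.pi, 20 * 60))  # synthetic 60 s stimulus sampled at 20 Hz
x = np.arange(tsin.size)                           # sample indices at the stored rate
xinterp = np.linspace(0, tsin.size, tsin.size * frame_rate // stored_rate)
temperature = np.interp(xinterp, x, tsin)          # stimulus resampled to frame_rate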
error_file.create_dataset("test_rank_errors", data=np.array(test_errors)) error_file.create_dataset("test_eval", data=np.array(test_steps)) error_file.close() if __name__ == '__main__': # load training and test data tD_1 = GradientData.load("ce_gd_training_data.hdf5") tD_2 = GradientData.load("ce_gd_training_data_rev.hdf5") tD_2.copy_normalization(tD_1) train_list = [tD_1, tD_2] testData = GradientData.load("ce_gd_test_data_radial.hdf5") # enforce same scaling on testData as on trainingData testData.copy_normalization(tD_1) ana = a.Analyzer(MoTypes(True), tD_1.standards, None, "ce_activity_store.hdf5") # load cell unit ids and cluster ids dfile = h5py.File("stimFile.hdf5", 'r') tsin = np.array(dfile['sine_L_H_temp']) x = np.arange(tsin.size) # stored at 20 Hz ! xinterp = np.linspace(0, tsin.size, tsin.size * GlobalDefs.frame_rate // 20) temperature = np.interp(xinterp, x, tsin) dfile.close() all_ids = [] for i, p in enumerate(paths_512): cell_res, ids = ana.temperature_activity(mpath(p), temperature, i) all_ids.append(ids) all_ids = np.hstack(all_ids) clfile = h5py.File("ce_cluster_info.hdf5", "r")
color="C1", n_boot=1000, condition="512 HU") epoch_times = np.linspace(0, test_time.max(), 10, endpoint=False) for e in epoch_times: ax.plot([e, e], [-1.2, .4], 'k--', lw=0.25) ax.set_ylabel("log(Squared test error)") ax.set_xlabel("Training step") ax.set_xlim(-10000) ax.set_xticks([0, 250000, 500000]) ax.legend() sns.despine(fig, ax) fig.savefig(save_folder + "ce_test_errors.pdf", type="pdf") std_zf = c.GradientData.load_standards("gd_training_data.hdf5") ana_zf = a.Analyzer(MoTypes(False), std_zf, "sim_store.hdf5", "activity_store.hdf5") std_ce = c.GradientData.load_standards("ce_gd_training_data.hdf5") ana_ce = a.Analyzer(MoTypes(True), std_ce, "ce_sim_store.hdf5", "ce_activity_store.hdf5") # load and interpolate temperature stimulus dfile = h5py.File("stimFile.hdf5", 'r') tsin = np.array(dfile['sine_L_H_temp']) x = np.arange(tsin.size) # stored at 20 Hz ! xinterp = np.linspace(0, tsin.size, tsin.size * GlobalDefs.frame_rate // 20) temperature = np.interp(xinterp, x, tsin) dfile.close() # get activity data - corresponding to sine-wave all_ids_zf = []
if cm < 0: continue row_matches[cm] = ix return {ix: col_names[row_matches[ix]] if row_matches[ix] != -1 else ix for ix in range(corr_mat.shape[0])} if __name__ == "__main__": save_folder = "./DataFigures/ZF_ANN_Correspondence/" if not os.path.exists(save_folder): os.makedirs(save_folder) sns.reset_orig() mpl.rcParams['pdf.fonttype'] = 42 mo = MoTypes(False) std = c.GradientData.load_standards("gd_training_data.hdf5") ana = a.Analyzer(mo, std, "sim_store.hdf5", "activity_store.hdf5") # load zebrafish region results and create Rh56 regressor matrix for FastON, SlowON, FastOFF, SlowOFF result_labels = ["Rh6"] region_results = {} # type: Dict[str, RegionResults] analysis_file = h5py.File('regiondata.hdf5', 'r') for rl in result_labels: region_results[rl] = pickle.loads(np.array(analysis_file[rl])) analysis_file.close() rh_56_calcium = region_results["Rh6"].regressors[:, :-1] # the names of these regressors according to Haesemeyer et al., 2018 reg_names = ["Fast ON", "Slow ON", "Fast OFF", "Slow OFF"] # load and interpolate temperature stimulus dfile = h5py.File("stimFile.hdf5", 'r') tsin = np.array(dfile['sine_L_H_temp'])
def __init__(self, pairs, database_path, disable_saver):
    btcebot.TraderBase.__init__(self, pairs)
    self.trade_history_seen = {}
    self.saver = saver.DataSaver(database_path, disable_saver)
    self.analyzer = analysis.Analyzer()
error_file.create_dataset("test_losses", data=np.array(test_losses)) error_file.create_dataset("test_eval", data=np.array(test_steps)) error_file.close() if __name__ == '__main__': # load training and test data tD_1 = GradientData.load("gd_training_data.hdf5") tD_2 = GradientData.load("gd_training_data_rev.hdf5") tD_2.copy_normalization(tD_1) train_list = [tD_1, tD_2] testData = GradientData.load("gd_test_data_radial.hdf5") # enforce same scaling on testData as on trainingData testData.copy_normalization(tD_1) ana = a.Analyzer(MoTypes(False), tD_1.standards, None, "activity_store.hdf5") # load cell unit ids and cluster ids dfile = h5py.File("stimFile.hdf5", 'r') tsin = np.array(dfile['sine_L_H_temp']) x = np.arange(tsin.size) # stored at 20 Hz ! xinterp = np.linspace(0, tsin.size, tsin.size * GlobalDefs.frame_rate // 20) temperature = np.interp(xinterp, x, tsin) dfile.close() all_ids = [] for i, p in enumerate(paths_512): cell_res, ids = ana.temperature_activity(mpath(p), temperature, i) all_ids.append(ids) all_ids = np.hstack(all_ids) clfile = h5py.File("cluster_info.hdf5", "r")
import sys
sys.path.append("..")
import analysis

TEXT = "adamsmith.txt"
common_words_file = open('../mostcommon.txt', 'r')
common_words = common_words_file.read().split("\n")
common_words_file.close()
TOP_500 = set(common_words[:500])
CHAPTER_FINDER = \
    ["INTRODUCTION AND PLAN OF THE WORK.",
     "BOOK I.",
     "BOOK II.",
     "BOOK III.",  # comma added: without it, the adjacent strings concatenate into "BOOK III.BOOK IV."
     "BOOK IV.",
     "BOOK V.",
     "End of the Project Gutenberg EBook"]

adamsmith = analysis.Analyzer(TEXT, CHAPTER_FINDER, TOP_500)
print("Number of words: %d" % adamsmith.getTotalNumberOfWords())
print("Number of unique words: %d" % adamsmith.getTotalUniqueWords())
print("20 most frequent words:\n %s\n" % adamsmith.get20MostFrequentWords())
print("20 most frequent interesting words:\n%s\n" % adamsmith.get20MostInterestingFrequentWords())
print("20 least frequent words:\n%s\n" % adamsmith.get20LeastFrequentWords())
print("Generated sentences: ")
print(adamsmith.generateSentence("The"))
print(adamsmith.generateSentence("From"))
print(adamsmith.generateSentence("The"))
print(adamsmith.generateSentence("From"))
    sns.tsplot(bf_trained, centers, n_boot=1000, color="C1", err_style="ci_band", condition="Generation 0")
    sns.tsplot(bf_part, centers, n_boot=1000, color=(.5, .5, .5), err_style="ci_band", condition="Generation 8")
    sns.tsplot(bf_evolved, centers, n_boot=1000, color="C0", err_style="ci_band", condition="Generation 50")
    ax.set_xlim(23, 36)
    ax.set_xticks([25, 30, 35])
    ax.set_yticks([0.5, 0.75, 1, 1.25])
    ax.set_xlabel("Temperature [C]")
    ax.set_ylabel("Swim frequency [Hz]")
    ax.legend()
    sns.despine(fig, ax)
    fig.savefig(save_folder + "gradient_swim_frequency.pdf", format="pdf")

    # fourth panel - gradient distribution: naive, trained, evolved
    bns = np.linspace(0, GlobalDefs.circle_sim_params["radius"], 100)
    centers = a.temp_convert(bns[:-1] + np.diff(bns), "r")
    ana = a.Analyzer(MoTypes(False), std, "sim_store.hdf5", None)
    naive = np.empty((len(paths_512), centers.size))
    trained = np.empty_like(naive)
    evolved = np.empty_like(naive)
    naive_errors = []
    trained_errors = []
    evolved_errors = []
    for i, p in enumerate(paths_512):
        pos_n = ana.run_simulation(mpath(p), "r", "naive")
        naive_errors.append(a.temp_error(pos_n, 'r'))
        naive[i, :] = a.bin_simulation(pos_n, bns, "r")
        pos_t = ana.run_simulation(mpath(p), "r", "trained")
        trained_errors.append(a.temp_error(pos_t, 'r'))
        trained[i, :] = a.bin_simulation(pos_t, bns, "r")
        pos_e = ana.run_simulation(mpath(p), "r", "bfevolve")
        evolved_errors.append(a.temp_error(pos_e, 'r'))