def test_read_write_4(self):
    database_manager = ExperimentDatabaseManager(
        mysql_credentials=sql_credentials.credentials, cache_size=40)
    database_manager.delete_experiment('writing_numerical_data_test_case_4')
    database_manager.set_experiment('writing_numerical_data_test_case_4')

    inserted_data = dict()
    inserted_data['var_1'] = 19
    inserted_data['var_2'] = 109
    inserted_data['var_3'] = np.nan

    database_manager.insert_experiment_data(
        'writing_numerical_data_test_case_4', inserted_data)
    database_manager.flush()

    database_manager_2 = ExperimentDatabaseReadingManager(
        mysql_credentials=sql_credentials.credentials)
    read_back_data = database_manager_2.get_data(
        'writing_numerical_data_test_case_4',
        'writing_numerical_data_test_case_4')

    assert read_back_data['var_1'][0] == inserted_data['var_1']  # Always returns a list
    assert read_back_data['var_2'][0] == inserted_data['var_2']  # Always returns a list
    assert read_back_data['var_3'][0] == 0  # NaN is read back as 0

    # Deleting at the end to check that flushing works properly
    database_manager.delete_experiment('writing_numerical_data_test_case_4')
    database_manager.close()
def download_experiment_to_file(experiment_name, file_path):
    if os.path.exists(file_path):
        os.unlink(file_path)
    file_database = ExperimentDatabaseManager(file=file_path, cache_size=100000)
    file_database.set_experiment(experiment_name)

    reading_manager = ExperimentDatabaseReadingManager(mysql_credentials=credentials)

    # Find every table that has an 'experiment_name' column, i.e. every data table.
    query = '''SELECT (table_name) FROM information_schema.columns
               WHERE column_name = 'experiment_name'
               AND table_schema = '%s';''' % credentials['database']
    data = reading_manager.get_data_from_query(query)
    tables = [x[0] for x in data]
    tables.remove('experiments')

    print("Got the list of all tables:")
    print('\n'.join(tables))

    for table in tables:
        print("Downloading data from", table)
        table_data = reading_manager.get_data(table, experiment_name)
        if table_data is None:
            print("No data found in table for this experiment, skipping")
            continue
        print("Got keys", table_data.keys())
        for key in table_data.keys():
            print('\t%s is %s' % (key, str(type(table_data[key][0]))))
        table_data.pop('experiment_name')
        file_database.insert_experiment_data(table, table_data)

    print("Finishing up writing to file...")
    file_database.close()
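# Usage sketch: the experiment name and output path below mirror the test
# further down; any other values are equally valid as long as the module-level
# `credentials` dict points at the MySQL server holding the experiment.
#
#   download_experiment_to_file(
#       experiment_name='database_plots_test_case_1',
#       file_path='output/downloaded_database.db')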
def test_write_to_network_and_file_and_read_from_both(self):
    if os.path.exists('sample.db'):
        os.unlink('sample.db')

    print("Writing to server")
    database_manager = ExperimentDatabaseManager(
        mysql_credentials=sql_credentials.credentials,
        file='sample.db',
        cache_size=40)
    database_manager.delete_experiment('database_plots_test_case_1')
    database_manager.set_experiment('database_plots_test_case_1')
    self.write_to_database(database_manager)
    database_manager.close()

    download_experiment_to_file(
        experiment_name='database_plots_test_case_1',
        file_path='output/downloaded_database.db')

    print("Reading back from downloaded file")
    database_reading_manager = ExperimentDatabaseReadingManager(
        file='output/downloaded_database.db')
    self.read_from_database(database_reading_manager)

    print("Reading back from cache file")
    database_reading_manager = ExperimentDatabaseReadingManager(file='sample.db')
    self.read_from_database(database_reading_manager)

    database_manager = ExperimentDatabaseManager(
        mysql_credentials=sql_credentials.credentials, cache_size=40)
    database_manager.delete_experiment('database_plots_test_case_1')
    database_manager.close()
def test_read_write_4(self):
    if os.path.exists('sample.db'):
        os.unlink('sample.db')
    database_manager = ExperimentDatabaseManager(file='sample.db', cache_size=40)
    database_manager.delete_experiment('writing_numerical_data_test_case_4')
    database_manager.set_experiment('writing_numerical_data_test_case_4')

    inserted_data = dict()
    inserted_data['var_1'] = 19
    inserted_data['var_2'] = 109
    inserted_data['var_3'] = np.nan

    database_manager.insert_experiment_data(
        'writing_numerical_data_test_case_4', inserted_data)
    database_manager.flush()

    database_manager_2 = ExperimentDatabaseReadingManager(file='sample.db')
    read_back_data = database_manager_2.get_data(
        'writing_numerical_data_test_case_4',
        'writing_numerical_data_test_case_4')

    assert read_back_data['var_1'][0] == inserted_data['var_1']  # Always returns a list
    assert read_back_data['var_2'][0] == inserted_data['var_2']  # Always returns a list
    assert read_back_data['var_3'][0] == 0  # NaN is read back as 0

    # Deleting at the end to check that flushing works properly
    database_manager.delete_experiment('writing_numerical_data_test_case_4')
    database_manager.close()
def main(files, pdfpath, dumppath, soft, database_table_prefix, run_for=-1):
    global dataset_analysis_dict, fake_max_iou_values
    file_index = 0
    t1 = time.time()
    for file in files:
        print("\nFILE\n", file_index)
        with gzip.open(file, 'rb') as f:
            data_loaded = pickle.load(f)

        file_results = analyse_multiple_endcaps_multi_cpu(
            data_loaded,
            soft=soft,
            beta_threshold=beta_threshold,
            distance_threshold=distance_threshold,
            iou_threshold=iou_threshold)
        for r in file_results:
            append_endcap_dict_to_dataset_dict(dataset_analysis_dict, r)

        if file_index == run_for - 1:
            break
        file_index += 1
    print("It took", time.time() - t1, "seconds")

    if len(dumppath) > 0:
        print("Dumping analysis to bin file", dumppath)
        with mgzip.open(dumppath, 'wb', thread=8, blocksize=2 * 10**7) as f:
            pickle.dump(dataset_analysis_dict, f)
    else:
        print("WARNING: No analysis output path specified. Skipped dumping of analysis.")

    plotter = HGCalAnalysisPlotter()
    plotter.add_data_from_analysis_dict(dataset_analysis_dict)
    if len(pdfpath) != 0:
        plotter.write_to_pdf(pdfpath)

    if len(database_table_prefix) != 0:
        print("Will write plots to database")
        database_manager = ExperimentDatabaseManager(
            mysql_credentials=sql_credentials.credentials, cache_size=40)
        database_manager.set_experiment('analysis_plotting_experiments')
        plotter.write_data_to_database(database_manager, database_table_prefix)
        database_manager.close()
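# Usage sketch (hypothetical input and output paths, assuming `glob` is
# imported; an empty `database_table_prefix` skips the database upload):
#
#   main(files=glob.glob('analysis_input/*.bin.gz'),
#        pdfpath='output/analysis.pdf',
#        dumppath='output/analysis_dict.bin.gz',
#        soft=False,
#        database_table_prefix='',
#        run_for=2)  # only process the first two files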
def write_to_database(self, database_manager):
    # The callers create the manager and delete/set the experiment themselves,
    # so this helper only writes the plot data.
    efficiency_plot = General2dBinningPlot(
        bins=np.array([0, 1, 2, 3, 4]),
        histogram_log=False,
        histogram_fraction=False)

    x_values = np.array([0, 0, 1, 1, 2, 2, 2, 2, 3, 3, 3])
    y_values = np.array([0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3])

    efficiency_plot.add_raw_values(
        x_values=x_values,
        y_values=y_values,
        tags={'beta_threshold': 0.1, 'dist_threshold': 0.9})
    efficiency_plot.draw()
    efficiency_plot.write_to_database(
        database_manager, table_name='database_plots_test_case_1')
    plt.savefig('output/test-original-plot.png')
def test_read_write_file(self):
    if os.path.exists('sample.db'):
        os.unlink('sample.db')
    database_manager = ExperimentDatabaseManager(file='sample.db', cache_size=40)
    database_manager.delete_experiment('database_plots_test_case_1')
    database_manager.set_experiment('database_plots_test_case_1')

    print("Writing to file")
    self.write_to_database(database_manager)
    database_manager.close()

    print("Reading back from file")
    database_reading_manager = ExperimentDatabaseReadingManager(file='sample.db')
    self.read_from_database(database_reading_manager)

    if os.path.exists('sample.db'):
        os.unlink('sample.db')
def test_read_write_3(self):
    if os.path.exists('sample.db'):
        os.unlink('sample.db')
    database_manager = ExperimentDatabaseManager(file='sample.db', cache_size=40)
    database_manager.delete_experiment('writing_numerical_data_test_case_3')
    database_manager.set_experiment('writing_numerical_data_test_case_3')

    inserted_data = dict()
    inserted_data['var_1'] = np.array([19, 20])
    inserted_data['var_2'] = np.array([109, 110])
    inserted_data['var_3'] = np.array([54.1, 43])
    inserted_data['var_4'] = np.array(['hello', 'world'])

    database_manager.insert_experiment_data(
        'writing_numerical_data_test_case_3', inserted_data)
    database_manager.flush()

    database_manager_2 = ExperimentDatabaseReadingManager(file='sample.db')
    read_back_data = database_manager_2.get_data(
        'writing_numerical_data_test_case_3',
        'writing_numerical_data_test_case_3')

    assert read_back_data['var_1'][0] == inserted_data['var_1'][0]
    assert read_back_data['var_1'][1] == inserted_data['var_1'][1]
    assert read_back_data['var_2'][0] == inserted_data['var_2'][0]
    assert read_back_data['var_2'][1] == inserted_data['var_2'][1]
    assert read_back_data['var_3'][0] == inserted_data['var_3'][0]
    assert read_back_data['var_3'][1] == inserted_data['var_3'][1]
    assert read_back_data['var_4'][0] == inserted_data['var_4'][0]
    assert read_back_data['var_4'][1] == inserted_data['var_4'][1]

    # Deleting at the end would also check that flushing works properly:
    # database_manager.delete_experiment('writing_numerical_data_test_case_3')
    database_manager.close()
def test_read_write(self):
    print("Writing to server")
    database_manager = ExperimentDatabaseManager(
        mysql_credentials=sql_credentials.credentials, cache_size=40)
    print("Deleting experiment")
    database_manager.delete_experiment('database_plots_test_case_1')
    print("Setting experiment")
    database_manager.set_experiment('database_plots_test_case_1')
    self.write_to_database(database_manager)
    print("Done writing, closing")
    database_manager.close()

    print("Reading back from server")
    database_reading_manager = ExperimentDatabaseReadingManager(
        sql_credentials.credentials)
    self.read_from_database(database_reading_manager)

    database_manager = ExperimentDatabaseManager(
        mysql_credentials=sql_credentials.credentials, cache_size=40)
    database_manager.delete_experiment('database_plots_test_case_1')
    database_manager.close()
def test_read_write_2(self):
    database_manager = ExperimentDatabaseManager(
        mysql_credentials=sql_credentials.credentials, cache_size=40)
    database_manager.delete_experiment('writing_numerical_data_test_case_2')
    database_manager.set_experiment('writing_numerical_data_test_case_2')

    inserted_data = dict()
    inserted_data['var_1'] = [19, 20]
    inserted_data['var_2'] = [109, 110]
    inserted_data['var_3'] = [54.1, 43]

    database_manager.insert_experiment_data(
        'writing_numerical_data_test_case_2', inserted_data)
    database_manager.flush()

    database_manager_2 = ExperimentDatabaseReadingManager(
        mysql_credentials=sql_credentials.credentials)
    read_back_data = database_manager_2.get_data(
        'writing_numerical_data_test_case_2',
        'writing_numerical_data_test_case_2')

    assert read_back_data['var_1'][0] == inserted_data['var_1'][0]
    assert read_back_data['var_1'][1] == inserted_data['var_1'][1]
    assert read_back_data['var_2'][0] == inserted_data['var_2'][0]
    assert read_back_data['var_2'][1] == inserted_data['var_2'][1]
    assert read_back_data['var_3'][0] == inserted_data['var_3'][0]
    assert read_back_data['var_3'][1] == inserted_data['var_3'][1]

    # Deleting at the end to check that flushing works properly
    database_manager.delete_experiment('writing_numerical_data_test_case_2')
    database_manager.close()
os.system('mkdir -p %s' % (train.outputDir + "/summary/"))
tensorboard_manager = TensorBoardManager(train.outputDir + "/summary/")

# Load the persistent unique id for this training run, or create one.
unique_id_path = os.path.join(train.outputDir, 'unique_id.txt')
if os.path.exists(unique_id_path):
    with open(unique_id_path, 'r') as f:
        unique_id = f.readlines()[0].strip()
else:
    unique_id = str(uuid.uuid4())[:8]
    with open(unique_id_path, 'w') as f:
        f.write(unique_id + '\n')

database_manager = ExperimentDatabaseManager(
    mysql_credentials=sql_credentials.credentials, cache_size=40)
database_manager.set_experiment('alpha_experiment_june_pca_double_cords_' + unique_id)

cb += [RunningMetricsCallback(
    td,
    tensorboard_manager,
    dist_threshold=0.5,
    beta_threshold=0.5,
    database_manager=database_manager)]

cb += [plotClusteringDuringTraining(
    use_backgather_idx=8 + i,
    outputfile=train.outputDir + "/plts/sn" + str(i) + '_',
    samplefile=samplepath,
    after_n_batches=20,
    on_epoch_end=False,
    publish=None,
    use_event=0) for i in [0, 2]]

cb += [
    plotEventDuringTraining(
        outputfile=train.outputDir + "/plts2/sn0",
    with open(unique_id_path, 'r') as f:
        unique_id = f.readlines()[0].strip()
else:
    unique_id = str(uuid.uuid4())[:8]
    with open(unique_id_path, 'w') as f:
        f.write(unique_id + '\n')

nbatch = 50000  # rather low; can be set to a higher value, e.g. when training on V100s

database_manager = ExperimentDatabaseManager(
    file=os.path.join(train.outputDir, "training_metrics.db"), cache_size=100)
database_reading_manager = ExperimentDatabaseReadingManager(
    file=os.path.join(train.outputDir, "training_metrics.db"))
database_manager.set_experiment(unique_id)

metadata = matching_and_analysis.build_metadeta_dict(
    beta_threshold=0.5,
    distance_threshold=0.5,
    iou_threshold=0.0001,
    matching_type=matching_and_analysis.MATCHING_TYPE_MAX_FOUND)
analyzer = matching_and_analysis.OCAnlayzerWrapper(metadata)

cb += [
    RunningMetricsDatabaseAdditionCallback(
        td,
        tensorboard_manager,
        database_manager=database_manager,
        analyzer=analyzer)
]
cb += [
    RunningMetricsPlotterCallback(
        after_n_batches=200,
        all_data.append(data_loaded)
    analysed_graphs, metadata = matching_and_analysis.OCAnlayzerWrapper(
        metadata).analyse_from_data(all_data)
else:
    analysed_graphs, metadata = matching_and_analysis.OCAnlayzerWrapper(
        metadata).analyse_from_files(files_to_be_tested)

plotter = hp.TrackMLPlotter()
plotter.add_data_from_analysed_graph_list(analysed_graphs, metadata)
if len(pdfpath) > 0:
    plotter.write_to_pdf(pdfpath=pdfpath)

if len(args.analysisoutpath) != 0:
    with gzip.open(args.analysisoutpath, 'wb') as f:
        pickle.dump((analysed_graphs, metadata), f)

if len(database_table_prefix) != 0:
    print("Will write plots to the MySQL database")
    database_manager = ExperimentDatabaseManager(
        mysql_credentials=sql_credentials.credentials, cache_size=40)
    database_manager.set_experiment('analysis_plotting_experiments')
    plotter.write_data_to_database(database_manager, database_table_prefix)
    database_manager.close()

if len(database_file) != 0:
    print("Will write plots to a file database")
    database_manager = ExperimentDatabaseManager(file=database_file, cache_size=40)
    database_manager.set_experiment('analysis_plotting_experiments')
    plotter.write_data_to_database(database_manager, 'plots')
    database_manager.close()
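# Reading the plots back is then a matter of opening the same file database;
# a minimal sketch, assuming a table named after the 'plots' prefix used above
# (the exact table name depends on what write_data_to_database creates):
#
#   reading_manager = ExperimentDatabaseReadingManager(file=database_file)
#   plot_data = reading_manager.get_data('plots', 'analysis_plotting_experiments')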