def test_write_to_network_and_file_and_read_from_both(self):
    """Round-trip check across both backends.

    Writes the test experiment to the MySQL server while caching into a
    local file, downloads the experiment into a second file, then reads
    it back from both the downloaded file and the cache file. Finally
    removes the experiment from the server.
    """
    if os.path.exists('sample.db'):
        os.unlink('sample.db')

    print("Writing to server")
    writer = ExperimentDatabaseManager(
        mysql_credentials=sql_credentials.credentials,
        file='sample.db',
        cache_size=40)
    # Start from a clean slate on the server before writing.
    writer.delete_experiment('database_plots_test_case_1')
    writer.set_experiment('database_plots_test_case_1')
    self.write_to_database(writer)
    writer.close()

    download_experiment_to_file(
        experiment_name='database_plots_test_case_1',
        file_path='output/downloaded_database.db')

    print("Reading back from downloaded file")
    reader = ExperimentDatabaseReadingManager(
        file='output/downloaded_database.db')
    self.read_from_database(reader)

    print("Reading back from cache file")
    reader = ExperimentDatabaseReadingManager(file='sample.db')
    self.read_from_database(reader)

    # Clean up: drop the test experiment from the server again.
    cleanup = ExperimentDatabaseManager(
        mysql_credentials=sql_credentials.credentials, cache_size=40)
    cleanup.delete_experiment('database_plots_test_case_1')
    cleanup.close()
def download_experiment_to_file(experiment_name, file_path):
    """Copy one experiment's data from the MySQL server into a local file
    database.

    Args:
        experiment_name: Name of the experiment to download.
        file_path: Destination database file; overwritten if it exists.
    """
    if os.path.exists(file_path):
        os.unlink(file_path)
    # Make sure the destination directory exists (callers pass paths like
    # 'output/downloaded_database.db'); the original crashed if it didn't.
    parent_dir = os.path.dirname(file_path)
    if parent_dir:
        os.makedirs(parent_dir, exist_ok=True)

    file_database = ExperimentDatabaseManager(file=file_path,
                                              cache_size=100000)
    file_database.set_experiment(experiment_name)

    reading_manager = ExperimentDatabaseReadingManager(
        mysql_credentials=credentials)
    # Find every table that carries per-experiment rows, identified by the
    # presence of an 'experiment_name' column in this schema.
    # NOTE(review): the database name comes from our own config, not user
    # input, so plain %-interpolation is acceptable here.
    query = '''SELECT (table_name) FROM information_schema.columns WHERE column_name = 'experiment_name' AND table_schema = '%s';''' % credentials['database']
    data = reading_manager.get_data_from_query(query)
    tables = [x[0] for x in data]
    # The bookkeeping table itself is not experiment data. Guard the removal:
    # the original unconditional remove() raised ValueError when the table
    # was not present in the schema.
    if 'experiments' in tables:
        tables.remove('experiments')
    print("Gotten list of all tables:")
    print('\n'.join(tables))

    for table in tables:
        print("Downloading data from ", table)
        table_data = reading_manager.get_data(table, experiment_name)
        if table_data is None:
            print("No data found in table for this experiment, skipping")
            continue
        print("Gotten keys", table_data.keys())
        for key in table_data.keys():
            print('\t%s is %s' % (key, str(type(table_data[key][0]))))
        # The destination manager tags rows with the experiment itself.
        table_data.pop('experiment_name')
        file_database.insert_experiment_data(table, table_data)

    print("Finishing up writing to file...")
    file_database.close()
def test_read_write_4(self):
    """Write scalar values (including NaN) to a file database and read
    them back; NaN is expected to come back as 0."""
    if os.path.exists('sample.db'):
        os.unlink('sample.db')

    writer = ExperimentDatabaseManager(file='sample.db', cache_size=40)
    writer.delete_experiment('writing_numerical_data_test_case_4')
    writer.set_experiment('writing_numerical_data_test_case_4')

    payload = {
        'var_1': 19,
        'var_2': 109,
        'var_3': np.nan,
    }
    writer.insert_experiment_data('writing_numerical_data_test_case_4',
                                  payload)
    writer.flush()

    reader = ExperimentDatabaseReadingManager(file='sample.db')
    fetched = reader.get_data('writing_numerical_data_test_case_4',
                              'writing_numerical_data_test_case_4')
    # get_data always returns lists.
    assert fetched['var_1'][0] == payload['var_1']
    assert fetched['var_2'][0] == payload['var_2']
    # NaN values come back as 0.
    assert fetched['var_3'][0] == 0

    # Delete at the end so the pending-cache flush path is exercised too.
    writer.delete_experiment('writing_numerical_data_test_case_4')
    writer.close()
def test_read_write_4(self):
    """Write scalar values (including NaN) to the MySQL server database
    and read them back; NaN is expected to come back as 0.

    NOTE(review): shares its name with a file-based variant elsewhere in
    this source — confirm the two live in different test classes,
    otherwise one definition shadows the other.
    """
    writer = ExperimentDatabaseManager(
        mysql_credentials=sql_credentials.credentials, cache_size=40)
    writer.delete_experiment('writing_numerical_data_test_case_4')
    writer.set_experiment('writing_numerical_data_test_case_4')

    payload = {
        'var_1': 19,
        'var_2': 109,
        'var_3': np.nan,
    }
    writer.insert_experiment_data('writing_numerical_data_test_case_4',
                                  payload)
    writer.flush()

    reader = ExperimentDatabaseReadingManager(
        mysql_credentials=sql_credentials.credentials)
    fetched = reader.get_data('writing_numerical_data_test_case_4',
                              'writing_numerical_data_test_case_4')
    # get_data always returns lists.
    assert fetched['var_1'][0] == payload['var_1']
    assert fetched['var_2'][0] == payload['var_2']
    # NaN values come back as 0.
    assert fetched['var_3'][0] == 0

    # Delete at the end so the pending-cache flush path is exercised too.
    writer.delete_experiment('writing_numerical_data_test_case_4')
    writer.close()
def read_from_database(self):
    """Rebuild the efficiency plot for 'database_plots_test_case_1' from
    the server database and save it as a PNG.

    Assumes the 'output/' directory already exists — TODO confirm.
    """
    # Pass credentials by keyword for consistency with the other
    # ExperimentDatabaseReadingManager call sites in this file.
    database_reading_manager = ExperimentDatabaseReadingManager(
        mysql_credentials=sql_credentials.credentials)
    efficiency_plot = General2dBinningPlot(bins=np.array([0, 1, 2, 3, 4]),
                                           histogram_log=False,
                                           histogram_fraction=False)
    efficiency_plot.read_from_database(database_reading_manager,
                                       'database_plots_test_case_1')
    efficiency_plot.draw()
    plt.savefig('output/test-reproduced-plot.png')
def test_read_write_file(self):
    """Write plot data to a local file database, read it back, and
    remove the file afterwards."""
    if os.path.exists('sample.db'):
        os.unlink('sample.db')

    print("Writing to file")
    writer = ExperimentDatabaseManager(file='sample.db', cache_size=40)
    self.write_to_database(writer)
    writer.close()

    print("Reading back from file")
    reader = ExperimentDatabaseReadingManager(file='sample.db')
    self.read_from_database(reader)

    # Tidy up the temporary database file.
    if os.path.exists('sample.db'):
        os.unlink('sample.db')
def test_read_write(self):
    """Write plot data to the MySQL server, read it back, then delete
    the test experiment from the server."""
    print("Writing to server")
    database_manager = ExperimentDatabaseManager(
        mysql_credentials=sql_credentials.credentials, cache_size=40)
    self.write_to_database(database_manager)
    database_manager.close()

    print("Reading back from server")
    # Pass credentials by keyword for consistency with the writing side
    # above (original passed them positionally).
    database_reading_manager = ExperimentDatabaseReadingManager(
        mysql_credentials=sql_credentials.credentials)
    self.read_from_database(database_reading_manager)

    # Clean up: remove the test experiment from the server.
    database_manager = ExperimentDatabaseManager(
        mysql_credentials=sql_credentials.credentials, cache_size=40)
    database_manager.delete_experiment('database_plots_test_case_1')
    database_manager.close()
def test_read_write_3(self):
    """Write numpy arrays (ints, floats, strings) to a file database and
    verify every element reads back unchanged."""
    if os.path.exists('sample.db'):
        os.unlink('sample.db')

    writer = ExperimentDatabaseManager(file='sample.db', cache_size=40)
    writer.delete_experiment('writing_numerical_data_test_case_3')
    writer.set_experiment('writing_numerical_data_test_case_3')

    payload = {
        'var_1': np.array([19, 20]),
        'var_2': np.array([109, 110]),
        'var_3': np.array([54.1, 43]),
        'var_4': np.array(['hello', 'world']),
    }
    writer.insert_experiment_data('writing_numerical_data_test_case_3',
                                  payload)
    writer.flush()

    reader = ExperimentDatabaseReadingManager(file='sample.db')
    fetched = reader.get_data('writing_numerical_data_test_case_3',
                              'writing_numerical_data_test_case_3')
    for key in ('var_1', 'var_2', 'var_3', 'var_4'):
        assert fetched[key][0] == payload[key][0]
        assert fetched[key][1] == payload[key][1]

    # The end-of-test delete (flush check) is intentionally disabled here:
    # database_manager.delete_experiment('writing_numerical_data_test_case_3')
    writer.close()
def test_read_write_2(self):
    """Write Python lists of numbers to the MySQL server database and
    verify every element reads back unchanged."""
    writer = ExperimentDatabaseManager(
        mysql_credentials=sql_credentials.credentials, cache_size=40)
    writer.delete_experiment('writing_numerical_data_test_case_2')
    writer.set_experiment('writing_numerical_data_test_case_2')

    payload = {
        'var_1': [19, 20],
        'var_2': [109, 110],
        'var_3': [54.1, 43],
    }
    writer.insert_experiment_data('writing_numerical_data_test_case_2',
                                  payload)
    writer.flush()

    reader = ExperimentDatabaseReadingManager(
        mysql_credentials=sql_credentials.credentials)
    fetched = reader.get_data('writing_numerical_data_test_case_2',
                              'writing_numerical_data_test_case_2')
    for key in ('var_1', 'var_2', 'var_3'):
        assert fetched[key][0] == payload[key][0]
        assert fetched[key][1] == payload[key][1]

    # Delete at the end so the pending-cache flush path is exercised too.
    writer.delete_experiment('writing_numerical_data_test_case_2')
    writer.close()
'table_prefix', help= 'Output directory with .bin.gz files or a txt file with full paths of the bin gz files' ) parser.add_argument('output', help='PDF file') parser.add_argument('--condition_string', default='', help='Condition sql string') args = parser.parse_args() condition_string = None if len(args.condition_string) != 0: condition_string = args.condition_string plotter = HGCalAnalysisPlotter([ 'settings', 'efficiency_fo_truth', 'fake_rate_fo_pred', 'response_fo_truth', 'response_fo_pred', 'response_sum_fo_truth', 'energy_resolution' ]) reading_manager = ExperimentDatabaseReadingManager( mysql_credentials=sql_credentials.credentials) plotter.add_data_from_database(reading_manager, table_prefix=args.table_prefix, condition=condition_string) # plotter.add_data_from_database(reading_manager, table_prefix='alpha_plots_a2') # plotter.write_to_pdf(args.output, formatter=lambda x: 'Optimized f1 score\n$\\alpha$ param=$%.2f$ \n$\\beta$ param$=%.2f$ \n$\\beta=%.4f$\n$d=%.4f$\n'%(x['beta_param'],x['alpha_param'],x['beta_threshold'],x['distance_threshold'])) plotter.write_to_pdf( args.output, formatter=lambda x: 'IOU threshold %f' % x['iou_threshold'])
unique_id_path = os.path.join(train.outputDir, 'unique_id.txt') if os.path.exists(unique_id_path): with open(unique_id_path, 'r') as f: unique_id = f.readlines()[0].strip() else: unique_id = str(uuid.uuid4())[:8] with open(unique_id_path, 'w') as f: f.write(unique_id + '\n') nbatch = 50000 #this is rather low, and can be set to a higher values e.g. when training on V100s database_manager = ExperimentDatabaseManager(file=os.path.join( train.outputDir, "training_metrics.db"), cache_size=100) database_reading_manager = ExperimentDatabaseReadingManager( file=os.path.join(train.outputDir, "training_metrics.db")) database_manager.set_experiment(unique_id) metadata = matching_and_analysis.build_metadeta_dict( beta_threshold=0.5, distance_threshold=0.5, iou_threshold=0.0001, matching_type=matching_and_analysis.MATCHING_TYPE_MAX_FOUND) analyzer = matching_and_analysis.OCAnlayzerWrapper(metadata) cb += [ RunningMetricsDatabaseAdditionCallback(td, tensorboard_manager, database_manager=database_manager, analyzer=analyzer) ] cb += [ RunningMetricsPlotterCallback(