def try_mixed(arg_list, init_needed=True):
    r'''Run find_event_pipeline() over a mixed (complex) cadence and validate output.'''
    print('\n===== try_mixed: BEGIN =====')

    # Build the off-cadence filterbank file and its DAT file when requested.
    if init_needed:
        generate_fil_file(PATH_IRRELEVANT_FIL, -1, -1)
        make_one_dat_file(PATH_IRRELEVANT_FIL, max_drift=10.0, min_snr=20.0,
                          remove_h5=False)

    # Write the DAT-file list, one path per line, for the pipeline to consume.
    with open(PATH_DAT_LIST_FILE, 'w') as fh:
        fh.writelines(f'{path_dat}\n' for path_dat in arg_list)

    # Run the event-finding pipeline over the complex cadence.
    df_event = find_event_pipeline(PATH_DAT_LIST_FILE,
                                   sortby_tstart=False,
                                   filter_threshold=3,
                                   number_in_cadence=len(arg_list),
                                   user_validation=False,
                                   saving=True,
                                   on_source_complex_cadence='VOYAGER-1',
                                   csv_name=PATH_CSVF)

    # The pipeline must return a DataFrame ...
    if df_event is None:
        raise ValueError('try_mixed: returned pandas df is None!')

    # ... and must have produced the events CSV on disk.
    if not Path(PATH_CSVF).exists():
        raise ValueError('try_mixed: No CSV of events created')

    # Cross-check the hit table against the returned DataFrame.
    utl.validate_hittbl(df_event, PATH_CSVF, 'test_pipeline_mixed')
    print('\n===== try_mixed: END =====')
def add_one(arg_case_num, arg_fh, arg_fdir, arg_drsign):
    r'''Add one case to the reference file'''
    print('fb_genref: Case {} ...'.format(arg_case_num))

    # Generate the test filterbank file and search it for top hits.
    generate_fil_file(PATH_FIL_FILE, arg_fdir, arg_drsign)
    make_one_dat_file(PATH_FIL_FILE, max_drift=MAX_DRIFT, min_snr=MIN_SNR)
    path_dat_file = PATH_FIL_FILE.replace('.fil', '.dat')
    obs_tophit_1, obs_tophit_2 = get_case_results(path_dat_file)

    # Header comment lines identifying this case in the reference file.
    arg_fh.write('#\n')
    arg_fh.write('# Case {}: frequency {}, drift rate {}\n'.format(
        arg_case_num, arg_fdir, arg_drsign))

    # One space-separated record per observed top hit.
    FMT_RECORD = '{} {} {} {} {} {} {} {}\n'
    for tophit in (obs_tophit_1, obs_tophit_2):
        arg_fh.write(FMT_RECORD.format(arg_case_num, arg_fdir, arg_drsign,
                                       tophit.tophit_id, tophit.drate,
                                       tophit.snr, tophit.freq, tophit.index))
def exec_one_case(case_num, path_fil_file, max_drift, ref_tophit_1, ref_tophit_2):
    r'''Execute one test case'''
    if DEBUGGING:
        print('exec_one_case: on entry, max_drift={}\nref_tophit_1:::{}\nref_tophit_2:::{}'
              .format(max_drift, ref_tophit_1.to_string(), ref_tophit_2.to_string()))

    # Generate the filterbank file and run the hit search on it.
    generate_fil_file(path_fil_file, ref_tophit_1.fdir, ref_tophit_1.drsign)
    make_one_dat_file(path_fil_file, max_drift=max_drift, min_snr=MIN_SNR)
    path_dat_file = path_fil_file.replace('.fil', '.dat')
    obs_tophit_1, obs_tophit_2 = get_case_results(path_dat_file)

    # Observed hits carry placeholder 0 values for fdir/drsign; copy the
    # reference values in so the comparison sees matching metadata.
    for observed, reference in ((obs_tophit_1, ref_tophit_1),
                                (obs_tophit_2, ref_tophit_2)):
        observed.fdir = reference.fdir
        observed.drsign = reference.drsign

    if DEBUGGING:
        print('exec_one_case: case results\nobs_tophit_1:::{}\nobs_tophit_2:::{}'
              .format(obs_tophit_1.to_string(), obs_tophit_2.to_string()))

    # Compare each observed hit against its reference counterpart.
    case_comparison(obs_tophit_1, ref_tophit_1, max_drift)
    case_comparison(obs_tophit_2, ref_tophit_2, max_drift)
    gc.collect()
    print('=== CASE {} at max drift {} success'.format(case_num, max_drift))