def query_datebase_cr(db_session, query_time_min=None, query_time_max=None, interest_date=None, center=None, ref_inst=None, cr_rot=None):
    """Query EUV images either by a time window, a Carrington rotation, or a date of interest.

    Exactly one selection mode is used, in this priority order:
      1. both query_time_min and query_time_max given -> query by time range
      2. cr_rot given -> query rotation [cr_rot, cr_rot + 1)
      3. otherwise -> look up the rotation containing interest_date (via ref_inst)
         and query that rotation (centered on it when `center` is truthy)

    @param db_session: connected database session
    @param query_time_min: datetime, start of time-range query (mode 1)
    @param query_time_max: datetime, end of time-range query (mode 1)
    @param interest_date: datetime used to locate a Carrington rotation (mode 3)
    @param center: if truthy, query rotation window centered on interest_date's rotation
    @param ref_inst: reference instrument name used to resolve interest_date to a rotation
    @param cr_rot: Carrington rotation number (mode 2)
    @return: pandas DataFrame of image records
    """
    # Bug fix: the original `if query_time_min and query_time_max is not None` parsed as
    # `query_time_min and (query_time_max is not None)`, relying on truthiness of the
    # first argument only. Test both explicitly.
    if query_time_min is not None and query_time_max is not None:
        query_pd = db_funcs.query_euv_images(db_session=db_session, time_min=query_time_min,
                                             time_max=query_time_max)
    elif cr_rot is not None:
        query_pd = db_funcs.query_euv_images_rot(db_session, rot_min=cr_rot, rot_max=cr_rot + 1)
    else:
        ref_instrument = [ref_inst, ]
        # NOTE(review): time_min == time_max here (both interest_date + 1h) — looks like it
        # relies on query_euv_images returning the nearest image; confirm intended window.
        euv_images = db_funcs.query_euv_images(
            db_session, time_min=interest_date + datetime.timedelta(hours=1),
            time_max=interest_date + datetime.timedelta(hours=1), instrument=ref_instrument)
        # get min and max carrington rotation
        # TODO: really only want one CR_value
        cr_rot = euv_images.cr_rot
        if center:
            query_pd = db_funcs.query_euv_images_rot(db_session, rot_min=cr_rot - 0.5,
                                                     rot_max=cr_rot + 0.5)
        else:
            query_pd = db_funcs.query_euv_images_rot(db_session, rot_min=cr_rot,
                                                     rot_max=cr_rot + 1)
    return query_pd
db_session = init_db_conn_old(db_name=use_db, chd_base=db_class.Base, user=user, password=password) # method information meth_name = "LBCC" ##### QUERY IMAGES ###### for inst_index, instrument in enumerate(inst_list): query_instrument = [ instrument, ] image_pd = query_euv_images(db_session=db_session, time_min=lbc_query_time_min, time_max=lbc_query_time_max, instrument=query_instrument) # query correct image combos combo_query = query_inst_combo( db_session, lbc_query_time_min - datetime.timedelta(weeks=2), lbc_query_time_max + datetime.timedelta(weeks=2), meth_name, instrument) ###### GET LOS IMAGES COORDINATES (DATA) ##### # apply LBC for index in range(n_images_plot): row = image_pd.iloc[index] print("Processing image number", row.data_id, ".") if row.fname_hdf == "": print("Warning: Image # " + str(row.data_id) + " does not have an associated hdf file. Skipping")
# search for images in database that have no processed fname if do_all_unprocessed: query_result = pd.read_sql( db_session.query(DBClass.EUV_Images).filter( DBClass.EUV_Images.fname_hdf == "", DBClass.EUV_Images.flag == 0).statement, db_session.bind) # sort it by time so that it is easy to track progression in a physical way query_result.sort_values(by=['date_obs'], inplace=True) # or query the database for each spacecraft type for a specific time range else: period_start = datetime.datetime(2013, 3, 3, 0, 0, 0) period_end = datetime.datetime(2013, 3, 4, 0, 0, 0) fs = query_euv_images(db_session=db_session, time_min=period_start, time_max=period_end, instrument=('AIA',)) fa = query_euv_images(db_session=db_session, time_min=period_start, time_max=period_end, instrument=('EUVI-A',)) fb = query_euv_images(db_session=db_session, time_min=period_start, time_max=period_end, instrument=('EUVI-B',)) query_result = pd.concat([fs, fa, fb], axis=0) # use default print for the data frame to see start and end print(query_result) # print out the number of records to prep print(f'### Query Returned {len(query_result)} images to prep') # Prep options deconvolve = True write = True # start the IDL session for STEREO A or B if necessary
# setup database connection use_db = "sqlite" sqlite_filename = "dbtest.db" sqlite_path = os.path.join(database_dir, sqlite_filename) db_session = init_db_conn_old(db_name=use_db, chd_base=Base, sqlite_path=sqlite_path) # query_EUV_images function: # requires time_min and time_max (datetime). do we need to code 'jd' time option? query_time_min = datetime.datetime(2014, 4, 13, 19, 35, 0) query_time_max = datetime.datetime(2014, 4, 13, 19, 37, 0) print("\nQuery DB for downloaded images with timestamps between " + str(query_time_min) + " and " + str(query_time_max)) test_pd = query_euv_images(db_session=db_session, time_min=query_time_min, time_max=query_time_max) # returns info on 3 images print(test_pd) # query specific instrument print("\nQuery the reference database for all entries with instrument='AIA'.") query_time_min = datetime.datetime(2014, 4, 13, 10, 0, 0) instrument = ("AIA", ) test_pd = query_euv_images(db_session=db_session, time_min=query_time_min, time_max=query_time_max, instrument=instrument) print(test_pd) # query specific wavelength
    # sqlite branch: file-based local database (enclosing `if use_db == 'sqlite':` starts
    # before this chunk)
    sqlite_path = os.path.join(database_dir, sqlite_filename)
    db_session = init_db_conn_old(db_name=use_db, chd_base=db_class.Base, sqlite_path=sqlite_path)
elif use_db == 'mysql-Q':
    # setup database connection to MySQL database on Q
    db_session = init_db_conn_old(db_name=use_db, chd_base=db_class.Base, user=user,
                                  password=password)

# query a list of images in query range for each instrument. sort by time
aia_images = query_euv_images(db_session, time_min=query_start, time_max=query_end,
                              instrument=("AIA", ), wavelength=(wave_aia, ))
aia_images = aia_images.sort_values(by="date_obs")
euvia_images = query_euv_images(db_session, time_min=query_start, time_max=query_end,
                                instrument=("EUVI-A", ), wavelength=(wave_euvi, ))
euvia_images = euvia_images.sort_values(by="date_obs")
euvib_images = query_euv_images(db_session, time_min=query_start, time_max=query_end,
                                instrument=("EUVI-B", ),
    # sqlite branch of a connection `if` that starts before this chunk
    db_session = init_db_conn_old(db_name=use_db, chd_base=Base, sqlite_path=sqlite_path)
elif use_db == 'mysql-Q':
    # setup database connection to MySQL database on Q
    db_session = init_db_conn_old(db_name=use_db, chd_base=Base, user=user, password=password)

# build database
print("\nNow build database image records for each existing file:")
db_session = build_euvimages_from_fits(db_session=db_session, raw_data_dir=raw_data_dir,
                                       hdf_data_dir=hdf_data_dir)
print("\nProcess complete.")

if use_db == 'sqlite':
    # recover all image records and print
    print("\nCheck that 'euv_images' table contains records:")
    test_pd = query_euv_images(db_session=db_session)
    print(test_pd)
elif use_db == 'mysql-Q':
    # recover all image records and print the count
    test_pd = query_euv_images(db_session=db_session)
    num_rows = test_pd.shape[0]
    print("\nTotal number of images in database: " + str(num_rows) + "\n")

db_session.close()
    # sqlite branch of a connection `if` that starts before this chunk
    db_session = db_funcs.init_db_conn_old(db_name=use_db, chd_base=db_class.Base,
                                           sqlite_path=sqlite_path)
elif use_db in ['mysql-Q', 'mysql-Q_test']:
    # setup database connection to MySQL database on Q
    db_session = db_funcs.init_db_conn_old(db_name=use_db, chd_base=db_class.Base, user=user,
                                           password=password)

# create IIT method
meth_name = "IIT"
meth_desc = "IIT Fit Method"
# create=False: method must already exist in the DB; method_id[1] is the numeric id
method_id = db_funcs.get_method_id(db_session, meth_name, meth_desc, var_names=None,
                                   var_descs=None, create=False)

#### GET REFERENCE INFO FOR LATER USE ####
# get index number of reference instrument
ref_index = inst_list.index(ref_inst)
# query euv images to get carrington rotation range
ref_instrument = [ref_inst, ]
euv_images = db_funcs.query_euv_images(db_session, time_min=calc_query_time_min,
                                       time_max=calc_query_time_max, instrument=ref_instrument,
                                       wavelength=wavelengths)
# get min and max carrington rotation
rot_max = euv_images.cr_rot.max()
rot_min = euv_images.cr_rot.min()

# calculate the parameter moving average centers
moving_avg_centers, moving_width = lbcc.moving_averages(calc_query_time_min,
                                                        calc_query_time_max, weekday,
                                                        number_of_days)
# calculate image cadence centers spanning the full moving-average range
range_min_date = moving_avg_centers[0] - moving_width/2
range_max_date = moving_avg_centers[-1] + moving_width/2
image_centers = synch_utils.get_dates(
    time_min=range_min_date.astype(datetime.datetime),
    time_max=range_max_date.astype(datetime.datetime), map_freq=image_freq)
use_db = "mysql-Q" # 'sqlite' Use local sqlite file-based db # 'mysql-Q' Use the remote MySQL database on Q # 'mysql-Q_test' Use the development database on Q user = "******" # only needed for remote databases. password = "" # See example109 for setting-up an encrypted password. In this case leave password="", and # init_db_conn_old() will automatically find and use your saved password. Otherwise, enter your MySQL password here. # Establish connection to database db_session = db_funs.init_db_conn_old(db_name=use_db, chd_base=db_class.Base, user=user, password=password) # ------------ NO NEED TO UPDATE ANYTHING BELOW ------------- # # query images query_pd = db_funs.query_euv_images(db_session, time_min=query_time_min, time_max=query_time_max, instrument=inst_list, wavelength=wavelengths) # get method id meth_name = 'LBCC' meth_desc = 'LBCC Theoretic Fit Method' method_id = db_funs.get_method_id(db_session, meth_name, meth_desc, var_names=None, var_descs=None, create=False) # query LBC histograms hist_pd = db_funs.query_hist(db_session, meth_id=method_id[1], n_mu_bins=n_mu_bins, n_intensity_bins=n_intensity_bins, time_min=query_time_min, time_max=query_time_max, instrument=inst_list, wavelength=wavelengths) # convert the binary types back to arrays mu_bin_array, intensity_bin_array, full_hist = psi_d_types.binary_to_hist(
database_dir = os.path.join(App.APP_HOME, "reference_data")
sqlite_filename = "dbtest.db"
# setup database connection
use_db = "sqlite"
sqlite_path = os.path.join(database_dir, sqlite_filename)
db_session = init_db_conn_old(db_name=use_db, chd_base=db_class.Base, sqlite_path=sqlite_path)

# query some images
query_time_min = datetime.datetime(2014, 4, 13, 0, 0, 0)
query_time_max = datetime.datetime(2014, 4, 14, 0, 0, 1)
query_pd = query_euv_images(db_session=db_session, time_min=query_time_min,
                            time_max=query_time_max)
print(query_pd.instrument)
# use these three images (one from each instrument)
selected_images = query_pd

# declare map and binning parameters
R0 = 1.01
# 19 mu-bin edges spanning [0.1, 1.0] in steps of 0.05
mu_bin_edges = np.array(range(19), dtype="float") * 0.05 + 0.1
# 61 intensity-bin edges spanning [0.5, 3.5] in steps of 0.05 (log10 intensities)
image_intensity_bin_edges = np.array(range(61), dtype="float") * .05 + 0.5
log10 = True
# image_intensity_bin_edges = np.array(range(4000), dtype="float")
# image_intensity_bin_edges = np.append(image_intensity_bin_edges, np.inf)
# log10 = False
# narrow equatorial latitude band used for histogram sampling
lat_band = [-np.pi / 64., np.pi / 64.]
def apply_iit_correction(db_session, hdf_data_dir, iit_query_time_min, iit_query_time_max,
                         inst_list, ref_inst, n_intensity_bins=200, R0=1.01, n_images_plot=1,
                         plot=False):
    """Apply LBC then IIT corrections to sample images from each instrument; optionally plot.

    @param db_session: connected db session used for all queries
    @param hdf_data_dir: directory holding processed image hdf files
    @param iit_query_time_min: minimum query time (datetime)
    @param iit_query_time_max: maximum query time (datetime)
    @param inst_list: list of instrument names to process
    @param ref_inst: reference instrument used to determine the Carrington-rotation range
    @param n_intensity_bins: number of intensity bins for the LBC correction
    @param R0: radius parameter passed to the correction routines
    @param n_images_plot: number of images (per instrument) to correct/plot
    @param plot: if True, plot LBC image, IIT image, and their difference
    @return: None (plots are the only output)
    """
    # start time
    start_time = time.time()

    #### GET REFERENCE INFO FOR LATER USE ####
    # query euv images to get carrington rotation range
    ref_instrument = [ref_inst, ]
    euv_images = db_funcs.query_euv_images(db_session, time_min=iit_query_time_min,
                                           time_max=iit_query_time_max,
                                           instrument=ref_instrument)
    # get min and max carrington rotation
    rot_max = euv_images.cr_rot.max()
    rot_min = euv_images.cr_rot.min()

    for inst_index, instrument in enumerate(inst_list):
        #### QUERY IMAGES ####
        # query by rotation so every instrument covers the same CR range as the reference
        query_instrument = [instrument, ]
        rot_images = db_funcs.query_euv_images_rot(db_session, rot_min=rot_min, rot_max=rot_max,
                                                   instrument=query_instrument)
        image_pd = rot_images.sort_values(by=['cr_rot'])
        # get time minimum and maximum for instrument
        inst_time_min = rot_images.date_obs.min()
        inst_time_max = rot_images.date_obs.max()
        # query correct image combos
        lbc_meth_name = "LBCC"
        combo_query_lbc = db_funcs.query_inst_combo(db_session, inst_time_min, inst_time_max,
                                                    lbc_meth_name, instrument)
        iit_meth_name = "IIT"
        combo_query_iit = db_funcs.query_inst_combo(db_session, inst_time_min, inst_time_max,
                                                    iit_meth_name, instrument)
        # apply LBC to the first n_images_plot images (sorted by rotation)
        for index in range(n_images_plot):
            row = image_pd.iloc[index]
            print("Processing image number", row.data_id, "for IIT Correction.")
            #### APPLY LBC CORRECTION #####
            original_los, lbcc_image, mu_indices, use_indices, theoretic_query = lbcc_funcs.apply_lbc(
                db_session, hdf_data_dir, combo_query_lbc, image_row=row,
                n_intensity_bins=n_intensity_bins, R0=R0)
            #### APPLY IIT CORRECTION ####
            lbcc_image, iit_image, use_indices, alpha, x = apply_iit(
                db_session, combo_query_iit, lbcc_image, use_indices, original_los, R0=R0)
            if plot:
                lbcc_data = lbcc_image.lbcc_data
                corrected_iit_data = iit_image.iit_data
                # plot LBC image; figure numbers 100/200/300 + offset keep windows distinct
                Plotting.PlotCorrectedImage(lbcc_data, los_image=original_los,
                                            nfig=100 + inst_index * 10 + index,
                                            title="Corrected LBCC Image for " + instrument)
                # plot IIT image
                Plotting.PlotCorrectedImage(corrected_iit_data, los_image=original_los,
                                            nfig=200 + inst_index * 10 + index,
                                            title="Corrected IIT Image for " + instrument)
                # plot difference
                Plotting.PlotCorrectedImage(corrected_iit_data - lbcc_data,
                                            los_image=original_los,
                                            nfig=300 + inst_index * 10 + index,
                                            title="Difference Plot for " + instrument)
    # end time
    end_time = time.time()
    print("ITT has been applied and specified images plotted.")
    print("Total elapsed time to apply correction and plot: " +
          str(round(end_time - start_time, 3)) + " seconds.")
    return None
def plot_iit_histograms(db_session, hdf_data_dir, hist_query_time_min, hist_query_time_max,
                        inst_list, ref_inst, n_intensity_bins=200, lat_band=None, R0=1.01,
                        log10=True):
    """Build and plot original / post-LBC / post-IIT intensity histograms per instrument.

    @param db_session: connected db session for all queries
    @param hdf_data_dir: directory of processed image hdf files
    @param hist_query_time_min: minimum query time (datetime)
    @param hist_query_time_max: maximum query time (datetime)
    @param inst_list: list of instrument names
    @param ref_inst: reference instrument used to determine the Carrington-rotation range
    @param n_intensity_bins: number of intensity bins
    @param lat_band: latitude band; defaults to [-pi/2.4, pi/2.4]
    @param R0: radius parameter for the correction routines
    @param log10: whether intensities are histogrammed in log10 space
    @return: None (figures are the only output)
    """
    # Bug fix: `lat_band` previously used a mutable list as default argument; use a
    # None sentinel with the same effective default.
    if lat_band is None:
        lat_band = [-np.pi / 2.4, np.pi / 2.4]
    # start time
    start_time = time.time()

    #### GET REFERENCE INFO FOR LATER USE ####
    # get index number of reference instrument
    ref_index = inst_list.index(ref_inst)
    # query euv images to get carrington rotation range
    ref_instrument = [ref_inst, ]
    euv_images = db_funcs.query_euv_images(db_session, time_min=hist_query_time_min,
                                           time_max=hist_query_time_max,
                                           instrument=ref_instrument)
    # get min and max carrington rotation
    rot_max = euv_images.cr_rot.max()
    rot_min = euv_images.cr_rot.min()

    # method information
    meth_name = "IIT"
    method_id = db_funcs.get_method_id(db_session, meth_name, meth_desc=None, var_names=None,
                                       var_descs=None, create=False)
    # query for IIT histograms
    pd_lbc_hist = db_funcs.query_hist(db_session=db_session, meth_id=method_id[1],
                                      n_intensity_bins=n_intensity_bins, lat_band=lat_band,
                                      time_min=hist_query_time_min,
                                      time_max=hist_query_time_max)
    pd_lbc_hist_srt = pd_lbc_hist.sort_values(by=['image_id'])
    # convert the binary types back to arrays
    mu_bin_edges, intensity_bin_edges, full_lbc_hist = psi_d_types.binary_to_hist(
        pd_lbc_hist_srt, n_mu_bins=None, n_intensity_bins=n_intensity_bins)
    # create corrected/original histograms
    original_hist_list = np.full(full_lbc_hist.shape, 0, dtype=np.int64)
    corrected_hist_list = np.full(full_lbc_hist.shape, 0, dtype=np.int64)

    for inst_index, instrument in enumerate(inst_list):
        print("Applying corrections for", instrument)
        #### QUERY IMAGES ####
        query_instrument = [instrument, ]
        rot_images = db_funcs.query_euv_images_rot(db_session, rot_min=rot_min, rot_max=rot_max,
                                                   instrument=query_instrument)
        image_pd = rot_images.sort_values(by=['cr_rot'])
        # query correct image combos.
        # Fix: the original code issued these two queries twice (first with the
        # per-instrument min/max times, then immediately overwrote the results with the
        # hist_query-range versions below). Only the final, used queries are kept.
        combo_query_lbc = db_funcs.query_inst_combo(db_session, hist_query_time_min,
                                                    hist_query_time_max, meth_name="LBCC",
                                                    instrument=instrument)
        combo_query_iit = db_funcs.query_inst_combo(db_session, hist_query_time_min,
                                                    hist_query_time_max, meth_name="IIT",
                                                    instrument=instrument)
        for index, row in image_pd.iterrows():
            # apply LBC
            original_los, lbcc_image, mu_indices, use_indices, theoretic_query = lbcc_funcs.apply_lbc(
                db_session, hdf_data_dir, combo_query_lbc, image_row=row,
                n_intensity_bins=n_intensity_bins, R0=R0)
            #### ORIGINAL LOS DATA ####
            # calculate IIT histogram from original data
            original_los_hist = psi_d_types.LosImage.iit_hist(original_los, intensity_bin_edges,
                                                              lat_band, log10)
            # NOTE(review): `index` is the iterrows() DataFrame label, used here as a
            # positional column index into the histogram arrays — assumes labels are
            # 0..n-1; confirm upstream indexing.
            original_hist_list[:, index] = original_los_hist
            #### CORRECTED DATA ####
            # apply IIT correction
            lbcc_image, iit_image, use_indices, alpha, x = iit_funcs.apply_iit(
                db_session, combo_query_iit, lbcc_image, use_indices, original_los, R0=R0)
            #### CREATE CORRECTED IIT HISTOGRAM #####
            # calculate IIT histogram from LBC
            hist_iit = psi_d_types.IITImage.iit_hist(iit_image, lat_band, log10)
            # create IIT histogram datatype
            corrected_hist = psi_d_types.create_iit_hist(iit_image, method_id[1], lat_band,
                                                         hist_iit)
            corrected_hist_list[:, index] = corrected_hist.hist

    # plotting definitions
    color_list = ['red', 'blue', 'black']
    linestyle_list = ['solid', 'dashed', 'dashdot']

    #### CREATE NEW HISTOGRAM ####
    for inst_index, instrument in enumerate(inst_list):
        print("Plotting Histograms for", instrument)
        #### GET INDICES TO USE ####
        # get index of instrument in histogram dataframe
        hist_inst = pd_lbc_hist_srt['instrument']
        pd_inst_index = hist_inst[hist_inst == instrument].index
        #### ORIGINAL HISTOGRAM #####
        # define histogram (sum across this instrument's columns)
        original_hist = original_hist_list[:, pd_inst_index].sum(axis=1)
        # normalize histogram
        row_sums = original_hist.sum(axis=0, keepdims=True)
        norm_original_hist = original_hist / row_sums
        # plot original
        Plotting.Plot1d_Hist(norm_original_hist, instrument, inst_index, intensity_bin_edges,
                             color_list, linestyle_list, figure=100,
                             xlabel="Intensity (log10)", ylabel="H(I)",
                             title="Histogram: Original LOS Data")
        #### LBCC HISTOGRAM #####
        # define histogram
        lbc_hist = full_lbc_hist[:, pd_inst_index].sum(axis=1)
        # normalize histogram
        lbc_sums = lbc_hist.sum(axis=0, keepdims=True)
        norm_lbc_hist = lbc_hist / lbc_sums
        # plot lbcc
        Plotting.Plot1d_Hist(norm_lbc_hist, instrument, inst_index, intensity_bin_edges,
                             color_list, linestyle_list, figure=200,
                             xlabel="Intensity (log10)", ylabel="H(I)",
                             title="Histogram: Post LBCC")
        #### CORRECTED HISTOGRAM ####
        # define histogram
        corrected_hist = corrected_hist_list[:, pd_inst_index].sum(axis=1)
        # normalize histogram
        iit_sums = corrected_hist.sum(axis=0, keepdims=True)
        norm_corrected_hist = corrected_hist / iit_sums
        # plot corrected
        Plotting.Plot1d_Hist(norm_corrected_hist, instrument, inst_index, intensity_bin_edges,
                             color_list, linestyle_list, figure=300,
                             xlabel="Intensity (log10)", ylabel="H(I)",
                             title="Histogram: Post IIT")

    # end time
    end_time = time.time()
    print("ITT has been applied and original/resulting histograms plotted.")
    print("Total elapsed time to apply correction and plot histograms: " +
          str(round(end_time - start_time, 3)) + " seconds.")
    return None
def create_histograms(db_session, inst_list, lbc_query_time_min, lbc_query_time_max,
                      hdf_data_dir, n_intensity_bins=200, lat_band=None, log10=True, R0=1.01,
                      wavelengths=None):
    """
    create and save (to database) IIT-Histograms from LBC Data
    @param db_session: connected db session for querying EUV images and saving histograms
    @param inst_list: list of instruments
    @param lbc_query_time_min: minimum query time for applying lbc fit
    @param lbc_query_time_max: maximum query time for applying lbc fit
    @param hdf_data_dir: directory of processed images to plot original images
    @param n_intensity_bins: number of intensity bins
    @param lat_band: latitude band; defaults to [-pi/2.4, pi/2.4]
    @param log10: boolean value
    @param R0: radius
    @param wavelengths: optional wavelength filter passed to the queries
    @return: None, saves histograms to database
    """
    # Bug fix: `lat_band` previously used a mutable list as default argument; use a
    # None sentinel with the same effective default.
    if lat_band is None:
        lat_band = [-np.pi / 2.4, np.pi / 2.4]
    # start time
    start_time = time.time()

    # create IIT method
    meth_name = "IIT"
    meth_desc = "IIT Fit Method"
    method_id = db_funcs.get_method_id(db_session, meth_name, meth_desc, var_names=None,
                                       var_descs=None, create=True)

    for instrument in inst_list:
        print("Beginning loop for instrument:", instrument)
        # query EUV images
        query_instrument = [instrument, ]
        image_pd_all = db_funcs.query_euv_images(db_session=db_session,
                                                 time_min=lbc_query_time_min,
                                                 time_max=lbc_query_time_max,
                                                 instrument=query_instrument,
                                                 wavelength=wavelengths)
        # query LBCC histograms
        hist_pd = db_funcs.query_hist(db_session, meth_id=method_id[1],
                                      n_intensity_bins=n_intensity_bins, lat_band=lat_band,
                                      time_min=lbc_query_time_min,
                                      time_max=lbc_query_time_max,
                                      instrument=query_instrument, wavelength=wavelengths)
        if hist_pd.shape[0] == 0:
            # no existing histograms: use all images in range
            in_index = pd.Series([False] * image_pd_all.shape[0])
        else:
            # compare image results to hist results based on image_id
            in_index = image_pd_all.image_id.isin(hist_pd.image_id)
        # return only images that do not have corresponding histograms
        image_pd = image_pd_all[~in_index]
        # check that images remain that need histograms
        if image_pd.shape[0] == 0:
            print("All " + instrument +
                  " images in timeframe already have associated histograms.")
            continue
        # apply LBC to each remaining image and save its IIT histogram
        for index, row in image_pd.iterrows():
            print("Calculating IIT histogram at time:", row.date_obs)
            original_los, lbcc_image, mu_indices, use_indices, theoretic_query = lbcc_funcs.apply_lbc_2(
                db_session, hdf_data_dir, image_row=row, n_intensity_bins=n_intensity_bins,
                R0=R0)
            # check that image load and LBCC application finished successfully
            if original_los is None:
                continue
            # calculate IIT histogram from LBC
            hist = psi_d_types.LBCCImage.iit_hist(lbcc_image, lat_band, log10)
            # create IIT histogram datatype
            iit_hist = psi_d_types.create_iit_hist(lbcc_image, method_id[1], lat_band, hist)
            # add IIT histogram and meta data to database
            db_funcs.add_hist(db_session, iit_hist)

    end_time = time.time()
    print(
        "Inter-instrument transformation histograms have been created and saved to the database."
    )
    print("Total elapsed time for histogram creation: " +
          str(round(end_time - start_time, 3)) + " seconds.")
    return None
def calc_iit_coefficients(db_session, inst_list, ref_inst, calc_query_time_min,
                          calc_query_time_max, weekday=0, number_of_days=180, image_freq=2,
                          image_del=np.timedelta64(30, 'm'), n_intensity_bins=200,
                          lat_band=None, create=False, wavelengths=None):
    """Calculate IIT (alpha, x) fit coefficients per instrument and moving-average window.

    The reference instrument is assigned the identity transform (alpha=1, x=0); every other
    instrument is fit so its normalized intensity histogram matches the reference histogram
    over the same time window.

    @param db_session: connected db session for queries and for storing fit values
    @param inst_list: list of instrument names
    @param ref_inst: reference instrument (e.g. "AIA")
    @param calc_query_time_min: minimum query time (datetime)
    @param calc_query_time_max: maximum query time (datetime)
    @param weekday: weekday anchor for moving-average centers
    @param number_of_days: width (days) of each moving-average window
    @param image_freq: image cadence passed to synch_utils.get_dates
    @param image_del: tolerance for matching one histogram per cadence center
    @param n_intensity_bins: number of intensity bins
    @param lat_band: latitude band; defaults to [-pi/2.4, pi/2.4]
    @param create: passed through to store_iit_values (create new method/var records)
    @param wavelengths: optional wavelength filter
    @return: None, saves fit values to database
    """
    # Bug fix: `lat_band` previously used a mutable list as default argument; use a
    # None sentinel with the same effective default.
    if lat_band is None:
        lat_band = [-np.pi / 2.4, np.pi / 2.4]
    # start time
    start_time = time.time()

    # create IIT method
    meth_name = "IIT"
    meth_desc = "IIT Fit Method"
    method_id = db_funcs.get_method_id(db_session, meth_name, meth_desc, var_names=None,
                                       var_descs=None, create=False)

    #### GET REFERENCE INFO FOR LATER USE ####
    # get index number of reference instrument
    ref_index = inst_list.index(ref_inst)
    # query euv images to get carrington rotation range
    ref_instrument = [ref_inst, ]
    euv_images = db_funcs.query_euv_images(db_session, time_min=calc_query_time_min,
                                           time_max=calc_query_time_max,
                                           instrument=ref_instrument, wavelength=wavelengths)
    # get min and max carrington rotation
    rot_max = euv_images.cr_rot.max()
    rot_min = euv_images.cr_rot.min()

    # calculate the moving average centers
    ref_moving_avg_centers, moving_width = lbcc.moving_averages(
        calc_query_time_min, calc_query_time_max, weekday, number_of_days)
    # calculate image cadence centers
    range_min_date = ref_moving_avg_centers[0] - moving_width / 2
    range_max_date = ref_moving_avg_centers[-1] + moving_width / 2
    image_centers = synch_utils.get_dates(
        time_min=range_min_date.astype(datetime.datetime),
        time_max=range_max_date.astype(datetime.datetime), map_freq=image_freq)

    # query histograms (window padded by number_of_days so edge averages have data)
    ref_hist_pd = db_funcs.query_hist(
        db_session=db_session, meth_id=method_id[1], n_intensity_bins=n_intensity_bins,
        lat_band=lat_band,
        time_min=calc_query_time_min - datetime.timedelta(days=number_of_days),
        time_max=calc_query_time_max + datetime.timedelta(days=number_of_days),
        instrument=ref_instrument, wavelength=wavelengths)
    # keep only one observation-histogram per image_center window
    keep_ind = lbcc.cadence_choose(ref_hist_pd.date_obs, image_centers, image_del)
    ref_hist_pd = ref_hist_pd.iloc[keep_ind]
    # convert binary to histogram data
    mu_bin_edges, intensity_bin_edges, ref_full_hist = psi_d_types.binary_to_hist(
        hist_binary=ref_hist_pd, n_mu_bins=None, n_intensity_bins=n_intensity_bins)

    # determine date of first AIA image
    min_ref_time = db_session.query(func.min(db_class.EUV_Images.date_obs)).filter(
        db_class.EUV_Images.instrument == ref_inst).all()
    base_ref_min = min_ref_time[0][0]
    base_ref_center = base_ref_min + datetime.timedelta(days=number_of_days) / 2
    base_ref_max = base_ref_center + datetime.timedelta(days=number_of_days) / 2
    if (calc_query_time_min - datetime.timedelta(days=7)) < base_ref_center:
        # generate histogram for first year of reference instrument
        ref_base_hist = ref_full_hist[:, (ref_hist_pd['date_obs'] >= str(base_ref_min)) &
                                      (ref_hist_pd['date_obs'] <= str(base_ref_max))]
    else:
        ref_base_hist = None

    for inst_index, instrument in enumerate(inst_list):
        # check if this is the reference instrument
        if inst_index == ref_index:
            # loop through moving average centers
            for date_index, center_date in enumerate(ref_moving_avg_centers):
                print("Starting calculations for", instrument, ":", center_date)
                if center_date > ref_hist_pd.date_obs.max() or \
                        center_date < ref_hist_pd.date_obs.min():
                    print("Date is out of instrument range, skipping.")
                    continue
                # determine time range based off moving average centers
                min_date = center_date - moving_width / 2
                max_date = center_date + moving_width / 2
                # get the correct date range to use for image combos
                ref_pd_use = ref_hist_pd[(ref_hist_pd['date_obs'] >= str(min_date)) &
                                         (ref_hist_pd['date_obs'] <= str(max_date))]
                # save alpha/x as [1, 0] for reference instrument (identity transform)
                alpha = 1
                x = 0
                db_funcs.store_iit_values(db_session, ref_pd_use, meth_name, meth_desc,
                                          [alpha, x], create)
        else:
            # query euv_images for correct carrington rotation
            query_instrument = [instrument, ]
            rot_images = db_funcs.query_euv_images_rot(
                db_session, rot_min=rot_min, rot_max=rot_max, instrument=query_instrument,
                wavelength=wavelengths)
            if rot_images.shape[0] == 0:
                print("No images in timeframe for ", instrument, ". Skipping")
                continue
            # get time minimum and maximum for instrument
            inst_time_min = rot_images.date_obs.min()
            inst_time_max = rot_images.date_obs.max()
            # if Stereo A or B has images before AIA, calc IIT for those weeks
            if inst_time_min > calc_query_time_min:
                all_images = db_funcs.query_euv_images(
                    db_session, time_min=calc_query_time_min, time_max=calc_query_time_max,
                    instrument=query_instrument, wavelength=wavelengths)
                if all_images.date_obs.min() < inst_time_min:
                    inst_time_min = all_images.date_obs.min()
            moving_avg_centers, moving_width = lbcc.moving_averages(
                inst_time_min, inst_time_max, weekday, number_of_days)
            # calculate image cadence centers
            range_min_date = moving_avg_centers[0] - moving_width / 2
            range_max_date = moving_avg_centers[-1] + moving_width / 2
            image_centers = synch_utils.get_dates(
                time_min=range_min_date.astype(datetime.datetime),
                time_max=range_max_date.astype(datetime.datetime), map_freq=image_freq)
            inst_hist_pd = db_funcs.query_hist(
                db_session=db_session, meth_id=method_id[1],
                n_intensity_bins=n_intensity_bins, lat_band=lat_band,
                time_min=inst_time_min - datetime.timedelta(days=number_of_days),
                time_max=inst_time_max + datetime.timedelta(days=number_of_days),
                instrument=query_instrument, wavelength=wavelengths)
            # keep only one observation-histogram per image_center window
            keep_ind = lbcc.cadence_choose(inst_hist_pd.date_obs, image_centers, image_del)
            inst_hist_pd = inst_hist_pd.iloc[keep_ind]
            # convert binary to histogram data
            mu_bin_edges, intensity_bin_edges, inst_full_hist = psi_d_types.binary_to_hist(
                hist_binary=inst_hist_pd, n_mu_bins=None, n_intensity_bins=n_intensity_bins)
            # loops through moving average centers
            for date_index, center_date in enumerate(moving_avg_centers):
                print("Starting calculations for", instrument, ":", center_date)
                if center_date > inst_hist_pd.date_obs.max() or \
                        center_date < inst_hist_pd.date_obs.min():
                    print("Date is out of instrument range, skipping.")
                    continue
                # determine time range based off moving average centers
                min_date = center_date - moving_width / 2
                max_date = center_date + moving_width / 2
                # get proper time-range of reference histograms
                if center_date <= base_ref_center:
                    # if date is earlier than reference (AIA) first year, use reference
                    # (AIA) first year
                    ref_hist_use = ref_base_hist
                else:
                    # get indices for calculation of reference histogram
                    ref_hist_ind = (ref_hist_pd['date_obs'] >= str(min_date)) & \
                                   (ref_hist_pd['date_obs'] <= str(max_date))
                    ref_hist_use = ref_full_hist[:, ref_hist_ind]
                # get the correct date range to use for the instrument histogram
                inst_hist_ind = (inst_hist_pd['date_obs'] >= str(min_date)) & \
                                (inst_hist_pd['date_obs'] <= str(max_date))
                inst_pd_use = inst_hist_pd[inst_hist_ind]
                # get indices and histogram for calculation
                inst_hist_use = inst_full_hist[:, inst_hist_ind]
                # sum histograms over the window
                hist_fit = inst_hist_use.sum(axis=1)
                hist_ref = ref_hist_use.sum(axis=1)
                # normalize fit histogram
                fit_sum = hist_fit.sum()
                norm_hist_fit = hist_fit / fit_sum
                # normalize reference histogram
                ref_sum = hist_ref.sum()
                norm_hist_ref = hist_ref / ref_sum
                # get reference/fit peaks
                ref_peak_index = np.argmax(norm_hist_ref)   # index of max value of hist_ref
                ref_peak_val = norm_hist_ref[ref_peak_index]  # max value of hist_ref
                fit_peak_index = np.argmax(norm_hist_fit)   # index of max value of hist_fit
                fit_peak_val = norm_hist_fit[fit_peak_index]  # max value of hist_fit
                # estimate correction coefficients that match fit_peak to ref_peak
                alpha_est = fit_peak_val / ref_peak_val
                x_est = intensity_bin_edges[ref_peak_index] - \
                    alpha_est * intensity_bin_edges[fit_peak_index]
                init_pars = np.asarray([alpha_est, x_est], dtype=np.float64)
                # calculate alpha and x
                alpha_x_parameters = iit.optim_iit_linear(norm_hist_ref, norm_hist_fit,
                                                          intensity_bin_edges,
                                                          init_pars=init_pars)
                # save alpha and x to database
                db_funcs.store_iit_values(db_session, inst_pd_use, meth_name, meth_desc,
                                          alpha_x_parameters.x, create)

    end_time = time.time()
    tot_time = end_time - start_time
    # Bug fix: tot_time is in seconds, but was passed as timedelta(minutes=tot_time),
    # inflating the reported elapsed time by a factor of 60.
    time_tot = str(datetime.timedelta(seconds=tot_time))
    print(
        "Inter-instrument transformation fit parameters have been calculated and saved to the database."
    )
    print("Total elapsed time for IIT fit parameter calculation: " + time_tot)
    return None
elif use_db == 'mysql-Q':
    # setup database connection to MySQL database on Q
    db_session = db_funs.init_db_conn_old(db_name=use_db, chd_base=db_class.Base, user=user,
                                          password=password)

# first image (AIA)
query_min = datetime.datetime(2020, 7, 30, 15, 0, 0)
query_max = datetime.datetime(2020, 7, 30, 17, 0, 0)
instrument = ["AIA", ]
image_pd = db_funs.query_euv_images(db_session, time_min=query_min, time_max=query_max,
                                    instrument=instrument)
# delete the processed hdf file from disk, then clear its fname_hdf record in the DB
delete_path = os.path.join(hdf_data_dir, image_pd.fname_hdf.iloc[0])
os.remove(delete_path)
db_session = db_funs.update_image_val(db_session, image_pd.iloc[0], "fname_hdf", "")

# other two images (EUVI-A)
query_min = datetime.datetime(2020, 7, 30, 13, 0, 0)
query_max = datetime.datetime(2020, 7, 30, 17, 0, 0)
instrument = ["EUVI-A", ]
    # sqlite branch of a connection `if` that starts before this chunk
    db_session = db_funcs.init_db_conn_old(db_name=use_db, chd_base=db_class.Base,
                                           sqlite_path=sqlite_path)
elif use_db in ('mysql-Q', 'mysql-Q_test'):
    # setup database connection to MySQL database on Q
    db_session = db_funcs.init_db_conn_old(db_name=use_db, chd_base=db_class.Base, user=user,
                                           password=password)

#### STEP ONE: SELECT IMAGES ####
start_time = time.time()
# 1.) query some images; widen the window by del_interval_dt on each side so images near
# the boundaries can still be matched to a center date
query_pd = db_funcs.query_euv_images(db_session=db_session,
                                     time_min=query_time_min - del_interval_dt,
                                     time_max=query_time_max + del_interval_dt)

#### STEP TWO: APPLY PRE-PROCESSING CORRECTIONS ####
# 1.) get dates
moving_avg_centers = synch_utils.get_dates(time_min=query_time_min, time_max=query_time_max,
                                           map_freq=map_freq)

# 3.) loop through center dates
for date_ind, center in enumerate(moving_avg_centers):
    # choose which images to use in the same way we choose images for synchronic download
    synch_images, cluster_method = synch_utils.select_synchronic_images(
        center, del_interval, query_pd, inst_list)
    if synch_images is None:
        # no images fall in the appropriate range, skip
    var_names=None, var_descs=None, create=True)
# determine date of first AIA image
# min_aia_image = db_session.query(db_class.EUV_Images.date_obs)
for instrument in inst_list:
    print("Beginning loop for instrument:", instrument)
    # query EUV images
    query_instrument = [instrument, ]
    image_pd_all = db_funcs.query_euv_images(db_session=db_session,
                                             time_min=lbc_query_time_min,
                                             time_max=lbc_query_time_max,
                                             instrument=query_instrument,
                                             wavelength=wavelengths)
    # query LBCC histograms
    hist_pd = db_funcs.query_hist(db_session, meth_id=method_id[1],
                                  n_intensity_bins=n_intensity_bins, lat_band=lat_band,
                                  time_min=lbc_query_time_min, time_max=lbc_query_time_max,
                                  instrument=query_instrument, wavelength=wavelengths)
    if hist_pd.shape[0] == 0:
        # no existing histograms: use all images in range
        in_index = pd.Series([False] * image_pd_all.shape[0])
# Establish connection to database db_session = db_funcs.init_db_conn(db_type, db_class.Base, db_loc, db_name=mysql_db_name, user=user, password=password, cred_dir=cred_dir) # SAMPLE QUERY # use database session to query available pre-processed images query_time_min = datetime.datetime(2011, 2, 1, 0, 0, 0) query_time_max = datetime.datetime(2011, 2, 1, 12, 0, 0) image_pd = db_funcs.query_euv_images(db_session, time_min=query_time_min, time_max=query_time_max) # view a snapshot of the results image_pd.loc[:, ['date_obs', 'instrument', 'fname_hdf']] # open the first image image_path = os.path.join(hdf_data_dir, image_pd.fname_hdf[0]) psi_image = psi_dtypes.read_los_image(image_path) # plot deconvolved image psi_plots.PlotImage(psi_image) # CLOSE CONNECTION db_session.close()
# definitions plot_hist = full_hist[:, :, plot_index] date_obs = pd_hist.date_obs[plot_index] figure = "Original Histogram Plot: " # plot histogram Plotting.Plot_LBCC_Hists(plot_hist, date_obs, instrument, intensity_bin_edges, mu_bin_edges, figure, plot_index) #### APPLY LBC CORRECTION #### # query EUV images query_instrument = [ instrument, ] image_pd = query_euv_images(db_session=db_session, time_min=hist_plot_query_time_min, time_max=hist_plot_query_time_max, instrument=query_instrument) for index, row in image_pd.iterrows(): # apply LBC original_los, lbcc_image, mu_indices, use_indices = iit_funcs.apply_lbc_correction( db_session, hdf_data_dir, combo_query, row, n_intensity_bins=n_intensity_bins, R0=R0) #### CREATE NEW HISTOGRAMS #### # perform 2D histogram on mu and image intensity hdf_path = os.path.join(hdf_data_dir, row.fname_hdf) temp_hist = psi_d_types.LosImage.mu_hist(lbcc_image, intensity_bin_edges,
# creates mu bin & intensity bin arrays
# mu in [0.1, 1.0]; log10 intensity in [0, 5] — bin EDGES, hence the +1
mu_bin_edges = np.linspace(0.1, 1.0, n_mu_bins + 1, dtype='float')
image_intensity_bin_edges = np.linspace(0, 5, num=n_intensity_bins + 1, dtype='float')

# create LBC method (registers/fetches the method row; create=True inserts if missing)
meth_name = 'LBCC'
meth_desc = 'LBCC Theoretic Fit Method'
method_id = get_method_id(db_session, meth_name, meth_desc, var_names=None, var_descs=None,
                          create=True)

# loop over instrument
for instrument in inst_list:
    # query EUV images
    query_instrument = [instrument, ]
    query_pd_all = query_euv_images(db_session=db_session, time_min=hist_query_time_min,
                                    time_max=hist_query_time_max, instrument=query_instrument,
                                    wavelength=wavelengths)
    # query LBCC histograms
    hist_pd = query_hist(db_session, meth_id=method_id[1], n_mu_bins=n_mu_bins,
                         n_intensity_bins=n_intensity_bins, lat_band=lat_band,
                         time_min=hist_query_time_min, time_max=hist_query_time_max,
                         instrument=query_instrument, wavelength=wavelengths)
    # compare image results to hist results based on image_id
    in_index = query_pd_all.data_id.isin(hist_pd.image_id)
    # return only images that do not have corresponding histograms
    query_pd = query_pd_all[~in_index]
    # check that images remain that need histograms
    if query_pd.shape[0] == 0:
        # FIX: original message concatenated "All" directly onto the instrument
        # name (e.g. "AllAIA images..."); add the missing space.
        print("All " + instrument + " images in timeframe already have associated histograms.")
# NOTE(review): chunk begins inside an if/elif chain over a dimension index `i`;
# the earlier branches (and the loop over `i`, if any) are above this view, and
# the original indentation was lost — the placement of the np.array(...) lines
# relative to that loop is reconstructed and should be confirmed.
elif i == 2:
    # only read the z-scale if the HDF5 dimension actually carries one
    if (len(h5file[data_name].dims[2].keys()) != 0):
        z = h5file[data_name].dims[2][0]

# materialize the HDF5 datasets as numpy arrays before closing the file
x = np.array(x)
y = np.array(y)
z = np.array(z)
f = np.array(f)
h5file.close()

losAIA = psi_datatype.LosImage(f, x, y)

# query DB EUV images to use their meta data
euv_images = db_funs.query_euv_images(db_session, time_min=query_start, time_max=query_end)

# copy each instrument's metadata onto the corresponding LosImage;
# .item() assumes exactly one image per instrument in the query window
A_ind = euv_images.instrument == "EUVI-A"
A_fname = euv_images.fname_hdf[A_ind].item()
A_path = os.path.join(image_hdf_dir, A_fname)
A_los = psi_datatype.read_los_image(A_path)
losA.info = A_los.info

B_ind = euv_images.instrument == "EUVI-B"
B_fname = euv_images.fname_hdf[B_ind].item()
B_path = os.path.join(image_hdf_dir, B_fname)
B_los = psi_datatype.read_los_image(B_path)
losB.info = B_los.info

AIA_ind = euv_images.instrument == "AIA"
# db_session = db_funcs.init_db_conn_old(db_name=use_db, chd_base=db_class.Base, user=user, password=password) #### ----- GENERATE HISTOGRAMS ---- #### # start time start_time = time.time() #### GET REFERENCE INFO FOR LATER USE #### #### GET REFERENCE INFO FOR LATER USE #### # get index number of reference instrument ref_index = inst_list.index(ref_inst) # query euv images to get carrington rotation range ref_instrument = [ ref_inst, ] euv_images = db_funcs.query_euv_images(db_session, time_min=hist_query_time_min, time_max=hist_query_time_max, instrument=ref_instrument) # get min and max carrington rotation rot_max = euv_images.cr_rot.max() rot_min = euv_images.cr_rot.min() # method information meth_name = "IIT" method_id = db_funcs.get_method_id(db_session, meth_name, meth_desc=None, var_names=None, var_descs=None, create=False) # query for IIT histograms
# NOTE(review): chunk begins inside an if/elif chain over `use_db`; the opening
# branch (presumably sqlite) is above this view.
elif use_db == 'mysql-Q':
    # setup database connection to MySQL database on Q
    db_session = init_db_conn_old(db_name=use_db, chd_base=Base, user=user, password=password)

# setup the time range for the query
query_time_min = datetime.datetime(2014, 8, 13, 18, 0, 0)
query_time_max = datetime.datetime(2014, 8, 13, 20, 0, 0)
# query_time_min = datetime.datetime(2013, 1, 1, 0, 0, 0)
# query_time_max = datetime.datetime(2013, 12, 31, 23, 59, 59, 999999)

# query the database for each spacecraft type
fs = query_euv_images(db_session=db_session, time_min=query_time_min, time_max=query_time_max,
                      instrument=('AIA', ))
fa = query_euv_images(db_session=db_session, time_min=query_time_min, time_max=query_time_max,
                      instrument=('EUVI-A', ))
fb = query_euv_images(db_session=db_session, time_min=query_time_min, time_max=query_time_max,
                      instrument=('EUVI-B', ))

# merge the query results into one dataframe
df_all = pd.concat([fs, fa, fb], axis=0)
print(df_all)