def apply_ipp(db_session, hdf_data_dir, inst_list, row, methods_list, lbc_combo_query,
              iit_combo_query, n_intensity_bins=200, R0=1.01):
    """
    Apply image pre-processing corrections (limb-brightening, then inter-instrument
    transformation) to a single EUV image.

    @param db_session: database session from which to query correction variable values
    @param hdf_data_dir: directory of hdf5 image files
    @param inst_list: instrument list; used to select the matching combo-query entry
    @param row: (index, image_row) pair, e.g. one element from DataFrame.iterrows()
    @param methods_list: list of method dataframes; the entry at `index` is extended
    @param lbc_combo_query: list (of length number of instruments) of lbc image combo queries
    @param iit_combo_query: list (of length number of instruments) of iit image combo queries
    @param n_intensity_bins: number of intensity bins
    @param R0: radius at which the corrections are applied
    @return: original los image, iit-corrected image, updated methods list, used indices
    """
    start = time.time()
    index = row[0]
    image_row = row[1]
    inst_ind = inst_list.index(image_row.instrument)
    # apply LBC
    los_image, lbcc_image, mu_indices, use_ind, theoretic_query = lbcc_funcs.apply_lbc(
        db_session, hdf_data_dir, lbc_combo_query[inst_ind], image_row=image_row,
        n_intensity_bins=n_intensity_bins, R0=R0)
    # apply IIT
    lbcc_image, iit_image, use_indices, alpha, x = iit_funcs.apply_iit(
        db_session, iit_combo_query[inst_ind], lbcc_image, use_ind, los_image, R0=R0)
    # record the correction methods applied to this image
    ipp_method = {
        'meth_name': ("LBCC", "IIT"),
        'meth_description': ["LBCC Theoretic Fit Method", "IIT Fit Method"],
        'var_name': ("LBCC", "IIT"),
        'var_description': (" ", " ")
    }
    # DataFrame.append was deprecated in pandas 1.4 and removed in 2.0; pd.concat is the
    # drop-in replacement (same default ignore_index=False behavior)
    methods_list[index] = pd.concat(
        [methods_list[index], pd.DataFrame(data=ipp_method)], sort=False)
    end = time.time()
    print(
        "Image Pre-Processing Corrections (Limb-Brightening and Inter-Instrument Transformation) have been "
        "applied to image", image_row.data_id, "in", end - start, "seconds.")
    return los_image, iit_image, methods_list, use_indices
n_intensity_bins=n_intensity_bins, R0=R0) #### ORIGINAL LOS DATA #### # calculate IIT histogram from original data original_los_hist = psi_d_types.LosImage.iit_hist( original_los, intensity_bin_edges, lat_band, log10) # add 1D histogram to array original_hist_list[:, index] = original_los_hist #### CORRECTED DATA #### # apply IIT correction lbcc_image, iit_image, use_indices, alpha, x = iit_funcs.apply_iit( db_session, combo_query_iit, lbcc_image, use_indices, original_los, R0=R0) #### CREATE CORRECTED IIT HISTOGRAM ##### # calculate IIT histogram from LBC hist_iit = psi_d_types.IITImage.iit_hist(iit_image, lat_band, log10) # create IIT histogram datatype corrected_hist = psi_d_types.create_iit_hist(iit_image, method_id[1], lat_band, hist_iit) corrected_hist_list[:, index] = corrected_hist.hist # plotting definitions color_list = ['red', 'blue', 'black'] linestyle_list = ['solid', 'dashed', 'dashdot']
def apply_ipp(db_session, center_date, query_pd, inst_list, hdf_data_dir, lbc_combo_query,
              iit_combo_query, methods_list, n_intensity_bins=200, R0=1.01):
    """
    Apply image pre-processing (limb-brightening, inter-instrument transformation)
    corrections to EUV images for creation of maps.

    @param db_session: database session from which to query correction variable values
    @param center_date: date for querying
    @param query_pd: pandas dataframe of euv_images
    @param inst_list: instrument list
    @param hdf_data_dir: directory of hdf5 files
    @param lbc_combo_query: list (of length number of instruments) of lbc image combo queries
    @param iit_combo_query: list (of length number of instruments) of iit image combo queries
    @param methods_list: methods dataframe
    @param n_intensity_bins: number of intensity bins
    @param R0: radius
    @return: image dataframe, list of los images, list of iit images, indices used for
        correction, methods list, ref alpha, ref x
    """
    start = time.time()
    # per-instrument result lists
    image_pd = [None] * len(inst_list)
    los_list = [None] * len(inst_list)
    iit_list = [None] * len(inst_list)
    # placeholder entries; replaced per-instrument by the boolean index arrays from apply_iit
    use_indices = [(2048, 2048)] * len(inst_list)
    # convert date to correct format
    print("\nStarting corrections for", center_date, "images:")
    date_time = np.datetime64(center_date).astype(datetime.datetime)
    # reference alpha, x for threshold (taken from EUVI-A)
    sta_ind = inst_list.index('EUVI-A')
    ref_alpha, ref_x = db_funcs.query_var_val(db_session, meth_name='IIT', date_obs=date_time,
                                              inst_combo_query=iit_combo_query[sta_ind])
    # select images within +/- 10 minutes of the center date
    hist_date = query_pd['date_obs']
    date_pd = query_pd[
        (hist_date >= np.datetime64(date_time - datetime.timedelta(minutes=10)))
        & (hist_date <= np.datetime64(date_time + datetime.timedelta(minutes=10)))]
    if len(date_pd) == 0:
        print("No Images to Process for this date.")
    else:
        for inst_ind, instrument in enumerate(inst_list):
            # get image row for this instrument
            image_pd[inst_ind] = date_pd[date_pd['instrument'] == instrument]
            inst_image = date_pd[date_pd['instrument'] == instrument]
            if len(inst_image) == 0:
                print("No", instrument, "image to process for this date.")
            else:
                image_row = inst_image.iloc[0]
                # positional index of this instrument's row within date_pd
                index = np.where(date_pd['instrument'] == instrument)[0][0]
                print("Processing image number", image_row.data_id, "for LBC and IIT Corrections.")
                # apply LBC
                los_list[inst_ind], lbcc_image, mu_indices, use_ind, theoretic_query = \
                    lbcc_funcs.apply_lbc(db_session, hdf_data_dir, lbc_combo_query[inst_ind],
                                         image_row=image_row,
                                         n_intensity_bins=n_intensity_bins, R0=R0)
                # apply IIT
                lbcc_image, iit_list[inst_ind], use_indices[inst_ind], alpha, x = \
                    iit_funcs.apply_iit(db_session, iit_combo_query[inst_ind], lbcc_image,
                                        use_ind, los_list[inst_ind], R0=R0)
                # record the correction methods applied to this image. Parameter values
                # (theoretic_query, alpha, x) are not stored here — only the method names.
                ipp_method = {'meth_name': ("LBCC", "IIT"),
                              'meth_description': ["LBCC Theoretic Fit Method", "IIT Fit Method"],
                              'var_name': ("LBCC", "IIT"), 'var_description': (" ", " ")}
                # DataFrame.append was deprecated in pandas 1.4 and removed in 2.0;
                # pd.concat is the drop-in replacement
                methods_list[index] = pd.concat(
                    [methods_list[index], pd.DataFrame(data=ipp_method)], sort=False)
    end = time.time()
    print("Image Pre-Processing Corrections (Limb-Brightening and Inter-Instrument Transformation) have been "
          "applied "
          " in", end - start, "seconds.")
    return date_pd, los_list, iit_list, use_indices, methods_list, ref_alpha, ref_x
def apply_ipp_2(db_session, center_date, query_pd, inst_list, hdf_data_dir,
                n_intensity_bins=200, R0=1.01):
    """
    Function to apply image pre-processing (limb-brightening, inter-instrument
    transformation) corrections to EUV images for creation of maps.

    Three major differences from original function:
        1. Expects query_pd to contain all images that should be corrected. No temporal
           selection of images in this function.
        2. Queries the DB directly for previous/next IIT and LBC values. No need to input
           combo-query results.
        3. methods_list is generated internally and outputted to be appended to an
           existing list as appropriate.

    @param db_session: database session from which to query correction variable values
    @param center_date: date for querying
    @param query_pd: pandas dataframe of euv_images
    @param inst_list: instrument list
    @param hdf_data_dir: directory of hdf5 files
    @param n_intensity_bins: number of intensity bins
    @param R0: radius
    @return: image dataframe (identical to input 'query_pd', but retained for backward
        compatibility), list of los images, list of iit images, indices used for
        correction, methods list, ref alpha, ref x
    """
    start = time.time()
    # create per-image result lists
    n_images = query_pd.shape[0]
    los_list = [None] * n_images
    iit_list = [None] * n_images
    methods_list = db_funcs.generate_methdf(query_pd)
    # one default all-True mask per image. The original sized this list by len(inst_list),
    # which raises IndexError when there are more images than instruments; size by
    # n_images to match los_list/iit_list and the per-image loop below.
    use_indices = [np.full((2048, 2048), True, dtype=bool) for _ in range(n_images)]
    # convert date to correct format
    print("\nStarting corrections for", center_date, "images:")
    date_time = np.datetime64(center_date).astype(datetime.datetime)
    # reference alpha, x for threshold (taken from EUVI-A)
    euvia_iit = db_funcs.get_correction_pars(db_session, meth_name="IIT",
                                             date_obs=date_time, instrument='EUVI-A')
    ref_alpha = euvia_iit[0]
    ref_x = euvia_iit[1]
    # no temporal selection here; all input images are processed
    date_pd = query_pd
    if len(date_pd) == 0:
        print("No Images to Process for this date.")
    else:
        for index in range(date_pd.shape[0]):
            # get image row
            image_row = date_pd.iloc[index]
            print("Processing image number", image_row.data_id, "for LBC and IIT Corrections.")
            # apply LBC
            los_list[index], lbcc_image, mu_indices, use_ind, theoretic_query = \
                lbcc_funcs.apply_lbc_2(db_session, hdf_data_dir, image_row=image_row,
                                       n_intensity_bins=n_intensity_bins, R0=R0)
            # apply IIT. Unused points are NOT masked here; that is handled downstream
            # (minimum intensity merge), per the original author's note.
            lbcc_image, iit_list[index], use_indices[index], alpha, x = \
                iit_funcs.apply_iit_2(db_session, lbcc_image, use_ind, los_list[index], R0=R0)
            # record the correction methods applied to this image. LBCC/IIT parameter
            # values are not stored per-image for now — only the method names.
            ipp_method = {'meth_name': ("LBCC", "IIT"),
                          'meth_description': ["LBCC Theoretic Fit Method", "IIT Fit Method"],
                          'var_name': ("LBCC", "IIT"), 'var_description': (" ", " ")}
            # DataFrame.append was deprecated in pandas 1.4 and removed in 2.0;
            # pd.concat is the drop-in replacement
            methods_list[index] = pd.concat(
                [methods_list[index], pd.DataFrame(data=ipp_method)], sort=False)
    end = time.time()
    print("Image Pre-Processing Corrections (Limb-Brightening and Inter-Instrument Transformation) have been "
          "applied "
          " in", end - start, "seconds.")
    return date_pd, los_list, iit_list, use_indices, methods_list, ref_alpha, ref_x
# setup database connection create = True # true if you want to add to database use_db = "sqlite" sqlite_path = os.path.join(database_dir, sqlite_filename) db_session = init_db_conn_old(db_name=use_db, chd_base=db_class.Base, sqlite_path=sqlite_path) ##### ------ INTER INSTRUMENT TRANSFORMATION FUNCTIONS BELOW ------- ######## ##### STEP ONE: CREATE 1D HISTOGRAMS AND SAVE TO DATABASE ###### iit_funcs.create_histograms(db_session, inst_list, lbc_query_time_min, lbc_query_time_max, hdf_data_dir, n_intensity_bins=n_intensity_bins, lat_band=lat_band, log10=log10, R0=R0, wavelengths=wavelengths) ##### STEP TWO: CALCULATE INTER-INSTRUMENT TRANSFORMATION COEFFICIENTS AND SAVE TO DATABASE ###### iit_funcs.calc_iit_coefficients(db_session, inst_list, ref_inst, calc_query_time_min, calc_query_time_max, weekday=weekday, number_of_days=number_of_days, image_freq=image_freq, image_del=image_del,
for index, row in instrument_pd.iterrows(): print("Processing image number", row.data_id, "for LBC and IIT Corrections.") # apply LBC original_los, lbcc_image, mu_indices, use_indices, theoretic_query = lbcc_funcs.apply_lbc( db_session, hdf_data_dir, lbc_combo_query, image_row=row, n_intensity_bins=n_intensity_bins, R0=R0) # apply IIT lbcc_image, iit_image, use_indices, alpha, x = iit_funcs.apply_iit( db_session, hdf_data_dir, iit_combo_query, lbcc_image, use_indices, image_row=row, R0=R0) # Store Coronal Hole Map with data map? or as separate map-object? #chd_list = [None] * len(los_list) #for ii in range(len(los_list)): # call function to ezseg los_list[ii] # chd_list[ii] = ezseg_wrapper(los_list[ii]) # pass # use fixed map resolution map_image = iit_image.interp_to_map(R0=R0, map_x=map_x, map_y=map_y,
#### APPLY LBC CORRECTION #### # query EUV images query_instrument = [ instrument, ] image_pd = query_euv_images(db_session=db_session, time_min=hist_plot_query_time_min, time_max=hist_plot_query_time_max, instrument=query_instrument) for index, row in image_pd.iterrows(): # apply LBC original_los, lbcc_image, mu_indices, use_indices = iit_funcs.apply_lbc_correction( db_session, hdf_data_dir, combo_query, row, n_intensity_bins=n_intensity_bins, R0=R0) #### CREATE NEW HISTOGRAMS #### # perform 2D histogram on mu and image intensity hdf_path = os.path.join(hdf_data_dir, row.fname_hdf) temp_hist = psi_d_types.LosImage.mu_hist(lbcc_image, intensity_bin_edges, mu_bin_edges, lat_band=lat_band, log10=log10) hist_lbcc = psi_d_types.create_lbcc_hist(hdf_path, row.data_id, method_id[1], mu_bin_edges, intensity_bin_edges,
def plot_iit_histograms(db_session, hdf_data_dir, hist_query_time_min, hist_query_time_max,
                        inst_list, ref_inst, n_intensity_bins=200, lat_band=None, R0=1.01,
                        log10=True):
    """
    Apply LBC/IIT corrections over a time range and plot per-instrument 1D intensity
    histograms of the original, post-LBC, and post-IIT data.

    @param db_session: database session from which to query histograms and correction values
    @param hdf_data_dir: directory of hdf5 image files
    @param hist_query_time_min: minimum time for histogram/image queries
    @param hist_query_time_max: maximum time for histogram/image queries
    @param inst_list: list of instruments to process and plot
    @param ref_inst: reference instrument used to determine the carrington rotation range
    @param n_intensity_bins: number of intensity bins
    @param lat_band: latitude band for histogram calculation; defaults to
        [-pi/2.4, pi/2.4]. (Default is None to avoid a shared mutable default list.)
    @param R0: radius at which the corrections are applied
    @param log10: whether intensities are binned in log10 space
    @return: None
    """
    if lat_band is None:
        lat_band = [-np.pi / 2.4, np.pi / 2.4]
    # start time
    start_time = time.time()
    #### GET REFERENCE INFO FOR LATER USE ####
    # query euv images of the reference instrument to get carrington rotation range
    ref_instrument = [ref_inst, ]
    euv_images = db_funcs.query_euv_images(db_session, time_min=hist_query_time_min,
                                           time_max=hist_query_time_max,
                                           instrument=ref_instrument)
    # get min and max carrington rotation
    rot_max = euv_images.cr_rot.max()
    rot_min = euv_images.cr_rot.min()
    # method information
    meth_name = "IIT"
    method_id = db_funcs.get_method_id(db_session, meth_name, meth_desc=None,
                                       var_names=None, var_descs=None, create=False)
    # query for IIT histograms
    pd_lbc_hist = db_funcs.query_hist(db_session=db_session, meth_id=method_id[1],
                                      n_intensity_bins=n_intensity_bins, lat_band=lat_band,
                                      time_min=hist_query_time_min,
                                      time_max=hist_query_time_max)
    pd_lbc_hist_srt = pd_lbc_hist.sort_values(by=['image_id'])
    # convert the binary types back to arrays
    mu_bin_edges, intensity_bin_edges, full_lbc_hist = psi_d_types.binary_to_hist(
        pd_lbc_hist_srt, n_mu_bins=None, n_intensity_bins=n_intensity_bins)
    # accumulators for original/corrected histograms (same shape as the stored LBC hists)
    original_hist_list = np.full(full_lbc_hist.shape, 0, dtype=np.int64)
    corrected_hist_list = np.full(full_lbc_hist.shape, 0, dtype=np.int64)
    for inst_index, instrument in enumerate(inst_list):
        print("Applying corrections for", instrument)
        #### QUERY IMAGES ####
        query_instrument = [instrument, ]
        rot_images = db_funcs.query_euv_images_rot(db_session, rot_min=rot_min,
                                                   rot_max=rot_max,
                                                   instrument=query_instrument)
        image_pd = rot_images.sort_values(by=['cr_rot'])
        # query correct image combos. The original code queried these twice (first with
        # the instrument's own time range, then with the hist query range); the second
        # query overwrote the first, so only the effective queries are kept here.
        combo_query_lbc = db_funcs.query_inst_combo(db_session, hist_query_time_min,
                                                    hist_query_time_max,
                                                    meth_name="LBCC", instrument=instrument)
        combo_query_iit = db_funcs.query_inst_combo(db_session, hist_query_time_min,
                                                    hist_query_time_max,
                                                    meth_name="IIT", instrument=instrument)
        for index, row in image_pd.iterrows():
            # apply LBC
            original_los, lbcc_image, mu_indices, use_indices, theoretic_query = \
                lbcc_funcs.apply_lbc(db_session, hdf_data_dir, combo_query_lbc,
                                     image_row=row,
                                     n_intensity_bins=n_intensity_bins, R0=R0)
            #### ORIGINAL LOS DATA ####
            # calculate IIT histogram from original data
            original_los_hist = psi_d_types.LosImage.iit_hist(
                original_los, intensity_bin_edges, lat_band, log10)
            # NOTE(review): `index` is the dataframe label from iterrows(); this assumes
            # labels are 0-based positions aligned with the queried histogram columns —
            # confirm against query_hist/query_euv_images_rot ordering.
            original_hist_list[:, index] = original_los_hist
            #### CORRECTED DATA ####
            # apply IIT correction
            lbcc_image, iit_image, use_indices, alpha, x = iit_funcs.apply_iit(
                db_session, combo_query_iit, lbcc_image, use_indices, original_los, R0=R0)
            #### CREATE CORRECTED IIT HISTOGRAM #####
            # calculate IIT histogram from LBC
            hist_iit = psi_d_types.IITImage.iit_hist(iit_image, lat_band, log10)
            # create IIT histogram datatype
            corrected_hist = psi_d_types.create_iit_hist(iit_image, method_id[1],
                                                         lat_band, hist_iit)
            corrected_hist_list[:, index] = corrected_hist.hist
    # plotting definitions
    color_list = ['red', 'blue', 'black']
    linestyle_list = ['solid', 'dashed', 'dashdot']
    #### CREATE NEW HISTOGRAM ####
    for inst_index, instrument in enumerate(inst_list):
        print("Plotting Histograms for", instrument)
        #### GET INDICES TO USE ####
        # columns of the histogram arrays belonging to this instrument
        hist_inst = pd_lbc_hist_srt['instrument']
        pd_inst_index = hist_inst[hist_inst == instrument].index
        #### ORIGINAL HISTOGRAM #####
        # sum histograms over this instrument's images, then normalize
        original_hist = original_hist_list[:, pd_inst_index].sum(axis=1)
        row_sums = original_hist.sum(axis=0, keepdims=True)
        norm_original_hist = original_hist / row_sums
        # plot original
        Plotting.Plot1d_Hist(norm_original_hist, instrument, inst_index,
                             intensity_bin_edges, color_list, linestyle_list, figure=100,
                             xlabel="Intensity (log10)", ylabel="H(I)",
                             title="Histogram: Original LOS Data")
        #### LBCC HISTOGRAM #####
        lbc_hist = full_lbc_hist[:, pd_inst_index].sum(axis=1)
        lbc_sums = lbc_hist.sum(axis=0, keepdims=True)
        norm_lbc_hist = lbc_hist / lbc_sums
        # plot lbcc
        Plotting.Plot1d_Hist(norm_lbc_hist, instrument, inst_index, intensity_bin_edges,
                             color_list, linestyle_list, figure=200,
                             xlabel="Intensity (log10)", ylabel="H(I)",
                             title="Histogram: Post LBCC")
        #### CORRECTED HISTOGRAM ####
        corrected_hist = corrected_hist_list[:, pd_inst_index].sum(axis=1)
        iit_sums = corrected_hist.sum(axis=0, keepdims=True)
        norm_corrected_hist = corrected_hist / iit_sums
        # plot corrected
        Plotting.Plot1d_Hist(norm_corrected_hist, instrument, inst_index,
                             intensity_bin_edges, color_list, linestyle_list, figure=300,
                             xlabel="Intensity (log10)", ylabel="H(I)",
                             title="Histogram: Post IIT")
    # end time
    end_time = time.time()
    print("ITT has been applied and original/resulting histograms plotted.")
    print("Total elapsed time to apply correction and plot histograms: " +
          str(round(end_time - start_time, 3)) + " seconds.")
    return None