Example #1
import time
import datetime

import numpy as np
import pandas as pd

# db_funcs, lbcc_funcs, and iit_funcs are project modules (database helpers,
# limb-brightening correction, and inter-instrument transformation); import them
# according to the repository layout.


def apply_ipp_2(db_session, center_date, query_pd, inst_list, hdf_data_dir,
                n_intensity_bins=200, R0=1.01):
    """
    Apply image pre-processing corrections (limb-brightening and inter-instrument
    transformation) to EUV images for map creation. Three major differences from the
    original function:
        1. Expects query_pd to contain all images that should be corrected. No temporal
           selection of images is done in this function.
        2. Queries the DB directly for previous/next IIT and LBC values. No need to input
           combo-query results.
        3. methods_list is generated internally and returned so it can be appended to an
           existing list as appropriate.
    @param db_session: database session from which to query correction variable values
    @param center_date: date for querying
    @param query_pd: pandas dataframe of euv_images
    @param inst_list: instrument list
    @param hdf_data_dir: directory of hdf5 files
    @param n_intensity_bins: number of intensity bins
    @param R0: radius (in solar radii) used for the corrections
    @return: image dataframe (identical to input 'query_pd', but retained for backward compatibility),
             list of los images,
             list of iit images,
             indices used for correction,
             methods list,
             ref alpha, ref x
    """
    start = time.time()
    # create image lists
    n_images = query_pd.shape[0]
    los_list = [None] * n_images
    iit_list = [None] * n_images
    methods_list = db_funcs.generate_methdf(query_pd)
    # placeholder full-disk masks, one slot per instrument (each slot is overwritten per image below)
    use_indices = [np.full((2048, 2048), True, dtype=bool)] * len(inst_list)
    # convert date to correct format
    print("\nStarting corrections for", center_date, "images:")
    date_time = np.datetime64(center_date).astype(datetime.datetime)
    # alpha, x for threshold
    euvia_iit = db_funcs.get_correction_pars(db_session, meth_name="IIT",
                                             date_obs=date_time, instrument='EUVI-A')
    ref_alpha = euvia_iit[0]
    ref_x = euvia_iit[1]

    # alias of the input dataframe, retained for backward compatibility (see return description)
    date_pd = query_pd
    if len(date_pd) == 0:
        print("No Images to Process for this date.")
    else:
        for index in range(date_pd.shape[0]):
            # get image row
            image_row = date_pd.iloc[index]
            print("Processing image number", image_row.data_id, "for LBC and IIT Corrections.")
            # apply LBC
            los_list[index], lbcc_image, mu_indices, use_ind, theoretic_query = \
                lbcc_funcs.apply_lbc_2(db_session, hdf_data_dir, image_row=image_row,
                                       n_intensity_bins=n_intensity_bins, R0=R0)
            # update method with LBCC parameter values? Would need to associate each LBCC
            #   parameter set with an image # and store in DB. For now, simply record method
            #   without values. Same for IIT below.
            # apply IIT
            lbcc_image, iit_list[index], use_indices[index], alpha, x = \
                iit_funcs.apply_iit_2(db_session, lbcc_image, use_ind,
                                      los_list[index], R0=R0)
            # set unused points to no_data_val
            # if los_list[index].no_data_val is None:
            #     no_data_val = -9999.0
            #     iit_list[index].no_data_val = no_data_val
            # else:
            #     no_data_val = iit_list[index].no_data_val
            # iit_list[index].iit_data[~use_indices[index]] = no_data_val
            # JT - this should be handled in minimum intensity merge, not here

            # add methods to dataframe
            ipp_method = {'meth_name': ("LBCC", "IIT"),
                          'meth_description': ["LBCC Theoretic Fit Method", "IIT Fit Method"],
                          'var_name': ("LBCC", "IIT"), 'var_description': (" ", " ")}
            # DataFrame.append() was removed in pandas 2.0; concatenate the method rows instead
            methods_list[index] = pd.concat([methods_list[index], pd.DataFrame(data=ipp_method)],
                                            sort=False)
            # methods_list[inst_ind] = pd.DataFrame(data=ipp_method)
        end = time.time()
        print("Image Pre-Processing Corrections (Limb-Brightening and Inter-Instrument Transformation) have been "
              "applied "
              " in", end - start, "seconds.")

    return date_pd, los_list, iit_list, use_indices, methods_list, ref_alpha, ref_x
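
A minimal usage sketch (db_session, center, synch_images, inst_list, and hdf_data_dir are placeholder variables, set up as in Example #2 below); the seven return values match the docstring above:

    date_pd, los_list, iit_list, use_indices, methods_list, ref_alpha, ref_x = \
        apply_ipp_2(db_session, center, synch_images, inst_list, hdf_data_dir)
    # los_list[i] / iit_list[i] hold the LBC- and IIT-corrected versions of image i;
    # ref_alpha / ref_x are the EUVI-A IIT parameters returned for later thresholding
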
Example #2
if use_db == 'sqlite':
    # setup database connection to local sqlite database file
    db_session = db_funcs.init_db_conn(db_name=use_db,
                                       chd_base=db_class.Base,
                                       sqlite_path=sqlite_path)
elif use_db == 'mysql-Q':
    # setup database connection to MySQL database on Q
    db_session = db_funcs.init_db_conn(db_name=use_db,
                                       chd_base=db_class.Base,
                                       user=user,
                                       password=password)

#### STEP ONE: SELECT IMAGES ####
# 1.) query some images
query_pd = db_funcs.query_euv_images(db_session=db_session,
                                     time_min=query_time_min,
                                     time_max=query_time_max)

# 2.) generate a dataframe to record methods
methods_list = db_funcs.generate_methdf(query_pd)

#### STEP TWO: APPLY PRE-PROCESSING CORRECTIONS ####
# 1.) get dates
moving_avg_centers = synch_utils.get_dates(time_min=query_time_min,
                                           time_max=query_time_max,
                                           map_freq=map_freq)

# 2.) loop through center dates
h5file = h5.File(h5_filename, 'w')  # open HDF5 output file for writing
for date_ind, center in enumerate(moving_avg_centers):
    # choose which images to use in the same way we choose images for synchronic download
    synch_images, cluster_method = synch_utils.select_synchronic_images(
        center, del_interval, query_pd, inst_list)
    if synch_images is None:
        # no images fall in the appropriate range, skip this center date
        continue
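
The excerpt ends inside the per-date loop; one plausible continuation (an assumption, not the author's verbatim code) is to hand the selected images to apply_ipp_2 from Example #1 and close the HDF5 file once the loop finishes:

    # correct the images selected for this center date (LBC + IIT)
    ipp_results = apply_ipp_2(db_session, center, synch_images, inst_list, hdf_data_dir)

h5file.close()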