Example #1
def apply_iit(db_session,
              inst_combo_query,
              lbcc_image,
              use_indices,
              los_image,
              R0=1.01):
    ###### GET VARIABLE VALUES #####
    meth_name = "IIT"
    method_id_info = db_funcs.get_method_id(db_session,
                                            meth_name,
                                            meth_desc=None,
                                            var_names=None,
                                            var_descs=None,
                                            create=False)

    alpha_x_parameters = db_funcs.query_var_val(
        db_session,
        meth_name,
        date_obs=lbcc_image.date_obs,
        inst_combo_query=inst_combo_query)
    alpha, x = alpha_x_parameters

    ##### APPLY IIT TRANSFORMATION ######
    lbcc_data = lbcc_image.lbcc_data
    corrected_iit_data = np.copy(lbcc_data)
    corrected_iit_data[use_indices] = 10**(
        alpha * np.log10(lbcc_data[use_indices]) + x)
    # create IIT datatype
    iit_image = psi_d_types.create_iit_image(los_image, lbcc_image,
                                             corrected_iit_data,
                                             method_id_info[0])
    psi_d_types.LosImage.get_coordinates(iit_image, R0=R0)

    return lbcc_image, iit_image, use_indices, alpha, x
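
A minimal sketch of the IIT transform itself, with synthetic data and illustrative alpha/x values (in the pipeline these coefficients come from the database query above): the correction is linear in log10 intensity space and is applied only at use_indices.

import numpy as np

# synthetic LBC-corrected intensities (positive values)
rng = np.random.default_rng(0)
lbcc_data = rng.lognormal(mean=2.0, sigma=0.5, size=(4, 4))

# boolean mask of pixels to correct (illustrative: all pixels)
use_indices = lbcc_data > 0.0

# illustrative IIT coefficients (assumed, not queried values)
alpha, x = 1.05, -0.10

# IIT correction: linear transform in log10 space
corrected_iit_data = np.copy(lbcc_data)
corrected_iit_data[use_indices] = 10 ** (
    alpha * np.log10(lbcc_data[use_indices]) + x)
print(corrected_iit_data)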
Example #2
def apply_iit_2(db_session, lbcc_image, use_indices, los_image, R0=1.01):
    """
    Different from apply_iit() because it does not require pre-queried iit_combos.
    This function finds the previous and next IIT coefs based on lbcc_image.date_obs
    Parameters
    ----------
    db_session
    lbcc_image
    use_indices
    los_image
    R0

    Returns
    -------

    """
    ###### GET VARIABLE VALUES #####
    meth_name = "IIT"
    method_id_info = db_funcs.get_method_id(db_session,
                                            meth_name,
                                            meth_desc=None,
                                            var_names=None,
                                            var_descs=None,
                                            create=False)

    theoretic_query = db_funcs.get_correction_pars(
        db_session,
        meth_name,
        date_obs=lbcc_image.date_obs,
        instrument=lbcc_image.instrument)
    # separate alpha and x
    alpha = theoretic_query[0]
    x = theoretic_query[1]

    ##### APPLY IIT TRANSFORMATION ######
    lbcc_data = lbcc_image.lbcc_data
    corrected_iit_data = np.copy(lbcc_data)
    corrected_iit_data[use_indices] = 10**(
        alpha * np.log10(lbcc_data[use_indices]) + x)
    # create IIT datatype
    iit_image = psi_d_types.create_iit_image(los_image, lbcc_image,
                                             corrected_iit_data,
                                             method_id_info[0])
    psi_d_types.LosImage.get_coordinates(iit_image, R0=R0)

    return lbcc_image, iit_image, use_indices, alpha, x
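
Per the docstring, apply_iit_2() relies on db_funcs.get_correction_pars() to bracket lbcc_image.date_obs with the previous and next stored IIT coefficients. A rough sketch of that idea, assuming simple linear interpolation in time between two hypothetical (alpha, x) pairs (the real logic lives in the database layer and may differ):

import datetime
import numpy as np

def interp_iit_pars(date_obs, t_prev, pars_prev, t_next, pars_next):
    # linearly interpolate (alpha, x) between the bracketing coefficient sets
    frac = (date_obs - t_prev) / (t_next - t_prev)
    return (1.0 - frac) * np.asarray(pars_prev) + frac * np.asarray(pars_next)

alpha, x = interp_iit_pars(datetime.datetime(2012, 1, 5),
                           datetime.datetime(2012, 1, 2), (1.02, -0.05),
                           datetime.datetime(2012, 1, 9), (1.06, -0.09))
print(alpha, x)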
Example #3
ref_instrument = [
    ref_inst,
]
euv_images = db_funcs.query_euv_images(db_session,
                                       time_min=hist_query_time_min,
                                       time_max=hist_query_time_max,
                                       instrument=ref_instrument)
# get min and max carrington rotation
rot_max = euv_images.cr_rot.max()
rot_min = euv_images.cr_rot.min()

# method information
meth_name = "IIT"
method_id = db_funcs.get_method_id(db_session,
                                   meth_name,
                                   meth_desc=None,
                                   var_names=None,
                                   var_descs=None,
                                   create=False)

# query for IIT histograms
pd_lbc_hist = db_funcs.query_hist(db_session=db_session,
                                  meth_id=method_id[1],
                                  n_intensity_bins=n_intensity_bins,
                                  lat_band=lat_band,
                                  time_min=hist_query_time_min,
                                  time_max=hist_query_time_max)
pd_lbc_hist_srt = pd_lbc_hist.sort_values(by=['image_id'])
# convert the binary types back to arrays
mu_bin_edges, intensity_bin_edges, full_lbc_hist = psi_d_types.binary_to_hist(
    pd_lbc_hist_srt, n_mu_bins=None, n_intensity_bins=n_intensity_bins)
# create corrected/original histograms
Example #4
if use_db == 'sqlite':  # assumed condition: this snippet begins mid-conditional
    # setup database connection to local sqlite file
    db_session = init_db_conn_old(db_name=use_db, chd_base=db_class.Base, sqlite_path=sqlite_path)
elif use_db in ['mysql-Q', 'mysql-Q_test']:
    # setup database connection to MySQL database on Q
    db_session = init_db_conn_old(db_name=use_db, chd_base=db_class.Base, user=user, password=password)

# start time
start_time_tot = time.time()

# creates mu bin & intensity bin arrays
mu_bin_edges = np.linspace(0.1, 1.0, n_mu_bins + 1, dtype='float')
image_intensity_bin_edges = np.linspace(0, 5, num=n_intensity_bins + 1, dtype='float')

# create LBC method
meth_name = 'LBCC'
meth_desc = 'LBCC Theoretic Fit Method'
method_id = get_method_id(db_session, meth_name, meth_desc, var_names=None, var_descs=None, create=True)

# loop over instrument
for instrument in inst_list:
    # query EUV images
    query_instrument = [instrument, ]
    query_pd_all = query_euv_images(db_session=db_session, time_min=hist_query_time_min,
                                    time_max=hist_query_time_max, instrument=query_instrument,
                                    wavelength=wavelengths)
    # query LBCC histograms
    hist_pd = query_hist(db_session, meth_id=method_id[1], n_mu_bins=n_mu_bins, n_intensity_bins=n_intensity_bins,
                         lat_band=lat_band, time_min=hist_query_time_min, time_max=hist_query_time_max,
                         instrument=query_instrument, wavelength=wavelengths)

    # compare image results to hist results based on image_id
    in_index = query_pd_all.data_id.isin(hist_pd.image_id)
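
The isin() comparison above is the pattern used throughout these scripts to skip images that already have histograms. A self-contained sketch with toy DataFrames (column names mirror the snippet; the values are made up):

import pandas as pd

query_pd_all = pd.DataFrame({"data_id": [1, 2, 3, 4]})
hist_pd = pd.DataFrame({"image_id": [2, 4]})

# True where an image already has an associated histogram
in_index = query_pd_all.data_id.isin(hist_pd.image_id)

# keep only images that still need histograms
images_todo = query_pd_all[~in_index]
print(images_todo)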
Example #5
mu_bin_centers = (mu_bin_array[1:] + mu_bin_array[:-1]) / 2

# time arrays
# returns array of moving averages center dates, based off start and end date
moving_avg_centers, moving_width = lbcc.moving_averages(query_time_min,
                                                        query_time_max,
                                                        weekday,
                                                        days=number_of_days)

# retrieve beta and y for all instruments
# plot all three instruments beta plot, then y plot
linestyles = ['dashed']
marker_types = ['None']

meth_name = 'IIT'
method_id = db_funs.get_method_id(db_session, meth_name, create=False)

plot_alpha = np.zeros((len(moving_avg_centers), len(inst_list)))
plot_x = np.zeros((len(moving_avg_centers), len(inst_list)))
plot_mean = np.zeros((len(moving_avg_centers), len(inst_list)))
plot_std = np.zeros((len(moving_avg_centers), len(inst_list)))
plot_act_mean = np.zeros((len(moving_avg_centers), len(inst_list)))
plot_mean_act = np.zeros((len(moving_avg_centers), len(inst_list)))

for inst_index, instrument in enumerate(inst_list):
    print("Interpolating IIT parameters for " + instrument + ".")

    # get all instrument histograms
    inst_hist_pd = db_funs.query_hist(
        db_session=db_session,
        meth_id=method_id[1],
Example #6
def plot_iit_histograms(db_session,
                        hdf_data_dir,
                        hist_query_time_min,
                        hist_query_time_max,
                        inst_list,
                        ref_inst,
                        n_intensity_bins=200,
                        lat_band=[-np.pi / 2.4, np.pi / 2.4],
                        R0=1.01,
                        log10=True):
    # start time
    start_time = time.time()

    #### GET REFERENCE INFO FOR LATER USE ####
    # get index number of reference instrument
    ref_index = inst_list.index(ref_inst)
    # query euv images to get carrington rotation range
    ref_instrument = [
        ref_inst,
    ]
    euv_images = db_funcs.query_euv_images(db_session,
                                           time_min=hist_query_time_min,
                                           time_max=hist_query_time_max,
                                           instrument=ref_instrument)
    # get min and max carrington rotation
    rot_max = euv_images.cr_rot.max()
    rot_min = euv_images.cr_rot.min()

    # method information
    meth_name = "IIT"
    method_id = db_funcs.get_method_id(db_session,
                                       meth_name,
                                       meth_desc=None,
                                       var_names=None,
                                       var_descs=None,
                                       create=False)

    # query for IIT histograms
    pd_lbc_hist = db_funcs.query_hist(db_session=db_session,
                                      meth_id=method_id[1],
                                      n_intensity_bins=n_intensity_bins,
                                      lat_band=lat_band,
                                      time_min=hist_query_time_min,
                                      time_max=hist_query_time_max)
    pd_lbc_hist_srt = pd_lbc_hist.sort_values(by=['image_id'])
    # convert the binary types back to arrays
    mu_bin_edges, intensity_bin_edges, full_lbc_hist = psi_d_types.binary_to_hist(
        pd_lbc_hist_srt, n_mu_bins=None, n_intensity_bins=n_intensity_bins)
    # create corrected/original histograms
    original_hist_list = np.full(full_lbc_hist.shape, 0, dtype=np.int64)
    corrected_hist_list = np.full(full_lbc_hist.shape, 0, dtype=np.int64)
    for inst_index, instrument in enumerate(inst_list):
        print("Applying corrections for", instrument)
        #### QUERY IMAGES ####
        query_instrument = [
            instrument,
        ]
        rot_images = db_funcs.query_euv_images_rot(db_session,
                                                   rot_min=rot_min,
                                                   rot_max=rot_max,
                                                   instrument=query_instrument)
        image_pd = rot_images.sort_values(by=['cr_rot'])
        # get time minimum and maximum for instrument
        inst_time_min = rot_images.date_obs.min()
        inst_time_max = rot_images.date_obs.max()
        # query the correct image combos for LBC and IIT
        combo_query_lbc = db_funcs.query_inst_combo(db_session,
                                                    hist_query_time_min,
                                                    hist_query_time_max,
                                                    meth_name="LBCC",
                                                    instrument=instrument)
        combo_query_iit = db_funcs.query_inst_combo(db_session,
                                                    hist_query_time_min,
                                                    hist_query_time_max,
                                                    meth_name="IIT",
                                                    instrument=instrument)
        for index, row in image_pd.iterrows():
            # apply LBC
            original_los, lbcc_image, mu_indices, use_indices, theoretic_query = lbcc_funcs.apply_lbc(
                db_session,
                hdf_data_dir,
                combo_query_lbc,
                image_row=row,
                n_intensity_bins=n_intensity_bins,
                R0=R0)

            #### ORIGINAL LOS DATA ####
            # calculate IIT histogram from original data
            original_los_hist = psi_d_types.LosImage.iit_hist(
                original_los, intensity_bin_edges, lat_band, log10)
            # add 1D histogram to array
            original_hist_list[:, index] = original_los_hist

            #### CORRECTED DATA ####
            # apply IIT correction
            lbcc_image, iit_image, use_indices, alpha, x = iit_funcs.apply_iit(
                db_session,
                combo_query_iit,
                lbcc_image,
                use_indices,
                original_los,
                R0=R0)

            #### CREATE CORRECTED IIT HISTOGRAM #####
            # calculate IIT histogram from the IIT-corrected image
            hist_iit = psi_d_types.IITImage.iit_hist(iit_image, lat_band,
                                                     log10)
            # create IIT histogram datatype
            corrected_hist = psi_d_types.create_iit_hist(
                iit_image, method_id[1], lat_band, hist_iit)
            corrected_hist_list[:, index] = corrected_hist.hist

    # plotting definitions
    color_list = ['red', 'blue', 'black']
    linestyle_list = ['solid', 'dashed', 'dashdot']

    #### CREATE NEW HISTOGRAM ####
    for inst_index, instrument in enumerate(inst_list):
        print("Plotting Histograms for", instrument)
        #### GET INDICES TO USE ####
        # get index of instrument in histogram dataframe
        hist_inst = pd_lbc_hist_srt['instrument']
        pd_inst_index = hist_inst[hist_inst == instrument].index

        #### ORIGINAL HISTOGRAM #####
        # define histogram
        original_hist = original_hist_list[:, pd_inst_index].sum(axis=1)
        # normalize histogram
        row_sums = original_hist.sum(axis=0, keepdims=True)
        norm_original_hist = original_hist / row_sums

        # plot original
        Plotting.Plot1d_Hist(norm_original_hist,
                             instrument,
                             inst_index,
                             intensity_bin_edges,
                             color_list,
                             linestyle_list,
                             figure=100,
                             xlabel="Intensity (log10)",
                             ylabel="H(I)",
                             title="Histogram: Original LOS Data")

        #### LBCC HISTOGRAM #####
        # define histogram
        lbc_hist = full_lbc_hist[:, pd_inst_index].sum(axis=1)
        # normalize histogram
        lbc_sums = lbc_hist.sum(axis=0, keepdims=True)
        norm_lbc_hist = lbc_hist / lbc_sums

        # plot lbcc
        Plotting.Plot1d_Hist(norm_lbc_hist,
                             instrument,
                             inst_index,
                             intensity_bin_edges,
                             color_list,
                             linestyle_list,
                             figure=200,
                             xlabel="Intensity (log10)",
                             ylabel="H(I)",
                             title="Histogram: Post LBCC")

        #### CORRECTED HISTOGRAM ####
        # define histogram
        corrected_hist = corrected_hist_list[:, pd_inst_index].sum(axis=1)
        # normalize histogram
        iit_sums = corrected_hist.sum(axis=0, keepdims=True)
        norm_corrected_hist = corrected_hist / iit_sums

        # plot corrected
        Plotting.Plot1d_Hist(norm_corrected_hist,
                             instrument,
                             inst_index,
                             intensity_bin_edges,
                             color_list,
                             linestyle_list,
                             figure=300,
                             xlabel="Intensity (log10)",
                             ylabel="H(I)",
                             title="Histogram: Post IIT")

    # end time
    end_time = time.time()
    print("ITT has been applied and original/resulting histograms plotted.")
    print("Total elapsed time to apply correction and plot histograms: " +
          str(round(end_time - start_time, 3)) + " seconds.")

    return None
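
Each Plot1d_Hist() call above is fed a per-instrument histogram that has been summed over that instrument's images and normalized to unit area. A numpy-only sketch of that reduction with synthetic counts (Plotting.Plot1d_Hist itself belongs to the project's plotting module):

import numpy as np

n_bins, n_images = 200, 6
rng = np.random.default_rng(1)
hist_list = rng.integers(0, 50, size=(n_bins, n_images))  # counts per (bin, image)

# columns belonging to one instrument (illustrative indices)
pd_inst_index = [0, 2, 4]

inst_hist = hist_list[:, pd_inst_index].sum(axis=1)  # sum over the instrument's images
norm_hist = inst_hist / inst_hist.sum()              # normalize to unit area
print(norm_hist.sum())  # ~1.0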
Example #7
def create_histograms(db_session,
                      inst_list,
                      lbc_query_time_min,
                      lbc_query_time_max,
                      hdf_data_dir,
                      n_intensity_bins=200,
                      lat_band=[-np.pi / 2.4, np.pi / 2.4],
                      log10=True,
                      R0=1.01,
                      wavelengths=None):
    """
    create and save (to database) IIT-Histograms from LBC Data


    @param db_session: connected db session for querying EUV images and saving histograms
    @param inst_list: list of instruments
    @param lbc_query_time_min: minimum query time for applying lbc fit
    @param lbc_query_time_max: maximum query time for applying lbc fit
    @param hdf_data_dir: directory of processed images to plot original images
    @param n_intensity_bins: number of intensity bins
    @param lat_band: latitude band
    @param log10: boolean value
    @param R0: radius
    @return: None, saves histograms to database
    """
    # start time
    start_time = time.time()

    # create IIT method
    meth_name = "IIT"
    meth_desc = "IIT Fit Method"
    method_id = db_funcs.get_method_id(db_session,
                                       meth_name,
                                       meth_desc,
                                       var_names=None,
                                       var_descs=None,
                                       create=True)

    for instrument in inst_list:
        print("Beginning loop for instrument:", instrument)
        # query EUV images
        query_instrument = [
            instrument,
        ]
        image_pd_all = db_funcs.query_euv_images(db_session=db_session,
                                                 time_min=lbc_query_time_min,
                                                 time_max=lbc_query_time_max,
                                                 instrument=query_instrument,
                                                 wavelength=wavelengths)
        # query LBCC histograms
        hist_pd = db_funcs.query_hist(db_session,
                                      meth_id=method_id[1],
                                      n_intensity_bins=n_intensity_bins,
                                      lat_band=lat_band,
                                      time_min=lbc_query_time_min,
                                      time_max=lbc_query_time_max,
                                      instrument=query_instrument,
                                      wavelength=wavelengths)

        if hist_pd.shape[0] == 0:
            # use all images in range
            in_index = pd.Series([False] * image_pd_all.shape[0])
        else:
            # compare image results to hist results based on image_id
            in_index = image_pd_all.image_id.isin(hist_pd.image_id)

        # return only images that do not have corresponding histograms
        image_pd = image_pd_all[~in_index]

        # check that images remain that need histograms
        if image_pd.shape[0] == 0:
            print("All " + instrument +
                  " images in timeframe already have associated histograms.")
            continue

        # apply LBC
        for index, row in image_pd.iterrows():
            print("Calculating IIT histogram at time:", row.date_obs)

            original_los, lbcc_image, mu_indices, use_indices, theoretic_query = lbcc_funcs.apply_lbc_2(
                db_session,
                hdf_data_dir,
                image_row=row,
                n_intensity_bins=n_intensity_bins,
                R0=R0)
            # check that image load and LBCC application finished successfully
            if original_los is None:
                continue

            # calculate IIT histogram from LBC
            hist = psi_d_types.LBCCImage.iit_hist(lbcc_image, lat_band, log10)

            # create IIT histogram datatype
            iit_hist = psi_d_types.create_iit_hist(lbcc_image, method_id[1],
                                                   lat_band, hist)

            # add IIT histogram and meta data to database
            db_funcs.add_hist(db_session, iit_hist)

    end_time = time.time()
    print(
        "Inter-instrument transformation histograms have been created and saved to the database."
    )
    print("Total elapsed time for histogram creation: " +
          str(round(end_time - start_time, 3)) + " seconds.")

    return None
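
Conceptually, each IIT histogram produced above is a 1D histogram of log10 intensity restricted to a latitude band; the psi_d_types helpers wrap that result with image metadata before it is saved. A minimal sketch of the underlying binning on synthetic data (this is not the LBCCImage.iit_hist implementation):

import numpy as np

rng = np.random.default_rng(2)
data = rng.lognormal(mean=2.0, sigma=0.5, size=10000)  # synthetic intensities
lat = rng.uniform(-np.pi / 2, np.pi / 2, size=10000)   # synthetic latitudes (radians)

lat_band = [-np.pi / 2.4, np.pi / 2.4]
in_band = (lat >= lat_band[0]) & (lat <= lat_band[1])

n_intensity_bins = 200
intensity_bin_edges = np.linspace(0, 5, num=n_intensity_bins + 1)

# histogram of log10 intensity inside the latitude band
hist, _ = np.histogram(np.log10(data[in_band]), bins=intensity_bin_edges)
print(hist.sum())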
Example #8
def calc_iit_coefficients(db_session,
                          inst_list,
                          ref_inst,
                          calc_query_time_min,
                          calc_query_time_max,
                          weekday=0,
                          number_of_days=180,
                          image_freq=2,
                          image_del=np.timedelta64(30, 'm'),
                          n_intensity_bins=200,
                          lat_band=[-np.pi / 2.4, np.pi / 2.4],
                          create=False,
                          wavelengths=None):
    # start time
    start_time = time.time()

    # create IIT method
    meth_name = "IIT"
    meth_desc = "IIT Fit Method"
    method_id = db_funcs.get_method_id(db_session,
                                       meth_name,
                                       meth_desc,
                                       var_names=None,
                                       var_descs=None,
                                       create=False)

    #### GET REFERENCE INFO FOR LATER USE ####
    # get index number of reference instrument
    ref_index = inst_list.index(ref_inst)
    # query euv images to get carrington rotation range
    ref_instrument = [
        ref_inst,
    ]
    euv_images = db_funcs.query_euv_images(db_session,
                                           time_min=calc_query_time_min,
                                           time_max=calc_query_time_max,
                                           instrument=ref_instrument,
                                           wavelength=wavelengths)
    # get min and max carrington rotation
    rot_max = euv_images.cr_rot.max()
    rot_min = euv_images.cr_rot.min()

    # calculate the moving average centers
    ref_moving_avg_centers, moving_width = lbcc.moving_averages(
        calc_query_time_min, calc_query_time_max, weekday, number_of_days)
    # calculate image cadence centers
    range_min_date = ref_moving_avg_centers[0] - moving_width / 2
    range_max_date = ref_moving_avg_centers[-1] + moving_width / 2
    image_centers = synch_utils.get_dates(
        time_min=range_min_date.astype(datetime.datetime),
        time_max=range_max_date.astype(datetime.datetime),
        map_freq=image_freq)

    # query histograms
    ref_hist_pd = db_funcs.query_hist(
        db_session=db_session,
        meth_id=method_id[1],
        n_intensity_bins=n_intensity_bins,
        lat_band=lat_band,
        time_min=calc_query_time_min - datetime.timedelta(days=number_of_days),
        time_max=calc_query_time_max + datetime.timedelta(days=number_of_days),
        instrument=ref_instrument,
        wavelength=wavelengths)
    # keep only one observation-histogram per image_center window
    keep_ind = lbcc.cadence_choose(ref_hist_pd.date_obs, image_centers,
                                   image_del)
    ref_hist_pd = ref_hist_pd.iloc[keep_ind]

    # convert binary to histogram data
    mu_bin_edges, intensity_bin_edges, ref_full_hist = psi_d_types.binary_to_hist(
        hist_binary=ref_hist_pd,
        n_mu_bins=None,
        n_intensity_bins=n_intensity_bins)

    # determine date of first AIA image
    min_ref_time = db_session.query(func.min(
        db_class.EUV_Images.date_obs)).filter(
            db_class.EUV_Images.instrument == ref_inst).all()
    base_ref_min = min_ref_time[0][0]
    base_ref_center = base_ref_min + datetime.timedelta(
        days=number_of_days) / 2
    base_ref_max = base_ref_center + datetime.timedelta(
        days=number_of_days) / 2
    if (calc_query_time_min - datetime.timedelta(days=7)) < base_ref_center:
        # generate histogram for first year of reference instrument
        ref_base_hist = ref_full_hist[:, (
            ref_hist_pd['date_obs'] >= str(base_ref_min)) & (
                ref_hist_pd['date_obs'] <= str(base_ref_max))]
    else:
        ref_base_hist = None

    for inst_index, instrument in enumerate(inst_list):
        # check if this is the reference instrument
        if inst_index == ref_index:
            # loop through moving average centers
            for date_index, center_date in enumerate(ref_moving_avg_centers):
                print("Starting calculations for", instrument, ":",
                      center_date)

                if (center_date > ref_hist_pd.date_obs.max()
                        or center_date < ref_hist_pd.date_obs.min()):
                    print("Date is out of instrument range, skipping.")
                    continue

                # determine time range based off moving average centers
                min_date = center_date - moving_width / 2
                max_date = center_date + moving_width / 2
                # get the correct date range to use for image combos
                ref_pd_use = ref_hist_pd[
                    (ref_hist_pd['date_obs'] >= str(min_date))
                    & (ref_hist_pd['date_obs'] <= str(max_date))]

                # save alpha/x as [1, 0] for reference instrument
                alpha = 1
                x = 0
                db_funcs.store_iit_values(db_session, ref_pd_use, meth_name,
                                          meth_desc, [alpha, x], create)
        else:
            # query euv_images for correct carrington rotation
            query_instrument = [
                instrument,
            ]

            rot_images = db_funcs.query_euv_images_rot(
                db_session,
                rot_min=rot_min,
                rot_max=rot_max,
                instrument=query_instrument,
                wavelength=wavelengths)
            if rot_images.shape[0] == 0:
                print("No images in timeframe for ", instrument, ". Skipping")
                continue
            # get time minimum and maximum for instrument
            inst_time_min = rot_images.date_obs.min()
            inst_time_max = rot_images.date_obs.max()
            # if Stereo A or B has images before AIA, calc IIT for those weeks
            if inst_time_min > calc_query_time_min:
                all_images = db_funcs.query_euv_images(
                    db_session,
                    time_min=calc_query_time_min,
                    time_max=calc_query_time_max,
                    instrument=query_instrument,
                    wavelength=wavelengths)
                if all_images.date_obs.min() < inst_time_min:
                    inst_time_min = all_images.date_obs.min()

            moving_avg_centers, moving_width = lbcc.moving_averages(
                inst_time_min, inst_time_max, weekday, number_of_days)
            # calculate image cadence centers
            range_min_date = moving_avg_centers[0] - moving_width / 2
            range_max_date = moving_avg_centers[-1] + moving_width / 2
            image_centers = synch_utils.get_dates(
                time_min=range_min_date.astype(datetime.datetime),
                time_max=range_max_date.astype(datetime.datetime),
                map_freq=image_freq)

            inst_hist_pd = db_funcs.query_hist(
                db_session=db_session,
                meth_id=method_id[1],
                n_intensity_bins=n_intensity_bins,
                lat_band=lat_band,
                time_min=inst_time_min -
                datetime.timedelta(days=number_of_days),
                time_max=inst_time_max +
                datetime.timedelta(days=number_of_days),
                instrument=query_instrument,
                wavelength=wavelengths)
            # keep only one observation-histogram per image_center window
            keep_ind = lbcc.cadence_choose(inst_hist_pd.date_obs,
                                           image_centers, image_del)
            inst_hist_pd = inst_hist_pd.iloc[keep_ind]

            # convert binary to histogram data
            mu_bin_edges, intensity_bin_edges, inst_full_hist = psi_d_types.binary_to_hist(
                hist_binary=inst_hist_pd,
                n_mu_bins=None,
                n_intensity_bins=n_intensity_bins)
            # loops through moving average centers
            for date_index, center_date in enumerate(moving_avg_centers):
                print("Starting calculations for", instrument, ":",
                      center_date)

                if (center_date > inst_hist_pd.date_obs.max()
                        or center_date < inst_hist_pd.date_obs.min()):
                    print("Date is out of instrument range, skipping.")
                    continue

                # determine time range based off moving average centers
                min_date = center_date - moving_width / 2
                max_date = center_date + moving_width / 2
                # get proper time-range of reference histograms
                if center_date <= base_ref_center:
                    # if date is earlier than reference (AIA) first year, use reference (AIA) first year
                    ref_hist_use = ref_base_hist
                else:
                    # get indices for calculation of reference histogram
                    ref_hist_ind = ((ref_hist_pd['date_obs'] >= str(min_date)) &
                                    (ref_hist_pd['date_obs'] <= str(max_date)))
                    ref_hist_use = ref_full_hist[:, ref_hist_ind]

                # get the correct date range to use for the instrument histogram
                inst_hist_ind = ((inst_hist_pd['date_obs'] >= str(min_date)) &
                                 (inst_hist_pd['date_obs'] <= str(max_date)))
                inst_pd_use = inst_hist_pd[inst_hist_ind]
                # get indices and histogram for calculation
                inst_hist_use = inst_full_hist[:, inst_hist_ind]

                # sum histograms
                hist_fit = inst_hist_use.sum(axis=1)
                hist_ref = ref_hist_use.sum(axis=1)

                # normalize fit histogram
                fit_sum = hist_fit.sum()
                norm_hist_fit = hist_fit / fit_sum

                # normalize reference histogram
                ref_sum = hist_ref.sum()
                norm_hist_ref = hist_ref / ref_sum

                # get reference/fit peaks
                ref_peak_index = np.argmax(norm_hist_ref)    # index of max value of hist_ref
                ref_peak_val = norm_hist_ref[ref_peak_index]    # max value of hist_ref
                fit_peak_index = np.argmax(norm_hist_fit)    # index of max value of hist_fit
                fit_peak_val = norm_hist_fit[fit_peak_index]    # max value of hist_fit
                # estimate correction coefficients that match fit_peak to ref_peak
                alpha_est = fit_peak_val / ref_peak_val
                x_est = (intensity_bin_edges[ref_peak_index] -
                         alpha_est * intensity_bin_edges[fit_peak_index])
                init_pars = np.asarray([alpha_est, x_est], dtype=np.float64)

                # calculate alpha and x
                alpha_x_parameters = iit.optim_iit_linear(norm_hist_ref,
                                                          norm_hist_fit,
                                                          intensity_bin_edges,
                                                          init_pars=init_pars)
                # save alpha and x to database
                db_funcs.store_iit_values(db_session, inst_pd_use, meth_name,
                                          meth_desc, alpha_x_parameters.x,
                                          create)

    end_time = time.time()
    tot_time = end_time - start_time
    time_tot = str(datetime.timedelta(seconds=tot_time))

    print(
        "Inter-instrument transformation fit parameters have been calculated and saved to the database."
    )
    print("Total elapsed time for IIT fit parameter calculation: " + time_tot)

    return None
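
The initial guess fed to iit.optim_iit_linear() above comes from matching the peaks of the normalized instrument and reference histograms: alpha_est is the ratio of peak heights, and x_est shifts the fit peak location onto the reference peak location in log10 intensity. A self-contained illustration of that estimate on synthetic normalized histograms (the shapes and values are made up):

import numpy as np

n_bins = 200
intensity_bin_edges = np.linspace(0, 5, num=n_bins + 1)
centers = (intensity_bin_edges[:-1] + intensity_bin_edges[1:]) / 2

# synthetic normalized histograms with offset peaks
norm_hist_ref = np.exp(-0.5 * ((centers - 2.5) / 0.3) ** 2)
norm_hist_ref /= norm_hist_ref.sum()
norm_hist_fit = np.exp(-0.5 * ((centers - 2.2) / 0.4) ** 2)
norm_hist_fit /= norm_hist_fit.sum()

# peak-matching initial estimate, mirroring calc_iit_coefficients()
ref_peak_index = np.argmax(norm_hist_ref)
fit_peak_index = np.argmax(norm_hist_fit)
alpha_est = norm_hist_fit[fit_peak_index] / norm_hist_ref[ref_peak_index]
x_est = (intensity_bin_edges[ref_peak_index] -
         alpha_est * intensity_bin_edges[fit_peak_index])
init_pars = np.asarray([alpha_est, x_est], dtype=np.float64)
print(init_pars)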