Example #1
def plot_bad_images(hdf_dir, data):
    for im_ind, im_row in data.iterrows():
        print("Plotting image number", im_row.data_id, ".")
        if im_row.fname_hdf == "":
            print("Warning: Image # " + str(im_row.data_id) +
                  " does not have an associated hdf file. Skipping")
            continue
        hdf = os.path.join(hdf_dir, im_row.fname_hdf)
        los_image = psi_d_types.read_los_image(hdf)
        # add coordinates to los object
        los_image.get_coordinates(R0=R0)

        #### plot image
        # set color palette and normalization (improve by using Ron's colormap setup)
        norm = mpl.colors.LogNorm(vmin=1.0, vmax=np.nanmax(los_image.data))
        # norm = mpl.colors.LogNorm()
        im_cmap = plt.get_cmap('sohoeit195')

        # remove extremely small values from the data so that the log color scale treats
        # them as black rather than white (copy first so los_image.data is not modified)
        plot_arr = np.copy(los_image.data)
        plot_arr[plot_arr < 0.001] = 0.001

        # plot the initial image
        plt.figure(im_row.data_id)
        plt.imshow(plot_arr,
                   extent=[
                       los_image.x.min(),
                       los_image.x.max(),
                       los_image.y.min(),
                       los_image.y.max()
                   ],
                   origin="lower",
                   cmap=im_cmap,
                   aspect="equal",
                   norm=norm)
        plt.xlabel("x (solar radii)")
        plt.ylabel("y (solar radii)")
        plt.title(im_row.data_id)
    plt.show()
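A short usage sketch for plot_bad_images; the directory and DataFrame below are hypothetical placeholders, and data only needs data_id and fname_hdf columns (plus readable hdf files on disk):

import pandas as pd

bad_images = pd.DataFrame({"data_id": [101, 102],
                           "fname_hdf": ["2011/03/26/aia_a.h5", ""]})
plot_bad_images("/tmp/hdf", bad_images)  # second row has no hdf file and is skipped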
Example #2
    query_pd = query_pd_all[~in_index]

    # check that images remain that need histograms
    if query_pd.shape[0] == 0:
        print("All" + instrument + " images in timeframe already have associated histograms.")
        continue

    for index, row in query_pd.iterrows():
        print("Processing image number", row.data_id, ".")
        if row.fname_hdf == "":
            print("Warning: Image # " + str(row.data_id) + " does not have an associated hdf file. Skipping")
            continue
        hdf_path = os.path.join(hdf_data_dir, row.fname_hdf)
        # attempt to open and read file
        try:
            los_temp = psi_d_types.read_los_image(hdf_path)
        except Exception:
            print("Something went wrong opening: " + hdf_path + ". Skipping")
            continue
        # add coordinates to los object
        los_temp.get_coordinates(R0=R0)
        # perform 2D histogram on mu and image intensity
        temp_hist = los_temp.mu_hist(image_intensity_bin_edges, mu_bin_edges, lat_band=lat_band, log10=log10)
        hist_lbcc = psi_d_types.create_lbcc_hist(hdf_path, row.data_id, method_id[1], mu_bin_edges,
                                                 image_intensity_bin_edges, lat_band, temp_hist)

        # add this histogram and meta data to database
        add_hist(db_session, hist_lbcc)

db_session.close()
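For reference, the bin-edge arguments passed to mu_hist above are plain NumPy arrays; a minimal construction sketch (the bin counts and ranges are illustrative assumptions, not the project's settings):

import numpy as np

mu_bin_edges = np.linspace(0.1, 1.0, num=19)  # 18 mu bins (illustrative)
image_intensity_bin_edges = np.linspace(-1.0, 5.0, num=201)  # 200 log10-intensity bins (illustrative)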
Example #3
# Establish connection to database
db_session = db_funcs.init_db_conn(db_type,
                                   db_class.Base,
                                   db_loc,
                                   db_name=mysql_db_name,
                                   user=user,
                                   password=password,
                                   cred_dir=cred_dir)

# SAMPLE QUERY
# use database session to query available pre-processed images
query_time_min = datetime.datetime(2011, 2, 1, 0, 0, 0)
query_time_max = datetime.datetime(2011, 2, 1, 12, 0, 0)

image_pd = db_funcs.query_euv_images(db_session,
                                     time_min=query_time_min,
                                     time_max=query_time_max)

# view a snapshot of the results
print(image_pd.loc[:, ['date_obs', 'instrument', 'fname_hdf']])

# open the first image
image_path = os.path.join(hdf_data_dir, image_pd.fname_hdf.iloc[0])
psi_image = psi_dtypes.read_los_image(image_path)

# plot deconvolved image
psi_plots.PlotImage(psi_image)

# CLOSE CONNECTION
db_session.close()
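Each of these scripts pairs init_db_conn with a manual db_session.close(); a minimal alternative sketch using the standard library (contextlib.closing works with any object that exposes .close()):

from contextlib import closing

with closing(db_funcs.init_db_conn(db_type, db_class.Base, db_loc,
                                   db_name=mysql_db_name, user=user,
                                   password=password, cred_dir=cred_dir)) as db_session:
    image_pd = db_funcs.query_euv_images(db_session,
                                         time_min=query_time_min,
                                         time_max=query_time_max)
# the session is closed here even if the query raises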
Example #4
# Prep WITH deconvolution
subdir, fname, los_aia = prep.prep_euv_image(fitsfile_aia,
                                             prep_home_dir,
                                             deconvolve=True,
                                             write=write)

# save the example plots
prefix = os.path.join(plot_dir, 'plot_aia')
lmin, lmax = 1.00, 3.25
plot_examples(map_raw_aia, los_aia_nod.map, los_aia.map, lmin, lmax,
              lmin + 0.3, lmax + 0.3, prefix)

# read in the deconvolved, prepped hdf5 file (.h5) as our custom image format
hdf_file = os.path.join(prep_home_dir, subdir, fname)
t1 = time.perf_counter()
los = datatypes.read_los_image(hdf_file)
t2 = time.perf_counter()
print("time to read .h5 file and create LosImage with a map: ", t2 - t1)

# ----------------------------------------------------------------------
# Example 2: EUVI-A
# ----------------------------------------------------------------------
### NOTE: UNLESS SSW/IDL is set up for your system, the EUVI examples WILL NOT WORK
### ---> we need to figure out a solution for remote calls.

# start up an IDL session (used for SSW/IDL secchi_prep for STEREO A and B)
idl_session = idl_helper.Session()

# Read in the raw image
map_raw_sta = sunpy.map.Map(fitsfile_sta)
Example #5
    combo_query = query_inst_combo(
        db_session, lbc_query_time_min - datetime.timedelta(weeks=2),
        lbc_query_time_max + datetime.timedelta(weeks=2), meth_name,
        instrument)

    ###### GET LOS IMAGES COORDINATES (DATA) #####
    # apply LBC
    for index in range(n_images_plot):
        row = image_pd.iloc[index]
        print("Processing image number", row.data_id, ".")
        if row.fname_hdf == "":
            print("Warning: Image # " + str(row.data_id) +
                  " does not have an associated hdf file. Skipping")
            continue
        hdf_path = os.path.join(hdf_data_dir, row.fname_hdf)
        original_los = psi_d_types.read_los_image(hdf_path)
        original_los.get_coordinates(R0=R0)
        theoretic_query = query_var_val(
            db_session,
            meth_name,
            date_obs=original_los.info['date_string'],
            inst_combo_query=combo_query)

        ###### DETERMINE LBC CORRECTION (for valid mu values) ######
        beta1d, y1d, mu_indices, use_indices = lbcc.get_beta_y_theoretic_continuous_1d_indices(
            theoretic_query, los_image=original_los)

        ###### APPLY LBC CORRECTION (log10 space) ######
        corrected_lbc_data = np.copy(original_los.data)
        corrected_lbc_data[use_indices] = 10**(
            beta1d * np.log10(original_los.data[use_indices]) + y1d)
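A tiny synthetic check of the log10-space correction applied above (the beta and y values here are illustrative, not fit results):

import numpy as np

beta, y = 1.05, -0.1
data = np.array([10.0, 100.0])
corrected = 10 ** (beta * np.log10(data) + y)  # ~[8.91, 100.0]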
Example #6
z = np.array(z)
f = np.array(f)

h5file.close()

losAIA = psi_datatype.LosImage(f, x, y)

# query DB EUV images to use their meta data
euv_images = db_funs.query_euv_images(db_session,
                                      time_min=query_start,
                                      time_max=query_end)

A_ind = euv_images.instrument == "EUVI-A"
A_fname = euv_images.fname_hdf[A_ind].item()
A_path = os.path.join(image_hdf_dir, A_fname)
A_los = psi_datatype.read_los_image(A_path)
losA.info = A_los.info

B_ind = euv_images.instrument == "EUVI-B"
B_fname = euv_images.fname_hdf[B_ind].item()
B_path = os.path.join(image_hdf_dir, B_fname)
B_los = psi_datatype.read_los_image(B_path)
losB.info = B_los.info

AIA_ind = euv_images.instrument == "AIA"
AIA_fname = euv_images.fname_hdf[AIA_ind].item()
AIA_path = os.path.join(image_hdf_dir, AIA_fname)
AIA_los = psi_datatype.read_los_image(AIA_path)
losAIA.info = AIA_los.info
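The three per-instrument blocks above repeat one pattern and could be collapsed into a loop; a minimal sketch (losA and losB are assumed to be LosImage objects constructed earlier, outside this excerpt):

for inst, los in [("EUVI-A", losA), ("EUVI-B", losB), ("AIA", losAIA)]:
    fname = euv_images.fname_hdf[euv_images.instrument == inst].item()
    los.info = psi_datatype.read_los_image(os.path.join(image_hdf_dir, fname)).info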

# interpolate to map
Example #7
        # query images
        query_pd = pd.read_sql(
            db_session.query(db_class.EUV_Images, db_class.Data_Files).filter(
                db_class.EUV_Images.data_id.in_(bad_image_lists[inst_index]),
                db_class.Data_Files.data_id ==
                db_class.EUV_Images.data_id).order_by(
                    db_class.EUV_Images.date_obs).statement, db_session.bind)
        # remove duplicate columns
        query_pd = query_pd.loc[:, ~query_pd.columns.duplicated()]

        n_images = len(bad_image_lists[inst_index])
        for im_num, row in query_pd.iterrows():
            full_path = os.path.join(hdf_data_dir, row.fname_hdf)
            print("Plotting", instrument, im_num + 1, "of", n_images, "-",
                  row.date_obs)
            bad_im = psi_d_types.read_los_image(full_path)
            EasyPlot.PlotImage(bad_im, nfig=0)
            plt.waitforbuttonpress()
            plt.close(0)

# loop through flag_bad and change flag in database
for inst_index, instrument in enumerate(inst_list):
    # query images
    query_pd = pd.read_sql(
        db_session.query(db_class.EUV_Images, db_class.Data_Files).filter(
            db_class.EUV_Images.data_id.in_(bad_image_lists[inst_index]),
            db_class.Data_Files.data_id ==
            db_class.EUV_Images.data_id).order_by(
                db_class.EUV_Images.date_obs).statement, db_session.bind)
    # remove duplicate columns
    query_pd = query_pd.loc[:, ~query_pd.columns.duplicated()]
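The flag update promised by the comment above falls outside this excerpt; a hedged sketch using plain SQLAlchemy (the 'flag' column name and the value -1 are assumptions, not confirmed by this code):

    for data_id in bad_image_lists[inst_index]:
        db_session.query(db_class.EUV_Images).filter(
            db_class.EUV_Images.data_id == data_id).update({"flag": -1})
    db_session.commit()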
Example #8
##### QUERY IMAGES ######

for inst_index, instrument in enumerate(inst_list):
    # query wants a list
    query_instrument = [instrument, ]
    image_pd = query_euv_images(db_session=db_session, time_min=query_time_min, time_max=query_time_max,
                                instrument=query_instrument)

    ###### GET LOS IMAGES COORDINATES (DATA) ######
    for index, row in image_pd.iterrows():
        print("Processing image number", row.data_id, ".")
        if row.fname_hdf == "":
            print("Warning: Image # " + str(row.data_id) + " does not have an associated hdf file. Skipping")
            continue
        hdf_path = os.path.join(hdf_data_dir, row.fname_hdf)
        los_temp = psi_d_types.read_los_image(hdf_path)
        los_temp.get_coordinates(R0=R0)

    ###### APPLY LBC CORRECTION #######

    # select image
    selected_image = image_pd.iloc[0]
    # read hdf file to LOS object
    hdf_file = os.path.join(hdf_data_dir, selected_image.fname_hdf)
    original_los = psi_d_types.read_los_image(hdf_file)
    original_los.get_coordinates(R0=R0)

    mu_array = original_los.mu
    beta_query, y_query = query_lbcc_fit(db_session, image=selected_image,
                                         meth_name="LBCC Theoretic")
    beta_array = np.zeros(mu_array.shape)
    y_array = np.zeros(mu_array.shape)
Example #9
"""

import numpy as np
from numpy import mean, size, zeros, where
from scipy import stats  # scipy.linspace was removed; use np.linspace instead

import chmap.utilities.datatypes.datatypes as PSI_dtypes

from matplotlib.pyplot import figure, plot, xlabel, ylabel,\
    title, hist

lat_band = [-np.pi/64., np.pi/64.]
R0 = 1.01
hdf_fname = '/Volumes/Work/CHD_DB/processed_images/2011/03/26/aia_lvl2_20110326T040031_193.h5'

los_image = PSI_dtypes.read_los_image(hdf_fname)
los_image.get_coordinates(R0=R0)

lat_band_index = np.logical_and(los_image.lat <= max(lat_band), los_image.lat >= min(lat_band))
mu_min = 0.14
mu_max = 1.0
mu_index = np.logical_and(los_image.mu >= mu_min, los_image.mu <= mu_max)
use_index = np.logical_and(mu_index, lat_band_index)

use_mu = los_image.mu[use_index]
use_data = los_image.data[use_index]

use_data[use_data < 1.] = 1.
x = np.log10(use_data)

# calc Freedman-Diaconis bin size
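The excerpt cuts off before the computation; a minimal sketch of the Freedman-Diaconis rule applied to the x array above (standard formula, not code from this repo):

iqr = np.subtract(*np.percentile(x, [75, 25]))
fd_bin_width = 2.0 * iqr / x.size ** (1.0 / 3.0)
n_bins = int(np.ceil((x.max() - x.min()) / fd_bin_width))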
Example #10
# query some images
query_time_min = datetime.datetime(2011, 4, 1, 19, 35, 0)
query_time_max = datetime.datetime(2011, 4, 13, 21, 37, 0)
query_pd = query_euv_images(db_session=db_session,
                            time_min=query_time_min,
                            time_max=query_time_max)

# select an image (index 0)
selected_image = query_pd.iloc[0]

# print the selection info
print(selected_image)

# read hdf file to LOS object
hdf_file = os.path.join(hdf_data_dir, selected_image.fname_hdf)
test_los = psi_d_types.read_los_image(hdf_file)

# also read fits file for reference
fits_infile_aia = os.path.join(raw_data_dir, selected_image.fname_raw)

# Load the image using the built-in methods of SunPy
map_aia = sunpy.map.Map(fits_infile_aia)

# map parameters (input)
R0 = 1.01
y_range = [-1, 1]
x_range = [0, 2 * np.pi]

# map parameters (from image)
cr_lat = test_los.info['cr_lat']
cr_lon = test_los.info['cr_lon']
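A minimal sketch of turning the ranges above into map grid axes (the resolution is an illustrative assumption; a y_range of [-1, 1] is consistent with a sine-latitude grid):

map_nxcoord, map_nycoord = 1800, 900  # illustrative resolution
map_x = np.linspace(x_range[0], x_range[1], map_nxcoord)
map_y = np.linspace(y_range[0], y_range[1], map_nycoord)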
Example #11
for column in db_class.Method_Defs.__table__.columns:
    meth_columns.append(column.key)
defs_columns = []
for column in db_class.Var_Defs.__table__.columns:
    defs_columns.append(column.key)
# pandas columns must be an ordered sequence, not a set
df_cols = list(set().union(meth_columns, defs_columns, ("var_val", )))
methods_template = pd.DataFrame(data=None, columns=df_cols)
# generate a list of methods dataframes (use copies so entries don't share one object)
methods_list = [methods_template.copy() for _ in range(len(query_pd))]

# read hdf file(s) to a list of LOS objects
los_list = [None] * len(query_pd)
image_plot_list = [None] * len(query_pd)
for index, row in query_pd.iterrows():
    hdf_path = os.path.join(hdf_data_dir, row.fname_hdf)
    los_list[index] = psi_d_types.read_los_image(hdf_path)
    EasyPlot.PlotImage(los_list[index],
                       nfig=index,
                       title="Image " + str(index))

# --- 2. Limb-brightening correction ------------------------------------------

for ii in range(len(los_list)):
    # call function to de-limb los_list[ii]
    # los_list[ii] = limb_correct(los_list[ii])
    # generate a record of the method and variable values used for LBC
    new_method = {
        'meth_name': ("lbcc_v1", "lbcc_v1"),
        'meth_description': [
            "Limb Brightening Correction from "
            "linear-log space model. Version 1"