def open_relevant_data(gal_name, loc):
    """Open up the relevant dataframe of PNe for a given galaxy and location, along with A/rN map, 
    [x,y] coordinates and the wcs objs.

    Parameters
    ----------
    gal_name : str
        Galaxy Name (format of FCC000 as example)
    loc : str
        pointing location: center, middle or halo

    Returns
    -------
    [type]
        DIR_dict, PNe_df, A_rN_plot_shape, x_y_list, galaxy_info, x_data, y_data, wcs_obj
    """
    DIR_dict = paths(gal_name, loc)  # Get directories
    # Open the corresponding residual data for the given galaxy and location.
    res_cube, res_hdr, wavelength, res_shape, x_data, y_data, galaxy_info = open_data(
        gal_name, loc, DIR_dict)

    # Read in the PNe dataframe, the PNe [x, y] coordinate list and the A/rN array;
    # the "ID" column labels each source as PN, SNR, HII, impostor or over-luminous.
    PNe_df = pd.read_csv(DIR_dict["EXPORT_DIR"] + "_PNe_df.csv")
    x_y_list = np.load(DIR_dict["EXPORT_DIR"] + "_PNe_x_y_list.npy")
    A_rN_plot = np.load(DIR_dict["EXPORT_DIR"] + "_A_rN.npy")

    # Reshape the flat A/rN array into a 2D map matching the image dimensions.
    A_rN_plot_shape = A_rN_plot.reshape(y_data, x_data)

    # Get WCS coordinates
    with fits.open(DIR_dict["RAW_DATA"]) as hdu_wcs:
        hdr_wcs = hdu_wcs[1].header
        wcs_obj = WCS(hdr_wcs, naxis=2)

    return DIR_dict, PNe_df, A_rN_plot_shape, x_y_list, galaxy_info, x_data, y_data, wcs_obj
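
A minimal usage sketch, using the test galaxy and location that appear in the examples below:

# Unpack everything returned by open_relevant_data for the test pointing.
DIR_dict, PNe_df, A_rN_map, x_y_list, galaxy_info, x_data, y_data, wcs_obj = open_relevant_data("FCCtest", "center")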
Example #2
def test_open_data():
    galaxy_name = "FCCtest"
    loc = "center"
    DIR_dict = paths(galaxy_name, loc)
    res_data, res_hdr, wavelength, res_shape, x_data, y_data, galaxy_data = open_data(galaxy_name, loc, DIR_dict)
    
    assert len(np.shape(res_data)) == 3 # test that the residual data is a cube, i.e. 3 dimensional
Example #3
def test_dir_dict():
    galaxy_name = "FCCtest"
    loc = "center"
    
    DIR_dict = paths(galaxy_name, loc)
    
    assert len(DIR_dict) > 0 # test that DIR_dict isn't empty
    #assert that all the values in the DIR_dict dictionary are string types
    assert all(isinstance(entry, str) for entry in DIR_dict.values())
Example #4
def test_generate_mask():
    galaxy_name = "FCCtest"
    loc = "center"
    DIR_dict = paths(galaxy_name, loc)
    res_cube, res_hdr, wavelength, res_shape, x_data, y_data, galaxy_info = open_data(galaxy_name, loc, DIR_dict)

    ellip_mask = generate_mask(img_shape=[y_data, x_data], mask_params=galaxy_info["gal_mask"], mask_shape="ellipse")
    circle_mask = [generate_mask(img_shape=[y_data, x_data], mask_params=star_mask, mask_shape="circle") for star_mask in galaxy_info["star_mask"]]

    assert np.shape(ellip_mask) == (y_data, x_data) # Normally only one mask for each galaxy.
    assert np.shape(circle_mask) == (len(galaxy_info["star_mask"]), y_data, x_data) # There can be more than one mask for stars in a FOV.
Example #5
def reconstructed_image(galaxy_name, loc):
    DIR_dict = paths(galaxy_name, loc)
    
    with fits.open(DIR_dict["RAW_DATA"]) as hdu:
        hdr  = hdu[1].header
        s    = np.shape(hdu[1].data)
        wave = hdr['CRVAL3']+(np.arange(s[0]))*hdr['CD3_3']   
        cond = (wave >= 4900.0) & (wave <= 5100.0)
        data = np.sum(hdu[1].data[cond,:,:], axis=0)

    return data, wave, hdr
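
A brief usage sketch; the matplotlib import and display call are illustrative assumptions, with the test galaxy and location used elsewhere in these examples:

import matplotlib.pyplot as plt

# Collapse the raw cube around 5007 Angstrom and display the reconstructed image.
image, wave, hdr = reconstructed_image("FCCtest", "center")
plt.imshow(image, origin="lower")
plt.show()
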
def make_table(galaxy_name, loc):
    DIR_dict = paths(galaxy_name, loc)

    PNe_df = pd.read_csv(DIR_dict["EXPORT_DIR"] + "_PNe_df.csv")

    index_check = PNe_df["ID"].isin(["-"])
    y_idx = PNe_df.loc[~index_check].index.values

    RA_for_table = [
        RA.replace("h", "").replace("m", "").replace("s", "")
        for RA in PNe_df["Ra (J2000)"].loc[y_idx]
    ]
    DEC_for_table = [
        DEC.replace("d", "").replace("m", "").replace("s", "")
        for DEC in PNe_df["Dec (J2000)"].loc[y_idx]
    ]

    ID_for_table = [
        "F3D J" + RA_for_table[i] + DEC_for_table[i] for i in range(len(y_idx))
    ]
    m_5007 = PNe_df["m 5007"].loc[~index_check].round(2).values
    m_5007_err = PNe_df.loc[~index_check,
                            ["mag error up", "mag error lo"]].median(1).round(
                                2).values

    PNe_LOSV = PNe_df["PNe_LOS_V"].loc[~index_check].round(1).values
    PNe_LOSV_err = PNe_df["PNe_LOS_V_err"].loc[~index_check].round(1).values

    PNe_table = Table([
        ID_for_table, PNe_df["Ra (J2000)"].loc[~index_check],
        PNe_df["Dec (J2000)"].loc[~index_check], m_5007, m_5007_err,
        PNe_df["A/rN"].loc[~index_check].round(1), PNe_LOSV, PNe_LOSV_err,
        PNe_df["ID"].loc[~index_check], PNe_df["index"].loc[~index_check]
    ],
                      names=("PN ID", "Ra", "Dec", "m 5007", "mag err", "A/rN",
                             "LOSV", "LOSV err", "ID", "index"))

    # Save table in tab separated format.
    ascii.write(PNe_table,
                DIR_dict["EXPORT_DIR"] + "_fit_results.txt",
                format="tab",
                overwrite=True)
    # Save latex table of data.
    ascii.write(PNe_table,
                DIR_dict["EXPORT_DIR"] + "_fit_results_latex.txt",
                format="latex",
                overwrite=True)
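
A short sketch of reading the tab-separated results back in, assuming the same path construction and test inputs used in the other examples:

from astropy.io import ascii

# Read back the tab-separated fit results written by make_table.
DIR_dict = paths("FCCtest", "center")
PNe_table = ascii.read(DIR_dict["EXPORT_DIR"] + "_fit_results.txt", format="tab")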
Example #7
def test_PNe_minicube_extractor():
    galaxy_name = "FCCtest"
    loc = "center"
    
    DIR_dict = paths(galaxy_name, loc)
    
    # Load in the residual data, in list form
    res_data, res_hdr, wavelength, res_shape, x_data, y_data, galaxy_data = open_data(galaxy_name, loc, DIR_dict)

    x_y_list = np.load(DIR_dict["EXPORT_DIR"]+"_PNe_x_y_list.npy")
    x_PNe = np.array([x[0] for x in x_y_list])
    y_PNe = np.array([y[1] for y in x_y_list])
    
    sources = np.array([PNe_minicube_extractor(x, y, 9, res_data, wavelength) for x,y in zip(x_PNe, y_PNe)])

    assert len(np.shape(sources)) == 3  # test sources is 3 dimensional array
    assert np.shape(sources)[0] == len(x_PNe) # test number of sources matches number of PNe in x_y_list
Example #8
my_parser.add_argument(
    "--save",
    action="store_true",
    default=False,
    help=
    "Boolean flag to determine if the figures created from this script should be saved on this run. Default is False"
)

args = my_parser.parse_args()

# Define galaxy name
galaxy_name = args.galaxy
show_plot = args.show
plt_save = args.save

# Define Working directory

gist_dir = f"/path/to/gist_results/{galaxy_name}MUSEPNeweighted_contamination/{galaxy_name}MUSEPNeweighted"

DIR_dict = paths(galaxy_name, "center")

# Read in PNe dataframe from the PNe fitting script
PNe_df = pd.read_csv(DIR_dict["EXPORT_DIR"] + "_PNe_df.csv")

# Create an index array for sources that pass both the A/rN > 3 and chi-square filters,
# i.e. all entries whose "ID" is not equal to "-".
indx = PNe_df[PNe_df["ID"] != "-"].index.values
m_5007 = PNe_df.loc[indx, "m 5007"]

# Open and name the following result files
gandalf_emission = fits.open(f"{gist_dir}_gandalf-emission_SPAXEL.fits")
gandalf_best = fits.open(f"{gist_dir}_gandalf-bestfit_SPAXEL.fits")
gandalf_clean = fits.open(f"{gist_dir}_gandalf-cleaned_SPAXEL.fits")
gandalf_results = fits.open(f"{gist_dir}_gandalf_SPAXEL.fits")
AllSpectra = fits.open(f"{gist_dir}_AllSpectra.fits")
Example #9
def calc_Lbol(gal, loc, calc_app=False):
    """Simple function made for running the Lbol calculation, across a number of galaxies, matching criteria and wether or not to use appertures.

    Parameters
    ----------
    gal : str
        Galaxy name, given as FCC000 or NGC0000
    loc : str
        Location of the pointing; used for the Fornax3D survey, as some galaxies are observed across center, middle and halo pointings.
    calc_app : bool
        Boolean flag to decide whether apertures should be used. Defaults to False.

    Returns
    -------
    dict
        Contains all the information needed from using pPXF to calculate the luminosity, with errors, for a given galaxy.
        Keys: "Lbol", "Lbol_err_up", "Lbol_err_lo", "mag_g", "mag_r", "mag_v", "sigma"
    """
    galaxy_data = galaxy_info[f"{gal}_{loc}"]
    DIR_dict = paths(gal, loc)

    # use the PNLF derived dM from running the PNe fitting script.
    dM = PNe_results_df['PNLF dM'][0]
    dM_err_up = PNe_results_df['PNLF dM err up'][0]
    dM_err_lo = PNe_results_df['PNLF dM err lo'][0]

    #  Alternatively, you can comment this section out and input your own values and errors for dM.
    #dM = your_dM_value_here
    #dM_err_up = your_dM_upper_error_here
    #dM_err_lo = your_dM_lower_error_here

    z = galaxy_data["velocity"] * 1e3 / c

    # Included below are examples of how I have run the Lbol routines on Fornax3D data.
    # If the galaxy is not in the list below, or this is a middle or halo pointing,
    # calculate the bolometric luminosity of the spectra across the FOV, with masking applied.
    if (gal not in ["FCC119", "FCC143", "FCC176", "FCC255", "FCC301"
                    ]) or (loc in ["middle", "halo"]):

        Lbol_results = run_Lbol(DIR_dict,
                                dM,
                                galaxy_data,
                                z,
                                dM_err_up,
                                dM_err_lo,
                                app_sum=calc_app)

    # Else, if the galaxy is in the list below and this is a central pointing, calculate the
    # bolometric luminosity using pre-defined apertures, via the app_params argument.
    elif (gal in ["FCC119", "FCC143", "FCC176", "FCC255", "FCC301"
                  ]) and (loc == "center"):
        gal_centre_pix = Table.read("exported_data/galaxy_centre_pix.dat",
                                    format="ascii")
        gal_indx = np.argwhere(gal_centre_pix["Galaxy"] == gal)
        gal_x = gal_centre_pix[gal_indx]["x_pix"]
        gal_y = gal_centre_pix[gal_indx]["y_pix"]
        if gal == "FCC119":
            app_params = [
                galaxy_data["centre"][0], galaxy_data["centre"][1], 100, 100, 1
            ]
        elif gal == "FCC143":
            app_params = [gal_x, gal_y, 260, 140, 2.1]
        elif gal == "FCC176":
            app_params = [gal_x, gal_y, 400, 400, 1.0]
        elif gal == "FCC255":
            app_params = [gal_x, gal_y, 400, 200, -0.1]
        elif gal == "FCC301":
            app_params = [gal_x, gal_y, 270, 200, -0.3]

        Lbol_results = run_Lbol(gal,
                                loc,
                                DIR_dict,
                                dM,
                                galaxy_data,
                                z,
                                dM_err_up,
                                dM_err_lo,
                                custom_app=True,
                                custom_app_params=app_params)

    return Lbol_results
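
A minimal call sketch, following the return keys described in the docstring (the galaxy name is the placeholder format given there):

# Compute Lbol for one central pointing and pull out the headline numbers.
Lbol_results = calc_Lbol("FCC000", "center", calc_app=False)
print(Lbol_results["Lbol"], Lbol_results["sigma"])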
Example #10

for i, (gal, loc) in enumerate(zip(galaxy_selection, locs)):
    print(f"\n Calculating Lbol for {gal} {loc}")
    DIR_dict = paths(gal, loc)
    PNe_results_df = pd.read_csv(
        f"exported_data/{gal}/{gal}{loc}_PN_result_df.csv")
    Lbol_results = calc_Lbol(gal, loc, calc_app)
    L_bol_df = pd.DataFrame(data=[[
        Lbol_results["Lbol"],
        Lbol_results["Lbol_err_up"] - Lbol_results["Lbol"],
        Lbol_results["Lbol"] - Lbol_results["Lbol_err_lo"],
        Lbol_results["mag_v"], Lbol_results["mag_r"], Lbol_results["sigma"]
    ]],
                            columns=("Lbol", "Lbol_err_up", "Lbol_err_lo",
                                     "mag_v", "mag_r", "sigma"))

    L_bol_df.to_csv(DIR_dict["EXPORT_DIR"] + "_Lbol_df.csv")
    print(f"\n{gal} {loc} completed! \n")
Example #11
# Setup for argparse
my_parser = argparse.ArgumentParser()

my_parser.add_argument('--galaxy', action='store', type=str, required=True, help="The name of the galaxy to be analysed.")
my_parser.add_argument("--save", action="store_true", default=False, help="Flag for saving the plots, default is False")
my_parser.add_argument("--show", action="store_true", default=False, help="Flag to decide if the plots made with this script are shown afterwards. Default is False.")
args = my_parser.parse_args()

# Define galaxy name
galaxy_name = args.galaxy   # galaxy name, format of FCC000
save_plot = args.save       # save plots
show = args.show
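
As a usage sketch, a hypothetical command-line invocation (the script filename is a placeholder, not from the source):

# python plotting_script.py --galaxy FCC000 --save --show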

loc = "center" 

DIR_dict = paths(galaxy_name, loc)

with open(DIR_dict["YAML"], "r") as yaml_data:
    yaml_info = yaml.load(yaml_data, Loader=yaml.FullLoader)
        
galaxy_info = yaml_info[f"{galaxy_name}_{loc}"]

PNe_df = pd.read_csv(DIR_dict["EXPORT_DIR"]+"_PNe_df.csv")

obs_comp = np.load(DIR_dict["EXPORT_DIR"]+"_completeness_ratio.npy")

gal_m_5007 = PNe_df["m 5007"].loc[PNe_df["ID"].isin(["PN"])].values
gal_m_5007_err_up = PNe_df["mag error up"].loc[PNe_df["ID"].isin(["PN"])].values
gal_m_5007_err_lo = PNe_df["mag error lo"].loc[PNe_df["ID"].isin(["PN"])].values

step = 0.001
Example #12
my_parser.add_argument(
    "--sep",
    action="store_true",
    default=False,
    help=
    "The sep flag makes the script save the output PNe locations, as detected using the SEP package."
)

args = my_parser.parse_args()

galaxy_name = args.galaxy
loc = args.loc
fit_spaxel = args.fit
save_sep = args.sep

DIR_dict = paths(galaxy_name, loc)

# To be used when working with residual cubes
res_cube, res_hdr, wavelength, res_shape, x_data, y_data, galaxy_info = open_data(
    galaxy_name, loc, DIR_dict)

# Reshape the residual cube into a list of residual spectra, so as to work with the current code.
res_data_list = res_cube.reshape(len(wavelength), x_data * y_data)
res_data_list = np.swapaxes(res_data_list, 1, 0)
n_spax = np.shape(res_data_list)[0]

# Indices where there is spectral data to fit: check where the data is non-zero
# (real spectral data should never be exactly 0.0).
non_zero_index = np.squeeze(
    np.where(res_cube[1, :, :] != 0.))  # use with residual cube

# Constants