Example No. 1
def variable_aper_phot(target,
                       centroided_sources,
                       multiplicative_factors,
                       an_in=12.,
                       an_out=30.,
                       plots=False,
                       gain=8.21,
                       qe=0.9,
                       plate_scale=0.579):
    pines_path = pines_dir_check()
    short_name = short_name_creator(target)

    #Remove any leading/trailing spaces in the column names.
    centroided_sources.columns = centroided_sources.columns.str.strip()

    #Get list of reduced files for target.
    reduced_path = pines_path / ('Objects/' + short_name + '/reduced')
    reduced_filenames = natsort.natsorted(
        [x.name for x in reduced_path.glob('*red.fits')])
    reduced_files = np.array([reduced_path / i for i in reduced_filenames])

    #Get source names.
    source_names = get_source_names(centroided_sources)

    #Get seeing.
    seeing = np.array(centroided_sources['Seeing'])

    #Loop over multiplicative factors
    for i in range(len(multiplicative_factors)):
        fact = multiplicative_factors[i]
        print(
            'Doing variable aperture photometry for {}, multiplicative seeing factor = {}, inner annulus radius = {} pix, outer annulus radius = {} pix.'
            .format(target, fact, an_in, an_out))

        #Declare a new dataframe to hold the information for all sources for this multiplicative factor.
        columns = [
            'Filename', 'Time UT', 'Time JD UTC', 'Time BJD TDB', 'Airmass',
            'Seeing'
        ]
        for j in range(0, len(source_names)):
            columns.append(source_names[j] + ' Flux')
            columns.append(source_names[j] + ' Flux Error')
            columns.append(source_names[j] + ' Background')
            columns.append(source_names[j] + ' Interpolation Flag')

        var_df = pd.DataFrame(index=range(len(reduced_files)), columns=columns)
        output_filename = pines_path / (
            'Objects/' + short_name + '/aper_phot/' + short_name +
            '_variable_aper_phot_' + str(float(fact)) + '_seeing_factor.csv')

        #Loop over all images.
        pbar = ProgressBar()
        for j in pbar(range(len(reduced_files))):
            data = fits.open(reduced_files[j])[0].data
            #Read in some supporting information.
            log_path = pines_path / (
                'Logs/' + reduced_files[j].name.split('.')[0] + '_log.txt')
            log = pines_log_reader(log_path)
            log_ind = np.where(
                log['Filename'] == reduced_files[j].name.split('_')[0] +
                '.fits')[0][0]

            header = fits.open(reduced_files[j])[0].header
            date_obs = header['DATE-OBS']
            #Catch a case that can cause datetime strptime to crash; Mimir headers sometimes have DATE-OBS with seconds specified as 010.xx seconds, when it should be 10.xx seconds.
            if len(date_obs.split(':')[-1].split('.')[0]) == 3:
                date_obs = date_obs.split(':')[0] + ':' + date_obs.split(
                    ':')[1] + ':' + date_obs.split(':')[-1][1:]

            if date_obs.split(':')[-1] == '60.00':
                date_obs = date_obs.split(':')[0] + ':' + str(
                    int(date_obs.split(':')[1]) + 1) + ':00.00'
            #Keep a try/except clause here in case other unknown DATE-OBS formats pop up.
            try:
                date = datetime.datetime.strptime(date_obs,
                                                  '%Y-%m-%dT%H:%M:%S.%f')
            except:
                print(
                    'Header DATE-OBS format does not match the format code in strptime! Inspect/correct the DATE-OBS value.'
                )
                pdb.set_trace()

            #Get the closest date master_dark_stddev image for this exposure time.
            #We'll use this to measure read noise and dark current.
            date_str = date_obs.split('T')[0].replace('-', '')
            master_dark_stddev = master_dark_stddev_chooser(
                pines_path / ('Calibrations/Darks/Master Darks Stddev/'),
                header)

            days = date.day + hmsm_to_days(date.hour, date.minute, date.second,
                                           date.microsecond)
            jd = date_to_jd(date.year, date.month, days)
            var_df['Filename'][j] = reduced_files[j].name
            var_df['Time UT'][j] = header['DATE-OBS']
            var_df['Time JD UTC'][j] = jd
            var_df['Time BJD TDB'][j] = jd_utc_to_bjd_tdb(
                jd, header['TELRA'], header['TELDEC'])
            var_df['Airmass'][j] = header['AIRMASS']
            var_df['Seeing'][j] = log['X seeing'][log_ind]

            #If the shift quality has been flagged, skip this image.
            if log['Shift quality flag'].iloc[log_ind] == 1:
                continue

            #Get the source positions in this image.
            positions = []
            for k in range(len(source_names)):
                positions.append(
                    (centroided_sources[source_names[k] + ' Image X'][j],
                     centroided_sources[source_names[k] + ' Image Y'][j]))

            #Create an aperture centered on this position with radius (in pixels) of (seeing[j] * fact) / plate_scale.
            try:
                apertures = CircularAperture(positions,
                                             r=(seeing[j] * fact) /
                                             plate_scale)
            except:
                pdb.set_trace()

            #Create an annulus centered on this position.
            annuli = CircularAnnulus(positions, r_in=an_in, r_out=an_out)

            photometry_tbl = iraf_style_photometry(apertures, annuli,
                                                   data * gain,
                                                   master_dark_stddev * gain,
                                                   header, var_df['Seeing'][j])

            for k in range(len(photometry_tbl)):
                var_df[source_names[k] +
                       ' Flux'][j] = photometry_tbl['flux'][k]
                var_df[source_names[k] +
                       ' Flux Error'][j] = photometry_tbl['flux_error'][k]
                var_df[source_names[k] +
                       ' Background'][j] = photometry_tbl['background'][k]
                var_df[source_names[k] + ' Interpolation Flag'][j] = int(
                    photometry_tbl['interpolation_flag'][k])

        #Write output to file.
        print(
            'Saving multiplicative factor = {} variable aperture photometry output to {}.'
            .format(fact, output_filename))
        print('')
        with open(output_filename, 'w') as f:
            for j in range(len(var_df)):
                #Write in the header.
                if j == 0:
                    f.write(
                        '{:>21s}, {:>22s}, {:>17s}, {:>17s}, {:>7s}, {:>7s}, '.
                        format('Filename', 'Time UT', 'Time JD UTC',
                               'Time BJD TDB', 'Airmass', 'Seeing'))
                    for k in range(len(source_names)):
                        if k != len(source_names) - 1:
                            f.write(
                                '{:>22s}, {:>28s}, {:>28s}, {:>34s}, '.format(
                                    source_names[k] + ' Flux',
                                    source_names[k] + ' Flux Error',
                                    source_names[k] + ' Background',
                                    source_names[k] + ' Interpolation Flag'))
                        else:
                            f.write(
                                '{:>22s}, {:>28s}, {:>28s}, {:>34s}\n'.format(
                                    source_names[k] + ' Flux',
                                    source_names[k] + ' Flux Error',
                                    source_names[k] + ' Background',
                                    source_names[k] + ' Interpolation Flag'))

                #Write in Filename, Time UT, Time JD UTC, Time BJD TDB, Airmass, and Seeing values.
                format_string = '{:21s}, {:22s}, {:17.9f}, {:17.9f}, {:7.2f}, {:7.1f}, '
                #If the seeing value for this image is 'nan' (a string), convert it to a float.
                #TODO: Not sure why it's being read in as a string, fix that.
                if type(var_df['Seeing'][j]) == str:
                    var_df['Seeing'][j] = float(var_df['Seeing'][j])

                #Do a try/except clause for writeout, in case it breaks in the future.
                try:
                    f.write(
                        format_string.format(var_df['Filename'][j],
                                             var_df['Time UT'][j],
                                             var_df['Time JD UTC'][j],
                                             var_df['Time BJD TDB'][j],
                                             var_df['Airmass'][j],
                                             var_df['Seeing'][j]))
                except:
                    print(
                        'Writeout failed! Inspect quantities you are trying to write out.'
                    )
                    pdb.set_trace()

                #Write in Flux, Flux Error, and Background values for every source.
                for i in range(len(source_names)):
                    if i != len(source_names) - 1:
                        format_string = '{:22.5f}, {:28.5f}, {:28.5f}, {:34d}, '
                    else:
                        format_string = '{:22.5f}, {:28.5f}, {:28.5f}, {:34d}\n'
                    try:
                        f.write(
                            format_string.format(
                                var_df[source_names[i] + ' Flux'][j],
                                var_df[source_names[i] + ' Flux Error'][j],
                                var_df[source_names[i] + ' Background'][j],
                                var_df[source_names[i] +
                                       ' Interpolation Flag'][j]))
                    except:
                        if i != len(source_names) - 1:
                            format_string = '{:22.5f}, {:28.5f}, {:28.5f}, {:34f}, '
                        else:
                            format_string = '{:22.5f}, {:28.5f}, {:28.5f}, {:34f}\n'
                        f.write(
                            format_string.format(
                                var_df[source_names[i] + ' Flux'][j],
                                var_df[source_names[i] + ' Flux Error'][j],
                                var_df[source_names[i] + ' Background'][j],
                                var_df[source_names[i] +
                                       ' Interpolation Flag'][j]))
        print('')
    return
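The key step above is that the aperture radius tracks the per-image seeing: r = (seeing * multiplicative factor) / plate_scale, with the sky estimated in a fixed annulus between an_in and an_out. Below is a minimal, self-contained sketch of that idea using photutils on a synthetic image; it uses a simple mean-background subtraction rather than the iraf_style_photometry routine the pipeline actually calls, and every name and value in it is illustrative.

# Minimal sketch of seeing-scaled aperture photometry (illustrative only; the
# PINES pipeline itself uses iraf_style_photometry for fluxes and errors).
import numpy as np
from photutils.aperture import CircularAperture, CircularAnnulus, aperture_photometry

rng = np.random.default_rng(0)
data = rng.normal(100., 5., size=(64, 64))   # fake sky background (ADU)
data[30:35, 30:35] += 500.                   # fake star near (x, y) = (32, 32)

seeing = 2.4         # arcsec, as read from the observing log for this image
fact = 1.0           # one entry of multiplicative_factors
plate_scale = 0.579  # arcsec/pix
r_ap = (seeing * fact) / plate_scale         # seeing-scaled aperture radius in pixels

positions = [(32., 32.)]
aperture = CircularAperture(positions, r=r_ap)
annulus = CircularAnnulus(positions, r_in=12., r_out=30.)

source_sum = aperture_photometry(data, aperture)['aperture_sum'][0]
annulus_sum = aperture_photometry(data, annulus)['aperture_sum'][0]
bkg_per_pix = annulus_sum / (np.pi * (30.**2 - 12.**2))   # mean background level per pixel
flux = source_sum - bkg_per_pix * np.pi * r_ap**2         # background-subtracted source flux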
Example No. 2
def fixed_aper_phot(target,
                    centroided_sources,
                    ap_radii,
                    an_in=12.,
                    an_out=30.,
                    plots=False,
                    gain=8.21,
                    qe=0.9):
    '''Authors:
        Patrick Tamburo, Boston University, June 2020
    Purpose:
        Performs *fixed* aperture photometry on a set of reduced images given a dataframe of source positions.
        The iraf_style_photometry, compute_phot_error, aperture_stats_tbl, and calc_aperture_mmm routines are from Varun Bajaj on GitHub:
            https://github.com/spacetelescope/wfc3_photometry/blob/master/photometry_tools/photometry_with_errors.py.
    Inputs:
        target (str): The target's full 2MASS name.
        centroided_sources (pandas dataframe): List of source names, x and y positions in every image.
        ap_radii (list of floats): List of aperture radii in pixels for which aperture photometry will be performed.
        an_in (float, optional): The inner radius of the annulus used to estimate background, in pixels.
        an_out (float, optional): The outer radius of the annulus used to estimate background, in pixels.
        plots (bool, optional): Whether or not to output surface plots. Images are output to the aper_phot directory within the object directory.
        gain (float, optional): The gain of the detector in e-/ADU.
        qe (float, optional): The quantum efficiency of the detector.
    Outputs:
        Saves an aperture photometry csv to PINES_analysis_toolkit/Objects/short_name/aper_phot/ for each aperture.
    TODO:
    '''

    pines_path = pines_dir_check()
    short_name = short_name_creator(target)

    #Remove any leading/trailing spaces in the column names.
    centroided_sources.columns = centroided_sources.columns.str.strip()

    #Get list of reduced files for target.
    reduced_path = pines_path / ('Objects/' + short_name + '/reduced')
    reduced_filenames = natsort.natsorted(
        [x.name for x in reduced_path.glob('*red.fits')])
    reduced_files = np.array([reduced_path / i for i in reduced_filenames])

    #source_names = natsort.natsorted(list(set([i.replace('X','').replace('Y','').replace('Centroid Warning','').strip() for i in centroided_sources.keys() if i != 'Filename'])))
    source_names = get_source_names(centroided_sources)

    #Create output plot directories for each source.
    if plots:
        #Camera angles for surface plots
        azim_angles = np.linspace(0, 360 * 1.5, len(reduced_files)) % 360
        elev_angles = np.zeros(len(azim_angles)) + 25
        for name in source_names:
            #If the folders are already there, delete them.
            source_path = (
                pines_path /
                ('Objects/' + short_name + '/aper_phot/' + name + '/'))
            if source_path.exists():
                shutil.rmtree(source_path)
            #Create folders.
            os.mkdir(source_path)

    #Loop over all aperture radii.
    for ap in ap_radii:
        print(
            'Doing fixed aperture photometry for {}, aperture radius = {:1.1f} pix, inner annulus radius = {} pix, outer annulus radius = {} pix.'
            .format(target, ap, an_in, an_out))

        #Declare a new dataframe to hold the information for all targets for this aperture.
        columns = [
            'Filename', 'Time UT', 'Time JD UTC', 'Time BJD TDB', 'Airmass',
            'Seeing'
        ]
        for i in range(0, len(source_names)):
            columns.append(source_names[i] + ' Flux')
            columns.append(source_names[i] + ' Flux Error')
            columns.append(source_names[i] + ' Background')
            columns.append(source_names[i] + ' Interpolation Flag')

        ap_df = pd.DataFrame(index=range(len(reduced_files)), columns=columns)
        output_filename = pines_path / (
            'Objects/' + short_name + '/aper_phot/' + short_name +
            '_fixed_aper_phot_{:1.1f}_pix_radius.csv'.format(float(ap)))

        #Loop over all images.
        pbar = ProgressBar()
        for j in pbar(range(len(reduced_files))):
            data = fits.open(reduced_files[j])[0].data

            #Read in some supporting information.
            log_path = pines_path / (
                'Logs/' + reduced_files[j].name.split('.')[0] + '_log.txt')
            log = pines_log_reader(log_path)
            log_ind = np.where(
                log['Filename'] == reduced_files[j].name.split('_')[0] +
                '.fits')[0][0]

            header = fits.open(reduced_files[j])[0].header
            date_obs = header['DATE-OBS']
            #Catch a case that can cause datetime strptime to crash; Mimir headers sometimes have DATE-OBS with seconds specified as 010.xx seconds, when it should be 10.xx seconds.
            if len(date_obs.split(':')[-1].split('.')[0]) == 3:
                date_obs = date_obs.split(':')[0] + ':' + date_obs.split(
                    ':')[1] + ':' + date_obs.split(':')[-1][1:]

            if date_obs.split(':')[-1] == '60.00':
                date_obs = date_obs.split(':')[0] + ':' + str(
                    int(date_obs.split(':')[1]) + 1) + ':00.00'
            #Keep a try/except clause here in case other unknown DATE-OBS formats pop up.
            try:
                date = datetime.datetime.strptime(date_obs,
                                                  '%Y-%m-%dT%H:%M:%S.%f')
            except:
                print(
                    'Header DATE-OBS format does not match the format code in strptime! Inspect/correct the DATE-OBS value.'
                )
                pdb.set_trace()

            #Get the closest date master_dark_stddev image for this exposure time.
            #We'll use this to measure read noise and dark current.
            date_str = date_obs.split('T')[0].replace('-', '')
            master_dark_stddev = master_dark_stddev_chooser(
                pines_path / ('Calibrations/Darks/Master Darks Stddev/'),
                header)

            days = date.day + hmsm_to_days(date.hour, date.minute, date.second,
                                           date.microsecond)
            jd = date_to_jd(date.year, date.month, days)
            ap_df['Filename'][j] = reduced_files[j].name
            ap_df['Time UT'][j] = header['DATE-OBS']
            ap_df['Time JD UTC'][j] = jd
            ap_df['Time BJD TDB'][j] = jd_utc_to_bjd_tdb(
                jd, header['TELRA'], header['TELDEC']
            )  #Using the telescope ra and dec should be accurate enough for our purposes
            ap_df['Airmass'][j] = header['AIRMASS']
            ap_df['Seeing'][j] = log['X seeing'][log_ind]

            #If the shift quality has been flagged, skip this image.
            if log['Shift quality flag'].iloc[log_ind] == 1:
                continue

            #Get the source positions in this image.
            positions = []
            for i in range(len(source_names)):
                positions.append((float(centroided_sources[source_names[i] +
                                                           ' Image X'][j]),
                                  float(centroided_sources[source_names[i] +
                                                           ' Image Y'][j])))

            #Create an aperture centered on this position with radius = ap.
            try:
                apertures = CircularAperture(positions, r=ap)
            except:
                pdb.set_trace()

            #Create an annulus centered on this position.
            annuli = CircularAnnulus(positions, r_in=an_in, r_out=an_out)

            photometry_tbl = iraf_style_photometry(apertures, annuli,
                                                   data * gain,
                                                   master_dark_stddev * gain,
                                                   header, ap_df['Seeing'][j])

            for i in range(len(photometry_tbl)):
                ap_df[source_names[i] + ' Flux'][j] = photometry_tbl['flux'][i]
                ap_df[source_names[i] +
                      ' Flux Error'][j] = photometry_tbl['flux_error'][i]
                ap_df[source_names[i] +
                      ' Background'][j] = photometry_tbl['background'][i]
                ap_df[source_names[i] + ' Interpolation Flag'][j] = int(
                    photometry_tbl['interpolation_flag'][i])

            #Make surface plots.
            if plots:
                for i in range(len(photometry_tbl)):
                    x_p = photometry_tbl['X'][i]
                    y_p = photometry_tbl['Y'][i]

                    fig = plt.figure()
                    ax = fig.add_subplot(111, projection='3d')
                    xx, yy = np.meshgrid(
                        np.arange(int(x_p) - 10,
                                  int(x_p) + 10 + 1),
                        np.arange(int(y_p) - 10,
                                  int(y_p) + 10 + 1))
                    theta = np.linspace(0, 2 * np.pi, 201)
                    y_circ = ap * np.cos(theta) + y_p
                    x_circ = ap * np.sin(theta) + x_p
                    vmin = np.nanmedian(data[yy, xx])
                    vmax = vmin + 2.5 * np.nanstd(data[yy, xx])
                    ax.plot_surface(xx,
                                    yy,
                                    data[yy, xx],
                                    cmap=cm.viridis,
                                    alpha=0.8,
                                    rstride=1,
                                    cstride=1,
                                    edgecolor='k',
                                    lw=0.2,
                                    vmin=vmin,
                                    vmax=vmax)
                    current_z = ax.get_zlim()
                    ax.set_zlim(current_z[0] - 150, current_z[1])
                    current_z = ax.get_zlim()
                    cset = ax.contourf(xx,
                                       yy,
                                       data[yy, xx],
                                       zdir='z',
                                       offset=current_z[0],
                                       cmap=cm.viridis)
                    ax.plot(x_circ,
                            y_circ,
                            np.zeros(len(x_circ)) + current_z[0],
                            color='r',
                            lw=2,
                            zorder=100)
                    ax.set_xlabel('X')
                    ax.set_ylabel('Y')
                    ax.set_zlabel('Counts')

                    ax.set_title('SURFACE DIAGNOSTIC PLOT, Ap. = ' +
                                 str(ap) + '\n' + source_names[i] + ', ' +
                                 reduced_files[j].name + ' (image ' +
                                 str(j + 1) + ' of ' +
                                 str(len(reduced_files)) + ')')
                    ax.view_init(elev=elev_angles[j], azim=azim_angles[j])
                    plot_output_path = (
                        pines_path /
                        ('Objects/' + short_name + '/aper_phot/' +
                         source_names[i] + '/' + str(j).zfill(4) + '.jpg'))
                    plt.tight_layout()
                    plt.savefig(plot_output_path)
                    plt.close()

        #Write output to file.
        print('Saving ap = {:1.1f} aperture photometry output to {}.'.format(
            ap, output_filename))
        print('')
        with open(output_filename, 'w') as f:
            for j in range(len(ap_df)):
                #Write in the header.
                if j == 0:
                    f.write(
                        '{:>21s}, {:>22s}, {:>17s}, {:>17s}, {:>7s}, {:>7s}, '.
                        format('Filename', 'Time UT', 'Time JD UTC',
                               'Time BJD TDB', 'Airmass', 'Seeing'))
                    for i in range(len(source_names)):
                        if i != len(source_names) - 1:
                            f.write(
                                '{:>22s}, {:>28s}, {:>28s}, {:>34s}, '.format(
                                    source_names[i] + ' Flux',
                                    source_names[i] + ' Flux Error',
                                    source_names[i] + ' Background',
                                    source_names[i] + ' Interpolation Flag'))
                        else:
                            f.write(
                                '{:>22s}, {:>28s}, {:>28s}, {:>34s}\n'.format(
                                    source_names[i] + ' Flux',
                                    source_names[i] + ' Flux Error',
                                    source_names[i] + ' Background',
                                    source_names[i] + ' Interpolation Flag'))

                #Write in Filename, Time UT, Time JD UTC, Time BJD TDB, Airmass, and Seeing values.
                format_string = '{:21s}, {:22s}, {:17.9f}, {:17.9f}, {:7.2f}, {:7.1f}, '
                #If the seeing value for this image is 'nan' (a string), convert it to a float.
                #TODO: Not sure why it's being read in as a string, fix that.
                if type(ap_df['Seeing'][j]) == str:
                    ap_df['Seeing'][j] = float(ap_df['Seeing'][j])

                #Do a try/except clause for writeout, in case it breaks in the future.
                try:
                    f.write(
                        format_string.format(ap_df['Filename'][j],
                                             ap_df['Time UT'][j],
                                             ap_df['Time JD UTC'][j],
                                             ap_df['Time BJD TDB'][j],
                                             ap_df['Airmass'][j],
                                             ap_df['Seeing'][j]))
                except:
                    print(
                        'Writeout failed! Inspect quantities you are trying to write out.'
                    )
                    pdb.set_trace()

                #Write in Flux, Flux Error, and Background values for every source.
                for i in range(len(source_names)):
                    if i != len(source_names) - 1:
                        format_string = '{:22.5f}, {:28.5f}, {:28.5f}, {:34d}, '
                    else:
                        format_string = '{:22.5f}, {:28.5f}, {:28.5f}, {:34d}\n'
                    try:
                        f.write(
                            format_string.format(
                                ap_df[source_names[i] + ' Flux'][j],
                                ap_df[source_names[i] + ' Flux Error'][j],
                                ap_df[source_names[i] + ' Background'][j],
                                ap_df[source_names[i] +
                                      ' Interpolation Flag'][j]))
                    except:
                        if i != len(source_names) - 1:
                            format_string = '{:22.5f}, {:28.5f}, {:28.5f}, {:34f}, '
                        else:
                            format_string = '{:22.5f}, {:28.5f}, {:28.5f}, {:34f}\n'
                        f.write(
                            format_string.format(
                                ap_df[source_names[i] + ' Flux'][j],
                                ap_df[source_names[i] + ' Flux Error'][j],
                                ap_df[source_names[i] + ' Background'][j],
                                ap_df[source_names[i] +
                                      ' Interpolation Flag'][j]))

    print('')
    return
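Per the docstring, fixed_aper_phot needs the target's full 2MASS name, the centroider output dataframe, and a list of aperture radii in pixels. A hedged usage sketch follows; the target name, the csv filename, and the existence of the PINES directory tree are placeholders and assumptions, not taken from the source.

# Illustrative call only; assumes fixed_aper_phot (defined above) is in scope and
# that a PINES-style centroid csv exists. All names here are placeholders.
import pandas as pd

target = '2MASS J00000000+0000000'                          # placeholder 2MASS name
centroided_sources = pd.read_csv('example_centroids.csv')   # placeholder centroider output
fixed_aper_phot(target, centroided_sources, ap_radii=[4., 5., 6.],
                an_in=12., an_out=30., plots=False)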
Example No. 3
def corr_all_sources_plot(target):
    print('Generating corrected flux plots for all sources...\n')
    pines_path = pines_dir_check()
    short_name = short_name_creator(target)
    analysis_path = pines_path / ('Objects/' + short_name + '/analysis/')
    photometry_path = pines_path / ('Objects/' + short_name + '/aper_phot/')

    #Grab the data for the best aperture.
    if os.path.exists(analysis_path / ('optimal_aperture.txt')):
        with open(analysis_path / ('optimal_aperture.txt'), 'r') as f:
            best_ap = f.readlines()[0].split(':  ')[1].split('\n')[0]
            phot_type = best_ap.split('_')[1]
            if phot_type == 'fixed':
                s = 'r'
            elif phot_type == 'variable':
                s = 'f'
    else:
        raise RuntimeError(
            'No optimal_aperture.txt file for {}.\nUsing first photometry file in {}.'
            .format(target, photometry_path))

    filename = short_name.replace(
        ' ', '') + '_' + phot_type + '_aper_phot_' + s + '=' + best_ap.split(
            '_')[0] + '_nightly_weighted_lc.csv'
    best_phot_path = analysis_path / ('aper_phot_analysis/' + best_ap + '/')
    output_path = best_phot_path / ('corr_ref_plots/')
    if not os.path.exists(output_path):
        os.mkdir(output_path)

    data = pines_log_reader(best_phot_path / filename)
    ref_names = get_source_names(data)[1:]
    num_refs = len(ref_names)

    times = np.array(data['Time BJD TDB'])
    night_inds = night_splitter(times)
    num_nights = len(night_inds)

    cmap = plt.get_cmap('viridis')
    for i in range(num_refs + 1):
        fig, ax = plt.subplots(nrows=1,
                               ncols=num_nights,
                               figsize=(17, 5),
                               sharey=True)
        plt.subplots_adjust(left=0.07, wspace=0.05, top=0.92, bottom=0.17)

        if i == 0:
            color = cmap(0)
            flux = np.array(data[short_name + ' Corrected Flux'],
                            dtype='float64')
            flux_err = np.array(data[short_name + ' Corrected Flux Error'],
                                dtype='float64')
            title = short_name
            output_name = short_name + '_corrected_flux.png'

        else:
            color = cmap(95)
            ref_name = ref_names[i - 1]
            flux = np.array(data[ref_name + ' Corrected Flux'],
                            dtype='float64')
            flux_err = np.array(data[ref_name + ' Corrected Flux Error'],
                                dtype='float64')
            if i < 10:
                num = '0' + str(i)
            else:
                num = str(i)
            output_name = 'reference_' + num + '_corrected_flux.png'

        for j in range(num_nights):
            if i != 0:
                weight = np.array(data[ref_name +
                                       ' ALC Weight'])[night_inds[j]][0]
                title = ref_name.replace(
                    'erence', '.') + ', weight = {:1.3f}'.format(weight)

            if j == 0:
                ax[j].set_ylabel('Normalized Flux', fontsize=20)

            inds = night_inds[j]

            block_inds = block_splitter(times[inds])
            binned_time = []
            binned_flux = []
            binned_err = []
            for k in range(len(block_inds)):
                binned_time.append(np.nanmean(times[inds][block_inds[k]]))
                binned_flux.append(np.nanmean(flux[inds][block_inds[k]]))
                binned_err.append(
                    np.nanstd(flux[inds][block_inds[k]]) /
                    np.sqrt(len(block_inds[k])))

            ax[j].plot(times[inds],
                       flux[inds],
                       color=color,
                       linestyle='',
                       marker='.',
                       alpha=0.25)
            ax[j].errorbar(binned_time,
                           binned_flux,
                           binned_err,
                           color=color,
                           linestyle='',
                           marker='o',
                           ms=10,
                           mfc='none',
                           mew=2)
            ax[j].set_xlabel('Time (BJD$_{TDB}$)', fontsize=20)
            ax[j].tick_params(labelsize=16)
            ax[j].axhline(1, color='k', alpha=0.7, lw=1, zorder=0)
            ax[j].grid(alpha=0.2)
            ax[j].set_title(title, fontsize=20, color=color)
            ax[j].set_ylim(0.9, 1.1)

        plt.savefig(output_path / output_name, dpi=300)
        plt.close()
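Each panel above bins a night's light curve into blocks, plotting the block mean with the standard error of the mean as the error bar. The snippet below is a stand-in for that binning step; block_inds is hand-built to mimic what the toolkit's block_splitter helper is assumed to return (lists of array indices, one list per observing block), and the numbers are made up.

# Stand-in for the per-block binning used in the corrected-flux plots.
import numpy as np

times = np.array([0.00, 0.01, 0.02, 0.50, 0.51, 0.52])    # fake BJD offsets
flux = np.array([1.01, 0.99, 1.00, 0.98, 1.02, 1.00])     # fake normalized fluxes
block_inds = [np.array([0, 1, 2]), np.array([3, 4, 5])]   # assumed block grouping

binned_time = [np.nanmean(times[b]) for b in block_inds]
binned_flux = [np.nanmean(flux[b]) for b in block_inds]
binned_err = [np.nanstd(flux[b]) / np.sqrt(len(b)) for b in block_inds]  # SEM per block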
Example No. 4
def seeing_plot(target, centroided_sources):
    pines_path = pines_dir_check()
    short_name = short_name_creator(target)
    #Get plot style parameters.
    title_size, axis_title_size, axis_ticks_font_size, legend_font_size = plot_style(
    )

    #Get list of source names in the centroid output.
    source_names = get_source_names(centroided_sources)
    centroided_sources.columns = centroided_sources.keys().str.strip()

    #Read in times and seeing values.
    times_full = np.array(centroided_sources['Time (JD UTC)'])
    seeing = np.array(centroided_sources['Seeing'])

    #Split up times by nights.
    night_inds = night_splitter(times_full)
    num_nights = len(night_inds)
    times_nights = [times_full[night_inds[i]] for i in range(num_nights)]
    standard_x = standard_x_range(times_nights)

    fig, ax = plt.subplots(nrows=1,
                           ncols=num_nights,
                           figsize=(17, 5),
                           sharey=True)
    for i in range(num_nights):
        if i == 0:
            ax[i].set_ylabel('Seeing (")', fontsize=axis_title_size)

        inds = night_inds[i]
        ax[i].plot(times_nights[i],
                   seeing[inds],
                   marker='.',
                   linestyle='',
                   alpha=0.3,
                   label='Raw seeing')
        ax[i].tick_params(labelsize=axis_ticks_font_size)
        ax[i].set_xlabel('Time (JD UTC)', fontsize=axis_title_size)
        ax[i].grid(alpha=0.2)
        ax[i].set_xlim(
            np.mean(times_nights[i]) - standard_x / 2,
            np.mean(times_nights[i]) + standard_x / 2)

        #Bin the seeing values in blocks.
        block_inds = block_splitter(times_nights[i])
        block_x = np.zeros(len(block_inds))
        block_y = np.zeros(len(block_inds))
        block_y_err = np.zeros(len(block_inds))
        for j in range(len(block_inds)):
            block_x[j] = np.mean(times_nights[i][block_inds[j]])
            block_y[j] = np.mean(seeing[inds][block_inds[j]])
            block_y_err[j] = np.std(seeing[inds][block_inds[j]]) / np.sqrt(
                len(seeing[inds][block_inds[j]]))

        ax[i].errorbar(block_x,
                       block_y,
                       block_y_err,
                       marker='o',
                       linestyle='',
                       color='tab:blue',
                       ms=8,
                       mfc='none',
                       mew=2,
                       label='Bin seeing')

        #Interpolate each night's seeing.
        fit_times = np.linspace(block_x[0], block_x[-1], 1000)
        interp = CubicSpline(block_x, block_y)
        interp_fit = interp(fit_times)
        ax[i].plot(fit_times,
                   interp_fit,
                   color='r',
                   lw=2,
                   zorder=0,
                   alpha=0.7,
                   label='CS Interp.')

    ax[i].legend(bbox_to_anchor=(1.01, 0.5), fontsize=legend_font_size)
    plt.suptitle(short_name + ' Seeing Measurements', fontsize=title_size)
    plt.subplots_adjust(left=0.07, wspace=0.05, top=0.92, bottom=0.17)

    output_filename = pines_path / ('Objects/' + short_name +
                                    '/analysis/diagnostic_plots/' +
                                    short_name + '_seeing.png')
    plt.savefig(output_filename, dpi=300)
    return
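The red curve in the seeing plot is a cubic-spline interpolation of the binned seeing values. A short standalone sketch of that step is below; the times and seeing values are made up.

# Sketch of the cubic-spline interpolation applied to the binned seeing values.
import numpy as np
from scipy.interpolate import CubicSpline

block_x = np.array([2459000.60, 2459000.65, 2459000.70, 2459000.75])  # binned times (fake JD)
block_y = np.array([2.3, 2.5, 2.2, 2.4])                              # binned seeing in arcsec (fake)

fit_times = np.linspace(block_x[0], block_x[-1], 1000)
interp_fit = CubicSpline(block_x, block_y)(fit_times)  # smooth curve to overplot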
Example No. 5
def absolute_image_position_plot(target, centroided_sources):
    pines_path = pines_dir_check()
    short_name = short_name_creator(target)

    #Get plot style parameters.
    title_size, axis_title_size, axis_ticks_font_size, legend_font_size = plot_style(
    )

    #Get list of source names in the centroid output.
    source_names = get_source_names(centroided_sources)
    centroided_sources.columns = centroided_sources.keys().str.strip()

    #Get times from the centroid output and split them by night.
    times_full = np.array(centroided_sources['Time (JD UTC)'])
    night_inds = night_splitter(times_full)
    num_nights = len(night_inds)
    times_nights = [times_full[night_inds[i]] for i in range(num_nights)]
    standard_x = standard_x_range(times_nights)

    source = source_names[0]
    fig, ax = plt.subplots(nrows=2,
                           ncols=num_nights,
                           figsize=(17, 9),
                           sharex='col',
                           sharey='row')
    plt.subplots_adjust(left=0.07,
                        hspace=0.05,
                        wspace=0.05,
                        top=0.92,
                        bottom=0.17)
    for j in range(num_nights):
        if j == 0:
            ax[0, j].set_ylabel('Image X', fontsize=axis_title_size)
            ax[1, j].set_ylabel('Image Y', fontsize=axis_title_size)

        inds = night_inds[j]
        times = times_nights[j]
        absolute_x = np.array(centroided_sources[source + ' Image X'][inds],
                              dtype='float')
        absolute_y = np.array(centroided_sources[source + ' Image Y'][inds],
                              dtype='float')
        ax[0, j].plot(times,
                      absolute_x,
                      marker='.',
                      linestyle='',
                      alpha=0.3,
                      color='tab:blue',
                      label='Raw x')
        ax[1, j].plot(times,
                      absolute_y,
                      marker='.',
                      linestyle='',
                      alpha=0.3,
                      color='tab:orange',
                      label='Raw y')

        #Bin the centroid positions in blocks.
        block_inds = block_splitter(times)
        block_times = np.zeros(len(block_inds))
        block_x = np.zeros(len(block_inds))
        block_x_err = np.zeros(len(block_inds))
        block_y = np.zeros(len(block_inds))
        block_y_err = np.zeros(len(block_inds))
        for k in range(len(block_inds)):
            try:
                block_times[k] = np.nanmean(times[block_inds[k]])
                block_x[k] = np.nanmean(absolute_x[block_inds[k]])
                block_x_err[k] = np.nanstd(
                    absolute_x[block_inds[k]]) / np.sqrt(
                        len(absolute_x[block_inds[k]]))
                block_y[k] = np.nanmean(absolute_y[block_inds[k]])
                block_y_err[k] = np.nanstd(
                    absolute_y[block_inds[k]]) / np.sqrt(
                        len(absolute_y[block_inds[k]]))
            except:
                pdb.set_trace()

        ax[0, j].errorbar(block_times,
                          block_x,
                          block_x_err,
                          marker='o',
                          linestyle='',
                          color='tab:blue',
                          ms=8,
                          mfc='none',
                          mew=2,
                          label='Bin x')
        ax[1, j].errorbar(block_times,
                          block_y,
                          block_y_err,
                          marker='o',
                          linestyle='',
                          color='tab:orange',
                          ms=8,
                          mfc='none',
                          mew=2,
                          label='Bin y')

        ax[0, j].tick_params(labelsize=axis_ticks_font_size)
        ax[1, j].tick_params(labelsize=axis_ticks_font_size)

        ax[0, j].grid(alpha=0.2)
        ax[1, j].grid(alpha=0.2)
        ax[1, j].set_xlabel('Time (JD UTC)', fontsize=axis_title_size)

        if j == num_nights - 1:
            ax[0, j].legend(bbox_to_anchor=(1.29, 1),
                            fontsize=legend_font_size)
            ax[1, j].legend(bbox_to_anchor=(1.29, 1),
                            fontsize=legend_font_size)

    plt.suptitle(source + ' Image Centroid Positions', fontsize=title_size)
    #plt.subplots_adjust(left=0.07, hspace=0.05, wspace=0.05, top=0.92, bottom=0.08, right=0.85)

    #ax.legend(bbox_to_anchor=(1.01, 1), fontsize=14)

    #Apply the standard x-range to every night's panels (not just the last one).
    for j in range(num_nights):
        ax[0, j].set_xlim(
            np.mean(times_nights[j]) - standard_x / 2,
            np.mean(times_nights[j]) + standard_x / 2)
        ax[1, j].set_xlim(
            np.mean(times_nights[j]) - standard_x / 2,
            np.mean(times_nights[j]) + standard_x / 2)

    output_filename = pines_path / ('Objects/' + short_name +
                                    '/analysis/diagnostic_plots/' + source +
                                    '_image_positions.png')
    plt.savefig(output_filename, dpi=300)

    return
Example No. 6
def relative_cutout_position_plot(target, centroided_sources):
    pines_path = pines_dir_check()
    short_name = short_name_creator(target)

    #Get plot style parameters.
    title_size, axis_title_size, axis_ticks_font_size, legend_font_size = plot_style(
    )

    #Get list of source names in the centroid output.
    source_names = get_source_names(centroided_sources)
    centroided_sources.columns = centroided_sources.keys().str.strip()

    #Get times from the centroid output and split them by night.
    times_full = np.array(centroided_sources['Time (JD UTC)'])
    night_inds = night_splitter(times_full)
    num_nights = len(night_inds)
    times_nights = [times_full[night_inds[i]] for i in range(num_nights)]
    standard_x = standard_x_range(times_nights)

    #Get the box size (I don't like that this is being determined by using the mean of the data...output it from centroider?)
    box_w = int(
        np.round(
            2 * np.nanmean(
                np.array(centroided_sources['Reference 1 Cutout X'],
                         dtype='float')), 0))

    fig, ax = plt.subplots(nrows=2,
                           ncols=num_nights,
                           figsize=(17, 9),
                           sharey=True)
    plt.subplots_adjust(left=0.07,
                        hspace=0.05,
                        wspace=0.05,
                        top=0.92,
                        bottom=0.17)
    markers = ['+', 'x', '*', 'X']
    for j in range(num_nights):
        inds = night_inds[j]
        if j == 0:
            ax[0, j].set_ylabel('Cutout X Position', fontsize=axis_title_size)
            ax[1, j].set_ylabel('Cutout Y Position', fontsize=axis_title_size)

        for i in range(len(source_names)):
            cutout_x = np.array(centroided_sources[source_names[i] +
                                                   ' Cutout X'][inds],
                                dtype='float')
            cutout_y = np.array(centroided_sources[source_names[i] +
                                                   ' Cutout Y'][inds],
                                dtype='float')

            if i == 0:
                marker = 'o'
                label = 'Target'
            else:
                marker = markers[(i - 1) % len(markers)]
                label = 'Ref. ' + str(i)
            ax[0, j].plot(times_nights[j],
                          cutout_x,
                          marker=marker,
                          label=label,
                          linestyle='')
            ax[1, j].plot(times_nights[j],
                          cutout_y,
                          marker=marker,
                          linestyle='')

        ax[0, j].tick_params(labelsize=axis_ticks_font_size)
        ax[0, j].set_xticklabels([])
        ax[0, j].axhline(box_w / 2,
                         zorder=0,
                         color='r',
                         label='Center pix.',
                         lw=2)
        ax[0, j].set_xlim(
            np.mean(times_nights[j]) - standard_x / 2,
            np.mean(times_nights[j]) + standard_x / 2)
        ax[0, j].grid(alpha=0.2)
        ax[1, j].tick_params(labelsize=axis_ticks_font_size)
        ax[1, j].axhline(box_w / 2,
                         zorder=0,
                         color='r',
                         label='Center pix.',
                         lw=2)
        ax[1, j].set_xlim(
            np.mean(times_nights[j]) - standard_x / 2,
            np.mean(times_nights[j]) + standard_x / 2)
        ax[1, j].set_xlabel('Time (JD UTC)', fontsize=axis_title_size)
        ax[1, j].grid(alpha=0.2)

        if j == num_nights - 1:
            ax[0, j].legend(bbox_to_anchor=(1.01, 1.0),
                            fontsize=legend_font_size)

    plt.suptitle(short_name + ' Cutout Centroid Positions',
                 fontsize=title_size)

    output_filename = pines_path / ('Objects/' + short_name +
                                    '/analysis/diagnostic_plots/' +
                                    short_name + '_cutout_positions.png')
    plt.savefig(output_filename, dpi=300)

    return
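The three diagnostic plotting routines in Examples No. 4 to 6 share the same interface: the target's full 2MASS name and the centroider output dataframe. A hedged usage sketch follows; the target name and csv filename are placeholders, and the functions are assumed to be in scope as defined above.

# Illustrative calls only; assumes the PINES directory tree and a centroid csv exist.
import pandas as pd

target = '2MASS J00000000+0000000'                          # placeholder 2MASS name
centroided_sources = pd.read_csv('example_centroids.csv')   # placeholder centroider output
seeing_plot(target, centroided_sources)
absolute_image_position_plot(target, centroided_sources)
relative_cutout_position_plot(target, centroided_sources)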