Code Example #1
def get_optical_psf(expid, aos=False):
    # set up objects. make sure I get the right mesh
    digestor = Digestor()
    PSF_Evaluator = Moment_Evaluator()
    mesh_name = "Science-20121120s1-v20i2_All"
    PSF_Interpolator = Mesh_Interpolator(mesh_name=mesh_name, directory=mesh_directory)

    # This will be our main wavefront
    WF = DECAM_Model_Wavefront(PSF_Interpolator=PSF_Interpolator)

    # load up data
    expid_path = "{0:08d}/{1:08d}".format(expid - expid % 1000, expid)
    data_directory = base_directory + expid_path
    files = sorted(glob(data_directory + "/*{0}".format("_selpsfcat.fits")))

    # load up all the data from an exposure. pandas cannot hold the VIGNET
    # array column, so those columns are not loaded by default.
    # note that you CAN load them by passing "do_exclude=True", which then
    # returns a second variable containing the vignettes plus the aperture
    # fluxes and errors

    # data has the array columns removed, which is what the later processing needs,
    # but the vignettes and other per-star info are still required for later steps,
    # so the full HDULists are kept around as well.
    # TODO: optimize this; each FITS file is effectively read twice (once by the
    # digestor and once via fits.open), but overhauling the digestor to load it once would be a larger change.
    data = digestor.digest_fits(files[0], do_exclude=False)
    metaHDUList = [fits.open(files[0])]  # list of HDULists #META

    for file in files[1:]:
        tmpData = digestor.digest_fits(file, do_exclude=False)
        data = data.append(tmpData)
        metaHDUList.append(fits.open(file))

    fit_i = jamierod_results.loc[expid]

    misalignment = {
        "z04d": fit_i["z04d"], "z04x": fit_i["z04x"], "z04y": fit_i["z04y"],
        "z05d": fit_i["z05d"], "z05x": fit_i["z05x"], "z05y": fit_i["z05y"],
        "z06d": fit_i["z06d"], "z06x": fit_i["z06x"], "z06y": fit_i["z06y"],
        "z07d": fit_i["z07d"], "z07x": fit_i["z07x"], "z07y": fit_i["z07y"],
        "z08d": fit_i["z08d"], "z08x": fit_i["z08x"], "z08y": fit_i["z08y"],
        "z09d": fit_i["z09d"], "z09x": fit_i["z09x"], "z09y": fit_i["z09y"],
        "z10d": fit_i["z10d"], "z10x": fit_i["z10x"], "z10y": fit_i["z10y"],
        "rzero": fit_i["rzero"],
    }

    # rzero needs to be adjusted so that the drawn optics PSF comes out smaller
    # than the observed stars: subtract a fixed offset from 1/rzero
    x = 0.3 / 0.14
    misalignment["rzero"] = 1 / (1 / misalignment["rzero"] - x)

    data["rzero"] = misalignment["rzero"]
    optPSFStamps, model = WF.draw_psf(data, misalignment=misalignment)

    # optPSFStamps is a numpy data cube of the drawn optics PSFs
    # metaHDUList is the list of HDULists, which still carries the star vignettes
    return optPSFStamps, metaHDUList
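
A minimal usage sketch for get_optical_psf, assuming the module-level globals it relies on (mesh_directory, base_directory, jamierod_results) are set up as in the source file; the expid and output filename are illustrative only:

import numpy as np

# draw the optics-only PSF stamps for one exposure and save the cube to disk
opt_stamps, hdulists = get_optical_psf(232698)
np.save("optpsf_00232698.npy", opt_stamps)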
Code Example #2
def stamp_collector(expid, Nmax=0, rzero=None, snr_max=400, snr_min=90):
    results = np.load(jamierod_dir + '/params/params_{0:08d}.npy'.format(expid)).item()
    keys = sorted([key for key in results.keys() if ('error' not in key and 'fix' not in key and 'limit' not in key)])
    misalignment = {key: results[key] for key in keys}
    params = {'expid': expid,
              'misalignment': misalignment,
              # jamie's params:
              'mesh_directory': '/nfs/slac/g/ki/ki22/roodman/ComboMeshesv20',
              'mesh_name': 'Science-20121120s1-v20i2_All', }

    params_default = {'snr_key': 'SNR_WIN',
                      'snr_max': snr_max,
                      'snr_min': snr_min,
                      'data_name': '_selpsfcat.fits',
                      'num_bins': 4
                      }
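    # precedence: these generic defaults are overridden first by the per-exposure
    # values from param_default_kils, and then by the explicit params above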
    params_default.update(param_default_kils(expid))
    params_default.update(params)
    params = params_default

    fits_files = glob(params['data_directory'] + '/*{0}'.format(params['data_name']))
    dig = Digestor()
    model, data_stamps = dig.digest_fits(fits_files[0], do_exclude=True)
    data_stamps = data_stamps['VIGNET']
    for fits_file in fits_files[1:]:
        model_i, data_stamps_i = dig.digest_fits(fits_file, do_exclude=True)
        model = model.append(model_i, ignore_index=True)
        data_stamps = np.vstack((data_stamps, data_stamps_i['VIGNET']))
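    # data_stamps rows stay aligned with the model DataFrame rows, so the SNR cut
    # and the random subselection below can index both consistently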

    if params['snr_key'] in model.columns:
        conds = ((model[params['snr_key']] > params['snr_min']) &
                 (model[params['snr_key']] < params['snr_max']))
        data_stamps = data_stamps[conds.values]
        model = model[conds]

    if Nmax > 0:
        # cut out Nmax objects
        rows = np.random.choice(len(model),
                                Nmax, replace=False)
        model = model.iloc[rows]
        data_stamps = data_stamps[rows]

    # set data_stamps to be float64 like the model stamps
    data_stamps = data_stamps.astype(np.float64)

    if rzero is None:
        model['rzero'] = results['rzero']
    else:
        model['rzero'] = rzero

    # get the PSF_Interpolator
    PSF_Interpolator = Mesh_Interpolator(mesh_name=params['mesh_name'],
                                         directory=params['mesh_directory'])

    WF = DECAM_Model_Wavefront(PSF_Interpolator=PSF_Interpolator,
                               num_bins=params['num_bins'],
                               model=model)

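    # plane() just repackages the fitted parameters: it keeps the wavefront
    # misalignment terms (the zNN d/x/y coefficients plus dz, dx, dy, xt, yt)
    # and drops the ellipticity/jitter terms, which are not applied here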
    def plane(rzero,
              z04d, z04x, z04y,
              z05d, z05x, z05y,
              z06d, z06x, z06y,
              z07d, z07x, z07y,
              z08d, z08x, z08y,
              z09d, z09x, z09y,
              z10d, z10x, z10y,
              z11d, z11x, z11y,
              dz, dx, dy, xt, yt,
              e0, e1, e2,
              delta1, delta2,
              zeta1, zeta2, **kwargs):
        wf_misalignment = {'z04d': z04d, 'z04x': z04x, 'z04y': z04y,
                           'z05d': z05d, 'z05x': z05x, 'z05y': z05y,
                           'z06d': z06d, 'z06x': z06x, 'z06y': z06y,
                           'z07d': z07d, 'z07x': z07x, 'z07y': z07y,
                           'z08d': z08d, 'z08x': z08x, 'z08y': z08y,
                           'z09d': z09d, 'z09x': z09x, 'z09y': z09y,
                           'z10d': z10d, 'z10x': z10x, 'z10y': z10y,
                           'z11d': z11d, 'z11x': z11x, 'z11y': z11y,
                           'dz': dz, 'dx': dx, 'dy': dy, 'xt': xt, 'yt': yt}
        return wf_misalignment

    wf_misalignment = plane(**misalignment)
    # get evaluated PSFs
    stamps, eval_data = WF.draw_psf(WF.data, force_interpolation=True,
                                    misalignment=wf_misalignment)
    # apply shear to the stamps directly (currently disabled; see the
    # commented-out block below)

    evaluated_psfs = WF.evaluate_psf(stamps, misalignment=wf_misalignment)
    # make sure evaluated_psfs has the right index
    evaluated_psfs.index = eval_data.index
    # combine the results from PSF_Evaluator with your input data
    combined_df = evaluated_psfs.combine_first(eval_data)
    # TODO: Deal with constant jitter terms! The results are currently off, and the
    # offset appears to be related to the overall magnitude of the terms...

    ## # add dc factors
    ## # combined_df['e0'] += results['e0']
    ## # combined_df['e1'] += results['e1']
    ## # combined_df['e2'] += results['e2']
    ## # combined_df['delta1'] += results['delta1']
    ## # combined_df['delta2'] += results['delta2']
    ## # combined_df['zeta1'] += results['zeta1']
    ## # combined_df['zeta2'] += results['zeta2']

    ## e1 = results['e1'] * 5  # empirical correction?
    ## e2 = results['e2'] * 5
    ## Mx = combined_df['Mx'].values
    ## My = combined_df['My'].values
    ## esq = e1 * e1 + e2 * e2

    ## if esq < 1e-8:
    ##     A = 1 + esq / 8 + e1 * (0.5 + esq * 3 / 16)
    ##     B = 1 + esq / 8 - e1 * (0.5 + esq * 3 / 16)
    ##     C = e2 * (0.5 + esq * 3 / 16)
    ## else:
    ##     temp = np.sqrt(1 - esq)
    ##     cc = np.sqrt(0.5 * (1 + 1 / temp))
    ##     temp = cc * (1 - temp) / esq
    ##     C = temp * e2
    ##     temp *= e1
    ##     A = cc + temp
    ##     B = cc - temp
    ## matrix = np.array([[A, C], [C, B]])
    ## matrix /= A * B - C * C
    ## offsetsx = -Mx * (A + C - 1)
    ## offsetsy = -My * (C + B - 1)

    ## # apply transformation
    ## stamps_transformed = []
    ## for stamp, offsetx, offsety in zip(stamps, offsetsx, offsetsy):
    ##     stamps_transformed_i = affine_transform(stamp, matrix, (offsetx, offsety))
    ##     stamps_transformed.append(stamps_transformed_i)
    ## stamps_transformed = np.array(stamps_transformed)
    ## evaluated_psfs = WF.evaluate_psf(stamps_transformed, misalignment=wf_misalignment)
    ## # make sure evaluated_psfs has the right index
    ## evaluated_psfs.index = eval_data.index
    ## # combine the results from PSF_Evaluator with your input data
    ## combined_df_trans = evaluated_psfs.combine_first(eval_data)

    ## e1 = combined_df['e1']
    ## e2 = combined_df['e2']
    ## e1_t = combined_df_trans['e1']
    ## e2_t = combined_df_trans['e2']
    ## print((e1 - e1_t) / (results['e1']))
    ## print((e2 - e2_t) / (results['e2']))

    return combined_df, stamps, data_stamps
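
A hedged usage sketch for stamp_collector; the expid is illustrative, and the e0/e1/e2 columns are the second moments produced by the wavefront's Moment_Evaluator:

combined_df, model_stamps, data_stamps = stamp_collector(232698, Nmax=200)
print(combined_df[['e0', 'e1', 'e2']].describe())
print(model_stamps.shape, data_stamps.shape)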
Code Example #3
def do_run(expid, aos=False):
    if aos:
        plot_dir = out_dir + '/plots_aos/{0:08d}'.format(expid)
    else:
        plot_dir = out_dir + '/plots/{0:08d}'.format(expid)
    if not path.exists(plot_dir):
        makedirs(plot_dir)
    import matplotlib
    matplotlib.use('Agg')
    import matplotlib.pyplot as plt
    from WavefrontPSF.psf_interpolator import Mesh_Interpolator
    from WavefrontPSF.wavefront import Wavefront
    from WavefrontPSF.digestor import Digestor
    from WavefrontPSF.psf_evaluator import Moment_Evaluator
    from WavefrontPSF.donutengine import DECAM_Model_Wavefront


    medsubkeys = ['e0', 'e1', 'e2', 'E1norm', 'E2norm', 'delta1', 'delta2', 'zeta1', 'zeta2']

    rows = ['e0', 'e0_medsub',
            'e1', 'e1_medsub',
            'e2', 'e2_medsub',
            'E1norm', 'E1norm_medsub',
            'E2norm', 'E2norm_medsub',
            'delta1', 'delta1_medsub',
            'delta2', 'delta2_medsub',
            'zeta1', 'zeta1_medsub',
            'zeta2', 'zeta2_medsub']

    # set up objects. make sure I get the right mesh
    digestor = Digestor()
    PSF_Evaluator = Moment_Evaluator()
    mesh_name = 'Science-20121120s1-v20i2_All'
    PSF_Interpolator = Mesh_Interpolator(mesh_name=mesh_name, directory=mesh_directory)

    # This will be our main wavefront
    WF = DECAM_Model_Wavefront(PSF_Interpolator=PSF_Interpolator)
    # let's create a Wavefront object for the data
    WF_data = Wavefront(PSF_Interpolator=None, PSF_Evaluator=PSF_Evaluator)

    # premake coordinate list
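    # for each binning level, lay down the bin-center (x, y) positions of every
    # science CCD (keys containing 'F' are the focus/alignment chips and are
    # skipped), giving a regular field grid for evaluating the model wavefront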
    coords = []
    for num_bins in xrange(6):
        # create coordinates
        x = []
        y = []
        if num_bins >= 2:
            num_bins_make = num_bins + (num_bins-1)
        else:
            num_bins_make = num_bins
        for key in WF.decaminfo.infoDict.keys():
            if 'F' in key:
                continue
            xi, yi = WF.decaminfo.getBounds(key, num_bins_make)
            xi = np.array(xi)
            xi = 0.5 * (xi[1:] + xi[:-1])
            yi = np.array(yi)
            yi = 0.5 * (yi[1:] + yi[:-1])
            xi, yi = np.meshgrid(xi, yi)
            xi = xi.flatten()
            yi = yi.flatten()
            x += list(xi)
            y += list(yi)
        x = np.array(x)
        y = np.array(y)
        coords_i = pd.DataFrame({'x': x, 'y': y})
        coords.append(coords_i)


    # load up data
    expid_path = '{0:08d}/{1:08d}'.format(expid - expid % 1000, expid)
    data_directory = base_directory + expid_path

    # load up all the data from an exposure. pandas cannot hold the VIGNET
    # array column, so those columns are not loaded by default.
    # note that you CAN load them by passing "do_exclude=True", which then
    # returns a second variable containing the vignettes plus the aperture
    # fluxes and errors
    model = digestor.digest_directory(
                data_directory,
                file_type='_selpsfcat.fits')
    # cut the old data appropriately
    model = model[(model['SNR_WIN'] > 90) &
                  (model['SNR_WIN'] < 400)]

    # create normalized moments
    model['E1norm'] = model['e1'] / model['e0']
    model['E2norm'] = model['e2'] / model['e0']

    # do med sub and add to WF_data
    for key in medsubkeys:
        model['{0}_medsub'.format(key)] = model[key] - np.median(model[key])
    WF_data.data = model

    # set the number of bins from total number of stars
    if len(model) < 200:
        num_bins = 0
        num_bins_mis = 0
        num_bins_whisker = 0
    elif len(model) < 1000:
        num_bins = 1
        # num_bins_mis = 0
        num_bins_mis = 1
        num_bins_whisker = 1
    elif len(model) < 10000:
        num_bins = 2
        # num_bins_mis = 1
        num_bins_mis = 2
        num_bins_whisker = 2
    else:
        num_bins = 3
        num_bins_mis = 2
        num_bins_whisker = 2


    # generate optics model from fit data
    fit_i = jamierod_results.loc[expid]
    # TODO: Add ALL fit_i params that were used?
    # TODO: get rzero?

    if aos:
        misalignment = {'z04d': fit_i['aos_z04d'],
                        'z05d': fit_i['aos_z05d'], 'z05x': fit_i['aos_z05x'], 'z05y': fit_i['aos_z05y'],
                        'z06d': fit_i['aos_z06d'], 'z06x': fit_i['aos_z06x'], 'z06y': fit_i['aos_z06y'],
                        'z07d': fit_i['aos_z07d'], 'z07x': fit_i['aos_z07x'], 'z07y': fit_i['aos_z07y'],
                        'z08d': fit_i['aos_z08d'], 'z08x': fit_i['aos_z08x'], 'z08y': fit_i['aos_z08y'],
                        'z09d': fit_i['aos_z09d'],
                        'z10d': fit_i['aos_z10d'],
                        'rzero': fit_i['aos_rzero']}
    else:
        misalignment = {'z04d': fit_i['z04d'], 'z04x': fit_i['z04x'], 'z04y': fit_i['z04y'],
                        'z05d': fit_i['z05d'], 'z05x': fit_i['z05x'], 'z05y': fit_i['z05y'],
                        'z06d': fit_i['z06d'], 'z06x': fit_i['z06x'], 'z06y': fit_i['z06y'],
                        'z07d': fit_i['z07d'], 'z07x': fit_i['z07x'], 'z07y': fit_i['z07y'],
                        'z08d': fit_i['z08d'], 'z08x': fit_i['z08x'], 'z08y': fit_i['z08y'],
                        'z09d': fit_i['z09d'], 'z09x': fit_i['z09x'], 'z09y': fit_i['z09y'],
                        'z10d': fit_i['z10d'], 'z10x': fit_i['z10x'], 'z10y': fit_i['z10y'],
                        'rzero': fit_i['rzero']}

    # create model fit from donuts
    WF.data = coords[num_bins].copy()
    WF.data['rzero'] = misalignment['rzero']
    WF.data = WF(WF.data, misalignment=misalignment)
    # add dc factors
    WF.data['e0'] += fit_i['e0']
    WF.data['e1'] += fit_i['e1']
    WF.data['e2'] += fit_i['e2']
    WF.data['delta1'] += fit_i['delta1']
    WF.data['delta2'] += fit_i['delta2']
    WF.data['zeta1'] += fit_i['zeta1']
    WF.data['zeta2'] += fit_i['zeta2']

    # create normalized moments
    WF.data['E1norm'] = WF.data['e1'] / WF.data['e0']
    WF.data['E2norm'] = WF.data['e2'] / WF.data['e0']

    # update WF medsubs appropriately
    for key in medsubkeys:
        WF.data['{0}_medsub'.format(key)] = WF.data[key] - np.median(WF.data[key])

    # add a couple diagnostic things
    WF.data['num_bins'] = num_bins
    WF.data['expid'] = expid

    # put in data into field
    WF.reduce(num_bins=num_bins)
    # create another reduced field for setting the color levels
    field_model, _, _ = WF.reduce_data_to_field(
        WF.data, xkey='x', ykey='y', reducer=np.median,
        num_bins=num_bins_mis)

    # update WF_data fields
    WF_data.reduce(num_bins=num_bins)
    # create another reduced field for setting the color levels
    field_data, _, _ = WF_data.reduce_data_to_field(
        WF_data.data, xkey='x', ykey='y', reducer=np.median,
        num_bins=num_bins_mis)

    # put in residual of data minus model for field
    for row in rows:
        WF.field[row + '_data'] = WF_data.field[row]
        WF.field[row + '_residual'] = WF_data.field[row] - WF.field[row]
        field_model[row + '_residual'] = field_data[row] - field_model[row]

    # create plots
    for row in rows:
        ncols = 3
        nrows = 1
        fig, axs = plt.subplots(nrows=nrows, ncols=ncols, figsize=(6*ncols, 5*nrows))
        fig.suptitle('Expid: {0}, {1}'.format(expid, row))
        vmin = np.nanmin((field_model[row].min(),
                          field_data[row].min()))
        vmax = np.nanmax((field_data[row].max(),
                          field_model[row].max()))
        vmindiff = field_model[row + '_residual'].min()
        vmaxdiff = field_model[row + '_residual'].max()
        ax = axs[0]
        ax.set_title('Data')
        WF_data.plot_field(row, fig=fig, ax=ax, a=vmin, b=vmax)
        ax = axs[1]
        ax.set_title('Model')
        WF.plot_field(row, fig=fig, ax=ax, a=vmin, b=vmax)
        ax = axs[2]
        ax.set_title('Residual')
        WF.plot_field(row + '_residual', fig=fig, ax=ax, a=vmindiff, b=vmaxdiff)
        fig.savefig(plot_dir + '/{0}_{1}.png'.format(row, expid))
        fig.savefig(plot_dir + '/{0}_{1}.pdf'.format(row, expid))

        # save each panel individually as well
        fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(12, 10))
        ax.set_title('Expid: {0}, {1}'.format(expid, row))
        vmin = np.nanmin((field_model[row].min(),
                          field_data[row].min()))
        vmax = np.nanmax((field_data[row].max(),
                          field_model[row].max()))
        vmindiff = field_model[row + '_residual'].min()
        vmaxdiff = field_model[row + '_residual'].max()
        WF_data.plot_field(row, fig=fig, ax=ax, a=vmin, b=vmax)
        fig.savefig(plot_dir + '/{0}_{1}_data.png'.format(row, expid))
        fig.savefig(plot_dir + '/{0}_{1}_data.pdf'.format(row, expid))

        fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(12, 10))
        ax.set_title('Expid: {0}, {1}'.format(expid, row))
        WF.plot_field(row, fig=fig, ax=ax, a=vmin, b=vmax)
        fig.savefig(plot_dir + '/{0}_{1}_model.png'.format(row, expid))
        fig.savefig(plot_dir + '/{0}_{1}_model.pdf'.format(row, expid))

        fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(12, 10))
        ax.set_title('Expid: {0}, {1}'.format(expid, row))
        WF.plot_field(row + '_residual', fig=fig, ax=ax, a=vmindiff, b=vmaxdiff)
        fig.savefig(plot_dir + '/{0}_{1}_residual.png'.format(row, expid))
        fig.savefig(plot_dir + '/{0}_{1}_residual.pdf'.format(row, expid))
        plt.close('all')

    # save whisker plots too
    # do w, e, and normalized e.
    # for each: data and model separate, data plus model, residual

    ###########################################################################
    # w
    ###########################################################################
    num_spokes = 2
    scalefactor = 0.2
    scalefactor_residual = 0.5
    quiverdict = {'width': 2}
    # w
    # data
    fig, ax = WF.plot_whisker(WF.field, num_bins=num_bins_whisker,
                              normalized_ellipticity=False,
                              fig=None, ax=None,
                              scalefactor=scalefactor,
                              num_spokes=num_spokes,
                              color='black',
                              e1key='e1_data', e2key='e2_data',
                              do_var=False, legend=True, quiverdict=quiverdict)
    fig.savefig(plot_dir + '/whisker_w_{0}_data.png'.format(expid))
    fig.savefig(plot_dir + '/whisker_w_{0}_data.pdf'.format(expid))

    # w
    # model
    fig, ax = WF.plot_whisker(WF.field, num_bins=num_bins_whisker,
                              normalized_ellipticity=False,
                              fig=None, ax=None,
                              scalefactor=scalefactor,
                              num_spokes=num_spokes,
                              color='black',
                              e1key='e1', e2key='e2',
                              do_var=False, legend=True, quiverdict=quiverdict)
    fig.savefig(plot_dir + '/whisker_w_{0}_model.png'.format(expid))
    fig.savefig(plot_dir + '/whisker_w_{0}_model.pdf'.format(expid))

    # w
    # data + model
    fig, ax = WF.plot_whisker(WF.field, num_bins=num_bins_whisker,
                              normalized_ellipticity=False,
                              fig=None, ax=None,
                              scalefactor=scalefactor,
                              num_spokes=num_spokes,
                              color='black',
                              e1key='e1_data', e2key='e2_data',
                              do_var=False, legend=False, quiverdict=quiverdict)
    fig, ax = WF.plot_whisker(WF.field, num_bins=num_bins_whisker,
                              normalized_ellipticity=False,
                              fig=fig, ax=ax,
                              scalefactor=scalefactor, num_spokes=num_spokes,
                              color='red',
                              e1key='e1', e2key='e2',
                              do_var=False, legend=True, quiverdict=quiverdict)
    fig.savefig(plot_dir + '/whisker_w_{0}_blackdata_redmodel.png'.format(expid))
    fig.savefig(plot_dir + '/whisker_w_{0}_blackdata_redmodel.pdf'.format(expid))

    # w
    # residual
    fig, ax = WF.plot_whisker(WF.field, num_bins=num_bins_whisker,
                              normalized_ellipticity=False,
                              fig=None, ax=None,
                              scalefactor=scalefactor_residual,
                              num_spokes=num_spokes,
                              color='black',
                              e1key='e1_residual', e2key='e2_residual',
                              do_var=False, legend=True, quiverdict=quiverdict)
    fig.savefig(plot_dir + '/whisker_w_{0}_residual.png'.format(expid))
    fig.savefig(plot_dir + '/whisker_w_{0}_residual.pdf'.format(expid))
    ###########################################################################


    ###########################################################################
    # e
    ###########################################################################
    num_spokes = 1
    scalefactor = 1
    scalefactor_residual = 5
    quiverdict = {'width': 2}
    # e
    # data
    fig, ax = WF.plot_whisker(WF.field, num_bins=num_bins_whisker,
                              normalized_ellipticity=False,
                              fig=None, ax=None,
                              scalefactor=scalefactor,
                              num_spokes=num_spokes,
                              color='black',
                              e1key='e1_data', e2key='e2_data',
                              do_var=False, legend=True, quiverdict=quiverdict)
    fig.savefig(plot_dir + '/whisker_e_{0}_data.png'.format(expid))
    fig.savefig(plot_dir + '/whisker_e_{0}_data.pdf'.format(expid))

    # e
    # model
    fig, ax = WF.plot_whisker(WF.field, num_bins=num_bins_whisker,
                              normalized_ellipticity=False,
                              fig=None, ax=None,
                              scalefactor=scalefactor,
                              num_spokes=num_spokes,
                              color='black',
                              e1key='e1', e2key='e2',
                              do_var=False, legend=True, quiverdict=quiverdict)
    fig.savefig(plot_dir + '/whisker_e_{0}_model.png'.format(expid))
    fig.savefig(plot_dir + '/whisker_e_{0}_model.pdf'.format(expid))

    # e
    # data + model
    fig, ax = WF.plot_whisker(WF.field, num_bins=num_bins_whisker,
                              normalized_ellipticity=False,
                              fig=None, ax=None,
                              scalefactor=scalefactor,
                              num_spokes=num_spokes,
                              color='black',
                              e1key='e1_data', e2key='e2_data',
                              do_var=False, legend=False, quiverdict=quiverdict)
    fig, ax = WF.plot_whisker(WF.field, num_bins=num_bins_whisker,
                              normalized_ellipticity=False,
                              fig=fig, ax=ax,
                              scalefactor=scalefactor, num_spokes=num_spokes,
                              color='red',
                              e1key='e1', e2key='e2',
                              do_var=False, legend=True, quiverdict=quiverdict)
    fig.savefig(plot_dir + '/whisker_e_{0}_blackdata_redmodel.png'.format(expid))
    fig.savefig(plot_dir + '/whisker_e_{0}_blackdata_redmodel.pdf'.format(expid))

    # e
    # residual
    fig, ax = WF.plot_whisker(WF.field, num_bins=num_bins_whisker,
                              normalized_ellipticity=False,
                              fig=None, ax=None,
                              scalefactor=scalefactor_residual,
                              num_spokes=num_spokes,
                              color='black',
                              e1key='e1_residual', e2key='e2_residual',
                              do_var=False, legend=True, quiverdict=quiverdict)
    fig.savefig(plot_dir + '/whisker_e_{0}_residual.png'.format(expid))
    fig.savefig(plot_dir + '/whisker_e_{0}_residual.pdf'.format(expid))
    ###########################################################################

    ###########################################################################
    # E
    ###########################################################################
    num_spokes = 1
    scalefactor = 1
    scalefactor_residual = 5
    quiverdict = {'width': 2}
    # E
    # data
    fig, ax = WF.plot_whisker(WF.field, num_bins=num_bins_whisker,
                              normalized_ellipticity=True,
                              fig=None, ax=None,
                              scalefactor=scalefactor,
                              num_spokes=num_spokes,
                              color='black',
                              e1key='E1norm_data', e2key='E2norm_data',
                              do_var=False, legend=True, quiverdict=quiverdict)
    fig.savefig(plot_dir + '/whisker_Enorm_{0}_data.png'.format(expid))
    fig.savefig(plot_dir + '/whisker_Enorm_{0}_data.pdf'.format(expid))

    # E
    # model
    fig, ax = WF.plot_whisker(WF.field, num_bins=num_bins_whisker,
                              normalized_ellipticity=True,
                              fig=None, ax=None,
                              scalefactor=scalefactor,
                              num_spokes=num_spokes,
                              color='black',
                              e1key='E1norm', e2key='E2norm',
                              do_var=False, legend=True, quiverdict=quiverdict)
    fig.savefig(plot_dir + '/whisker_Enorm_{0}_model.png'.format(expid))
    fig.savefig(plot_dir + '/whisker_Enorm_{0}_model.pdf'.format(expid))

    # E
    # data + model
    fig, ax = WF.plot_whisker(WF.field, num_bins=num_bins_whisker,
                              normalized_ellipticity=True,
                              fig=None, ax=None,
                              scalefactor=scalefactor,
                              num_spokes=num_spokes,
                              color='black',
                              e1key='E1norm_data', e2key='E2norm_data',
                              do_var=False, legend=False, quiverdict=quiverdict)
    fig, ax = WF.plot_whisker(WF.field, num_bins=num_bins_whisker,
                              normalized_ellipticity=True,
                              fig=fig, ax=ax,
                              scalefactor=scalefactor, num_spokes=num_spokes,
                              color='red',
                              e1key='E1norm', e2key='E2norm',
                              do_var=False, legend=True, quiverdict=quiverdict)
    fig.savefig(plot_dir + '/whisker_Enorm_{0}_blackdata_redmodel.png'.format(expid))
    fig.savefig(plot_dir + '/whisker_Enorm_{0}_blackdata_redmodel.pdf'.format(expid))

    # E
    # residual
    fig, ax = WF.plot_whisker(WF.field, num_bins=num_bins_whisker,
                              normalized_ellipticity=True,
                              fig=None, ax=None,
                              scalefactor=scalefactor_residual,
                              num_spokes=num_spokes,
                              color='black',
                              e1key='E1norm_residual', e2key='E2norm_residual',
                              do_var=False, legend=True, quiverdict=quiverdict)
    fig.savefig(plot_dir + '/whisker_Enorm_{0}_residual.png'.format(expid))
    fig.savefig(plot_dir + '/whisker_Enorm_{0}_residual.pdf'.format(expid))
    ###########################################################################


    # make plot of N
    fig, ax = plt.subplots(figsize=(6,5))
    WF_data.plot_field('N', fig=fig, ax=ax)
    fig.savefig(plot_dir + '/N_{0}.png'.format(expid))
    plt.close('all')

    # save summary statistics of stars to npy file for later collection
    if aos:
        field_jamie = pd.read_pickle(pkl_dir + '/{0:08d}.pkl'.format(expid))
        for row in rows:
            WF.field[row + '_jamie'] = field_jamie[row]
            WF.field[row + '_jamie_residual'] = WF.field[row + '_data'] - \
                                                WF.field[row + '_jamie']
        WF.field.to_pickle(pkl_dir + '/aos_{0:08d}.pkl'.format(expid))
    else:
        WF.field.to_pickle(pkl_dir + '/{0:08d}.pkl'.format(expid))
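
A minimal driver sketch, assuming the module-level globals used above (out_dir, pkl_dir, base_directory, mesh_directory, jamierod_results) are defined as in the source file; the expid is illustrative:

do_run(232698)            # plots and pickle from the standard fit
do_run(232698, aos=True)  # plots and pickle from the AOS fit, including the comparison columns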
Code Example #4
File: FAfits.py  Project: cpadavis/WavefrontPSF
        command += ['python', code_path,
                    '--job', 'run',
                    '--expid', str(expid)]
        call(command)

elif args['job'] == 'run':

    from WavefrontPSF.psf_interpolator import Mesh_Interpolator
    from WavefrontPSF.wavefront import Wavefront
    from WavefrontPSF.digestor import Digestor
    from WavefrontPSF.psf_evaluator import Moment_Evaluator
    from WavefrontPSF.analytic_interpolator import DECAM_Analytic_Wavefront, r0_guess
    from WavefrontPSF.donutengine import DECAM_Model_Wavefront, generate_random_coordinates
    from WavefrontPSF.donutengine import correct_dz, correct_dz_theta

    digestor = Digestor()
    base_directory = '/nfs/slac/g/ki/ki18/des/cpd/psfex_catalogs/SVA1_FINALCUT/psfcat/'
    PSF_Evaluator = Moment_Evaluator()
    mesh_directory = '/nfs/slac/g/ki/ki18/cpd/Projects/WavefrontPSF/meshes/Science-20140212s2-v1i2'
    mesh_name = 'Science-20140212s2-v1i2_All'
    PSF_Interpolator = Mesh_Interpolator(mesh_name=mesh_name, directory=mesh_directory)

    # This will be our main wavefront
    WF = DECAM_Model_Wavefront(PSF_Interpolator=PSF_Interpolator)
    # let's create a Wavefront object for the data
    WF_data = Wavefront(PSF_Interpolator=None, PSF_Evaluator=PSF_Evaluator)

    # premake coordinate list
    coords = []
    for num_bins in xrange(6):
        # create coordinates
Code Example #5
File: TreeRings.py  Project: cpadavis/WavefrontPSF
expids = np.array(expids)


out_dir_base = '/nfs/slac/g/ki/ki18/des/cpd/psfex_catalogs/SVA1_FINALCUT/treerings_15_09_30/'
out_dir_base = '/nfs/slac/g/ki/ki18/des/cpd/psfex_catalogs/SVA1_FINALCUT/treerings_15_10_20/'
if not path.exists(out_dir_base):
    makedirs(out_dir_base)
if not path.exists(out_dir_base + 'logs'):
    makedirs(out_dir_base + 'logs')
if not path.exists(out_dir_base + 'individual'):
    makedirs(out_dir_base + 'individual')
if not path.exists(out_dir_base + 'individual_stamps'):
    makedirs(out_dir_base + 'individual_stamps')

PSF_Evaluator = Moment_Evaluator()
digestor = Digestor()
WF = Wavefront(PSF_Interpolator=None, PSF_Evaluator=PSF_Evaluator)

# what columns will we extract
model_keys = ['XWIN_IMAGE', 'YWIN_IMAGE', 'ext', 'x', 'y', 'expnum', 'FLAGS', 'SNR_WIN']
model_keys += ['MAG_APER_6']
moment_keys = ['Mx', 'My', 'flux', 'e0', 'e1', 'e2', 'e0prime', 'delta1', 'delta2', 'zeta1', 'zeta2', 'fwhm', 'a4']
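# model_keys are taken straight from the star catalogs; moment_keys are the
# quantities the Moment_Evaluator measures from the postage stamps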


def clean_stamps(stamps, sexhdu):
    # find background
    skycol = 'NONE'
    for coli, col in enumerate(sexhdu):
        if 'SKYBRITE' in col:
            skycol = col
    if skycol == 'NONE':
Code Example #6
def make_wavefront(expid, output_dir, optpsf=None, atmpsf=None, starminusopt=None, model=None):
    """
    Make a wavefront, useful for diagnostic plots
    :param expid: the id of the exposure being studied
    :param output_dir: the directory to store the output and temp files
    :param optpsf: (Optional) the optical psf in a datacube
    :param atmpsf: (Optional) the atmopsheric psf in a datacube
    :param starminusopt: (Optional) the residual when the optpsf is deconvolved
    :param model: (model) the convolution of optpsf and atmpsf
    :return: None
    """
    # these give the deconvolved stars
    # Wish I knew how to loop this
    if optpsf is None:
        deconvopt_loc = output_dir + '{0:08d}/{0}_opt.npy'.format(expid)
        optpsf = np.load(deconvopt_loc)

    if atmpsf is None:
        deconvatm_loc = output_dir + '{0:08d}/{0}_atm.npy'.format(expid)
        atmpsf = np.load(deconvatm_loc)

    if starminusopt is None:
        deconvstarsminusopt_loc = output_dir + '{0:08d}/{0}_stars_minus_opt.npy'.format(expid)
        # set the shape to be right
        starminusopt = np.load(deconvstarsminusopt_loc)[:, 15:47, 15:47]

    if model is None:
        deconvmodel_loc = output_dir + '{0:08d}/{0}_stars.npy'.format(expid)
        model = np.load(deconvmodel_loc)

    mesh_directory = '/nfs/slac/g/ki/ki22/roodman/ComboMeshesv20'
    # directory containing the input data files
    base_directory = '/nfs/slac/g/ki/ki18/des/cpd/psfex_catalogs/SVA1_FINALCUT/psfcat/'

    # set up objects. make sure I get the right mesh
    digestor = Digestor()
    mesh_name = 'Science-20121120s1-v20i2_All'
    PSF_Interpolator = Mesh_Interpolator(mesh_name=mesh_name, directory=mesh_directory)

    # This will be our main wavefront
    WF = DECAM_Model_Wavefront(PSF_Interpolator=PSF_Interpolator)

    # load up data
    expid_path = '/{0:08d}/{1:08d}'.format(expid - expid % 1000, expid)
    data_directory = base_directory + expid_path
    files = sorted(glob(data_directory + '/*{0}'.format('_selpsfcat.fits')))

    data_df = digestor.digest_fits(files[0], do_exclude=False)
    # Can't use the new one above, because we're calling on different data.
    meta_hdulist = [fits.open(files[0])]

    for file in files[1:]:
        tmpData = digestor.digest_fits(file, do_exclude=False)
        data_df = data_df.append(tmpData)
        meta_hdulist.append(fits.open(file))

    hdu_idxs = get_hdu_idxs(meta_hdulist)
    NObj = hdu_idxs[-1]
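    # hdu_idxs appears to hold the cumulative object boundaries across the
    # HDULists, so its last entry is the total number of stars (NObj)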

    # make the psfex models for both portions
    psf_files = sorted(glob(data_directory + '/*{0}'.format('psfcat_validation_subtracted.psf')))

    psfexpsf = load_psfex(psf_files, NObj, meta_hdulist)

    stars = get_vignettes(NObj, meta_hdulist, hdu_idxs)

    stars_df = evaluate_stamps_and_combine_with_data(WF, stars, data_df)
    psfexpsf_df = evaluate_stamps_and_combine_with_data(WF, psfexpsf, data_df)

    atmpsf_df = evaluate_stamps_and_combine_with_data(WF, atmpsf, data_df)
    optpsf_df = evaluate_stamps_and_combine_with_data(WF, optpsf, data_df)
    starminusopt_df = evaluate_stamps_and_combine_with_data(WF, starminusopt, data_df)
    model_df = evaluate_stamps_and_combine_with_data(WF, model, data_df)

    combinekeys = ['e0', 'e1', 'e2', 'E1norm', 'E2norm', 'delta1', 'delta2', 'zeta1', 'zeta2']
    # make a big df with all the above columns combined
    df = stars_df.copy()
    names = ['model', 'psfex', 'starminusopt', 'opt', 'atm']
    df_list = [model_df, psfexpsf_df, starminusopt_df, optpsf_df, atmpsf_df]

    # names += ['opt_load']
    # df_list += [optpsf_load_df]

    # names += ['atm_make']
    # df_list += [atmpsf_make_df]

    for key in combinekeys:
        # add the other medsub
        if key == 'E1norm':
            df[key] = df['e1'] / df['e0']
        elif key == 'E2norm':
            df[key] = df['e2'] / df['e0']
        df['{0}_medsub'.format(key)] = df[key] - df[key].median()
        for name, psf in zip(names, df_list):
            if key == 'E1norm':
                psf[key] = psf['e1'] / psf['e0']
            elif key == 'E2norm':
                psf[key] = psf['e2'] / psf['e0']
            df['{0}_{1}'.format(name, key)] = psf[key]
            # add medsub
            df['{0}_{1}_medsub'.format(name, key)] = df['{0}_{1}'.format(name, key)] - df[
                '{0}_{1}'.format(name, key)].median()
            df['{0}_{1}_diff'.format(name, key)] = df['{0}_{1}'.format(name, key)] - df[key]
            df['{0}_{1}_medsub_diff'.format(name, key)] = df['{0}_{1}_medsub'.format(name, key)]\
                                                          - df['{0}_medsub'.format(key)]

    np.save(output_dir + '{0:08d}/{0}_psfexalone.npy'.format(expid), psfexpsf)
    np.save(output_dir + '{0:08d}/{0}_data.npy'.format(expid), stars)

    df.to_hdf(output_dir + '{0:08d}/results.h5'.format(expid),
              key='table_{0:08d}'.format(expid),
              mode='a', format='table', append=False)
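
A read-back sketch for the results file written above (pandas assumed imported as pd, with expid and output_dir as passed to make_wavefront):

df = pd.read_hdf(output_dir + '{0:08d}/results.h5'.format(expid),
                 key='table_{0:08d}'.format(expid))
print(df[['e0', 'model_e0', 'model_e0_diff']].head())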