Example #1
def do_residuals(f, verbose=True, number_plot=0):
    c = f.replace('.piff', '.yaml')
    piff_name = f.split('/')[-1].split('.piff')[0]
    label = 'fitted'
    config = piff.read_config(c)
    if verbose:
        print('psf')
    psf = piff.read(f)

    # initialize output
    directory = '/'.join(c.split('/')[:-1])
    config['output']['dir'] = directory
    output = piff.Output.process(config['output'])

    # select nstars based on stars piece of config
    stat = output.stats_list[3]  # TODO: this bit is hardcoded
    if number_plot != 0:
        stat.number_plot = number_plot
    stat.indices = np.random.choice(len(psf.stars),
                                    stat.number_plot,
                                    replace=False)

    # pass params into output
    stat.stars = []
    for i, index in enumerate(stat.indices):
        star = psf.stars[index]
        stat.stars.append(star)
    # load their images
    if verbose:
        print('loading star images')
    stat.stars = load_star_images(stat.stars, config)

    if verbose:
        print('loading model')
    stat.models = []
    for star in stat.stars:

        # draw model star
        params = star.fit.params
        prof = psf.getProfile(params)
        model = psf.drawProfile(star, prof, params, copy_image=True)
        stat.models.append(model)

    if verbose:
        print('writing')

    file_name = '{0}/{1}_{2}_{3}'.format(directory, label, piff_name,
                                         os.path.split(stat.file_name)[1])
    stat.write(file_name=file_name)
    return file_name, stat.stars, stat.models
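
# A minimal usage sketch for do_residuals (not from the source). The .piff path and
# number_plot value are placeholders, and piff, numpy, os, and load_star_images are
# assumed to be importable as in the surrounding project.
residual_file, stars, models = do_residuals(
    '/path/to/expid/psf.piff',   # hypothetical fitted PSF; its .yaml config must sit beside it
    verbose=True,
    number_plot=10,              # override the number of stars drawn by the stats object
)
print(residual_file)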
Example #2
def call_collect(run_config_path, bsub, check, call, skip_rho, skip_oned,
                 skip_twod, skip_params):

    run_config = piff.read_config(run_config_path)
    directory = run_config[
        'directory']  # directory such that you can go glob(directory/*/psf.piff) and find piff files

    # load up psf names
    # rho stats and atmo params
    piff_names = []
    psf_file_paths = []
    do_optatmos = []
    for psf_file in run_config['psf_optics_files']:
        config = piff.read_config(psf_file)
        base_piff_name = config['output']['file_name'].split('.piff')[0]
        interps = config['interps']
        # stick in the interps
        for interp_key in interps.keys():
            config_interp = interps[interp_key]
            piff_name = '{0}_{1}'.format(base_piff_name, interp_key)
            piff_names.append(piff_name)
            psf_file_paths.append(psf_file)
            do_optatmos.append(True)

            if 'GPInterp' in config_interp['type']:
                piff_name = '{0}_{1}_meanified'.format(base_piff_name,
                                                       interp_key)
                piff_names.append(piff_name)
                psf_file_paths.append(psf_file)
                do_optatmos.append(True)

        piff_names.append('{0}_noatmo'.format(base_piff_name))
        psf_file_paths.append(psf_file)
        do_optatmos.append(False)
    for psf_file in run_config['psf_other_files']:
        config = piff.read_config(psf_file)
        piff_name = config['output']['file_name'].split('.piff')[0]
        psf_file_paths.append(psf_file)
        piff_names.append(piff_name)
        do_optatmos.append(False)

    # go through the names and call
    for psf_file_path, piff_name, do_optatmo in zip(psf_file_paths, piff_names,
                                                    do_optatmos):

        time = 600
        memory = 3

        # code to make job_directory and name
        out_directory = '{0}/plots/{1}'.format(directory, piff_name)
        if not os.path.exists(out_directory):
            os.makedirs(out_directory)
        job_name = 'collect_{0}'.format(piff_name)

        # create command
        command = [
            'python',
            'collect.py',
            '--directory',
            directory,
            '--out_directory',
            out_directory,
            '--piff_name',
            piff_name,
        ]
        if do_optatmo and not skip_params:
            command += ['--do_optatmo']
        if skip_rho:
            command += ['--skip_rho']
        if skip_oned:
            command += ['--skip_oned']
        if skip_twod:
            command += ['--skip_twod']
        if not skip_twod:
            # the twod bit is lonnnnng
            time = 2000

        # call command
        skip_iter = False
        if check and bsub and call:
            jobcheck = subprocess.check_output(['bjobs', '-wJ', job_name]).decode()
            if job_name in jobcheck:
                print('skipping {0} because it is already running'.format(
                    job_name))
                skip_iter = True
        if skip_iter:
            continue

        if bsub:
            logfile = '{0}/bsub_collect_{1}.log'.format(
                out_directory, piff_name)
            # if an old logfile exists, remove it so bsub starts fresh
            if os.path.exists(logfile) and call:
                os.remove(logfile)

            bsub_command = ['bsub', '-J', job_name, '-o', logfile]
            if time > 0:
                bsub_command += ['-W', str(time)]
            if memory > 1:
                bsub_command += [
                    '-n',
                    str(memory),
                ]
                # '-R', '"span[ptile={0}]"'.format(memory)]

            command = bsub_command + command

        print(' '.join(command))
        if call:
            print(job_name)
            subprocess.call(command)
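
# A hedged usage sketch for call_collect (not from the source). 'run_config.yaml' is a
# placeholder path; with call=False the bsub commands are only printed, not submitted.
call_collect(
    'run_config.yaml',
    bsub=True, check=True, call=False,
    skip_rho=False, skip_oned=False, skip_twod=True,   # the two-d stats are the slow part
    skip_params=False,
)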
Example #3
import copy
# import os
from itertools import product
from sklearn.linear_model import LinearRegression
import galsim
import piff

from piff.optatmo_psf import poly, poly_full

test_mode = False
out_dir = '/nfs/slac/g/ki/ki18/cpd/Projects/piff_des/analytics'

config = piff.read_config(
    '/u/ki/cpd/ki19/piff_test/y3/mar_mesh_configs_fix_sph/00233466/Science-20121120s1-v20i2_limited/2018.03.29/config.yaml'
)

# create an OptAtmoPSF for drawing
psf_fit = piff.read(
    '/u/ki/cpd/ki19/piff_test/y3/mar_mesh_configs_fix_sph/00233466/Science-20121120s1-v20i2_limited/2018.03.29/psf.piff'
)

# load up some stars
# do import modules so we can import pixmappy wcs
galsim.config.ImportModules(config)
# only load one ccd
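
# The excerpt ends here. A minimal continuation sketch (not from the source), assuming
# that narrowing the input image glob to a single chip is enough to load one CCD; the
# path below is a placeholder.
config['input']['image_file_name'] = '/path/to/expid/D00233466_r_c01_*.fits.fz'  # hypothetical
stars, wcs, pointing = piff.Input.process(config['input'])
print('loaded {0} stars from one CCD'.format(len(stars)))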
Example #4
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument(action='store',
                        dest='run_config_path',
                        default='config.yaml',
                        help='Run config to load up and do')
    parser.add_argument('--index',
                        action='store',
                        dest='index',
                        type=int,
                        default=-1)
    options = parser.parse_args()
    kwargs = vars(options)

    # load config
    run_config = piff.read_config(kwargs['run_config_path'])
    psf_files = run_config['psf_optics_files'] + run_config['psf_other_files']
    directory = run_config['directory']  # where we save files

    files = []
    for psf_file in psf_files:
        piff_name = psf_file.split('.yaml')[0].split('/')[-1]
        files_i = glob.glob('{0}/*/{1}.piff'.format(directory, piff_name))
        files += files_i
    # # create list of files via glob
    # vkfiles = glob.glob('/u/ki/cpd/ki19/piff_test/y3pipeline/00*/psf_optatmovk.piff')
    # kofiles = glob.glob('/u/ki/cpd/ki19/piff_test/y3pipeline/00*/psf_optatmo.piff')
    # files = vkfiles + kofiles

    index = kwargs['index'] - 1
    if index == -2:
Example #5
def zernike(directory, config_file_name, piff_name, do_interp):
    config = piff.read_config('{0}/{1}.yaml'.format(directory, config_file_name))
    logger = piff.setup_logger(verbose=3)

    # load base optics psf
    out_path = '{0}/{1}.piff'.format(directory, piff_name)
    psf = piff.read(out_path)

    # load images for train stars
    psf.stars = load_star_images(psf.stars, config, logger=logger)
    stars = psf.stars

    params = psf.getParamsList(stars)

    # if do_interp, draw star models and fit radial profile
    if do_interp:
        # draw model stars
        model_stars = psf.drawStarList(stars)

        # fit radial piece
        radial_agg = collect_radial_profiles(stars, model_stars)
        interpfunc = interp1d(radial_agg['r'].values, radial_agg['dI'].values)
        radial_agg.to_hdf('{0}/radial_{1}_{2}.h5'.format(directory, 'train', piff_name), 'data')
        fig = Figure(figsize = (10, 5))
        ax = fig.add_subplot(1, 1, 1)
        ax.plot(radial_agg['r'], radial_agg['dI'])
        ax.set_xlabel('r')
        ax.set_ylabel('Residual radial image')
        canvas = FigureCanvasAgg(fig)
        # Do this after we've set the canvas to use Agg to avoid warning.
        fig.set_tight_layout(True)
        plot_path = '{0}/radial_{1}_{2}.pdf'.format(directory, 'train', piff_name)
        logger.info('saving plot to {0}'.format(plot_path))
        canvas.print_figure(plot_path, dpi=100)

        # do the fits of the stars
        logger.info('Fitting {0} stars'.format(len(stars)))
        model_fitted_stars = []
        for star_i, star in zip(range(len(stars)), stars):
            if (star_i + 1) % int(max([len(stars) * 0.05, 1])) == 0:
                logger.info('doing {0} out of {1}:'.format(star_i + 1, len(stars)))
            try:
                model_fitted_star, results = fit_with_radial(psf, star, interpfunc, vary_shape=True, vary_optics=True)
                model_fitted_stars.append(model_fitted_star)
                if (star_i + 1) % int(max([len(stars) * 0.05, 1])) == 0:
                    logger.debug(lmfit.fit_report(results, min_correl=0.5))
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception as e:
                logger.warning('{0}'.format(str(e)))
                logger.warning('Warning! Failed to fit atmosphere model for star {0}. Ignoring star in atmosphere fit'.format(star_i))
        stars = model_fitted_stars
        logger.info('Drawing final model stars')
        drawn_stars = [drawProfile(psf, star, psf.getProfile(star.fit.params), star.fit.params, use_fit=True, copy_image=True, interpfunc=interpfunc) for star in stars]
    else:
        # else just do regular zernike fit
        logger.info('Fitting {0} stars'.format(len(stars)))
        model_fitted_stars = []
        for star_i, star in zip(range(len(stars)), stars):
            if (star_i + 1) % int(max([len(stars) * 0.05, 1])) == 0:
                logger.info('doing {0} out of {1}:'.format(star_i + 1, len(stars)))
            try:
                if (star_i + 1) % int(max([len(stars) * 0.05, 1])) == 0:
                    model_fitted_star, results = psf.fit_model(star, params=params[star_i], vary_shape=True, vary_optics=True, mode='pixel', logger=logger)
                else:
                    model_fitted_star, results = psf.fit_model(star, params=params[star_i], vary_shape=True, vary_optics=True, mode='pixel')
                model_fitted_stars.append(model_fitted_star)
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception as e:
                logger.warning('{0}'.format(str(e)))
                logger.warning('Warning! Failed to fit atmosphere model for star {0}. Ignoring star in atmosphere fit'.format(star_i))
        stars = model_fitted_stars
        logger.info('Drawing final model stars')
        drawn_stars = [psf.drawProfile(star, psf.getProfile(star.fit.params), star.fit.params, copy_image=True, use_fit=True) for star in stars]


    logger.info('Measuring star shapes')
    shapes = measure_star_shape(stars, drawn_stars, logger=logger)

    logger.info('Adding fitted params and params_var')
    shape_keys = ['e0', 'e1', 'e2', 'delta1', 'delta2', 'zeta1', 'zeta2']
    shape_plot_keys = []
    for key in shape_keys:
        shape_plot_keys.append(['data_' + key, 'model_' + key, 'd' + key])
    param_keys = ['atmo_size', 'atmo_g1', 'atmo_g2'] + ['optics_size', 'optics_g1', 'optics_g2'] + ['z{0:02d}'.format(zi) for zi in range(4, 45)]
    logger.info('Extracting training fit parameters')
    params = np.array([star.fit.params for star in stars])
    params_var = np.array([star.fit.params_var for star in stars])
    for i in range(params.shape[1]):
        shapes['{0}_fit'.format(param_keys[i])] = params[:, i]
        shapes['{0}_var'.format(param_keys[i])] = params_var[:, i]

    shapes['chisq'] = np.array([star.fit.chisq for star in stars])
    shapes['dof'] = np.array([star.fit.dof for star in stars])

    logger.info('saving shapes')
    shapes.to_hdf('{0}/zernikeshapes_{1}_{2}_zernike{3}.h5'.format(directory, 'train', piff_name, ['_reg', '_interp'][do_interp]), 'data')

    logger.info('saving stars')
    fits_path = '{0}/zernikestars_{1}_{2}_zernike{3}.fits'.format(directory, 'train', piff_name, ['_reg', '_interp'][do_interp])
    with fitsio.FITS(fits_path, 'rw', clobber=True) as f:
        piff.Star.write(stars, f, extname='zernike_stars')

    logger.info('making 2d plots')
    # plot shapes
    fig, axs = plot_2dhist_shapes(shapes, shape_plot_keys, diff_mode=True)
    # save
    fig.savefig('{0}/zernike{3}_shapes_{1}_{2}.pdf'.format(directory, 'train', piff_name, ['_reg', '_interp'][do_interp]))
    # plot params
    plot_keys = []
    plot_keys_i = []
    for i in range(params.shape[1]):
        plot_keys_i.append(param_keys[i])
        if len(plot_keys_i) == 3:
            plot_keys.append(plot_keys_i)
            plot_keys_i = []
    if len(plot_keys_i) > 0:
        plot_keys_i += [plot_keys_i[0]] * (3 - len(plot_keys_i))
        plot_keys.append(plot_keys_i)
    fig, axs = plot_2dhist_shapes(shapes, [[key + '_fit' for key in kp] for kp in plot_keys], diff_mode=False)
    fig.savefig('{0}/zernike{3}_fit_params_{1}_{2}.pdf'.format(directory, 'train', piff_name, ['_reg', '_interp'][do_interp]))

    nstars = min([20, len(stars)])
    indices = np.random.choice(len(stars), nstars, replace=False)
    logger.info('saving {0} star images'.format(nstars))
    fig = Figure(figsize = (4 * 4, 3 * nstars))
    for i, indx in enumerate(indices):
        axs = [ fig.add_subplot(nstars, 4, i * 4 + j + 1) for j in range(4)]
        # select a star
        star = stars[indx]
        # draw the model star
        params = star.fit.params
        prof = psf.getProfile(params)
        if do_interp:
            star_drawn = drawProfile(psf, star, psf.getProfile(star.fit.params), star.fit.params, use_fit=True, copy_image=True, interpfunc=interpfunc)
        else:
            star_drawn = psf.drawProfile(star, prof, params, use_fit=True, copy_image=True)
        # make plot
        draw_stars(star, star_drawn, fig, axs)

    canvas = FigureCanvasAgg(fig)
    # Do this after we've set the canvas to use Agg to avoid warning.
    fig.set_tight_layout(True)

    # save files based on what is listed
    plot_path = '{0}/zernike{3}_stars_{1}_{2}.pdf'.format(directory, 'train', piff_name, ['_reg', '_interp'][do_interp])
    logger.info('saving plot to {0}'.format(plot_path))
    canvas.print_figure(plot_path, dpi=100)
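
# A hedged usage sketch for zernike (not from the source). The directory and names are
# placeholders; the function expects {directory}/{config_file_name}.yaml and
# {directory}/{piff_name}.piff to already exist.
zernike('/path/to/00233466', 'psf_optatmo', 'psf_optatmo_kolmogorov', do_interp=True)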
Example #6
	# header_list = f[hdu].read_header_list()
	# header_list = [ d for d in header_list if 'CONTINUE' not in d['name'] ]
	# h = fitsio.FITSHDR(header_list)
	# detpos = h['DETPOS'].strip()
	# dp = detpos[ccdnum]
	# wz = np.where((which_zone['expnum'] == expnum) & (which_zone['detpos'] == dp))[0][0]
	# zone = which_zone['zone'][wz]

	# pixmappy = os.path.join(pixmappy_dir, 'zone%03d.astro'%zone)
	# wcs = pixmappy.GalSimWCS(pixmappy_file, exp=expnum, ccdnum=ccdnum, default_color=0)

	cat_file = '/Users/rebeccachen/Desktop/Piff_work/y3_test/'+str(expnum)+'_c'+str_ccdnum+'.fits'
	psf_file = '/Users/rebeccachen/Desktop/Piff_work/y3_test/D00'+str(expnum)+'_r_c'+str_ccdnum+'_r2277p01_gaiamatch.piff'


	config = piff.read_config(piff_config)
	config['input']['image_file_name'] = img_file
	config['input']['cat_file_name'] = cat_file
	config['output']['file_name'] = psf_file
	# config['input']['wcs']['file_name'] = pixmappy
	# config['input']['wcs']['exp'] = expnum
	# config['input']['wcs']['ccdnum'] = ccdnum
	piff.piffify(config, logger)


#gaia matches run
for i in [x for x in range(1, 63) if (x != 31 and x!= 61 and x!=2 and x!=5)]:
	ccdnum = i
	str_ccdnum = "%.2d" % ccdnum
	img_file = '/Users/rebeccachen/Desktop/Piff_work/y3_test/D00'+str(expnum)+'_r_c'+str_ccdnum+'_r2277p01_immasked.fits'
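	# The excerpt ends here. A plausible continuation of the loop body (not from the
	# source), mirroring the per-CCD steps in the function above: build this chip's
	# catalog and output paths, update the config, and run piffify.
	cat_file = '/Users/rebeccachen/Desktop/Piff_work/y3_test/'+str(expnum)+'_c'+str_ccdnum+'.fits'
	psf_file = '/Users/rebeccachen/Desktop/Piff_work/y3_test/D00'+str(expnum)+'_r_c'+str_ccdnum+'_r2277p01_gaiamatch.piff'
	config = piff.read_config(piff_config)
	config['input']['image_file_name'] = img_file
	config['input']['cat_file_name'] = cat_file
	config['output']['file_name'] = psf_file
	piff.piffify(config, logger)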
Example #7
def fit_psf(directory, config_file_name, print_log, meanify_file_path='', fit_interp_only=False):
    do_meanify = meanify_file_path != ''
    piff_name = config_file_name
    # load config file
    config = piff.read_config('{0}/{1}.yaml'.format(directory, config_file_name))
    is_optatmo = 'OptAtmo' in config['psf']['type']

    # do galsim modules
    if 'modules' in config:
        galsim.config.ImportModules(config)

    # create logger
    verbose = config.get('verbose', 3)
    if print_log:
        logger = piff.setup_logger(verbose=verbose)
    else:
        if do_meanify:
            logger = piff.setup_logger(verbose=verbose, log_file='{0}/{1}_fit_psf_meanify_logger.log'.format(directory, config_file_name))
        else:
            logger = piff.setup_logger(verbose=verbose, log_file='{0}/{1}_fit_psf_logger.log'.format(directory, config_file_name))

    if (do_meanify or fit_interp_only) and is_optatmo:
        # load base optics psf
        out_path = '{0}/{1}.piff'.format(directory, piff_name)
        logger.info('Loading saved PSF at {0}'.format(out_path))
        psf = piff.read(out_path)

        # load images for train stars
        logger.info('loading train stars')
        psf.stars = load_star_images(psf.stars, config, logger=logger)

        # load test stars and their images
        logger.info('loading test stars')
        test_stars = read_stars(out_path, logger=logger)
        test_stars = load_star_images(test_stars, config, logger=logger)

        # make output
        config['output']['dir'] = directory
        output = piff.Output.process(config['output'], logger=logger)

    elif (do_meanify or fit_interp_only) and not is_optatmo:
        # welp, not much to do here. shouldn't even have gotten here! :(
        logger.warning('Somehow passed the meanify to a non-optatmo argument. This should not happen.')
        return

    else:
        # load stars
        stars, wcs, pointing = piff.Input.process(config['input'], logger=logger)

        # separate stars
        # set seed
        np.random.seed(12345)
        test_fraction = config.get('test_fraction', 0.2)
        test_indx = np.random.choice(len(stars), int(test_fraction * len(stars)), replace=False)
        test_stars = []
        train_stars = []
        # kludgey:
        for star_i, star in enumerate(stars):
            if star_i in test_indx:
                test_stars.append(star)
            else:
                train_stars.append(star)

        # initialize psf
        psf = piff.PSF.process(config['psf'], logger=logger)

        # piffify
        logger.info('Fitting PSF')
        psf.fit(train_stars, wcs, pointing, logger=logger)
        logger.info('Fitted PSF!')

        # fit atmosphere parameters
        if is_optatmo:
            logger.info('Fitting PSF atmosphere parameters')
            logger.info('getting param info for {0} stars'.format(len(psf.stars)))
            params = psf.getParamsList(psf.stars)
            psf._enable_atmosphere = False
            new_stars = []
            for star_i, star in zip(range(len(psf.stars)), psf.stars):
                if star_i % 100 == 0:
                    logger.info('Fitting star {0} of {1}'.format(star_i, len(psf.stars)))
                try:
                    model_fitted_star, results = psf.fit_model(star, params=params[star_i], vary_shape=True, vary_optics=False, logger=logger)
                    new_stars.append(model_fitted_star)
                except (KeyboardInterrupt, SystemExit):
                    raise
                except Exception as e:
                    logger.warning('{0}'.format(str(e)))
                    logger.warning('Warning! Failed to fit atmosphere model for star {0}. Ignoring star in atmosphere fit'.format(star_i))
            psf.stars = new_stars

        # save psf
        # make sure the output is in the right directory
        config['output']['dir'] = directory
        output = piff.Output.process(config['output'], logger=logger)
        logger.info('Saving PSF')
        # save fitted PSF
        psf.write(output.file_name, logger=logger)

        # and write test stars
        write_stars(test_stars, output.file_name, logger=logger)

    shape_keys = ['e0', 'e1', 'e2', 'delta1', 'delta2', 'zeta1', 'zeta2']
    shape_plot_keys = []
    for key in shape_keys:
        shape_plot_keys.append(['data_' + key, 'model_' + key, 'd' + key])
    if is_optatmo:
        interps = config.pop('interps')
        interp_keys = list(interps.keys())
        if not (do_meanify or fit_interp_only):
            # do noatmo only when we do not have meanify
            interp_keys = ['noatmo'] + interp_keys
        train_stars = psf.stars
        for interp_key in interp_keys:
            piff_name = '{0}_{1}'.format(config_file_name, interp_key)
            logger.info('Fitting optatmo interpolate for {0}'.format(interp_key))
            if interp_key == 'noatmo':
                psf.atmo_interp = None
                psf._enable_atmosphere = False
                passed_test_stars = test_stars
            else:
                # fit interps
                config_interp = interps[interp_key]

                if do_meanify:
                    config_interp['average_fits'] = meanify_file_path
                    piff_name += '_meanified'

                # extract chicut, madcut, snrcut from interp config, if provided
                interp_chicut = config_interp.pop('chicut', 0)
                interp_madcut = config_interp.pop('madcut', 0)
                interp_snrcut = config_interp.pop('snrcut', 0)

                # test stars undergo snr cut, but no other cuts
                used_interp_stars, passed_test_stars = fit_interp(train_stars, test_stars, config_interp, psf, interp_chicut, interp_madcut, interp_snrcut, logger)
                psf.stars = used_interp_stars

            # save
            out_path = '{0}/{1}.piff'.format(directory, piff_name)
            psf.write(out_path, logger=logger)
            write_stars(passed_test_stars, out_path, logger=logger)

            # evaluate
            logger.info('Evaluating {0}'.format(piff_name))

            for stars, label in zip([psf.stars, passed_test_stars], ['train', 'test']):
                # get shapes
                logger.debug('drawing {0} model stars'.format(label))
                model_stars = psf.drawStarList(stars)
                shapes = measure_star_shape(stars, model_stars, logger=logger)

                param_keys = ['atmo_size', 'atmo_g1', 'atmo_g2']
                if psf.atmosphere_model == 'vonkarman':
                    param_keys += ['atmo_L0']
                param_keys += ['optics_size', 'optics_g1', 'optics_g2'] + ['z{0:02d}'.format(zi) for zi in range(4, 45)]
                if label == 'train':
                    # if train, plot fitted params
                    logger.info('Extracting training fit parameters')
                    params = np.array([star.fit.params for star in stars])
                    params_var = np.array([star.fit.params_var for star in stars])
                    for i in range(params.shape[1]):
                        shapes['{0}_fit'.format(param_keys[i])] = params[:, i]
                        shapes['{0}_var'.format(param_keys[i])] = params_var[:, i]
                    logger.info('Getting training fit parameters')
                    params = psf.getParamsList(stars)
                    for i in range(params.shape[1]):
                        shapes[param_keys[i]] = params[:, i]

                elif label == 'test':
                    # if test, plot predicted params
                    logger.info('Getting test parameters')
                    params = psf.getParamsList(stars)
                    for i in range(params.shape[1]):
                        shapes[param_keys[i]] = params[:, i]

                # save shapes
                shapes.to_hdf('{0}/shapes_{1}_{2}.h5'.format(directory, label, piff_name), 'data', mode='w')

                # plot shapes
                fig, axs = plot_2dhist_shapes(shapes, shape_plot_keys, diff_mode=True)
                # save
                fig.savefig('{0}/plot_2dhist_shapes_{1}_{2}.pdf'.format(directory, label, piff_name))

                # plot params
                plot_keys = []
                plot_keys_i = []
                for i in range(params.shape[1]):
                    plot_keys_i.append(param_keys[i])
                    if len(plot_keys_i) == 3:
                        plot_keys.append(plot_keys_i)
                        plot_keys_i = []
                if len(plot_keys_i) > 0:
                    plot_keys_i += [plot_keys_i[0]] * (3 - len(plot_keys_i))
                    plot_keys.append(plot_keys_i)
                fig, axs = plot_2dhist_shapes(shapes, plot_keys, diff_mode=False)
                # save
                fig.savefig('{0}/plot_2dhist_params_{1}_{2}.pdf'.format(directory, label, piff_name))
                # and repeat for the fit params
                if label == 'train':
                    fig, axs = plot_2dhist_shapes(shapes, [[key + '_fit' for key in kp] for kp in plot_keys], diff_mode=False)
                    # save
                    fig.savefig('{0}/plot_2dhist_fit_params_{1}_{2}.pdf'.format(directory, label, piff_name))

                # if test, fit the flux and centering
                if label == 'test':
                    logger.info('Fitting the centers and fluxes of {0} test stars'.format(len(stars)))
                    # fit stars for stats
                    new_stars = []
                    for star, param in zip(stars, params):
                        try:
                            new_star, res = psf.fit_model(star, param, vary_shape=False, vary_optics=False, logger=logger)
                            new_stars.append(new_star) 
                        except (KeyboardInterrupt, SystemExit):
                            raise
                        except Exception as e:
                            logger.warning('{0}'.format(str(e)))
                            logger.warning('Warning! Failed to fit atmosphere model for star {0}. Ignoring star in atmosphere fit'.format(star))
                    stars = new_stars

                # do the output processing
                logger.info('Writing Stats Output of {0} stars'.format(label))
                for stat in output.stats_list:
                    stat.compute(psf, stars, logger=logger)
                    file_name = '{0}/{1}_{2}_{3}'.format(directory, label, piff_name, os.path.split(stat.file_name)[1])
                    stat.write(file_name=file_name, logger=logger)

    else:
        logger.info('Evaluating {0}'.format(piff_name))

        for stars, label in zip([psf.stars, test_stars], ['train', 'test']):
            # get shapes
            model_stars = psf.drawStarList(stars)
            shapes = measure_star_shape(stars, model_stars, logger=logger)
            # save shapes
            shapes.to_hdf('{0}/shapes_{1}_{2}.h5'.format(directory, label, piff_name), 'data', mode='w')

            # plot shapes
            fig, axs = plot_2dhist_shapes(shapes, shape_plot_keys, diff_mode=True)
            # save
            fig.savefig('{0}/plot_2dhist_shapes_{1}_{2}.pdf'.format(directory, label, piff_name))

            logger.info('Writing Stats Output of {0} stars'.format(label))
            for stat in output.stats_list:
                stat.compute(psf, stars, logger=logger)
                file_name = '{0}/{1}_{2}_{3}'.format(directory, label, piff_name, os.path.split(stat.file_name)[1])
                stat.write(file_name=file_name, logger=logger)
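
# A hedged usage sketch for fit_psf (not from the source). The directory and config name
# are placeholders; the function reads {directory}/{config_file_name}.yaml and writes the
# fitted .piff files, shape catalogs, and plots back into the same directory.
fit_psf('/path/to/00233466', 'psf_optatmo', print_log=True)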
Example #8
def call_zernike(run_config_path, bsub, check, call):

    run_config = piff.read_config(run_config_path)
    directory = run_config[
        'directory']  # directory such that you can go glob(directory/*/psf.piff) and find piff files

    # load up psf names
    # rho stats and atmo params
    piff_names = []
    psf_file_paths = []
    for psf_file in run_config['psf_optics_files']:
        config = piff.read_config(psf_file)
        base_piff_name = config['output']['file_name'].split('.piff')[0]
        interps = config['interps']
        # stick in the interps
        for interp_key in interps.keys():
            config_interp = interps[interp_key]
            piff_name = '{0}_{1}'.format(base_piff_name, interp_key)
            piff_names.append(piff_name)
            psf_file_paths.append(psf_file)

            if 'GPInterp' in config_interp['type']:
                piff_name = '{0}_{1}_meanified'.format(base_piff_name,
                                                       interp_key)
                piff_names.append(piff_name)
                psf_file_paths.append(psf_file)

        piff_names.append('{0}_noatmo'.format(base_piff_name))
        psf_file_paths.append(psf_file)

    # go through the names and call
    for psf_file_path, piff_name in zip(psf_file_paths, piff_names):
        psf_name = psf_file_path.split('.yaml')[0].split('/')[-1]

        time = 2000
        memory = 2

        # go through expids
        files = sorted(glob.glob('{0}/*/{1}.piff'.format(directory, psf_name)))
        expids = [int(val.split('/')[-2]) for val in files]

        for expid in expids:
            for do_interp in [False, True]:
                job_directory = '{0}/{1:08d}'.format(directory, expid)
                job_name = 'zernike__expid_{0:08d}__{1}{2}'.format(
                    expid, piff_name, ['', '_dointerp'][do_interp])

                # create command
                command = [
                    'python',
                    'zernike_radial.py',
                    '--directory',
                    job_directory,
                    '--piff_name',
                    piff_name,
                    '--config_file_name',
                    psf_name,
                ]
                if do_interp:
                    command += ['--do_interp']

                # call command
                skip_iter = False
                if check and bsub and call:
                    jobcheck = subprocess.check_output(
                        ['bjobs', '-wJ', job_name]).decode()
                    if job_name in jobcheck:
                        print('skipping {0} because it is already running'.
                              format(job_name))
                        skip_iter = True
                if skip_iter:
                    continue

                if bsub:
                    logfile = '{0}/bsub_zernike_{1}{2}.log'.format(
                        job_directory, piff_name, ['', '_dointerp'][do_interp])
                    # if an old logfile exists, remove it so bsub starts fresh
                    if os.path.exists(logfile) and call:
                        os.remove(logfile)

                    bsub_command = ['bsub', '-J', job_name, '-o', logfile]
                    if time > 0:
                        bsub_command += ['-W', str(time)]
                    if memory > 1:
                        bsub_command += [
                            '-n',
                            str(memory),
                        ]
                        # '-R', '"span[ptile={0}]"'.format(memory)]

                    command = bsub_command + command

                print(' '.join(command))
                if call:
                    print(job_name)
                    subprocess.call(command)
        raise Exception('Temporarily stopping the zernike after first ap')
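
# A hedged usage sketch for call_zernike (not from the source). 'run_config.yaml' is a
# placeholder; with call=False the bsub commands are printed but not submitted.
call_zernike('run_config.yaml', bsub=True, check=True, call=False)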
Example #9
def call_fit_psf(run_config_path, bsub, qsub, check, call, print_log, overwrite, meanify, nmax, fit_interp_only):

    if qsub and bsub:
        raise ValueError('qsub and bsub cannot be set at the same time, qsub @ ccin2p3 and bsub @ slac')

    if qsub: 
        os.system('tar cvzf piffy3pipeline.tar.gz ../piffy3pipeline/')

    run_config = piff.read_config(run_config_path)
    if meanify or fit_interp_only:
        psf_files = run_config['psf_optics_files']
    else:
        psf_files = run_config['psf_optics_files'] + run_config['psf_other_files']
    psf_dir = run_config['psf_dir']  # where we load the psf files
    directory = run_config['directory']  # where we save files

    # get list of expids
    expids = [int(val.split('/')[-2]) for val in sorted(glob.glob('{0}/*/input.yaml'.format(psf_dir)))]

    nrun = 0
    for expid in expids:
        for psf_file in psf_files:
            psf_name = psf_file.split('.yaml')[0].split('/')[-1]
            if meanify:
                meanify_file_path = '{0}/meanify_{1}.fits'.format(directory, psf_name)
                if not os.path.exists(meanify_file_path):
                    continue

            config = piff.read_config(psf_file)
            time = config.pop('time', 30)
            memory = config.pop('memory', 1)

            config['input']['image_file_name'] = '{0}/{1}/*.fits.fz'.format(psf_dir, expid)
            # load up wcs info from expid
            try:
                expid_config = piff.read_config('{0}/{1}/input.yaml'.format(psf_dir, expid))
            except IOError:
                # input doesn't exist or is malformed
                print('skipping {0} because input.yaml cannot load'.format(expid))
                continue

            config['input']['wcs'] = expid_config['input']['wcs']
            # I messed up expnum when saving these
            config['input']['wcs']['exp'] = expid
            # and I also messed up the ccd splitting
            config['input']['wcs']['ccdnum']['str'] = "image_file_name.split('_')[-1].split('.fits')[0]"

            if print_log:
                config['verbose'] = 3

            # code to make job_directory and name
            job_directory = '{0}/{1:08d}'.format(directory, expid)
            job_name = 'fit_psf__expid_{0:08d}__{1}'.format(expid, psf_name)

            # with everything updated, save the config file
            if not os.path.exists(job_directory):
                os.makedirs(job_directory)
            save_config(config, job_directory + '/{0}.yaml'.format(psf_name))

            # create command
            command = [
                       'python',
                       'fit_psf.py',
                       '--directory', job_directory,
                       '--config_file_name', psf_name,
                       ]
            if print_log:
                command = command + ['--print_log']
            if meanify:
                command = command + ['--meanify_file_path', meanify_file_path]
            if fit_interp_only:
                command = command + ['--fit_interp_only']

            # call command
            skip_iter = False
            if check and bsub and call:
                jobcheck = subprocess.check_output(['bjobs', '-wJ', job_name]).decode()
                if job_name in jobcheck:
                    print('skipping {0} because it is already running'.format(job_name))
                    skip_iter = True
            if skip_iter:
                continue

            # check if output exists
            if meanify:
                if not os.path.exists('{0}/{1}.piff'.format(job_directory, psf_name)):
                    continue
            else:
                if overwrite and not fit_interp_only:
                    file_names = glob.glob('{0}/{1}*'.format(job_directory, psf_name))
                    for file_name in file_names:
                        if '{0}.yaml'.format(psf_name) in file_name:
                            continue
                        elif os.path.exists(file_name):
                            os.remove(file_name)
                else:
                    if os.path.exists('{0}/{1}.piff'.format(job_directory, psf_name)):
                        if fit_interp_only:
                            pass
                        else:
                            continue
                    else:
                        if fit_interp_only:
                            # need that piff file to exist in order to fit it
                            continue
                        else:
                            pass

            if bsub:
                logfile = '{0}/bsub_fit_psf_{1}.log'.format(job_directory, psf_name)
                # if an old logfile exists, remove it so bsub starts fresh
                if os.path.exists(logfile) and call and overwrite:
                    os.remove(logfile)

                bsub_command = ['bsub',
                                '-J', job_name,
                                '-o', logfile]
                if time > 0:
                    bsub_command += ['-W', str(time)]
                if memory > 1:
                    bsub_command += ['-n', str(memory),]
                                     # '-R', '"span[ptile={0}]"'.format(memory)]

                command = bsub_command + command

            print(' '.join(command))
            if qsub and call: 
                logfile = '{0}/bsub_fit_psf_{1}.log'.format(job_directory, psf_name)
                logfile_err = '{0}/bsub_fit_psf_{1}_err.log'.format(job_directory, psf_name)
                launch_job_to_ccin2p3(command, logfile, logfile_err, job_name)
            elif bsub and call:
                print(job_name)
                subprocess.call(command)
            elif call:
                # run directly in python so that if things fail, we fail
                print(job_name)
                # call manually if no bsub
                subprocess.call(command)
            
        nrun += 1
        if nmax > 0 and nrun >= nmax:
            break
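
# A hedged usage sketch for call_fit_psf (not from the source). 'run_config.yaml' is a
# placeholder; call=False only prints the fit_psf commands, and nmax=2 stops after two
# exposures have been queued.
call_fit_psf(
    'run_config.yaml',
    bsub=True, qsub=False, check=True, call=False,
    print_log=False, overwrite=False, meanify=False,
    nmax=2, fit_interp_only=False,
)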