Code example #1
def auto_extract(root='j023507-040202', maglim=[16.5, 26]):

    import numpy as np

    from grizli import utils
    from grizli.pipeline import auto_script
    utils.set_warnings()

    tab = utils.GTable.gread('{0}_footprint.fits'.format(root))

    pline = DITHERED_PLINE = {
        'kernel': 'point',
        'pixfrac': 0.2,
        'pixscale': 0.1,
        'size': 8,
        'wcs': None
    }

    # The drizzle parameters stored with the fit arguments supersede the dict above
    args = np.load('fit_args.npy', allow_pickle=True)[0]
    pline = args['pline']

    auto_script.extract(field_root=root,
                        maglim=maglim,
                        ids=None,
                        run_fit=False,
                        MW_EBV=tab.meta['MW_EBV'],
                        pline=pline)
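For reference, the `fit_args.npy` file read above stores a single pickled parameter dict in a one-element list, so recent numpy versions require `allow_pickle=True`, as the later examples do. A minimal sketch:

import numpy as np

# fit_args.npy holds one pickled dict in a one-element list;
# allow_pickle=True is required on numpy >= 1.16.4
args = np.load('fit_args.npy', allow_pickle=True)[0]
print(sorted(args.keys()))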
Code example #2
File: generation.py Project: grizli-project/koala
def database_from_query(query):
    # Module-level imports in the original file (assumed to be these)
    from grizli import utils
    from grizli.aws import db

    engine = db.get_db_engine()
    utils.set_warnings()

    result = db.from_sql(query, engine)

    return result
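A hedged usage sketch; the SQL below is a placeholder (the `redshift_fit` table name appears in the later examples, but the selected columns are hypothetical):

# Hypothetical query; column names are illustrative, not a documented schema
tab = database_from_query("SELECT root, id, status FROM redshift_fit LIMIT 10")
print(len(tab))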
Code example #3
def auto_run(root='j023507-040202'):
    
    import os
    import glob

    import numpy as np

    import matplotlib.pyplot as plt
    
    from grizli import utils, prep
    from grizli.pipeline import auto_script, photoz
    utils.set_warnings()
    
    tab = utils.GTable.gread('{0}_footprint.fits'.format(root))
    
    HOME_PATH = os.getcwd()
    
    auto_script.VALID_FILTERS = [
        'F098M', 'F105W', 'F110W', 'F125W', 'F127M', 'F139M', 'F140W', 'F153M',
        'F160W', 'F410M', 'F435W', 'F438W', 'F439W', 'F450W', 'F467M', 'F475W',
        'F475X', 'F547M', 'F550M', 'F555W', 'F569W', 'F600LP', 'F606W',
        'F621M', 'F622W', 'F625W', 'F675W', 'F689M', 'F702W', 'F763M', 'F775W',
        'F791W', 'F814W', 'F845M', 'F850LP', 'F350LP'
    ]
    
    # Include grisms
    auto_script.VALID_FILTERS += ['G102','G141','G800L']
    
    # 'alkan' matches PI names like 'Malkan' (pure-parallel programs)
    IS_PARALLEL = utils.column_string_operation(tab['proposal_pi'], 'alkan', method='count', logical='or').sum() > 0

    # DASH mode if the only proposal is 14114
    IS_DASH = list(np.unique(np.cast[int](tab['proposal_id']))) == [14114]
    
    master_radec = '{0}/{1}_master.radec'.format(os.getcwd(), root)
    if not os.path.exists(master_radec):
        if root.startswith('cos-') and os.path.exists('hsc-udeep-i25_corr_cosmos.radec'):
            master_radec = '{0}/{1}'.format(os.getcwd(), 'hsc-udeep-i25_corr_cosmos.radec')
        else:
            master_radec = None
    
    parent_radec = '{0}/{1}_parent.radec'.format(os.getcwd(), root)
    if not os.path.exists(parent_radec):
        parent_radec = None
       
    BKG_PARAMS = {'bw': 1024, 'bh': 1024, 'fw': 3, 'fh': 3}
    
    catalogs = ['PS1','DES','NSC', 'SDSS','GAIA','WISE']
    
    # MACS0717
    if 'macs0717' in root:
        catalogs = ['GAIA', 'PS1']
    
    run_fine_alignment = False
        
    auto_script.go(root=root, maglim=[16, 23], HOME_PATH=HOME_PATH, 
                   inspect_ramps=False, manual_alignment=False, 
                   is_parallel_field=IS_PARALLEL, reprocess_parallel=True, 
                   only_preprocess=True, run_extractions=True, run_fit=False, 
                   s3_sync='cp', catalogs=catalogs,
                   fine_radec=None, run_fine_alignment=run_fine_alignment, 
                   combine_all_filters=False, 
                   align_mag_limits=[16,24],
                   gaia_by_date=True, align_simple=False, align_clip=120, 
                   master_radec=master_radec, parent_radec=parent_radec, 
                   is_dash=IS_DASH, run_parse_visits=True, 
                   reference_wcs_filters=['F160W','F140W','F125W','F105W','F110W','F098M','F814W','F850LP', 'F606W','F435W'], 
                   make_phot=False, make_mosaics=False, align_rms_limit=4, 
                   align_min_overlap=2, imaging_bkg_params=BKG_PARAMS)
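The `IS_PARALLEL` test above uses a grizli table helper; a plain-Python equivalent of the same check, assuming `tab['proposal_pi']` is a string column, would be:

# Flag the field as a pure-parallel program if any PI name contains
# 'alkan' (e.g., 'Malkan'); equivalent to the column_string_operation call
IS_PARALLEL = any('alkan' in str(pi) for pi in tab['proposal_pi'])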
Code example #4
def auto_run(root='j023507-040202', flag_global_crs=False):

    import os
    import glob
    import numpy as np

    import astropy.io.fits as pyfits
    import astropy.wcs as pywcs

    from drizzlepac import updatehdr
    from stwcs import updatewcs

    from grizli import utils, prep
    from grizli.pipeline import auto_script
    utils.set_warnings()

    visit_file = '{0}_visits.npy'.format(root)
    visits, all_groups, info = np.load(visit_file, allow_pickle=True)

    # Some exposures have bad shifts; reset their WCS before realigning
    for visit in visits:
        for file in visit['files']:
            utils.fetch_hst_calibs(
                file, calib_types=['IDCTAB', 'NPOLFILE', 'IMPHTTAB'])
            updatewcs.updatewcs(file, verbose=True, use_db=False)

        # Apply shifts
        shift_log = '{0}_shifts.log'.format(visit['product'])
        if os.path.exists(shift_log):
            sh = utils.read_catalog(shift_log)
            flt0 = pyfits.open(sh['flt'][0])
            wcs_ref = pywcs.WCS(flt0['SCI', 1].header, fobj=flt0, relax=True)
            shift_dict = {}
            for i in range(len(sh)):
                shift_dict[sh['flt'][i]] = [sh['xshift'][i], sh['yshift'][i]]

            prep.apply_tweak_shifts(wcs_ref,
                                    shift_dict,
                                    grism_matches={},
                                    verbose=False)

    # Redrizzle mosaics
    prep.drizzle_overlaps(visits,
                          check_overlaps=False,
                          skysub=False,
                          static=False,
                          pixfrac=0.5,
                          scale=None,
                          final_wcs=False,
                          fetch_flats=False,
                          final_rot=None,
                          include_saturated=True)

    ####### Alignment
    os.system('rm *wcs.*')

    # Radec
    master_radec = '{0}/../../{1}_master.radec'.format(os.getcwd(), root)

    if not os.path.exists(master_radec):
        master_radec = None

    ref_catalog = 'USER'

    if root.startswith('cos-'):
        hsc = '{0}/../../{1}'.format(os.getcwd(),
                                     'hsc-udeep-i25_corr_cosmos.radec')
        if os.path.exists(hsc):
            master_radec = hsc
            ref_catalog = 'HSC'

    elif root.startswith('uds-'):
        hsc = '{0}/../../{1}'.format(os.getcwd(),
                                     'hsc-udeep-sxds_corr_uds.radec')
        if os.path.exists(hsc):
            master_radec = hsc
            ref_catalog = 'HSC'

    parent_radec = '{0}/../../{1}_parent.radec'.format(os.getcwd(), root)
    if not os.path.exists(parent_radec):
        parent_radec = None

    if master_radec is not None:
        radec = master_radec
    elif parent_radec is not None:
        radec = parent_radec
    else:
        radec = None

    if radec is None:
        needs_gaia = True
    else:
        needs_gaia = False

    REFERENCE = 'GAIA'
    REFERENCE = 'PS1'  # Override: use PS1 as the fallback reference catalog

    print('master RADEC file: ', radec)

    thresh = 2.5
    for visit in visits:

        # Clean catalogs
        files = glob.glob('{0}.*'.format(visit['product']))
        for file in files:
            os.remove(file)

        # Generate GAIA alignment catalog at the observation epoch
        clip = 120
        clip = -1  # Override the value above
        if needs_gaia:
            flt = pyfits.open(visit['files'][0])
            h = flt['SCI', 1].header
            ra_i, dec_i = h['CRVAL1'], h['CRVAL2']
            radec, ref_catalog = prep.get_radec_catalog(
                ra=ra_i,
                dec=dec_i,
                product=visit['product'],
                date=flt[0].header['EXPSTART'],
                date_format='mjd',
                reference_catalogs=[REFERENCE],
                radius=5.)
            flt.close()
            if REFERENCE == 'GAIA':
                mag_limits = [16, 20]
            else:
                mag_limits = [18, 22]
                #clip = 50

            if '_flc' in visit['files'][0]:
                triangle_size_limit = [5, 4000 * np.sqrt(2)]
            else:
                triangle_size_limit = [5, 1300]
        else:
            mag_limits = [19, 23]
            triangle_size_limit = [5, 1300]

        # Remake catalogs
        cat = prep.make_SEP_catalog(root=visit['product'], threshold=thresh)

        # Redo alignment
        try:
            print('XXX clip', clip, mag_limits, triangle_size_limit)
            result = prep.align_drizzled_image(
                root=visit['product'],
                radec=radec,
                mag_limits=mag_limits,
                simple=False,
                max_err_percentile=80,
                clip=clip,
                outlier_threshold=5,
                rms_limit=2.5,
                triangle_size_limit=triangle_size_limit)
        except:
            print('First align failed!  Relax parameters')
            try:
                result = prep.align_drizzled_image(
                    root=visit['product'],
                    radec=radec,
                    mag_limits=[10, 20],
                    simple=False,
                    max_err_percentile=99,
                    clip=160,
                    outlier_threshold=20,
                    rms_limit=2.5,
                    triangle_size_limit=triangle_size_limit)
            except:
                try:
                    result = prep.align_drizzled_image(
                        root=visit['product'],
                        radec=radec,
                        mag_limits=[10, 20],
                        simple=False,
                        max_err_percentile=99,
                        clip=160,
                        outlier_threshold=40,
                        rms_limit=2.5,
                        triangle_size_limit=triangle_size_limit)
                except:
                    radec = '{0}_ps1.radec'.format(visit['product'])
                    ref_catalog = 'PS1'
                    result = prep.align_drizzled_image(
                        root=visit['product'],
                        radec=radec,
                        mag_limits=mag_limits,
                        simple=False,
                        max_err_percentile=80,
                        clip=120,
                        outlier_threshold=5,
                        rms_limit=2.5,
                        triangle_size_limit=triangle_size_limit)

            #continue

        orig_wcs, drz_wcs, out_shift, out_rot, out_scale = result

        # Propagate shifts
        for file in visit['files']:
            updatehdr.updatewcs_with_shift(file,
                                           str('{0}_wcs.fits'.format(
                                               visit['product'])),
                                           xsh=out_shift[0],
                                           ysh=out_shift[1],
                                           rot=out_rot,
                                           scale=out_scale,
                                           wcsname=ref_catalog,
                                           force=True,
                                           reusename=True,
                                           verbose=True,
                                           sciext='SCI')

            ### Bug in astrodrizzle? Dies if the FLT files don't have MJD-OBS
            ### keywords
            im = pyfits.open(file, mode='update')
            im[0].header['MJD-OBS'] = im[0].header['EXPSTART']
            im.flush()

    # Redrizzle mosaics again including new shifts
    prep.drizzle_overlaps(visits,
                          check_overlaps=False,
                          skysub=False,
                          static=False,
                          pixfrac=0.8,
                          scale=None,
                          final_wcs=False,
                          fetch_flats=False,
                          final_rot=None)
    # Remake catalogs
    thresh = 2.5
    for visit in visits:
        # Remake catalogs
        cat = prep.make_SEP_catalog(root=visit['product'], threshold=thresh)
        prep.table_to_regions(cat, '{0}.cat.reg'.format(visit['product']))
        prep.table_to_radec(cat, '{0}.cat.radec'.format(visit['product']))

    # Update visits file
    v = auto_script.get_visit_exposure_footprints(visit_file=visit_file,
                                                  check_paths=['./', '../RAW'],
                                                  simplify=1.e-6)

    if flag_global_crs:
        # Assume everything at same orient
        pass

    if False:
        # Mosaic
        auto_script.drizzle_overlaps(root,
                                     filters=['F160W'],
                                     min_nexp=1,
                                     pixfrac=0.8,
                                     scale=0.1,
                                     make_combined=False,
                                     ref_image=None,
                                     static=False)
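The nested try/except blocks in the alignment step above amount to retrying `prep.align_drizzled_image` with progressively relaxed parameters. A compact sketch of the same relaxation sequence, using the variable names from the example (the final PS1-catalog fallback is omitted):

# Same parameter relaxation as the nested try/except above
attempts = [
    dict(mag_limits=mag_limits, max_err_percentile=80, clip=clip, outlier_threshold=5),
    dict(mag_limits=[10, 20], max_err_percentile=99, clip=160, outlier_threshold=20),
    dict(mag_limits=[10, 20], max_err_percentile=99, clip=160, outlier_threshold=40),
]
for kws in attempts:
    try:
        result = prep.align_drizzled_image(root=visit['product'], radec=radec,
                                           simple=False, rms_limit=2.5,
                                           triangle_size_limit=triangle_size_limit,
                                           **kws)
        break
    except Exception:
        continue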
Code example #5
    #     fp = open('{0}.fp.reg'.format(root),'w')
    #     fp.write('fk5\n')
    #
    #     for weight_image in files:
    #         root_i = '_dr'.join(weight_image.split('_dr')[:-1])
    #         reg = prep.drizzle_footprint(weight_image, shrink=10, ext=0,
    #                                      outfile=None, label=root_i)
    #         fp.write(reg+'\n')
    #
    #     fp.close()


if __name__ == "__main__":
    import sys
    import time

    import numpy as np
    from grizli import utils
    from grizli.pipeline import auto_script
    utils.set_warnings()

    root = sys.argv[1]
    #auto_run(root=root)

    try:
        auto_run(root=root)
    except:
        fp = open('{0}.failed'.format(root), 'w')
        fp.write(time.ctime() + '\n')
        fp.close()
Code example #6
def auto_run(root='j023507-040202'):

    import os
    import matplotlib.pyplot as plt

    from grizli import utils
    from grizli.pipeline import auto_script, photoz
    utils.set_warnings()

    tab = utils.GTable.gread('{0}_footprint.fits'.format(root))

    HOME_PATH = os.getcwd()

    auto_script.VALID_FILTERS = [
        'F098M', 'F105W', 'F110W', 'F125W', 'F127M', 'F139M', 'F140W', 'F153M',
        'F160W', 'F410M', 'F435W', 'F438W', 'F439W', 'F450W', 'F467M', 'F475W',
        'F475X', 'F547M', 'F550M', 'F555W', 'F569W', 'F600LP', 'F606W',
        'F621M', 'F622W', 'F625W', 'F675W', 'F689M', 'F702W', 'F763M', 'F775W',
        'F791W', 'F814W', 'F845M', 'F850LP', 'F350LP'
    ]

    IS_PARALLEL = utils.column_string_operation(
        tab['proposal_pi'], 'alkan', method='count', logical='or').sum() > 0

    auto_script.go(root=root,
                   maglim=[19, 23],
                   HOME_PATH=HOME_PATH,
                   inspect_ramps=False,
                   manual_alignment=False,
                   is_parallel_field=IS_PARALLEL,
                   reprocess_parallel=False,
                   only_preprocess=True,
                   run_extractions=False,
                   run_fit=False,
                   s3_sync='cp',
                   fine_radec=None,
                   combine_all_filters=False,
                   gaia_by_date=True,
                   align_simple=False,
                   align_clip=100,
                   master_radec=None,
                   is_dash=False,
                   run_parse_visits=True,
                   reference_wcs_filters=[
                       'F160W', 'F140W', 'F125W', 'F105W', 'F110W', 'F098M',
                       'F814W', 'F850LP', 'F606W', 'F435W'
                   ])

    plt.ioff()
    fig = auto_script.field_rgb(root=root, HOME_PATH=HOME_PATH, xsize=18)
    plt.close(fig)

    # Photo-z
    try:
        out = photoz.eazy_photoz(root,
                                 object_only=False,
                                 force=True,
                                 aper_ix=1,
                                 sys_err=0.05,
                                 apply_prior=False,
                                 beta_prior=True,
                                 external_limits=3,
                                 external_sys_err=0.3)
    except:
        # Photo-z is optional; ignore failures
        pass
Code example #7
def extract_beams_from_flt(root, bucket, id, clean=True, silent=False):
    """
    Download GrismFLT files and extract the beams file
    """
    import os
    import gc
    import glob

    import numpy as np
    import boto3
    
    import matplotlib.pyplot as plt
    
    import grizli
    from grizli import fitting, utils, multifit
    from grizli.version import __version__ as grizli__version
    
    utils.set_warnings()
    from grizli.pipeline import auto_script
    
    s3 = boto3.resource('s3')
    s3_client = boto3.client('s3')
    bkt = s3.Bucket(bucket)
    
    # WCS files for ACS
    files = [obj.key for obj in bkt.objects.filter(Prefix='Pipeline/{0}/Extractions/j'.format(root))]
    files += [obj.key for obj in bkt.objects.filter(Prefix='Pipeline/{0}/Extractions/i'.format(root))]

    files += [obj.key for obj in bkt.objects.filter(Prefix='Pipeline/{0}/Extractions/{0}-ir.cat.fits'.format(root))]

    files += [obj.key for obj in bkt.objects.filter(Prefix='Pipeline/{0}/Extractions/fit_args.npy'.format(root))]
    
    download_files = []
    for file in np.unique(files):
        if ('cat.fits' in file) | ('fit_args' in file):
            if os.path.exists(os.path.basename(file)):
                continue
            
            download_files.append(file)
        
    for file in download_files:
        print(file)    
        bkt.download_file(file, os.path.basename(file),
                          ExtraArgs={"RequestPayer": "requester"})
    
    # Read the catalog
    ircat = utils.read_catalog('{0}-ir.cat.fits'.format(root))
    ix = ircat['NUMBER'] == id
    object_rd = (ircat['X_WORLD'][ix], ircat['Y_WORLD'][ix])
    del(ircat)
    
    # One beam at a time
    beams = None
    
    flt_files = []
    for file in files:
        if 'GrismFLT.fits' in file:
            flt_files.append(file)
    
    if not silent:
        print('Read {0} GrismFLT files'.format(len(flt_files)))
    
    if os.path.exists('{0}_fit_args.npy'.format(root)):
        args_file = '{0}_fit_args.npy'.format(root)
    else:
        args_file = 'fit_args.npy'
        
    for i, file in enumerate(flt_files):
        if not silent:
            print('# Read {0}/{1}'.format(i+1, len(flt_files)))

        flt, ext, _, _ = os.path.basename(file).split('.')          
        # WFC3 rootnames start with 'i' and use FLT files; ACS uses FLC
        if flt.startswith('i'):
            fl = 'flt'
        else:
            fl = 'flc'
        
        out_files = ['{0}_{2}.{1}.wcs.fits'.format(flt, ext, fl), 
                     '{0}.{1}.GrismFLT.fits'.format(flt, ext), 
                     '{0}.{1}.GrismFLT.pkl'.format(flt, ext)]
        
        exp_has_id = False
        
        for j, f_j in enumerate(out_files):             
            aws_file = os.path.join(os.path.dirname(file), f_j)
            if not silent:
                print('  ', aws_file)
            
            if not os.path.exists(f_j):
                bkt.download_file(aws_file, f_j, 
                                  ExtraArgs={"RequestPayer": "requester"})
            
            # WCS file, check if object in footprint
            if f_j.endswith('.wcs.fits'):
                #exp_has_id = check_object_in_footprint(id, f_j, ircat)
                exp_has_id = check_object_in_footprint(None, f_j, None, rd=object_rd)
                if not exp_has_id:
                    if clean:
                        os.remove(f_j)
                    break
        
        if not exp_has_id:
            continue
                
        beams_i = auto_script.extract(field_root=root, maglim=[13, 24],
                                      prior=None, MW_EBV=0.00, ids=id,
                                      pline={}, fit_only_beams=True,
                                      run_fit=False, poly_order=7,
                                      master_files=[os.path.basename(file)],
                                      grp=None, bad_pa_threshold=None,
                                      fit_trace_shift=False, size=32,
                                      diff=True, min_sens=0.02,
                                      skip_complete=True, fit_args={},
                                      args_file=args_file,
                                      get_only_beams=True)
        
        # Remove the GrismFLT file    
        for f_j in out_files:
            if ('GrismFLT' in f_j) & clean:
                os.remove(f_j)

        if beams is None:
            beams = beams_i
        else:
            beams.extend(beams_i)
    
    # Garbage collector
    gc.collect()
        
    if not beams:
        print('No beams found for {0} id={1}'.format(root, id))
        return False
    
    # Grism Object
    args = np.load(args_file, allow_pickle=True)[0]
    mb = multifit.MultiBeam(beams, **args)
    mb.write_master_fits()
    
    # 1D spectrum with R=30 fit
    if True:
        bin_steps, step_templ = utils.step_templates(wlim=[5000, 18000.0], 
                                                     R=30, round=10)  

        tfit = mb.template_at_z(z=0, templates=step_templ,
                                fit_background=True, fitter='lstsq', 
                                get_uncertainties=2)
        
        fig1 = mb.oned_figure(figsize=[5,3], tfit=tfit, show_beams=True, 
                              scale_on_stacked=True, ylim_percentile=5)
                              
        outroot = '{0}_{1:05d}.R{2:.0f}'.format(root, id, 30)
        hdu = mb.oned_spectrum_to_hdu(outputfile=outroot + '.fits',
                                      tfit=tfit, wave=bin_steps)
        
        fig1.savefig(outroot+'.png')
        del(hdu)
        
        # Drizzled spectrum
        hdu, fig = mb.drizzle_grisms_and_PAs(fcontam=args['fcontam'],
                                             flambda=False, 
                                             kernel='point', size=32, 
                                             zfit=tfit, diff=False)

        hdu[0].header['GRIZLIV'] = (grizli__version, 'Grizli version')
                                             
        fig.savefig('{0}_{1:05d}.stack.png'.format(root, id))

        hdu.writeto('{0}_{1:05d}.stack.fits'.format(root, id), 
                    overwrite=True)
        
        plt.close('all')
        del(hdu)
        
    outfiles = ['{0}_{1:05d}.beams.fits'.format(root, id)]
    outfiles += glob.glob(outroot+'*')
    outfiles += glob.glob('{0}_{1:05d}.stack*'.format(root, id))
    
    return outfiles
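A hedged invocation sketch; the field, bucket, and object id below are placeholders, and the bucket layout must match the `Pipeline/{root}/Extractions/` prefixes used above:

# Hypothetical call; 'aws-grivam' is the default bucket used elsewhere in
# these examples, and object id 277 is a placeholder
outfiles = extract_beams_from_flt('j001452+091221', 'aws-grivam', 277,
                                  clean=True, silent=False)
print(outfiles)  # list of output filenames, or False if no beams were found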
Code example #8
def run_grizli_fit(event):
    import os
    import glob
    import time
    import boto3
    import json
    import shutil
    import gc

    import numpy as np
    import matplotlib.pyplot as plt
    
    import grizli
    from grizli import fitting, utils, multifit
    
    try:
        from grizli.aws import db as grizli_db
        dbFLAGS = grizli_db.FLAGS
    except:
        pass
        
    utils.set_warnings()
    
    #event = {'s3_object_path':'Pipeline/j001452+091221/Extractions/j001452+091221_00277.beams.fits'}
    
    # TRUE_OPTIONS / FALSE_OPTIONS are module-level lists of accepted
    # truthy / falsy values for string event parameters
    silent = False
    if 'silent' in event:
        silent = event['silent'] in TRUE_OPTIONS
        
    ###
    ### Parse event arguments
    ### 
    event_kwargs = {}
    for k in event:
        
        # Lists
        if isinstance(event[k], str):
            # Split lists
            if ',' in event[k]:
                try:
                    event_kwargs[k] = np.cast[float](event[k].split(','))
                except:
                    event_kwargs[k] = event[k].split(',')
            else:
                event_kwargs[k] = event[k]
        else:
            try:
                event_kwargs[k] = json.loads(event[k])
            except:
                event_kwargs[k] = event[k]
                
    # Defaults
    if 'skip_started' not in event_kwargs:
        event_kwargs['skip_started'] = True
        
    for k in ['quasar_fit', 'extract_from_flt', 'fit_stars', 'beam_info_only']:
        if k not in event_kwargs:
            event_kwargs[k] = False
    
    if event_kwargs['beam_info_only'] in TRUE_OPTIONS:
        dbtable = 'multibeam'        
    elif event_kwargs['quasar_fit'] in TRUE_OPTIONS:
        dbtable = 'redshift_fit_quasar'
    elif event_kwargs['fit_stars'] in TRUE_OPTIONS:
        dbtable = 'stellar_fit'
    else:
        dbtable = 'redshift_fit'
            
    if not silent:
        print('Grizli version: ', grizli.__version__)
    
    # Disk space
    total, used, free = shutil.disk_usage("/")    
    if not silent:
        print('Disk info: Total = {0:.2f} / Used = {1:.2f} / Free = {2:.2f}'.format(total // (2**20), used // (2**20), free // (2**20)))

    ## Output path
    if 'output_path' in event:
        output_path = event['output_path']
    else:
        output_path = None
    
    if 'bucket' in event:
        event_kwargs['bucket'] = event['bucket']
    else:
        event_kwargs['bucket'] = 'aws-grivam'
                        
    if 'working_directory' in event:
        os.chdir(event['working_directory'])
    else:
        os.chdir('/tmp/')
    
    if not silent:
        print('Working directory: {0}'.format(os.getcwd()))
    
    files = glob.glob('*')
    files.sort()
    
    # Filenames, etc.
    beams_file = os.path.basename(event['s3_object_path'])
    root = beams_file.split('_')[0]
    id = int(beams_file.split('_')[1].split('.')[0])
    
    try:
        db_status = grizli_db.get_redshift_fit_status(root, id, table=dbtable)
    except:
        db_status = -1
                
    # Initial log
    start_log = '{0}_{1:05d}.start.log'.format(root, id)
    full_start = 'Pipeline/{0}/Extractions/{1}'.format(root, start_log)
    if ((start_log in files) | (db_status >= 0)) & event_kwargs['skip_started']:
        print('Log file {0} found in {1} (db_status={2})'.format(start_log, os.getcwd(), db_status))
        return True
        
    if not silent:
        for i, file in enumerate(files):
            print('Initial file ({0}): {1}'.format(i+1, file))
    
    if os.path.exists('{0}/matplotlibrc'.format(grizli.GRIZLI_PATH)):
        os.system('cp {0}/matplotlibrc .'.format(grizli.GRIZLI_PATH))
    
    s3 = boto3.resource('s3')
    s3_client = boto3.client('s3')
    bkt = s3.Bucket(event_kwargs['bucket'])
        
    if event_kwargs['skip_started']:
        res = [r.key for r in bkt.objects.filter(Prefix=full_start)]
        if res:
            print('Already started ({0}), aborting.'.format(start_log))
            return True
            
    fp = open(start_log,'w')
    fp.write(time.ctime()+'\n')
    fp.close()
    bkt.upload_file(start_log, full_start)
    
    # Download fit arguments
    if 'force_args' in event:
        force_args = event['force_args'] in TRUE_OPTIONS
    else:
        force_args = False
        
    args_files = ['{0}_fit_args.npy'.format(root), 'fit_args.npy']
    for args_file in args_files:
        if (not os.path.exists(args_file)) | force_args:
            aws_file = 'Pipeline/{0}/Extractions/{1}'.format(root, args_file)
            try:
                bkt.download_file(aws_file, './{0}'.format(args_file),
                              ExtraArgs={"RequestPayer": "requester"})
                print('Use args_file = {0}'.format(args_file))
                break
            except:
                continue
            
    # If no beams file in the bucket, try to generate it
    put_beams=False
    try:
        if not os.path.exists(beams_file):
            bkt.download_file(event['s3_object_path'], './{0}'.format(beams_file), ExtraArgs={"RequestPayer": "requester"})
            put_beams = False
    except:
        print('Extract from GrismFLT object!')
        if 'clean' in event:
            if isinstance(event['clean'], str):
                run_clean = event['clean'].lower() in ['true', 'y', '1']
            else:
                run_clean = event['clean']
        else:
            run_clean = True
        
        try:
            # Extracting beams
            grizli_db.update_redshift_fit_status(root, id, 
                                                status=dbFLAGS['start_beams'],
                                                table=dbtable)
        except:
            print('Set DB flag failed: start_beams')
            pass
            
        status = extract_beams_from_flt(root, event_kwargs['bucket'], id, 
                                        clean=run_clean, silent=silent)
        
        # Garbage collector
        gc.collect()
        
        if status is False:
            return False
        else:
            beams_file = status[0]
        
        try:
            # Beams are done
            grizli_db.update_redshift_fit_status(root, id, 
                                                 status=dbFLAGS['done_beams'],
                                                 table=dbtable)
        except:
            pass
            
        put_beams = True
        
        # upload it now
        output_path = 'Pipeline/{0}/Extractions'.format(root)
        for outfile in status:
            aws_file = '{0}/{1}'.format(output_path, outfile)
            print(aws_file)
            bkt.upload_file(outfile, aws_file, 
                        ExtraArgs={'ACL': 'public-read'})
            
    if ('run_fit' in event) & (dbtable == 'redshift_fit'):
        if event['run_fit'] in FALSE_OPTIONS:
            res = bkt.delete_objects(Delete={'Objects':[{'Key':full_start}]})
            
            try:
                grizli_db.update_redshift_fit_status(root, id, 
                                                 status=dbFLAGS['no_run_fit'],
                                                 table=dbtable)
            except:
                pass
            
            return True
    
    utils.fetch_acs_wcs_files(beams_file, bucket_name=event_kwargs['bucket'])
    
    # Update the multibeam/beam_geometry tables
    if os.path.exists(beams_file):
        args = np.load(args_file, allow_pickle=True)[0]
        for arg in event_kwargs:
            if arg in args:
                args[arg] = event_kwargs[arg]
                
        grizli_db.multibeam_to_database(beams_file, Rspline=15, force=False, 
                                        **args)
        
    if dbtable == 'multibeam':
        ### Done
        res = bkt.delete_objects(Delete={'Objects':[{'Key':full_start}]})
        return True
                
    # Download WCS files
    # if event_kwargs['check_wcs']:
    #     # WCS files for ACS
    #     files = [obj.key for obj in bkt.objects.filter(Prefix='Pipeline/{0}/Extractions/j'.format(root))]
    #     for file in files:
    #         if 'wcs.fits' in file:
    #             if os.path.exists(os.path.basename(file)):
    #                 continue
    #             
    #             bkt.download_file(file, os.path.basename(file),
    #                               ExtraArgs={"RequestPayer": "requester"})
     
    # Is zr in the event dict?
    # if 'zr' in event:
    #     zr = list(np.cast[float](event['zr']))
    # else:
    #     try:
    #         zr = np.load('fit_args.npy')[0]['zr']
    #     except:
    #         zr = np.load('fit_args.npy', allow_pickle=True)[0]['zr']
    
    # Directory listing
    files = glob.glob('*')
    files.sort()
    
    for i, file in enumerate(files):
        print('File ({0}): {1}'.format(i+1, file))
    
    try:
        files = glob.glob('{0}_{1:05d}*R30.fits'.format(root, id)) 
        if (len(files) > 0) & (dbtable == 'redshift_fit'):
            grizli_db.send_1D_to_database(files=files)
    except:
        print('Failed to send R30 to spec1d database')
        pass
                    
    ###   
    ### Run the fit
    try:
        grizli_db.update_redshift_fit_status(root, id, table=dbtable,
                            status=dbFLAGS['start_redshift_fit'])
    except:
        print('Set DB flag failed: start_redshift_fit')
        pass
    
    if event_kwargs['quasar_fit'] in TRUE_OPTIONS:
                
        # Don't recopy beams file
        put_beams = False
        
        # Don't make line maps
        if 'min_line_sn' not in event_kwargs:
            event_kwargs['min_line_sn'] = np.inf
        
        # Don't make drizzled psfs
        if 'get_ir_psfs' not in event_kwargs:
            event_kwargs['get_ir_psfs'] = False
        
        # Fit line widths
        if 'get_line_width' not in event_kwargs:
            event_kwargs['get_line_width'] = True
        
        # sys_err
        if 'sys_err' not in event_kwargs:
            event_kwargs['sys_err'] = 0.05
        
        # Don't use photometry
        event_kwargs['phot_obj'] = None
        event_kwargs['use_phot_obj'] = False
    
        event_kwargs['fit_only_beams'] = True
        event_kwargs['fit_beams'] = False
        
        templ_args = {'uv_line_complex': True, 
                      'broad_fwhm':2800,
                      'narrow_fwhm':1000,
                      'fixed_narrow_lines':True,
                      'Rspline':15,
                      'include_reddened_balmer_lines':False}
        
        for k in templ_args:
            if k in event_kwargs:
                templ_args[k] = event_kwargs.pop(k)
        
        if templ_args['broad_fwhm'] < 0:
            use_simple_templates=True
            templ_args['broad_fwhm'] *= -1
        else:
            use_simple_templates = False
            
        print('load_quasar_templates(**{0})'.format(templ_args))
        q0, q1 = utils.load_quasar_templates(**templ_args)
        
        if use_simple_templates:
            x0 = utils.load_templates(full_line_list=['highO32'],
                                      continuum_list=['quasar_lines.txt',
                                                      'red_blue_continuum.txt'],
                                      line_complexes=False, fwhm=1000)
             
            for t in q0:
                if 'bspl' in t:
                    x0[t] = q0[t]
            
            q0 = x0
            q1['red_blue_continuum.txt'] = x0['red_blue_continuum.txt']
            
        # Quasar templates with fixed line ratios
        # q0, q1 = utils.load_quasar_templates(uv_line_complex=True,
        #                                     broad_fwhm=2800, narrow_fwhm=1000,
        #                                     fixed_narrow_lines=True, 
        #                                     Rspline=15)
        
        if 'zr' not in event_kwargs:
            event_kwargs['zr'] = [0.03, 6.8]
        if 'fitter' not in event_kwargs:
            event_kwargs['fitter'] = ['lstsq', 'lstsq']
        
        print('run_all_parallel: {0}'.format(event_kwargs))
            
        fitting.run_all_parallel(id, t0=q0, t1=q1, args_file=args_file, 
                                 **event_kwargs)
        
        if output_path is None:
            #output_path = 'Pipeline/QuasarFit'.format(root)
            output_path = 'Pipeline/{0}/Extractions'.format(root)
    
    elif event_kwargs['fit_stars'] in TRUE_OPTIONS:
                    
        args = np.load(args_file, allow_pickle=True)[0]
        
        if 'psf' in event_kwargs:
            args['psf'] = event_kwargs['psf'] in TRUE_OPTIONS     
        
        for k in ['fcontam', 'min_sens', 'sys_err']:
            if k in event_kwargs:
                print('Set arg {0}={1}'.format(k, event_kwargs[k]))
                args[k] = event_kwargs[k]
        
        # Load MultiBeam    
        mb = multifit.MultiBeam(beams_file, **args)
        
        if 'fit_trace_shift' in args:
            if args['fit_trace_shift']:
                tr = mb.fit_trace_shift()
                         
        if 'spline_correction' in event_kwargs:
            spline_correction = event_kwargs['spline_correction'] in TRUE_OPTIONS     
        else:
            spline_correction = True
        
        if 'fit_background' in event_kwargs:
            fit_background = event_kwargs['fit_background'] in TRUE_OPTIONS     
        else:
            fit_background = True

        if 'fitter' in event_kwargs:
            fitter = event_kwargs['fitter']    
        else:
            fitter = 'lstsq'
        
        if 'Rspline' in event_kwargs:
            Rspline = event_kwargs['Rspline']    
        else:
            Rspline = 15
            
        if Rspline == 15:
            logg_list = [4.5]
        else:
            logg_list = utils.PHOENIX_LOGG
        
        if 'add_carbon_star' in event_kwargs:
            add_carbon_star = event_kwargs['add_carbon_star']    
        else:
            add_carbon_star = 25
            
        if 'use_phoenix' in event_kwargs:
            p = event_kwargs.pop('use_phoenix')
            if p in TRUE_OPTIONS:
                tstar = utils.load_phoenix_stars(logg_list=logg_list, 
                                             add_carbon_star=add_carbon_star)
            else:
                tstar = utils.load_templates(stars=True,
                                             add_carbon_star=add_carbon_star)
        else:
            tstar = utils.load_phoenix_stars(logg_list=logg_list,
                                             add_carbon_star=add_carbon_star)
        
        kws = {'spline_correction':spline_correction, 
               'fit_background':fit_background,
               'fitter':fitter,
               'spline_args':{'Rspline':Rspline}}
        
        print('kwargs: {0}'.format(kws))
                   
        # Fit the stellar templates
        _res = mb.xfit_star(tstar=tstar, oned_args={}, **kws)
        
        _res[0].savefig('{0}_{1:05d}.star.png'.format(root, id))

        # Save log info
        fp = open('{0}_{1:05d}.star.log'.format(root, id), 'w')
        fp.write(_res[1])
        fp.close()
                
        if output_path is None:
            #output_path = 'Pipeline/QuasarFit'.format(root)
            output_path = 'Pipeline/{0}/Extractions'.format(root)
        
    else:
        
        # Normal galaxy redshift fit
        fitting.run_all_parallel(id, fit_only_beams=True, fit_beams=False,  
                                 args_file=args_file, **event_kwargs)
        
        if output_path is None:
            output_path = 'Pipeline/{0}/Extractions'.format(root)
            
    # Output files
    files = glob.glob('{0}_{1:05d}*'.format(root, id))
    for file in files:
        if ('beams.fits' not in file) | put_beams:
            aws_file = '{0}/{1}'.format(output_path, file)
            
            if event_kwargs['quasar_fit'] in TRUE_OPTIONS:
                # Don't copy stack
                if 'stack' in file:
                    continue
                
                # Add qso extension on outputs
                aws_file = aws_file.replace('_{0:05d}.'.format(id), 
                                            '_{0:05d}.qso.'.format(id))
                                            
            print('Upload {0} -> {1}'.format(file, aws_file))
            
            bkt.upload_file(file, aws_file, ExtraArgs={'ACL': 'public-read'})
    
    # Put data in the redshift_fit database table
    try:
        if dbtable == 'stellar_fit':
            rowfile = '{0}_{1:05d}.star.log'.format(root, id)
        else:    
            rowfile = '{0}_{1:05d}.row.fits'.format(root, id)
        
        if os.path.exists(rowfile):
            grizli_db.add_redshift_fit_row(rowfile, table=dbtable, 
                                           verbose=True)
        
        # Add 1D spectra
        files = glob.glob('{0}_{1:05d}*1D.fits'.format(root, id))
        if (len(files) > 0) & (dbtable == 'redshift_fit'):
            grizli_db.send_1D_to_database(files=files)
        
    except:
        print('Update row failed')
        pass
    
    # Remove start log now that done
    res = bkt.delete_objects(Delete={'Objects':[{'Key':full_start}]})
    
    # Garbage collector
    gc.collect()
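For reference, a sketch of the event payload this handler parses, assembled from the keys it reads above (values are illustrative; comma-separated strings are split into lists by the parser at the top of the function):

event = {
    # S3 key of the beams file (path from the commented example above)
    's3_object_path': 'Pipeline/j001452+091221/Extractions/j001452+091221_00277.beams.fits',
    'bucket': 'aws-grivam',     # default when omitted
    'working_directory': '/tmp/',
    'silent': False,
    'skip_started': True,       # default
    'quasar_fit': False,        # selects the redshift_fit_quasar table
    'fit_stars': False,         # selects the stellar_fit table
    'beam_info_only': False,    # selects the multibeam table
    'zr': '0.03,6.8',           # parsed into [0.03, 6.8]
}
run_grizli_fit(event)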
Code example #9
def auto_run(root='j023507-040202', args=[]):
    import os
    import yaml
    import time

    import numpy as np

    import matplotlib.pyplot as plt
    plt.ioff()

    from grizli import utils
    utils.set_warnings()

    from grizli.pipeline import auto_script, default_params
    import grizli

    # Run query again

    kwargs = auto_script.get_yml_parameters()

    kwargs['fetch_files_args']['reprocess_parallel'] = True
    kwargs['preprocess_args']['skip_single_optical_visits'] = False
    kwargs['run_fine_alignment'] = False
    kwargs['visit_prep_args']['reference_catalogs'] = [
        'PS1', 'DES', 'NSC', 'SDSS', 'GAIA', 'WISE'
    ]

    # params = {
    # # Preprocessing flags
    # 's3_sync': True,               # Fetch data from AWS
    # 'inspect_ramps': False,        # Visual inspection on ramps
    # 'remove_bad':True,             # Remove bad EXPFLAG visits
    # 'reprocess_parallel': True,    # Reprocess ramps in parallel
    # 'is_dash': False,              # Flag if visits are in DASH mode
    # 'filters': auto_script.VALID_FILTERS,   #  Only use these filters
    # 'run_parse_visits': True,      # Parse visits to _visits.npy
    # 'combine_minexp':2,            # Try to combine visits with 2 or fewer exp
    # 'is_parallel_field': False,    # If parallels, change visit parsing
    # 'skip_single_optical_visits': False, # Skip preprocess if single exp
    # 'fix_stars': True,             # Fill DQ flagged centers of stars
    # 'imaging_bkg_params': {'bh': 128, 'bw': 128, 'fh': 3, 'fw': 3,
    #                    'pixel_scale': 0.06}, # Imaging background
    #
    # # Alignment
    # 'manual_alignment': False,    # Interactive alignment
    # 'align_clip': 120,            # Parameter for initial alignment pairs
    # 'align_mag_limits': [14, 24], # Magnitude range for alignment sources
    # 'align_min_overlap': 0.2,     # Overlap fraction to use HST catalogs
    # 'align_outlier_threshold': 4, # Parameter for triangle matches
    # 'align_rms_limit': 2,    # Force shifts = 0 if rms > rms limit
    # 'align_simple': False,   # ??
    # 'catalogs': ['PS1','DES','NSC','SDSS','GAIA','WISE'], # Ref cat order
    # 'gaia_by_date': True,   # Get GAIA catalogs by visit epoch
    # 'master_radec': None,    # Force reference
    # 'parent_radec': None,    # Reference to use if no HST overlaps
    #
    # 'run_fine_alignment': False, # Run Fine alignment script
    # 'fine_radec': None,         # Fine alingment reference,  else GAIA
    #
    # # Image Mosaics
    # 'make_mosaics': True,             # Make full-field mosaics
    # 'fill_mosaics': 'grism',          # Fill empty parts of IR mosaics
    # 'mask_spikes': False,             # Mask IR diffraction spikes
    # 'combine_all_filters': False,      # Combine ACS & IR
    # 'mosaic_pixel_scale': None,       # Pixel scale of mosaics (0.06)
    # 'half_optical_pixscale': False,   # Make smaller ACS/UVIS pixel scale
    # 'mosaic_pixfrac': 0.75,           # Pixfrac of mosaic images
    # 'reference_wcs_filters': auto_script.VALID_FILTERS, # These define WCS if they exist
    # 'make_phot': True,                # Make photometric catalog
    #
    # # Spectral Extractions
    # 'only_preprocess': False, # Make grism models
    # 'run_extractions': False, # Don't extract grism spectra
    # 'maglim': [17, 26],       # Magnitude range of grism spectra
    # 'run_fit': False          # Fit grism spectra
    # }

    try:
        tab = utils.GTable.gread('{0}_footprint.fits'.format(root))

        #IS_PARALLEL = utils.column_string_operation(tab['proposal_pi'], ['Malkan', 'Trenti'], method='count', logical='or').sum() > 0

        IS_PARALLEL = (tab['target'] == 'ANY').sum() > 0

        IS_PARALLEL = bool(IS_PARALLEL)
    except:
        IS_PARALLEL = False

    master_radec = '{0}/{1}_master.radec'.format(os.getcwd(), root)
    if not os.path.exists(master_radec):
        master_radec = None

    parent_radec = '{0}/{1}_parent.radec'.format(os.getcwd(), root)
    if not os.path.exists(parent_radec):
        parent_radec = None

    kwargs['preprocess_args']['parent_radec'] = parent_radec
    kwargs['preprocess_args']['master_radec'] = master_radec

    # kwargs['is_dash'] = '14114' in tab['proposal_id']
    # if kwargs['is_dash']:
    #     print('\n\n!!! Process as DASH !!!!\n\n')

    # Limited filters
    kwargs['only_preprocess'] = False
    kwargs['filters'] = default_params.IR_W_FILTERS + default_params.IR_GRISMS
    kwargs['filters'] += default_params.IR_M_FILTERS

    # Optical filters.  Bluer than F555W often fail for low source counts?
    kwargs['filters'] += [
        'F814W', 'F850LP', 'F775W', 'F625W', 'F606W', 'F555W', 'F350LP',
        'F600LP'
    ]  #, 'G800L']

    kwargs['is_parallel_field'] = IS_PARALLEL

    pixel_scale = 0.06 + 0.02 * IS_PARALLEL
    kwargs['mosaic_args']['wcs_params']['pixel_scale'] = pixel_scale
    kwargs['mosaic_args']['mosaic_pixfrac'] = pixel_scale / 0.12

    ### Force conservative pixel scale
    kwargs['mosaic_args']['wcs_params']['pixel_scale'] = 0.1
    kwargs['mosaic_args']['mosaic_pixfrac'] = 0.33  #pixel_scale/0.12
    kwargs['mosaic_args']['half_optical_pixscale'] = True

    # Try less aggressive background if CLASH
    # IS_BRIGHT_CLUSTER = utils.column_string_operation(tab['proposal_pi'], ['Postman', 'Lotz'], method='count', logical='or').sum() > 0
    # if IS_BRIGHT_CLUSTER:
    #     print('CLUSTER!')
    # Force conservative background
    conservative_background = {
        'bh': 256,
        'bw': 256,
        'fh': 3,
        'fw': 3,
        'pixel_scale': 0.128
    }  # Imaging background

    kwargs['visit_prep_args']['imaging_bkg_params'] = conservative_background

    print('BKG PARAMS: {0}'.format(conservative_background))

    # Command line arguments
    if args:
        for arg in args:
            if arg.startswith('--'):
                if arg in [
                        '--grism', '--sync', '--noclean', '--lambda_verbose'
                ]:
                    continue

                pspl = arg.strip('--').split('=')[0]
                val = arg.split('=')[1]

                if pspl == 'redo_query':
                    if val.lower() in ['true']:
                        redo_query(root=root)

                    # Next argument
                    continue

                if pspl == 'extra_filters':
                    kwargs['filters'] += [f.upper() for f in val.split(',')]
                    kwargs['filters'] = list(np.unique(kwargs['filters']))
                    kwargs['filters'] = [str(f) for f in kwargs['filters']]
                    print('Extra filters: {0}'.format(val.split(',')))
                    continue

                if pspl == 'remove_filters':
                    pop_filters = [f.upper() for f in val.split(',')]
                    for f in pop_filters:
                        if f in kwargs['filters']:
                            kwargs['filters'].pop(kwargs['filters'].index(f))

                    print('Filters after pop: {0}'.format(kwargs['filters']))
                    continue

                # Split nested dictionaries by '.'
                if '.' in pspl:
                    valid = False
                    ps = pspl.split('.')
                    d = kwargs
                    for p in ps:
                        if p in d:
                            valid = True
                            if isinstance(d[p], dict):
                                d = d[p]
                else:
                    d = kwargs
                    valid = pspl in kwargs
                    p = pspl

                if valid:
                    if val.strip() in ['None', 'null']:
                        d[p] = None
                        print('Runtime argument: {0} = {1}/[None]'.format(
                            p, val))
                    elif isinstance(d[p], list):
                        lval = val.replace('[', '').replace(']', '').split(',')

                        # Item should be a list
                        if (len(lval) < len(d[p])) & ('filter' not in arg):
                            msg = 'Parameter {0} should be a list like {1}'
                            raise (ValueError(msg.format(arg, d[p])))

                        try:
                            lval = list(np.cast[float](lval))
                        except:
                            pass

                        d[p] = lval
                        print('Runtime argument: {0} = {1}'.format(p, lval))
                    elif '.ids' in arg:
                        print(arg, p, val)

                        if ',' in val:
                            val = np.cast[int](val.split(','))

                        d[p] = val
                    else:
                        if isinstance(d[p], bool):
                            if val.isdigit():
                                d[p] = int(val)
                            else:
                                d[p] = val.lower() == 'true'
                        else:
                            try:
                                d[p] = d[p].__class__(val)
                            except:
                                try:
                                    d[p] = float(val)
                                except:
                                    d[p] = val

                        print('Runtime argument: {0} = {1}'.format(p, d[p]))

    # Save YAML parameter file
    # Need copies of a few things that will break yaml.dump
    # phot_apertures = kwargs['multiband_catalog_args']['phot_apertures']
    # filter_kernel = kwargs['multiband_catalog_args']['detection_params']['filter_kernel']
    #
    # kwargs['multiband_catalog_args']['phot_apertures'] = None
    # kwargs['multiband_catalog_args']['detection_params']['filter_kernel'] = None
    #
    # fp = open('{0}.run.yml'.format(root),'w')
    # fp.write('# {0}\n'.format(time.ctime()))
    # fp.write('# Grizli version = {0}\n'.format(grizli.__version__))
    #
    # for k in kwargs:
    #     try:
    #         d = {k:kwargs[k].copy()}
    #     except:
    #         d = {k:kwargs[k]}
    #
    #     yaml.dump(d, stream=fp, default_flow_style=False)
    #
    # fp.close()
    #
    # kwargs['multiband_catalog_args']['phot_apertures'] = phot_apertures
    # kwargs['multiband_catalog_args']['detection_params']['filter_kernel'] = filter_kernel

    # Master radec
    if '--preprocess_args.parent_radec' not in [a.split('=')[0] for a in args]:
        if 'j021732m0512' in root:
            radec = 'gaia_sxds-dud-HSCdr2_corr_uds.radec'
            radec = 'uds_prelim_hst.radec'
        elif 'j033236m2748' in root:
            #radec = 'gaia-pm_mast-hlf_corr_cdfs.radec'
            radec = 'cdfs_prelim_hst.radec'
        elif 'j100012p0210' in root:
            radec = 'hsc_cosmos-dud_dr2.radec'
        elif 'j141956p5255' in root:
            #radec = 'gaia_ps1+hsc_corr_aegis.radec'
            radec = 'egs_prelim_hst.radec'
        elif 'j123656p6215' in root:
            radec = 'goodsn_prelim_hst.radec'
        elif ('xxxj001352m3023' in root) | ('xxxj001420m3024' in root):
            radec = 'gaia_subaru_corr_abell2744.radec'
        elif root in [
                'j224844m4432', 'j224916m4432', 'j024012m0138', 'j023952m0135'
        ]:
            radec = '{0}_f814w_mag23.radec'.format(root)
            kwargs['preprocess_args']['master_radec'] = '../../' + radec
        elif root in ['j114936p2222']:
            radec = '{0}_f160w_mag23.radec'.format(root)
            kwargs['preprocess_args']['master_radec'] = '../../' + radec
        else:
            radec = None

        if radec is not None:
            os.system(
                'aws s3 cp s3://grizli/AlignmentCatalogs/{0} .'.format(radec))
            kwargs['preprocess_args']['parent_radec'] = '../../' + radec

    for k in kwargs:
        if kwargs[k] == 'NoneType':
            kwargs[k] = None
        elif isinstance(kwargs[k], dict):
            for k_i in kwargs[k]:
                if kwargs[k][k_i] == 'NoneType':
                    kwargs[k][k_i] = None

    output_yml = '{0}.auto_script.yml'.format(root)
    auto_script.write_params_to_yml(kwargs, output_file=output_yml)

    auto_script.go(root=root, **kwargs)

    #auto_script.go(root=root, maglim=[19, 23], HOME_PATH=HOME_PATH, inspect_ramps=False, manual_alignment=False, is_parallel_field=IS_PARALLEL, reprocess_parallel=True, only_preprocess=False, run_extractions=False, run_fit=False, s3_sync='cp', fine_radec=None, combine_all_filters=False, gaia_by_date=True, align_simple=False, align_clip=100, master_radec=master_radec, parent_radec=parent_radec, is_dash=False, run_parse_visits=True, reference_wcs_filters=['F160W','F140W','F125W','F105W','F110W','F098M','F814W','F850LP', 'F606W','F435W'])

    os.chdir('../Prep/')
    auto_script.make_report(root, make_rgb=True)

    # Done without errors
    os.system('date > /tmp/{0}.success'.format(root))
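The `args` list mimics command-line options; a hedged example of the `--key=value` syntax handled by the parser above (a '.' in the key descends into nested parameter dictionaries):

# Hypothetical runtime arguments; each '--key=value' item updates kwargs
auto_run(root='j023507-040202',
         args=['--run_fine_alignment=False',
               '--preprocess_args.skip_single_optical_visits=True',
               '--extra_filters=F336W'])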
Code example #10
File: run_MPI.py Project: gbrammer/grizli
"""
Needs 'fit_args.npy' created by `auto_script.generate_fit_params`.
"""
import time
import os
import glob

import numpy as np

import matplotlib.pyplot as plt
plt.ioff()

from grizli.fitting import run_all_parallel
from grizli import utils
utils.set_warnings()

def find_ids():
    # Find objects with extracted spectra that still need to be fit
    all_files = glob.glob('*beams.fits')
    files = []
    for file in all_files:
        if not os.path.exists(file.replace('beams.fits', 'full.fits')):
            files.append(file)

    print('{0} files to fit'.format(len(files)))
        
    ids = [int(file.split('_')[1].split('.')[0]) for file in files]
    
    return ids
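A minimal serial driver for `find_ids`, as a sketch (the original run_MPI.py distributes these ids across MPI ranks instead); `run_all_parallel` is imported at the top of the script:

if __name__ == "__main__":
    ids = find_ids()
    for id_i in ids:
        run_all_parallel(id_i, fit_only_beams=True, fit_beams=False)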
    
Code example #11
def run_grizli_fit(event):
    import os
    import glob
    import time
    import boto3
    import json
    import shutil
    import gc

    import numpy as np

    import grizli
    from grizli import fitting, utils, multifit
    utils.set_warnings()

    #event = {'s3_object_path':'Pipeline/j001452+091221/Extractions/j001452+091221_00277.beams.fits'}

    silent = False
    if 'silent' in event:
        silent = event['silent'] in TRUE_OPTIONS

    ###
    ### Parse event arguments
    ###
    event_kwargs = {}
    for k in event:

        # Lists
        if isinstance(event[k], str):
            # Split lists
            if ',' in event[k]:
                try:
                    event_kwargs[k] = np.cast[float](event[k].split(','))
                except:
                    event_kwargs[k] = event[k].split(',')
            else:
                event_kwargs[k] = event[k]
        else:
            try:
                event_kwargs[k] = json.loads(event[k])
            except:
                event_kwargs[k] = event[k]

    # Defaults
    if 'skip_started' not in event_kwargs:
        event_kwargs['skip_started'] = True

    for k in ['quasar_fit', 'extract_from_flt']:
        if k not in event_kwargs:
            event_kwargs[k] = False

    if not silent:
        print('Grizli version: ', grizli.__version__)

    # Disk space
    total, used, free = shutil.disk_usage("/")
    if not silent:
        print('Disk info: Total = {0:.2f} / Used = {1:.2f} / Free = {2:.2f}'.
              format(total // (2**20), used // (2**20), free // (2**20)))

    ## Output path
    if 'output_path' in event:
        output_path = event['output_path']
    else:
        output_path = None

    if 'bucket' in event:
        event_kwargs['bucket'] = event['bucket']
    else:
        event_kwargs['bucket'] = 'aws-grivam'

    if 'working_directory' in event:
        os.chdir(event['working_directory'])
    else:
        os.chdir('/tmp/')

    if not silent:
        print('Working directory: {0}'.format(os.getcwd()))

    files = glob.glob('*')
    files.sort()

    # Filenames, etc.
    beams_file = os.path.basename(event['s3_object_path'])
    root = beams_file.split('_')[0]
    id = int(beams_file.split('_')[1].split('.')[0])

    # Initial log
    start_log = '{0}_{1:05d}.start.log'.format(root, id)
    full_start = 'Pipeline/{0}/Extractions/{1}'.format(root, start_log)
    if (start_log in files) & event_kwargs['skip_started']:
        print('Log file {0} found in {1}'.format(start_log, os.getcwd()))
        return True

    if not silent:
        for i, file in enumerate(files):
            print('Initial file ({0}): {1}'.format(i + 1, file))

    os.system('cp {0}/matplotlibrc .'.format(grizli.GRIZLI_PATH))

    s3 = boto3.resource('s3')
    s3_client = boto3.client('s3')
    bkt = s3.Bucket(event_kwargs['bucket'])

    if event_kwargs['skip_started']:
        res = [r.key for r in bkt.objects.filter(Prefix=full_start)]
        if res:
            print('Already started ({0}), aborting.'.format(start_log))
            return True

    fp = open(start_log, 'w')
    fp.write(time.ctime() + '\n')
    fp.close()
    bkt.upload_file(start_log, full_start)

    # Download fit arguments
    args_file = 'fit_args.npy'
    needs_args = False
    if not os.path.exists(args_file):
        needs_args = True
    else:
        if 'force_args' in event:
            needs_args = event['force_args'] in TRUE_OPTIONS

    if needs_args:
        aws_args = 'Pipeline/{0}/Extractions/fit_args.npy'.format(root)
        bkt.download_file(aws_args,
                          './fit_args.npy',
                          ExtraArgs={"RequestPayer": "requester"})

    # If no beams file in the bucket, try to generate it
    put_beams = False
    try:
        if not os.path.exists(beams_file):
            bkt.download_file(event['s3_object_path'],
                              './{0}'.format(beams_file),
                              ExtraArgs={"RequestPayer": "requester"})
            put_beams = False
    except:
        print('Extract from GrismFLT object!')
        if 'clean' in event:
            if isinstance(event['clean'], str):
                run_clean = event['clean'].lower() in ['true', 'y', '1']
            else:
                run_clean = event['clean']
        else:
            run_clean = True

        status = extract_beams_from_flt(root,
                                        event_kwargs['bucket'],
                                        id,
                                        clean=run_clean,
                                        silent=silent)

        # Garbage collector
        gc.collect()

        if status is False:
            return False
        else:
            beams_file = status[0]

        put_beams = True

        # upload it now
        output_path = 'Pipeline/{0}/Extractions'.format(root)
        for outfile in status:
            aws_file = '{0}/{1}'.format(output_path, outfile)
            print(aws_file)
            bkt.upload_file(outfile,
                            aws_file,
                            ExtraArgs={'ACL': 'public-read'})

    if 'run_fit' in event:
        if event['run_fit'] in FALSE_OPTIONS:
            return True

    utils.fetch_acs_wcs_files(beams_file, bucket_name=event_kwargs['bucket'])

    # Download WCS files
    # if event_kwargs['check_wcs']:
    #     # WCS files for ACS
    #     files = [obj.key for obj in bkt.objects.filter(Prefix='Pipeline/{0}/Extractions/j'.format(root))]
    #     for file in files:
    #         if 'wcs.fits' in file:
    #             if os.path.exists(os.path.basename(file)):
    #                 continue
    #
    #             bkt.download_file(file, os.path.basename(file),
    #                               ExtraArgs={"RequestPayer": "requester"})

    # Is zr in the event dict?
    # if 'zr' in event:
    #     zr = list(np.cast[float](event['zr']))
    # else:
    #     try:
    #         zr = np.load('fit_args.npy')[0]['zr']
    #     except:
    #         zr = np.load('fit_args.npy', allow_pickle=True)[0]['zr']

    # Directory listing
    files = glob.glob('*')
    files.sort()

    for i, file in enumerate(files):
        print('File ({0}): {1}'.format(i + 1, file))

    ###
    ### Run the fit

    if event_kwargs['quasar_fit']:

        # Quasar templates
        uv_lines = True  #zr[1] > 3.5
        t0, t1 = utils.load_quasar_templates(uv_line_complex=uv_lines,
                                             broad_fwhm=2800,
                                             narrow_fwhm=1000,
                                             fixed_narrow_lines=True,
                                             nspline=13)

        fitting.run_all_parallel(id,
                                 t0=t0,
                                 t1=t1,
                                 fit_only_beams=True,
                                 fit_beams=False,
                                 phot_obj=None,
                                 **event_kwargs)

        if output_path is None:
            output_path = 'Pipeline/QuasarFit'

    else:

        # Normal galaxy redshift fit
        fitting.run_all_parallel(id,
                                 fit_only_beams=True,
                                 fit_beams=False,
                                 **event_kwargs)

        if output_path is None:
            output_path = 'Pipeline/{0}/Extractions'.format(root)

    # Output files
    files = glob.glob('{0}_{1:05d}*'.format(root, id))
    for file in files:
        if ('beams.fits' not in file) | put_beams:
            aws_file = '{0}/{1}'.format(output_path, file)
            print(aws_file)
            bkt.upload_file(file, aws_file, ExtraArgs={'ACL': 'public-read'})

    # Remove start log now that done
    res = bkt.delete_objects(Delete={'Objects': [{'Key': full_start}]})

    # Garbage collector
    gc.collect()