def run_grizli_fit(event):
    import os
    import glob
    import time

    import boto3
    import numpy as np

    from grizli import fitting, utils

    # Example events for interactive testing:
    # event = {'s3_object_path': 'Pipeline/j001452+091221/Extractions/j001452+091221_00277.beams.fits'}
    # event = {'s3_object_path': 'Pipeline/j0332m2743/Extractions/j0332m2743_01846.beams.fits',
    #          'check_wcs': False, 'quasar_fit': False, 'use_psf': False,
    #          'skip_started': True}
        
    ###
    ### Parse event booleans
    ### 
    event_bools = {}
    for k in ['verbose', 'check_wcs', 'quasar_fit', 'use_psf', 'skip_started']:
        if k in event:
            event_bools[k] = event[k] in ["True", True]
        else:
            event_bools[k] = False
    
    ## Output path
    if 'output_path' in event:
        output_path = event['output_path']
    else:
        output_path = None
    
    if 'bucket' in event:
        bucket = event['bucket']
    else:
        bucket = 'aws-grivam'
                            
    os.chdir('/tmp/')
    #os.system('cp {0}/matplotlibrc .'.format(grizli.GRIZLI_PATH))
    
    s3 = boto3.resource('s3')
    s3_client = boto3.client('s3')
    bkt = s3.Bucket(bucket)

    beams_file = os.path.basename(event['s3_object_path'])
    root = beams_file.split('_')[0]
    id = int(beams_file.split('_')[1].split('.')[0])
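    # e.g. 'j0332m2743_01846.beams.fits' -> root='j0332m2743', id=1846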
     
    # Initial log
    start_log = '{0}_{1:05d}.start.log'.format(root, id)
    full_start = 'Pipeline/{0}/Extractions/{1}'.format(root, start_log)
    
    if event_bools['skip_started']:
        obj = [r.key for r in bkt.objects.filter(Prefix=full_start)]
        obj += [r.key for r in bkt.objects.filter(Prefix=full_start.replace('.start.log', '.full.fits'))]
        if obj:
            print('Already started ({0}), aborting.'.format(obj))

            fp = open('/tmp/fit_redshift_single.log', 'a')
            fp.write('{0}: {1} Skip\n'.format(time.ctime(), beams_file))
            fp.close()

            return True

    fp = open('/tmp/fit_redshift_single.log', 'a')
    fp.write('{0}: {1} Run\n'.format(time.ctime(), beams_file))
    fp.close()
                
    fp = open(start_log,'w')
    fp.write(time.ctime()+'\n')
    fp.close()
    bkt.upload_file(start_log, full_start)
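    # The start log doubles as a lock: a concurrent or repeated invocation
    # with skip_started=True will see it in the bucket and abort.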
    
    if event_bools['check_wcs']:
        # WCS files for ACS
        files = [obj.key for obj in bkt.objects.filter(Prefix='Pipeline/{0}/Extractions/j'.format(root))]
        for file in files:
            if 'wcs.fits' in file:
                bkt.download_file(file, os.path.basename(file),
                                  ExtraArgs={"RequestPayer": "requester"})
 
    # Download files for the fit
    bkt.download_file(event['s3_object_path'], './{0}'.format(beams_file),
                      ExtraArgs={"RequestPayer": "requester"})

    bkt.download_file('Pipeline/{0}/Extractions/fit_args.npy'.format(root),
                      './fit_args.npy',
                      ExtraArgs={"RequestPayer": "requester"})
    
    # Is zr in the event dict?
    if 'zr' in event:
        zr = [float(z) for z in event['zr']]
    else:
        # allow_pickle is required to load the saved args dict
        zr = np.load('fit_args.npy', allow_pickle=True)[0]['zr']
     
    ###   
    ### Run the fit
    
    if event_bools['quasar_fit']:
        
        # Quasar templates
        t0, t1 = utils.load_quasar_templates(uv_line_complex=False,
                                             broad_fwhm=2800, narrow_fwhm=1000,
                                             fixed_narrow_lines=True)
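        # By grizli's usual convention, t0 is the template set for the
        # coarse redshift-grid search and t1 is used for the final fit
        # at the best redshift.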
        
        fitting.run_all_parallel(id, t0=t0, t1=t1, fit_only_beams=True,
                                 fit_beams=False,  zr=zr,
                                 use_psf=event_bools['use_psf'],
                                 verbose=event_bools['verbose'])
        
        if output_path is None:
            output_path = 'Pipeline/QuasarFit'
        
    else:
        
        # Normal galaxy redshift fit
        fitting.run_all_parallel(id, zr=zr, fit_only_beams=True,
                                 fit_beams=False,  
                                 verbose=event_bools['verbose'])
        
        if output_path is None:
            output_path = 'Pipeline/{0}/Extractions'.format(root)
        
    # Output files
    files = glob.glob('{0}_{1:05d}*'.format(root, id))
    for file in files:
        if 'beams.fits' not in file:
            print(file)
            bkt.upload_file(file, '{0}/{1}'.format(output_path, file), ExtraArgs={'ACL': 'public-read'})
        
    # Cleanup
    for file in files:
        os.remove(file)
            
    # Remove start log now that done
    res = bkt.delete_objects(Delete={'Objects':[{'Key':full_start}]})
    
    return True


# Example #2

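# NOTE: the examples below are excerpts from a larger module (grizli.aws);
# the constants and helpers they reference are assumed context, not shown on
# the original page. A minimal sketch of the truthy/falsy option lists the
# code tests against (values may differ from the actual module):
#
#     TRUE_OPTIONS = [True, 'True', 'true', 'y', '1', 1]
#     FALSE_OPTIONS = [False, 'False', 'false', 'n', '0', 0]
#
# `extract_beams_from_flt` (beam extraction from GrismFLT files) is likewise
# defined elsewhere in that module.
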
def run_grizli_fit(event):
    import os
    import glob
    import time

    import boto3
    import json
    import shutil
    import gc

    import numpy as np
    
    import matplotlib.pyplot as plt
    
    import grizli
    from grizli import fitting, utils, multifit
    
    try:
        from grizli.aws import db as grizli_db
        dbFLAGS = grizli_db.FLAGS
    except:
        pass
        
    utils.set_warnings()
    
    #event = {'s3_object_path':'Pipeline/j001452+091221/Extractions/j001452+091221_00277.beams.fits'}
    
    silent = False
    if 'silent' in event:
        silent = event['silent'] in TRUE_OPTIONS
        
    ###
    ### Parse event arguments
    ### 
    event_kwargs = {}
    for k in event:
        
        # Lists
        if isinstance(event[k], str):
            # Split lists
            if ',' in event[k]:
                try:
                    event_kwargs[k] = np.asarray(event[k].split(','), dtype=float)
                except:
                    event_kwargs[k] = event[k].split(',')
            else:
                event_kwargs[k] = event[k]
        else:
            try:
                event_kwargs[k] = json.loads(event[k])
            except:
                event_kwargs[k] = event[k]
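    # e.g. 'zr': '0.03,6.8' -> array([0.03, 6.8]);
    #      'fitter': 'lstsq,lstsq' -> ['lstsq', 'lstsq'];
    #      non-string values effectively pass through unchanged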
                
    # Defaults
    if 'skip_started' not in event_kwargs:
        event_kwargs['skip_started'] = True

    # Normalize to a bool so string values like 'False' behave as expected
    event_kwargs['skip_started'] = event_kwargs['skip_started'] in TRUE_OPTIONS
        
    for k in ['quasar_fit', 'extract_from_flt', 'fit_stars', 'beam_info_only']:
        if k not in event_kwargs:
            event_kwargs[k] = False
    
    if event_kwargs['beam_info_only'] in TRUE_OPTIONS:
        dbtable = 'multibeam'        
    elif event_kwargs['quasar_fit'] in TRUE_OPTIONS:
        dbtable = 'redshift_fit_quasar'
    elif event_kwargs['fit_stars'] in TRUE_OPTIONS:
        dbtable = 'stellar_fit'
    else:
        dbtable = 'redshift_fit'
            
    if not silent:
        print('Grizli version: ', grizli.__version__)
    
    # Disk space
    total, used, free = shutil.disk_usage("/")    
    if not silent:
        print('Disk info: Total = {0:.2f} MB / Used = {1:.2f} MB / Free = {2:.2f} MB'.format(total // (2**20), used // (2**20), free // (2**20)))

    ## Output path
    if 'output_path' in event:
        output_path = event['output_path']
    else:
        output_path = None
    
    if 'bucket' in event:
        event_kwargs['bucket'] = event['bucket']
    else:
        event_kwargs['bucket'] = 'aws-grivam'
                        
    if 'working_directory' in event:
        os.chdir(event['working_directory'])
    else:
        os.chdir('/tmp/')
    
    if not silent:
        print('Working directory: {0}'.format(os.getcwd()))
    
    files = glob.glob('*')
    files.sort()
    
    # Filenames, etc.
    beams_file = os.path.basename(event['s3_object_path'])
    root = beams_file.split('_')[0]
    id = int(beams_file.split('_')[1].split('.')[0])
    
    try:
        db_status = grizli_db.get_redshift_fit_status(root, id, table=dbtable)
    except:
        db_status = -1
                
    # Initial log
    start_log = '{0}_{1:05d}.start.log'.format(root, id)
    full_start = 'Pipeline/{0}/Extractions/{1}'.format(root, start_log)
    if ((start_log in files) | (db_status >= 0)) & event_kwargs['skip_started']:
        print('Log file {0} found in {1} (db_status={2})'.format(start_log, os.getcwd(), db_status))
        return True
        
    if not silent:
        for i, file in enumerate(files):
            print('Initial file ({0}): {1}'.format(i+1, file))
    
    if os.path.exists('{0}/matplotlibrc'.format(grizli.GRIZLI_PATH)):
        os.system('cp {0}/matplotlibrc .'.format(grizli.GRIZLI_PATH))
    
    s3 = boto3.resource('s3')
    s3_client = boto3.client('s3')
    bkt = s3.Bucket(event_kwargs['bucket'])
        
    if event_kwargs['skip_started']:
        res = [r.key for r in bkt.objects.filter(Prefix=full_start)]
        if res:
            print('Already started ({0}), aborting.'.format(start_log))
            return True
            
    fp = open(start_log,'w')
    fp.write(time.ctime()+'\n')
    fp.close()
    bkt.upload_file(start_log, full_start)
    
    # Download fit arguments
    if 'force_args' in event:
        force_args = event['force_args'] in TRUE_OPTIONS
    else:
        force_args = False
        
    args_files = ['{0}_fit_args.npy'.format(root), 'fit_args.npy']
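    # Prefer a field-specific '{root}_fit_args.npy', falling back to the
    # generic 'fit_args.npy'.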
    for args_file in args_files:
        if (not os.path.exists(args_file)) | force_args:
            aws_file = 'Pipeline/{0}/Extractions/{1}'.format(root, args_file)
            try:
                bkt.download_file(aws_file, './{0}'.format(args_file),
                              ExtraArgs={"RequestPayer": "requester"})
                print('Use args_file = {0}'.format(args_file))
                break
            except:
                continue
            
    # If no beams file in the bucket, try to generate it
    put_beams=False
    try:
        if not os.path.exists(beams_file):
            bkt.download_file(event['s3_object_path'],
                              './{0}'.format(beams_file),
                              ExtraArgs={"RequestPayer": "requester"})
            put_beams = False
    except:
        print('Extract from GrismFLT object!')
        if 'clean' in event:
            if isinstance(event['clean'], str):
                run_clean = event['clean'].lower() in ['true', 'y', '1']
            else:
                run_clean = event['clean']
        else:
            run_clean = True
        
        try:
            # Extracting beams
            grizli_db.update_redshift_fit_status(root, id, 
                                                status=dbFLAGS['start_beams'],
                                                table=dbtable)
        except:
            print('Set DB flag failed: start_beams')
            
        status = extract_beams_from_flt(root, event_kwargs['bucket'], id, 
                                        clean=run_clean, silent=silent)
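        # extract_beams_from_flt returns False on failure, otherwise the
        # list of files it produced, with the beams file first.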
        
        # Garbage collector
        gc.collect()
        
        if status is False:
            return False
        else:
            beams_file = status[0]
        
        try:
            # Beams are done
            grizli_db.update_redshift_fit_status(root, id, 
                                                 status=dbFLAGS['done_beams'],
                                                 table=dbtable)
        except:
            pass
            
        put_beams = True
        
        # upload it now
        output_path = 'Pipeline/{0}/Extractions'.format(root)
        for outfile in status:
            aws_file = '{0}/{1}'.format(output_path, outfile)
            print(aws_file)
            bkt.upload_file(outfile, aws_file, 
                        ExtraArgs={'ACL': 'public-read'})
            
    if ('run_fit' in event) & (dbtable == 'redshift_fit'):
        if event['run_fit'] in FALSE_OPTIONS:
            res = bkt.delete_objects(Delete={'Objects':[{'Key':full_start}]})
            
            try:
                grizli_db.update_redshift_fit_status(root, id, 
                                                 status=dbFLAGS['no_run_fit'],
                                                 table=dbtable)
            except:
                pass
            
            return True
    
    utils.fetch_acs_wcs_files(beams_file, bucket_name=event_kwargs['bucket'])
    
    # Update the multibeam/beam_geometry tables
    if os.path.exists(beams_file):
        args = np.load(args_file, allow_pickle=True)[0]
        for arg in event_kwargs:
            if arg in args:
                args[arg] = event_kwargs[arg]
                
        grizli_db.multibeam_to_database(beams_file, Rspline=15, force=False, 
                                        **args)
        
    if dbtable == 'multibeam':
        ### Done
        res = bkt.delete_objects(Delete={'Objects':[{'Key':full_start}]})
        return True
                
    # Download WCS files
    # if event_kwargs['check_wcs']:
    #     # WCS files for ACS
    #     files = [obj.key for obj in bkt.objects.filter(Prefix='Pipeline/{0}/Extractions/j'.format(root))]
    #     for file in files:
    #         if 'wcs.fits' in file:
    #             if os.path.exists(os.path.basename(file)):
    #                 continue
    #             
    #             bkt.download_file(file, os.path.basename(file),
    #                               ExtraArgs={"RequestPayer": "requester"})
     
    # Is zr in the event dict?
    # if 'zr' in event:
    #     zr = list(np.cast[float](event['zr']))
    # else:
    #     try:
    #         zr = np.load('fit_args.npy')[0]['zr']
    #     except:
    #         zr = np.load('fit_args.npy', allow_pickle=True)[0]['zr']
    
    # Directory listing
    files = glob.glob('*')
    files.sort()
    
    for i, file in enumerate(files):
        print('File ({0}): {1}'.format(i+1, file))
    
    try:
        files = glob.glob('{0}_{1:05d}*R30.fits'.format(root, id)) 
        if (len(files) > 0) & (dbtable == 'redshift_fit'):
            grizli_db.send_1D_to_database(files=files)
    except:
        print('Failed to send R30 to spec1d database')
                    
    ###   
    ### Run the fit
    try:
        grizli_db.update_redshift_fit_status(root, id, table=dbtable,
                            status=dbFLAGS['start_redshift_fit'])
    except:
        print('Set DB flag failed: start_redshift_fit')
    
    if event_kwargs['quasar_fit'] in TRUE_OPTIONS:
                
        # Don't recopy beams file
        put_beams = False
        
        # Don't make line maps
        if 'min_line_sn' not in event_kwargs:
            event_kwargs['min_line_sn'] = np.inf
        
        # Don't make drizzled psfs
        if 'get_ir_psfs' not in event_kwargs:
            event_kwargs['get_ir_psfs'] = False
        
        # Fit line widths
        if 'get_line_width' not in event_kwargs:
            event_kwargs['get_line_width'] = True
        
        # sys_err
        if 'sys_err' not in event_kwargs:
            event_kwargs['sys_err'] = 0.05
        
        # Don't use photometry
        event_kwargs['phot_obj'] = None
        event_kwargs['use_phot_obj'] = False
    
        event_kwargs['fit_only_beams'] = True
        event_kwargs['fit_beams'] = False
        
        templ_args = {'uv_line_complex': True, 
                      'broad_fwhm':2800,
                      'narrow_fwhm':1000,
                      'fixed_narrow_lines':True,
                      'Rspline':15,
                      'include_reddened_balmer_lines':False}
        
        for k in templ_args:
            if k in event_kwargs:
                templ_args[k] = event_kwargs.pop(k)
        
        if templ_args['broad_fwhm'] < 0:
            use_simple_templates = True
            templ_args['broad_fwhm'] *= -1
        else:
            use_simple_templates = False
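        # (A negative broad_fwhm above is used as a flag to select the
        # simplified template set; the sign is stripped before fitting.)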
            
        print('load_quasar_templates(**{0})'.format(templ_args))
        q0, q1 = utils.load_quasar_templates(**templ_args)
        
        if use_simple_templates:
            x0 = utils.load_templates(full_line_list=['highO32'],
                                      continuum_list=['quasar_lines.txt',
                                                      'red_blue_continuum.txt'],
                                      line_complexes=False, fwhm=1000)
             
            for t in q0:
                if 'bspl' in t:
                    x0[t] = q0[t]
            
            q0 = x0
            q1['red_blue_continuum.txt'] = x0['red_blue_continuum.txt']
            
        # Quasar templates with fixed line ratios
        # q0, q1 = utils.load_quasar_templates(uv_line_complex=True,
        #                                     broad_fwhm=2800, narrow_fwhm=1000,
        #                                     fixed_narrow_lines=True, 
        #                                     Rspline=15)
        
        if 'zr' not in event_kwargs:
            event_kwargs['zr'] = [0.03, 6.8]
        if 'fitter' not in event_kwargs:
            event_kwargs['fitter'] = ['lstsq', 'lstsq']
        
        print('run_all_parallel: {0}'.format(event_kwargs))
            
        fitting.run_all_parallel(id, t0=q0, t1=q1, args_file=args_file, 
                                 **event_kwargs)
        
        if output_path is None:
            #output_path = 'Pipeline/QuasarFit'.format(root)
            output_path = 'Pipeline/{0}/Extractions'.format(root)
    
    elif event_kwargs['fit_stars'] in TRUE_OPTIONS:
                    
        args = np.load(args_file, allow_pickle=True)[0]
        
        if 'psf' in event_kwargs:
            args['psf'] = event_kwargs['psf'] in TRUE_OPTIONS     
        
        for k in ['fcontam', 'min_sens', 'sys_err']:
            if k in event_kwargs:
                print('Set arg {0}={1}'.format(k, event_kwargs[k]))
                args[k] = event_kwargs[k]
        
        # Load MultiBeam    
        mb = multifit.MultiBeam(beams_file, **args)
        
        if 'fit_trace_shift' in args:
            if args['fit_trace_shift']:
                tr = mb.fit_trace_shift()
                         
        if 'spline_correction' in event_kwargs:
            spline_correction = event_kwargs['spline_correction'] in TRUE_OPTIONS     
        else:
            spline_correction = True
        
        if 'fit_background' in event_kwargs:
            fit_background = event_kwargs['fit_background'] in TRUE_OPTIONS     
        else:
            fit_background = True

        if 'fitter' in event_kwargs:
            fitter = event_kwargs['fitter']    
        else:
            fitter = 'lstsq'
        
        if 'Rspline' in event_kwargs:
            Rspline = event_kwargs['Rspline']    
        else:
            Rspline = 15
            
        if Rspline == 15:
            logg_list = [4.5]
        else:
            logg_list = utils.PHOENIX_LOGG
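        # (A single surface gravity for the default spline resolution,
        # otherwise the full PHOENIX logg grid; presumably this keeps the
        # template set small in the common case.)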
        
        if 'add_carbon_star' in event_kwargs:
            add_carbon_star = event_kwargs['add_carbon_star']    
        else:
            add_carbon_star = 25
            
        if 'use_phoenix' in event_kwargs:
            p = event_kwargs.pop('use_phoenix')
            if p in TRUE_OPTIONS:
                tstar = utils.load_phoenix_stars(logg_list=logg_list, 
                                             add_carbon_star=add_carbon_star)
            else:
                tstar = utils.load_templates(stars=True,
                                             add_carbon_star=add_carbon_star)
        else:
            tstar = utils.load_phoenix_stars(logg_list=logg_list,
                                             add_carbon_star=add_carbon_star)
        
        kws = {'spline_correction':spline_correction, 
               'fit_background':fit_background,
               'fitter':fitter,
               'spline_args':{'Rspline':Rspline}}
        
        print('kwargs: {0}'.format(kws))
                   
        # Fit the stellar templates
        _res = mb.xfit_star(tstar=tstar, oned_args={}, **kws)
        
        _res[0].savefig('{0}_{1:05d}.star.png'.format(root, id))

        # Save log info
        fp = open('{0}_{1:05d}.star.log'.format(root, id), 'w')
        fp.write(_res[1])
        fp.close()
                
        if output_path is None:
            #output_path = 'Pipeline/QuasarFit'.format(root)
            output_path = 'Pipeline/{0}/Extractions'.format(root)
        
    else:
        
        # Normal galaxy redshift fit
        fitting.run_all_parallel(id, fit_only_beams=True, fit_beams=False,  
                                 args_file=args_file, **event_kwargs)
        
        if output_path is None:
            output_path = 'Pipeline/{0}/Extractions'.format(root)
            
    # Output files
    files = glob.glob('{0}_{1:05d}*'.format(root, id))
    for file in files:
        if ('beams.fits' not in file) | put_beams:
            aws_file = '{0}/{1}'.format(output_path, file)
            
            if event_kwargs['quasar_fit'] in TRUE_OPTIONS:
                # Don't copy stack
                if 'stack' in file:
                    continue
                
                # Add qso extension on outputs
                aws_file = aws_file.replace('_{0:05d}.'.format(id), 
                                            '_{0:05d}.qso.'.format(id))
                                            
            print('Upload {0} -> {1}'.format(file, aws_file))
            
            bkt.upload_file(file, aws_file, ExtraArgs={'ACL': 'public-read'})
    
    # Put data in the redshift_fit database table
    try:
        if dbtable == 'stellar_fit':
            rowfile = '{0}_{1:05d}.star.log'.format(root, id)
        else:    
            rowfile = '{0}_{1:05d}.row.fits'.format(root, id)
        
        if os.path.exists(rowfile):
            grizli_db.add_redshift_fit_row(rowfile, table=dbtable, 
                                           verbose=True)
        
        # Add 1D spectra
        files = glob.glob('{0}_{1:05d}*1D.fits'.format(root, id))
        if (len(files) > 0) & (dbtable == 'redshift_fit'):
            grizli_db.send_1D_to_database(files=files)
        
    except:
        print('Update row failed')
    
    # Remove start log now that done
    res = bkt.delete_objects(Delete={'Objects':[{'Key':full_start}]})
    
    # Garbage collector
    gc.collect()


# Example #3

def run_grizli_fit(event):
    import os
    import glob
    import time

    import boto3
    import json
    import shutil
    import gc

    import numpy as np

    import grizli
    from grizli import fitting, utils, multifit
    utils.set_warnings()

    #event = {'s3_object_path':'Pipeline/j001452+091221/Extractions/j001452+091221_00277.beams.fits'}

    silent = False
    if 'silent' in event:
        silent = event['silent'] in TRUE_OPTIONS

    ###
    ### Parse event arguments
    ###
    event_kwargs = {}
    for k in event:

        # Lists
        if isinstance(event[k], str):
            # Split lists
            if ',' in event[k]:
                try:
                    event_kwargs[k] = np.asarray(event[k].split(','), dtype=float)
                except:
                    event_kwargs[k] = event[k].split(',')
            else:
                event_kwargs[k] = event[k]
        else:
            try:
                event_kwargs[k] = json.loads(event[k])
            except:
                event_kwargs[k] = event[k]

    # Defaults
    if 'skip_started' not in event_kwargs:
        event_kwargs['skip_started'] = True

    # Normalize to a bool so string values like 'False' behave as expected
    event_kwargs['skip_started'] = event_kwargs['skip_started'] in TRUE_OPTIONS

    for k in ['quasar_fit', 'extract_from_flt']:
        if k not in event_kwargs:
            event_kwargs[k] = False

    if not silent:
        print('Grizli version: ', grizli.__version__)

    # Disk space
    total, used, free = shutil.disk_usage("/")
    if not silent:
        print('Disk info: Total = {0:.2f} MB / Used = {1:.2f} MB / Free = {2:.2f} MB'.
              format(total // (2**20), used // (2**20), free // (2**20)))

    ## Output path
    if 'output_path' in event:
        output_path = event['output_path']
    else:
        output_path = None

    if 'bucket' in event:
        event_kwargs['bucket'] = event['bucket']
    else:
        event_kwargs['bucket'] = 'aws-grivam'

    if 'working_directory' in event:
        os.chdir(event['working_directory'])
    else:
        os.chdir('/tmp/')

    if not silent:
        print('Working directory: {0}'.format(os.getcwd()))

    files = glob.glob('*')
    files.sort()

    # Filenames, etc.
    beams_file = os.path.basename(event['s3_object_path'])
    root = beams_file.split('_')[0]
    id = int(beams_file.split('_')[1].split('.')[0])

    # Initial log
    start_log = '{0}_{1:05d}.start.log'.format(root, id)
    full_start = 'Pipeline/{0}/Extractions/{1}'.format(root, start_log)
    if (start_log in files) & event_kwargs['skip_started']:
        print('Log file {0} found in {1}'.format(start_log, os.getcwd()))
        return True

    if not silent:
        for i, file in enumerate(files):
            print('Initial file ({0}): {1}'.format(i + 1, file))

    os.system('cp {0}/matplotlibrc .'.format(grizli.GRIZLI_PATH))

    s3 = boto3.resource('s3')
    s3_client = boto3.client('s3')
    bkt = s3.Bucket(event_kwargs['bucket'])

    if event_kwargs['skip_started']:
        res = [r.key for r in bkt.objects.filter(Prefix=full_start)]
        if res:
            print('Already started ({0}), aborting.'.format(start_log))
            return True

    fp = open(start_log, 'w')
    fp.write(time.ctime() + '\n')
    fp.close()
    bkt.upload_file(start_log, full_start)

    # Download fit arguments
    args_file = 'fit_args.npy'
    needs_args = False
    if not os.path.exists(args_file):
        needs_args = True
    else:
        if 'force_args' in event:
            needs_args = event['force_args'] in TRUE_OPTIONS

    if needs_args:
        aws_args = 'Pipeline/{0}/Extractions/fit_args.npy'.format(root)
        bkt.download_file(aws_args,
                          './fit_args.npy',
                          ExtraArgs={"RequestPayer": "requester"})

    # If no beams file in the bucket, try to generate it
    put_beams = False
    try:
        if not os.path.exists(beams_file):
            bkt.download_file(event['s3_object_path'],
                              './{0}'.format(beams_file),
                              ExtraArgs={"RequestPayer": "requester"})
            put_beams = False
    except:
        print('Extract from GrismFLT object!')
        if 'clean' in event:
            if isinstance(event['clean'], str):
                run_clean = event['clean'].lower() in ['true', 'y', '1']
            else:
                run_clean = event['clean']
        else:
            run_clean = True

        status = extract_beams_from_flt(root,
                                        event_kwargs['bucket'],
                                        id,
                                        clean=run_clean,
                                        silent=silent)

        # Garbage collector
        gc.collect()

        if status is False:
            return False
        else:
            beams_file = status[0]

        put_beams = True

        # upload it now
        output_path = 'Pipeline/{0}/Extractions'.format(root)
        for outfile in status:
            aws_file = '{0}/{1}'.format(output_path, outfile)
            print(aws_file)
            bkt.upload_file(outfile,
                            aws_file,
                            ExtraArgs={'ACL': 'public-read'})

    if 'run_fit' in event:
        if event['run_fit'] in FALSE_OPTIONS:
            return True

    utils.fetch_acs_wcs_files(beams_file, bucket_name=event_kwargs['bucket'])

    # Download WCS files
    # if event_kwargs['check_wcs']:
    #     # WCS files for ACS
    #     files = [obj.key for obj in bkt.objects.filter(Prefix='Pipeline/{0}/Extractions/j'.format(root))]
    #     for file in files:
    #         if 'wcs.fits' in file:
    #             if os.path.exists(os.path.basename(file)):
    #                 continue
    #
    #             bkt.download_file(file, os.path.basename(file),
    #                               ExtraArgs={"RequestPayer": "requester"})

    # Is zr in the event dict?
    # if 'zr' in event:
    #     zr = list(np.cast[float](event['zr']))
    # else:
    #     try:
    #         zr = np.load('fit_args.npy')[0]['zr']
    #     except:
    #         zr = np.load('fit_args.npy', allow_pickle=True)[0]['zr']

    # Directory listing
    files = glob.glob('*')
    files.sort()

    for i, file in enumerate(files):
        print('File ({0}): {1}'.format(i + 1, file))

    ###
    ### Run the fit

    if event_kwargs['quasar_fit']:

        # Quasar templates
        uv_lines = True  # previously conditional on zr[1] > 3.5
        t0, t1 = utils.load_quasar_templates(uv_line_complex=uv_lines,
                                             broad_fwhm=2800,
                                             narrow_fwhm=1000,
                                             fixed_narrow_lines=True,
                                             nspline=13)

        fitting.run_all_parallel(id,
                                 t0=t0,
                                 t1=t1,
                                 fit_only_beams=True,
                                 fit_beams=False,
                                 phot_obj=None,
                                 **event_kwargs)

        if output_path is None:
            output_path = 'Pipeline/QuasarFit'

    else:

        # Normal galaxy redshift fit
        fitting.run_all_parallel(id,
                                 fit_only_beams=True,
                                 fit_beams=False,
                                 **event_kwargs)

        if output_path is None:
            output_path = 'Pipeline/{0}/Extractions'.format(root)

    # Output files
    files = glob.glob('{0}_{1:05d}*'.format(root, id))
    for file in files:
        if ('beams.fits' not in file) | put_beams:
            aws_file = '{0}/{1}'.format(output_path, file)
            print(aws_file)
            bkt.upload_file(file, aws_file, ExtraArgs={'ACL': 'public-read'})

    # Remove start log now that done
    res = bkt.delete_objects(Delete={'Objects': [{'Key': full_start}]})

    # Garbage collector
    gc.collect()
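
# Hypothetical direct invocation of the examples above (in production the
# event normally comes from an AWS Lambda trigger; the path is illustrative):
#
#     run_grizli_fit({'s3_object_path':
#                         'Pipeline/j001452+091221/Extractions/j001452+091221_00277.beams.fits',
#                     'bucket': 'aws-grivam',
#                     'skip_started': True})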