def Gen_multibeams(beams, args=args):
    """Split a MultiBeam into separate per-grism (G102/G141) MultiBeams."""
    mb = multifit.MultiBeam(beams, **args)

    # Group the beam indices by grism and position angle
    grism_beams = {}
    for g in mb.PA:
        grism_beams[g.lower()] = []
        for pa in mb.PA[g]:
            for i in mb.PA[g][pa]:
                grism_beams[g.lower()].append(mb.beams[i])

    mb_g102 = multifit.MultiBeam(grism_beams['g102'], fcontam=mb.fcontam,
                                 min_sens=mb.min_sens, min_mask=mb.min_mask,
                                 group_name=mb.group_name + '-g102')
    # bug, will be fixed ~today to not have to do this in the future
    for b in mb_g102.beams:
        if hasattr(b, 'xp'):
            delattr(b, 'xp')
    mb_g102.initialize_masked_arrays()

    mb_g141 = multifit.MultiBeam(grism_beams['g141'], fcontam=mb.fcontam,
                                 min_sens=mb.min_sens, min_mask=mb.min_mask,
                                 group_name=mb.group_name + '-g141')
    # bug, will be fixed ~today to not have to do this in the future
    for b in mb_g141.beams:
        if hasattr(b, 'xp'):
            delattr(b, 'xp')
    mb_g141.initialize_masked_arrays()

    return mb_g102, mb_g141
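
# Usage sketch (not in the original source): split a saved beams file into
# per-grism MultiBeams. The file name is hypothetical, and the module-level
# `args` dict of grizli fit arguments is assumed to be defined above.
def _demo_gen_multibeams():
    beams_file = 'j021820m0510_39170.beams.fits'  # hypothetical beams file
    mb_g102, mb_g141 = Gen_multibeams(beams_file)
    # Each MultiBeam now holds a single grism, so they can be fit separately
    print(mb_g102.N, 'G102 beams /', mb_g141.N, 'G141 beams')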
def Gen_initial_MB(field, gid):
    """Build an initial MultiBeam for one galaxy, dropping duplicate exposures."""
    # get beam list
    fl = glob('/Volumes/Vince_CLEAR/RELEASE_v2.1.0/BEAMS/*{}*/*{}*'.format(field, gid))

    # sort beams by file size
    sz = []
    for f in fl:
        sz.append(os.path.getsize(f))
    fl = np.array(fl)[np.argsort(sz)]

    # remove repeats, keeping one beam per parent exposure
    nlist = []
    blist = []
    for f in fl:
        mb = multifit.MultiBeam(f, **args)
        for bm in mb.beams:
            if bm.grism.parent_file not in nlist:
                nlist.append(bm.grism.parent_file)
                blist.append(bm)

    # make the mb
    mb = multifit.MultiBeam(blist, **args)

    for b in mb.beams:
        if hasattr(b, 'xp'):
            delattr(b, 'xp')
    mb.initialize_masked_arrays()

    return mb
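
# Usage sketch (not in the original source): field/ID values are illustrative,
# and the hard-coded '/Volumes/Vince_CLEAR/...' beam directory must exist locally.
# mb = Gen_initial_MB('GS1', 39170)
# print(mb.N, 'unique beams across', len(mb.PA), 'grisms')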
def edit_img(fl, ID):
    """Return a percentile-clipped, [0, 1]-normalized direct-image cutout of object ID."""
    mb = multifit.MultiBeam(fl, **args)

    ### step 1: isolate the object with its segmentation map
    iso = np.array(mb.beams[0].beam.direct * (mb.beams[0].beam.seg == ID))

    ### step 2: zoom in
    ziso = iso[59:99, 59:99]

    ### step 3: get the 3rd and 97th percentiles of the nonzero pixels
    llim = np.percentile(ziso[ziso != 0], 3)
    hlim = np.percentile(ziso[ziso != 0], 97)

    ### step 4: cap values below and above the limits
    ziso[ziso < llim] = llim
    ziso[ziso > hlim] = hlim
    hlim -= llim

    ### step 5: shift the floor to 0
    for i in range(len(ziso)):
        for ii in range(len(ziso[0])):
            if ziso[i][ii] != 0:
                ziso[i][ii] -= llim

    ### step 6: scale the top to 1
    ziso /= hlim

    return ziso
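
# Equivalent vectorized sketch (not in the original source): steps 3-6 of
# edit_img are a percentile stretch, which np.clip expresses without loops.
# This demo runs on a synthetic image; unlike edit_img, it does not
# special-case exact zeros outside the segmentation mask.
def _demo_percentile_stretch():
    rng = np.random.default_rng(1)
    img = rng.lognormal(size=(40, 40))  # stand-in for the direct-image cutout
    llim, hlim = np.percentile(img[img != 0], [3, 97])
    stretched = (np.clip(img, llim, hlim) - llim) / (hlim - llim)
    assert 0 <= stretched.min() and stretched.max() <= 1
    return stretched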
def Clean_mb(mb, BEAM_exempt, clip_lims, clipspec, omitspec):
    """Clip and/or omit the beams listed in BEAM_exempt, then rebuild the MultiBeam."""
    fblist = []
    idc = 0
    for bm in mb.beams:
        if bm.grism.parent_file in BEAM_exempt:
            if clipspec[idc] == 1:
                xspec, yspec, yerr = bm.beam.optimal_extract(
                    bm.grism.data['SCI'] - bm.contam, ivar=bm.ivar)
                lms = clip_lims[idc]
                # Blank the science data and inflate the errors in the window
                for i in range(len(xspec)):
                    if lms[0] < xspec[i] < lms[1]:
                        bm.grism.data['SCI'].T[i] = np.zeros_like(bm.grism.data['SCI'].T[i])
                        bm.grism.data['ERR'].T[i] = np.ones_like(bm.grism.data['ERR'].T[i]) * 1000

            if omitspec[idc] != 1:
                fblist.append(bm)

            idc += 1
        else:
            fblist.append(bm)

    mb = multifit.MultiBeam(fblist, **args)

    for b in mb.beams:
        if hasattr(b, 'xp'):
            delattr(b, 'xp')
    mb.initialize_masked_arrays()

    return mb
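
# Usage sketch (not in the original source): the exposure name and control
# arrays are illustrative. clipspec/omitspec carry one flag per exempt beam,
# and clip_lims gives the wavelength window (Angstrom) to blank when clipping.
def _demo_clean_mb(mb):
    BEAM_exempt = ['ibhj01abq_flt.fits']  # hypothetical exposure name
    clip_lims = [[11000.0, 11500.0]]      # blank SCI / inflate ERR in this range
    clipspec = [1]                        # 1 = clip this beam's spectrum
    omitspec = [0]                        # 1 = drop this beam entirely
    return Clean_mb(mb, BEAM_exempt, clip_lims, clipspec, omitspec)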
def load_beams_and_trns(wv, field, galaxy_id, instr):
    """Load the beams for one object and each beam's grism transmission on the grid wv."""
    ### Set transmission curve
    sp = fsps.StellarPopulation(imf_type=0, tpagb_norm_type=0, zcontinuous=1,
                                logzsol=np.log10(0.002 / 0.019), sfh=4,
                                tau=0.6, dust_type=1)
    model_wave, model_flux = sp.get_spectrum(tage=3.6, peraa=True)

    ### set beams, keeping one per dispersion PA and filter
    BEAMS = []
    blist = glob(beam_path + '*{}*_*{}*'.format(field[1], galaxy_id))

    for b in blist:
        mb = multifit.MultiBeam(b, **args)
        PAlist = []
        for bm in mb.beams:
            if bm.grism.filter == instr:
                if bm.get_dispersion_PA() not in PAlist:
                    PAlist.append(bm.get_dispersion_PA())
                    BEAMS.append(bm)

    # Forward-model a flat spectrum through each beam to get its transmission
    TRANS = []
    for i in BEAMS:
        W, F = forward_model_grism(i, model_wave, np.ones(len(model_wave)))
        trans = interp1d(W, F)(wv)
        TRANS.append(trans)

    return BEAMS, TRANS
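
# Hedged sketch (not in the original source): the per-beam "transmission" above
# is a flat (unit) spectrum forward-modeled through the grism and interpolated
# onto the common grid wv. This isolates that step for a single beam; the
# wavelength grid is illustrative.
def _demo_single_beam_transmission(beam, wv):
    model_wave = np.linspace(7000, 18000, 5000)  # illustrative model grid (A)
    W, F = forward_model_grism(beam, model_wave, np.ones(len(model_wave)))
    return interp1d(W, F)(wv)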
def get_matched_multibeam(matched_id, good_ids, redshift_cat=[], sed_cat=[], engine=[]):
    """Query the grizli database for the object matching a catalog position and
    redshift, download its beams file, and return the loaded MultiBeam."""
    gal_id = good_ids[matched_id]
    sed_ra = sed_cat[gal_id]['ra']
    sed_dec = sed_cat[gal_id]['dec']
    sed_spec = redshift_cat[gal_id]['z_spec']
    print('gal_id: %.0f, z_spec: %.3f' % (gal_id + 1, sed_spec))

    columns = ['status', 'root', 'id', 'ra', 'dec', 'mag_auto', 'flux_radius',
               'bic_diff', 'q_z', 'z_map', 'd4000', 't_g102', 't_g141']

    SQL = ("SELECT {colstr} FROM redshift_fit NATURAL JOIN photometry_apcorr"
           " WHERE q_z > -0.2").format(colstr=','.join(columns))
    # " AND z_map > 0.2 AND z_map < 0.3 AND mag_auto < 27").format(colstr=','.join(columns))

    # Box match on position and redshift
    extra = " AND ra < %.3f AND ra > %.3f" % (sed_ra + 0.001, sed_ra - 0.001)
    extra += " AND dec < %.3f AND dec > %.3f" % (sed_dec + 0.001, sed_dec - 0.001)
    extra += " AND z_map < %.3f AND z_map > %.3f" % (sed_spec + 0.005, sed_spec - 0.005)
    SQL = SQL + extra
    #print(SQL)

    res = grizli_db.from_sql(SQL, engine)
    print('N: ', len(res))

    so = np.argsort(res['mag_auto'])
    HTML(grizli_db.render_for_notebook(res[so]['root', 'id', 'ra', 'dec',
                                               'mag_auto', 'q_z', 'd4000', 'z_map'],
                                       image_extensions=['stack', 'full']))

    match_obj = res[0]
    root, id = match_obj['root'].item(), match_obj['id'].item()
    print(root, id)

    # Fetch grism spectra file
    base_url = 'https://s3.amazonaws.com/grizli-v1/Pipeline/{0}/Extractions'.format(root)
    files = ['{0}_{1:05d}.beams.fits'.format(root, id),
             '{0}_fit_args.npy'.format(root)]
    for file in files:
        if not os.path.exists(file):
            os.system('wget {0}/{1}'.format(base_url, file))
            #print('wget {0}/{1}'.format(base_url, file))

    #args = np.load('{0}_fit_args.npy'.format(root), allow_pickle=True)[0]

    ix = (res['root'] == root) & (res['id'] == id)
    z_grism = res['z_map'][ix][0]
    print('Grism redshift: {0:.4f}'.format(z_grism))

    # now load this spectrum
    mb = multifit.MultiBeam('{0}_{1:05d}.beams.fits'.format(root, id))

    return mb, z_grism, gal_id
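
# Usage sketch (not in the original source): grizli_db.get_db_engine() is the
# usual way to open the database connection, but check your grizli version.
# The catalogs are assumptions standing in for the caller's SED/redshift tables.
# engine = grizli_db.get_db_engine()
# mb, z_grism, gal_id = get_matched_multibeam(0, good_ids,
#                                             redshift_cat=redshift_cat,
#                                             sed_cat=sed_cat, engine=engine)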
def extract_beams_from_flt(root, bucket, id, clean=True, silent=False):
    """
    Download GrismFLT files and extract the beams file
    """
    import gc
    import boto3
    import matplotlib.pyplot as plt

    import grizli
    from grizli import fitting, utils, multifit
    from grizli.version import __version__ as grizli__version

    utils.set_warnings()

    from grizli.pipeline import auto_script

    s3 = boto3.resource('s3')
    s3_client = boto3.client('s3')
    bkt = s3.Bucket(bucket)

    # WCS files for ACS
    files = [obj.key for obj in
             bkt.objects.filter(Prefix='Pipeline/{0}/Extractions/j'.format(root))]
    files += [obj.key for obj in
              bkt.objects.filter(Prefix='Pipeline/{0}/Extractions/i'.format(root))]
    files += [obj.key for obj in
              bkt.objects.filter(Prefix='Pipeline/{0}/Extractions/{0}-ir.cat.fits'.format(root))]
    files += [obj.key for obj in
              bkt.objects.filter(Prefix='Pipeline/{0}/Extractions/fit_args.npy'.format(root))]

    download_files = []
    for file in np.unique(files):
        if ('cat.fits' in file) | ('fit_args' in file):
            if os.path.exists(os.path.basename(file)):
                continue

            download_files.append(file)

    for file in download_files:
        print(file)
        bkt.download_file(file, os.path.basename(file),
                          ExtraArgs={"RequestPayer": "requester"})

    # Read the catalog
    ircat = utils.read_catalog('{0}-ir.cat.fits'.format(root))
    ix = ircat['NUMBER'] == id
    object_rd = (ircat['X_WORLD'][ix], ircat['Y_WORLD'][ix])
    del(ircat)

    # One beam at a time
    beams = None

    flt_files = []
    for file in files:
        if 'GrismFLT.fits' in file:
            flt_files.append(file)

    if not silent:
        print('Read {0} GrismFLT files'.format(len(flt_files)))

    if os.path.exists('{0}_fit_args.npy'.format(root)):
        args_file = '{0}_fit_args.npy'.format(root)
    else:
        args_file = 'fit_args.npy'

    for i, file in enumerate(flt_files):
        if not silent:
            print('# Read {0}/{1}'.format(i + 1, len(flt_files)))

        flt, ext, _, _ = os.path.basename(file).split('.')
        if flt.startswith('i'):
            fl = 'flt'
        else:
            fl = 'flc'

        out_files = ['{0}_{2}.{1}.wcs.fits'.format(flt, ext, fl),
                     '{0}.{1}.GrismFLT.fits'.format(flt, ext),
                     '{0}.{1}.GrismFLT.pkl'.format(flt, ext)]

        exp_has_id = False

        for j, f_j in enumerate(out_files):
            aws_file = os.path.join(os.path.dirname(file), f_j)
            if not silent:
                print('  ', aws_file)

            if not os.path.exists(f_j):
                bkt.download_file(aws_file, f_j,
                                  ExtraArgs={"RequestPayer": "requester"})

            # WCS file, check if object in footprint
            if f_j.endswith('.wcs.fits'):
                #exp_has_id = check_object_in_footprint(id, f_j, ircat)
                exp_has_id = check_object_in_footprint(None, f_j, None, rd=object_rd)
                if not exp_has_id:
                    if clean:
                        os.remove(f_j)
                    break

        if not exp_has_id:
            continue

        beams_i = auto_script.extract(field_root=root, maglim=[13, 24], prior=None,
                                      MW_EBV=0.00, ids=id, pline={},
                                      fit_only_beams=True, run_fit=False,
                                      poly_order=7,
                                      master_files=[os.path.basename(file)],
                                      grp=None, bad_pa_threshold=None,
                                      fit_trace_shift=False, size=32, diff=True,
                                      min_sens=0.02, skip_complete=True,
                                      fit_args={}, args_file=args_file,
                                      get_only_beams=True)

        # Remove the GrismFLT file
        for f_j in out_files:
            if ('GrismFLT' in f_j) & clean:
                os.remove(f_j)

        if beams is None:
            beams = beams_i
        else:
            beams.extend(beams_i)

        # Garbage collector
        gc.collect()

    if not beams:
        print('No beams found for {0} id={1}'.format(root, id))
        return False

    # Grism Object
    args = np.load(args_file, allow_pickle=True)[0]
    mb = multifit.MultiBeam(beams, **args)
    mb.write_master_fits()

    # 1D spectrum with R=30 fit
    if True:
        bin_steps, step_templ = utils.step_templates(wlim=[5000, 18000.0],
                                                     R=30, round=10)

        tfit = mb.template_at_z(z=0, templates=step_templ,
                                fit_background=True, fitter='lstsq',
                                get_uncertainties=2)

        fig1 = mb.oned_figure(figsize=[5, 3], tfit=tfit, show_beams=True,
                              scale_on_stacked=True, ylim_percentile=5)

        outroot = '{0}_{1:05d}.R{2:.0f}'.format(root, id, 30)
        hdu = mb.oned_spectrum_to_hdu(outputfile=outroot + '.fits',
                                      tfit=tfit, wave=bin_steps)

        fig1.savefig(outroot + '.png')
        del(hdu)

    # Drizzled spectrum
    hdu, fig = mb.drizzle_grisms_and_PAs(fcontam=args['fcontam'], flambda=False,
                                         kernel='point', size=32, zfit=tfit,
                                         diff=False)

    hdu[0].header['GRIZLIV'] = (grizli__version, 'Grizli version')

    fig.savefig('{0}_{1:05d}.stack.png'.format(root, id))
    hdu.writeto('{0}_{1:05d}.stack.fits'.format(root, id), overwrite=True)

    plt.close('all')
    del(hdu)

    outfiles = ['{0}_{1:05d}.beams.fits'.format(root, id)]
    outfiles += glob.glob(outroot + '*')
    outfiles += glob.glob('{0}_{1:05d}.stack*'.format(root, id))

    return(outfiles)
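
# Usage sketch (not in the original source): bucket name and object ID are
# illustrative; requester-pays S3 access and AWS credentials are assumed.
# outfiles = extract_beams_from_flt('j021820m0510', 'grizli-v1', 39170,
#                                   clean=True, silent=False)
# 'outfiles' lists the local beams.fits, R30 1D, and stack products.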
def run_grizli_fit(event):
    import boto3
    import json
    import shutil
    import gc

    import matplotlib.pyplot as plt

    import grizli
    from grizli import fitting, utils, multifit
    try:
        from grizli.aws import db as grizli_db
        dbFLAGS = grizli_db.FLAGS
    except:
        pass

    utils.set_warnings()

    #event = {'s3_object_path':'Pipeline/j001452+091221/Extractions/j001452+091221_00277.beams.fits'}

    silent = False
    if 'silent' in event:
        silent = event['silent'] in TRUE_OPTIONS

    ###
    ### Parse event arguments
    ###
    event_kwargs = {}
    for k in event:

        # Lists
        if isinstance(event[k], str):
            # Split lists
            if ',' in event[k]:
                try:
                    event_kwargs[k] = np.cast[float](event[k].split(','))
                except:
                    event_kwargs[k] = event[k].split(',')
            else:
                event_kwargs[k] = event[k]
        else:
            try:
                event_kwargs[k] = json.loads(event[k])
            except:
                event_kwargs[k] = event[k]

    # Defaults
    if 'skip_started' not in event_kwargs:
        event_kwargs['skip_started'] = True

    for k in ['quasar_fit', 'extract_from_flt', 'fit_stars', 'beam_info_only']:
        if k not in event_kwargs:
            event_kwargs[k] = False

    if event_kwargs['beam_info_only'] in TRUE_OPTIONS:
        dbtable = 'multibeam'
    elif event_kwargs['quasar_fit'] in TRUE_OPTIONS:
        dbtable = 'redshift_fit_quasar'
    elif event_kwargs['fit_stars'] in TRUE_OPTIONS:
        dbtable = 'stellar_fit'
    else:
        dbtable = 'redshift_fit'

    if not silent:
        print('Grizli version: ', grizli.__version__)

    # Disk space
    total, used, free = shutil.disk_usage("/")
    if not silent:
        print('Disk info: Total = {0:.2f} / Used = {1:.2f} / Free = {2:.2f}'.format(
              total // (2**20), used // (2**20), free // (2**20)))

    ## Output path
    if 'output_path' in event:
        output_path = event['output_path']
    else:
        output_path = None

    if 'bucket' in event:
        event_kwargs['bucket'] = event['bucket']
    else:
        event_kwargs['bucket'] = 'aws-grivam'

    if 'working_directory' in event:
        os.chdir(event['working_directory'])
    else:
        os.chdir('/tmp/')

    if not silent:
        print('Working directory: {0}'.format(os.getcwd()))

    files = glob.glob('*')
    files.sort()

    # Filenames, etc.
    beams_file = os.path.basename(event['s3_object_path'])
    root = beams_file.split('_')[0]
    id = int(beams_file.split('_')[1].split('.')[0])

    try:
        db_status = grizli_db.get_redshift_fit_status(root, id, table=dbtable)
    except:
        db_status = -1

    # Initial log
    start_log = '{0}_{1:05d}.start.log'.format(root, id)
    full_start = 'Pipeline/{0}/Extractions/{1}'.format(root, start_log)
    if ((start_log in files) | (db_status >= 0)) & event_kwargs['skip_started']:
        print('Log file {0} found in {1} (db_status={2})'.format(start_log,
              os.getcwd(), db_status))
        return True

    if not silent:
        for i, file in enumerate(files):
            print('Initial file ({0}): {1}'.format(i + 1, file))

    if os.path.exists('{0}/matplotlibrc'.format(grizli.GRIZLI_PATH)):
        os.system('cp {0}/matplotlibrc .'.format(grizli.GRIZLI_PATH))

    s3 = boto3.resource('s3')
    s3_client = boto3.client('s3')
    bkt = s3.Bucket(event_kwargs['bucket'])

    if event_kwargs['skip_started']:
        res = [r.key for r in bkt.objects.filter(Prefix=full_start)]
        if res:
            print('Already started ({0}), aborting.'.format(start_log))
            return True

    fp = open(start_log, 'w')
    fp.write(time.ctime() + '\n')
    fp.close()
    bkt.upload_file(start_log, full_start)

    # Download fit arguments
    if 'force_args' in event:
        force_args = event['force_args'] in TRUE_OPTIONS
    else:
        force_args = False

    args_files = ['{0}_fit_args.npy'.format(root), 'fit_args.npy']
    for args_file in args_files:
        if (not os.path.exists(args_file)) | force_args:
            aws_file = 'Pipeline/{0}/Extractions/{1}'.format(root, args_file)
            try:
                bkt.download_file(aws_file, './{0}'.format(args_file),
                                  ExtraArgs={"RequestPayer": "requester"})
                print('Use args_file = {0}'.format(args_file))
                break
            except:
                continue

    # If no beams file in the bucket, try to generate it
    put_beams = False
    try:
        if not os.path.exists(beams_file):
            bkt.download_file(event['s3_object_path'], './{0}'.format(beams_file),
                              ExtraArgs={"RequestPayer": "requester"})
            put_beams = False
    except:
        print('Extract from GrismFLT object!')

        if 'clean' in event:
            if isinstance(event['clean'], str):
                run_clean = event['clean'].lower() in ['true', 'y', '1']
            else:
                run_clean = event['clean']
        else:
            run_clean = True

        try:
            # Extracting beams
            grizli_db.update_redshift_fit_status(root, id,
                                                 status=dbFLAGS['start_beams'],
                                                 table=dbtable)
        except:
            print('Set DB flag failed: start_beams')
            pass

        status = extract_beams_from_flt(root, event_kwargs['bucket'], id,
                                        clean=run_clean, silent=silent)

        # Garbage collector
        gc.collect()

        if status is False:
            return False
        else:
            beams_file = status[0]

        try:
            # Beams are done
            grizli_db.update_redshift_fit_status(root, id,
                                                 status=dbFLAGS['done_beams'],
                                                 table=dbtable)
        except:
            pass

        put_beams = True

        # upload it now
        output_path = 'Pipeline/{0}/Extractions'.format(root)
        for outfile in status:
            aws_file = '{0}/{1}'.format(output_path, outfile)
            print(aws_file)
            bkt.upload_file(outfile, aws_file,
                            ExtraArgs={'ACL': 'public-read'})

        if ('run_fit' in event) & (dbtable == 'redshift_fit'):
            if event['run_fit'] in FALSE_OPTIONS:
                res = bkt.delete_objects(Delete={'Objects': [{'Key': full_start}]})
                try:
                    grizli_db.update_redshift_fit_status(root, id,
                                                         status=dbFLAGS['no_run_fit'],
                                                         table=dbtable)
                except:
                    pass

                return True

    utils.fetch_acs_wcs_files(beams_file, bucket_name=event_kwargs['bucket'])

    # Update the multibeam/beam_geometry tables
    if os.path.exists(beams_file):
        args = np.load(args_file, allow_pickle=True)[0]
        for arg in event_kwargs:
            if arg in args:
                args[arg] = event_kwargs[arg]

        grizli_db.multibeam_to_database(beams_file, Rspline=15, force=False,
                                        **args)

    if dbtable == 'multibeam':
        ### Done
        res = bkt.delete_objects(Delete={'Objects': [{'Key': full_start}]})
        return True

    # Download WCS files
    # if event_kwargs['check_wcs']:
    #     # WCS files for ACS
    #     files = [obj.key for obj in bkt.objects.filter(Prefix='Pipeline/{0}/Extractions/j'.format(root))]
    #     for file in files:
    #         if 'wcs.fits' in file:
    #             if os.path.exists(os.path.basename(file)):
    #                 continue
    #
    #             bkt.download_file(file, os.path.basename(file),
    #                               ExtraArgs={"RequestPayer": "requester"})

    # Is zr in the event dict?
    # if 'zr' in event:
    #     zr = list(np.cast[float](event['zr']))
    # else:
    #     try:
    #         zr = np.load('fit_args.npy')[0]['zr']
    #     except:
    #         zr = np.load('fit_args.npy', allow_pickle=True)[0]['zr']

    # Directory listing
    files = glob.glob('*')
    files.sort()

    for i, file in enumerate(files):
        print('File ({0}): {1}'.format(i + 1, file))

    try:
        files = glob.glob('{0}_{1:05d}*R30.fits'.format(root, id))
        if (len(files) > 0) & (dbtable == 'redshift_fit'):
            grizli_db.send_1D_to_database(files=files)
    except:
        print('Failed to send R30 to spec1d database')
        pass

    ###
    ### Run the fit
    try:
        grizli_db.update_redshift_fit_status(root, id, table=dbtable,
                                             status=dbFLAGS['start_redshift_fit'])
    except:
        print('Set DB flag failed: start_redshift_fit')
        pass

    if event_kwargs['quasar_fit'] in TRUE_OPTIONS:

        # Don't recopy beams file
        put_beams = False

        # Don't make line maps
        if 'min_line_sn' not in event_kwargs:
            event_kwargs['min_line_sn'] = np.inf

        # Don't make drizzled psfs
        if 'get_ir_psfs' not in event_kwargs:
            event_kwargs['get_ir_psfs'] = False

        # Fit line widths
        if 'get_line_width' not in event_kwargs:
            event_kwargs['get_line_width'] = True

        # sys_err
        if 'sys_err' not in event_kwargs:
            event_kwargs['sys_err'] = 0.05

        # Don't use photometry
        event_kwargs['phot_obj'] = None
        event_kwargs['use_phot_obj'] = False

        event_kwargs['fit_only_beams'] = True
        event_kwargs['fit_beams'] = False

        templ_args = {'uv_line_complex': True,
                      'broad_fwhm': 2800,
                      'narrow_fwhm': 1000,
                      'fixed_narrow_lines': True,
                      'Rspline': 15,
                      'include_reddened_balmer_lines': False}

        for k in templ_args:
            if k in event_kwargs:
                templ_args[k] = event_kwargs.pop(k)

        if templ_args['broad_fwhm'] < 0:
            use_simple_templates = True
            templ_args['broad_fwhm'] *= -1
        else:
            use_simple_templates = False

        print('load_quasar_templates(**{0})'.format(templ_args))
        q0, q1 = utils.load_quasar_templates(**templ_args)

        if use_simple_templates:
            x0 = utils.load_templates(full_line_list=['highO32'],
                                      continuum_list=['quasar_lines.txt',
                                                      'red_blue_continuum.txt'],
                                      line_complexes=False, fwhm=1000)

            for t in q0:
                if 'bspl' in t:
                    x0[t] = q0[t]

            q0 = x0
            q1['red_blue_continuum.txt'] = x0['red_blue_continuum.txt']

        # Quasar templates with fixed line ratios
        # q0, q1 = utils.load_quasar_templates(uv_line_complex=True,
        #                                      broad_fwhm=2800, narrow_fwhm=1000,
        #                                      fixed_narrow_lines=True,
        #                                      Rspline=15)

        if 'zr' not in event_kwargs:
            event_kwargs['zr'] = [0.03, 6.8]
        if 'fitter' not in event_kwargs:
            event_kwargs['fitter'] = ['lstsq', 'lstsq']

        print('run_all_parallel: {0}'.format(event_kwargs))

        fitting.run_all_parallel(id, t0=q0, t1=q1, args_file=args_file,
                                 **event_kwargs)

        if output_path is None:
            #output_path = 'Pipeline/QuasarFit'.format(root)
            output_path = 'Pipeline/{0}/Extractions'.format(root)

    elif event_kwargs['fit_stars'] in TRUE_OPTIONS:
        args = np.load(args_file, allow_pickle=True)[0]
        if 'psf' in event_kwargs:
            args['psf'] = event_kwargs['psf'] in TRUE_OPTIONS

        for k in ['fcontam', 'min_sens', 'sys_err']:
            if k in event_kwargs:
                print('Set arg {0}={1}'.format(k, event_kwargs[k]))
                args[k] = event_kwargs[k]

        # Load MultiBeam
        mb = multifit.MultiBeam(beams_file, **args)

        if 'fit_trace_shift' in args:
            if args['fit_trace_shift']:
                tr = mb.fit_trace_shift()

        if 'spline_correction' in event_kwargs:
            spline_correction = event_kwargs['spline_correction'] in TRUE_OPTIONS
        else:
            spline_correction = True

        if 'fit_background' in event_kwargs:
            fit_background = event_kwargs['fit_background'] in TRUE_OPTIONS
        else:
            fit_background = True

        if 'fitter' in event_kwargs:
            fitter = event_kwargs['fitter']
        else:
            fitter = 'lstsq'

        if 'Rspline' in event_kwargs:
            Rspline = event_kwargs['Rspline']
        else:
            Rspline = 15

        if Rspline == 15:
            logg_list = [4.5]
        else:
            logg_list = utils.PHOENIX_LOGG

        if 'add_carbon_star' in event_kwargs:
            add_carbon_star = event_kwargs['add_carbon_star']
        else:
            add_carbon_star = 25

        if 'use_phoenix' in event_kwargs:
            p = event_kwargs.pop('use_phoenix')
            if p in TRUE_OPTIONS:
                tstar = utils.load_phoenix_stars(logg_list=logg_list,
                                                 add_carbon_star=add_carbon_star)
            else:
                tstar = utils.load_templates(stars=True,
                                             add_carbon_star=add_carbon_star)
        else:
            tstar = utils.load_phoenix_stars(logg_list=logg_list,
                                             add_carbon_star=add_carbon_star)

        kws = {'spline_correction': spline_correction,
               'fit_background': fit_background,
               'fitter': fitter,
               'spline_args': {'Rspline': Rspline}}

        print('kwargs: {0}'.format(kws))

        # Fit the stellar templates
        _res = mb.xfit_star(tstar=tstar, oned_args={}, **kws)

        _res[0].savefig('{0}_{1:05d}.star.png'.format(root, id))

        # Save log info
        fp = open('{0}_{1:05d}.star.log'.format(root, id), 'w')
        fp.write(_res[1])
        fp.close()

        if output_path is None:
            #output_path = 'Pipeline/QuasarFit'.format(root)
            output_path = 'Pipeline/{0}/Extractions'.format(root)

    else:
        # Normal galaxy redshift fit
        fitting.run_all_parallel(id, fit_only_beams=True, fit_beams=False,
                                 args_file=args_file, **event_kwargs)

        if output_path is None:
            output_path = 'Pipeline/{0}/Extractions'.format(root)

    # Output files
    files = glob.glob('{0}_{1:05d}*'.format(root, id))
    for file in files:
        if ('beams.fits' not in file) | put_beams:
            aws_file = '{0}/{1}'.format(output_path, file)

            if event_kwargs['quasar_fit'] in TRUE_OPTIONS:
                # Don't copy stack
                if 'stack' in file:
                    continue

                # Add qso extension on outputs
                aws_file = aws_file.replace('_{0:05d}.'.format(id),
                                            '_{0:05d}.qso.'.format(id))

            print('Upload {0} -> {1}'.format(file, aws_file))
            bkt.upload_file(file, aws_file,
                            ExtraArgs={'ACL': 'public-read'})

    # Put data in the redshift_fit database table
    try:
        if dbtable == 'stellar_fit':
            rowfile = '{0}_{1:05d}.star.log'.format(root, id)
        else:
            rowfile = '{0}_{1:05d}.row.fits'.format(root, id)

        if os.path.exists(rowfile):
            grizli_db.add_redshift_fit_row(rowfile, table=dbtable, verbose=True)

        # Add 1D spectra
        files = glob.glob('{0}_{1:05d}*1D.fits'.format(root, id))
        if (len(files) > 0) & (dbtable == 'redshift_fit'):
            grizli_db.send_1D_to_database(files=files)

    except:
        print('Update row failed')
        pass

    # Remove start log now that done
    res = bkt.delete_objects(Delete={'Objects': [{'Key': full_start}]})

    # Garbage collector
    gc.collect()
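
# Usage sketch (not in the original source): a minimal event dict. Only
# 's3_object_path' is required; the other keys illustrate the string
# conventions parsed above (TRUE_OPTIONS-style flags, comma-separated lists).
# event = {'s3_object_path': 'Pipeline/j001452+091221/Extractions/j001452+091221_00277.beams.fits',
#          'bucket': 'grizli-v1',
#          'skip_started': False,
#          'zr': '0.03,3.0'}
# run_grizli_fit(event)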
def Clean_multibeam(self):
    """Clip/omit flagged beams, then store per-grism (G102/G141) MultiBeams as attributes."""
    if int(self.galaxy_id) < 10000:
        gid = '0' + str(self.galaxy_id)
    else:
        gid = self.galaxy_id

    BMX = np.load(beam_path + '{}_{}_ex.npy'.format(self.field, self.galaxy_id),
                  allow_pickle=True)
    clip, clipspec, omitspec = np.load(beam_path + '{}_{}.npy'.format(self.field, self.galaxy_id),
                                       allow_pickle=True)

    if hpath == '/home/vestrada78840/':
        fl = beam_2d_path + 'j021820m0510_{}.beams.fits'.format(gid)
    else:
        # same file name on both hosts; only beam_2d_path differs
        fl = beam_2d_path + 'j021820m0510_{}.beams.fits'.format(gid)

    mb = multifit.MultiBeam(fl, **args)
    blist = mb.beams

    ##### clip or omit
    fblist = []
    idc = 0
    for bm in blist:
        if bm.grism.parent_file in BMX:
            if clipspec[idc] == 1:
                xspec, yspec, yerr = bm.beam.optimal_extract(
                    bm.grism.data['SCI'] - bm.contam, ivar=bm.ivar)
                lms = clip[idc]

                if len(lms) == 1:
                    lms = lms[0]

                for i in range(len(xspec)):
                    if lms[0] < xspec[i] < lms[1]:
                        bm.grism.data['SCI'].T[i] = np.zeros_like(bm.grism.data['SCI'].T[i])
                        bm.grism.data['ERR'].T[i] = np.ones_like(bm.grism.data['ERR'].T[i]) * 1000

            if omitspec[idc] != 1:
                fblist.append(bm)

            idc += 1
        else:
            fblist.append(bm)

    mb = multifit.MultiBeam(fblist, **args)

    for b in mb.beams:
        if hasattr(b, 'xp'):
            delattr(b, 'xp')
    mb.initialize_masked_arrays()

    grism_beams = {}
    for g in mb.PA:
        grism_beams[g.lower()] = []
        for pa in mb.PA[g]:
            for i in mb.PA[g][pa]:
                grism_beams[g.lower()].append(mb.beams[i])

    try:
        self.mb_g102 = multifit.MultiBeam(grism_beams['g102'], fcontam=mb.fcontam,
                                          min_sens=mb.min_sens, min_mask=mb.min_mask,
                                          group_name=mb.group_name + '-g102')
        # bug, will be fixed ~today to not have to do this in the future
        for b in self.mb_g102.beams:
            if hasattr(b, 'xp'):
                delattr(b, 'xp')
        self.mb_g102.initialize_masked_arrays()
        self.g102 = True
    except:
        self.g102 = False

    try:
        self.mb_g141 = multifit.MultiBeam(grism_beams['g141'], fcontam=mb.fcontam,
                                          min_sens=mb.min_sens, min_mask=mb.min_mask,
                                          group_name=mb.group_name + '-g141')
        # bug, will be fixed ~today to not have to do this in the future
        for b in self.mb_g141.beams:
            if hasattr(b, 'xp'):
                delattr(b, 'xp')
        self.mb_g141.initialize_masked_arrays()
        self.g141 = True
    except:
        self.g141 = False