from ciao_contrib.runtool import dmextract, deflare  # CIAO tool wrappers


def extract_flare(obsid_list, times):
    count = 1
    for obs in obsid_list:  # Cycle through obsIDs
        binlength = input('Select bin length (usually 200): ')
        index1 = (count * 2) - 2  # Start-time index for this obsID
        index2 = (count * 2) - 1  # Stop-time index for this obsID
        start = times[index1]     # Start and stop times for the obsID being run
        stop = times[index2]
        count += 1  # e.g. the third obsID has count = 3, index1 = 4, index2 = 5 in the times list
        print("Performing dmextract and deflare on obsID %s" % obs)
        dmextract(infile="reprojected_data/%s_background.fits[bin time=%s:%s:%s]"
                         % (obs, start, stop, binlength),
                  outfile='reprojected_data/%s_background.lc' % obs,
                  opt='ltc1', clobber='yes')
        deflare(infile='reprojected_data/%s_background.lc' % obs,
                outfile='reprojected_data/%s_bkg_deflare.gti' % obs,
                method='clean', plot='no',
                save='reprojected_data/%s_plot' % obs)
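# Hypothetical usage sketch (not from the original source): the obsIDs and the
# flat [start1, stop1, start2, stop2, ...] times list are placeholder values
# that only illustrate how extract_flare() pairs each obsID with its start and
# stop times; it assumes reprojected_data/<obsid>_background.fits files exist.
example_obsids = ['3248', '11775']
example_times = [84244019.0, 84295019.0,      # start/stop for the first obsID
                 362792469.0, 362835469.0]    # start/stop for the second obsID
extract_flare(example_obsids, example_times)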
def makeLightCurves(srcreg, bkgreg, radtag, nametag, evtFile, obsID, dataPath,
                    emin, emax, etag, lcbin, ccd):
    # define input file names
    srcFile = (evtFile + '[sky=region(' + srcreg + '),ccd_id=' + str(ccd)
               + ',energy=' + str(emin) + ':' + str(emax)
               + '][bin time=::' + str(lcbin) + ']')
    srcFile2 = (evtFile + '[sky=region(' + srcreg + '),ccd_id=' + str(ccd)
                + ',energy=' + str(emin) + ':' + str(emax)
                + '][bin time=::0.44104]')
    bkgFile = (evtFile + '[sky=region(' + bkgreg + '),ccd_id=' + str(ccd)
               + ',energy=' + str(emin) + ':' + str(emax) + ']')

    # define output file names
    lc_nobkg = (str(obsID) + "_" + nametag + "_" + etag + "_" + radtag
                + "_lc_" + str(lcbin) + "s.fits")
    lc_nobkg_noTbin = (str(obsID) + "_" + nametag + "_" + etag + "_" + radtag
                       + "_lc_0.44104s.fits")
    lc_bkg = (str(obsID) + "_" + nametag + "_bkgsub_" + etag + "_" + radtag
              + "_lc_" + str(lcbin) + "s.fits")

    # source without background
    rt.dmextract(srcFile, outfile=dataPath + lc_nobkg, bkg="", opt="ltc1",
                 clobber="yes")
    print(" created " + lc_nobkg)

    # source without background, fixed 0.44104 s time bin
    rt.dmextract(srcFile2, outfile=dataPath + lc_nobkg_noTbin, bkg="",
                 opt="ltc1", clobber="yes")
    print(" created " + lc_nobkg_noTbin)

    # source with background
    rt.dmextract(srcFile, outfile=dataPath + lc_bkg, bkg=bkgFile, opt="ltc1",
                 clobber="yes")
    print(" created " + lc_bkg)

    return dataPath + lc_nobkg, dataPath + lc_bkg
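# Hypothetical usage sketch (not from the original source): every file name,
# region file, tag, and numeric value below is a placeholder chosen only to
# illustrate the arguments makeLightCurves() expects.
src_lc, bkgsub_lc = makeLightCurves(
    srcreg='src.reg', bkgreg='bkg.reg', radtag='r2arcsec', nametag='target',
    evtFile='acisf12345_repro_evt2.fits', obsID=12345, dataPath='lc/',
    emin=500, emax=7000, etag='0.5-7keV', lcbin=3.24104, ccd=7)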
def extract_spec(observation, region_file, region_number, dtime, btime):
    infile = "{clean}[sky=region({region_file})][bin pi]".format(
        clean=observation.sc_clean,
        region_file=region_file
    )
    outfile = io.get_path("{super_comp_dir}/{obsid}_{region_number}.pi".format(
        super_comp_dir=observation.cluster.super_comp_dir,
        obsid=observation.id,
        region_number=region_number
    ))
    rt.dmextract(infile=infile, outfile=outfile, clobber=True)

    infile = "{back}[sky=region({region_file})][bin pi]".format(
        back=observation.sc_back,
        region_file=region_file
    )
    outfile = io.get_path("{super_comp_dir}/{obsid}_back_{region_number}.pi".format(
        super_comp_dir=observation.cluster.super_comp_dir,
        obsid=observation.id,
        region_number=region_number
    ))
    rt.dmextract(infile=infile, outfile=outfile, clobber=True)

    data_pi = "{super_comp_dir}/{obsid}_{region_number}.pi".format(
        super_comp_dir=observation.cluster.super_comp_dir,
        obsid=observation.id,
        region_number=region_number
    )
    back_pi = "{super_comp_dir}/{obsid}_back_{region_number}.pi".format(
        super_comp_dir=observation.cluster.super_comp_dir,
        obsid=observation.id,
        region_number=region_number
    )
    warf = "'{super_comp_dir}/{name}_{obsid}.arf'".format(
        super_comp_dir=observation.cluster.super_comp_dir,
        name=observation.cluster.name,
        obsid=observation.id
    )
    wrmf = "'{super_comp_dir}/{name}_{obsid}.rmf'".format(
        super_comp_dir=observation.cluster.super_comp_dir,
        name=observation.cluster.name,
        obsid=observation.id
    )

    # Put the exposure, responses, and background file into the data file for the region
    # rt.dmhedit(infile=data_pi, filelist="", operation="add", key="BACKFILE", value=back_pi)
    rt.dmhedit(infile=data_pi, filelist="", operation="add", key="EXPOSURE", value=dtime)
    rt.dmhedit(infile=data_pi, filelist="", operation="add", key="RESPFILE", value=wrmf)
    rt.dmhedit(infile=data_pi, filelist="", operation="add", key="ANCRFILE", value=warf)
    rt.dmhedit(infile=data_pi, filelist="", operation="add", key="BACKFILE", value=back_pi)
    rt.dmhedit(infile=back_pi, filelist="", operation="add", key="EXPOSURE", value=btime)

    io.append_to_file(observation.cluster.spec_lis(region_number), "{}\n".format(data_pi))

    return (data_pi, back_pi)
def create_global_response_file_for(cluster, obsid, region_file):
    observation = cluster.observation(obsid)
    # min_counts = 525

    obs_analysis_dir = observation.analysis_directory
    global_response_dir = "{}/globalresponse/".format(obs_analysis_dir)
    io.make_directory(global_response_dir)

    clean = observation.clean
    back = observation.back

    pbk0 = io.get_filename_matching("{}/acis*pbk0*.fits".format(obs_analysis_dir))[0]
    bad_pixel_file = io.get_filename_matching("{}/bpix1_new.fits".format(obs_analysis_dir))[0]

    rt.ardlib.punlearn()
    rt.acis_set_ardlib(badpixfile=bad_pixel_file)

    mask_file = io.get_filename_matching("{}/*msk1.fits".format(obs_analysis_dir))

    make_pcad_lis(cluster, obsid)

    infile = "{}[sky=region({})]".format(clean, region_file)
    outroot = "{}/acisI_region_0".format(global_response_dir)
    weight = True
    correct_psf = False
    pcad = "@{}/pcad_asol1.lis".format(obs_analysis_dir)
    combine = False
    bkg_file = ""
    bkg_resp = False
    group_type = "NUM_CTS"
    binspec = 1
    clobber = True

    rt.specextract(infile=infile, outroot=outroot, weight=weight,
                   correctpsf=correct_psf, asp=pcad, combine=combine,
                   mskfile=mask_file, bkgfile=bkg_file, bkgresp=bkg_resp,
                   badpixfile=bad_pixel_file, grouptype=group_type,
                   binspec=binspec, clobber=clobber)

    infile = "{}[sky=region({})][bin pi]".format(back, region_file)
    outfile = "{}/acisI_back_region_0.pi".format(global_response_dir)
    clobber = True

    rt.dmextract.punlearn()
    print("Running: dmextract infile={}, outfile={}, clobber={}".format(infile, outfile, clobber))
    rt.dmextract(infile=infile, outfile=outfile, clobber=clobber)

    rt.dmhedit.punlearn()
    infile = "{}/acisI_region_0.pi".format(global_response_dir)
    filelist = ""
    operation = "add"
    key = "BACKFILE"
    value = outfile

    rt.dmhedit(infile=infile, filelist=filelist, operation=operation, key=key, value=value)

    aux_response_file = '{global_response_directory}/acisI_region_0.arf'.format(
        global_response_directory=observation.global_response_directory)
    redist_matrix_file = '{global_response_directory}/acisI_region_0.rmf'.format(
        global_response_directory=observation.global_response_directory)

    io.copy(aux_response_file, observation.aux_response_file)
    io.copy(redist_matrix_file, observation.redistribution_matrix_file)
def lightcurves_with_exclusion(cluster):
    for observation in cluster.observations:

        # data_nosrc_hiEfilter = "{}/acisI_nosrc_fullE.fits".format(obs_analysis_dir)
        data_nosrc_hiEfilter = "{}/acisI_nosrc_hiEfilter.fits".format(observation.analysis_directory)
        print("Creating the image with sources removed")
        data = observation.acis_nosrc_filename
        image_nosrc = "{}/img_acisI_nosrc_fullE.fits".format(observation.analysis_directory)
        clobber = True  # set up front so it is defined on every branch below

        if io.file_exists(observation.exclude_file):
            print("Removing sources from event file to be used in lightcurve")

            infile = "{}[exclude sky=region({})]".format(data_nosrc_hiEfilter, observation.exclude)
            outfile = "{}/acisI_lcurve.fits".format(observation.analysis_directory)

            rt.dmcopy.punlearn()
            rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

            data_lcurve = "{}/acisI_lcurve.fits".format(observation.analysis_directory)
        else:
            yes_or_no = io.check_yes_no(
                "Are there sources to be excluded from observation {} "
                "while making the lightcurve? ".format(observation.id))

            if yes_or_no:  # yes_or_no == True
                print("Create a region file with the region to be excluded "
                      "and save it as {}".format(observation.exclude_file))
                continue  # the lightcurve cannot be built until the exclude region file exists
            else:
                data_lcurve = data_nosrc_hiEfilter

        backbin = 259.28

        echo = True
        tstart = rt.dmkeypar(infile=data_nosrc_hiEfilter, keyword="TSTART", echo=echo)
        tstop = rt.dmkeypar(infile=data_nosrc_hiEfilter, keyword="TSTOP", echo=echo)

        print("Creating lightcurve from the events list with dmextract")

        infile = "{}[bin time={}:{}:{}]".format(data_lcurve, tstart, tstop, backbin)
        outfile = "{}/acisI_lcurve.lc".format(observation.analysis_directory)
        opt = "ltc1"

        rt.dmextract.punlearn()
        rt.dmextract(infile=infile, outfile=outfile, opt=opt, clobber=clobber)

        lcurve = outfile

        print("Cleaning the lightcurve by removing flares with deflare.")

        rt.deflare.punlearn()
        infile = lcurve
        outfile = "{}/acisI_gti.gti".format(observation.analysis_directory)
        method = "clean"
        save = "{}/acisI_lcurve".format(observation.analysis_directory)

        rt.deflare(infile=infile, outfile=outfile, method=method, save=save)

        gti = outfile

        print("Filtering the event list using the GTI info just obtained.")

        infile = "{}[@{}]".format(data_nosrc_hiEfilter, gti)
        outfile = observation.clean

        rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

        data_clean = outfile

        print("Don't forget to check the light curves!")
def generate_light_curve(observation):
    # filter out high energy background flares
    obsid_analysis_dir = observation.analysis_directory
    data = observation.acis_nosrc_filename
    background = observation.background_nosrc_filename

    infile = "{}[energy=9000:12000]".format(data)
    outfile = "{}/acisI_hiE.fits".format(obsid_analysis_dir)
    clobber = True

    rt.dmcopy.punlearn()
    rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

    data_hiE = outfile
    infile = "{}[bin sky=8]".format(data_hiE)
    outfile = "{}/img_acisI_hiE.fits".format(obsid_analysis_dir)

    rt.dmcopy.punlearn()
    rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

    backbin = 259.28

    echo = True
    tstart = rt.dmkeypar(infile=data_hiE, keyword="TSTART", echo=echo)
    tstop = rt.dmkeypar(infile=data_hiE, keyword="TSTOP", echo=echo)

    print("Creating a lightcurve from the high energy events list with dmextract")

    rt.dmextract.punlearn()
    infile = "{}[bin time={}:{}:{}]".format(data_hiE, tstart, tstop, backbin)
    outfile = "{}/acisI_lcurve_hiE.lc".format(obsid_analysis_dir)

    print('Running dmextract infile={} outfile={} opt=ltc1 clobber=True'.format(infile, outfile))
    rt.dmextract(infile=infile, outfile=outfile, opt='ltc1', clobber=True)

    lcurve_hiE = outfile

    print("Cleaning the lightcurve for {}".format(observation.id))

    rt.deflare.punlearn()
    outfile = "{}/acisI_gti_hiE.gti".format(obsid_analysis_dir)
    method = "clean"
    save = "{}/acisI_lcurve_hiE".format(obsid_analysis_dir)

    rt.deflare(infile=lcurve_hiE, outfile=outfile, method=method, save=save)

    gti_hiE = outfile

    print("Filtering the event list using GTI info from high energy flares.")

    infile = "{}[@{}]".format(data, gti_hiE)
    outfile = "{}/acisI_nosrc_hiEfilter.fits".format(obsid_analysis_dir)

    print("Running: dmcopy infile={} outfile={} clobber={}".format(infile, outfile, clobber))

    rt.dmcopy.punlearn()
    rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

    data_nosrc_hiEfilter = outfile

    infile = "{}[bin sky=8]".format(data_nosrc_hiEfilter)
    outfile = "{}/img_acisI_nosrc_fullE.fits".format(obsid_analysis_dir)

    rt.dmcopy.punlearn()
    rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)
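# Hypothetical usage sketch (not from the original source): generate_light_curve()
# works on a single observation, so a pipeline would typically loop over the
# cluster's observations (the `cluster` object is assumed to come from the same
# pipeline as the functions above).
for observation in cluster.observations:
    generate_light_curve(observation)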
# how to decide on timebin size?
bkg_lc_file = args.output_dir + 'bkg_lc.fits'

# - filter on energy -
crt.dmcopy.punlearn()
estr = evt2_infile + '[energy=2400:6000]'
crt.dmcopy(estr, evt2_infile + '_2400-6000')

# - extract light curve -
crt.dmextract.punlearn()
if args.exclude_region != 'none':
    evt2_file_str = (evt2_infile + '[exclude sky=region(' + args.exclude_region
                     + ')][bin time=::' + str(args.timebin) + ']')
else:
    evt2_file_str = (evt2_infile + '_2400-6000[bin time=::'
                     + str(args.timebin) + ']')
crt.dmextract(evt2_file_str, outfile=bkg_lc_file, opt='ltc1', clobber=args.clobber)

# - remove energy filtered event file -
os.remove(evt2_infile + '_2400-6000')

# --Create GTI file--
gti_outfile = args.output_dir + 'flare_gti.fits'
if args.method == 'lc_sigma_clip':
    if args.sigma is None:
        args.sigma = 3
    lightcurves.lc_sigma_clip(bkg_lc_file, outfile=gti_outfile, sigma=args.sigma)
else:
    lightcurves.lc_clean(bkg_lc_file, outfile=gti_outfile, sigma=args.sigma)

# --Apply GTI file--
evt2_file_str = evt2_infile + '[@' + gti_outfile + ']'
crt.dmcopy(evt2_file_str, evt2_outfile, opt='all', clobber=args.clobber)
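# Hypothetical argparse setup (not from the original source): the option names
# mirror the attributes the snippet above reads from `args`; the defaults and
# positional arguments are illustrative placeholders only.
import argparse

parser = argparse.ArgumentParser(description='Filter background flares from an evt2 file.')
parser.add_argument('evt2_infile')
parser.add_argument('evt2_outfile')
parser.add_argument('--output_dir', default='./')
parser.add_argument('--exclude_region', default='none')
parser.add_argument('--timebin', type=float, default=200.0)
parser.add_argument('--method', choices=['lc_sigma_clip', 'lc_clean'], default='lc_sigma_clip')
parser.add_argument('--sigma', type=float, default=None)
parser.add_argument('--clobber', default='yes')
args = parser.parse_args()
evt2_infile, evt2_outfile = args.evt2_infile, args.evt2_outfile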