def set_bkg(rnum, bnum, r_rt, b_rt, verbose=False, debug=False): """Execute CIAO dmhedit to link spectra to backgrounds Check that r_rt and b_rt point to actual files (but I don't check that they are actual FITS files with spectra) Be careful -- this method modifies files (i.e., there is risk of data loss!) """ # Create paths, with assumptions about filename structure reg_path = '{root}_src{number:d}.pi'.format(root=r_rt, number=rnum) reggrp_path = '{root}_src{number:d}_grp.pi'.format(root=r_rt, number=rnum) bkg_path = '{root}_src{number:d}.pi'.format(root=b_rt, number=bnum) # Check for valid files if not (os.path.isfile(reg_path) and os.path.isfile(reggrp_path) and os.path.isfile(bkg_path)): print 'One of these paths is bad:' print reg_path, reggrp_path, bkg_path raise Exception('ERROR: path does not exist!') # Now, find RELATIVE path from reg/reggrp files, to bkg file reg2bkg_path = os.path.relpath(bkg_path, os.path.dirname(reg_path)) if verbose: print '\nSetting file headers for:' print ' {}\n {}'.format(reg_path, reggrp_path) print ' Relative path to bkg: {}'.format(reg2bkg_path) # Set dmhedit parameters dmhedit.punlearn() dmhedit.filelist = 'none' dmhedit.operation = 'add' dmhedit.key = 'BACKFILE' dmhedit.value = '\'{}\''.format(reg2bkg_path) # Single quotes for paths if verbose: dmhedit.verbose = str(1) else: dmhedit.verbose = str(0) # Execute dmhedit on each spectrum for spec in [reg_path, reggrp_path]: dmhedit.infile = spec if not debug: if verbose: print dmhedit() else: dmhedit() else: print 'DEBUG: A call to dmhedit would occur here'
def getDmhedit(filename, CRVAL_num, value):
    """Add a header keyword to a file in place via CIAO dmhedit.

    Parameters
    ----------
    filename : str
        File whose header is edited in place.
    CRVAL_num : str
        Header keyword to add (named for CRVAL* WCS keywords -- presumably
        e.g. 'CRVAL1'; confirm against callers).
    value
        Value to store under that keyword (datatype left as 'indef' so
        dmhedit infers it).
    """
    # Cleaned up dead code from the original: a `value = value`
    # self-assignment, a `clobber = 'yes'` local that was never passed to
    # dmhedit, and redundant local aliases of every parameter.
    rt.dmhedit.punlearn()
    rt.dmhedit(infile=filename,
               filelist='none',
               operation='add',
               key=CRVAL_num,
               value=value,
               datatype='indef')
def set_bkg(num, rt, verbose=False): """Execute CIAO dmhedit to link spectra to backgrounds Check that specified root points to actual files (but I don't check that they are FITS files with spectra) Be careful -- this method modifies files (i.e., there is risk of data loss!) """ # Create paths, with assumptions about filename structure reg_path = '{root}_src{number:d}.pi'.format(root=rt, number=num) reggrp_path = '{root}_src{number:d}_grp.pi'.format(root=rt, number=num) # Check for valid files if not (os.path.isfile(reg_path) and os.path.isfile(reggrp_path)): print 'One of these paths is bad:' print reg_path, reggrp_path raise Exception('ERROR: path does not exist!') if verbose: print '\nResetting BACKFILE for:' print ' {}\n {}'.format(reg_path, reggrp_path) # Set dmhedit parameters dmhedit.punlearn() dmhedit.filelist = 'none' dmhedit.operation = 'add' dmhedit.key = 'BACKFILE' dmhedit.value = 'none' if verbose: dmhedit.verbose = str(1) else: dmhedit.verbose = str(0) # Execute dmhedit on each spectrum for spec in [reg_path, reggrp_path]: dmhedit.infile = spec if verbose: print dmhedit() else: dmhedit()
def extract_spec(observation, region_file, region_number, dtime, btime):
    """Extract source and background PI spectra for one region and link them.

    Runs CIAO dmextract on the cleaned and background event files filtered
    by *region_file*, then uses dmhedit to stamp the exposure times, the
    response file paths (RMF/ARF), and the background spectrum into the
    source spectrum's header.  Finally appends the source spectrum path to
    the region's spectrum list file.

    Parameters
    ----------
    observation
        Project observation object; provides sc_clean, sc_back, id, and
        cluster.{super_comp_dir,name,spec_lis}.
    region_file : str
        CIAO region file defining this region.
    region_number
        Region identifier used in the output filenames.
    dtime
        Exposure value written to the source spectrum's EXPOSURE keyword.
    btime
        Exposure value written to the background spectrum's EXPOSURE keyword.

    Returns
    -------
    (data_pi, back_pi)
        Paths to the extracted source and background spectra.
    """
    # Output spectrum paths -- formatted once and reused (the original
    # rebuilt each of these strings twice).
    data_pi = "{super_comp_dir}/{obsid}_{region_number}.pi".format(
        super_comp_dir=observation.cluster.super_comp_dir,
        obsid=observation.id,
        region_number=region_number
    )
    back_pi = "{super_comp_dir}/{obsid}_back_{region_number}.pi".format(
        super_comp_dir=observation.cluster.super_comp_dir,
        obsid=observation.id,
        region_number=region_number
    )

    # Extract the source spectrum
    infile = "{clean}[sky=region({region_file})][bin pi]".format(
        clean=observation.sc_clean,
        region_file=region_file
    )
    rt.dmextract(infile=infile, outfile=io.get_path(data_pi), clobber=True)

    # Extract the background spectrum
    infile = "{back}[sky=region({region_file})][bin pi]".format(
        back=observation.sc_back,
        region_file=region_file
    )
    rt.dmextract(infile=infile, outfile=io.get_path(back_pi), clobber=True)

    # Response file paths -- the embedded single quotes are part of the
    # header value written by dmhedit, so they are deliberate.
    warf = "'{super_comp_dir}/{name}_{obsid}.arf'".format(
        super_comp_dir=observation.cluster.super_comp_dir,
        name=observation.cluster.name,
        obsid=observation.id
    )
    wrmf = "'{super_comp_dir}/{name}_{obsid}.rmf'".format(
        super_comp_dir=observation.cluster.super_comp_dir,
        name=observation.cluster.name,
        obsid=observation.id
    )

    # Stamp exposure, responses, and the background link into the source
    # spectrum header.  (A commented-out duplicate BACKFILE edit was removed.)
    rt.dmhedit(infile=data_pi, filelist="", operation="add",
               key="EXPOSURE", value=dtime)
    rt.dmhedit(infile=data_pi, filelist="", operation="add",
               key="RESPFILE", value=wrmf)
    rt.dmhedit(infile=data_pi, filelist="", operation="add",
               key="ANCRFILE", value=warf)
    rt.dmhedit(infile=data_pi, filelist="", operation="add",
               key="BACKFILE", value=back_pi)

    # The background spectrum only needs its exposure time.
    rt.dmhedit(infile=back_pi, filelist="", operation="add",
               key="EXPOSURE", value=btime)

    io.append_to_file(observation.cluster.spec_lis(region_number),
                      "{}\n".format(data_pi))

    return (data_pi, back_pi)
def create_global_response_file_for(cluster, obsid, region_file):
    """Build the global (whole-region) response files for one observation.

    Runs CIAO specextract on the cleaned events filtered by *region_file*
    to produce ARF/RMF/PI files in <analysis_dir>/globalresponse/, extracts
    the matching background spectrum with dmextract, links it into the
    source spectrum's BACKFILE keyword via dmhedit, and copies the ARF and
    RMF to the observation's canonical response-file locations.

    Parameters
    ----------
    cluster
        Project cluster object; provides observation(obsid).
    obsid
        Observation identifier within *cluster*.
    region_file : str
        CIAO region file defining the extraction region.
    """
    observation = cluster.observation(obsid)
    obs_analysis_dir = observation.analysis_directory
    global_response_dir = "{}/globalresponse/".format(obs_analysis_dir)
    io.make_directory(global_response_dir)

    clean = observation.clean
    back = observation.back

    # NOTE(review): pbk0 is never used below; kept because the [0] index
    # doubles as an existence check for the pbk0 file -- confirm before
    # removing entirely.
    pbk0 = io.get_filename_matching("{}/acis*pbk0*.fits".format(obs_analysis_dir))[0]
    bad_pixel_file = io.get_filename_matching("{}/bpix1_new.fits".format(obs_analysis_dir))[0]

    rt.ardlib.punlearn()
    rt.acis_set_ardlib(badpixfile=bad_pixel_file)

    # NOTE(review): unlike the lookups above this is a list (no [0]);
    # specextract appears to accept it as-is -- confirm intent.
    mask_file = io.get_filename_matching("{}/*msk1.fits".format(obs_analysis_dir))

    make_pcad_lis(cluster, obsid)

    infile = "{}[sky=region({})]".format(clean, region_file)
    outroot = "{}/acisI_region_0".format(global_response_dir)
    pcad = "@{}/pcad_asol1.lis".format(obs_analysis_dir)

    # Weighted responses over the region, no PSF correction, grouped by
    # counts (NUM_CTS) with one channel per bin.
    rt.specextract(infile=infile,
                   outroot=outroot,
                   weight=True,
                   correctpsf=False,
                   asp=pcad,
                   combine=False,
                   mskfile=mask_file,
                   bkgfile="",
                   bkgresp=False,
                   badpixfile=bad_pixel_file,
                   grouptype="NUM_CTS",
                   binspec=1,
                   clobber=True)

    # Extract the background spectrum over the same region.
    infile = "{}[sky=region({})][bin pi]".format(back, region_file)
    outfile = "{}/acisI_back_region_0.pi".format(global_response_dir)
    clobber = True

    rt.dmextract.punlearn()
    print("Running: dmextract infile={}, outfile={}, clobber={}".format(infile, outfile, clobber))
    rt.dmextract(infile=infile, outfile=outfile, clobber=clobber)

    # Link the background spectrum into the source spectrum's header.
    rt.dmhedit.punlearn()
    rt.dmhedit(infile="{}/acisI_region_0.pi".format(global_response_dir),
               filelist="",
               operation="add",
               key="BACKFILE",
               value=outfile)

    # Copy the responses to their canonical locations.  (Removed dead code:
    # a commented-out min_counts constant and a duplicate
    # cluster.observation(obsid) lookup.)
    aux_response_file = '{global_response_directory}/acisI_region_0.arf'.format(
        global_response_directory=observation.global_response_directory)
    redist_matrix_file = '{global_response_directory}/acisI_region_0.rmf'.format(
        global_response_directory=observation.global_response_directory)

    io.copy(aux_response_file, observation.aux_response_file)
    io.copy(redist_matrix_file, observation.redistribution_matrix_file)