def ccd_sort(cluster):
    """Split each observation's level-2 event file into per-CCD event files.

    For every observation in *cluster*: read DETNAM from the level-1 event
    file, derive the active ACIS chip ids from it, dmcopy each chip's events
    out of the level-2 file into ``acis_ccd<N>.fits``, record the ACIS-I
    chip files (ccd 0-3) in the observation's ccd_merge_list, and hand them
    to merge_data_and_backgrounds.

    Parameters
    ----------
    cluster : cluster object with ``name`` and iterable ``observations``.
    """
    print("Running ccd_sort on {}.".format(cluster.name))
    for observation in cluster.observations:
        print("Working on {}/{}".format(cluster.name, observation.id))
        analysis_path = observation.analysis_directory
        os.chdir(analysis_path)
        evt1_filename = io.get_path("{}/{}".format(
            analysis_path, io.get_filename_matching("acis*evt1.fits")[0]))
        evt2_filename = io.get_path("{}/{}".format(
            analysis_path, io.get_filename_matching("evt2.fits")[0]))
        detname = rt.dmkeypar(infile=evt1_filename, keyword="DETNAM", echo=True)
        print("evt1 : {}\nevt2 : {}\ndetname : {}".format(evt1_filename, evt2_filename, detname))
        assert not isinstance(detname, type(None)), "detname returned nothing!"
        # DETNAM looks like e.g. "ACIS-012367"; the digits after '-' are chip ids.
        detnums = [int(x) for x in detname.split('-')[-1]]
        for acis_id in detnums:
            print("{cluster}/{observation}: Making level 2 event file for ACIS Chip id: {acis_id}".format(
                cluster=cluster.name, observation=observation.id, acis_id=acis_id))
            rt.dmcopy(infile="{evt2_file}[ccd_id={acis_id}]".format(evt2_file=evt2_filename,
                                                                    acis_id=acis_id),
                      outfile="acis_ccd{acis_id}.fits".format(acis_id=acis_id),
                      clobber=True)
        # Only chips 0-3 (the ACIS-I array) are collected for merging.
        # (Idiom fix: comprehension replaces the original index-mutation loop.)
        acisI_list = [
            io.get_path("{obs_analysis_dir}/{file}".format(
                obs_analysis_dir=observation.analysis_directory, file=ccd_file))
            for ccd_file in io.get_filename_matching("acis_ccd[0-3].fits")
        ]
        io.write_contents_to_file("\n".join(acisI_list), observation.ccd_merge_list, binary=False)
        merge_data_and_backgrounds(cluster, acisI_list)
    return
def copy_event_files(source_dir, destination_dir):
    """Copy the reprocessed evt2 and bpix1 files into *destination_dir*.

    When several files match a pattern, the last (newest) match is taken.
    The copies are renamed to ``evt2.fits`` and ``bpix1_new.fits``.
    """
    os.chdir(source_dir)

    def _latest_match(pattern):
        # io.get_filename_matching may return a list; keep the final entry.
        found = io.get_filename_matching(pattern.format(source_dir=source_dir))
        return found[-1] if isinstance(found, list) else found

    evt2 = _latest_match("{source_dir}/acis*repro_evt2.fits")
    bpix1 = _latest_match("{source_dir}/*repro_bpix1.fits")

    io.copy(evt2, io.get_path("{}/evt2.fits".format(destination_dir)))
    io.copy(bpix1, io.get_path("{}/bpix1_new.fits".format(destination_dir)))
    print("Copied level 2 event files")
    return None
def get_cluster_config(clstr_name):
    """Return the path of the newest pypeline config file for *clstr_name*.

    Looks for ``<data_dir><name>/<name>_pypeline_config.ini``; returns the
    last match, or None when no config file exists.
    """
    data_dir = config.data_directory()
    matches = io.get_filename_matching(
        '{0}{1}/{1}_pypeline_config.ini'.format(data_dir, clstr_name))
    return matches[-1] if matches else None
def make_pcad_lis(cluster, obsid):
    """Write ``pcad_asol1.lis`` listing every asol1 file for *obsid*.

    Returns the path of the list file that was written.
    """
    analysis_dir = cluster.obs_analysis_directory(obsid)
    asol_files = io.get_filename_matching("{}/*asol1.fits".format(analysis_dir))
    listing = "\n".join(asol_files)
    list_path = "{}/pcad_asol1.lis".format(analysis_dir)
    io.write_contents_to_file(listing, list_path, binary=False)
    return list_path
def merge_data_and_backgrounds(cluster, acis_list):
    """Merge the per-CCD event files listed in ``acisI.lis`` into ``acisI.fits``.

    Parameters
    ----------
    cluster : the cluster being processed (currently unused in the body).
    acis_list : list of per-CCD event file paths (currently unused; dmmerge
        reads the file list from ``acisI.lis`` on disk instead).
    """
    rt.dmmerge.punlearn()
    merged_file = "acisI.fits"
    # "[subspace -expno]" drops the exposure-number subspace so the
    # per-CCD files merge without subspace conflicts.
    rt.dmmerge(infile="@acisI.lis[subspace -expno]",
               outfile=merged_file,
               clobber=True)
    # BUG FIX: get_filename_matching returns a list; dmkeypar expects a single
    # file (every other call site in this module indexes the result).
    # detname itself is currently unread, but the call validates the file exists.
    detname = rt.dmkeypar(infile=io.get_filename_matching("acis*evt1.fits")[0],
                          keyword="DETNAM")
    rt.dmlist.punlearn()
    rt.dmlist(infile=merged_file, opt="header")
    return None
def acis_mask(self):
    """Return the first ``*_msk1.fits`` file in this observation's analysis directory."""
    pattern = "{analysis_dir}/*_msk1.fits".format(analysis_dir=self.analysis_directory)
    matches = io.get_filename_matching(pattern)
    return matches[0]
def fov_file(self):
    """Return the first field-of-view (``fov1.fits``) file for this observation."""
    pattern = "{}/*{}*fov1.fits".format(self.analysis_directory, self.id)
    matches = io.get_filename_matching(pattern)
    return matches[0]
def create_global_response_file_for(cluster, obsid, region_file):
    """Build the global spectral response (ARF/RMF) for one observation.

    Runs specextract on the cleaned events inside *region_file*, extracts
    the matching background spectrum with dmextract, links the two via the
    BACKFILE header keyword, then copies the resulting ARF and RMF to the
    observation's canonical locations.

    Parameters
    ----------
    cluster : cluster object providing ``observation(obsid)``.
    obsid : observation id within the cluster.
    region_file : sky region file defining the extraction region.
    """
    observation = cluster.observation(obsid)
    obs_analysis_dir = observation.analysis_directory
    global_response_dir = "{}/globalresponse/".format(obs_analysis_dir)
    io.make_directory(global_response_dir)

    clean = observation.clean
    back = observation.back

    # pbk0 is not used below, but indexing [0] fails fast if the file is absent.
    pbk0 = io.get_filename_matching("{}/acis*pbk0*.fits".format(obs_analysis_dir))[0]
    bad_pixel_file = io.get_filename_matching("{}/bpix1_new.fits".format(obs_analysis_dir))[0]

    rt.ardlib.punlearn()
    rt.acis_set_ardlib(badpixfile=bad_pixel_file)

    # NOTE(review): get_filename_matching returns a list, and the whole list
    # is passed as specextract's mskfile — confirm the tool accepts a stack;
    # other call sites in this module index the result.
    mask_file = io.get_filename_matching("{}/*msk1.fits".format(obs_analysis_dir))

    make_pcad_lis(cluster, obsid)

    infile = "{}[sky=region({})]".format(clean, region_file)
    outroot = "{}/acisI_region_0".format(global_response_dir)
    weight = True
    correct_psf = False
    pcad = "@{}/pcad_asol1.lis".format(obs_analysis_dir)
    combine = False
    bkg_file = ""
    bkg_resp = False
    group_type = "NUM_CTS"
    binspec = 1
    clobber = True

    rt.specextract(infile=infile, outroot=outroot, weight=weight, correctpsf=correct_psf,
                   asp=pcad, combine=combine, mskfile=mask_file, bkgfile=bkg_file,
                   bkgresp=bkg_resp, badpixfile=bad_pixel_file, grouptype=group_type,
                   binspec=binspec, clobber=clobber)

    # Background spectrum from the blank-sky file, binned in PI channels.
    infile = "{}[sky=region({})][bin pi]".format(back, region_file)
    outfile = "{}/acisI_back_region_0.pi".format(global_response_dir)
    clobber = True
    rt.dmextract.punlearn()
    print("Running: dmextract infile={}, outfile={}, clobber={}".format(infile, outfile, clobber))
    rt.dmextract(infile=infile, outfile=outfile, clobber=clobber)

    # Point the source spectrum at its background via the BACKFILE keyword.
    rt.dmhedit.punlearn()
    infile = "{}/acisI_region_0.pi".format(global_response_dir)
    filelist = ""
    operation = "add"
    key = "BACKFILE"
    value = outfile
    rt.dmhedit(infile=infile, filelist=filelist, operation=operation, key=key, value=value)

    # (Removed a redundant second `observation = cluster.observation(obsid)`
    # lookup — the value obtained at the top is still valid here.)
    aux_response_file = '{global_response_directory}/acisI_region_0.arf'.format(
        global_response_directory=observation.global_response_directory)
    redist_matrix_file = '{global_response_directory}/acisI_region_0.rmf'.format(
        global_response_directory=observation.global_response_directory)

    io.copy(aux_response_file, observation.aux_response_file)
    io.copy(redist_matrix_file, observation.redistribution_matrix_file)
def ciao_back(cluster, overwrite=False):
    """Find, copy, gain-check, and reproject blank-sky backgrounds.

    For every per-CCD event file in each observation's ccd_merge_list:
    look up the matching blank-sky background in the CALDB, copy it
    locally (optionally overwriting an existing copy), reprocess it when
    its GAINFILE disagrees with the data's, then reproject it onto the
    observation's aspect solution. Writes ``backI.lis``, ``backS.lis``,
    and the observation's merged back list.

    Parameters
    ----------
    cluster : cluster object with ``name`` and iterable ``observations``.
    overwrite : bool, delete an existing local background copy first.
    """
    print("Running ciao_back on {}.".format(cluster.name))

    for observation in cluster.observations:
        pcad_file = make_pcad_lis(cluster, observation.id)
        backI_lis = []
        backS_lis = []
        analysis_path = observation.analysis_directory
        filelist = io.read_contents_of_file(observation.ccd_merge_list).split('\n')
        # NOTE(review): `pcad` is the *contents* of the .lis file, not its
        # path; it is later passed directly as reproject_events' aspect —
        # confirm the tool accepts a newline-separated stack inline.
        pcad = io.read_contents_of_file(pcad_file)

        for acis_file in filelist:
            rt.acis_bkgrnd_lookup.punlearn()
            print("Finding background for {}".format(acis_file))
            path_to_background = rt.acis_bkgrnd_lookup(infile=acis_file)
            print("Found background at {}".format(path_to_background))

            # e.g. ".../acis_ccd3.fits" -> chip id 3 (last char before ".fits").
            acis_id = int(acis_file.split('/')[-1].split('.')[-2][-1])
            assert isinstance(acis_id, int), "acis_id = {}".format(acis_id)
            assert not isinstance(path_to_background, type(None)), \
                "Cannot find background {}".format(acis_file)

            local_background_path = io.get_path("{}/back_ccd{}.fits".format(analysis_path, acis_id))
            try:
                if io.file_exists(local_background_path) and overwrite:
                    io.delete(local_background_path)
                io.copy(path_to_background, local_background_path)
            except OSError:
                print("Problem copying background file {}. Do you have the right permissions and a full CALDB?".format(
                    path_to_background))
                raise

            acis_gain = rt.dmkeypar(infile=acis_file, keyword="GAINFILE", echo=True)
            background_gain = rt.dmkeypar(infile=local_background_path, keyword="GAINFILE", echo=True)

            print("{}/{}/acis_ccd{}.fits gain: {}".format(cluster.name, observation.id, acis_id, acis_gain))
            print("{}/{}/back_ccd{}.fits gain: {}".format(cluster.name, observation.id, acis_id, background_gain))

            # BUG FIX: the original tested `if dates_and_versions_match(...)`
            # yet printed "don't match ... Reprocessing" — i.e. it reprocessed
            # exactly when the gains AGREED. Reprocessing is only needed when
            # they differ, so the condition is negated here.
            if not dates_and_versions_match(acis_gain, background_gain):
                print("Date/version numbers don't match on the acis data and background. Reprocessing.")
                local_background_path = reprocess(cluster, observation.id, acis_gain, background_gain, acis_id)

            print("Reprojecting background")
            rt.reproject_events.punlearn()
            infile = local_background_path
            outfile = io.get_path("{local_path}/back_reproj_ccd{acis_id}.fits".format(local_path=analysis_path,
                                                                                      acis_id=acis_id))
            match = acis_file
            print(
                "Running:\n reproject_events(infile={infile}, outfile={outfile}, aspect={pcad}, match={match})".format(
                    infile=infile, outfile=outfile, pcad=pcad, match=match)
            )
            rt.reproject_events(infile=infile,
                                outfile=outfile,
                                aspect="{pcad_file}".format(pcad_file=pcad),
                                match=match,
                                random=0,
                                clobber=True)
            back_reproject = outfile

            # BUG FIX: get_filename_matching returns a list; dmkeypar needs a
            # single file, so take the first match (consistent with the rest
            # of this module).
            datamode = rt.dmkeypar(
                infile=io.get_filename_matching(io.get_path("{}/acis*evt1*.fits".format(analysis_path)))[0],
                keyword="DATAMODE")
            if datamode == "VFAINT":
                print("VFAINT Mode, resetting setting status bits")
                rt.dmcopy.punlearn()
                # Keep only status==0 events, clobbering the reprojected file in place.
                rt.dmcopy(infile="{}[status=0]".format(back_reproject),
                          outfile=outfile,
                          clobber=True)

            # Chips 0-3 are ACIS-I; everything else is treated as ACIS-S.
            if acis_id <= 3:
                backI_lis.append(back_reproject)
            else:
                backS_lis.append(back_reproject)

        merged_back_list = backI_lis + backS_lis

        print("writing backI.lis and backS.lis")
        io.write_contents_to_file("\n".join(backI_lis),
                                  io.get_path("{}/backI.lis".format(analysis_path)),
                                  binary=False)
        io.write_contents_to_file("\n".join(backS_lis),
                                  io.get_path("{}/backS.lis".format(analysis_path)),
                                  binary=False)
        io.write_contents_to_file("\n".join(merged_back_list),
                                  observation.merged_back_lis,
                                  binary=False)
    return