def copy_event_files(source_dir, destination_dir):
    """Copy the reprocessed level-2 event file and new bad-pixel file.

    Globs *source_dir* for the ``acis*repro_evt2.fits`` and
    ``*repro_bpix1.fits`` products and copies them into *destination_dir*
    as ``evt2.fits`` and ``bpix1_new.fits`` respectively.

    Returns None.
    """
    os.chdir(source_dir)  # NOTE(review): changes the process CWD as a side effect

    def newest(pattern):
        # io.get_filename_matching may return a single path or a list of
        # matches; when it is a list, take the last (presumably newest) one.
        found = io.get_filename_matching(pattern)
        if isinstance(found, list):
            return found[-1]
        return found

    evt2 = newest("{source_dir}/acis*repro_evt2.fits".format(source_dir=source_dir))
    bpix1 = newest("{source_dir}/*repro_bpix1.fits".format(source_dir=source_dir))

    io.copy(evt2, io.get_path("{}/evt2.fits".format(destination_dir)))
    io.copy(bpix1, io.get_path("{}/bpix1_new.fits".format(destination_dir)))

    print("Copied level 2 event files")
    return None
def remove_sources_from_observation(observation):
    """Excise point sources from an observation's event and background files.

    Runs ``dmcopy`` with an ``exclude sky=region(...)`` filter (using the
    cluster's sources region file) over both the foreground event file and
    the background file, writing ``observation.acis_nosrc_filename`` and
    ``observation.background_nosrc_filename``. The cleaned background is
    additionally copied to ``observation.back``.
    """
    print("removing sources from {}".format(observation.id))

    # remove sources from foreground and background
    fore_or_back = [observation.data_filename, observation.back_filename]
    outfiles = [observation.acis_nosrc_filename, observation.background_nosrc_filename]

    for i, type_of_obs in enumerate(fore_or_back):
        infile = "{type_of_obs}[exclude sky=region({sources})]".format(
            type_of_obs=type_of_obs,
            sources=observation.cluster.sources_file
        )
        outfile = outfiles[i]
        clobber = True

        print("infile: {}".format(infile))
        print("outfile: {}".format(outfile))

        rt.dmcopy.punlearn()
        rt.dmcopy(infile=infile, outfile=outfile, clobber=clobber)

        # BUG FIX: the original tested `type_of_obs is observation.background_nosrc_filename`,
        # i.e. it compared the *input* filename against the background *output*
        # filename — and with `is` on a freshly returned string, so the branch
        # could never fire and the cleaned background was never propagated.
        # Compare the output filename by equality instead.
        if outfile == observation.background_nosrc_filename:
            print("Copying background to {}".format(observation.back))
            io.copy(outfile, observation.back)
def prepare_for_spec(cluster_obj: cluster.ClusterObj):
    """Stage the files needed for spectral analysis into the super-computer
    directory of *cluster_obj* so the computation can be offloaded.

    Requires a running CIAO environment (the local ``ciao`` module must be
    importable). For each observation, copies the cleaned event/background
    files, response files (ARF/RMF), and ACIS mask, and records the
    exposure time to a per-observation file.

    Raises:
        ImportError: when the ``ciao`` module cannot be imported.
    """
    try:
        import ciao
    except ImportError:
        print("Must be running CIAO before running prepare_for_spec.")
        raise

    io.make_directory(cluster_obj.super_comp_dir)
    cluster_obj.initialize_best_fits_file()
    print("Preparing files for spectral analysis and copying to {super_comp_dir} for offloading computation.".format(
        super_comp_dir=cluster_obj.super_comp_dir
    ))
    io.copy(cluster_obj.configuration_filename, cluster_obj.super_comp_cluster_config)

    for obs in cluster_obj.observations:
        print("Copying files for {obsid}".format(obsid=obs.id))

        # (source, destination) pairs to stage for this observation
        transfers = (
            (obs.clean, cluster_obj.acisI_clean_obs(obs.id)),
            (obs.back, cluster_obj.backI_clean_obs(obs.id)),
            (obs.aux_response_file, obs.arf_sc),
            (obs.redistribution_matrix_file, obs.rmf_sc),
            (obs.acis_mask, obs.acis_mask_sc),
        )
        for src, dst in transfers:
            io.copy(src, dst)

        exposure = ciao.get_exposure(obs.clean)
        io.write_contents_to_file(exposure, obs.exposure_time_file, binary=False)
def create_global_response_file_for(cluster, obsid, region_file):
    """Build the global (cluster-wide) spectral response for one observation.

    Runs ``specextract`` on the cleaned events within *region_file* to make
    the region ARF/RMF, extracts the matching background spectrum with
    ``dmextract``, links the two via a ``BACKFILE`` header keyword
    (``dmhedit``), and copies the resulting ARF/RMF to the observation's
    canonical response-file locations.
    """
    observation = cluster.observation(obsid)
    #min_counts = 525

    obs_analysis_dir = observation.analysis_directory
    global_response_dir = "{}/globalresponse/".format(obs_analysis_dir)
    io.make_directory(global_response_dir)

    clean = observation.clean
    back = observation.back

    # NOTE(review): pbk0 is never used below; the [0] index still acts as an
    # existence check (IndexError when the file is missing) — confirm intent.
    pbk0 = io.get_filename_matching("{}/acis*pbk0*.fits".format(obs_analysis_dir))[0]
    bad_pixel_file = io.get_filename_matching("{}/bpix1_new.fits".format(obs_analysis_dir))[0]

    rt.ardlib.punlearn()
    rt.acis_set_ardlib(badpixfile=bad_pixel_file)

    # NOTE(review): unlike the lookups above, this keeps the whole match list
    # (no [0]) — presumably runtool accepts it as a stack; verify.
    mask_file = io.get_filename_matching("{}/*msk1.fits".format(obs_analysis_dir))

    make_pcad_lis(cluster, obsid)

    # Extract the weighted spectrum + responses for the global region.
    spec_args = dict(
        infile="{}[sky=region({})]".format(clean, region_file),
        outroot="{}/acisI_region_0".format(global_response_dir),
        weight=True,
        correctpsf=False,
        asp="@{}/pcad_asol1.lis".format(obs_analysis_dir),
        combine=False,
        mskfile=mask_file,
        bkgfile="",
        bkgresp=False,
        badpixfile=bad_pixel_file,
        grouptype="NUM_CTS",
        binspec=1,
        clobber=True,
    )
    rt.specextract(**spec_args)

    # Extract the background spectrum over the same region.
    infile = "{}[sky=region({})][bin pi]".format(back, region_file)
    outfile = "{}/acisI_back_region_0.pi".format(global_response_dir)
    clobber = True

    rt.dmextract.punlearn()
    print("Running: dmextract infile={}, outfile={}, clobber={}".format(infile, outfile, clobber))
    rt.dmextract(infile=infile, outfile=outfile, clobber=clobber)

    # Point the source spectrum at its background via the BACKFILE keyword.
    rt.dmhedit.punlearn()
    rt.dmhedit(infile="{}/acisI_region_0.pi".format(global_response_dir),
               filelist="",
               operation="add",
               key="BACKFILE",
               value=outfile)

    observation = cluster.observation(obsid)

    aux_response_file = '{global_response_directory}/acisI_region_0.arf'.format(
        global_response_directory=observation.global_response_directory)
    redist_matrix_file = '{global_response_directory}/acisI_region_0.rmf'.format(
        global_response_directory=observation.global_response_directory)

    io.copy(aux_response_file, observation.aux_response_file)
    io.copy(redist_matrix_file, observation.redistribution_matrix_file)
def ciao_back(cluster, overwrite=False):
    """Fetch, gain-match, and reproject blank-sky backgrounds for *cluster*.

    For every CCD event file in each observation's merge list: look up the
    CALDB blank-sky background (``acis_bkgrnd_lookup``), copy it locally,
    reprocess it when its GAINFILE disagrees with the event file's,
    reproject it onto the observation aspect, and for VFAINT data keep only
    ``status=0`` events. Writes ``backI.lis``, ``backS.lis``, and the merged
    background list per observation.

    Args:
        cluster: cluster object whose observations are processed.
        overwrite: when True, delete and re-copy an already-present local
            background file.

    Raises:
        OSError: re-raised when the CALDB background cannot be copied.
    """
    print("Running ciao_back on {}.".format(cluster.name))

    for observation in cluster.observations:
        pcad_file = make_pcad_lis(cluster, observation.id)
        backI_lis = []
        backS_lis = []
        analysis_path = observation.analysis_directory
        filelist = io.read_contents_of_file(observation.ccd_merge_list).split('\n')
        pcad = io.read_contents_of_file(pcad_file)

        for acis_file in filelist:
            rt.acis_bkgrnd_lookup.punlearn()
            print("Finding background for {}".format(acis_file))
            path_to_background = rt.acis_bkgrnd_lookup(infile=acis_file)
            print("Found background at {}".format(path_to_background))

            # CCD id is the final digit of the stem (e.g. ".../acis_ccd3.fits").
            acis_id = int(acis_file.split('/')[-1].split('.')[-2][-1])
            assert isinstance(acis_id, int), "acis_id = {}".format(acis_id)
            assert not isinstance(path_to_background, type(None)), \
                "Cannot find background {}".format(acis_file)

            local_background_path = io.get_path("{}/back_ccd{}.fits".format(analysis_path, acis_id))
            try:
                if io.file_exists(local_background_path) and overwrite:
                    io.delete(local_background_path)
                io.copy(path_to_background, local_background_path)
            except OSError:
                print("Problem copying background file {}. Do you have the right permissions and a full CALDB?".format(
                    path_to_background))
                raise

            acis_gain = rt.dmkeypar(infile=acis_file, keyword="GAINFILE", echo=True)
            background_gain = rt.dmkeypar(infile=local_background_path, keyword="GAINFILE", echo=True)

            print("{}/{}/acis_ccd{}.fits gain: {}".format(cluster.name, observation.id, acis_id, acis_gain))
            print("{}/{}/back_ccd{}.fits gain: {}".format(cluster.name, observation.id, acis_id, background_gain))

            # BUG FIX: the original reprocessed when the gain files *matched*.
            # Reprocessing is needed precisely when they do NOT match, which
            # is also what the message below has always said.
            if not dates_and_versions_match(acis_gain, background_gain):
                print("Date/version numbers don't match on the acis data and background. Reprocessing.")
                local_background_path = reprocess(cluster, observation.id, acis_gain, background_gain, acis_id)

            print("Reprojecting background")
            rt.reproject_events.punlearn()
            infile = local_background_path
            outfile = io.get_path("{local_path}/back_reproj_ccd{acis_id}.fits".format(local_path=analysis_path,
                                                                                      acis_id=acis_id))
            match = acis_file

            print(
                "Running:\n reproject_events(infile={infile}, outfile={outfile}, aspect={pcad}, match={match})".format(
                    infile=infile, outfile=outfile, pcad=pcad, match=match)
            )
            rt.reproject_events(infile=infile,
                                outfile=outfile,
                                aspect="{pcad_file}".format(pcad_file=pcad),
                                match=match,
                                random=0,
                                clobber=True)

            back_reproject = outfile

            # NOTE(review): get_filename_matching returns a list here (no [0]);
            # presumably runtool handles it as a stack — confirm.
            datamode = rt.dmkeypar(
                infile=io.get_filename_matching(io.get_path("{}/acis*evt1*.fits".format(analysis_path))),
                keyword="DATAMODE")
            if datamode == "VFAINT":
                print("VFAINT Mode, resetting setting status bits")
                rt.dmcopy.punlearn()
                rt.dmcopy(infile="{}[status=0]".format(back_reproject),
                          outfile=outfile,
                          clobber=True)

            # CCDs 0-3 are the ACIS-I array; the rest belong to ACIS-S.
            if acis_id <= 3:
                backI_lis.append(back_reproject)
            else:
                backS_lis.append(back_reproject)

        merged_back_list = backI_lis + backS_lis

        print("writing backI.lis and backS.lis")
        io.write_contents_to_file("\n".join(backI_lis), io.get_path("{}/backI.lis".format(analysis_path)),
                                  binary=False)
        io.write_contents_to_file("\n".join(backS_lis), io.get_path("{}/backS.lis".format(analysis_path)),
                                  binary=False)
        io.write_contents_to_file("\n".join(merged_back_list), observation.merged_back_lis, binary=False)
    return