def ccd_sort(cluster):
    """Split each observation's level-2 event file into per-CCD event files.

    For every observation in *cluster*: read the DETNAM keyword from the
    level-1 event file to learn which ACIS chips were active, extract one
    level-2 event file per chip with dmcopy, write the list of ACIS-I chip
    files (ccd_id 0-3) to the observation's ccd_merge_list, and merge the
    data with the backgrounds.

    :param cluster: cluster object whose observations are processed
    :raises ValueError: if the DETNAM keyword cannot be read
    """
    print("Running ccd_sort on {}.".format(cluster.name))
    for observation in cluster.observations:
        print("Working on {}/{}".format(cluster.name, observation.id))
        analysis_path = observation.analysis_directory
        os.chdir(analysis_path)
        evt1_filename = io.get_path("{}/{}".format(
            analysis_path, io.get_filename_matching("acis*evt1.fits")[0]))
        evt2_filename = io.get_path("{}/{}".format(
            analysis_path, io.get_filename_matching("evt2.fits")[0]))
        detname = rt.dmkeypar(infile=evt1_filename, keyword="DETNAM", echo=True)
        print("evt1 : {}\nevt2 : {}\ndetname : {}".format(
            evt1_filename, evt2_filename, detname))
        # Raise instead of assert so the check survives `python -O`.
        if detname is None:
            raise ValueError("detname returned nothing!")
        # DETNAM looks like e.g. "ACIS-012367"; the digits after the dash are
        # the active chip ids.
        detnums = [int(x) for x in detname.split('-')[-1]]
        for acis_id in detnums:
            print("{cluster}/{observation}: Making level 2 event file for "
                  "ACIS Chip id: {acis_id}".format(cluster=cluster.name,
                                                   observation=observation.id,
                                                   acis_id=acis_id))
            rt.dmcopy(infile="{evt2_file}[ccd_id={acis_id}]".format(
                          evt2_file=evt2_filename, acis_id=acis_id),
                      outfile="acis_ccd{acis_id}.fits".format(acis_id=acis_id),
                      clobber=True)
        # Only the ACIS-I chips (ccd_id 0-3) participate in the merge.
        acisI_list = [
            io.get_path("{obs_analysis_dir}/{file}".format(
                obs_analysis_dir=observation.analysis_directory, file=name))
            for name in io.get_filename_matching("acis_ccd[0-3].fits")
        ]
        io.write_contents_to_file("\n".join(acisI_list),
                                  observation.ccd_merge_list,
                                  binary=False)
        merge_data_and_backgrounds(cluster, acisI_list)
    return
def chandra_repro(indir="./", outdir="./repro", set_ardlib=False, clobber=True):
    """Run the CIAO ``chandra_repro`` tool after resetting its parameter file.

    :param indir: directory containing the raw observation data
    :param outdir: directory to place the reprocessed products in
    :param set_ardlib: whether chandra_repro should update ardlib
    :param clobber: overwrite existing output files
    """
    repro_args = {
        'indir': io.get_path(indir),
        'outdir': io.get_path(outdir),
        'set_ardlib': set_ardlib,
        'clobber': clobber,
    }
    rt.chandra_repro.punlearn()
    rt.chandra_repro(**repro_args)
    return
def eff_times_to_fits(clstr: cluster.ClusterObj):
    """Write per-observation effective background/data time maps to FITS.

    The cluster's scale-map file is used as a template so each output image
    inherits its header; the pixel data is replaced with the observation's
    effective background time and effective data time arrays.

    :param clstr: cluster object providing observations and file paths
    """
    for observation in clstr.observations:
        effbt = observation.effective_background_time
        effdt = observation.effective_data_time
        # Use a context manager so the template FITS file is always closed
        # (the original left it open, leaking a file handle per observation).
        with fits.open(clstr.scale_map_file) as template:
            bkg_file = io.get_path('{}/{}_{}_eff_bkg_time.fits'.format(
                clstr.directory, clstr.name, observation.id))
            template[0].data = effbt
            print("writing {}".format(bkg_file))
            template.writeto(bkg_file)

            data_file = io.get_path('{}/{}_{}_eff_data_time.fits'.format(
                clstr.directory, clstr.name, observation.id))
            template[0].data = effdt
            print("writing {}".format(data_file))
            template.writeto(data_file)
def copy_event_files(source_dir, destination_dir):
    """Copy the reprocessed level-2 event and bad-pixel files.

    When several repro products match, the last (newest) match is used. The
    files are copied into *destination_dir* as ``evt2.fits`` and
    ``bpix1_new.fits``.

    :param source_dir: directory containing chandra_repro output
    :param destination_dir: analysis directory to copy into
    :return: None
    """
    os.chdir(source_dir)

    matches = io.get_filename_matching(
        "{source_dir}/acis*repro_evt2.fits".format(source_dir=source_dir))
    evt2_filename = matches[-1] if isinstance(matches, list) else matches

    matches = io.get_filename_matching(
        "{source_dir}/*repro_bpix1.fits".format(source_dir=source_dir))
    bpix1_filename = matches[-1] if isinstance(matches, list) else matches

    io.copy(evt2_filename,
            io.get_path("{}/evt2.fits".format(destination_dir)))
    io.copy(bpix1_filename,
            io.get_path("{}/bpix1_new.fits".format(destination_dir)))
    print("Copied level 2 event files")
    return None
def actually_merge_observations_from(cluster):
    """Combine all of a cluster's evt2 files into a single merged image.

    Writes a ``merged_obs.lis`` file listing every observation's evt2 file,
    then runs ``fluximage`` for a single observation or ``merge_obs`` for
    several.

    :param cluster: cluster object whose observations are merged
    """
    print("Merging observations from {}.".format(cluster.name))

    merged_directory = io.get_path('{}/merged_obs_evt2/'.format(
        cluster.directory))
    io.make_directory(merged_directory)
    os.chdir(merged_directory)

    merged_observations = [
        "{}/{}/analysis/evt2.fits".format(cluster.directory, obs)
        for obs in cluster.observation_list
    ]

    merged_lis = "{}/merged_obs.lis".format(merged_directory)
    io.write_contents_to_file("\n".join(merged_observations), merged_lis,
                              binary=False)

    outroot = io.get_path("{}/{}".format(cluster.directory, cluster.name))
    infile = "@{infile}[ccd_id=0:3]".format(infile=merged_lis)  # for ACIS-I
    # infile = "@{infile}".format(infile=merged_lis)  # for ACIS-I & ACIS-S
    xygrid = "1500:6500:4,1500:6500:4"

    if len(merged_observations) == 1:
        rt.fluximage.punlearn()
        rt.fluximage(infile=infile, outroot=outroot, xygrid=xygrid,
                     clobber=True)
        print("Only single observation, flux image created.")
    elif len(merged_observations) > 1:
        rt.merge_obs.punlearn()
        rt.merge_obs(infiles=infile, outroot=outroot, xygrid=xygrid,
                     clobber=True, parallel=True, nproc=12)
def make_response_files(cluster):
    """Create spectral response files for every observation in *cluster*.

    If the small calibration region (``acisI_region_0.reg``) is missing or
    empty, DS9 is opened so the user can draw and save it; the global
    response file for the obsid is then generated.

    :param cluster: cluster object providing observation ids and paths
    """
    for obsid in cluster.observation_ids:
        print("Making response files for observation {}".format(obsid))
        obs_analysis_dir = cluster.obs_analysis_directory(obsid)
        region_file = io.get_path(
            "{}/acisI_region_0.reg".format(obs_analysis_dir))

        if (not io.file_exists(region_file)) or (io.file_size(region_file) == 0):
            print("Region file {} does not exist.".format(region_file))
            print("When DS9 opens, draw a small circle that covers a piece of "
                  "each ACIS-I chip (~20 pixels) and save it as:\n"
                  "{}".format(region_file))
            print("Opening SAO DS9")
            io.write_contents_to_file("", region_file, False)
            ds9_arguments = ("ds9 -regions system physical -regions shape "
                             "circle -regions format ciao -zoom 0.5 "
                             "-bin factor 4 {}/acisI_clean.fits".format(
                                 obs_analysis_dir))
            # With shell=True the command must be a single string; the
            # original wrapped it in a list, where only the first element is
            # interpreted as the command on POSIX systems.
            subprocess.run(ds9_arguments, shell=True)
        print('Creating global response file.')
        create_global_response_file_for(cluster, obsid, region_file)
def sherpa_save_dir(self):
    """Path of the directory holding saved sherpa fit state."""
    return io.get_path("{}/sherpa/".format(self.super_comp_dir))
def sherpa_save_region(self, region_num):
    """Path of the pickled sherpa save file for *region_num*."""
    return io.get_path("{}/{}.p".format(self.sherpa_save_dir, region_num))
def effdtime_file_obs(self, observation_id):
    """Path of the effective-data-time circle file for *observation_id*."""
    return io.get_path("{}/effdtime-{}_circle.dat".format(
        self.super_comp_dir, observation_id))
def bad_fits_file(self):
    """Path of the CSV listing the worst spectral fits for this cluster."""
    return io.get_path("{}/{}_worst_spectral_fits.csv".format(
        self.super_comp_dir, self.name))
def super_comp_cluster_config(self):
    """Path of this cluster's pypeline configuration file."""
    return io.get_path("{}/{}_pypeline_config.ini".format(
        self.super_comp_dir, self.name))
def spec_log_directory(self):
    """Path of the spectral-fitting log directory under the ACB dir."""
    return io.get_path("{}/log_files/".format(self.acb_dir))
def scale_map_file(self):
    """Path of this cluster's scale map FITS file."""
    return io.get_path("{}/{}_pype_scale_map.fits".format(
        self.acb_dir, self.name))
def acb_dir(self):
    """Path of the adaptive circular binning (acb) directory."""
    return io.get_path("{}/acb/".format(self.directory))
def voronoi_region_file(self):
    """Path of the voronoi region file in the WVT directory."""
    return io.get_path("{}/voronoi.reg".format(self.wvt_directory))
def spec_lis(self, region_number):
    """Path of the spectrum list file for *region_number*."""
    return io.get_path("{}/spec_{}.lis".format(
        self.super_comp_dir, region_number))
def pi_directory(self):
    """Path of the directory holding PI files under the ACB dir."""
    return io.get_path("{}/pi_files/".format(self.acb_dir))
def scale_map_region_file(self):
    """Path of the scale-map region-index FITS file."""
    return io.get_path("{}/{}_scale_map_region_index.fits".format(
        self.acb_dir, self.name))
def super_comp_dir(self):
    """Path of the supercomputer working directory.

    NOTE: previously this was a ``super_computer/`` subdirectory of the ACB
    dir; it now resolves to the ACB directory itself.
    """
    return io.get_path("{}".format(self.acb_dir))
def sn_map(self):
    """Path of this cluster's signal-to-noise map FITS file."""
    return io.get_path("{}/{}_pype_SN_map.fits".format(
        self.acb_dir, self.name))
def command_lis(self):
    """Path of this cluster's command list file."""
    return io.get_path("{}/commands_{}.lis".format(
        self.super_comp_dir, self.name))
def region_list(self):
    """Path of this cluster's bin-3 region list file."""
    return io.get_path("{}/{}_bin3_regionlist.list".format(
        self.acb_dir, self.name))
def backI_clean_obs(self, observation_id):
    """Path of the cleaned ACIS-I background file for *observation_id*."""
    return io.get_path("{}/backI_clean_{}.fits".format(
        self.super_comp_dir, observation_id))
def filtered_region_list(self):
    """Path of this cluster's filtered region list file."""
    return io.get_path("{}/{}_filtered_regions.list".format(
        self.acb_dir, self.name))
def scalemap_regionlist_file_obs(self, observation_id):
    """Path of the physical-coordinate scale-map region list for an obsid."""
    return io.get_path("{}/{}_scalemap_regionlist_phys_{}.reg".format(
        self.super_comp_dir, self.name, observation_id))
def region_to_index(self):
    """Path of the region-to-index mapping FITS file."""
    return io.get_path("{}/{}_region_to_index.fits".format(
        self.acb_dir, self.name))
def directory(self):
    """Path of this observation's directory inside its cluster directory."""
    return io.get_path("{}/{}/".format(self.cluster.directory, self.id))
def sources_file(self):
    """Path of the point-source region file (``sources.reg``)."""
    return io.get_path("{}/sources.reg".format(self.directory))
def pressure_map_filename(self):
    """Path of this cluster's pressure map FITS file."""
    return io.get_path('{}/{}_pressure.fits'.format(
        self.output_dir, self.name))
def exclude_file(self):
    """Path of the exclusion region file (``exclude.reg``)."""
    return io.get_path("{}/exclude.reg".format(self.directory))