def _filter_simulated_ft1(original_ft1, simulated_ft1, ra, dec, radius,
                          tmin, tmax, emin, emax, outfile):
    """
    Filter a simulated FT1 file in one single ftcopy command, accomplishing
    what gtselect and gtmktime would do but much faster. This is possible
    because we are applying GTIs that are already known (read from the
    original FT1 file).

    :param original_ft1: original FT1 file (source of the GTIs and of the
           DS* data-subspace keywords)
    :param simulated_ft1: simulated FT1 file to be filtered
    :param ra: R.A. of the center of the ROI (deg)
    :param dec: Dec. of the center of the ROI (deg)
    :param radius: radius of the ROI (deg)
    :param tmin: minimum time of the cut (MET)
    :param tmax: maximum time of the cut (MET)
    :param emin: minimum energy of the cut
    :param emax: maximum energy of the cut
    :param outfile: name of the filtered output file
    :return: None
    """

    # Build the row filter: GTI cut taken from the original file, cone cut
    # around (ra, dec), time cut and energy cut (the same selections that
    # gtmktime + gtselect would apply)
    my_filter = 'gtifilter("%s") && ANGSEP(RA, DEC, %s, %s) <= %s ' \
                '&& TIME>=%s && TIME<=%s && ENERGY >=%s && ENERGY <=%s' % (
                    original_ft1, ra, dec, radius, tmin, tmax, emin, emax)

    cmd_line = "ftcopy '%s[EVENTS][%s]' %s copyall=yes clobber=yes history=yes" % (
        simulated_ft1, my_filter, outfile)

    subprocess.check_call(cmd_line, shell=True)

    # Now update the DS keywords from the original file, so that downstream
    # software will understand the file even though we didn't use gtselect
    # nor gtmktime
    with pyfits.open(sanitize_filename(original_ft1)) as orig:

        with pyfits.open(outfile, mode='update') as new:

            # Copy the data-subspace keywords (DS*) and their counter
            # (NDSKEYS) from the original file.
            # NOTE: reuse the already-bound orig_header instead of looking
            # the EVENTS header up a second time, and use startswith()
            # instead of find() == 0
            orig_header = orig['EVENTS'].header

            relevant_keywords = [key for key in orig_header.keys()
                                 if key.startswith("DS") or key == "NDSKEYS"]

            for keyword in relevant_keywords:

                new['EVENTS'].header[keyword] = orig_header[keyword]
def gtselect(ra_center, dec_center, radius, tmin, tmax, emin, emax,
             simulated_ft1, output_ft1):
    """
    Run the gtselect Science Tool on a simulated FT1 file, applying the
    cone, time and energy cuts.

    NOTE: we assume there is no need for a Zenith cut, because the Zenith
    cut has been made with gtmktime.
    """

    # Collect every selection parameter in one place, then hand them all to
    # the tool in a single run() call
    selection = {
        'infile': sanitize_filename(simulated_ft1),
        'outfile': output_ft1,
        'ra': ra_center,
        'dec': dec_center,
        'rad': radius,
        'tmin': tmin,
        'tmax': tmax,
        'emin': emin,
        'emax': emax,
        # Zenith cut must be applied with gtmktime, so keep the full range here
        'zmin': 0,
        'zmax': 180,
        # Assume simulation has been made with the same evclass of the input file
        'evclass': "INDEF",
        'evtype': 'INDEF',
    }

    tool = GtApp('gtselect')

    tool.run(print_command=False, **selection)
def gtmktime_from_file(original_ft1, original_ft2, simulated_ft1, output_ft1):
    """
    Graft the GTI extension of the original FT1 file onto a simulated FT1
    file, then run gtmktime so the events are selected on those GTIs.

    :param original_ft1: FT1 file providing the GTI extension
    :param original_ft2: spacecraft (FT2) file
    :param simulated_ft1: simulated FT1 file (updated in place with the GTIs)
    :param output_ft1: name of the output file produced by gtmktime
    :return:
    """

    # Add the GTI extension to the data file
    with pyfits.open(sanitize_filename(original_ft1)) as original:

        with pyfits.open(simulated_ft1, mode='update') as simulated:

            # Copy the GTIs from the original file
            simulated['GTI'] = original['GTI']

            # Re-write header info (inaccuracy due to pil conversion float to str)
            for sim_hdu, orig_hdu in zip(simulated, original):

                sim_hdu.header['TSTART'] = orig_hdu.header['TSTART']
                sim_hdu.header['TSTOP'] = orig_hdu.header['TSTOP']

    # Now run gtmktime which will update the headers and select the events
    # based on the GTIs
    gtmktime = GtApp('gtmktime')

    gtmktime.run(print_command=False,
                 evfile=simulated_ft1,
                 outfile=output_ft1,
                 scfile=original_ft2,
                 # This filter is always true, which means we are not adding
                 # any new filter
                 filter='T',
                 roicut='no',
                 # This will make gtmktime cut on the GTIs
                 apply_filter='yes')
description='Find Good Time Intervals with gtmktime') parser.add_argument("--config", help="Configuration file", type=str, required=True) parser.add_argument("--gtifile", help="Name of output GTI file (text format)", type=str, required=True) args = parser.parse_args() # Read configuration file config = ConfigParser.SafeConfigParser() config.read([sanitize_filename(args.config)]) ft1 = sanitize_filename(config.get("data", "ft1")) ft2 = sanitize_filename(config.get("data", "ft2")) # Read from the original FT1 the ROI definition ra_center, dec_center, radius = find_ROI_cut(ft1) log.info( "Found ROI in file. Center: (R.A., Dec) = (%s, %s), radius = %s deg" % (ra_center, dec_center, radius)) assert ra_center == float(config.get("cuts", "ra")) assert dec_center == float(config.get("cuts", "dec")) assert radius == float(config.get("cuts", "radius"))
required=True) parser.add_argument("--met_stop", help='MET of the stop of the new FT2 file', type=float, required=True) parser.add_argument("--outfile", help='Output FT2 file', type=str, required=True) args = parser.parse_args() # Read the FT2 file and find the time interval where the pointing was as close as possible to the # desired one all_mission_ft2 = sanitize_filename(args.all_mission_ft2) with pyfits.open(all_mission_ft2) as f: # Is this a 30s FT2 file or a 1s FT2 file? dt_ = f['SC_DATA'].data.field("STOP") - f['SC_DATA'].data.field( "START") if np.average(dt_) >= 10.0: # 30 s FT2 file dt = 30.0 else: dt = 1.0
if __name__ == "__main__":

    # Command-line interface: run one likelihood analysis on the given time
    # intervals, with cuts and likelihood settings from a configuration file
    parser = argparse.ArgumentParser(description='one_likelihood')

    parser.add_argument("--config",
                        help="Configuration file",
                        type=str,
                        required=True)

    parser.add_argument("--outputdir",
                        help="Directory where to store the output results (must exist already)",
                        required=True,
                        type=str)

    parser.add_argument("--tstarts",
                        help="Start time of intervals to analyze",
                        type=str,
                        required=True)

    parser.add_argument("--tstops",
                        help="Stop time of intervals to analyze",
                        type=str,
                        required=True)

    args = parser.parse_args()

    # Parse the configuration file (Python 2 ConfigParser)
    config = ConfigParser.SafeConfigParser()
    config.read([sanitize_filename(args.config)])

    # Event-selection cuts from the [cuts] section
    ra = float(config.get("cuts", "ra"))
    dec = float(config.get("cuts", "dec"))
    roi = float(config.get("cuts", "radius"))
    zmax = float(config.get("cuts", "zmax"))
    thetamax = float(config.get("cuts", "thetamax"))
    emin = float(config.get("cuts", "emin"))
    emax = float(config.get("cuts", "emax"))
    irf = config.get("cuts", "irf")

    # Likelihood settings from the [likelihood] section
    galactic_model = config.get("likelihood", "galactic_model")
    particle_model = config.get("likelihood", "particle_model")
    tsmin = float(config.get("likelihood", "tsmin"))
    strategy = config.get("likelihood", "strategy")
def __init__(self, original_ft1, original_ft2,
             path_of_tar_file_with_simulated_ft1_files,
             workdir="simulated_ft1s"):
    """
    Unpack the tar archive of simulated FT1 files and filter each of them
    with the same cuts (ROI cone, energy range, time span and GTIs) read
    from the original FT1 file. The filtered file names are collected in
    self._all_ft1s.

    :param original_ft1: original (real data) FT1 file, source of the cuts
    :param original_ft2: original spacecraft (FT2) file (needed only if we
           want to switch back to gtmktime)
    :param path_of_tar_file_with_simulated_ft1_files: tar archive containing
           the simulated FT1 files (gll_ft1_tr_bn*_v00.fit)
    :param workdir: directory where the archive is unpacked and the filtered
           files are produced
    """

    # Make absolute path and resolve env. variables (if any)
    original_ft1 = sanitize_filename(original_ft1)
    # This is needed only if we want to switch back to gtmktime
    original_ft2 = sanitize_filename(
        original_ft2
    )
    path_of_tar_file_with_simulated_ft1_files = sanitize_filename(
        path_of_tar_file_with_simulated_ft1_files)

    # Read from the original FT1 the cuts
    roi_cuts = pyLike.RoiCuts()
    roi_cuts.readCuts(original_ft1)

    # ROI definition
    ra_center, dec_center, radius = roi_cuts.roiCone()

    # Store them as well
    self._ra_center = ra_center
    self._dec_center = dec_center
    self._radius = radius

    # Energy minimum and maximum
    emin, emax = roi_cuts.getEnergyCuts()

    # Time span of the original data, used below as the time cut
    with pyfits.open(original_ft1) as f:

        tstart = f['EVENTS'].header['TSTART']
        tstop = f['EVENTS'].header['TSTOP']

    # Unpack tar file here
    with within_directory(workdir, create=True):

        # Copy tar here, unpack, then remove copy
        log.info("Copying %s to %s..."
                 % (path_of_tar_file_with_simulated_ft1_files, workdir))

        shutil.copy2(path_of_tar_file_with_simulated_ft1_files, ".")

        execute_command(
            log, "tar xvf %s" % path_of_tar_file_with_simulated_ft1_files)

        os.remove(
            os.path.basename(path_of_tar_file_with_simulated_ft1_files))

        # Now get the names of all ft1s
        all_ft1s_raw = glob.glob("gll_ft1_tr_bn*_v00.fit")

        log.info(
            "Found %s simulated FT1 files in archive %s"
            % (len(all_ft1s_raw), path_of_tar_file_with_simulated_ft1_files))

        log.info("Filtering them with the same cuts as in %s"
                 % (original_ft1))

        self._all_ft1s = []

        # Apply the cuts to them
        for i, this_simulated_ft1 in enumerate(all_ft1s_raw):

            # Progress report every 100 files
            if (i + 1) % 100 == 0:

                log.info("Processed %i of %i" % (i + 1, len(all_ft1s_raw)))

            # Old two-step (gtmktime + gtselect) procedure, kept for
            # reference; superseded by the single ftcopy pass below
            # temp_file1 = "__temp_ft1.fit"
            #
            # self.gtmktime_from_file(original_ft1, original_ft2, this_simulated_ft1, temp_file1)
            #
            # temp_file2 = "__temp_ft1_2.fit"
            #
            # self.gtselect(ra_center, dec_center, radius, tstart, tstop, emin, emax, temp_file1, temp_file2)
            #
            # os.remove(temp_file1)
            #

            basename = os.path.splitext(
                os.path.basename(this_simulated_ft1))[0]

            new_name = "%s_filt.fit" % basename

            # NOTE(review): _filter_simulated_ft1 is called with 10 explicit
            # arguments and its def takes 10 parameters with no self --
            # presumably it is decorated as a staticmethod; verify
            self._filter_simulated_ft1(original_ft1, this_simulated_ft1,
                                       ra_center, dec_center, radius, tstart,
                                       tstop, emin, emax, new_name)

            # os.rename(temp_file2, new_name)

            self._all_ft1s.append(sanitize_filename(new_name))

            # Remove the simulated FT1 to save space
            os.remove(this_simulated_ft1)
if __name__ == "__main__":

    # Command-line interface: build the bash script(s) that will be
    # submitted to the batch farm
    parser = argparse.ArgumentParser(description='Create scripts to be sumbitted to the farm')

    parser.add_argument("--config",
                        help="Configuration file",
                        type=str,
                        required=True)

    parser.add_argument("--gtifile",
                        help="Name of output GTI file (text format)",
                        type=str,
                        required=True)

    parser.add_argument("--scriptfile",
                        help="Output script file (a bash script)",
                        type=str,
                        required=True)

    # parser.add_argument("--package", help="Path to the data package", type=str, required=True)

    parser.add_argument("--outputdir",
                        help="Directory where to store the output results",
                        required=True,
                        type=str)

    parser.add_argument("--n_per_job",
                        help="How many intervals per job",
                        type=int,
                        required=True)

    args = parser.parse_args()

    config_path = sanitize_filename(args.config)

    # NOTE(review): assert is stripped when running under "python -O"; an
    # explicit check raising IOError would be more robust -- confirm before
    # changing
    assert os.path.exists(config_path)

    # package_path = sanitize_filename(args.package)
    #
    # assert os.path.exists(package_path)

    outputdir = sanitize_filename(args.outputdir)

    # Create the output directory if it does not exist yet
    if not os.path.exists(outputdir):

        os.makedirs(outputdir)

    # Parse the configuration file (Python 2 ConfigParser)
    config = ConfigParser.SafeConfigParser()
    config.read([config_path])
required=False, default='GRB', type=str, help="Name of target source") parser.add_argument( "--outfile", required=True, type=str, help= "Name for the output file which will contain the numpy array of the TS values " "measured on the simulations") args = parser.parse_args() ft1 = sanitize_filename(args.filtered_ft1) ft2 = sanitize_filename(args.ft2) expmap = sanitize_filename(args.expmap) ltcube = sanitize_filename(args.ltcube) xml_file = sanitize_filename(args.xmlfile) path_of_tar_file_with_simulated_ft1_files = sanitize_filename(args.tar) # This will process the simulations and compute the TSs sf = SimulationFeeder(ft1, ft2, expmap, ltcube, xml_file, path_of_tar_file_with_simulated_ft1_files, args.tsmap_spec,