# Imports assumed by these snippets (glue / pylal era code; the module
# names follow the calls made below).
import os
import re
import socket
import sys

from glue import lal
from glue import pipeline
from glue import segments
from pylal import frutils
from pylal import stfu_pipe


def find_frame_files(cache_dir):
    """
    Find frame cache files for the current time and all reference times.
    """
    cache_files = os.listdir(cache_dir)
    frame_cache_current = None
    frame_cache_refs = []
    for fname in cache_files:
        fullname = os.path.join(cache_dir, fname)
        current_match = re.findall('current', fname)
        ref_match = re.findall(r'reference-(\d+)\.txt', fname)
        if len(current_match) > 0:
            with open(fullname, 'r') as cache:
                cache_entries = [lal.CacheEntry(x.rstrip('\n'))
                                 for x in cache.readlines()]
            frame_cache_current = frutils.FrameCache(cache_entries,
                                                     scratchdir=None,
                                                     verbose=False)
        elif len(ref_match) > 0:
            with open(fullname, 'r') as cache:
                cache_entries = [lal.CacheEntry(x.rstrip('\n'))
                                 for x in cache.readlines()]
            frame_cache_ref = frutils.FrameCache(cache_entries,
                                                 scratchdir=None,
                                                 verbose=False)
            ref_time = int(ref_match[0])
            frame_cache_refs.append((ref_time, frame_cache_ref))
    return frame_cache_current, frame_cache_refs
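# Usage sketch (illustrative, not from the original source): the cache
# directory path below is hypothetical, and is assumed to contain a
# 'current' cache file plus zero or more 'reference-<GPS>.txt' files.
frame_cache_current, frame_cache_refs = find_frame_files('/path/to/cache_dir')
for ref_time, ref_cache in frame_cache_refs:
    print("found reference cache for GPS time %d" % ref_time)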
def exportGPSEventToDisk(tevent, dir, cnt, dag, filename=None):
    """
    Write a one-event coinc_info summary file under dir/coinc_info and
    register it in the DAG's output cache.  Returns the basename of the
    file written.
    """
    # If the coinc_info heading directory is not there, make it; if
    # filename is None, fall back to the default name.
    headingDir = dir + "/coinc_info"
    ifos = tevent.instruments
    instruments = tevent.instruments
    time = tevent.time
    idKeyStr = "%s_%s" % (str(time), instruments)
    if filename is None:
        filename = "coincEvent.info"
    stfu_pipe.mkdir(headingDir)
    filename = os.path.normpath(headingDir + '/' + idKeyStr + '_' + filename)
    fp = open(filename, 'w')
    fp.write("#DIR\t\t\tRANK\tFAR\t\tSNR\tIFOS\tINSTRUMENTS\tTIME\t\tMASS\n")
    fp.write("%-16s\t%d\t%0.2e\t%.2f\t%s\t%s\t\t%.3f\t%.2f\n" %
             (dir, cnt, 0, 0, ifos, instruments, float(time), 0))
    fp.write("#DIR\t\t\tIFO\tTIME\t\tSNR\tCHISQ\tMASS1\tMASS2\n")
    rowString = "%-16s\t%s\t%.3f\t%.2f\t%.2f\t%.2f\t%.2f\n"
    content = list()
    for ifo in tevent.ifos_list:
        content.append(rowString % (dir, ifo, float(time), float(0),
                                    float(0), float(0), float(0)))
    cache = lal.CacheEntry(instruments, "COINC_INFO_" + dir.upper(),
                           segments.segment(float(time), float(time)),
                           "file://localhost/" + os.path.abspath(filename))
    dag.output_cache.append(cache)
    fp.writelines(content)
    fp.close()
    return os.path.split(filename)[1]
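# Usage sketch (illustrative): exportGPSEventToDisk() only reads the
# event's .instruments, .ifos_list and .time attributes, so a hypothetical
# stand-in object is enough to demonstrate the call; the real caller
# passes the pipeline's coinc event and CondorDAG.
from collections import namedtuple
FakeEvent = namedtuple('FakeEvent', 'instruments ifos_list time')  # hypothetical
event = FakeEvent(instruments='H1,L1', ifos_list=['H1', 'L1'], time=873247860.0)
# Writes followup/coinc_info/... and appends a CacheEntry to dag.output_cache:
# info_basename = exportGPSEventToDisk(event, 'followup', 0, dag)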
def cache_parser(cachefile):
    """
    Split a cache file into COINC_INFO entries, keyed by the tag that
    follows 'COINC_INFO_', and a list of the remaining cache entries.
    """
    coinc = {}
    out_cache = []
    with open(cachefile) as f:
        for l in f.readlines():
            if "COINC_INFO" in l:
                c = l.split()
                coinc.setdefault(c[1].replace('COINC_INFO_', ''), []).append(
                    c[4].replace('file://localhost', ''))
            else:
                out_cache.append(lal.CacheEntry(l))
    return coinc, out_cache
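# Usage sketch (illustrative; 'followup.cache' is a hypothetical file name):
# coinc maps each tag after 'COINC_INFO_' to the local paths of its info
# files, and out_cache collects every non-COINC_INFO line as a CacheEntry.
coinc, out_cache = cache_parser('followup.cache')
for tag, paths in coinc.items():
    print("%s: %d coinc info file(s)" % (tag, len(paths)))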
def AddFileToCache(fname, cache):
    """
    Add the given file to the lal.Cache

    @param fname: file name of the form IFO-DESCRIPTION-START-DURATION.ext
    @param cache: lal.Cache (or list) to which the new CacheEntry is appended
    """
    file_name = fname.split('.')[0].split('-')
    cache.append(lal.CacheEntry(
        file_name[0], file_name[1],
        segments.segment(int(file_name[2]),
                         int(file_name[2]) + int(file_name[3])),
        'file://' + socket.gethostbyaddr(socket.gethostname())[0] +
        os.getcwd() + '/' + fname))
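# Usage sketch (illustrative): AddFileToCache() parses a file name of the
# form IFO-DESCRIPTION-START-DURATION.ext; the name below is hypothetical.
cache = []
AddFileToCache('H1-TMPLTBANK-873247860-2048.xml', cache)
print(cache[0].observatory, cache[0].description, cache[0].segment)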
def fromlalcache(cachefile, coltype=int):
    """
    Construct a segmentlist representing the times spanned by the files
    identified in the LAL cache contained in the file object cachefile.
    The segmentlist will be created with segments whose boundaries are
    of type coltype, which should raise ValueError if it cannot convert
    its string argument.

    Example:

    >>> from glue.lal import LIGOTimeGPS
    >>> cache_seglists = fromlalcache(open(filename), coltype = LIGOTimeGPS).coalesce()

    See also:

    glue.lal.CacheEntry
    """
    return segments.segmentlist(
        lal.CacheEntry(l, coltype=coltype).segment for l in cachefile)
path_count = 0
seglists = segments.segmentlistdict()

#
# Filter input one line at a time
#

for line in src:
    path, filename = os.path.split(line.strip())
    url = "file://localhost%s" % os.path.abspath(os.path.join(path, filename))
    try:
        cache_entry = lal.CacheEntry.from_T050017(url)
    except ValueError as e:
        if options.include_all:
            cache_entry = lal.CacheEntry(None, None, None, url)
        elif options.force:
            continue
        else:
            raise e
    print(str(cache_entry), file=dst)
    path_count += 1
    if cache_entry.segment is not None:
        seglists |= cache_entry.segmentlistdict.coalesce()

#
# Summary
#

if options.verbose:
    print("Size of cache: %d URLs" % path_count, file=sys.stderr)
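# Worked example (illustrative) of the T050017 parse performed above; the
# frame-file URL is hypothetical.
entry = lal.CacheEntry.from_T050017(
    "file://localhost/data/H1-CALIB_STRAIN-873247860-16.gwf")
# entry.observatory == 'H1', entry.description == 'CALIB_STRAIN',
# entry.segment == segments.segment(873247860, 873247876)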
##############################################################################
# create the DAG, writing the log to the specified directory
dag = pipeline.CondorDAG(logfile)
dag.set_dag_file(basename)

##############################################################################
# Open the ihope cache and create the THINCA cache
print("Parsing the ihope cache...")
coinc_tag = cp.get('pipeline', 'coinc-file-tag')
ihope_cache = [line for line in open(options.ihope_cache)
               if coinc_tag in line or " INJECTIONS" in line
               or " PREGEN_INJFILE" in line]
thinca_cache = lal.Cache([lal.CacheEntry(entry) for entry in ihope_cache
                          if coinc_tag in entry])
inj_cache = lal.Cache([lal.CacheEntry(entry) for entry in ihope_cache
                       if " INJECTIONS" in entry
                       or " PREGEN_INJFILE" in entry])
del ihope_cache

# get the USERTAGS from the thinca_cache
# for single stage runs with ssipe, the thinca's output is of the form
# IFOs-THINCA_UserTag-StartTime-Duration.xml.gz
# where UserTag = TiSiNum_RunName_CAT_X_VETO
skip_tags = 2
user_tags = set('_'.join(entry.description.split('_')[skip_tags:])
                for entry in thinca_cache)
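# Worked example (illustrative) of the user-tag extraction above: for a
# hypothetical THINCA description, skipping the first skip_tags = 2
# underscore-delimited fields leaves the run name and veto category.
desc = "THINCA_SECOND_FULL_DATA_CAT_3_VETO"  # hypothetical description
print('_'.join(desc.split('_')[2:]))         # -> FULL_DATA_CAT_3_VETO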