def find_cache_segments(*caches):
    """Return the segments covered by one or more data caches

    Parameters
    ----------
    *caches : `~glue.lal.Cache`
        one or more file caches

    Returns
    -------
    segments : `~gwpy.segments.SegmentList`
        list of segments contained in cache
    """
    out = SegmentList()
    # short-circuit when every cache is empty
    nframes = sum(len(c) for c in caches)
    if nframes == 0:
        return out
    for cache in caches:
        # build segment for this cache
        if not len(cache):
            continue
        seg = cache[0].segment
        for e in cache:
            # if new segment doesn't overlap, append and start again
            if e.segment.disjoint(seg):
                out.append(seg)
                seg = e.segment
            # otherwise, append to current segment
            else:
                seg |= e.segment
        # append the final segment of THIS cache (the original appended
        # only once, after the loop, dropping the closing segment of all
        # but the last cache)
        out.append(seg)
    return out
def find_cache_segments(*caches):
    """Construct a :class:`~gwpy.segments.segments.SegmentList` describing
    the validity of a given :class:`~glue.lal.Cache`, or list of them.

    Parameters
    ----------
    *caches : :class:`~glue.lal.Cache`
        one or more caches of frame files to check

    Returns
    -------
    segments : :class:`~gwpy.segments.segments.SegmentList`
        list of segments contained in cache
    """
    out = SegmentList()
    # short-circuit when every cache is empty
    nframes = sum(len(c) for c in caches)
    if nframes == 0:
        return out
    for cache in caches:
        # build segment for this cache
        if not len(cache):
            continue
        seg = cache[0].segment
        for e in cache:
            # if new segment doesn't overlap, append and start again
            if e.segment.disjoint(seg):
                out.append(seg)
                seg = e.segment
            # otherwise, append to current segment
            else:
                seg |= e.segment
        # append the final segment of THIS cache (the original appended
        # only once, after the loop, dropping the closing segment of all
        # but the last cache)
        out.append(seg)
    return out
def fetch(self, config=GWSummConfigParser(), segdb_error='raise',
          datafind_error='raise', **kwargs):
    """Finalise this state by fetching its defining segments,
    either from global memory, or from the segment database

    Parameters
    ----------
    config : `GWSummConfigParser`, optional
        configuration providing default 'gps-start-time'/'gps-end-time'
    segdb_error : `str`, optional
        how to handle segment-database errors
    datafind_error : `str`, optional
        how to handle datafind errors
    **kwargs
        passed through to the underlying ``_read_segments``,
        ``_fetch_data``, or ``_fetch_segments`` helpers

    Returns
    -------
    self : this state, with ``known``/``active`` segments populated
    """
    # check we haven't done this before
    if self.ready:
        return self
    # look for a math operation in the definition, e.g. 'X1:CHANNEL >= 2'
    if self.definition:
        match = re.search('(%s)' % '|'.join(MATHOPS.keys()),
                          self.definition)
    else:
        match = None
    if self.filename:
        self._read_segments(self.filename)
    elif match:
        # split 'channel <op> threshold' into its parts
        channel, thresh = self.definition.split(match.groups()[0])
        channel = channel.rstrip()
        thresh = float(thresh.strip())
        self._fetch_data(channel, thresh, match.groups()[0], config=config,
                         datafind_error=datafind_error, **kwargs)
    # fetch segments
    elif self.definition:
        self._fetch_segments(config=config, segdb_error=segdb_error,
                             **kwargs)
    # fetch null
    else:
        start = config.getfloat(DEFAULTSECT, 'gps-start-time')
        end = config.getfloat(DEFAULTSECT, 'gps-end-time')
        self.known = [(start, end)]
        self.active = self.known
    # restrict to given hours
    if self.hours:
        segs_ = SegmentList()
        # get start day, snapped to midnight UTC
        d = Time(float(self.start), format='gps', scale='utc').datetime
        # BUGFIX: datetime.replace returns a new object; the original
        # discarded the result, so the day was never reset to midnight
        d = d.replace(hour=0, minute=0, second=0, microsecond=0)
        end_ = Time(float(self.end), format='gps', scale='utc').datetime
        while d < end_:
            # get GPS of day
            t = to_gps(d)
            # for each [start, end) hour pair, build a segment
            for h0, h1 in self.hours:
                segs_.append(Segment(t + h0 * 3600, t + h1 * 3600))
            # increment and return
            d += datetime.timedelta(1)
        self.known &= segs_
        self.active &= segs_
    # FIXME
    self.ready = True
    return self
def segments_from_array(array):
    """Convert a 2-dimensional `numpy.ndarray` to a `SegmentList`

    Each row ``(start, end)`` of ``array`` becomes one `Segment`.
    """
    return SegmentList(Segment(*row) for row in array)
def segmentlist_from_tree(tree, coalesce=False):
    """Read a `~ligo.segments.segmentlist` from a 'segments' `ROOT.Tree`

    One `Segment` is built per tree entry from its ``start``/``end``
    branches.  NOTE(review): ``coalesce`` is currently unused.
    """
    out = SegmentList()
    nentries = tree.GetEntries()
    for idx in range(nentries):
        # load entry ``idx`` into the tree's branch buffers
        tree.GetEntry(idx)
        out.append(Segment(tree.start, tree.end))
    return out
def make_cache():
    """Build a three-entry `Cache` of T050017-named files and the
    matching `SegmentList` of their spans.
    """
    segments = SegmentList()
    cache = Cache()
    for start, end in [(0, 1), (1, 2), (4, 5)]:
        # T050017 name: <obs>-<tag>-<gps-start>-<duration>.<ext>
        name = 'A-B-%d-%d.tmp' % (start, end - start)
        cache.append(CacheEntry.from_T050017(name))
        segments.append(Segment(start, end))
    return cache, segments
def make_cache():
    """Build a three-entry `Cache` of real temporary files and the
    matching `SegmentList` of their spans.

    Returns
    -------
    cache, segs : (`Cache`, `SegmentList`)
        the cache of temporary-file entries and their segments
    """
    import os
    segs = SegmentList()
    cache = Cache()
    for seg in [(0, 1), (1, 2), (4, 5)]:
        d = seg[1] - seg[0]
        fd, f = tempfile.mkstemp(prefix='A-',
                                 suffix='-%d-%d.tmp' % (seg[0], d))
        # BUGFIX: mkstemp returns an OPEN file descriptor; the original
        # discarded it (`_, f = ...`), leaking one fd per entry
        os.close(fd)
        cache.append(CacheEntry.from_T050017(f))
        segs.append(Segment(*seg))
    return cache, segs
def diff(seglist, nodata):
    """Return a `SegmentList` of the segments in ``seglist`` that are
    not equal to any segment in ``nodata``.

    Comparison is by segment equality (``!=``), not interval
    containment, so an explicit loop is used rather than ``in``.
    """
    kept = SegmentList()
    for seg in seglist:
        if all(seg != skip for skip in nodata):
            kept.append(seg)
    return kept
def fetch(self, config=GWSummConfigParser(), segdb_error='raise',
          datafind_error='raise', **kwargs):
    """Finalise this state by fetching its defining segments,
    either from global memory, or from the segment database

    Parameters
    ----------
    config : `GWSummConfigParser`, optional
        configuration providing default 'gps-start-time'/'gps-end-time'
    segdb_error : `str`, optional
        how to handle segment-database errors
    datafind_error : `str`, optional
        how to handle datafind errors
    **kwargs
        passed through to the underlying ``_read_segments``,
        ``_fetch_data``, or ``_fetch_segments`` helpers

    Returns
    -------
    self : this state, with ``known``/``active`` segments populated
    """
    # check we haven't done this before
    if self.ready:
        return self
    # look for a math operation in the definition, e.g. 'X1:CHANNEL >= 2'
    if self.definition:
        match = re.search('(%s)' % '|'.join(MATHOPS.keys()),
                          self.definition)
    else:
        match = None
    if self.filename:
        self._read_segments(self.filename)
    elif match:
        # split 'channel <op> threshold' into its parts
        channel, thresh = self.definition.split(match.groups()[0])
        channel = channel.rstrip()
        thresh = float(thresh.strip())
        self._fetch_data(channel, thresh, match.groups()[0], config=config,
                         datafind_error=datafind_error, **kwargs)
    # fetch segments
    elif self.definition:
        self._fetch_segments(config=config, segdb_error=segdb_error,
                             **kwargs)
    # fetch null
    else:
        start = config.getfloat(DEFAULTSECT, 'gps-start-time')
        end = config.getfloat(DEFAULTSECT, 'gps-end-time')
        self.known = [(start, end)]
        self.active = self.known
    # restrict to given hours
    if self.hours:
        segs_ = SegmentList()
        # get start day, snapped to midnight UTC
        d = Time(float(self.start), format='gps', scale='utc').datetime
        # BUGFIX: datetime.replace returns a new object; the original
        # discarded the result, so the day was never reset to midnight
        d = d.replace(hour=0, minute=0, second=0, microsecond=0)
        end_ = Time(float(self.end), format='gps', scale='utc').datetime
        while d < end_:
            # get GPS of day
            t = to_gps(d)
            # for each [start, end) hour pair, build a segment
            for h0, h1 in self.hours:
                segs_.append(Segment(t + h0 * 3600, t + h1 * 3600))
            # increment and return
            d += datetime.timedelta(1)
        self.known &= segs_
        self.active &= segs_
    # FIXME
    self.ready = True
    return self
def make_cache():
    """Build a three-entry `Cache` and the matching `SegmentList`,
    skipping the calling test when `lal` is unavailable.
    """
    try:
        from lal.utils import CacheEntry
    except ImportError as exc:
        pytest.skip(str(exc))
    segments = SegmentList()
    cache = Cache()
    for start, end in [(0, 1), (1, 2), (4, 5)]:
        # T050017 name: <obs>-<tag>-<gps-start>-<duration>.<ext>
        name = 'A-B-%d-%d.tmp' % (start, end - start)
        cache.append(CacheEntry.from_T050017(name, coltype=int))
        segments.append(Segment(start, end))
    return cache, segments
def cache_overlaps(*caches):
    """Find segments of overlap in the given cache sets

    Entries from all caches are walked in order of start time; each
    entry's span is intersected with the spans seen so far, and any
    non-empty intersection is recorded as overlap.
    """
    entries = sorted((entry for cache in caches for entry in cache),
                     key=lambda entry: file_segment(entry)[0])
    overlap = SegmentList()
    seen = SegmentList()
    for entry in entries:
        span = file_segment(entry)
        common = SegmentList([span]) & seen
        if abs(common):
            overlap.extend(common)
        seen.append(span)
    return overlap
def make_cache():
    """Build a three-entry `list` of cache entries and the matching
    `SegmentList`, skipping the calling test when `lal` is unavailable.
    """
    try:
        from lal.utils import CacheEntry
    except ImportError as exc:
        pytest.skip(str(exc))
    segments = SegmentList()
    cache = []
    for start, end in [(0, 1), (1, 2), (4, 5)]:
        # T050017 name: <obs>-<tag>-<gps-start>-<duration>.<ext>
        name = 'A-B-%d-%d.tmp' % (start, end - start)
        cache.append(CacheEntry.from_T050017(name, coltype=int))
        segments.append(Segment(start, end))
    return cache, segments
def diff(segmentlist, nodata):
    '''Return a `SegmentList` of the segments in ``segmentlist`` that
    are not equal to any segment in ``nodata``.

    Comparison is by segment equality (``!=``), not interval
    containment, so an explicit loop is used rather than ``in``.
    '''
    from gwpy.segments import SegmentList
    kept = SegmentList()
    for seg in segmentlist:
        if all(seg != skip for skip in nodata):
            kept.append(seg)
    return kept
def combine_coherence(darm_channel, subsystem, st, et, directory, jobdur,
                      verbose=True):
    """
    combine coherence between two times

    Parameters
    ----------
    darm_channel : `str`
        differential arm channel
    subsystem : `str`
        subsystem to combine data for
    st : `int`
        start time for combining data
    et : `int`
        end time for combining data
    directory : `str`
        base directory holding the per-job coherence files
    jobdur : `int`
        duration of analyzed data saved in files
    verbose : `bool`, optional
        if `True`, print a report of any files that could not be loaded

    Returns
    -------
    subsys : `stamp_pem.coherence_segment.PEMCoherenceSubsystem`
        combined data for subsystem, or `None` if no file could be read
    """
    # build the list of [start, end) job segments covering [st, et)
    segs = SegmentList()
    info_str = ""
    ii = 0
    while st + (ii + 1) * jobdur <= et:
        # use the ``jobdur`` argument consistently (the original mixed
        # in ``params.jobdur`` here while testing ``jobdur`` above)
        segs.append(Segment(st + ii * jobdur, st + (ii + 1) * jobdur))
        # BUGFIX: the original wrote ``ii + 1``, discarding the result
        # and looping forever
        ii += 1
    subsys = None
    for seg_st, seg_et in segs:
        cohdir = coh_io.get_directory_structure(subsystem, seg_st,
                                                directory=directory)
        cohfile = coh_io.create_coherence_data_filename(
            darm_channel, subsystem, seg_st, seg_et, directory=cohdir)
        try:
            temp = PEMSubsystem.read(subsystem, cohfile)
        except IOError:
            info_str += ("Couldn't load %s\n" % cohfile)
            continue
        # first successful read seeds the result; later ones merge in
        if subsys is None:
            subsys = temp
        else:
            subsys.update(temp)
    if verbose:
        print(info_str)
    return subsys
def check_baddata(segmentlist, prefix='./data', write=True, plot=True,
                  **kwargs):
    '''Classify each segment as usable, data-lacking, or glitchy.

    Each segment is checked with ``_check_baddata``; its status code
    sorts it into ``eq`` (status >= 16, treated as glitch/earthquake),
    ``bad`` (non-zero status < 16, lack of data), or neither (usable).
    Segments matching ``bad`` or ``eq`` are then filtered out of the
    input list.

    Parameters
    ----------
    segmentlist : segment list to check
    prefix : base directory used to build per-segment image filenames
    write : if `True`, write the three resulting lists under
        ``./segmentlist/``
    plot : currently unused (the plotting call is commented out)
    **kwargs : passed through to ``_check_baddata``

    Returns
    -------
    new, bad, eq : the surviving segments, the lack-of-data segments,
        and the glitch segments
    '''
    log.debug('Checking bad segments')
    # NOTE(review): ``checked``/``not_checked`` are computed but never
    # used below — presumably leftovers from an earlier caching scheme
    exists = iofunc.existance(segmentlist, ftype='png_ts')
    checked = [segmentlist[i] for i, exist in enumerate(exists) if exist]
    not_checked = [
        segmentlist[i] for i, exist in enumerate(exists) if not exist
    ]
    from gwpy.segments import SegmentList
    bad = SegmentList()
    eq = SegmentList()
    for i, segment in enumerate(segmentlist):
        data, bad_status = _check_baddata(segment, **kwargs)
        # status >= 16 -> glitch/earthquake bucket
        if bad_status - 16 >= 0:
            eq.append(segment)
        # any other non-zero status -> lack-of-data bucket
        elif bad_status and not (bad_status - 16 >= 0):
            bad.append(segment)
        # zero status -> segment is fine
        elif not bad_status:
            pass
        else:
            # unreachable for integer statuses; kept as a guard
            log.debug(bad_status)
            log.debug('!')
            raise ValueError('!')
        start, end = segment
        fname_img = iofunc.fname_png_ts(start, end, prefix)
        log.debug('{0:03d}/{1:03d} {2} {3}'.format(i, len(segmentlist),
                                                   fname_img, bad_status))
        # NOTE(review): ``data`` and ``chname`` are unused while the
        # plotting block below stays commented out
        chname = get_seis_chname(start, end)
        #if plot and not os.path.exists(fname_img):
        #    plot_timeseries(data,start,end,bad_status,fname_img)
        #
    # drop every segment equal to one in ``bad``
    new = SegmentList()
    for segment in segmentlist:
        flag = 0
        for _bad in bad:
            if segment != _bad:
                flag += 1
            else:
                break
        # segment matched none of the bad segments -> keep it
        if flag == len(bad):
            new.append(segment)
    segmentlist = new
    # then drop every remaining segment equal to one in ``eq``
    new = SegmentList()
    for segment in segmentlist:
        flag = 0
        for _eq in eq:
            if segment != _eq:
                flag += 1
            else:
                break
        if flag == len(eq):
            new.append(segment)
    if write:
        new.write('./segmentlist/available.txt')
        bad.write('./segmentlist/lackofdata.txt')
        eq.write('./segmentlist/glitch.txt')
    return new, bad, eq
if len(day) < 2: day = "0" + day #=============Get locked segments============= locked = DataQualityFlag.read("/home/detchar/Segments/K1-DET_FOR_GRB200415A/" + year + "/K1-DET_FOR_GRB200415A_UTC_" + year + "-" + month + "-" + day + ".xml") # Remove segments shorter than 94 sec act = SegmentList() for seg in locked.active: duration = seg[1] - seg[0] if duration >= 94: act.append(seg) # Remove last 30 sec and margin 2 sec act = act.contract(17) act = act.shift(-15) locked.active = act #=============Get omicron succeeded segments============= omicron = DataQualityFlag(known=locked.known) gpsstart = locked.known[0][0] gpsend = locked.known[0][1] #gpsstart = 1270944018 #gpsend = 1271030418 #omicron = DataQualityFlag(name="Omicron",known = [(gpsstart,gpsend)])