def update_cache(self):
    """Rebuild the lal frame cache and reopen the stream.

    Call this when the set of files matching ``self.frame_src`` may
    have changed on disk (for example, new frame files were written),
    so that subsequent reads see the updated file list.
    """
    refreshed = locations_to_cache(self.frame_src)
    self.stream = lalframe.FrStreamCacheOpen(refreshed)
def open_data_source(source):
    """Open a GWF file source into a `lalframe.XLALFrStream` object

    Parameters
    ----------
    source : `str`, `file`, `lal.Cache`, :class:`glue.lal.Cache`
        data source to read

    Returns
    -------
    stream : `lalframe.FrStream`
        an open `FrStream`

    Raises
    ------
    ValueError
        if the input format cannot be identified
    """
    # resolve open file objects and cache entries to a path string
    if isinstance(source, FILE_LIKE):
        source = source.name
    if isinstance(source, GlueCacheEntry):
        source = source.path

    # read cache file
    if (isinstance(source, string_types) and source.endswith(
            ('.lcf', '.cache'))):
        return lalframe.FrStreamCacheOpen(lal.CacheImport(source))

    # read glue cache object
    if isinstance(source, GlueCache):
        cache = lal.Cache()
        for entry in source:
            cache = lal.CacheMerge(
                cache, lal.CacheGlob(*os.path.split(entry.path)))
        return lalframe.FrStreamCacheOpen(cache)

    # read lal cache object
    if isinstance(source, lal.Cache):
        return lalframe.FrStreamCacheOpen(source)

    # read single file
    if isinstance(source, string_types):
        # map(str, ...) guards against unicode path components, which
        # the SWIG bindings reject on python2 (matches the other
        # open_data_source variants in this file's history)
        return lalframe.FrStreamOpen(*map(str, os.path.split(source)))

    raise ValueError("Don't know how to open data source of type %r"
                     % type(source))
def open_data_source(source):
    """Open a GWF file source into a `lalframe.XLALFrStream` object

    Parameters
    ----------
    source : `str`, `file`, `list`
        Data source to read.

    Returns
    -------
    stream : `lalframe.FrStream`
        An open `FrStream`.

    Raises
    ------
    ValueError
        If the input format cannot be identified.
    """
    # normalise file objects and cache entries down to a path string
    if isinstance(source, io_cache.FILE_LIKE):
        source = source.name
    if isinstance(source, CacheEntry):
        source = source.path

    is_str = isinstance(source, string_types)

    # cache file on disk -> import and open as a cache stream
    if is_str and source.endswith(('.lcf', '.cache')):
        return lalframe.FrStreamCacheOpen(lal.CacheImport(source))

    # glue cache object -> merge each entry into a fresh lal.Cache
    if isinstance(source, list) and io_cache.is_cache(source):
        lalcache = lal.Cache()
        for path in io_cache.file_list(source):
            lalcache = lal.CacheMerge(
                lalcache, lal.CacheGlob(*os.path.split(path)))
        return lalframe.FrStreamCacheOpen(lalcache)

    # already a lal cache object
    if isinstance(source, lal.Cache):
        return lalframe.FrStreamCacheOpen(source)

    # plain path to a single file
    if is_str:
        directory, basename = os.path.split(source)
        return lalframe.FrStreamOpen(str(directory), str(basename))

    raise ValueError("Don't know how to open data source of type %r"
                     % type(source))
def open_data_source(source):
    """Open a GWF file source into a `lalframe.XLALFrStream` object

    Parameters
    ----------
    source : `str`, `file`, `list`
        Data source to read.

    Returns
    -------
    stream : `lalframe.FrStream`
        An open `FrStream`.

    Raises
    ------
    ValueError
        If the input format cannot be identified.
    """
    # -- preformatting
    try:
        source = file_path(source)
    except ValueError:
        # not parsable as a single file
        pass

    # import cache from file
    if isinstance(source, str) and source.endswith(('.lcf', '.cache')):
        source = lal.CacheImport(source)

    # reformat cache (or any list of files) as a lal cache object
    if isinstance(source, list) and is_cache(source):
        merged = lal.Cache()
        for path in file_list(source):
            merged = lal.CacheMerge(
                merged, lal.CacheGlob(*os.path.split(path)))
        source = merged

    # -- now we have a lal.Cache or a filename

    # read lal cache object
    if isinstance(source, lal.Cache):
        return lalframe.FrStreamCacheOpen(source)

    # read single file
    if isinstance(source, str):
        head, tail = os.path.split(source)
        return lalframe.FrStreamOpen(str(head), str(tail))

    raise ValueError("Don't know how to open data source of type %r"
                     % type(source))
def update_cache_by_increment(self, blocksize):
    """Update the internal cache by starting from the first frame
    and incrementing.

    Guess the next frame file name by incrementing from the first found
    one. This allows a pattern to be used for the GPS folder of the
    file, which is indicated by `GPSX` where X is the number of digits
    to use.

    Parameters
    ----------
    blocksize: int
        Number of seconds to increment the next frame file.

    Raises
    ------
    RuntimeError
        If an expected frame file does not exist (yet) on disk.
    """
    start = float(self.raw_buffer.end_time)
    end = float(start + blocksize)

    # parse "<obs>-<tag>-<gps>-<dur>" from the first matching frame
    # file once, caching the prefix, reference gps time, and duration
    if not hasattr(self, 'dur'):
        fname = glob.glob(self.frame_src[0])[0]
        fname = os.path.splitext(os.path.basename(fname))[0].split('-')
        self.beg = '-'.join([fname[0], fname[1]])
        self.ref = int(fname[2])
        self.dur = int(fname[3])

    # align the first frame start time to the file-duration grid
    fstart = int(self.ref + numpy.floor((start - self.ref) /
                 float(self.dur)) * self.dur)
    # NOTE: the `numpy.int` alias was removed in numpy >= 1.24;
    # the builtin `int` is the documented replacement
    starts = numpy.arange(fstart, end, self.dur).astype(int)

    keys = []
    for s in starts:
        pattern = self.increment_update_cache
        if 'GPS' in pattern:
            # 'GPSX' substitutes the first X digits of the gps time
            n = int(pattern[int(pattern.index('GPS') + 3)])
            pattern = pattern.replace('GPS%s' % n, str(s)[0:n])
        name = '%s/%s-%s-%s.gwf' % (pattern, self.beg, s, self.dur)
        # check that file actually exists, else abort now
        if not os.path.exists(name):
            # lazy %-style args avoid formatting when INFO is disabled
            logging.info("%s does not seem to exist yet", name)
            raise RuntimeError
        keys.append(name)

    cache = locations_to_cache(keys)
    stream = lalframe.FrStreamCacheOpen(cache)
    self.stream = stream
    self.channel_type, self.raw_sample_rate = \
        self._retrieve_metadata(self.stream, self.channel_name)
def read_frame(location, channels, start_time=None,
               end_time=None, duration=None, check_integrity=True):
    """Read time series from frame data.

    Using the `location`, which can either be a frame file ".gwf" or a
    frame cache ".gwf", read in the data for the given channel(s) and
    output as a TimeSeries or list of TimeSeries.

    Parameters
    ----------
    location : string
        A source of gravitational wave frames. Either a frame filename
        (can include pattern), a list of frame files, or frame cache file.
    channels : string or list of strings
        Either a string that contains the channel name or a list of channel
        name strings.
    start_time : {None, LIGOTimeGPS}, optional
        The gps start time of the time series. Defaults to reading from the
        beginning of the available frame(s).
    end_time : {None, LIGOTimeGPS}, optional
        The gps end time of the time series. Defaults to the end of the
        frame. Note, this argument is incompatible with `duration`.
    duration : {None, float}, optional
        The amount of data to read in seconds. Note, this argument is
        incompatible with `end`.
    check_integrity : {True, bool}, optional
        Test the frame files for internal integrity.

    Returns
    -------
    Frame Data: TimeSeries or list of TimeSeries
        A TimeSeries or a list of TimeSeries, corresponding to the data from
        the frame file/cache for a given channel or channels.

    Raises
    ------
    ValueError
        If both `end_time` and `duration` are given, or if the requested
        duration is zero or negative.
    """
    # explicit None checks: a gps time of 0 or a duration of 0 still
    # counts as "given" for the mutual-exclusion test
    if end_time is not None and duration is not None:
        raise ValueError("end time and duration are mutually exclusive")

    if isinstance(location, list):
        locations = location
    else:
        locations = [location]

    cum_cache = locations_to_cache(locations)
    stream = lalframe.FrStreamCacheOpen(cum_cache)
    stream.mode = lalframe.FR_STREAM_VERBOSE_MODE

    if check_integrity:
        stream.mode = (stream.mode | lalframe.FR_STREAM_CHECKSUM_MODE)

    lalframe.FrSetMode(stream.mode, stream)

    # determine duration of the available data using the first channel
    if isinstance(channels, list):
        first_channel = channels[0]
    else:
        first_channel = channels
    data_length = lalframe.FrStreamGetVectorLength(first_channel, stream)
    channel_type = lalframe.FrStreamGetTimeSeriesType(first_channel, stream)
    create_series_func = _fr_type_map[channel_type][2]
    get_series_metadata_func = _fr_type_map[channel_type][3]
    series = create_series_func(first_channel, stream.epoch, 0, 0,
                                lal.ADCCountUnit, 0)
    get_series_metadata_func(series, stream)
    data_duration = data_length * series.deltaT

    if start_time is None:
        # multiply by 1 to copy the LIGOTimeGPS rather than alias the
        # stream's own epoch object
        start_time = stream.epoch * 1
    if end_time is None:
        end_time = start_time + data_duration

    if not isinstance(start_time, lal.LIGOTimeGPS):
        start_time = lal.LIGOTimeGPS(start_time)
    if not isinstance(end_time, lal.LIGOTimeGPS):
        end_time = lal.LIGOTimeGPS(end_time)

    if duration is None:
        duration = float(end_time - start_time)
    else:
        duration = float(duration)

    # lalframe behaves dangerously with invalid duration so catch it here
    if duration <= 0:
        raise ValueError("Negative or null duration")

    if isinstance(channels, list):
        all_data = []
        for channel in channels:
            channel_data = _read_channel(channel, stream, start_time,
                                         duration)
            # rewind so every channel is read from the same start time
            lalframe.FrStreamSeek(stream, start_time)
            all_data.append(channel_data)
        return all_data
    return _read_channel(channels, stream, start_time, duration)