Example #1
import numpy as np
from pycbc.types import TimeSeries
# amplitude_from_polarizations / phase_from_polarizations live in
# pycbc.waveform.utils; trim_leading_zeros / trim_trailing_zeros are assumed
# to be helpers defined elsewhere in the same module as this function.
from pycbc.waveform.utils import (amplitude_from_polarizations,
                                  phase_from_polarizations)


def shift_waveform_phase_time(hp,
                              hc,
                              t_shift,
                              ph_shift,
                              trim_leading=False,
                              trim_trailing=True,
                              verbose=False):
    """
    Input:  hp, hc, where h = hp(t) + i hc(t) = Amp(t) * exp(-i * Phi(t))
    Output: hp, hc, where h = Amp(t - t_shift) * exp(-i * [Phi(t - t_shift) + ph_shift])
    """
    hpnew = TimeSeries(hp,
                       epoch=hp._epoch,
                       delta_t=hp.delta_t,
                       dtype=hp.dtype,
                       copy=True)
    hcnew = TimeSeries(hc,
                       epoch=hc._epoch,
                       delta_t=hc.delta_t,
                       dtype=hc.dtype,
                       copy=True)
    # First apply phase shift
    if ph_shift != 0.:
        amplitude = amplitude_from_polarizations(hpnew, hcnew)
        phase = phase_from_polarizations(hpnew, hcnew)
        if verbose:
            print(("shifting by %f radians" % ph_shift))
        phase = phase + ph_shift
        hpnew = TimeSeries(amplitude * np.cos(phase + np.pi),
                           epoch=hpnew._epoch,
                           delta_t=hpnew.delta_t,
                           dtype=hpnew.dtype)
        hcnew = TimeSeries(amplitude * np.sin(phase + np.pi),
                           epoch=hcnew._epoch,
                           delta_t=hcnew.delta_t,
                           dtype=hcnew.dtype)
    # Now apply time shift
    if t_shift != 0:
        id_shift = int(np.round(np.abs(t_shift) / hpnew.delta_t))
        if verbose:
            print(("shifting by %d (%f)" % (id_shift, t_shift)))
        if t_shift > 0:
            hpnew.append_zeros(id_shift)
            hcnew.append_zeros(id_shift)
        else:
            hpnew.prepend_zeros(id_shift)
            hcnew.prepend_zeros(id_shift)
        hpnew.roll(int(id_shift * np.sign(t_shift)))
        hcnew.roll(int(id_shift * np.sign(t_shift)))
    if trim_trailing:
        hpnew = trim_trailing_zeros(hpnew)
        hcnew = trim_trailing_zeros(hcnew)
    if trim_leading:
        hpnew = trim_leading_zeros(hpnew)
        hcnew = trim_leading_zeros(hcnew)
    # RETURN
    return hpnew, hcnew
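A minimal usage sketch (the waveform call and its parameters are illustrative; it assumes pycbc is installed and shift_waveform_phase_time is in scope):

import numpy as np
from pycbc.waveform import get_td_waveform

# Generate a short compact-binary waveform (illustrative parameters).
hp, hc = get_td_waveform(approximant='SEOBNRv4', mass1=30., mass2=30.,
                         f_lower=20., delta_t=1.0 / 4096)
# Delay the waveform by 0.5 s and rotate its phase by pi/4 radians.
hp2, hc2 = shift_waveform_phase_time(hp, hc, 0.5, np.pi / 4, verbose=True)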
Example #2
from pycbc.types import (TimeSeries, FrequencySeries, zeros,
                         real_same_precision_as)
from pycbc.fft import fft, ifft


def interpolate_complex_frequency(series,
                                  delta_f,
                                  zeros_offset=0,
                                  side='right'):
    """Interpolate complex frequency series to desired delta_f.

    Return a new complex frequency series that has been interpolated to the
    desired delta_f.

    Parameters
    ----------
    series : FrequencySeries
        Frequency series to be interpolated.
    delta_f : float
        The desired delta_f of the output
    zeros_offset : optional, {0, int}
        Number of samples by which to delay the start of the zero padding
    side : optional, {'right', str}
        The side of the vector to zero pad
        
    Returns
    -------
    interpolated series : FrequencySeries
        A new FrequencySeries that has been interpolated.
    """
    new_n = int((len(series) - 1) * series.delta_f / delta_f + 1)
    old_N = int((len(series) - 1) * 2)
    new_N = int((new_n - 1) * 2)
    time_series = TimeSeries(zeros(old_N),
                             delta_t=1.0 / (series.delta_f * old_N),
                             dtype=real_same_precision_as(series))

    ifft(series, time_series)

    time_series.roll(-zeros_offset)
    time_series.resize(new_N)

    if side == 'left':
        time_series.roll(zeros_offset + new_N - old_N)
    elif side == 'right':
        time_series.roll(zeros_offset)

    out_series = FrequencySeries(zeros(new_n),
                                 epoch=series.epoch,
                                 delta_f=delta_f,
                                 dtype=series.dtype)
    fft(time_series, out_series)

    return out_series
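The interpolation works by a round trip through the time domain: an inverse FFT, a resize (zero-pad) to the length matching the finer delta_f, and a forward FFT back. A minimal sketch of the expected lengths (values are illustrative; assumes pycbc is installed and the function above is in scope):

import numpy
from pycbc.types import FrequencySeries

# A flat complex spectrum: 129 samples at delta_f = 1 Hz (old_N = 256).
fs = FrequencySeries(numpy.ones(129, dtype=numpy.complex128), delta_f=1.0)
finer = interpolate_complex_frequency(fs, delta_f=0.5)
print(len(fs), fs.delta_f, '->', len(finer), finer.delta_f)
# 129 1.0 -> 257 0.5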
Example #3
def interpolate_complex_frequency(series, delta_f, zeros_offset=0, side='right'):
    """Interpolate complex frequency series to desired delta_f.

    Return a new complex frequency series that has been interpolated to the
    desired delta_f.

    Parameters
    ----------
    series : FrequencySeries
        Frequency series to be interpolated.
    delta_f : float
        The desired delta_f of the output
    zeros_offset : optional, {0, int}
        Number of samples by which to delay the start of the zero padding
    side : optional, {'right', str}
        The side of the vector to zero pad
        
    Returns
    -------
    interpolated series : FrequencySeries
        A new FrequencySeries that has been interpolated.
    """
    new_n = int((len(series) - 1) * series.delta_f / delta_f + 1)
    old_N = int((len(series) - 1) * 2)
    new_N = int((new_n - 1) * 2)
    time_series = TimeSeries(zeros(old_N),
                             delta_t=1.0 / (series.delta_f * old_N),
                             dtype=real_same_precision_as(series))

    ifft(series, time_series)

    time_series.roll(-zeros_offset)
    time_series.resize(new_N)
    
    if side == 'left':
        time_series.roll(zeros_offset + new_N - old_N)
    elif side == 'right':
        time_series.roll(zeros_offset)

    out_series = FrequencySeries(zeros(new_n), epoch=series.epoch,
                                 delta_f=delta_f, dtype=series.dtype)
    fft(time_series, out_series)

    return out_series
Example #4
File: frame.py  Project: RorySmith/pycbc
class DataBuffer(object):

    """A linear buffer that acts as a FILO for reading in frame data
    """

    def __init__(self, frame_src, 
                       channel_name,
                       start_time,
                       max_buffer=2048, 
                       force_update_cache=True,
                       increment_update_cache=None):
        """ Create a rolling buffer of frame data

        Parameters
        ----------
        frame_src: str or list of strings
            Strings that indicate where to read files from. This can be a
            list of frame files, a glob, etc.
        channel_name: str
            Name of the channel to read from the frame files
        start_time: 
            Time to start reading from.
        max_buffer: {int, 2048}, Optional
            Length of the buffer in seconds
        force_update_cache: {True, boolean}, Optional
            Reread the frame cache before each advance
        increment_update_cache: {None, str}, Optional
            Directory pattern used to guess successive frame file names;
            see `update_cache_by_increment`
        """
        self.frame_src = frame_src
        self.channel_name = channel_name
        self.read_pos = start_time
        self.force_update_cache = force_update_cache
        self.increment_update_cache = increment_update_cache

        self.update_cache()
        self.channel_type, self.raw_sample_rate = \
            self._retrieve_metadata(self.stream, self.channel_name)

        raw_size = self.raw_sample_rate * max_buffer
        self.raw_buffer = TimeSeries(zeros(raw_size, dtype=numpy.float64),
                                     copy=False,
                                     epoch=start_time - max_buffer,
                                     delta_t=1.0/self.raw_sample_rate)

    def update_cache(self):
        """Reset the lal cache. This can be used to update the cache if the 
        result may change due to more files being added to the filesystem, 
        for example.
        """
        cache = locations_to_cache(self.frame_src)
        stream = lalframe.FrStreamCacheOpen(cache)
        self.stream = stream

    def _retrieve_metadata(self, stream, channel_name):
        """Retrieve basic metadata by reading the first file in the cache
    
        Parameters
        ----------
        stream: lal stream object
            Stream containing a channel we want to learn about
        channel_name: str
            The name of the channel we want to know the dtype and sample rate of

        Returns
        -------
        channel_type: lal type enum
            Enum value which indicates the dtype of the channel
        sample_rate: int
            The sample rate of the data within this channel
        """
        lalframe.FrStreamGetVectorLength(channel_name, stream)
        channel_type = lalframe.FrStreamGetTimeSeriesType(channel_name, stream)
        create_series_func = _fr_type_map[channel_type][2]
        get_series_metadata_func = _fr_type_map[channel_type][3]
        series = create_series_func(channel_name, stream.epoch, 0, 0,
                            lal.ADCCountUnit, 0)
        get_series_metadata_func(series, stream)
        return channel_type, int(1.0/series.deltaT)

    def _read_frame(self, blocksize):
        """Try to read the block of data blocksize seconds long

        Parameters
        ----------
        blocksize: int
            The number of seconds to attempt to read from the channel

        Returns
        -------
        data: TimeSeries
            TimeSeries containing 'blocksize' seconds of frame data

        Raises
        ------
        RuntimeError:
            If data cannot be read for any reason
        """
        try:
            read_func = _fr_type_map[self.channel_type][0]
            dtype = _fr_type_map[self.channel_type][1]
            data = read_func(self.stream, self.channel_name,
                             self.read_pos, int(blocksize), 0)
            return TimeSeries(data.data.data, delta_t=data.deltaT,
                              epoch=self.read_pos, 
                              dtype=dtype)     
        except Exception as e:
            raise RuntimeError('Cannot read requested frame data') from e

    def null_advance(self, blocksize):
        """Advance and insert zeros

        Parameters
        ----------
        blocksize: int
            The number of seconds to attempt to read from the channel
        """
        self.raw_buffer.roll(-int(blocksize * self.raw_sample_rate))
        self.read_pos += blocksize       
        self.raw_buffer.start_time += blocksize

    def advance(self, blocksize):
        """Add blocksize seconds more to the buffer, push blocksize seconds
        from the beginning.

        Parameters
        ----------
        blocksize: int
            The number of seconds to attempt to read from the channel
        """
        ts = self._read_frame(blocksize)

        self.raw_buffer.roll(-len(ts))
        self.raw_buffer[-len(ts):] = ts[:] 
        self.read_pos += blocksize
        self.raw_buffer.start_time += blocksize
        return ts
        
    def update_cache_by_increment(self, blocksize):
        """Update the internal cache by starting from the first frame
        and incrementing.

        Guess the next frame file name by incrementing from the first found
        one. This allows a pattern to be used for the GPS folder of the file,
        which is indicated by `GPSX` where X is the number of digits to use.

        Parameters
        ----------
        blocksize: int
            Number of seconds to increment the next frame file.
        """
        start = float(self.raw_buffer.end_time)
        end = float(start + blocksize)
        
        if not hasattr(self, 'dur'):       
            fname = glob.glob(self.frame_src[0])[0]
            fname = os.path.splitext(os.path.basename(fname))[0].split('-')
            
            self.beg = '-'.join([fname[0], fname[1]])
            self.ref = int(fname[2])
            self.dur = int(fname[3])
        
        fstart = int(self.ref + numpy.floor((start - self.ref) / float(self.dur)) * self.dur)
        starts = numpy.arange(fstart, end, self.dur).astype(int)
        
        keys = []
        for s in starts:
            pattern = self.increment_update_cache
            if 'GPS' in pattern:
                n = int(pattern[pattern.index('GPS') + 3])
                pattern = pattern.replace('GPS%s' % n, str(s)[0:n])
                
            name = '%s/%s-%s-%s.gwf' % (pattern, self.beg, s, self.dur)
            # check that file actually exists, else abort now
            if not os.path.exists(name):
                logging.info("%s does not seem to exist yet" % name)
                raise RuntimeError('frame file %s not found' % name)

            keys.append(name)
        cache = locations_to_cache(keys)
        stream = lalframe.FrStreamCacheOpen(cache)
        self.stream = stream
        self.channel_type, self.raw_sample_rate = \
            self._retrieve_metadata(self.stream, self.channel_name)

    def attempt_advance(self, blocksize, timeout=10):
        """ Attempt to advance the frame buffer. Retry upon failure, except
        if the frame file is beyond the timeout limit.

        Parameters
        ----------
        blocksize: int
            The number of seconds to attempt to read from the channel
        timeout: {int, 10}, Optional
            Number of seconds before giving up on reading a frame

        Returns
        -------
        data: TimeSeries
            TimeSeries containing 'blocksize' seconds of frame data
        """
        if self.force_update_cache:
            self.update_cache()
        
        try:
            if self.increment_update_cache:
                self.update_cache_by_increment(blocksize)

            return DataBuffer.advance(self, blocksize)

        except RuntimeError:
            if lal.GPSTimeNow() > timeout + self.raw_buffer.end_time:
                # The frame is not there and it should be by now, so we give up
                # and treat it as zeros
                logging.info('Frame missing, giving up...')
                DataBuffer.null_advance(self, blocksize)
                return None
            else:
                # Too early to give up on this frame, so try again
                logging.info('Frame missing, waiting a bit more...')
                time.sleep(1)
                return self.attempt_advance(blocksize, timeout=timeout)
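A minimal read-loop sketch (the frame glob, channel name, and times are hypothetical placeholders):

# Stream a channel in 4-second blocks; frames missing past the timeout
# are replaced by zeros via null_advance.
buf = DataBuffer('/data/frames/H1/*.gwf', 'H1:GDS-CALIB_STRAIN',
                 start_time=1126259462, max_buffer=512)
for _ in range(8):
    ts = buf.attempt_advance(4)
    if ts is None:
        continue  # frame was missing; the buffer was zero-filled
    print(ts.start_time, len(ts))  # the newest 4 s of data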
Example #5
class DataBuffer(object):

    """A linear buffer that acts as a FILO for reading in frame data
    """

    def __init__(self, frame_src, 
                 channel_name,
                 start_time,
                 max_buffer=2048,
                 force_update_cache=True,
                 increment_update_cache=None,
                 dtype=numpy.float64):
        """ Create a rolling buffer of frame data

        Parameters
        ----------
        frame_src: str or list of strings
            Strings that indicate where to read files from. This can be a
            list of frame files, a glob, etc.
        channel_name: str
            Name of the channel to read from the frame files
        start_time: 
            Time to start reading from.
        max_buffer: {int, 2048}, Optional
            Length of the buffer in seconds
        force_update_cache: {True, boolean}, Optional
            Reread the frame cache before each advance
        increment_update_cache: {None, str}, Optional
            Directory pattern used to guess successive frame file names;
            see `update_cache_by_increment`
        dtype: {numpy.float64, dtype}, Optional
            Data type to use for the internal buffer
        """
        self.frame_src = frame_src
        self.channel_name = channel_name
        self.read_pos = start_time
        self.force_update_cache = force_update_cache
        self.increment_update_cache = increment_update_cache
        self.detector = channel_name.split(':')[0]

        self.update_cache()
        self.channel_type, self.raw_sample_rate = \
            self._retrieve_metadata(self.stream, self.channel_name)

        raw_size = self.raw_sample_rate * max_buffer
        self.raw_buffer = TimeSeries(zeros(raw_size, dtype=dtype),
                                     copy=False,
                                     epoch=start_time - max_buffer,
                                     delta_t=1.0/self.raw_sample_rate)

    def update_cache(self):
        """Reset the lal cache. This can be used to update the cache if the 
        result may change due to more files being added to the filesystem, 
        for example.
        """
        cache = locations_to_cache(self.frame_src)
        stream = lalframe.FrStreamCacheOpen(cache)
        self.stream = stream

    @staticmethod
    def _retrieve_metadata(stream, channel_name):
        """Retrieve basic metadata by reading the first file in the cache
    
        Parameters
        ----------
        stream: lal stream object
            Stream containing a channel we want to learn about
        channel_name: str
            The name of the channel we want to know the dtype and sample rate of

        Returns
        -------
        channel_type: lal type enum
            Enum value which indicates the dtype of the channel
        sample_rate: int
            The sample rate of the data within this channel
        """
        lalframe.FrStreamGetVectorLength(channel_name, stream)
        channel_type = lalframe.FrStreamGetTimeSeriesType(channel_name, stream)
        create_series_func = _fr_type_map[channel_type][2]
        get_series_metadata_func = _fr_type_map[channel_type][3]
        series = create_series_func(channel_name, stream.epoch, 0, 0,
                            lal.ADCCountUnit, 0)
        get_series_metadata_func(series, stream)
        return channel_type, int(1.0/series.deltaT)

    def _read_frame(self, blocksize):
        """Try to read the block of data blocksize seconds long

        Parameters
        ----------
        blocksize: int
            The number of seconds to attempt to read from the channel

        Returns
        -------
        data: TimeSeries
            TimeSeries containing 'blocksize' seconds of frame data

        Raises
        ------
        RuntimeError:
            If data cannot be read for any reason
        """
        try:
            read_func = _fr_type_map[self.channel_type][0]
            dtype = _fr_type_map[self.channel_type][1]
            data = read_func(self.stream, self.channel_name,
                             self.read_pos, int(blocksize), 0)
            return TimeSeries(data.data.data, delta_t=data.deltaT,
                              epoch=self.read_pos, 
                              dtype=dtype)     
        except Exception as e:
            raise RuntimeError('Cannot read {0} frame data'
                               .format(self.channel_name)) from e

    def null_advance(self, blocksize):
        """Advance and insert zeros

        Parameters
        ----------
        blocksize: int
            The number of seconds to attempt to read from the channel
        """
        self.raw_buffer.roll(-int(blocksize * self.raw_sample_rate))
        self.read_pos += blocksize       
        self.raw_buffer.start_time += blocksize

    def advance(self, blocksize):
        """Add blocksize seconds more to the buffer, push blocksize seconds
        from the beginning.

        Parameters
        ----------
        blocksize: int
            The number of seconds to attempt to read from the channel
        """
        ts = self._read_frame(blocksize)

        self.raw_buffer.roll(-len(ts))
        self.raw_buffer[-len(ts):] = ts[:] 
        self.read_pos += blocksize
        self.raw_buffer.start_time += blocksize
        return ts
        
    def update_cache_by_increment(self, blocksize):
        """Update the internal cache by starting from the first frame
        and incrementing.

        Guess the next frame file name by incrementing from the first found
        one. This allows a pattern to be used for the GPS folder of the file,
        which is indicated by `GPSX` where X is the number of digits to use.

        Parameters
        ----------
        blocksize: int
            Number of seconds to increment the next frame file.
        """
        start = float(self.raw_buffer.end_time)
        end = float(start + blocksize)
        
        if not hasattr(self, 'dur'):       
            fname = glob.glob(self.frame_src[0])[0]
            fname = os.path.splitext(os.path.basename(fname))[0].split('-')
            
            self.beg = '-'.join([fname[0], fname[1]])
            self.ref = int(fname[2])
            self.dur = int(fname[3])
        
        fstart = int(self.ref + numpy.floor((start - self.ref) / float(self.dur)) * self.dur)
        starts = numpy.arange(fstart, end, self.dur).astype(int)
        
        keys = []
        for s in starts:
            pattern = self.increment_update_cache
            if 'GPS' in pattern:
                n = int(pattern[pattern.index('GPS') + 3])
                pattern = pattern.replace('GPS%s' % n, str(s)[0:n])
                
            name = '%s/%s-%s-%s.gwf' % (pattern, self.beg, s, self.dur)
            # check that file actually exists, else abort now
            if not os.path.exists(name):
                logging.info("%s does not seem to exist yet" % name)
                raise RuntimeError('frame file %s not found' % name)

            keys.append(name)
        cache = locations_to_cache(keys)
        stream = lalframe.FrStreamCacheOpen(cache)
        self.stream = stream
        self.channel_type, self.raw_sample_rate = \
            self._retrieve_metadata(self.stream, self.channel_name)

    def attempt_advance(self, blocksize, timeout=10):
        """ Attempt to advance the frame buffer. Retry upon failure, except
        if the frame file is beyond the timeout limit.

        Parameters
        ----------
        blocksize: int
            The number of seconds to attempt to read from the channel
        timeout: {int, 10}, Optional
            Number of seconds before giving up on reading a frame

        Returns
        -------
        data: TimeSeries
            TimeSeries containing 'blocksize' seconds of frame data
        """
        if self.force_update_cache:
            self.update_cache()
        
        try:
            if self.increment_update_cache:
                self.update_cache_by_increment(blocksize)

            return DataBuffer.advance(self, blocksize)

        except RuntimeError:
            if lal.GPSTimeNow() > timeout + self.raw_buffer.end_time:
                # The frame is not there and it should be by now, so we give up
                # and treat it as zeros
                DataBuffer.null_advance(self, blocksize)
                return None
            else:
                # Too early to give up on this frame, so try again
                time.sleep(1)
                return self.attempt_advance(blocksize, timeout=timeout)
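The `GPSX` directory-pattern expansion used by update_cache_by_increment can be isolated as a standalone sketch (names and values are hypothetical):

def expand_gps_pattern(pattern, beg, gps_start, dur):
    # The digit after 'GPS' says how many leading digits of the GPS time
    # name the folder, e.g. 'GPS5' and 1126259456 -> '11262'.
    if 'GPS' in pattern:
        n = int(pattern[pattern.index('GPS') + 3])
        pattern = pattern.replace('GPS%s' % n, str(gps_start)[0:n])
    return '%s/%s-%s-%s.gwf' % (pattern, beg, gps_start, dur)

print(expand_gps_pattern('/data/frames/GPS5', 'H-H1_HOFT', 1126259456, 4096))
# -> /data/frames/11262/H-H1_HOFT-1126259456-4096.gwf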
Example #6
class DataBuffer(object):
    """ A linear buffer that acts as a FILO for reading in frame data
    """
    def __init__(self, frame_src, channel_name, start_time, max_buffer=2048):
        """ Create a rolling buffer of frame data

        Parameters
        ----------
        frame_src: str or list of strings
            Strings that indicate where to read files from. This can be a
            list of frame files, a glob, etc.
        channel_name: str
            Name of the channel to read from the frame files
        start_time: 
            Time to start reading from.
        max_buffer: {int, 2048}, Optional
            Length of the buffer in seconds
        """
        self.frame_src = frame_src
        self.channel_name = channel_name
        self.read_pos = start_time

        self.update_cache()
        self.channel_type, self.sample_rate = self._retrieve_metadata(
            self.stream, self.channel_name)

        raw_size = self.sample_rate * max_buffer
        self.raw_buffer = TimeSeries(zeros(raw_size, dtype=numpy.float64),
                                     copy=False,
                                     epoch=start_time - max_buffer,
                                     delta_t=1.0 / self.sample_rate)

    def update_cache(self):
        """ Reset the lal cache. This can be used to update the cache if the 
        result may change due to more files being added to the filesystem, 
        for example.
        """
        cache = locations_to_cache(self.frame_src)
        stream = lalframe.FrStreamCacheOpen(cache)
        self.stream = stream

    def _retrieve_metadata(self, stream, channel_name):
        """ Retrieve basic metadata by reading the first file in the cache
    
        Parameters
        ----------
        stream: lal stream object
            Stream containing a channel we want to learn about
        channel_name: str
            The name of the channel we want to know the dtype and sample rate of

        Returns
        -------
        channel_type: lal type enum
            Enum value which indicates the dtype of the channel
        sample_rate: int
            The sample rate of the data within this channel
        """
        lalframe.FrStreamGetVectorLength(channel_name, stream)
        channel_type = lalframe.FrStreamGetTimeSeriesType(channel_name, stream)
        create_series_func = _fr_type_map[channel_type][2]
        get_series_metadata_func = _fr_type_map[channel_type][3]
        series = create_series_func(channel_name, stream.epoch, 0, 0,
                                    lal.ADCCountUnit, 0)
        get_series_metadata_func(series, stream)
        return channel_type, int(1.0 / series.deltaT)

    def _read_frame(self, blocksize):
        """ Try to read the block of data blocksize seconds long

        Parameters
        ----------
        blocksize: int
            The number of seconds to attempt to read from the channel

        Returns
        -------
        data: TimeSeries
            TimeSeries containing 'blocksize' seconds of frame data

        Raises
        ------
        RuntimeError:
            If data cannot be read for any reason
        """
        try:
            read_func = _fr_type_map[self.channel_type][0]
            dtype = _fr_type_map[self.channel_type][1]
            data = read_func(self.stream, self.channel_name, self.read_pos,
                             blocksize, 0)
            return TimeSeries(data.data.data,
                              delta_t=data.deltaT,
                              epoch=self.read_pos,
                              dtype=dtype)
        except Exception as e:
            raise RuntimeError('Cannot read requested frame data') from e

    def null_advance(self, blocksize):
        """ Advance and insert zeros

        Parameters
        ----------
        blocksize: int
            The number of seconds to attempt to read from the channel
        """
        self.raw_buffer.roll(-int(blocksize * self.sample_rate))
        self.raw_buffer.start_time += blocksize

    def advance(self, blocksize):
        """ Add blocksize seconds more to the buffer, push blocksize seconds
        from the beginning.

        Parameters
        ----------
        blocksize: int
            The number of seconds to attempt to read from the channel
        """
        ts = self._read_frame(blocksize)

        self.raw_buffer.roll(-len(ts))
        self.raw_buffer[-len(ts):] = ts[:]
        self.read_pos += blocksize
        self.raw_buffer.start_time += blocksize
        return ts

    def attempt_advance(self, blocksize, timeout=10):
        """ Attempt to advance the frame buffer. Retry upon failure, except
        if the frame file is beyond the timeout limit.

        Parameters
        ----------
        blocksize: int
            The number of seconds to attempt to read from the channel
        timeout: {int, 10}, Optional
            Number of seconds before giving up on reading a frame

        Returns
        -------
        data: TimeSeries
            TimeSeries containing 'blocksize' seconds of frame data
        """
        self.update_cache()

        try:
            return DataBuffer.advance(self, blocksize)
        except RuntimeError:
            if lal.GPSTimeNow() > timeout + self.raw_buffer.end_time:
                # The frame is not there and it should be by now, so we give up
                # and treat it as zeros
                self.null_advance(blocksize)
                return None
            else:
                # Too early to give up on this frame, so try again
                return self.attempt_advance(blocksize, timeout=timeout)
Example #7
class DataBuffer(object):
    """ A linear buffer that acts as a FILO for reading in frame data
    """
    def __init__(self, frame_src, 
                       channel_name,
                       start_time,
                       max_buffer=2048):
        """ Create a rolling buffer of frame data

        Parameters
        ----------
        frame_src: str or list of strings
            Strings that indicate where to read files from. This can be a
            list of frame files, a glob, etc.
        channel_name: str
            Name of the channel to read from the frame files
        start_time: 
            Time to start reading from.
        max_buffer: {int, 2048}, Optional
            Length of the buffer in seconds
        """
        self.frame_src = frame_src
        self.channel_name = channel_name
        self.read_pos = start_time

        self.update_cache()
        self.channel_type, self.sample_rate = \
            self._retrieve_metadata(self.stream, self.channel_name)

        raw_size = self.sample_rate * max_buffer
        self.raw_buffer = TimeSeries(zeros(raw_size, dtype=numpy.float64),
                                     copy=False,
                                     epoch=start_time - max_buffer,
                                     delta_t=1.0/self.sample_rate)

    def update_cache(self):
        """ Reset the lal cache. This can be used to update the cache if the 
        result may change due to more files being added to the filesystem, 
        for example.
        """
        cache = locations_to_cache(self.frame_src)
        stream = lalframe.FrStreamCacheOpen(cache)
        self.stream = stream

    def _retrieve_metadata(self, stream, channel_name):
        """ Retrieve basic metadata by reading the first file in the cache
    
        Parameters
        ----------
        stream: lal stream object
            Stream containing a channel we want to learn about
        channel_name: str
            The name of the channel we want to know the dtype and sample rate of

        Returns
        -------
        channel_type: lal type enum
            Enum value which indicates the dtype of the channel
        sample_rate: int
            The sample rate of the data within this channel
        """
        lalframe.FrStreamGetVectorLength(channel_name, stream)
        channel_type = lalframe.FrStreamGetTimeSeriesType(channel_name, stream)
        create_series_func = _fr_type_map[channel_type][2]
        get_series_metadata_func = _fr_type_map[channel_type][3]
        series = create_series_func(channel_name, stream.epoch, 0, 0,
                            lal.ADCCountUnit, 0)
        get_series_metadata_func(series, stream)
        return channel_type, int(1.0/series.deltaT)        

    def _read_frame(self, blocksize):
        """ Try to read the block of data blocksize seconds long

        Parameters
        ----------
        blocksize: int
            The number of seconds to attempt to read from the channel

        Returns
        -------
        data: TimeSeries
            TimeSeries containing 'blocksize' seconds of frame data

        Raises
        ------
        RuntimeError:
            If data cannot be read for any reason
        """
        try:
            read_func = _fr_type_map[self.channel_type][0]
            dtype = _fr_type_map[self.channel_type][1]
            data = read_func(self.stream, self.channel_name, self.read_pos, blocksize, 0)
            return TimeSeries(data.data.data, delta_t=data.deltaT,
                              epoch=self.read_pos, 
                              dtype=dtype)     
        except Exception as e:
            raise RuntimeError('Cannot read requested frame data') from e

    def null_advance(self, blocksize):
        """ Advance and insert zeros

        Parameters
        ----------
        blocksize: int
            The number of seconds to attempt to read from the channel
        """
        self.raw_buffer.roll(-int(blocksize * self.sample_rate))       
        self.raw_buffer.start_time += blocksize

    def advance(self, blocksize):
        """ Add blocksize seconds more to the buffer, push blocksize seconds
        from the beginning.

        Parameters
        ----------
        blocksize: int
            The number of seconds to attempt to read from the channel
        """
        ts = self._read_frame(blocksize)

        self.raw_buffer.roll(-len(ts))
        self.raw_buffer[-len(ts):] = ts[:] 
        self.read_pos += blocksize
        self.raw_buffer.start_time += blocksize
        return ts

    def attempt_advance(self, blocksize, timeout=10):
        """ Attempt to advance the frame buffer. Retry upon failure, except
        if the frame file is beyond the timeout limit.

        Parameters
        ----------
        blocksize: int
            The number of seconds to attempt to read from the channel
        timeout: {int, 10}, Optional
            Number of seconds before giving up on reading a frame

        Returns
        -------
        data: TimeSeries
            TimeSeries containing 'blocksize' seconds of frame data
        """
        self.update_cache()
        
        try:
            return DataBuffer.advance(self, blocksize)
        except RuntimeError:
            if lal.GPSTimeNow() > timeout + self.raw_buffer.end_time:
                # The frame is not there and it should be by now, so we give up
                # and treat it as zeros
                self.null_advance(blocksize)
                return None
            else:
                # Too early to give up on this frame, so try again
                return self.attempt_advance(blocksize, timeout=timeout)