Example #1
def from_h5(src, debug=False):
    """
    Read the HDF5 file at the given path, and build the metaArray accordingly.

    src can be a string or an h5py.Group object.

    If src is a string, it is interpreted as the source file path.

    If src is an h5py.Group object, the content will be read from the given group.

    """

    # Reading from existing group
    if isinstance(src, h5py.Group):
        ary = src['ndarray'][()]          # Read the array
        info = __read_info(src['info'])   # Read the meta info
        return metaArray(ary, info=info)  # Nothing more to do for a group

    # Reading from file path
    path = filePath(src)

    if not path.exist:
        raise IOError('File ' + str(path.full) + ' does not exist')
    
    if not path.read:
        raise IOError("Unable to read from: " + str(path.full))

    with h5py.File(path.full, 'r') as f:
        ary = f['ndarray'][()]          # Read the array
        info = __read_info(f['info'])   # Read the meta info

    return metaArray(ary, info=info)
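
A minimal usage sketch for the reader above. The file name measurement.h5 and the group name scan_01 are hypothetical, and from_h5 / metaArray are assumed to be importable from the package that defines them.

# Hypothetical usage sketch; file and group names are illustrative only.
import h5py

# Read straight from a file path
ary = from_h5('measurement.h5')

# Or hand over an already-open h5py.Group
with h5py.File('measurement.h5', 'r') as f:
    grp_ary = from_h5(f['scan_01'])

print(ary['name'], ary.data.shape)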
Example #2
    def __call__(self):
        """
        Return a metaArray when called
        """
        metainfo = self.metainfo
        index = array(self.getcolumn(3), dtype=float)
        data = array(self.getcolumn(4), dtype=float)

        if linearChk(index, debug=self.debug) is not True:
            raise ValueError("The index array is not linear")

        # Write the data array as metaArray
        ary = metaArray(data)

        # Update the basic metaArray info
        ary['name'] = self.name
        ary['unit'] = metainfo['Vertical Units']

        # Update the scaling info
        ary['range']['begin'][0] = index[0]
        ary['range']['end'][0] = index[-1]
        ary['range']['unit'][0] = metainfo['Horizontal Units']
        ary['range']['label'][0] = metainfo['Source']

        # Include the rest of the metainfo into metaArray
        for field, value in metainfo.items():
            ary["TDS2.csv."+field] = value

        ary.update_range()
        return ary
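
A hedged usage sketch for the reader above. The wrapper class name (TDS2_csv here) and its constructor argument are assumptions; only the __call__ method is shown in this example.

# Hypothetical usage; the class name TDS2_csv and its constructor are assumed.
reader = TDS2_csv('tds2024_capture.csv')   # parse the oscilloscope CSV export
ary = reader()                             # __call__ returns the metaArray
print(ary['name'], ary['unit'], ary.get_range(0, 'unit'))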
Example #3
    def __call__(self):
        """
        Return a metaArray when called
        """
        metainfo = self.metainfo
        index = array(self.getcolumn(3), dtype=float)
        data = array(self.getcolumn(4), dtype=float)

        if linearChk(index, debug=self.debug) is not True:
            raise ValueError("The index array is not linear")

        # Write the data array as metaArray
        ary = metaArray(data)

        # Update the basic metaArray info
        ary['name'] = self.name
        ary['unit'] = metainfo['Vertical Units']

        # Update the scaling info
        ary['range']['begin'][0] = index[0]
        ary['range']['end'][0] = index[-1]
        ary['range']['unit'][0] = metainfo['Horizontal Units']
        ary['range']['label'][0] = metainfo['Source']

        # Include the rest of the metainfo into metaArray
        for field, value in metainfo.items():
            ary["TDS2.csv." + field] = value

        ary.update_range()
        return ary
Example #4
    def __call__(self):
        """
        Return a metaArray when called
        """
        metainfo = self.metainfo
        rcd_len = metainfo['Record Length']

        index = csv_file.getcolumn(self, 0)[self.label_row:]
        index_name = index[0]
        index = array(index[1:rcd_len+1], dtype=float)

        try:
            data = csv_file.getcolumn(self, self.data_col)[self.label_row:]
            metainfo['Source'] = data[0]
            data = array(data[1:rcd_len+1], dtype=float)
        except IndexError as err:
            print("Data column doesn't exist:", err)
            print("Defaulting to first data column")
            data = csv_file.getcolumn(self, 1)[self.label_row:]
            metainfo['Source'] = data[0]
            data = array(data[1:rcd_len+1], dtype=float)

        if linearChk(index, debug=self.debug) is not True:
            raise ValueError("The index array is not linear")

        # Write the data array as metaArray
        ary = metaArray(data)

        # Update the basic metaArray info
        ary['unit'] = metainfo['Vertical Units']
        if metainfo['Label'] == '':
            ary['name'] = self.name
        else:
            ary['name'] = metainfo['Label']

        # Update the scaling info
        ary['range']['begin'][0] = index[0]
        ary['range']['end'][0] = index[-1]
        ary['range']['unit'][0] = metainfo['Horizontal Units']
        ary['range']['label'][0] = index_name

        # Include the rest of the metainfo into metaArray
        for field, value in metainfo.items():
            ary["DPO2.csv."+field] = value

        ary.update_range()
        return ary
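
A usage sketch under the same caveats; the class name DPO2_csv is an assumption, and data_col / label_row are the attributes the method above relies on.

# Hypothetical usage; class name and attribute values are illustrative.
reader = DPO2_csv('dpo2024_capture.csv')
reader.data_col = 3      # request a specific data column...
ary = reader()           # ...the method falls back to column 1 if it is missing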
Example #5
    def __call__(self):
        """
        Return a metaArray when called
        """
        metainfo = self.metainfo
        rcd_len = metainfo['Record Length']

        index = csv_file.getcolumn(self, 0)[self.label_row:]
        index_name = index[0]
        index = array(index[1:rcd_len + 1], dtype=float)

        try:
            data = csv_file.getcolumn(self, self.data_col)[self.label_row:]
            metainfo['Source'] = data[0]
            data = array(data[1:rcd_len + 1], dtype=float)
        except IndexError as err:
            print("Data column doesn't exist:", err)
            print("Defaulting to first data column")
            data = csv_file.getcolumn(self, 1)[self.label_row:]
            metainfo['Source'] = data[0]
            data = array(data[1:rcd_len + 1], dtype=float)

        if linearChk(index, debug=self.debug) is not True:
            raise ValueError("The index array is not linear")

        # Write the data array as metaArray
        ary = metaArray(data)

        # Update the basic metaArray info
        ary['unit'] = metainfo['Vertical Units']
        if metainfo['Label'] == '':
            ary['name'] = self.name
        else:
            ary['name'] = metainfo['Label']

        # Update the scaling info
        ary['range']['begin'][0] = index[0]
        ary['range']['end'][0] = index[-1]
        ary['range']['unit'][0] = metainfo['Horizontal Units']
        ary['range']['label'][0] = index_name

        # Include the rest of the metainfo into metaArray
        for field, value in metainfo.items():
            ary["DPO2.csv." + field] = value

        ary.update_range()
        return ary
Example #6
def meta_histogram(metAry, bins=False):
    """
    Compute a histogram of the given 1D metaArray.

    It will try to work out the maximum number of bins (i.e. minimum
    quantisation from the data) by default.

    Will raise QuantsationError if unable to determine the number of bins.
    """

    assert metAry.ndim == 1, "Only 1D metaArray accepted, there are {0:d} dimensions in the given data.".format(
        metAry.ndim)

    # Flatten the data to 1D array
    data = metAry.data.ravel()

    if bins is not False:
        quanter = data.ptp() / bins
    else:
        # Try to quantise the array data
        quanter = quantise(data)

    # Quantise the data, and offset to the +ve side of value, bincount requires +ve
    # int arrays
    quantum = np.round(data / quanter).astype(int)

    quantum -= quantum.min()

    # Do the bincount for histogram
    hist = np.bincount(quantum)

    # Update the metaInfo
    hist = metaArray(hist)
    hist.set_range(0, 'begin', metAry.min())
    hist.set_range(0, 'end', metAry.max())
    hist.set_range(0, 'unit', metAry['unit'])
    hist.set_range(0, 'label', metAry['label'])

    hist['name'] = 'Histogram of ' + metAry['name']
    hist['unit'] = ''
    hist['label'] = 'Counts'

    return hist
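
A short sketch of how the function above might be called; signal stands for any 1D metaArray.

# Hypothetical usage; 'signal' is any 1D metaArray.
hist = meta_histogram(signal)             # bin width inferred via quantise()
hist64 = meta_histogram(signal, bins=64)  # or force roughly 64 bins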
Example #7
def meta_histogram(metAry, bins=False):
    """
    Compute a histogram of the given 1D metaArray.

    It will try to work out the maximum number of bins (i.e. minimum
    quantisation from the data) by default.

    Will raise QuantsationError if unable to determine the number of bins.
    """

    assert metAry.ndim == 1, "Only 1D metaArray accepted, there are {0:d} dimensions in the given data.".format(metAry.ndim)

    # Flatten the data to 1D array
    data = metAry.data.ravel()

    if bins is not False:
        quanter = data.ptp() / bins
    else:
        # Try to quantise the array data
        quanter = quantise(data)

    # Quantise the data, and offset to the +ve side of value, bincount requires +ve
    # int arrays
    quantum = np.round(data / quanter).astype(int)

    quantum -= quantum.min()

    # Do the bincount for histogram
    hist = np.bincount(quantum)

    # Update the metaInfo
    hist = metaArray(hist)
    hist.set_range(0, 'begin', metAry.min())
    hist.set_range(0, 'end', metAry.max())
    hist.set_range(0, 'unit', metAry['unit'])
    hist.set_range(0, 'label', metAry['label'])

    hist['name'] = 'Histogram of ' + metAry['name']
    hist['unit'] = ''
    hist['label'] = 'Counts'

    return hist
Example #8
def rfft(metAry, n=None, axes=-1):
    """
    RFFT function wrapper for numpy.fft to be used on metaArray objects;
    returns a scaled metaArray object.
    """

    nyquist = len(metAry) * 0.5 / (metAry.get_range(axes, 'end') - metAry.get_range(axes, 'begin'))

    fary = metaArray(np.fft.rfft(metAry.data, n, axes))

    fary.set_range(0, 'begin', 0)
    fary.set_range(0, 'end', nyquist)
    fary.set_range(0, 'unit', 'Hz')
    fary.set_range(0, 'label', 'Frequency')

    fary['unit'] = ''
    fary['label'] = 'Amplitude'
    try:
        fary['name'] = 'FFT{ ' + metAry['name'] + ' }'
    except TypeError:
        fary['name'] = 'FFT{ }'

    return fary
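
A quick usage sketch; pulse is assumed to be a 1D time-domain metaArray with its axis range set in seconds.

# Hypothetical usage; 'pulse' is a 1D time-domain metaArray.
spectrum = rfft(pulse)
print(spectrum.get_range(0, 'end'))   # Nyquist frequency of the original trace, in Hz
print(abs(spectrum.data).max())       # peak spectral amplitude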
Example #9
def rfft(metAry, n=None, axes=-1):
    """
    RFFT function wrapper for numpy.fft to be used on metaArray objects;
    returns a scaled metaArray object.
    """

    nyquist = len(metAry) * 0.5 / (metAry.get_range(axes, 'end') -
                                   metAry.get_range(axes, 'begin'))

    fary = metaArray(np.fft.rfft(metAry.data, n, axes))

    fary.set_range(0, 'begin', 0)
    fary.set_range(0, 'end', nyquist)
    fary.set_range(0, 'unit', 'Hz')
    fary.set_range(0, 'label', 'Frequency')

    fary['unit'] = ''
    fary['label'] = 'Amplitude'
    try:
        fary['name'] = 'FFT{ ' + metAry['name'] + ' }'
    except TypeError:
        fary['name'] = 'FFT{ }'

    return fary
Example #10
def stfft(metAry, tres=100, fres=None, window='blackmanharris', \
            fmin=None, fmax=None, mag=True, debug=False):
    """
    Simple implementation of short time fast Fourier transform on metaArray
    object.

    metAry      Input metaArray
    tres        Temporal resolution
    fres        Frequency resolution
    window      Window function or None
    fmin        Lower cut-off frequency for the returned data, defaults to 0
    fmax        Upper cut-off frequency for the returned data, defaults to the Nyquist frequency
    mag         Return the abs() magnitude by default; return a complex array if False

    Each window will overlap 50% with its immediate neighbours. e.g.:

    |_____________________________________________|
    | | 1 | 3 | 5 | 7 | 9 | 11| 13| 15| 17| 19| 21|
    | 0 | 2 | 4 | 6 | 8 | 10| 12| 14| 16| 18| 20| |

    """
    f1 = fmax

    # Length of the (short) time window
    l = int(round(2 * len(metAry) / float(tres)))

    # List of (short) time window starting points
    winlst = sp.linspace(0, len(metAry) - l, tres).round().astype(int)

    # Native RFFT frequency resolution to Nyquist
    lfres = int(np.floor(l/2.0)+1)
    Nyquist = 0.5 * len(metAry) / (metAry.get_range(0, 'end') - metAry.get_range(0, 'begin'))

    # RFFT len, use native resolution by default
    n = None

    # Generate the (short) time window function
    if window is None:
        win = 1
    else:
        win = sp.signal.get_window(window, l)

    # Decide where to slice the rfft output as a ratio to Nyquist
    # Make fmax < 1 or None
    if fmax is not None:
        if fmax < Nyquist:
            fmax = fmax / Nyquist
        elif fmax >= Nyquist:
            fmax = None
            if debug:
                print("*** Warning, spec frequency range beyond Nyquist limit")

    # Check whether padding is needed
    # If fres is not specified, use the native resolution
    if fres is None:
        if fmax is None:
            # No freq limit, use native resolution
            fres = lfres
        else:
            # Still on native resolution, but truncated to fmax
            fres = int(round(fmax * lfres))
    else:
        # fres is specified
        if fmax is not None:
            # freq limit specified, work out global freq resolution
            gfres = int(round(fres / fmax))
        else:
            # No freq limit, global freq resolution is same as fres
            gfres = fres

        # Global freq resolution is greater than native freq resolution
        # Need padding for rfft
        if gfres > lfres:
            n = (gfres - 1) * 2
        elif gfres < lfres:
            # No need for padding, but throwing away freq resolution for nothing
            if debug:
                print("*** Warning, frequency resolution is artificially limited")
        # else gfres = lfres, no need for padding, native fres is just right


    # Convert fmax to array length if specified
    if fmax is not None:
        # If rfft is padded
        if n is not None:
            fmax = int(round(int(np.floor(n/2.0)+1) * fmax))
        else:
            # Otherwise just truncate from native output
            fmax = int(round(lfres * fmax))

    if debug:
        src_len = len(metAry.data[:l]*win)
        rfft_len = len(np.fft.rfft(metAry.data[:l]*win, n=n))
        print("*** l: " + str(l))
        print("*** lfres: " + str(lfres))
        print("*** Nyquist: " + str(Nyquist))
        print("*** n: " + str(n))
        print("*** fmax: " + str(fmax))
        print("*** fres: " + str(fres))
        print("*** src_len: " + str(src_len))
        print("*** rfft_len: " + str(rfft_len))


    if mag:
        # Construct a place holder of the 2D time-freq output
        tfary = np.zeros((tres, fres)).astype(float)
        for i in range(len(winlst)):
            t = winlst[i]                # Where the (short) time window starts
            # Do the rfft to length n, and slice to fmax, then take abs()
            tfary[i] = spline_resize(abs(np.fft.rfft(metAry.data[t:t+l]*win, n=n)[:fmax]), fres)
    else:
        # Construct a place holder of the 2D time-freq output
        tfary = np.zeros((tres, fres)).astype(complex)
        for i in range(len(winlst)):
            t = winlst[i]
            # Do the rfft to length n, and slice to fmax
            tfary[i] = spline_resize(np.fft.rfft(metAry.data[t:t+l]*win, n=n)[:fmax], fres)

    tfary = metaArray(tfary)

    try:
        tfary['name'] = 'STFFT{ ' + metAry['name'] + ' }'
    except:
        tfary['name'] = 'STFFT{ }'

    tfary['unit'] = metAry['unit']
    tfary['label'] = metAry['label']

    # Per axis definitions
    tfary.set_range(0, 'begin', metAry.get_range(0, 'begin'))
    tfary.set_range(0, 'end', metAry.get_range(0, 'end'))
    tfary.set_range(0, 'unit', metAry.get_range(0, 'unit'))
    tfary.set_range(0, 'label', metAry.get_range(0, 'label'))
    tfary.set_range(1, 'begin', 0)
    if f1 is None:
        tfary.set_range(1, 'end', Nyquist)
    else:
        tfary.set_range(1, 'end', f1)
    tfary.set_range(1, 'unit', 'Hz')
    tfary.set_range(1, 'label', 'Frequency')

    return tfary
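
A usage sketch under assumed parameter values; trace is a 1D time-domain metaArray and the resolutions below are only illustrative.

# Hypothetical usage; parameter values are illustrative only.
spec = stfft(trace, tres=200, fres=256, fmax=5e6)   # 200 time slices, 256 freq bins, cut at 5 MHz
print(spec.data.shape)           # (tres, fres) magnitude map
print(spec.get_range(1, 'end'))  # upper frequency bound actually used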
Example #11
def cwt(x, wavelet, scale0, scale1, res, scale=10, tres=None, debug=False):
    """
    This function will return the continuous wavelet transform of x,
    calculated by the convolution method.

    x is a 1-D metaArray

    Inputs:
        x           The data as a metaArray object.
        wavelet     Mother wavelet function (e.g. wavelet(scale0) should
                    provide the largest scale daughter wavelet)
        scale0      Starting scale length
        scale1      Stopping scale length
        res         Resolution in the scale space (i.e. number of daughter wavelets)
        tres        Resolution in the time space (Default to be same as x)

    Options:
        scale       [int|float|True|'linscal'|'linfreq']

                    int float True
                    A logarithmic scale (base 10) is used by default for the
                    production of daughter wavelets. If a number is given,
                    the log space will be generated with that base.

                    'linscal'
                    Scale length is stepped linearly (i.e. freq in 1/x space)

                    'linfreq'
                    Frequency is stepped linearly (i.e. scale length in 1/x space)


    Output:
        A 2D metaArray in the time-scale space.
    """

    data = x.data
    d_len = len(data)

    if tres is None:
        tres = len(x)
        flag_resample = False
    else:
        # Sanity check
        flag_resample = True
        resmp_time = np.arange(len(x))       # i.e. pretend 1Hz sample
        resmp_rate = float(tres) / len(x)

    # Generate a blank time-scale space array
    page = np.zeros((tres, res)).astype('complex128')
    page = metaArray(page)

    prange = page['range']
    x_range = x['range']
    prange['label'][0] = x_range['label'][0]
    prange['begin'][0] = x_range['begin'][0]
    prange['end'][0] = x_range['end'][0]
    prange['unit'][0] = x_range['unit'][0]

    prange['label'][1] = "Scale"
    prange['begin'][1] = scale0
    prange['end'][1] = scale1

    try:
        prange['unit'][1] = wavelet.unit
    except:
        pass

    if x['name'] is not None:
        page['name'] = 'cwt{' + x['name'] + '}'
    else:
        page['name'] = 'cwt'


    # Generate a list of scales
    if scale is True:
        # Log scale applied, use the default base of 10
        scale0 = np.log(scale0) / np.log(10)
        scale1 = np.log(scale1) / np.log(10)
        scl_lst = sp.logspace(scale0, scale1, res, base=10)
        prange['log'][1] = 10
    elif isinstance(scale, (int, float)):
        # Log scale applied, with the given log base
        # (checked after True, because isinstance(True, int) is also True)
        scale0 = np.log(scale0) / np.log(scale)
        scale1 = np.log(scale1) / np.log(scale)
        scl_lst = sp.logspace(scale0, scale1, res, base=scale)
        prange['log'][1] = scale
    elif scale == 'linscal':
        # print "*** lin", scale0, scale1, res, scale
        # Log scale is not applied, everything is linear
        scl_lst = sp.linspace(scale0, scale1, res)
    elif scale == 'linfreq':
        scale0 = 1 / scale0
        scale1 = 1 / scale1
        scl_lst = sp.linspace(scale0, scale1, res)
        scl_lst = 1 / scl_lst
    else:
        raise ValueError("Log scale descriptor can only be int,\
            float, True, False or None, given: " + str(scale))

    # return scl_lst, page

    if debug:
        print("There are a total number of " + str(len(scl_lst)) + " scales to be processed:")

    for i in range(len(scl_lst)):

        d_wavelet = wavelet(scl_lst[i])

        ###if debug:
        ###   print "\t line number: " + str(i) + "\t scale: " + str(scl_lst[i]) + "\t x.data: " + str(len(x.data)) + "\t wavelet: " + str(len(d_wavelet)

        if len(d_wavelet) > d_len:
            # This should not happen: the daughter wavelet is now longer than
            # the signal itself
            print("\t line number: " + str(i) + "\t scale: " + str(scl_lst[i]) + "\t data length: " + str(len(x.data)) + "\t wavelet length: " + str(len(d_wavelet)))
            raise ValueError("Daughter wavelet is longer than the signal itself!")
        else:
            line = sp.signal.convolve(data, d_wavelet, mode='same')

        if flag_resample:
            ###if debug:
            ###   print "\t resmp_time: " + str(len(resmp_time)) + "\t line: " + str(len(line)) + "\t resmp_rate: " + str(resmp_rate)

            line = resample(resmp_time, line, resmp_rate)[1]

            if debug:
                print("\t line number: " + str(i) + "\t scale: " + str(scl_lst[i]))

        page.data[:len(line), i] = line

    return page
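
A hedged usage sketch; the mother wavelet here is assumed to be any callable that returns a 1D daughter wavelet for a given scale, e.g. a Morlet generator shipped with the package or written by the user.

# Hypothetical usage; 'morlet' and the scale bounds are illustrative.
tf = cwt(trace, morlet, scale0=1e-6, scale1=1e-3, res=128)   # log-spaced scales, base 10
print(tf.data.shape)    # (time resolution, 128 scales), complex valued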
Example #12
def stfft(metAry, tres=100, fres=None, window='blackmanharris', \
            fmin=None, fmax=None, mag=True, debug=False):
    """
    Simple implementation of short time fast Fourier transform on metaArray
    object.

    metAry      Input metaArray
    tres        Temporal resolution
    fres        Frequency resolution
    window      Window function or None
    fmin        Lower cut-off frequency for the returned data, defaults to 0
    fmax        Upper cut-off frequency for the returned data, defaults to the Nyquist frequency
    mag         Return the abs() magnitude by default; return a complex array if False

    Each window will overlap 50% with its immediate neighbours. e.g.:

    |_____________________________________________|
    | | 1 | 3 | 5 | 7 | 9 | 11| 13| 15| 17| 19| 21|
    | 0 | 2 | 4 | 6 | 8 | 10| 12| 14| 16| 18| 20| |

    """
    f1 = fmax

    # Length of the (short) time window
    l = int(round(2 * len(metAry) / float(tres)))

    # List of (short) time window starting points
    winlst = sp.linspace(0, len(metAry) - l, tres).round().astype(int)

    # Native RFFT frequency resolution to Nyquist
    lfres = int(np.floor(l / 2.0) + 1)
    Nyquist = 0.5 * len(metAry) / (metAry.get_range(0, 'end') -
                                   metAry.get_range(0, 'begin'))

    # RFFT len, use native resolution by default
    n = None

    # Generate the (short) time window function
    if window is None:
        win = 1
    else:
        win = sp.signal.get_window(window, l)

    # Decide where to slice the rfft output as a ratio to Nyquist
    # Make fmax < 1 or None
    if fmax is not None:
        if fmax < Nyquist:
            fmax = fmax / Nyquist
        elif fmax >= Nyquist:
            fmax = None
            if debug:
                print("*** Warning, spec frequency range beyond Nyquist limit")

    # Check whether padding is needed
    # If fres is not specified, use the native resolution
    if fres is None:
        if fmax is None:
            # No freq limit, use native resolution
            fres = lfres
        else:
            # Still on native resolution, but truncated to fmax
            fres = int(round(fmax * lfres))
    else:
        # fres is specified
        if fmax is not None:
            # freq limit specified, work out global freq resolution
            gfres = int(round(fres / fmax))
        else:
            # No freq limit, global freq resolution is same as fres
            gfres = fres

        # Global freq resolution is greater than native freq resolution
        # Need padding for rfft
        if gfres > lfres:
            n = (gfres - 1) * 2
        elif gfres < lfres:
            # No need for padding, but throwing away freq resolution for nothing
            if debug:
                print(
                    "*** Warning, frequency resolution is artificially limited"
                )
        # else gfres = lfres, no need for padding, native fres is just right

    # Convert fmax to array length if specified
    if fmax is not None:
        # If rfft is padded
        if n is not None:
            fmax = int(round(int(np.floor(n / 2.0) + 1) * fmax))
        else:
            # Otherwise just truncate from native output
            fmax = int(round(lfres * fmax))

    if debug:
        src_len = len(metAry.data[:l] * win)
        rfft_len = len(np.fft.rfft(metAry.data[:l] * win, n=n))
        print("*** l: " + str(l))
        print("*** lfres: " + str(lfres))
        print("*** Nyquist: " + str(Nyquist))
        print("*** n: " + str(n))
        print("*** fmax: " + str(fmax))
        print("*** fres: " + str(fres))
        print("*** src_len: " + str(src_len))
        print("*** rfft_len: " + str(rfft_len))

    if mag:
        # Construct a place holder of the 2D time-freq output
        tfary = np.zeros((tres, fres)).astype(float)
        for i in range(len(winlst)):
            t = winlst[i]  # Where the (short) time window starts
            # Do the rfft to length n, and slice to fmax, then take abs()
            tfary[i] = spline_resize(
                abs(np.fft.rfft(metAry.data[t:t + l] * win, n=n)[:fmax]), fres)
    else:
        # Construct a place holder of the 2D time-freq output
        tfary = np.zeros((tres, fres)).astype(complex)
        for i in range(len(winlst)):
            t = winlst[i]
            # Do the rfft to length n, and slice to fmax
            tfary[i] = spline_resize(
                np.fft.rfft(metAry.data[t:t + l] * win, n=n)[:fmax], fres)

    tfary = metaArray(tfary)

    try:
        tfary['name'] = 'STFFT{ ' + metAry['name'] + ' }'
    except:
        tfary['name'] = 'STFFT{ }'

    tfary['unit'] = metAry['unit']
    tfary['label'] = metAry['label']

    # Per axis definitions
    tfary.set_range(0, 'begin', metAry.get_range(0, 'begin'))
    tfary.set_range(0, 'end', metAry.get_range(0, 'end'))
    tfary.set_range(0, 'unit', metAry.get_range(0, 'unit'))
    tfary.set_range(0, 'label', metAry.get_range(0, 'label'))
    tfary.set_range(1, 'begin', 0)
    if f1 is None:
        tfary.set_range(1, 'end', Nyquist)
    else:
        tfary.set_range(1, 'end', f1)
    tfary.set_range(1, 'unit', 'Hz')
    tfary.set_range(1, 'label', 'Frequency')

    return tfary
Example #13
def cwt(x, wavelet, scale0, scale1, res, scale=10, tres=None, debug=False):
    """
    This function will return the continuous wavelet transform of x,
    calculated by the convolution method.

    x is a 1-D metaArray

    Inputs:
        x           The data as a metaArray object.
        wavelet     Mother wavelet function (e.g. wavelet(scale0) should
                    provide the largest scale daughter wavelet)
        scale0      Starting scale length
        scale1      Stopping scale length
        res         Resolution in the scale space (i.e. number of daughter wavelets)
        tres        Resolution in the time space (Default to be same as x)

    Options:
        scale       [int|float|True|'linscal'|'linfreq']

                    int float True
                    A logarithmic scale (base 10) is used by default for the
                    production of daughter wavelets. If a number is given,
                    the log space will be generated with that base.

                    'linscal'
                    Scale length is stepped linearly (i.e. freq in 1/x space)

                    'linfreq'
                    Frequency is stepped linearly (i.e. scale length in 1/x space)


    Output:
        A 2D metaArray in the time-scale space.
    """

    data = x.data
    d_len = len(data)

    if tres is None:
        tres = len(x)
        flag_resample = False
    else:
        # Sanity check
        flag_resample = True
        resmp_time = np.arange(len(x))  # i.e. pretend 1Hz sample
        resmp_rate = float(tres) / len(x)

    # Generate a blank time-scale space array
    page = np.zeros((tres, res)).astype('complex128')
    page = metaArray(page)

    prange = page['range']
    x_range = x['range']
    prange['label'][0] = x_range['label'][0]
    prange['begin'][0] = x_range['begin'][0]
    prange['end'][0] = x_range['end'][0]
    prange['unit'][0] = x_range['unit'][0]

    prange['label'][1] = "Scale"
    prange['begin'][1] = scale0
    prange['end'][1] = scale1

    try:
        prange['unit'][1] = wavelet.unit
    except:
        pass

    if x['name'] is not None:
        page['name'] = 'cwt{' + x['name'] + '}'
    else:
        page['name'] = 'cwt'

    # Generate a list of scales
    if scale is True:
        # Log scale applied, use the default base of 10
        scale0 = np.log(scale0) / np.log(10)
        scale1 = np.log(scale1) / np.log(10)
        scl_lst = sp.logspace(scale0, scale1, res, base=10)
        prange['log'][1] = 10
    elif isinstance(scale, (int, float)):
        # Log scale applied, with the given log base
        # (checked after True, because isinstance(True, int) is also True)
        scale0 = np.log(scale0) / np.log(scale)
        scale1 = np.log(scale1) / np.log(scale)
        scl_lst = sp.logspace(scale0, scale1, res, base=scale)
        prange['log'][1] = scale
    elif scale == 'linscal':
        # print "*** lin", scale0, scale1, res, scale
        # Log scale is not applied, everything is linear
        scl_lst = sp.linspace(scale0, scale1, res)
    elif scale == 'linfreq':
        scale0 = 1 / scale0
        scale1 = 1 / scale1
        scl_lst = sp.linspace(scale0, scale1, res)
        scl_lst = 1 / scl_lst
    else:
        raise ValueError("Log scale descriptor can only be int,\
            float, True, False or None, given: " + str(scale))

    # return scl_lst, page

    if debug:
        print("There are a total number of " + str(len(scl_lst)) +
              " scales to be processed:")

    for i in range(len(scl_lst)):

        d_wavelet = wavelet(scl_lst[i])

        ###if debug:
        ###   print "\t line number: " + str(i) + "\t scale: " + str(scl_lst[i]) + "\t x.data: " + str(len(x.data)) + "\t wavelet: " + str(len(d_wavelet)

        if len(d_wavelet) > d_len:
            # This should not happen: the daughter wavelet is now longer than
            # the signal itself
            print("\t line number: " + str(i) + "\t scale: " +
                  str(scl_lst[i]) + "\t data length: " + str(len(x.data)) +
                  "\t wavelet length: " + str(len(d_wavelet)))
            raise ValueError(
                "Daughter wavelet is longer than the signal itself!")
        else:
            line = sp.signal.convolve(data, d_wavelet, mode='same')

        if flag_resample:
            ###if debug:
            ###   print "\t resmp_time: " + str(len(resmp_time)) + "\t line: " + str(len(line)) + "\t resmp_rate: " + str(resmp_rate)

            line = resample(resmp_time, line, resmp_rate)[1]

            if debug:
                print("\t line number: " + str(i) + "\t scale: " +
                      str(scl_lst[i]))

        page.data[:len(line), i] = line

    return page
Example #14
    def __getitem__(self, index):
        """
        Return metaArray object of the given index item in the file
        """

        # sanity check
        assert type(index) is int, "Given index is not int type: %r" % index

        idx = self.idx
        #if index < 0 or index >= len(idx):
        #    raise IndexError, "Requested index outside range (" + str(len(idx)) + ")."

        # [hdr_pos, hdr_len, data_pos, data_len, hdr_dict, unpack_str]
        hdr_pos, hdr_len, data_pos, data_len, hdr_dict, unpack_str = idx[index]

        if unpack_str is None:
            raise ValueError("Do not know how to decode the data byte stream.")

        # Read in the binary
        f = self.open()
        f.seek(data_pos)
        data = f.read(data_len)
        f.close()

        data = array(unpack(unpack_str, data))

        # Attempt to scale the data
        # YMU
        if 'YMU' in hdr_dict:
            YMU = hdr_dict['YMU']
        elif 'YMULT' in hdr_dict:
            YMU = hdr_dict['YMULT']
        else:
            YMU = 1

        # YOF
        if 'YOF' in hdr_dict:
            YOF = hdr_dict['YOF']
        elif 'YOFF' in hdr_dict:
            YOF = hdr_dict['YOFF']
        else:
            YOF = 0

        # YZE
        if 'YZE' in hdr_dict:
            YZE = hdr_dict['YZE']
        elif 'YZERO' in hdr_dict:
            YZE = hdr_dict['YZERO']
        else:
            YZE = 0

        data = YZE + YMU * (data - YOF)

        # Attempt to label the data
        data = metaArray(data)

        # data['unit']
        if 'YUN' in hdr_dict:
            data['unit'] = hdr_dict['YUN']
        elif 'YUNIT' in hdr_dict:
            data['unit'] = hdr_dict['YUNIT']

        # data['label']
        if 'COMP' in hdr_dict:
            data['label'] = hdr_dict['COMP']
        elif 'PT_F' in hdr_dict:
            data['label'] = hdr_dict['PT_F']
        elif 'PT_FMT' in hdr_dict:
            data['label'] = hdr_dict['PT_FMT']

        # XUN
        if 'XUN' in hdr_dict:
            data.set_range(0, 'unit', hdr_dict['XUN'])
        elif 'XUNIT' in hdr_dict:
            data.set_range(0, 'unit', hdr_dict['XUNIT'])


        # WFI
        WFI = None
        if 'WFI' in hdr_dict:
            WFI = hdr_dict['WFI']
        elif 'WFID' in hdr_dict:
            WFI = hdr_dict['WFID']
        else:
            for key in hdr_dict.keys():
                if key.find(':WFI') != -1:
                    WFI = hdr_dict[key]
                    break

        # data['name']
        data['name'] = self.file_path.name + '[' + str(index) + ']'

        if WFI is not None:
            chansep = WFI.find(',')
            data.set_range(0, 'label', WFI[:chansep])
            # data['name'] += WFI[chansep:]

        # scale the x-axis

        # XIN
        if 'XIN' in hdr_dict:
            XIN = hdr_dict['XIN']
        elif 'XINCR' in hdr_dict:
            XIN = hdr_dict['XINCR']
        else:
            XIN = 1

        # PT_O
        if 'PT_O' in hdr_dict:
            PT_O = hdr_dict['PT_O']
        elif 'PT_OFF' in hdr_dict:
            PT_O = hdr_dict['PT_OFF']
        else:
            PT_O = 0

        # XZE
        if 'XZE' in hdr_dict:
            XZE = hdr_dict['XZE']
        else:
            XZE = PT_O * -XIN
        #elif hdr_dict.has_key('PT_OFF'):
        #    XZE = PT_O * -XIN
        #else:
        #    XZE = 0

        data.set_range(0, 'begin', XZE)
        data.set_range(0, 'end', XZE + XIN * len(data))

        # Include the rest of the metainfo into metaArray
        for field, value in hdr_dict.items():
            data["isf."+field] = value

        data.update_range()

        return data
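
A usage sketch under the assumption that the surrounding class (named isf here) is constructed from a Tektronix .isf file path and exposes the indexing shown above.

# Hypothetical usage; the class name 'isf' and the file name are assumptions.
traces = isf('capture.isf')
first = traces[0]                       # metaArray of the first record
print(first['name'], first['unit'], first.get_range(0, 'unit'))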
Example #15
    def __getitem__(self, index):
        """
        Return metaArray object of the given index item in the file
        """

        # sanity check
        assert type(index) is int, "Given index is not int type: %r" % index

        idx = self.idx
        #if index < 0 or index >= len(idx):
        #    raise IndexError, "Requested index outside range (" + str(len(idx)) + ")."

        # [hdr_pos, hdr_len, data_pos, data_len, hdr_dict, unpack_str]
        hdr_pos, hdr_len, data_pos, data_len, hdr_dict, unpack_str = idx[index]

        if unpack_str is None:
            raise ValueError("Do not know how to decode the data byte stream.")

        # Read in the binary
        f = self.open()
        f.seek(data_pos)
        data = f.read(data_len)
        f.close()

        data = array(unpack(unpack_str, data))

        # Attempt to scale the data
        # YMU
        if 'YMU' in hdr_dict:
            YMU = hdr_dict['YMU']
        elif 'YMULT' in hdr_dict:
            YMU = hdr_dict['YMULT']
        else:
            YMU = 1

        # YOF
        if 'YOF' in hdr_dict:
            YOF = hdr_dict['YOF']
        elif 'YOFF' in hdr_dict:
            YOF = hdr_dict['YOFF']
        else:
            YOF = 0

        # YZE
        if 'YZE' in hdr_dict:
            YZE = hdr_dict['YZE']
        elif 'YZERO' in hdr_dict:
            YZE = hdr_dict['YZERO']
        else:
            YZE = 0

        data = YZE + YMU * (data - YOF)

        # Attempt to label the data
        data = metaArray(data)

        # data['unit']
        if 'YUN' in hdr_dict:
            data['unit'] = hdr_dict['YUN']
        elif 'YUNIT' in hdr_dict:
            data['unit'] = hdr_dict['YUNIT']

        # data['label']
        if 'COMP' in hdr_dict:
            data['label'] = hdr_dict['COMP']
        elif 'PT_F' in hdr_dict:
            data['label'] = hdr_dict['PT_F']
        elif 'PT_FMT' in hdr_dict:
            data['label'] = hdr_dict['PT_FMT']

        # XUN
        if 'XUN' in hdr_dict:
            data.set_range(0, 'unit', hdr_dict['XUN'])
        elif 'XUNIT' in hdr_dict:
            data.set_range(0, 'unit', hdr_dict['XUNIT'])

        # WFI
        WFI = None
        if 'WFI' in hdr_dict:
            WFI = hdr_dict['WFI']
        elif 'WFID' in hdr_dict:
            WFI = hdr_dict['WFID']
        else:
            for key in hdr_dict.keys():
                if key.find(':WFI') != -1:
                    WFI = hdr_dict[key]
                    break

        # data['name']
        data['name'] = self.file_path.name + '[' + str(index) + ']'

        if WFI is not None:
            chansep = WFI.find(',')
            data.set_range(0, 'label', WFI[:chansep])
            # data['name'] += WFI[chansep:]

        # scale the x-axis

        # XIN
        if 'XIN' in hdr_dict:
            XIN = hdr_dict['XIN']
        elif 'XINCR' in hdr_dict:
            XIN = hdr_dict['XINCR']
        else:
            XIN = 1

        # PT_O
        if 'PT_O' in hdr_dict:
            PT_O = hdr_dict['PT_O']
        elif 'PT_OFF' in hdr_dict:
            PT_O = hdr_dict['PT_OFF']
        else:
            PT_O = 0

        # XZE
        if 'XZE' in hdr_dict:
            XZE = hdr_dict['XZE']
        else:
            XZE = PT_O * -XIN
        #elif hdr_dict.has_key('PT_OFF'):
        #    XZE = PT_O * -XIN
        #else:
        #    XZE = 0

        data.set_range(0, 'begin', XZE)
        data.set_range(0, 'end', XZE + XIN * len(data))

        # Include the rest of the metainfo into metaArray
        for field, value in hdr_dict.items():
            data["isf." + field] = value

        data.update_range()

        return data