Code Example #1
def resample(Data, q):
    '''
    Downsample data by slicing with quotient q.
    
    Parameters
    ----------
    Data: DataObj
        Data object to resample
    q: int
        Quotient of the resampling. The sample rate must be divisible by q (sample_rate % q == 0).

    '''
    if Data.sample_rate % q != 0:
        raise ValueError('sample_rate must be divisible by q')
    else:
        #reading data
        data = Data.data
        new_data = data[::q, :]  # keep every q-th sample ([0:-1:q] wrongly dropped the last point)
        # calculate new sample rate
        new_sample_rate = Data.sample_rate // q  # exact integer; divisibility checked above
        # creating new time_vec
        new_time_vec = Data.time_vec[::q]
        # creating new DataObj
        newData = DataObj(new_data, new_sample_rate, Data.amp_unit,
                          Data.ch_labels, new_time_vec, Data.bad_channels)
    return newData
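
A minimal usage sketch (hypothetical, not from the project): it assumes pyhfo's DataObj constructor takes (data, sample_rate, amp_unit, ch_labels, time_vec, bad_channels), as the calls above suggest, and feeds it synthetic data:

import numpy as np

sr = 1000  # Hz
time_vec = np.linspace(0, 10, 10 * sr, endpoint=False)  # 10 s of samples
raw = np.random.randn(time_vec.size, 4)                 # 4 synthetic channels
Data = DataObj(raw, sr, 'uV', ['ch1', 'ch2', 'ch3', 'ch4'], time_vec, [])
down = resample(Data, 4)  # keep every 4th sample -> 250 Hz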
Code Example #2
def create_avg(Data):
    ''' 
    Create an average-reference montage, excluding bad_channels.
    
    Parameters
    ----------
    Data: DataObj
        Data object
    '''
    # get non-bad channels index
    index = [
        ch for ch in range(Data.n_channels) if ch not in Data.bad_channels
    ]
    # pre-allocate output filled with NaN; bad channels stay NaN
    avg = np.full(Data.data.shape, np.nan)
    avg_label = []
    # common average over the good channels
    ref = np.mean(Data.data[:, index], axis=1)
    for ch in index:
        avg[:, ch] = Data.data[:, ch] - ref
    for ch in range(Data.n_channels):
        avg_label.append(Data.ch_labels[ch] + '-avg')

    newData = DataObj(avg, Data.sample_rate, Data.amp_unit, avg_label,
                      Data.time_vec, Data.bad_channels)
    return newData
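
A hedged sketch, reusing the synthetic raw, sr, and time_vec from the resample example above; channel index 2 is flagged bad so it is excluded from the reference:

bad = DataObj(raw, sr, 'uV', ['ch1', 'ch2', 'ch3', 'ch4'], time_vec, [2])
avg = create_avg(bad)  # each good channel re-referenced to the mean of the good channels
# avg.data[:, 2] stays NaN because channel 2 is marked bad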
Code Example #3
def loadMAT(slice_filename,parameters_filename):
    '''
    Created to convert .mat files with the specific configuration used for
    ECoG data from Newcastle Hospitals, returning a DataObj.
    To load other .mat files, use scipy.io.loadmat and create_DataObj.
    
    Parameters
    ----------
    slice_filename: str 
        Name of the slice (.mat) file 
    parameters_filename: str 
        Name of the parameters (.mat) file
    
    '''
    mat = sio.loadmat(parameters_filename, struct_as_record=False, squeeze_me=True)
    parameters = mat['parameters']
    ch_l = parameters.channels
    ch_labels = [str(x) for x in ch_l]
    sample_rate = parameters.sr
    f = sio.loadmat(slice_filename, struct_as_record=False, squeeze_me=True)
    Data = f['Data']
    time_vec = Data.time_vec
    signal = Data.raw.T
    amp_unit = r'$\mu V$'  # raw string: avoids treating \m as an escape sequence
    Data = DataObj(signal,sample_rate,amp_unit,ch_labels,time_vec,[])
    return Data
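
A hypothetical call; both file names are placeholders, and the .mat files must follow the Newcastle layout described above (a 'parameters' struct with channels and sr fields, and a 'Data' struct with time_vec and raw):

Data = loadMAT('slice01.mat', 'parameters.mat')
print(Data.sample_rate, len(Data.ch_labels))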
Code Example #4
def merge(Data1, Data2, new_time=False):
    '''    
    Merge two DataObj into a single one.
    
    Parameters
    ----------
    Data1: DataObj
        Data object to merge
    Data2: DataObj
        Data object to merge
    new_time: bool, optional
        False (default) - concatenate the time_vec of the two DataObj
        True - create a new time_vec starting at 0
    '''

    # check that the sample rates match, then keep it
    if Data1.sample_rate != Data2.sample_rate:
        raise ValueError('Data objects must have the same sample_rate')
    sample_rate = Data1.sample_rate

    # check that the channel counts match
    if Data1.n_channels != Data2.n_channels:
        raise ValueError('Data objects must have the same n_channels')

    # check that the amplitude units match
    if Data1.amp_unit != Data2.amp_unit:
        raise ValueError('Data objects must have the same amplitude unit')
    amp_unit = Data1.amp_unit

    # get the labels from Data1
    ch_labels = Data1.ch_labels
    # merge the bad channels of both objects
    bad_channels = sorted(set(Data1.bad_channels) | set(Data2.bad_channels))

    # get data and time_vec from Data1
    data1 = Data1.data
    time_vec1 = Data1.time_vec
    # get data and time_vec from Data2
    data2 = Data2.data
    time_vec2 = Data2.time_vec

    # concatenate the data along the time axis
    new_data = np.concatenate((data1, data2), axis=0)

    if new_time:
        n_points = new_data.shape[0]
        end_time = n_points / sample_rate
        new_time_vec = np.linspace(0, end_time, n_points, endpoint=False)
    else:
        # concatenate the time vectors
        new_time_vec = np.concatenate((time_vec1, time_vec2), axis=0)

    # creating new DataObj
    newData = DataObj(new_data, sample_rate, amp_unit, ch_labels, new_time_vec,
                      bad_channels)
    return newData
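
A sketch of both merge modes, assuming Data1 and Data2 are compatible DataObj instances (same sample rate, channel count, and amplitude unit), e.g. two consecutive slices of one recording:

merged = merge(Data1, Data2)                  # time vectors concatenated as-is
rebased = merge(Data1, Data2, new_time=True)  # same samples, time axis restarted at 0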
Code Example #5
def pop_channel(Data, ch):
    '''
    Create a new DataObj with just the chosen channel.
    
    Parameters
    ----------
    Data: DataObj
        Data object to take the channel from
    ch: int
        Channel number
    '''
    if len(Data.data.shape) == 1:
        raise Exception('DataObj is single channel')
    signal = Data.data[:, ch]
    label = Data.ch_labels[ch]
    newData = DataObj(signal, Data.sample_rate, Data.amp_unit, label,
                      Data.time_vec)
    return newData
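
For example, pulling out the first channel of the synthetic DataObj from the resample sketch:

ch0 = pop_channel(Data, 0)  # single-channel DataObj labelled 'ch1'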
Code Example #6
def decimate(Data, q):
    '''
    Use scipy.signal.decimate to create a new DataObj with a lower sample rate.

    Parameters
    ----------
    Data: DataObj
        Data object to resample
    q: int
        Decimation factor (downsampling quotient).
    '''
    #reading data
    data = Data.data
    # get shape
    if len(data.shape) == 1:
        nch = 1
        npoints = data.shape[0]
    else:
        npoints, nch = data.shape
    # pre-allocate output; scipy.signal.decimate returns ceil(npoints / q) samples
    new_data = np.full((int(np.ceil(npoints / q)), nch), np.nan)
    # decimate each channel
    if nch == 1:
        new_data = sig.decimate(data, q)
    else:
        for ch in range(nch):
            new_data[:, ch] = sig.decimate(data[:, ch], q)
    # calculate new sample rate
    new_sample_rate = Data.sample_rate / q
    # creating new time_vec
    new_time_vec = Data.time_vec[::q]  # length ceil(npoints / q), matching the decimated data
    # creating new Data
    newData = DataObj(new_data, new_sample_rate, Data.amp_unit, Data.ch_labels,
                      new_time_vec, Data.bad_channels)
    return newData
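
Unlike resample above, decimate applies scipy's anti-aliasing filter before downsampling; a sketch on the same synthetic DataObj:

down = decimate(Data, 4)  # low-pass filter, then keep every 4th sample
print(down.sample_rate)   # 250.0 for a 1000 Hz input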
Code Example #7
def loadRDH(filename):
    ''' 
    Created to load 64 channels on port A; the code needs changes for a
    different configuration. It uses RHD.py to read the file and returns a DataObj.
    
    Parameters
    ----------
    filename: str
        Name of the Intan (.rhd) file
    
    '''
    # load file
    myData = RHD.openRhd(filename)
    # get sample rate
    sample_rate = myData.sample_rate
    # get channels 
    myChannels = myData.channels
    # pre-allocate the signal array filled with NaN
    signal = np.full((myChannels['A-000'].getTrace().size, 64), np.nan)
    labels = [] 
    for ch in range(64):
        label = 'A-{:03d}'.format(ch)  # Intan channel names are zero-padded to three digits
        signal[:,ch] = myChannels[label].getTrace()
        labels.append(label)
    signal *= 0.195  # per Intan's spec, multiply by 0.195 to convert to microvolts
    amp_unit = r'$\mu V$'
    # Time vector   
    n_points  = signal.shape[0]
    end_time  = n_points/sample_rate
    time_vec  = np.linspace(0,end_time,n_points,endpoint=False)
    Data = DataObj(signal,sample_rate,amp_unit,labels,time_vec,[])
    return Data
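
A hypothetical call (the .rhd file name is a placeholder; the recording must have 64 amplifier channels on port A):

Data = loadRDH('recording.rhd')
print(Data.data.shape)  # (n_points, 64), amplitudes in microvolts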
Code Example #8
File: i_functions.py  Project: britodasilva/pyhfo
def open_dataset(file_name,dataset_name,htype = 'auto'):
    '''
    Open a dataset from the given file_name.
    
    Parameters
    ----------
    file_name: str 
        Name of the HDF5 (.h5) file 
    dataset_name: str
        Name of dataset to open
    htype: str, optional
        auto (the default) - read htype from HDF file 
        Data - DataObj type
        Spike - SpikeObj type
        hfo - hfoObj type
    '''
    # reading h5 file
    h5 = h5py.File(file_name,'r+')
    # loading dataset
    dataset = h5[dataset_name]
    # getting htype
    if htype == 'auto':
        htype = dataset.attrs['htype']      
    
    if htype == 'Data':
        # Sample Rate attribute
        sample_rate = dataset.attrs['SampleRate[Hz]']
        n_points         = dataset.shape[0]
        end_time         = n_points/sample_rate
        # Amplitude Unit
        if 'amp_unit' in dataset.attrs:
            amp_unit = dataset.attrs['amp_unit']
        else:
            amp_unit = 'AU'
        # Time vector
        if 'Time_vec_edge' in dataset.attrs:
            edge = dataset.attrs['Time_vec_edge']
            if edge[0] == edge[1]:
                time_vec = np.linspace(0,end_time,n_points,endpoint=False)
            else:
                time_vec = np.linspace(edge[0],edge[1],n_points,endpoint=False)
        else:
            time_vec = np.linspace(0,end_time,n_points,endpoint=False)
        # Check for a 'Bad_channels' attribute; if absent, create an empty one
        if 'Bad_channels' not in dataset.attrs:
            dataset.attrs.create("Bad_channels", [], dtype=int)
        # Load bad channels
        bad_channels = dataset.attrs["Bad_channels"]    
        # Create the DataObj
        Data = DataObj(dataset[:], sample_rate, amp_unit, dataset.attrs['Channel_Labels'],
                       time_vec, bad_channels, file_name, dataset_name)
    
    elif htype == 'list':
        # event list: rebuild each stored event
        keys = dataset.keys()
        ch_labels = dataset.attrs['ch_labels']
        time_edge = dataset.attrs['time_edge']
        Data = EventList(ch_labels,time_edge,file_name,dataset_name)
        for k in keys:
            waveform =  dataset[k][:]
            tstamp = dataset[k].attrs['tstamp']
            evhtype = dataset[k].attrs['htype']
            channel = dataset[k].attrs['channel']
            if evhtype == 'Spike':
                clus = dataset[k].attrs['cluster']
                feat = dataset[k].attrs['features']
                time_edge = dataset[k].attrs['time_edge'] 
                spk = SpikeObj(channel,waveform,tstamp,clus,feat,time_edge)
                Data.__addEvent__(spk)
            elif evhtype == 'HFO':
                
                tstamp_idx = dataset[k].attrs['tstamp_idx'] 
                start_idx  = dataset[k].attrs['start_idx']
                end_idx  = dataset[k].attrs['end_idx']
                ths_value  = dataset[k].attrs['ths_value']
                sample_rate = dataset[k].attrs['sample_rate']
                cutoff = dataset[k].attrs['cutoff']
                info = dataset[k].attrs['info']
                hfo = hfoObj(channel,tstamp,tstamp_idx, waveform,start_idx,end_idx,ths_value,sample_rate,cutoff,info)
                Data.__addEvent__(hfo)
    
    else:
        h5.close()
        raise ValueError('unknown htype: %r' % htype)
    h5.close()
    return Data
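
Hypothetical calls for the two branches (file and dataset names are placeholders):

rec = open_dataset('session.h5', 'raw_eeg')                      # htype read from the file's attrs
events = open_dataset('session.h5', 'event_list', htype='list')  # force the EventList branch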
Code Example #9
def eegfilt(Data,
            low_cut=None,
            high_cut=None,
            order=None,
            window=('kaiser', 0.5)):
    '''
    Filter an EEG Data object with a FIR filter.
    
    Parameters
    ----------
    Data: DataObj
        Data object to filter
    low_cut: int, optional
        Low cutoff frequency. If None, a low-pass filter with cutoff
        frequency high_cut is created.
    high_cut: int, optional
        High cutoff frequency. If None, a high-pass filter with cutoff
        frequency low_cut is created.
    order: int, optional
        None (default) - number of taps set to sample_rate / 10 + 1
    window : string or tuple of string and parameter values
        Desired window to use. See `scipy.signal.get_window` for a list
        of windows and required parameters.
    '''
    if low_cut is None and high_cut is None:
        raise ValueError('You must specify at least one cutoff frequency')

    signal = Data.data
    sample_rate = Data.sample_rate
    time_vec = Data.time_vec
    labels = Data.ch_labels
    if len(signal.shape) == 1:
        nch = 1
        npoints = signal.shape[0]
    else:
        npoints, nch = signal.shape
    # order
    if order is None:
        numtaps = int(sample_rate / 10 + 1)
    else:
        numtaps = order
    # cutoff frequencies
    if high_cut is None:  # high pass
        f = [low_cut]
        pass_zero = False
    elif low_cut is None:  # low pass
        f = [high_cut]
        pass_zero = True
    else:  # band pass
        f = [low_cut, high_cut]
        pass_zero = False

    # Creating the FIR filter; cutoffs are in Hz, with the sample rate passed
    # via fs (replaces the deprecated nyq argument)
    b = sig.firwin(numtaps, f, pass_zero=pass_zero, window=window, fs=sample_rate)
    # pre-allocate the filtered output with NaN (bad channels stay NaN)
    filtered = np.full((npoints, nch), np.nan)
    if nch == 1:
        print('Filtering channel')
        filtered = sig.filtfilt(b, np.array([1]), signal)
    else:
        for ch in range(nch):
            if ch not in Data.bad_channels:
                print('Filtering channel ' + labels[ch])
                filtered[:, ch] = sig.filtfilt(b, np.array([1]), signal[:, ch])

    newData = DataObj(filtered, sample_rate, Data.amp_unit, labels, time_vec,
                      Data.bad_channels)
    return newData
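
A sketch of the three filter modes on the synthetic DataObj from the resample example; 80-250 Hz is a typical ripple band in HFO work:

hp = eegfilt(Data, low_cut=1)                 # high pass at 1 Hz
lp = eegfilt(Data, high_cut=40)               # low pass at 40 Hz
bp = eegfilt(Data, low_cut=80, high_cut=250)  # band pass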
Code Example #10
def open_dataset(file_name,dataset_name,htype = 'auto'):
    '''
    Open a dataset from the given file_name.
    
    Parameters
    ----------
    file_name: str 
        Name of the HDF5 (.h5) file 
    dataset_name: str
        Name of dataset to open
    htype: str, optional
        auto (the default) - read htype from HDF file 
        Data - DataObj type
        Spike - SpikeObj type
        hfo - hfoObj type
    '''
    # reading h5 file
    h5 = h5py.File(file_name,'r+')
    # loading dataset
    dataset = h5[dataset_name]
    # getting htype
    if htype == 'auto':
        htype = dataset.attrs['htype']      
    
    if htype == 'Data':
        # Sample Rate attribute
        sample_rate = dataset.attrs['SampleRate[Hz]']
        n_points         = dataset.shape[0]
        end_time         = n_points/sample_rate
        # Amplitude Unit
        if 'amp_unit' in dataset.attrs:
            amp_unit = dataset.attrs['amp_unit']
        else:
            amp_unit = 'AU'
        # Time vector
        if 'Time_vec_edge' in dataset.attrs:
            edge = dataset.attrs['Time_vec_edge']
            time_vec = np.linspace(edge[0],edge[1],n_points,endpoint=False)
        else:
            time_vec = np.linspace(0,end_time,n_points,endpoint=False)
        # Check for a 'Bad_channels' attribute; if absent, create an empty one
        if 'Bad_channels' not in dataset.attrs:
            dataset.attrs.create("Bad_channels", [], dtype=int)
        # Load bad channels
        bad_channels = dataset.attrs["Bad_channels"]    
        # Create the DataObj
        Data = DataObj(dataset[:], sample_rate, amp_unit, dataset.attrs['Channel_Labels'],
                       time_vec, bad_channels, file_name, dataset_name)
        
    elif htype == 'list':
        # event list: rebuild each stored event
        keys = dataset.keys()
        ch_labels = dataset.attrs['ch_labels']
        time_edge = dataset.attrs['time_edge']
        Data = EventList(ch_labels,time_edge,file_name,dataset_name)
        for k in keys:
            waveform =  dataset[k][:]
            tstamp = dataset[k].attrs['tstamp']
            evhtype = dataset[k].attrs['htype']
            if evhtype == 'Spike':
                clus = dataset[k].attrs['cluster']
                feat = dataset[k].attrs['features'] 
                spk = SpikeObj(waveform,tstamp,clus,feat)
                Data.__addEvent__(spk)
            elif evhtype == 'HFO':
                channel = dataset[k].attrs['channel']
                tstamp_idx = dataset[k].attrs['tstamp_idx'] 
                start_idx  = dataset[k].attrs['start_idx']
                end_idx  = dataset[k].attrs['end_idx']
                ths_value  = dataset[k].attrs['ths_value']
                sample_rate = dataset[k].attrs['sample_rate']
                cutoff = dataset[k].attrs['cutoff']
                info = dataset[k].attrs['info']
                hfo = hfoObj(channel,tstamp,tstamp_idx, waveform,start_idx,end_idx,ths_value,sample_rate,cutoff,info)
                Data.__addEvent__(hfo)
    
    else:
        h5.close()
        raise ValueError('unknown htype: %r' % htype)
    h5.close()
    return Data