Example #1
def process_physio(files, resample=False, load_time=True):
    """
    A function to load the contents of all physio monitor
    files from one experiment folder into memory
    Args:
        -files: list of files to load/concatenate
        -resample: if a number, resamples to 'resample' Hz
        -load_time: if True, loads the duration of the recording in seconds
    Returns:
        -dsets: full concatenated data sets
    """
    global serial_chans
    ##order the files
    files = order_files(files)
    tdms = []
    dsets = {}
    for f in files:
        print("loading " + f)
        data = load_physio(f, resample, load_time)
        tdms.append(data)
    for chan in serial_chans.values():
        dsets[chan] = np.hstack([x[chan] for x in tdms])
    if load_time:
        times = [x['time'] for x in tdms]
        dsets['time'] = np.sum(times)
    return dsets
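
A minimal usage sketch for process_physio; the folder path and glob pattern are hypothetical, and order_files, load_physio, and serial_chans are assumed to be defined elsewhere in this module:
import glob

## collect the TDMS files from one (hypothetical) experiment folder
files = glob.glob('/data/experiment_01/physio*.tdms')
## load and concatenate all files, resampling to 100 Hz
dsets = process_physio(files, resample=100, load_time=True)
print(dsets['time'])  ##total recording duration in seconds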
Example #2
def save_bp(files, path_out=None, resample=False, load_time=True):
    """
    Function to create hdf5 file from bp monitor data.
    Args:
        -files: iterable of physio monitor file paths from one experiment (TDMS files)
        -path_out: optional alternative path to save the data file. If
            not specified, file is saved in same location as input files.
        -resample: if a number, resamples to 'resample' Hz
        -load_time: if True, loads the duration of the recording in seconds
    Returns:
        None; data saved in specified location
    """
    global bp_chans
    ##order the files
    files = order_files(files)
    ##create the output data file
    if path_out is None:
        path_out = os.path.dirname(files[0])
    path_out = os.path.join(path_out, 'bp_data.hdf5')
    f_out = h5py.File(path_out, 'w')
    dsets = load_bp_mp(files, resample, load_time)
    for chan in bp_chans:
        f_out.create_dataset(chan, data=np.hstack([x[chan] for x in dsets]))
    if load_time:
        times = [x['time'] for x in dsets]
        f_out.create_dataset("time", data=np.asarray(np.sum(times)))
    f_out.close()
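
A sketch of calling save_bp and reading the result back; the file paths are hypothetical, and order_files, load_bp_mp, and bp_chans are assumed to be defined in this module:
import glob
import h5py

files = glob.glob('/data/experiment_01/bp*.tdms')  ##hypothetical input files
save_bp(files, resample=200)
## by default the HDF5 file is written next to the input files
with h5py.File('/data/experiment_01/bp_data.hdf5', 'r') as f:
    print(list(f.keys()))   ##one dataset per channel in bp_chans, plus 'time'
    print(f['time'][()])    ##total duration in seconds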
Example #3
def save_physio(files, path_out=None, resample=False, load_time=True):
    """
    Function to create hdf5 file from physiological monitor data.
    Args:
        -files: iterable of physio monitor file paths from one experiment (TDMS files)
        -path_out: optional alternative path to save the data file. If
            not specified, file is saved in same location as input files.
        -resample: if a number, resamples to 'resample' Hz
        -load_time: if True, loads duration of the recording in seconds
    Returns:
        None; data saved in specified location
    """
    global serial_chans
    ##order the files
    files = order_files(files)
    ##create the output data file
    if path_out is None:
        path_out = os.path.dirname(files[0])
    path_out = os.path.join(path_out, 'physio_data.hdf5')
    f_out = h5py.File(path_out, 'w')
    dsets = []
    for f in files:
        print("loading " + f)
        data = load_physio(f, resample, load_time)
        dsets.append(data)
    for chan in serial_chans.values():
        f_out.create_dataset(chan, data=np.hstack([x[chan] for x in dsets]))
    if load_time:
        times = [x['time'] for x in dsets]
        f_out.create_dataset("time", data=np.asarray(np.sum(times)))
    f_out.close()
Example #4
def save_bp2(files, path_out=None, resample=False, load_time=True):
    """
    Function to create hdf5 file from bp monitor data.
    Args:
        -files: iterable of physio monitor file paths from one experiment (TDMS files)
        -path_out: optional alternative path to save the data file. If
            not specified, file is saved in same location as input files.
        -resample: if a number, resamples to 'resample' Hz
        -load_time: if True, loads the duration of the recording in seconds
    Returns:
        None; data saved in specified location

    Note: In this version, the save path has changed and the sampling
    frequency is added to the dataset.
    Saving to HDF5 should be added to process_bp in the next version.
    """
    global bp_chans
    ##order the files
    files = order_files(files)
    ##create the output data file
    if path_out is None:
        path_out = os.path.join(os.path.dirname(files[0]), 'bp.hdf5')
    f_out = h5py.File(path_out, 'a')
    dsets = load_bp_mp(files, resample, load_time)
    for chan in bp_chans:
        f_out.create_dataset(chan, data=np.hstack([x[chan] for x in dsets]))
    if load_time:
        times = [x['time'] for x in dsets]
        time_bp = np.asarray(np.sum(times))
        f_out.create_dataset("time_bp", data=time_bp)
        f_out.create_dataset("fs_bp", data=len(f_out[chan]) / time_bp)
    f_out.close()
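
A sketch showing how the sampling rate stored by save_bp2 could be recovered; the paths are hypothetical:
import glob
import h5py

files = glob.glob('/data/experiment_01/bp*.tdms')  ##hypothetical input files
save_bp2(files)
with h5py.File('/data/experiment_01/bp.hdf5', 'r') as f:
    fs_bp = f['fs_bp'][()]   ##samples per second, computed from the last channel written
    print(fs_bp, f['time_bp'][()])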
Example #5
def save_stim(files, path_out=None):
    """
    Function to create hdf5 file from ephys data.
    Args:
        -files: iterable of ephys/stim file paths from one experiment (TDMS files)
        -path_out: optional alternative path to save the data file. If
            not specified, file is saved in same location as input files.
    Returns:
        None; data saved in specified location
    """
    ##order the files
    files = order_files(files)
    ##create path
    if path_out is None:
        path_out = os.path.dirname(files[0])
    path_out = os.path.join(path_out, 'stim_data.hdf5')
    f_out = h5py.File(path_out, 'w')
    starts = []
    stops = []
    zs = []
    offset = 0
    for f in files:
        print("loading " + f)
        start, stop, z, fs = load_stim(f, offset)
        starts.append(start)
        stops.append(stop)
        zs.append(z)
        offset += z.size
    ##now add to the data file
    f_out.create_dataset("start", data=np.hstack(starts) / fs)
    f_out.create_dataset("stop", data=np.hstack(stops) / fs)
    f_out.create_dataset("z", data=np.hstack(zs))
    f_out.close()
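
A minimal usage sketch for save_stim; the file pattern is hypothetical, and order_files and load_stim are assumed to be defined in this module:
import glob
import h5py

stim_files = glob.glob('/data/experiment_01/stim*.tdms')  ##hypothetical file pattern
save_stim(stim_files)
with h5py.File('/data/experiment_01/stim_data.hdf5', 'r') as f:
    ##stim onsets and offsets, converted from samples to seconds by dividing by fs
    print(f['start'][:], f['stop'][:])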
Example #6
def process_stim(files):
    """
    Function to create a data dictionary from stim data.
    Args:
        -files: iterable of ephys/stim file paths from one experiment (TDMS files)
    Returns:
        data: dictionary with data from stim file, processed accordingly
    """
    ##order the files
    files = order_files(files)
    data = {}
    starts = []
    stops = []
    zs = []
    offset = 0
    for f in files:
        print("loading " + f)
        start, stop, z, fs = load_stim(f, offset)
        starts.append(start)
        stops.append(stop)
        zs.append(z)
        offset += z.size
    ##now add to the data dict
    data['start'] = np.hstack(starts) / fs
    data['stop'] = np.hstack(stops) / fs
    data['z'] = np.hstack(zs)
    return data
Example #7
def save_ephys(files, path_out=None, resample=False, load_time=True):
    """
    Function to create hdf5 file from ephys data.
    Args:
        -files: iterable of ephys file paths from one experiment (TDMS files)
        -path_out: optional alternative path to save the data file. If
            not specified, file is saved in same location as input files.
        -resample: if a number, resamples to 'resample' Hz
        -load_time: if True, loads the duration of the recording in seconds
    Returns:
        None; data saved in specified location
    """
    ##order the files
    files = order_files(files)
    ##create path
    if path_out is None:
        path_out = os.path.dirname(files[0])
    path_out = os.path.join(path_out, 'ephys_data.hdf5')
    f_out = h5py.File(path_out, 'w')
    dsets = load_ephys_mp(files, resample, load_time)
    ##standardize the channel names
    ephys_chans = [x for x in list(dsets[0]) if x != 'time']
    for i, chan in enumerate(ephys_chans):
        f_out.create_dataset("amp_" + str(i),
                             data=np.hstack([x[chan] for x in dsets]))
    if load_time:
        times = [x['time'] for x in dsets]
        f_out.create_dataset("time", data=np.asarray(np.sum(times)))
    f_out.close()
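
A sketch of reading back the standardized channel names written by save_ephys; paths are hypothetical, and order_files and load_ephys_mp are assumed to be defined in this module:
import glob
import h5py

files = glob.glob('/data/experiment_01/ephys*.tdms')  ##hypothetical input files
save_ephys(files, resample=1000)
with h5py.File('/data/experiment_01/ephys_data.hdf5', 'r') as f:
    ##channels are renamed amp_0, amp_1, ... regardless of their TDMS names
    amp_chans = sorted(k for k in f.keys() if k.startswith('amp_'))
    print(amp_chans)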
Example #8
def get_period(files):
    """
    A function to get the stimulation onset and offset times
    (for the whole stimulation block), in ms, relative to the start
    of the data file.
    Args:
        -files: list of tdms files that contain the stim waveform data
    Returns:
        -start, stop: times, in ms
    """
    global stim_chan
    ##make sure that the files are in the correct order
    files = order_files(files)
    start = None
    stop = None
    files = iter(files)
    ##this will churn through the files until a start value is found
    while start is None:
        path = next(files)
        print("Loading {}".format(path))
        tdms_file = nptdms.TdmsFile(path)
        ##here are a couple different possibilities for how things could be named
        try:
            channel_object = tdms_file.object('Group Name', stim_chan)
        except KeyError:
            try:
                channel_object = tdms_file.object('ephys', stim_chan)
            except KeyError:
                channel_object = tdms_file.object('Untitled', stim_chan)
        start, end = find_stim(channel_object)
        if start is not None:
            print("Found the start")
            stop = end
    print("Moving on to the end detection")
    ##now we'll keep going until there aren't any stim pulses detected,
    ##in case not all of the stim data is in the first file
    while end is not None:
        ##if end is not None, we must have found a continuation of the stim block in the previous file,
        ##so update 'stop' to reflect this
        stop = end
        path = next(files)
        print("Loading {}".format(path))
        tdms_file = nptdms.TdmsFile(path)
        ##here are a couple different possibilities for how things could be named
        try:
            channel_object = tdms_file.object('Group Name', stim_chan)
        except KeyError:
            try:
                channel_object = tdms_file.object('ephys', stim_chan)
            except KeyError:
                channel_object = tdms_file.object('Untitled', stim_chan)
        ##here we want to ignore any new values of start, because the start value we found in
        ##an earlier file should be the true start
        ignore, end = find_stim(channel_object)
    return start, stop
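
A minimal usage sketch for get_period; the file pattern is hypothetical, and stim_chan, find_stim, and order_files are assumed to be defined at module level:
import glob

files = glob.glob('/data/experiment_01/ephys*.tdms')  ##hypothetical input files
start_ms, stop_ms = get_period(files)  ##onset/offset of the stim block, in ms
print("stim block from {} to {} ms".format(start_ms, stop_ms))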
Example #9
def save_raw_stim(files, group_name='Group Name', path_out=None, record_raw='screenshot'):
    """
    Function to create hdf5 file from ephys data.
    Args:
        -files: iterable of ephys/stim file paths from one experiment (TDMS files)
        -group_name: name of the group storing the stim data in the TDMS file
        -path_out: optional alternative path to save the data file. If
            not specified, file is saved in same location as input files.
        -record_raw: 'full' will record the full stim recording (~4GB); 'screenshot'
            will just save a plot of it marking the start and stop of the stim (but takes longer).
    Returns:
        - the stim raw vector (memory intensive), the start and the stop of the stim
    """
    ## Order the files.
    files = order_files(files)
    offset = 0
    raw_list = []
    fs_list = []
    for f in files:
        print("loading " + f)
        raw, fs = _load_raw_stim(f, group_name=group_name)
        raw_list.append(raw)
        fs_list.append(fs)
        offset += raw.size
    raw_stim = np.hstack(raw_list)
    ### Check that the sampling rate does not change during the recording.
    assert len(set(fs_list)) == 1, "Error: Sampling rate changes"
    ### Find start and stop time of stim.
    stim_start, stim_stop = _get_stim_times(raw_stim)
    ##now add to the data file
    if path_out is None:
        path_out = os.path.join(os.path.dirname(files[0]), 'stim_data.hdf5')
    f_out = h5py.File(path_out, 'a')
    if record_raw == 'full':
        f_out.create_dataset("raw", data=raw_stim)
    elif record_raw == 'screenshot':
        fig = plt.figure()
        plt.vlines([stim_start, stim_stop], 0, max(raw_stim), color='r')
        plt.plot(raw_stim, zorder=1)
        plt.savefig(path_out + '.png')
        plt.close(fig)
    f_out.create_dataset("fs_stim", data=fs)
    f_out.create_dataset("time_stim", data=len(raw_stim)/fs)
    f_out.create_dataset("stim_start", data=stim_start)
    f_out.create_dataset("stim_stop", data=stim_stop)
    f_out.close()
    return raw_stim, stim_start, stim_stop
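
A hedged usage sketch contrasting the two record_raw modes of save_raw_stim; the file pattern and group name are hypothetical, and order_files, _load_raw_stim, and _get_stim_times are assumed to be defined in this module:
import glob

files = glob.glob('/data/experiment_01/stim*.tdms')  ##hypothetical input files
## 'screenshot' writes a .png of the trace next to the HDF5 file instead of the ~4GB raw vector
raw, start, stop = save_raw_stim(files, group_name='ephys', record_raw='screenshot')
## 'full' would store the concatenated raw stim trace in the 'raw' dataset instead
# raw, start, stop = save_raw_stim(files, group_name='ephys', record_raw='full')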
Example #10
def process_physio2(files,
                    resample=False,
                    load_time=True,
                    hd5_output_name=None):
    """
    A function to load the contents of all physio monitor
    files from one experiment folder into memory
    Args:
        -files: list of files to load/concatenate
        -resample: if a number, resamples to 'resample' Hz
        -load_time: if True, loads the duration of the recording in seconds
        -hd5_output_name: optional path of an HDF5 file to also save the processed data to
    Returns:
        -dsets: full concatenated data sets

    Note: In this version, we added the possibility to save the processed physio
    data to an HDF5 file if 'hd5_output_name' is specified.
    """
    global serial_chans
    ##order the files
    files = order_files(files)
    tdms = []
    dsets = {}
    for f in files:
        print("loading " + f)
        data = load_physio(f, resample, load_time)
        tdms.append(data)
    for chan in serial_chans.values():
        dsets[chan] = np.hstack([x[chan] for x in tdms])
    if load_time:
        times = [x['time'] for x in tdms]
        dsets['time'] = np.sum(times)

    if hd5_output_name is not None:
        f_out = h5py.File(hd5_output_name, 'w')
        for chan in serial_chans.values():
            f_out.create_dataset(chan, data=np.hstack([x[chan] for x in tdms]))
        if load_time:
            f_out.create_dataset("time", data=np.asarray(np.sum(times)))
        f_out.close()
    return dsets
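
A sketch of using the optional HDF5 export added in process_physio2; the paths are hypothetical, and order_files, load_physio, and serial_chans are assumed to be defined in this module:
import glob

files = glob.glob('/data/experiment_01/physio*.tdms')  ##hypothetical input files
## returns the in-memory dict and, because hd5_output_name is given, also writes an HDF5 copy
dsets = process_physio2(files, resample=100,
                        hd5_output_name='/data/experiment_01/physio_data.hdf5')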
Example #11
def process_ephys(files, resample=False, load_time=True):
    """
    A function to load the contents of all ephys
    files from one experiment folder into memory
    Args:
        -files: list of files to load/concatenate
        -resample: if a number, resamples to 'resample' Hz
        -load_time: if True, loads the duration of the recording in seconds
    Returns:
        -dsets: full concatenated data sets
    """
    files = order_files(files)
    dsets = {}
    tdms = load_ephys_mp(files, resample, load_time)
    ##extract the channel names from the first dataset
    ephys_chans = [x for x in list(tdms[0]) if x != 'time']
    for i, chan in enumerate(ephys_chans):
        dsets["amp_" + str(i)] = np.hstack([x[chan] for x in tdms])
    if load_time:
        times = [x['time'] for x in tdms]
        dsets['time'] = np.sum(times)
    return dsets
Example #12
def process_ephys2(files,
                   resample=False,
                   load_time=True,
                   hd5_output_name=None):
    """
    A function to load the contents of all ephys
    files from one experiment folder into memory
    Args:
        -files: list of files to load/concatenate
        -resample: if a number, resamples to 'resample' Hz
        -load_time: if True, loads the duration of the recording in seconds
        -hd5_output_name: optional path of an HDF5 file to also save the processed data to
    Returns:
        -dsets: full concatenated data sets

    Note: In this version, we added the possibility to save the processed ephys
    data to an HDF5 file if 'hd5_output_name' is specified.
    """
    files = order_files(files)
    dsets = {}
    tdms = load_ephys_mp(files, resample, load_time)
    ##extract the channel names from the first dataset
    ephys_chans = [x for x in list(tdms[0]) if x != 'time']
    for i, chan in enumerate(ephys_chans):
        dsets["amp_" + str(i)] = np.hstack([x[chan] for x in tdms])
    if load_time:
        times = [x['time'] for x in tdms]
        dsets['time'] = np.sum(times)

    if hd5_output_name is not None:
        f_out = h5py.File(hd5_output_name, 'w')
        for i, chan in enumerate(ephys_chans):
            f_out.create_dataset("amp_" + str(i),
                                 data=np.hstack([x[chan] for x in tdms]))
        if load_time:
            f_out.create_dataset("time", data=np.asarray(np.sum(times)))
        f_out.close()

    return dsets
Example #13
def process_bp(files, resample=False, load_time=True):
    """
    A function to load the contents of all bp monitor
    files from one experiment folder into memory
    Args:
        -files: list of files to load/concatenate
        -resample: if a number, resamples to 'resample' Hz
        -load_time: if True, loads the duration of the recording in seconds
    Returns:
        -dsets: full concatenated data sets
    """
    global bp_chans
    files = order_files(files)
    dsets = {}
    tdms = load_bp_mp(files, resample, load_time)
    for chan in bp_chans:
        dsets[chan] = np.hstack([x[chan]
                                 for x in tdms]) * 100.0  ##convert to mmHg
    if load_time:
        times = [x['time'] for x in tdms]
        dsets['time'] = np.sum(times)
    return dsets
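
A minimal usage sketch for process_bp; the file pattern is hypothetical, and order_files, load_bp_mp, and bp_chans are assumed to be defined in this module:
import glob

files = glob.glob('/data/experiment_01/bp*.tdms')  ##hypothetical input files
dsets = process_bp(files, resample=200)
## channel values are scaled by 100.0 to convert to mmHg (per the comment in process_bp)
print({chan: dsets[chan].mean() for chan in dsets if chan != 'time'})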