Example #1
    def create(self, *args, **kwargs):
        self.markers = []
        self.timeStamps = []

        if kwargs.get('data'):
            self.markers = kwargs['markers']
            self.timeStamps = kwargs['timeStamps']
            self.trialIndices = kwargs['trialIndices']
            self.rawMarkers = kwargs['rawMarkers']
            self.sessionStartTime = kwargs['sessionStartTime']
            self.samplingRate = kwargs['sampleRate']
            self.numSets = 1
            return self

        nevFile = glob.glob("*.nev")
        if len(nevFile) == 0:
            print("No .nev files in directory. Returning empty object...")
            # create empty object
            DPT.DPObject.create(self, dirs=[], *args, **kwargs)
        else:
            # create object
            DPT.DPObject.create(self, *args, **kwargs)
            reader = BlackrockIO(nevFile[0])
            print('Opening .nev file, creating new RPLParallel object...')
            ev_rawtimes, _, ev_markers = reader.get_event_timestamps()
            ev_times = reader.rescale_event_timestamp(ev_rawtimes,
                                                      dtype="float64")
            if ev_markers[0] == 128:
                self.rawMarkers = ev_markers
                self.markers = ev_markers[::2]
                self.timeStamps = ev_times[::2]
                return self
            self.samplingRate = 30000
            self.rawMarkers = ev_markers
            self.sessionStartTime = ev_times[0]
            self.numSets = 1
            self.markers, self.timeStamps, self.trialIndices = arrangeMarkers(
                ev_markers, ev_times)
            return self
Example #2
    def create(self, *args, **kwargs):

        if not self.args['SkipParallel']:
            print('Calling RPLParallel...')
            rp = RPLParallel(saveLevel=1)

        ns5File = glob.glob('*.ns5')
        if len(ns5File) > 1:
            print('Too many .ns5 files, do not know which one to use.')
            # create empty object
            DPT.DPObject.create(self, dirs=[], *args, **kwargs)
            return
        if len(ns5File) == 0:
            print('.ns5 file missing')
            # create empty object
            DPT.DPObject.create(self, dirs=[], *args, **kwargs)
            return
        # create object
        DPT.DPObject.create(self, *args, **kwargs)
        reader = BlackrockIO(ns5File[0])
        bl = reader.read_block(lazy=True)
        print('.ns5 file loaded.')
        segment = bl.segments[0]
        # Check whether an .ns2 file is present; if it is not, adjust the index
        # used for the raw signals accordingly.
        if len(glob.glob('*.ns2')) == 0:
            index = 1
        else:
            index = 2
        chx = bl.channel_indexes[index]  # For the raw data.
        analogInfo = {}
        analogInfo['SampleRate'] = float(
            segment.analogsignals[index].sampling_rate)
        annotations = chx.annotations
        names = [str(x) for x in chx.channel_names]

        def process_channel(data,
                            annotations,
                            chxIndex,
                            analogInfo,
                            channelNumber,
                            returnData=False):
            analogInfo['Units'] = 'uV'
            analogInfo['HighFreqCorner'] = float(
                annotations['nev_hi_freq_corner'][chxIndex])
            analogInfo['HighFreqOrder'] = annotations['nev_hi_freq_order'][
                chxIndex]
            analogInfo['HighFilterType'] = annotations['nev_hi_freq_type'][
                chxIndex]
            analogInfo['LowFreqCorner'] = float(
                annotations['nev_lo_freq_corner'][chxIndex])
            analogInfo['LowFreqOrder'] = annotations['nev_lo_freq_order'][
                chxIndex]
            analogInfo['LowFilterType'] = annotations['nev_lo_freq_type'][
                chxIndex]
            analogInfo['MaxVal'] = np.amax(data)
            analogInfo['MinVal'] = np.amin(data)
            analogInfo['NumberSamples'] = len(data)
            analogInfo['ProbeInfo'] = names[chxIndex]
            if returnData:
                return analogInfo
            arrayNumber = annotations['connector_ID'][chxIndex] + 1
            arrayDir = "array{:02d}".format(int(arrayNumber))
            channelDir = "channel{:03d}".format(int(channelNumber))
            # Remember the current directory; we go into the array/channel
            # directory to save the file there, then return afterwards.
            directory = os.getcwd()
            if arrayDir not in os.listdir('.'):
                os.mkdir(arrayDir)
            os.chdir(arrayDir)
            if channelDir not in os.listdir('.'):
                os.mkdir(channelDir)
            os.chdir(channelDir)
            print('Calling RPLRaw for channel {:03d}'.format(channelNumber))
            rplraw.RPLRaw(analogData=data, analogInfo=analogInfo, saveLevel=1)
            if self.args['SkipHPC']:
                if not self.args['SkipLFP']:
                    print('Calling RPLLFP for channel {:03d}'.format(
                        channelNumber))
                    rpllfp.RPLLFP(saveLevel=1)
                if not self.args['SkipHighPass']:
                    print('Calling RPLHighPass for channel {:03d}'.format(
                        channelNumber))
                    rplhighpass.RPLHighPass(saveLevel=1)
                if DPT.levels.get_level_name('session',
                                             os.getcwd()) != 'sessioneye':
                    if not self.args['SkipSort']:
                        print(
                            'Calling Mountain Sort for channel {:03d}'.format(
                                channelNumber))
                        # mountain_batch()
                        # export_mountain_cells()
            else:
                if 'HPCScriptsDir' not in kwargs.keys():
                    kwargs['HPCScriptsDir'] = ''
                print(
                    'Adding RPLLFP slurm script for channel {:03d} to job queue'
                    .format(channelNumber))
                os.system('sbatch ' + kwargs['HPCScriptsDir'] +
                          'rpllfp-slurm.sh')
                if not self.args['SkipSort']:
                    print(
                        'Adding RPLHighPass and Mountain Sort slurm script for channel {:03d} to job queue'
                        .format(channelNumber))
                    os.system('sbatch ' + kwargs['HPCScriptsDir'] +
                              'rplhighpass-sort-slurm.sh')
                else:
                    print(
                        'Adding RPLHighPass slurm script for channel {:03d} to job queue'
                        .format(channelNumber))
                    os.system('sbatch ' + kwargs['HPCScriptsDir'] +
                              'rplhighpass-slurm.sh')
            os.chdir(directory)
            print('Channel {:03d} processed'.format(channelNumber))
            return

        if 'returnData' in kwargs.keys():
            i = self.args['channel'][0]
            chxIndex = names.index(
                list(filter(lambda x: str(i) in x, names))[0])
            data = np.array(segment.analogsignals[index].load(
                time_slice=None, channel_indexes=[chx.index[chxIndex]]))
            analogInfo = process_channel(data,
                                         annotations,
                                         chxIndex,
                                         analogInfo,
                                         i,
                                         returnData=True)
            print('Returning data and analogInfo to RPLRaw')
            self.data = data
            self.analogInfo = analogInfo
            return

        channelNumbers = []
        channelIndexes = []
        for i in self.args['channel']:
            chxIndex = list(
                filter(lambda x: int(i) == int(x[6:len(x) - 1]), names))
            if len(chxIndex) > 0:
                chxIndex = names.index(chxIndex[0])
                channelNumbers.append(i)
                channelIndexes.append(chxIndex)

        if kwargs.get('byChannel', 0):
            for ind, idx in enumerate(channelIndexes):
                data = np.array(segment.analogsignals[index].load(
                    time_slice=None, channel_indexes=[idx]))
                print('Processing channel {:03d}'.format(channelNumbers[ind]))
                process_channel(data, annotations, idx, analogInfo,
                                channelNumbers[ind])
                del data  # to create RAM space to load in the next channel data.
            return

        else:
            if len(channelIndexes) > 0:
                numOfIterations = int(np.ceil(len(channelIndexes) / 32))
                for k in range(numOfIterations):
                    tempIndexes = channelIndexes[k * 32:(k + 1) * 32]
                    tempNumbers = channelNumbers[k * 32:(k + 1) * 32]
                    data = np.array(segment.analogsignals[index].load(
                        time_slice=None, channel_indexes=tempIndexes))
                    for ind, idx in enumerate(tempIndexes):
                        print('Processing channel {:03d}'.format(
                            tempNumbers[ind]))
                        process_channel(np.array(data[:, ind]), annotations,
                                        idx, analogInfo, tempNumbers[ind])
                    del data  # to create RAM space to load in the next set of channel data.
            return
Example #3
import os

from neo.io import BlackrockIO


def load_data(settings):
    reader = BlackrockIO(filename=os.path.join(settings.path2data,
                                               settings.file_stem),
                         nsx_to_load=3)
    return reader
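
A minimal usage sketch for load_data above. The settings values are illustrative placeholders (paths borrowed from Example #4), and only the path2data and file_stem attributes from the function signature are assumed; the lazy read_block/load pattern mirrors the other examples on this page.

import numpy as np
from types import SimpleNamespace

# stand-in for the real settings object; only path2data and file_stem are needed
settings = SimpleNamespace(path2data='session01', file_stem='181105_Block1')

reader = load_data(settings)
bl = reader.read_block(lazy=True)   # lazy block: signals stay on disk as proxies
seg = bl.segments[0]
# pull the first analog signal into memory and note its sampling rate
sig = np.array(seg.analogsignals[0].load(time_slice=None))
sr = float(seg.analogsignals[0].sampling_rate)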
Example #4
import numpy as np
import matplotlib.pyplot as plt
from pyedfread import edf
from neo.io import BlackrockIO

# 2c
pos_data = np.loadtxt("session01/RawData_T1-400/session_1_5112018105323.txt",
                      skiprows=14)

# 3b
samples, events, messages = edf.pread('181105.edf',
                                      trial_marker=b'Start Trial')

# 4c
reader = BlackrockIO('session01/181105_Block1.ns5')
bl = reader.read_block(lazy=True)

# 4e
broadband_data = np.array(bl.segments[0].analogsignals[1].load(
    time_slice=None, channel_indexes=[2]))

# 4f
sr = float(bl.segments[0].analogsignals[1].sampling_rate)

# 4g
reader = BlackrockIO('session01/181105_Block1.nev')
ev_rawtimes, _, ev_markers = reader.get_event_timestamps()

# 2d
uind = np.arange(300)
Example #5
def source2raw(subs=cfg.subs):

    from neo.io import BlackrockIO

    TimeStart = time.time()
    # ITERATE SUBJECT LIST =============================================================================
    for iSub, sub in enumerate(subs):

        print(
            f"\n--- {sub} ----------------------------------------------------\n"
        )
        # add paths to parameters
        SP = cf.sub_params(sub)
        cf.display_progress(f"Loading source data, {sub}", iSub, len(subs),
                            TimeStart)
        ieeg = []
        ttl = []
        # 2 files per session
        for i, source_file in enumerate(SP['source_files']):
            reader = BlackrockIO(filename=source_file)
            blks = reader.read(lazy=False)

            # TTL channel: 1 where the signal crosses upward through the 4000
            # threshold (rising edges), 0 elsewhere
            ttl_signal = np.array(blks[0].segments[-1].analogsignals[0]).T[0, :]
            ttl_ = np.squeeze(1. * (np.diff(1. * (ttl_signal > 4000.)) > 0)).astype(np.int8)
            # iEEG channels as int16, shaped (channels, samples)
            ieeg_ = np.array(blks[0].segments[-1].analogsignals[1],
                             dtype=np.int16).T

            # start five seconds before first fixation
            t0 = np.argmax(ttl_ > 0.5) - int(5 * cfg.source_srate)
            ttl += [ttl_[t0:]]
            ieeg += [ieeg_[:, t0:]]

        # loop over sessions
        ieeg_full = []
        ttl_full = []
        for i in range(0, len(ttl), 2):
            m = min(len(ttl[i]), len(ttl[i + 1]))
            ieeg_session = np.vstack((ieeg[i][:, :m], ieeg[i + 1][:, :m]))

            ieeg_full += [ieeg_session]
            ttl_full += [ttl[i][:m]]

        # concatenate sessions
        ieeg = np.concatenate(ieeg_full, axis=-1)
        ttl = np.concatenate(ttl_full)

        print(
            f"\n{ieeg.shape[0]} channels, {int(len(ttl)/cfg.source_srate)} seconds, {np.sum(ttl)} events, {round(ieeg.nbytes/1e9,2)} GB of data loaded"
        )
        cf.print_d("saving...")

        # save one ieeg, one ttl file per subject
        fname = os.path.join(SP['RawPath'], f"{sub}_ttl.csv")
        ttl.tofile(fname, sep=',')

        if len(SP['ChNames']) != ieeg.shape[0]:
            # assumption: fix_channels reconciles the channel names in the
            # subject parameters with the actual number of loaded channels
            SP = fix_channels(SP, len(ieeg))

        cf.display_progress(f"Making MNE Raw, {sub} ", iSub, len(subs),
                            TimeStart)

        # Create MNE info
        info = mne.create_info(SP['ChNames'],
                               sfreq=cfg.source_srate,
                               ch_types='eeg')
        # Finally, create the Raw object
        raw = mne.io.RawArray(ieeg, info)

        montage = mne.channels.make_dig_montage(
            dict(zip(SP['ChNames'], SP['coords'])))
        raw.set_montage(montage)

        #print(raw.get_montage().get_positions()['ch_pos']['001-AH1'])

        # downsample for memory
        if cfg.srate >= raw.info['sfreq']:
            print(
                f"Error: Original sampling freq smaller than or equal to target sampling freq ({raw.info['sfreq']} Hz <= {cfg.srate} Hz)"
            )
        else:
            # resample raw
            cf.display_progress(
                f"Resampling from {cfg.source_srate} to {cfg.srate}, {sub}",
                iSub, len(subs), TimeStart)
            raw.resample(cfg.srate)

        cf.display_progress(
            f"Applying notch filtering at {cfg.landline_noise} Hz, and 4 harmonics, {sub}",
            iSub, len(subs), TimeStart)
        raw.notch_filter(cfg.landline_noise * np.arange(1, 5),
                         filter_length='auto',
                         phase='zero',
                         picks='all')

        cf.print_d(f"Saving mne raw files for {sub}")
        fname = os.path.join(SP['RawPath'], f"{sub}_raw.fif")
        raw.save(fname, picks='all', overwrite=True)

        print(' ')