Example #1
    def load_raw_data(self, subject, series):
        """Load data for a subject / series."""
        test = series == TEST_SERIES
        if not test:
            fnames = [
                glob('../data/train/subj%d_series%d_data.csv' % (subject, i))
                for i in series
            ]
        else:
            fnames = [
                glob('../data/test/subj%d_series%d_data.csv' % (subject, i))
                for i in series
            ]
        fnames = list(np.concatenate(fnames))
        fnames.sort()
        raw_train = [
            creat_mne_raw_object(fname, read_events=not test)
            for fname in fnames
        ]
        raw_train = concatenate_raws(raw_train)
        # pick eeg signal
        picks = pick_types(raw_train.info, eeg=True)

        self.data = raw_train._data[picks].transpose()

        self.data = preprocessData(self.data)

        if not test:
            # rows after the 32 EEG channels hold the event labels
            self.events = raw_train._data[32:].transpose()
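As an aside, the slightly non-obvious part of the example above is the filename expansion: one glob per series, flattened with np.concatenate and then sorted. A minimal standalone sketch of that step, assuming the Kaggle Grasp-and-Lift layout ../data/train/subjN_seriesM_data.csv:

from glob import glob

import numpy as np

# One glob call per requested series; each call returns a (possibly empty) list.
subject, series = 1, [1, 2, 3]
fnames = [glob('../data/train/subj%d_series%d_data.csv' % (subject, i)) for i in series]

# Flatten the per-series lists into one list of paths and sort them,
# exactly as load_raw_data does before building the MNE Raw objects.
fnames = sorted(np.concatenate(fnames))
print(fnames)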
Example #2
    def load_raw_data(self, subject, series):
        """Load data for a subject / series."""
        # test = series == TEST_SERIES
        test = False
        if not test:
            fnames = [glob(get_horizo_path(subject, i)) for i in series]
        else:
            fnames = [
                glob('../data/test/subj%d_series%d_data.csv' % (subject, i))
                for i in series
            ]
        fnames = list(np.concatenate(fnames))
        fnames.sort()
        self.fnames = fnames
        action_1D_type = 'HO'
        raw_train = [
            creat_mne_raw_object(fnames[i], i, read_events=action_1D_type)
            for i in range(len(fnames))
        ]
        raw_train = concatenate_raws(raw_train)
        # pick eeg signal
        picks = pick_types(raw_train.info, eeg=True)

        self.data = raw_train._data[picks].transpose()

        self.data = preprocessData(self.data)

        if not test:
            self.events = raw_train._data[14:].transpose()
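Note that all of these snippets read samples through the private raw._data attribute. A hedged sketch of the equivalent channel-picking step through MNE's public API (the CSV path is only a placeholder, and creat_mne_raw_object is the helper from the same Kaggle script, called here with Example #1's signature):

from mne import pick_types

# Build the Raw object with the script's own helper (placeholder path).
raw = creat_mne_raw_object('../data/train/subj1_series1_data.csv', read_events=True)
picks = pick_types(raw.info, eeg=True)

# Public-API equivalent of raw._data[picks].transpose():
data = raw.get_data(picks=picks).T  # shape (n_times, n_eeg_channels)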
Example #3
    def load_raw_data(self, subject, series):
        """
        Load data for a subject / series.
        n_points: int. The number of timepoints that can be predict/train. 
        Because the timepoints in the start are not valid for windows or there are no velocity.
        """
        # test = series == TEST_SERIES
        test = False
        if not test:
            fnames = [glob(get_horizo_path(subject, i)) for i in series]
        else:
            fnames = [glob('../data/test/subj%d_series%d_data.csv' %
                      (subject, i)) for i in series]
        fnames = list(np.concatenate(fnames))
        fnames.sort()
        self.fnames = fnames
        action_1D_type = 'HO'
        raw_train = [creat_mne_raw_object(fnames[i], i, read_events=action_1D_type)
                     for i in range(len(fnames))]
        raw_train = concatenate_raws(raw_train)
        # pick eeg signal
        picks = pick_types(raw_train.info, eeg=True)

        self.data = raw_train._data[picks].transpose()

        self.data = preprocessData(self.data)
        self.n_points = self.data.shape[0] - START_TRAIN

        if not test:
            self.events = raw_train._data[14:].transpose()
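Example #3 additionally records n_points = data.shape[0] - START_TRAIN, i.e. how many timepoints remain once the invalid leading samples are skipped. A small illustrative sketch of how that bookkeeping might be used for slicing (START_TRAIN's value and the toy arrays are assumptions):

import numpy as np

START_TRAIN = 500  # assumed value; the real constant comes from the script's config

# Toy stand-ins for self.data / self.events after preprocessing.
data = np.random.randn(10000, 32)
events = np.random.randint(0, 2, size=(10000, 6))

n_points = data.shape[0] - START_TRAIN

# Only timepoints from START_TRAIN onward have a full window of history,
# so training pairs are drawn from that range.
X_train = data[START_TRAIN:]
y_train = events[START_TRAIN:]
assert X_train.shape[0] == n_points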
Example #4
    def load_raw_data(self, subject, series):
        """Load data for a subject / series."""
        test = series == TEST_SERIES
        if not test:
            fnames = [glob('../data/train/subj%d_series%d_data.csv' %
                      (subject, i)) for i in series]
        else:
            fnames = [glob('../data/test/subj%d_series%d_data.csv' %
                      (subject, i)) for i in series]
        fnames = list(np.concatenate(fnames))
        fnames.sort()
        raw_train = [creat_mne_raw_object(fname, read_events=not test)
                     for fname in fnames]
        raw_train = concatenate_raws(raw_train)
        # pick eeg signal
        picks = pick_types(raw_train.info, eeg=True)

        self.data = raw_train._data[picks].transpose()

        self.data = preprocessData(self.data)

        if not test:
            # rows after the 32 EEG channels hold the event labels
            self.events = raw_train._data[32:].transpose()
Example #5
series_test_tot = []

# #### generate predictions #####
for subject in subjects:
    print('Loading data for subject %d...' % subject)
    # ############### READ DATA ###############################################
    # fnames = glob('data/train/subj%d_series*_data.csv' % (subject))
    fnames = glob(get_all_horizon_path_from_the_subject(subject))
    fnames.sort()
    fnames_val = fnames[-1:]

    # fnames_test = glob('data/test/subj%d_series*_data.csv' % (subject))
    # fnames_test.sort()

    action_1D_type = 'HO'
    raw_val = concatenate_raws([creat_mne_raw_object(fnames_val[i], i, read_events=action_1D_type) for i in range(len(fnames_val))])
    # raw_test = concatenate_raws([creat_mne_raw_object(fname, read_events=False) for fname in fnames_test])

    # extract labels for the validation series
    labels = raw_val._data[len(CH_NAMES):]
    lbls_tot.append(labels.transpose())

    # aggregate infos for the validation series
    raw_series5 = creat_mne_raw_object(fnames_val[0], 4, read_events=action_1D_type)
    series = np.array([5] * raw_series5.n_times)
    series_val_tot.append(series)

    subjs = np.array([subject]*labels.shape[1])
    subjects_val_tot.append(subjs)

    # aggregate infos for test (series 9&10)
Example #6
ids_tot = []
subjects_test_tot = []
series_test_tot = []

# #### generate predictions #####
for subject in subjects:
    print('Loading data for subject %d...' % subject)
    # ############### READ DATA ###############################################
    fnames = glob('data/train/subj%d_series*_data.csv' % (subject))
    fnames.sort()
    fnames_val = fnames[-2:]

    fnames_test = glob('data/test/subj%d_series*_data.csv' % (subject))
    fnames_test.sort()

    raw_val = concatenate_raws([creat_mne_raw_object(fname, read_events=True)
                                for fname in fnames_val])
    raw_test = concatenate_raws([creat_mne_raw_object(fname, read_events=False)
                                for fname in fnames_test])

    # extract labels for series 7&8
    labels = raw_val._data[32:]
    lbls_tot.append(labels.transpose())

    # aggregate infos for validation (series 7&8)
    raw_series7 = creat_mne_raw_object(fnames_val[0])
    raw_series8 = creat_mne_raw_object(fnames_val[1])
    series = np.array([7] * raw_series7.n_times +
                      [8] * raw_series8.n_times)
    series_val_tot.append(series)
Example #7
subjects_test_tot = []
series_test_tot = []

# #### generate predictions #####
for subject in subjects:
    print('Loading data for subject %d...' % subject)
    # ############### READ DATA ###############################################
    fnames = glob('data/train/subj%d_series*_data.csv' % (subject))
    fnames.sort()
    fnames_val = fnames[-2:]

    fnames_test = glob('data/test/subj%d_series*_data.csv' % (subject))
    fnames_test.sort()

    raw_val = concatenate_raws([
        creat_mne_raw_object(fname, read_events=True) for fname in fnames_val
    ])
    raw_test = concatenate_raws([
        creat_mne_raw_object(fname, read_events=False) for fname in fnames_test
    ])

    # extract labels for series 7&8
    labels = raw_val._data[32:]
    lbls_tot.append(labels.transpose())

    # aggregate infos for validation (series 7&8)
    raw_series7 = creat_mne_raw_object(fnames_val[0])
    raw_series8 = creat_mne_raw_object(fnames_val[1])
    series = np.array([7] * raw_series7.n_times + [8] * raw_series8.n_times)
    series_val_tot.append(series)
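The series array built in Examples #6 and #7 tags every validation timepoint with its series number, which makes it easy to split predictions or labels per series later. An illustrative sketch with toy shapes:

import numpy as np

# Toy stand-ins for the arrays produced in the loop above.
series = np.array([7] * 1000 + [8] * 1200)
labels = np.random.randint(0, 2, size=(series.size, 6))

mask_series8 = (series == 8)           # boolean mask over timepoints
labels_series8 = labels[mask_series8]  # labels restricted to series 8
print(labels_series8.shape)            # (1200, 6)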
Example #8
ids_tot = []
subjects_test_tot = []
series_test_tot = []

# #### generate predictions #####
for subject in subjects:
    print "Loading data for subject %d..." % subject
    # ############### READ DATA ###############################################
    fnames = glob("data/train/subj%d_series*_data.csv" % (subject))
    fnames.sort()
    fnames_val = fnames[-2:]

    fnames_test = glob("data/test/subj%d_series*_data.csv" % (subject))
    fnames_test.sort()

    raw_val = concatenate_raws([creat_mne_raw_object(fname, read_events=True) for fname in fnames_val])
    raw_test = concatenate_raws([creat_mne_raw_object(fname, read_events=False) for fname in fnames_test])

    # extract labels for series 7&8
    labels = raw_val._data[32:]
    lbls_tot.append(labels.transpose())

    # aggregate infos for validation (series 7&8)
    raw_series7 = creat_mne_raw_object(fnames_val[0])
    raw_series8 = creat_mne_raw_object(fnames_val[1])
    series = np.array([7] * raw_series7.n_times + [8] * raw_series8.n_times)
    series_val_tot.append(series)

    subjs = np.array([subject] * labels.shape[1])
    subjects_val_tot.append(subjs)
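After the per-subject loop finishes, the accumulated per-subject lists are typically stacked into flat arrays before scoring. The snippet below is a sketch of that step, continuing the variable names from the examples; the concatenation itself is an assumption about the omitted tail of the script:

import numpy as np

# Stack the per-subject pieces collected above into per-timepoint arrays.
labels_all = np.concatenate(lbls_tot, axis=0)             # (total_samples, n_events)
series_all = np.concatenate(series_val_tot, axis=0)       # (total_samples,)
subjects_all = np.concatenate(subjects_val_tot, axis=0)   # (total_samples,)

print(labels_all.shape, series_all.shape, subjects_all.shape)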