Ejemplo n.º 1
0
        # GDF recording(s) for the current subject ({uid}), evaluation session
        # ("E" suffix).  NOTE(review): path is hard-coded to one machine —
        # parameterize before reuse.
        filepaths = [
            Path(
                f"/home/paulo/Documents/datasets/BCI_Comp_IV_2a/gdf/{uid}E.gdf"
            ),
            # Path(f"/home/paulo/Documents/datasets/BCI_Comp_IV_2a/gdf/{uid}E.gdf"),
        ]
        # BCI_IV_Comp_Dataset.EVENT_MAP_DICT = {
        #     "276": 0,
        #     "277": 1,
        #     "1072": 2
        # }
        # Override the dataset's annotation-code -> label mapping for codes
        # 277/276/1072.  NOTE(review): presumably the resting-state /
        # eye-artifact markers of BCI Competition IV 2a — confirm against the
        # dataset documentation before relying on the label order.
        BCI_IV_Comp_Dataset.EVENT_MAP_DICT = {"277": 0, "276": 1, "1072": 2}
        # Load one long epoch per mapped event (4 s .. 60 s after onset),
        # keeping EOG channels and disabling amplitude-based rejection.
        epochs_list = [
            BCI_IV_Comp_Dataset.load_as_epochs(filepaths[0],
                                               load_eog=True,
                                               tmin=4.,
                                               tmax=60.,
                                               reject=False).load_data(),
            # BCI_IV_Comp_Dataset.load_as_epochs(filepaths[1], load_eog=True, tmin=4., tmax=60., reject=False, has_labels=False).load_data(),
        ]
        epochs = concatenate_epochs(epochs_list)

        # eeg_epochs = epochs.copy().pick("eeg").pick(['EEG-Fz', 'EEG-C3', 'EEG-Cz', 'EEG-C4', 'EEG-Pz'])
        # eog_epochs = epochs.copy().pick("eog").pick(['EOG-left', 'EOG-right'])
        # Split into all EEG channels vs. a single EOG channel ('EOG-right'
        # only; 'EOG-left' was dropped per the commented line above).
        eeg_epochs = epochs.copy().pick("eeg")
        eog_epochs = epochs.copy().pick("eog").pick(['EOG-right'])

        # Numeric arrays; axis 1 indexes channels (see n_channels below).
        eog_data = eog_epochs.get_data()
        eeg_data = eeg_epochs.get_data()

        n_channels = eeg_data.shape[1]
Ejemplo n.º 2
0
def run_ica_experiment(_run, method_idx):
    """Benchmark ICA methods: fit on training subjects, score per test subject.

    For each ICA method (all of them, or the single one selected by
    ``method_idx``), an ICA model is fit once on the concatenated training
    epochs, then every test subject's epochs are projected onto the learned
    sources, scored with the functions in ``SCORING_FN_DICT``, and classified
    with a PSD + SVC pipeline trained on the transformed training epochs.
    Results are dumped to ``./results{name}.json`` and registered as a
    sacred artifact on ``_run``.

    Parameters
    ----------
    _run : sacred Run object used to attach the results artifact.
    method_idx : int or None
        Index into ``get_all_methods()`` selecting one method, or ``None``
        to run every method.
    """
    # Training data: all training subjects concatenated, low-passed at 40 Hz.
    # NOTE(review): unlike the test epochs below, the training epochs are NOT
    # resampled to 90 Hz — confirm this asymmetry is intentional.
    train_epochs = BCI_IV_Comp_Dataset.load_dataset(train_filepaths,
                                                    as_epochs=True,
                                                    concatenate=True,
                                                    drop_bad=True,
                                                    return_metadata=False,
                                                    tmin=-1.,
                                                    tmax=3.)
    train_epochs.load_data().filter(l_freq=None, h_freq=40)

    # Test data: kept per-subject so results can be reported individually.
    test_epochs_list, test_metadata_list = BCI_IV_Comp_Dataset.load_dataset(
        test_filepaths,
        as_epochs=True,
        concatenate=False,
        drop_bad=True,
        return_metadata=True,
        tmin=-1.,
        tmax=3.)

    all_methods = get_all_methods()
    if method_idx is None:
        methods = all_methods
        name = ""
    else:
        methods = [all_methods[method_idx]]
        name = "_{}".format(all_methods[method_idx])
    print("Using methods", methods)

    print("Loaded test files:", len(test_epochs_list))
    results = {}
    for method in methods:
        print("Running for method", method)
        results[method] = []
        clf = make_pipeline(PSD(fmin=0.1, fmax=40, picks=PICKS), Averager(),
                            MinMaxScaler(), SVC(C=4))

        # Only the ICA fit is timed; the same duration is recorded for
        # every test subject of this method.
        fit_start = time.time()
        ICA = get_ica_instance(method, n_components=ICA_N_COMPONENTS)
        ICA.fit(train_epochs)
        print("\tICA fitted!")
        duration = time.time() - fit_start
        transformed_train_epochs = ICA.get_sources(train_epochs)

        subject_pairs = zip(test_epochs_list, test_metadata_list)
        for i, (subject_epochs, mdata) in enumerate(subject_pairs):
            print("\t", i, mdata["id"])

            # Filter + resample a copy, then project onto the ICA sources.
            prepared = subject_epochs.copy().load_data()
            prepared = prepared.filter(l_freq=None, h_freq=40).resample(90.)
            transformed_test_epochs = ICA.get_sources(prepared)

            # Pairwise source-quality scores on the concatenated signal.
            scores = {}
            signal = np.hstack(transformed_test_epochs.get_data())
            for fn_name, scoring_fn in SCORING_FN_DICT.items():
                scores[fn_name] = apply_pairwise_parallel(signal, scoring_fn)

            # Labels are the event codes (third column of the events array).
            X_train = transformed_train_epochs
            Y_train = transformed_train_epochs.events[:, 2]
            X_test = transformed_test_epochs
            Y_test = transformed_test_epochs.events[:, 2]

            try:
                clf.fit(X_train, Y_train)
            except Exception as e:
                # Best-effort: record the failure and move on to the next
                # subject, keeping the timing information.
                print("\t\tFailed during fit:", str(e))
                results[method].append({
                    "id": mdata["id"],
                    "score": None,
                    "bas": None,
                    "duration": duration
                })
                continue

            pred = clf.predict(X_test)
            bas = balanced_accuracy_score(Y_test, pred)
            results[method].append({
                "id": mdata["id"],
                "score": scores,
                "bas": bas,
                "duration": duration
            })

    results_filepath = f"./results{name}.json"
    with open(results_filepath, "w") as json_file:
        json.dump(results, json_file, indent=4)

    _run.add_artifact(results_filepath, content_type="json")
Ejemplo n.º 3
0
def run_ica_experiment(_run, method_idx):
    """Evaluate ICA methods per subject on the epochs loaded from ``filepaths``.

    For every ICA method (or the single one selected by ``method_idx``),
    each subject's epochs are low-pass filtered, resampled, decomposed with
    ICA, scored with the functions in ``SCORING_FN_DICT``, and classified
    with a CSP + SVC pipeline.  Results are written to
    ``./results{name}.json`` and attached to the sacred run as an artifact.

    Parameters
    ----------
    _run : sacred Run object used to attach the results artifact.
    method_idx : int or None
        Index into ``get_all_methods()`` selecting one method, or ``None``
        to run every method.
    """
    # filepaths = Path(r"C:\Users\paull\Documents\GIT\BCI_MsC\notebooks\BCI_Comp_IV_2a\BCICIV_2a_gdf/").glob("*T.gdf")

    dataset, metadata = BCI_IV_Comp_Dataset.load_dataset(filepaths,
                                                         as_epochs=True,
                                                         concatenate=False,
                                                         drop_bad=True,
                                                         return_metadata=True,
                                                         tmin=-1.,
                                                         tmax=3.)

    all_methods = get_all_methods()
    methods = all_methods if method_idx is None else [all_methods[method_idx]]
    name = "" if method_idx is None else "_{}".format(all_methods[method_idx])
    print("Using methods", methods)

    results = dict()
    for method in methods:
        print("Running for method", method)
        clf = make_pipeline(CSP(n_components=CSP_N_COMPONENTS), Vectorizer(),
                            MinMaxScaler(), SVC(C=4))
        results[method] = list()
        for i, (epochs, mdata) in enumerate(zip(dataset, metadata)):
            print("\t", i, mdata["id"])
            ICA = get_ica_instance(method, n_components=ICA_N_COMPONENTS)
            start = time.time()

            # Band-limit to <=40 Hz and downsample to 90 Hz before the
            # decomposition; work on a copy so `dataset` stays untouched.
            epochs = epochs.copy().load_data().filter(l_freq=None,
                                                      h_freq=40).resample(90.)

            # Fit ICA and project the epochs onto the estimated sources;
            # only this fit+transform span is reported as "duration".
            transformed_epochs = ICA.fit(epochs).get_sources(epochs)
            duration = time.time() - start

            # Pairwise scores over the concatenated source signals.
            scores = dict()
            signal = np.hstack(transformed_epochs.get_data())
            for fn_name in SCORING_FN_DICT:
                score = apply_pairwise_parallel(signal,
                                                SCORING_FN_DICT[fn_name])
                scores[fn_name] = score

            # Labels are the event codes (third column of the events array).
            X, Y = transformed_epochs.get_data(), transformed_epochs.events[:,
                                                                            2]

            # Free the large MNE objects before fitting the classifier.
            del epochs, transformed_epochs

            try:
                clf.fit(X, Y)
            except Exception as e:
                # Surface the failure reason (consistent with the companion
                # experiment) and record a null result for this subject.
                print("\t\tFailed during fit:", str(e))
                results[method].append({
                    "id": mdata["id"],
                    "score": None,
                    "bas": None,
                    "duration": duration
                })
                continue

            # NOTE(review): prediction runs on the training data itself, so
            # "bas" is a re-substitution score — confirm this is intended.
            pred = clf.predict(X)
            bas = balanced_accuracy_score(Y, pred)
            results[method].append({
                "id": mdata["id"],
                "score": scores,
                "bas": bas,
                "duration": duration
            })

    results_filepath = f"./results{name}.json"
    with open(results_filepath, "w") as json_file:
        json.dump(results, json_file, indent=4)

    _run.add_artifact(results_filepath, content_type="json")