Example #1
def predict_features():
    # Load the trained autoencoder and rebuild the encoder half from its layers.
    model = load_model('./encoder2019_08_18_04_59_46.h5')
    model_input = model.input  # renamed: `input` shadows the built-in
    encoded1 = model.layers[1]
    encoded2 = model.layers[2]
    encoder = Model(inputs=model_input, outputs=encoded2(encoded1(model_input)))

    # Rebuild the decoder half too (unused below, kept for symmetry).
    decoded1 = model.layers[3]
    decoded2 = model.layers[4]
    decoder_input = Input(shape=(1, 3))
    decoder = Model(inputs=decoder_input,
                    outputs=decoded2(decoded1(decoder_input)))

    # 1280 experiments, 29 rows each; every row holds 130 samples per channel.
    df_list = rd.read_pkl_list(dwt_data_no_amr_normalized)
    rows_num = 1280 * 29
    data = list()

    for i, df in enumerate(df_list):
        print(i)
        for index, row in df.iterrows():
            # Zip the 14 channel columns into one (130, 14) sequence per row.
            value = list(
                zip(row['AF3'], row['F7'], row['F3'], row['FC5'], row['T7'],
                    row['P7'], row['O1'], row['O2'], row['P8'], row['T8'],
                    row['FC6'], row['F4'], row['F8'], row['AF4']))
            data.append(value)

    # Flatten to one timestep per sample, encode, then regroup by experiment.
    data = np.array(data)
    data = np.reshape(data, (rows_num * 130, 1, 14))

    predicted = encoder.predict(data)
    predicted = predicted.reshape(1280 * 29, 130, 3)

    with open(encoded_features, 'wb') as f:
        pkl.dump(predicted, f)
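
To consume the dumped array later, a minimal sketch (the literal path is a placeholder; in the repo, `encoded_features` is a module-level constant):

import pickle as pkl

# Placeholder path standing in for the repo's `encoded_features` constant.
with open('encoded_features.pkl', 'rb') as f:
    features = pkl.load(f)

print(features.shape)  # (1280 * 29, 130, 3), per the reshape above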
Example #2
def calculate_windowed_for_raw(filename, window_size, step, result):
    # Slice each experiment's raw signal into overlapping windows and pickle them.
    df_list = rd.read_pkl_list(filename)
    print('file read')
    windowed = list(map(lambda x: rd.generate_windowed_data(x, window_size, step), df_list))
    print('windowed')
    with open(result, 'wb') as f:
        pkl.dump(windowed, f)
    print('Windowed data saved to pickle')
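
rd.generate_windowed_data is project code not shown here; as a rough standalone illustration of the overlapping-window idea (my own helper, not the repo's):

import numpy as np

def sliding_windows(signal, window_size, step):
    # Windows start at 0, step, 2*step, ... and overlap when step < window_size.
    return np.array([signal[i:i + window_size]
                     for i in range(0, len(signal) - window_size + 1, step)])

print(sliding_windows(np.arange(10), 4, 2))
# [[0 1 2 3]
#  [2 3 4 5]
#  [4 5 6 7]
#  [6 7 8 9]]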
Example #3
def calculate_alpha_for_all(filename, result):
    # Window each experiment (size 8, step 2), then extract the alpha band via DWT.
    df_list = rd.read_pkl_list(filename)
    print('file read')
    windowed = map(lambda x: rd.generate_windowed_data(x, 8, 2), df_list)
    print('windowed')
    dwt = list(map(rd.calculate_alpha, windowed))
    print('alpha waves')
    with open(result, 'wb') as f:
        pkl.dump(dwt, f)
    print('DWT (Alpha) data saved to pickle')
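
rd.calculate_alpha is not included in these snippets; a hedged sketch of alpha-band extraction via DWT with PyWavelets, assuming a db4 wavelet and 128 Hz sampling (neither is confirmed by the source):

import numpy as np
import pywt

def alpha_detail(signal, wavelet='db4'):
    # With 128 Hz data, detail level 3 spans roughly 8-16 Hz,
    # which overlaps the alpha band (~8-13 Hz).
    coeffs = pywt.wavedec(signal, wavelet, level=3)  # [cA3, cD3, cD2, cD1]
    return coeffs[1]  # cD3

print(alpha_detail(np.random.randn(130)).shape)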
Example #4
def calculate_beta_for_all_normalized(filename, result):
    # Window each experiment (size 4, step 2), extract the beta band via DWT,
    # then normalize per channel.
    df_list = rd.read_pkl_list(filename)
    print('file read')
    windowed = map(lambda x: rd.generate_windowed_data(x, 4, 2), df_list)
    print('windowed')
    dwt = list(map(rd.calculate_beta, windowed))
    print('beta waves')
    dwt_normalized = normalize_by_channel(dwt)
    print('normalized')
    with open(result, 'wb') as f:
        pkl.dump(dwt_normalized, f)
    print('DWT (Beta normalized) data saved to pickle')
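
normalize_by_channel also comes from the project; a minimal stand-in sketch, assuming it min-max scales each channel independently (an assumption, not confirmed by the source):

import numpy as np

def min_max_per_channel(x):
    # x: (channels, samples); scale each channel to [0, 1] on its own range.
    mins = x.min(axis=1, keepdims=True)
    maxs = x.max(axis=1, keepdims=True)
    return (x - mins) / (maxs - mins)

scaled = min_max_per_channel(np.random.randn(14, 130))
print(scaled.min(), scaled.max())  # 0.0 1.0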
Example #5
def cluster_data(filename):
    data = rd.read_pkl_list(filename)
    valence = list()
    arousal = list()

    arousal_column = 'arousal'
    valence_column = 'valence'
    for df in data:
        # Every row in an experiment shares one valence/arousal rating,
        # so the rating is repeated once per row.
        a = df[arousal_column][0]
        v = df[valence_column][0]

        for index, row in df.iterrows():
            arousal.append(a)
            valence.append(v)

    # Rescale the 1-9 self-assessment ratings to [0, 1].
    valence = np.array(valence)
    valence = (valence - 1) / 8

    arousal = np.array(arousal)
    arousal = (arousal - 1) / 8

    valence_arousal_data = {
        valence_column: valence,
        arousal_column: arousal
    }

    df = DataFrame(valence_arousal_data, columns=[valence_column, arousal_column])

    plt.figure(figsize=(12, 12))

    # K-means on the 2-D valence/arousal plane; labels color the scatter below.
    clusters = 8
    kmeans = KMeans(n_clusters=clusters).fit_predict(df)
    plt.title("Kmeans")
    plt.scatter(df[valence_column], df[arousal_column], c=kmeans)
    plt.show()
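
fit_predict returns one integer cluster label per row, which is what feeds the scatter's color argument; a toy standalone demo:

import numpy as np
from sklearn.cluster import KMeans

points = np.random.rand(100, 2)  # stand-in for the valence/arousal frame
labels = KMeans(n_clusters=8, n_init=10).fit_predict(points)
print(labels[:10])               # one label in 0..7 per point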
Example #6
def show_data():
    data = rd.read_pkl_list(dwt_data)

    x = np.arange(130)

    for exp in range(1280):
        plt.figure(figsize=(80, 20))  # one large figure per experiment
        d0p0 = data[exp]
        d0p0 = data[exp]

        valence = d0p0['valence'][0]
        arousal = d0p0['arousal'][0]

        # Plot all 14 channels for each of the first 20 windows.
        for i in range(20):
            plt.subplot(2, 10, i + 1)
            for channel in channels:
                plt.plot(x, d0p0[channel][i], label=channel)
            plt.gca().set_title("Window no. " + str(i))

        plt.legend()
        plt.suptitle("Valence: " + str(valence) + " Arousal: " + str(arousal))
        plt.savefig(plots_dir + str(exp) + "_valence=" + str(valence) + "_arousal=" + str(arousal) + ".png",
                    bbox_inches='tight')
        plt.close()  # release the figure; 1280 open figures would exhaust memory
Example #7
def autoencode():
    # TensorBoard logging plus checkpointing of the best model by validation loss.
    tsb_log = TensorBoard(log_dir=encoder_log_dir,
                          histogram_freq=100,
                          write_graph=True,
                          write_images=True)
    encoder_filepath = path.join(
        encoder_dir,
        "encoder" + dt.datetime.now().strftime("%Y_%m_%d_%H_%M_%S") + ".h5")  # HDF5 file, as loaded in Example #1
    checkpointer = ModelCheckpoint(filepath=encoder_filepath,
                                   verbose=1,
                                   save_best_only=True)

    # Same data preparation as in Example #1.
    df_list = rd.read_pkl_list(dwt_data_no_amr_normalized)
    rows_num = 1280 * 29
    data = list()

    for i, df in enumerate(df_list):
        print(i)
        for index, row in df.iterrows():
            value = list(
                zip(row['AF3'], row['F7'], row['F3'], row['FC5'], row['T7'],
                    row['P7'], row['O1'], row['O2'], row['P8'], row['T8'],
                    row['FC6'], row['F4'], row['F8'], row['AF4']))
            data.append(value)

    data = np.array(data)
    data = np.reshape(data, (rows_num * 130, 1, 14))
    # Bottleneck architecture: 14 -> 8 -> 3 -> 8 -> 14.
    input_shape = 14
    encoded1_shape = 8
    encoding_dim = 3
    decoded2_shape = 8

    # An autoencoder's targets are its inputs, so the data is split against
    # itself (the y halves end up unused).
    x_train, x_valid, y_train, y_valid = train_test_split(data,
                                                          data,
                                                          test_size=0.2)

    input_data = Input(shape=(1, input_shape))
    # l2(0) keeps the regularizer hooks in place but is effectively disabled.
    encoded = Dense(encoded1_shape,
                    activation="relu",
                    activity_regularizer=regularizers.l2(0))(input_data)
    encoded_middle = Dense(encoding_dim,
                           activation="relu",
                           activity_regularizer=regularizers.l2(0))(encoded)
    decoded = Dense(decoded2_shape,
                    activation="relu",
                    activity_regularizer=regularizers.l2(0))(encoded_middle)
    output = Dense(input_shape,
                   activation="sigmoid",
                   activity_regularizer=regularizers.l2(0))(decoded)

    autoencoder = Model(inputs=input_data, outputs=output)
    # The encoder sub-model shares the trained layers up to the bottleneck.
    encoder = Model(input_data, encoded_middle)

    autoencoder.compile(loss="mean_squared_error", optimizer="adam")
    autoencoder.summary()
    autoencoder.fit(x_train,
                    x_train,
                    epochs=1000,
                    callbacks=[checkpointer, tsb_log],
                    validation_data=(x_valid, x_valid))
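
The checkpoint written here is what Example #1 later reloads with load_model, rebuilding the encoder from layers 1-2. As a quick sanity check on the 14 -> 8 -> 3 -> 8 -> 14 bottleneck, run at the end of autoencode() (toy input; assumes the models above are in scope):

import numpy as np

x = np.random.rand(4, 1, 14)
print(autoencoder.predict(x).shape)  # (4, 1, 14): reconstruction
print(encoder.predict(x).shape)      # (4, 1, 3): bottleneck features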
Example #8
def calculate_energy_power_entropy_mean_st_dev(filename, output):
    # Compute per-experiment statistical features and pickle the list.
    data = rd.read_pkl_list(filename)
    features = list(map(calculate_energy_power_entropy_mean_st_dev_for_experiment, data))
    with open(output, 'wb') as f:
        pkl.dump(features, f)
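
calculate_energy_power_entropy_mean_st_dev_for_experiment is not included in these snippets; purely as a guess at the quantities its name lists (and, analogously, Example #9's min/max difference would be w.max() - w.min()), a hypothetical per-window version:

import numpy as np
from scipy.stats import entropy

def window_stats(w):
    # Hypothetical: energy, mean power, entropy of the normalized magnitudes,
    # mean, and standard deviation of one window.
    w = np.asarray(w, dtype=float)
    energy = np.sum(w ** 2)
    power = energy / w.size
    p = np.abs(w) / np.sum(np.abs(w))  # crude probability mass for entropy
    return energy, power, entropy(p), w.mean(), w.std()

print(window_stats(np.random.randn(130)))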
Example #9
def calculate_power_energy_minmax_diff(filename, output):
    # Same pattern as Example #8 with a different per-experiment feature set.
    data = rd.read_pkl_list(filename)
    features = list(map(calculate_power_energy_minmax_for_experiment, data))
    with open(output, 'wb') as f:
        pkl.dump(features, f)