def initData():
    initialization = InitDataSet()
    doas = initialization.get_dataset_as_doas()
    encoding = Encoding('./../../data_to_be_saved/alphabet_3.txt')
    # EITHER option 1 OR option 2 below
    # option 1: single threshold
    mark_bursts_regions_one_threshold(doas)

    # option 2: different thresholds
    # mark_bursts_regions(doas)

    # remove_bursted_trials_when_segment(doas)


    # doas_train, doas_test, ind_test = train_test_doa(doas, 0.2)
    # build my own train/test split instead
    doas_train, doas_test, ind_test = train_test_doa_check_trials(doas, 0.2)

    # channel and segment are module-level globals defined elsewhere in the script
    train_data = ExtractData(doas_train, [channel], ['light', 'medium', 'deep'], [segment], ['all'])
    test_data = ExtractData(doas_test, [channel], ['light', 'medium', 'deep'], [segment], ['all'])

    # only FFT features
    X_train, y_train = obtain_TESPAR_A_FFT_features(train_data)
    x_test, y_test = obtain_TESPAR_A_FFT_features(test_data)

    return X_train, y_train, x_test, y_test
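
# A hypothetical usage sketch (not part of the original listing): the
# classifier and metrics below are illustrative assumptions, not the author's
# confirmed pipeline.
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score, f1_score

X_train, y_train, x_test, y_test = initData()
model = SVC(gamma='scale')  # placeholder model choice
model.fit(X_train, y_train)
predictions = model.predict(x_test)
print('accuracy:', accuracy_score(y_test, predictions))
print('f1-score:', f1_score(y_test, predictions, average='weighted'))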

# Example 2

# DataFrame that keeps the average and std dev of the runs
columns = ['ch train', 'ch test', 'segment', 'acc avr', 'acc std_dev', 'f1-sc avr', 'f1-sc std_dev']
df_results = DataFrame(columns=columns)
# df_results.to_csv(csv_results, mode='a', header=True)

train_channels = [4, 6, 12, 2, 14]  # good overall
test_channels = [21, 20, 29, 16, 19]  # bad overall

segment = 'spontaneous'

# how many models to train for a channel-segment pair
run_nr = 10

initialization = InitDataSet()
doas = initialization.get_dataset_as_doas()
encoding = Encoding('./../../data_to_be_saved/alphabet_1_150hz.txt')
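
# Hypothetical sketch (not part of the original listing) of how one
# (ch train, ch test, segment) summary row could be appended to df_results
# once a run loop has collected per-run scores; acc_list / f1_list and their
# values are placeholders.
from statistics import mean, stdev

acc_list = [0.71, 0.69, 0.74]  # placeholder accuracies over several runs
f1_list = [0.70, 0.68, 0.73]   # placeholder f1-scores over several runs
df_results.loc[len(df_results)] = [train_channels[0], test_channels[0], segment,
                                   mean(acc_list), stdev(acc_list),
                                   mean(f1_list), stdev(f1_list)]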

# ############################## train on good channel, test on good channel ###############################
# accuracies = [[[] for i in range(len(train_channels))] for j in range(len(train_channels))]
# f1scores = [[[] for i in range(len(train_channels))] for j in range(len(train_channels))]
#
# for run in range(run_nr):
#     # firstly split the input into train test
#     doas_train, doas_test, ind_test = train_test_doa(doas, 0.2)
#
#     for ind_train, ch_train in enumerate(train_channels):
#         for ind_test, ch_test in enumerate(train_channels):
#             print("start running for channel " + str(ch_train) + ' and ' + str(ch_test) + ' ' + segment + '\n')
#
#             # SplitData(self, doas, channels, levels, segment, orientation):
#             train_data = SplitData(doas_train, [ch_train], ['light', 'deep'], [segment], ['all'])
# # once per filter here

# Example 3
channels_range = 6
all_channels = [4, 6, 7, 13, 14]

segments = ['spontaneous', 'stimulus', 'poststimulus']

# DataFrame that keeps all runs for all channels; rows will be appended to the .csv file
column_names = ['channel', 'segment', 'accuracy', 'f1-score']
df_all = DataFrame(columns=column_names)
df_all.to_csv(csv_file, mode='a', header=True)
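
# Hypothetical sketch (not part of the original listing): appending one run's
# result for a channel/segment pair and flushing just that row to the same
# csv; the numeric values are placeholders.
df_all.loc[len(df_all)] = [2, 'spontaneous', 0.73, 0.71]
df_all.tail(1).to_csv(csv_file, mode='a', header=False)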

initialization = InitDataSet()
doas = initialization.get_dataset_as_doas()
# mark_outsiders(doas)
encoding = Encoding('./../../data_to_be_saved/alphabet_3.txt')
'''
To calculate the average accuracy or f1-score we need a
structure that keeps an array of 30 values for 3 segments for 30 channels.
'''
accuracies = [[[] for i in range(channels_range - 1)]
              for j in range(len(segments))]
f1scores = [[[] for i in range(channels_range - 1)]
            for j in range(len(segments))]

for run in range(run_nr):
    print('************************RUN ' + str(run) +
          '************************')
    # first split the input into train and test
    doas_train, doas_test, ind_test = train_test_doa(doas, 0.2)

# Example 4

#
# # TEST with and without burst
# a = encoding.get_a(X[13], X_validate[13])
# # def plotMatrixA_Single(DOA, segment, channel_number, values):
# plot_matrix_A('DEEP_t14_no_bursts', 'spontaneous', 2, a)
#
# a = encoding.get_a(X[36], X_validate[36])
# # def plotMatrixA_Single(DOA, segment, channel_number, values):
# plot_matrix_A('DEEP_t37_no_bursts', 'spontaneous', 2, a)
#
# a_no_bursts = encoding.get_a(X[0], X_validate[0])
# plot_matrix_A('DEEP_no_bursts', 'spontaneous', 2, a_no_bursts)

############### test: get all values in a channel and plot its TESPAR A matrix ##################

encoding = Encoding('./../../data_to_be_saved/alphabet_3.txt')

initialization = InitDataSet()
doas = initialization.get_dataset_as_doas()

# all_trials_values, all_trials_outsiders = get_channel_trials_values_and_outsiders(doas, 'light', 'spontaneous', 2)
# a_matrix_all = np.zeros((encoding.no_symbols, encoding.no_symbols), dtype=int)
# for i in range(len(all_trials_values)):
#     a_matrix = encoding.get_a(all_trials_values[i], all_trials_outsiders[i])
#     a_matrix_all = np.add(a_matrix_all, a_matrix)
# # print('debug')
# a_matrix_all = np.log10(a_matrix_all + 1)
# plot_matrix_A(DOA='LIGHT_all', segment='spontaneous', channel_number=2, values=a_matrix_all)
#
# all_trials_values, all_trials_outsiders = get_channel_trials_values_and_outsiders(doas, 'light', 'spontaneous', 7)
# a_matrix_all = np.zeros((encoding.no_symbols, encoding.no_symbols), dtype=int)

# Example 5

import numpy as np

from feature_extraction.TESPAR.Encoding import Encoding
from input_reader.InitDataSet import InitDataSet
from input_reader.Models import DOA
import matplotlib.pyplot as plt
import seaborn as sns

encoder = Encoding('./../data_to_be_saved/alphabet_3hz.txt')


def plot_matrix_A(a_matrix, DOA, trial, segment, channel_nr, lag):
    # ax = sns.heatmap(np.log10([[v + 1 for v in r] for r in a_matrix]), cmap="YlGnBu", vmin=0, vmax=5)
    ax = sns.heatmap(a_matrix, cmap="YlGnBu", vmin=0, vmax=10)
    ax.invert_yaxis()
    plt.xlabel("Symbols lag " + str(lag))
    plt.ylabel("Symbols lag " + str(lag))
    plt.title("A Matrix " + DOA + " " + str(trial) + " " + str(segment) +
              " ch: " + str(channel_nr))
    plot_name = 'log/channel' + str(
        channel_nr) + "_" + DOA + "_" + segment + "_lag" + str(lag) + "_A.png"
    # plt.savefig(plot_name)
    plt.show()
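
# Illustrative call with synthetic data (an assumption, not from the source),
# assuming Encoding exposes no_symbols as in the snippets above; the trial and
# channel numbers are placeholders.
demo_matrix = np.random.randint(0, 10, size=(encoder.no_symbols, encoder.no_symbols))
plot_matrix_A(demo_matrix, DOA='DEEP', trial=14, segment='spontaneous', channel_nr=2, lag=1)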


def differences_A(doas, ch_nr, trial_nr, segment):
    # doas_array = np.array(doas, dtype=DOA)
    # # open the wanted DOAs - DEEP and LIGHT
    pass


# Example 6
import numpy as np

from feature_extraction.TESPAR.Encoding import Encoding
import os
import sys

en = Encoding('./symbols_3_s10.txt')
array = []

file_deep_stimulus = 'input_reader/cutoff1hz/deep/stimulus'
project_path = os.path.join('', '..')
data_dir = os.path.join(project_path, file_deep_stimulus, '')
sys.path.append(project_path)

file_name = "channel0.txt"
with open(os.path.join(data_dir, file_name), 'r') as f:
    line = f.readline()
    line = line.replace("[", "")
    line = line.replace("]", "")
    array = np.fromstring(line, dtype=float, sep=', ')

en.get_symbols(array)
en.get_s()

# Example 7

import os
import sys

import numpy as np

from input_reader.CreateDOA import CreateDOA
from feature_extraction.TESPAR.Encoding import Encoding

# write the results here
path = os.getcwd()
fileName = path + "/results_3hz.txt"
results_file = open(fileName, "w")

# lag values to be tested
lags_values = [0, 1, 3, 5]

# encoder used
encoder = Encoding('../data_to_be_saved/alphabet_3hz.txt')
a_size = (len(encoder.a_matrix), len(encoder.a_matrix[0]))

#  read input doas
data_dir = os.path.join('', '..')
data_dir = os.path.join(data_dir, 'Data/cutoff3hz', '')
sys.path.append(data_dir)

doa_info = {
    'deep': {
        'epd':
        'M014_S001_SRCS3L_25,50,100_0002.epd',
        'eti':
        'Results M014_S001_SRCS3L_25,50,100_0002 Variable contrast, all orientations.eti'
    },
    'light': {