import numpy as np
import scipy.io as sio

# Pad every test sequence up to the fixed model input length of 32768.
X = np.ones((X_test.shape[0], 32768, 4), dtype=np.float16)
Y = np.ones((X_test.shape[0], 32768), dtype=np.float16)

X[:, 0:X_test.shape[1]] = X_test

# Load the model and restore the best weights.
model = SVM((X.shape[1], 4))
#model = model_from_json(open('model.json').read())
model.load_weights(path_best_weights)

# Predict, then keep only the first 25350 positions (the padded tail is discarded).
Y_pred = model.predict(X)
Y_pred = Y_pred[:, 0:25350]

# Hard labels: argmax over the two output classes, mapped from {0, 1} to {1, 3}.
Y_pred_hard = 2 * np.argmax(Y_pred, axis=-1) + 1
# Soft labels: probability of class 1, mapped into the same [1, 3] range.
Y_pred_soft = 2 * Y_pred[:, :, 1] + 1

# Write the hard predictions into a matrix shaped like the phantom database.
predicted_phantoms_hard = sio.loadmat(path_predicted_phantoms_hard)
phantoms_data = predicted_phantoms_hard['PhantomDataBase']
dataa = np.zeros(phantoms_data.shape)

for i in range(Y_pred.shape[0]):
    indx = test_fantom_labels[i]
    dataa[:, indx - 1] = Y_pred_hard[i]
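
The snippet stops after filling dataa; persisting the result takes one call with scipy. A minimal sketch, assuming an output filename that is not in the original:

# Sketch only: the output path is an assumption, not from the original script.
sio.savemat('predicted_phantoms_hard_out.mat', {'PhantomDataBase': dataa})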
Example #2
print("Samples distribution:", preprocessing.samples_statistics(samples, _classes, get_question))
print("Train set distribution:", preprocessing.samples_statistics(train_samples, _classes, get_question))
print("Test set distribution:", preprocessing.samples_statistics(test_samples, _classes, get_question))

train_texts = [sample.text for sample in train_samples]
test_texts = [sample.text for sample in test_samples]
train_matrix, test_matrix, words = preprocessing.preprocess(train_texts, test_texts, words_src = "samples", normalize_flag = False)

if _model == "SVM":
	train_labels = preprocessing.samples_to_label(train_samples, _classes, get_question)
	test_labels = preprocessing.samples_to_label(test_samples, _classes, get_question)

	model = SVM()
	model.train(train_matrix, train_labels)
	predict = model.predict(test_matrix)

elif _model == "NN":
	train_dists = preprocessing.samples_to_dists(train_samples, _classes, get_question)
	test_dists = preprocessing.samples_to_dists(test_samples, _classes, get_question)
	model = Neural_Network(_n_factors = train_matrix.shape[1], _learning_rate = _learning_rate, _hidden_nodes = _hidden_nodes, _last_layer = len(_classes))
	model.train(train_matrix, train_dists, test_matrix, test_dists)
	predict = model.predict(test_matrix)
	predict = preprocessing.dists_to_labels(predict, _classes)
	test_labels = preprocessing.samples_to_label(test_samples, _classes, get_question)

else:
	raise Exception("Unknown model flag '%s'" % str(_model))

accuracy = np.mean(predict == test_labels)
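
Both branches assume the same small model interface: a no-argument constructor, train(matrix, labels), and predict(matrix). A minimal sketch of a compatible SVM wrapper, assuming scikit-learn underneath (the original class may differ):

from sklearn.svm import LinearSVC

class SVM:
    """Sketch of a wrapper matching the train/predict calls above."""
    def __init__(self):
        self.clf = LinearSVC()  # a linear SVM is an assumption, not confirmed

    def train(self, matrix, labels):
        self.clf.fit(matrix, labels)

    def predict(self, matrix):
        return self.clf.predict(matrix)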
Example #3
train_matrix, test_matrix, words = preprocessing.preprocess(
    train_texts,
    test_texts,
    words_src="samples",
    normalize_flag=False,
    reduction=_reduction,
    reduce_n_attr=_reduce_n_attr,
    stem_words=_stem_words)
model = None
print("Generating labels..")
if _model == "SVM":
    train_labels = preprocessing.samples_to_label(train_samples, _sections,
                                                  get_section)
    test_labels = preprocessing.samples_to_label(test_samples, _sections,
                                                 get_section)

    model = SVM()
    print("Training.. ")
    model.train(train_matrix, train_labels)
    predict = model.predict(test_matrix)

elif _model == "NN":
    train_dists = preprocessing.samples_to_dists(train_samples, _sections,
                                                 get_section)
    test_dists = preprocessing.samples_to_dists(test_samples, _sections,
                                                get_section)
    model = Neural_Network(_n_factors=train_matrix.shape[1],
                           _learning_rate=_learning_rate,
                           _hidden_nodes=_hidden_nodes,
                           _last_layer=len(_sections))
    print("Training.. ")
    model.train(train_matrix,
                train_dists,
                test_matrix,
                test_dists)
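
Neural_Network is also project-specific. A minimal sketch consistent with the constructor and train/predict calls above: one hidden layer and a softmax output trained against the target distributions by plain gradient descent (every internal detail here is an assumption):

import numpy as np

class Neural_Network:
    """Sketch of a one-hidden-layer softmax classifier; the interface is
    taken from the calls above, the internals are assumptions."""

    def __init__(self, _n_factors, _learning_rate, _hidden_nodes, _last_layer):
        rng = np.random.default_rng(0)
        self.lr = _learning_rate
        self.W1 = rng.normal(0.0, 0.01, (_n_factors, _hidden_nodes))
        self.W2 = rng.normal(0.0, 0.01, (_hidden_nodes, _last_layer))

    def _forward(self, X):
        self.h = np.tanh(X @ self.W1)
        z = self.h @ self.W2
        e = np.exp(z - z.max(axis=1, keepdims=True))  # stable softmax
        return e / e.sum(axis=1, keepdims=True)

    def train(self, X, dists, X_val=None, dists_val=None, epochs=100):
        # Validation data is accepted but unused in this sketch.
        for _ in range(epochs):
            p = self._forward(X)
            dz = (p - dists) / len(X)  # softmax + cross-entropy gradient
            dW2 = self.h.T @ dz
            dh = (dz @ self.W2.T) * (1.0 - self.h ** 2)
            dW1 = X.T @ dh
            self.W2 -= self.lr * dW2
            self.W1 -= self.lr * dW1

    def predict(self, X):
        return self._forward(X)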
Example #4

    train_data = []
    train_label = []
    for cpt in range(len(load_data)):
        train_x, train_y = load_data[cpt].getTrainData()
        train_data += train_x
        train_label += train_y

    nb_model_nb = NaiveBayes(train_data, train_label)
    nb_model_svm = SVM(train_data, train_label)

    # Save Naive Bayes Model
    nb_pickle = open(config.naive_bayes_path, 'wb')
    pickle.dump(nb_model_nb, nb_pickle)
    nb_pickle.close()

    # Save SVM Model
    svm_pickle = open(config.SVM_path, 'wb')
    pickle.dump(nb_model_svm, svm_pickle)
    svm_pickle.close()

    valid_data = []
    valid_label = []
    for cpt in range(len(load_data)):
        valid_x, valid_y = load_data[cpt].getTestData()
        valid_data += valid_x
        valid_label += valid_y
    predicted = nb_model_nb.predict(valid_data)
    print('results nb', nb_model_nb.loss_fct(predicted, valid_label))
    predicted = nb_model_svm.predict(valid_data)
    print('results svm', nb_model_svm.loss_fct(predicted, valid_label))
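
Reloading the serialized models later follows the standard pickle pattern; a short sketch using the same config paths as above:

import pickle

with open(config.naive_bayes_path, 'rb') as f:
    nb_model = pickle.load(f)
with open(config.SVM_path, 'rb') as f:
    svm_model = pickle.load(f)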
    


Example #5
import numpy as np

results0 = np.zeros(3000)  # 3 files x 1000 test predictions each
len_files = len(FILES)

for i in range(len_files):
    # Per-file hyperparameters: Gaussian bandwidth γ and regularization λ.
    γ = gamma_list[i]
    λ = lambda_list[i]

    X_train, Y_train, X_test = load_data(i,
                                         data_dir=DATA_DIR,
                                         files_dict=FILES)

    kernel = GaussianKernel(γ)
    clf = SVM(_lambda=λ, kernel=kernel)
    clf.fit(X_train, Y_train)
    y_pred = clf.predict(X_test)
    results0[i * 1000:i * 1000 + 1000] = y_pred

# SAVE Results
save_results("results_SVM_gaussian.csv", results0, RESULT_DIR)
print("1/3 Ending SVM with Gaussian kernel...")

#####################################
# 2) SVM with Convolutional kernel  #
#####################################
print("2/3 Starting SVM with Convolutional kernel...")
# Define parameter lists (one entry per data file)
sigma_list = [0.31, 0.31, 0.3]
k_list = [9, 10, 11]
lambda_list = [1e-5, 1e-9, 1e-9]