Example #1
pp.create_labels(audio_path=audio_path, output_classes=cf, output_labels=lf)
labels = np.load(resource_path + "\\labels\\" + lf)
labelencoder = LabelEncoder() 
labelencoder.classes_ = np.load(resource_path + "\\labels\\" + cf)
classes = labelencoder.transform(labels)
pp.one_hot(classes, "onehotlabels.npy")
onehotlabels = np.load(resource_path+"\\labels\\onehotlabels.npy")
print(labels.shape)
print(classes.shape)
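# pp.one_hot is a project helper; below is a hedged sketch of what it might do
# (assumption: it one-hot encodes the integer class indices with numpy and saves
# the result next to the other label files; name and body are hypothetical):
def one_hot_sketch(class_indices, filename):
    onehot = np.eye(int(class_indices.max()) + 1)[class_indices]  # one row per sample
    np.save(resource_path + "\\labels\\" + filename, onehot)
    return onehot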

scaled_feature_vectors = pickle.load(open(resource_path + "\\feature_vectors\\CQT_SK\\" + "CQT_SK_VECTORS_44100_28_37_512.pl", "rb"))
# flatten to (n_samples, 37) for the CQT feature vectors; omit this reshape for other feature types
scaled_feature_vectors = scaled_feature_vectors.reshape(len(scaled_feature_vectors), 37)
print(scaled_feature_vectors.shape)
train_set, test_set, train_classes, test_classes, test_index = pp.split_training_set(onehotlabels, scaled_feature_vectors)
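# pp.split_training_set is a project helper; a hedged sketch built on sklearn's
# train_test_split (assumption: it returns the split plus the original indices of
# the test samples, matching the five values unpacked above; name is hypothetical):
from sklearn.model_selection import train_test_split

def split_training_set_sketch(class_labels, vectors, test_size=0.25, seed=42):
    indices = np.arange(len(vectors))
    (train_set, test_set,
     train_classes, test_classes,
     train_index, test_index) = train_test_split(vectors, class_labels, indices,
                                                 test_size=test_size, random_state=seed)
    return train_set, test_set, train_classes, test_classes, test_index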

# "model" is the trained DNN, loaded or fitted earlier (not shown in this snippet)
test_predictions = model.predict(test_set)

predictions_round = np.around(test_predictions).astype('int')
predictions_int = np.argmax(predictions_round, axis=1)
predicted_labels = labelencoder.inverse_transform(np.ravel(predictions_int))

test_round = np.around(test_classes).astype('int')
test_int = np.argmax(test_round, axis=1)
test_labels = labelencoder.inverse_transform(np.ravel(test_int))

plt.figure(figsize=(18, 13))
evaluation.plot_confusion_matrix(predicted_labels, labelencoder.classes_, test_labels)
plt.savefig(plot_path + "\\DNN\\" + log_name_cqt[:-3] +".png")
wp = evaluation.wrong_predictions(predicted_labels, test_labels)
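# evaluation.plot_confusion_matrix and evaluation.wrong_predictions are project
# helpers; a hedged sketch of the former using sklearn.metrics.confusion_matrix,
# following the argument order of the call above (predicted, class names, true):
from sklearn.metrics import confusion_matrix

def plot_confusion_matrix_sketch(predicted, class_names, true):
    cm = confusion_matrix(true, predicted, labels=class_names)
    plt.imshow(cm, interpolation='nearest', cmap=plt.cm.Blues)
    plt.xticks(range(len(class_names)), class_names, rotation=90)
    plt.yticks(range(len(class_names)), class_names)
    plt.xlabel('predicted label')
    plt.ylabel('true label')
    plt.colorbar()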
Example #2
print(labels.shape)
print(classes.shape)


'''
#CREATE VECTORS
feature_vectors, files = pp.get_cqt_folder(path=audio_path)
pp.save_cqt_sk(feature_vectors, log_name_cqt + ".pl")
np.save(resource_path + "\\files\\" + files_path + ".npy", files)
'''
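# pp.get_cqt_folder and pp.save_cqt_sk are project helpers; a hedged sketch of the
# former using librosa (assumption: one time-averaged CQT magnitude vector of
# n_bins per audio file; the default sample rate, bin count and hop length below
# are hypothetical, taken from the CQT file name used in Example #1):
import os, librosa

def get_cqt_folder_sketch(path, sr=44100, n_bins=37, hop_length=512):
    vectors, files = [], []
    for fname in sorted(os.listdir(path)):
        if not fname.lower().endswith(".wav"):
            continue
        y, _ = librosa.load(os.path.join(path, fname), sr=sr)
        cqt = np.abs(librosa.cqt(y, sr=sr, n_bins=n_bins, hop_length=hop_length))
        vectors.append(cqt.mean(axis=1))  # average magnitude over time frames
        files.append(fname)
    return np.array(vectors), files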

scaled_feature_vectors = pickle.load(open(resource_path + "\\feature_vectors\\" + log_name_cqt +".pl", "rb"))
# flatten to (n_samples, n_bins) for the CQT feature vectors; omit this reshape for other feature types
scaled_feature_vectors = scaled_feature_vectors.reshape(len(scaled_feature_vectors), n_bins)
print(scaled_feature_vectors.shape)
train_set, test_set, train_classes, test_classes, test_index = pp.split_training_set(classes, scaled_feature_vectors)

# GRID SEARCH over a coarse 2-point grid per hyperparameter
# (increase the third logspace argument for a finer search)
C_range = np.logspace(-2, 10, 2)
gamma_range = np.logspace(-9, 3, 2)
kernel = np.array(['rbf'])
param_grid = [{'C': C_range, 'gamma': gamma_range, 'kernel': kernel}]
model = GridSearchCV(SVC(), param_grid)
model.fit(train_set, train_classes)
print("best params are %s with score %0.2f" % (model.best_params_, model.best_score_))

learning.save_model(model, model_path + "\\SVM\\SVM_grid_windowed_rbf")
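# learning.save_model is a project helper; a minimal sketch consistent with the
# pickle (.pl) files used elsewhere in the script (assumption: it simply
# serialises the fitted estimator to the given path; name is hypothetical):
def save_model_sketch(estimator, path):
    with open(path + ".pl", "wb") as f:
        pickle.dump(estimator, f)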

'''
model = learning.kNN(train_set, train_classes)
learning.save_model(model, model_path+"\\SVM\\SVM_grid_windowed")