Example #1
import numpy

from sklearn import svm
from sklearn.preprocessing import MinMaxScaler
from sklearn.decomposition import PCA
from sklearn.feature_selection import SelectKBest
from sklearn.model_selection import KFold
from sklearn.utils import shuffle
from sklearn.naive_bayes import GaussianNB
from sklearn.manifold import LocallyLinearEmbedding

from data.preprocess import features_preprocess, features_test_preprocess, labels_preprocess, labels_preprocess_num, ft_lbls_num
from data.preprocess_2nd import preprocess_ft_lbls_num

scores = []

# Reduce each feature set to 10 dimensions with Locally Linear Embedding
embedding = LocallyLinearEmbedding(n_components=10)

(features1, labels1) = ft_lbls_num()
(features2, labels2) = preprocess_ft_lbls_num()

# LLE is unsupervised: fit_transform only uses the features, not the labels
features1 = embedding.fit_transform(features1)
features2 = embedding.fit_transform(features2)

# 5-fold cross-validation
K = 5
cv = KFold(n_splits=K, shuffle=True)

# Pool the two preprocessed datasets into a single feature matrix and label vector
features = numpy.concatenate((features1, features2))
labels = numpy.concatenate((labels1, labels2))

# RBF-kernel support vector classifier
clf = svm.SVC(kernel='rbf')

for i in range(100):
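    # NOTE: the body of this loop is missing from the original example. A plausible
    # completion (an assumption, not the author's code): repeat 5-fold cross-validation
    # 100 times and record the SVC's accuracy on each held-out fold.
    for train, test in cv.split(features):
        clf.fit(features[train], labels[train])
        scores.append(clf.score(features[test], labels[test]))

print(numpy.mean(scores))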
Example #2
import keras
import sklearn
from sklearn import svm
from keras.layers import Dense, Input, LeakyReLU
from keras.models import Model
from sklearn.feature_selection import SelectKBest
from sklearn.metrics import accuracy_score
from sklearn.model_selection import KFold
import numpy

from data.preprocess import ft_lbls_num
from data.preprocess_2nd import preprocess_ft_lbls_num

(features, labels) = ft_lbls_num()
# Convert to NumPy arrays so the K-fold index arrays below can be used directly
features = numpy.asarray(features, dtype=numpy.float32)
labels = numpy.asarray(labels, dtype=numpy.float32)

# Shallow autoencoder: 50 input features, 20-unit bottleneck with a LeakyReLU activation
input = Input(shape=(50, ))
hd1 = Dense(20)(input)
leaky = LeakyReLU()(hd1)
output = Dense(50, activation='softmax')(leaky)

model = Model(input, output)   # full autoencoder
encoder = Model(input, hd1)    # encoder half: maps inputs to the 20-unit bottleneck

model.compile(optimizer='adam', loss='binary_crossentropy')

K = 5
cv = KFold(n_splits=K, shuffle=True)
scores = []

for train, test in cv.split(features):
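    # NOTE: the body of this loop is missing from the original example. A plausible
    # completion (an assumption, not the author's code): fit the autoencoder on the
    # training fold, encode both folds through the bottleneck, then train an RBF SVM
    # on the encoded features and record its held-out accuracy. The epoch and batch
    # size values are placeholders.
    model.fit(features[train], features[train], epochs=50, batch_size=32, verbose=0)

    encoded_train = encoder.predict(features[train])
    encoded_test = encoder.predict(features[test])

    clf = svm.SVC(kernel='rbf')
    clf.fit(encoded_train, labels[train])
    scores.append(accuracy_score(labels[test], clf.predict(encoded_test)))

print(numpy.mean(scores))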