Example #1
    def cross_validation(self, content_file):
        """
        Run 5-fold cross-validation on the feature data.
        :param content_file: path to the feature/label data file
        :return: the fitted classifier
        """
        # assumes module-level: from sklearn.svm import SVC
        #                       from sklearn.model_selection import cross_val_score as cs
        dataset = self.load_data(content_file)
        row, col = dataset.shape
        X = dataset[:, :col - 1]  # all columns except the last are features
        Y = dataset[:, -1]        # the last column is the label
        clf = SVC(kernel='rbf', C=1000)
        clf.fit(X, Y)
        scores = cs(clf, X, Y, cv=5)  # cross_val_score refits the estimator on each fold
        print("Accuracy: %0.2f (+- %0.2f)" % (scores.mean(), scores.std()))

        return clf
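
For reference, a minimal standalone sketch of the same 5-fold cross-validation step; the synthetic data and the cross_val_score import are assumptions, since load_data and the surrounding class are not shown here.

# Hypothetical, self-contained version of the cross-validation call above.
from sklearn.datasets import make_classification
from sklearn.model_selection import cross_val_score
from sklearn.svm import SVC

X, Y = make_classification(n_samples=200, n_features=10, random_state=0)  # stand-in data
clf = SVC(kernel='rbf', C=1000)
scores = cross_val_score(clf, X, Y, cv=5)  # 5-fold CV; each fold refits the estimator
print("Accuracy: %0.2f (+- %0.2f)" % (scores.mean(), scores.std()))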
Example #2
    def train_creale_plk(self, content_file, plk_file):
        """
        Train on the feature data and persist the fitted model to a file.
        :param content_file: path to the feature/label data file
        :param plk_file: output file for the trained model
        :return:
        """
        # assumes module-level: import joblib
        #                       from sklearn.svm import SVC
        #                       from sklearn.model_selection import cross_val_score as cs
        dataset = self.load_data(content_file)
        if not dataset.any():
            raise Exception('Feature file is empty')
        row, col = dataset.shape
        X = dataset[:, :col - 1]  # all columns except the last are features
        Y = dataset[:, -1]        # the last column is the label
        clf = SVC(kernel='rbf', C=1000)
        clf.fit(X, Y)

        scores = cs(clf, X, Y, cv=5)
        print("Accuracy: %0.2f (+- %0.2f)" % (scores.mean(), scores.std()))
        joblib.dump(clf, plk_file)  # persist the fitted model
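
A hedged sketch of consuming the file written by joblib.dump above; the file name 'model.pkl' and the all-zero feature row are placeholders, not values from the original project.

# Hypothetical usage of the persisted classifier.
import numpy as np
import joblib

clf = joblib.load('model.pkl')               # load the model written by joblib.dump
sample = np.zeros((1, clf.n_features_in_))   # one feature row of the expected width
print(clf.predict(sample))                   # predicted label for that row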
Example #3
print(len(train_X))
print(len(train_y))
print(len(test_X))
print(len(test_y))
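
The four arrays printed here presumably come from an earlier train_test_split call that is not shown; a minimal sketch, assuming synthetic data, of a split that produces such lengths.

# Hypothetical split behind the length checks above.
from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split

X, y = make_regression(n_samples=100, n_features=5, random_state=0)  # stand-in data
train_X, test_X, train_y, test_y = train_test_split(X, y, test_size=0.25, random_state=0)

print(len(train_X), len(train_y))  # 75 75
print(len(test_X), len(test_y))    # 25 25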


# In[ ]:


# Fit a model on the split data and check accuracy
# (dtr and cs are aliases defined earlier, presumably DecisionTreeRegressor and cross_val_score)
model = dtr()
model.fit(train_X, train_y)

a = model.score(test_X, test_y)   # R^2 score on the held-out data
print('Score with model', a)
z = cs(model, test_X, test_y)     # cross-validation score for each fold

print('Cross-validation scores per fold', z)


# In[ ]:


# Predict on the test data and report the error metric
prediction = model.predict(test_X)

ans = aus(test_y, prediction)  # aus is an error-metric alias imported earlier (e.g. mean_absolute_error)

Final_score1 = round(model.score(train_X, train_y) * 100, 6)  # training-set score as a percentage

print('Error', ans)
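
For reference, a self-contained sketch of the same fit/score/cross-validate/error flow; it assumes dtr and aus above are aliases for DecisionTreeRegressor and mean_absolute_error, which is not confirmed by the snippet.

# Hypothetical end-to-end version of the cells above, on synthetic data.
from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split, cross_val_score
from sklearn.tree import DecisionTreeRegressor
from sklearn.metrics import mean_absolute_error

X, y = make_regression(n_samples=300, n_features=5, noise=0.1, random_state=0)
train_X, test_X, train_y, test_y = train_test_split(X, y, random_state=0)

model = DecisionTreeRegressor()
model.fit(train_X, train_y)

print('Score with model', model.score(test_X, test_y))                    # R^2 on held-out data
print('Cross-validation scores', cross_val_score(model, test_X, test_y))  # per-fold R^2
print('Error', mean_absolute_error(test_y, model.predict(test_X)))        # mean absolute error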
Example #4
    # input layer (Keras 2 argument names: units / kernel_initializer)
    classifier.add(
        Dense(units=50, input_dim=nh, kernel_initializer='uniform', activation='relu'))

    # hidden layers
    classifier.add(Dense(units=50, kernel_initializer='uniform', activation='relu'))
    classifier.add(Dense(units=50, kernel_initializer='uniform', activation='relu'))

    # output layer: a single sigmoid unit for binary classification
    classifier.add(Dense(units=1, kernel_initializer='uniform', activation='sigmoid'))

    # compile the ANN
    classifier.compile(optimizer='rmsprop',
                       metrics=['accuracy'],
                       loss='binary_crossentropy')
    return classifier


# k-fold cross-validation
from sklearn.model_selection import cross_val_score as cs
classifier = KerasClassifier(build_fn=build_classifier,
                             batch_size=5,
                             epochs=100)
accuracies = cs(classifier, X=X, y=y, cv=10, n_jobs=-1)
accuracy = accuracies.mean()

# fit the model on the full data
classifier.fit(X, y)

# predict
y_pred = classifier.predict(X_test1)
Example #5
    classifier.add(Dense(units=50, kernel_initializer='uniform', activation='relu'))
    classifier.add(Dense(units=50, kernel_initializer='uniform', activation='relu'))

    # output layer: a single sigmoid unit for binary classification
    classifier.add(Dense(units=1, kernel_initializer='uniform', activation='sigmoid'))

    # compile the ANN
    classifier.compile(optimizer='rmsprop',
                       metrics=['accuracy'],
                       loss='binary_crossentropy')
    return classifier


# k-fold cross-validation
from sklearn.model_selection import cross_val_score as cs
classifier = KerasClassifier(build_fn=build_classifier,
                             batch_size=5,
                             epochs=100)
accuracies = cs(classifier, X=X_train, y=y_train, cv=10, n_jobs=-1)
accuracy = accuracies.mean()

# fit the model on the training data
classifier.fit(X_train, y_train)

# predict on the test data
y_pred = classifier.predict(X_test)

# confusion matrix (true labels first, then predictions)
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test, y_pred)
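
A minimal sketch of how a binary confusion matrix like cm is usually summarized; the label arrays here are placeholders rather than outputs of the model above.

# Hypothetical summary of a 2x2 confusion matrix with placeholder labels.
import numpy as np
from sklearn.metrics import confusion_matrix, accuracy_score

y_test = np.array([0, 1, 1, 0, 1, 0])  # placeholder true labels
y_pred = np.array([0, 1, 0, 0, 1, 1])  # placeholder predictions

cm = confusion_matrix(y_test, y_pred)     # rows = true class, columns = predicted class
tn, fp, fn, tp = cm.ravel()               # unpack the 2x2 matrix
print(cm)
print('accuracy', (tn + tp) / cm.sum())   # matches accuracy_score(y_test, y_pred)
print('accuracy_score', accuracy_score(y_test, y_pred))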