Example #1
from sklearn.svm import SVC
from sklearn.feature_selection import SelectKBest, f_classif
from sklearn.cross_validation import StratifiedKFold
from sklearn.cross_validation import cross_val_score
from sklearn.cross_validation import LeaveOneLabelOut
from sklearn.cross_validation import permutation_test_score
from sklearn.pipeline import Pipeline
from sklearn.dummy import DummyClassifier
from nilearn.input_data import NiftiMasker
import numpy as np


import fmriUtils as fm

n_folds = 10
f = fm.outTo()
X, y = fm.loadData()
y = fm.defineClass(y)

XX = X
yy = y
# Classify and predict with a linear SVM
svc = SVC(kernel='linear')
# Univariate feature selection: keep the 1000 voxels with the highest ANOVA F-score
feature_selection = SelectKBest(f_classif, k=1000)
anova_svc = Pipeline([('anova', feature_selection), ('svc', svc)])

cv = StratifiedKFold(yy, n_folds=n_folds)

cv_scores = []

for train, test in cv:
    anova_svc.fit(XX[train], yy[train])
    cv_scores.append(anova_svc.score(XX[test], yy[test]))
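
The sklearn.cross_validation module used above was deprecated in scikit-learn 0.18 and removed in 0.20. For reference, a minimal sketch of the same ANOVA + linear SVM pipeline against the current sklearn.model_selection API, assuming the same XX/yy arrays loaded through fmriUtils:

# Minimal sketch on scikit-learn >= 0.20 (assumed environment), where
# sklearn.cross_validation has been replaced by sklearn.model_selection.
from sklearn.model_selection import StratifiedKFold, cross_val_score
from sklearn.feature_selection import SelectKBest, f_classif
from sklearn.pipeline import Pipeline
from sklearn.svm import SVC

anova_svc = Pipeline([
    ('anova', SelectKBest(f_classif, k=1000)),  # keep the 1000 highest-F voxels
    ('svc', SVC(kernel='linear')),
])
cv = StratifiedKFold(n_splits=n_folds)  # labels now go to split(), not the constructor
cv_scores = cross_val_score(anova_svc, XX, yy, cv=cv)
print("mean CV accuracy: %.3f" % cv_scores.mean())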
Example #2
import os
import numpy as np
import fmriUtils as fm
from sklearn.svm import LinearSVC
from sklearn.naive_bayes import BernoulliNB, MultinomialNB
from sklearn.neighbors import KNeighborsClassifier
from sklearn.neighbors import NearestCentroid
from sklearn.ensemble import RandomForestClassifier
from sklearn.linear_model import LogisticRegression, RidgeClassifier
from sklearn.linear_model import Perceptron, PassiveAggressiveClassifier
from sklearn.feature_selection import SelectFromModel
from sklearn.cross_validation import StratifiedKFold
from sklearn.utils.extmath import density
from sklearn import metrics

root = r"D:\data_processing\Python"
n_folds = 5

os.chdir(root)
X = np.load('X.npy')
y = np.load('y.npy')
y = fm.defineClass(y,according='noise')
f = fm.outTo()

#=========== Feature selection ==========
# L1-penalised logistic regression drives most voxel weights to zero;
# SelectFromModel then keeps only the features with non-zero coefficients.
clf_l1_LR = LogisticRegression(C=0.1, penalty='l1', tol=0.001)
clf_l1_LR.fit(X, y)
print(clf_l1_LR)
model = SelectFromModel(clf_l1_LR, prefit=True)
X = model.transform(X)
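
On newer scikit-learn releases the default LogisticRegression solver (lbfgs) does not accept penalty='l1', so the same selection step needs an explicit liblinear or saga solver. A minimal sketch, assuming the same X and y arrays; it also reports how many features survive the selection:

# Assumed scikit-learn >= 0.22: name the solver explicitly for the l1 penalty.
from sklearn.linear_model import LogisticRegression
from sklearn.feature_selection import SelectFromModel

l1_lr = LogisticRegression(C=0.1, penalty='l1', solver='liblinear', tol=0.001)
l1_lr.fit(X, y)
selector = SelectFromModel(l1_lr, prefit=True)
print("features kept: %d of %d" % (selector.get_support().sum(), X.shape[1]))
X_selected = selector.transform(X)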
     

results = []
# Compare several classifiers with stratified k-fold cross-validation
for clf, name in (
        (RidgeClassifier(tol=1e-2, solver="sag"), "Ridge Classifier"),
        (Perceptron(n_iter=50), "Perceptron"),
        (PassiveAggressiveClassifier(n_iter=50), "Passive-Aggressive"),
        (KNeighborsClassifier(n_neighbors=10), "kNN")):
    fold_scores = []
    for train, test in StratifiedKFold(y, n_folds=n_folds):
        clf.fit(X[train], y[train])
        fold_scores.append(metrics.accuracy_score(y[test], clf.predict(X[test])))
    results.append((name, np.mean(fold_scores)))
    print("%s: %.3f" % (name, np.mean(fold_scores)))
Example #3
"""
Created on Tue Jul 26 15:05:41 2016

@author: FF120
"""
import os
import numpy as np
import fmriUtils as fm
from matplotlib import pyplot as plt
from matplotlib import font_manager

root = r"D:\data_processing\Python"
os.chdir(root)
X = np.load('X.npy')
y = np.load('y.npy')
y = fm.defineClass(y)

# Chinese-capable font for legend and label text (font_manager is imported above)
zhfont = font_manager.FontProperties(fname='/usr/share/fonts/truetype/arphic/ukai.ttc')

""" 比较好看的绘制方法 """ 
plt.figure(figsize=(8, 5), dpi=80)
axes = plt.subplot(111)

type1_x = []
type1_y = []
type2_x = []
type2_y = []
type3_x = []
type3_y = []
type4_x = []
type4_y = []
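
The snippet stops after initialising the per-class coordinate lists. A minimal sketch of how such a per-class scatter typically continues, assuming X is a 2-D array whose first two columns are the features to plot and y codes the four classes as 1-4 (both are illustrative assumptions, since the rest of the script is not shown); boolean masks are used instead of filling the type*_x/type*_y lists by hand:

# Assumed continuation: draw each class with its own colour and use zhfont
# so non-ASCII legend text renders correctly. Column indices 0/1 and the
# class codes 1..4 are illustrative assumptions.
colors = ['r', 'g', 'b', 'k']
for label, color in zip([1, 2, 3, 4], colors):
    pts = X[y == label]
    axes.scatter(pts[:, 0], pts[:, 1], s=20, c=color, label='class %d' % label)
plt.legend(prop=zhfont, loc='upper right')
plt.show()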