Example #1

import copula_ordinal_regression as cor

X, y, _ = cor.load_disfa()


# smoke test: fit the COR model on DISFA for a few iterations
class testcase:
    def test_mlr(self):
        clf = cor.COR(max_iter=50)
        clf.fit(X, y, debug=True)


if __name__ == "__main__":
    import nose
    nose.run(defaultTest=__file__, env={'NOSE_NOCAPTURE': 1})
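
Before fitting, it can help to inspect what load_disfa returns. A minimal sketch, assuming the third return value holds per-frame subject labels (it is used as the grouping variable in the grid-search example below):

import numpy as np
import copula_ordinal_regression as cor

X, y, S = cor.load_disfa()
print(X.shape, y.shape)    # feature matrix and ordinal AU intensity labels
print(len(np.unique(S)))   # number of distinct subjects (presumed grouping labels)
print(np.unique(y))        # intensity levels present in the labels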

Example #2

import copula_ordinal_regression as cor

# load the processed disfa database
X, y, _  = cor.load_disfa()

# select the first 3 action units (AU1,AU2,AU4)
y = y[:,[0,1,2]]

# use 3000 samples for training and the rest for testing
X_tr, X_te = X[:3000],X[3000:]
y_tr, y_te = y[:3000],y[3000:]


clf = cor.COR(
        max_iter=5000,          # maximum number of iterations
        margins='sigmoid',      # marginal function  [ sigmoid, normcdf ]
        copula='frank',         # copula function    [ frank, gumbel, indep ]
        optimizer='CG',         # scipy optimizer    [ CG, BFGS, TNC ... ]
        sparsity=2,             # level of sparsity: fully connected CRF if set to 0
        w_nodes=0.1,            # balance of unary and binary potentials, between 0 (only unary) and 1 (only binary)
        shared_copula=True,     # share the same copula parameter across the full range of intensities
        verbose=1,              # verbosity level
        )

# fit the model and apply prediction
clf.fit(X_tr,y_tr,debug=True)
y_hat = clf.predict(X_te)

# print results on the test set
print(cor.metrics.ICC(y_te, y_hat))
print('avg. ICC:', cor.metrics.ICC(y_te, y_hat).mean())
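
The fixed 3000-frame split above does not keep subjects separate between training and test data. A minimal alternative sketch, assuming the third return value of load_disfa holds subject labels, uses scikit-learn's GroupShuffleSplit for a subject-independent split:

from sklearn.model_selection import GroupShuffleSplit
import copula_ordinal_regression as cor

X, y, S = cor.load_disfa()
y = y[:, [0, 1, 2]]

# hold out roughly 30% of the subjects for testing
gss = GroupShuffleSplit(n_splits=1, test_size=0.3, random_state=0)
tr, te = next(gss.split(X, y, groups=S))
X_tr, X_te = X[tr], X[te]
y_tr, y_te = y[tr], y[te]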

Example #3

from sklearn.model_selection import GroupKFold
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import cross_val_predict
import copula_ordinal_regression as cor
import numpy as np

# load the processed disfa database
X, y, S = cor.load_disfa()

# select the first 3 action units (AU1,AU2,AU4)
y = y[:, [0, 1, 2]]

# select estimator and number of folds for cross validation
clf = cor.COR(max_iter=5000, verbose=0)
cv = GroupKFold(n_splits=9)

# define parameter grid
parameter = {
    'margins': ['normcdf', 'sigmoid'],
    'C': [0] + list(10.**np.arange(0, 8)),   # C=0 (no regularization) plus powers of ten
    'w_nodes': np.linspace(0, 1, 5),
}

# apply grid search to find optimal hyper parameters
clf = GridSearchCV(clf, parameter, cv=cv, n_jobs=-1, verbose=10, refit=False)
clf.fit(X, y, groups=S)   # S provides the subject grouping for GroupKFold
print(clf.best_params_)

# apply cross validation using the best hyper parameters
# (refit=False above, so build a fresh estimator from best_params_ instead of using best_estimator_)
best_clf = cor.COR(max_iter=5000, verbose=0, **clf.best_params_)
y_hat = cross_val_predict(best_clf, X, y, cv=cv, groups=S, n_jobs=-1)
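
The cross-validated predictions can then be scored per action unit with cor.metrics.ICC, mirroring the earlier example (a minimal sketch):

# per-AU ICC and its average over the three selected action units
print(cor.metrics.ICC(y, y_hat))
print('avg. ICC:', cor.metrics.ICC(y, y_hat).mean())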