Example #1
0
def m_svm():
  """Build a multi-class SVM learner pipeline.

  Depending on ``ARGV.one_vs``, the base binary SVM is used directly or
  wrapped in a one-against-one multi-class strategy, then chained with
  preprocessing (NaN removal, interval normalisation), feature selection,
  and a grid search over C and RBF kernel widths.

  Returns:
    A milk ``ctransforms`` learner object (untrained).
  """
  if ARGV.one_vs:
    svm_model = svm.svm_to_binary(svm.svm_raw())
  else:
    svm_model = multi.one_against_one(svm.svm_to_binary(svm.svm_raw()))
  # Equivalent shortcut: milk.defaultclassifier(mode='slow', multi_strategy='1-vs-1')
  learner = milk.supervised.classifier.ctransforms(
    # remove nans
    supervised.normalise.chkfinite(),
    # normalize to [-1,1]
    supervised.normalise.interval_normalise(),
    # feature selection
    featureselection.featureselector(
      featureselection.linear_independent_features),
    # sda filter
    featureselection.sda_filter(),
    # same parameter range as 'medium'
    supervised.gridsearch(
      svm_model,
      params = {
        'C': 2.0 ** np.arange(-2, 4),
        # range() replaces Python-2-only xrange(); identical iteration here
        'kernel': [ svm.rbf_kernel(2.0 ** i) for i in range(-4, 4) ]
      }
    )
  )
  return learner
Example #2
0
def test_ecoc_probability():
    """ECOC learner in probability mode: the model must emit one score
    column per class and its argmax must beat chance on held-out data."""
    features, labels = load()
    keep = labels < 5
    features = features[keep]
    labels = labels[keep]
    raw = svm.svm_raw(kernel=svm.dot_kernel(), C=1.)
    base = ctransforms(raw, svm.svm_sigmoidal_correction())
    learner = ecoc_learner(base, probability=True)
    model = learner.train(features[::2], labels[::2])
    predictions = np.array([model.apply(f) for f in features[1::2]])
    assert predictions.shape[1] == len(set(labels))
    assert np.mean(predictions.argmax(1) == labels[1::2]) > .5
Example #3
0
def bench_milk(X, y, T, valid):
    """Benchmark milk's raw RBF SVM.

    Trains on (X, y), predicts the sign of the decision value for each
    sample in T, and scores against the expected labels ``valid``.

    Returns:
      (score, elapsed): mean accuracy and a ``timedelta`` of wall time
      covering both training and prediction.
    """
#
#       .. milk ..
#
    from milk.supervised import svm
    start = datetime.now()
    learner = svm.svm_raw(
        kernel=svm.rbf_kernel(sigma=1./X.shape[1]), C=1.)
    model = learner.train(X, y)
    # list(...) is required on Python 3, where map() returns an iterator
    # that np.sign cannot operate on element-wise.
    pred = np.sign(list(map(model.apply, T)))
    score = np.mean(pred == valid)
    return score, datetime.now() - start
Example #4
0
def m_svm():
    """Build a multi-class SVM learner pipeline.

    Depending on ``ARGV.one_vs``, the base binary SVM is used directly or
    wrapped in a one-against-one multi-class strategy, then chained with
    preprocessing (NaN removal, interval normalisation), feature selection,
    and a grid search over C and RBF kernel widths.

    Returns:
      A milk ``ctransforms`` learner object (untrained).
    """
    if ARGV.one_vs:
        svm_model = svm.svm_to_binary(svm.svm_raw())
    else:
        svm_model = multi.one_against_one(svm.svm_to_binary(svm.svm_raw()))
    # Equivalent shortcut: milk.defaultclassifier(mode='slow', multi_strategy='1-vs-1')
    learner = milk.supervised.classifier.ctransforms(
        # remove nans
        supervised.normalise.chkfinite(),
        # normalize to [-1,1]
        supervised.normalise.interval_normalise(),
        # feature selection
        featureselection.featureselector(
            featureselection.linear_independent_features),
        # sda filter
        featureselection.sda_filter(),
        # same parameter range as 'medium'
        supervised.gridsearch(
            svm_model,
            params={
                'C': 2.0**np.arange(-2, 4),
                # range() replaces Python-2-only xrange(); identical iteration here
                'kernel': [svm.rbf_kernel(2.0**i) for i in range(-4, 4)]
            }))
    return learner
Example #5
0
def test_ecoc_learner():
    """ECOC multi-class learner: predictions must be valid label indices
    and beat random guessing on held-out samples."""
    base = milk.tests.fast_classifier.fast_classifier()
    learner = milk.supervised.multi.ecoc_learner(base)
    features, labels = load()
    nlabels = len(set(labels))
    model = learner.train(features[::2], labels[::2])

    predicted = np.array(model.apply_many(features[1::2]))
    held_out = labels[1::2]
    assert np.mean(predicted == held_out) > 1. / nlabels
    assert predicted.min() >= 0
    assert predicted.max() < nlabels

    # Regression check: this combination failed at one point.
    binary = svm.svm_to_binary(svm.svm_raw(kernel=svm.dot_kernel(), C=1.))
    learner = ecoc_learner(binary)
    model = learner.train(features[:200], labels[:200])
    assert model is not None