Code Example #1
File: test_gridsearch.py  Project: zoe7791/milk
# numpy and milk's gridsearch module are needed to run this test;
# `simple_learner` and `f` are helpers defined elsewhere in the same test module.
import numpy as np
from milk.supervised.gridsearch import gridminimise, gridsearch


def test_gridminimise():
    features = np.arange(100)
    labels = np.tile((0, 1), 50)
    paramspace = {
        'a': np.arange(4),
        'b': np.arange(-3, 3),
        'c': np.linspace(2., 10)
    }
    best, value = gridminimise(simple_learner(),
                               features,
                               labels,
                               paramspace,
                               measure=(lambda _, p: p[0]),
                               return_value=True)
    best = dict(best)
    val = f(best['a'], best['b'], best['c'])
    assert value == val * 100
    for a in np.arange(4):
        for b in np.arange(-3, 3):
            for c in np.linspace(2., 10):
                assert val <= f(a, b, c)
    gs = gridsearch(simple_learner(),
                    paramspace,
                    measure=(lambda _, p: p[0]),
                    annotate=True)
    model = gs.train(features, labels)
    assert model.value == value
    assert model.arguments == val
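For reference, here is a minimal sketch of the same gridminimise call pattern using milk's bundled SVM learner instead of the test helpers. The data below is made up for illustration; svm_raw, svm_to_binary and rbf_kernel are the milk APIs also used in Code Example #2.

import numpy as np
from milk.supervised.svm import svm_raw, svm_to_binary, rbf_kernel
from milk.supervised.gridsearch import gridminimise

# made-up toy data: 60 samples, 4 features, two roughly separable classes
features = np.vstack([np.random.rand(30, 4), np.random.rand(30, 4) + 1.0])
labels = np.repeat((0, 1), 30)

# gridminimise cross-validates every grid point and returns the best
# (name, value) pairs, which dict() turns into a parameter mapping
best = gridminimise(svm_to_binary(svm_raw()),
                    features,
                    labels,
                    {'C': [0.1, 1.0, 10.0],
                     'kernel': [rbf_kernel(2.0 ** i) for i in (-2, 0, 2)]})
best = dict(best)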
Code Example #2
File: emote.py  Project: AvenTu/emote-cat
# Imports assumed by this snippet; ARGV comes from emote.py's own
# command-line parser elsewhere in the file.
import numpy as np
import milk
import milk.supervised as supervised
import milk.supervised.classifier
import milk.supervised.normalise
from milk.supervised import featureselection, multi, svm


def m_svm():
  if ARGV.one_vs:
    svm_model = svm.svm_to_binary(svm.svm_raw())
  else:
    svm_model = multi.one_against_one(svm.svm_to_binary(svm.svm_raw()))
  # return milk.defaultclassifier(mode='slow', multi_strategy='1-vs-1')
  learner = milk.supervised.classifier.ctransforms(
    # remove nans
    supervised.normalise.chkfinite(),
    # normalize to [-1,1]
    supervised.normalise.interval_normalise(),
    # feature selection
    featureselection.featureselector(
      featureselection.linear_independent_features),
    # sda filter
    featureselection.sda_filter(),
    # same parameter range as 'medium'
    supervised.gridsearch(
      svm_model,
      params = {
        'C': 2.0 ** np.arange(-2, 4),
        'kernel': [svm.rbf_kernel(2.0 ** i) for i in range(-4, 4)]  # range (xrange in the original Python 2 source)
      }
    )
  )
  return learner
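A brief usage sketch for the learner returned by m_svm(), assuming ARGV has already been parsed by emote.py's argument handling; the data here is made up, and train/apply is milk's standard learner/model interface.

import numpy as np

# made-up toy data purely for illustration: two shifted clusters
features = np.vstack([np.random.rand(20, 8), np.random.rand(20, 8) + 2.0])
labels = np.repeat((0, 1), 20)

learner = m_svm()
model = learner.train(features, labels)  # runs normalisation, feature selection and the grid search
print(model.apply(features[0]))          # classify a single feature vector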
Code Example #3
File: test_gridsearch.py  Project: NobodyWHU/milk
def test_gridminimise():
    features = np.arange(100)
    labels = np.tile((0,1), 50)
    paramspace = { 'a': np.arange(4), 'b' : np.arange(-3,3), 'c' : np.linspace(2., 10) }
    best,value = gridminimise(simple_learner(), features, labels, paramspace, measure=(lambda _, p: p[0]), return_value=True)
    best = dict(best)
    val = f(best['a'], best['b'], best['c'])
    assert value == val*100
    for a in np.arange(4):
        for b in np.arange(-3,3):
            for c in np.linspace(2., 10):
                assert val <= f(a,b,c)
    gs = gridsearch(simple_learner(), paramspace, measure=(lambda _, p: p[0]), annotate=True)
    model = gs.train(features, labels)
    assert model.value == value
    assert model.arguments == val