Example #1
def __init__(self, ks, base=None, R=None):
    # Fall back to milk's default learner when no base learner is supplied.
    if base is None:
        base = defaultlearner()
    self.ks = ks
    self.R = R
    self.base = base
    self.normalise = True
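This constructor (and the similar ones below) comes from a wrapper learner that falls back to milk.defaultlearner() when no base learner is given. A minimal sketch of that pattern, using a hypothetical passthrough_learner class that simply delegates train() to its base:

import milk

class passthrough_learner(object):
    # Hypothetical wrapper: keeps milk's train() interface and defaults the
    # underlying learner to milk.defaultlearner() when none is provided.
    def __init__(self, base=None):
        if base is None:
            base = milk.defaultlearner()
        self.base = base

    def train(self, features, labels, **kwargs):
        # Extra keyword arguments are forwarded to the base learner
        # (the test_extra_arg example below shows they are tolerated).
        return self.base.train(features, labels, **kwargs)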
Example #2
import numpy as np

import milk

def test_empty_input():
    # Train on a small synthetic two-class problem, then check that
    # apply_many() on an empty sequence returns an empty result.
    learn = milk.defaultlearner()
    X = np.random.rand(60, 3)
    X[:32] += .52
    y = np.arange(60) > 35
    model = learn.train(X, y)
    preds = model.apply_many([])
    assert len(preds) == 0
Example #3
import numpy as np

import milk

def test_expandend():
    # defaultlearner(expanded=True) yields several candidate learners; each
    # one is trained and must predict both class labels on this easy,
    # well-separated two-class problem.
    np.random.seed(23232432)
    X = np.random.rand(100, 10)
    labels = np.zeros(100)
    X[50:] += .5
    labels[50:] = 1
    learners = milk.defaultlearner(expanded=True)
    for learner in learners:
        model = learner.train(X, labels)
        test = np.array([model.apply(x) for x in X])
        assert set(test) == set(labels)
Example #4

def train(self, features, labels, **kwargs):
    from milk.supervised.gridsearch import gridminimise
    from milk.supervised import svm
    # Pool the non-empty per-object feature arrays and subsample them.
    c_features = np.concatenate([f for f, _ in features if f.size])
    c_features = c_features[::self.sample]

    learner = milk.defaultlearner()
    k = (self.k if self.k is not None else len(features) // self.kfrac)
    # Build a k-means codebook (fixed seed R=123), project the features onto
    # it, and train the default learner on the projected features.
    # project.f and codebook_model come from the enclosing module.
    _, codebook = milk.kmeans(c_features, k=k, R=123)
    features = project.f(features, codebook)
    model = learner.train(features, labels)
    return codebook_model(codebook, model)
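The codebook step in the train() method above (and in Example #5 below) hinges on milk.kmeans returning a pair of (cluster assignments, centroids), with the centroids used as the codebook. A minimal standalone sketch of that call on synthetic data; the shapes and seed here are illustrative only:

import numpy as np
import milk

np.random.seed(2)
features = np.random.rand(200, 8)
# The second return value holds one centroid per cluster; with k=4 and
# 8-dimensional features its shape is (4, 8).
assignments, centroids = milk.kmeans(features, k=4, R=123)
print(centroids.shape)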
Example #5
def train(self, features, labels, R=134, **kwargs):
    from milk.supervised.gridsearch import gridminimise
    from milk.supervised import svm
    c_features = np.concatenate([f for f, _ in features if f.size])
    c_features = c_features[::self.sample]

    learner = milk.defaultlearner()
    k = (self.k if self.k is not None else len(features) // self.kfrac)
    # Same pattern as Example #4, but the k-means seed R is a parameter.
    _, codebook = milk.kmeans(c_features, k=k, R=R)
    features = project.f(features, codebook)
    model = learner.train(features, labels)
    return codebook_model(codebook, model)
Example #6
import pickle
import milk

def test_pickle_learner():
    learner = milk.defaultlearner()
    assert len(pickle.dumps(learner))
Example #7

import pickle
import sys

import milk
import milk.supervised
import milk.supervised.adaboost
import milk.supervised.multi

# The single command-line argument is a pickle file containing the tuple
# (features, labels) to train on.
assert sys.argv[1]
features = pickle.load(open(sys.argv[1], 'rb'))

#learner = milk.supervised.tree_learner()
#learner = milk.supervised.adaboost.boost_learner(weak)
#learner = milk.supervised.multi.one_against_one(learner)
learner = milk.defaultlearner(mode='really-slow')
model = learner.train(*features)

pickle.dump(model, open('trainer.pik', 'wb'))

cmat, names, preds = milk.nfoldcrossvalidation(*features,
                                               classifier=learner,
                                               return_predictions=1)

print(cmat)
print(names)
print(preds)
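The script above only writes the trained model to trainer.pik; a later process can restore it with pickle and classify new data. A minimal sketch, assuming the same trainer.pik file and treating new_features as a stand-in for real data with the same number of columns as the training features:

import pickle
import numpy as np

# Restore the model written by the training script above.
with open('trainer.pik', 'rb') as f:
    model = pickle.load(f)

# new_features is a placeholder; its column count must match the training data.
new_features = np.random.rand(5, 10)
predictions = np.array([model.apply(x) for x in new_features])
print(predictions)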
Example #8
import milk

def test_extra_arg():
    from milksets.wine import load
    # Unknown keyword arguments passed to train() are accepted without error.
    features, labels = load()
    learner = milk.defaultlearner()
    model = learner.train(features[::2], labels[::2], extra_arg=5)
    assert model.apply(features[1]) < 12.
Example #9
def __init__(self, ks, base=None):
    # As in Example #1: default the base learner to milk's defaultlearner().
    if base is None:
        base = defaultlearner()
    self.ks = ks
    self.base = base