def test_precluster():
    """Smoke-test precluster_learner on gen_data(): the trained model should
    classify a bag of c0() samples as positive and a bag of c1() samples as
    negative.  (c0/c1/gen_data are test fixtures defined elsewhere in the
    suite — assumed to produce two separable sample generators.)
    """
    learner = precluster_learner([2], base=fast_classifier(), R=12)
    features, labels = gen_data(22)
    model = learner.train(features, labels)

    # xrange is Python-2-only (NameError on Python 3); range is equivalent here.
    assert model.apply([c0() for _ in range(35)])
    assert not model.apply([c1() for _ in range(35)])
def test_precluster():
    """Train a precluster learner on gen_data() and verify it labels
    c0-style bags positive and c1-style bags negative.  Relies on the
    suite's c0/c1/gen_data fixtures (defined elsewhere).
    """
    learner = precluster_learner([2], base=fast_classifier(), R=12)
    features, labels = gen_data(22)
    model = learner.train(features, labels)

    # Replaced Python-2-only xrange with range (identical iteration behaviour).
    assert model.apply([c0() for _ in range(35)])
    assert not model.apply([c1() for _ in range(35)])
# Example #3
def test_gridminimise_return():
    """gridminimise(..., return_value=True) must report the same raw error
    count that a direct 5-fold cross-validation produces for the same
    learner (confusion-matrix off-diagonal sum)."""
    from milksets.wine import load
    features, labels = load()
    learner = fast_classifier()
    gridminimise(learner, features, labels, dict(ignore=[0]))
    _, error = gridminimise(learner, features, labels, dict(ignore=[0]),
                            return_value=True, nfolds=5)
    cmat, _ = milk.nfoldcrossvalidation(features, labels,
                                        learner=learner, nfolds=5)
    # Off-diagonal entries of the confusion matrix are the misclassifications.
    assert error == cmat.sum() - cmat.trace()
def test_codebook_learner_case1():
    """select_precluster with rmax=1 trained on gen_data(23, 1): the model
    should accept (c0-bag, []) inputs and reject (c1-bag, []) inputs.
    Uses the suite's c0/c1/gen_data fixtures (defined elsewhere)."""
    learner = select_precluster([2], base=fast_classifier())
    learner.rmax = 1
    features, labels = gen_data(23, 1)
    model = learner.train(features, labels)

    # xrange is Python-2-only; range behaves identically here and runs on 3.
    assert model.apply(([c0() for _ in range(35)], []))
    assert not model.apply(([c1() for _ in range(35)], []))
def test_codebook_learner_case1():
    """Train select_precluster (rmax=1) on gen_data(23, 1) and check it
    separates (c0-bag, []) from (c1-bag, []) inputs.  c0/c1/gen_data are
    fixtures defined elsewhere in the suite."""
    learner = select_precluster([2], base=fast_classifier())
    learner.rmax = 1
    features, labels = gen_data(23, 1)
    model = learner.train(features, labels)

    # Replaced Python-2-only xrange with range (same iteration, Py2+Py3).
    assert model.apply(([c0() for _ in range(35)], []))
    assert not model.apply(([c1() for _ in range(35)], []))
# Example #6
def test_gridminimise_return():
    """Check that the error value returned by gridminimise(return_value=True)
    equals the misclassification count from milk.nfoldcrossvalidation with
    the same learner and the same number of folds."""
    from milksets.wine import load
    features, labels = load()
    learner = fast_classifier()
    gridminimise(learner, features, labels, {'ignore': [0]})
    _, error = gridminimise(
        learner, features, labels, {'ignore': [0]},
        return_value=True, nfolds=5)
    cmat, _ = milk.nfoldcrossvalidation(
        features, labels, learner=learner, nfolds=5)
    # Total errors = everything off the confusion matrix diagonal.
    assert error == cmat.sum() - cmat.trace()
# Example #7
def test_one_by_one():
    """Exercise milk's one_by_one multi-label learner on synthetic data.

    Each of the 5 labels fires independently with its own probability; an
    example's feature vector is a random-weighted sum of the base vectors of
    its active labels.  Training F1 (micro, over all labels) must beat .3.
    """
    np.random.seed(23)
    draw = np.random.random
    probs = np.array([.7, .5, .8, .3, .8])
    learner = milk.supervised.multi_label.one_by_one(fast_classifier())
    label_universe = list(range(len(probs)))

    for _ in range(10):
        labels = []
        features = []
        # One random 20-dim base vector per possible label.
        bases = [np.random.rand(20) for _ in probs]
        for _ in range(256):
            active = []
            fvec = np.zeros(20, float)
            for idx, p in enumerate(probs):
                if draw() < p:
                    active.append(idx)
                    fvec += draw() * bases[idx]
            if not active:
                continue  # skip examples with no labels at all
            labels.append(active)
            features.append(fvec)

        model = learner.train(features, labels)
        predicted = model.apply_many(features)
        # 2x2 label-presence confusion matrix accumulated over every
        # (example, label) pair: rows = truth, cols = prediction.
        confusion = np.zeros((2, 2), int)
        for true_set, pred_set in zip(labels, predicted):
            for ell in label_universe:
                confusion[ell in true_set, ell in pred_set] += 1
        (tn, fp), (fn, tp) = confusion
        precision = tp / float(tp + fp)
        recall = tp / float(tp + fn)
        f1 = 2 * precision * recall / (precision + recall)
        assert f1 > .3
def test_one_by_one():
    """Exercise milk's one_by_one multi-label learner on synthetic data.

    Labels fire independently with fixed probabilities; each example's
    feature vector is a random-weighted sum of the base vectors of its
    active labels.  The micro-averaged training F1 must exceed .3.
    """
    np.random.seed(23)
    r = np.random.random
    ps = np.array([.7, .5, .8, .3, .8])
    learner = milk.supervised.multi_label.one_by_one(fast_classifier())
    # list(...) keeps membership tests working identically on Py2 and Py3.
    universe = list(range(len(ps)))

    # xrange is Python-2-only (NameError on 3); range iterates identically.
    for _ in range(10):
        labels = []
        features = []
        bases = [np.random.rand(20) for pj in ps]
        for i in range(256):
            cur = []
            curf = np.zeros(20, float)
            for j, pj in enumerate(ps):
                if r() < pj:
                    cur.append(j)
                    curf += r() * bases[j]
            if not cur:
                continue  # drop examples that got no labels
            labels.append(cur)
            features.append(curf)

        model = learner.train(features, labels)
        predicted = model.apply_many(features)
        # 2x2 presence/absence confusion matrix over all (example, label) pairs.
        matrix = np.zeros((2, 2), int)
        for t, p in zip(labels, predicted):
            for ell in universe:
                row = (ell in t)
                col = (ell in p)
                matrix[row, col] += 1
        Tn, Fp = matrix[0]
        Fn, Tp = matrix[1]
        prec = Tp / float(Tp + Fp)
        recall = Tp / float(Tp + Fn)
        F1 = 2 * prec * recall / (prec + recall)
        assert F1 > .3
# Example #9
def test_precluster():
    """Self-contained precluster_learner test.

    Builds 200 bags of 40 vectors each.  The first 100 bags are mostly
    c0-samples (30 of 40), the rest mostly c1-samples; a trained model must
    then classify a pure c0 bag as positive and a pure c1 bag as negative.
    """
    np.random.seed(22)
    learner = precluster_learner([2], base=fast_classifier(), R=12)

    def c0():
        # uniform samples in [0, 1)^8
        return np.random.rand(8)

    def c1():
        # same distribution shifted by +2 on every axis — linearly separable
        return c0() + 2. * np.ones(8)

    features = []
    labels = []
    # xrange is Python-2-only (NameError on 3); range iterates identically.
    for i in range(200):
        f = []
        for j in range(40):
            use_0 = (i < 100 and j < 30) or (i >= 100 and j >= 30)
            if use_0:
                f.append(c0())
            else:
                f.append(c1())
        labels.append((i < 100))
        features.append(f)
    model = learner.train(features, labels)

    assert model.apply([c0() for i in range(35)])
    assert not model.apply([c1() for i in range(35)])
# Example #10
def test_precluster():
    """Self-contained precluster_learner test (formatted variant).

    200 bags of 40 vectors: the first 100 bags are dominated by c0 samples,
    the rest by c1 samples.  The trained model must accept a pure c0 bag and
    reject a pure c1 bag.
    """
    np.random.seed(22)
    learner = precluster_learner([2], base=fast_classifier(), R=12)

    def c0():
        # uniform samples in [0, 1)^8
        return np.random.rand(8)

    def c1():
        # c0 shifted by +2 per axis — well separated from c0
        return c0() + 2. * np.ones(8)

    features = []
    labels = []
    # Replaced Python-2-only xrange with range (identical iteration, Py2+Py3).
    for i in range(200):
        f = []
        for j in range(40):
            use_0 = (i < 100 and j < 30) or (i >= 100 and j >= 30)
            if use_0:
                f.append(c0())
            else:
                f.append(c1())
        labels.append((i < 100))
        features.append(f)
    model = learner.train(features, labels)

    assert model.apply([c0() for i in range(35)])
    assert not model.apply([c1() for i in range(35)])
# Example #11
# Script driving milk's jug-based parallel cross-validation on the wine
# dataset (presumably run under `jug execute` — TODO confirm; jugparallel
# normally builds jug Tasks rather than computing immediately).
import milk.ext.jugparallel
from milksets.wine import load
from milk.tests.fast_classifier import fast_classifier
# features/labels: the wine dataset as returned by milksets' load().
features, labels = load()
# Cross-validation run with the fast test classifier.
classified = milk.ext.jugparallel.nfoldcrossvalidation(
    features, labels, learner=fast_classifier())
# Same run, additionally requesting per-example predictions.
classified_wpred = milk.ext.jugparallel.nfoldcrossvalidation(
    features, labels, learner=fast_classifier(), return_predictions=True)
# Example #12
# Duplicate of the jug-parallel cross-validation script above the formatter
# pass: same wine-dataset cross-validation, with and without predictions.
# (Presumably executed under jug — TODO confirm.)
import milk.ext.jugparallel
from milksets.wine import load
from milk.tests.fast_classifier import fast_classifier
# Wine dataset: feature matrix and integer labels.
features,labels = load()
# Cross-validation with the fast test classifier.
classified = milk.ext.jugparallel.nfoldcrossvalidation(features, labels, learner=fast_classifier())
# Same, also returning per-example predictions.
classified_wpred = milk.ext.jugparallel.nfoldcrossvalidation(features, labels, learner=fast_classifier(), return_predictions=True)