Example n. 1
0
def classifier_featureblock_logistic_regression(fm_train=traindat,
                                                fm_test=testdat,
                                                label_train=label_traindat):
    """Train a feature-block (group-sparse) logistic regression on Shogun.

    The training features are duplicated horizontally so the feature space
    splits into two equal halves, each registered as one IndexBlock; the
    classifier then applies group regularization across the two blocks.

    Parameters
    ----------
    fm_train : feature matrix, one column per example (default: module-level
        ``traindat``).
    fm_test : unused here; kept for interface compatibility with the other
        examples (default: module-level ``testdat``).
    label_train : binary labels for the training examples (default:
        module-level ``label_traindat``).

    Returns
    -------
    The predicted labels on the (duplicated) training set, as a numpy array.
    """
    from numpy import hstack  # hstack was used but never imported

    from modshogun import BinaryLabels, RealFeatures, IndexBlock, IndexBlockGroup
    try:
        from modshogun import FeatureBlockLogisticRegression
    except ImportError:
        print("FeatureBlockLogisticRegression not available")
        exit(0)

    # BUGFIX: the original read the global `traindat` here, silently ignoring
    # the `fm_train` parameter.
    features = RealFeatures(hstack((fm_train, fm_train)))
    labels = BinaryLabels(hstack((label_train, label_train)))

    n_features = features.get_num_features()
    # First half of the (duplicated) features is block one, second half block two.
    block_one = IndexBlock(0, n_features // 2)
    block_two = IndexBlock(n_features // 2, n_features)
    block_group = IndexBlockGroup()
    block_group.add_block(block_one)
    block_group.add_block(block_two)

    mtlr = FeatureBlockLogisticRegression(0.1, features, labels, block_group)
    mtlr.set_regularization(1)  # interpret 0.1 as a fraction of the max regularization
    mtlr.set_tolerance(1e-2)  # use 1e-2 tolerance
    mtlr.train()
    out = mtlr.apply().get_labels()

    return out
def classifier_featureblock_logistic_regression (fm_train=traindat,fm_test=testdat,label_train=label_traindat):
	"""Group-sparse logistic regression demo (FeatureBlockLogisticRegression).

	Stacks the training data side by side so the features form two equal
	index blocks, then trains a block-regularized logistic regression and
	returns its predictions on that stacked training set.

	fm_train: training feature matrix (one column per example).
	fm_test: unused; kept so the signature matches the other examples.
	label_train: binary training labels.
	"""
	from numpy import hstack  # hstack was used but never imported

	from modshogun import BinaryLabels, RealFeatures, IndexBlock, IndexBlockGroup
	try:
		from modshogun import FeatureBlockLogisticRegression
	except ImportError:
		print("FeatureBlockLogisticRegression not available")
		exit(0)

	# BUGFIX: the original used the global `traindat` here instead of the
	# `fm_train` parameter, so the argument had no effect.
	features = RealFeatures(hstack((fm_train,fm_train)))
	labels = BinaryLabels(hstack((label_train,label_train)))

	n_features = features.get_num_features()
	# Blocks split the duplicated feature space exactly in half.
	block_one = IndexBlock(0,n_features//2)
	block_two = IndexBlock(n_features//2,n_features)
	block_group = IndexBlockGroup()
	block_group.add_block(block_one)
	block_group.add_block(block_two)

	mtlr = FeatureBlockLogisticRegression(0.1,features,labels,block_group)
	mtlr.set_regularization(1) # use regularization ratio
	mtlr.set_tolerance(1e-2) # use 1e-2 tolerance
	mtlr.train()
	out = mtlr.apply().get_labels()

	return out
Example n. 3
0
        n, p, L, blk_nnz, gcov, nstd)

    # here each column represents a feature vector
    features = RealFeatures(X)
    # we have to convert the labels to +1/-1
    # sign(y - 0.5) maps 0 -> -1 and 1 -> +1 (assumes y is 0/1 -- TODO confirm)
    labels = BinaryLabels(np.sign(y.astype(int) - 0.5))

    # SETTING UP THE CLASSIFIERS
    # CLASSIFIER 1: group LASSO
    # build the feature blocks and add them to the block group
    # NOTE(review): xrange and "/" suggest Python 2; under Python 3 this
    # would need range() and integer division (p // L) for valid indices.
    pl = p / L
    block_group = IndexBlockGroup()
    # L contiguous blocks of pl features each: [0,pl), [pl,2*pl), ...
    for i in xrange(L):
        block_group.add_block(IndexBlock(pl * i, pl * (i + 1)))

    # C=0.0 placeholder; the effective strength comes from set_regularization
    cls_gl = FeatureBlockLogisticRegression(0.0, features, labels, block_group)
    # with set_regularization(1), the parameter z will indicate the fraction of
    # the maximum regularization to use, and so z is in [0,1]
    # (reference: SLEP manual)
    cls_gl.set_regularization(1)
    cls_gl.set_q(2.0)  # it is the default anyway...

    # CLASSIFIER 2: LASSO (illustrating group lasso with all group sizes = 1)
    # one single-feature block per feature reduces group lasso to plain lasso
    block_group_ones = IndexBlockGroup()
    for i in xrange(p):
        block_group_ones.add_block(IndexBlock(i, i + 1))

    cls_l = FeatureBlockLogisticRegression(0.0, features, labels,
                                           block_group_ones)
    cls_l.set_regularization(1)
    cls_l.set_q(2.0)
Example n. 4
0
    # Synthetic data: n examples, p features in L blocks, blk_nnz non-zero
    # blocks, group covariance gcov, noise level nstd (per the helper's args).
    X, y, true_coefs = generate_synthetic_logistic_data(n, p, L, blk_nnz, gcov, nstd)

    # here each column represents a feature vector
    features = RealFeatures(X)
    # we have to convert the labels to +1/-1
    # sign(y - 0.5) maps 0 -> -1 and 1 -> +1 (assumes y is 0/1 -- TODO confirm)
    labels = BinaryLabels(np.sign(y.astype(int) - 0.5))

    # SETTING UP THE CLASSIFIERS
    # CLASSIFIER 1: group LASSO
    # build the feature blocks and add them to the block group
    # NOTE(review): xrange and "/" suggest Python 2; under Python 3 this
    # would need range() and integer division (p // L) for valid indices.
    pl = p / L
    block_group = IndexBlockGroup()
    # L contiguous blocks of pl features each: [0,pl), [pl,2*pl), ...
    for i in xrange(L):
        block_group.add_block(IndexBlock(pl * i, pl * (i + 1)))

    # C=0.0 placeholder; the effective strength comes from set_regularization
    cls_gl = FeatureBlockLogisticRegression(0.0, features, labels, block_group)
    # with set_regularization(1), the parameter z will indicate the fraction of
    # the maximum regularization to use, and so z is in [0,1]
    # (reference: SLEP manual)
    cls_gl.set_regularization(1)
    cls_gl.set_q(2.0)   # it is the default anyway...

    # CLASSIFIER 2: LASSO (illustrating group lasso with all group sizes = 1)
    # one single-feature block per feature reduces group lasso to plain lasso
    block_group_ones = IndexBlockGroup()
    for i in xrange(p):
        block_group_ones.add_block(IndexBlock(i, i + 1))

    cls_l = FeatureBlockLogisticRegression(0.0, features, labels, block_group_ones)
    cls_l.set_regularization(1)
    cls_l.set_q(2.0)