# Example #1
def classifier_featureblock_logistic_regression(fm_train=traindat,
                                                fm_test=testdat,
                                                label_train=label_traindat):
    """Train a feature-block (group lasso) logistic regression classifier.

    The training features are duplicated side by side and the resulting
    feature space is split into two equal index blocks, demonstrating the
    block-group API.

    Parameters
    ----------
    fm_train : training feature matrix (one feature vector per column)
    fm_test : test feature matrix (unused; kept for interface consistency
        with the other examples)
    label_train : binary (+1/-1) training labels

    Returns
    -------
    Predicted labels on the stacked training features.
    """
    from numpy import hstack

    from modshogun import BinaryLabels, RealFeatures, IndexBlock, IndexBlockGroup
    try:
        from modshogun import FeatureBlockLogisticRegression
    except ImportError:
        print("FeatureBlockLogisticRegression not available")
        exit(0)

    # Use the parameters; the original body read the module-level globals
    # (traindat/label_traindat) directly, silently ignoring any arguments.
    features = RealFeatures(hstack((fm_train, fm_train)))
    labels = BinaryLabels(hstack((label_train, label_train)))

    # Split the stacked feature space into two equal halves, one block each.
    n_features = features.get_num_features()
    block_one = IndexBlock(0, n_features // 2)
    block_two = IndexBlock(n_features // 2, n_features)
    block_group = IndexBlockGroup()
    block_group.add_block(block_one)
    block_group.add_block(block_two)

    mtlr = FeatureBlockLogisticRegression(0.1, features, labels, block_group)
    mtlr.set_regularization(1)  # interpret z as a fraction of the max regularization
    mtlr.set_tolerance(1e-2)  # use 1e-2 tolerance
    mtlr.train()
    out = mtlr.apply().get_labels()

    return out
def classifier_featureblock_logistic_regression (fm_train=traindat,fm_test=testdat,label_train=label_traindat):
	"""Train a feature-block (group lasso) logistic regression classifier.

	The training features are duplicated side by side and split into two
	equal index blocks. Returns the predicted labels on the stacked
	training features.
	"""
	from numpy import hstack

	from modshogun import BinaryLabels, RealFeatures, IndexBlock, IndexBlockGroup
	try:
		from modshogun import FeatureBlockLogisticRegression
	except ImportError:
		print("FeatureBlockLogisticRegression not available")
		exit(0)

	# Use the parameters; the original body read the module-level globals
	# (traindat/label_traindat) directly, silently ignoring any arguments.
	features = RealFeatures(hstack((fm_train,fm_train)))
	labels = BinaryLabels(hstack((label_train,label_train)))

	# Split the stacked feature space into two equal halves, one block each.
	n_features = features.get_num_features()
	block_one = IndexBlock(0,n_features//2)
	block_two = IndexBlock(n_features//2,n_features)
	block_group = IndexBlockGroup()
	block_group.add_block(block_one)
	block_group.add_block(block_two)

	mtlr = FeatureBlockLogisticRegression(0.1,features,labels,block_group)
	mtlr.set_regularization(1) # interpret z as a fraction of the max regularization
	mtlr.set_tolerance(1e-2) # use 1e-2 tolerance
	mtlr.train()
	out = mtlr.apply().get_labels()

	return out
def transfer_multitask_group_regression(fm_train=traindat,fm_test=testdat,label_train=label_traindat):
	"""Train a multitask least-squares regression over two index-block tasks.

	The training vectors are split into two equal-sized tasks, the model is
	trained jointly, and predictions are returned for task 0.
	"""
	from modshogun import RegressionLabels, RealFeatures, IndexBlock, IndexBlockGroup, MultitaskLSRegression

	# Use the parameters; the original body read the module-level globals
	# (traindat/label_traindat) directly, silently ignoring any arguments.
	features = RealFeatures(fm_train)
	labels = RegressionLabels(label_train)

	# Integer division is required: under Python 3, '/' yields a float and
	# IndexBlock expects integer indices (the sibling examples use '//').
	n_vectors = features.get_num_vectors()
	task_one = IndexBlock(0, n_vectors//2)
	task_two = IndexBlock(n_vectors//2, n_vectors)
	task_group = IndexBlockGroup()
	task_group.add_block(task_one)
	task_group.add_block(task_two)

	mtlsr = MultitaskLSRegression(0.1, features, labels, task_group)
	mtlsr.train()
	mtlsr.set_current_task(0)  # predict with the weights learned for task 0
	out = mtlsr.apply_regression().get_labels()
	return out
# Example #4
    num_z = 21

    # get the data
    X, y, true_coefs = generate_synthetic_logistic_data(
        n, p, L, blk_nnz, gcov, nstd)

    # here each column represents a feature vector
    features = RealFeatures(X)
    # we have to convert the labels to +1/-1
    labels = BinaryLabels(np.sign(y.astype(int) - 0.5))

    # SETTING UP THE CLASSIFIERS
    # CLASSIFIER 1: group LASSO
    # build the feature blocks and add them to the block group
    pl = p / L
    block_group = IndexBlockGroup()
    for i in xrange(L):
        block_group.add_block(IndexBlock(pl * i, pl * (i + 1)))

    cls_gl = FeatureBlockLogisticRegression(0.0, features, labels, block_group)
    # with set_regularization(1), the parameter z will indicate the fraction of
    # the maximum regularization to use, and so z is in [0,1]
    # (reference: SLEP manual)
    cls_gl.set_regularization(1)
    cls_gl.set_q(2.0)  # it is the default anyway...

    # CLASSIFIER 2: LASSO (illustrating group lasso with all group sizes = 1)
    block_group_ones = IndexBlockGroup()
    for i in xrange(p):
        block_group_ones.add_block(IndexBlock(i, i + 1))
# Example #5
    max_z = 1
    num_z = 21

    # get the data
    X, y, true_coefs = generate_synthetic_logistic_data(n, p, L, blk_nnz, gcov, nstd)

    # here each column represents a feature vector
    features = RealFeatures(X)
    # we have to convert the labels to +1/-1
    labels = BinaryLabels(np.sign(y.astype(int) - 0.5))

    # SETTING UP THE CLASSIFIERS
    # CLASSIFIER 1: group LASSO
    # build the feature blocks and add them to the block group
    pl = p / L
    block_group = IndexBlockGroup()
    for i in xrange(L):
        block_group.add_block(IndexBlock(pl * i, pl * (i + 1)))

    cls_gl = FeatureBlockLogisticRegression(0.0, features, labels, block_group)
    # with set_regularization(1), the parameter z will indicate the fraction of
    # the maximum regularization to use, and so z is in [0,1]
    # (reference: SLEP manual)
    cls_gl.set_regularization(1)
    cls_gl.set_q(2.0)   # it is the default anyway...

    # CLASSIFIER 2: LASSO (illustrating group lasso with all group sizes = 1)
    block_group_ones = IndexBlockGroup()
    for i in xrange(p):
        block_group_ones.add_block(IndexBlock(i, i + 1))