def modelselection_grid_search_liblinear_modular (traindat=traindat, label_traindat=label_traindat):
    from modshogun import CrossValidation, CrossValidationResult
    from modshogun import ContingencyTableEvaluation, ACCURACY
    from modshogun import StratifiedCrossValidationSplitting
    from modshogun import GridSearchModelSelection
    from modshogun import ModelSelectionParameters, R_EXP
    from modshogun import ParameterCombination
    from modshogun import BinaryLabels
    from modshogun import RealFeatures
    from modshogun import LibLinear, L2R_L2LOSS_SVC

    # build parameter tree to select C1 and C2
    param_tree_root=ModelSelectionParameters()
    c1=ModelSelectionParameters("C1");
    param_tree_root.append_child(c1)
    c1.build_values(-1.0, 0.0, R_EXP);

    c2=ModelSelectionParameters("C2");
    param_tree_root.append_child(c2);
    c2.build_values(-1.0, 0.0, R_EXP);

    # training data
    features=RealFeatures(traindat)
    labels=BinaryLabels(label_traindat)

    # classifier
    classifier=LibLinear(L2R_L2LOSS_SVC)

    # print all parameters available for model selection
    # Don't worry if yours is not included; simply write to the mailing list
    #classifier.print_modsel_params()

    # splitting strategy for cross-validation
    splitting_strategy=StratifiedCrossValidationSplitting(labels, 10)

    # evaluation method
    evaluation_criterium=ContingencyTableEvaluation(ACCURACY)

    # cross-validation instance
    cross_validation=CrossValidation(classifier, features, labels,
                                     splitting_strategy, evaluation_criterium)
    cross_validation.set_autolock(False)

    # model selection instance
    model_selection=GridSearchModelSelection(cross_validation, param_tree_root)

    # perform model selection with selected methods
    #print "performing model selection of"
    #param_tree_root.print_tree()
    best_parameters=model_selection.select_model()

    # print best parameters
    #print "best parameters:"
    #best_parameters.print_tree()

    # apply them and print result
    best_parameters.apply_to_machine(classifier)
    result=cross_validation.evaluate()
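# --- usage sketch (not part of the original example) ---
# A minimal way to exercise the grid-search function above on synthetic data;
# assumes modshogun and NumPy are installed. RealFeatures expects a
# (features x examples) float64 matrix and BinaryLabels a vector of -1/+1.
# The module-level traindat/label_traindat defaults are loaded elsewhere in
# the original file; here we pass arrays explicitly.
def _demo_grid_search_liblinear():
    import numpy as np
    np.random.seed(17)
    num = 50
    # two Gaussian blobs in 2D, stacked column-wise
    X = np.hstack((np.random.randn(2, num) - 1.0, np.random.randn(2, num) + 1.0))
    y = np.hstack((-np.ones(num), np.ones(num)))
    modelselection_grid_search_liblinear_modular(X, y)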
def modelselection_random_search_liblinear_modular (traindat=traindat, label_traindat=label_traindat):
    from modshogun import CrossValidation, CrossValidationResult
    from modshogun import ContingencyTableEvaluation, ACCURACY
    from modshogun import StratifiedCrossValidationSplitting
    from modshogun import RandomSearchModelSelection
    from modshogun import ModelSelectionParameters, R_EXP
    from modshogun import ParameterCombination
    from modshogun import BinaryLabels
    from modshogun import RealFeatures
    from modshogun import LibLinear, L2R_L2LOSS_SVC

    # build parameter tree to select C1 and C2
    param_tree_root=ModelSelectionParameters()
    c1=ModelSelectionParameters("C1");
    param_tree_root.append_child(c1)
    c1.build_values(-2.0, 2.0, R_EXP);

    c2=ModelSelectionParameters("C2");
    param_tree_root.append_child(c2);
    c2.build_values(-2.0, 2.0, R_EXP);

    # training data
    features=RealFeatures(traindat)
    labels=BinaryLabels(label_traindat)

    # classifier
    classifier=LibLinear(L2R_L2LOSS_SVC)

    # print all parameters available for model selection
    # Don't worry if yours is not included; simply write to the mailing list
    #classifier.print_modsel_params()

    # splitting strategy for cross-validation
    splitting_strategy=StratifiedCrossValidationSplitting(labels, 10)

    # evaluation method
    evaluation_criterium=ContingencyTableEvaluation(ACCURACY)

    # cross-validation instance
    cross_validation=CrossValidation(classifier, features, labels,
                                     splitting_strategy, evaluation_criterium)
    cross_validation.set_autolock(False)

    # model selection instance
    model_selection=RandomSearchModelSelection(cross_validation, param_tree_root, 0.5)

    # perform model selection with selected methods
    #print "performing model selection of"
    #param_tree_root.print_tree()
    best_parameters=model_selection.select_model()

    # print best parameters
    #print "best parameters:"
    #best_parameters.print_tree()

    # apply them and print result
    best_parameters.apply_to_machine(classifier)
    result=cross_validation.evaluate()
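# --- note on the sampling ratio (not part of the original example) ---
# RandomSearchModelSelection's third argument (0.5 above) is the fraction of
# the full parameter grid that gets evaluated. A small sketch for inspecting
# the grid size before searching, using only calls that appear elsewhere in
# this file; assumes a tree built as in the function above.
def _count_grid(param_tree_root, ratio=0.5):
    combinations = param_tree_root.get_combinations()
    n = combinations.get_num_elements()
    print("full grid: %d combinations; ratio %.2f samples about %d" % (n, ratio, int(n * ratio)))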
def create_param_tree():
    from modshogun import ModelSelectionParameters, R_EXP, R_LINEAR
    from modshogun import GaussianKernel, PowerKernel, MinkowskiMetric
    import math

    root=ModelSelectionParameters()

    c1=ModelSelectionParameters("C1")
    root.append_child(c1)
    c1.build_values(-1.0, 1.0, R_EXP)

    c2=ModelSelectionParameters("C2")
    root.append_child(c2)
    c2.build_values(-1.0, 1.0, R_EXP)

    gaussian_kernel=GaussianKernel()

    # print all parameters available for model selection
    # Don't worry if yours is not included; simply write to the mailing list
    #gaussian_kernel.print_modsel_params()

    param_gaussian_kernel=ModelSelectionParameters("kernel", gaussian_kernel)
    gaussian_kernel_width=ModelSelectionParameters("log_width")
    gaussian_kernel_width.build_values(-math.log(2.0), 0.0, R_EXP, 1.0, 2.0)
    param_gaussian_kernel.append_child(gaussian_kernel_width)
    root.append_child(param_gaussian_kernel)

    power_kernel=PowerKernel()

    # print all parameters available for model selection
    # Don't worry if yours is not included; simply write to the mailing list
    #power_kernel.print_modsel_params()

    param_power_kernel=ModelSelectionParameters("kernel", power_kernel)
    root.append_child(param_power_kernel)

    param_power_kernel_degree=ModelSelectionParameters("degree")
    param_power_kernel_degree.build_values(1.0, 2.0, R_LINEAR)
    param_power_kernel.append_child(param_power_kernel_degree)

    metric=MinkowskiMetric(10)

    # print all parameters available for model selection
    # Don't worry if yours is not included; simply write to the mailing list
    #metric.print_modsel_params()

    param_power_kernel_metric1=ModelSelectionParameters("distance", metric)

    param_power_kernel.append_child(param_power_kernel_metric1)

    param_power_kernel_metric1_k=ModelSelectionParameters("k")
    param_power_kernel_metric1_k.build_values(1.0, 2.0, R_LINEAR)
    param_power_kernel_metric1.append_child(param_power_kernel_metric1_k)

    return root
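# --- usage sketch (not part of the original example) ---
# The tree returned above is meant to be handed to a model-selection driver
# together with a kernel machine. A minimal sketch, assuming features/labels
# are already wrapped as RealFeatures/BinaryLabels and using modshogun's
# LibSVM as the learner (any kernel machine would do):
def _select_kernel_and_params(features, labels):
    from modshogun import LibSVM, CrossValidation, GridSearchModelSelection
    from modshogun import StratifiedCrossValidationSplitting
    from modshogun import ContingencyTableEvaluation, ACCURACY
    classifier = LibSVM()
    splitting = StratifiedCrossValidationSplitting(labels, 10)
    evaluation = ContingencyTableEvaluation(ACCURACY)
    cross = CrossValidation(classifier, features, labels, splitting, evaluation)
    model_selection = GridSearchModelSelection(cross, create_param_tree())
    best = model_selection.select_model()
    best.apply_to_machine(classifier)
    return cross.evaluate()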
def create_param_tree():
    from modshogun import ModelSelectionParameters, R_EXP, R_LINEAR
    from modshogun import ParameterCombination
    from modshogun import GaussianKernel, PolyKernel
    import math
    root = ModelSelectionParameters()

    tau = ModelSelectionParameters("tau")
    root.append_child(tau)

    # R_LINEAR and R_LOG are also available as range types
    min = -1
    max = 1
    type = R_EXP
    step = 1.5
    base = 2
    tau.build_values(min, max, type, step, base)

    # gaussian kernel with width
    gaussian_kernel = GaussianKernel()

    # print all parameters available for model selection
    # Don't worry if yours is not included; simply write to the mailing list
    #gaussian_kernel.print_modsel_params()

    param_gaussian_kernel = ModelSelectionParameters("kernel", gaussian_kernel)
    gaussian_kernel_width = ModelSelectionParameters("log_width")
    gaussian_kernel_width.build_values(2.0 * math.log(2.0),
                                       2.5 * math.log(2.0), R_LINEAR, 1.0)
    param_gaussian_kernel.append_child(gaussian_kernel_width)
    root.append_child(param_gaussian_kernel)

    # polynomial kernel with degree
    poly_kernel = PolyKernel()

    # print all parameters available for model selection
    # Don't worry if yours is not included; simply write to the mailing list
    #poly_kernel.print_modsel_params()

    param_poly_kernel = ModelSelectionParameters("kernel", poly_kernel)

    root.append_child(param_poly_kernel)

    # note that integers are used here
    param_poly_kernel_degree = ModelSelectionParameters("degree")
    param_poly_kernel_degree.build_values(1, 2, R_LINEAR)
    param_poly_kernel.append_child(param_poly_kernel_degree)

    return root
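# --- note on R_EXP ranges (an assumption, not confirmed by this file) ---
# Our reading of build_values(min, max, R_EXP, step, base) is that it yields
# base**x for x = min, min+step, ..., up to max; for min=-1, max=1, step=1.5,
# base=2 that would be roughly [0.5, 1.41]. A plain-Python sketch of that
# expansion, with hypothetical names:
def _expand_r_exp(lo, hi, step, base):
    vals, x = [], float(lo)
    while x <= hi + 1e-12:
        vals.append(base ** x)
        x += step
    return vals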
def modelselection_parameter_tree_modular (dummy):
    from modshogun import ParameterCombination
    from modshogun import ModelSelectionParameters, R_EXP, R_LINEAR
    from modshogun import PowerKernel
    from modshogun import GaussianKernel
    from modshogun import DistantSegmentsKernel
    from modshogun import MinkowskiMetric
    import math

    root=ModelSelectionParameters()

    combinations=root.get_combinations()
    combinations.get_num_elements()

    c=ModelSelectionParameters('C')
    root.append_child(c)
    c.build_values(1, 11, R_EXP)

    power_kernel=PowerKernel()

    # print all parameters available for model selection
    # Don't worry if yours is not included; simply write to the mailing list
    #power_kernel.print_modsel_params()

    param_power_kernel=ModelSelectionParameters('kernel', power_kernel)
    root.append_child(param_power_kernel)

    param_power_kernel_degree=ModelSelectionParameters('degree')
    param_power_kernel_degree.build_values(1, 1, R_EXP)
    param_power_kernel.append_child(param_power_kernel_degree)

    metric1=MinkowskiMetric(10)

    # print all parameters available for model selection
    # Don't worry if yours is not included; simply write to the mailing list
    #metric1.print_modsel_params()

    param_power_kernel_metric1=ModelSelectionParameters('distance', metric1)

    param_power_kernel.append_child(param_power_kernel_metric1)

    param_power_kernel_metric1_k=ModelSelectionParameters('k')
    param_power_kernel_metric1_k.build_values(1, 12, R_LINEAR)
    param_power_kernel_metric1.append_child(param_power_kernel_metric1_k)

    gaussian_kernel=GaussianKernel()

    # print all parameters available for model selection
    # Don't worry if yours is not included; simply write to the mailing list
    #gaussian_kernel.print_modsel_params()

    param_gaussian_kernel=ModelSelectionParameters('kernel', gaussian_kernel)

    root.append_child(param_gaussian_kernel)

    param_gaussian_kernel_width=ModelSelectionParameters('log_width')
    param_gaussian_kernel_width.build_values(0.0, 0.5*math.log(2.0), R_LINEAR)
    param_gaussian_kernel.append_child(param_gaussian_kernel_width)

    ds_kernel=DistantSegmentsKernel()

    # print all parameters available for model selection
    # Don't worry if yours is not included; simply write to the mailing list
    #ds_kernel.print_modsel_params()

    param_ds_kernel=ModelSelectionParameters('kernel', ds_kernel)

    root.append_child(param_ds_kernel)

    param_ds_kernel_delta=ModelSelectionParameters('delta')
    param_ds_kernel_delta.build_values(1, 2, R_EXP)
    param_ds_kernel.append_child(param_ds_kernel_delta)

    param_ds_kernel_theta=ModelSelectionParameters('theta')
    param_ds_kernel_theta.build_values(1, 2, R_EXP)
    param_ds_kernel.append_child(param_ds_kernel_theta)

    #	root.print_tree()
    combinations=root.get_combinations()
    #	for i in range(combinations.get_num_elements()):
    #		params = ParameterCombination.obtain_from_generic(combinations.get_element(i))
    #		params.print_tree()

    return
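# --- usage sketch (not part of the original example) ---
# The commented-out enumeration loop above, made live: walks every parameter
# combination the tree generates and prints it.
def _print_all_combinations(root):
    from modshogun import ParameterCombination
    combinations = root.get_combinations()
    for i in range(combinations.get_num_elements()):
        params = ParameterCombination.obtain_from_generic(combinations.get_element(i))
        params.print_tree()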
def modelselection_parameter_tree_modular(dummy):
    from modshogun import ParameterCombination
    from modshogun import ModelSelectionParameters, R_EXP, R_LINEAR
    from modshogun import PowerKernel
    from modshogun import GaussianKernel
    from modshogun import DistantSegmentsKernel
    from modshogun import MinkowskiMetric

    root = ModelSelectionParameters()

    combinations = root.get_combinations()
    combinations.get_num_elements()

    c = ModelSelectionParameters('C')
    root.append_child(c)
    c.build_values(1, 11, R_EXP)

    power_kernel = PowerKernel()

    # print all parameters available for model selection
    # Don't worry if yours is not included; simply write to the mailing list
    #power_kernel.print_modsel_params()

    param_power_kernel = ModelSelectionParameters('kernel', power_kernel)
    root.append_child(param_power_kernel)

    param_power_kernel_degree = ModelSelectionParameters('degree')
    param_power_kernel_degree.build_values(1, 1, R_EXP)
    param_power_kernel.append_child(param_power_kernel_degree)

    metric1 = MinkowskiMetric(10)

    # print all parameters available for model selection
    # Don't worry if yours is not included; simply write to the mailing list
    #metric1.print_modsel_params()

    param_power_kernel_metric1 = ModelSelectionParameters('distance', metric1)

    param_power_kernel.append_child(param_power_kernel_metric1)

    param_power_kernel_metric1_k = ModelSelectionParameters('k')
    param_power_kernel_metric1_k.build_values(1, 12, R_LINEAR)
    param_power_kernel_metric1.append_child(param_power_kernel_metric1_k)

    gaussian_kernel = GaussianKernel()

    # print all parameters available for model selection
    # Don't worry if yours is not included; simply write to the mailing list
    #gaussian_kernel.print_modsel_params()

    param_gaussian_kernel = ModelSelectionParameters('kernel', gaussian_kernel)

    root.append_child(param_gaussian_kernel)

    param_gaussian_kernel_width = ModelSelectionParameters('width')
    param_gaussian_kernel_width.build_values(1, 2, R_EXP)
    param_gaussian_kernel.append_child(param_gaussian_kernel_width)

    ds_kernel = DistantSegmentsKernel()

    # print all parameters available for model selection
    # Don't worry if yours is not included; simply write to the mailing list
    #ds_kernel.print_modsel_params()

    param_ds_kernel = ModelSelectionParameters('kernel', ds_kernel)

    root.append_child(param_ds_kernel)

    param_ds_kernel_delta = ModelSelectionParameters('delta')
    param_ds_kernel_delta.build_values(1, 2, R_EXP)
    param_ds_kernel.append_child(param_ds_kernel_delta)

    param_ds_kernel_theta = ModelSelectionParameters('theta')
    param_ds_kernel_theta.build_values(1, 2, R_EXP)
    param_ds_kernel.append_child(param_ds_kernel_theta)

    #	root.print_tree()
    combinations = root.get_combinations()
    #	for i in range(combinations.get_num_elements()):
    #		combinations.get_element(i).print_tree()

    return
def modelselection_grid_search_libsvr_modular (fm_train=traindat,fm_test=testdat,label_train=label_traindat,\
           width=2.1,C=1,epsilon=1e-5,tube_epsilon=1e-2):
    from modshogun import CrossValidation, CrossValidationResult
    from modshogun import MeanSquaredError
    from modshogun import CrossValidationSplitting
    from modshogun import RegressionLabels
    from modshogun import RealFeatures
    from modshogun import GaussianKernel
    from modshogun import LibSVR
    from modshogun import GridSearchModelSelection
    from modshogun import ModelSelectionParameters, R_EXP
    from modshogun import ParameterCombination

    # training data
    features_train = RealFeatures(fm_train)
    labels = RegressionLabels(label_train)

    # kernel
    kernel = GaussianKernel(features_train, features_train, width)

    # print all parameters available for model selection
    # Don't worry if yours is not included; simply write to the mailing list
    #kernel.print_modsel_params()

    # predictor
    predictor = LibSVR(C, tube_epsilon, kernel, labels)
    predictor.set_epsilon(epsilon)

    # splitting strategy for 5-fold cross-validation (for classification it is
    # better to use "StratifiedCrossValidationSplitting", but the standard
    # "CrossValidationSplitting" used here is also available)
    splitting_strategy = CrossValidationSplitting(labels, 5)

    # evaluation method
    evaluation_criterium = MeanSquaredError()

    # cross-validation instance
    cross_validation = CrossValidation(predictor, features_train, labels,
                                       splitting_strategy,
                                       evaluation_criterium)

    # (optional) repeat cross-validation (set larger to get better estimates;
    # at least two runs are needed for confidence intervals)
    cross_validation.set_num_runs(2)

    # (optional) request 95% confidence intervals for results (not actually
    # needed for this toy example)
    cross_validation.set_conf_int_alpha(0.05)

    # print all parameters available for model selection
    # Don't worry if yours is not included; simply write to the mailing list
    #predictor.print_modsel_params()

    # build parameter tree to select C1 and C2
    param_tree_root = ModelSelectionParameters()
    c1 = ModelSelectionParameters("C1")
    param_tree_root.append_child(c1)
    c1.build_values(-1.0, 0.0, R_EXP)

    c2 = ModelSelectionParameters("C2")
    param_tree_root.append_child(c2)
    c2.build_values(-1.0, 0.0, R_EXP)

    # model selection instance
    model_selection = GridSearchModelSelection(cross_validation,
                                               param_tree_root)

    # perform model selection with selected methods
    #print "performing model selection of"
    #print "parameter tree"
    #param_tree_root.print_tree()

    #print "starting model selection"
    # if True, each evaluated parameter combination is printed
    print_state = False
    # lock data beforehand (use with care): since this search only varies C1
    # and C2, the kernel matrix never changes, and locking avoids recomputing
    # it in every iteration of the model search
    predictor.data_lock(labels, features_train)
    best_parameters = model_selection.select_model(print_state)

    # print best parameters
    #print "best parameters:"
    #best_parameters.print_tree()

    # apply them and print result
    best_parameters.apply_to_machine(predictor)
    result = cross_validation.evaluate()
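# --- usage sketch (not part of the original example) ---
# Running the regression example above on synthetic 1D data; assumes
# modshogun and NumPy are installed. LibSVR expects a (features x examples)
# matrix and a real-valued target vector of matching length.
def _demo_grid_search_libsvr():
    import numpy as np
    np.random.seed(17)
    X = np.random.rand(1, 60) * 10.0   # 60 examples of one feature each
    y = np.sin(X[0]) + 0.1 * np.random.randn(60)
    modelselection_grid_search_libsvr_modular(X, X, y)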
def run_demo():
    # imports added for completeness; the original demo script defines them
    # at module level, together with the helpers create_toy_data,
    # plot_sausage and plot_training_data used below
    import logging as LG
    import random
    import scipy as SP
    import pylab as PL
    from numpy import transpose
    from modshogun import RealFeatures, RegressionLabels, GaussianKernel
    from modshogun import GaussianLikelihood, ZeroMean, ExactInferenceMethod
    from modshogun import GaussianProcessRegression
    from modshogun import ModelSelectionParameters, R_LINEAR
    from modshogun import GradientCriterion, GradientEvaluation
    from modshogun import GradientModelSelection

    LG.basicConfig(level=LG.INFO)
    random.seed(572)

    #1. create toy data
    [x, y] = create_toy_data()

    feat_train = RealFeatures(transpose(x))
    labels = RegressionLabels(y)

    n_dimensions = 1

    #2. locations of uniformly spaced predictions
    X = SP.linspace(0, 10, 10)[:, SP.newaxis]

    # new interface with likelihood parameters decoupled from the covariance function
    likelihood = GaussianLikelihood()
    covar_parms = SP.log([2])
    hyperparams = {'covar': covar_parms, 'lik': SP.log([1])}

    #construct covariance function
    SECF = GaussianKernel(feat_train, feat_train, 2)
    covar = SECF
    zmean = ZeroMean()
    inf = ExactInferenceMethod(SECF, feat_train, zmean, labels, likelihood)

    gp = GaussianProcessRegression(inf, feat_train, labels)

    root = ModelSelectionParameters()
    c1 = ModelSelectionParameters("inference_method", inf)
    root.append_child(c1)

    c2 = ModelSelectionParameters("scale")
    c1.append_child(c2)
    c2.build_values(0.01, 4.0, R_LINEAR)
    c3 = ModelSelectionParameters("likelihood_model", likelihood)
    c1.append_child(c3)

    c4 = ModelSelectionParameters("sigma")
    c3.append_child(c4)
    c4.build_values(0.001, 4.0, R_LINEAR)
    c5 = ModelSelectionParameters("kernel", SECF)
    c1.append_child(c5)

    c6 = ModelSelectionParameters("width")
    c5.append_child(c6)
    c6.build_values(0.001, 4.0, R_LINEAR)

    crit = GradientCriterion()

    grad = GradientEvaluation(gp, feat_train, labels, crit)

    grad.set_function(inf)

    gp.print_modsel_params()

    root.print_tree()

    grad_search = GradientModelSelection(root, grad)

    grad.set_autolock(0)

    best_combination = grad_search.select_model(1)

    gp.set_return_type(GaussianProcessRegression.GP_RETURN_COV)

    St = gp.apply_regression(feat_train)

    St = St.get_labels()

    gp.set_return_type(GaussianProcessRegression.GP_RETURN_MEANS)

    M = gp.apply_regression()

    M = M.get_labels()

    #create plots
    plot_sausage(transpose(x), transpose(M), transpose(SP.sqrt(St)))
    plot_training_data(x, y)
    PL.show()