Example #1
def viz(pc, fig=None, show_histogram=False, show=True):
    # Assumes module-level imports of numpy as np, data_lib, helper_functions,
    # and array_functions from the surrounding repository.
    import create_data_set
    from methods import method
    source_learner = method.NadarayaWatsonMethod()
    target_learner = method.NadarayaWatsonMethod()
    #pc = configs_lib.ProjectConfigs()
    data = helper_functions.load_object('../' + pc.data_file).data
    data.set_train()
    source_data = data.get_transfer_subset(pc.source_labels)
    source_data.set_target()
    target_data = data.get_transfer_subset(pc.target_labels)
    target_data.set_target()
    source_learner.train_and_test(source_data)
    target_learner.train_and_test(target_data)
    source_learner.sigma = 10
    target_learner.sigma = 10
    x = array_functions.vec_to_2d(np.linspace(data.x.min(), data.x.max(), 100))
    test_data = data_lib.Data()
    test_data.x = x
    test_data.is_regression = True
    y_s = source_learner.predict(test_data).fu
    y_t = target_learner.predict(test_data).fu

    #array_functions.plot_line(x,y_t-y_s,pc.data_set,y_axes=np.asarray([-5,5]))
    y = y_t-y_s
    #y = y - y.mean()
    array_functions.plot_line(x, y, title=None, fig=fig, show=show)
    if show_histogram:
        array_functions.plot_histogram(data.x,20)
    x=1
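For orientation, the fit/predict pattern that this and the later examples rely on is sketched below. The sketch assumes data_lib.Data and methods.method.NadarayaWatsonMethod behave as shown in the snippets here (Data(x, y), set_train, train_and_test, predict(...).fu); the import paths and synthetic inputs are illustrative stand-ins and may differ from the actual repository layout.

import numpy as np
import data_lib
from methods import method

x = np.linspace(0, 1, 50)[:, np.newaxis]      # feature matrix, one column
y = np.sin(2 * np.pi * x).ravel()             # regression targets

data = data_lib.Data(x, y)                    # constructor used in Example #3
data.set_train()
data.is_regression = True

nw = method.NadarayaWatsonMethod()
nw.cv_params = {}                             # skip cross-validation, as in Example #3
nw.sigma = 0.1                                # fixed kernel bandwidth
nw.train_and_test(data)

test_data = data_lib.Data()                   # unlabeled query points, as in Example #1
test_data.x = np.linspace(0, 1, 100)[:, np.newaxis]
test_data.is_regression = True
y_hat = nw.predict(test_data).fu              # smoothed estimates (the .fu field above)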
Example #2
    def train_and_test(self, data):
        source_order = self.configs.source_domain_order
        target_order = self.configs.target_domain_order
        #results = super(DomainModelShiftMethod, self).train_and_test(data_copy)

        source_to_keep = array_functions.find_set(data.data_set_ids, source_order)
        source_data = data.get_subset(source_to_keep)
        source_data.y = source_data.true_y
        source_configs = deepcopy(self.configs)
        source_configs.labels_to_keep = source_order
        source_configs.labels_to_not_sample = np.asarray([source_order[0]])
        source_configs.source_labels = np.asarray([source_order[0]])
        source_configs.target_labels = np.asarray([source_order[1]])

        source_transformation = local_transfer_methods.OffsetTransfer(source_configs)
        source_transformation.use_validation = True
        source_transformation.train_and_test(source_data)

        target_to_keep = array_functions.find_set(data.data_set_ids, [target_order[0]])
        target_data = data.get_subset(target_to_keep)
        target_data.reveal_labels(target_data.data_set_ids == target_order[0])
        target_configs = deepcopy(self.configs)
        target_configs.labels_to_keep = np.asarray([target_order[0]])
        target_configs.source_labels = np.asarray([])
        target_configs.target_labels = np.asarray([target_order[0]])

        offset_labels = source_transformation.predict(target_data).y
        target_data.y = offset_labels
        target_data.true_y = offset_labels
        self.target_learner = method.NadarayaWatsonMethod(target_configs)
        self.target_learner.use_validation = True
        self.target_learner.train_and_test(target_data)

        t = data.get_subset(data.data_set_ids == target_order[1])
        return super(DomainModelShiftMethod, self).train_and_test(t)
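A note on the subset selection above: array_functions.find_set appears to return the positions of data_set_ids that belong to a given set of domain ids. A plain-NumPy sketch of that selection, with illustrative arrays and only np.in1d and boolean indexing assumed:

import numpy as np

data_set_ids = np.array([0, 0, 1, 1, 2, 2, 2])
source_order = [0, 1]

source_mask = np.in1d(data_set_ids, source_order)  # True where the id is one of the source domains
source_idx = np.nonzero(source_mask)[0]            # integer indices, if an index set is preferred

print(source_mask)  # [ True  True  True  True False False False]
print(source_idx)   # [0 1 2 3]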
Example #3
def estimate_gradients(x, y, I):
    value_range = x.max(0) - x.min(0)  # per-feature span of x; avoids shadowing the builtin range
    #x = (x - x.min(0)) / value_range
    x = x[I, :]
    y = y[I]
    data = data_lib.Data(x, y)
    data.set_train()
    data.is_regression = True
    nw = method.NadarayaWatsonMethod()
    nw.cv_params = {}
    nw.sigma = 1000
    nw.train_and_test(data)

    num_x0 = 40
    num_x1 = int(num_x0 * value_range[1] / value_range[0])
    v = np.zeros((num_x0, num_x1))

    x0_vals = np.linspace(x[:, 0].min(), x[:, 0].max(), num_x0)
    x1_vals = np.linspace(x[:, 1].min(), x[:, 1].max(), num_x1)
    x1_vals = x1_vals[::-1]
    v = np.zeros((x1_vals.size, x0_vals.size))

    for idx0, x0 in enumerate(x0_vals):
        for idx1, x1 in enumerate(x1_vals):
            xi = np.asarray([x0, x1])
            d = data_lib.Data(xi[np.newaxis, :], np.asarray([np.nan]))
            v[idx1, idx0] = nw.predict(d).y
        print('')  # blank progress line after each column of the grid
    gradients = np.gradient(v)
    g = gradients[0]**2 + gradients[1]**2
    g = np.sqrt(g)
    return g, v
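The last lines of estimate_gradients turn the grid of smoothed values into a gradient-magnitude map. The same step in isolation, on a synthetic surface, with only NumPy assumed:

import numpy as np

x0_vals = np.linspace(0, 1, 40)
x1_vals = np.linspace(0, 1, 30)
X1, X0 = np.meshgrid(x1_vals, x0_vals, indexing='ij')  # rows index x1, columns x0, matching v above
v = np.sin(2 * np.pi * X0) + X1 ** 2                   # stand-in for the Nadaraya-Watson surface

d_rows, d_cols = np.gradient(v)         # finite differences along each grid axis
g = np.sqrt(d_rows ** 2 + d_cols ** 2)  # gradient magnitude, the g returned by estimate_gradients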
Example #4
    def __init__(self, pc):
        super(MainConfigs, self).__init__()
        #pc = create_project_configs()
        self.copy_fields(pc, pc_fields_to_copy)
        from methods import transfer_methods
        from methods import method
        method_configs = MethodConfigs(pc)
        method_configs.metric = 'euclidean'
        method_configs.use_validation = use_validation

        target_nw = transfer_methods.TargetTranfer(method_configs)
        target_nw.base_learner = method.NadarayaWatsonMethod(method_configs)
        target_ridge = transfer_methods.TargetTranfer(method_configs)
        target_ridge.base_learner = method.SKLRidgeRegression(method_configs)

        stacked_nw1 = transfer_methods.StackingTransfer(method_configs)
        stacked_nw1.preprocessor = preprocessing.SelectSourcePreprocessor(
            sources_to_keep=[1])

        stacked_nw2 = transfer_methods.StackingTransfer(method_configs)
        stacked_nw2.preprocessor = preprocessing.SelectSourcePreprocessor(
            sources_to_keep=[2])

        stacked_nw3 = transfer_methods.StackingTransfer(method_configs)
        stacked_nw3.preprocessor = preprocessing.SelectSourcePreprocessor(
            sources_to_keep=[3])

        higher_order = higher_order_transfer_methods.DomainModelShiftMethod(
            method_configs)

        #self.learner = target_nw
        #self.learner = stacked_nw1
        #self.learner = stacked_nw2
        #self.learner = stacked_nw3
        self.learner = higher_order
Example #5
 def __init__(self, configs=None):
     super(SemisupervisedRegressionMethod, self).__init__(configs)
     self.cv_params['C'] = self.create_cv_params(-7, 7)
     self.cv_params['sigma'] = self.create_cv_params(-7, 7)
     self.max_n_L = 200
     self.nw_method = method.NadarayaWatsonMethod(deepcopy(configs))
     self.nw_method.configs.target_labels = None
     self.nw_method.configs.source_labels = None
Example #6
 def __init__(self, configs=base_configs.MethodConfigs()):
     super(ScipyOptNonparametricHypothesisTransfer, self).__init__(configs)
     self.cv_params['C'] = 10**np.asarray(range(-4, 4), dtype='float64')
     self.g_nw = method.NadarayaWatsonMethod(configs)
     self.g_nw.configs.target_labels = None
     self.g_nw.configs.source_labels = None
     self.g_nw.configs.cv_loss_function = loss_function.MeanSquaredError()
     self.g_nw.quiet = True
     self.k = 3
     self.metric = configs.metric
     self.bias = 0
     self.use_huber = use_huber
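The C grid in Example #6 (and, presumably, the grids produced by create_cv_params in Example #5) is a log-spaced set of candidate values. A quick check that the expression above matches np.logspace:

import numpy as np

grid_a = 10 ** np.asarray(range(-4, 4), dtype='float64')
grid_b = np.logspace(-4, 3, num=8)  # 1e-4, 1e-3, ..., 1e3

assert np.allclose(grid_a, grid_b)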
Example #7
def create_forest_fires():
    months = {
        'jan': 1,
        'feb': 2,
        'mar': 3,
        'apr': 4,
        'may': 5,
        'jun': 6,
        'jul': 7,
        'aug': 8,
        'sep': 9,
        'oct': 10,
        'nov': 11,
        'dec': 12
    }
    days = {
        'sun': 1,
        'mon': 2,
        'tue': 3,
        'wed': 4,
        'thu': 5,
        'fri': 6,
        'sat': 7
    }
    #month_to_season = lambda x : (months[x]-1)/3
    month_to_season = lambda x: months[x]
    day_to_int = lambda x: days[x]
    file = 'forest_fires/forestfires.csv'
    converters = {2: month_to_season, 3: day_to_int}
    field_names, forest_data = load_csv(file,
                                        dtype='float',
                                        converters=converters)
    x = forest_data
    y = forest_data[:, -1]
    i = field_names == 'month'
    domain_ids = forest_data[:, i]
    months_to_use = np.asarray([6, 7, 8])
    #months_to_use = np.asarray([1,2,3,4,5,6,7,8,9,10,11,12])
    to_use = array_functions.find_set(domain_ids, months_to_use)
    x = x[to_use, :]
    y = y[to_use]
    domain_ids = domain_ids[to_use]
    x = x[:, 4:]
    field_names = field_names[4:]
    I = (y > 0) & (y < 700)
    x = x[I, :]
    y = y[I]
    domain_ids = domain_ids[I]

    from methods import method
    learner = method.NadarayaWatsonMethod()
    viz_features(x, y, domain_ids, field_names, learner=learner)
    pass
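load_csv is a repository helper whose exact signature is not shown here; below is a hedged sketch of the same month/day conversion and summer-month filtering using only the standard csv module and NumPy. Column positions follow the UCI forest-fires layout assumed by the converters above (column 2 is month, column 3 is day).

import csv
import numpy as np

months = {m: i + 1 for i, m in enumerate(
    ['jan', 'feb', 'mar', 'apr', 'may', 'jun',
     'jul', 'aug', 'sep', 'oct', 'nov', 'dec'])}
days = {d: i + 1 for i, d in enumerate(
    ['sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat'])}

rows = []
with open('forest_fires/forestfires.csv') as f:
    reader = csv.reader(f)
    field_names = np.asarray(next(reader))
    for row in reader:
        row[2] = months[row[2]]  # month name -> 1..12
        row[3] = days[row[3]]    # day name -> 1..7
        rows.append([float(v) for v in row])
forest_data = np.asarray(rows)

domain_ids = forest_data[:, 2]         # month column doubles as the domain id
keep = np.in1d(domain_ids, [6, 7, 8])  # summer months only, as above
x = forest_data[keep, 4:]              # drop spatial/month/day columns
y = forest_data[keep, -1]              # burned area (last column)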
Example #8
    def __init__(self, pc):
        super(MainConfigs, self).__init__()
        #pc = create_project_configs()
        self.copy_fields(pc, pc_fields_to_copy)
        from methods import method
        from methods import active_methods
        from methods import wrapper_methods
        from methods import semisupervised
        method_configs = MethodConfigs(pc)
        method_configs.cluster_select_singleton = cluster_select_singleton
        method_configs.active_iterations = active_iterations
        method_configs.active_items_per_iteration = pc.active_items_per_iteration
        method_configs.metric = 'euclidean'
        method_configs.num_starting_labels = num_starting_labels
        method_configs.fixed_sigma_x = pc.fixed_sigma_x
        method_configs.no_f_x = pc.no_f_x
        method_configs.no_spectral_kernel = pc.no_spectral_kernel
        method_configs.transfer_hyperparameters = pc.transfer_hyperparameters
        method_configs.use_greedy_instance_selection = pc.use_greedy_instance_selection
        method_configs.use_knn = pc.use_knn
        method_configs.use_l1_loss = pc.use_l1_loss

        for key in other_method_configs.keys():
            setattr(method_configs, key, getattr(pc, key))

        method_configs.use_test_error_for_model_selection = pc.use_test_error_for_model_selection

        if pc.active_method == ACTIVE_RANDOM:
            active = active_methods.ActiveMethod(method_configs)
        elif pc.active_method == ACTIVE_CLUSTER:
            active = active_methods.ClusterActiveMethod(method_configs)
            active.cluster_scale = cluster_scale
        elif pc.active_method == ACTIVE_CLUSTER_PURITY:
            active = active_methods.ClusterPurityActiveMethod(method_configs)
            active.cluster_scale = cluster_scale

        wrapper = wrapper_methods.TargetOnlyWrapper(method_configs)
        if method_configs.use_knn:
            knn = method.SKLKNNRegression(deepcopy(method_configs))
            knn.cv_params['n_neighbors'] = np.asarray([1])
            wrapper.base_learner = knn
        else:
            nw = method.NadarayaWatsonMethod(deepcopy(method_configs))
            wrapper.base_learner = nw

        active.base_learner = wrapper
        active.base_learner.quiet = False
        self.learner = active
Example #9
def run_main():
    import create_data_set
    from methods import method
    learner = method.NadarayaWatsonMethod()
    # Pick one data-set file here; the commented lines are alternative choices.
    s = create_data_set.synthetic_step_transfer_file
    #s = create_data_set.synthetic_delta_linear_file
    #s = create_data_set.synthetic_step_linear_transfer_file
    #s = create_data_set.boston_housing_raw_data_file % '-13'
    #s = create_data_set.concrete_file % '-7'
    #s = create_data_set.concrete_file % '-feat=0'
    #s = create_data_set.bike_file % '-feat=1'
    #s = create_data_set.wine_file % '-small-11'
    #s = create_data_set.boston_housing_raw_data_file % ''
    #learner = None
    data = helper_functions.load_object(s)
    viz_features(data.x, data.y, data.data_set_ids, learner=learner)
Example #10
    def __init__(self, pc):
        super(MainConfigs, self).__init__()
        #pc = create_project_configs()
        self.copy_fields(pc, pc_fields_to_copy)
        from methods import method
        from methods import active_methods
        from methods import semisupervised
        method_configs = MethodConfigs(pc)

        for key in other_method_configs.keys():
            setattr(method_configs, key, getattr(pc, key))
        #ridge_reg = method.SKLRidgeRegression(method_configs)

        from methods import preprocessing
        ssl_reg = semisupervised.SemisupervisedRegressionMethod(method_configs)
        nw_reg = method.NadarayaWatsonMethod(method_configs)
        nw_reg.preprocessor = preprocessing.TargetOnlyPreprocessor()

        #self.learner = ssl_reg
        self.learner = nw_reg
Example #11
    def __init__(self, configs=MethodConfigs()):
        super(SupervisedInstanceSelection, self).__init__(configs)
        self.cv_params = dict()
        self.is_classifier = False
        self.p_s = None
        self.p_x = None
        self.f_s = None
        self.f_x = None
        self.f_x_estimate = None
        self.learned_distribution = None
        self.optimization_value = None
        self.use_linear = False
        self.quiet = False

        self.selected_data = None
        self.full_data = None

        configs = deepcopy(self.configs)
        self.target_learner = method.NadarayaWatsonMethod(deepcopy(configs))
        self.target_learner.configs.use_validation = True
        self.target_learner.configs.results_features = ['y', 'true_y']
        self.target_learner.quiet = True
        self.subset_learner = deepcopy(self.target_learner)
        self.mixture_reg = 1
        self.subset_size = 10
        self.density_reg = .1
        self.num_samples = 5
        self.subset_density_reg = .1
        self.learner_reg = 100
        self.pca = None
        self.output = None

        self.no_f_x = False

        self.is_noisy = None

        if self.use_linear:
            self.supervised_loss_func = compute_f_linear
        else:
            self.supervised_loss_func = compute_f_nw
            self.cv_params['subset_size'] = self.create_cv_params(-5, 5)
Example #12
    def __init__(self, pc):
        super(MainConfigs, self).__init__()
        #pc = create_project_configs()
        self.copy_fields(pc, pc_fields_to_copy)
        from methods import transfer_methods
        from methods import method
        from methods import scipy_opt_methods
        method_configs = MethodConfigs(pc)
        method_configs.metric = 'euclidean'
        method_configs.use_validation = use_validation

        if self.data_set == bc.DATA_NG:
            method_configs.metric = 'cosine'
            method_configs.use_fused_lasso = False

        method_configs.constraints = []

        from methods import far_transfer_methods

        fuse_nw = transfer_methods.FuseTransfer(method_configs)
        fuse_nw.base_learner = method.NadarayaWatsonMethod(method_configs)
        target_nw = transfer_methods.TargetTranfer(method_configs)
        target_nw.base_learner = method.NadarayaWatsonMethod(method_configs)
        target_ridge = transfer_methods.TargetTranfer(method_configs)
        target_ridge.base_learner = method.SKLRidgeRegression(method_configs)
        nw = method.NadarayaWatsonMethod(method_configs)
        graph_transfer = far_transfer_methods.GraphTransfer(method_configs)

        from methods import semisupervised
        from methods import preprocessing
        ssl_regression = semisupervised.SemisupervisedRegressionMethod(
            method_configs)
        ssl_regression.preprocessor = preprocessing.TargetOnlyPreprocessor()

        dummy = method.SKLMeanRegressor(method_configs)
        dummy.preprocessor = preprocessing.TargetOnlyPreprocessor()

        graph_transfer_nw = far_transfer_methods.GraphTransferNW(
            method_configs)
        graph_transfer_nw.predict_sample = pc.predict_sample
        graph_transfer_nw.quiet = False
        #self.learner = target_nw
        offset_transfer = methods.local_transfer_methods.OffsetTransfer(
            method_configs)
        stacked_transfer = methods.transfer_methods.StackingTransfer(
            method_configs)
        sms_delta_transfer = methods.local_transfer_methods.LocalTransferDeltaSMS(
            method_configs)

        method_configs.metric = 'euclidean'
        method_configs.no_reg = False
        method_configs.use_g_learner = True
        method_configs.use_reg2 = True
        method_configs.use_fused_lasso = False
        method_configs.no_C3 = False
        method_configs.use_radius = False
        method_configs.include_scale = False
        method_configs.constant_b = False
        method_configs.linear_b = True
        method_configs.clip_b = True
        if pc.ft_method == FT_METHOD_LOCAL_NONPARAMETRIC:
            method_configs.linear_b = False
            method_configs.clip_b = False
            method_configs.use_radius = True

        dt_local_transfer = methods.local_transfer_methods.LocalTransferDelta(
            method_configs)
        if pc.ft_method == FT_METHOD_GRAPH:
            self.learner = graph_transfer
        elif pc.ft_method == FT_METHOD_GRAPH_NW:
            self.learner = graph_transfer_nw
        elif pc.ft_method == FT_METHOD_STACKING:
            self.learner = stacked_transfer
        elif pc.ft_method in {FT_METHOD_LOCAL, FT_METHOD_LOCAL_NONPARAMETRIC}:
            self.learner = dt_local_transfer
        elif pc.ft_method == FT_METHOD_SMS_DELTA:
            self.learner = sms_delta_transfer
        elif pc.ft_method == FT_METHOD_OFFSET:
            self.learner = offset_transfer
        elif pc.ft_method == FT_METHOD_DUMMY:
            self.learner = dummy
        elif pc.ft_method == FT_METHOD_SSL:
            self.learner = ssl_regression
        else:
            assert False, 'Unknown ft_method'
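A small design note on the ft_method dispatch above: the if/elif chain ending in assert False can equivalently be written as a dictionary lookup, which keeps the mapping from method constants to learners in one place. A hedged, self-contained sketch follows; the FT_METHOD_* values and learner objects are stand-ins for the constants and learners constructed in this config.

# Stand-in constants and learners; in the real config these are the
# FT_METHOD_* module constants and the learner objects built above.
FT_METHOD_GRAPH = 'graph'
FT_METHOD_STACKING = 'stacking'
FT_METHOD_OFFSET = 'offset'

graph_transfer = object()
stacked_transfer = object()
offset_transfer = object()

learner_by_method = {
    FT_METHOD_GRAPH: graph_transfer,
    FT_METHOD_STACKING: stacked_transfer,
    FT_METHOD_OFFSET: offset_transfer,
}

ft_method = FT_METHOD_STACKING
try:
    learner = learner_by_method[ft_method]
except KeyError:
    raise AssertionError('Unknown ft_method: ' + repr(ft_method))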
Example #13
 def __init__(self, configs=base_configs.MethodConfigs()):
     super(IdentityWrapper, self).__init__(configs)
     self.base_learner = method.NadarayaWatsonMethod(deepcopy(configs))
Example #14
 def __init__(self, use_variance=True):
     super(PiplineUseVariance, self).__init__()
     self.base_learner = method.NadarayaWatsonMethod()
     self.use_variance = use_variance
Example #15
 def __init__(self, configs=None):
     super(DomainModelShiftMethod, self).__init__(configs)
     self.base_learner = transfer_methods.StackingTransfer(configs)
     self.target_learner = method.NadarayaWatsonMethod(configs)
Example #16
    def __init__(self, pc):
        super(MainConfigs, self).__init__()
        #pc = create_project_configs()
        self.copy_fields(pc, pc_fields_to_copy)
        from methods import transfer_methods
        from methods import method
        from methods import scipy_opt_methods
        method_configs = MethodConfigs(pc)
        method_configs.metric = 'euclidean'
        method_configs.no_reg = False
        method_configs.use_g_learner = True
        method_configs.use_validation = use_validation
        method_configs.use_reg2 = True
        method_configs.joint_cv = True

        method_configs.use_fused_lasso = use_fused_lasso
        method_configs.no_C3 = no_C3
        method_configs.use_radius = use_radius
        method_configs.include_scale = include_scale
        method_configs.constant_b = constant_b
        method_configs.linear_b = linear_b
        method_configs.clip_b = clip_b
        method_configs.separate_target_domains = separate_target_domains
        method_configs.multitask = multitask
        if self.data_set != bc.DATA_SYNTHETIC_SLANT_MULTITASK:
            method_configs.separate_target_domains = False
            method_configs.multitask = False
        assert not (constant_b and linear_b)

        if self.data_set == bc.DATA_NG:
            method_configs.metric = 'cosine'
            method_configs.use_fused_lasso = False

        method_configs.constraints = []
        if use_constraints:
            if linear_b:
                if self.data_set == bc.DATA_CONCRETE:
                    method_configs.constraints.append(
                        nonpositive_constraint_linear)
                elif self.data_set == bc.DATA_BIKE_SHARING:
                    method_configs.constraints.append(
                        nonnegative_constraint_linear)
                elif self.data_set == bc.DATA_BOSTON_HOUSING:
                    method_configs.constraints.append(
                        nonnegative_constraint_linear)
                elif self.data_set == bc.DATA_WINE:
                    method_configs.constraints.append(
                        nonnegative_constraint_linear)
                elif self.data_set == bc.DATA_SYNTHETIC_CURVE:
                    method_configs.constraints.append(
                        nonpositive_constraint_linear)
                elif self.data_set == bc.DATA_SYNTHETIC_SLANT:
                    method_configs.constraints.append(
                        nonpositive_constraint_linear)
                elif self.data_set == bc.DATA_SYNTHETIC_STEP_LINEAR_TRANSFER:
                    method_configs.constraints.append(
                        nonpositive_constraint_linear)
                elif self.data_set == bc.DATA_SYNTHETIC_DELTA_LINEAR:
                    method_configs.constraints.append(
                        nonpositive_constraint_linear)
                elif self.data_set == bc.DATA_SYNTHETIC_CROSS:
                    method_configs.constraints.append(bound_4_linear)
                else:
                    assert False
            else:
                if self.data_set == bc.DATA_CONCRETE:
                    method_configs.constraints.append(nonpositive_constraint)
                elif self.data_set == bc.DATA_BIKE_SHARING:
                    method_configs.constraints.append(nonnegative_constraint)
                elif self.data_set == bc.DATA_BOSTON_HOUSING:
                    method_configs.constraints.append(nonnegative_constraint)
                elif self.data_set == bc.DATA_WINE:
                    method_configs.constraints.append(nonnegative_constraint)
                elif self.data_set == bc.DATA_SYNTHETIC_CURVE:
                    method_configs.constraints.append(nonpositive_constraint)
                elif self.data_set == bc.DATA_SYNTHETIC_SLANT:
                    method_configs.constraints.append(nonpositive_constraint)
                elif self.data_set == bc.DATA_SYNTHETIC_STEP_LINEAR_TRANSFER:
                    method_configs.constraints.append(nonpositive_constraint)
                elif self.data_set == bc.DATA_SYNTHETIC_DELTA_LINEAR:
                    method_configs.constraints.append(nonpositive_constraint)
                elif self.data_set == bc.DATA_SYNTHETIC_CROSS:
                    method_configs.constraints.append(bound_4)
                else:
                    assert False

        method_configs.use_validation = use_validation
        fuse_log_reg = transfer_methods.FuseTransfer(method_configs)
        fuse_nw = transfer_methods.FuseTransfer(method_configs)
        fuse_nw.base_learner = method.NadarayaWatsonMethod(method_configs)
        target_nw = transfer_methods.TargetTranfer(method_configs)
        target_nw.base_learner = method.NadarayaWatsonMethod(method_configs)
        target_ridge = transfer_methods.TargetTranfer(method_configs)
        target_ridge.base_learner = method.SKLRidgeRegression(method_configs)
        nw = method.NadarayaWatsonMethod(method_configs)
        log_reg = method.SKLLogisticRegression(method_configs)
        target_knn = transfer_methods.TargetTranfer(method_configs)
        target_knn.base_learner = method.SKLKNN(method_configs)
        scipy_ridge_reg = scipy_opt_methods.ScipyOptRidgeRegression(
            method_configs)
        model_transfer = methods.transfer_methods.ModelSelectionTransfer(
            method_configs)
        hyp_transfer = methods.local_transfer_methods.HypothesisTransfer(
            method_configs)
        iwl_transfer = methods.local_transfer_methods.IWTLTransfer(
            method_configs)
        sms_transfer = methods.local_transfer_methods.SMSTransfer(
            method_configs)
        local_transfer_delta = methods.local_transfer_methods.LocalTransferDelta(
            method_configs)
        dt_sms = methods.local_transfer_methods.LocalTransferDeltaSMS(
            method_configs)
        cov_shift = transfer_methods.ReweightedTransfer(method_configs)
        offset_transfer = methods.local_transfer_methods.OffsetTransfer(
            method_configs)
        stacked_transfer = methods.transfer_methods.StackingTransfer(
            method_configs)

        gaussian_process = methods.method.SKLGaussianProcess(method_configs)

        from methods import semisupervised
        from methods import preprocessing
        ssl_regression = semisupervised.SemisupervisedRegressionMethod(
            method_configs)
        ssl_regression.preprocessor = preprocessing.TargetOnlyPreprocessor()

        if use_delta_new:
            self.learner = methods.local_transfer_methods.LocalTransferDeltaNew(
                method_configs)
        else:
            #self.learner = target_nw
            #self.learner = offset_transfer
            self.learner = stacked_transfer
Example #17
def vis_data():
    pc = configs_lib.ProjectConfigs(bc.DATA_KC_HOUSING)
    #pc = configs_lib.ProjectConfigs(bc.DATA_CLIMATE_MONTH)
    pc.active_method = configs_lib.ACTIVE_CLUSTER_PURITY
    #pc.active_method = configs_lib.ACTIVE_CLUSTER
    #pc.active_method = configs_lib.ACTIVE_RANDOM
    pc.fixed_sigma_x = False
    pc.no_spectral_kernel = False
    pc.no_f_x = False
    pc.active_items_per_iteration = 10
    use_oracle_target = False

    main_configs = configs_lib.MainConfigs(pc)
    data_file = '../' + main_configs.data_file
    data_and_splits = helper_functions.load_object(data_file)
    data = data_and_splits.get_split(0, 0)
    is_target = data.data_set_ids == main_configs.target_labels[0]
    is_source = data.data_set_ids == main_configs.source_labels[0]
    data.reveal_labels(is_source.nonzero()[0])
    data.type = data_lib.TYPE_TARGET*np.ones(data.n)
    data.type[is_source] = data_lib.TYPE_SOURCE
    x = data.x
    y = data.y


    learner = main_configs.learner
    learner.use_oracle_target = use_oracle_target
    if pc.active_method == configs_lib.ACTIVE_CLUSTER_PURITY and False:
        learner.instance_selector.cv_params['sigma_y'] = [1]
    print('Experiment: ' + learner.prefix)
    results = learner.train_and_test(data)
    queried_data = results.results_list[0].queried_idx
    selected_data = data.get_subset(queried_data)

    fig = plt.figure(0, figsize=(12, 5))
    plt.title('TODO')
    plt.axis('off')

    x1 = data.x[:, 0]
    x1_sel = selected_data.x[:, 0]
    if data.p == 1:
        x2 = data.true_y
        x2_sel = selected_data.true_y
    else:
        assert data.p == 2
        x2 = data.x[:, 1]
        x2_sel = selected_data.x[:, 1]

    plt.subplot(1, 3, 1)
    plt.scatter(x1[is_target], x2[is_target], c='b', s=10)
    plt.scatter(x1_sel, x2_sel, c='r', s=20)


    if data.p == 2:
        plt.subplot(1, 3, 2)

        target_data = data.get_subset(is_target)
        target_data.y = target_data.true_y.copy()

        nw_method = method.NadarayaWatsonMethod()
        y_pred = nw_method.train_and_test(target_data).prediction.y
        means, _, _, _ = binned_statistic_2d(target_data.x[:, 0], target_data.x[:, 1], y_pred, bins=30)
        #means = means[:, ::-1]
        #means = means[::-1, :]
        means[~np.isfinite(means)] = -1
        plt.pcolormesh(means, cmap='RdBu')
        plt.colorbar()

        plt.subplot(1, 3, 3)

        source_data = data.get_subset(is_source)
        source_data.y = source_data.true_y.copy()

        nw_method = method.NadarayaWatsonMethod()
        y_pred = nw_method.train_and_test(source_data).prediction.y
        means, _, _, _ = binned_statistic_2d(source_data.x[:, 0], source_data.x[:, 1], y_pred, bins=30)
        # means = means[:, ::-1]
        # means = means[::-1, :]
        means[~np.isfinite(means)] = -1
        plt.pcolormesh(means, cmap='RdBu')
        plt.colorbar()

    plt.show()
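The two heat-map panels in vis_data combine scipy's binned_statistic_2d with pcolormesh. A minimal, self-contained sketch of that step on synthetic data, assuming only NumPy, SciPy, and matplotlib:

import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import binned_statistic_2d

rng = np.random.RandomState(0)
x0 = rng.uniform(0, 1, 2000)
x1 = rng.uniform(0, 1, 2000)
y_pred = np.sin(2 * np.pi * x0) * np.cos(2 * np.pi * x1)  # stand-in for the smoothed predictions

means, _, _, _ = binned_statistic_2d(x0, x1, y_pred, bins=30)  # mean prediction per 2-D bin
means[~np.isfinite(means)] = -1   # empty bins -> sentinel value, as in Example #17
plt.pcolormesh(means, cmap='RdBu')
plt.colorbar()
plt.show()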
Example #18
 def __init__(self, configs=base_configs.MethodConfigs()):
     super(PipelineMethod, self).__init__(configs)
     self.preprocessing_pipeline = []
     self.base_learner = method.NadarayaWatsonMethod(deepcopy(configs))
     self.in_train_and_test = False