Example #1
0
# Map 0/1 labels to ±1, the convention hinge loss expects.
Y_train = image_data_sets.train.labels * 2 - 1
# NOTE(review): L_train/L_test and Y_test are defined outside this excerpt —
# presumably precomputed (RBF) feature matrices and the matching test labels;
# confirm they are aligned with the labels taken from image_data_sets.
train = DataSet(L_train, Y_train)
test = DataSet(L_test, Y_test)

data_sets = base.Datasets(train=train, validation=None, test=test)
input_dim = data_sets.train.x.shape[1]

# Train with hinge
# temp=0: no smoothing, i.e. the exact hinge loss.
rbf_model = SmoothHinge(
    temp=0,
    use_bias=use_bias,
    input_dim=input_dim,
    weight_decay=weight_decay,
    num_classes=num_classes,
    batch_size=batch_size,
    data_sets=data_sets,
    initial_learning_rate=initial_learning_rate,
    keep_probs=keep_probs,
    decay_epochs=decay_epochs,
    mini_batch=False,
    train_dir='output',
    log_dir='log',
    # model_name='dogfish_rbf_hinge_t-0')
    model_name='catpanda_rbf_hinge_t-0')

rbf_model.train()
# First element of the fetched parameter list is the weight vector W.
hinge_W = rbf_model.sess.run(rbf_model.params)[0]

# Then load weights into smoothed version
# TF1 idiom: clear the default graph before constructing a second model.
tf.reset_default_graph()
rbf_model = SmoothHinge(
    temp=0.001,
Example #2
0
        # Split the requested number of copies between the two classes;
        # any odd remainder goes to the negative class.
        num_pos_copies = int(total_copies / 2)
        num_neg_copies = total_copies - num_pos_copies

        weight_decay = 0.09

        # TF1 idiom: clear the default graph before building a new model.
        tf.reset_default_graph()

        model = SmoothHinge(
            input_dim=input_dim,
            temp=temp,
            weight_decay=weight_decay,
            use_bias=True,
            num_classes=num_classes,
            batch_size=batch_size,
            data_sets=data_sets,
            initial_learning_rate=initial_learning_rate,
            decay_epochs=None,
            mini_batch=False,
            train_dir=output_root,
            log_dir='log',
            # Encode the full experimental configuration into the model name
            # so checkpoints/logs from different runs do not collide.
            model_name=
            'smooth_hinge_%s_sphere-%s_slab-%s_start-copy_lflip-%s_step-%s_t-%s_eps-%s_wd-%s_rs-%s'
            % (dataset_name, project_sphere, project_slab, label_flip,
               step_size, temp, epsilon, weight_decay, random_seed))

        # Duplicate randomly chosen training points of the positive class
        # (call continues beyond this excerpt).
        X_modified, Y_modified = data.copy_random_points(
            X_train,
            Y_train,
            target_class=1,
            num_copies=num_pos_copies,
            random_seed=random_seed,
Example #3
0
num_temps = len(temps)

# NOTE(review): 784 suggests 28x28 flattened inputs (MNIST-style) — confirm.
num_params = 784

# Get weights from hinge

# TF1 idiom: clear the default graph before building a new model.
tf.reset_default_graph()

# temp=0: no smoothing, i.e. the exact hinge loss.
temp = 0
model = SmoothHinge(use_bias=use_bias,
                    temp=temp,
                    input_dim=input_dim,
                    weight_decay=weight_decay,
                    num_classes=num_classes,
                    batch_size=batch_size,
                    data_sets=data_sets,
                    initial_learning_rate=initial_learning_rate,
                    keep_probs=keep_probs,
                    decay_epochs=decay_epochs,
                    mini_batch=False,
                    train_dir='output',
                    log_dir='log',
                    model_name='smooth_hinge_17_t-%s' % temp)

model.train()
# Reload the checkpoint saved at iteration 0.
model.load_checkpoint(iter_to_load=0)
# First element of the fetched parameter list is the weight vector W.
hinge_W = model.sess.run(model.params)[0]

# Per-example margins evaluated on the full test set.
model_margins = model.sess.run(model.margin,
                               feed_dict=model.all_test_feed_dict)
# Look at np.argsort(model_margins)[:10] to pick a test example
Example #4
0
    def init_model(self):
        """
        Initialize a tf model based on model_name and datasets.

        Dispatches on self.model_name (and, for the income branch, on
        self.task), constructs the corresponding model with hard-coded
        per-branch hyperparameters, and stores it in self.model.
        """

        # TODO: make it easier to use non-default hyperparams?

        # we can always infer # classes of from the training data
        num_classes = len(set(self.data_sets.train.labels))
        model_name = self.task + '_' + self.model_name
        print('Num classes', num_classes)
        if self.model_name == 'binary_logistic':
            #num_classes = 2
            assert num_classes == 2
            weight_decay = 0.0001
            batch_size = 100
            initial_learning_rate = 0.001
            keep_probs = None
            decay_epochs = [1000, 10000]
            max_lbfgs_iter = 1000

            self.model = BinaryLogisticRegressionWithLBFGS(
                input_dim=self.input_dim,
                weight_decay=weight_decay,
                max_lbfgs_iter=max_lbfgs_iter,
                num_classes=num_classes,
                batch_size=batch_size,
                data_sets=self.data_sets,
                initial_learning_rate=initial_learning_rate,
                keep_probs=keep_probs,
                decay_epochs=decay_epochs,
                mini_batch=False,
                train_dir='output',
                log_dir='log',
                model_name=model_name
            )
        elif self.model_name == 'multi_logistic':
            #num_classes = 10
            weight_decay = 0.01
            batch_size = 1400
            initial_learning_rate = 0.001 
            keep_probs = None
            max_lbfgs_iter = 1000
            decay_epochs = [1000, 10000]

            self.model = LogisticRegressionWithLBFGS(
                input_dim=self.input_dim,
                weight_decay=weight_decay,
                max_lbfgs_iter=max_lbfgs_iter,
                num_classes=num_classes, 
                batch_size=batch_size,
                data_sets=self.data_sets,
                initial_learning_rate=initial_learning_rate,
                keep_probs=keep_probs,
                decay_epochs=decay_epochs,
                mini_batch=False,
                train_dir='output',
                log_dir='log',
                model_name=model_name)

        elif self.model_name == 'cnn':
            assert num_classes == 10
            weight_decay = 0.001
            batch_size = 500

            initial_learning_rate = 0.0001
            decay_epochs = [10000, 20000]
            hidden1_units = 8
            hidden2_units = 8
            hidden3_units = 8
            conv_patch_size = 3
            # NOTE(review): keep_probs is assigned here but never passed to
            # All_CNN_C below — dead local or a missing constructor argument?
            keep_probs = [1.0, 1.0]

            self.model = All_CNN_C(
                input_side=self.input_side,
                input_channels=self.input_channels,
                conv_patch_size=conv_patch_size,
                hidden1_units=hidden1_units,
                hidden2_units=hidden2_units,
                hidden3_units=hidden3_units,
                weight_decay=weight_decay,
                num_classes=num_classes,
                batch_size=batch_size,
                data_sets=self.data_sets,
                initial_learning_rate=initial_learning_rate,
                damping=1e-2,
                decay_epochs=decay_epochs,
                mini_batch=True,
                train_dir='output',
                log_dir='log',
                model_name=model_name
            )
        # NOTE(review): this branch dispatches on self.task while every
        # sibling dispatches on self.model_name, so it is only reachable when
        # model_name matches none of the earlier branches — confirm the
        # intended precedence.
        elif self.task == 'income':
            num_classes = 2
            input_dim = self.data_sets.train.x.shape[1]
            weight_decay = 0.0001
            # weight_decay = 1000 / len(lr_data_sets.train.labels)
            batch_size = 10
            initial_learning_rate = 0.001
            keep_probs = None
            decay_epochs = [1000, 10000]
            max_lbfgs_iter = 1000

            self.model = BinaryLogisticRegressionWithLBFGS(
                input_dim=input_dim,
                weight_decay=weight_decay,
                max_lbfgs_iter=max_lbfgs_iter,
                num_classes=num_classes,
                batch_size=batch_size,
                data_sets=self.data_sets,
                initial_learning_rate=initial_learning_rate,
                keep_probs=keep_probs,
                decay_epochs=decay_epochs,
                mini_batch=False,
                train_dir='output',
                log_dir='log',
                # NOTE(review): hardcoded name, unlike the other branches
                # which use the computed task-prefixed model_name.
                model_name='income_logreg'
            )
        elif self.model_name == 'hinge_svm':
            #num_classes = 2
            weight_decay = 0.01
            use_bias = False
            batch_size = 100
            initial_learning_rate = 0.001 
            keep_probs = None
            decay_epochs = [1000, 10000]

            # NOTE(review): temps/num_temps/num_params are never used in this
            # branch — leftover from the experiment scripts?
            temps = [0, 0.001, 0.1]
            num_temps = len(temps)

            num_params = 784

            # temp=0: no smoothing, i.e. the exact hinge loss.
            temp = 0
            self.model = SmoothHinge(
                use_bias=use_bias,
                temp=temp,
                input_dim=self.input_dim,
                weight_decay=weight_decay,
                num_classes=num_classes,
                batch_size=batch_size,
                data_sets=self.data_sets,
                initial_learning_rate=initial_learning_rate,
                keep_probs=keep_probs,
                decay_epochs=decay_epochs,
                mini_batch=False,
                train_dir='output',
                log_dir='log',
                model_name='smooth_hinge_17_t-%s' % temp)