Example #1
 def W_optimizer(self):
     """
     Get an optimizer that exposes a 'minimize' method for running the optimization.
     :return:
         An ExternalOptimizerInterface
     """
     return ScipyOptimizerInterface(self.Wb_loss, var_list=[self.W, self.b])
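All of these examples use the TF 1.x `ScipyOptimizerInterface` from `tf.contrib.opt`, which wraps `scipy.optimize.minimize` (L-BFGS-B by default). A minimal, self-contained sketch of the pattern; the quadratic loss and the variables `W` and `b` here are illustrative stand-ins for the class's `Wb_loss`, `self.W`, and `self.b`:

import tensorflow as tf
from tensorflow.contrib.opt import ScipyOptimizerInterface

# Toy stand-ins; the loss is minimised at W = [1, 1], b = 1.
W = tf.Variable([3.0, -2.0])
b = tf.Variable(0.0)
loss = tf.reduce_sum(tf.square(W - 1.0)) + tf.square(b - 1.0)

# var_list restricts the optimization to the listed variables.
optimizer = ScipyOptimizerInterface(loss, var_list=[W, b])

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    optimizer.minimize(sess)    # runs SciPy's L-BFGS-B to convergence
    print(sess.run([W, b]))     # ~[array([1., 1.]), 1.0]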
Example #2
 def X_optimizer(self):
     """
     Get an optimizer that exposes a 'minimize' method for running the optimization.
     :return:
         An ExternalOptimizerInterface
     """
     if self.prev_layer.activation == 'relu':
         return ScipyOptimizerInterface(
             self.X_loss,
             var_list=[self.X],
             var_to_bounds={
                 # keep X non-negative, matching the image of the ReLU activation
                 self.X: (0, np.inf)
             })  #, options={'ftol': 2e-15, 'gtol': 1e-11, 'maxls': 100})
     else:
         return ScipyOptimizerInterface(self.X_loss, var_list=[self.X])
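The `var_to_bounds` argument passes elementwise box constraints through to L-BFGS-B; here it keeps `X` in the non-negative range produced by the preceding ReLU. A sketch of the same constraint on a toy variable (the loss is illustrative):

import numpy as np
import tensorflow as tf
from tensorflow.contrib.opt import ScipyOptimizerInterface

X = tf.Variable([-1.0, 2.0])
loss = tf.reduce_sum(tf.square(X + 1.0))   # unconstrained optimum is -1

# L-BFGS-B restricts each element of X to the box [0, inf).
optimizer = ScipyOptimizerInterface(loss, var_to_bounds={X: (0, np.inf)})

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    optimizer.minimize(sess)
    print(sess.run(X))                     # ~[0., 0.] rather than [-1., -1.]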
Example #3
 def W_optimizer(self):
     """
     Get an optimizer that exposes a 'minimize' method for running the optimization.
     :return:
         An ExternalOptimizerInterface
     """
     alpha = self.rhod if self.rhod > 0.0 else self.rho
     if self.is_last_layer and self.loss == 'cross_entropy':
         return ScipyOptimizerInterface(self.Wb_loss, var_list=[self.W])
         #return RidgeInterface(self.W, self.b, self.X, self.X_next, W_offset=self.W_0, alpha=alpha, normalize=False)
     elif self.is_last_layer and self.loss == 'none':
         return RidgeInterface(self.W,
                               self.b,
                               self.X,
                               self.X_next,
                               W_offset=self.W_0,
                               alpha=alpha,
                               normalize=False)
     else:
         # optimize both W and b
         #return ScipyOptimizerInterface(self.Wb_loss, var_list=[self.W, self.b])
         return RidgeInterface(self.W,
                               self.b,
                               self.X,
                               self.X_next,
                               W_offset=self.W_0,
                               alpha=alpha / self.lmbda,
                               normalize=False)
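`RidgeInterface` is project-specific, not part of TensorFlow; judging by its arguments, it solves the layerwise least-squares problem in closed form instead of calling SciPy. Assuming it minimises ||XW + b - X_next||^2 + alpha*||W - W_0||^2, the closed-form update for W would look like this NumPy sketch (function and variable names hypothetical):

import numpy as np

def ridge_with_offset(X, Y, W_0, alpha):
    """Closed form of min_W ||X @ W - Y||**2 + alpha * ||W - W_0||**2."""
    d = X.shape[1]
    # Normal equations: (X^T X + alpha I) W = X^T Y + alpha W_0
    return np.linalg.solve(X.T @ X + alpha * np.eye(d), X.T @ Y + alpha * W_0)

# Hypothetical usage mirroring RidgeInterface(W, b, X, X_next, W_offset=W_0, alpha=alpha)
X = np.random.randn(100, 8)
W_true = np.random.randn(8, 4)
W_hat = ridge_with_offset(X, X @ W_true, np.zeros((8, 4)), alpha=1e-6)
print(np.allclose(W_hat, W_true, atol=1e-4))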
Example #4
 def W_optimizer(self):
     """
     Get an optimizer that exposes a 'minimize' method for running the optimization.
     :return:
         An ExternalOptimizerInterface
     """
     return ScipyOptimizerInterface(
         self.Wb_loss, var_list=[self.W, self.b]
     )  #, options={'ftol': 2e-15, 'gtol': 1e-15, 'maxls': 100, 'eps': 1e-12})
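The commented-out `options` dict would be forwarded verbatim to `scipy.optimize.minimize`: for the default L-BFGS-B method, `ftol` and `gtol` tighten the stopping tolerances, `maxls` raises the maximum number of line-search steps, and `eps` sets the finite-difference step size.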
Example #5
def _optimize_zinb(mu, dropout, theta=None):
    pred, a, b, t = _tf_zinb_zero(mu, theta)
    #loss = tf.reduce_mean(tf.abs(tf_logit(pred) - tf_logit(dropout)))
    loss = tf.losses.log_loss(labels=dropout.astype('float32'),
                              predictions=pred)

    optimizer = ScipyOptimizerInterface(loss, options={'maxiter': 100})

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        optimizer.minimize(sess)
        ret_a = sess.run(a)
        ret_b = sess.run(b)
        if theta is None:
            ret_t = sess.run(t)
        else:
            ret_t = t

    return ret_a, ret_b, ret_t
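`_tf_zinb_zero` builds the ZINB zero-probability curve and is defined elsewhere; what this example shows is the generic fit-then-read-back pattern. A self-contained sketch of that pattern, with an illustrative logistic model and synthetic data standing in for the ZINB curve (the parameter names `a` and `b` are assumptions, not the originals):

import numpy as np
import tensorflow as tf
from tensorflow.contrib.opt import ScipyOptimizerInterface

mu = np.linspace(0.1, 10.0, 50).astype('float32')
dropout = (1.0 / (1.0 + mu)).astype('float32')   # synthetic dropout rates

a = tf.Variable(1.0)
b = tf.Variable(0.0)
pred = tf.sigmoid(a * tf.log(mu) + b)            # illustrative logistic curve

loss = tf.losses.log_loss(labels=dropout, predictions=pred)
optimizer = ScipyOptimizerInterface(loss, options={'maxiter': 100})

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    optimizer.minimize(sess)
    print(sess.run([a, b]))                      # fitted curve parameters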
Example #6
    def fit(self, sess, data, feed_dict, maxiter):
        pred = self.get_pred(data)
        loss, pred_normed, labels_normed = self.get_loss(pred, data['labels'])
        optimizer = ScipyOptimizerInterface(loss, options={'maxiter': maxiter})
        self.losses = []

        def append_loss(loss):
            self.losses.append(loss)

        optimizer.minimize(sess,
                           feed_dict=feed_dict,
                           loss_callback=append_loss,
                           fetches=[loss])
        for name, var in self.vars.items():
            self.vars_evals[name] = sess.run(var)

        self.eval_pred, self.eval_pred_normed, self.eval_label, self.eval_label_normed = sess.run(
            [pred, pred_normed, data['labels'], labels_normed],
            feed_dict=feed_dict)
        self.r2 = stats.linregress(self.eval_pred_normed.flatten(),
                                   self.eval_label_normed.flatten())[2]**2
        self.final_loss = sess.run(loss, feed_dict=feed_dict)
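`minimize` evaluates `fetches` at every loss evaluation and passes the results to `loss_callback`, which is how `fit` records its loss trace without extra `sess.run` calls. The callback mechanism in isolation, on a toy loss:

import tensorflow as tf
from tensorflow.contrib.opt import ScipyOptimizerInterface

x = tf.Variable(5.0)
loss = tf.square(x)
optimizer = ScipyOptimizerInterface(loss, options={'maxiter': 25})

losses = []
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # The callback receives one positional argument per entry in `fetches`.
    optimizer.minimize(sess, fetches=[loss],
                       loss_callback=lambda l: losses.append(l))

print('loss went from %.3f to %.3g in %d evaluations'
      % (losses[0], losses[-1], len(losses)))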
Example #7
    def _init_optimizers(self):
        '''
        Initialize optimizers.

        By default, L-BFGS-B and Adam are initialized.
        '''

        self.optimizer_BFGS = ScipyOptimizerInterface(
            self.loss,
            method='L-BFGS-B',
            options={
                'maxiter': 50000,
                'maxfun': 50000,
                'maxcor': 50,
                'maxls': 50,
                'gtol': 1.0 * np.finfo(float).eps,
                'ftol': 1.0 * np.finfo(float).eps
            })

        if self.learning_rate is not None:
            self.optimizer_Adam = tf.train.AdamOptimizer(
                self.learning_rate).minimize(self.loss)
        else:
            self.optimizer_Adam = tf.train.AdamOptimizer(
                **self.optimizer_kwargs).minimize(self.loss)
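A common way to use this pair is to warm up with first-order Adam steps and then hand the problem to L-BFGS-B for high-precision refinement; the machine-epsilon `ftol`/`gtol` values above effectively let L-BFGS-B run until it can make no further progress. A self-contained sketch of that two-phase schedule (the toy loss and step counts are illustrative):

import numpy as np
import tensorflow as tf
from tensorflow.contrib.opt import ScipyOptimizerInterface

x = tf.Variable(tf.random_normal([2]))
loss = tf.reduce_sum(tf.square(x - 3.0))

adam_step = tf.train.AdamOptimizer(1e-2).minimize(loss)
bfgs = ScipyOptimizerInterface(loss, method='L-BFGS-B',
                               options={'maxiter': 50000,
                                        'ftol': np.finfo(float).eps})

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for _ in range(500):           # phase 1: Adam warm-up
        sess.run(adam_step)
    bfgs.minimize(sess)            # phase 2: L-BFGS-B refinement
    print(sess.run(loss))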
Example #8
    # Pastiche image variable, initialised to uniform noise in [0, 255]
    # (the variable name passed to tf.get_variable is assumed; the snippet's
    # first line is missing)
    pastiche_tensor = tf.get_variable("pastiche",
                                      dtype=tf.float32, shape=IMAGE_DIM + (3,),
                                      initializer=tf.initializers.random_uniform(0, 255))
    input_tensor = construct_input("./data/Tuebingen_Neckarfront.jpg", 
                                   "./data/stary_night.jpg",
                                   pastiche_tensor)

    # Load VGG16 model for feature extraction
    vgg_model = VGG16(input_tensor=input_tensor, weights='imagenet',
                      include_top=False)
    layers = get_vgg_layers(vgg_model)

    # Build the loss computation graph
    loss_op = build_loss(input_tensor, layers)

    # Minimise the loss with the L-BFGS-B optimiser; maxfun=20 caps each
    # minimize() call at 20 function evaluations, so the loop below makes
    # incremental progress it can report
    optimizer = ScipyOptimizerInterface(loss_op, options={'maxfun': 20},
                                        var_list=[pastiche_tensor])
    
    # Perform style transfer by optimising the loss
    with tf.Session() as sess:
        # Init variables
        sess.run(tf.global_variables_initializer())
        
        n_iterations = 10
        for i in range(n_iterations):
            print('Iteration:', i)
            start_time = time.time()

            # Optimise loss using optimizer
            optimizer.minimize(sess)

            # Display progress
            print('Iteration took {:.2f}s'.format(time.time() - start_time))