Example #1
 # Method of pymc3.variational.opvi.ObjectiveFunction; assumes the module-level
 # imports used there: theano, pymc3 as pm, and adagrad_window (from pymc3.updates).
 def add_test_updates(self, updates, tf_n_mc=None, test_optimizer=adagrad_window,
                      more_tf_params=None, more_replacements=None,
                      total_grad_norm_constraint=None):
     # Resolve mutable defaults.
     if more_tf_params is None:
         more_tf_params = []
     if more_replacements is None:
         more_replacements = dict()
     # Build the symbolic test-function target from tf_n_mc Monte Carlo samples,
     # then apply any user-supplied variable replacements.
     tf_target = self(tf_n_mc, more_tf_params=more_tf_params)
     tf_target = theano.clone(tf_target, more_replacements, strict=False)
     # Gradients w.r.t. the test-function parameters the optimizer updates below;
     # the gradient list must line up one-to-one with that parameter list.
     grads = pm.updates.get_or_compute_grads(tf_target, self.test_params + more_tf_params)
     # Optionally clip gradients by their total norm.
     if total_grad_norm_constraint is not None:
         grads = pm.total_norm_constraint(grads, total_grad_norm_constraint)
     # Merge the optimizer's update dict into the shared updates container.
     updates.update(test_optimizer(grads, self.test_params + more_tf_params))
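
This method comes from PyMC3's `ObjectiveFunction` class in `pymc3.variational.opvi`: it appends the symbolic updates that train the test-function parameters of an operator that carries a trainable test function. Below is a minimal driving sketch; the `obj` instance, the optimizer choice, and the hyperparameter values are illustrative assumptions rather than part of the original code, and `ObjectiveUpdates` is the `OrderedUpdates` subclass that `opvi` uses as its `updates` container.

 # Minimal usage sketch (assumptions: `obj` is an existing ObjectiveFunction whose
 # operator has a trainable test function; optimizer and hyperparameters are illustrative).
 import theano
 import pymc3 as pm
 from pymc3.variational.opvi import ObjectiveUpdates

 updates = ObjectiveUpdates()                    # OrderedUpdates subclass used by opvi
 obj.add_test_updates(
     updates,
     tf_n_mc=25,                                 # Monte Carlo samples for the test-function target
     test_optimizer=pm.updates.adagrad_window,   # any Lasagne-style optimizer(grads, params)
     total_grad_norm_constraint=10.0,            # clip gradients by their total norm
 )
 train_test_fn = theano.function([], [], updates=updates)
 train_test_fn()                                 # one update step for the test-function parameters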
Example #2
 def add_obj_updates(self, updates, obj_n_mc=None, obj_optimizer=adagrad_window,
                     more_obj_params=None, more_replacements=None,
                     total_grad_norm_constraint=None):
     # Resolve mutable defaults.
     if more_obj_params is None:
         more_obj_params = []
     if more_replacements is None:
         more_replacements = dict()
     # Build the symbolic objective from obj_n_mc Monte Carlo samples, then apply
     # any user-supplied variable replacements.
     obj_target = self(obj_n_mc, more_obj_params=more_obj_params)
     obj_target = theano.clone(obj_target, more_replacements, strict=False)
     # Gradients of the objective w.r.t. the parameters being optimized.
     grads = pm.updates.get_or_compute_grads(obj_target, self.obj_params + more_obj_params)
     # Optionally clip gradients by their total norm.
     if total_grad_norm_constraint is not None:
         grads = pm.total_norm_constraint(grads, total_grad_norm_constraint)
     # Merge the optimizer's update dict into the shared updates container.
     updates.update(obj_optimizer(grads, self.obj_params + more_obj_params))
     # Expose the symbolic loss for monitoring when the operator provides one.
     if self.op.RETURNS_LOSS:
         updates.loss = obj_target
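
The objective-parameter variant is symmetric, except that the symbolic loss is also stashed on the updates container (`updates.loss`) whenever the operator reports `RETURNS_LOSS`, so a compiled step function can report the loss while training. The sketch below shows this under the same assumptions as before (`obj` is an existing `ObjectiveFunction`; the optimizer and hyperparameters are illustrative); in PyMC3 itself these two `add_*_updates` methods are normally driven together through the class's `updates()` helper.

 # Sketch: fit the main objective and monitor the loss (same assumptions as above).
 import theano
 import pymc3 as pm
 from pymc3.variational.opvi import ObjectiveUpdates

 updates = ObjectiveUpdates()
 obj.add_obj_updates(
     updates,
     obj_n_mc=10,                                # Monte Carlo samples for the objective
     obj_optimizer=pm.updates.adagrad_window,    # any Lasagne-style optimizer(grads, params)
     total_grad_norm_constraint=10.0,            # clip gradients by their total norm
 )
 # updates.loss was set above only if the operator returns a loss node.
 outputs = [updates.loss] if updates.loss is not None else []
 step_fn = theano.function([], outputs, updates=updates)
 for _ in range(100):                            # simple training loop
     step_fn()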