Example #1
 def set_param_values(self, flattened_params, **tags):
     debug = tags.pop("debug", False)
     # Split the flat vector back into one array per parameter.
     param_values = unflatten_tensors(flattened_params,
                                      self.get_param_shapes(**tags))
     ops = []
     feed_dict = dict()
     for param, dtype, value in zip(self.get_params(**tags),
                                    self.get_param_dtypes(**tags),
                                    param_values):
         if param not in self._cached_assign_ops:
             assign_placeholder = tf.placeholder(
                 dtype=param.dtype.base_dtype)
             assign_op = tf.assign(param, assign_placeholder)
             self._cached_assign_ops[param] = assign_op
             self._cached_assign_placeholders[param] = assign_placeholder
         ops.append(self._cached_assign_ops[param])
         feed_dict[self._cached_assign_placeholders[param]] = value.astype(
             dtype)
         if debug:
             print("setting value of %s" % param.name)
     tf.get_default_session().run(ops, feed_dict=feed_dict)
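Example #1 creates each placeholder and `tf.assign` op only once and caches it, so repeated calls reuse the same graph ops instead of growing the graph on every update (Examples #2 and #8 below follow the same pattern). A minimal, self-contained sketch of that caching idea with plain TF1-style variables; all names here are illustrative, not rllab's API:

import numpy as np
import tensorflow as tf  # TF1-style graph API, as in the examples

_cached_assign_ops = {}
_cached_placeholders = {}

def set_values(variables, arrays, sess):
    """Assign numpy arrays to variables, reusing cached assign ops."""
    ops, feed = [], {}
    for var, arr in zip(variables, arrays):
        if var not in _cached_assign_ops:
            ph = tf.placeholder(dtype=var.dtype.base_dtype)
            _cached_placeholders[var] = ph
            _cached_assign_ops[var] = tf.assign(var, ph)
        ops.append(_cached_assign_ops[var])
        feed[_cached_placeholders[var]] = arr
    sess.run(ops, feed_dict=feed)

w = tf.Variable(tf.zeros([2, 3]))
b = tf.Variable(tf.zeros([3]))
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    set_values([w, b], [np.ones((2, 3)), np.ones(3)], sess)
    set_values([w, b], [np.zeros((2, 3)), np.zeros(3)], sess)  # reuses the cached ops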
Example #2
    def set_param_values(self, flattened_params, all_params=False, **tags):
        debug = tags.pop("debug", False)
        # Split the flat vector back into one array per parameter.
        param_values = unflatten_tensors(
            flattened_params, self.get_param_shapes(all_params, **tags))

        ops = []
        feed_dict = dict()
        for param, dtype, value in zip(
                self.get_params(all_params, **tags),
                self.get_param_dtypes(all_params, **tags), param_values):
            if param not in self._cached_assign_ops:
                assign_placeholder = tf.placeholder(
                    dtype=param.dtype.base_dtype)
                assign_op = tf.assign(param, assign_placeholder)
                self._cached_assign_ops[param] = assign_op
                self._cached_assign_placeholders[param] = assign_placeholder
            ops.append(self._cached_assign_ops[param])
            feed_dict[self._cached_assign_placeholders[param]] = value.astype(
                dtype)
            if debug:
                print("setting value of %s" % param.name)
        tf.get_default_session().run(ops, feed_dict=feed_dict)
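Every variant in this listing depends on `unflatten_tensors` to invert the flattening. A plausible minimal implementation, assuming numpy inputs (the real helper lives in rllab's `misc.tensor_utils`; this sketch only approximates it):

import numpy as np

def unflatten_tensors(flattened, shapes):
    """Split a 1-D array into arrays with the given shapes, in order."""
    sizes = [int(np.prod(shape)) for shape in shapes]
    splits = np.split(np.asarray(flattened), np.cumsum(sizes)[:-1])
    return [chunk.reshape(shape) for chunk, shape in zip(splits, shapes)]

# Round trip: flatten two arrays and recover them.
a, b = np.arange(6).reshape(2, 3), np.arange(4.0)
flat = np.concatenate([a.ravel(), b.ravel()])
a2, b2 = unflatten_tensors(flat, [a.shape, b.shape])
assert np.array_equal(a, a2) and np.array_equal(b, b2)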
Example #3
 def set_param_values_transfer(self, flattened_params, **tags):
     debug = tags.pop("debug", False)
     param_values = unflatten_tensors(flattened_params,
                                      self.get_param_shapes(**tags))
     for param, dtype, value in zip(self.get_params(**tags),
                                    self.get_param_dtypes(**tags),
                                    param_values):
         # Leave the log-std parameter untouched when transferring weights.
         if param.name != "output_log_std.param":
             param.set_value(value.astype(dtype))
Example #4
 def set_param_values(self, flattened_params, **tags):
     debug = tags.pop("debug", False)
     param_values = unflatten_tensors(flattened_params,
                                      self.get_param_shapes(**tags))
     for param, dtype, value in zip(self.get_params(**tags),
                                    self.get_param_dtypes(**tags),
                                    param_values):
         param.set_value(value.astype(dtype))
         if debug:
             print("setting value of %s" % param.name)
Example #5
 def set_param_values(self, flattened_params, **tags):
     debug = tags.pop("debug", False)
     param_values = unflatten_tensors(
         flattened_params, self.get_param_shapes(**tags))
     for param, dtype, value in zip(
             self.get_params(**tags),
             self.get_param_dtypes(**tags),
             param_values):
         param.set_value(value.astype(dtype))
         if debug:
             print("setting value of %s" % param.name)
Example #6
 def set_param_values(self, flattened_params, **tags):
     debug = tags.pop("debug", False)
     param_values = unflatten_tensors(flattened_params,
                                      self.get_param_shapes(**tags))
     for param, dtype, value in zip(self.get_params(**tags),
                                    self.get_param_dtypes(**tags),
                                    param_values):
         if param.name == "leak_rate":
             # Clamp the leak rate into [0, 1] before assignment.
             value = np.minimum(1.0, np.maximum(0.0, value))
         param.set_value(value.astype(dtype))
         if debug:
             print("setting value of %s" % param.name)
Example #7
 def set_param_values(self, flattened_params, **tags):
     debug = tags.pop("debug", False)
     param_values = unflatten_tensors(flattened_params,
                                      self.get_param_shapes(**tags))
     for param, dtype, value in zip(self.get_params(**tags),
                                    self.get_param_dtypes(**tags),
                                    param_values):
         if not self.reparam and param.name == "tc":
             # Floor the time constants at the integration step dt.
             if np.any(value < self.dt):
                 print("Tc constraint violated:", self.dt, value)
             value = np.maximum(self.dt, value)
         param.set_value(value.astype(dtype))
         if debug:
             print("setting value of %s" % param.name)
Example #8
 def set_param_values(self, flattened_params, **tags):
     debug = tags.pop("debug", False)
     param_values = unflatten_tensors(
         flattened_params, self.get_param_shapes(**tags))
     ops = []
     feed_dict = dict()
     for param, dtype, value in zip(
             self.get_params(**tags),
             self.get_param_dtypes(**tags),
             param_values):
         if param not in self._cached_assign_ops:
             assign_placeholder = tf.placeholder(dtype=param.dtype.base_dtype)
             assign_op = tf.assign(param, assign_placeholder)
             self._cached_assign_ops[param] = assign_op
             self._cached_assign_placeholders[param] = assign_placeholder
         ops.append(self._cached_assign_ops[param])
         feed_dict[self._cached_assign_placeholders[param]] = value.astype(dtype)
         if debug:
             print("setting value of %s" % param.name)
     tf.get_default_session().run(ops, feed_dict=feed_dict)
Example #9
 def flat_to_params(self, flattened_params, all_params=False, **tags):
     return unflatten_tensors(flattened_params,
                              self.get_param_shapes(all_params, **tags))
Example #10
 def flat_to_params(self, flattened_params, **tags):
     # Not used; drop into the debugger to flag any unexpected caller.
     import pdb
     pdb.set_trace()
     return unflatten_tensors(flattened_params,
                              self.get_param_shapes(**tags))
Example #11
 def flat_to_params(self, flattened_params, **tags):
     return unflatten_tensors(flattened_params, self.get_param_shapes(**tags))
Example #12
 def flat_to_params(self, flattened_params, **tags):
     import numpy as np
     # Debug aid: log the shape of the incoming flat parameter vector.
     print(np.shape(flattened_params))
     return unflatten_tensors(flattened_params,
                              self.get_param_shapes(**tags))
Example #13
 def flat_to_params(self, flattened_params, **tags):
     # Assumes a module-level `import ipdb` and a `config` object
     # exposing the TF_NN_SETTRACE flag.
     if config.TF_NN_SETTRACE:
         ipdb.set_trace()
     return unflatten_tensors(flattened_params,
                              self.get_param_shapes(**tags))
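Gating `set_trace` behind a config flag, as Example #13 does, keeps the debugging hook in the code without tripping normal runs. A minimal standalone variant using an environment variable instead of a config module (the variable name is illustrative):

import os
import pdb

def maybe_trace():
    """Enter the debugger only when TF_NN_SETTRACE=1 is set."""
    if os.environ.get("TF_NN_SETTRACE") == "1":
        pdb.set_trace()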