Example #1
    # Method of a custom Keras optimizer; assumes `import tensorflow as tf`
    # and `from typing import Optional` at module level.
    def _decay_weights_op(
        self,
        var: tf.Variable,
        apply_state: Optional[dict[tuple[str, tf.DType], dict[str, tf.Tensor]]] = None,
    ):
        # Apply decoupled weight decay only to variables that match the
        # decay filter; everything else gets a no-op.
        if self._use_weight_decay(var.name):
            var_device, var_dtype = var.device, var.dtype.base_dtype
            # Use the cached per-(device, dtype) coefficients if present,
            # otherwise recompute them on the fly.
            coefficients = (apply_state or {}).get(
                (var_device, var_dtype)) or self._fallback_apply_state(
                    var_device, var_dtype)

            # Per-variable learning-rate multiplier (defaults to 1).
            m = self.learning_rate_multiplier.get(var.name, 1)
            # Decoupled decay: var <- var - (lr / m) * wd * var.
            return var.assign_sub(
                coefficients["lr_t"] / m * coefficients["wd_t"] * var,
                self._use_locking)
        return tf.no_op()
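
To see what the op does numerically, here is a minimal standalone sketch of the same decoupled weight-decay update applied directly to a variable; the `lr` and `wd` values are illustrative assumptions, not taken from the optimizer above.

import tensorflow as tf

# Decoupled weight decay outside any optimizer: w <- w - lr * wd * w.
lr, wd = 0.01, 0.1                # assumed hyperparameter values
w = tf.Variable(tf.ones((2, 2)))

w.assign_sub(lr * wd * w)         # shrink the weights in place
print(w.numpy())                  # every entry is now 1 - 0.001 = 0.999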
Example #2
import tensorflow as tf

# Initialize a variable from a constant with an explicit shape
print(tf.Variable(tf.constant(4.2, shape=(3, 3))))

# Mutate variables in place
a = tf.Variable(0.)
b = a + 1  # using a variable in an op yields a plain tensor, not a Variable
print(a)
print(b)

print(type(b))  # an EagerTensor in eager mode, not a tf.Variable

a.assign_add(1)  # in-place: a += 1
print(a)

a.assign_sub(4)  # in-place: a -= 4
print(a)

# Tensors

# Constant tensors are immutable; the dtype is inferred from the inputs

c = tf.constant([[1, 1, 1], [2, 3, 3], [4, 4, 4]])
print(c.dtype, c.shape)  # int32 (3, 3)

print(c.numpy())

# The dtype can also be set explicitly
d = tf.constant([[1, 1, 1], [2, 3, 3], [4, 4, 4]], dtype=tf.float32)
print(d)

# Coefficients - split a list into different shapes
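
Assuming the comment above refers to `tf.split`, here is a short sketch of splitting a tensor into pieces of different shapes along an axis; the size list `[1, 2]` is illustrative.

import tensorflow as tf

e = tf.constant([[1., 1., 1.], [2., 3., 3.], [4., 4., 4.]])

# Split along axis 1 into widths 1 and 2; the sizes must sum to the
# length of the split axis (here 3).
left, right = tf.split(e, [1, 2], axis=1)
print(left.shape, right.shape)  # (3, 1) (3, 2)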