Пример #1
0
def test_read_nested_scopes(distribute_scope, eager_and_graph_mode):
    """Nested quantized scopes toggle quantized reads on and off."""
    variable = create_quantized_variable(get_var(3.5), quantizer=lambda v: 2 * v)
    evaluate(variable.initializer)
    with quantized_scope.scope(True):
        # Outer scope enabled: reads go through the quantizer (3.5 * 2).
        assert evaluate(variable.read_value()) == 7
        with quantized_scope.scope(False):
            # Inner scope disables quantization; the latent value is returned.
            assert evaluate(variable.read_value()) == 3.5
        # Leaving the inner scope restores quantized reads.
        assert evaluate(variable.read_value()) == 7
Пример #2
0
 def call(self, inputs):
     """Forward pass: quantize inputs, then run the parent layer quantized."""
     quantizer = self.input_quantizer
     if quantizer:
         inputs = quantizer(inputs)
     with quantized_scope.scope(True):
         # Report the kernel flip-ratio metric when it is configured.
         if hasattr(self, "flip_ratio"):
             self.add_metric(self.flip_ratio(self.kernel))
         return super().call(inputs)
Пример #3
0
def test_scatter_method_delegations(eager_and_graph_mode):
    """Scatter ops mutate the latent variable while reads stay quantized.

    The latent value starts as [3.5, 4] with a 2x quantizer, so quantized
    reads start at [7, 8]. Each expected value below reflects the state left
    by the previous scatter op — statement order is significant.
    """
    x = create_quantized_variable(get_var([3.5, 4]), quantizer=lambda x: 2 * x)
    evaluate(x.initializer)
    with quantized_scope.scope(True):
        assert_array_equal(evaluate(x.value()), [7, 8])

        def slices(val, index):
            # Build an IndexedSlices that updates a single entry of the
            # length-2 variable.
            return tf.IndexedSlices(
                values=tf.constant(val, dtype=tf.float32),
                indices=tf.constant(index, dtype=tf.int32),
                dense_shape=tf.constant([2], dtype=tf.int32),
            )

        # Latent 3.5 - 0.5 = 3.0 -> quantized 6; then back to 3.5 -> 7.
        assert_array_equal(evaluate(x.scatter_sub(slices(0.5, 0))), [6, 8])
        assert_array_equal(evaluate(x.scatter_add(slices(0.5, 0))), [7, 8])
        if version.parse(tf.__version__) > version.parse("1.14"):
            # scatter_max/min/mul/div were added after TF 1.14.
            # Latent index 1: max(4, 4.5)=4.5 -> 9; min(4.5, 4)=4 -> 8;
            # 4 * 2 = 8 -> 16; 8 / 2 = 4 -> 8.
            assert_array_equal(evaluate(x.scatter_max(slices(4.5, 1))), [7, 9])
            assert_array_equal(evaluate(x.scatter_min(slices(4.0, 1))), [7, 8])
            assert_array_equal(evaluate(x.scatter_mul(slices(2.0, 1))),
                               [7, 16])
            assert_array_equal(evaluate(x.scatter_div(slices(2.0, 1))), [7, 8])
        # Latent becomes [3.5, 2] -> quantized [7, 4].
        assert_array_equal(evaluate(x.scatter_update(slices(2, 1))), [7, 4])
        # nd variants: latent [3, 1] -> [6, 2]; then [3.5, 2] -> [7, 4].
        assert_array_equal(evaluate(x.scatter_nd_sub([[0], [1]], [0.5, 1.0])),
                           [6, 2])
        assert_array_equal(evaluate(x.scatter_nd_add([[0], [1]], [0.5, 1.0])),
                           [7, 4])
        # Latent set to [0.5, 1.0] -> quantized [1, 2].
        assert_array_equal(
            evaluate(x.scatter_nd_update([[0], [1]], [0.5, 1.0])), [1, 2])
Пример #4
0
def test_method_delegations(distribute_scope, eager_and_graph_mode):
    """Variable methods/attributes delegate to the latent variable.

    Reads inside the quantized scope return the quantized value (2x the
    latent value); assigns mutate the latent value, so later expectations
    depend on earlier assigns — statement order is significant.
    """
    x = create_quantized_variable(get_var(3.5), quantizer=lambda x: 2 * x)
    with quantized_scope.scope(True):
        evaluate(x.initializer)
        # Latent 3.5 read through the 2x quantizer.
        assert evaluate(x.value()) == 7
        assert evaluate(x.read_value()) == 7
        assert x.trainable
        if version.parse(tf.__version__) > version.parse("1.14"):
            # `synchronization` only exists on newer TF versions.
            assert x.synchronization == x.latent_variable.synchronization
        assert x.aggregation == x.latent_variable.aggregation
        assert evaluate(x.initialized_value()) == 7
        if not tf.executing_eagerly():
            if not distribute_scope:
                # These functions are not supported for DistributedVariables
                x.load(4.5)  # latent becomes 4.5 -> quantized 9
                assert x.eval() == 9
            assert evaluate(x.initial_value) == 7
            assert x.op == x.latent_variable.op
            assert x.graph == x.latent_variable.graph
        if not distribute_scope:
            # These attributes are not supported for DistributedVariables
            assert x.constraint is None
            assert x.initializer == x.latent_variable.initializer
        # Assigns return the (quantized) resulting value: 4 -> 8, 5 -> 10,
        # 3.5 -> 7.
        assert evaluate(x.assign(4)) == 8
        assert evaluate(x.assign_add(1)) == 10
        assert evaluate(x.assign_sub(1.5)) == 7
        assert x.name == x.latent_variable.name
        assert x.device == x.latent_variable.device
        # Scalar variable: shape is ().
        assert x.shape == ()
        assert x.get_shape() == ()
Пример #5
0
 def call(self, inputs):
     """Forward pass: quantize inputs, then run the parent layer quantized."""
     quantizer = self.input_quantizer
     if quantizer:
         inputs = quantizer(inputs)
     with quantized_scope.scope(True):
         # Report flip-ratio metrics for whichever kernels are tracked.
         if hasattr(self, "depthwise_flip_ratio"):
             self.add_metric(self.depthwise_flip_ratio(self.depthwise_kernel))
         if hasattr(self, "pointwise_flip_ratio"):
             self.add_metric(self.pointwise_flip_ratio(self.pointwise_kernel))
         return super().call(inputs)
Пример #6
0
def test_sparse_reads(eager_and_graph_mode):
    """sparse_read and gather_nd honor the quantized scope."""
    variable = QuantizedVariable.from_variable(
        get_var([1.0, 2.0]), quantizer=lambda v: 2 * v
    )
    evaluate(variable.initializer)

    # Outside any quantized scope the latent values are returned as-is.
    assert evaluate(variable.sparse_read([0])) == 1
    assert evaluate(variable.gather_nd([0])) == 1
    with quantized_scope.scope(True):
        # Inside the scope reads go through the quantizer (v -> 2v).
        assert evaluate(variable.sparse_read([0])) == 2
        assert evaluate(variable.gather_nd([0])) == 2
Пример #7
0
def test_checkpoint(tmp_path, eager_and_graph_mode):
    """Checkpoints round-trip the latent (unquantized) value."""
    variable = create_quantized_variable(get_var(0.0), quantizer=lambda v: 2 * v)
    evaluate(variable.initializer)
    evaluate(variable.assign(123.0))

    ckpt = tf.train.Checkpoint(x=variable)
    save_path = ckpt.save(tmp_path)
    # Clobber the value, then restore the checkpointed one.
    evaluate(variable.assign(234.0))
    ckpt.restore(save_path).assert_consumed().run_restore_ops()
    assert isinstance(variable, QuantizedVariable)
    assert evaluate(variable) == 123.0
    with quantized_scope.scope(True):
        # Quantized read of the restored latent value.
        assert evaluate(variable) == 123.0 * 2
Пример #8
0
def test_read(distribute_scope, eager_and_graph_mode):
    """Every read path returns the latent value outside the scope and the
    quantized value inside it."""
    variable = create_quantized_variable(get_var(3.5), quantizer=lambda v: 2 * v)
    evaluate(variable.initializer)

    # Without a quantized scope all read paths yield the latent value.
    assert evaluate(variable) == 3.5
    assert evaluate(variable.value()) == 3.5
    assert evaluate(variable.read_value()) == 3.5
    assert evaluate(tf.identity(variable)) == 3.5

    with quantized_scope.scope(True):
        # Inside the scope all read paths yield the quantized value (2 * 3.5).
        assert evaluate(variable) == 7
        assert evaluate(variable.value()) == 7
        assert evaluate(variable.read_value()) == 7
        assert evaluate(tf.identity(variable)) == 7
Пример #9
0
def test_optimizer(eager_mode, should_quantize):
    """SGD updates flow through the (possibly quantized) read of the variable."""
    variable = create_quantized_variable(get_var(1.0), quantizer=lambda v: -v)
    optimizer = tf.keras.optimizers.SGD(1.0)

    def loss():
        # Read the variable under the requested quantization setting.
        with quantized_scope.scope(should_quantize):
            return variable + 1.0

    @tf.function
    def step():
        optimizer.minimize(loss, var_list=[variable])

    step()
    if should_quantize:
        # With the negating quantizer the gradient w.r.t. the latent value
        # is -1, so one SGD step with lr=1 moves the latent 1.0 -> 2.0.
        assert evaluate(variable) == 2.0
        with quantized_scope.scope(should_quantize):
            assert evaluate(variable) == -2.0
    else:
        # Plain gradient of +1 moves the latent 1.0 -> 0.0.
        assert evaluate(variable) == 0.0
Пример #10
0
def quantized(request):
    """Pytest fixture that runs the test both quantized and non-quantized."""
    should_quantize = request.param
    with quantized_scope.scope(should_quantize):
        yield should_quantize
Пример #11
0
 def loss():
     """Return x + 1.0, reading x under the enclosing quantization flag."""
     # NOTE(review): `x` and `should_quantize` are free names captured from
     # the enclosing function (not visible in this excerpt) — this closure
     # mirrors the `loss` defined inside test_optimizer.
     with quantized_scope.scope(should_quantize):
         return x + 1.0