Example #1
0
    def __call__(self, in_obj, keep=None, **kwargs):
        """Apply a random dropout mask to ``in_obj``, rescaled by ``1 / keep``."""
        # Lazily allocate the persistent mask tensor on first use.
        if self.mask is None:
            sample_axes = in_obj.axes.sample_axes()
            self.mask = ng.persistent_tensor(axes=sample_axes).named('mask')
        # Draw fresh uniform noise; keep each unit whose draw is <= keep.
        random_draw = ng.uniform(self.mask, low=0.0, high=1.0)
        self.mask = ng.less_equal(random_draw, keep)
        return ng.multiply(self.mask, in_obj) * (1. / keep)
Example #2
0
 def __call__(self, in_obj):
     """Scale by ``keep`` at inference time; otherwise apply a fresh dropout mask."""
     if not Layer.inference_mode:
         # Allocate the persistent mask tensor once, lazily.
         if self.mask is None:
             self.mask = ng.persistent_tensor(
                 axes=in_obj.axes.sample_axes()).named('mask')
         # Keep each unit whose uniform draw falls at or below ``self.keep``.
         self.mask = ng.uniform(self.mask, low=0.0, high=1.0) <= self.keep
         return self.mask * in_obj
     # Inference: deterministic scaling instead of random masking.
     return self.keep * in_obj
Example #3
0
def test_uniform_range_posneg(input_tensor):
    """Uniform samples over [-0.5, 0.5) stay in range and include negatives."""
    sampled = ng.uniform(input_tensor, low=-0.5, high=0.5)

    with executor(sampled) as ex:
        values = ex()
    print(values)

    # Every sample lies inside the half-open interval.
    assert np.all(values < 0.5)
    assert np.all(values >= -0.5)
    # With overwhelming probability at least one sample is negative.
    assert not np.all(values >= 0.0)
 def __call__(self, in_obj, **kwargs):
     """Channel-wise dropout: randomly mask whole channels during training."""
     if Layer.inference_mode:
         # Inference: deterministic scaling instead of random masking.
         return self.keep * in_obj
     if self.mask is None:
         # One mask entry per channel, shared across the other sample axes.
         sample_axes = in_obj.axes.sample_axes()
         channel_axes = ng.make_axes([sample_axes.channel_axis()])
         self.mask = ng.persistent_tensor(
             axes=channel_axes).named('channel_mask')
     # Keep each channel whose uniform draw falls at or below ``self.keep``.
     self.mask = ng.uniform(self.mask, low=0.0, high=1.0) <= self.keep
     return self.mask * in_obj
Example #5
0
def test_uniform_range_pos(transformer_factory, input_tensor):
    """Uniform samples over [0.0, 0.5) stay in range and are not all zero."""
    sampled = ng.uniform(input_tensor, low=0.0, high=0.5)

    with executor(sampled) as ex:
        values = ex()
    print(values)

    # Every sample lies inside the half-open interval.
    assert np.all(values < 0.5)
    assert np.all(values >= 0.0)
    # With overwhelming probability at least one sample is nonzero.
    assert not np.all(values == 0.0)
Example #6
0
def test_rng_repetition():
    """
    Tests rng ops, to make sure they run every execution and not just initialization.

    Two executions of the same computation must produce different random draws.
    The transformer is released in a ``finally`` block so it is closed even
    when the assertion fails (the original leaked it on failure).
    """
    axes = ng.make_axes([ng.make_axis(2), ng.make_axis(2)])
    x = ng.variable(initial_value=np.array([[1, 2], [3, 4]]), axes=axes)
    y = ng.uniform(x)
    mysum = ng.sum(y)
    trans = ng.transformers.make_transformer()
    try:
        rand_comp = trans.computation(mysum)
        # .copy() snapshots each result; the result buffer may be reused
        # between calls, which would otherwise alias val1 and val2.
        val1 = rand_comp().copy()
        val2 = rand_comp().copy()
        assert val1 != val2
    finally:
        # Always close the transformer, even if the assertion above fails.
        trans.close()
Example #7
0
def test_uniform_range_posneg(transformer_factory):
    """Uniform samples over [-0.5, 0.5) stay in range and include negatives."""
    M = ng.make_axis(5, name='M')
    N = ng.make_axis(8, name='N')

    ng_a = ng.persistent_tensor([M, N], initial_value=10.0)
    ng_a = ng.uniform(ng_a, low=-0.5, high=0.5)

    # Use the executor as a context manager (as the sibling tests do) so
    # its transformer is released; the original never closed it.
    with executor(ng_a) as ex:
        result = ex()
    print(result)

    # Every sample lies inside the half-open interval.
    assert np.all(result < 0.5)
    assert np.all(result >= -0.5)
    # With overwhelming probability at least one sample is negative.
    assert not np.all(result >= 0.0)
Example #8
0
    # build network graph
    generated = generator(z)
    D1 = discriminator(image)
    D2 = discriminator(generated)

    weight_clip_value = None  # no weight clipping
    gp_scale = args.gp_scale  # gradient penalty coefficient

    loss_d = D1 - D2
    loss_g = D2

# calculate gradient for all losses

x = ng.variable(initial_value=0.5, axes=[])
epsilon = ng.uniform(x)
interpolated = epsilon * image + (1 - epsilon) * generated
D3 = discriminator(interpolated)

with name_scope(name="GradientPenalty"):
    gradient = ng.deriv(ng.sum(D3, out_axes=[]), interpolated)
    grad_norm = ng.L2_norm(gradient)
    gradient_penalty = ng.square(grad_norm - 1)

# add gradient penalty
# TODO
# when gp_scale is set to 0 the behavior is not as expected
# loss_d = loss_d + 0 * gp is not loss_d = loss_d + 0
# we can get rid of if statement once this is fixed
# https://github.com/NervanaSystems/private-ngraph/issues/2145
if gp_scale:
Example #9
0
 def train_outputs(self, in_obj):
     """Apply a fresh random dropout mask to ``in_obj`` during training."""
     in_axes = in_obj.axes.sample_axes()
     # Explicit ``is None`` check instead of ``self.mask or ...``: ``or``
     # evaluates the tensor's truth value, which is unreliable for
     # tensor-like objects, and the sibling dropout layers in this file
     # all use the ``is None`` form.
     if self.mask is None:
         self.mask = ng.persistent_tensor(
             axes=in_axes).named('mask')
     # Keep each unit whose uniform draw falls at or below ``self.keep``.
     self.mask = ng.uniform(self.mask, low=0.0, high=1.0) <= self.keep
     return self.mask * in_obj