Example #1
    def sample_h_given_x(self, x):
        h_pre = K.dot(x, self.Wrbm) + self.bh
        h_sigm = self.activation(self.scaling_h_given_x * h_pre)

        # drop out noise
        #if(0.0 < self.p < 1.0):
        #	noise_shape = self._get_noise_shape(h_sigm)
        #	h_sigm = K.in_train_phase(K.dropout(h_sigm, self.p, noise_shape), h_sigm)

        if (self.hidden_unit_type == 'binary'):
            # random sample:
            #   \hat{h} = 1,      if p(h=1|x) > uniform(0, 1)
            #             0,      otherwise
            h_samp = K.random_binomial(shape=h_sigm.shape, p=h_sigm)
        elif (self.hidden_unit_type == 'nrlu'):
            h_samp = nrlu(h_pre)
        else:
            h_samp = h_sigm

        if (0.0 < self.p < 1.0):
            noise_shape = self._get_noise_shape(h_samp)
            h_samp = K.in_train_phase(K.dropout(h_samp, self.p, noise_shape),
                                      h_samp)

        return h_samp, h_pre, h_sigm
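For reference, here is a self-contained sketch of the 'binary' branch in isolation; the weights and biases are made-up stand-ins for self.Wrbm and self.bh, and the sizes are illustrative only.

import numpy as np
from keras import backend as K

x = K.variable(np.random.rand(2, 4))        # toy minibatch of visible units
Wrbm = K.variable(np.random.randn(4, 3))    # stand-in for self.Wrbm
bh = K.zeros((3,))                          # stand-in for self.bh

h_pre = K.dot(x, Wrbm) + bh
h_sigm = K.sigmoid(h_pre)                   # p(h = 1 | x)
h_samp = K.random_binomial(shape=K.shape(h_sigm), p=h_sigm)
print(K.eval(h_samp))                       # 0/1 samples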
Example #2
    def call(self, segment_seqs, mask=None):
        """

        :param segments: input segment sequences with same length
        :param mask:
        :return:
        """
        x = []
        for segments in segment_seqs:
            ids = []
            for seg in segments:
                if seg.beg == -1 and seg.end == -1:
                    ids.append([0])
                else:
                    ids.append(seg.words)
            x.append(ids)
        if 0. < self.dropout < 1.:
            retain_p = 1. - self.dropout
            B = K.random_binomial(
                (self.input_dim, ), p=retain_p) * (1. / retain_p)
            B = K.expand_dims(B)
            W = K.in_train_phase(self.W * B, self.W)
        else:
            W = self.W
        # gather embeddings for the collected word ids
        out = K.gather(W, x)
        return out
Example #3
def samplefocus(x):
    # Straight-through Bernoulli sampling: draw a 0/1 sample in the forward pass,
    # and let the gradient pass through unchanged via the returned grad function.
    y = K.random_binomial(shape=x.shape, p=x, dtype=x.dtype)

    def grad(dy):
        return dy

    return y, grad
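The return value pairs the sample with a gradient function, which is the shape expected by TensorFlow's custom-gradient mechanism. A minimal, purely illustrative sketch of wiring that up (the function name here is an assumption, not from the original source):

import tensorflow as tf
from keras import backend as K

@tf.custom_gradient
def sample_straight_through(x):
    # forward pass: Bernoulli sample; backward pass: identity gradient
    y = K.random_binomial(shape=K.shape(x), p=x, dtype=x.dtype)

    def grad(dy):
        return dy

    return y, grad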
Example #4
    def gan_loss(self, d_logit_real, d_logit_fake):
        """
        define loss function
        """

        d_target_real = K.ones_like(d_logit_real)
        d_target_fake = K.zeros_like(d_logit_fake)

        if self.label_flipping > 0:
            # randomly flip some real targets to 0
            flip_val = K.random_binomial(K.get_variable_shape(d_logit_real),
                                         p=self.label_flipping)
            d_target_real -= flip_val
            # randomly flip some fake targets to 1
            flip_val = K.random_binomial(K.get_variable_shape(d_logit_fake),
                                         p=self.label_flipping)
            d_target_fake += flip_val

        if self.oneside_smooth:
            # one-sided label smoothing: scale real targets into the range 0.9-0.99
            smooth_val = K.random_uniform_variable(
                K.get_variable_shape(d_logit_real), low=0.9, high=0.99)
            d_target_real *= smooth_val
            # same for fake targets (no effect while they are still 0, i.e. when label_flipping = 0)
            smooth_val = K.random_uniform_variable(
                K.get_variable_shape(d_logit_fake), low=0.9, high=0.99)
            d_target_fake *= smooth_val

        d_loss_real = K.mean(K.binary_crossentropy(output=d_logit_real,
                                                   target=d_target_real,
                                                   from_logits=True),
                             axis=1)
        d_loss_fake = K.mean(K.binary_crossentropy(output=d_logit_fake,
                                                   target=d_target_fake,
                                                   from_logits=True),
                             axis=1)

        d_loss = K.mean(d_loss_real + d_loss_fake)

        g_loss = K.mean(
            K.binary_crossentropy(output=d_logit_fake,
                                  target=K.ones_like(d_logit_fake),
                                  from_logits=True))

        return d_loss, g_loss
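To isolate the label-flipping trick, here is a small sketch that flips a fraction of real targets to 0 with K.random_binomial; the flip probability and tensor shape are illustrative assumptions, not values from the original model.

import numpy as np
from keras import backend as K

label_flipping = 0.05                       # illustrative flip probability
d_logit_real = K.variable(np.random.randn(8, 1))

d_target_real = K.ones_like(d_logit_real)
# each real target independently becomes 0 with probability label_flipping
flip_val = K.random_binomial(K.get_variable_shape(d_logit_real),
                             p=label_flipping)
d_target_real -= flip_val
print(K.eval(d_target_real))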
Example #5
def _stochastic_survival(y, p_survival=1.0):
    # Bernoulli survival indicator for the whole layer
    survival = K.random_binomial((1, ), p=p_survival)
    # during the testing phase:
    # - scale y by p_survival (see eq. (6))
    # - i.e. p_survival effectively becomes 1 for all layers (no layer dropout)
    return K.in_test_phase(
        tf.constant(p_survival, dtype='float32') * y, survival * y)
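A minimal sketch of how a survival function like this could be attached to a residual branch through a Lambda layer; the layer sizes and the 0.8 survival probability are illustrative assumptions, not taken from the original model.

import tensorflow as tf
from keras import backend as K
from keras.layers import Input, Dense, Lambda, add
from keras.models import Model

inputs = Input(shape=(16,))
shortcut = inputs
branch = Dense(16, activation='relu')(inputs)
# drop the whole residual branch with probability 1 - p_survival during training
branch = Lambda(_stochastic_survival,
                arguments={'p_survival': 0.8})(branch)
outputs = add([shortcut, branch])
model = Model(inputs, outputs)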
 def _compute_drop_mask(self, shape):
     mask = K.random_binomial(shape, p=self._get_gamma(shape[1]))
     mask = keras.layers.MaxPool1D(
         pool_size=self.block_size,
         padding='same',
         strides=1,
         data_format='channels_last',
     )(mask)
     return 1.0 - mask
 def call(self, x, mask=None):
     if 0. < self.dropout < 1.:
         retain_p = 1. - self.dropout
         B = K.random_binomial((self.input_dim,), p=retain_p) * (1. / retain_p)
         B = K.expand_dims(B)
         W = K.in_train_phase(self.W * B, self.W)
     else:
         W = self.W
     out = K.gather(W, x)
     return out
Example #8
    def sample_x_given_h(self, h):
        x_pre = K.dot(h, self.Wrbm.T) + self.bx

        if (self.visible_unit_type == 'gaussian'):
            x_samp = self.scaling_x_given_h * x_pre
            return x_samp, x_samp, x_samp
        else:
            x_sigm = K.sigmoid(self.scaling_x_given_h * x_pre)
            x_samp = K.random_binomial(shape=x_sigm.shape, p=x_sigm)
            return x_samp, x_pre, x_sigm
Example #9
 def call(self, x, mask=None):
     if 0. < self.dropout < 1.:
         retain_p = 1. - self.dropout
         B = K.random_binomial((self.input_dim,), p=retain_p) * (1. / retain_p)
         B = K.expand_dims(B)
         W = K.in_train_phase(self.W * B, self.W)
     else:
         W = self.W
     out = K.gather(W, x)
     return out
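The recurring pattern above builds an inverted-dropout mask over whole embedding rows: one Bernoulli keep decision per row, rescaled by 1/retain_p so the expected embedding is unchanged. A self-contained sketch of the same idea, with illustrative sizes:

import numpy as np
from keras import backend as K

vocab_size, embed_dim, dropout = 5, 3, 0.5   # illustrative sizes
W = K.variable(np.random.randn(vocab_size, embed_dim))

retain_p = 1. - dropout
# one keep/drop decision per vocabulary row, rescaled so E[W * B] == W
B = K.random_binomial((vocab_size,), p=retain_p) * (1. / retain_p)
W_dropped = W * K.expand_dims(B)             # zeros out whole rows at random

ids = K.constant([[0, 2, 4]], dtype='int32')
print(K.eval(K.gather(W_dropped, ids)))      # shape (1, 3, embed_dim)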
Example #10
 def resmerge(self, inputs, **kwargs):
     x, y = inputs[0], inputs[1]
     ptrue = K.sqrt(K.sigmoid(self.p))
     split = K.tile(ptrue, self.dims)
     resout = split * x + (1 - split) * y
     sample = K.tile(
         K.in_train_phase(K.random_binomial((1, ), p=ptrue), K.zeros(
             (1, ))), self.dims)
     output = K.switch(sample, x, resout)
     return output
Example #11
 def call(self, x, mask=None):
     if 0. < self.dropout < 1.:
         retain_p = 1. - self.dropout
         B = K.random_binomial((self.input_dim,), p=retain_p) * (1. / retain_p)
         B = K.expand_dims(B)
         W = K.in_train_phase(self.W * B, self.W)
     else:
         W = self.W
     W_ = T.concatenate([self.zeros_vector, W], axis=0)
     out = K.gather(W_, x)
     return out
Example #12
 def _compute_drop_mask(self, shape):
     height, width = shape[1], shape[2]
     mask = K.random_binomial(shape, p=self._get_gamma(height, width))
     mask *= self._compute_valid_seed_region(height, width)
     mask = keras.layers.MaxPool2D(
         pool_size=(self.block_size, self.block_size),
         padding='same',
         strides=1,
         data_format='channels_last',
     )(mask)
     return 1.0 - mask
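The method above appears to follow the DropBlock-style construction: sample sparse Bernoulli "seed" points, then max-pool them so each seed wipes out a whole block. A self-contained sketch of just that idea, with a fixed gamma and without the valid-seed-region restriction used in the class (shapes and probabilities are illustrative):

import keras
from keras import backend as K

block_size = 3
gamma = 0.1                                   # illustrative seed probability

# Bernoulli "seed" mask over a (batch, height, width, channels) feature map
seed = K.random_binomial((1, 8, 8, 4), p=gamma)
# max pooling grows every seed point into a block_size x block_size block
block = keras.layers.MaxPool2D(pool_size=(block_size, block_size),
                               strides=1, padding='same',
                               data_format='channels_last')(seed)
mask = 1.0 - block                            # 0 inside dropped blocks, 1 elsewhere
print(K.eval(K.mean(mask)))                   # fraction of surviving activations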
 def call(self, x, mask=None):
     if 0. < self.dropout < 1.:
         retain_p = 1. - self.dropout
         B = K.random_binomial((self.input_dim,), p=retain_p) * (1. / retain_p)
         B = K.expand_dims(B)
         W = K.in_train_phase(self.W * B, self.W)
     else:
         W = self.W
     W_ = T.concatenate([self.zeros_vector, W], axis=0)
     out = K.gather(W_, x)
     return out
Example #14
    def call(self, x, mask=None):
        if isinstance(x, list): 
            x,_ = x
        if mask is not None and isinstance(mask, list):
            mask,_ = mask
        if 0. < self.dropout < 1.:
            retain_p = 1. - self.dropout
            dims = self.W._keras_shape[:-1]
            B = K.random_binomial(dims, p=retain_p) * (1. / retain_p)
            B = K.expand_dims(B)
            W = K.in_train_phase(self.W * B, self.W)
        else:
            W = self.W
        
        if self.mode == 'matrix':
            return K.gather(W,x)
        elif self.mode == 'tensor':
            # quick and dirty: only allowing for 3dim inputs when it's tensor mode
            assert K.ndim(x) == 3
            # put sequence on first; gather; take diagonal across shared batch dimension
            # in other words, W is (B, S, F)
            # incoming x is (B, S, A)
            inds = K.arange(self.W._keras_shape[0])
            #out = K.gather(K.permute_dimensions(W, (1,0,2)), x).diagonal(axis1=0, axis2=3)
            #return K.permute_dimensions(out, (3,0,1,2))
            ### method above doesn't do grads =.=
            # tensor abc goes to bac, indexed onto with xyz, goes to xyzac, 
            # x == a, so shape to xayzc == xxyzc
            # take diagonal on first two: xyzc 
            #out = K.colgather()
            out = K.gather(K.permute_dimensions(W, (1,0,2)), x) 
            out = K.permute_dimensions(out, (0,3,1,2,4))
            out = K.gather(out, (inds, inds))
            return out
        else:
            raise Exception('sanity check. should not be here.')

        #all_dims = T.arange(len(self.W._keras_shape))
        #first_shuffle = [all_dims[self.embed_dim]] + all_dims[:self.embed_dim] + all_dims[self.embed_dim+1:]
        ## 1. take diagonal from 0th to
        ## change of tactics
        ## embed on time or embed on batch. that's all I'm supporting.  
        ## if it's embed on time, then, x.ndim+1 is where batch will be, and is what
        ## i need to take the diagonal over. 
        ## now dim shuffle the xdims + 1 to the front.
        #todo: get second shuffle or maybe find diagonal calculations
        #out = K.gather(W, x)
        #return out

        ### reference
        #A = S(np.arange(60).reshape(3,4,5))
        #x = S(np.random.randint(0, 4, (3,4,10)))
        #x_emb = A.dimshuffle(1,0,2)[x].dimshuffle(0,3,1,2,4)[T.arange(A.shape[0]), T.arange(A.shape[0])]
Example #15
    def _get_sampler_by_string(self, loss):
        output = self.outputs[0]
        inputs = self.inputs

        if loss in ["MSE", "mse", "mean_squared_error"]:
            output += samplers.random_normal(K.shape(output), mean=0.0, std=1.0)
            draw_sample = K.function(inputs + [K.learning_phase()], [output])

            def sample_gaussian(inputs, use_dropout=False):
                '''
                Helper to draw samples from a gaussian distribution
                '''
                return draw_sample(inputs + [int(use_dropout)])[0]

            return sample_gaussian

        elif loss == "binary_crossentropy":
            output = K.random_binomial(K.shape(output), p=output)
            draw_sample = K.function(inputs + [K.learning_phase()], [output])

            def sample_binomial(inputs, use_dropout=False):
                '''
                Helper to draw samples from a binomial distribution
                '''
                return draw_sample(inputs + [int(use_dropout)])[0]

            return sample_binomial

        elif loss in ["mean_absolute_error", "mae", "MAE"]:
            output += samplers.random_laplace(K.shape(output), mu=0.0, b=1.0)
            draw_sample = K.function(inputs + [K.learning_phase()], [output])
            def sample_laplace(inputs, use_dropout=False):
                '''
                Helper to draw samples from a Laplacian distribution
                '''
                return draw_sample(inputs + [int(use_dropout)])[0]

            return sample_laplace

        elif loss == "mixture_of_gaussians":
            pi, mu, log_sig = densities.split_mixture_of_gaussians(output, self.n_components)
            samples = samplers.random_gmm(pi, mu, K.exp(log_sig))
            draw_sample = K.function(inputs + [K.learning_phase()], [samples])
            def sample_Mix_gaussians(inputs, use_dropout=False):
                '''
                Helper to draw samples from a mixture-of-Gaussians distribution
                '''
                return draw_sample(inputs + [int(use_dropout)])[0]
            return sample_Mix_gaussians 

        else:
            raise NotImplementedError("Unrecognised loss: %s. "
                                      "Cannot build a generic sampler" % loss)
Example #16
 def call(self, x, mask=None):
     if K.dtype(x) != 'int32':
         x = K.cast(x, 'int32')
     if 0. < self.dropout < 1.:
         retain_p = 1. - self.dropout
         B = K.random_binomial((self.input_dim,), p=retain_p) * (1. / retain_p)
         B = K.expand_dims(B)
         W = K.in_train_phase(self.W * B, self.W)
     else:
         W = self.W
     denorm = K.sum(W, axis=0)
     W = W / denorm
     out = K.gather(W, x)
     return out
Example #17
 def get_constants(self, X, train=False):
     retain_p_W = 1. - self.dropout_W
     retain_p_U = 1. - self.dropout_U
     if train and (self.dropout_W > 0 or self.dropout_U > 0):
         nb_samples = K.shape(X)[0]
         if K._BACKEND == 'tensorflow':
             if not self.input_shape[0]:
                 raise Exception('For RNN dropout in tensorflow, ' +
                                 'a complete input_shape must be ' +
                                 'provided (including batch size).')
             nb_samples = self.input_shape[0]
         B_W = [
             K.random_binomial((nb_samples, self.input_dim), p=retain_p_W)
             for _ in range(4)
         ]
         B_U = [
             K.random_binomial((nb_samples, self.output_dim), p=retain_p_U)
             for _ in range(4)
         ]
     else:
         B_W = np.ones(4, dtype=K.floatx()) * retain_p_W
         B_U = np.ones(4, dtype=K.floatx()) * retain_p_U
     return [B_W, B_U]
Example #18
 def call(self, x, mask=None):
     if 0. < self.dropout < 1.:
         retain_p = 1. - self.dropout
         B = K.random_binomial(
             (self.input_dim, ), p=retain_p) * (1. / retain_p)
         B = K.expand_dims(B)
         W = K.in_train_phase(self.W * B, self.W)
     else:
         W = self.W
     M = K.concatenate([K.zeros(
         (1, )), K.ones((self.input_dim - 1, ))],
                       axis=0)
     M = K.expand_dims(M)
     out = K.gather(W * M, x)
     return out
Example #19
    def get_output(self, train=False):
        X = self.get_input(train)
        retain_p = 1. - self.dropout
        if train and self.dropout > 0:
            B = K.random_binomial((self.input_dim,), p=retain_p)
        else:
            B = K.ones((self.input_dim)) * retain_p
        # we zero-out rows of W at random
        Xs = K.cast(K.reshape(X, (-1, self.nb_words)), 'int32')

        # (samples*input_length, nb_words, dim)
        out = K.gather(self.W * K.expand_dims(B), Xs)
        out = K.reshape(out, (-1, self.input_length, self.nb_words,
                              self.output_dim))
        # (samples, input_length, nb_words, dim)
        out = out * K.expand_dims(K.not_equal(X, 0), dim=-1)
        if self.bow_mode == "bow":
            out = K.sum(out, axis=2)
        return out
Example #20
    def get_output(self, train=False):
        X = self.get_input(train)
        retain_p = 1. - self.dropout
        if train and self.dropout > 0:
            B = K.random_binomial((self.input_dim, ), p=retain_p)
        else:
            B = K.ones((self.input_dim)) * retain_p
        # we zero-out rows of W at random
        Xs = K.cast(K.reshape(X, (-1, self.nb_words)), 'int32')

        # (samples*input_length, nb_words, dim)
        out = K.gather(self.W * K.expand_dims(B), Xs)
        out = K.reshape(
            out, (-1, self.input_length, self.nb_words, self.output_dim))
        # (samples, input_length, nb_words, dim)
        out = out * K.expand_dims(K.not_equal(X, 0), dim=-1)
        if self.bow_mode == "bow":
            out = K.sum(out, axis=2)
        return out
 def call(self, x, mask=None):
     if self.mode == 'maximum_likelihood':
         # draw maximum likelihood sample from Bernoulli distribution
         #    x* = argmax_x p(x) = 1         if p(x=1) >= 0.5
         #                         0         otherwise
         return K.round(x)
     elif self.mode == 'random':
         # draw random sample from Bernoulli distribution
         #    x* = x ~ p(x) = 1              if p(x=1) > uniform(0, 1)
         #                    0              otherwise
         #return self.srng.binomial(size=x.shape, n=1, p=x, dtype=K.floatx())
         return K.random_binomial(x.shape, p=x, dtype=K.floatx())
     elif self.mode == 'mean_field':
         # draw mean-field approximation sample from Bernoulli distribution
         #    x* = E[p(x)] = E[Bern(x; p)] = p
         return x
     elif self.mode == 'nrlu':
         return nrlu(x)
     else:
         raise NotImplementedError('Unknown sample mode!')
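The three modes differ only in how a Bernoulli probability is turned into an activation. A small sketch evaluating them side by side on a fixed probability tensor (values are illustrative):

import numpy as np
from keras import backend as K

p = K.variable(np.array([[0.1, 0.5, 0.9]], dtype='float32'))

ml = K.round(p)                                              # maximum likelihood: threshold at 0.5
rnd = K.random_binomial(K.shape(p), p=p, dtype=K.floatx())   # stochastic Bernoulli draw
mf = p                                                       # mean field: the probability itself

print(K.eval(ml), K.eval(rnd), K.eval(mf))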
Example #22
    def call(self, input, deterministic=False, **kwargs):
        if self.gain is not None:
            input = input * self.gain
        if deterministic or not self.strength:
            return input

        in_shape = self.input_shape
        in_axes = range(len(in_shape))
        in_shape = [
            in_shape[axis] if in_shape[axis] is not None else input.shape[axis]
            for axis in in_axes
        ]  # None => Theano expr
        rnd_shape = [in_shape[axis] for axis in self.axes]
        broadcast = [
            self.axes.index(axis) if axis in self.axes else 'x'
            for axis in in_axes
        ]
        one = K.constant(1)

        if self.mode == 'drop':
            p = one - self.strength
            rnd = K.random_binomial(tuple(rnd_shape), p=p,
                                    dtype=input.dtype) / p

        elif self.mode == 'mul':
            rnd = (one + self.strength)**K.random_normal(tuple(rnd_shape),
                                                         dtype=input.dtype)

        elif self.mode == 'prop':
            coef = self.strength * K.constant(
                np.sqrt(np.float32(self.input_shape[1])))
            rnd = K.random_normal(tuple(rnd_shape),
                                  dtype=input.dtype) * coef + one

        else:
            raise ValueError('Invalid GDropLayer mode', self.mode)

        if self.normalize:
            rnd = rnd / K.sqrt(K.mean(rnd**2, axis=3, keepdims=True))
        return input * K.permute_dimensions(rnd, broadcast)
    def call(self, inputs):

        VI0 = inputs[0]  # Vector indexes representing each user's properties.
        VI1 = inputs[1]  # Vector indexes representing each item's properties.
        V0 = inputs[2]   # Matrix of row vectors for each user's properties.
        V1 = inputs[3]   # Matrix of row vectors for each item's properties.

        # Multiply the feature matrices to get *b* matrices of pair-wise feature products.
        P = K.batch_dot(V0, K.permute_dimensions(V1, (0, 2, 1)), (2, 1))

        # TODO: Normalize the products such that each is between 0 and 1.

        # Each of the VI0 and VI1 has a number of zero entries which should be
        # masked out. Compute row and column masks identifying non-zero entries.
        # Row mask must be permuted to represent rows instead of cols.
        row_masks = K.repeat(K.clip(VI0, 0, 1), P.shape[1])
        row_masks = K.permute_dimensions(row_masks, (0, 2, 1))
        col_masks = K.repeat(K.clip(VI1, 0, 1), P.shape[2])

        # Combine the row and col masks into masks where active (non-padded)
        # elements have value 1 and padding elements have value 0. This is
        # a unique mask for each product matrix.
        active_masks = row_masks * col_masks

        # Apply the active mask to the product matrices via elem-wise multiply.
        P = P * active_masks

        # For dropout, compute a binary Bernoulli mask to zero out elements.
        if 0. < self.dropout_prop < 1.:
            P = K.switch(K.learning_phase(),
                         P * K.random_binomial(K.shape(P), 1 - self.dropout_prop),
                         P)

        # Return the sum of each product matrix, a single scalar for each
        # pair of feature matrices, representing their sum of interactions.
        return K.expand_dims(K.sum(P, (1, 2)))
Example #24
    def call(self, x, mask=None):
        if isinstance(x, list):
            x, _ = x
        if mask is not None and isinstance(mask, list):
            mask, _ = mask
        if 0. < self.dropout < 1.:
            retain_p = 1. - self.dropout
            dims = self.W._keras_shape[:-1]
            B = K.random_binomial(dims, p=retain_p) * (1. / retain_p)
            B = K.expand_dims(B)
            W = K.in_train_phase(self.W * B, self.W)
        else:
            W = self.W

        if self.mode == 'matrix':
            return K.gather(W, x)
        elif self.mode == 'tensor':
            # quick and dirty: only allowing for 3dim inputs when it's tensor mode
            assert K.ndim(x) == 3
            # put sequence on first; gather; take diagonal across shared batch dimension
            # in other words, W is (B, S, F)
            # incoming x is (B, S, A)
            inds = K.arange(self.W._keras_shape[0])
            #out = K.gather(K.permute_dimensions(W, (1,0,2)), x).diagonal(axis1=0, axis2=3)
            #return K.permute_dimensions(out, (3,0,1,2))
            ### method above doesn't do grads =.=
            # tensor abc goes to bac, indexed onto with xyz, goes to xyzac,
            # x == a, so shape to xayzc == xxyzc
            # take diagonal on first two: xyzc
            #out = K.colgather()
            out = K.gather(K.permute_dimensions(W, (1, 0, 2)), x)
            out = K.permute_dimensions(out, (0, 3, 1, 2, 4))
            out = K.gather(out, (inds, inds))
            return out
        else:
            raise Exception('sanity check. should not be here.')
 def _random_arr(self, count, p):
     return K.random_binomial((count,), p = p)
 def _build_global_switch(self):
     return K.equal(K.random_binomial((), p = self.global_p, seed = self.switch_seed), 1.)
Example #27
 def noised():
     return inputs * K.random_binomial(shape=K.shape(inputs),
                                       p=self.ratio)
 def _random_arr(self, count, p):
     return K.random_binomial((count,), p=p)
 def _build_global_switch(self):
     # A randomly sampled tensor that will signal if the batch
     # should use global or local droppath
     return K.equal(K.random_binomial((), p=self.global_p,
                                      seed=self.switch_seed), 1.)
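For intuition, a scalar Bernoulli switch like _build_global_switch can be fed straight into K.switch to pick between two tensors. A minimal sketch with illustrative operands:

from keras import backend as K

# scalar Bernoulli "switch": True with probability p, False otherwise
switch = K.equal(K.random_binomial((), p=0.5), 1.)
global_path = K.ones((2, 2))
local_path = K.zeros((2, 2))
print(K.eval(K.switch(switch, global_path, local_path)))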
Example #30
 def _build_global_switch(self):
      # randomly sampled scalar: use the global or the local droppath for this batch
     return K.equal(
         K.random_binomial((), p=self.global_p, seed=self.switch_seed), 1.)
Example #31
def stochastic_survival(y, p_survival=1.0):
    survival = K.random_binomial((1, ), p=p_survival)
    return K.in_test_phase(
        tf.constant(p_survival, dtype='float32') * y, survival * y)
Example #32
def mvn_kl(mean1, logvar1, mean2=None, logvar2=None):
    # computes kl between N(mean1, var1) and N(mean2, var2)
    # if mean2 is None, assumes mean2=logvar2=0
    if mean2 is None:
        mean2 = K.zeros_like(mean1)
        logvar2 = K.zeros_like(logvar1)

    kl = 0.5 * K.sum(logvar2 - logvar1 - 1 + \
        (K.exp(logvar1) + (mean1 - mean2) ** 2) / K.exp(logvar2), axis=-1)

    return kl

x = Input(batch_shape=(batch_size, img_dim))

x_s = Lambda(lambda arg : K.random_binomial(arg.shape, arg),
    output_shape=(img_dim,))(x)

# encode
h1 = BN(mode=bn_mode)(Dense(h_dim, activation='relu')(x_s))
h2 = BN(mode=bn_mode)(Dense(h_dim, activation='relu')(h1))
a_mean_en = Dense(a_dim)(h2)
a_logvar_en = Dense(a_dim)(h2)
def sampling_a(args):
    a_mean, a_log_var = args
    epsilon = K.random_normal(shape=(batch_size, a_dim))
    return a_mean + K.exp(a_log_var / 2) * epsilon

a = Lambda(sampling_a, output_shape=(a_dim,))([a_mean_en, a_logvar_en])

merged = merge([a, x_s], mode="concat", concat_axis=-1)
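As a quick sanity check on mvn_kl, the closed form gives KL(N(1, 1) || N(0, 1)) = 0.5 per dimension. A tiny sketch evaluating that (the shape is illustrative):

import numpy as np
from keras import backend as K

m1 = K.variable(np.array([[1.0]], dtype='float32'))
lv1 = K.zeros_like(m1)
print(K.eval(mvn_kl(m1, lv1)))   # expected value: ~0.5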
Example #33
    def build(self, input_shape):

        self.select_features = K.random_binomial(
            shape=input_shape[1:], p=self.prob,
            seed=None)  # binary mask of features to be selected
        super(Sparse, self).build(input_shape)
Example #34
def sample_bernoulli(p_h):
    # samples an uncorrelated Bernoulli distribution with p(h_i = 1) = p_h_i
    return K.random_binomial(shape=p_h.shape, p=p_h)
Example #35
 def sample(args):
     pi = args
     return K.random_binomial(shape=K.shape(pi), p=pi)