def call(self, x, mask=None):
    x0 = x
    if mask is not None:
        mask = K.cast(mask, K.floatx())
        mask = K.expand_dims(mask, 2)
    # x = x0 * mask if mask is not None else x0
    x0 = Lambda(lambda x_: x_, output_shape=lambda s: s)(x0)  # identity Lambda drops the Keras mask so it is not passed to conv1d
    x = self.conv1d(x0)
    x, g = x[:, :, :self.o_dim], x[:, :, self.o_dim:]  # split into value half and gate half
    if self.dropout_rate is not None:
        g = K.in_train_phase(K.dropout(g, self.dropout_rate), g)
    g = K.sigmoid(g)
    # if no mask was given, fall back to an all-ones mask
    mask = mask if mask is not None else K.ones_like(x)
    if self.skip_connection:
        if K.int_shape(x0)[-1] != self.o_dim:
            x0 = self.conv1d_1x1(x0)  # 1x1 conv so the residual branch matches o_dim channels
        return (x0 * (1 - g) + x * g) * mask
    return x * g * mask
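The convolution sub-layers used above are created elsewhere in the class. Below is a minimal sketch of what their construction has to look like, inferred from the `call` body; the `build` placement and the `kernel_size` / `dilation_rate` attributes are assumptions, not the original code. `conv1d` must output `2 * self.o_dim` channels, because `call` splits it into a value half and a gate half, and `conv1d_1x1` must project the residual branch down to `o_dim` channels.

from keras.layers import Conv1D

def build(self, input_shape):
    # value channels + gate channels, split in call() at self.o_dim
    self.conv1d = Conv1D(self.o_dim * 2,
                         self.kernel_size,                  # assumed attribute
                         dilation_rate=self.dilation_rate,  # assumed attribute
                         padding='same')                    # keep sequence length for the skip connection
    # 1x1 projection, only applied when the residual branch has a different channel count
    self.conv1d_1x1 = Conv1D(self.o_dim, 1)
    self.built = True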
def call(self, inputs):
    clf, x_pre, x_next = inputs
    uncertain = normal_shannon_entropy(clf, num_classes)
    # take the earlier branch x_pre when the normalised entropy is below the speed threshold
    cond = K.greater(self.speed, uncertain)
    x = K.switch(cond, x_pre, x_next)
    # during training always propagate x_next; the switch only takes effect at inference
    return K.in_train_phase(x_next, x)
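`normal_shannon_entropy` is not defined in this snippet. A minimal sketch of what it is assumed to compute: the Shannon entropy of the class distribution, normalised by its maximum value log(num_classes) so the result lies in [0, 1] and can be compared against the `speed` threshold.

import numpy as np
import keras.backend as K

def normal_shannon_entropy(p, num_classes):
    # p: class probabilities with shape (batch, num_classes)
    p = K.clip(p, K.epsilon(), 1.0)                        # avoid log(0)
    entropy = -K.sum(p * K.log(p), axis=-1, keepdims=True)
    return entropy / np.log(num_classes)                   # normalise to [0, 1]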
def call(self, inputs):
    source, target = inputs
    source = source * self.proportion
    target = target * (1 - self.proportion)
    output = (source + target) / 2
    # at inference only the target branch (already scaled by 1 - proportion) is returned
    return K.in_train_phase(output, target)
def call(self, inputs):
    source, target = inputs
    mask = K.random_binomial(shape=[1], p=0.5)
    output = mask * source + (1 - mask) * target
    return K.in_train_phase(output, target)
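Both mixing layers above rely on `K.in_train_phase`, which returns its first argument when the learning phase is 1 and its second argument when it is 0. A standalone sketch (not part of the original layers, and assuming the TF1-style graph-mode Keras backend) of that behaviour for the random-switch pattern:

import numpy as np
import keras.backend as K

source = K.placeholder(shape=(None, 4))
target = K.placeholder(shape=(None, 4))
mask = K.random_binomial(shape=[1], p=0.5)
mixed = K.in_train_phase(mask * source + (1 - mask) * target, target)

f = K.function([source, target, K.learning_phase()], [mixed])
s, t = np.zeros((2, 4)), np.ones((2, 4))
print(f([s, t, 1])[0])  # training: either all zeros or all ones, chosen at random
print(f([s, t, 0])[0])  # inference: always the target (all ones)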