def transform(data, network, time_factor=1):
  x = data.placeholder
  from returnn.tf.compat import v1 as tf
  # summary("features", x)
  step = network.global_train_step
  # Schedule: switch on stronger masking after 1000 and again after 2000 train steps.
  step1 = tf.where(tf.greater_equal(step, 1000), 1, 0)
  step2 = tf.where(tf.greater_equal(step, 2000), 1, 0)

  def get_masked():
    x_masked = x
    # Mask random blocks along the time axis.
    x_masked = random_mask(
      x_masked, batch_axis=data.batch_dim_axis, axis=data.time_dim_axis,
      min_num=step1 + step2,
      max_num=tf.maximum(tf.shape(x)[data.time_dim_axis] // 100, 2) * (1 + step1 + step2 * 2),
      max_dims=20 // time_factor)
    # Mask random blocks along the feature axis.
    x_masked = random_mask(
      x_masked, batch_axis=data.batch_dim_axis, axis=data.feature_dim_axis,
      min_num=step1 + step2,
      max_num=2 + step1 + step2 * 2,
      max_dims=data.dim // 5)
    # summary("features_mask", x_masked)
    return x_masked

  # Apply masking only during training; pass features through unchanged otherwise.
  x = network.cond_on_train(get_masked, lambda: x)
  return x
def transform(data, network, time_factor=1):
  x = data.placeholder
  from returnn.tf.compat import v1 as tf
  # summary("features", x)
  step = network.global_train_step
  # Schedule: switch on stronger masking after 1000 and again after 2000 train steps.
  step1 = tf.where(tf.greater_equal(step, 1000), 1, 0)
  step2 = tf.where(tf.greater_equal(step, 2000), 1, 0)

  def get_masked():
    x_masked = x
    x_masked = random_mask(
      x_masked, batch_axis=data.batch_dim_axis, axis=data.time_dim_axis,
      min_num=step1 + step2,
      max_num=tf.maximum(tf.shape(x)[data.time_dim_axis] // 100, 2) * (1 + step1 + step2 * 2),
      max_dims=20 // time_factor)
    x_masked = random_mask(
      x_masked, batch_axis=data.batch_dim_axis, axis=data.feature_dim_axis,
      min_num=step1 + step2,
      max_num=2 + step1 + step2 * 2,
      max_dims=data.dim // 5)
    # summary("features_mask", x_masked)
    return x_masked

  cond1 = network.train_flag
  # Skip SpecAugment for utterances shorter than 20 frames.
  cond2 = tf.greater_equal(tf.shape(x)[data.time_dim_axis], 20)
  x = tf.cond(tf.logical_and(cond1, cond2), get_masked, lambda: x)
  return x
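# Usage sketch (assumption, not shown in this section): the `transform(data, network, ...)` variants
# above are typically attached to a RETURNN network via an "eval" layer whose eval string looks up the
# function in the config; the layer name "source" and the input "data" are placeholders here.
network = {
  "source": {
    "class": "eval", "from": "data",
    "eval": "self.network.get_config().typed_value('transform')(source(0, as_data=True), network=self.network)",
  },
  # ... remaining layers ...
}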
def transform(source, **kwargs):
  from returnn.tf.compat import v1 as tf
  data = source(0, as_data=True)
  time_factor = 1  # for switchout == 6
  x = data.placeholder
  network = kwargs["self"].network
  step = network.global_train_step
  # Schedule: switch on stronger masking after 1000 and again after 2000 train steps.
  step1 = tf.where(tf.greater_equal(step, 1000), 1, 0)
  step2 = tf.where(tf.greater_equal(step, 2000), 1, 0)

  def get_masked():
    x_masked = x
    x_masked = random_mask(
      x_masked, batch_axis=data.batch_dim_axis, axis=data.time_dim_axis,
      min_num=step1 + step2,
      max_num=tf.maximum(tf.shape(x)[data.time_dim_axis] // 100, 2) * (1 + step1 + step2 * 2),
      max_dims=20 // time_factor)
    x_masked = random_mask(
      x_masked, batch_axis=data.batch_dim_axis, axis=data.feature_dim_axis,
      min_num=step1 + step2,
      max_num=2 + step1 + step2 * 2,
      max_dims=data.dim // 5)
    return x_masked

  x = network.cond_on_train(get_masked, lambda: x)
  return x
def _mask(x, batch_axis, axis, pos, max_amount):
  """
  :param tf.Tensor x: (batch,time,feature)
  :param int batch_axis:
  :param int axis:
  :param tf.Tensor pos: (batch,)
  :param int|tf.Tensor max_amount: inclusive
  """
  from returnn.tf.compat import v1 as tf
  ndim = x.get_shape().ndims
  n_batch = tf.shape(x)[batch_axis]
  dim = tf.shape(x)[axis]
  # Per-sequence mask length, drawn uniformly in [1, max_amount].
  amount = tf.random_uniform(shape=(n_batch,), minval=1, maxval=max_amount + 1, dtype=tf.int32)
  pos2 = tf.minimum(pos + amount, dim)
  idxs = tf.expand_dims(tf.range(0, dim), 0)  # (1,dim)
  pos_bc = tf.expand_dims(pos, 1)  # (batch,1)
  pos2_bc = tf.expand_dims(pos2, 1)  # (batch,1)
  cond = tf.logical_and(tf.greater_equal(idxs, pos_bc), tf.less(idxs, pos2_bc))  # (batch,dim)
  if batch_axis > axis:
    cond = tf.transpose(cond)  # (dim,batch)
  # Reshape the mask so it broadcasts over all remaining axes of x.
  cond = tf.reshape(cond, [tf.shape(x)[i] if i in (batch_axis, axis) else 1 for i in range(ndim)])
  from returnn.tf.util.basic import where_bc  # broadcasting variant of tf.where
  x = where_bc(cond, 0.0, x)
  return x
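# Illustration (assumption, not from this section): zero out one random block of at most 10 consecutive
# time frames per sequence in a dummy (batch, time, feature) tensor, using `_mask` above.
from returnn.tf.compat import v1 as tf
feats = tf.random_normal((8, 100, 40))  # hypothetical input: batch=8, 100 frames, 40 features
start = tf.random_uniform(shape=(8,), maxval=100, dtype=tf.int32)  # random start frame per sequence
masked = _mask(feats, batch_axis=0, axis=1, pos=start, max_amount=10)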
def _get_mask(x, axis, pos, max_amount):
  """
  :param tf.Tensor x: (batch,time,feature)
  :param int axis:
  :param tf.Tensor pos: (batch,)
  :param int max_amount: inclusive
  """
  from returnn.tf.compat import v1 as tf
  n_batch = tf.shape(x)[0]
  dim = tf.shape(x)[axis]
  amount = tf.random_uniform(shape=(n_batch,), minval=1, maxval=max_amount + 1, dtype=tf.int32)
  pos2 = tf.minimum(pos + amount, dim)
  idxs = tf.expand_dims(tf.range(0, dim), 0)  # (1,dim)
  pos_bc = tf.expand_dims(pos, 1)  # (batch,1)
  pos2_bc = tf.expand_dims(pos2, 1)  # (batch,1)
  cond = tf.logical_and(tf.greater_equal(idxs, pos_bc), tf.less(idxs, pos2_bc))  # (batch,dim)
  return cond
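# `random_mask` is called by the `transform` variants above but is not defined in this section.
# The following is only a sketch of a compatible implementation (an assumption, not taken from this
# document): draw a per-sequence number of masked blocks in [min_num, max_num] and apply `_mask` that
# many times at random positions. The row-wise tf.where below assumes batch_axis == 0.
def random_mask(x, batch_axis, axis, min_num, max_num, max_dims):
  from returnn.tf.compat import v1 as tf
  n_batch = tf.shape(x)[batch_axis]
  # Number of masked blocks per sequence, drawn uniformly in [min_num, max_num].
  num = tf.random_uniform(shape=(n_batch,), minval=min_num, maxval=max_num + 1, dtype=tf.int32)
  _, x = tf.while_loop(
    cond=lambda i, _: tf.less(i, tf.reduce_max(num)),
    body=lambda i, x_: (
      i + 1,
      tf.where(
        tf.less(i, num),  # only sequences that still need another block get masked in this iteration
        _mask(
          x_, batch_axis=batch_axis, axis=axis,
          pos=tf.random_uniform(shape=(n_batch,), maxval=tf.shape(x_)[axis], dtype=tf.int32),
          max_amount=max_dims),
        x_)),
    loop_vars=(0, x))
  return x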