Example #1
0
    def test_sequence_example_into_input_layer(self):
        """End-to-end check that parsed SequenceExamples flow through
        SequenceFeatures + DenseFeatures into an RNN with the expected
        batch and output shapes."""
        serialized = [_make_sequence_example().SerializeToString()] * 100
        context_cols, sequence_cols = self._build_feature_columns()

        # Parse specs are deterministic, so build them once up front.
        context_spec = tf.feature_column.make_parse_example_spec(context_cols)
        sequence_spec = tf.feature_column.make_parse_example_spec(sequence_cols)

        def _parse_example(serialized_proto):
            context, sequence = tf.io.parse_single_sequence_example(
                serialized_proto,
                context_features=context_spec,
                sequence_features=sequence_spec)
            merged = dict(context)
            merged.update(sequence)
            return merged

        dataset = tf.data.Dataset.from_tensor_slices(serialized)
        dataset = dataset.map(_parse_example).batch(20)

        # Pull a single parsed batch of features.
        features = tf.compat.v1.data.make_one_shot_iterator(dataset).get_next()

        # Sequence branch: (batch, time, dim) plus sequence lengths (unused).
        seq_input, _ = ksfc.SequenceFeatures(sequence_cols)(features)
        # Context branch, tiled across the time axis so it can be
        # concatenated with the sequence input.
        ctx_input = dense_features.DenseFeatures(context_cols)(features)
        num_steps = tf.compat.v1.shape(seq_input)[1]
        ctx_input = core.RepeatVector(num_steps)(ctx_input)
        rnn_input = merge.concatenate([seq_input, ctx_input])

        output = recurrent.RNN(recurrent.SimpleRNNCell(10))(rnn_input)

        with self.cached_session() as sess:
            sess.run(tf.compat.v1.global_variables_initializer())
            features_r = sess.run(features)
            self.assertAllEqual(features_r['int_list'].dense_shape, [20, 3, 6])

            output_r = sess.run(output)
            self.assertAllEqual(output_r.shape, [20, 10])
Example #2
0
def test_repeat_vector():
    """Smoke-test the RepeatVector layer via the shared layer runner."""
    _runner(core.RepeatVector(10))
 def test_repeat_vector(self):
     """Smoke-test the RepeatVector layer through the test-case runner."""
     self._runner(core.RepeatVector(10))
Example #4
0
def repeat(seq, times):
    """Wrap ``core.RepeatVector`` as a function: repeat *seq* *times* times."""
    layer = core.RepeatVector(times)
    return layer(seq)
# NOTE(review): Python 2 syntax (`print` statements) and the legacy
# `_keras_shape` attribute -- this fragment targets the old Keras 1.x API.
# `adaptive_conv`, `deconv_rgb_5`, `deconv_nir_5`, `Dense`, and `keras`
# are defined earlier in the file (not visible here).

# Collapse the adaptive conv output to one "time step" and squash it into
# two softmax weights -- presumably one weight per modality; TODO confirm.
adaptive_vec = core.Reshape((1, -1))(adaptive_conv)
soft_dense = Dense(2, activation='softmax')(adaptive_vec)

#------------------------------------------------------------------------------------------
# Flatten both decoder branches and concatenate into one feature vector.
inshape = deconv_nir_5._keras_shape
print inshape
before_merge_rgb = core.Flatten()(deconv_rgb_5)
print before_merge_rgb._keras_shape
before_merge_nir = core.Flatten()(deconv_nir_5)
print before_merge_nir._keras_shape
merge_flat = keras.layers.concatenate([before_merge_rgb, before_merge_nir])
print merge_flat._keras_shape

# Broadcast the two softmax weights: RepeatVector tiles the 2-vector once
# per NIR feature, then flattening yields a vector sized to line up with
# `merge_flat` -- assumes the RGB and NIR branches have equal flattened
# lengths; verify against the upstream layer shapes.
soft_flat = core.Flatten()(soft_dense)
print soft_flat._keras_shape
repeat = core.RepeatVector(before_merge_nir._keras_shape[1])(soft_flat)
print repeat._keras_shape
repeat_flat = core.Flatten()(repeat)
print repeat_flat._keras_shape

# Element-wise gate the concatenated features by the broadcast weights,
# then view the result as (2, features) -- one row per branch.
reshape_now = keras.layers.multiply([repeat_flat, merge_flat])
reshape_now = core.Reshape((2, -1))(reshape_now)
outshape = reshape_now._keras_shape

# Slice each branch's gated features out as a (batch, 1, features) tensor;
# output_shape echoes the static shape captured in `outshape` above.
layer1 = core.Lambda(lambda x: x[:, 0:1, :],
                     output_shape=lambda x:
                     (outshape[0], 1, outshape[2]))(reshape_now)
layer2 = core.Lambda(lambda x: x[:, 1:2, :],
                     output_shape=lambda x:
                     (outshape[0], 1, outshape[2]))(reshape_now)