Example #1
    def build(self, input_shape):
        assert isinstance(input_shape, list) and 2 == len(input_shape)
        out_shape, residual_shape = input_shape
        # two inputs are expected: the sub-layer output and the residual connection
        self.input_spec = [
            keras.layers.InputSpec(shape=out_shape),
            keras.layers.InputSpec(shape=residual_shape)
        ]

        self.dense = keras.layers.Dense(
            units=self.params.hidden_size,
            kernel_initializer=self.create_initializer(),
            name="dense")
        self.dropout = keras.layers.Dropout(rate=self.params.hidden_dropout)
        self.layer_norm = LayerNormalization(name="LayerNorm")

        # optional adapter bottleneck (adapter-BERT style fine-tuning)
        if self.params.adapter_size is not None:
            self.adapter_down = keras.layers.Dense(
                units=self.params.adapter_size,
                kernel_initializer=tf.keras.initializers.TruncatedNormal(
                    stddev=self.params.adapter_init_scale),
                activation=self.get_activation(self.params.adapter_activation),
                name="adapter-down")
            self.adapter_up = keras.layers.Dense(
                units=self.params.hidden_size,
                kernel_initializer=tf.keras.initializers.TruncatedNormal(
                    stddev=self.params.adapter_init_scale),
                name="adapter-up")

        super(ProjectionLayer, self).build(input_shape)
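For reference, a minimal sketch of how the sublayers built above are typically wired together in call(), assuming the BERT-style "add & norm" projection with an optional adapter bottleneck (the call() body itself is not part of this example):

    def call(self, inputs, mask=None, training=None):
        output, residual = inputs                        # matches the two-element input_spec
        output = self.dense(output)                      # project back to hidden_size
        output = self.dropout(output, training=training)
        if self.params.adapter_size is not None:         # adapter-BERT bottleneck
            output = output + self.adapter_up(self.adapter_down(output))
        return self.layer_norm(output + residual)        # residual add, then LayerNorm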
Example #2
    def test_seq_model(self):
        model = keras.Sequential([CustomLayer(num_units=17),
                                  LayerNormalization()])
        model.compute_output_shape(input_shape=(16, 3, 4))
        # model.build(input_shape=(16, 3, 4))
        model.compile(optimizer='adam', loss='mse')
        model.fit(tf.ones((16, 3, 4), dtype=tf.float32),
                  tf.ones((16, 3, 17), dtype=tf.float32),
                  steps_per_epoch=2, epochs=10,
                  callbacks=[pf.utils.create_one_cycle_lr_scheduler(
                      max_learn_rate=5e-2,
                      end_learn_rate=1e-7,
                      warmup_epoch_count=5,
                      total_epoch_count=10)
                  ])
        model.summary()
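pf.utils.create_one_cycle_lr_scheduler above comes from the params-flow package; its exact schedule is not shown here. As a rough, generic stand-in (not the params-flow implementation), a warm-up-then-decay schedule can be expressed with the standard Keras LearningRateScheduler callback:

from tensorflow import keras

def one_cycle_lr(max_lr=5e-2, end_lr=1e-7, warmup_epochs=5, total_epochs=10):
    # Generic sketch only: linear warm-up to max_lr, then exponential decay to end_lr.
    def schedule(epoch, lr=None):
        if epoch < warmup_epochs:
            return max_lr * (epoch + 1) / warmup_epochs
        progress = (epoch - warmup_epochs + 1) / (total_epochs - warmup_epochs)
        return max_lr * (end_lr / max_lr) ** progress
    return keras.callbacks.LearningRateScheduler(schedule)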
Example #3
    def test_equal(self):
        norm_layer = LayerNormalization()
        model = keras.Sequential(
            [keras.layers.InputLayer(input_shape=(16, 256)), norm_layer])

        # model.build(input_shape=(3, 16, 256))
        model.compile(optimizer=keras.optimizers.Adam(), loss='mse')
        # model.summary()

        model.fit(np.zeros((3, 16, 256)), np.ones((3, 16, 256)))
        model.summary()

        inputs = np.zeros((3, 16, 256))
        predicted = model.predict(inputs)
        expected = np.ones_like(inputs)
        np.allclose(expected, predicted)
Example #4
    def build(self, input_shape):
        assert isinstance(input_shape, list) and 2 == len(input_shape)
        out_shape, residual_shape = input_shape
        self.input_spec = [
            keras.layers.InputSpec(shape=out_shape),
            keras.layers.InputSpec(shape=residual_shape)
        ]

        self.dense = keras.layers.Dense(
            units=self.params.hidden_size,
            kernel_initializer=self.create_initializer(),
            name="dense")
        self.dropout = keras.layers.Dropout(rate=self.params.hidden_dropout)
        self.layer_norm = LayerNormalization(name="LayerNorm")

        super(ProjectionLayer, self).build(input_shape)
Example #5
    def test_serialization(self):
        model = keras.Sequential([LayerNormalization(input_shape=(2, 3))])
        model.compile(optimizer='adam', loss='mse')
        model.summary()

        with tempfile.TemporaryDirectory() as temp_dir:
            temp_file = os.path.join(temp_dir, "model")
            model.save(temp_file)
            model = keras.models.load_model(
                temp_file,
                custom_objects={"LayerNormalization": LayerNormalization})
            model.summary()

        encoded = model.to_json()
        model = keras.models.model_from_json(
            encoded, custom_objects={"LayerNormalization": LayerNormalization})
        model.summary()
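An equivalent way to restore the model without repeating custom_objects at every call is a custom-object scope; a small sketch, assuming saved_path points at a model saved as above (the name is illustrative):

with keras.utils.CustomObjectScope({"LayerNormalization": LayerNormalization}):
    restored = keras.models.load_model(saved_path)  # saved_path is hypothetical
    restored.summary()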
Example #6
    def test_serialization(self):
        model = keras.Sequential([LayerNormalization(input_shape=(2, 3))])
        model.compile(optimizer='adam', loss='mse')
        model.summary()

        with tempfile.NamedTemporaryFile() as temp_file:
            temp_file.file.close()
            model.save(temp_file.name)
            model = keras.models.load_model(
                temp_file.name,
                custom_objects={"LayerNormalization": LayerNormalization})
            model.summary()

        encoded = model.to_json()
        model = keras.models.model_from_json(
            encoded, custom_objects={"LayerNormalization": LayerNormalization})
        model.summary()
Example #7
    def test_simple(self):

        norm_layer = LayerNormalization()
        model = keras.Sequential(
            [keras.layers.InputLayer(input_shape=(2, 3)), norm_layer])

        model.build(input_shape=(None, 2, 3))
        model.compile(optimizer=keras.optimizers.Adam(), loss='mse')
        model.summary()

        inputs = np.array([[[.2, .1, .3], [.5, .1, .1]]])

        predict = model.predict(inputs)

        expected = np.asarray([[[0, -1.2247, 1.2247],
                                [1.4142, -0.7071, -0.7071]]])

        self.assertTrue(np.allclose(predict, expected, atol=1e-4))
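The expected values in this test follow directly from per-feature layer normalization, (x - mean) / std with gamma = 1 and beta = 0; a quick plain-NumPy check (assuming a negligible epsilon):

import numpy as np

x = np.array([[.2, .1, .3], [.5, .1, .1]])
normed = (x - x.mean(axis=-1, keepdims=True)) / x.std(axis=-1, keepdims=True)
print(np.round(normed, 4))  # approx. [[0, -1.2247, 1.2247], [1.4142, -0.7071, -0.7071]]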
Example #8
    def __init__(self,
                 config,
                 num_heads,
                 x_dim,
                 trainable=True,
                 name=None,
                 dtype=None,
                 dynamic=False,
                 **kwargs):
        super().__init__(trainable, name, dtype, dynamic, **kwargs)

        # optionally shrink the input features to sa_dim before the MLPs
        sa_dim = config.get('sa_dim', None)
        if sa_dim:
            self.shrink = tf.keras.layers.Dense(sa_dim, name='shrink')
            x_dim = sa_dim

        self.mlp_arc_h = MLP(n_hidden=config.n_mlp_arc,
                             dropout=config.mlp_dropout,
                             name='mlp_arc_h')
        self.mlp_arc_d = MLP(n_hidden=config.n_mlp_arc,
                             dropout=config.mlp_dropout,
                             name='mlp_arc_d')
        self.mlp_rel_h = MLP(n_hidden=config.n_mlp_rel,
                             dropout=config.mlp_dropout,
                             name='mlp_rel_h')
        self.mlp_rel_d = MLP(n_hidden=config.n_mlp_rel,
                             dropout=config.mlp_dropout,
                             name='mlp_rel_d')

        # the Biaffine layers
        self.arc_attn = Biaffine(n_in=config.n_mlp_arc,
                                 bias_x=True,
                                 bias_y=False,
                                 name='arc_attn')
        self.rel_attn = Biaffine(n_in=config.n_mlp_rel,
                                 n_out=config.n_rels,
                                 bias_x=True,
                                 bias_y=True,
                                 name='rel_attn')
        self.heads_WV = self.add_weight(shape=[num_heads, x_dim, x_dim])
        self.dense = tf.keras.layers.Dense(x_dim)
        self.layer_norm = LayerNormalization(name="LayerNorm")
        self.graph = config.get('graph', False)
Example #9
def layer_norm(name=None):
    """Return layer normalization function."""
    return LayerNormalization(name=name)
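A short usage sketch of this factory inside a functional model (the layer name and shapes are illustrative; LayerNormalization is the custom layer used throughout these examples):

from tensorflow import keras

inputs = keras.Input(shape=(16, 256))
outputs = layer_norm(name="encoder_LayerNorm")(inputs)
model = keras.Model(inputs, outputs)
model.summary()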
Example #10
    def _construct(self, params):
        self.layer = CustomLayer.from_params(params)
        self.norm = LayerNormalization()
        self.supports_masking = True