Example #1
# Assumed context: a method of a unittest.TestCase subclass in the DeepLIFT
# test suite; imports are presumably along the lines of:
#   import numpy as np
#   from deeplift import layers
#   from deeplift.layers import DenseMxtsMode, NonlinearMxtsMode
def test_relu_after_dense_batchnorm_noop_noop(self):
    input_layer = layers.Input(batch_shape=(None, 4))
    # Dense layer with random weights and biases, using linear multipliers.
    dense_layer = layers.Dense(kernel=np.random.random((4, 2)),
                               bias=np.random.random((2,)),
                               dense_mxts_mode=DenseMxtsMode.Linear)
    dense_layer.set_inputs(input_layer)
    # Batch normalization with fixed statistics along the last axis.
    batch_norm = layers.BatchNormalization(
        gamma=np.array([1.0, 1.0]).astype("float32"),
        beta=np.array([-0.5, 0.5]).astype("float32"),
        axis=-1,
        mean=np.array([-0.5, 0.5]).astype("float32"),
        var=np.array([1.0, 1.0]).astype("float32"),
        epsilon=0.001)
    batch_norm.set_inputs(dense_layer)
    # Two pass-through NoOp layers between the batch norm and the ReLU.
    noop_layer1 = layers.NoOp()
    noop_layer1.set_inputs(batch_norm)
    noop_layer2 = layers.NoOp()
    noop_layer2.set_inputs(noop_layer1)
    relu_after_bn = layers.ReLU(
        nonlinear_mxts_mode=NonlinearMxtsMode.DeepLIFT_GenomicsDefault)
    relu_after_bn.set_inputs(noop_layer2)
    # Building the forward-pass variables resolves the GenomicsDefault mode.
    relu_after_bn.build_fwd_pass_vars()
    # The ReLU sits downstream of a Dense layer (the batch norm and NoOps
    # are transparent), so the mode should resolve to RevealCancel.
    self.assertEqual(relu_after_bn.nonlinear_mxts_mode,
                     NonlinearMxtsMode.RevealCancel)
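This test exercises the DeepLIFT_GenomicsDefault heuristic: the multiplier
rule for a nonlinearity is chosen from the layer it effectively follows,
RevealCancel for ReLUs downstream of a fully-connected layer and Rescale for
ReLUs downstream of a convolution. Pass-through layers such as
BatchNormalization and NoOp do not affect this resolution, which is why the
assertion expects RevealCancel even with the two NoOps in between.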
Example #2
def noop_conversion(name, **kwargs):
    # Conversion handler that maps a layer with no effect on attributions
    # (e.g. one that is an identity at inference time) to a single DeepLIFT
    # NoOp layer; extra keyword arguments are accepted and ignored.
    return [layers.NoOp(name=name)]
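A minimal sketch of how such a handler behaves, assuming the
`deeplift.layers` import path used above; the layer name "dropout_1" is
purely illustrative. Handlers like this are presumably registered for Keras
layers, such as Dropout, that are identities at inference time.

from deeplift import layers

converted = noop_conversion(name="dropout_1")
assert len(converted) == 1
assert isinstance(converted[0], layers.NoOp)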