Example #1
 def test_fprop_pos_and_neg_contribs(self):
     conv_layer = blobs.Conv1D(W=self.conv_W,
                               b=self.conv_b,
                               stride=1,
                               border_mode=PaddingMode.valid,
                               channels_come_last=False,
                               conv_mxts_mode=ConvMxtsMode.Linear)
     self.create_small_net_with_conv_layer(conv_layer,
                                           outputs_per_channel=3)
     pos_contribs, neg_contribs = self.conv_layer.get_pos_and_neg_contribs()
     func_pos = compile_func([
         self.input_layer.get_activation_vars(),
         self.input_layer.get_reference_vars()
     ], pos_contribs)
     func_neg = compile_func([
         self.input_layer.get_activation_vars(),
         self.input_layer.get_reference_vars()
     ], neg_contribs)
     #diff from ref:
     #      [[[-9,-8,-7,-6],
     #        [-5,-4,-3,-2]],
     #       [[-1, 0, 1, 2],
     #        [ 3, 4, 5, 6]]]
     # W:
     # [-2,-1
     #   0, 1]
     # first window of sample 1: pos terms 18+8 = 26; neg term -4
     # first window of sample 2: pos terms 2+4 = 6; no negative terms
     np.testing.assert_almost_equal(
         func_pos(self.inp, np.ones_like(self.inp)),
         np.array([[[26, 23, 20], [4, 3, 2]], [[6, 5, 6], [0, 1, 4]]]))
     np.testing.assert_almost_equal(
         func_neg(self.inp, np.ones_like(self.inp)),
         np.array([[[-4, -3, -2], [-26, -23, -20]],
                   [[0, -1, -4], [-6, -5, -6]]]))
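The arithmetic in the comments can be reproduced directly with NumPy. The sketch below is not deeplift's internal code; it just recomputes the first output position of sample 1 from the quoted diff-from-reference values and W:

import numpy as np

# Per-tap contributions for the first valid window of sample 1,
# using only the values quoted in the comments above.
diff_ref = np.array([[-9., -8., -7., -6.],
                     [-5., -4., -3., -2.]])  # (channels, length)
W = np.array([[-2., -1.],
              [ 0.,  1.]])                   # (channels, kernel_width)
terms = diff_ref[:, 0:2] * W                 # [[18, 8], [0, -4]]
print(terms[terms > 0].sum())                # 18 + 8 = 26
print(terms[terms < 0].sum())                # -4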
Example #2
 def test_fprop(self):
     conv_layer = blobs.Conv1D(W=self.conv_W,
                               b=self.conv_b,
                               stride=1,
                               border_mode=PaddingMode.valid,
                               channels_come_last=False,
                               conv_mxts_mode=ConvMxtsMode.Linear)
     self.create_small_net_with_conv_layer(conv_layer,
                                           outputs_per_channel=3)
     func = compile_func([self.input_layer.get_activation_vars()],
                         self.conv_layer.get_activation_vars())
     #input:
     #      [[[-8,-7,-6,-5],
     #        [-4,-3,-2,-1]],
     #       [[ 0, 1, 2, 3],
     #        [ 4, 5, 6, 7]]]
     # W:
     # [-2,-1
     #   0, 1]
     # sample 1, window 0: 16+7+0-3 = 20, minus bias (1.0) = 19
     # sample 2, window 0: 0-1+0+5 = 4, minus bias (1.0) = 3
     np.testing.assert_almost_equal(
         func(self.inp),
         np.array([[[19, 17, 15], [-19, -17, -15]], [[3, 1, -1],
                                                     [-3, -1, 1]]]))
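The first expected value can be checked by hand. This is a sketch using the input and W quoted in the comments; the 1.0 subtracted as bias matches the comments, since self.conv_b itself is not shown here:

import numpy as np

# Cross-correlate the first valid window of sample 1 with W.
inp = np.array([[-8., -7., -6., -5.],
                [-4., -3., -2., -1.]])  # (channels, length)
W = np.array([[-2., -1.],
              [ 0.,  1.]])              # (channels, kernel_width)
conv = (inp[:, 0:2] * W).sum()          # 16 + 7 + 0 - 3 = 20
print(conv - 1.0)                       # 19.0 after the bias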
Example #3
 def test_fprop_stride(self):
     conv_layer = blobs.Conv1D(W=self.conv_W,
                               b=self.conv_b,
                               stride=2,
                               border_mode=PaddingMode.valid,
                               channels_come_last=False)
     self.create_small_net_with_conv_layer(conv_layer,
                                           outputs_per_channel=3)
     func = compile_func([self.input_layer.get_activation_vars()],
                         self.conv_layer.get_activation_vars())
     np.testing.assert_almost_equal(
         func(self.inp),
         np.array([[[23, 35], [-23, -35]], [[71, 83], [-71, -83]]]))
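With stride 2, "valid" padding leaves only two window positions over a length-4 input, which is why each channel now has two values instead of three. The standard output-length formula makes this concrete (a generic helper, not part of deeplift):

def valid_conv1d_out_len(in_len, kernel_len, stride):
    # number of valid window positions for "valid" padding
    return (in_len - kernel_len) // stride + 1

print(valid_conv1d_out_len(4, 2, 1))  # 3 outputs, as in test_fprop
print(valid_conv1d_out_len(4, 2, 2))  # 2 outputs, as asserted above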
Example #4
 def test_relu_after_conv1d(self):
     input_layer = blobs.Input(num_dims=None, shape=(None, 2, 2))
     conv_layer = blobs.Conv1D(W=np.random.random((2, 2, 2)),
                               b=np.random.random((2, )),
                               conv_mxts_mode=ConvMxtsMode.Linear,
                               stride=1,
                               border_mode=PaddingMode.valid,
                               channels_come_last=True)
     conv_layer.set_inputs(input_layer)
     relu_after_conv = blobs.ReLU(
         nonlinear_mxts_mode=NonlinearMxtsMode.DeepLIFT_GenomicsDefault)
     relu_after_conv.set_inputs(conv_layer)
     relu_after_conv.build_fwd_pass_vars()
     self.assertEqual(relu_after_conv.nonlinear_mxts_mode,
                      NonlinearMxtsMode.Rescale)
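What the assertion pins down is the genomics-default dispatch: a ReLU whose nonlinear_mxts_mode is DeepLIFT_GenomicsDefault resolves to Rescale when it follows a conv layer, with RevealCancel as the dense-layer counterpart described for the genomics default. A toy restatement of that rule, not deeplift's API:

def genomics_default_rule(follows_conv_layer):
    # Rescale after conv layers, RevealCancel after dense layers
    return "Rescale" if follows_conv_layer else "RevealCancel"

assert genomics_default_rule(True) == "Rescale"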
Example #5
 def test_dense_backprop_stride(self):
     conv_layer = blobs.Conv1D(W=self.conv_W,
                               b=self.conv_b,
                               stride=2,
                               border_mode=PaddingMode.valid,
                               channels_come_last=False)
     self.create_small_net_with_conv_layer(conv_layer,
                                           outputs_per_channel=2)
     self.dense_layer.update_task_index(task_index=0)
     func = compile_func([
         self.input_layer.get_activation_vars(),
         self.input_layer.get_reference_vars()
     ], self.input_layer.get_mxts())
     np.testing.assert_almost_equal(
         func(self.inp, np.zeros_like(self.inp)),
         np.array([[[0, 2, 0, 2], [4, 6, 4, 6]], [[0, 2, 0, 2],
                                                  [4, 6, 4, 6]]]))
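The shape of the expected array is informative: with stride 2 and a width-2 kernel, the valid windows do not overlap, so for this purely linear net the input multipliers are a single per-window pattern tiled across the sequence. The pattern below is read off the expected array; it is an assumption about the fixture's weights, not self.conv_W itself:

import numpy as np

# One per-window multiplier pattern, tiled over the two stride-2 windows.
window_mxts = np.array([[0., 2.],
                        [4., 6.]])      # (channels, kernel_width)
print(np.tile(window_mxts, (1, 2)))     # [[0. 2. 0. 2.], [4. 6. 4. 6.]]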
Example #6
def conv1d_conversion(layer, name, verbose, nonlinear_mxts_mode,
                      conv_mxts_mode, **kwargs):
    # nonlinear_mxts_mode is only used for the activation layer
    print(layer.get_config())
    converted_activation = activation_conversion(
        layer, name, verbose, nonlinear_mxts_mode=nonlinear_mxts_mode)
    print(layer.get_weights()[0].shape)
    to_return = [
        blobs.Conv1D(
            name=("preact_" if len(converted_activation) > 0 else "") + name,
            # W=layer.get_weights()[0].squeeze(1),
            W=layer.get_weights()[0],
            b=layer.get_weights()[1],
            stride=layer.get_config()[KerasKeys.strides][0],
            padding_mode=layer.get_config()[KerasKeys.padding].upper(),
            conv_mxts_mode=conv_mxts_mode)
    ]
    to_return.extend(converted_activation)
    return to_return
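One detail worth isolating is the naming rule: when the Keras layer carried a fused activation, activation_conversion returns a non-empty list and the linear conv blob is renamed with a "preact_" prefix. In isolation (a sketch; "relu_blob" is just a stand-in value):

def conv_blob_name(name, converted_activation):
    # mirror of the rule above: conv gets a "preact_" prefix whenever
    # a fused activation was split out into its own blob
    return ("preact_" if len(converted_activation) > 0 else "") + name

print(conv_blob_name("conv1", ["relu_blob"]))  # preact_conv1
print(conv_blob_name("conv1", []))             # conv1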
Example #7
def conv1d_conversion(layer, name, verbose, nonlinear_mxts_mode, **kwargs):
    # nonlinear_mxts_mode is only used for the activation layer
    converted_activation = activation_conversion(
        layer, name, verbose, nonlinear_mxts_mode=nonlinear_mxts_mode)
    W = layer.get_weights()[0]
    if (W.shape[-1] != 1):  #is NHWC and not NCHW - need to transpose
        W = W.transpose(3, 2, 0, 1)
    to_return = [
        blobs.Conv1D(
            name=("preact_" if len(converted_activation) > 0 else "") + name,
            W=np.squeeze(W, 3),
            b=layer.get_weights()[1],
            stride=layer.get_config()[KerasKeys.subsample_length],
            border_mode=layer.get_config()[KerasKeys.border_mode],
            #for conv1d implementations, channels always seem to come last
            channels_come_last=True)
    ]
    to_return.extend(converted_activation)
    return to_return
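The transpose/squeeze dance is easiest to follow as a shape walk-through. Assuming the Keras 1 Convolution1D weight layout (filter_length, 1, input_channels, nb_filter), which is what the W.shape[-1] != 1 test distinguishes:

import numpy as np

# filter_length=2, input_channels=4, nb_filter=8 (illustrative sizes)
W = np.zeros((2, 1, 4, 8))
if W.shape[-1] != 1:                # channels-last layout: transpose
    W = W.transpose(3, 2, 0, 1)     # -> (8, 4, 2, 1)
print(np.squeeze(W, 3).shape)       # (8, 4, 2): filters, channels, length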
Example #8
 def test_relu_after_conv1d_batchnorm(self):
     input_layer = blobs.Input(num_dims=None, shape=(None, 2, 2))
     conv_layer = blobs.Conv1D(W=np.random.random((2, 2, 2)),
                               b=np.random.random((2, )),
                               conv_mxts_mode=ConvMxtsMode.Linear,
                               stride=1,
                               border_mode=PaddingMode.valid,
                               channels_come_last=True)
     conv_layer.set_inputs(input_layer)
     batch_norm = blobs.BatchNormalization(gamma=np.array([1.0, 1.0]),
                                           beta=np.array([-0.5, 0.5]),
                                           axis=-1,
                                           mean=np.array([-0.5, 0.5]),
                                           std=np.array([1.0, 1.0]),
                                           epsilon=0.001)
     batch_norm.set_inputs(conv_layer)
     relu_after_bn = blobs.ReLU(
         nonlinear_mxts_mode=NonlinearMxtsMode.DeepLIFT_GenomicsDefault)
     relu_after_bn.set_inputs(batch_norm)
     relu_after_bn.build_fwd_pass_vars()
     self.assertEqual(relu_after_bn.nonlinear_mxts_mode,
                      NonlinearMxtsMode.Rescale)
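Taken together with test_relu_after_conv1d above, this test suggests the genomics-default resolution looks through the intervening BatchNormalization back to the conv layer: the ReLU still lands on Rescale even though its direct input is the batch-norm blob.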