def test_fprop(self):
    """Forward pass of a valid-mode, stride-(1,1) Conv2D (channels first)
    should reproduce the hand-computed activations."""
    conv_layer = blobs.Conv2D(W=self.conv_W,
                              b=self.conv_b,
                              strides=(1, 1),
                              border_mode=B.BorderMode.valid,
                              channels_come_last=False)
    self.create_small_net_with_conv_layer(conv_layer,
                                          outputs_per_channel=9)
    fprop_func = B.function([self.input_layer.get_activation_vars()],
                            self.conv_layer.get_activation_vars())
    # expected shape: (batch=2, channels=2, rows=3, cols=3); the second
    # channel is the negation of the first
    expected = np.array(
        [[[[439, 467, 495], [551, 579, 607], [663, 691, 719]],
          [[-439, -467, -495], [-551, -579, -607], [-663, -691, -719]]],
         [[[1335, 1363, 1391], [1447, 1475, 1503], [1559, 1587, 1615]],
          [[-1335, -1363, -1391], [-1447, -1475, -1503],
           [-1559, -1587, -1615]]]])
    np.testing.assert_almost_equal(fprop_func(self.inp), expected)
def test_dense_backprop(self):
    """Multipliers propagated back to the input layer (Linear conv mxts
    mode, zero reference) should match the hand-computed values."""
    conv_layer = blobs.Conv2D(W=self.conv_W,
                              b=self.conv_b,
                              strides=(1, 1),
                              border_mode=B.BorderMode.valid,
                              channels_come_last=False,
                              conv_mxts_mode=ConvMxtsMode.Linear)
    self.create_small_net_with_conv_layer(conv_layer,
                                          outputs_per_channel=9)
    self.dense_layer.update_task_index(task_index=0)
    backprop_func = B.function(
        [self.input_layer.get_activation_vars(),
         self.input_layer.get_reference_vars()],
        self.input_layer.get_mxts())
    # Both batch samples receive identical multipliers, so the expected
    # array is one per-example block repeated twice.
    per_example_mxts = [
        [[0, 2, 2, 2], [4, 12, 12, 8], [4, 12, 12, 8], [4, 10, 10, 6]],
        [[8, 18, 18, 10], [20, 44, 44, 24], [20, 44, 44, 24],
         [12, 26, 26, 14]]]
    expected = np.array([per_example_mxts, per_example_mxts])
    np.testing.assert_almost_equal(
        backprop_func(self.inp, np.zeros_like(self.inp)), expected)
def conv2d_conversion(layer, name, mxts_mode):
    """Convert a keras Conv2D layer (plus any fused activation) into a
    list of deeplift blobs.

    mxts_mode is only relevant to the activation conversion, not to the
    conv blob itself.
    """
    converted_activation = activation_conversion(layer, name,
                                                 mxts_mode=mxts_mode)
    config = layer.get_config()
    # When an activation follows, the conv blob gets a "preact_" prefix
    # so the activation blob can keep the original name.
    conv_name = name if not converted_activation else "preact_" + name
    conv_blob = blobs.Conv2D(name=conv_name,
                             W=layer.get_weights()[0],
                             b=layer.get_weights()[1],
                             strides=config[KerasKeys.subsample],
                             border_mode=config[KerasKeys.border_mode])
    return [conv_blob] + converted_activation
def test_relu_after_conv2d(self):
    """DeepLIFT_GenomicsDefault should resolve to Rescale for a ReLU
    placed directly after a conv layer."""
    input_layer = blobs.Input(num_dims=None, shape=(None, 2, 2, 2))
    conv_layer = blobs.Conv2D(W=np.random.random((2, 2, 2, 2)),
                              b=np.random.random((2,)),
                              conv_mxts_mode=ConvMxtsMode.Linear,
                              strides=(1, 1),
                              border_mode=PaddingMode.valid,
                              channels_come_last=True)
    conv_layer.set_inputs(input_layer)
    relu_layer = blobs.ReLU(
        nonlinear_mxts_mode=NonlinearMxtsMode.DeepLIFT_GenomicsDefault)
    relu_layer.set_inputs(conv_layer)
    relu_layer.build_fwd_pass_vars()
    self.assertEqual(relu_layer.nonlinear_mxts_mode,
                     NonlinearMxtsMode.Rescale)
def conv2d_conversion(layer, name, verbose,
                      nonlinear_mxts_mode, conv_mxts_mode, **kwargs):
    """Convert a keras Conv2D layer (plus any fused activation) into a
    list of deeplift blobs.

    nonlinear_mxts_mode is only used for the activation conversion;
    conv_mxts_mode is passed through to the Conv2D blob.
    """
    # Fix: the layer config used to be printed unconditionally (debug
    # leftover); honor the existing `verbose` flag instead.
    if verbose:
        print(layer.get_config())
    converted_activation = activation_conversion(
        layer, name, verbose, nonlinear_mxts_mode=nonlinear_mxts_mode)
    # When an activation follows, the conv blob gets a "preact_" prefix
    # so the activation blob can keep the original name.
    to_return = [blobs.Conv2D(
        name=("preact_" if len(converted_activation) > 0 else "") + name,
        W=layer.get_weights()[0],
        b=layer.get_weights()[1],
        strides=layer.get_config()[KerasKeys.strides],
        padding_mode=layer.get_config()["padding"].upper(),
        conv_mxts_mode=conv_mxts_mode)]
    to_return.extend(converted_activation)
    return to_return
def test_fprop_stride(self):
    """Forward pass with strides (2, 2) should yield the stride-1
    activations subsampled at every other position."""
    conv_layer = blobs.Conv2D(W=self.conv_W,
                              b=self.conv_b,
                              strides=(2, 2),
                              border_mode=B.BorderMode.valid,
                              channels_come_last=False,
                              conv_mxts_mode=ConvMxtsMode.Linear)
    self.create_small_net_with_conv_layer(conv_layer,
                                          outputs_per_channel=9)
    fprop_func = B.function([self.input_layer.get_activation_vars()],
                            self.conv_layer.get_activation_vars())
    # expected shape: (batch=2, channels=2, rows=2, cols=2)
    expected = np.array(
        [[[[439, 495], [663, 719]],
          [[-439, -495], [-663, -719]]],
         [[[1335, 1391], [1559, 1615]],
          [[-1335, -1391], [-1559, -1615]]]])
    np.testing.assert_almost_equal(fprop_func(self.inp), expected)
def conv2d_conversion(layer, name, verbose, nonlinear_mxts_mode, **kwargs):
    """Convert a keras Conv2D layer (plus any fused activation) into a
    list of deeplift blobs, transposing the kernel when the keras dim
    ordering is 'tf'.

    nonlinear_mxts_mode is only used for the activation conversion.
    """
    converted_activation = activation_conversion(
        layer, name, verbose, nonlinear_mxts_mode=nonlinear_mxts_mode)
    config = layer.get_config()
    weights = layer.get_weights()[0]
    channels_come_last = False
    if KerasKeys.dim_ordering in config:
        if config[KerasKeys.dim_ordering] == 'tf':
            # 'tf' ordering: move the trailing axes to the front via
            # transpose(3, 2, 0, 1) — presumably mapping
            # (rows, cols, in_ch, out_ch) -> (out_ch, in_ch, rows, cols);
            # confirm against blobs.Conv2D's expected weight layout.
            weights = weights.transpose(3, 2, 0, 1)
            channels_come_last = True
    # When an activation follows, the conv blob gets a "preact_" prefix
    # so the activation blob can keep the original name.
    conv_name = name if not converted_activation else "preact_" + name
    conv_blob = blobs.Conv2D(name=conv_name,
                             W=weights,
                             b=layer.get_weights()[1],
                             strides=config[KerasKeys.subsample],
                             border_mode=config[KerasKeys.border_mode],
                             channels_come_last=channels_come_last)
    return [conv_blob] + converted_activation
def test_relu_after_conv2d_batchnorm(self):
    """DeepLIFT_GenomicsDefault should still resolve to Rescale when a
    batch-norm layer sits between the conv layer and the ReLU."""
    input_layer = blobs.Input(num_dims=None, shape=(None, 2, 2, 2))
    conv_layer = blobs.Conv2D(W=np.random.random((2, 2, 2, 2)),
                              b=np.random.random((2,)),
                              conv_mxts_mode=ConvMxtsMode.Linear,
                              strides=(1, 1),
                              border_mode=PaddingMode.valid,
                              channels_come_last=True)
    conv_layer.set_inputs(input_layer)
    batch_norm = blobs.BatchNormalization(gamma=np.array([1.0, 1.0]),
                                          beta=np.array([-0.5, 0.5]),
                                          axis=-1,
                                          mean=np.array([-0.5, 0.5]),
                                          std=np.array([1.0, 1.0]),
                                          epsilon=0.001)
    batch_norm.set_inputs(conv_layer)
    relu_layer = blobs.ReLU(
        nonlinear_mxts_mode=NonlinearMxtsMode.DeepLIFT_GenomicsDefault)
    relu_layer.set_inputs(batch_norm)
    relu_layer.build_fwd_pass_vars()
    self.assertEqual(relu_layer.nonlinear_mxts_mode,
                     NonlinearMxtsMode.Rescale)