Example No. 1
    def test_fprop_maxpool2d(self): 

        pool_layer = blobs.MaxPool2D(pool_size=(2,2),
                          strides=(1,1),
                          border_mode=B.BorderMode.valid,
                          ignore_border=True,
                          maxpool_deeplift_mode=MaxPoolDeepLiftMode.gradient,
                          channels_come_last=False)
        self.create_small_net_with_pool_layer(pool_layer,
                                              outputs_per_channel=9)

        func = B.function([self.input_layer.get_activation_vars()],
                           self.pool_layer.get_activation_vars())
        np.testing.assert_almost_equal(func([self.reference_inps[0],
                                             self.reference_inps[0]-1]),
                                       np.array(
                                       [[[[1,2,3],
                                          [5,5,4],
                                          [6,7,8]],
                                         [[2,3,4],
                                          [6,6,5],
                                          [7,8,9]]],
                                        [[[0,1,2],
                                          [4,4,3],
                                          [5,6,7]],
                                         [[1,2,3],
                                          [5,5,4],
                                          [6,7,8]]]]))
Example No. 2
    def test_backprop_maxpool2d_gradients(self):
        pool_layer = blobs.MaxPool2D(pool_size=(2,2),
                  strides=(1,1),
                  border_mode=B.BorderMode.valid,
                  ignore_border=True,
                  maxpool_deeplift_mode=MaxPoolDeepLiftMode.gradient,
                  channels_come_last=False)
        self.create_small_net_with_pool_layer(pool_layer,
                                              outputs_per_channel=9)

        self.dense_layer.update_task_index(task_index=0)
        func = B.function([
                self.input_layer.get_activation_vars(),
                self.input_layer.get_reference_vars()],
                                   self.input_layer.get_mxts())
        np.testing.assert_almost_equal(
            func(self.backprop_test_inps,
                 np.ones_like(self.backprop_test_inps)*self.reference_inps),
                                  np.array(
                                  [[np.array([[1, 0, 0, 0],
                                     [0, 0, 2, 0],
                                     [2, 1, 1, 0],
                                     [0, 0, 1, 1]])*2,
                                    np.array([[0, 0, 1, 1],
                                     [0, 1, 0, 0],
                                     [0, 2, 1, 0],
                                     [1, 0, 1, 1]])*3], 
                                   [np.array([[0, 0, 1, 1],
                                     [0, 1, 0, 0],
                                     [0, 2, 1, 0],
                                     [1, 0, 1, 1]])*2,
                                    np.array([[1, 0, 0, 0],
                                     [0, 0, 2, 0],
                                     [2, 1, 1, 0],
                                     [0, 0, 1, 1]])*3]]))
Example No. 3
    def test_backprop_avgpool2d(self):
        pool_layer = blobs.AvgPool2D(pool_size=(2,2),
                  strides=(1,1),
                  border_mode=B.BorderMode.valid,
                  ignore_border=True,
                  channels_come_last=False)
        self.create_small_net_with_pool_layer(pool_layer,
                                              outputs_per_channel=9)

        self.dense_layer.update_task_index(task_index=0)
        func = B.function([self.input_layer.get_activation_vars(), 
                           self.input_layer.get_reference_vars()],
                           self.input_layer.get_mxts())
        avg_pool_grads = np.array([[1, 2, 2, 1],
                                   [2, 4, 4, 2],
                                   [2, 4, 4, 2],
                                   [1, 2, 2, 1]]).astype("float32") 
        np.testing.assert_almost_equal(func(
                  self.backprop_test_inps,
                  np.ones_like(self.backprop_test_inps)*self.reference_inps),
                                  np.array(
                                  [[avg_pool_grads*2*0.25,
                                    avg_pool_grads*3*0.25], 
                                   [avg_pool_grads*2*0.25,
                                    avg_pool_grads*3*0.25]]))
Example No. 4
    def test_fprop(self):

        conv_layer = blobs.Conv2D(W=self.conv_W,
                                  b=self.conv_b,
                                  strides=(1, 1),
                                  border_mode=B.BorderMode.valid,
                                  channels_come_last=False)
        self.create_small_net_with_conv_layer(conv_layer,
                                              outputs_per_channel=9)

        func = B.function([self.input_layer.get_activation_vars()],
                          self.conv_layer.get_activation_vars())
        np.testing.assert_almost_equal(
            func(self.inp),
            np.array([[
                [[439, 467, 495], [551, 579, 607], [663, 691, 719]],
                [[-439, -467, -495], [-551, -579, -607], [-663, -691, -719]],
            ],
                      [[
                          [1335, 1363, 1391],
                          [1447, 1475, 1503],
                          [1559, 1587, 1615],
                      ],
                       [[-1335, -1363, -1391], [-1447, -1475, -1503],
                        [-1559, -1587, -1615]]]]))
Example No. 5
    def test_fprop_avgpool2d(self): 

        pool_layer = blobs.AvgPool2D(pool_size=(2,2),
                                  strides=(1,1),
                                  border_mode=B.BorderMode.valid,
                                  ignore_border=True,
                                  channels_come_last=False)
        self.create_small_net_with_pool_layer(pool_layer,
                                              outputs_per_channel=9)

        func = B.function([self.input_layer.get_activation_vars()],
                           self.pool_layer.get_activation_vars())
        np.testing.assert_almost_equal(func([self.reference_inps[0],
                                             self.reference_inps[0]-1]),
                                       0.25*np.array(
                                       [[[[ 1, 3, 5],
                                          [ 6,10, 4],
                                          [11,16,19]],
                                         [[ 5, 7, 9],
                                          [10,14, 8],
                                          [15,20,23]]],
                                        [[[-3,-1, 1],
                                          [ 2, 6, 0],
                                          [ 7,12,15]],
                                         [[ 1, 3, 5],
                                          [ 6,10, 4],
                                          [11,16,19]]]]))
Example No. 6
def get_cross_corr_function(filters):
    import numpy as np
    import deeplift.util
    from deeplift import backend as B

    if (len(filters.shape)==3):
        filters = filters[:,None,:,:]
    assert filters.shape[1]==1 #input channels=1
    assert filters.shape[2]==4 #acgt

    #set up the convolution; conv2d flips the filters, so pre-flip them
    #here so that the compiled function computes a cross-correlation
    filters = np.array(filters[:,:,::-1,::-1]).astype("float32") 
    input_var = B.tensor_with_dims(num_dims=4, name="input")
    conv_out = B.conv2d(inp=input_var,
                        filters=filters,
                        border_mode="valid",
                        subsample=(1,1))
    compiled_func = B.function(inputs=[input_var], outputs=conv_out)

    def cross_corr(regions_to_scan, batch_size, progress_update=None):
        assert len(regions_to_scan.shape)==4
        assert regions_to_scan.shape[1]==1 #input channels=1
        assert regions_to_scan.shape[2]==4 #acgt
        #run function in batches
        conv_results = np.array(deeplift.util.run_function_in_batches(
                                func=compiled_func,
                                input_data_list=[regions_to_scan],
                                batch_size=batch_size,
                                progress_update=progress_update))
        return conv_results
    return cross_corr
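
A minimal usage sketch for the cross-correlation scanner above, assuming hypothetical filters and one-hot (ACGT) regions; the shapes follow the asserts in get_cross_corr_function (filters: (num_filters, 1, 4, filter_len), regions: (num_regions, 1, 4, region_len)):

import numpy as np

#hypothetical data: 2 filters of length 8, 10 one-hot regions of length 200
rng = np.random.RandomState(0)
filters = rng.rand(2, 1, 4, 8).astype("float32")
bases = rng.randint(0, 4, size=(10, 200))
regions_to_scan = np.zeros((10, 1, 4, 200), dtype="float32")
for i in range(10):
    regions_to_scan[i, 0, bases[i], np.arange(200)] = 1.0

cross_corr = get_cross_corr_function(filters)
scores = cross_corr(regions_to_scan, batch_size=5)
#with a "valid" convolution the last axis should have length 200-8+1 = 193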
Example No. 7
    def test_dense_backprop(self):
        conv_layer = blobs.Conv2D(W=self.conv_W,
                                  b=self.conv_b,
                                  strides=(1, 1),
                                  border_mode=B.BorderMode.valid,
                                  channels_come_last=False,
                                  conv_mxts_mode=ConvMxtsMode.Linear)
        self.create_small_net_with_conv_layer(conv_layer,
                                              outputs_per_channel=9)

        self.dense_layer.update_task_index(task_index=0)
        func = B.function([
            self.input_layer.get_activation_vars(),
            self.input_layer.get_reference_vars()
        ], self.input_layer.get_mxts())
        np.testing.assert_almost_equal(
            func(self.inp, np.zeros_like(self.inp)),
            np.array([[[[0, 2, 2, 2], [4, 12, 12, 8], [4, 12, 12, 8],
                        [4, 10, 10, 6]],
                       [[8, 18, 18, 10], [20, 44, 44, 24], [20, 44, 44, 24],
                        [12, 26, 26, 14]]],
                      [[[0, 2, 2, 2], [4, 12, 12, 8], [4, 12, 12, 8],
                        [4, 10, 10, 6]],
                       [[8, 18, 18, 10], [20, 44, 44, 24], [20, 44, 44, 24],
                        [12, 26, 26, 14]]]]))
Example No. 8
    def test_concat(self):
        func = B.function([
            self.input_layer1.get_activation_vars(),
            self.input_layer2.get_activation_vars()
        ], self.concat_layer.get_activation_vars())
        np.testing.assert_allclose(func(self.inp1, self.inp2),
                                   np.array([[[[1]], [[1]]], [[[2]], [[2]]]]))
Example No. 9
def get_smoothen_function(window_size, same_size_return=True):
    """
        Returns a function for smoothening inputs with a window
         of size window_size.

        Returned function has arguments of inp,
         batch_size and progress_update
    """
    from deeplift import backend as B
    from deeplift.util import run_function_in_batches
    inp_tensor = B.tensor_with_dims(2, "inp_tensor") 

    if (same_size_return):
        #pad so that the output has the same size as the input;
        #the pooled output has length (input length - (window_size-1)),
        #so we pad with int(window_size/2) on each side, and for even
        #window_size we trim the extra value off the front of the output later
        padding = int(window_size/2)
        new_dims = [inp_tensor.shape[0], inp_tensor.shape[1]+2*padding]
        padded_inp = B.zeros(new_dims)
        #fill the middle region with the original input
        padded_inp = B.set_subtensor(
                        padded_inp[:,padding:(inp_tensor.shape[1]+padding)],
                        inp_tensor) 
        #duplicate the left end for padding
        padded_inp = B.set_subtensor(padded_inp[:,0:padding],
                                     inp_tensor[:,0:padding])
        #duplicate the right end for padding
        padded_inp = B.set_subtensor(
                        padded_inp[:,(inp_tensor.shape[1]+padding):],
                        inp_tensor[:,(inp_tensor.shape[1]-padding):])
    else:
        padded_inp = inp_tensor
    padded_inp = padded_inp[:,None,None,:]

    averaged_padded_inp = B.pool2d(
                            inp=padded_inp,
                            pool_size=(1,window_size),
                            strides=(1,1),
                            border_mode="valid",
                            ignore_border=True,
                            pool_mode=B.PoolMode.avg) 

    #if window_size is even, then we have an extra value in the output,
    #so kick off the value from the front
    if (window_size%2==0 and same_size_return):
        averaged_padded_inp = averaged_padded_inp[:,:,:,1:]

    averaged_padded_inp = averaged_padded_inp[:,0,0,:]
    smoothen_func = B.function([inp_tensor], averaged_padded_inp)

    def smoothen(inp, batch_size, progress_update=None):
       return run_function_in_batches(
                func=smoothen_func,
                input_data_list=[inp],
                batch_size=batch_size,
                progress_update=progress_update)

    return smoothen
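
A minimal usage sketch for the smoothening function above, assuming a hypothetical 2-D array of per-position scores (rows are sequences, columns are positions); with same_size_return=True the output should keep the input's length:

import numpy as np

scores = np.random.rand(16, 100).astype("float32")  #hypothetical (batch, length) scores
smoothen = get_smoothen_function(window_size=5)
smoothed = smoothen(scores, batch_size=8)
#each output position is the mean of a length-5 window of the (edge-padded) input,
#so smoothed should have one length-100 row per input row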
Example No. 10
    def _get_prediction_function(self, inputs, output):
        func = B.function(inputs=inputs, outputs=output)
        def prediction_function(input_data_list,
                                batch_size, progress_update=None):
            to_return = deeplift.util.run_function_in_batches(
                    func=func,
                    input_data_list=input_data_list,
                    batch_size=batch_size,
                    progress_update=progress_update)
            return to_return
        return prediction_function
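
A hedged sketch of how the returned prediction function might be invoked; the model, layer and data names below are hypothetical, and the symbolic inputs/outputs are obtained the same way as in the tests above:

#hypothetical usage
predict = model._get_prediction_function(
              inputs=[input_layer.get_activation_vars()],
              output=output_layer.get_activation_vars())
predictions = predict(input_data_list=[X], batch_size=200, progress_update=None)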
Example No. 11
    def _get_func(self,
                  find_scores_layer,
                  target_layer,
                  input_layers,
                  func_type,
                  slice_objects=None):
        find_scores_layer.reset_mxts_updated()
        self._set_scoring_mode_for_target_layer(target_layer)
        find_scores_layer.update_mxts()
        if (func_type == FuncType.contribs):
            output_symbolic_vars = find_scores_layer.get_target_contrib_vars()
        elif (func_type == FuncType.multipliers):
            output_symbolic_vars = find_scores_layer.get_mxts()
        elif (func_type == FuncType.contribs_of_input_with_filter_refs):
            output_symbolic_vars =\
             find_scores_layer.get_contribs_of_inputs_with_filter_refs()
        else:
            raise RuntimeError("Unsupported func_type: " + func_type)
        if (slice_objects is not None):
            output_symbolic_vars = output_symbolic_vars[slice_objects]
        core_function = B.function([
            input_layer.get_activation_vars() for input_layer in input_layers
        ] + [input_layer.get_reference_vars() for input_layer in input_layers],
                                   output_symbolic_vars)

        def func(task_idx,
                 input_data_list,
                 batch_size,
                 progress_update,
                 input_references_list=None):
            if (input_references_list is None):
                print("No reference provided - using zeros")
                input_references_list = [0.0 for x in input_data_list]
            input_references_list = [
                np.ones_like(input_data) * reference
                for (input_data,
                     reference) in zip(input_data_list, input_references_list)
            ]
            #WARNING: this is not thread-safe. Do not try to
            #parallelize or you can end up with multiple target_layers
            #active at once
            target_layer.set_active()
            target_layer.update_task_index(task_idx)
            to_return = deeplift.util.run_function_in_batches(
                func=core_function,
                input_data_list=input_data_list + input_references_list,
                batch_size=batch_size,
                progress_update=progress_update)
            target_layer.set_inactive()
            return to_return

        return func
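
A hedged sketch of how the function returned by _get_func is typically called; scoring_func and the arrays below are hypothetical, but the keyword arguments match the inner func signature above (zeros are used as the reference when input_references_list is omitted):

#hypothetical call; scoring_func was obtained via _get_func or one of the
#public scoring wrappers built on it
scores = scoring_func(task_idx=0,
                      input_data_list=[X],
                      input_references_list=[X_ref],  #same shape as X
                      batch_size=200,
                      progress_update=None)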
Example No. 12
    def test_concat_backprop(self):
        func = B.function([
                self.input_layer1.get_activation_vars(),
                self.input_layer2.get_activation_vars()],
                #self.concat_layer.get_mxts(),
                [self.input_layer1.get_mxts(),
                 self.input_layer2.get_mxts()],
                )
        print(func(self.inp1, self.inp2))
        self.dense_layer.update_task_index(task_index=0)
        np.testing.assert_allclose(func(self.inp1, self.inp2),
                                   [np.array([[[[1]]],[[[1]]]]),
                                    np.array([[[[2]]],[[[2]]]])])
Example No. 13
    def test_fprop_stride(self):

        conv_layer = blobs.Conv2D(W=self.conv_W,
                                  b=self.conv_b,
                                  strides=(2, 2),
                                  border_mode=B.BorderMode.valid,
                                  channels_come_last=False,
                                  conv_mxts_mode=ConvMxtsMode.Linear)
        self.create_small_net_with_conv_layer(conv_layer,
                                              outputs_per_channel=9)

        func = B.function([self.input_layer.get_activation_vars()],
                          self.conv_layer.get_activation_vars())
        np.testing.assert_almost_equal(
            func(self.inp),
            np.array([[[[439, 495], [663, 719]], [[-439, -495], [-663, -719]]],
                      [[[1335, 1391], [1559, 1615]],
                       [[-1335, -1391], [-1559, -1615]]]]))
Example No. 14
def compile_func(inputs, outputs):
    import deeplift.backend as B
    return B.function(inputs, outputs)
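
A short usage sketch for compile_func, assuming a hypothetical deeplift model whose layers expose get_activation_vars() as in the tests above; it compiles a forward-pass function and runs it in batches:

import numpy as np
import deeplift.util

#hypothetical model: compile a forward pass from the first layer's input
#variables to the last layer's output variables
predict_fn = compile_func([deeplift_model.get_layers()[0].get_activation_vars()],
                          deeplift_model.get_layers()[-1].get_activation_vars())
preds = deeplift.util.run_function_in_batches(func=predict_fn,
                                              input_data_list=[X],
                                              batch_size=200,
                                              progress_update=None)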
Example No. 15
    def _get_func(self,
                  find_scores_layers,
                  target_layer,
                  input_layers,
                  func_type,
                  slice_objects=None):
        if not isinstance(find_scores_layers, list):
            remove_list_wrapper_on_return = True
            find_scores_layers = [find_scores_layers]
        else:
            remove_list_wrapper_on_return = False
        for find_scores_layer in find_scores_layers:
            find_scores_layer.reset_mxts_updated()
        self._set_scoring_mode_for_target_layer(target_layer)
        for find_scores_layer in find_scores_layers:
            find_scores_layer.update_mxts()
        if (func_type == FuncType.contribs):
            output_symbolic_vars = [
                find_scores_layer.get_target_contrib_vars()
                for find_scores_layer in find_scores_layers
            ]
        elif (func_type == FuncType.multipliers):
            output_symbolic_vars = [
                find_scores_layer.get_mxts()
                for find_scores_layer in find_scores_layers
            ]
        elif (func_type == FuncType.contribs_of_input_with_filter_refs):
            output_symbolic_vars =\
             [find_scores_layer.get_contribs_of_inputs_with_filter_refs()
              for find_scores_layer in find_scores_layers]
        else:
            raise RuntimeError("Unsupported func_type: " + func_type)
        if (slice_objects is not None):
            output_symbolic_vars = output_symbolic_vars[slice_objects]
        core_function = B.function([
            input_layer.get_activation_vars() for input_layer in input_layers
        ] + [input_layer.get_reference_vars() for input_layer in input_layers],
                                   output_symbolic_vars)

        def func(task_idx,
                 input_data_list,
                 batch_size,
                 progress_update,
                 input_references_list=None):
            if (isinstance(input_data_list, dict)):
                assert hasattr(self, '_input_layer_names'),\
                 ("Dictionary supplied for input_data_list but model does "
                  "not have an attribute '_input_layer_names")
                input_data_list = [
                    input_data_list[x] for x in self._input_layer_names
                ]
            if (input_references_list is None):
                print("No reference provided - using zeros")
                input_references_list = [0.0 for x in input_data_list]
            if (isinstance(input_references_list, dict)):
                assert hasattr(self, '_input_layer_names'),\
                 ("Dictionary supplied for input_references_list but model "
                  "does not have an attribute '_input_layer_names")
                input_references_list = [
                    input_references_list[x] for x in self._input_layer_names
                ]
            input_references_list = [
                np.ones_like(input_data) * reference
                for (input_data,
                     reference) in zip(input_data_list, input_references_list)
            ]
            #WARNING: this is not thread-safe. Do not try to
            #parallelize or you can end up with multiple target_layers
            #active at once
            target_layer.set_active()
            target_layer.update_task_index(task_idx)
            to_return = deeplift.util.run_function_in_batches(
                func=core_function,
                input_data_list=input_data_list + input_references_list,
                batch_size=batch_size,
                progress_update=progress_update,
                multimodal_output=True)
            target_layer.set_inactive()
            if (remove_list_wrapper_on_return):
                #remove the enclosing []; should be only one element
                assert len(to_return) == 1
                to_return = to_return[0]
            return to_return

        return func
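
A hedged sketch of the dictionary-based calling convention this multi-layer variant supports; the layer names and arrays below are hypothetical and must match the model's _input_layer_names for the dict lookups above to work:

#hypothetical multi-input call: data and references keyed by input layer name
scores_per_layer = scoring_func(
        task_idx=0,
        input_data_list={"input_dna": X_dna, "input_acc": X_acc},
        input_references_list={"input_dna": X_dna_ref, "input_acc": X_acc_ref},
        batch_size=128,
        progress_update=1000)
#one score array per find_scores_layer, unless a single layer was passed
#(the enclosing list is then removed, per remove_list_wrapper_on_return)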
Example No. 16
    def test_concat_backprop2(self):
        func = B.function([self.flatten_layer.get_activation_vars()],
                          self.flatten_layer.get_mxts(),
                          )