def setUp(self):
    super(AxiomsTest, self).setUp()

    # Make a linear model for testing.
    graph_lin = Graph()

    with graph_lin.as_default():
        x_lin = placeholder('float32', (None, self.input_size))
        y_lin = x_lin @ self.model_lin_weights + self.model_lin_bias

    self.model_lin = ModelWrapper(graph_lin, x_lin, y_lin)

    # Make a deeper model for testing.
    graph_deep = Graph()

    with graph_deep.as_default():
        x_deep = placeholder('float32', (None, self.input_size))
        z1_deep = (
            x_deep @ self.model_deep_weights_1 + self.model_deep_bias_1)
        z2_deep = relu(z1_deep)
        z3_deep = (
            z2_deep @ self.model_deep_weights_2 + self.model_deep_bias_2)
        z4_deep = relu(z3_deep)
        y_deep = (
            z4_deep @ self.model_deep_weights_3 + self.model_deep_bias_3)

    self.model_deep = ModelWrapper(
        graph_deep, x_deep, y_deep, dict(layer2=z2_deep, layer3=z3_deep))

    self.layer2 = 'layer2'
    self.layer3 = 'layer3'
def setUp(self):
    super(BatchTest, self).setUp()

    # Make a linear model for testing.
    x_lin = Input((self.input_size,))
    y_lin = Dense(self.output_size)(x_lin)

    self.model_lin = ModelWrapper(Model(x_lin, y_lin))
    self.model_lin._model.set_weights(
        [self.model_lin_weights, self.model_lin_bias])

    # Make a deeper model for testing.
    x_deep = Input((self.input_size,))
    y_deep = Dense(self.internal1_size)(x_deep)
    y_deep = Activation('relu')(y_deep)
    y_deep = Dense(self.internal2_size)(y_deep)
    y_deep = Activation('relu')(y_deep)
    y_deep = Dense(self.output_size)(y_deep)

    self.model_deep = ModelWrapper(Model(x_deep, y_deep))
    self.model_deep._model.set_weights(
        [
            self.model_deep_weights_1, self.model_deep_bias_1,
            self.model_deep_weights_2, self.model_deep_bias_2,
            self.model_deep_weights_3, self.model_deep_bias_3
        ])
def test_internal_multiple_inputs(self):

    class ConcatenateLayer(Module):

        def forward(this, x1, x2):
            return cat((x1, x2), 1)

    class M(Module):

        def __init__(this):
            super(M, this).__init__()
            this.z1 = Linear(5, 6)
            this.concat = ConcatenateLayer()
            this.z3 = Linear(7, 7)
            this.y = Linear(7, 3)

        def forward(this, x1, x2):
            x1 = this.z1(x1)
            z = this.concat(x1, x2)
            z = this.z3(z)
            return this.y(z)

    model = ModelWrapper(M(), [(5,), (1,)])

    infl = InternalInfluence(
        model, Cut('concat', anchor='in'), ClassQoI(1), PointDoi())

    res = infl.attributions(
        np.array([[1., 2., 3., 4., 5.]]).astype('float32'),
        np.array([[1.]]).astype('float32'))

    self.assertEqual(len(res), 2)
    self.assertEqual(res[0].shape, (1, 6))
    self.assertEqual(res[1].shape, (1, 1))
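# Note (added commentary, not from the test itself): cutting a layer that
# takes multiple arguments with anchor='in' yields one attribution array per
# argument to that layer, shaped like each argument — here (1, 6) for the
# Dense output feeding the concat, and (1, 1) for the raw second input, as
# the assertions above check.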
def test_anchors(self):
    x = Input((2,))
    z1 = Dense(2)(x)
    z2 = Activation('relu')(z1)
    y = Dense(1)(z2)

    k_model = Model(x, y)
    k_model.set_weights(
        [
            np.array([[1., 0.], [0., -1.]]),
            np.array([0., 0.]),
            np.array([[1.], [1.]]),
            np.array([0.])
        ])

    model = ModelWrapper(k_model)

    infl_out = InternalInfluence(
        model,
        Cut(2, anchor='out'),
        ClassQoI(0),
        PointDoi(),
        multiply_activation=False)
    infl_in = InternalInfluence(
        model,
        Cut(2, anchor='in'),
        ClassQoI(0),
        PointDoi(),
        multiply_activation=False)

    res_out = infl_out.attributions(np.array([[1., 1.]]))
    res_in = infl_in.attributions(np.array([[1., 1.]]))

    self.assertEqual(res_out.shape, (1, 2))
    self.assertEqual(res_in.shape, (1, 2))

    self.assertTrue(np.allclose(res_out, np.array([[1., 1.]])))
    self.assertTrue(np.allclose(res_in, np.array([[1., 0.]])))
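# A hand derivation of the expected values above (a standalone sketch with an
# illustrative helper name, not part of the test suite): with kernel
# W1 = [[1., 0.], [0., -1.]] and input x = [1., 1.], the ReLU sees
# z1 = x @ W1 = [1., -1.], so only its first unit is active. The QoI is
# y = relu(z1)[0] + relu(z1)[1].
def _sketch_anchor_gradients():
    import numpy as np

    x = np.array([1., 1.])
    W1 = np.array([[1., 0.], [0., -1.]])
    z1 = x @ W1                          # [1., -1.]
    relu_gate = (z1 > 0).astype(float)   # [1., 0.]

    grad_out = np.array([1., 1.])        # dy / d relu(z1): anchor='out'
    grad_in = grad_out * relu_gate       # dy / d z1: anchor='in'

    assert np.allclose(grad_out, [1., 1.])
    assert np.allclose(grad_in, [1., 0.])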
def test_internal_slice_multiple_layers(self):
    graph = Graph()

    with graph.as_default():
        x1 = tf.placeholder('float32', (None, 5))
        z1 = x1 @ tf.random.normal((5, 6))
        x2 = tf.placeholder('float32', (None, 1))
        z2 = x2 @ tf.random.normal((1, 2))
        z3 = z2 @ tf.random.normal((2, 4))
        z4 = tf.concat([z1, z3], axis=1)
        z5 = z4 @ tf.random.normal((10, 7))
        y = z5 @ tf.random.normal((7, 3))

    model = ModelWrapper(
        graph, [x1, x2], y, dict(cut_layer1=z1, cut_layer2=z2))

    infl = InternalInfluence(
        model, Cut(['cut_layer1', 'cut_layer2']), ClassQoI(1), PointDoi())

    res = infl.attributions(
        [np.array([[1., 2., 3., 4., 5.]]),
         np.array([[1.]])])

    self.assertEqual(len(res), 2)
    self.assertEqual(res[0].shape, (1, 6))
    self.assertEqual(res[1].shape, (1, 2))
def test_internal_slice_multiple_layers(self):

    class M(Module):

        def __init__(this):
            super(M, this).__init__()
            this.cut_layer1 = Linear(5, 6)
            this.cut_layer2 = Linear(1, 2)
            this.z3 = Linear(2, 4)
            this.z5 = Linear(10, 7)
            this.y = Linear(7, 3)

        def forward(this, x1, x2):
            z1 = this.cut_layer1(x1)
            z2 = this.cut_layer2(x2)
            z3 = this.z3(z2)
            z4 = cat((z1, z3), 1)
            z5 = this.z5(z4)
            return this.y(z5)

    model = ModelWrapper(M(), [(5,), (1,)])

    infl = InternalInfluence(
        model, Cut(['cut_layer1', 'cut_layer2']), ClassQoI(1), PointDoi())

    res = infl.attributions(
        np.array([[1., 2., 3., 4., 5.]]).astype('float32'),
        np.array([[1.]]).astype('float32'))

    self.assertEqual(len(res), 2)
    self.assertEqual(res[0].shape, (1, 6))
    self.assertEqual(res[1].shape, (1, 2))
def setUp(self):
    super(AxiomsTest, self).setUp()

    # Make a linear model for testing.
    class M_lin(Module):

        def __init__(this):
            super(M_lin, this).__init__()
            this.layer = Linear(self.input_size, self.output_size)
            # torch Linear stores its weight as (out_features, in_features),
            # hence the transpose of the (in, out) test fixtures.
            this.layer.weight.data = B.as_tensor(self.model_lin_weights.T)
            this.layer.bias.data = B.as_tensor(self.model_lin_bias)

        def forward(this, x):
            return this.layer(x)

    self.model_lin = ModelWrapper(M_lin(), (self.input_size,))

    # Make a deeper model for testing.
    class M_deep(Module):

        def __init__(this):
            super(M_deep, this).__init__()
            this.l1 = Linear(self.input_size, self.internal1_size)
            this.l1_relu = ReLU()
            this.l2 = Linear(self.internal1_size, self.internal2_size)
            this.l2_relu = ReLU()
            this.l3 = Linear(self.internal2_size, self.output_size)

            this.l1.weight.data = B.as_tensor(self.model_deep_weights_1.T)
            this.l1.bias.data = B.as_tensor(self.model_deep_bias_1)
            this.l2.weight.data = B.as_tensor(self.model_deep_weights_2.T)
            this.l2.bias.data = B.as_tensor(self.model_deep_bias_2)
            this.l3.weight.data = B.as_tensor(self.model_deep_weights_3.T)
            this.l3.bias.data = B.as_tensor(self.model_deep_bias_3)

        def forward(this, x):
            x = this.l1(x)
            x = this.l1_relu(x)
            x = this.l2(x)
            x = this.l2_relu(x)
            return this.l3(x)

    self.model_deep = ModelWrapper(M_deep(), (self.input_size,))

    self.layer2 = 'l1_relu'
    self.layer3 = 'l2'
def test_multiple_outputs(self):
    # This test only exercises construction: it checks that a model with
    # two output heads can be wrapped without error.
    x = Input((5,))
    z1 = Dense(6)(x)
    z2 = Dense(7)(z1)
    y1 = Dense(2)(z2)
    z3 = Dense(8)(z2)
    y2 = Dense(3)(z3)

    model = ModelWrapper(Model(x, [y1, y2]))
def test_catch_cut_index_error(self):
    x = Input((2,))
    z1 = Dense(2)(x)
    z2 = Activation('relu')(z1)
    y = Dense(1)(z2)

    model = ModelWrapper(Model(x, y))

    with self.assertRaises(ValueError):
        infl = InternalInfluence(model, Cut(4), ClassQoI(0), PointDoi())
        infl.attributions(np.array([[1., 1.]]))
class ModelWrapperTest(ModelWrapperTestBase, TestCase):

    def setUp(self):
        super(ModelWrapperTest, self).setUp()
        subclassed = TFFunctionModel()
        subclassed.build((5, 2))
        subclassed.set_weights(
            [
                self.layer1_weights, self.internal_bias,
                self.layer2_weights, self.internal_bias,
                self.layer3_weights, self.bias
            ])
        self.model = ModelWrapper(subclassed)
        self.model.set_output_layers([subclassed.dense_3])

        self.layer0 = None
        self.layer1 = 0
        self.layer2 = 1

    @unittest.skip(
        "Base class test uses layer 0 as a multi-input cut, but that layer "
        "does not exist in the subclassed model")
    def test_qoibprop_multiple_inputs(self):
        return
def test_catch_cut_name_error(self):
    graph = Graph()

    with graph.as_default():
        x = tf.placeholder('float32', (None, 2))
        z1 = x @ tf.random.normal((2, 2))
        z2 = relu(z1)
        y = z2 @ tf.random.normal((2, 1))

    model = ModelWrapper(graph, x, y)

    with self.assertRaises(ValueError):
        infl = InternalInfluence(
            model, Cut('not_a_real_layer'), ClassQoI(0), PointDoi())
        infl.attributions(np.array([[1., 1.]]))
def test_per_timestep(self):
    num_classes = 5
    num_features = 3
    num_timesteps = 4
    num_hidden_state = 10
    batch_size = 32

    base_model = Sequential(
        [
            Input(shape=(num_timesteps, num_features)),
            GRU(num_hidden_state, name="rnn", return_sequences=True),
            Dense(num_classes, name="dense"),
        ])
    model = ModelWrapper(base_model)

    super(MultiQoiTest, self).per_timestep_qoi(
        model, num_classes, num_features, num_timesteps, batch_size)
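# A standalone sanity sketch (illustrative helper, not part of the test
# suite) of the shapes the per-timestep QoI relies on: with
# return_sequences=True the GRU emits one hidden state per timestep, so the
# Dense head is applied at every step and the model maps
# (batch, timesteps, features) -> (batch, timesteps, classes).
def _sketch_per_timestep_shapes():
    import numpy as np
    from tensorflow.keras.layers import Dense, GRU, Input
    from tensorflow.keras.models import Sequential

    base_model = Sequential(
        [
            Input(shape=(4, 3)),
            GRU(10, return_sequences=True),
            Dense(5),
        ])
    out = base_model(np.zeros((32, 4, 3), dtype='float32'))
    assert tuple(out.shape) == (32, 4, 5)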
def test_anchors(self):

    class M(Module):

        def __init__(this):
            super(M, this).__init__()
            this.z1 = Linear(2, 2)
            this.z2 = ReLU()
            this.y = Linear(2, 1)

            # torch Linear weights are (out_features, in_features), hence
            # the transposes.
            this.z1.weight.data = B.as_tensor(
                np.array([[1., 0.], [0., -1.]]).T)
            this.z1.bias.data = B.as_tensor(np.array([0., 0.]))
            this.y.weight.data = B.as_tensor(np.array([[1.], [1.]]).T)
            this.y.bias.data = B.as_tensor(np.array([0.]))

        def forward(this, x):
            z1 = this.z1(x)
            z2 = this.z2(z1)
            return this.y(z2)

    model = ModelWrapper(M(), (2,))

    infl_out = InternalInfluence(
        model,
        Cut('z2', anchor='out'),
        ClassQoI(0),
        PointDoi(),
        multiply_activation=False)
    infl_in = InternalInfluence(
        model,
        Cut('z2', anchor='in'),
        ClassQoI(0),
        PointDoi(),
        multiply_activation=False)

    res_out = infl_out.attributions(np.array([[1., 1.]]))
    res_in = infl_in.attributions(np.array([[1., 1.]]))

    self.assertEqual(res_out.shape, (1, 2))
    self.assertEqual(res_in.shape, (1, 2))

    self.assertTrue(np.allclose(res_out, np.array([[1., 1.]])))
    self.assertTrue(np.allclose(res_in, np.array([[1., 0.]])))
def setUp(self):
    super(ModelWrapperTest, self).setUp()
    x = Input((2,))
    z = Dense(2, activation='relu')(x)
    z = Dense(2, activation='relu')(z)
    y = Dense(1, name='logits')(z)

    self.model = ModelWrapper(Model(x, y))
    self.model._model.set_weights(
        [
            self.layer1_weights, self.internal_bias,
            self.layer2_weights, self.internal_bias,
            self.layer3_weights, self.bias
        ])

    self.layer0 = 0
    self.layer1 = 1
    self.layer2 = 2
def test_multiple_inputs(self):
    x1 = Input((5,))
    z1 = Dense(6)(x1)
    x2 = Input((1,))
    z2 = Concatenate()([z1, x2])
    z3 = Dense(7)(z2)
    y = Dense(3)(z3)

    model = ModelWrapper(Model([x1, x2], y))

    infl = InternalInfluence(model, InputCut(), ClassQoI(1), PointDoi())

    res = infl.attributions(
        [np.array([[1., 2., 3., 4., 5.]]),
         np.array([[1.]])])

    self.assertEqual(len(res), 2)
    self.assertEqual(res[0].shape, (1, 5))
    self.assertEqual(res[1].shape, (1, 1))
def test_per_timestep(self):
    num_classes = 5
    num_features = 3
    num_timesteps = 4
    num_hidden_state = 10
    batch_size = 32

    class M(Module):

        def __init__(self):
            super(M, self).__init__()
            self.rnn = GRU(num_features, num_hidden_state)
            self.dense = Linear(num_hidden_state, num_classes)

        def forward(self, x):
            # torch GRU returns (output, h_n); index 0 selects the full
            # per-timestep output sequence.
            z1 = self.rnn(x)
            z2 = self.dense(z1[0])
            return z2

    model = ModelWrapper(M(), (num_timesteps, num_features))

    super(MultiQoiTest, self).per_timestep_qoi(
        model, num_classes, num_features, num_timesteps, batch_size)
def test_internal_slice_multiple_layers(self):
    x1 = Input((5,))
    z1 = Dense(6, name='cut_layer1')(x1)
    x2 = Input((1,))
    z2 = Dense(2, name='cut_layer2')(x2)
    z3 = Dense(4)(z2)
    z4 = Concatenate()([z1, z3])
    z5 = Dense(7)(z4)
    y = Dense(3)(z5)

    model = ModelWrapper(Model([x1, x2], y))

    infl = InternalInfluence(
        model, Cut(['cut_layer1', 'cut_layer2']), ClassQoI(1), PointDoi())

    res = infl.attributions(
        [np.array([[1., 2., 3., 4., 5.]]),
         np.array([[1.]])])

    self.assertEqual(len(res), 2)
    self.assertEqual(res[0].shape, (1, 6))
    self.assertEqual(res[1].shape, (1, 2))
def test_catch_cut_name_error(self):

    class M(Module):

        def __init__(this):
            super(M, this).__init__()
            this.z1 = Linear(2, 2)
            this.z2 = ReLU()
            this.y = Linear(2, 1)

        def forward(this, x):
            z1 = this.z1(x)
            z2 = this.z2(z1)
            return this.y(z2)

    model = ModelWrapper(M(), (2,))

    with self.assertRaises(ValueError):
        infl = InternalInfluence(
            model, Cut('not_a_real_layer'), ClassQoI(0), PointDoi())
        infl.attributions(np.array([[1., 1.]]).astype('float32'))
def test_multiple_inputs(self):
    graph = Graph()

    with graph.as_default():
        x1 = tf.placeholder('float32', (None, 5))
        z1 = x1 @ tf.random.normal((5, 6))
        x2 = tf.placeholder('float32', (None, 1))
        z2 = tf.concat([z1, x2], axis=1)
        z3 = z2 @ tf.random.normal((7, 7))
        y = z3 @ tf.random.normal((7, 3))

    model = ModelWrapper(graph, [x1, x2], y)

    infl = InternalInfluence(model, InputCut(), ClassQoI(1), PointDoi())

    res = infl.attributions(
        [np.array([[1., 2., 3., 4., 5.]]),
         np.array([[1.]])])

    self.assertEqual(len(res), 2)
    self.assertEqual(res[0].shape, (1, 5))
    self.assertEqual(res[1].shape, (1, 1))