def test_gradient_pass_though(self, model, input_shape):
    """Check that gradients flow end-to-end through backbone + YOLO head.

    Builds the named backbone and its matching head, runs a forward pass on
    random input, computes an MSE loss against random targets of the expected
    output shapes, and asserts every head variable received a gradient.

    Args:
      model: backbone name, "darknet53" or "darknet_tiny"; any other value
        is silently skipped.
      input_shape: full input shape [batch, height, width, channels].
    """
    if model == "darknet53":
        # Head emits 255 channels at strides 32/16/8.
        check = {
            '1024': [input_shape[0], input_shape[1] // 32,
                     input_shape[2] // 32, 255],
            '512': [input_shape[0], input_shape[1] // 16,
                    input_shape[2] // 16, 255],
            '256': [input_shape[0], input_shape[1] // 8,
                    input_shape[2] // 8, 255],
        }
        name = "spp"
    elif model == "darknet_tiny":
        # Tiny head emits 340 channels at strides 32/16.
        check = {
            '1024': [input_shape[0], input_shape[1] // 32,
                     input_shape[2] // 32, 340],
            '256': [input_shape[0], input_shape[1] // 16,
                    input_shape[2] // 16, 340],
        }
        name = "tiny"
    else:
        # Unknown model: nothing to test. (Dead `check = None` removed —
        # it was assigned and immediately abandoned by the return.)
        return

    loss = ks.losses.MeanSquaredError()
    optimizer = ks.optimizers.SGD()
    test_layer = builder.Backbone_Builder(model)
    pred = Yolov3Head(name)

    init = tf.random_normal_initializer()
    x = tf.Variable(
        initial_value=init(shape=input_shape, dtype=tf.float32))
    # Random targets matching each expected head-output shape.
    y = {
        key: tf.Variable(initial_value=init(shape=value, dtype=tf.float32))
        for key, value in check.items()
    }

    with tf.GradientTape() as tape:
        x_cent = test_layer(x)
        x_hat = pred(x_cent)
        losses = 0
        for key in y:
            # Keras convention is loss(y_true, y_pred); MSE is symmetric
            # but the argument order is corrected for clarity.
            losses += loss(y[key], x_hat[key])

    grad = tape.gradient(losses, pred.trainable_variables)
    # Assert BEFORE applying: apply_gradients would already have raised on a
    # None gradient, making a post-apply assertion unreachable/useless.
    self.assertNotIn(None, grad)
    optimizer.apply_gradients(zip(grad, pred.trainable_variables))
    return
def test_pass_through(self, model, input_shape):
    """Check backbone forward-pass output shapes for each feature level.

    Runs the named backbone on random input and asserts the per-level
    output shapes match the expected strided shapes.

    Args:
      model: backbone name, "darknet53" or "darknet_tiny"; any other value
        is silently skipped.
      input_shape: full input shape [batch, height, width, channels].
    """
    if model == "darknet53":
        # Feature maps at strides 8/16/32 with 256/512/1024 channels.
        check = {
            '256': [input_shape[0], input_shape[1] // 8,
                    input_shape[2] // 8, 256],
            '512': [input_shape[0], input_shape[1] // 16,
                    input_shape[2] // 16, 512],
            '1024': [input_shape[0], input_shape[1] // 32,
                     input_shape[2] // 32, 1024],
        }
    elif model == "darknet_tiny":
        check = {
            '256': [input_shape[0], input_shape[1] // 16,
                    input_shape[2] // 16, 256],
            '1024': [input_shape[0], input_shape[1] // 32,
                     input_shape[2] // 32, 1024],
        }
    else:
        # Unknown model: skip, matching the sibling gradient test. The
        # original fell through with check=None and asserted against None.
        return

    init = tf.random_normal_initializer()
    x = tf.Variable(
        initial_value=init(shape=input_shape, dtype=tf.float32))
    y = builder.Backbone_Builder(model)(x)
    y_shape = {key: value.shape.as_list() for key, value in y.items()}
    self.assertAllEqual(check, y_shape)
    return