def testVariablesSetDevice(self):
  batch_size = 5
  height, width = 299, 299
  num_classes = 1000
  inputs = tf.random_uniform((batch_size, height, width, 3))
  # Force all Variables to reside on the device.
  with tf.variable_scope('on_cpu'), tf.device('/cpu:0'):
    inception.inception_v4(inputs, num_classes)
  with tf.variable_scope('on_gpu'), tf.device('/gpu:0'):
    inception.inception_v4(inputs, num_classes)
  for v in tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='on_cpu'):
    self.assertDeviceEqual(v.device, '/cpu:0')
  for v in tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope='on_gpu'):
    self.assertDeviceEqual(v.device, '/gpu:0')
def model(x, H, reuse, is_training=True):
  slim_attention_lname = 'Mixed_3b'
  if H['slim_basename'] == 'resnet_v1_101':
    with slim.arg_scope(resnet.resnet_arg_scope()):
      _, T = resnet.resnet_v1_101(x, is_training=is_training,
                                  num_classes=1000, reuse=reuse)
  elif H['slim_basename'] == 'resnet_v2_152':
    with slim.arg_scope(resnet.resnet_arg_scope()):
      _, T = resnet.resnet_v2_152(x, is_training=is_training,
                                  num_classes=1001, reuse=reuse)
  elif H['slim_basename'] == 'InceptionV1':
    with slim.arg_scope(inception.inception_v1_arg_scope()):
      _, T = inception.inception_v1(x, is_training=is_training,
                                    num_classes=1001, spatial_squeeze=False,
                                    reuse=reuse)
  elif H['slim_basename'] == 'InceptionV2':
    with slim.arg_scope(inception.inception_v2_arg_scope()):
      _, T = inception.inception_v2(x, is_training=is_training,
                                    num_classes=1001, spatial_squeeze=False,
                                    reuse=reuse)
  elif H['slim_basename'] == 'InceptionV3':
    with slim.arg_scope(inception.inception_v3_arg_scope()):
      _, T = inception.inception_v3(x, is_training=is_training,
                                    num_classes=1001, spatial_squeeze=False,
                                    reuse=reuse)
    slim_attention_lname = 'Mixed_5b'
  elif H['slim_basename'] == 'InceptionV4':
    with slim.arg_scope(inception.inception_v4_arg_scope()):
      _, T = inception.inception_v4(x, is_training=is_training,
                                    num_classes=1001, reuse=reuse)
    slim_attention_lname = 'Mixed_4a'
  elif H['slim_basename'] == 'MobilenetV1':
    with slim.arg_scope(mobilenet_v1.mobilenet_v1_arg_scope()):
      _, T = mobilenet_v1.mobilenet_v1(x, is_training=is_training, reuse=reuse)
  # print '\n'.join(map(str, [(k, v.op.outputs[0].get_shape()) for k, v in T.iteritems()]))
  coarse_feat = T[H['slim_top_lname']][:, :, :, :H['later_feat_channels']]
  assert coarse_feat.op.outputs[0].get_shape()[3] == H['later_feat_channels']
  # fine feat can be used to reinspect input
  attention_lname = H.get('slim_attention_lname', slim_attention_lname)
  early_feat = T[attention_lname]
  return coarse_feat, early_feat
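# A minimal, hedged usage sketch for model() above; it is not part of the
# original code and relies on the surrounding module's imports (tf, slim, the
# nets modules). The hyperparameter dict H is hypothetical and only shows the
# keys read by model(): 'slim_basename', 'slim_top_lname',
# 'later_feat_channels', and optionally 'slim_attention_lname'. The values
# chosen here are illustrative, not prescribed.
def _example_model_usage():
  H = {'slim_basename': 'InceptionV4',
       'slim_top_lname': 'Mixed_7d',        # 8x8x1536 endpoint of InceptionV4
       'later_feat_channels': 1024}         # keep the first 1024 channels
  x = tf.placeholder(tf.float32, [1, 299, 299, 3])
  # Build the backbone once; a second tower sharing weights would pass
  # reuse=True instead.
  coarse_feat, early_feat = model(x, H, reuse=False, is_training=True)
  return coarse_feat, early_feat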
def testTrainEvalWithReuse(self):
  train_batch_size = 5
  eval_batch_size = 2
  height, width = 150, 150
  num_classes = 1000
  with self.test_session() as sess:
    train_inputs = tf.random_uniform(
        (train_batch_size, height, width, 3))
    inception.inception_v4(train_inputs, num_classes)
    eval_inputs = tf.random_uniform(
        (eval_batch_size, height, width, 3))
    logits, _ = inception.inception_v4(eval_inputs,
                                       num_classes,
                                       is_training=False,
                                       reuse=True)
    predictions = tf.argmax(logits, 1)
    sess.run(tf.global_variables_initializer())
    output = sess.run(predictions)
    self.assertEquals(output.shape, (eval_batch_size,))
def testBuildWithoutAuxLogits(self):
  batch_size = 5
  height, width = 299, 299
  num_classes = 1000
  inputs = tf.random_uniform((batch_size, height, width, 3))
  logits, endpoints = inception.inception_v4(inputs, num_classes,
                                             create_aux_logits=False)
  self.assertFalse('AuxLogits' in endpoints)
  self.assertTrue(logits.op.name.startswith('InceptionV4/Logits'))
  self.assertListEqual(logits.get_shape().as_list(),
                       [batch_size, num_classes])
def testHalfSizeImages(self):
  batch_size = 5
  height, width = 150, 150
  num_classes = 1000
  inputs = tf.random_uniform((batch_size, height, width, 3))
  logits, end_points = inception.inception_v4(inputs, num_classes)
  self.assertTrue(logits.op.name.startswith('InceptionV4/Logits'))
  self.assertListEqual(logits.get_shape().as_list(),
                       [batch_size, num_classes])
  pre_pool = end_points['Mixed_7d']
  self.assertListEqual(pre_pool.get_shape().as_list(),
                       [batch_size, 3, 3, 1536])
def testEvaluation(self):
  batch_size = 2
  height, width = 299, 299
  num_classes = 1000
  with self.test_session() as sess:
    eval_inputs = tf.random_uniform((batch_size, height, width, 3))
    logits, _ = inception.inception_v4(eval_inputs,
                                       num_classes,
                                       is_training=False)
    predictions = tf.argmax(logits, 1)
    sess.run(tf.global_variables_initializer())
    output = sess.run(predictions)
    self.assertEquals(output.shape, (batch_size,))
def testUnknownBatchSize(self):
  batch_size = 1
  height, width = 299, 299
  num_classes = 1000
  with self.test_session() as sess:
    inputs = tf.placeholder(tf.float32, (None, height, width, 3))
    logits, _ = inception.inception_v4(inputs, num_classes)
    self.assertTrue(logits.op.name.startswith('InceptionV4/Logits'))
    self.assertListEqual(logits.get_shape().as_list(),
                         [None, num_classes])
    images = tf.random_uniform((batch_size, height, width, 3))
    sess.run(tf.global_variables_initializer())
    output = sess.run(logits, {inputs: images.eval()})
    self.assertEquals(output.shape, (batch_size, num_classes))
def testAllEndPointsShapes(self):
  batch_size = 5
  height, width = 299, 299
  num_classes = 1000
  inputs = tf.random_uniform((batch_size, height, width, 3))
  _, end_points = inception.inception_v4(inputs, num_classes)
  endpoints_shapes = {'Conv2d_1a_3x3': [batch_size, 149, 149, 32],
                      'Conv2d_2a_3x3': [batch_size, 147, 147, 32],
                      'Conv2d_2b_3x3': [batch_size, 147, 147, 64],
                      'Mixed_3a': [batch_size, 73, 73, 160],
                      'Mixed_4a': [batch_size, 71, 71, 192],
                      'Mixed_5a': [batch_size, 35, 35, 384],
                      # 4 x Inception-A blocks
                      'Mixed_5b': [batch_size, 35, 35, 384],
                      'Mixed_5c': [batch_size, 35, 35, 384],
                      'Mixed_5d': [batch_size, 35, 35, 384],
                      'Mixed_5e': [batch_size, 35, 35, 384],
                      # Reduction-A block
                      'Mixed_6a': [batch_size, 17, 17, 1024],
                      # 7 x Inception-B blocks
                      'Mixed_6b': [batch_size, 17, 17, 1024],
                      'Mixed_6c': [batch_size, 17, 17, 1024],
                      'Mixed_6d': [batch_size, 17, 17, 1024],
                      'Mixed_6e': [batch_size, 17, 17, 1024],
                      'Mixed_6f': [batch_size, 17, 17, 1024],
                      'Mixed_6g': [batch_size, 17, 17, 1024],
                      'Mixed_6h': [batch_size, 17, 17, 1024],
                      # Reduction-B block
                      'Mixed_7a': [batch_size, 8, 8, 1536],
                      # 3 x Inception-C blocks
                      'Mixed_7b': [batch_size, 8, 8, 1536],
                      'Mixed_7c': [batch_size, 8, 8, 1536],
                      'Mixed_7d': [batch_size, 8, 8, 1536],
                      # Logits and predictions
                      'AuxLogits': [batch_size, num_classes],
                      'PreLogitsFlatten': [batch_size, 1536],
                      'Logits': [batch_size, num_classes],
                      'Predictions': [batch_size, num_classes]}
  self.assertItemsEqual(endpoints_shapes.keys(), end_points.keys())
  for endpoint_name in endpoints_shapes:
    expected_shape = endpoints_shapes[endpoint_name]
    self.assertTrue(endpoint_name in end_points)
    self.assertListEqual(end_points[endpoint_name].get_shape().as_list(),
                         expected_shape)
def testBuildLogits(self):
  batch_size = 5
  height, width = 299, 299
  num_classes = 1000
  inputs = tf.random_uniform((batch_size, height, width, 3))
  logits, end_points = inception.inception_v4(inputs, num_classes)
  auxlogits = end_points['AuxLogits']
  predictions = end_points['Predictions']
  self.assertTrue(auxlogits.op.name.startswith('InceptionV4/AuxLogits'))
  self.assertListEqual(auxlogits.get_shape().as_list(),
                       [batch_size, num_classes])
  self.assertTrue(logits.op.name.startswith('InceptionV4/Logits'))
  self.assertListEqual(logits.get_shape().as_list(),
                       [batch_size, num_classes])
  self.assertTrue(
      predictions.op.name.startswith('InceptionV4/Logits/Predictions'))
  self.assertListEqual(predictions.get_shape().as_list(),
                       [batch_size, num_classes])
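# A hedged sketch of the harness the test methods above assume but which is
# not shown in these snippets: in a TF-Slim style test file they live inside a
# tf.test.TestCase subclass and are executed via tf.test.main(). The class
# name and import paths below are assumptions for illustration.
#
#   import tensorflow as tf
#   from nets import inception
#
#   class InceptionV4Test(tf.test.TestCase):
#     # ... the test methods above go here ...
#     pass
#
#   if __name__ == '__main__':
#     tf.test.main()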