Example #1
Builds the pre-logits CIFAR model by passing num_classes=None: no 'AuxLogits' or 'Predictions' end points are created, and the network output is the [batch_size, 768] global-pool feature vector.
 def testBuildPreLogitsCifarModel(self):
     batch_size = 5
     height, width = 32, 32
     num_classes = None
     inputs = tf.random_uniform((batch_size, height, width, 3))
     tf.train.create_global_step()
     with slim.arg_scope(nasnet.nasnet_cifar_arg_scope()):
         net, end_points = nasnet.build_nasnet_cifar(inputs, num_classes)
     self.assertFalse('AuxLogits' in end_points)
     self.assertFalse('Predictions' in end_points)
     self.assertTrue(net.op.name.startswith('final_layer/Mean'))
     self.assertListEqual(net.get_shape().as_list(), [batch_size, 768])
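These snippets are methods of a tf.test.TestCase subclass; the page omits the module-level setup they rely on. Below is a minimal harness sketch: the exact import paths (tf_slim vs. the older tf.contrib.slim, the nets.nasnet package from tensorflow/models/research/slim) and the class name NASNetCifarTest are assumptions, not part of the original test file.

import tensorflow.compat.v1 as tf  # assumed: the tests use the TF1 graph-mode API
import tf_slim as slim             # assumed: older checkouts use slim = tf.contrib.slim
from nets.nasnet import nasnet     # assumed path inside tensorflow/models/research/slim

tf.disable_v2_behavior()           # keep graph-mode semantics when running under TF2


class NASNetCifarTest(tf.test.TestCase):  # hypothetical class name
    # Paste the test methods from the examples on this page here.
    pass


if __name__ == '__main__':
    tf.test.main()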
Example #2
Overrides the 'data_format' hyperparameter to 'NCHW' via nasnet.cifar_config() and checks that the 'Stem' end point comes out channels-first.
 def testOverrideHParamsCifarModel(self):
     batch_size = 5
     height, width = 32, 32
     num_classes = 10
     inputs = tf.random_uniform((batch_size, height, width, 3))
     tf.train.create_global_step()
     config = nasnet.cifar_config()
     config.set_hparam('data_format', 'NCHW')
     with slim.arg_scope(nasnet.nasnet_cifar_arg_scope()):
         _, end_points = nasnet.build_nasnet_cifar(inputs,
                                                   num_classes,
                                                   config=config)
     self.assertListEqual(end_points['Stem'].shape.as_list(),
                          [batch_size, 96, 32, 32])
Example #3
Toggles the 'use_aux_head' hyperparameter and checks that the 'AuxLogits' end point is present exactly when the auxiliary head is enabled.
 def testNoAuxHeadCifarModel(self):
     batch_size = 5
     height, width = 32, 32
     num_classes = 10
     for use_aux_head in (True, False):
         tf.reset_default_graph()
         inputs = tf.random_uniform((batch_size, height, width, 3))
         tf.train.create_global_step()
         config = nasnet.cifar_config()
         config.set_hparam('use_aux_head', int(use_aux_head))
         with slim.arg_scope(nasnet.nasnet_cifar_arg_scope()):
             _, end_points = nasnet.build_nasnet_cifar(inputs,
                                                       num_classes,
                                                       config=config)
         self.assertEqual('AuxLogits' in end_points, use_aux_head)
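As Examples #2 and #3 show, nasnet.cifar_config() returns an HParams-style object whose fields can be overridden with set_hparam before the graph is built. A minimal combined sketch, assuming the harness above and using only hyperparameter names that appear in these examples:

config = nasnet.cifar_config()
config.set_hparam('use_aux_head', 0)      # drop the auxiliary classifier, as in Example #3
config.set_hparam('data_format', 'NCHW')  # channels-first layout, as in Example #2

inputs = tf.random_uniform((1, 32, 32, 3))
tf.train.create_global_step()
with slim.arg_scope(nasnet.nasnet_cifar_arg_scope()):
    _, end_points = nasnet.build_nasnet_cifar(inputs, 10, config=config)
assert 'AuxLogits' not in end_points      # aux head was disabled above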
Example #4
Builds the full CIFAR classification model and checks that the 'AuxLogits', 'Logits', and 'Predictions' end points all have shape [batch_size, num_classes].
 def testBuildLogitsCifarModel(self):
     batch_size = 5
     height, width = 32, 32
     num_classes = 10
     inputs = tf.random_uniform((batch_size, height, width, 3))
     tf.train.create_global_step()
     with slim.arg_scope(nasnet.nasnet_cifar_arg_scope()):
         logits, end_points = nasnet.build_nasnet_cifar(inputs, num_classes)
     auxlogits = end_points['AuxLogits']
     predictions = end_points['Predictions']
     self.assertListEqual(auxlogits.get_shape().as_list(),
                          [batch_size, num_classes])
     self.assertListEqual(logits.get_shape().as_list(),
                          [batch_size, num_classes])
     self.assertListEqual(predictions.get_shape().as_list(),
                          [batch_size, num_classes])
Example #5
Toggles the 'use_bounded_activation' hyperparameter and checks that every ReLU-family op in the graph is 'Relu6' exactly when bounded activations are requested.
 def testUseBoundedActivationCifarModel(self):
     batch_size = 1
     height, width = 32, 32
     num_classes = 10
     for use_bounded_activation in (True, False):
         tf.reset_default_graph()
         inputs = tf.random_uniform((batch_size, height, width, 3))
         config = nasnet.cifar_config()
         config.set_hparam('use_bounded_activation', use_bounded_activation)
         with slim.arg_scope(nasnet.nasnet_cifar_arg_scope()):
             _, _ = nasnet.build_nasnet_cifar(inputs,
                                              num_classes,
                                              config=config)
         for node in tf.get_default_graph().as_graph_def().node:
             if node.op.startswith('Relu'):
                 self.assertEqual(node.op == 'Relu6',
                                  use_bounded_activation)
Example #6
Builds the CIFAR model and verifies the complete set of end point names and shapes, from the stem through the normal and reduction cells to the logits.
 def testAllEndPointsShapesCifarModel(self):
     batch_size = 5
     height, width = 32, 32
     num_classes = 10
     inputs = tf.random_uniform((batch_size, height, width, 3))
     tf.train.create_global_step()
     with slim.arg_scope(nasnet.nasnet_cifar_arg_scope()):
         _, end_points = nasnet.build_nasnet_cifar(inputs, num_classes)
     endpoints_shapes = {
         'Stem': [batch_size, 32, 32, 96],
         'Cell_0': [batch_size, 32, 32, 192],
         'Cell_1': [batch_size, 32, 32, 192],
         'Cell_2': [batch_size, 32, 32, 192],
         'Cell_3': [batch_size, 32, 32, 192],
         'Cell_4': [batch_size, 32, 32, 192],
         'Cell_5': [batch_size, 32, 32, 192],
         'Cell_6': [batch_size, 16, 16, 384],
         'Cell_7': [batch_size, 16, 16, 384],
         'Cell_8': [batch_size, 16, 16, 384],
         'Cell_9': [batch_size, 16, 16, 384],
         'Cell_10': [batch_size, 16, 16, 384],
         'Cell_11': [batch_size, 16, 16, 384],
         'Cell_12': [batch_size, 8, 8, 768],
         'Cell_13': [batch_size, 8, 8, 768],
         'Cell_14': [batch_size, 8, 8, 768],
         'Cell_15': [batch_size, 8, 8, 768],
         'Cell_16': [batch_size, 8, 8, 768],
         'Cell_17': [batch_size, 8, 8, 768],
         'Reduction_Cell_0': [batch_size, 16, 16, 256],
         'Reduction_Cell_1': [batch_size, 8, 8, 512],
         'global_pool': [batch_size, 768],
         # Logits and predictions
         'AuxLogits': [batch_size, num_classes],
         'Logits': [batch_size, num_classes],
         'Predictions': [batch_size, num_classes]
     }
     self.assertItemsEqual(endpoints_shapes.keys(), end_points.keys())
     for endpoint_name in endpoints_shapes:
         tf.logging.info('Endpoint name: {}'.format(endpoint_name))
         expected_shape = endpoints_shapes[endpoint_name]
         self.assertTrue(endpoint_name in end_points)
         self.assertListEqual(
             end_points[endpoint_name].get_shape().as_list(),
             expected_shape)
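Beyond the unit tests, the same builder can be exercised with a forward pass on randomly initialized weights. This is a hedged sketch, not part of the test file, reusing the assumed harness imports above; the expected shapes follow from Example #6.

with tf.Graph().as_default():
    images = tf.random_uniform((2, 32, 32, 3))
    tf.train.create_global_step()
    with slim.arg_scope(nasnet.nasnet_cifar_arg_scope()):
        logits, end_points = nasnet.build_nasnet_cifar(images, num_classes=10)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        logits_val, pool_val = sess.run([logits, end_points['global_pool']])
        print(logits_val.shape)  # expected (2, 10)
        print(pool_val.shape)    # expected (2, 768)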