Example #1
  def testRaiseValueErrorWithInvalidDepthMultiplier(self):
    batch_size = 5
    height, width = 224, 224
    num_classes = 1000

    inputs = tf.random_uniform((batch_size, height, width, 3))
    with self.assertRaises(ValueError):
      _ = mobilenet_v1.mobilenet_v1(
          inputs, num_classes, depth_multiplier=-0.1)
    with self.assertRaises(ValueError):
      _ = mobilenet_v1.mobilenet_v1(
          inputs, num_classes, depth_multiplier=0.0)
Example #2
def build_model():
    """Build the mobilenet_v1 model for evaluation.

  Returns:
    g: graph with rewrites after insertion of quantization ops and batch norm
    folding.
    eval_ops: eval ops for inference.
    variables_to_restore: List of variables to restore from checkpoint.
  """
    g = tf.Graph()
    with g.as_default():
        inputs, labels = imagenet_input(is_training=False)

        scope = mobilenet_v1.mobilenet_v1_arg_scope(is_training=False,
                                                    weight_decay=0.0)
        with slim.arg_scope(scope):
            logits, _ = mobilenet_v1.mobilenet_v1(
                inputs,
                is_training=False,
                depth_multiplier=FLAGS.depth_multiplier,
                num_classes=FLAGS.num_classes)

        if FLAGS.quantize:
            tf.contrib.quantize.create_eval_graph()

        eval_ops = metrics(logits, labels)

    return g, eval_ops
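
A minimal usage sketch, not part of the example above: one way to consume the returned graph and eval ops with slim's single-shot evaluator. FLAGS.checkpoint_dir, FLAGS.eval_dir and the assumption that eval_ops holds the metric update ops built by metrics() are illustrative only.

import tensorflow as tf

slim = tf.contrib.slim

def eval_model():
    """Runs one evaluation pass over the graph returned by build_model()."""
    g, eval_ops = build_model()
    with g.as_default():
        # Restore the most recent checkpoint and run the metric update ops once.
        slim.evaluation.evaluate_once(
            master='',
            checkpoint_path=tf.train.latest_checkpoint(FLAGS.checkpoint_dir),
            logdir=FLAGS.eval_dir,
            eval_op=eval_ops)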
Example #3
  def testTrainEvalWithReuse(self):
    train_batch_size = 5
    eval_batch_size = 2
    height, width = 150, 150
    num_classes = 1000

    train_inputs = tf.random_uniform((train_batch_size, height, width, 3))
    mobilenet_v1.mobilenet_v1(train_inputs, num_classes)
    eval_inputs = tf.random_uniform((eval_batch_size, height, width, 3))
    logits, _ = mobilenet_v1.mobilenet_v1(eval_inputs, num_classes,
                                          reuse=True)
    predictions = tf.argmax(logits, 1)

    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      output = sess.run(predictions)
      self.assertEqual(output.shape, (eval_batch_size,))
Example #4
  def testBuildEndPointsWithDepthMultiplierLessThanOne(self):
    batch_size = 5
    height, width = 224, 224
    num_classes = 1000

    inputs = tf.random_uniform((batch_size, height, width, 3))
    _, end_points = mobilenet_v1.mobilenet_v1(inputs, num_classes)

    endpoint_keys = [key for key in end_points.keys()
                     if key.startswith('Conv')]

    _, end_points_with_multiplier = mobilenet_v1.mobilenet_v1(
        inputs, num_classes, scope='depth_multiplied_net',
        depth_multiplier=0.5)

    for key in endpoint_keys:
      original_depth = end_points[key].get_shape().as_list()[3]
      new_depth = end_points_with_multiplier[key].get_shape().as_list()[3]
      self.assertEqual(0.5 * original_depth, new_depth)
Example #5
  def testLogitsNotSqueezed(self):
    num_classes = 25
    images = tf.random_uniform([1, 224, 224, 3])
    logits, _ = mobilenet_v1.mobilenet_v1(images,
                                          num_classes=num_classes,
                                          spatial_squeeze=False)

    with self.test_session() as sess:
      tf.global_variables_initializer().run()
      logits_out = sess.run(logits)
      self.assertListEqual(list(logits_out.shape), [1, 1, 1, num_classes])
Example #6
  def testBuildPreLogitsNetwork(self):
    batch_size = 5
    height, width = 224, 224
    num_classes = None

    inputs = tf.random_uniform((batch_size, height, width, 3))
    net, end_points = mobilenet_v1.mobilenet_v1(inputs, num_classes)
    self.assertTrue(net.op.name.startswith('MobilenetV1/Logits/AvgPool'))
    self.assertListEqual(net.get_shape().as_list(), [batch_size, 1, 1, 1024])
    self.assertFalse('Logits' in end_points)
    self.assertFalse('Predictions' in end_points)
Example #7
  def testHalfSizeImages(self):
    batch_size = 5
    height, width = 112, 112
    num_classes = 1000

    inputs = tf.random_uniform((batch_size, height, width, 3))
    logits, end_points = mobilenet_v1.mobilenet_v1(inputs, num_classes)
    self.assertTrue(logits.op.name.startswith('MobilenetV1/Logits'))
    self.assertListEqual(logits.get_shape().as_list(),
                         [batch_size, num_classes])
    pre_pool = end_points['Conv2d_13_pointwise']
    self.assertListEqual(pre_pool.get_shape().as_list(),
                         [batch_size, 4, 4, 1024])
Example #8
  def testEvaluation(self):
    batch_size = 2
    height, width = 224, 224
    num_classes = 1000

    eval_inputs = tf.random_uniform((batch_size, height, width, 3))
    logits, _ = mobilenet_v1.mobilenet_v1(eval_inputs, num_classes,
                                          is_training=False)
    predictions = tf.argmax(logits, 1)

    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      output = sess.run(predictions)
      self.assertEqual(output.shape, (batch_size,))
Example #9
  def testBuildClassificationNetwork(self):
    batch_size = 5
    height, width = 224, 224
    num_classes = 1000

    inputs = tf.random_uniform((batch_size, height, width, 3))
    logits, end_points = mobilenet_v1.mobilenet_v1(inputs, num_classes)
    self.assertTrue(logits.op.name.startswith(
        'MobilenetV1/Logits/SpatialSqueeze'))
    self.assertListEqual(logits.get_shape().as_list(),
                         [batch_size, num_classes])
    self.assertTrue('Predictions' in end_points)
    self.assertListEqual(end_points['Predictions'].get_shape().as_list(),
                         [batch_size, num_classes])
Example #10
def build_model():
    """Builds graph for model to train with rewrites for quantization.

  Returns:
    g: Graph with fake quantization ops and batch norm folding suitable for
    training quantized weights.
    train_tensor: Train op for execution during training.
  """
    g = tf.Graph()
    with g.as_default(), tf.device(
            tf.train.replica_device_setter(FLAGS.ps_tasks)):
        inputs, labels = imagenet_input(is_training=True)
        with slim.arg_scope(
                mobilenet_v1.mobilenet_v1_arg_scope(is_training=True)):
            logits, _ = mobilenet_v1.mobilenet_v1(
                inputs,
                is_training=True,
                depth_multiplier=FLAGS.depth_multiplier,
                num_classes=FLAGS.num_classes)

        tf.losses.softmax_cross_entropy(labels, logits)

        # Call the rewriter to produce a graph with fake quant ops and folded
        # batch norms. quant_delay delays the start of quantization until
        # quant_delay steps have passed, allowing for better model accuracy.
        if FLAGS.quantize:
            tf.contrib.quantize.create_training_graph(
                quant_delay=get_quant_delay())

        total_loss = tf.losses.get_total_loss(name='total_loss')
        # Configure the learning rate using an exponential decay.
        num_epochs_per_decay = 2.5
        imagenet_size = 1271167
        decay_steps = int(imagenet_size / FLAGS.batch_size *
                          num_epochs_per_decay)

        learning_rate = tf.train.exponential_decay(
            get_learning_rate(),
            tf.train.get_or_create_global_step(),
            decay_steps,
            _LEARNING_RATE_DECAY_FACTOR,
            staircase=True)
        opt = tf.train.GradientDescentOptimizer(learning_rate)

        train_tensor = slim.learning.create_train_op(total_loss, optimizer=opt)

    slim.summaries.add_scalar_summary(total_loss, 'total_loss', 'losses')
    slim.summaries.add_scalar_summary(learning_rate, 'learning_rate',
                                      'training')
    return g, train_tensor
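
A minimal usage sketch, assuming FLAGS.checkpoint_dir and FLAGS.number_of_steps are defined elsewhere in the script: the returned graph and train op can be handed to slim's training loop, which takes care of session management, checkpointing and summary writing.

import tensorflow as tf

slim = tf.contrib.slim

def train_model():
    """Trains the model using the graph and train op from build_model()."""
    g, train_tensor = build_model()
    with g.as_default():
        # slim.learning.train repeatedly runs train_tensor, writing checkpoints
        # and summaries to FLAGS.checkpoint_dir (hypothetical flag).
        slim.learning.train(
            train_tensor,
            FLAGS.checkpoint_dir,
            number_of_steps=FLAGS.number_of_steps,
            save_summaries_secs=60,
            save_interval_secs=600)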
Example #11
  def testUnknownBatchSize(self):
    batch_size = 1
    height, width = 224, 224
    num_classes = 1000

    inputs = tf.placeholder(tf.float32, (None, height, width, 3))
    logits, _ = mobilenet_v1.mobilenet_v1(inputs, num_classes)
    self.assertTrue(logits.op.name.startswith('MobilenetV1/Logits'))
    self.assertListEqual(logits.get_shape().as_list(),
                         [None, num_classes])
    images = tf.random_uniform((batch_size, height, width, 3))

    with self.test_session() as sess:
      sess.run(tf.global_variables_initializer())
      output = sess.run(logits, {inputs: images.eval()})
      self.assertEqual(output.shape, (batch_size, num_classes))
Example #12
  def testUnknownImageShape(self):
    tf.reset_default_graph()
    batch_size = 2
    height, width = 224, 224
    num_classes = 1000
    input_np = np.random.uniform(0, 1, (batch_size, height, width, 3))
    with self.test_session() as sess:
      inputs = tf.placeholder(tf.float32, shape=(batch_size, None, None, 3))
      logits, end_points = mobilenet_v1.mobilenet_v1(inputs, num_classes)
      self.assertTrue(logits.op.name.startswith('MobilenetV1/Logits'))
      self.assertListEqual(logits.get_shape().as_list(),
                           [batch_size, num_classes])
      pre_pool = end_points['Conv2d_13_pointwise']
      feed_dict = {inputs: input_np}
      tf.global_variables_initializer().run()
      pre_pool_out = sess.run(pre_pool, feed_dict=feed_dict)
      self.assertListEqual(list(pre_pool_out.shape), [batch_size, 7, 7, 1024])