Example #1
 def test_fw(self):
     """Tests the forward computation is the same."""
     with tf.Graph().as_default(), self.test_session(
     ) as sess, log.verbose_level(2):
         config = get_config("resnet-test")
         config.num_channel = 4
         config.height = 8
         config.width = 8
         np.random.seed(0)
         xval = np.random.uniform(-1.0, 1.0,
                                  [10, 8, 8, 4]).astype(np.float32)
         x = tf.constant(xval)
         x1 = x[:5, :, :, :]
         x2 = x[5:, :, :, :]
         # Run the two halves as separate single-tower models because batch
         # normalization computes its statistics per tower, so a single
         # full-batch run would not match the multi-GPU version.
         with tf.variable_scope("Model", reuse=None):
             m11 = get_model("resnet", config, inp=x1)
         with tf.variable_scope("Model", reuse=True):
             m12 = get_model("resnet", config, inp=x2)
         with tf.variable_scope("Model", reuse=True):
             m2 = get_multi_gpu_model("resnet",
                                      config,
                                      num_replica=2,
                                      inp=x)
         sess.run(tf.global_variables_initializer())
         y11, y12, y2 = sess.run([m11.output, m12.output, m2.output])
         np.testing.assert_allclose(y11, y2[:5, :], rtol=1e-5)
         np.testing.assert_allclose(y12, y2[5:, :], rtol=1e-5)
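
For reference, a minimal sketch of the data-parallel pattern this test relies on: the multi-GPU wrapper splits the input batch across replicas, builds one tower per shard with shared variables, and concatenates the tower outputs. This is an illustration only (assuming the file's existing import tensorflow as tf), not the repo's actual get_multi_gpu_model.

def _split_batch_forward(build_tower, inp, num_replica):
  """Illustrative only: one tower per input shard, shared weights, concat."""
  shards = tf.split(inp, num_replica, axis=0)
  outputs = []
  for ii, shard in enumerate(shards):
    # All towers share the same variable scope so the weights are reused.
    with tf.variable_scope(tf.get_variable_scope(), reuse=(ii > 0)):
      with tf.name_scope("tower_{}".format(ii)):
        outputs.append(build_tower(shard))
  return tf.concat(outputs, axis=0)
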
  def _test_single_pass(self, method):
    config = get_config("resnet-test")
    config.momentum = 0.0
    config.base_learn_rate = 1e-1
    np.random.seed(0)
    BSIZE = config.batch_size
    xval = np.random.uniform(
        -1.0, 1.0, [BSIZE, config.height, config.width,
                    config.num_channel]).astype(np.float32)
    yval = np.floor(np.random.uniform(0, 9.9, [BSIZE])).astype(np.int32)

    # Run multi tower version.
    with tf.Graph().as_default(), self.test_session(
    ) as sess, log.verbose_level(2):
      x = tf.constant(xval)
      y = tf.constant(yval)
      with tf.variable_scope("Model", reuse=None):
        m1 = get_multi_gpu_model(
            "resnet", config, num_replica=2, inp=x, label=y)
      sess.run(tf.global_variables_initializer())
      m1.assign_lr(sess, config.base_learn_rate)
      tvars = tf.trainable_variables()
      tvars_str = [vv.name for vv in tvars]

      saver = tf.train.Saver(tvars)
      if not os.path.exists(FOLDER):
        os.makedirs(FOLDER)
      saver.save(sess, CKPT_FNAME)
      m1.train_step(sess)
      tvars_v1 = sess.run(tvars)
      tvars_d1 = dict(zip(tvars_str, tvars_v1))

    # Run MultiPassModel.
    with tf.Graph().as_default(), self.test_session(
    ) as sess, log.verbose_level(2):
      with tf.variable_scope("Model", reuse=True):
        m2 = MultiPassModel(
            config,
            ResNetModel,
            num_passes=2,
            debug=True,
            inp=x,
            label=y,
            aggregate_method=method)
      tvars = tf.trainable_variables()
      saver = tf.train.Saver(tvars)
      sess.run(tf.global_variables_initializer())
      saver.restore(sess, CKPT_FNAME)
      m2.assign_lr(sess, config.base_learn_rate)
      m2.train_step(sess)
      tvars_v2 = sess.run(tvars)
      tvars_d2 = dict(zip(tvars_str, tvars_v2))

    for vv in tvars_str:
      log.info(vv, verbose=2)
      np.testing.assert_allclose(
          tvars_d1[vv], tvars_d2[vv], rtol=1e-4, atol=1e-6)
      log.info("...ok", verbose=2)
Example #3
def _get_config():
    if FLAGS.config is not None:
        return get_config_from_json(FLAGS.config)
    else:
        if FLAGS.restore:
            save_folder = os.path.realpath(
                os.path.abspath(os.path.join(FLAGS.results, FLAGS.id)))
            return get_config_from_json(os.path.join(save_folder, "conf.json"))
        else:
            return get_config(FLAGS.model)
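
_get_config reads FLAGS.config, FLAGS.restore, FLAGS.results, FLAGS.id, and FLAGS.model, so the script presumably defines flags roughly like the following; defaults and help strings here are placeholders, not the repo's actual values.

# Sketch of the flags _get_config expects; defaults and help text are placeholders.
flags = tf.flags
flags.DEFINE_string("config", None, "Path to a JSON config file.")
flags.DEFINE_string("model", "resnet-test", "Name of a predefined config.")
flags.DEFINE_string("results", "results", "Folder holding experiment results.")
flags.DEFINE_string("id", None, "Experiment ID to restore from.")
flags.DEFINE_bool("restore", False, "Whether to restore a previous run.")
FLAGS = flags.FLAGS
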
  def _test_grad(self, modelname):
    """Tests the manual gradients for every layer."""
    with tf.Graph().as_default(), self.test_session(
    ) as sess, log.verbose_level(2):
      config = get_config(modelname)
      config.manual_gradients = False
      config.filter_initialization = "uniform"

      # Declare a regular bprop model.
      m1 = get_model(config.model_class, config, is_training=True)
      name_list1 = [vv.name for _, vv in m1.grads_and_vars]
      grads1 = [gg for gg, _ in m1.grads_and_vars]
      tf.get_variable_scope().reuse_variables()

      # Declare a manual bprop model.
      config.manual_gradients = True
      m2 = get_model(config.model_class, config, is_training=True)
      name_list2 = [vv.name for _, vv in m2.grads_and_vars]
      grads2 = [gg for gg, _ in m2.grads_and_vars]

      # Check lengths are equal.
      self.assertEqual(len(m1.grads_and_vars), len(m2.grads_and_vars))

      # Prepare synthetic data.
      xval = np.random.uniform(-1.0, 1.0, [
          config.batch_size, config.height, config.width, config.num_channel
      ]).astype(np.float32)
      yval = np.floor(
          np.random.uniform(0.0, config.num_classes - 0.1,
                            [config.batch_size])).astype(np.int32)
      sess.run(tf.global_variables_initializer())

      g1 = sess.run(grads1, feed_dict={m1.input: xval, m1.label: yval})
      gdict1 = dict(zip(name_list1, g1))
      g2 = sess.run(grads2, feed_dict={m2.input: xval, m2.label: yval})
      gdict2 = dict(zip(name_list2, g2))

      # Check two gradients are the same.
      check_two_dict(gdict1, gdict2, tol=1e0, name=modelname)
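
check_two_dict is not shown in these snippets; a minimal sketch consistent with how the tests call it (compare two name-to-array dicts within a tolerance) could look like this. The exact signature and tolerance handling in the repo may differ.

def check_two_dict(d1, d2, tol=1e-5, name=""):
  """Sketch only: asserts two {variable_name: ndarray} dicts match within tol."""
  assert set(d1.keys()) == set(d2.keys()), "Variable names differ"
  for kk in sorted(d1.keys()):
    np.testing.assert_allclose(
        d1[kk], d2[kk], rtol=tol, atol=tol,
        err_msg="{} {}".format(name, kk))
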
  def _test_getmodel(self, modelname):
    with tf.Graph().as_default(), self.test_session(), log.verbose_level(2):
      config = get_config(modelname)
      get_model(config.model_class, config)
def _get_config():
    # Manually set config.
    if FLAGS.config is not None:
        return get_config_from_json(FLAGS.config)
    else:
        return get_config(FLAGS.model)
Example #7
    def test_bk(self):
        """Tests the backward computation is the same."""
        with tf.Graph().as_default(), self.test_session(
        ) as sess, log.verbose_level(2):
            config = get_config("resnet-test")
            config.num_channel = 4
            config.height = 8
            config.width = 8
            np.random.seed(0)
            xval = np.random.uniform(-1.0, 1.0,
                                     [10, 8, 8, 4]).astype(np.float32)
            yval = np.floor(np.random.uniform(0, 9.9, [10])).astype(np.int32)
            x = tf.constant(xval)
            y = tf.constant(yval)
            # log.fatal(y.get_shape())
            x1 = x[:5, :, :, :]
            x2 = x[5:, :, :, :]
            y1 = y[:5]
            y2 = y[5:]
            with tf.variable_scope("Model", reuse=None):
                m11 = get_model("resnet", config, inp=x1, label=y1)
            with tf.variable_scope("Model", reuse=True):
                m12 = get_model("resnet", config, inp=x2, label=y2)
            with tf.variable_scope("Model", reuse=True):
                m2 = get_multi_gpu_model("resnet",
                                         config,
                                         num_replica=2,
                                         inp=x,
                                         label=y)
            sess.run(tf.global_variables_initializer())
            tvars = tf.global_variables()

            name_list11 = [vv.name for _, vv in m11.grads_and_vars]
            grads11 = [gg for gg, _ in m11.grads_and_vars]
            g11 = sess.run(grads11)
            gdict11 = dict(zip(name_list11, g11))
            name_list12 = [vv.name for _, vv in m12.grads_and_vars]
            grads12 = [gg for gg, _ in m12.grads_and_vars]
            g12 = sess.run(grads12)
            gdict12 = dict(zip(name_list12, g12))

            name_list21 = [vv.name for _, vv in m2.tower_grads_and_vars[0]]
            grads21 = [gg for gg, _ in m2.tower_grads_and_vars[0]]
            g21 = sess.run(grads21)
            gdict21 = dict(zip(name_list21, g21))
            name_list22 = [vv.name for _, vv in m2.tower_grads_and_vars[1]]
            grads22 = [gg for gg, _ in m2.tower_grads_and_vars[1]]
            g22 = sess.run(grads22)
            gdict22 = dict(zip(name_list22, g22))

            # Check two gradients are the same.
            check_two_dict(gdict11, gdict21)
            check_two_dict(gdict12, gdict22)

            # Check the average gradients are the same.
            name_list2 = [vv.name for _, vv in m2.grads_and_vars]
            grads2 = [gg for gg, _ in m2.grads_and_vars]
            g2 = sess.run(grads2)
            gdict2 = dict(zip(name_list2, g2))

            name_list1 = name_list11
            g1 = [(gdict11[kk] + gdict12[kk]) / 2.0 for kk in name_list1]
            gdict1 = dict(zip(name_list1, g1))
            check_two_dict(gdict1, gdict2)
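
The final check assumes m2.grads_and_vars holds, for each variable, the mean of the per-tower gradients. One standard way to compute that, in the style of the classic TensorFlow multi-GPU pattern, is sketched below; the repo's get_multi_gpu_model may implement it differently.

def _average_tower_grads(tower_grads_and_vars):
  """Sketch: tower_grads_and_vars is a list (one per tower) of [(grad, var), ...]."""
  avg_grads_and_vars = []
  for grads_and_var in zip(*tower_grads_and_vars):
    # Each entry pairs the same variable across towers, in the same order.
    grads = [gg for gg, _ in grads_and_var]
    var = grads_and_var[0][1]
    avg_grads_and_vars.append((tf.add_n(grads) / float(len(grads)), var))
  return avg_grads_and_vars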