def train(trained_models_dir, estimator_model_dir, training_chunk_dir,
          generation, params):
  """Train the latest model from gathered data.

  Args:
    trained_models_dir: Where to export the completed generation.
    estimator_model_dir: tf.estimator model directory.
    training_chunk_dir: Directory where gathered training chunks are.
    generation: Which generation you are training.
    params: A MiniGoParams instance of hyperparameters for the model.
  """
  new_model_name = utils.generate_model_name(generation)
  print('New model will be {}'.format(new_model_name))
  new_model = os.path.join(trained_models_dir, new_model_name)

  print('Training on gathered game data...')
  tf_records = sorted(
      tf.gfile.Glob(os.path.join(training_chunk_dir, '*'+_TF_RECORD_SUFFIX)))
  tf_records = tf_records[
      -(params.train_window_size // params.examples_per_chunk):]

  print('Training from: {} to {}'.format(tf_records[0], tf_records[-1]))
  with utils.logged_timer('Training'):
    dualnet.train(estimator_model_dir, tf_records, generation, params)
    dualnet.export_model(estimator_model_dir, new_model)
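For context, a minimal sketch of how this step might be driven from a pipeline loop. The directory layout, the number of generations, and the MiniGoParams constructor are assumptions; only the train signature comes from the example above.

import os

import model_params  # assumed to provide MiniGoParams, as in the test examples below

params = model_params.MiniGoParams()   # assumption: non-dummy hyperparameter class
base_dir = '/tmp/minigo'               # hypothetical working area
trained_models_dir = os.path.join(base_dir, 'trained_models')
estimator_model_dir = os.path.join(base_dir, 'estimator_model_dir')
training_chunk_dir = os.path.join(base_dir, 'training_chunks')

for generation in range(1, 4):  # arbitrary number of generations for illustration
  # Self-play and chunk gathering would populate training_chunk_dir here.
  train(trained_models_dir, estimator_model_dir, training_chunk_dir,
        generation, params)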
Example #3
def train(trained_models_dir, estimator_model_dir, training_chunk_dir, params):
    """Train the latest model from gathered data.

  Args:
    trained_models_dir: Where to export the completed generation.
    estimator_model_dir: tf.estimator model directory.
    training_chunk_dir: Directory where gathered training chunks are.
    params: An object of hyperparameters for the model.
  """
    model_num, model_name = utils.get_latest_model(trained_models_dir)
    print('Initializing from model {}'.format(model_name))

    new_model_name = utils.generate_model_name(model_num + 1)
    print('New model will be {}'.format(new_model_name))
    save_file = os.path.join(trained_models_dir, new_model_name)

    tf_records = sorted(
        tf.gfile.Glob(os.path.join(training_chunk_dir,
                                   '*' + _TF_RECORD_SUFFIX)))
    tf_records = tf_records[-(params.train_window_size //
                              params.examples_per_chunk):]

    print('Training from: {} to {}'.format(tf_records[0], tf_records[-1]))
    with utils.logged_timer('Training'):
        dualnet.train(estimator_model_dir, tf_records, model_num + 1, params)
        dualnet.export_model(estimator_model_dir, save_file)
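Unlike the first variant, this one derives the next generation number from the newest model already in trained_models_dir, so a driver loop does not need to track a counter. A minimal sketch, reusing the hypothetical directories and params from the earlier sketch:

for _ in range(3):  # arbitrary number of additional generations
  # ... gather fresh self-play chunks into training_chunk_dir ...
  train(trained_models_dir, estimator_model_dir, training_chunk_dir, params)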
Example #4
  def test_inference(self):
    with tempfile.TemporaryDirectory() as working_dir, \
        tempfile.TemporaryDirectory() as export_dir:
      dualnet.bootstrap(working_dir, model_params.DummyMiniGoParams())
      exported_model = os.path.join(export_dir, 'bootstrap-model')
      dualnet.export_model(working_dir, exported_model)

      n1 = dualnet.DualNetRunner(
          exported_model, model_params.DummyMiniGoParams())
      n1.run(go.Position(utils_test.BOARD_SIZE))

      n2 = dualnet.DualNetRunner(
          exported_model, model_params.DummyMiniGoParams())
      n2.run(go.Position(utils_test.BOARD_SIZE))
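The method above belongs to a test class; a minimal sketch of the scaffolding it assumes (the class name and the tf.test base class are assumptions, not taken from the example):

import os
import tempfile

import tensorflow as tf

import dualnet
import go
import model_params
import utils_test


class DualNetTest(tf.test.TestCase):  # hypothetical class name

  def test_inference(self):
    # Body as shown in the example above: bootstrap, export, then run
    # two DualNetRunner instances against an empty board position.
    ...


if __name__ == '__main__':
  tf.test.main()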
Example #6
def bootstrap(estimator_model_dir, trained_models_dir, params):
  """Initialize the model with random weights.

  Args:
    estimator_model_dir: tf.estimator model directory.
    trained_models_dir: Dir to save the trained models. Here to export the first
      bootstrapped generation.
    params: A MiniGoParams instance of hyperparameters for the model.
  """
  bootstrap_name = utils.generate_model_name(0)
  _ensure_dir_exists(trained_models_dir)
  bootstrap_model_path = os.path.join(trained_models_dir, bootstrap_name)
  _ensure_dir_exists(estimator_model_dir)

  print('Bootstrapping with working dir {}\n Model 0 exported to {}'.format(
      estimator_model_dir, bootstrap_model_path))
  dualnet.bootstrap(estimator_model_dir, params)
  dualnet.export_model(estimator_model_dir, bootstrap_model_path)
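A sketch of the intended call order: bootstrap once to export generation 0 with random weights, then hand the same directories to the first train variant above for later generations. The paths below are hypothetical.

import os

import model_params  # assumed to provide MiniGoParams

params = model_params.MiniGoParams()
base_dir = '/tmp/minigo'  # hypothetical scratch area
estimator_model_dir = os.path.join(base_dir, 'estimator_model_dir')
trained_models_dir = os.path.join(base_dir, 'trained_models')
training_chunk_dir = os.path.join(base_dir, 'training_chunks')

bootstrap(estimator_model_dir, trained_models_dir, params)  # generation 0
# ... self-play with the bootstrap model, gather chunks into training_chunk_dir ...
train(trained_models_dir, estimator_model_dir, training_chunk_dir, 1, params)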