Example #1
import os

import tensorflow as tf
import tensorflow_lattice as tfl


def create_calibrated_etl(window_size, config, lattice_rank=4):
    """Creates a calibrated ETL regressor for the given window size.

    Relies on module-level names from the original source (create_feature_columns,
    window, base_symbols, missing_val, num_cell_lines, cell_line_keypoints,
    num_assays, assay_keypoints, model_dir, quantiles_dir).
    """
    feature_columns = create_feature_columns(window_size)
    feature_names = [fc.name for fc in feature_columns]
    hparams = tfl.CalibratedEtlHParams(
        feature_names=feature_names,
        learning_rate=0.02,
        calibration_l2_laplacian_reg=1.0e-4,
        lattice_l2_laplacian_reg=1.0e-5,
        lattice_l2_torsion_reg=1.0e-5,
        optimizer=tf.train.AdamOptimizer,
        non_monotonic_num_lattices=128,
        interpolation_type="simplex",
        non_monotonic_lattice_size=4,
        non_monotonic_lattice_rank=2,
        missing_input_value=missing_val
    )
    hparams.set_feature_param("chr", "num_keypoints", 23)
    hparams.set_feature_param("assay", "num_keypoints", 35)
    hparams.set_feature_param("line", "num_keypoints", 51)
    for offset in window(window_size):
        hparams.set_feature_param(f"base_{offset}", "num_keypoints", len(base_symbols))
    
    for i in range(1, num_cell_lines+1):
        hparams.set_feature_param(f"C{i}", "num_keypoints", cell_line_keypoints)

    for i in range(1, num_assays+1):
        hparams.set_feature_param(f"M{i}", "num_keypoints", assay_keypoints)
   
    return tfl.calibrated_etl_regressor(
        feature_columns=feature_columns,
        model_dir=model_dir, config=config, hparams=hparams,
        quantiles_dir=os.path.join(quantiles_dir, str(window_size))
    )
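
A minimal usage sketch, assuming the module-level globals referenced above are defined. The window size, RunConfig, and train_input_fn are illustrative placeholders, not part of the original example:

# Hypothetical call; window_size=10 and train_input_fn are placeholders.
run_config = tf.estimator.RunConfig(tf_random_seed=42)
estimator = create_calibrated_etl(window_size=10, config=run_config)
estimator.train(input_fn=train_input_fn)  # train_input_fn must yield the feature columns built above
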
Example #2
import tensorflow_lattice as tfl


def create_calibrated_etl(feature_columns, config, quantiles_dir):
    """Creates a calibrated ETL estimator."""
    # No enforced monotonicity in this example.
    feature_names = [fc.name for fc in feature_columns]
    hparams = tfl.CalibratedEtlHParams(feature_names=feature_names,
                                       num_keypoints=200,
                                       learning_rate=0.02,
                                       non_monotonic_num_lattices=200,
                                       non_monotonic_lattice_rank=2,
                                       non_monotonic_lattice_size=2,
                                       calibration_l2_laplacian_reg=4.0e-3,
                                       lattice_l2_laplacian_reg=1.0e-5,
                                       lattice_l2_torsion_reg=4.0e-4)
    # FLAGS and _pprint_hparams are defined elsewhere in the original source module.
    hparams.parse(FLAGS.hparams)
    _pprint_hparams(hparams)
    return tfl.calibrated_etl_classifier(feature_columns=feature_columns,
                                         model_dir=config.model_dir,
                                         config=config,
                                         hparams=hparams,
                                         quantiles_dir=quantiles_dir)
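
A hedged usage sketch for this classifier variant, assuming FLAGS and _pprint_hparams from the original module are available. The feature columns, paths, and train_input_fn are placeholders; quantiles_dir is expected to already hold quantiles precomputed beforehand (the pre-2.0 tensorflow_lattice API provided tfl.save_quantiles_for_keypoints for this):

import tensorflow as tf

# Illustrative inputs only; column names and paths are placeholders.
feature_columns = [tf.feature_column.numeric_column('x0'),
                   tf.feature_column.numeric_column('x1')]
run_config = tf.estimator.RunConfig(model_dir='/tmp/etl_model')
classifier = create_calibrated_etl(feature_columns, run_config, quantiles_dir='/tmp/quantiles')
classifier.train(input_fn=train_input_fn)  # train_input_fn: a standard Estimator input_fn
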
Example #3
import tensorflow as tf
import tensorflow_lattice as tfl


def fit_model(x,
              y,
              lattice_size=5,
              non_monotonic_num_lattices=1,
              non_monotonic_lattice_rank=1):
  """Fits a single 1D lattice to the provided data.

  Args:
      x: covariates
      y: labels
      lattice_size: (int, optional) Number of knots in each lattice dimension,
        total knots is lattice_size^lattice_rank, for each lattice
      non_monotonic_num_lattices: (int, optional) number of lattices in the
        ensemble
      non_monotonic_lattice_rank: (int, optional) number of inputs to each
        lattice

  Returns:
      etl_estimator: fitted TF Estimator
  """
  # Hyperparameters.
  num_keypoints = 100
  hparams = tfl.CalibratedEtlHParams(
      non_monotonic_lattice_rank=non_monotonic_lattice_rank,
      non_monotonic_num_lattices=non_monotonic_num_lattices,
      non_monotonic_lattice_size=lattice_size,
      num_keypoints=num_keypoints,
      learning_rate=0.007,
      linear_embedding_calibration_num_keypoints=100)

  # Estimator.
  feature_columns = [
      tf.feature_column.numeric_column('X_0'),
      tf.feature_column.numeric_column('X_1'),
  ]

  # Training is sensitive to initialization, so fix the random seed.
  config = tf.estimator.RunConfig(tf_random_seed=1)
  def keypoints_config():
    return tfl.uniform_keypoints_for_signal(
        num_keypoints,
        input_min=0.0,
        input_max=x.max(),
        output_min=0.0,
        output_max=lattice_size - 1
    )
  etl_estimator = tfl.calibrated_etl_classifier(
      feature_columns=feature_columns,
      hparams=hparams,
      keypoints_initializers_fn=keypoints_config,
      config=config
  )

  # Input function.
  input_fn = tf.compat.v1.estimator.inputs.numpy_input_fn(
      x={
          'X_0': x[:, 0],
          'X_1': x[:, 1]
      },
      y=y.flatten(),
      batch_size=10000,
      num_epochs=100,
      shuffle=False)

  # Train!
  etl_estimator.train(input_fn=input_fn)

  # Evaluate
  eval_input_fn = tf.compat.v1.estimator.inputs.numpy_input_fn(
      x={
          'X_0': x[:, 0],
          'X_1': x[:, 1]
      },
      y=y.flatten(),
      batch_size=10000,
      num_epochs=1,
      shuffle=False)
  print(etl_estimator.evaluate(input_fn=eval_input_fn))

  return etl_estimator
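
A self-contained sketch of calling fit_model on synthetic data; the data-generating process below is purely illustrative:

import numpy as np

# Two uniform covariates in [0, 1] and a noisy binary label (illustrative only).
rng = np.random.RandomState(0)
x = rng.uniform(0.0, 1.0, size=(5000, 2))
y = (x[:, 0] + 0.5 * x[:, 1] + 0.1 * rng.randn(5000) > 0.75).astype(np.int32)

etl_estimator = fit_model(x, y, lattice_size=5)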