def test_report_metric_with_ckpt(self):
    """Test writing one metric record with a checkpoint path."""
    os.environ['CLOUD_ML_HP_METRIC_FILE'] = os.path.join(self.test_dir, 'metric.output')
    os.environ['CLOUD_ML_TRIAL_ID'] = '1'
    hpt = HyperTune()
    hpt.report_hyperparameter_tuning_metric(
        hyperparameter_metric_tag='my_metric_tag',
        metric_value=0.987,
        global_step=1000,
        checkpoint_path='gs://my_bucket/ckpt/')
    # Each call appends one JSON record; verify the fields of the latest record.
    with open(os.path.join(self.test_dir, 'metric.output')) as metric_output:
        metric = json.loads(metric_output.readlines()[-1].strip())
        self.assertAlmostEqual(0.987, float(metric['my_metric_tag']))
        self.assertEqual('1', metric['trial'])
        self.assertEqual('1000', metric['global_step'])
        self.assertEqual('gs://my_bucket/ckpt/', metric['checkpoint_path'])
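
The test above exercises the same reporting API that a real training job calls directly. A minimal sketch of that direct use, assuming the cloudml-hypertune package is installed; the tag name and loop values are placeholders:

import hypertune

# In a real tuning job the service sets CLOUD_ML_HP_METRIC_FILE and
# CLOUD_ML_TRIAL_ID for each trial; one reporter object per process is enough.
hpt = hypertune.HyperTune()

for step, loss in enumerate([0.9, 0.5, 0.3]):  # stand-in for an actual training loop
    hpt.report_hyperparameter_tuning_metric(
        hyperparameter_metric_tag='loss',  # must match the metric tag configured for the tuning job
        metric_value=loss,
        global_step=step)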
Example #2
from typing import Dict

from hypertune import HyperTune
from tensorflow.keras.callbacks import Callback  # assumes tf.keras; adjust if using standalone Keras


class HyperTuneClassificationCallback(Callback):
    """Reports validation accuracy to HyperTune after every epoch and again at the end of training."""

    def __init__(self):
        super().__init__()
        self.hpt = HyperTune()
        self.best_accuracy_epoch = (0, -1)

    def on_epoch_end(self, epoch: int, logs: Dict = None):
        accuracy = logs["val_categorical_accuracy"]
        self._report(accuracy, epoch)
        self.best_accuracy_epoch = max(self.best_accuracy_epoch, (accuracy, epoch))

    def on_train_begin(self, logs=None):
        self.best_accuracy_epoch = (0, -1)

    def on_train_end(self, logs=None):
        # Re-report the best (accuracy, epoch) pair seen during training.
        self._report(*self.best_accuracy_epoch)

    def _report(self, accuracy: float, epoch: int):
        self.hpt.report_hyperparameter_tuning_metric(
            hyperparameter_metric_tag="val_accuracy", metric_value=accuracy, global_step=epoch
        )
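
A sketch of attaching this callback during training; the model and the train_ds / val_ds datasets are placeholders assumed for illustration:

# Hypothetical usage: `model` tracks 'categorical_accuracy', so the logs passed to
# on_epoch_end contain 'val_categorical_accuracy' when validation data is supplied.
model.fit(
    train_ds,
    validation_data=val_ds,
    epochs=10,
    callbacks=[HyperTuneClassificationCallback()])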
Example #3
def test_report_metric_circular(self):
    """Test that the metric file stores only the most recent 100 records."""
    os.environ['CLOUD_ML_HP_METRIC_FILE'] = os.path.join(self.test_dir, 'metric.output')
    os.environ['CLOUD_ML_TRIAL_ID'] = '1'
    hpt = HyperTune()
    for step in range(0, 200):
        hpt.report_hyperparameter_tuning_metric(
            hyperparameter_metric_tag='my_metric_tag',
            metric_value=0.1 * step,
            global_step=step)
    with open(os.path.join(self.test_dir, 'metric.output')) as metric_output:
        metric_content = metric_output.readlines()
        # After 200 reports, only steps 100-199 should remain in the file.
        metric = json.loads(metric_content[0].strip())
        self.assertAlmostEqual(10.0, float(metric['my_metric_tag']))
        self.assertEqual('1', metric['trial'])
        self.assertEqual('100', metric['global_step'])
        metric = json.loads(metric_content[-1].strip())
        self.assertAlmostEqual(19.9, float(metric['my_metric_tag']))
        self.assertEqual('1', metric['trial'])
        self.assertEqual('199', metric['global_step'])
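
Both tests parse the metric file directly: each call appends one JSON object per line, keyed by the metric tag plus 'trial' and 'global_step'. A small illustrative helper built on that layout (inferred from the assertions above, not part of the hypertune API):

import json
import os

def read_latest_metric(metric_file=None):
    """Return the most recent metric record as a dict, or None if no records exist."""
    path = metric_file or os.environ['CLOUD_ML_HP_METRIC_FILE']
    with open(path) as f:
        records = [line.strip() for line in f if line.strip()]
    return json.loads(records[-1]) if records else None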
    """
    DO NOT CHANGE THE CODE BELOW
    """
    # ===============================================
    # ==== Evaluate performance against test set ====
    # ===============================================
    # Create DMatrix for XGBoost from DataFrames
    d_matrix_train = xgb.DMatrix(X_train, y_train)
    d_matrix_eval = xgb.DMatrix(X_eval)
    model = xgb.train(params, d_matrix_train)
    y_pred = model.predict(d_matrix_eval)
    rmse = math.sqrt(mean_squared_error(y_eval, y_pred))
    print('RMSE: {:.3f}'.format(rmse))

    # Return the score back to HyperTune to inform the next iteration
    # of hyperparameter search
    hpt = HyperTune()
    hpt.report_hyperparameter_tuning_metric(
        hyperparameter_metric_tag='nyc_fare', metric_value=rmse)

    # ============================================
    # ==== Upload the model to Google Storage ====
    # ============================================
    JOB_NAME = os.environ['CLOUD_ML_JOB_ID']
    TRIAL_ID = os.environ['CLOUD_ML_TRIAL_ID']
    model_name = 'model.bst'
    model.save_model(model_name)
    blob = output_bucket.blob('{}/{}_rmse{:.3f}_{}'.format(
        JOB_NAME, TRIAL_ID, rmse, model_name))
    blob.upload_from_filename(model_name)
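
The upload step assumes an output_bucket object created earlier in the script. A sketch of how it might be constructed with the google-cloud-storage client; the bucket name is a placeholder assumption:

from google.cloud import storage

# Placeholder bucket name; the original script would take this from a flag or env var.
storage_client = storage.Client()
output_bucket = storage_client.bucket('my-output-bucket')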
Example #5
def _report_metric(hpt: hypertune.HyperTune, epoch, metric_value):
    """Report one metric value for the given epoch under _METRIC_TAG."""
    hpt.report_hyperparameter_tuning_metric(
        hyperparameter_metric_tag=_METRIC_TAG,
        metric_value=metric_value,
        global_step=epoch)
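
A sketch of driving this helper from a training loop; the tag value and accuracy numbers are illustrative assumptions, and only the hypertune calls come from the snippet above:

import hypertune

_METRIC_TAG = 'val_accuracy'  # assumed value; the real tag is defined elsewhere in the module

hpt = hypertune.HyperTune()
for epoch, val_accuracy in enumerate([0.71, 0.78, 0.82]):  # stand-in for real per-epoch results
    _report_metric(hpt, epoch, val_accuracy)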
Example #6
      tf.keras.layers.MaxPooling2D(),
      tf.keras.layers.Conv2D(32, 3, activation='relu'),
      tf.keras.layers.MaxPooling2D(),
      tf.keras.layers.Flatten(),
      tf.keras.layers.Dense(10, activation='softmax')
  ])
  model.compile(
      loss=tf.keras.losses.sparse_categorical_crossentropy,
      optimizer=tf.keras.optimizers.SGD(learning_rate=args.lr, decay=args.decay),
      metrics=['accuracy'])
  return model

model = build_and_compile_cnn_model()

# Instantiate the HyperTune reporting object
hpt = HyperTune()

# Reporting callback
class HPTCallback(tf.keras.callbacks.Callback):

    def on_epoch_end(self, epoch, logs=None):
        # `hpt` is the module-level HyperTune instance created above.
        hpt.report_hyperparameter_tuning_metric(
            hyperparameter_metric_tag='val_accuracy',
            metric_value=logs['val_accuracy'],
            global_step=epoch)

# Train the model
model.fit(train_dataset, epochs=5, steps_per_epoch=10,
          validation_data=test_dataset.take(8),
          callbacks=[HPTCallback()])
model.save(args.model_dir)
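
This script reads args.lr, args.decay, and args.model_dir; hyperparameter tuning services pass each trial's chosen values as command-line flags, so a sketch of the argument parsing that would supply them (flag names inferred from the attribute names above) looks like this:

import argparse

def parse_args():
    parser = argparse.ArgumentParser()
    # Hyperparameters chosen by the tuning service for this trial.
    parser.add_argument('--lr', type=float, default=0.01)
    parser.add_argument('--decay', type=float, default=0.0)
    # Where to save the trained model.
    parser.add_argument('--model_dir', type=str, required=True)
    return parser.parse_args()

args = parse_args()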
Example #7
def __init__(self):
    super().__init__()
    self.hpt = HyperTune()
    self.best_accuracy_epoch = (0, -1)