def testMetricHistoriesFiles(self):
  """Appends land only in the history file of their registered (job, metric)."""
  base_dir = tf.test.get_temp_dir()
  for job in ('job1', 'job2'):
    tf.gfile.MkDir(os.path.join(base_dir, job))
  params = early_stop.MetricHistory.Params().Set(logdir=base_dir)
  hist_a = early_stop.MetricHistory(
      params.Set(jobname='job1', metric='m1', local_filesystem=True))
  hist_b = early_stop.MetricHistory(
      params.Set(jobname='job2', metric='m2', local_filesystem=True))

  # ('job1', 'm2') was never registered, so that append should be dropped.
  early_stop.MetricHistory.ConditionalAppend('job1', 'm1', 1, 10.0)
  early_stop.MetricHistory.ConditionalAppend('job1', 'm2', 1, 10.0)
  early_stop.MetricHistory.ConditionalAppend('job2', 'm2', 1, 10.0)
  early_stop.MetricHistory.ConditionalAppend('job1', 'm1', 2, 5.0)

  self.assertTrue(tf.gfile.Exists(hist_a.hist_file))
  self.assertTrue(tf.gfile.Exists(hist_b.hist_file))
  with tf.gfile.GFile(hist_a.hist_file) as f:
    recorded = [line.rstrip() for line in f.readlines()]
  self.assertEqual(recorded, ['1 10.000000', '2 5.000000'])
  with tf.gfile.GFile(hist_b.hist_file) as f:
    recorded = [line.rstrip() for line in f.readlines()]
  self.assertEqual(recorded, ['1 10.000000'])
def testMetricHistoriesMapUniqueness(self):
  """A (jobname, metric) key maps to the most recently constructed history."""
  # pylint: disable=unused-variable
  p = early_stop.MetricHistory.Params()
  mh1 = early_stop.MetricHistory(p.Set(jobname='job1', metric='m1'))
  mh2 = early_stop.MetricHistory(p.Set(jobname='job2', metric='m2'))
  mh3 = early_stop.MetricHistory(p.Set(jobname='job1', metric='m1'))

  histories = early_stop.MetricHistory._metric_histories_map
  key = early_stop.MetricHistory._Key
  # Two distinct keys only: mh3 displaced mh1 under ('job1', 'm1').
  self.assertEqual(len(histories), 2)
  self.assertEqual(histories[key('job1', 'm1')], mh3)
  self.assertEqual(histories[key('job2', 'm2')], mh2)
def __init__(self, params):
  """Initializes AdaptiveScheduler.

  Args:
    params: scheduler hyperparams. Must have exactly two entries in both
      `params.tasks` and `params.expected`; `params.epsilon` must be >= 0.

  Raises:
    ValueError: if the task/expected lists are not of length two, or if
      epsilon is negative.
  """
  super().__init__(params)
  if len(self.params.tasks) != 2 or len(self.params.expected) != 2:
    raise ValueError('Only two tasks are supported by this scheduler.')
  if self.params.epsilon < 0:
    # The guard admits zero, so the message says "non-negative" (the old
    # message claimed "positive" while still allowing epsilon == 0).
    raise ValueError('Epsilon should be non-negative.')
  self.tasks = self.params.tasks
  # One slot per task; updated as scores are observed elsewhere.
  self.last_scores = [0.0] * 2
  self._metric_histories = [
      early_stop.MetricHistory(self.params.mh_a),
      early_stop.MetricHistory(self.params.mh_b),
  ]
def __init__(self, params):
  """Creates the schedule's tracking variables and metric history."""
  super().__init__(params)
  p = self.params
  with tf.variable_scope(p.name):
    # Scalar factor variable, initialized to 1.0.
    factor_pc = py_utils.WeightParams(
        shape=[],
        init=py_utils.WeightInit.Constant(1.0),
        collections=['DevBasedSchedule_vars'],
        dtype=tf.float32)
    self._cur_factor = py_utils.CreateVariable(
        'cur_factor', factor_pc, trainable=False)
    # Scalar int64 step variable, initialized to 0.
    step_pc = py_utils.WeightParams(
        shape=[],
        init=py_utils.WeightInit.Constant(0),
        collections=['DevBasedSchedule_vars'],
        dtype=tf.int64)
    self._ref_step = py_utils.CreateVariable(
        'ref_step', step_pc, trainable=False)
  self._metric_history = early_stop.MetricHistory(p.metric_history)
  # best_step reads the on-disk history file written by MetricHistory.
  self._best_step = ops.best_step(self._metric_history.hist_file, p.tolerance)
def __init__(self, params):
  """Constructs the schedule with its associated metric history."""
  super().__init__(params)
  self._metric_history = early_stop.MetricHistory(self.params.metric_history)
def __init__(self, params):
  """Constructs the schedule with its associated metric history."""
  super().__init__(params)
  # Explicitly mark this schedule as NOT variable-free.
  self.SetVariableFree(False)
  self._metric_history = early_stop.MetricHistory(self.params.metric_history)