def create_experiment_summary():
    """Build the top-level summary proto describing this experiment.

    Returns:
      An experiment summary protobuf listing the temperature/material
      hyperparameters and the tracked temperature metrics.
    """
    # The hparams proto API takes discrete domains as protobuf ListValues.
    temperature_list = struct_pb2.ListValue()
    temperature_list.extend(TEMPERATURE_LIST)
    materials = struct_pb2.ListValue()
    materials.extend(HEAT_COEFFICIENTS.keys())

    hparam_infos = [
        api_pb2.HParamInfo(
            name='initial_temperature',
            display_name='Initial temperature',
            type=api_pb2.DATA_TYPE_FLOAT64,
            domain_discrete=temperature_list),
        api_pb2.HParamInfo(
            name='ambient_temperature',
            display_name='Ambient temperature',
            type=api_pb2.DATA_TYPE_FLOAT64,
            domain_discrete=temperature_list),
        api_pb2.HParamInfo(
            name='material',
            display_name='Material',
            type=api_pb2.DATA_TYPE_STRING,
            domain_discrete=materials),
    ]
    metric_infos = [
        api_pb2.MetricInfo(
            name=api_pb2.MetricName(tag='temperature/current/scalar_summary'),
            display_name='Current Temp.'),
        api_pb2.MetricInfo(
            name=api_pb2.MetricName(
                tag='temperature/difference_to_ambient/scalar_summary'),
            display_name='Difference To Ambient Temp.'),
        api_pb2.MetricInfo(
            name=api_pb2.MetricName(tag='delta/scalar_summary'),
            display_name='Delta T'),
    ]
    return summary.experiment_pb(hparam_infos=hparam_infos,
                                 metric_infos=metric_infos)
def create_experiment_summary(num_units_list, dropout_rate_list, optimizer_list):
    """Build an experiment summary proto for the given hyperparameter sweeps.

    Args:
      num_units_list: Discrete values tried for the `num_units` hparam.
      dropout_rate_list: Discrete values tried for the `dropout_rate` hparam.
      optimizer_list: Discrete optimizer names tried.

    Returns:
      An experiment summary protobuf.
    """
    from tensorboard.plugins.hparams import api_pb2
    from tensorboard.plugins.hparams import summary as hparams_summary
    from google.protobuf import struct_pb2

    def _list_value(values):
        # Wrap a plain Python list as the protobuf ListValue the API expects.
        lv = struct_pb2.ListValue()
        lv.extend(values)
        return lv

    return hparams_summary.experiment_pb(
        # The hyperparameters being changed
        hparam_infos=[
            api_pb2.HParamInfo(
                name='num_units',
                display_name='Number of units',
                type=api_pb2.DATA_TYPE_FLOAT64,
                domain_discrete=_list_value(num_units_list)),
            api_pb2.HParamInfo(
                name='dropout_rate',
                display_name='Dropout rate',
                type=api_pb2.DATA_TYPE_FLOAT64,
                domain_discrete=_list_value(dropout_rate_list)),
            api_pb2.HParamInfo(
                name='optimizer',
                display_name='Optimizer',
                type=api_pb2.DATA_TYPE_STRING,
                domain_discrete=_list_value(optimizer_list)),
        ],
        # The metrics being tracked
        metric_infos=[
            api_pb2.MetricInfo(
                name=api_pb2.MetricName(tag='epoch_accuracy'),
                display_name='Accuracy'),
        ])
def create_experiment_summary(optimizer_list, num_units_list, dropout_list):
    """Build the experiment summary proto for the swept hyperparameters.

    Args:
      optimizer_list: Discrete optimizer names to sweep.
      num_units_list: Discrete unit counts to sweep.
      dropout_list: Discrete dropout rates to sweep.

    Returns:
      An experiment summary protobuf.
    """
    def _domain(values):
        # Discrete domains must be google.protobuf.ListValue instances.
        lv = struct_pb2.ListValue()
        lv.extend(values)
        return lv

    hparam_infos = [
        api_pb2.HParamInfo(
            name='optimizer',
            display_name='Optimizer',
            type=api_pb2.DATA_TYPE_STRING,
            domain_discrete=_domain(optimizer_list)),
        api_pb2.HParamInfo(
            name='num_units',
            display_name='Number of units',
            type=api_pb2.DATA_TYPE_FLOAT64,
            domain_discrete=_domain(num_units_list)),
        api_pb2.HParamInfo(
            name='dropout',
            display_name='Dropout',
            type=api_pb2.DATA_TYPE_FLOAT64,
            domain_discrete=_domain(dropout_list)),
    ]
    metric_infos = [
        api_pb2.MetricInfo(name=api_pb2.MetricName(tag='accuracy'),
                           display_name='Accuracy'),
        api_pb2.MetricInfo(name=api_pb2.MetricName(tag='elapse'),
                           display_name='Elapse (s)'),
    ]
    return hparams_summary.experiment_pb(hparam_infos=hparam_infos,
                                         metric_infos=metric_infos)
def create_experiment_summary():
    """Assemble the experiment-level summary proto for this experiment."""
    # Wrap TEMPERATURE_LIST in a google.protobuf.ListValue for the proto API.
    temperature_list = struct_pb2.ListValue()
    temperature_list.extend(TEMPERATURE_LIST)

    # NOTE(review): all three hparams, including "heat_coefficient", share the
    # same discrete domain (temperature_list) — confirm this is intentional.
    hparam_infos = []
    for hp_name, hp_display in (("initial_temperature", "initial temperature"),
                                ("ambient_temperature", "ambient temperature"),
                                ("heat_coefficient", "heat coefficient")):
        hparam_infos.append(
            api_pb2.HParamInfo(name=hp_name,
                               display_name=hp_display,
                               type=api_pb2.DATA_TYPE_FLOAT64,
                               domain_discrete=temperature_list))

    metric_infos = [
        api_pb2.MetricInfo(
            name=api_pb2.MetricName(tag="temperature/current/scalar_summary"),
            display_name="Current Temp."),
        api_pb2.MetricInfo(
            name=api_pb2.MetricName(
                tag="temperature/difference_to_ambient/scalar_summary"),
            display_name="Difference To Ambient Temp."),
        api_pb2.MetricInfo(
            name=api_pb2.MetricName(tag="delta/scalar_summary"),
            display_name="Delta T"),
    ]
    return summary.experiment_pb(hparam_infos=hparam_infos,
                                 metric_infos=metric_infos)
def _to_summary_pb(num_units_list, dropout_rate_list, optimizer_list):
    """Return an experiment summary proto for the given hparam domains."""
    def _as_list_value(values):
        # Discrete domains must be google.protobuf.ListValue instances.
        result = struct_pb2.ListValue()
        result.extend(values)
        return result

    return hparams.experiment_pb(
        hparam_infos=[
            api_pb2.HParamInfo(name='num_units',
                               display_name='Number of units',
                               type=api_pb2.DATA_TYPE_FLOAT64,
                               domain_discrete=_as_list_value(num_units_list)),
            api_pb2.HParamInfo(name='dropout_rate',
                               display_name='Dropout rate',
                               type=api_pb2.DATA_TYPE_FLOAT64,
                               domain_discrete=_as_list_value(dropout_rate_list)),
            api_pb2.HParamInfo(name='optimizer',
                               display_name='Optimizer',
                               type=api_pb2.DATA_TYPE_STRING,
                               domain_discrete=_as_list_value(optimizer_list)),
        ],
        metric_infos=[
            api_pb2.MetricInfo(name=api_pb2.MetricName(tag='accuracy'),
                               display_name='Accuracy'),
        ])
def summary_pb(self):
    """Create a top-level experiment summary describing this experiment.

    The resulting summary should be written to a log directory that
    encloses all the individual sessions' log directories. Analogous to
    the low-level `experiment_pb` function in the `hparams.summary`
    module.
    """

    def _hparam_info(hparam):
        # Translate one hparam object into its HParamInfo proto; the
        # domain (if any) fills in the type/domain fields.
        proto = api_pb2.HParamInfo(
            name=hparam.name,
            description=hparam.description,
            display_name=hparam.display_name,
        )
        if hparam.domain is not None:
            hparam.domain.update_hparam_info(proto)
        return proto

    return summary.experiment_pb(
        hparam_infos=[_hparam_info(h) for h in self._hparams],
        metric_infos=[m.as_proto() for m in self._metrics],
        user=self._user,
        description=self._description,
        time_created_secs=self._time_created_secs,
    )
def create_experiment_summary():
    """Create the experiment summary proto declaring hparams and metrics."""
    # The proto API expects discrete domains as google.protobuf.ListValue.
    temperature_domain = struct_pb2.ListValue()
    temperature_domain.extend(TEMPERATURE_LIST)

    def _float_hparam(name, display_name):
        # Every hparam here is a float drawn from the temperature domain.
        # NOTE(review): "heat_coefficient" also reuses the temperature
        # domain — confirm that is intentional.
        return api_pb2.HParamInfo(name=name,
                                  display_name=display_name,
                                  type=api_pb2.DATA_TYPE_FLOAT64,
                                  domain_discrete=temperature_domain)

    def _metric(tag, display_name):
        return api_pb2.MetricInfo(name=api_pb2.MetricName(tag=tag),
                                  display_name=display_name)

    return summary.experiment_pb(
        hparam_infos=[
            _float_hparam("initial_temperature", "initial temperature"),
            _float_hparam("ambient_temperature", "ambient temperature"),
            _float_hparam("heat_coefficient", "heat coefficient"),
        ],
        metric_infos=[
            _metric("temperature/current/scalar_summary", "Current Temp."),
            _metric("temperature/difference_to_ambient/scalar_summary",
                    "Difference To Ambient Temp."),
            _metric("delta/scalar_summary", "Delta T"),
        ])
def _to_summary_pb(num_units_list, dropout_rate_list, optimizer_list):
    """Build the hparams experiment proto for the swept values."""
    def _domain(values):
        # Wrap a Python list as a protobuf ListValue discrete domain.
        lv = struct_pb2.ListValue()
        lv.extend(values)
        return lv

    num_units_domain = _domain(num_units_list)
    dropout_domain = _domain(dropout_rate_list)
    optimizer_domain = _domain(optimizer_list)

    hparam_infos = [
        api_pb2.HParamInfo(
            name="num_units",
            display_name="Number of units",
            type=api_pb2.DATA_TYPE_FLOAT64,
            domain_discrete=num_units_domain,
        ),
        # NOTE(review): this hparam is recorded under the key "drop_rate"
        # even though the parameter is dropout_rate_list — confirm session
        # writers use the same "drop_rate" key.
        api_pb2.HParamInfo(
            name="drop_rate",
            display_name="Dropout rate",
            type=api_pb2.DATA_TYPE_FLOAT64,
            domain_discrete=dropout_domain,
        ),
        api_pb2.HParamInfo(
            name="optimizer",
            display_name="Optimizer",
            type=api_pb2.DATA_TYPE_STRING,
            domain_discrete=optimizer_domain,
        ),
    ]
    metric_infos = [
        api_pb2.MetricInfo(name=api_pb2.MetricName(tag="accuracy"),
                           display_name="Accuracy"),
    ]
    return hparams.experiment_pb(hparam_infos=hparam_infos,
                                 metric_infos=metric_infos)
def create_experiment():
    """Build an experiment summary from command-line flags and write it out.

    Parses text-format `HParamInfosList` / `MetricInfosList` protos from
    FLAGS and passes them, with the remaining experiment flags, to
    `summary.experiment_pb` before handing the result to `write_summary`.
    """

    def _parse(text, message):
        # Merge a text-format proto flag value into `message` and return it.
        text_format.Merge(text, message)
        return message

    hparam_infos = _parse(FLAGS.hparam_infos,
                          hparams_util_pb2.HParamInfosList())
    metric_infos = _parse(FLAGS.metric_infos,
                          hparams_util_pb2.MetricInfosList())
    write_summary(
        summary.experiment_pb(
            hparam_infos.hparam_infos,
            metric_infos.metric_infos,
            FLAGS.user,
            FLAGS.description,
            FLAGS.time_created_secs))
def create_experiment_summary():
    """Create an `api_pb2.Experiment` proto describing the experiment."""

    def _discrete(values):
        # Wrap values in the protobuf ListValue the hparams API expects.
        domain = struct_pb2.ListValue()
        domain.extend(values)
        return domain

    def _int_like_hparam(name, values):
        # Integer-valued hparams are declared FLOAT64, matching the
        # original schema ("actually int" in the source).
        return api_pb2.HParamInfo(name=name,
                                  type=api_pb2.DATA_TYPE_FLOAT64,
                                  domain_discrete=_discrete(values))

    hparam_infos = [
        _int_like_hparam("conv_layers", [1, 2, 3]),
        _int_like_hparam("conv_kernel_size", [3, 5]),
        _int_like_hparam("dense_layers", [1, 2, 3]),
        api_pb2.HParamInfo(
            name="dropout",
            type=api_pb2.DATA_TYPE_FLOAT64,
            domain_interval=api_pb2.Interval(min_value=0.1, max_value=0.4),
        ),
        api_pb2.HParamInfo(
            name="optimizer",
            type=api_pb2.DATA_TYPE_STRING,
            domain_discrete=_discrete(["adam", "adagrad"]),
        ),
    ]

    def _metric(group, tag, display_name):
        return api_pb2.MetricInfo(
            name=api_pb2.MetricName(group=group, tag=tag),
            display_name=display_name,
        )

    metric_infos = [
        _metric("validation", "epoch_accuracy", "accuracy (val.)"),
        _metric("validation", "epoch_loss", "loss (val.)"),
        _metric("train", "batch_accuracy", "accuracy (train)"),
        _metric("train", "batch_loss", "loss (train)"),
    ]
    return hparams_summary.experiment_pb(
        hparam_infos=hparam_infos,
        metric_infos=metric_infos,
    )
def test_experiment_pb(self):
    """Checks that `summary.experiment_pb` emits the exact expected Summary.

    Builds two hparam infos (one discrete-string, one float interval) and
    two metric infos, then compares the generated summary byte-for-byte
    against a hand-constructed `tf.compat.v1.Summary`.
    """
    hparam_infos = [
        api_pb2.HParamInfo(
            name="param1",
            display_name="display_name1",
            description="foo",
            type=api_pb2.DATA_TYPE_STRING,
            domain_discrete=struct_pb2.ListValue(values=[
                struct_pb2.Value(string_value="a"),
                struct_pb2.Value(string_value="b"),
            ]),
        ),
        api_pb2.HParamInfo(
            name="param2",
            display_name="display_name2",
            description="bar",
            type=api_pb2.DATA_TYPE_FLOAT64,
            domain_interval=api_pb2.Interval(min_value=-100.0,
                                             max_value=100.0),
        ),
    ]
    metric_infos = [
        api_pb2.MetricInfo(
            name=api_pb2.MetricName(tag="loss"),
            dataset_type=api_pb2.DATASET_VALIDATION,
        ),
        api_pb2.MetricInfo(
            name=api_pb2.MetricName(group="train/", tag="acc"),
            dataset_type=api_pb2.DATASET_TRAINING,
        ),
    ]
    time_created_secs = 314159.0
    # The expected value nests the plugin data (serialized
    # HParamsPluginData) inside the summary metadata under the
    # "_hparams_/experiment" tag, with the sentinel null tensor.
    self.assertEqual(
        summary.experiment_pb(hparam_infos,
                              metric_infos,
                              time_created_secs=time_created_secs),
        tf.compat.v1.Summary(value=[
            tf.compat.v1.Summary.Value(
                tag="_hparams_/experiment",
                tensor=summary._TF_NULL_TENSOR,
                metadata=tf.compat.v1.SummaryMetadata(
                    plugin_data=tf.compat.v1.SummaryMetadata.PluginData(
                        plugin_name="hparams",
                        content=(plugin_data_pb2.HParamsPluginData(
                            version=0,
                            experiment=api_pb2.Experiment(
                                time_created_secs=time_created_secs,
                                hparam_infos=hparam_infos,
                                metric_infos=metric_infos,
                            ),
                        ).SerializeToString()),
                    )),
            )
        ]),
    )
def _create_experiment_summary(self):
    """Build the hparams experiment summary for this training run.

    Reads the candidate value lists from instance attributes
    (`_alpha_list`, `_alpha_decay_list`, `_gamma_list`,
    `_init_epsilon_list`, `_n_exploration_episodes`) and declares them,
    plus the `sum_reward` metric, in an experiment summary proto.

    Returns:
      The proto returned by `hparams_summary.experiment_pb`.
    """
    def _domain(values):
        # Discrete domains must be google.protobuf.ListValue instances.
        lv = struct_pb2.ListValue()
        lv.extend(values)
        return lv

    return hparams_summary.experiment_pb(
        # The hyperparameters being changed
        hparam_infos=[
            api_pb2.HParamInfo(name='alpha',
                               display_name='Learning rate',
                               type=api_pb2.DATA_TYPE_FLOAT64,
                               domain_discrete=_domain(self._alpha_list)),
            api_pb2.HParamInfo(
                name='alpha_decay',
                display_name='Learning rate decay',
                type=api_pb2.DATA_TYPE_FLOAT64,
                domain_discrete=_domain(self._alpha_decay_list)),
            api_pb2.HParamInfo(name='gamma',
                               display_name='Reward discount factor',
                               type=api_pb2.DATA_TYPE_FLOAT64,
                               domain_discrete=_domain(self._gamma_list)),
            api_pb2.HParamInfo(
                name='init_epsilon',
                display_name='Initial exploration',
                type=api_pb2.DATA_TYPE_FLOAT64,
                domain_discrete=_domain(self._init_epsilon_list)),
            # Bug fix: this display name previously duplicated
            # init_epsilon's 'Initial exploration' (copy-paste error),
            # making the two hparams indistinguishable in the UI.
            api_pb2.HParamInfo(
                name='n_exploration_episodes',
                display_name='Number of exploration episodes',
                type=api_pb2.DATA_TYPE_FLOAT64,
                domain_discrete=_domain(self._n_exploration_episodes)),
        ],
        # The metrics being tracked
        metric_infos=[
            api_pb2.MetricInfo(
                name=api_pb2.MetricName(tag='sum_reward'),
                display_name='SumReward'),
        ])
def test_experiment_pb(self):
    """Checks that `summary.experiment_pb` emits the exact expected Summary.

    Older-API variant: compares against a hand-built `tf.Summary` (no
    `compat.v1` prefix, no explicit null tensor) holding the serialized
    HParamsPluginData under the "_hparams_/experiment" tag.
    """
    hparam_infos = [
        api_pb2.HParamInfo(name="param1",
                           display_name="display_name1",
                           description="foo",
                           type=api_pb2.DATA_TYPE_STRING,
                           domain_discrete=struct_pb2.ListValue(
                               values=[struct_pb2.Value(string_value='a'),
                                       struct_pb2.Value(string_value='b')])),
        api_pb2.HParamInfo(name="param2",
                           display_name="display_name2",
                           description="bar",
                           type=api_pb2.DATA_TYPE_FLOAT64,
                           domain_interval=api_pb2.Interval(min_value=-100.0,
                                                            max_value=100.0))
    ]
    metric_infos = [
        api_pb2.MetricInfo(name=api_pb2.MetricName(tag="loss"),
                           dataset_type=api_pb2.DATASET_VALIDATION),
        api_pb2.MetricInfo(name=api_pb2.MetricName(group="train/", tag="acc"),
                           dataset_type=api_pb2.DATASET_TRAINING),
    ]
    time_created_secs = 314159.0
    # Expected: plugin data (serialized HParamsPluginData, version 0)
    # nested in the summary metadata under the hparams plugin name.
    self.assertEqual(
        summary.experiment_pb(hparam_infos,
                              metric_infos,
                              time_created_secs=time_created_secs),
        tf.Summary(value=[
            tf.Summary.Value(
                tag="_hparams_/experiment",
                metadata=tf.SummaryMetadata(
                    plugin_data=tf.SummaryMetadata.PluginData(
                        plugin_name="hparams",
                        content=(plugin_data_pb2.HParamsPluginData(
                            version=0,
                            experiment=api_pb2.Experiment(
                                time_created_secs=time_created_secs,
                                hparam_infos=hparam_infos,
                                metric_infos=metric_infos))
                                 .SerializeToString()))))
        ]))
def _create_experiment_summary(self):
    """Build the experiment summary proto for the alpha/gamma sweep."""
    # Discrete domains for the two swept hyperparameters.
    alpha_domain = struct_pb2.ListValue()
    alpha_domain.extend(self._alpha_list)
    gamma_domain = struct_pb2.ListValue()
    gamma_domain.extend(self._gamma_list)

    # The hyperparameters being changed
    hparam_infos = [
        api_pb2.HParamInfo(name='alpha',
                           display_name='Learning rate',
                           type=api_pb2.DATA_TYPE_FLOAT64,
                           domain_discrete=alpha_domain),
        api_pb2.HParamInfo(name='gamma',
                           display_name='Reward discount factor',
                           type=api_pb2.DATA_TYPE_FLOAT64,
                           domain_discrete=gamma_domain),
    ]
    # The metrics being tracked.  NOTE(review): the tag spells
    # 'cummulative_reward' (sic); session writers must use the same tag,
    # so it is preserved here unchanged.
    metric_infos = [
        api_pb2.MetricInfo(
            name=api_pb2.MetricName(tag='cummulative_reward'),
            display_name='CumReward'),
    ]
    return hparams_summary.experiment_pb(hparam_infos=hparam_infos,
                                         metric_infos=metric_infos)
def hparams_config(hparams, metrics, time_created_secs=None):
    """Write a top-level experiment configuration.

    This configuration describes the hyperparameters and metrics that will
    be tracked in the experiment, but does not record any actual values of
    those hyperparameters and metrics. It can be created before any models
    are actually trained.

    Args:
      hparams: A list of `HParam` values.
      metrics: A list of `Metric` values.
      time_created_secs: The time that this experiment was created, as
        seconds since epoch. Defaults to the current time.
    """

    def _to_hparam_info(hparam):
        # Base proto for one hparam; the domain (if any) contributes the
        # type/domain fields via update_hparam_info.
        info = api_pb2.HParamInfo(
            name=hparam.name,
            description=hparam.description,
            display_name=hparam.display_name,
        )
        if hparam.domain is not None:
            hparam.domain.update_hparam_info(info)
        return info

    experiment_pb = summary.experiment_pb(
        hparam_infos=[_to_hparam_info(h) for h in hparams],
        metric_infos=[m.as_proto() for m in metrics],
        time_created_secs=time_created_secs,
    )
    raw_pb = experiment_pb.SerializeToString()
    # Prefer the experimental summary_scope when present; otherwise fall
    # back to the public tf.summary.summary_scope.
    scope_fn = (getattr(tf.compat.v2.summary.experimental,
                        "summary_scope", None)
                or tf.summary.summary_scope)
    with scope_fn("hparams_summary"):
        return tf.compat.v2.summary.experimental.write_raw_pb(raw_pb, step=0)