Example #1
def create_experiment_summary():
    """Returns a summary proto buffer holding this experiment."""

    # Convert TEMPERATURE_LIST to google.protobuf.ListValue
    temperature_list = struct_pb2.ListValue()
    temperature_list.extend(TEMPERATURE_LIST)
    materials = struct_pb2.ListValue()
    materials.extend(HEAT_COEFFICIENTS.keys())
    return summary.experiment_pb(
        hparam_infos=[
            api_pb2.HParamInfo(name='initial_temperature',
                               display_name='Initial temperature',
                               type=api_pb2.DATA_TYPE_FLOAT64,
                               domain_discrete=temperature_list),
            api_pb2.HParamInfo(name='ambient_temperature',
                               display_name='Ambient temperature',
                               type=api_pb2.DATA_TYPE_FLOAT64,
                               domain_discrete=temperature_list),
            api_pb2.HParamInfo(name='material',
                               display_name='Material',
                               type=api_pb2.DATA_TYPE_STRING,
                               domain_discrete=materials)
        ],
        metric_infos=[
            api_pb2.MetricInfo(name=api_pb2.MetricName(
                tag='temperature/current/scalar_summary'),
                               display_name='Current Temp.'),
            api_pb2.MetricInfo(name=api_pb2.MetricName(
                tag='temperature/difference_to_ambient/scalar_summary'),
                               display_name='Difference To Ambient Temp.'),
            api_pb2.MetricInfo(
                name=api_pb2.MetricName(tag='delta/scalar_summary'),
                display_name='Delta T')
        ])
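Not shown in the example above: the returned experiment summary still has to be written to the top-level log directory before the HParams dashboard will display it. The snippet below is a minimal usage sketch, assuming TensorFlow 1.x-style summary writers; the write_experiment_summary helper and the log directory path are illustrative, not part of the original code.

# Minimal usage sketch (an assumption, not part of the example above): write
# the experiment proto returned by create_experiment_summary() to a log
# directory so the HParams dashboard can read it.
import tensorflow as tf

def write_experiment_summary(logdir):
    # create_experiment_summary() is the function defined in the example.
    writer = tf.compat.v1.summary.FileWriter(logdir)
    writer.add_summary(create_experiment_summary())
    writer.close()

write_experiment_summary('/tmp/hparams_demo')  # hypothetical log directory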
Example #2
def create_experiment_summary():
    """Returns a summary proto buffer holding this experiment"""
    # Convert TEMPERATURE_LIST to google.protobuf.ListValue
    temperature_list = struct_pb2.ListValue()
    temperature_list.extend(TEMPERATURE_LIST)
    return summary.experiment_pb(
        hparam_infos=[
            api_pb2.HParamInfo(name="initial_temperature",
                               display_name="initial temperature",
                               type=api_pb2.DATA_TYPE_FLOAT64,
                               domain_discrete=temperature_list),
            api_pb2.HParamInfo(name="ambient_temperature",
                               display_name="ambient temperature",
                               type=api_pb2.DATA_TYPE_FLOAT64,
                               domain_discrete=temperature_list),
            api_pb2.HParamInfo(name="heat_coefficient",
                               display_name="heat coefficient",
                               type=api_pb2.DATA_TYPE_FLOAT64,
                               domain_discrete=temperature_list)
        ],
        metric_infos=[
            api_pb2.MetricInfo(name=api_pb2.MetricName(
                tag="temperature/current/scalar_summary"),
                               display_name="Current Temp."),
            api_pb2.MetricInfo(name=api_pb2.MetricName(
                tag="temperature/difference_to_ambient/scalar_summary"),
                               display_name="Difference To Ambient Temp."),
            api_pb2.MetricInfo(
                name=api_pb2.MetricName(tag="delta/scalar_summary"),
                display_name="Delta T")
        ])
Example #3
def create_experiment_summary(optimizer_list, num_units_list, dropout_list):
  optimizer_list_val = struct_pb2.ListValue()
  optimizer_list_val.extend(optimizer_list)
  num_units_list_val = struct_pb2.ListValue()
  num_units_list_val.extend(num_units_list)
  dropout_list_val = struct_pb2.ListValue()
  dropout_list_val.extend(dropout_list)

  return hparams_summary.experiment_pb(
      hparam_infos=[
          api_pb2.HParamInfo(
              name='optimizer',
              display_name='Optimizer',
              type=api_pb2.DATA_TYPE_STRING,
              domain_discrete=optimizer_list_val),
          api_pb2.HParamInfo(
              name='num_units',
              display_name='Number of units',
              type=api_pb2.DATA_TYPE_FLOAT64,
              domain_discrete=num_units_list_val),
          api_pb2.HParamInfo(
              name='dropout',
              display_name='Dropout',
              type=api_pb2.DATA_TYPE_FLOAT64,
              domain_discrete=dropout_list_val),
      ],
      metric_infos=[
          api_pb2.MetricInfo(
              name=api_pb2.MetricName(tag='accuracy'), display_name='Accuracy'),
          api_pb2.MetricInfo(
              name=api_pb2.MetricName(tag='elapse'), display_name='Elapse (s)')
      ])
Example #4
def create_experiment_summary():
    """Create an `api_pb2.Experiment` proto describing the experiment."""
    def discrete_domain(values):
        domain = struct_pb2.ListValue()
        domain.extend(values)
        return domain

    hparams = [
        api_pb2.HParamInfo(
            name="conv_layers",
            type=api_pb2.DATA_TYPE_FLOAT64,  # actually int
            domain_discrete=discrete_domain([1, 2, 3]),
        ),
        api_pb2.HParamInfo(
            name="conv_kernel_size",
            type=api_pb2.DATA_TYPE_FLOAT64,  # actually int
            domain_discrete=discrete_domain([3, 5]),
        ),
        api_pb2.HParamInfo(
            name="dense_layers",
            type=api_pb2.DATA_TYPE_FLOAT64,  # actually int
            domain_discrete=discrete_domain([1, 2, 3]),
        ),
        api_pb2.HParamInfo(
            name="dropout",
            type=api_pb2.DATA_TYPE_FLOAT64,
            domain_interval=api_pb2.Interval(min_value=0.1, max_value=0.4),
        ),
        api_pb2.HParamInfo(
            name="optimizer",
            type=api_pb2.DATA_TYPE_STRING,
            domain_discrete=discrete_domain(["adam", "adagrad"]),
        ),
    ]
    metrics = [
        api_pb2.MetricInfo(
            name=api_pb2.MetricName(group="validation", tag="epoch_accuracy"),
            display_name="accuracy (val.)",
        ),
        api_pb2.MetricInfo(
            name=api_pb2.MetricName(group="validation", tag="epoch_loss"),
            display_name="loss (val.)",
        ),
        api_pb2.MetricInfo(
            name=api_pb2.MetricName(group="train", tag="batch_accuracy"),
            display_name="accuracy (train)",
        ),
        api_pb2.MetricInfo(
            name=api_pb2.MetricName(group="train", tag="batch_loss"),
            display_name="loss (train)",
        ),
    ]
    return hparams_summary.experiment_pb(
        hparam_infos=hparams,
        metric_infos=metrics,
    )
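The proto above only declares which hyperparameters and metrics exist; each individual run still needs session start/end summaries written to its own subdirectory. The following is a hedged sketch of that step using the same hparams_summary and api_pb2 modules as above; the write_session helper, the run directory, and the hyperparameter values are illustrative assumptions.

import tensorflow as tf
from tensorboard.plugins.hparams import api_pb2
from tensorboard.plugins.hparams import summary as hparams_summary

def write_session(logdir, hparams):
    # Record the hyperparameter values for one run, train, then mark the
    # session as finished. Metric scalars (e.g. validation/epoch_accuracy)
    # would be written by the training loop itself.
    writer = tf.compat.v1.summary.FileWriter(logdir)
    writer.add_summary(hparams_summary.session_start_pb(hparams=hparams))
    # ... training and metric logging would happen here ...
    writer.add_summary(hparams_summary.session_end_pb(api_pb2.STATUS_SUCCESS))
    writer.close()

write_session('/tmp/hparams_demo/run-1',  # hypothetical run directory
              {'conv_layers': 2, 'conv_kernel_size': 3, 'dense_layers': 1,
               'dropout': 0.2, 'optimizer': 'adam'})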
Example #5
 def test_experiment_pb(self):
     hparam_infos = [
         api_pb2.HParamInfo(
             name="param1",
             display_name="display_name1",
             description="foo",
             type=api_pb2.DATA_TYPE_STRING,
             domain_discrete=struct_pb2.ListValue(values=[
                 struct_pb2.Value(string_value="a"),
                 struct_pb2.Value(string_value="b"),
             ]),
         ),
         api_pb2.HParamInfo(
             name="param2",
             display_name="display_name2",
             description="bar",
             type=api_pb2.DATA_TYPE_FLOAT64,
             domain_interval=api_pb2.Interval(min_value=-100.0,
                                              max_value=100.0),
         ),
     ]
     metric_infos = [
         api_pb2.MetricInfo(
             name=api_pb2.MetricName(tag="loss"),
             dataset_type=api_pb2.DATASET_VALIDATION,
         ),
         api_pb2.MetricInfo(
             name=api_pb2.MetricName(group="train/", tag="acc"),
             dataset_type=api_pb2.DATASET_TRAINING,
         ),
     ]
     time_created_secs = 314159.0
     self.assertEqual(
         summary.experiment_pb(hparam_infos,
                               metric_infos,
                               time_created_secs=time_created_secs),
         tf.compat.v1.Summary(value=[
             tf.compat.v1.Summary.Value(
                 tag="_hparams_/experiment",
                 tensor=summary._TF_NULL_TENSOR,
                 metadata=tf.compat.v1.SummaryMetadata(
                     plugin_data=tf.compat.v1.SummaryMetadata.PluginData(
                         plugin_name="hparams",
                         content=(plugin_data_pb2.HParamsPluginData(
                             version=0,
                             experiment=api_pb2.Experiment(
                                 time_created_secs=time_created_secs,
                                 hparam_infos=hparam_infos,
                                 metric_infos=metric_infos,
                             ),
                         ).SerializeToString()),
                     )),
             )
         ]),
     )
Example #6
def create_experiment_summary(num_units_list, dropout_rate_list, optimizer_list):
    from tensorboard.plugins.hparams import api_pb2
    from tensorboard.plugins.hparams import summary as hparams_summary
    from google.protobuf import struct_pb2
    num_units_list_val = struct_pb2.ListValue()
    num_units_list_val.extend(num_units_list)
    dropout_rate_list_val = struct_pb2.ListValue()
    dropout_rate_list_val.extend(dropout_rate_list)
    optimizer_list_val = struct_pb2.ListValue()
    optimizer_list_val.extend(optimizer_list)
    return hparams_summary.experiment_pb(
        # The hyperparameters being changed
        hparam_infos=[
            api_pb2.HParamInfo(name='num_units',
                               display_name='Number of units',
                               type=api_pb2.DATA_TYPE_FLOAT64,
                               domain_discrete=num_units_list_val),
            api_pb2.HParamInfo(name='dropout_rate',
                               display_name='Dropout rate',
                               type=api_pb2.DATA_TYPE_FLOAT64,
                               domain_discrete=dropout_rate_list_val),
            api_pb2.HParamInfo(name='optimizer',
                               display_name='Optimizer',
                               type=api_pb2.DATA_TYPE_STRING,
                               domain_discrete=optimizer_list_val)
        ],
        # The metrics being tracked
        metric_infos=[
            api_pb2.MetricInfo(
                name=api_pb2.MetricName(
                    tag='epoch_accuracy'),
                display_name='Accuracy'),
        ]
    )
Example #7
def _set_avg_session_metrics(session_group):
  """Sets the metrics for the group to be the average of its sessions.

  The resulting session group metrics consist of the union of metrics across
  the group's sessions. The value of each session group metric is the average
  of that metric values across the sessions in the group. The 'step' and
  'wall_time_secs' fields of the resulting MetricValue field in the session
  group are populated with the corresponding averages (truncated for 'step')
  as well.

  Args:
    session_group: A SessionGroup protobuffer.
  """
  assert session_group.sessions, 'SessionGroup cannot be empty.'
  # Algorithm: Iterate over all (session, metric) pairs and maintain a
  # dict from _MetricIdentifier to _MetricStats objects.
  # Then use the final dict state to compute the average for each metric.
  metric_stats = collections.defaultdict(_MetricStats)
  for session in session_group.sessions:
    for metric_value in session.metric_values:
      metric_name = _MetricIdentifier(group=metric_value.name.group,
                                      tag=metric_value.name.tag)
      stats = metric_stats[metric_name]
      stats.total += metric_value.value
      stats.count += 1
      stats.total_step += metric_value.training_step
      stats.total_wall_time_secs += metric_value.wall_time_secs

  del session_group.metric_values[:]
  for (metric_name, stats) in six.iteritems(metric_stats):
    session_group.metric_values.add(
        name=api_pb2.MetricName(group=metric_name.group, tag=metric_name.tag),
        value=float(stats.total)/float(stats.count),
        training_step=stats.total_step // stats.count,
        wall_time_secs=stats.total_wall_time_secs / stats.count)
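The function above depends on two helpers that the snippet does not show. A plausible minimal reconstruction, an assumption rather than the library's exact definitions, is:

import collections

# Assumed definitions for the helpers referenced above.
_MetricIdentifier = collections.namedtuple('_MetricIdentifier', ['group', 'tag'])

class _MetricStats(object):
  """Running totals for one metric across the sessions of a group."""

  def __init__(self):
    self.total = 0.0
    self.count = 0
    self.total_step = 0
    self.total_wall_time_secs = 0.0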
Example #8
 def as_proto(self):
     return api_pb2.MetricInfo(
         name=api_pb2.MetricName(group=self._group, tag=self._tag,),
         display_name=self._display_name,
         description=self._description,
         dataset_type=self._dataset_type,
     )
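as_proto() above is a method lifted out of a small wrapper class. A hypothetical container it could belong to (class name, constructor arguments, and defaults are all assumptions) might look like this:

class Metric(object):
    """Hypothetical holder for the fields referenced by as_proto() above."""

    def __init__(self, group, tag, display_name='', description='',
                 dataset_type=api_pb2.DATASET_UNKNOWN):
        self._group = group
        self._tag = tag
        self._display_name = display_name
        self._description = description
        self._dataset_type = dataset_type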
Example #9
def _to_summary_pb(num_units_list, dropout_rate_list, optimizer_list):
    nus_val = struct_pb2.ListValue()
    nus_val.extend(num_units_list)
    drs_val = struct_pb2.ListValue()
    drs_val.extend(dropout_rate_list)
    opts_val = struct_pb2.ListValue()
    opts_val.extend(optimizer_list)
    return hparams.experiment_pb(
        hparam_infos=[
            api_pb2.HParamInfo(
                name='num_units',
                display_name='Number of units',
                type=api_pb2.DATA_TYPE_FLOAT64,
                domain_discrete=nus_val),
            api_pb2.HParamInfo(
                name='dropout_rate',
                display_name='Dropout rate',
                type=api_pb2.DATA_TYPE_FLOAT64,
                domain_discrete=drs_val),
            api_pb2.HParamInfo(
                name='optimizer',
                display_name='Optimizer',
                type=api_pb2.DATA_TYPE_STRING,
                domain_discrete=opts_val)
        ],
        metric_infos=[
            api_pb2.MetricInfo(
                name=api_pb2.MetricName(tag='accuracy'),
                display_name='Accuracy'),
        ])
Example #10
def _to_summary_pb(num_units_list, dropout_rate_list, optimizer_list):
    nus_val = struct_pb2.ListValue()
    nus_val.extend(num_units_list)
    drs_val = struct_pb2.ListValue()
    drs_val.extend(dropout_rate_list)
    opts_val = struct_pb2.ListValue()
    opts_val.extend(optimizer_list)
    return hparams.experiment_pb(
        hparam_infos=[
            api_pb2.HParamInfo(
                name="num_units",
                display_name="Number of units",
                type=api_pb2.DATA_TYPE_FLOAT64,
                domain_discrete=nus_val,
            ),
            api_pb2.HParamInfo(
                name="drop_rate",
                display_name="Dropout rate",
                type=api_pb2.DATA_TYPE_FLOAT64,
                domain_discrete=drs_val,
            ),
            api_pb2.HParamInfo(
                name="optimizer",
                display_name="Optimizer",
                type=api_pb2.DATA_TYPE_STRING,
                domain_discrete=opts_val,
            ),
        ],
        metric_infos=[
            api_pb2.MetricInfo(name=api_pb2.MetricName(tag="accuracy"), display_name="Accuracy"),
        ],
    )
Example #11
 def _compute_metric_infos(
     self, experiment_id, hparams_run_to_tag_to_content
 ):
     return (
         api_pb2.MetricInfo(name=api_pb2.MetricName(group=group, tag=tag))
         for tag, group in self._compute_metric_names(
             experiment_id, hparams_run_to_tag_to_content
         )
     )
Example #12
 def _create_experiment_summary(self):
     alpha_list_val = struct_pb2.ListValue()
     alpha_list_val.extend(self._alpha_list)
     alpha_decay_list_val = struct_pb2.ListValue()
     alpha_decay_list_val.extend(self._alpha_decay_list)
     gamma_list_val = struct_pb2.ListValue()
     gamma_list_val.extend(self._gamma_list)
     init_epsilon_list_val = struct_pb2.ListValue()
     init_epsilon_list_val.extend(self._init_epsilon_list)
     n_exploration_episodes_val = struct_pb2.ListValue()
     n_exploration_episodes_val.extend(self._n_exploration_episodes)
     return hparams_summary.experiment_pb(
         # The hyperparameters being changed
         hparam_infos=[
             api_pb2.HParamInfo(name='alpha',
                                display_name='Learning rate',
                                type=api_pb2.DATA_TYPE_FLOAT64,
                                domain_discrete=alpha_list_val),
             api_pb2.HParamInfo(name='alpha_decay',
                                display_name='Learning rate decay',
                                type=api_pb2.DATA_TYPE_FLOAT64,
                                domain_discrete=alpha_decay_list_val),
             api_pb2.HParamInfo(name='gamma',
                                display_name='Reward discount factor',
                                type=api_pb2.DATA_TYPE_FLOAT64,
                                domain_discrete=gamma_list_val),
             api_pb2.HParamInfo(name='init_epsilon',
                                display_name='Initial exploration',
                                type=api_pb2.DATA_TYPE_FLOAT64,
                                domain_discrete=init_epsilon_list_val),
             api_pb2.HParamInfo(name='n_exploration_episodes',
                                 display_name='Number of exploration episodes',
                                type=api_pb2.DATA_TYPE_FLOAT64,
                                domain_discrete=n_exploration_episodes_val)
         ],
         # The metrics being tracked
         metric_infos=[
             api_pb2.MetricInfo(
                 name=api_pb2.MetricName(
                     tag='sum_reward'),
                 display_name='SumReward'),
         ]
     )
Example #13
 def _create_experiment_summary(self):
     alpha_list_val = struct_pb2.ListValue()
     alpha_list_val.extend(self._alpha_list)
     gamma_list_val = struct_pb2.ListValue()
     gamma_list_val.extend(self._gamma_list)
     return hparams_summary.experiment_pb(
         # The hyperparameters being changed
         hparam_infos=[
             api_pb2.HParamInfo(name='alpha',
                                display_name='Learning rate',
                                type=api_pb2.DATA_TYPE_FLOAT64,
                                domain_discrete=alpha_list_val),
             api_pb2.HParamInfo(name='gamma',
                                display_name='Reward discount factor',
                                type=api_pb2.DATA_TYPE_FLOAT64,
                                domain_discrete=gamma_list_val)
         ],
         # The metrics being tracked
         metric_infos=[
             api_pb2.MetricInfo(
                 name=api_pb2.MetricName(tag='cummulative_reward'),
                 display_name='CumReward'),
         ])
Example #14
 def _compute_metric_infos(self):
     return (api_pb2.MetricInfo(
         name=api_pb2.MetricName(group=group, tag=tag))
             for tag, group in self._compute_metric_names())