def _GeneratorFromPath(path,
                       event_file_active_filter=None,
                       detect_file_replacement=None):
    """Create an event generator for the file or directory at *path*.

    Returns an EventFileLoader when *path* is itself a summary events file;
    otherwise returns a directory loader/watcher that discovers events files
    under *path*.
    """
    if not path:
        raise ValueError("path must be a valid string")

    def _plain_loader(event_path):
        # Plain loader; honors the detect_file_replacement setting.
        return event_file_loader.EventFileLoader(event_path,
                                                 detect_file_replacement)

    def _timestamped_loader(event_path):
        return event_file_loader.TimestampedEventFileLoader(
            event_path, detect_file_replacement)

    if io_wrapper.IsSummaryEventsFile(path):
        # A single events file: load it directly.
        return _plain_loader(path)

    if event_file_active_filter:
        # Active-filter mode needs timestamped loaders.
        return directory_loader.DirectoryLoader(
            path,
            _timestamped_loader,
            path_filter=io_wrapper.IsSummaryEventsFile,
            active_filter=event_file_active_filter,
        )

    return directory_watcher.DirectoryWatcher(
        path,
        _plain_loader,
        io_wrapper.IsSummaryEventsFile,
    )
# Example 2
def load_events(base, experiment, series):
    """Load scalar summaries from event files under base/experiment/series.

    Args:
        base (str): Root log directory.
        experiment (str): Experiment sub-directory name.
        series (str): Series sub-directory name.

    Returns:
        pandas.DataFrame: one row per event with 'ts' (wall time), 'step',
        and one column per summary tag holding its simple_value. The
        base/experiment/series strings are attached as attributes.
    """
    p_series = os.path.join(base, experiment, series)
    agg = []
    for f in os.listdir(p_series):
        fpath = os.path.join(p_series, f)
        # Skip sub-directories and profiler trace files.
        if os.path.isdir(fpath) or 'profile' in f:
            continue
        efl = event_file_loader.EventFileLoader(fpath)
        for e in efl.Load():
            summary_value = e.summary.value
            # Only events carrying at least one summary value are kept;
            # only the first value of each event is recorded.
            if summary_value:
                agg.append({
                    'ts': e.wall_time,
                    'step': e.step,
                    summary_value[0].tag: summary_value[0].simple_value,
                })

    df = pd.DataFrame(agg)
    # NOTE(review): plain attribute assignment on a DataFrame is fragile
    # (lost on copy/slice) but kept for backward compatibility with callers
    # that read df.base / df.experiment / df.series.
    df.base = base
    df.experiment = experiment
    df.series = series
    return df
# Example 3
def get_metrics_from_eval_tfevents(eval_dir):
    """Get metrics from tfevents in eval dir

    Args:
        eval_dir (str): Root directory where eval summaries are stored

    Returns:
        list[float], list[int]: average returns, and average episode lengths
    """
    event_file = None
    for root, dirs, files in os.walk(eval_dir):
        for file_name in files:
            if "events" in file_name and 'profile' not in file_name:
                event_file = os.path.join(root, file_name)
                break
        if event_file is not None:
            # Fix: the original only broke the inner loop, so os.walk kept
            # going and a later events file could silently replace this one.
            break

    assert event_file is not None

    logging.info("Parse event file:%s", event_file)
    episode_returns = []
    episode_lengths = []
    for event_str in event_file_loader.EventFileLoader(event_file).Load():
        if event_str.summary.value:
            for item in event_str.summary.value:
                if item.tag == 'Metrics/AverageReturn':
                    episode_returns.append(item.simple_value)
                elif item.tag == 'Metrics/AverageEpisodeLength':
                    episode_lengths.append(item.simple_value)

    assert len(episode_returns) > 0
    logging.info("Episode returns, %s, episode lengths: %s", episode_returns,
                 episode_lengths)
    return episode_returns, episode_lengths
# Example 4
def _event_dict_list_from_events_file(event_file_path):
    """Given an event file path, load the event data.

    Args:
        event_file_path (str): A path to a TensorFlow events.out* file.

    Returns:
        list[dict]: one dict per event that contains a "loss" scalar, with
        "wall_time", "step", and one key per scalar summary tag.
    """
    loader = event_file_loader.EventFileLoader(event_file_path)

    output_events = []
    # Stream events directly; no need to materialize an intermediate list.
    for event in loader.Load():
        d = protobuf_to_dict(event)

        output_event = {}
        if "wall_time" in d:
            output_event["wall_time"] = d["wall_time"]
        if "step" in d:
            output_event["step"] = d["step"]
        if "summary" in d:
            for value in d["summary"]["value"]:
                # Only scalar (simple_value) summaries are collected.
                if "simple_value" in value:
                    output_event[value["tag"]] = value["simple_value"]

        # Keep only events that carry a loss scalar.
        if "loss" in output_event:
            output_events.append(output_event)

    return output_events
# Example 5
def _GeneratorFromPath(path):
    """Create an event generator for file or directory at given path string."""
    if not path:
        raise ValueError('path must be a valid string')
    if IsTensorFlowEventsFile(path):
        # A single events file: load it directly.
        return event_file_loader.EventFileLoader(path)
    # Otherwise treat the path as a directory and watch it for events files.
    return directory_watcher.DirectoryWatcher(
        path,
        event_file_loader.EventFileLoader,
        IsTensorFlowEventsFile,
    )
# Example 6
def load_events(filename):
    """Return all events from *filename* as a list.

    Prefers the tensorboard loader; falls back to the legacy TensorFlow
    summary iterator when tensorboard is not installed.
    """
    try:
        # tensorboard>=1.14.0
        from tensorboard.backend.event_processing import event_file_loader
    except ImportError:
        # Fix: keep the try body minimal — only the import belongs in it.
        # Previously an ImportError raised while loading the file itself
        # would silently switch to the TensorFlow fallback.
        import tensorflow as tf
        return list(tf.train.summary_iterator(filename))
    loader = event_file_loader.EventFileLoader(filename)
    return list(loader.Load())
# Example 7
def stream_tensorboard_scalars(event_file):
    """Yield (step, tag, value) for the first summary value of each event.

    Args:
        event_file (str): Path to a TensorFlow events file.

    Yields:
        tuple: (step, tag, value) where value is the scalar simple_value or
        an ndarray built from the tensor proto's float values.
    """
    loader = event_file_loader.EventFileLoader(event_file)
    for event in loader.Load():
        step = event.step
        if event.summary.value:
            val = event.summary.value[0]
            # Fix: dispatch on the field actually set in the `value` oneof.
            # `val.simple_value or ...` misread a legitimate 0.0 scalar as a
            # tensor summary (0.0 is falsy).
            if val.WhichOneof('value') == 'simple_value':
                value = val.simple_value
            else:
                value = np.array(val.tensor.float_val).reshape(
                    [d.size for d in val.tensor.tensor_shape.dim])
            yield step, val.tag, value
# Example 8
    def test_summary(self):
        """End-to-end check of alf.summary writing.

        Scalar/text/histogram summaries written under nested scopes must land
        in the event file with the expected tags and values, while summaries
        suppressed via record_if or disable_summary must not appear.
        """
        with tempfile.TemporaryDirectory() as root_dir:
            writer = alf.summary.create_summary_writer(root_dir,
                                                       flush_secs=10,
                                                       max_queue=10)
            alf.summary.set_default_writer(writer)
            alf.summary.enable_summary()
            with alf.summary.scope("root") as scope_name:
                self.assertEqual(scope_name, "root/")
                alf.summary.scalar("scalar", 2020)
                with alf.summary.scope("a") as scope_name:
                    self.assertEqual(scope_name, "root/a/")
                    alf.summary.text("text", "sample text")
                # Suppressed by record_if(False): must not reach the file.
                with alf.summary.record_if(lambda: False):
                    alf.summary.text("test", "this should not appear")
                # Suppressed by disable_summary(): must not reach the file.
                alf.summary.disable_summary()
                alf.summary.text("testa", "this should not appear")
                alf.summary.enable_summary()
                with alf.summary.scope("b") as scope_name:
                    self.assertEqual(scope_name, "root/b/")
                    alf.summary.histogram("histogram",
                                          torch.arange(100).numpy())
            writer.close()

            event_file = _find_event_file(root_dir)

            self.assertIsNotNone(event_file)

            tag2val = {
                'root/scalar': None,
                'root/a/text/text_summary': None,
                'root/b/histogram': None
            }

            for event_str in event_file_loader.EventFileLoader(
                    event_file).Load():
                if event_str.summary.value:
                    for item in event_str.summary.value:
                        # Any tag beyond the expected three means a
                        # suppressed summary leaked through.
                        self.assertTrue(item.tag in tag2val)
                        if item.HasField('simple_value'):
                            tag2val[item.tag] = item.simple_value
                        elif item.HasField('histo'):
                            tag2val[item.tag] = item.histo
                        else:
                            # Fix: the original `assertTrue('tensor')` was a
                            # no-op (a non-empty string is always truthy);
                            # actually assert the tensor field is set.
                            self.assertTrue(item.HasField('tensor'))
                            tag2val[item.tag] = tensor_util.make_ndarray(
                                item.tensor)

            self.assertEqual(tag2val['root/scalar'], 2020)
            self.assertEqual(tag2val['root/a/text/text_summary'][0],
                             b'sample text')
            self.assertEqual(tag2val['root/b/histogram'].min, 0)
            self.assertEqual(tag2val['root/b/histogram'].max, 99)
            self.assertEqual(len(tag2val['root/b/histogram'].bucket_limit), 31)
# Example 9
def _GeneratorFromPath(path):
    """Create an event generator for file or directory at given path string."""
    if not path:
        raise ValueError("path must be a valid string")
    if io_wrapper.IsSummaryEventsFile(path):
        # The path names a single events file; read it directly.
        return event_file_loader.EventFileLoader(path)
    # Otherwise watch the directory for summary events files.
    return directory_watcher.DirectoryWatcher(
        path,
        event_file_loader.EventFileLoader,
        io_wrapper.IsSummaryEventsFile,
    )
# Example 10
    def test_graph_def(self):
        """Writes a legacy `graph_def` event and checks its migration.

        The migrated result should keep the original event and add a summary
        event carrying the serialized `GraphDef` as a length-1 blob sequence
        tensor tagged for the graphs plugin.
        """
        # Create a `GraphDef` and write it to disk as an event.
        logdir = self.get_temp_dir()
        writer = test_util.FileWriter(logdir)
        graph_def = graph_pb2.GraphDef()
        graph_def.node.add(name="alice", op="Person")
        graph_def.node.add(name="bob", op="Person")
        graph_def.node.add(name="friendship",
                           op="Friendship",
                           input=["alice", "bob"])
        writer.add_graph(graph=None, graph_def=graph_def, global_step=123)
        writer.flush()

        # Read in the `Event` containing the written `graph_def`.
        files = os.listdir(logdir)
        self.assertLen(files, 1)
        event_file = os.path.join(logdir, files[0])
        self.assertIn("tfevents", event_file)
        loader = event_file_loader.EventFileLoader(event_file)
        events = list(loader.Load())
        # The writer emits a `file_version` event followed by the graph event.
        self.assertLen(events, 2)
        self.assertEqual(events[0].WhichOneof("what"), "file_version")
        self.assertEqual(events[1].WhichOneof("what"), "graph_def")
        old_event = events[1]

        new_events = self._migrate_event(old_event)
        # Migration keeps the original event (same object) and appends the
        # migrated summary form.
        self.assertLen(new_events, 2)
        self.assertIs(new_events[0], old_event)
        new_event = new_events[1]

        self.assertEqual(new_event.WhichOneof("what"), "summary")
        self.assertLen(new_event.summary.value, 1)
        tensor = tensor_util.make_ndarray(new_event.summary.value[0].tensor)
        self.assertEqual(
            new_event.summary.value[0].metadata.data_class,
            summary_pb2.DATA_CLASS_BLOB_SEQUENCE,
        )
        self.assertEqual(
            new_event.summary.value[0].metadata.plugin_data.plugin_name,
            graphs_metadata.PLUGIN_NAME,
        )
        # A blob sequence of length one holding the serialized graph bytes.
        self.assertEqual(tensor.shape, (1, ))
        new_graph_def_bytes = tensor[0]
        self.assertIsInstance(new_graph_def_bytes, bytes)
        self.assertGreaterEqual(len(new_graph_def_bytes), 16)
        new_graph_def = graph_pb2.GraphDef.FromString(new_graph_def_bytes)

        # Round-trip: the re-parsed graph must equal the one we wrote.
        self.assertProtoEquals(graph_def, new_graph_def)
def _GeneratorFromPath(path, event_file_active_filter=None):
    """Create an event generator for file or directory at given path string."""
    if not path:
        raise ValueError('path must be a valid string')
    if io_wrapper.IsTensorFlowEventsFile(path):
        # A single events file: read it directly.
        return event_file_loader.EventFileLoader(path)
    if event_file_active_filter:
        # Active-filter mode uses timestamped loaders via DirectoryLoader.
        return directory_loader.DirectoryLoader(
            path,
            event_file_loader.TimestampedEventFileLoader,
            path_filter=io_wrapper.IsTensorFlowEventsFile,
            active_filter=event_file_active_filter)
    # Default: watch the directory for events files.
    return directory_watcher.DirectoryWatcher(
        path,
        event_file_loader.EventFileLoader,
        io_wrapper.IsTensorFlowEventsFile)
def _latest_eval(eval_dir, eval_metrics):
    """Get the latest global step for which an evaluation result was written.

    Args:
        eval_dir: Directory containing eval event files.
        eval_metrics: Metrics whose 'Metrics/<name>' tags must all be present
            at a step for it to count as complete.

    Returns:
        The greatest step with all expected tags, or None if the directory
        does not exist or no step is complete.
    """
    if not tf.io.gfile.exists(eval_dir):
        return None

    expected_tags = {
        common.join_scope('Metrics', metric.name) for metric in eval_metrics
    }

    # Record which summary tags were written for each global step.
    events_by_step = collections.defaultdict(set)
    for events_file in tf.io.gfile.listdir(eval_dir):
        loader = event_file_loader.EventFileLoader(
            os.path.join(eval_dir, events_file))
        for event in loader.Load():
            if event.summary.value:
                # NOTE(review): only the first value's tag is recorded per
                # event; assumes writers emit one value per event — confirm.
                events_by_step[event.step].add(event.summary.value[0].tag)

    # Find the greatest step for which all expected summaries are present.
    for step in sorted(events_by_step, reverse=True):
        if events_by_step[step].issuperset(expected_tags):
            return step

    return None
# Example 13
def get_tensorboard_log_watcher_from_path(path: Text):
    """Create an event generator for file or directory at given path string.

    This method creates an event generator using tensorboard
    directory_watcher. The generator.load() method will return event logs as
    they become available. The generator does not repeat events.

    Args:
        path: Text representing a directory, file, or Google Cloud Storage
        (GCS) for tensorboard logs.
    Returns:
        A tensorboard directory_watcher event generator.
    Raises:
        ValueError: if path is not defined.
    """
    if not path:
        raise ValueError("path must be a valid string")
    # A single events file can be loaded directly without watching.
    if io_wrapper.IsSummaryEventsFile(path):
        return event_file_loader.EventFileLoader(path)
    # Otherwise watch the directory for events files as they appear.
    watcher = directory_watcher.DirectoryWatcher(
        path,
        event_file_loader.EventFileLoader,
        io_wrapper.IsSummaryEventsFile,
    )
    return watcher
# Example 14
    def _process_health_pill_event(self, node_name_set, mapping, target_step,
                                   file_path):
        """Creates health pills out of data in an event.

        Creates health pills out of the event and adds them to the mapping.

        Args:
          node_name_set: A set of node names that are relevant.
          mapping: The mapping from node name to HealthPillEvents.
              This object may be destructively modified.
          target_step: The target step at which to obtain health pills.
          file_path: The path to the file with health pill events.

        Returns:
          Whether we should stop reading events because future events are no longer
          relevant.
        """
        events_loader = event_file_loader.EventFileLoader(file_path)
        for event in events_loader.Load():
            if not event.HasField("summary"):
                logger.warning(
                    "An event in a debugger events file lacks a summary.")
                continue

            if event.step < target_step:
                # This event is not of the relevant step. We perform this check
                # first because the majority of events will be eliminated from
                # consideration by this check.
                continue

            if event.step > target_step:
                # We have passed the relevant step. No need to read more events.
                return True

            for value in event.summary.value:
                # Obtain the device name from the metadata.
                summary_metadata = value.metadata
                plugin_data = summary_metadata.plugin_data
                if plugin_data.plugin_name == constants.DEBUGGER_PLUGIN_NAME:
                    # Fix: decode to text before the try so the raw payload is
                    # available to the warning below. The original referenced
                    # `content` in the except block, which is unbound when
                    # json.loads raises, turning a parse error into NameError.
                    raw_content = tf.compat.as_text(
                        summary_metadata.plugin_data.content)
                    try:
                        content = json.loads(raw_content)
                    except ValueError as err:
                        logger.warning(
                            "Could not parse the JSON string containing data for "
                            "the debugger plugin: %r, %r",
                            raw_content,
                            err,
                        )
                        continue
                    device_name = content["device"]
                    output_slot = content["outputSlot"]
                else:
                    logger.error(
                        "No debugger plugin data found for event with tag %s and node "
                        "name %s.",
                        value.tag,
                        value.node_name,
                    )
                    continue

                if not value.HasField("tensor"):
                    logger.warning(
                        "An event in a debugger events file lacks a tensor value."
                    )
                    continue

                match = re.match(r"^(.*):(\d+):DebugNumericSummary$",
                                 value.node_name)
                if not match:
                    logger.warning(
                        ("A event with a health pill has an invalid watch, (i.e., an "
                         "unexpected debug op): %r"),
                        value.node_name,
                    )
                    return None

                health_pill = self._process_health_pill_value(
                    wall_time=event.wall_time,
                    step=event.step,
                    device_name=device_name,
                    output_slot=output_slot,
                    node_name=match.group(1),
                    tensor_proto=value.tensor,
                    node_name_set=node_name_set,
                )
                if not health_pill:
                    continue
                mapping[health_pill.node_name].append(health_pill)

        # Keep reading events.
        return False
# Example 15
def generator_from_event_file(event_file):
  """Returns a generator that yields events from an event file."""
  loader = event_file_loader.EventFileLoader(event_file)
  return loader.Load()
# Example 16
 def _LoaderForTestFile(self, filename):
     """Return an EventFileLoader for *filename* inside the temp dir."""
     test_file_path = os.path.join(self.get_temp_dir(), filename)
     return event_file_loader.EventFileLoader(test_file_path)