Esempio n. 1
0
def _parse_event_proto_str(proto_str):
    """Deserialize a raw Event protobuf byte string into a message object.

    Args:
        proto_str: Serialized bytes of an ``Event`` proto (e.g. one record
            produced by ``tf.data.TFRecordDataset``).

    Returns:
        The parsed ``Event`` message.
    """
    event = Event()
    event.ParseFromString(proto_str)
    return event
Esempio n. 2
0
 def _add_event(self, step, summary):
     """Queue an Event for *summary* at *step*, flushing when due.

     The queue is flushed once the tick interval has elapsed since the
     last flush-triggering append, or when the queue hits its size cap.
     """
     now = time.time()
     self.queue.append(Event(wall_time=now, step=step, summary=summary))
     if now - self.last_append >= self.tick or len(self.queue) >= self.max_queue_len:
         self.flush()
         self.last_append = now
Esempio n. 3
0
def rename_events(input_path, output_path, old_tags, new_tag):
    """Copy a TF event file, renaming any summary tag in *old_tags* to *new_tag*.

    Args:
        input_path: Path of the source event file.
        output_path: Path the rewritten event file is written to.
        old_tags: Collection of tag names to be replaced.
        new_tag: Replacement tag name.
    """
    with tf.io.TFRecordWriter(str(output_path)) as writer:
        for raw_record in tf.data.TFRecordDataset([str(input_path)]):
            event = Event()
            event.MergeFromString(raw_record.numpy())
            # Only summary events carry tagged values worth rewriting.
            if event.summary:
                for value in event.summary.value:
                    if value.tag in old_tags:
                        value.tag = new_tag
            writer.write(event.SerializeToString())
def write_summary(dpath, aggregations_per_key):
    """Write aggregated scalar events to a summary file under *dpath*.

    Args:
        dpath: Directory handed to ``tf.summary.FileWriter``.
        aggregations_per_key: Mapping of tag name to a
            ``(steps, wall_times, aggregations)`` triple of parallel sequences.
    """
    writer = tf.summary.FileWriter(dpath)

    for tag, (steps, wall_times, aggregations) in aggregations_per_key.items():
        for step, wall_time, value in zip(steps, wall_times, aggregations):
            summary = tf.Summary(
                value=[tf.Summary.Value(tag=tag, simple_value=value)])
            writer.add_event(
                Event(wall_time=wall_time, step=step, summary=summary))
        # Flush once per tag rather than per event.
        writer.flush()
Esempio n. 5
0
def iter_log_events(tf_event_file):
    """Yield ``(tag, simple_value)`` pairs from a TF event file.

    Each summary-bearing record is expected to carry exactly one value;
    an AssertionError is raised otherwise.
    """
    import tensorflow as tf
    from tensorflow.core.util.event_pb2 import Event
    for raw_record in tf.data.TFRecordDataset([tf_event_file]):
        event = Event.FromString(raw_record.numpy())
        values = event.summary.value
        if values:
            assert len(values) == 1, "Unexpected length for event summary"
            yield values[0].tag, values[0].simple_value
def remove_events(input_file, groups_to_remove):
    """Rewrite *input_file* in place, dropping summary values whose tag
    matches any group in *groups_to_remove*.

    A ``<input_file>.new`` file is written first and then renamed over the
    original, so the source file is never left half-written.

    Args:
        input_file: Path of the TF event file to filter.
        groups_to_remove: Groups consulted via ``is_tag_matching_group`` to
            decide which tags are removed.
    """
    new_file = input_file + ".new"
    with tf.io.TFRecordWriter(new_file) as writer:
        for rec in tf.data.TFRecordDataset([input_file]):
            ev = Event()
            ev.MergeFromString(rec.numpy())
            if ev.summary:
                kept = [v for v in ev.summary.value
                        if not is_tag_matching_group(v.tag, groups_to_remove)]
                # Only touch the message when something was filtered out.
                if len(kept) != len(ev.summary.value):
                    # Repeated proto fields cannot be assigned directly:
                    # clear in place, then re-extend with the survivors.
                    del ev.summary.value[:]
                    ev.summary.value.extend(kept)
            writer.write(ev.SerializeToString())
    os.rename(new_file, input_file)
Esempio n. 7
0
def rename_events(input_file, old_tags, new_tag):
    """Rewrite *input_file* in place, renaming tags in *old_tags* to *new_tag*.

    A ``<input_file>.new`` file is written first and then renamed over the
    original.

    Args:
        input_file: Path of the TF event file to rewrite.
        old_tags: Collection of tag names to be replaced.
        new_tag: Replacement tag name.
    """
    new_file = input_file + ".new"
    with tf.io.TFRecordWriter(new_file) as writer:
        for raw_record in tf.data.TFRecordDataset([input_file]):
            event = Event()
            event.MergeFromString(raw_record.numpy())
            # Only summary events carry tagged values worth rewriting.
            if event.summary:
                for value in event.summary.value:
                    if value.tag in old_tags:
                        value.tag = new_tag
            writer.write(event.SerializeToString())
    os.rename(new_file, input_file)
Esempio n. 8
0
def write_summary(dpath, extracts):
    """Write extracted scalar events to a summary file under *dpath*.

    Fixes two runtime errors in the original: the loop unpacked four names
    (``episode, step, wall_time, value``) from a ``zip`` of only three
    sequences, which always raises ``ValueError``, and it passed a
    non-existent ``episode`` keyword to ``Event``.

    Args:
        dpath: ``pathlib.Path``-like base directory; output goes to
            ``dpath / FOLDER_NAME / dpath.name``.
        extracts: Mapping of tag name to a ``(steps, wall_times, values)``
            triple of parallel sequences.
    """
    dpath = dpath / FOLDER_NAME / dpath.name
    writer = tf.summary.FileWriter(dpath)

    for key, (steps, wall_times, values) in extracts.items():
        for step, wall_time, value in zip(steps, wall_times, values):
            summary = tf.Summary(
                value=[tf.Summary.Value(tag=key, simple_value=value)])
            scalar_event = Event(wall_time=wall_time,
                                 step=step,
                                 summary=summary)
            writer.add_event(scalar_event)

        writer.flush()
Esempio n. 9
0
def write_summary(dpath, dname, fname, aggregations_per_key, steps,
                  wall_times):
    """Write aggregated scalar events to a summary file under ``dpath/dname``.

    Note: ``fname`` is accepted for interface compatibility but is not used.

    Args:
        dpath: Base directory.
        dname: Subdirectory name joined onto *dpath*.
        fname: Unused.
        aggregations_per_key: Mapping of tag name to a sequence of values.
        steps: Step numbers, parallel to each value sequence.
        wall_times: Wall-clock timestamps, parallel to each value sequence.
    """
    target_dir = os.path.abspath(os.path.join(dpath, dname))

    writer = tf.summary.FileWriter(target_dir)

    for tag, aggregations in aggregations_per_key.items():
        for step, wall_time, value in zip(steps, wall_times, aggregations):
            summary = tf.Summary(
                value=[tf.Summary.Value(tag=tag, simple_value=value)])
            writer.add_event(
                Event(wall_time=wall_time, step=step, summary=summary))
        # Flush once per tag rather than per event.
        writer.flush()
Esempio n. 10
0
def read_events(stream):
    '''
    Read and return as a generator a sequence of Event protos from
    file-like object `stream`.

    Each record on the stream is framed as:
        uint64 data_len | uint32 masked CRC of the length bytes |
        data_len bytes of serialized Event | uint32 masked CRC of the data
    (little-endian throughout — this is the TFRecord framing).

    Raises:
        SummaryReaderException: on a truncated header/payload/footer or on
            a length/data CRC mismatch.
    '''
    # '<QI' = little-endian uint64 record length + uint32 CRC of that length.
    header_size = struct.calcsize('<QI')
    len_size = struct.calcsize('<Q')
    footer_size = struct.calcsize('<I')

    while True:
        header = stream.read(header_size)
        if len(header) == 0:
            # Clean EOF on a record boundary: iteration is complete.
            break
        elif len(header) < header_size:
            # Partial header means the file was truncated mid-record.
            raise SummaryReaderException(
                'unexpected EOF (expected a %d-byte header, '
                'got %d bytes)' % (header_size, len(header)))
        data_len, len_crc = struct.unpack('<QI', header)
        # The length CRC covers only the 8 length bytes, not the CRC field.
        len_crc_actual = masked_crc(header[:len_size])
        if len_crc_actual != len_crc:
            raise SummaryReaderException('incorrect length CRC (%d != %d)' %
                                         (len_crc_actual, len_crc))

        data = stream.read(data_len)
        if len(data) < data_len:
            raise SummaryReaderException(
                'unexpected EOF (expected %d bytes, got %d)' %
                (data_len, len(data)))
        # NOTE: the Event is yielded before its data CRC is verified below,
        # so a caller may observe a corrupt record's Event before the
        # exception is raised on the next iteration step.
        yield Event.FromString(data)

        # Trailing uint32 CRC of the payload bytes.
        footer = stream.read(footer_size)
        if len(footer) < footer_size:
            raise SummaryReaderException(
                'unexpected EOF (expected a %d-byte footer, '
                'got %d bytes)' % (footer_size, len(footer)))
        data_crc, = struct.unpack('<I', footer)
        data_crc_actual = masked_crc(data)
        if data_crc_actual != data_crc:
            raise SummaryReaderException('incorrect data CRC (%d != %d)' %
                                         (data_crc_actual, data_crc))
Esempio n. 11
0
 def _add_event(self, event: event_pb2.Event, step: Optional[int]) -> None:
     """Stamp *event* with the current wall time (and *step*, if given),
     then hand it to the underlying writer.

     Vendored from TensorFlow: tensorflow/python/summary/writer/writer.py
     """
     event.wall_time = time.time()
     if step is None:
         self.writer.add_event(event)
         return
     event.step = int(step)
     self.writer.add_event(event)
Esempio n. 12
0
 def _write_event(self, summary_value, step):
     """Wrap *summary_value* in an Event stamped with the current
     (rounded to whole seconds) wall time and forward it to the writer."""
     wall_time = round(time.time())
     summary = Summary(value=[summary_value])
     event = Event(wall_time=wall_time, step=step, summary=summary)
     self.writer.add_event(event)