def _tf_events(self):
    """Return a loaded TensorBoard event generator for ``self.dir``.

    TensorBoard is imported lazily. If it is not installed, the
    import error is logged at debug level and ``None`` is returned so
    callers can treat a missing tensorboard as "no events".
    """
    try:
        from tensorboard.backend.event_processing.event_accumulator import (
            _GeneratorFromPath,
        )
    except ImportError as e:
        log.debug("error importing event generator: %s", e)
        return None
    return _GeneratorFromPath(self.dir).Load()
def _tf_events(self):
    """Return a loaded TensorBoard event generator for ``self.dir``.

    TensorBoard is imported lazily; when it is unavailable the error
    is logged at debug level and ``None`` is returned. Warnings raised
    while creating/loading the generator are suppressed.
    """
    try:
        from tensorboard.backend.event_processing.event_accumulator import (
            _GeneratorFromPath,
        )
    except ImportError as e:
        log.debug("error importing event generator: %s", e)
        return None
    with warnings.catch_warnings():
        # NOTE(review): the filter only covers the Load() call itself —
        # warnings raised while iterating the returned generator are
        # not suppressed. Confirm this is intended.
        warnings.simplefilter("ignore", Warning)
        return _GeneratorFromPath(self.dir).Load()
def __iter__(self):
    """Yield ``(tag, simple_value, step)`` for each scalar event.

    Reads TensorBoard event files under ``self.dir``. If tensorboard
    is not installed, yields nothing (best-effort, matching the
    original silent-skip behavior).
    """
    try:
        from tensorboard.backend.event_processing import event_accumulator
    except ImportError:
        # No tensorboard -> no events to report.
        return
    events = event_accumulator._GeneratorFromPath(self.dir).Load()
    try:
        for event in events:
            if not event.HasField("summary"):
                continue
            for val in event.summary.value:
                if not val.HasField("simple_value"):
                    continue
                yield val.tag, val.simple_value, event.step
    except RuntimeError as e:
        # Fix: PEP 479 (Python 3.7+) converts a StopIteration raised
        # inside a generator into RuntimeError. TensorBoard's event
        # generator triggers this when there are no events to read, so
        # treat that specific case as "no events" instead of crashing.
        if e.args[0] != "generator raised StopIteration":
            raise
def __iter__(self):
    """Yield ``(tag, simple_value, step)`` for each scalar event.

    Yields nothing when tensorboard is not installed.
    """
    try:
        from tensorboard.backend.event_processing import event_accumulator
    except ImportError:
        return  # no tensorboard -> nothing to yield
    events = event_accumulator._GeneratorFromPath(self.dir).Load()
    try:
        for event in events:
            if event.HasField("summary"):
                for val in event.summary.value:
                    if val.HasField("simple_value"):
                        yield val.tag, val.simple_value, event.step
    except RuntimeError as e:
        # PEP 479 landed in Python 3.7 and TB triggers this
        # runtime error when there are no events to read.
        if e.args[0] != "generator raised StopIteration":
            raise
def _maybe_scalars(self, fields, run):
    """Return a dict of scalar index updates for changed event paths.

    Each event subdirectory of ``run.path`` is checksummed; paths
    whose checksum matches the value recorded in ``fields`` are
    skipped. For changed paths, the new checksum and any non-empty
    scalar values are collected into the returned dict.
    """
    from tensorboard.backend.event_processing import event_multiplexer
    from tensorboard.backend.event_processing import event_accumulator
    _ensure_tf_logger_patched()
    scalars = {}
    for path in event_multiplexer.GetLogdirSubdirectories(run.path):
        checksum_field = self._events_checksum_field_name(path)
        prev_checksum = fields.get(checksum_field)
        cur_checksum = self._events_checksum(path)
        log.debug(
            "event path checksums for %s: last=%s, cur=%s",
            path, prev_checksum, cur_checksum)
        if prev_checksum != cur_checksum:
            # Record the new checksum so this path is skipped next time.
            scalars[checksum_field] = cur_checksum
            log.debug("indexing events in %s", path)
            events_rel_path = os.path.relpath(path, run.path)
            loaded = event_accumulator._GeneratorFromPath(path).Load()
            for key, vals in self._scalar_vals(loaded, events_rel_path).items():
                if vals:
                    self._store_scalar_vals(key, vals, scalars)
    return scalars
def _maybe_scalars(self, fields, run):
    """Return a dict of scalar index updates for changed event paths.

    Walks event subdirectories under ``run.path`` (skipping paths not
    owned by the run), reindexes those whose events checksum differs
    from the value stored in ``fields``, and returns the collected
    scalar values plus updated checksums.
    """
    from tensorboard.backend.event_processing import io_wrapper
    from tensorboard.backend.event_processing import event_accumulator
    from guild import tfevent
    tfevent.ensure_tf_logging_patched()
    scalars = {}
    scalar_aliases = self._init_scalar_aliases(run)
    for path in io_wrapper.GetLogdirSubdirectories(run.path):
        if not self._path_in_run(path, run):
            log.debug("%s is not part of run %s, skipping", path, run.id)
            continue
        checksum_field = self._events_checksum_field_name(path)
        prev_checksum = fields.get(checksum_field)
        cur_checksum = self._events_checksum(path)
        log.debug(
            "event path checksums for %s: last=%s, cur=%s",
            path, prev_checksum, cur_checksum)
        if prev_checksum == cur_checksum:
            continue  # unchanged since last index -> nothing to do
        log.debug("indexing events in %s", path)
        events_rel_path = os.path.relpath(path, run.path)
        loaded = event_accumulator._GeneratorFromPath(path).Load()
        scalar_vals = self._scalar_vals(loaded, events_rel_path)
        self._apply_scalar_vals(scalar_vals, scalars, scalar_aliases)
        scalars[checksum_field] = cur_checksum
    return scalars
def clean(self):
    """Re-arm the event accumulator with a fresh generator, then clean.

    Replaces the tracker's private generator so the subsequent
    ``_clean`` pass reads events from ``self.path`` from the start.
    """
    fresh_generator = tfproto._GeneratorFromPath(self.path)  # pylint: disable=protected-access
    self.ea_tracker._generator = fresh_generator  # pylint: disable=protected-access
    self._clean()
def _init_event_loader(path):
    """Return a TensorBoard event generator for ``path``."""
    # pylint: disable=protected-access
    loader = event_accumulator._GeneratorFromPath(path)
    return loader
def iter_tf_events(dir):
    """Return an iterator over TF events found under ``dir``.

    NOTE(review): ``dir`` shadows the builtin but is kept to preserve
    the keyword-argument interface for existing callers.
    """
    from tensorboard.backend.event_processing import event_accumulator
    with warnings.catch_warnings():
        # Suppress warnings raised while creating/loading the
        # generator; iteration happens after the filter is restored.
        warnings.simplefilter("ignore", Warning)
        events = event_accumulator._GeneratorFromPath(dir).Load()
    return events