Example #1
File: writer.py Project: ml7/tensorboardX
    def __init__(self, log_dir=None, comment='', **kwargs):
        """
        Args:
            log_dir (string): save location. Defaults to runs/**CURRENT_DATETIME_HOSTNAME**, which changes after each
              run. Use a hierarchical folder structure to compare runs easily, e.g. 'runs/exp1', 'runs/exp2'.
            comment (string): comment appended to the default ``log_dir``. If ``log_dir`` is assigned,
              this argument has no effect.
            purge_step (int):
              When logging crashes at step :math:`T+X` and restarts at step :math:`T`, any events
              whose global_step is larger than or equal to :math:`T` will be purged and hidden from TensorBoard.
              Note that the resumed experiment and the crashed experiment should have the same ``log_dir``.
            filename_suffix (string):
              Every event file's name is suffixed with this suffix, e.g. ``SummaryWriter(filename_suffix='.123')``.
            kwargs: extra keyword arguments for FileWriter (e.g. 'flush_secs'
              controls how often to flush pending events). For more arguments
              please refer to the docs for 'tf.summary.FileWriter'.
        """
        if not log_dir:
            import socket
            from datetime import datetime
            current_time = datetime.now().strftime('%b%d_%H-%M-%S')
            log_dir = os.path.join(
                'runs', current_time + '_' + socket.gethostname() + comment)

        if 'purge_step' in kwargs:
            most_recent_step = kwargs.pop('purge_step')
            if not os.path.exists(log_dir):
                print('warning: you are purging non-existent data.')
            self.file_writer = FileWriter(logdir=log_dir, **kwargs)
            self.file_writer.add_event(
                Event(step=most_recent_step, file_version='brain.Event:2'))
            self.file_writer.add_event(
                Event(step=most_recent_step,
                      session_log=SessionLog(status=SessionLog.START)))
        else:
            self.file_writer = FileWriter(logdir=log_dir, **kwargs)

        # Create default bins for histograms, see generate_testdata.py in
        # tensorflow/tensorboard
        v = 1E-12
        buckets = []
        neg_buckets = []
        while v < 1E20:
            buckets.append(v)
            neg_buckets.append(-v)
            v *= 1.1
        self.default_bins = neg_buckets[::-1] + [0] + buckets

        self.all_writers = {self.file_writer.get_logdir(): self.file_writer}
        # {writer_id : [[timestamp, step, value],...],...}
        self.scalar_dict = {}

        # TODO (ml7): Remove try-except when PyTorch 1.0 merges PyTorch and
        # Caffe2
        try:
            import caffe2
            from caffe2.python import workspace  # workaround for pytorch/issue#10249
            self.caffe2_enabled = True
        except (SystemExit, ImportError):
            self.caffe2_enabled = False
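A minimal usage sketch of the constructor above (the run names, comment string, and step values are illustrative, not taken from the source):

from tensorboardX import SummaryWriter

# Default log_dir: runs/<CURRENT_DATETIME>_<hostname><comment>
writer = SummaryWriter(comment='_baseline')
writer.add_scalar('loss', 0.25, global_step=1)
writer.close()

# Resume a crashed run in the same log_dir, purging events with step >= 100
resumed = SummaryWriter(log_dir='runs/exp1', purge_step=100)
resumed.close()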
Example #2
    def __iter__(self) -> Iterator[Event]:
        """Iterates over events in the current events file.

        Yields:
            Event protobuf objects, one per record.
        """
        while True:
            header_size = struct.calcsize("Q")
            header = self._read_and_check(header_size)
            if header is None:
                break
            event_size = struct.unpack("Q", header)[0]
            event_raw = self._read_and_check(event_size)
            if event_raw is None:
                raise EventReadingException("Unexpected end of events file")
            event = Event()
            event.ParseFromString(event_raw)
            yield event
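Both this reader and Example #3 below rely on a _read_and_check helper that is not part of the snippet. A minimal stand-alone sketch of the behaviour the loop assumes is shown here; it is an assumption, not the project's actual helper, and real TensorFlow event files additionally carry CRC32 checksums after the length header and the payload, which a production reader would verify at this point.

class EventReadingException(Exception):
    """Raised when an events file ends in the middle of a record."""

def read_and_check(f, size):
    """Read exactly `size` bytes from the binary file object `f`.

    Returns None at a clean end of file (nothing left to read) and raises
    if the file ends mid-record, matching how the __iter__ loop above
    treats the two cases.
    """
    data = f.read(size)
    if len(data) == 0:
        return None
    if len(data) != size:
        raise EventReadingException('Unexpected end of events file')
    return data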
Example #3
    def __iter__(self) -> Iterator[Event]:
        """
        Iterates over events in the current events file.

        :return: An iterator over the Event objects in the file.
        :raises NotImplementedError: if the stream is in non-blocking mode.
        :raises EventReadingError: on a reading error.
        """
        while True:
            header_size = struct.calcsize('Q')
            header = self._read_and_check(header_size)
            if header is None:
                break
            event_size = struct.unpack('Q', header)[0]
            event_raw = self._read_and_check(event_size)
            if event_raw is None:
                raise EventReadingError('Unexpected end of events file')
            event = Event()
            event.ParseFromString(event_raw)
            yield event
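Examples #2 and #3 are essentially the same reader with different exception names. For reference, here is a self-contained sketch of the same idea outside a class, assuming standard TFRecord framing (8-byte little-endian length, 4-byte CRC of the length, the serialized Event payload, then a 4-byte CRC of the payload); the CRCs are read but not verified, and the Event import path is an assumption that depends on your installation.

import struct

from tensorboardX.proto.event_pb2 import Event  # assumed import path

def iter_events(path):
    """Yield Event protobufs from an events file, one per record."""
    with open(path, 'rb') as f:
        while True:
            header = f.read(8)
            if not header:
                break                                   # clean end of file
            if len(header) != 8:
                raise EOFError('Unexpected end of events file')
            event_size = struct.unpack('<Q', header)[0]
            f.read(4)                                   # length CRC, skipped in this sketch
            event_raw = f.read(event_size)
            if len(event_raw) != event_size:
                raise EOFError('Unexpected end of events file')
            f.read(4)                                   # payload CRC, skipped in this sketch
            event = Event()
            event.ParseFromString(event_raw)
            yield event

For example, iterating iter_events('runs/exp1/events.out.tfevents.example') (an illustrative path) yields Event messages whose step and wall_time fields can be inspected directly.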