def test_async_writer_without_write(self):
   logdir = self.get_temp_dir()
   w = EventFileWriter(logdir)
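   # Closing without adding any events should still produce an event file
   # containing only the file_version record.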
   w.close()
   event_files = sorted(glob.glob(os.path.join(logdir, '*')))
   r = PyRecordReader_New(event_files[0])
   r.GetNext()
   s = event_pb2.Event.FromString(r.record())
   self.assertEqual(s.file_version, "brain.Event:2")
Example #2
 def test_empty_record(self):
     filename = os.path.join(self.get_temp_dir(), "empty_record")
     w = RecordWriter(open(filename, "wb"))
     bytes_to_write = b""
     w.write(bytes_to_write)
     w.close()
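      # The empty payload should round-trip: the reader returns an empty record.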
     r = PyRecordReader_New(filename)
     r.GetNext()
     self.assertEqual(r.record(), bytes_to_write)
 def test_flush(self):
     N_TEST = 5
     w = SummaryWriter(flush_secs=1)
     f = w.file_writer.event_writer._ev_writer._file_name
     for i in range(N_TEST):
         w.add_scalar('a', i)
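          # Sleep longer than flush_secs so the pending scalar is flushed to disk.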
         time.sleep(2)
     r = PyRecordReader_New(f)
     r.GetNext()  # meta data, so skip
     for _ in range(N_TEST):  # all of the data should be flushed
         r.GetNext()
 def test_flush_timer_is_long_so_data_is_not_there(self):
     with self.assertRaises(BaseException):
         N_TEST = 5
         w = SummaryWriter(flush_secs=20)
         f = w.file_writer.event_writer._ev_writer._file_name
         for i in range(N_TEST):
             w.add_scalar('a', i)
             time.sleep(2)
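          # flush_secs (20 s) is longer than the whole loop, so the records have
          # not been flushed yet and reading them back should fail.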
         r = PyRecordReader_New(f)
         r.GetNext()  # meta data, so skip
         for _ in range(N_TEST):  # missing data
             r.GetNext()
Example #5
  def test_record_writer_roundtrip(self):
    filename = os.path.join(self.get_temp_dir(), "record_writer_roundtrip")
    w = RecordWriter(open(filename, 'wb'))
    chunks_to_write = ["hello world{}".format(i).encode() for i in range(10)]
    for chunk in chunks_to_write:
      w.write(chunk)
    w.close()

    r = PyRecordReader_New(filename)
    for chunk in chunks_to_write:
      r.GetNext()
      self.assertEqual(r.record(), chunk)
    def test_record_writer_roundtrip(self):
        filename = os.path.join(self.get_temp_dir(), "record_writer_roundtrip")
        w = RecordWriter(filename)
        bytes_to_write = b"hello world"
        times_to_test = 50
        for _ in range(times_to_test):
            w.write(bytes_to_write)
        w.close()

        r = PyRecordReader_New(filename)
        for i in range(times_to_test):
            r.GetNext()
            self.assertEqual(r.record(), bytes_to_write)
Example #7
 def test_record_immediate_read(self):
   filename = os.path.join(self.get_temp_dir(), "record_immediate_read")
   chunks_to_write = ["hello world{}".format(i).encode() for i in range(10)]
   w = RecordWriter(open(filename, 'wb'))
   r = PyRecordReader_New(filename)
   with self.assertRaises(errors.OutOfRangeError):
     r.GetNext()
    for chunk in chunks_to_write:
      w.write(chunk)
     w.flush()
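      # Each record should be readable as soon as it has been flushed.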
     r.GetNext()
      self.assertEqual(r.record(), chunk)
   w.close()
 def test_event_file_writer_roundtrip(self):
   _TAGNAME = 'dummy'
   _DUMMY_VALUE = 42
   logdir = self.get_temp_dir()
   w = EventFileWriter(logdir)
   summary = Summary(value=[Summary.Value(tag=_TAGNAME, simple_value=_DUMMY_VALUE)])
   fakeevent = event_pb2.Event(summary=summary)
   w.add_event(fakeevent)
   w.close()
   event_files = sorted(glob.glob(os.path.join(logdir, '*')))
   self.assertEqual(len(event_files), 1)
   r = PyRecordReader_New(event_files[0])
   r.GetNext()  # meta data, so skip
   r.GetNext()
   self.assertEqual(fakeevent.SerializeToString(), r.record())
Example #9
    def test_writer(self):
        TEST_LEN = 100
        N_PROC = 4
        writer = SummaryWriter()
        event_filename = writer.file_writer.event_writer._ev_writer._file_name

        predefined_values = list(range(TEST_LEN))

        def train3():
            for i in range(TEST_LEN):
                writer.add_scalar('many_write_in_func', predefined_values[i])
                time.sleep(0.01 * np.random.randint(0, 10))

        processes = []
        for _ in range(N_PROC):
            p = mp.Process(target=train3)
            processes.append(p)
            p.start()

        for p in processes:
            p.join()
        writer.close()

        collected_values = []
        r = PyRecordReader_New(event_filename)
        r.GetNext()  # meta data, so skip
        for _ in range(TEST_LEN * N_PROC):  # all of the data should be flushed
            r.GetNext()
            ev = event_pb2.Event.FromString(r.record())
            collected_values.append(ev.summary.value[0].simple_value)

        collected_values = sorted(collected_values)
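        # After sorting, each value in [0, TEST_LEN) should appear exactly N_PROC times.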
        for i in range(TEST_LEN):
            for j in range(N_PROC):
                assert collected_values[i * N_PROC + j] == i
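
The snippets above are shown without their surrounding test classes or imports. As a rough, non-authoritative sketch, a header like the one below would make them runnable; every module path here is an assumption based on TensorBoard's tensorboard.compat layout and on tensorboardX, not copied from the original test files. Note that the examples also mix two APIs: RecordWriter(open(path, 'wb')) matches a writer that wraps a file object, while RecordWriter(path) appears to match tensorboardX's path-based RecordWriter, and the SummaryWriter examples may come from a different package (e.g. mxboard).

# Assumed imports -- a sketch; exact paths vary between TensorBoard/tensorboardX versions.
import glob
import multiprocessing as mp
import os
import time

import numpy as np

from tensorboard.compat.proto import event_pb2
from tensorboard.compat.proto.summary_pb2 import Summary
from tensorboard.compat.tensorflow_stub import errors
from tensorboard.compat.tensorflow_stub.pywrap_tensorflow import PyRecordReader_New
from tensorboard.summary.writer.event_file_writer import EventFileWriter
from tensorboard.summary.writer.record_writer import RecordWriter
from tensorboardX import SummaryWriter  # assumption: the flush/multiprocess examples use a SummaryWriter like this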