def test_iterator_cant_read_after_first_line(self):
    mock_raise = mock.Mock()

    def raise_(__count=[0]):
        __count[0] += 1
        print("raise_", __count)
        if __count[0] == 2:
            mock_raise('bang')
            raise OSError("bang")
        return "abc event=3 1s\n"

    mock_h1 = mock.Mock()
    mock_h1.readline.side_effect = raise_
    streamer = collection.Streamer(
        ['log1'], collection.LogFormats.InfluxDB, precision='s')
    # patch handles so that we can test the _iterator function directly
    streamer.handles = {'h1': mock_h1}
    list(streamer._iterator())
    self.assertEqual(len(streamer.handles), 0)
    mock_raise.assert_called_once_with('bang')
def test_streamer_two_files_earliest_after(self):
    with tempfile.TemporaryDirectory() as tmpdirname:
        log1 = os.path.join(tmpdirname, 'log1')
        log2 = os.path.join(tmpdirname, 'log2')
        with open(log1, 'wt') as f:
            self._write_influxdb_lines(
                f, 'aa-', 10, 0, 1)
        with open(log2, 'wt') as f:
            self._write_influxdb_lines(
                f, 'bb-', 3, 1, 3)
        sorted_logs = [(os.path.join(tmpdirname, f), l) for (f, l) in [
            ('log1', 'abc event=aa-0 0'),
            ('log2', 'abc event=bb-0 1'),
            ('log1', 'abc event=aa-1 1'),
            ('log1', 'abc event=aa-2 2'),
            ('log1', 'abc event=aa-3 3'),
            ('log2', 'abc event=bb-1 4'),
            ('log1', 'abc event=aa-4 4'),
            ('log1', 'abc event=aa-5 5'),
            ('log1', 'abc event=aa-6 6'),
            ('log2', 'abc event=bb-2 7'),
            ('log1', 'abc event=aa-7 7'),
            ('log1', 'abc event=aa-8 8'),
            ('log1', 'abc event=aa-9 9')]]
        with collection.Streamer(
                [log2, log1], collection.LogFormats.InfluxDB,
                precision='s') as events:
            all_events = list(events)
        self.assertEqual(all_events, sorted_logs)
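# NOTE: the file-based tests in this excerpt call a self._write_influxdb_lines
# helper that is defined elsewhere in this test class and not shown here. The
# function below is a hypothetical stand-alone sketch of that helper, inferred
# only from the expected output in these tests, and is included to document the
# fixture format; it is not part of the class under test.
def _example_write_influxdb_lines(f, prefix, count, start, step):
    # Write `count` InfluxDB line-protocol records of the form
    # "abc event=<prefix><i> <timestamp>", with timestamps starting at
    # `start` and advancing by `step` per record.
    for i in range(count):
        f.write("abc event={}{} {}\n".format(prefix, i, start + i * step))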
def test_stream_cant_open_file(self):

    def raise_(_):
        raise FileNotFoundError("bang")

    with mock.patch('builtins.open') as mock_open:
        mock_open.side_effect = raise_
        streamer = collection.Streamer(
            ['log1'], collection.LogFormats.InfluxDB, precision='s')
        streamer.__enter__()
        mock_open.assert_called_once_with('log1')
        self.assertEqual(len(streamer.handles), 0)
def test_stream_two_files_one_empty(self):
    with tempfile.TemporaryDirectory() as tmpdirname:
        log1 = os.path.join(tmpdirname, 'log1')
        log2 = os.path.join(tmpdirname, 'log2')
        with open(log1, 'wt') as f:
            self._write_influxdb_lines(
                f, 'aa-', 1, 0, 1)
        with open(log2, 'wt') as f:
            pass
        sorted_logs = [(os.path.join(tmpdirname, f), l) for (f, l) in [
            ('log1', 'abc event=aa-0 0')]]
        with collection.Streamer(
                [log1, log2], collection.LogFormats.InfluxDB,
                precision='s') as events:
            all_events = list(events)
        self.assertEqual(all_events, sorted_logs)
def test__iterator_cant_read_from_file(self):
    mock_raise = mock.Mock()

    def raise_():
        mock_raise('bang')
        raise OSError("bang")

    mock_h1 = mock.Mock()
    mock_h1.readline.side_effect = raise_
    streamer = collection.Streamer(
        ['log1'], collection.LogFormats.InfluxDB, precision='s')
    # patch handles so that we can test the _iterator function directly
    streamer.handles = {'h1': mock_h1}
    list(streamer._iterator())
    self.assertEqual(len(streamer.handles), 0)
    mock_raise.assert_called_once_with('bang')
def test_stream_exit_closes_files(self):

    def raise_():
        raise FileNotFoundError("bang")

    mock_h1 = mock.Mock()
    mock_h2 = mock.Mock()
    mock_h1.close.side_effect = raise_
    streamer = collection.Streamer(
        ['log1'], collection.LogFormats.InfluxDB, precision='s')
    # patch handles so that we can test the __exit__ function directly
    streamer.handles = {'h1': mock_h1, 'h2': mock_h2}
    self.assertFalse(streamer.__exit__('a', 'b', 'c'))
    mock_h1.close.assert_called_once_with()
    mock_h2.close.assert_called_once_with()
    self.mock_logger.warning.assert_called_once_with(
        mock.ANY, 'h1', 'bang')
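# NOTE: a minimal usage sketch of the Streamer behaviour exercised above, based
# only on what these tests show: the context manager merges the given log files
# by timestamp and yields (path, line) tuples. The function name and the `paths`
# argument are hypothetical illustrations, and the sketch assumes the module's
# existing `collection` import; it is not itself a test and is never called.
def _example_streamer_usage(paths):
    # Stream merged InfluxDB-format events from the given log files and print
    # each one together with the file it came from.
    with collection.Streamer(
            paths, collection.LogFormats.InfluxDB, precision='s') as events:
        for path, line in events:
            print(path, line)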