def test_file_publisher_compress_csv(self):
    """Publish with compress=True and format=csv.

    The live file must be sniffable as CSV and the rotated backup must
    be a gzip archive containing the published sample fields.
    """
    # Test configurations with compress and csv format options
    tempdir = tempfile.mkdtemp()
    name = '%s/log_file' % tempdir
    parsed_url = netutils.urlsplit(
        'file://%s?max_bytes=50&backup_count=2&compress=True&format=csv'
        % name)
    publisher = file.FilePublisher(self.CONF, parsed_url)
    publisher.publish_samples(self.test_data)

    self.assertTrue(os.path.exists(name))
    # Use a context manager so the handle is closed (the original
    # leaked it), and sniff the first KiB to validate the dialect.
    with open(name, 'rb') as csv_fileh:
        dialect = csv.Sniffer().sniff(csv_fileh.read(1024).decode())
    self.assertIsNotNone(dialect)

    # max_bytes=50 forces rotation, and compress=True gzips the backup.
    self.assertTrue(os.path.exists('%s.1.gz' % name))
    # NOTE: the original wrapped this in try/except with a bogus
    # self.assertRaises(UserWarning, e) (assertRaises needs a callable,
    # not an exception instance) and could hit NameError on f.close()
    # in finally if gzip.open() itself raised. A plain `with` is both
    # correct and leak-free; any gzip error now fails the test loudly.
    with gzip.open('%s.1.gz' % name, 'rb') as f:
        content = f.read()
    self.assertIn(b'resource_metadata', content)
    self.assertIn(b'volume', content)
    self.assertNotIn(b'blahblah', content)
def test_file_publisher_compress(self):
    """Publish with compress=True: rotated backups are gzip-compressed."""
    # Test configuration with compress option
    tempdir = tempfile.mkdtemp()
    name = '%s/log_file' % tempdir
    parsed_url = netutils.urlsplit(
        'file://%s?max_bytes=50&backup_count=3&compress=True' % name)
    publisher = file.FilePublisher(self.CONF, parsed_url)
    publisher.publish_samples(self.test_data)

    handler = publisher.publisher_logger.handlers[0]
    self.assertIsInstance(handler, logging.handlers.RotatingFileHandler)
    self.assertEqual(
        [50, name, 3],
        [handler.maxBytes, handler.baseFilename, handler.backupCount])
    # The rotating file gets created since only allow 50 bytes.
    self.assertTrue(os.path.exists('%s.1.gz' % name))
    # NOTE: the original used try/except with assertRaises(UserWarning, e),
    # which misuses assertRaises (it expects a callable, not an exception
    # instance) and risked NameError on f.close() in finally when
    # gzip.open() failed. `with` closes the handle and lets any real
    # error fail the test.
    with gzip.open('%s.1.gz' % name, 'rb') as f:
        content = f.read()
    self.assertIn(b'resource_metadata', content)
    self.assertIn(b'volume', content)
    self.assertNotIn(b'blahblah', content)
def test_file_publisher_json(self):
    """Samples published with ?json land as one JSON document per line."""
    tempdir = tempfile.mkdtemp()
    name = '%s/log_file_json' % tempdir
    parsed_url = netutils.urlsplit('file://%s?json' % name)
    publisher = file.FilePublisher(self.CONF, parsed_url)
    publisher.publish_samples(self.test_data)

    # No max_bytes/backup_count in the URL -> handler defaults to 0/0.
    handler = publisher.publisher_logger.handlers[0]
    self.assertIsInstance(handler, logging.handlers.RotatingFileHandler)
    self.assertEqual(
        [0, name, 0],
        [handler.maxBytes, handler.baseFilename, handler.backupCount])
    self.assertTrue(os.path.exists(name))

    with open(name, 'r') as out:
        lines = out.readlines()
    self.assertEqual(len(self.test_data), len(lines))
    for idx, raw in enumerate(lines):
        try:
            decoded = json.loads(raw)
        except ValueError:
            self.fail("File written is not valid json")
        # Every line must round-trip the sample it was written from.
        self.assertEqual(self.test_data[idx].id, decoded['id'])
        self.assertEqual(self.test_data[idx].timestamp,
                         decoded['timestamp'])
def test_file_publisher_invalid(self):
    """Non-numeric max_bytes/backup_count disable the publisher logger."""
    # Test invalid max bytes, backup count configurations
    tempdir = tempfile.mkdtemp()
    bad_url = ('file://%s/log_file_bad'
               '?max_bytes=yus&backup_count=5y' % tempdir)
    publisher = file.FilePublisher(utils.urlsplit(bad_url))
    publisher.publish_samples(None, self.test_data)
    # The publisher swallows the bad config and ends up with no logger.
    self.assertIsNone(publisher.publisher_logger)
def test_file_publisher(self):
    """Exercise valid, default and invalid file:// publisher configs."""
    # Test valid configurations
    parsed_url = urlsplit(
        'file:///tmp/log_file?max_bytes=50&backup_count=3')
    publisher = file.FilePublisher(parsed_url)
    publisher.publish_counters(None, self.test_data)
    handler = publisher.publisher_logger.handlers[0]
    # assertIsInstance gives a clearer failure message than
    # assertTrue(isinstance(...)).
    self.assertIsInstance(handler, logging.handlers.RotatingFileHandler)
    self.assertEqual(
        [handler.maxBytes, handler.baseFilename, handler.backupCount],
        [50, '/tmp/log_file', 3])
    # The rotating file gets created since only allow 50 bytes.
    self.assertTrue(os.path.exists('/tmp/log_file.1'))

    # Test missing max bytes, backup count configurations
    parsed_url = urlsplit('file:///tmp/log_file_plain')
    publisher = file.FilePublisher(parsed_url)
    publisher.publish_counters(None, self.test_data)
    handler = publisher.publisher_logger.handlers[0]
    self.assertIsInstance(handler, logging.handlers.RotatingFileHandler)
    self.assertEqual(
        [handler.maxBytes, handler.baseFilename, handler.backupCount],
        [0, '/tmp/log_file_plain', 0])
    # No size limit here, so only the plain file itself exists.
    # (The original comment about "50 bytes" was a copy-paste error.)
    self.assertTrue(os.path.exists('/tmp/log_file_plain'))

    # Test invalid max bytes, backup count configurations
    parsed_url = urlsplit(
        'file:///tmp/log_file_bad?max_bytes=yus&backup_count=5y')
    publisher = file.FilePublisher(parsed_url)
    publisher.publish_counters(None, self.test_data)
    # Bad values mean the logger is never set up.
    self.assertIsNone(publisher.publisher_logger)
def test_file_publisher_maxbytes(self):
    """max_bytes/backup_count from the URL configure the rotating handler."""
    # Test valid configurations
    tempdir = tempfile.mkdtemp()
    log_path = '%s/log_file' % tempdir
    url = 'file://%s?max_bytes=50&backup_count=3' % log_path
    publisher = file.FilePublisher(netutils.urlsplit(url))
    publisher.publish_samples(None, self.test_data)

    rotating = publisher.publisher_logger.handlers[0]
    self.assertIsInstance(rotating, logging.handlers.RotatingFileHandler)
    self.assertEqual(
        [50, log_path, 3],
        [rotating.maxBytes, rotating.baseFilename, rotating.backupCount])
    # Only 50 bytes are allowed per file, so rotation must have produced
    # a first backup file.
    self.assertTrue(os.path.exists('%s.1' % log_path))
def test_file_publisher_csv(self):
    """format=csv writes a file the csv sniffer recognizes as CSV."""
    # Test configurations with format=csv
    tempdir = tempfile.mkdtemp()
    name = '%s/log_file' % tempdir
    parsed_url = netutils.urlsplit(
        'file://%s?max_bytes=50&backup_count=3&format=csv' % name)
    publisher = file.FilePublisher(self.CONF, parsed_url)
    publisher.publish_samples(self.test_data)

    self.assertTrue(os.path.exists(name))
    # Use a context manager: the original left csv_fileh open, did a
    # pointless seek(0), and on csv.Error called
    # self.assertRaises(csv.Error, e) — a misuse (assertRaises expects a
    # callable, not an exception instance). A sniff failure should fail
    # the test explicitly.
    with open(name, 'rb') as csv_fileh:
        try:
            dialect = csv.Sniffer().sniff(csv_fileh.read(1024).decode())
        except csv.Error as e:
            self.fail("Published file is not valid CSV: %s" % e)
    self.assertIsNotNone(dialect)
def test_file_publisher(self):
    """Default config (no max_bytes/backup_count) writes samples verbatim."""
    # Test missing max bytes, backup count configurations
    tempdir = tempfile.mkdtemp()
    name = '%s/log_file_plain' % tempdir
    parsed_url = netutils.urlsplit('file://%s' % name)
    publisher = file.FilePublisher(parsed_url)
    publisher.publish_samples(None, self.test_data)

    handler = publisher.publisher_logger.handlers[0]
    self.assertIsInstance(handler, logging.handlers.RotatingFileHandler)
    # Missing query parameters default both limits to 0 (no rotation).
    self.assertEqual(
        [0, name, 0],
        [handler.maxBytes, handler.baseFilename, handler.backupCount])

    # Test the content is corrected saved in the file
    self.assertTrue(os.path.exists(name))
    with open(name, 'r') as f:
        content = f.read()
    for sample_item in self.test_data:
        # assertIn reports the missing value on failure, unlike
        # assertTrue(x in content).
        self.assertIn(sample_item.id, content)
        self.assertIn(sample_item.timestamp, content)