def test_get_s3_dst_prefix(self):
    logfilehandler = LogFileHandler(self.rds_instance, 'bucket', 'logs_prefix')
    self.assertEqual(
        'logs_prefix/{}/{}'.format(self.rds_instance.engine,
                                   self.rds_instance.name),
        logfilehandler.get_s3_dst_prefix_for_instance())
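# A minimal sketch of the prefix builder exercised above, assuming the handler
# keeps its constructor arguments as self.dst_prefix and self.rds_instance;
# those attribute names are assumptions, only the resulting
# '<prefix>/<engine>/<name>' layout is taken from the test.
def get_s3_dst_prefix_for_instance(self):
    # e.g. 'logs_prefix/postgres/mydb' for a postgres instance named 'mydb'
    return '{}/{}/{}'.format(self.dst_prefix,
                             self.rds_instance.engine,
                             self.rds_instance.name)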
def test_discover_rds_logfiles_with_no_logfiles(
        self, describe_logfiles_of_instance):
    # emulate the response of the AWS API call DescribeDBLogFiles
    describe_logfiles_of_instance.return_value = []
    logfilehandler = LogFileHandler(self.rds_instance, 'foo', 'bar')
    result = logfilehandler.discover_logfiles_in_rds()
    describe_logfiles_of_instance.assert_called_with(
        self.rds_instance.name)
    self.assertEqual(set(), result)
def setUpClass(self):
    (self.function_stack_name, self.bucket_stack_name,
     self.lambda_function_name, self.bucket_name) = get_env()
    self.dst_prefix = utils.get_temp_prefix()
    logger.info('using s3 dst: {}/{}'.format(self.bucket_name,
                                             self.dst_prefix))
    self.rds_instance = utils.get_one_rds_instance()
    self.lfh = LogFileHandler(self.rds_instance, self.bucket_name,
                              self.dst_prefix)
def test_discover_s3_logfiles_with_no_logfiles(self):
    # bucket must exist
    self.s3.create_bucket(Bucket='bucket')
    logfilehandler = LogFileHandler(self.rds_instance, 'bucket', 'logs_prefix')
    # destination for logfiles must exist, so create it
    setup_s3_destination(logfilehandler.dst_bucket,
                         logfilehandler.dst_prefix_instance)
    self.assertEqual(set(), logfilehandler.discover_logfiles_in_s3())
def test_log_file_handler_discover_rds_logfiles(self):
    instance = get_one_rds_instance()
    client = boto3.client('rds')
    response = client.describe_db_log_files(
        DBInstanceIdentifier=instance.name)
    logfilehandler = LogFileHandler(RDSInstance(instance.name), 'b', 'p')
    discovered_logfiles = logfilehandler.discover_logfiles_in_rds()
    self.assertEqual(type(set()), type(discovered_logfiles))
    self.assertEqual(len(response['DescribeDBLogFiles']),
                     len(discovered_logfiles))
def test_discover_logfiles_in_s3(self):
    # bucket must exist
    self.s3.create_bucket(Bucket='bucket')
    self.s3.put_object(Bucket='bucket', Key='logs/inst1/f1')
    self.s3.put_object(Bucket='bucket', Key='logs/inst1/f2')
    # now some logfile for another instance
    self.s3.put_object(Bucket='bucket', Key='logs/other/f1')
    logfiles = {
        S3LogFile('f1', '', '', size=0),
        S3LogFile('f2', '', '', size=0)
    }
    logfilehandler = LogFileHandler(RDSInstance('inst1'), 'bucket', 'logs')
    self.assertSetEqual(logfiles,
                        logfilehandler.discover_logfiles_in_s3())
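# A minimal sketch of discover_logfiles_in_s3 consistent with the tests above,
# assuming the handler keeps dst_bucket, dst_prefix_instance and a boto3 S3
# client as self.s3_client (the attribute names are assumptions); it lists the
# keys below the instance prefix and returns one S3LogFile per object found.
def discover_logfiles_in_s3(self):
    found = set()
    paginator = self.s3_client.get_paginator('list_objects_v2')
    for page in paginator.paginate(Bucket=self.dst_bucket,
                                   Prefix=self.dst_prefix_instance + '/'):
        for obj in page.get('Contents', []):
            name = obj['Key'].rsplit('/', 1)[-1]
            if name:  # skip the "folder" marker object itself
                found.add(S3LogFile(name, self.dst_bucket,
                                    self.dst_prefix_instance,
                                    size=obj['Size']))
    return found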
def test_setup_s3_destination_on_empty_bucket(self):
    self.s3.create_bucket(Bucket='bucket')
    logfilehandler = LogFileHandler(self.rds_instance, 'bucket', 'logs_prefix')
    setup_s3_destination(logfilehandler.dst_bucket,
                         logfilehandler.dst_prefix_instance)
    folders = list_folders(bucket='bucket', prefix='logs_prefix')
    self.assertTrue({self.rds_instance.name}.issubset(folders))
def test_setup_s3_destination_with_existing(self):
    self.s3.create_bucket(Bucket='bucket')
    self.s3.put_object(Bucket='bucket', Key='logs_prefix/')
    logfilehandler = LogFileHandler(self.rds_instance, 'bucket', 'logs_prefix')
    setup_s3_destination(logfilehandler.dst_bucket,
                         logfilehandler.dst_prefix_instance)
    folders = list_folders(bucket='bucket', prefix='logs_prefix')
    # folder name is the rds_instance id (see test_get_s3_dst_prefix())
    self.assertTrue({self.rds_instance.name}.issubset(folders))
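# A minimal sketch of setup_s3_destination, assuming it takes the bucket name
# and the per-instance prefix and simply creates the empty "folder" marker
# object, which is idempotent and therefore safe when a key such as
# 'logs_prefix/' already exists (as in the test above); the boto3 client
# creation and the trailing-slash convention are assumptions.
def setup_s3_destination(dst_bucket, dst_prefix_instance):
    s3 = boto3.client('s3')
    # an empty object whose key ends in '/' appears as a folder in the console
    s3.put_object(Bucket=dst_bucket, Key=dst_prefix_instance + '/')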
def test_discover_rds_logfiles(self, describe_logfiles_of_instance):
    # emulate the response of the AWS API call DescribeDBLogFiles
    describe_logfiles_of_instance.return_value = [{
        'LogFileName': 'file1',
        'LastWritten': 123,
        'Size': 124
    }, {
        'LogFileName': 'file2',
        'LastWritten': 123,
        'Size': 124
    }]
    logfilehandler = LogFileHandler(self.rds_instance, 'foo', 'bar')
    result = logfilehandler.discover_logfiles_in_rds()
    describe_logfiles_of_instance.assert_called_with(
        self.rds_instance.name)
    self.assertEqual(
        {
            RdsLogFile('file1', '', size=124),
            RdsLogFile('file2', '', size=124)
        }, result)
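# A minimal sketch of discover_logfiles_in_rds matching the mocked tests above,
# assuming describe_logfiles_of_instance(name) returns the DescribeDBLogFiles
# entries as a list of dicts; how RdsLogFile stores the instance name is an
# assumption, only the name/size mapping is taken from the tests.
def discover_logfiles_in_rds(self):
    entries = describe_logfiles_of_instance(self.rds_instance.name)
    return {
        RdsLogFile(entry['LogFileName'], self.rds_instance.name,
                   size=entry['Size'])
        for entry in entries
    }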
class TestLogFile(unittest.TestCase):
    @classmethod
    def setUpClass(self):
        (self.function_stack_name, self.bucket_stack_name,
         self.lambda_function_name, self.bucket_name) = get_env()
        self.dst_prefix = utils.get_temp_prefix()
        logger.info('using s3 dst: {}/{}'.format(self.bucket_name,
                                                 self.dst_prefix))
        self.rds_instance = utils.get_one_rds_instance()
        self.lfh = LogFileHandler(self.rds_instance, self.bucket_name,
                                  self.dst_prefix)

    @classmethod
    def tearDownClass(self):
        logger.info('cleanup: {}:{}'.format(self.bucket_name,
                                            self.dst_prefix))
        utils.delete_prefix(self.bucket_name, self.dst_prefix)

    def setUp(self):
        self.logfile_name = utils.choose_one_random_logfile_does_not_exists_in_s3(
            self.rds_instance.name, self.bucket_name)

    def test_logfile_rds_has_same_size_in_local(self):
        src = RdsLogFile(self.logfile_name, self.rds_instance.name)

    def test_logfile_equals_size(self):
        src = RdsLogFile(self.logfile_name, self.rds_instance.name)
        dst = self.lfh.copy(src)
        src.fetch_size()
        dst.fetch_size()
        self.assertEqual(src, dst)
        self.assertEqual(src.size, dst.size)

    def test_logfile_not_equals_size(self):
        src = RdsLogFile(self.logfile_name, self.rds_instance.name)
        dst = self.lfh.get_s3logfile(self.logfile_name)
        dst.write('')
        src.fetch_size()
        dst.fetch_size()
        self.assertNotEqual(src, dst)
        self.assertNotEqual(src.size, dst.size)
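# A minimal sketch of the equality semantics the tests above rely on, assuming
# LogFile is the common base of RdsLogFile and S3LogFile and that two logfiles
# compare equal when name and size match; the constructor and attribute details
# are assumptions, only the name/size comparison is taken from the tests.
class LogFile:
    def __init__(self, name, size=None):
        self.name = name
        self.size = size

    def __eq__(self, other):
        # equal if the same logfile name has the same size, regardless of
        # whether it lives in RDS or in S3
        return (self.name, self.size) == (other.name, other.size)

    def __hash__(self):
        return hash((self.name, self.size))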
def process_instance(self, instance):
    logging.info('processing rds-instance: \'%s\'', instance.name)
    logfilehandler = LogFileHandler(instance, self.s3_dst_bucket,
                                    self.s3_dst_prefix_for_logs)
    setup_s3_destination(logfilehandler.dst_bucket,
                         logfilehandler.dst_prefix_instance)
    logfiles_to_copy = self.discover_logfiles_to_copy(logfilehandler)
    logging.info('going to copy %d new logfiles ...', len(logfiles_to_copy))
    self.sync_logfiles(logfiles_to_copy, logfilehandler)
    # write metric / logentry
    logging.info('synced %d files for \'%s\'', len(logfiles_to_copy),
                 instance.name)
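# A minimal sketch of discover_logfiles_to_copy as it is used above, assuming
# it only combines the two discovery calls with the set-difference helper that
# the logfiles_to_copy unit tests below exercise; that this method lives on the
# same class as process_instance is an assumption.
def discover_logfiles_to_copy(self, logfilehandler):
    logfiles_in_rds = logfilehandler.discover_logfiles_in_rds()
    logfiles_in_s3 = logfilehandler.discover_logfiles_in_s3()
    return LogFileHandler.logfiles_to_copy(logfiles_in_rds, logfiles_in_s3)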
def test_logfiles_to_copy_size_diff(self):
    src = {RdsLogFile('foo', '', size=123)}
    dst = {S3LogFile('foo', '', '', size=0)}
    self.assertEqual({LogFile('foo', size=123)},
                     LogFileHandler.logfiles_to_copy(src, dst))
def test_logfiles_to_copy_new_files_on_src_old_on_dst(self):
    src = {RdsLogFile('foo', ''), RdsLogFile('bar', '')}
    dst = {S3LogFile('foo', '', ''), S3LogFile('xyz', '', '')}
    self.assertEqual({LogFile('bar')},
                     LogFileHandler.logfiles_to_copy(src, dst))
def test_logfiles_to_copy_empty_dst(self):
    dst = set()
    src = {RdsLogFile('foo', '')}
    self.assertEqual(src, LogFileHandler.logfiles_to_copy(src, dst))
    src = {RdsLogFile('foo', ''), RdsLogFile('bar', '')}
    self.assertEqual(src, LogFileHandler.logfiles_to_copy(src, dst))
def test_logfiles_to_copy_empty_src(self):
    src = set()
    dst = set()
    self.assertEqual(set(), LogFileHandler.logfiles_to_copy(src, dst))
    dst = {S3LogFile('foo', '', '')}
    self.assertEqual(set(), LogFileHandler.logfiles_to_copy(src, dst))
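# A minimal sketch of LogFileHandler.logfiles_to_copy consistent with the four
# tests above, assuming logfiles compare and hash by name and size: anything in
# the RDS set without a matching entry in the S3 set (a new name or a size
# mismatch) still needs to be copied.
@staticmethod
def logfiles_to_copy(logfiles_in_rds, logfiles_in_s3):
    # plain set difference; equality by (name, size) does the matching
    return logfiles_in_rds - logfiles_in_s3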