def test_get_s3_dst_prefix(self):
    """The S3 destination prefix is '<prefix>/<engine>/<instance name>'."""
    handler = LogFileHandler(self.rds_instance, 'bucket', 'logs_prefix')
    expected = 'logs_prefix/{}/{}'.format(self.rds_instance.engine,
                                          self.rds_instance.name)
    self.assertEqual(expected, handler.get_s3_dst_prefix_for_instance())
# Example 2
 def test_setup_s3_destination_on_empty_bucket(self):
     """setup_s3_destination creates the instance folder in a fresh bucket."""
     self.s3.create_bucket(Bucket='bucket')
     handler = LogFileHandler(self.rds_instance, 'bucket', 'logs_prefix')
     setup_s3_destination(handler.dst_bucket, handler.dst_prefix_instance)
     # the created folder is named after the rds instance
     created = list_folders(bucket='bucket', prefix='logs_prefix')
     self.assertTrue({self.rds_instance.name}.issubset(created))
# Example 3
 def test_discover_rds_logfiles_with_no_logfiles(
         self, describe_logfiles_of_instance):
     """An instance with no logfiles on the RDS side yields an empty set."""
     # emulate response of AWS api call part DescribeDBLogFiles
     describe_logfiles_of_instance.return_value = []
     handler = LogFileHandler(self.rds_instance, 'foo', 'bar')
     discovered = handler.discover_logfiles_in_rds()
     # discovery must query exactly this instance
     describe_logfiles_of_instance.assert_called_with(self.rds_instance.name)
     self.assertEqual(set(), discovered)
# Example 4
 @classmethod
 def setUpClass(cls):
     """Resolve stack/bucket names from the environment and build the
     LogFileHandler shared by every test in this class.

     NOTE(review): unittest invokes ``setUpClass`` on the class itself, so
     it must be a classmethod; the first parameter is therefore ``cls``
     (the original used ``self`` and showed no decorator). Attribute
     assignments land on the class either way, so behavior is unchanged.
     """
     (cls.function_stack_name, cls.bucket_stack_name,
      cls.lambda_function_name, cls.bucket_name) = get_env()
     cls.dst_prefix = utils.get_temp_prefix()
     logger.info('using s3 dst: {}/{}'.format(cls.bucket_name,
                                              cls.dst_prefix))
     cls.rds_instance = utils.get_one_rds_instance()
     cls.lfh = LogFileHandler(cls.rds_instance, cls.bucket_name,
                              cls.dst_prefix)
# Example 5
    def test_discover_s3_logfiles_with_no_logfiles(self):
        """An existing but empty destination yields an empty set."""
        # bucket must exist before the handler can look into it
        self.s3.create_bucket(Bucket='bucket')
        handler = LogFileHandler(self.rds_instance, 'bucket', 'logs_prefix')
        # destination for logfiles must exist as well, so create it first
        setup_s3_destination(handler.dst_bucket, handler.dst_prefix_instance)
        discovered = handler.discover_logfiles_in_s3()
        self.assertEqual(set(), discovered)
# Example 6
 def test_setup_s3_destination_with_existing(self):
     """setup_s3_destination also works when the prefix already exists."""
     self.s3.create_bucket(Bucket='bucket')
     # pre-create the prefix so setup runs against an existing destination
     self.s3.put_object(Bucket='bucket', Key='logs_prefix/')
     handler = LogFileHandler(self.rds_instance, 'bucket', 'logs_prefix')
     setup_s3_destination(handler.dst_bucket, handler.dst_prefix_instance)
     # foldername is rds_instance id (see test_get_s3_dst_prefix())
     created = list_folders(bucket='bucket', prefix='logs_prefix')
     self.assertTrue({self.rds_instance.name}.issubset(created))
# Example 7
    def test_log_file_handler_discover_rds_logfiles(self):
        """Discovery returns a set with one entry per RDS-reported logfile."""
        instance = get_one_rds_instance()
        # fetch the raw AWS answer to compare the count against
        response = boto3.client('rds').describe_db_log_files(
            DBInstanceIdentifier=instance.name)
        handler = LogFileHandler(RDSInstance(instance.name), 'b', 'p')
        discovered = handler.discover_logfiles_in_rds()
        # exact type check on purpose (matches original behavior)
        self.assertEqual(type(set()), type(discovered))
        self.assertEqual(len(response['DescribeDBLogFiles']),
                         len(discovered))
# Example 8
 def process_instance(self, instance):
     """Copy all new logfiles of one RDS instance to the S3 destination.

     Ensures the instance's destination prefix exists in S3, determines
     which logfiles have not been copied yet, and syncs them over.
     """
     logging.info('processing rds-instance: \'%s\'', instance.name)
     handler = LogFileHandler(instance, self.s3_dst_bucket,
                              self.s3_dst_prefix_for_logs)
     setup_s3_destination(handler.dst_bucket, handler.dst_prefix_instance)
     to_copy = self.discover_logfiles_to_copy(handler)
     logging.info('going to copy %d new logfiles ...', len(to_copy))
     self.sync_logfiles(to_copy, handler)
     # write metric / logentry
     logging.info('synced %d files for \'%s\'', len(to_copy),
                  instance.name)
# Example 9
    def test_discover_logfiles_in_s3(self):
        """Only keys under this instance's own prefix are discovered."""
        # bucket must exist
        self.s3.create_bucket(Bucket='bucket')
        # two logfiles for the instance under test ...
        for key in ('logs/inst1/f1', 'logs/inst1/f2'):
            self.s3.put_object(Bucket='bucket', Key=key)
        # ... and one belonging to another instance, which must be ignored
        self.s3.put_object(Bucket='bucket', Key='logs/other/f1')

        expected = {S3LogFile(name, '', '', size=0) for name in ('f1', 'f2')}
        handler = LogFileHandler(RDSInstance('inst1'), 'bucket', 'logs')
        self.assertSetEqual(expected, handler.discover_logfiles_in_s3())
# Example 10
 def test_discover_rds_logfiles(self, describe_logfiles_of_instance):
     """Each AWS logfile description maps onto one RdsLogFile."""
     # emulate response of AWS api call part DescribeDBLogFiles
     describe_logfiles_of_instance.return_value = [
         {'LogFileName': 'file1', 'LastWritten': 123, 'Size': 124},
         {'LogFileName': 'file2', 'LastWritten': 123, 'Size': 124},
     ]
     handler = LogFileHandler(self.rds_instance, 'foo', 'bar')
     discovered = handler.discover_logfiles_in_rds()
     # discovery must query exactly this instance
     describe_logfiles_of_instance.assert_called_with(
         self.rds_instance.name)
     expected = {
         RdsLogFile('file1', '', size=124),
         RdsLogFile('file2', '', size=124),
     }
     self.assertEqual(expected, discovered)