Example #1
    def test_backup_missing_required_args(self):
        with six.assertRaisesRegex(self, ValueError, 'Required argument'):
            backup.Backup(backup_type='globals')
        with six.assertRaisesRegex(self, ValueError, 'Required argument'):
            backup.Backup(backup_type='daily', backup_dir='foo', conn_info={})
        with six.assertRaisesRegex(self, ValueError, 'Required argument'):
            backup.Backup(backup_type='daily', database='foo', conn_info={})
        with six.assertRaisesRegex(self, ValueError, 'Required argument'):
            backup.Backup(backup_dir='foo', database='foo', conn_info={})
Example #2
    def test_backup_filename_invalid_suffix(self):
        with six.assertRaisesRegex(self, ValueError, 'Invalid backup suffix'):
            f = 'localhost.5432.adb.aschema.2016-12-04.daily.bad_suffix'
            backup.Backup(filename=f)
Example #3
    def test_backup_filename_invalid_date(self):
        with six.assertRaisesRegex(self, ValueError, 'invalid date'):
            f = 'localhost.5432.adb.aschema.12-04-2016.daily.pg_dump_Fc'
            backup.Backup(filename=f)
Example #4
    def test_backup_filename_invalid(self):
        with six.assertRaisesRegex(self, ValueError, 'Invalid.*filename'):
            backup.Backup(filename='a.backup.file')
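The filenames checked in Examples #2-#4 follow a dotted layout of the form hostname.port.database.schema.date.backup_type.suffix. backup.Backup's actual parsing code is not shown on this page; the snippet below is only a minimal sketch, assuming that layout and an assumed whitelist of suffixes and types, of how such a name could be split apart and rejected with the kinds of ValueError messages the tests look for.

    import datetime

    # Assumed whitelists; the real module may define these differently.
    VALID_SUFFIXES = ('pg_dump_Fc', 'sql')
    VALID_TYPES = ('globals', 'daily', 'weekly', 'monthly')

    def parse_backup_filename(filename):
        """Split a dotted backup filename and validate its parts (illustrative)."""
        parts = filename.split('.')
        if len(parts) != 7:
            raise ValueError('Invalid backup filename: {}'.format(filename))
        hostname, port, database, schema, date_str, backup_type, suffix = parts
        if suffix not in VALID_SUFFIXES:
            raise ValueError('Invalid backup suffix: {}'.format(suffix))
        if backup_type not in VALID_TYPES:
            raise ValueError('backup_type {!r} not in {}'.format(backup_type,
                                                                 VALID_TYPES))
        try:
            date = datetime.datetime.strptime(date_str, '%Y-%m-%d').date()
        except ValueError:
            raise ValueError('invalid date in filename: {}'.format(date_str))
        return {'hostname': hostname, 'port': port, 'database': database,
                'schema': schema, 'date': date, 'backup_type': backup_type,
                'suffix': suffix}

With that sketch, 'a.backup.file' fails the part count, '...daily.bad_suffix' fails the suffix check, and '...12-04-2016...' fails the date parse, mirroring the three error patterns asserted above.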
Example #5
    def test_backup_invalid_backup_type(self):
        with six.assertRaisesRegex(self, ValueError, 'backup_type.*not in'):
            backup.Backup(backup_type='bad',
                          database='foo',
                          backup_dir='foo',
                          conn_info={'hostname': 'foo'})
Example #6
    def test_backup_invalid_conn_info(self):
        with six.assertRaisesRegex(self, ValueError, 'Invalid key'):
            backup.Backup(backup_type='daily',
                          database='foo',
                          backup_dir='foo',
                          conn_info={'bad': 'juju'})
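Examples #1, #5 and #6 exercise three kinds of keyword validation in the constructor: required arguments that must all be present, a whitelist of backup types, and a whitelist of conn_info keys. The helper below is a minimal sketch of such checks, not the module's real implementation; BACKUP_TYPES and CONN_INFO_KEYS are assumptions chosen only to satisfy the error-message patterns the tests assert on.

    # Assumed whitelists, chosen only to match the regexes in the tests above.
    BACKUP_TYPES = ('globals', 'daily', 'weekly', 'monthly')
    CONN_INFO_KEYS = ('hostname', 'port', 'username', 'password')

    def validate_backup_kwargs(backup_type=None, database=None,
                               backup_dir=None, conn_info=None):
        """Illustrative constructor-style validation of keyword arguments."""
        required = {'backup_type': backup_type, 'database': database,
                    'backup_dir': backup_dir, 'conn_info': conn_info}
        for name, value in required.items():
            if value is None:
                raise ValueError('Required argument missing: {}'.format(name))
        if backup_type not in BACKUP_TYPES:
            raise ValueError('backup_type {!r} not in {}'.format(backup_type,
                                                                 BACKUP_TYPES))
        for key in conn_info:
            if key not in CONN_INFO_KEYS:
                raise ValueError('Invalid key in conn_info: {}'.format(key))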
Example #7
    def check_files(self,
                    no_count_checks=False,
                    globals_count=1,
                    daily_count=1,
                    weekly_count=1,
                    monthly_count=1,
                    schemas=None):
        """Assert expected counts and return day, week, and month numbers.

        Returns {'daily': {'schemalabel1': [day numbers], ...},
                 'weekly': {'schemalabel1': [week numbers], ...},
                 'monthly': {'schemalabel1': [week numbers], ...},

        If `no_count_checks` is True, then the expected count values are not
        required or checked; the only action is to return the "numbers".
        returned.

        If `schemas` is not None or empty, it is expected that there will be one file of each type
        per schema, plus one for the empty database; the `schemalabel` of the latter is
        'no_schemas'.

        If `schemas` is None or empty, it is expected that there will be one file of each type,
        and the `schemalabel` will be 'all_schemas'.

        :param globals_count:
        :param daily_count:
        :param weekly_count:
        :param monthly_count:
        :param schemas:
        :return: see above
        """
        files = os.listdir(self.tmpdir)

        hostport = '{}.{}'.format(
            backup.hostname_label(self.config.get('backup', 'hostname')),
            self.config.get('backup', 'port'))

        if not no_count_checks:
            tpl = r'{}\.database_na\.no_schemas\..*\.globals.sql'
            globals_pat = tpl.format(hostport)
            global_files = pg_utils.filter_collection(files,
                                                      include=globals_pat)
            self.assertEqual(len(global_files), globals_count)

        if schemas:
            num_schemas = len(schemas)
        else:
            num_schemas = 0

        tpl = r'{hostport}\.test\..*\..*\.{type}.pg_dump_Fc'

        daily_pat = tpl.format(hostport=hostport, type='daily')
        daily_files = pg_utils.filter_collection(files, include=daily_pat)
        day_numbers = collections.defaultdict(list)
        for f in backup.sort_files(daily_files):
            this_backup = backup.Backup(filename=f)
            day_number = this_backup.date
            schema = this_backup.schema_label
            # Make sure we don't have redundant daily backups (no-brainer)
            self.assertNotIn(day_number, day_numbers[schema])
            day_numbers[schema].append(day_number)

        if not no_count_checks:
            self.assertEqual(len(daily_files),
                             daily_count * (num_schemas + 1))

        weekly_pat = tpl.format(hostport=hostport, type='weekly')
        weekly_files = pg_utils.filter_collection(files, include=weekly_pat)
        week_numbers = collections.defaultdict(list)
        for f in backup.sort_files(weekly_files):
            this_backup = backup.Backup(filename=f)
            week_number = this_backup.week_number()
            schema = this_backup.schema_label
            # Make sure we don't have redundant weekly backups
            self.assertNotIn(week_number, week_numbers[schema])
            week_numbers[schema].append(week_number)

        if not no_count_checks:
            self.assertEqual(len(weekly_files),
                             weekly_count * (num_schemas + 1))

        monthly_pat = tpl.format(hostport=hostport, type='monthly')
        monthly_files = pg_utils.filter_collection(files, include=monthly_pat)
        month_numbers = collections.defaultdict(list)
        for f in backup.sort_files(monthly_files):
            this_backup = backup.Backup(filename=f)
            month_number = this_backup.month_number()
            schema = this_backup.schema_label
            # Make sure we don't have redundant monthly backups
            self.assertNotIn(month_number, month_numbers[schema])
            month_numbers[schema].append(month_number)

        if not no_count_checks:
            self.assertEqual(len(monthly_files),
                             monthly_count * (num_schemas + 1))

        return {
            'daily': day_numbers,
            'weekly': week_numbers,
            'monthly': month_numbers
        }
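check_files is a shared helper rather than a test in itself. A calling test method might look roughly like the sketch below; run_backup and the specific counts are assumptions used only for illustration and are not part of the code shown above.

    def test_daily_backup_creates_expected_files(self):
        # Run one backup pass (run_backup is an assumed helper on this test
        # class), then check that exactly one file of each type was written.
        self.run_backup(backup_type='daily')
        numbers = self.check_files(globals_count=1,
                                   daily_count=1,
                                   weekly_count=1,
                                   monthly_count=1)
        # Every schema label should have recorded exactly one day number.
        for day_numbers in numbers['daily'].values():
            self.assertEqual(len(day_numbers), 1)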