def test_global_messages_templatetag(self):
    """The global_messages tag lists failing backups, capped at 10."""
    context = Context()
    template = Template('{% load planb %}{% global_messages %}')
    self.assertEqual(template.render(context), '')
    # A fileset carrying the BOGODATE hack (used to trigger email
    # updates) must not show up as a failure message.
    FilesetFactory(first_fail=BOGODATE)
    self.assertEqual(template.render(context), '')

    failing_since = timezone.now()
    group = HostGroupFactory()
    for _ in range(3):
        BackupRunFactory(fileset=FilesetFactory(
            hostgroup=group, first_fail=failing_since))
    self.assertEqual(
        template.render(context).count('Backup failure since'), 3)

    # Push the failure count past the listing limit of ten.
    for _ in range(8):
        BackupRunFactory(fileset=FilesetFactory(
            hostgroup=group, first_fail=failing_since))
    rendered = template.render(context)
    self.assertIn(
        'There are lots of failed backups. Listing only the oldest 10.',
        rendered)
    self.assertEqual(rendered.count('Backup failure since'), 10)
def test_rename_fileset(self):
    """Renaming the hostgroup leads to a zfs dataset rename."""
    fileset = FilesetFactory(storage_alias='zfs')
    old_name = fileset.storage.get_dataset_name(
        fileset.hostgroup.name, fileset.friendly_name)
    self.assertEqual(old_name, fileset.dataset_name)

    fileset.hostgroup.name = 'some-other'
    fileset.hostgroup.save()
    new_name = fileset.storage.get_dataset_name(
        fileset.hostgroup.name, fileset.friendly_name)
    with patch.object(fileset.storage, '_perform_binary_command') as cmd:
        cmd.return_value = '/' + old_name  # dataset mountpoint.
        fileset.rename_dataset(new_name)
        cmd.assert_called_with(('rename', old_name, new_name))
    self.assertEqual(fileset.dataset_name, new_name)
def test_breport(self):
    """The breport command mails a report with an HTML attachment."""
    fileset = FilesetFactory(
        friendly_name='desktop01.local', hostgroup__name='local',
        hostgroup__notify_email='test@local', total_size_mb=94950,
        last_ok='2019-11-29T13:47Z', last_run='2019-11-29T13:47Z')
    BackupRunFactory(
        fileset=fileset, success=True, total_size_mb=94950,
        snapshot_size_mb=84950,
        snapshot_size_listing=TEST_DUTREE_LISTING)
    self.run_command('breport', output='email')

    message = mail.outbox[0]
    self.assertEqual(message.to, ['test@local'])
    self.assertEqual(
        message.subject, 'Example Company backup report "local"')
    self.assertEqual(message.body, TEST_BREPORT)
    # The attachment is a (filename, content, mimetype) triple.
    filename, content, mimetype = message.attachments[0]
    self.assertEqual(filename, 'pretty_report.html')
    self.assertIn(
        '<title>Plan B backup report for "local"</title>', content)
    self.assertEqual(mimetype, 'text/html')
def test_admin_model(self):
    """Smoke test the admin changelists, enqueue action and renames."""
    superuser = UserFactory(is_staff=True, is_superuser=True)
    self.client.force_login(superuser)
    hostgroup = HostGroupFactory()
    fileset = FilesetFactory(hostgroup=hostgroup)
    backuprun = BackupRunFactory(fileset=fileset)

    # The changelists render the related objects.
    response = self.client.get('/planb/hostgroup/')
    row = response.context['results'][0]
    self.assertIn(hostgroup.name, row[1])
    self.assertIn(fileset.friendly_name, row[3])
    response = self.client.get('/planb/fileset/')
    self.assertIn(
        fileset.friendly_name, response.context['results'][0][1])
    response = self.client.get('/planb/backuprun/')
    self.assertIn(
        str(backuprun.fileset), response.context['results'][0][2])

    # The enqueue admin action queues the selected filesets.
    response = self.client.post('/planb/fileset/', {
        'action': 'enqueue_multiple',
        '_selected_action': [fileset.pk],
    }, follow=True)
    self.assertRedirects(response, '/planb/fileset/')
    self.assertContains(
        response, 'The selection has been queued for immediate backup')

    # Changing the hostgroup name spawns rename tasks for its filesets.
    response = self.client.post(
        '/planb/hostgroup/{}/change/'.format(hostgroup.pk),
        {'name': 'my-group', '_save': 'Save'}, follow=True)
    self.assertContains(
        response,
        'A rename task has been queued for all filesets in the hostgroup')

    # Changing the fileset name spawns a rename task for the fileset.
    response = self.client.post(
        '/planb/fileset/{}/change/'.format(fileset.pk), {
            'friendly_name': 'my-host',
            'hostgroup': hostgroup.pk,
            'daily_retention': 1,
            'weekly_retention': 1,
            'monthly_retention': 1,
            'yearly_retention': 1,
        }, follow=True)
    self.assertContains(
        response, 'A rename task has been queued for the fileset')
def test_confexport(self):
    """The confexport command dumps the config as JSON and as YAML."""
    RsyncConfigFactory(fileset=FilesetFactory(
        friendly_name='desktop', hostgroup__name='local'))
    # Both output formats must match their expected dumps.
    for fmt, expected in (
            ('json', TEST_CONFEXPORT_JSON),
            ('yaml', TEST_CONFEXPORT_YAML)):
        stdout, stderr = self.run_command('confexport', output=fmt)
        self.assertEqual(stdout, expected)
def test_blist(self):
    """The blist command lists filesets, plain or zabbix-formatted."""
    # Without any filesets the listing is empty.
    stdout, _ = self.run_command('blist')
    self.assertEqual(stdout, '\n')

    hostgroup = HostGroupFactory(name='local')
    web01 = FilesetFactory(friendly_name='web01', hostgroup=hostgroup)
    db01 = FilesetFactory(friendly_name='db01', hostgroup=hostgroup)
    stdout, _ = self.run_command('blist', zabbix=True)
    self.assertEqual(
        '{"data": [{"{#BKNAME}": "local-db01"}, {"{#BKNAME}": '
        '"local-web01"}]}\n', stdout)

    ExecConfigFactory(fileset=web01, transport_command='/bin/magic')
    RsyncConfigFactory(fileset=db01, host='database1.local')
    FilesetFactory(friendly_name='stats', hostgroup__name='remote')
    stdout, _ = self.run_command('blist')
    self.assertEqual(stdout, TEST_BLIST)
def test_bclone(self):
    """The bclone command copies a fileset to a new name and host."""
    original = FilesetFactory()
    RsyncConfigFactory(fileset=original)
    stdout, _ = self.run_command(
        'bclone', original.pk, 'fileset-clone', 'copy.host.co')
    clone = Fileset.objects.get(friendly_name='fileset-clone')
    # assertIn, because the output also holds a backup queue message.
    self.assertIn('Cloned {} to {}'.format(original, clone), stdout)
    self.assertEqual(clone.get_transport().host, 'copy.host.co')
def test_bqueue(self):
    """bqueueall enqueues all filesets; bqueueflush drops the queues."""
    # The task queue may hold leftovers from other tests; start clean.
    self.run_command('bqueueflush')
    fileset = FilesetFactory()
    stdout, _ = self.run_command('bqueueall')
    self.assertIn('Enqueued {}'.format(fileset), stdout)
    stdout, _ = self.run_command('bqueueflush')
    self.assertIn('Dropped 1 jobs from Task queue', stdout)
    self.assertIn('Dropped 1 jobs from DB queue', stdout)
def test_slist(self):
    """The slist command reports per-dataset disk usage."""
    # A standalone clean dummy storage keeps this test isolated.
    storage = DummyStorage({'NAME': 'DummyPool I'}, 'dummy')
    test_pools = {'dummy': storage}
    with patch('planb.management.commands.slist.pools', test_pools), \
            patch('planb.models.pools', test_pools):
        for name, disk_usage in (
                ('storage', 84883399164), ('desktop', 60630999402)):
            fileset = FilesetFactory(
                friendly_name=name, hostgroup__name='local',
                storage_alias='dummy')
            fileset.get_dataset().set_disk_usage(disk_usage)
        # Also list a dataset that is not mapped to any fileset.
        storage.get_dataset('cold/other_host').set_disk_usage(
            271626877324)
        stdout, stderr = self.run_command('slist')
        self.assertEqual(stdout, TEST_SLIST)
def test_snapshot_create(self):
    """snapshot_create only makes the retention-enabled snapshots."""
    fileset = FilesetFactory(storage_alias='dummy')
    # A clean dataset gets all four enabled snapshot types at once.
    self.assertEqual(len(fileset.snapshot_create()), 4)
    # With snapshots in place, only a fresh daily is due.
    self.assertEqual(len(fileset.snapshot_create()), 1)

    limited = FilesetFactory(
        storage_alias='dummy', monthly_retention=False,
        yearly_retention=False)
    # Here only the daily and weekly snapshots are enabled.
    self.assertEqual(len(limited.snapshot_create()), 2)
    self.assertEqual(len(limited.snapshot_create()), 1)