def test_does_not_run_createrepo_unnecessarily(self):
    """A second beaker-repo-update run must not rebuild an up-to-date repo."""
    osmajor = u'GreenBeretLinux99'
    with session.begin():
        lab_controller = data_setup.create_labcontroller(
                fqdn=u'dummylab.example.invalid')
        distro_tree = data_setup.create_distro_tree(
                osmajor=OSMajor.lazy_create(osmajor=osmajor),
                harness_dir=False,
                lab_controllers=[lab_controller])
    remote_harness_dir = tempfile.mkdtemp(suffix='remote')
    self.addCleanup(shutil.rmtree, remote_harness_dir)
    local_harness_dir = tempfile.mkdtemp(suffix='local')
    self.addCleanup(shutil.rmtree, local_harness_dir)
    self._create_remote_harness(remote_harness_dir, osmajor)
    # First run: the repo gets built.
    run_command('repo_update.py', 'beaker-repo-update',
            ['-b', 'file://%s/' % remote_harness_dir, '-d', local_harness_dir],
            ignore_stderr=True)
    repodata_dir = os.path.join(local_harness_dir, osmajor, 'repodata')
    mtime = os.path.getmtime(repodata_dir)
    # Second run: the repodata mtime must be untouched.  The tiny sleep
    # guarantees a rebuild would produce a different mtime.
    time.sleep(0.001)
    run_command('repo_update.py', 'beaker-repo-update',
            ['-b', 'file://%s/' % remote_harness_dir, '-d', local_harness_dir],
            ignore_stderr=True)
    self.assertEquals(os.path.getmtime(repodata_dir), mtime)
def test_exclude_nonexistent_osmajor(self):
    """An OSMajor with no distro trees must not get a local harness dir."""
    with session.begin():
        osmajor = OSMajor.lazy_create(osmajor="exist")
        lab_controller = data_setup.create_labcontroller(
                fqdn=u'dummylab.example.invalid')
        distro_tree = data_setup.create_distro_tree(
                osmajor=osmajor.osmajor,
                harness_dir=False,
                lab_controllers=[lab_controller])
        # This OSMajor exists in the database but has no distro trees.
        nonexistent_osmajor = OSMajor.lazy_create(osmajor=u'notexist')
    remote_harness_dir = tempfile.mkdtemp(suffix='remote')
    self.addCleanup(shutil.rmtree, remote_harness_dir)
    local_harness_dir = tempfile.mkdtemp(suffix='local')
    self.addCleanup(shutil.rmtree, local_harness_dir)
    self._create_remote_harness(remote_harness_dir, osmajor.osmajor)
    run_command('repo_update.py', 'beaker-repo-update',
            ['-b', 'file://%s/' % remote_harness_dir, '-d', local_harness_dir],
            ignore_stderr=True)
    # Only the OSMajor with a distro tree should have been synced.
    self.assertTrue(
            os.path.exists(os.path.join(local_harness_dir, osmajor.osmajor)))
    self.assertFalse(
            os.path.exists(
                os.path.join(local_harness_dir, nonexistent_osmajor.osmajor)))
def test_update_harness_repos(self):
    """Exercise the update_repo() call end to end.

    Confirms that every harness repo which is supposed to be synced
    actually appears in the local harness directory.  Repo metadata
    itself is not inspected.
    """
    self._create_remote_harness('foobangmajor')
    self._create_remote_harness('foobazmajor')
    faux_local_harness = tempfile.mkdtemp('local_harness')
    self.addCleanup(shutil.rmtree, faux_local_harness)
    with session.begin():
        lab_controller = data_setup.create_labcontroller(
                fqdn=u'dummylab.example.invalid')
        distro_tree = data_setup.create_distro_tree(
                osmajor=OSMajor.lazy_create(osmajor=u'foobangmajor'),
                harness_dir=False,
                lab_controllers=[lab_controller])
        distro_tree = data_setup.create_distro_tree(
                osmajor=OSMajor.lazy_create(osmajor=u'foobazmajor'),
                harness_dir=False,
                lab_controllers=[lab_controller])
    run_command('repo_update.py', 'beaker-repo-update',
            ['-b', self.harness_repo_url, '-d', faux_local_harness],
            ignore_stderr=True)
    # Both OS majors must have been synced down.
    self.assertTrue(
            os.path.exists(os.path.join(faux_local_harness, 'foobangmajor')))
    self.assertTrue(
            os.path.exists(os.path.join(faux_local_harness, 'foobazmajor')))
def test_log_not_delete(self):
    """Logs for a job still inside its expiry window must not be purged."""
    # Job that is not within its expiry time (finished only 29 days ago).
    with session.begin():
        job_not_delete = data_setup.create_completed_job(
                start_time=datetime.datetime.utcnow() - datetime.timedelta(days=60),
                finish_time=datetime.datetime.utcnow() - datetime.timedelta(days=29))
        job_not_delete.recipesets[0].recipes[0].logs.append(
                LogRecipe(filename=u'test.log'))
        r_not_delete = job_not_delete.recipesets[0].recipes[0]
        dir_not_delete = os.path.join(r_not_delete.logspath, r_not_delete.filepath)
        self.make_dir(dir_not_delete)
        # Touch the log file on disk so there is something to (not) delete.
        ft = open(os.path.join(dir_not_delete, 'test.log'), 'w')
        ft.close()
        session.flush()
    run_command('log_delete.py', 'beaker-log-delete')
    # self.job_to_delete is expired, so its logs are gone -- the helper
    # that asserts "logs still in db" must therefore fail for it.
    self.assertRaises(AssertionError, self._assert_logs_not_in_db,
            self.job_to_delete)
    # But the unexpired job's log directory must still exist.
    try:
        self.check_dir_not_there(dir_not_delete)
        # Fixed typo in the failure message: 'shold' -> 'should'.
        raise Exception('%s was deleted when it should not have been'
                % dir_not_delete)
    except AssertionError:
        pass
def test_replaces_bad_packages(self):
    """A corrupted local copy of a package is re-downloaded from the remote."""
    osmajor = u'MauveBeanieLinux3'
    package = 'tmp-distribution-beaker-task_test-2.0-5.noarch.rpm'
    with session.begin():
        data_setup.create_distro_tree(osmajor=osmajor)
    self._create_remote_harness(osmajor)
    local_harness_dir = tempfile.mkdtemp(suffix='local')
    self.addCleanup(shutil.rmtree, local_harness_dir)
    # Local harness dir has a corrupted copy of the package: same size as
    # the real one, so only its contents reveal the corruption.
    os.mkdir(os.path.join(local_harness_dir, osmajor))
    orig_size = os.path.getsize(
            os.path.join(self.harness_repo_dir, osmajor, package))
    with open(os.path.join(local_harness_dir, osmajor, package), 'wb') as f:
        f.write(b'a' * orig_size)
    run_command('repo_update.py', 'beaker-repo-update',
            ['--debug', '-b', self.harness_repo_url, '-d', local_harness_dir],
            ignore_stderr=True)
    # Fixed: read both files inside context managers instead of leaking
    # the handles via bare open(...).read() calls.
    with open(os.path.join(self.harness_repo_dir, osmajor, package), 'rb') as remote_pkg:
        expected_contents = remote_pkg.read()
    with open(os.path.join(local_harness_dir, osmajor, package), 'rb') as local_pkg:
        actual_contents = local_pkg.read()
    self.assertEquals(expected_contents, actual_contents)
def test_errors_out_if_file_not_specified(self):
    """product-update with no data source must fail with a usage error."""
    try:
        run_command('product_update.py', 'product-update', [])
    except CommandError as e:
        self.assertIn(
                'Specify product data to load using --product-file or --product-url',
                e.stderr_output)
    else:
        self.fail('should raise')
def test_recipe_task_result_rows_are_deleted(self):
    """Purging a deleted job removes its recipe task result rows too."""
    with session.begin():
        self.job_to_delete.deleted = datetime.datetime.utcnow()
        first_recipe = self.job_to_delete.recipesets[0].recipes[0]
        first_task = first_recipe.tasks[0]
        self.assertEqual(len(first_task.results), 1)
    run_command('log_delete.py', 'beaker-log-delete')
    with session.begin():
        session.expire_all()
        # The result row must have been deleted along with the logs.
        self.assertEqual(len(first_task.results), 0)
def test_purge_deleted(self):
    """A job marked deleted has its logs removed from the DB and from disk."""
    with session.begin():
        self.job_to_delete.deleted = datetime.datetime.utcnow()
        self.job_to_delete.recipesets[0].recipes[0].logs.append(
                LogRecipe(filename=u'test.log'))
        r_ = self.job_to_delete.recipesets[0].recipes[0]
        # Renamed from 'dir', which shadowed the builtin of the same name.
        log_dir = os.path.join(r_.logspath, r_.filepath)
        self.make_dir(log_dir)
        # Touch the log file; 'with' guarantees the handle is closed.
        with open(os.path.join(log_dir, 'test.log'), 'w'):
            pass
    run_command('log_delete.py', 'beaker-log-delete')
    self._assert_logs_not_in_db(Job.by_id(self.job_to_delete.id))
    self.check_dir_not_there(log_dir)
def test_adds_existing_user_to_admin_group(self):
    """beaker-init --user adds an existing account to the admin group,
    and running it again is a harmless no-op."""
    with session.begin():
        admin_group = Group.by_name(u'admin')
        existing_user = data_setup.create_user()
        self.assertNotIn(admin_group, existing_user.groups)
    run_command('init.py', 'beaker-init', ['--user', existing_user.user_name])
    with session.begin():
        admin_group = Group.by_name(u'admin')
        existing_user = User.query.get(existing_user.user_id)
        self.assertIn(admin_group, existing_user.groups)
    # run the same thing again, should have no effect but should not break
    run_command('init.py', 'beaker-init', ['--user', existing_user.user_name])
def test_rendered_kickstart_is_deleted(self):
    """Purging a deleted job also removes its rendered kickstart row."""
    with session.begin():
        self.job_to_delete.deleted = datetime.datetime.utcnow()
        recipe = self.job_to_delete.recipesets[0].recipes[0]
        kickstart = RenderedKickstart(kickstart=u'This is not a real kickstart.')
        recipe.installation.rendered_kickstart = kickstart
        # Flush so the kickstart row gets a primary key we can check later.
        session.flush()
        kickstart_id = kickstart.id
    run_command('log_delete.py', 'beaker-log-delete')
    with session.begin():
        session.expire_all()
        self.assertEqual(recipe.installation.rendered_kickstart, None)
        self.assertEqual(
                RenderedKickstart.query.filter_by(id=kickstart_id).count(), 0)
def test_ignores_empty_cpe_identifiers(self):
    """A <cpe/> element with no text must not create a product row."""
    # The temp file must stay open: NamedTemporaryFile deletes on close.
    xml_file = tempfile.NamedTemporaryFile()
    xml_file.write("""\
<products>
<product>
<cpe></cpe>
</product>
</products>
""")
    xml_file.flush()
    run_command('product_update.py', 'product-update', ['-f', xml_file.name])
    with session.begin():
        # Neither an empty name nor the string 'None' may have been stored.
        self.assertEquals(Product.query.filter(Product.name == u'').count(), 0)
        self.assertEquals(Product.query.filter(Product.name == u'None').count(), 0)
def test_loads_cpe_identifiers_from_json_url(self):
    """Products are loaded from a JSON document fetched over HTTP; entries
    with empty or missing CPE identifiers are skipped."""
    with open(os.path.join(self.product_docroot, 'product.json'), 'wb') as json_file:
        json_file.write("""\
[
    {"id": 1, "cpe": "cpe:/a:redhat:jboss_data_virtualization:6.2.0"},
    {"id": 2, "cpe": "cpe:/a:redhat:jboss_operations_network:3.2.0"},
    {"id": 3, "cpe": ""},
    {"id": 4}
]
""")
    run_command('product_update.py', 'product-update',
            ['--product-url', 'http://localhost:19998/product.json'])
    with session.begin():
        # by_name raises if the product was not inserted.
        Product.by_name(u'cpe:/a:redhat:jboss_data_virtualization:6.2.0')
        Product.by_name(u'cpe:/a:redhat:jboss_operations_network:3.2.0')
def test_refresh_ldap_group_membership(self):
    """beaker-refresh-ldap replaces an LDAP group's members with the
    directory contents, and a repeated run changes nothing."""
    with session.begin():
        group = Group(group_name=u'alp',
                display_name=u'Australian Labor Party',
                membership_type=GroupMembershipType.ldap)
        old_member = data_setup.create_user(user_name=u'krudd')
        group.add_member(old_member)
    run_command('refresh_ldap.py', 'beaker-refresh-ldap')
    with session.begin():
        session.expire_all()
        # The stale member is replaced by the LDAP directory's member.
        self.assertEquals(group.users, [User.by_user_name(u'jgillard')])
    # second time is a no-op
    run_command('refresh_ldap.py', 'beaker-refresh-ldap')
    with session.begin():
        session.expire_all()
        self.assertEquals(group.users, [User.by_user_name(u'jgillard')])
def test_ignores_duplicate_cpe_identifiers(self):
    """The same CPE appearing twice in the input yields a single product."""
    # Keep the temp file open: NamedTemporaryFile deletes on close.
    xml_file = tempfile.NamedTemporaryFile()
    xml_file.write("""\
<products>
<product>
<cpe>cpe:/a:redhat:ceph_storage:69</cpe>
</product>
<product>
<cpe>cpe:/a:redhat:ceph_storage:69</cpe>
</product>
</products>
""")
    xml_file.flush()
    run_command('product_update.py', 'product-update', ['-f', xml_file.name])
    with session.begin():
        # by_name raises if the product is absent (or duplicated).
        Product.by_name(u'cpe:/a:redhat:ceph_storage:69')
def test_delete_and_purge_expired(self):
    """A job past its 30-day expiry is deleted and its logs purged."""
    with session.begin():
        expired_job = data_setup.create_completed_job(
                start_time=datetime.datetime.utcnow() - datetime.timedelta(days=60),
                finish_time=datetime.datetime.utcnow() - datetime.timedelta(days=31))
        # NOTE(review): this touches the fixture job, not the job created
        # above -- presumably deliberate test setup; confirm.
        self.job_to_delete.owner = self.user
        expired_job.recipesets[0].recipes[0].logs.append(
                LogRecipe(filename=u'test.log'))
        expired_recipe = expired_job.recipesets[0].recipes[0]
        expired_dir = os.path.join(expired_recipe.logspath,
                expired_recipe.filepath)
        self.make_dir(expired_dir)
        fd = open(os.path.join(expired_dir, 'test.log'), 'w')
        fd.close()
    run_command('log_delete.py', 'beaker-log-delete')
    self._assert_logs_not_in_db(expired_job)
    self.check_dir_not_there(expired_dir)
def test_ignores_empty_cpe_identifiers(self):
    """An empty <cpe> element must not produce a product row."""
    # NamedTemporaryFile is deleted on close, so keep it open until
    # product-update has read it.
    xml_file = tempfile.NamedTemporaryFile()
    xml_file.write("""\
<products>
<product>
<cpe></cpe>
</product>
</products>
""")
    xml_file.flush()
    run_command('product_update.py', 'product-update', ['-f', xml_file.name])
    with session.begin():
        self.assertEquals(
                Product.query.filter(Product.name == u'').count(), 0)
        self.assertEquals(
                Product.query.filter(Product.name == u'None').count(), 0)
def test_loads_cpe_identifiers_from_xml_url(self):
    """Products are loaded from an XML document fetched over HTTP."""
    with open(os.path.join(self.product_docroot, 'product.xml'), 'wb') as xml_file:
        xml_file.write("""\
<products>
<product>
<cpe>cpe:/o:redhat:enterprise_linux:7.0</cpe>
</product>
<product>
<cpe>cpe:/o:redhat:enterprise_linux:7:2</cpe>
</product>
</products>
""")
    run_command('product_update.py', 'product-update',
            ['--product-url', 'http://localhost:19998/product.xml'])
    with session.begin():
        # by_name raises if the product was not inserted.
        Product.by_name(u'cpe:/o:redhat:enterprise_linux:7.0')
        Product.by_name(u'cpe:/o:redhat:enterprise_linux:7:2')
def test_loads_cpe_identifiers_from_xml_file(self):
    """Products are loaded from a local XML file passed with -f."""
    # Keep the temp file open: NamedTemporaryFile deletes on close.
    xml_file = tempfile.NamedTemporaryFile()
    xml_file.write("""\
<products>
<product>
<cpe>cpe:/a:redhat:ceph_storage:2</cpe>
</product>
<product>
<cpe>cpe:/o:redhat:enterprise_linux:4:update8</cpe>
</product>
</products>
""")
    xml_file.flush()
    run_command('product_update.py', 'product-update', ['-f', xml_file.name])
    with session.begin():
        # check that the products have been inserted into the db
        Product.by_name(u'cpe:/a:redhat:ceph_storage:2')
        Product.by_name(u'cpe:/o:redhat:enterprise_linux:4:update8')
def test_deletes_old_jobs_which_never_started(self):
    """Cancelled/aborted jobs older than the expiry window are marked
    deleted even though their recipes never recorded a finish time."""
    with session.begin():
        a_month_ago = datetime.datetime.utcnow() - datetime.timedelta(days=31)
        cancelled_job = data_setup.create_job(queue_time=a_month_ago)
        cancelled_job.cancel()
        cancelled_job.update_status()
        aborted_job = data_setup.create_job(queue_time=a_month_ago)
        aborted_job.abort()
        aborted_job.update_status()
        # Sanity-check the fixtures: never started, never finished,
        # not yet deleted.
        self.assertEqual(cancelled_job.status, TaskStatus.cancelled)
        self.assertEqual(aborted_job.status, TaskStatus.aborted)
        self.assertIsNone(cancelled_job.recipesets[0].recipes[0].finish_time)
        self.assertIsNone(aborted_job.recipesets[0].recipes[0].finish_time)
        self.assertIsNone(cancelled_job.deleted)
        self.assertIsNone(aborted_job.deleted)
    run_command('log_delete.py', 'beaker-log-delete')
    with session.begin():
        session.expire_all()
        self.assertIsNotNone(cancelled_job.deleted)
        self.assertIsNotNone(aborted_job.deleted)
def test_limit(self):
    """--limit caps the number of jobs beaker-log-delete processes."""
    limit = 10

    def _create_jobs():
        # Create one more deleted job than the limit allows.
        with session.begin():
            for _ in range(limit + 1):
                job_to_purge = data_setup.create_completed_job()
                job_to_purge.recipesets[0].recipes[0].logs.append(
                        LogRecipe(filename=u'test.log'))
                job_to_purge.deleted = datetime.datetime.utcnow()

    # Test with limit: exactly `limit` jobs are reported.
    _create_jobs()
    with_limit = run_command('log_delete.py', 'beaker-log-delete',
            # Fixed: derive the option value from `limit` instead of
            # hard-coding '10', so the two cannot drift apart.
            ['--dry-run', '--verbose', '--limit=%d' % limit])
    self.assert_(len(with_limit.splitlines()) == limit)
    # Test no limit set: more than `limit` jobs are reported.
    _create_jobs()
    no_limit = run_command('log_delete.py', 'beaker-log-delete',
            ['--dry-run', '--verbose'])
    self.assert_(len(no_limit.splitlines()) > limit)
def test_version(self):
    """beaker-log-delete --version reports the Beaker version string."""
    output = run_command('log_delete.py', 'beaker-log-delete', ['--version'])
    self.assertEquals(output.strip(), __version__)
def test_version(self):
    """beaker-create-kickstart --version reports the Beaker version string."""
    output = run_command('create_kickstart.py', 'beaker-create-kickstart',
            ['--version'])
    self.assertEquals(output.strip(), __version__)
def test_302_redirect(self):
    """Logs reached through an HTTP 302 redirect are still purged."""
    # Fixed resource leak: the original used open(...).write(...) and
    # never closed the handle; 'with' guarantees it is closed and flushed.
    with open(os.path.join(self.recipe_logs_dir, 'dummy.txt'), 'w') as f:
        f.write('dummy')
    self.create_deleted_job_with_log(u'redirect/302/recipe/', u'dummy.txt')
    run_command('log_delete.py', 'beaker-log-delete')
    self.assert_(not os.path.exists(os.path.join(self.logs_dir, 'recipe')))
def test_404(self):
    """A log URL that returns 404 must not crash beaker-log-delete."""
    self.create_deleted_job_with_log(u'notexist/', u'dummy.txt')
    run_command('log_delete.py', 'beaker-log-delete')
def test_version(self):
    """product-update --version reports the Beaker version string."""
    output = run_command('product_update.py', 'product-update', ['--version'])
    self.assertEquals(output.strip(), __version__)
def test_version(self):
    """beaker-create-ipxe-image --version reports the Beaker version string."""
    output = run_command('ipxe_image.py', 'beaker-create-ipxe-image',
            ['--version'])
    self.assertEquals(output.strip(), __version__)
def _run_create_kickstart(self, args):
    """Invoke beaker-create-kickstart with *args* and return its output."""
    command_output = run_command('create_kickstart.py',
            'beaker-create-kickstart', args)
    return command_output
def test_version(self):
    """beaker-usage-reminder --version reports the Beaker version string."""
    output = run_command('usage_reminder.py', 'beaker-usage-reminder',
            ['--version'])
    self.assertEquals(output.strip(), __version__)
def test_version(self):
    """beaker-repo-update --version reports the Beaker version string."""
    output = run_command('repo_update.py', 'beaker-repo-update', ['--version'])
    self.assertEquals(output.strip(), __version__)
def test_version(self):
    """beaker-refresh-ldap --version reports the Beaker version string."""
    output = run_command('refresh_ldap.py', 'beaker-refresh-ldap',
            ['--version'])
    self.assertEquals(output.strip(), __version__)
def test_image_should_not_be_deleted_when_not_uploaded(self):
    """--no-upload leaves the generated image on disk for the caller."""
    output = run_command('ipxe_image.py', 'beaker-create-ipxe-image',
            ['--no-upload'])
    # The command prints the path of the generated image.
    image_path = output.strip()
    self.assertTrue(os.path.exists(image_path))
    os.unlink(image_path)
def test_version(self):
    """beaker-init --version reports the Beaker version string."""
    output = run_command('init.py', 'beaker-init', ['--version'])
    self.assertEquals(output.strip(), __version__)
def test_version(self):
    """beakerd --version reports the Beaker version string."""
    output = run_command('beakerd.py', 'beakerd', ['--version'])
    self.assertEquals(output.strip(), __version__)