class RemoteLogDeletionTest(unittest.TestCase):
    """Tests deletion of recipe logs stored on a remote archive server.

    setUp serves a temporary directory over HTTP on port 19998; the
    log_delete routine is expected to delete recipe logs through that
    server, including following 301/302 redirects and tolerating 404s.
    """

    def setUp(self):
        self.logs_dir = tempfile.mkdtemp(prefix='beaker-test-log-delete')
        self.archive_server = Process('archive_server.py',
                args=['python',
                      os.path.join(os.path.dirname(__file__),
                                   '..', '..', 'archive_server.py'),
                      '--base', self.logs_dir],
                listen_port=19998)
        self.archive_server.start()

    def tearDown(self):
        self.archive_server.stop()
        shutil.rmtree(self.logs_dir, ignore_errors=True)

    def create_deleted_job_with_log(self, path, filename):
        """Create a completed job, marked for deletion, whose recipe has
        exactly one log hosted on the local archive server.

        :param path: URL path (relative to the server root) of the log dir
        :param filename: log file name within that path
        """
        with session.begin():
            job = data_setup.create_completed_job()
            job.to_delete = datetime.datetime.utcnow()
            session.flush()
            job.recipesets[0].recipes[0].log_server = u'localhost:19998'
            job.recipesets[0].recipes[0].logs[:] = [
                    LogRecipe(server=u'http://localhost:19998/%s' % path,
                              filename=filename)]
            # Clear task logs so only the recipe-level log is in play.
            for rt in job.recipesets[0].recipes[0].tasks:
                rt.logs[:] = []

    def test_deletion(self):
        os.mkdir(os.path.join(self.logs_dir, 'recipe'))
        # Fix: close the file handle deterministically instead of leaking
        # it from a bare open(...).write(...) expression.
        with open(os.path.join(self.logs_dir, 'recipe', 'dummy.txt'), 'w') as f:
            f.write('dummy')
        os.mkdir(os.path.join(self.logs_dir, 'dont_tase_me_bro'))
        self.create_deleted_job_with_log(u'recipe/', u'dummy.txt')
        self.assertEquals(log_delete.log_delete(), 0) # exit status
        self.assert_(not os.path.exists(
                os.path.join(self.logs_dir, 'recipe')))
        # Unrelated directories must be left alone.
        self.assert_(os.path.exists(
                os.path.join(self.logs_dir, 'dont_tase_me_bro')))

    def test_301_redirect(self):
        os.mkdir(os.path.join(self.logs_dir, 'recipe'))
        with open(os.path.join(self.logs_dir, 'recipe', 'dummy.txt'), 'w') as f:
            f.write('dummy')
        self.create_deleted_job_with_log(u'redirect/301/recipe/', u'dummy.txt')
        self.assertEquals(log_delete.log_delete(), 0) # exit status
        self.assert_(not os.path.exists(
                os.path.join(self.logs_dir, 'recipe')))

    def test_302_redirect(self):
        os.mkdir(os.path.join(self.logs_dir, 'recipe'))
        with open(os.path.join(self.logs_dir, 'recipe', 'dummy.txt'), 'w') as f:
            f.write('dummy')
        self.create_deleted_job_with_log(u'redirect/302/recipe/', u'dummy.txt')
        self.assertEquals(log_delete.log_delete(), 0) # exit status
        self.assert_(not os.path.exists(
                os.path.join(self.logs_dir, 'recipe')))

    def test_404(self):
        # Log does not exist on the server; deletion should still report
        # success rather than failing on the 404.
        self.create_deleted_job_with_log(u'notexist/', u'dummy.txt')
        self.assertEquals(log_delete.log_delete(), 0) # exit status
def setUp(self):
    """Start an archive server process serving a fresh temporary log dir."""
    self.logs_dir = tempfile.mkdtemp(prefix='beaker-test-log-delete')
    server_script = os.path.join(os.path.dirname(__file__),
            '..', '..', 'archive_server.py')
    self.archive_server = Process('archive_server.py',
            args=['python', server_script, '--base', self.logs_dir],
            listen_port=19998)
    self.archive_server.start()
def setUp(self):
    """Create a dummy completed job whose recipe, task, and task-result
    each have one log file served from a local HTTP server."""
    # set up a directory for our dummy job logs, with an HTTP server
    self.logs_dir = tempfile.mkdtemp(prefix='beaker-client-test-job-logs')
    self.addCleanup(shutil.rmtree, self.logs_dir, ignore_errors=True)
    self.archive_server = Process('http_server.py',
            args=[sys.executable,
                  pkg_resources.resource_filename('bkr.inttest',
                                                  'http_server.py'),
                  '--base', self.logs_dir],
            listen_port=19998)
    self.archive_server.start()
    self.addCleanup(self.archive_server.stop)
    self.log_server_url = u'http://localhost:19998/'
    # job for testing
    with session.begin():
        self.job = data_setup.create_completed_job()
        recipe = self.job.recipesets[0].recipes[0]
        os.mkdir(os.path.join(self.logs_dir, 'R'))
        # Fix: use with-blocks so the file handles are closed promptly
        # instead of being leaked from open(...).write(...).
        with open(os.path.join(self.logs_dir, 'R', 'dummy.txt'), 'w') as f:
            f.write('recipe\n')
        recipe.logs[:] = [LogRecipe(server=self.log_server_url,
                path=u'R', filename=u'dummy.txt')]
        os.mkdir(os.path.join(self.logs_dir, 'T'))
        with open(os.path.join(self.logs_dir, 'T', 'dummy.txt'), 'w') as f:
            f.write('task\n')
        recipe.tasks[0].logs[:] = [LogRecipeTask(server=self.log_server_url,
                path=u'T', filename=u'dummy.txt')]
        os.mkdir(os.path.join(self.logs_dir, 'TR'))
        with open(os.path.join(self.logs_dir, 'TR', 'dummy.txt'), 'w') as f:
            f.write('result\n')
        recipe.tasks[0].results[0].logs[:] = [LogRecipeTaskResult(
                server=self.log_server_url, path=u'TR',
                filename=u'dummy.txt')]
def setUp(self):
    """Serve an empty temporary docroot over HTTP as a fake product server."""
    docroot = tempfile.mkdtemp(prefix='beaker-fake-product-server')
    self.product_docroot = docroot
    self.addCleanup(shutil.rmtree, docroot, ignore_errors=True)
    server_script = pkg_resources.resource_filename('bkr.inttest',
            'http_server.py')
    self.product_server = Process('http_server.py',
            args=[sys.executable, server_script, '--base', docroot],
            listen_port=19998)
    self.product_server.start()
    self.addCleanup(self.product_server.stop)
class RemoteLogDeletionTest(DatabaseTestCase): def setUp(self): # XXX We should eventually configure these redirect tests # to work with apache, until then, we do this... test_id = self.id() if test_id.endswith('test_301_redirect') or \ test_id.endswith('test_302_redirect'): self.force_local_archive_server = True else: self.force_local_archive_server = False if 'BEAKER_LABCONTROLLER_HOSTNAME' in os.environ and not \ self.force_local_archive_server: self.logs_dir = config.get('basepath.logs') self.recipe_logs_dir = os.path.join(self.logs_dir, 'recipe') self.log_server = os.environ['BEAKER_LABCONTROLLER_HOSTNAME'] self.log_server_url = 'http://%s/logs' % self.log_server self.addCleanup(shutil.rmtree, self.recipe_logs_dir, ignore_errors=True) else: self.logs_dir = tempfile.mkdtemp(prefix='beaker-test-log-delete') self.recipe_logs_dir = os.path.join(self.logs_dir, 'recipe') self.archive_server = Process('http_server.py', args=[sys.executable, pkg_resources.resource_filename('bkr.inttest', 'http_server.py'), '--base', self.logs_dir, '--writable'], listen_port=19998) self.archive_server.start() self.log_server = 'localhost:19998' self.log_server_url = 'http://%s' % self.log_server self.addCleanup(shutil.rmtree, self.logs_dir, ignore_errors=True) self.addCleanup(self.archive_server.stop) try: os.mkdir(self.recipe_logs_dir) except OSError, e: if e.errno == errno.EEXIST: # perhaps something else created it and did not clean it up pass else: raise if 'BEAKER_LABCONTROLLER_HOSTNAME' in os.environ and not \ self.force_local_archive_server: # XXX This assumes we are running against apache, and allows # WebDAV to delete stuff. os.chmod(self.recipe_logs_dir, 02777) orig_umask = os.umask(000) self.addCleanup(os.umask, orig_umask)
def test_on_error_warns_if_server_version_does_not_match(self):
    """A client/server version-mismatch warning must appear on stderr
    before the HTTP error message."""
    server_script = pkg_resources.resource_filename('bkr.inttest',
            'http_server.py')
    fake_server = Process('http_server.py',
            args=[sys.executable, server_script,
                  '--base', '/notexist',
                  '--add-response-header', 'X-Beaker-Version:999.3'],
            listen_port=19998)
    fake_server.start()
    self.addCleanup(fake_server.stop)
    # use AUTH_METHOD=none because we can't authenticate to the fake server
    config = create_client_config(hub_url='http://localhost:19998',
            auth_method=u'none')
    try:
        run_client(['bkr', 'system-status', 'asdf.example.com'],
                config=config)
        self.fail('should raise')
    except ClientError as e:
        error_lines = e.stderr_output.splitlines()
        self.assertEquals(error_lines[0],
                'WARNING: client version is %s but server version is 999.3'
                % __version__)
        self.assertIn('HTTP error: 404 Client Error: Not Found',
                error_lines[1])
def setUpClass(cls):
    """Populate a directory with fake boot images and serve it over HTTP,
    so that beaker-pxemenu can download the images when it builds the
    menus; also create an empty TFTP directory for the menus."""
    cls.distro_dir = tempfile.mkdtemp()
    os.mkdir(os.path.join(cls.distro_dir, 'pxeboot'))
    # Fix: close the file handles deterministically instead of leaking
    # them from open(...).write(...).
    with open(os.path.join(cls.distro_dir, 'pxeboot/vmlinuz'), 'w') as f:
        f.write('lol')
    with open(os.path.join(cls.distro_dir, 'pxeboot/initrd'), 'w') as f:
        f.write('lol')
    cls.distro_server = Process('http_server.py',
            args=[sys.executable,
                  pkg_resources.resource_filename('bkr.inttest',
                                                  'http_server.py'),
                  '--base', cls.distro_dir],
            listen_port=19998)
    cls.distro_server.start()
    cls.tftp_dir = tempfile.mkdtemp()
def setUp(self): # XXX We should eventually configure these redirect tests # to work with apache, until then, we do this... test_id = self.id() if test_id.endswith('test_301_redirect') or \ test_id.endswith('test_302_redirect'): self.force_local_archive_server = True else: self.force_local_archive_server = False if 'BEAKER_LABCONTROLLER_HOSTNAME' in os.environ and not \ self.force_local_archive_server: self.logs_dir = config.get('basepath.logs') self.recipe_logs_dir = os.path.join(self.logs_dir, 'recipe') self.log_server = os.environ['BEAKER_LABCONTROLLER_HOSTNAME'] self.log_server_url = 'http://%s/logs' % self.log_server self.addCleanup(shutil.rmtree, self.recipe_logs_dir, ignore_errors=True) else: self.logs_dir = tempfile.mkdtemp(prefix='beaker-test-log-delete') self.recipe_logs_dir = os.path.join(self.logs_dir, 'recipe') self.archive_server = Process('archive_server.py', args=['python', os.path.join(os.path.dirname(__file__), '..', '..', 'archive_server.py'), '--base', self.logs_dir], listen_port=19998) self.archive_server.start() self.log_server = 'localhost:19998' self.log_server_url = 'http://%s' % self.log_server self.addCleanup(shutil.rmtree, self.logs_dir, ignore_errors=True) self.addCleanup(self.archive_server.stop) try: os.mkdir(self.recipe_logs_dir) except OSError, e: if e.errno == errno.EEXIST: # perhaps something else created it and did not clean it up pass else: raise
class JobLogsTest(ClientTestCase):
    """Tests for the `bkr job-logs` client command (plain log-URL output)."""

    def setUp(self):
        # set up a directory for our dummy job logs, with an HTTP server
        self.logs_dir = tempfile.mkdtemp(prefix='beaker-client-test-job-logs')
        self.addCleanup(shutil.rmtree, self.logs_dir, ignore_errors=True)
        self.archive_server = Process('http_server.py',
                args=[sys.executable,
                      pkg_resources.resource_filename('bkr.inttest',
                                                      'http_server.py'),
                      '--base', self.logs_dir],
                listen_port=19998)
        self.archive_server.start()
        self.addCleanup(self.archive_server.stop)
        self.log_server_url = u'http://localhost:19998/'
        # job for testing
        with session.begin():
            self.job = data_setup.create_completed_job()
            recipe = self.job.recipesets[0].recipes[0]
            os.mkdir(os.path.join(self.logs_dir, 'R'))
            # Fix: close the file handles deterministically instead of
            # leaking them from open(...).write(...).
            with open(os.path.join(self.logs_dir, 'R', 'dummy.txt'), 'w') as f:
                f.write('recipe\n')
            recipe.logs[:] = [LogRecipe(server=self.log_server_url,
                    path=u'R', filename=u'dummy.txt')]
            os.mkdir(os.path.join(self.logs_dir, 'T'))
            with open(os.path.join(self.logs_dir, 'T', 'dummy.txt'), 'w') as f:
                f.write('task\n')
            recipe.tasks[0].logs[:] = [LogRecipeTask(
                    server=self.log_server_url, path=u'T',
                    filename=u'dummy.txt')]
            os.mkdir(os.path.join(self.logs_dir, 'TR'))
            with open(os.path.join(self.logs_dir, 'TR', 'dummy.txt'), 'w') as f:
                f.write('result\n')
            recipe.tasks[0].results[0].logs[:] = [LogRecipeTaskResult(
                    server=self.log_server_url, path=u'TR',
                    filename=u'dummy.txt')]

    def test_by_job(self):
        out = run_client(['bkr', 'job-logs', self.job.t_id])
        logs = out.splitlines()
        self.assertEquals(logs[0], self.log_server_url + u'R/dummy.txt')
        self.assertEquals(logs[1], self.log_server_url + u'T/dummy.txt')
        self.assertEquals(logs[2], self.log_server_url + u'TR/dummy.txt')

    def test_by_recipeset(self):
        out = run_client(['bkr', 'job-logs', self.job.recipesets[0].t_id])
        logs = out.splitlines()
        self.assertEquals(logs[0], self.log_server_url + u'R/dummy.txt')
        self.assertEquals(logs[1], self.log_server_url + u'T/dummy.txt')
        self.assertEquals(logs[2], self.log_server_url + u'TR/dummy.txt')

    def test_by_recipe(self):
        out = run_client(['bkr', 'job-logs',
                self.job.recipesets[0].recipes[0].t_id])
        logs = out.splitlines()
        self.assertEquals(logs[0], self.log_server_url + u'R/dummy.txt')
        self.assertEquals(logs[1], self.log_server_url + u'T/dummy.txt')
        self.assertEquals(logs[2], self.log_server_url + u'TR/dummy.txt')

    def test_by_task(self):
        out = run_client(['bkr', 'job-logs',
                self.job.recipesets[0].recipes[0].tasks[0].t_id])
        logs = out.splitlines()
        self.assertEquals(logs[0], self.log_server_url + u'T/dummy.txt')
        self.assertEquals(logs[1], self.log_server_url + u'TR/dummy.txt')

    def test_by_taskresult(self):
        out = run_client(['bkr', 'job-logs',
                self.job.recipesets[0].recipes[0].tasks[0].results[0].t_id])
        logs = out.splitlines()
        self.assertEquals(logs[0], self.log_server_url + u'TR/dummy.txt')

    # https://bugzilla.redhat.com/show_bug.cgi?id=595512
    def test_invalid_taskspec(self):
        try:
            run_client(['bkr', 'job-logs', '12345'])
            # Fix: was a bare fail(...) which is a NameError, masking the
            # intended assertion failure.
            self.fail('should raise')
        except ClientError as e:
            self.assert_('Invalid taskspec' in e.stderr_output)
def setup_package():
    """Module-level setup: ensure a lab controller is available.

    If BEAKER_LABCONTROLLER_HOSTNAME is unset, local beaker-proxy,
    beaker-provision, and beaker-watchdog daemons are started against a
    freshly created 'localhost' lab controller; otherwise the named
    remote lab controller is registered in the database and the daemons
    are assumed to be running externally.
    """
    global lc_fqdn, _daemons_running_externally
    conf = get_conf()
    if 'BEAKER_LABCONTROLLER_HOSTNAME' not in os.environ:
        # Need to start the lab controller daemons ourselves
        with session.begin():
            user = data_setup.create_user(
                    user_name=conf.get('USERNAME').decode('utf8'),
                    password=conf.get('PASSWORD'))
            lc = data_setup.create_labcontroller(fqdn=u'localhost', user=user)
        processes.extend([
            Process('beaker-proxy',
                    args=['python',
                          '../LabController/src/bkr/labcontroller/main.py',
                          '-c', config_file, '-f'],
                    listen_port=8000, stop_signal=signal.SIGTERM),
            Process('beaker-provision',
                    args=['python',
                          '../LabController/src/bkr/labcontroller/provision.py',
                          '-c', config_file, '-f'],
                    stop_signal=signal.SIGTERM),
            Process('beaker-watchdog',
                    args=['python',
                          '../LabController/src/bkr/labcontroller/watchdog.py',
                          '-c', config_file, '-f'],
                    stop_signal=signal.SIGTERM),
        ])
        lc_fqdn = u'localhost'
    else:
        _daemons_running_externally = True
        # We have been passed a space separated list of LCs
        lab_controllers = os.environ.get(
                'BEAKER_LABCONTROLLER_HOSTNAME').decode('utf8')
        lab_controllers_list = lab_controllers.split()
        # Just get the last one, it shouldn't matter to us
        lab_controller = lab_controllers_list.pop()
        # Make sure that the LC is in the DB
        data_setup.create_labcontroller(fqdn=lab_controller)
        lc_fqdn = lab_controller
    # Clear out any existing job logs, so that they are registered correctly
    # when first created.
    # If we've been passed a remote hostname for the LC, we assume it's been
    # freshly provisioned and the dir will already be empty.
    shutil.rmtree(conf.get('CACHEPATH'), ignore_errors=True)
    try:
        for process in processes:
            process.start()
    except:
        # Deliberate bare except: roll back whatever we managed to start,
        # then re-raise the original error.
        for process in processes:
            process.stop()
        raise
class ProductUpdateTest(DatabaseTestCase):
    """Tests for the product_update.py (product-update) admin script."""

    def setUp(self):
        # Fake product server: HTTP server over a temporary docroot.
        self.product_docroot = tempfile.mkdtemp(
                prefix='beaker-fake-product-server')
        self.addCleanup(shutil.rmtree, self.product_docroot,
                ignore_errors=True)
        self.product_server = Process('http_server.py',
                args=[sys.executable,
                      pkg_resources.resource_filename('bkr.inttest',
                                                      'http_server.py'),
                      '--base', self.product_docroot],
                listen_port=19998)
        self.product_server.start()
        self.addCleanup(self.product_server.stop)

    def test_version(self):
        out = run_command('product_update.py', 'product-update',
                ['--version'])
        self.assertEquals(out.strip(), __version__)

    def test_errors_out_if_file_not_specified(self):
        try:
            run_command('product_update.py', 'product-update', [])
            self.fail('should raise')
        except CommandError as e:
            self.assertIn('Specify product data to load using --product-file or --product-url',
                    e.stderr_output)

    def test_loads_cpe_identifiers_from_xml_file(self):
        xml_file = tempfile.NamedTemporaryFile()
        xml_file.write("""\
<products>
<product>
<cpe>cpe:/a:redhat:ceph_storage:2</cpe>
</product>
<product>
<cpe>cpe:/o:redhat:enterprise_linux:4:update8</cpe>
</product>
</products>
""")
        xml_file.flush()
        run_command('product_update.py', 'product-update',
                ['-f', xml_file.name])
        with session.begin():
            # check that the products have been inserted into the db
            Product.by_name(u'cpe:/a:redhat:ceph_storage:2')
            Product.by_name(u'cpe:/o:redhat:enterprise_linux:4:update8')

    def test_ignores_duplicate_cpe_identifiers(self):
        xml_file = tempfile.NamedTemporaryFile()
        xml_file.write("""\
<products>
<product>
<cpe>cpe:/a:redhat:ceph_storage:69</cpe>
</product>
<product>
<cpe>cpe:/a:redhat:ceph_storage:69</cpe>
</product>
</products>
""")
        xml_file.flush()
        run_command('product_update.py', 'product-update',
                ['-f', xml_file.name])
        with session.begin():
            Product.by_name(u'cpe:/a:redhat:ceph_storage:69')

    def test_ignores_empty_cpe_identifiers(self):
        xml_file = tempfile.NamedTemporaryFile()
        xml_file.write("""\
<products>
<product>
<cpe></cpe>
</product>
</products>
""")
        xml_file.flush()
        run_command('product_update.py', 'product-update',
                ['-f', xml_file.name])
        with session.begin():
            # Neither an empty name nor the string 'None' should appear.
            self.assertEquals(
                    Product.query.filter(Product.name == u'').count(), 0)
            self.assertEquals(
                    Product.query.filter(Product.name == u'None').count(), 0)

    def test_loads_cpe_identifiers_from_xml_url(self):
        with open(os.path.join(self.product_docroot, 'product.xml'),
                'wb') as xml_file:
            xml_file.write("""\
<products>
<product>
<cpe>cpe:/o:redhat:enterprise_linux:7.0</cpe>
</product>
<product>
<cpe>cpe:/o:redhat:enterprise_linux:7:2</cpe>
</product>
</products>
""")
        run_command('product_update.py', 'product-update',
                ['--product-url', 'http://localhost:19998/product.xml'])
        with session.begin():
            Product.by_name(u'cpe:/o:redhat:enterprise_linux:7.0')
            Product.by_name(u'cpe:/o:redhat:enterprise_linux:7:2')

    def test_loads_cpe_identifiers_from_json_url(self):
        with open(os.path.join(self.product_docroot, 'product.json'),
                'wb') as json_file:
            json_file.write("""\
[
{"id": 1, "cpe": "cpe:/a:redhat:jboss_data_virtualization:6.2.0"},
{"id": 2, "cpe": "cpe:/a:redhat:jboss_operations_network:3.2.0"},
{"id": 3, "cpe": ""},
{"id": 4}
]
""")
        run_command('product_update.py', 'product-update',
                ['--product-url', 'http://localhost:19998/product.json'])
        with session.begin():
            Product.by_name(u'cpe:/a:redhat:jboss_data_virtualization:6.2.0')
            Product.by_name(u'cpe:/a:redhat:jboss_operations_network:3.2.0')
class ProductUpdateTest(DatabaseTestCase):
    """Tests for the product_update.py (product-update) admin script."""

    def setUp(self):
        # Fake product server: serve a temporary docroot over HTTP.
        docroot = tempfile.mkdtemp(prefix='beaker-fake-product-server')
        self.product_docroot = docroot
        self.addCleanup(shutil.rmtree, docroot, ignore_errors=True)
        server_script = pkg_resources.resource_filename('bkr.inttest',
                'http_server.py')
        self.product_server = Process('http_server.py',
                args=[sys.executable, server_script, '--base', docroot],
                listen_port=19998)
        self.product_server.start()
        self.addCleanup(self.product_server.stop)

    def test_version(self):
        out = run_command('product_update.py', 'product-update',
                ['--version'])
        self.assertEquals(out.strip(), __version__)

    def test_errors_out_if_file_not_specified(self):
        try:
            run_command('product_update.py', 'product-update', [])
            self.fail('should raise')
        except CommandError as e:
            self.assertIn('Specify product data to load using --product-file or --product-url',
                    e.stderr_output)

    def test_loads_cpe_identifiers_from_xml_file(self):
        xml_file = tempfile.NamedTemporaryFile()
        xml_file.write("""\
<products>
<product>
<cpe>cpe:/a:redhat:ceph_storage:2</cpe>
</product>
<product>
<cpe>cpe:/o:redhat:enterprise_linux:4:update8</cpe>
</product>
</products>
""")
        xml_file.flush()
        run_command('product_update.py', 'product-update',
                ['-f', xml_file.name])
        with session.begin():
            # check that the products have been inserted into the db
            Product.by_name(u'cpe:/a:redhat:ceph_storage:2')
            Product.by_name(u'cpe:/o:redhat:enterprise_linux:4:update8')

    def test_ignores_duplicate_cpe_identifiers(self):
        xml_file = tempfile.NamedTemporaryFile()
        xml_file.write("""\
<products>
<product>
<cpe>cpe:/a:redhat:ceph_storage:69</cpe>
</product>
<product>
<cpe>cpe:/a:redhat:ceph_storage:69</cpe>
</product>
</products>
""")
        xml_file.flush()
        run_command('product_update.py', 'product-update',
                ['-f', xml_file.name])
        with session.begin():
            Product.by_name(u'cpe:/a:redhat:ceph_storage:69')

    def test_ignores_empty_cpe_identifiers(self):
        xml_file = tempfile.NamedTemporaryFile()
        xml_file.write("""\
<products>
<product>
<cpe></cpe>
</product>
</products>
""")
        xml_file.flush()
        run_command('product_update.py', 'product-update',
                ['-f', xml_file.name])
        with session.begin():
            # Neither an empty name nor the string 'None' should appear.
            self.assertEquals(
                    Product.query.filter(Product.name == u'').count(), 0)
            self.assertEquals(
                    Product.query.filter(Product.name == u'None').count(), 0)

    def test_loads_cpe_identifiers_from_xml_url(self):
        with open(os.path.join(self.product_docroot, 'product.xml'),
                'wb') as xml_file:
            xml_file.write("""\
<products>
<product>
<cpe>cpe:/o:redhat:enterprise_linux:7.0</cpe>
</product>
<product>
<cpe>cpe:/o:redhat:enterprise_linux:7:2</cpe>
</product>
</products>
""")
        run_command('product_update.py', 'product-update',
                ['--product-url', 'http://localhost:19998/product.xml'])
        with session.begin():
            Product.by_name(u'cpe:/o:redhat:enterprise_linux:7.0')
            Product.by_name(u'cpe:/o:redhat:enterprise_linux:7:2')

    def test_loads_cpe_identifiers_from_json_url(self):
        with open(os.path.join(self.product_docroot, 'product.json'),
                'wb') as json_file:
            json_file.write("""\
[
{"id": 1, "cpe": "cpe:/a:redhat:jboss_data_virtualization:6.2.0"},
{"id": 2, "cpe": "cpe:/a:redhat:jboss_operations_network:3.2.0"},
{"id": 3, "cpe": ""},
{"id": 4}
]
""")
        run_command('product_update.py', 'product-update',
                ['--product-url', 'http://localhost:19998/product.json'])
        with session.begin():
            Product.by_name(u'cpe:/a:redhat:jboss_data_virtualization:6.2.0')
            Product.by_name(u'cpe:/a:redhat:jboss_operations_network:3.2.0')
class RepoUpdate(DatabaseTestCase):
    """Tests the repo_update.py script"""

    def setUp(self):
        # We will point beaker-repo-update at this fake version of the
        # harness repos that we normally publish on
        # https://beaker-project.org/yum/harness/
        self.harness_repo_dir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.harness_repo_dir)
        self.harness_repo_server = Process('http_server.py',
                args=[sys.executable,
                      pkg_resources.resource_filename('bkr.inttest',
                                                      'http_server.py'),
                      '--base', self.harness_repo_dir],
                listen_port=19998)
        self.harness_repo_server.start()
        self.addCleanup(self.harness_repo_server.stop)
        self.harness_repo_url = 'http://localhost:19998/'

    def _create_remote_harness(self, osmajor):
        """Publish a one-package harness repo for the given OS major."""
        repo_dir = os.path.join(self.harness_repo_dir, osmajor)
        os.mkdir(repo_dir)
        rpm_file = pkg_resources.resource_filename('bkr.server.tests',
                'tmp-distribution-beaker-task_test-2.0-5.noarch.rpm')
        shutil.copy(rpm_file, repo_dir)
        result = run_createrepo(cwd=repo_dir)
        self.assertEqual(result.returncode, 0, result.err)

    def test_version(self):
        out = run_command('repo_update.py', 'beaker-repo-update',
                ['--version'])
        self.assertEquals(out.strip(), __version__)

    def test_update_harness_repos(self):
        """Test that the update_repo() call runs as expected.

        This checks that the harness repos that are supposed to be synced
        are actually synced. Does not check repo metadata.
        """
        self._create_remote_harness('foobangmajor')
        self._create_remote_harness('foobazmajor')
        faux_local_harness = tempfile.mkdtemp('local_harness')
        self.addCleanup(shutil.rmtree, faux_local_harness)
        with session.begin():
            lab_controller = data_setup.create_labcontroller(
                    fqdn=u'dummylab.example.invalid')
            distro_tree = data_setup.create_distro_tree(
                    osmajor=OSMajor.lazy_create(osmajor=u'foobangmajor'),
                    harness_dir=False, lab_controllers=[lab_controller])
            distro_tree = data_setup.create_distro_tree(
                    osmajor=OSMajor.lazy_create(osmajor=u'foobazmajor'),
                    harness_dir=False, lab_controllers=[lab_controller])
        run_command('repo_update.py', 'beaker-repo-update',
                ['-b', self.harness_repo_url, '-d', faux_local_harness],
                ignore_stderr=True)
        self.assertTrue(os.path.exists(
                os.path.join(faux_local_harness, 'foobangmajor')))
        self.assertTrue(os.path.exists(
                os.path.join(faux_local_harness, 'foobazmajor')))

    # https://bugzilla.redhat.com/show_bug.cgi?id=1027516
    def test_does_not_run_createrepo_unnecessarily(self):
        osmajor = u'GreenBeretLinux99'
        with session.begin():
            lab_controller = data_setup.create_labcontroller(
                    fqdn=u'dummylab.example.invalid')
            distro_tree = data_setup.create_distro_tree(
                    osmajor=OSMajor.lazy_create(osmajor=osmajor),
                    harness_dir=False, lab_controllers=[lab_controller])
        local_harness_dir = tempfile.mkdtemp(suffix='local')
        self.addCleanup(shutil.rmtree, local_harness_dir)
        self._create_remote_harness(osmajor)
        # run it once, repo is built
        run_command('repo_update.py', 'beaker-repo-update',
                ['-b', self.harness_repo_url, '-d', local_harness_dir],
                ignore_stderr=True)
        repodata_dir = os.path.join(local_harness_dir, osmajor, 'repodata')
        mtime = os.path.getmtime(repodata_dir)
        # run it again, repo should not be rebuilt
        time.sleep(0.001)
        run_command('repo_update.py', 'beaker-repo-update',
                ['-b', self.harness_repo_url, '-d', local_harness_dir],
                ignore_stderr=True)
        self.assertEquals(os.path.getmtime(repodata_dir), mtime)

    # https://bugzilla.redhat.com/show_bug.cgi?id=1213225
    def test_exclude_nonexistent_osmajor(self):
        with session.begin():
            osmajor = OSMajor.lazy_create(osmajor="exist")
            lab_controller = data_setup.create_labcontroller(
                    fqdn=u'dummylab.example.invalid')
            distro_tree = data_setup.create_distro_tree(
                    osmajor=osmajor.osmajor, harness_dir=False,
                    lab_controllers=[lab_controller])
            nonexistent_osmajor = OSMajor.lazy_create(osmajor=u'notexist')
        local_harness_dir = tempfile.mkdtemp(suffix='local')
        self.addCleanup(shutil.rmtree, local_harness_dir)
        self._create_remote_harness(osmajor.osmajor)
        run_command('repo_update.py', 'beaker-repo-update',
                ['-b', self.harness_repo_url, '-d', local_harness_dir],
                ignore_stderr=True)
        self.assertTrue(os.path.exists(
                os.path.join(local_harness_dir, osmajor.osmajor)))
        self.assertFalse(os.path.exists(
                os.path.join(local_harness_dir, nonexistent_osmajor.osmajor)))

    # https://bugzilla.redhat.com/show_bug.cgi?id=1619969
    def test_replaces_bad_packages(self):
        osmajor = u'MauveBeanieLinux3'
        package = 'tmp-distribution-beaker-task_test-2.0-5.noarch.rpm'
        with session.begin():
            data_setup.create_distro_tree(osmajor=osmajor)
        self._create_remote_harness(osmajor)
        local_harness_dir = tempfile.mkdtemp(suffix='local')
        self.addCleanup(shutil.rmtree, local_harness_dir)
        # Local harness dir has a corrupted copy of the package
        os.mkdir(os.path.join(local_harness_dir, osmajor))
        orig_size = os.path.getsize(
                os.path.join(self.harness_repo_dir, osmajor, package))
        with open(os.path.join(local_harness_dir, osmajor, package),
                'wb') as f:
            f.write(b'a' * orig_size)
        run_command('repo_update.py', 'beaker-repo-update',
                ['--debug', '-b', self.harness_repo_url,
                 '-d', local_harness_dir],
                ignore_stderr=True)
        # Fix: read both copies via with-blocks so the file handles are
        # closed, instead of leaking them in the assertEquals expression.
        with open(os.path.join(self.harness_repo_dir, osmajor, package),
                'rb') as f:
            expected = f.read()
        with open(os.path.join(local_harness_dir, osmajor, package),
                'rb') as f:
            actual = f.read()
        self.assertEquals(expected, actual)
class JobLogsTest(ClientTestCase):
    """Tests for `bkr job-logs` (server-redirect URL output, with --size)."""

    def setUp(self):
        # set up a directory for our dummy job logs, with an HTTP server
        self.logs_dir = tempfile.mkdtemp(prefix='beaker-client-test-job-logs')
        self.addCleanup(shutil.rmtree, self.logs_dir, ignore_errors=True)
        self.archive_server = Process('http_server.py',
                args=[sys.executable,
                      pkg_resources.resource_filename('bkr.inttest',
                                                      'http_server.py'),
                      '--base', self.logs_dir],
                listen_port=19998)
        self.archive_server.start()
        self.addCleanup(self.archive_server.stop)
        self.log_server_url = u'http://localhost:19998/'
        # job for testing
        with session.begin():
            self.job = data_setup.create_completed_job()
            self.recipe = self.job.recipesets[0].recipes[0]
            os.mkdir(os.path.join(self.logs_dir, 'R'))
            # Fix: close the file handles deterministically instead of
            # leaking them from open(...).write(...).
            with open(os.path.join(self.logs_dir, 'R', 'dummy.txt'), 'w') as f:
                f.write('recipe\n')
            self.recipe.logs[:] = [LogRecipe(server=self.log_server_url,
                    path=u'R', filename=u'dummy.txt')]
            os.mkdir(os.path.join(self.logs_dir, 'T'))
            with open(os.path.join(self.logs_dir, 'T', 'dummy.txt'), 'w') as f:
                f.write('task\n')
            self.recipe.tasks[0].logs[:] = [LogRecipeTask(
                    server=self.log_server_url, path=u'T',
                    filename=u'dummy.txt')]
            os.mkdir(os.path.join(self.logs_dir, 'TR'))
            with open(os.path.join(self.logs_dir, 'TR', 'dummy.txt'), 'w') as f:
                f.write('result\n')
            self.recipe.tasks[0].results[0].logs[:] = [LogRecipeTaskResult(
                    server=self.log_server_url, path=u'TR',
                    filename=u'dummy.txt')]

    # https://bugzilla.redhat.com/show_bug.cgi?id=1391282
    def test_by_job(self):
        out = run_client(['bkr', 'job-logs', self.job.t_id])
        logs = out.splitlines()
        self.assertEquals(logs[0], get_server_base() +
                u'recipes/%s/logs/R/dummy.txt' % self.recipe.id)
        self.assertEquals(logs[1], get_server_base() +
                u'recipes/%s/tasks/%s/logs/T/dummy.txt'
                % (self.recipe.id, self.recipe.tasks[0].id))
        self.assertEquals(logs[2], get_server_base() +
                u'recipes/%s/tasks/%s/results/%s/logs/TR/dummy.txt'
                % (self.recipe.id, self.recipe.tasks[0].id,
                   self.recipe.tasks[0].results[0].id))

    def test_by_recipeset(self):
        out = run_client(['bkr', 'job-logs', self.job.recipesets[0].t_id])
        logs = out.splitlines()
        self.assertEquals(logs[0], get_server_base() +
                u'recipes/%s/logs/R/dummy.txt' % self.recipe.id)
        self.assertEquals(logs[1], get_server_base() +
                u'recipes/%s/tasks/%s/logs/T/dummy.txt'
                % (self.recipe.id, self.recipe.tasks[0].id))
        self.assertEquals(logs[2], get_server_base() +
                u'recipes/%s/tasks/%s/results/%s/logs/TR/dummy.txt'
                % (self.recipe.id, self.recipe.tasks[0].id,
                   self.recipe.tasks[0].results[0].id))

    def test_by_recipe(self):
        out = run_client(['bkr', 'job-logs',
                self.job.recipesets[0].recipes[0].t_id])
        logs = out.splitlines()
        self.assertEquals(logs[0], get_server_base() +
                u'recipes/%s/logs/R/dummy.txt' % self.recipe.id)
        self.assertEquals(logs[1], get_server_base() +
                u'recipes/%s/tasks/%s/logs/T/dummy.txt'
                % (self.recipe.id, self.recipe.tasks[0].id))
        self.assertEquals(logs[2], get_server_base() +
                u'recipes/%s/tasks/%s/results/%s/logs/TR/dummy.txt'
                % (self.recipe.id, self.recipe.tasks[0].id,
                   self.recipe.tasks[0].results[0].id))

    def test_by_task(self):
        out = run_client(['bkr', 'job-logs',
                self.job.recipesets[0].recipes[0].tasks[0].t_id])
        logs = out.splitlines()
        self.assertEquals(logs[0], get_server_base() +
                u'recipes/%s/tasks/%s/logs/T/dummy.txt'
                % (self.recipe.id, self.recipe.tasks[0].id))
        self.assertEquals(logs[1], get_server_base() +
                u'recipes/%s/tasks/%s/results/%s/logs/TR/dummy.txt'
                % (self.recipe.id, self.recipe.tasks[0].id,
                   self.recipe.tasks[0].results[0].id))

    def test_by_taskresult(self):
        out = run_client(['bkr', 'job-logs',
                self.job.recipesets[0].recipes[0].tasks[0].results[0].t_id])
        logs = out.splitlines()
        self.assertEquals(logs[0], get_server_base() +
                u'recipes/%s/tasks/%s/results/%s/logs/TR/dummy.txt'
                % (self.recipe.id, self.recipe.tasks[0].id,
                   self.recipe.tasks[0].results[0].id))

    # https://bugzilla.redhat.com/show_bug.cgi?id=595512
    def test_invalid_taskspec(self):
        try:
            run_client(['bkr', 'job-logs', '12345'])
            self.fail('should raise')
        except ClientError as e:
            self.assert_('Invalid taskspec' in e.stderr_output)

    def test_prints_sizes(self):
        out = run_client(['bkr', 'job-logs', '--size', self.job.t_id])
        lines = out.splitlines()
        self.assertEquals(lines[0],
                '        7 %srecipes/%s/logs/R/dummy.txt'
                % (get_server_base(), self.recipe.id))
        self.assertEquals(lines[1],
                '        5 %srecipes/%s/tasks/%s/logs/T/dummy.txt'
                % (get_server_base(), self.recipe.id,
                   self.recipe.tasks[0].id))
        self.assertEquals(lines[2],
                '        7 %srecipes/%s/tasks/%s/results/%s/logs/TR/dummy.txt'
                % (get_server_base(), self.recipe.id,
                   self.recipe.tasks[0].id,
                   self.recipe.tasks[0].results[0].id))

    def test_size_handles_404(self):
        with session.begin():
            self.job.recipesets[0].recipes[0].logs[0].filename = \
                    u'idontexist.txt'
        out = run_client(['bkr', 'job-logs', '--size', self.job.t_id])
        lines = out.splitlines()
        self.assertEquals(lines[0],
                '<missing> %srecipes/%s/logs/R/idontexist.txt'
                % (get_server_base(), self.recipe.id))

    def test_size_handles_http_errors(self):
        with session.begin():
            # /error/500 is treated specially by http_server.py, returns 500
            self.job.recipesets[0].recipes[0].logs[0].path = u'error'
            self.job.recipesets[0].recipes[0].logs[0].filename = u'500'
        out = run_client(['bkr', 'job-logs', '--size', self.job.t_id])
        lines = out.splitlines()
        self.assertEquals(lines[0],
                '<error:500> %srecipes/%s/logs/error/500'
                % (get_server_base(), self.recipe.id))
class JobLogsTest(ClientTestCase):
    """Tests for the `bkr job-logs` client command (plain log-URL output)."""

    def setUp(self):
        # set up a directory for our dummy job logs, with an HTTP server
        self.logs_dir = tempfile.mkdtemp(prefix='beaker-client-test-job-logs')
        self.addCleanup(shutil.rmtree, self.logs_dir, ignore_errors=True)
        self.archive_server = Process('http_server.py', args=[
                sys.executable,
                pkg_resources.resource_filename('bkr.inttest',
                                                'http_server.py'),
                '--base', self.logs_dir], listen_port=19998)
        self.archive_server.start()
        self.addCleanup(self.archive_server.stop)
        self.log_server_url = u'http://localhost:19998/'
        # job for testing
        with session.begin():
            self.job = data_setup.create_completed_job()
            recipe = self.job.recipesets[0].recipes[0]
            os.mkdir(os.path.join(self.logs_dir, 'R'))
            # Fix: close the file handles deterministically instead of
            # leaking them from open(...).write(...).
            with open(os.path.join(self.logs_dir, 'R', 'dummy.txt'), 'w') as f:
                f.write('recipe\n')
            recipe.logs[:] = [
                LogRecipe(server=self.log_server_url, path=u'R',
                          filename=u'dummy.txt')
            ]
            os.mkdir(os.path.join(self.logs_dir, 'T'))
            with open(os.path.join(self.logs_dir, 'T', 'dummy.txt'), 'w') as f:
                f.write('task\n')
            recipe.tasks[0].logs[:] = [
                LogRecipeTask(server=self.log_server_url, path=u'T',
                              filename=u'dummy.txt')
            ]
            os.mkdir(os.path.join(self.logs_dir, 'TR'))
            with open(os.path.join(self.logs_dir, 'TR', 'dummy.txt'), 'w') as f:
                f.write('result\n')
            recipe.tasks[0].results[0].logs[:] = [
                LogRecipeTaskResult(server=self.log_server_url, path=u'TR',
                                    filename=u'dummy.txt')
            ]

    def test_by_job(self):
        out = run_client(['bkr', 'job-logs', self.job.t_id])
        logs = out.splitlines()
        self.assertEquals(logs[0], self.log_server_url + u'R/dummy.txt')
        self.assertEquals(logs[1], self.log_server_url + u'T/dummy.txt')
        self.assertEquals(logs[2], self.log_server_url + u'TR/dummy.txt')

    def test_by_recipeset(self):
        out = run_client(['bkr', 'job-logs', self.job.recipesets[0].t_id])
        logs = out.splitlines()
        self.assertEquals(logs[0], self.log_server_url + u'R/dummy.txt')
        self.assertEquals(logs[1], self.log_server_url + u'T/dummy.txt')
        self.assertEquals(logs[2], self.log_server_url + u'TR/dummy.txt')

    def test_by_recipe(self):
        out = run_client(
                ['bkr', 'job-logs', self.job.recipesets[0].recipes[0].t_id])
        logs = out.splitlines()
        self.assertEquals(logs[0], self.log_server_url + u'R/dummy.txt')
        self.assertEquals(logs[1], self.log_server_url + u'T/dummy.txt')
        self.assertEquals(logs[2], self.log_server_url + u'TR/dummy.txt')

    def test_by_task(self):
        out = run_client([
            'bkr', 'job-logs',
            self.job.recipesets[0].recipes[0].tasks[0].t_id
        ])
        logs = out.splitlines()
        self.assertEquals(logs[0], self.log_server_url + u'T/dummy.txt')
        self.assertEquals(logs[1], self.log_server_url + u'TR/dummy.txt')

    def test_by_taskresult(self):
        out = run_client([
            'bkr', 'job-logs',
            self.job.recipesets[0].recipes[0].tasks[0].results[0].t_id
        ])
        logs = out.splitlines()
        self.assertEquals(logs[0], self.log_server_url + u'TR/dummy.txt')

    # https://bugzilla.redhat.com/show_bug.cgi?id=595512
    def test_invalid_taskspec(self):
        try:
            run_client(['bkr', 'job-logs', '12345'])
            # Fix: was a bare fail(...) which is a NameError, masking the
            # intended assertion failure.
            self.fail('should raise')
        except ClientError as e:
            self.assert_('Invalid taskspec' in e.stderr_output)
def setUpClass(cls):
    """Start an HTTP server whose docroot does not exist, so every
    request against it will fail."""
    server_script = pkg_resources.resource_filename('bkr.inttest',
            'http_server.py')
    cls.distro_server = Process('http_server.py',
            args=[sys.executable, server_script, '--base', '/notexist'],
            listen_port=19998)
    cls.distro_server.start()
def setUpClass(cls):
    """Serve the sync-tasks directory over HTTP for task sync tests."""
    cls.task_server = Process('http_server.py',
            args=[sys.executable, _http_server,
                  '--base', _sync_tasks_dir],
            listen_port=19998)
    cls.task_server.start()
    cls.task_url = 'http://localhost:19998/'