def setup_package():
    assert os.path.exists(CONFIG_FILE), \
            'Config file %s must exist' % CONFIG_FILE
    load_config(configfile=CONFIG_FILE)
    log_to_stream(sys.stdout, level=logging.DEBUG)
    from bkr.inttest import data_setup
    if 'BEAKER_SKIP_INIT_DB' not in os.environ:
        data_setup.setup_model()
        with session.begin():
            data_setup.create_labcontroller() # always need a lab controller
            data_setup.create_task(name=u'/distribution/install',
                    requires=u'make gcc nfs-utils wget procmail redhat-lsb ntp '
                        u'@development-tools @development-libs @development '
                        u'@desktop-platform-devel @server-platform-devel '
                        u'libxml2-python expect pyOpenSSL'.split())
            data_setup.create_task(name=u'/distribution/reservesys',
                    requires=u'emacs vim-enhanced unifdef sendmail'.split())
            data_setup.create_task(name=u'/distribution/utils/dummy')
            data_setup.create_task(name=u'/distribution/inventory')
            data_setup.create_distro()
    if not os.path.exists(turbogears.config.get('basepath.rpms')):
        os.mkdir(turbogears.config.get('basepath.rpms'))
    setup_slapd()
    turbogears.testutil.make_app(Root)
    turbogears.testutil.start_server()
    global processes
    processes = []
    if 'BEAKER_SERVER_BASE_URL' not in os.environ:
        # Need to start the server ourselves.
        # Usual pkg_resources ugliness is needed to ensure gunicorn doesn't
        # import pkg_resources before we get a chance to specify our
        # requirements in bkr.server.wsgi.
        processes.append(Process('gunicorn', args=[sys.executable, '-c',
                '__requires__ = ["CherryPy < 3.0"]; import pkg_resources; '
                'from gunicorn.app.wsgiapp import run; run()',
                '--bind', ':%s' % turbogears.config.get('server.socket_port'),
                '--workers', '8', '--access-logfile', '-', '--preload',
                'bkr.server.wsgi:application'],
                listen_port=turbogears.config.get('server.socket_port')))
    processes.append(Process('slapd',
            args=['slapd', '-d0', '-F' + slapd_config_dir,
                  '-hldap://127.0.0.1:3899/'],
            listen_port=3899, stop_signal=signal.SIGINT))
    try:
        for process in processes:
            process.start()
    except:
        # Clean up any processes which did start before re-raising.
        for process in processes:
            process.stop()
        raise
def test_update_harness_repos(self):
    """Test that the update_repos() call runs as expected.

    This checks that the harness repos that are supposed to be synced
    are actually synced. It does not check repo metadata.
    """
    if 'sqlite' in get_engine().name:
        raise unittest.SkipTest('SQL generated by lazy_create is not valid'
                ' in sqlite')
    base_path = mkdtemp()
    self.addCleanup(rmtree, base_path)
    self._create_remote_harness(base_path, 'foobangmajor')
    self._create_remote_harness(base_path, 'foobazmajor')
    faux_local_harness = mkdtemp('local_harness')
    self.addCleanup(rmtree, faux_local_harness)
    with session.begin():
        lab_controller = data_setup.create_labcontroller(
                fqdn=u'dummylab.example.invalid')
        data_setup.create_distro_tree(
                osmajor=OSMajor.lazy_create(osmajor=u'foobangmajor'),
                harness_dir=False, lab_controllers=[lab_controller])
        data_setup.create_distro_tree(
                osmajor=OSMajor.lazy_create(osmajor=u'foobazmajor'),
                harness_dir=False, lab_controllers=[lab_controller])
    # I'm not testing the config here, so just use createrepo
    update_repos('file://%s/' % base_path, faux_local_harness)
    self.assertTrue(os.path.exists(
            os.path.join(faux_local_harness, 'foobangmajor')))
    self.assertTrue(os.path.exists(
            os.path.join(faux_local_harness, 'foobazmajor')))
def test_does_not_run_createrepo_unnecessarily(self):
    osmajor = u'GreenBeretLinux99'
    with session.begin():
        lab_controller = data_setup.create_labcontroller(
                fqdn=u'dummylab.example.invalid')
        data_setup.create_distro_tree(
                osmajor=OSMajor.lazy_create(osmajor=osmajor),
                harness_dir=False, lab_controllers=[lab_controller])
    remote_harness_dir = tempfile.mkdtemp(suffix='remote')
    self.addCleanup(shutil.rmtree, remote_harness_dir)
    local_harness_dir = tempfile.mkdtemp(suffix='local')
    self.addCleanup(shutil.rmtree, local_harness_dir)
    self._create_remote_harness(remote_harness_dir, osmajor)
    # run it once, repo is built
    run_command('repo_update.py', 'beaker-repo-update',
            ['-b', 'file://%s/' % remote_harness_dir, '-d', local_harness_dir],
            ignore_stderr=True)
    repodata_dir = os.path.join(local_harness_dir, osmajor, 'repodata')
    mtime = os.path.getmtime(repodata_dir)
    # run it again, repo should not be rebuilt
    time.sleep(0.001)
    run_command('repo_update.py', 'beaker-repo-update',
            ['-b', 'file://%s/' % remote_harness_dir, '-d', local_harness_dir],
            ignore_stderr=True)
    self.assertEqual(os.path.getmtime(repodata_dir), mtime)
def test_exclude_nonexistent_osmajor(self):
    with session.begin():
        osmajor = OSMajor.lazy_create(osmajor=u'exist')
        lab_controller = data_setup.create_labcontroller(
                fqdn=u'dummylab.example.invalid')
        data_setup.create_distro_tree(osmajor=osmajor.osmajor,
                harness_dir=False, lab_controllers=[lab_controller])
        nonexistent_osmajor = OSMajor.lazy_create(osmajor=u'notexist')
    remote_harness_dir = tempfile.mkdtemp(suffix='remote')
    self.addCleanup(shutil.rmtree, remote_harness_dir)
    local_harness_dir = tempfile.mkdtemp(suffix='local')
    self.addCleanup(shutil.rmtree, local_harness_dir)
    self._create_remote_harness(remote_harness_dir, osmajor.osmajor)
    run_command('repo_update.py', 'beaker-repo-update',
            ['-b', 'file://%s/' % remote_harness_dir, '-d', local_harness_dir],
            ignore_stderr=True)
    self.assertTrue(os.path.exists(
            os.path.join(local_harness_dir, osmajor.osmajor)))
    self.assertFalse(os.path.exists(
            os.path.join(local_harness_dir, nonexistent_osmajor.osmajor)))
def test_does_not_run_createrepo_unnecessarily(self):
    if 'sqlite' in get_engine().name:
        raise unittest.SkipTest('SQL generated by lazy_create is not valid'
                ' in sqlite')
    osmajor = u'GreenBeretLinux99'
    with session.begin():
        lab_controller = data_setup.create_labcontroller(
                fqdn=u'dummylab.example.invalid')
        data_setup.create_distro_tree(
                osmajor=OSMajor.lazy_create(osmajor=osmajor),
                harness_dir=False, lab_controllers=[lab_controller])
    remote_harness_dir = mkdtemp(suffix='remote')
    self.addCleanup(rmtree, remote_harness_dir)
    local_harness_dir = mkdtemp(suffix='local')
    self.addCleanup(rmtree, local_harness_dir)
    self._create_remote_harness(remote_harness_dir, osmajor)
    # run it once, repo is built
    update_repos('file://%s/' % remote_harness_dir, local_harness_dir)
    repodata_dir = os.path.join(local_harness_dir, osmajor, 'repodata')
    mtime = os.path.getmtime(repodata_dir)
    # run it again, repo should not be rebuilt
    time.sleep(0.001)
    update_repos('file://%s/' % remote_harness_dir, local_harness_dir)
    self.assertEqual(os.path.getmtime(repodata_dir), mtime)
def test_update_harness_repos(self):
    """Test that the update_repos() call runs as expected.

    This checks that the harness repos that are supposed to be synced
    are actually synced. It does not check repo metadata.
    """
    self._create_remote_harness('foobangmajor')
    self._create_remote_harness('foobazmajor')
    faux_local_harness = tempfile.mkdtemp('local_harness')
    self.addCleanup(shutil.rmtree, faux_local_harness)
    with session.begin():
        lab_controller = data_setup.create_labcontroller(
                fqdn=u'dummylab.example.invalid')
        data_setup.create_distro_tree(
                osmajor=OSMajor.lazy_create(osmajor=u'foobangmajor'),
                harness_dir=False, lab_controllers=[lab_controller])
        data_setup.create_distro_tree(
                osmajor=OSMajor.lazy_create(osmajor=u'foobazmajor'),
                harness_dir=False, lab_controllers=[lab_controller])
    run_command('repo_update.py', 'beaker-repo-update',
            ['-b', self.harness_repo_url, '-d', faux_local_harness],
            ignore_stderr=True)
    self.assertTrue(os.path.exists(
            os.path.join(faux_local_harness, 'foobangmajor')))
    self.assertTrue(os.path.exists(
            os.path.join(faux_local_harness, 'foobazmajor')))
def test_exclude_nonexistent_osmajor(self):
    if 'sqlite' in get_engine().name:
        raise unittest.SkipTest('SQL generated by lazy_create is not valid'
                ' in sqlite')
    with session.begin():
        osmajor = OSMajor.lazy_create(osmajor=u'exist')
        lab_controller = data_setup.create_labcontroller(
                fqdn=u'dummylab.example.invalid')
        data_setup.create_distro_tree(osmajor=osmajor.osmajor,
                harness_dir=False, lab_controllers=[lab_controller])
        nonexistent_osmajor = OSMajor.lazy_create(osmajor=u'notexist')
    remote_harness_dir = mkdtemp(suffix='remote')
    self.addCleanup(rmtree, remote_harness_dir)
    local_harness_dir = mkdtemp(suffix='local')
    self.addCleanup(rmtree, local_harness_dir)
    self._create_remote_harness(remote_harness_dir, osmajor.osmajor)
    update_repos('file://%s/' % remote_harness_dir, local_harness_dir)
    self.assertTrue(os.path.exists(
            os.path.join(local_harness_dir, osmajor.osmajor)))
    self.assertFalse(os.path.exists(
            os.path.join(local_harness_dir, nonexistent_osmajor.osmajor)))
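# The tests above rely on a _create_remote_harness() helper defined elsewhere
# in the test class. Two call shapes appear: a two-argument form
# (base_path, osmajor) and, in one run_command-based test, a one-argument form
# where the class keeps the base path itself (self.harness_repo_url). Below is
# a minimal sketch of the assumed two-argument form, for illustration only:
# the empty-repo payload and the exact createrepo flags are assumptions, not
# the real helper.
import os
import subprocess

def _create_remote_harness(self, base_path, osmajor):
    # One harness repo per OS major version, named after the osmajor --
    # the layout that update_repos()/beaker-repo-update expects to sync.
    harness_dir = os.path.join(base_path, osmajor)
    os.makedirs(harness_dir)
    # Assumption: an empty repo with valid repodata is enough for these
    # tests; a real harness repo would contain harness RPMs such as beah
    # or restraint.
    subprocess.check_call(['createrepo', '-q', harness_dir])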
def setup_package():
    assert os.path.exists(CONFIG_FILE), \
            'Config file %s must exist' % CONFIG_FILE
    load_config(configfile=CONFIG_FILE)
    log_to_stream(sys.stdout, level=logging.DEBUG)
    from bkr.inttest import data_setup
    if 'BEAKER_SKIP_INIT_DB' not in os.environ:
        data_setup.setup_model()
        with session.begin():
            # Fill in the bare minimum data which Beaker assumes will always
            # be present. Note that this can be called multiple times (for
            # example, the beaker-server-redhat add-on package reuses this
            # setup function).
            if not LabController.query.count():
                data_setup.create_labcontroller()
            if not Task.query.count():
                data_setup.create_task(name=u'/distribution/install',
                        requires=u'make gcc nfs-utils wget procmail redhat-lsb ntp '
                            u'@development-tools @development-libs @development '
                            u'@desktop-platform-devel @server-platform-devel '
                            u'libxml2-python expect pyOpenSSL'.split())
                data_setup.create_task(name=u'/distribution/check-install')
                data_setup.create_task(name=u'/distribution/reservesys',
                        requires=u'emacs vim-enhanced unifdef sendmail'.split())
                data_setup.create_task(name=u'/distribution/utils/dummy')
                data_setup.create_task(name=u'/distribution/inventory')
            if not Distro.query.count():
                # The 'BlueShoeLinux5-5' string appears in many tests,
                # because it's the distro name used in complete-job.xml.
                data_setup.create_distro_tree(osmajor=u'BlueShoeLinux5',
                        distro_name=u'BlueShoeLinux5-5')
    if os.path.exists(turbogears.config.get('basepath.rpms')):
        # Remove any task RPMs left behind by previous test runs
        for entry in os.listdir(turbogears.config.get('basepath.rpms')):
            shutil.rmtree(os.path.join(
                    turbogears.config.get('basepath.rpms'), entry),
                    ignore_errors=True)
    else:
        os.mkdir(turbogears.config.get('basepath.rpms'))
    setup_slapd()
    mail_capture_thread.start()
    if turbogears.config.get('openstack.identity_api_url'):
        setup_openstack()
    turbogears.testutil.make_app(Root)
    turbogears.testutil.start_server()
    global processes
    processes = []
    if 'BEAKER_SERVER_BASE_URL' not in os.environ:
        # Need to start the server ourselves.
        # Usual pkg_resources ugliness is needed to ensure gunicorn doesn't
        # import pkg_resources before we get a chance to specify our
        # requirements in bkr.server.wsgi.
        processes.append(Process('gunicorn', args=[sys.executable, '-c',
                '__requires__ = ["CherryPy < 3.0"]; import pkg_resources; '
                'from gunicorn.app.wsgiapp import run; run()',
                '--bind', ':%s' % turbogears.config.get('server.socket_port'),
                '--workers', '8', '--access-logfile', '-', '--preload',
                'bkr.server.wsgi:application'],
                listen_port=turbogears.config.get('server.socket_port')))
    processes.append(Process('slapd',
            args=['slapd', '-d0', '-F' + slapd_config_dir,
                  '-hldap://127.0.0.1:3899/'],
            listen_port=3899, stop_signal=signal.SIGINT))
    try:
        for process in processes:
            process.start()
    except:
        # Clean up any processes which did start before re-raising.
        for process in processes:
            process.stop()
        raise
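# Both setup_package() variants drive external daemons (gunicorn, slapd)
# through a Process helper from bkr.inttest. A minimal sketch of the assumed
# interface (name, args, listen_port, stop_signal, plus start()/stop()) is
# given below for illustration only; the real implementation lives in the
# Beaker test suite.
import signal
import socket
import subprocess
import time

class Process(object):

    def __init__(self, name, args, listen_port=None,
            stop_signal=signal.SIGTERM):
        self.name = name
        self.args = args
        self.listen_port = listen_port
        self.stop_signal = stop_signal
        self.popen = None

    def start(self):
        self.popen = subprocess.Popen(self.args)
        if self.listen_port is not None:
            self._wait_for_listen(self.listen_port)

    def _wait_for_listen(self, port, timeout=30):
        # Poll until the child accepts TCP connections, so the tests only
        # proceed once the daemon is actually ready to serve requests.
        deadline = time.time() + timeout
        while time.time() < deadline:
            try:
                socket.create_connection(('127.0.0.1', port), timeout=1).close()
                return
            except socket.error:
                time.sleep(0.2)
        raise RuntimeError('%s not listening on port %d' % (self.name, port))

    def stop(self):
        if self.popen is not None and self.popen.poll() is None:
            self.popen.send_signal(self.stop_signal)
            self.popen.wait()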