def test_update_harness_repos(self):
    """Test that the update_repo() call runs as expected.

    This checks that the harness repos that are supposed to be
    synced are actually synced. Does not check repo metadata.
    """
    if 'sqlite' in get_engine().name:
        raise unittest.SkipTest('SQL generated by lazy_create is not valid'
                                ' in sqlite')
    # Remote side: two faux harness repos served from a temp directory.
    base_path = mkdtemp()
    self.addCleanup(rmtree, base_path)
    self._create_remote_harness(base_path, 'foobangmajor')
    self._create_remote_harness(base_path, 'foobazmajor')
    # Local side: destination directory for the sync.
    faux_local_harness = mkdtemp('local_harness')
    self.addCleanup(rmtree, faux_local_harness)
    with session.begin():
        for osmajor_name in (u'foobangmajor', u'foobazmajor'):
            OSMajor.lazy_create(osmajor=osmajor_name)
    # I'm not testing the config here, so just use createrepo
    update_repos('file://%s/' % base_path, faux_local_harness)
    for osmajor_name in ('foobangmajor', 'foobazmajor'):
        self.assertTrue(
            os.path.exists(os.path.join(faux_local_harness, osmajor_name)))
def test_update_harness_repos(self):
    """Test that the update_repo() call runs as expected.

    This checks that the harness repos that are supposed to be
    synced are actually synced. Does not check repo metadata.
    """
    if 'sqlite' in get_engine().name:
        raise unittest.SkipTest('SQL generated by lazy_create is not valid'
                                ' in sqlite')
    base_path = mkdtemp()
    self.addCleanup(rmtree, base_path)
    self._create_remote_harness(base_path, 'foobangmajor')
    self._create_remote_harness(base_path, 'foobazmajor')
    faux_local_harness = mkdtemp('local_harness')
    self.addCleanup(rmtree, faux_local_harness)
    with session.begin():
        lab_controller = data_setup.create_labcontroller(
            fqdn=u'dummylab.example.invalid')
        # Trees must exist in some lab controller for their OS majors
        # to be considered by the repo sync.
        for osmajor_name in (u'foobangmajor', u'foobazmajor'):
            data_setup.create_distro_tree(
                osmajor=OSMajor.lazy_create(osmajor=osmajor_name),
                harness_dir=False,
                lab_controllers=[lab_controller])
    # I'm not testing the config here, so just use createrepo
    update_repos('file://%s/' % base_path, faux_local_harness)
    for osmajor_name in ('foobangmajor', 'foobazmajor'):
        self.assertTrue(
            os.path.exists(os.path.join(faux_local_harness, osmajor_name)))
def _from_csv(cls, system, data, csv_type, log):
    """Import one 'exclude' row from a CSV file into System objects.

    :param system: System being updated.
    :param data: dict of CSV column values ('arch', 'family', 'update',
        'excluded').
    :param csv_type: CSV row type (part of the importer API, unused here).
    :param log: list to which error messages are appended.
    :return: False if the row was invalid, True otherwise.
    """
    try:
        arch = Arch.by_name(data['arch'])
    except ValueError:
        log.append("%s: Invalid Arch %s" % (system.fqdn, data['arch']))
        return False
    if data['update'] and data['family']:
        # Row targets a specific OS version (family + update).
        try:
            osversion = OSVersion.by_name(
                    OSMajor.by_name(unicode(data['family'])),
                    unicode(data['update']))
        except InvalidRequestError:
            log.append("%s: Invalid Family %s Update %s" %
                    (system.fqdn, data['family'], data['update']))
            return False
        if osversion not in [oldosversion.osversion for oldosversion in
                system.excluded_osversion_byarch(arch)]:
            # Fixed: compare case-insensitively, consistent with the
            # osmajor branch below (CSV files may contain 'True' or 'true';
            # previously lowercase values were silently ignored here).
            if data['excluded'].lower() == 'true':
                exclude_osversion = ExcludeOSVersion(osversion=osversion,
                        arch=arch)
                system.excluded_osversion.append(exclude_osversion)
                system.record_activity(user=identity.current.user,
                        service=u'CSV', action=u'Added',
                        field=u'Excluded_families', old=u'',
                        new=u'%s/%s' % (osversion, arch))
        else:
            if data['excluded'].lower() == 'false':
                for old_osversion in system.excluded_osversion_byarch(arch):
                    if old_osversion.osversion == osversion:
                        system.record_activity(user=identity.current.user,
                                service=u'CSV', action=u'Removed',
                                field=u'Excluded_families',
                                old=u'%s/%s' % (old_osversion.osversion, arch),
                                new=u'')
                        session.delete(old_osversion)
    if not data['update'] and data['family']:
        # Row targets a whole OS major (family only, no update).
        try:
            osmajor = OSMajor.by_name(data['family'])
        except InvalidRequestError:
            log.append("%s: Invalid family %s " % (system.fqdn,
                    data['family']))
            return False
        if osmajor not in [oldosmajor.osmajor for oldosmajor in
                system.excluded_osmajor_byarch(arch)]:
            if data['excluded'].lower() == 'true':
                exclude_osmajor = ExcludeOSMajor(osmajor=osmajor, arch=arch)
                system.excluded_osmajor.append(exclude_osmajor)
                system.record_activity(user=identity.current.user,
                        service=u'CSV', action=u'Added',
                        field=u'Excluded_families', old=u'',
                        new=u'%s/%s' % (osmajor, arch))
        else:
            if data['excluded'].lower() == 'false':
                for old_osmajor in system.excluded_osmajor_byarch(arch):
                    if old_osmajor.osmajor == osmajor:
                        system.record_activity(user=identity.current.user,
                                service=u'CSV', action=u'Removed',
                                field=u'Excluded_families',
                                old=u'%s/%s' % (old_osmajor.osmajor, arch),
                                new=u'')
                        session.delete(old_osmajor)
    return True
def index(self, **kwargs):
    """Render the reserve workflow page with distro picker options."""
    value = dict((k, v) for k, v in kwargs.iteritems()
                 if k in ['osmajor', 'tag', 'distro'])
    options = {}
    # It's important that 'None selected' is prepended
    # rather than appended to the list of tags, as that will ensure
    # it is the default option in the drop down.
    options['tag'] = ([('', 'None selected')] +
                      [(tag.tag, tag.tag) for tag in DistroTag.used()])
    options['osmajor'] = ([('', 'None selected')] +
                          [(om.osmajor, om.osmajor) for om in
                           OSMajor.ordered_by_osmajor(OSMajor.in_any_lab())])
    options['distro'] = self._get_distro_options(**kwargs)
    options['lab_controller_id'] = ([(None, 'None selected')] +
                                    LabController.get_all(valid=True))
    options['distro_tree_id'] = self._get_distro_tree_options(**kwargs)
    attrs = {}
    if not options['distro']:
        # No distros match the current selection; grey out the picker.
        attrs['distro'] = dict(disabled=True)
    return dict(title=_(u'Reserve Workflow'),
                widget=self.widget,
                value=value,
                widget_options=options,
                widget_attrs=attrs)
def test_filters_out_excluded_families(self):
    """machine-test must skip distro trees whose family/arch combination
    is excluded on the target system."""
    with session.begin():
        trees = {}
        for family in (u'RedHatEnterpriseLinux3', u'RedHatEnterpriseLinux4'):
            for arch in (u'i386', u'x86_64'):
                trees[(family, arch)] = data_setup.create_distro_tree(
                        osmajor=family, arch=arch, distro_tags=[u'STABLE'])
        # system with RHEL4 i386 and RHEL3 x86_64 excluded
        system = data_setup.create_system(arch=u'i386')
        system.arch.append(Arch.by_name(u'x86_64'))
        system.excluded_osmajor.extend([
            ExcludeOSMajor(arch=Arch.by_name(u'i386'),
                    osmajor=OSMajor.by_name(u'RedHatEnterpriseLinux4')),
            ExcludeOSMajor(arch=Arch.by_name(u'x86_64'),
                    osmajor=OSMajor.by_name(u'RedHatEnterpriseLinux3')),
        ])
    out = run_client(['bkr', 'machine-test', '--machine', system.fqdn])
    self.assert_(out.startswith('Submitted:'), out)
    with session.begin():
        new_job = Job.query.order_by(Job.id.desc()).first()
        distro_trees = [recipe.distro_tree for recipe in new_job.all_recipes]
        self.assert_(trees[(u'RedHatEnterpriseLinux3', u'i386')]
                in distro_trees, distro_trees)
        self.assert_(trees[(u'RedHatEnterpriseLinux3', u'x86_64')]
                not in distro_trees, distro_trees)
        self.assert_(trees[(u'RedHatEnterpriseLinux4', u'i386')]
                not in distro_trees, distro_trees)
        self.assert_(trees[(u'RedHatEnterpriseLinux4', u'x86_64')]
                in distro_trees, distro_trees)
def test_filters_out_excluded_families(self):
    """machine-test must skip distro trees whose family/arch combination
    is excluded on the target system."""
    with session.begin():
        def stable_tree(osmajor, arch):
            # Helper: a STABLE-tagged tree for the given family/arch.
            return data_setup.create_distro_tree(
                    osmajor=osmajor, arch=arch, distro_tags=[u"STABLE"])
        rhel3_i386 = stable_tree(u"RedHatEnterpriseLinux3", u"i386")
        rhel3_x86_64 = stable_tree(u"RedHatEnterpriseLinux3", u"x86_64")
        rhel4_i386 = stable_tree(u"RedHatEnterpriseLinux4", u"i386")
        rhel4_x86_64 = stable_tree(u"RedHatEnterpriseLinux4", u"x86_64")
        # system with RHEL4 i386 and RHEL3 x86_64 excluded
        system = data_setup.create_system(arch=u"i386")
        system.arch.append(Arch.by_name(u"x86_64"))
        system.excluded_osmajor.extend([
            ExcludeOSMajor(
                arch=Arch.by_name(u"i386"),
                osmajor=OSMajor.by_name(u"RedHatEnterpriseLinux4")),
            ExcludeOSMajor(
                arch=Arch.by_name(u"x86_64"),
                osmajor=OSMajor.by_name(u"RedHatEnterpriseLinux3")),
        ])
    out = run_client(["bkr", "machine-test", "--machine", system.fqdn])
    self.assert_(out.startswith("Submitted:"), out)
    with session.begin():
        new_job = Job.query.order_by(Job.id.desc()).first()
        distro_trees = [recipe.distro_tree for recipe in new_job.all_recipes]
        self.assert_(rhel3_i386 in distro_trees, distro_trees)
        self.assert_(rhel3_x86_64 not in distro_trees, distro_trees)
        self.assert_(rhel4_i386 not in distro_trees, distro_trees)
        self.assert_(rhel4_x86_64 in distro_trees, distro_trees)
def index(self, **kwargs):
    """Render the reserve workflow page, defaulting the tag to STABLE."""
    kwargs.setdefault('tag', 'STABLE')
    value = dict((k, v) for k, v in kwargs.iteritems()
                 if k in ['osmajor', 'tag', 'distro'])
    options = {}
    # 'None selected' is prepended so it becomes the default drop-down entry.
    options['tag'] = ([('', 'None selected')] +
                      [(tag.tag, tag.tag) for tag in DistroTag.used()])
    options['osmajor'] = ([('', 'None selected')] +
                          [(om.osmajor, om.osmajor) for om in
                           OSMajor.ordered_by_osmajor(OSMajor.in_any_lab())])
    options['distro'] = self._get_distro_options(**kwargs)
    options['lab_controller_id'] = ([(None, 'None selected')] +
                                    LabController.get_all(valid=True))
    options['distro_tree_id'] = self._get_distro_tree_options(**kwargs)
    attrs = {}
    if not options['distro']:
        # No distros match the current selection; grey out the picker.
        attrs['distro'] = dict(disabled=True)
    return dict(title=_(u'Reserve Workflow'),
                widget=self.widget,
                value=value,
                widget_options=options,
                widget_attrs=attrs)
def test_update_harness_repos(self):
    """Test that the update_repo() call runs as expected.

    This checks that the harness repos that are supposed to be
    synced are actually synced. Does not check repo metadata.
    """
    for osmajor_name in ('foobangmajor', 'foobazmajor'):
        self._create_remote_harness(osmajor_name)
    faux_local_harness = tempfile.mkdtemp('local_harness')
    self.addCleanup(shutil.rmtree, faux_local_harness)
    with session.begin():
        lab_controller = data_setup.create_labcontroller(
            fqdn=u'dummylab.example.invalid')
        # Trees must exist in some lab controller for their OS majors
        # to be considered by the repo sync.
        for osmajor_name in (u'foobangmajor', u'foobazmajor'):
            data_setup.create_distro_tree(
                osmajor=OSMajor.lazy_create(osmajor=osmajor_name),
                harness_dir=False,
                lab_controllers=[lab_controller])
    run_command('repo_update.py', 'beaker-repo-update',
                ['-b', self.harness_repo_url, '-d', faux_local_harness],
                ignore_stderr=True)
    for osmajor_name in ('foobangmajor', 'foobazmajor'):
        self.assertTrue(
            os.path.exists(os.path.join(faux_local_harness, osmajor_name)))
def index(self, **kwargs):
    """Render the reserve workflow page."""
    # CherryPy will give us distro_tree_id as a scalar if it only has one
    # value, but we want it to always be a list of int
    raw_tree_ids = kwargs.get('distro_tree_id')
    if not raw_tree_ids:
        kwargs['distro_tree_id'] = []
    elif isinstance(raw_tree_ids, list):
        kwargs['distro_tree_id'] = [int(x) for x in raw_tree_ids]
    else:
        kwargs['distro_tree_id'] = [int(raw_tree_ids)]
    # If we got a distro_tree_id but no osmajor or distro, fill those in
    # with the right values so that the distro picker is populated properly
    if kwargs['distro_tree_id']:
        distro_tree = DistroTree.by_id(kwargs['distro_tree_id'][0])
        if not kwargs.get('distro'):
            kwargs['distro'] = distro_tree.distro.name
        if not kwargs.get('osmajor'):
            kwargs['osmajor'] = distro_tree.distro.osversion.osmajor.osmajor
    options = {
        'tag': [tag.tag for tag in DistroTag.used()],
        'osmajor': [om.osmajor for om in
                    OSMajor.ordered_by_osmajor(OSMajor.in_any_lab())],
        'distro': self._get_distro_options(osmajor=kwargs.get('osmajor'),
                                           tag=kwargs.get('tag')),
        'distro_tree_id': self._get_distro_tree_options(
            distro=kwargs.get('distro')),
        'lab': [lc.fqdn for lc in
                LabController.query.filter(LabController.removed == None)],
    }
    return dict(title=_(u'Reserve Workflow'), selection=kwargs,
                options=options)
def test_exclude_nonexistent_osmajor(self):
    """OS majors with no distro tree in any lab must not be synced."""
    with session.begin():
        osmajor = OSMajor.lazy_create(osmajor="exist")
        lab_controller = data_setup.create_labcontroller(
            fqdn=u'dummylab.example.invalid')
        data_setup.create_distro_tree(osmajor=osmajor.osmajor,
                                      harness_dir=False,
                                      lab_controllers=[lab_controller])
        # No distro tree is created for this one, so it must be skipped.
        nonexistent_osmajor = OSMajor.lazy_create(osmajor=u'notexist')
    remote_harness_dir = tempfile.mkdtemp(suffix='remote')
    self.addCleanup(shutil.rmtree, remote_harness_dir)
    local_harness_dir = tempfile.mkdtemp(suffix='local')
    self.addCleanup(shutil.rmtree, local_harness_dir)
    self._create_remote_harness(remote_harness_dir, osmajor.osmajor)
    run_command('repo_update.py', 'beaker-repo-update',
                ['-b', 'file://%s/' % remote_harness_dir,
                 '-d', local_harness_dir],
                ignore_stderr=True)
    self.assertTrue(os.path.exists(
        os.path.join(local_harness_dir, osmajor.osmajor)))
    self.assertFalse(os.path.exists(
        os.path.join(local_harness_dir, nonexistent_osmajor.osmajor)))
def create_task(name=None, exclude_arches=None, exclusive_arches=None,
                exclude_osmajors=None, exclusive_osmajors=None,
                version=u'1.0-1', uploader=None, owner=None,
                priority=u'Manual', valid=None, path=None, description=None,
                requires=None, runfor=None, type=None, avg_time=1200):
    """Create and persist a Task with sensible defaults for testing."""
    if name is None:
        name = unique_name(u'/distribution/test_task_%s')
    if path is None:
        path = u'/mnt/tests/%s' % name
    if description is None:
        description = unique_name(u'description%s')
    safe_name = name.replace('/', '-')
    if uploader is None:
        uploader = create_user(user_name=u'task-uploader%s' % safe_name)
    if owner is None:
        # NOTE(review): this template contains no %s placeholder, so the
        # '%' operation below raises TypeError; it looks like a redacted
        # email-address template -- confirm the intended value.
        owner = u'*****@*****.**' % safe_name
    if valid is None:
        valid = True

    task = Task(name=name)
    task.rpm = u'example%s-%s.noarch.rpm' % (safe_name, version)
    task.version = version
    task.uploader = uploader
    task.owner = owner
    task.priority = priority
    task.valid = valid
    task.path = path
    task.description = description
    task.avg_time = avg_time
    task.license = u'GPLv99+'
    for t in (type or []):
        task.types.append(TaskType.lazy_create(type=t))
    for arch in (exclude_arches or []):
        task.excluded_arches.append(Arch.by_name(arch))
    for arch in (exclusive_arches or []):
        task.exclusive_arches.append(Arch.by_name(arch))
    for osmajor in (exclude_osmajors or []):
        task.excluded_osmajors.append(OSMajor.lazy_create(osmajor=osmajor))
    for osmajor in (exclusive_osmajors or []):
        task.exclusive_osmajors.append(OSMajor.lazy_create(osmajor=osmajor))
    for require in (requires or []):
        task.required.append(TaskPackage.lazy_create(package=require))
    for run in (runfor or []):
        task.runfor.append(TaskPackage.lazy_create(package=run))
    session.add(task)
    session.flush()
    log.debug('Created task %s', task.name)
    return task
def test_adding_task_with_releases_list(self):
    """task-add honours the RPM's Releases tag for exclusive OS majors."""
    with session.begin():
        for osmajor_name in (u'RedHatEnterpriseLinux5',
                             u'RedHatEnterpriseLinux6'):
            OSMajor.lazy_create(osmajor=osmajor_name)
    rpm_path = pkg_resources.resource_filename(
        'bkr.inttest.server',
        'task-rpms/tmp-distribution-beaker-dummy_for_bz1422410-1.0-1.noarch.rpm')
    out = run_client(['bkr', 'task-add', rpm_path])
    self.assertIn(u'Success', out)
    with session.begin():
        task = Task.by_name(u'/distribution/beaker/dummy_for_bz1422410')
        self.assertItemsEqual([OSMajor.by_name(u'RedHatEnterpriseLinux5')],
                              task.exclusive_osmajors)
def test_executed_tasks_family_sorting(self):
    """OS major options are sorted naturally and omit unused families."""
    with session.begin():
        task = data_setup.create_task()
        for family in (u'BlueShoe10', u'BlueShoe9'):
            data_setup.create_completed_job(
                task_name=task.name,
                distro_tree=data_setup.create_distro_tree(osmajor=family))
        # plus one that is never used
        OSMajor.lazy_create(osmajor=u'neverused666')
    b = self.browser
    b.get(get_server_base() + 'tasks/%d' % task.id)
    options = [element.text for element in b.find_elements_by_xpath(
            "//select[@name='osmajor_id']/option")]
    self.assert_(options.index('BlueShoe9') < options.index('BlueShoe10'),
                 options)
    self.assert_('neverused666' not in options, options)
def create_distro(name=None, osmajor=u'DansAwesomeLinux6', osminor=u'9',
                  arches=None, tags=None, harness_dir=True,
                  osmajor_installopts=None):
    """Create and persist a Distro (and its OSMajor/OSVersion) for testing.

    :param arches: list of arch names for the new OS version, or None.
    :param tags: distro tags to apply, or None.
    :param harness_dir: if true, ensure the harness directory for the
        distro's OS major exists on disk.
    :param osmajor_installopts: per-arch install options dict
        ('ks_meta', 'kernel_options', 'kernel_options_post').
    :return: the created Distro.
    """
    osmajor = OSMajor.lazy_create(osmajor=osmajor)
    osversion = OSVersion.lazy_create(osmajor=osmajor, osminor=osminor)
    if arches:
        osversion.arches = arches
    if not name:
        name = unique_name(u'%s.%s-%%s' % (osmajor, osminor))
    distro = Distro.lazy_create(name=name, osversion=osversion)
    for tag in (tags or []):
        distro.add_tag(tag)
    # add distro wide install options, if any
    if osmajor_installopts:
        # Fixed: guard against arches=None (the default), which previously
        # raised TypeError when osmajor_installopts was given without arches.
        for arch in (arches or []):
            io = OSMajorInstallOptions.lazy_create(
                osmajor_id=osmajor.id, arch_id=Arch.by_name(arch).id)
            io.ks_meta = osmajor_installopts.get('ks_meta', '')
            io.kernel_options = osmajor_installopts.get('kernel_options', '')
            io.kernel_options_post = osmajor_installopts.get(
                'kernel_options_post', '')
    log.debug('Created distro %r', distro)
    if harness_dir:
        harness_dir = os.path.join(turbogears.config.get('basepath.harness'),
                                   distro.osversion.osmajor.osmajor)
        if not os.path.exists(harness_dir):
            os.makedirs(harness_dir)
    return distro
def update_repos(baseurl, basepath):
    """Sync harness repos from baseurl into basepath, rebuilding repo
    metadata only for repos that received new files."""
    # We only sync repos for the OS majors that have existing trees in the
    # lab controllers.
    for osmajor in OSMajor.in_any_lab():
        # urlgrabber < 3.9.1 doesn't handle unicode urls
        osmajor = unicode(osmajor).encode('utf8')
        dest = "%s/%s" % (basepath, osmajor)
        if os.path.islink(dest):
            # skip symlinks
            continue
        repo_url = urlparse.urljoin(baseurl, '%s/' % urllib.quote(osmajor))
        syncer = RepoSyncer(repo_url, dest)
        try:
            syncer.sync()
        except KeyboardInterrupt:
            raise
        except HarnessRepoNotFoundError:
            log.warning('Harness packages not found for OS major %s, ignoring',
                        osmajor)
            continue
        # The syncer drops a flag file when it fetched anything new;
        # only then do we rebuild the repo metadata.
        flag_path = os.path.join(dest, '.new_files')
        if not os.path.exists(flag_path):
            continue
        createrepo_results = run_createrepo(cwd=dest)
        returncode = createrepo_results.returncode
        if returncode != 0:
            raise RuntimeError(
                'Createrepo failed.\nreturncode:%s cmd:%s err:%s'
                % (returncode, createrepo_results.command,
                   createrepo_results.err))
        os.unlink(flag_path)
def update_repos(baseurl, basepath):
    """Sync harness repos from baseurl into basepath, rebuilding repo
    metadata only when the sync fetched new packages."""
    # We only sync repos for the OS majors that have existing trees in the
    # lab controllers.
    for osmajor in OSMajor.in_any_lab():
        # urlgrabber < 3.9.1 doesn't handle unicode urls
        osmajor = unicode(osmajor).encode('utf8')
        dest = "%s/%s" % (basepath, osmajor)
        if os.path.islink(dest):
            # skip symlinks
            continue
        syncer = RepoSyncer(
            urlparse.urljoin(baseurl, '%s/' % urllib.quote(osmajor)), dest)
        try:
            has_new_packages = syncer.sync()
        except KeyboardInterrupt:
            raise
        except Exception as e:
            # Best-effort: log the failure and move on to the next OS major.
            log.warning('%s', e)
            continue
        if not has_new_packages:
            continue
        createrepo_results = run_createrepo(cwd=dest)
        returncode = createrepo_results.returncode
        if returncode != 0:
            raise RuntimeError(
                'Createrepo failed.\nreturncode:%s cmd:%s err:%s'
                % (returncode, createrepo_results.command,
                   createrepo_results.err))
def create_distro(name=None, osmajor=u'DansAwesomeLinux6', osminor=u'9',
                  arches=None, tags=None, harness_dir=True,
                  osmajor_installopts=None):
    """Create and persist a Distro (and its OSMajor/OSVersion) for testing.

    :param arches: list of arch names for the new OS version, or None.
    :param tags: distro tags to apply, or None.
    :param harness_dir: if true, ensure the harness directory for the
        distro's OS major exists on disk.
    :param osmajor_installopts: per-arch install options dict
        ('ks_meta', 'kernel_options', 'kernel_options_post').
    :return: the created Distro.
    """
    osmajor = OSMajor.lazy_create(osmajor=osmajor)
    osversion = OSVersion.lazy_create(osmajor=osmajor, osminor=osminor)
    if arches:
        osversion.arches = arches
    if not name:
        name = unique_name(u'%s.%s-%%s' % (osmajor, osminor))
    distro = Distro.lazy_create(name=name, osversion=osversion)
    for tag in (tags or []):
        distro.add_tag(tag)
    # add distro wide install options, if any
    if osmajor_installopts:
        # Fixed: guard against arches=None (the default), which previously
        # raised TypeError when osmajor_installopts was given without arches.
        for arch in (arches or []):
            io = OSMajorInstallOptions.lazy_create(
                osmajor_id=osmajor.id, arch_id=Arch.by_name(arch).id)
            io.ks_meta = osmajor_installopts.get('ks_meta', '')
            io.kernel_options = osmajor_installopts.get('kernel_options', '')
            io.kernel_options_post = osmajor_installopts.get(
                'kernel_options_post', '')
    log.debug('Created distro %r', distro)
    if harness_dir:
        harness_dir = os.path.join(turbogears.config.get('basepath.harness'),
                                   distro.osversion.osmajor.osmajor)
        if not os.path.exists(harness_dir):
            os.makedirs(harness_dir)
    return distro
def test_does_not_run_createrepo_unnecessarily(self):
    """A second sync with no new packages must not rebuild repodata."""
    if 'sqlite' in get_engine().name:
        raise unittest.SkipTest('SQL generated by lazy_create is not valid'
                                ' in sqlite')
    osmajor = u'GreenBeretLinux99'
    with session.begin():
        lab_controller = data_setup.create_labcontroller(
            fqdn=u'dummylab.example.invalid')
        data_setup.create_distro_tree(
            osmajor=OSMajor.lazy_create(osmajor=osmajor),
            harness_dir=False,
            lab_controllers=[lab_controller])
    remote_harness_dir = mkdtemp(suffix='remote')
    self.addCleanup(rmtree, remote_harness_dir)
    local_harness_dir = mkdtemp(suffix='local')
    self.addCleanup(rmtree, local_harness_dir)
    self._create_remote_harness(remote_harness_dir, osmajor)
    # run it once, repo is built
    update_repos('file://%s/' % remote_harness_dir, local_harness_dir)
    repodata_dir = os.path.join(local_harness_dir, osmajor, 'repodata')
    mtime = os.path.getmtime(repodata_dir)
    # run it again, repo should not be rebuilt
    time.sleep(0.001)
    update_repos('file://%s/' % remote_harness_dir, local_harness_dir)
    self.assertEquals(os.path.getmtime(repodata_dir), mtime)
def test_does_not_run_createrepo_unnecessarily(self):
    """A second sync with no new packages must not rebuild repodata."""
    osmajor = u'GreenBeretLinux99'
    with session.begin():
        lab_controller = data_setup.create_labcontroller(
            fqdn=u'dummylab.example.invalid')
        data_setup.create_distro_tree(
            osmajor=OSMajor.lazy_create(osmajor=osmajor),
            harness_dir=False,
            lab_controllers=[lab_controller])
    remote_harness_dir = tempfile.mkdtemp(suffix='remote')
    self.addCleanup(shutil.rmtree, remote_harness_dir)
    local_harness_dir = tempfile.mkdtemp(suffix='local')
    self.addCleanup(shutil.rmtree, local_harness_dir)
    self._create_remote_harness(remote_harness_dir, osmajor)
    update_args = ['-b', 'file://%s/' % remote_harness_dir,
                   '-d', local_harness_dir]
    # run it once, repo is built
    run_command('repo_update.py', 'beaker-repo-update', update_args,
                ignore_stderr=True)
    repodata_dir = os.path.join(local_harness_dir, osmajor, 'repodata')
    mtime = os.path.getmtime(repodata_dir)
    # run it again, repo should not be rebuilt
    time.sleep(0.001)
    run_command('repo_update.py', 'beaker-repo-update', update_args,
                ignore_stderr=True)
    self.assertEquals(os.path.getmtime(repodata_dir), mtime)
def test_concurrent_new_osversion(self):
    """Two importers racing on a brand-new OS version must both succeed."""
    distro_data = dict(self.distro_data)
    # ensure osmajor already exists
    with session.begin():
        OSMajor.lazy_create(osmajor=distro_data["osmajor"])
    # ... but osversion is new
    distro_data["osminor"] = "6969"
    self.add_distro_trees_concurrently(distro_data, distro_data)
def test_concurrent_new_osversion(self):
    """Two importers racing on a brand-new OS version must both succeed."""
    distro_data = dict(self.distro_data)
    with session.begin():
        # ensure osmajor already exists
        OSMajor.lazy_create(osmajor=distro_data['osmajor'])
    # ... but osversion is new
    distro_data['osminor'] = '6969'
    self.add_distro_trees_concurrently(distro_data, distro_data)
def test_concurrent_same_tree(self):
    """Two importers racing on an identical tree must both succeed."""
    distro_data = dict(self.distro_data)
    # ensure osmajor, osversion, and distro already exist
    with session.begin():
        osmajor = OSMajor.lazy_create(osmajor=distro_data["osmajor"])
        osversion = OSVersion.lazy_create(osmajor=osmajor,
                                          osminor=distro_data["osminor"])
        osversion.arches = [Arch.lazy_create(arch=a)
                            for a in distro_data["arches"]]
        Distro.lazy_create(name=distro_data["name"], osversion=osversion)
    self.add_distro_trees_concurrently(distro_data, distro_data)
def setUp(self):
    """Log in as a fresh uploader and seed the OS majors used below."""
    with session.begin():
        self.uploader = data_setup.create_user(password=u'upload')
        # Make sure the Releases values we are using in the test cases
        # below are already known to Beaker, otherwise they will be ignored.
        for osmajor_name in (u'RedHatEnterpriseLinuxServer5',
                             u'RedHatEnterpriseLinuxClient5',
                             u'RedHatEnterpriseLinux7',
                             u'RedHatEnterpriseLinux6'):
            OSMajor.lazy_create(osmajor=osmajor_name)
    self.browser = self.get_browser()
    login(self.browser, user=self.uploader.user_name, password=u'upload')
def test_concurrent_new_distro(self):
    """Two importers racing on a brand-new distro must both succeed."""
    distro_data = dict(self.distro_data)
    # ensure osmajor and osversion already exist
    with session.begin():
        osmajor = OSMajor.lazy_create(osmajor=distro_data["osmajor"])
        osversion = OSVersion.lazy_create(osmajor=osmajor,
                                          osminor=distro_data["osminor"])
        osversion.arches = [Arch.lazy_create(arch=a)
                            for a in distro_data["arches"]]
    # ... but distro is new
    distro_data["name"] = "concurrent-new-distro"
    self.add_distro_trees_concurrently(distro_data, distro_data)
def test_exclude_nonexistent_osmajor(self):
    """OS majors with no distro tree in any lab must not be synced."""
    if 'sqlite' in get_engine().name:
        raise unittest.SkipTest('SQL generated by lazy_create is not valid'
                                ' in sqlite')
    with session.begin():
        osmajor = OSMajor.lazy_create(osmajor="exist")
        lab_controller = data_setup.create_labcontroller(
            fqdn=u'dummylab.example.invalid')
        data_setup.create_distro_tree(osmajor=osmajor.osmajor,
                                      harness_dir=False,
                                      lab_controllers=[lab_controller])
        # No distro tree is created for this one, so it must be skipped.
        nonexistent_osmajor = OSMajor.lazy_create(osmajor=u'notexist')
    remote_harness_dir = mkdtemp(suffix='remote')
    self.addCleanup(rmtree, remote_harness_dir)
    local_harness_dir = mkdtemp(suffix='local')
    self.addCleanup(rmtree, local_harness_dir)
    self._create_remote_harness(remote_harness_dir, osmajor.osmajor)
    update_repos('file://%s/' % remote_harness_dir, local_harness_dir)
    self.assertTrue(os.path.exists(
        os.path.join(local_harness_dir, osmajor.osmajor)))
    self.assertFalse(os.path.exists(
        os.path.join(local_harness_dir, nonexistent_osmajor.osmajor)))
def test_doubled_quotes(self):
    """CSV import keeps doubled quotes inside quoted kernel options."""
    with session.begin():
        system = data_setup.create_system(
            fqdn=u'mymainframe.funtimes.invalid', arch=u's390x')
        OSMajor.lazy_create(osmajor=u'RedHatEnterpriseLinux7')
    b = self.browser
    login(b)
    b.get(get_server_base() + 'csv/csv_import')
    b.find_element_by_name('csv_file').send_keys(
        pkg_resources.resource_filename(self.__module__, 'bz802842.csv'))
    b.find_element_by_name('csv_file').submit()
    self.failUnless(is_text_present(self.browser, "No Errors"))
    with session.begin():
        session.refresh(system)
        provision_family = system.provisions[Arch.by_name(u's390x')]\
            .provision_families[OSMajor.by_name(u'RedHatEnterpriseLinux7')]
        self.assertEquals(provision_family.kernel_options,
            'rd.znet="qeth,0.0.8000,0.0.8001,0.0.8002,layer2=1,portname=lol,portno=0" '
            'ip=1.2.3.4::1.2.3.4:255.255.248.0::eth0:none MTU=1500 nameserver=1.2.3.4 '
            'DASD=20A1,21A1,22A1,23A1 MACADDR=02:DE:AD:BE:EF:16 '
            '!LAYER2 !DNS !PORTNO !IPADDR !GATEWAY !HOSTNAME !NETMASK ')
def edit_osmajor(self, id=None, *args, **kw):
    """Show the edit form for one OS major; redirect if the ID is invalid."""
    try:
        osmajor = OSMajor.by_id(id)
    except InvalidRequestError:
        flash(_(u"Invalid OSMajor ID %s" % id))
        redirect(".")
    return dict(title="OSMajor",
                value=osmajor,
                form=self.osmajor_form,
                action="./save_osmajor",
                options=None)
def test_does_not_run_createrepo_unnecessarily(self):
    """A second sync with no new packages must not rebuild repodata."""
    if 'sqlite' in get_engine().name:
        raise unittest.SkipTest('SQL generated by lazy_create is not valid'
                                ' in sqlite')
    osmajor = u'GreenBeretLinux99'
    with session.begin():
        OSMajor.lazy_create(osmajor=osmajor)
    remote_harness_dir = mkdtemp(suffix='remote')
    self.addCleanup(rmtree, remote_harness_dir)
    local_harness_dir = mkdtemp(suffix='local')
    self.addCleanup(rmtree, local_harness_dir)
    self._create_remote_harness(remote_harness_dir, osmajor)
    # run it once, repo is built
    update_repos('file://%s/' % remote_harness_dir, local_harness_dir)
    repodata_dir = os.path.join(local_harness_dir, osmajor, 'repodata')
    mtime = os.path.getmtime(repodata_dir)
    # run it again, repo should not be rebuilt
    time.sleep(0.001)
    update_repos('file://%s/' % remote_harness_dir, local_harness_dir)
    self.assertEquals(os.path.getmtime(repodata_dir), mtime)
def test_concurrent_same_tree(self):
    """Two importers racing on an identical tree must both succeed."""
    distro_data = dict(self.distro_data)
    with session.begin():
        # ensure osmajor, osversion, and distro already exist
        osmajor = OSMajor.lazy_create(osmajor=distro_data['osmajor'])
        osversion = OSVersion.lazy_create(osmajor=osmajor,
                                          osminor=distro_data['osminor'])
        osversion.arches = [Arch.lazy_create(arch=arch)
                            for arch in distro_data['arches']]
        Distro.lazy_create(name=distro_data['name'], osversion=osversion)
    self.add_distro_trees_concurrently(distro_data, distro_data)
def save_osmajor(self, id=None, alias=None, *args, **kw):
    """Persist an alias change for an OS major.

    Flashes the outcome and redirects back to the OS major list. Rejects
    an alias that is already in use as another OS major's name or alias,
    which would otherwise make name/alias lookups ambiguous.
    """
    try:
        osmajor = OSMajor.by_id(id)
    except InvalidRequestError:
        flash(_(u"Invalid OSMajor ID %s" % id))
        redirect(".")
    if osmajor.alias != alias:
        if alias:
            # Fixed: refuse duplicate aliases, consistent with the other
            # save_osmajor implementation in this file. NoResultFound is a
            # subclass of InvalidRequestError, so the existing import
            # suffices for the "no clash" case.
            try:
                existing = OSMajor.by_name_alias(alias)
            except InvalidRequestError:
                pass
            else:
                flash(_(u'Cannot save alias %s, it is already used by %s')
                        % (alias, existing))
                redirect('.')
        osmajor.alias = alias
        flash(_(u"Changes saved for %s" % osmajor))
    else:
        flash(_(u"No changes for %s" % osmajor))
    redirect(".")
def save_osmajor(self, id=None, alias=None, *args, **kw):
    """Persist an alias change for an OS major, refusing duplicates."""
    try:
        osmajor = OSMajor.by_id(id)
    except InvalidRequestError:
        flash(_(u"Invalid OSMajor ID %s" % id))
        redirect(".")
    if osmajor.alias == alias:
        flash(_(u"No changes for %s" % osmajor))
        redirect(".")
    if alias:
        # Refuse an alias already used as another OS major's name or alias.
        try:
            existing = OSMajor.by_name_alias(alias)
        except NoResultFound:
            pass
        else:
            flash(_(u'Cannot save alias %s, it is already used by %s')
                    % (alias, existing))
            redirect('.')
    osmajor.alias = alias
    flash(_(u"Changes saved for %s" % osmajor))
    redirect(".")
def test_concurrent_new_distro(self):
    """Two importers racing on a brand-new distro must both succeed."""
    distro_data = dict(self.distro_data)
    with session.begin():
        # ensure osmajor and osversion already exist
        osmajor = OSMajor.lazy_create(osmajor=distro_data['osmajor'])
        osversion = OSVersion.lazy_create(osmajor=osmajor,
                                          osminor=distro_data['osminor'])
        osversion.arches = [Arch.lazy_create(arch=arch)
                            for arch in distro_data['arches']]
    # ... but distro is new
    distro_data['name'] = 'concurrent-new-distro'
    self.add_distro_trees_concurrently(distro_data, distro_data)
def index(self, **kwargs):
    """Render the reserve workflow page."""
    # CherryPy will give us distro_tree_id as a scalar if it only has one
    # value, but we want it to always be a list of int
    raw_tree_ids = kwargs.get('distro_tree_id')
    if not raw_tree_ids:
        kwargs['distro_tree_id'] = []
    elif isinstance(raw_tree_ids, list):
        kwargs['distro_tree_id'] = [int(x) for x in raw_tree_ids]
    else:
        kwargs['distro_tree_id'] = [int(raw_tree_ids)]
    # If we got a distro_tree_id but no osmajor or distro, fill those in
    # with the right values so that the distro picker is populated properly
    if kwargs['distro_tree_id']:
        distro_tree = DistroTree.by_id(kwargs['distro_tree_id'][0])
        if not kwargs.get('distro'):
            kwargs['distro'] = distro_tree.distro.name
        if not kwargs.get('osmajor'):
            kwargs['osmajor'] = distro_tree.distro.osversion.osmajor.osmajor
    options = {
        'tag': [tag.tag for tag in DistroTag.used()],
        'osmajor': [om.osmajor for om in
                    OSMajor.ordered_by_osmajor(OSMajor.in_any_lab())],
        'distro': self._get_distro_options(osmajor=kwargs.get('osmajor'),
                                           tag=kwargs.get('tag')),
        'distro_tree_id': self._get_distro_tree_options(
            distro=kwargs.get('distro')),
        'lab': [lc.fqdn for lc in
                LabController.query.filter(LabController.removed == None)],
    }
    return dict(title=_(u'Reserve Workflow'), selection=kwargs,
                options=options)
def test_concurrent_different_trees(self):
    """Two importers racing on different variants of one distro succeed."""
    distro_data = dict(self.distro_data)
    # ensure osmajor, osversion, and distro already exist
    with session.begin():
        osmajor = OSMajor.lazy_create(osmajor=distro_data["osmajor"])
        osversion = OSVersion.lazy_create(osmajor=osmajor,
                                          osminor=distro_data["osminor"])
        osversion.arches = [Arch.lazy_create(arch=a)
                            for a in distro_data["arches"]]
        Distro.lazy_create(name=distro_data["name"], osversion=osversion)
    # ensure two different trees
    distro_data["variant"] = u"Workstation"
    distro_data2 = dict(distro_data)
    distro_data2["variant"] = u"Server"
    self.add_distro_trees_concurrently(distro_data, distro_data2)
def test_executed_tasks_family_sorting(self):
    """OS major options are sorted naturally and omit unused families."""
    with session.begin():
        task = data_setup.create_task()
        for family in (u'BlueShoe10', u'BlueShoe9'):
            data_setup.create_completed_job(
                task_name=task.name,
                distro_tree=data_setup.create_distro_tree(osmajor=family))
        # plus one that is never used
        OSMajor.lazy_create(osmajor=u'neverused666')
    b = self.browser
    b.get(get_server_base() + 'tasks/%d' % task.id)
    option_elements = b.find_elements_by_xpath(
        "//select[@name='osmajor_id']/option")
    options = [element.text for element in option_elements]
    self.assert_(options.index('BlueShoe9') < options.index('BlueShoe10'),
                 options)
    self.assert_('neverused666' not in options, options)
def test_excluded_family_non_existent_system(self):
    """CSV exclude rows create the system if it does not exist yet."""
    login(self.browser)
    fqdn = data_setup.unique_name('system%s.idonot.exist')
    with session.begin():
        osmajor = OSMajor.lazy_create(osmajor=u'MyEnterpriseLinux')
    csv_body = (u'csv_type,fqdn,arch,family,update,excluded\n'
                u'exclude,%s,x86_64,MyEnterpriseLinux,,True' % fqdn)
    self.import_csv(csv_body.encode('utf8'))
    with session.begin():
        system = System.query.filter(System.fqdn == fqdn).one()
        self.assertEquals(system.excluded_osmajor[0].osmajor_id, osmajor.id)
def test_clearing_alias_stores_null(self):
    """Clearing the alias field must store NULL, not an empty string."""
    with session.begin():
        data_setup.create_distro_tree(osmajor=u'YellowSpaceshipLinux2')
        osmajor = OSMajor.by_name(u'YellowSpaceshipLinux2')
        osmajor.alias = u'YSL2'
    b = self.browser
    go_to_edit_osmajor(b, 'YellowSpaceshipLinux2')
    # Empty the alias input and submit the form.
    b.find_element_by_xpath('//input[@id="form_alias"]').clear()
    b.find_element_by_xpath('//button[text()="Edit OSMajor"]').submit()
    self.assertEquals(b.find_element_by_class_name('flash').text,
                      'Changes saved for YellowSpaceshipLinux2')
    with session.begin():
        # Re-read from the database to see the committed value.
        session.refresh(osmajor)
        self.assertEquals(osmajor.alias, None)  # not ''
def test_excluded_family_non_existent_system(self):
    """CSV import of an 'exclude' row creates the system when absent and
    attaches the excluded osmajor to it."""
    login(self.browser)
    fqdn = data_setup.unique_name('system%s.idonot.exist')
    with session.begin():
        osmajor = OSMajor.lazy_create(osmajor=u'MyEnterpriseLinux')
    self.import_csv((u'csv_type,fqdn,arch,family,update,excluded\n'
                     u'exclude,%s,x86_64,MyEnterpriseLinux,,True' % fqdn)
                    .encode('utf8'))
    # New transaction: verify what the import committed.
    with session.begin():
        system = System.query.filter(System.fqdn == fqdn).one()
        self.assertEquals(system.excluded_osmajor[0].osmajor_id,
                          osmajor.id)
def test_exclude_nonexistent_osmajor(self):
    """Syncing harness repos only creates directories for osmajors that
    actually have a remote repo."""
    if 'sqlite' in get_engine().name:
        raise unittest.SkipTest('SQL generated by lazy_create is not valid'
                ' in sqlite')
    with session.begin():
        existing = OSMajor.lazy_create(osmajor="exist")
        lab_controller = data_setup.create_labcontroller(
            fqdn=u'dummylab.example.invalid')
        data_setup.create_distro_tree(
            osmajor=existing.osmajor, harness_dir=False,
            lab_controllers=[lab_controller])
        missing = OSMajor.lazy_create(osmajor=u'notexist')
    remote_harness_dir = mkdtemp(suffix='remote')
    self.addCleanup(rmtree, remote_harness_dir)
    local_harness_dir = mkdtemp(suffix='local')
    self.addCleanup(rmtree, local_harness_dir)
    # Only the existing osmajor gets a remote repo.
    self._create_remote_harness(remote_harness_dir, existing.osmajor)
    update_repos('file://%s/' % remote_harness_dir, local_harness_dir)
    # The known osmajor is synced locally...
    self.assertTrue(os.path.exists(
            os.path.join(local_harness_dir, existing.osmajor)))
    # ...but nothing appears for the osmajor without a remote repo.
    self.assertFalse(os.path.exists(
            os.path.join(local_harness_dir, missing.osmajor)))
def test_concurrent_different_trees(self):
    """Concurrent imports of two trees of one distro must both succeed."""
    data = dict(self.distro_data)
    # Make sure the rows shared by both imports (osmajor, osversion,
    # distro) already exist, so only the trees are created concurrently.
    with session.begin():
        osmajor = OSMajor.lazy_create(osmajor=data['osmajor'])
        osversion = OSVersion.lazy_create(osmajor=osmajor,
                osminor=data['osminor'])
        osversion.arches = [Arch.lazy_create(arch=a) for a in data['arches']]
        Distro.lazy_create(name=data['name'], osversion=osversion)
    # Different variants of the same distro give two distinct trees.
    data['variant'] = u'Workstation'
    data2 = dict(data)
    data2['variant'] = u'Server'
    self.add_distro_trees_concurrently(data, data2)
def save_osmajor_installopts(self, osmajor_id=None, installopts=None):
    """Persist per-arch install options for an OS major.

    :param osmajor_id: id of the OSMajor row to update
    :param installopts: mapping of arch name (or None, meaning all
        arches) to a dict with 'ks_meta', 'kernel_options' and
        'kernel_options_post' values
    """
    try:
        osmajor = OSMajor.by_id(osmajor_id)
    except InvalidRequestError:
        # Bug fix: the original message interpolated the builtin ``id``
        # function instead of the submitted osmajor_id.
        flash(_(u"Invalid OSMajor ID %s" % osmajor_id))
        redirect(".")
    for arch, options in installopts.iteritems():
        # arch=None means applied to all arches
        io = OSMajorInstallOptions.lazy_create(osmajor_id=osmajor.id,
                arch_id=Arch.by_name(arch).id if arch else None)
        io.ks_meta = options['ks_meta']
        io.kernel_options = options['kernel_options']
        io.kernel_options_post = options['kernel_options_post']
    flash(_(u'Install options saved for %s') % osmajor)
    redirect('.')
def test_clearing_alias_stores_null(self):
    """Submitting an empty alias stores NULL rather than ''."""
    with session.begin():
        data_setup.create_distro_tree(osmajor=u'YellowSpaceshipLinux2')
        osmajor = OSMajor.by_name(u'YellowSpaceshipLinux2')
        osmajor.alias = u'YSL2'
    b = self.browser
    go_to_edit_osmajor(b, 'YellowSpaceshipLinux2')
    # Clear the alias input, then submit the edit form.
    b.find_element_by_xpath('//input[@id="form_alias"]').clear()
    b.find_element_by_xpath('//button[text()="Edit OSMajor"]').submit()
    self.assertEquals(
        b.find_element_by_class_name('flash').text,
        'Changes saved for YellowSpaceshipLinux2')
    with session.begin():
        # Refresh from the database to observe the committed value.
        session.refresh(osmajor)
        self.assertEquals(osmajor.alias, None)  # not ''
def create_distro(name=None, osmajor=u'DansAwesomeLinux6', osminor=u'9',
        arches=None, tags=None):
    """Create (or look up) a distro for use in tests.

    Also ensures the harness directory for the distro's osmajor exists
    under the configured harness base path.
    """
    major = OSMajor.lazy_create(osmajor=osmajor)
    version = OSVersion.lazy_create(osmajor=major, osminor=osminor)
    if arches:
        version.arches = arches
    if not name:
        name = unique_name(u'%s.%s-%%s' % (major, osminor))
    distro = Distro.lazy_create(name=name, osversion=version)
    for tag in (tags or []):
        distro.add_tag(tag)
    log.debug('Created distro %r', distro)
    harness_path = os.path.join(turbogears.config.get('basepath.harness'),
            distro.osversion.osmajor.osmajor)
    if not os.path.exists(harness_path):
        os.makedirs(harness_path)
    return distro
def get_arch(self, filter):
    """Return possible arch names for a distro or an osmajor.

    :param filter: dict containing either a 'distro' or an 'osmajor' key
    :returns: list of arch name strings
    :raises BX: if the named distro/osmajor does not exist, or if the
        filter contains neither key
    """
    if 'distro' in filter:
        # look up distro
        try:
            arches = [arch.arch for arch in
                      Distro.by_name(filter['distro']).osversion.arches]
        except DatabaseLookupError:
            raise BX(_('Invalid Distro: %s' % filter['distro']))
    elif 'osmajor' in filter:
        # look up osmajor
        try:
            arches = [arch.arch for arch in
                      OSMajor.by_name(filter['osmajor']).osversions[0].arches]
        except InvalidRequestError:
            raise BX(_('Invalid OSMajor: %s' % filter['osmajor']))
    else:
        # Bug fix: previously fell through to an UnboundLocalError on
        # ``arches``; report the malformed request explicitly instead.
        raise BX(_('Filter must contain either distro or osmajor'))
    return arches
def create_distro(name=None, osmajor=u'DansAwesomeLinux6', osminor=u'9',
        arches=None, tags=None, harness_dir=True):
    """Create (or look up) a distro for use in tests.

    When ``harness_dir`` is true, also ensures the harness directory for
    the distro's osmajor exists under the configured base path.
    """
    major = OSMajor.lazy_create(osmajor=osmajor)
    version = OSVersion.lazy_create(osmajor=major, osminor=osminor)
    if arches:
        version.arches = arches
    distro_name = name if name else unique_name(u'%s.%s-%%s' % (major, osminor))
    distro = Distro.lazy_create(name=distro_name, osversion=version)
    for tag in (tags or []):
        distro.add_tag(tag)
    log.debug('Created distro %r', distro)
    if harness_dir:
        path = os.path.join(turbogears.config.get('basepath.harness'),
                distro.osversion.osmajor.osmajor)
        if not os.path.exists(path):
            os.makedirs(path)
    return distro
def test_install_options_non_existent_system(self):
    """CSV import of an 'install' row creates the system if needed and
    records per-arch/per-family install options against it."""
    login(self.browser)
    fqdn = data_setup.unique_name('system%s.idonot.exist')
    with session.begin():
        distro_tree = data_setup.create_distro_tree(osmajor='MyEnterpriseLinux',
                                                    arch=u'x86_64')
    self.import_csv((u'csv_type,fqdn,arch,family,update,ks_meta,kernel_options,kernel_options_post\n'
                     u'install,%s,x86_64,MyEnterpriseLinux,,mode=cmdline,,console=ttyS0' % fqdn)
                    .encode('utf8'))
    # Fresh transaction: check what the import actually committed.
    with session.begin():
        system = System.query.filter(System.fqdn == fqdn).one()
        arch = Arch.by_name(u'x86_64')
        osmajor = OSMajor.by_name(u'MyEnterpriseLinux')
        p = system.provisions[arch].provision_families[osmajor]
        self.assertEquals(p.ks_meta, u'mode=cmdline')
        self.assertEquals(p.kernel_options_post, u'console=ttyS0')
def test_install_options_non_existent_system(self):
    """Importing an 'install' CSV row for an unknown fqdn creates the
    system and stores its arch/family install options."""
    login(self.browser)
    fqdn = data_setup.unique_name('system%s.idonot.exist')
    with session.begin():
        distro_tree = data_setup.create_distro_tree(
            osmajor='MyEnterpriseLinux', arch=u'x86_64')
    self.import_csv((
        u'csv_type,fqdn,arch,family,update,ks_meta,kernel_options,kernel_options_post\n'
        u'install,%s,x86_64,MyEnterpriseLinux,,mode=cmdline,,console=ttyS0'
        % fqdn).encode('utf8'))
    # Read back in a new transaction to see the committed state.
    with session.begin():
        system = System.query.filter(System.fqdn == fqdn).one()
        arch = Arch.by_name(u'x86_64')
        osmajor = OSMajor.by_name(u'MyEnterpriseLinux')
        p = system.provisions[arch].provision_families[osmajor]
        self.assertEquals(p.ks_meta, u'mode=cmdline')
        self.assertEquals(p.kernel_options_post, u'console=ttyS0')
def create_task(name=None, exclude_arch=None, exclude_osmajor=None,
        version=u'1.0-1', uploader=None, owner=None, priority=u'Manual',
        valid=None, path=None, description=None, requires=None,
        runfor=None, type=None):
    """Create (or look up) a task for use in tests.

    Any argument left as None is filled with a generated default. The
    list-valued arguments (exclude_arch, exclude_osmajor, requires,
    runfor, type) append to the task's corresponding collections.
    """
    if name is None:
        name = unique_name(u'/distribution/test_task_%s')
    if path is None:
        path = u'/mnt/tests/%s' % name
    if description is None:
        description = unique_name(u'description%s')
    if uploader is None:
        uploader = create_user(user_name=u'task-uploader%s' % name.replace('/', '-'))
    if owner is None:
        # Bug fix: the previous default owner string contained no %s
        # placeholder, so the % interpolation raised TypeError whenever
        # owner was not supplied by the caller.
        owner = u'task-owner%s@example.invalid' % name.replace('/', '-')
    if valid is None:
        valid = True
    rpm = u'example%s-%s.noarch.rpm' % (name.replace('/', '-'), version)

    task = Task.lazy_create(name=name)
    task.rpm = rpm
    task.version = version
    task.uploader = uploader
    task.owner = owner
    task.priority = priority
    task.valid = valid
    task.path = path
    task.description = description
    if type:
        for t in type:
            task.types.append(TaskType.lazy_create(type=t))
    if exclude_arch:
        for arch in exclude_arch:
            task.excluded_arch.append(TaskExcludeArch(arch_id=Arch.by_name(arch).id))
    if exclude_osmajor:
        for osmajor in exclude_osmajor:
            task.excluded_osmajor.append(TaskExcludeOSMajor(osmajor=OSMajor.lazy_create(osmajor=osmajor)))
    if requires:
        for require in requires:
            tp = TaskPackage.lazy_create(package=require)
            task.required.append(tp)
    if runfor:
        for run in runfor:
            task.runfor.append(TaskPackage.lazy_create(package=run))
    return task
def test_cannot_save_duplicate_alias(self):
    """An alias may not collide with another osmajor's alias or name."""
    with session.begin():
        existing = u'OrangeBucketLinux7'
        existing_alias = u'OBL7'
        OSMajor.lazy_create(osmajor=existing).alias = existing_alias
        data_setup.create_distro_tree(osmajor=u'YellowSpaceshipLinux1')
    b = self.browser
    # Case 1: alias equal to another osmajor's alias is rejected.
    go_to_edit_osmajor(b, 'YellowSpaceshipLinux1')
    b.find_element_by_xpath('//input[@id="form_alias"]')\
        .send_keys(existing_alias)
    b.find_element_by_xpath('//button[text()="Edit OSMajor"]').submit()
    self.assertEquals(b.find_element_by_class_name('flash').text,
            'Cannot save alias OBL7, it is already used by OrangeBucketLinux7')
    # Case 2: alias equal to another osmajor's *name* is also rejected.
    go_to_edit_osmajor(b, 'YellowSpaceshipLinux1')
    b.find_element_by_xpath('//input[@id="form_alias"]')\
        .send_keys(existing)
    b.find_element_by_xpath('//button[text()="Edit OSMajor"]').submit()
    self.assertEquals(b.find_element_by_class_name('flash').text,
            'Cannot save alias OrangeBucketLinux7, '
            'it is already used by OrangeBucketLinux7')
def create_distro(name=None, osmajor=u'DansAwesomeLinux6', osminor=u'9',
                  arches=None, tags=None, harness_dir=True,
                  osmajor_installopts=None, date_created=None):
    """Create (or look up) a distro for use in tests.

    :param arches: list of arch names or Arch instances for the osversion
    :param osmajor_installopts: dict of ks_meta/kernel_options/
        kernel_options_post applied to every arch in ``arches``
    :param harness_dir: when true, ensure the harness directory for the
        distro's osmajor exists
    """
    osmajor = OSMajor.lazy_create(osmajor=osmajor)
    osversion = OSVersion.lazy_create(osmajor=osmajor, osminor=osminor)
    if arches:
        # list arches may contains unicode name or instance
        # Comparing instance to attribute is prohibited in SQLAlchemy 1.1 and later
        osversion.arches = [
            Arch.by_name(arch.arch if isinstance(arch, Arch) else arch)
            for arch in arches
        ]
    if not name:
        name = unique_name(u'%s.%s-%%s' % (osmajor, osminor))
    distro = Distro.lazy_create(name=name, osversion=osversion)
    if date_created is not None:
        distro.date_created = date_created
    for tag in (tags or []):
        distro.add_tag(tag)
    # add distro wide install options, if any
    if osmajor_installopts:
        # Bug fix: guard against arches=None (previously a TypeError) and
        # normalise Arch instances to names, consistent with the
        # osversion.arches handling above.
        for arch in (arches or []):
            arch_name = arch.arch if isinstance(arch, Arch) else arch
            io = OSMajorInstallOptions.lazy_create(
                osmajor_id=osmajor.id, arch_id=Arch.by_name(arch_name).id)
            io.ks_meta = osmajor_installopts.get('ks_meta', '')
            io.kernel_options = osmajor_installopts.get('kernel_options', '')
            io.kernel_options_post = osmajor_installopts.get(
                'kernel_options_post', '')
    log.debug('Created distro %r', distro)
    if harness_dir:
        harness_dir = os.path.join(turbogears.config.get('basepath.harness'),
                                   distro.osversion.osmajor.osmajor)
        if not os.path.exists(harness_dir):
            os.makedirs(harness_dir)
    return distro
def edit_version(self, name, version):
    """ Updates the version for all distros with the given name.

    :param name: name of distros to be updated, for example
        'RHEL5.6-Server-20101110.0' (%% is wildcard)
    :type name: string
    :param version: new version to be applied, for example
        'RedHatEnterpriseLinuxServer5.6' or 'Fedora14'
    :type version: string
    """
    matching = Distro.query.filter(Distro.name.like(unicode(name)))
    edited = []

    # Split the version into major and minor; default minor to '0'
    # when there is no '.' separator.
    parts = version.split('.')
    os_major = parts[0]
    os_minor = parts[1] if len(parts) > 1 else '0'

    # Find or create the target osmajor/osversion rows.
    osmajor = OSMajor.lazy_create(osmajor=os_major)
    osversion = OSVersion.lazy_create(osmajor=osmajor, osminor=os_minor)

    # Re-point every matching distro that is not already on the target
    # osversion, recording an activity entry for each change.
    for distro in matching:
        if osversion == distro.osversion:
            continue
        edited.append('%s' % distro.name)
        distro.activity.append(DistroActivity(
                user=identity.current.user, service=u'XMLRPC',
                field_name=u'osversion', action=u'Changed',
                old_value=unicode(distro.osversion),
                new_value=unicode(osversion)))
        distro.osversion = osversion
    return edited