def testTinySummary(self):
  """Verify summary prefixes are unique, well-formed and correctly applied."""
  prefixes = opcodes_base.SUMMARY_PREFIX

  # No two opcode groups may share an abbreviation
  self.assertFalse(utils.FindDuplicates(prefixes.values()))

  # Both the long prefix and its abbreviated form must end in an underscore
  self.assertTrue(compat.all(long_prefix.endswith("_") and
                             abbreviation.endswith("_")
                             for (long_prefix, abbreviation)
                             in prefixes.items()))

  # Spot-check the abbreviation of a few well-known opcodes
  for (op_cls, expected) in [(opcodes.OpClusterPostInit, "C_POST_INIT"),
                             (opcodes.OpNodeRemove, "N_REMOVE"),
                             (opcodes.OpInstanceMigrate, "I_MIGRATE"),
                             (opcodes.OpTestJqueue, "TEST_JQUEUE")]:
    self.assertEqual(op_cls().TinySummary(), expected)
def CheckArguments(self):
  """Check arguments.

  """
  # Collect one flag per required node setting; pnode for every instance,
  # plus snode for instances with an internally mirrored disk template
  node_flags = []
  for inst in self.op.instances:
    # Per-instance allocators are not supported with multi-allocation
    if inst.iallocator is not None:
      raise errors.OpPrereqError("iallocator are not allowed to be set on"
                                 " instance objects", errors.ECODE_INVAL)
    node_flags.append(bool(inst.pnode))
    if inst.disk_template in constants.DTS_INT_MIRROR:
      node_flags.append(bool(inst.snode))

  has_nodes = compat.any(node_flags)

  # Either all instances specify their nodes or none of them do; a mix
  # (some flags set, some not) is rejected
  if compat.all(node_flags) != has_nodes:
    raise errors.OpPrereqError("There are instance objects providing"
                               " pnode/snode while others do not",
                               errors.ECODE_INVAL)

  if not has_nodes and self.op.iallocator is None:
    # Fall back to the cluster-wide default allocator, if any
    default_iallocator = self.cfg.GetDefaultIAllocator()
    if default_iallocator:
      self.op.iallocator = default_iallocator
    else:
      raise errors.OpPrereqError("No iallocator or nodes on the instances"
                                 " given and no cluster-wide default"
                                 " iallocator found; please specify either"
                                 " an iallocator or nodes on the instances"
                                 " or set a cluster-wide default iallocator",
                                 errors.ECODE_INVAL)

  CheckOpportunisticLocking(self.op)

  # Instance names must be unique within this request
  dups = utils.FindDuplicates([op.instance_name for op in self.op.instances])
  if dups:
    raise errors.OpPrereqError("There are duplicate instance names: %s" %
                               utils.CommaJoin(dups), errors.ECODE_INVAL)
def test(self):
  """Check whether all RAPI resources are documented.

  """
  rapidoc = _ReadDocFile("rapi.rst")

  # Escaped placeholder strings used both to build the handler table and,
  # below, as regex patterns for substituting sample values into URIs
  node_name = re.escape("[node_name]")
  instance_name = re.escape("[instance_name]")
  group_name = re.escape("[group_name]")
  network_name = re.escape("[network_name]")
  job_id = re.escape("[job_id]")
  disk_index = re.escape("[disk_index]")
  filter_uuid = re.escape("[filter_uuid]")
  query_res = re.escape("[resource]")

  resources = connector.GetHandlers(node_name, instance_name,
                                    group_name, network_name,
                                    job_id, disk_index,
                                    filter_uuid, query_res)

  # Each handler class may be registered for at most one resource
  handler_dups = utils.FindDuplicates(resources.values())
  self.assertFalse(handler_dups,
                   msg=("Resource handlers used more than once: %r" %
                        handler_dups))

  # Maps each placeholder pattern to a concrete sample value so documented
  # URIs can be turned into checkable, valid URIs
  uri_check_fixup = {
    re.compile(node_name): "node1examplecom",
    re.compile(instance_name): "inst1examplecom",
    re.compile(group_name): "group4440",
    re.compile(network_name): "network5550",
    re.compile(job_id): "9409",
    re.compile(disk_index): "123",
    re.compile(filter_uuid): "c863fbb5-f248-47bf-869b-cea259890061",
    re.compile(query_res): "lock",
    }

  assert compat.all(VALID_URI_RE.match(value)
                    for value in uri_check_fixup.values()), \
    "Fixup values must be valid URIs, too"

  # Collect section titles: in reST a title line is followed by a line of
  # "+" characters, so remember the line preceding each such underline
  titles = []

  prevline = None
  for line in rapidoc.splitlines():
    if re.match(r"^\++$", line):
      titles.append(prevline)

    prevline = line

  # Paths that are exempt from the "/2/" prefix requirement
  prefix_exception = compat.UniqueFrozenset(["/", "/version", "/2"])

  undocumented = []
  used_uris = []

  for key, handler in resources.iteritems():
    # Regex objects
    if hasattr(key, "match"):
      self.assert_(key.pattern.startswith("^/2/"),
                   msg="Pattern %r does not start with '^/2/'" % key.pattern)
      self.assertEqual(key.pattern[-1], "$")

      # Find the documentation title (written as ``<uri>``) matching this
      # pattern; the first match wins
      found = False

      for title in titles:
        if title.startswith("``") and title.endswith("``"):
          uri = title[2:-2]
          if key.match(uri):
            self._CheckRapiResource(uri, uri_check_fixup, handler)
            used_uris.append(uri)
            found = True
            break

      if not found:
        # TODO: Find better way of identifying resource
        undocumented.append(key.pattern)

    # Plain strings
    else:
      self.assert_(key.startswith("/2/") or key in prefix_exception,
                   msg="Path %r does not start with '/2/'" % key)

      if ("``%s``" % key) in titles:
        self._CheckRapiResource(key, {}, handler)
        used_uris.append(key)
      else:
        undocumented.append(key)

  self.failIf(undocumented,
              msg=("Missing RAPI resource documentation for %s" %
                   utils.CommaJoin(undocumented)))

  # Each documented URI may only be claimed by a single resource
  uri_dups = utils.FindDuplicates(used_uris)
  self.failIf(uri_dups,
              msg=("URIs matched by more than one resource: %s" %
                   utils.CommaJoin(uri_dups)))

  self._FindRapiMissing(resources.values())
  self._CheckTagHandlers(resources.values())
def testDeferTask(self):
  # Tests whether all tasks are run and, since we're only using a single
  # thread, whether everything is started in order and respects the priority
  wp = workerpool.WorkerPool("Test", 1, DeferringWorker)
  try:
    self._CheckWorkerCount(wp, 1)

    ctx = DeferringTaskContext()

    # Use static seed for this test
    rnd = random.Random(14921)

    expected = {}
    num2taskid = {}

    for num in range(1, 333):
      # Mark every fifth task for same-priority deferral
      ctx.lock.acquire()
      try:
        if num % 5 == 0:
          ctx.samepriodefer[num] = True
      finally:
        ctx.lock.release()

      prio = int(rnd.random() * 30)
      num2taskid[num] = 1000 * num
      wp.AddTask((ctx, num, prio), priority=50, task_id=num2taskid[num])
      expected.setdefault(prio, set()).add(num)

      # Cause some distortion
      if num % 24 == 0:
        time.sleep(.001)
      if num % 31 == 0:
        wp.Quiesce()

    wp.Quiesce()

    self._CheckNoTasks(wp)

    # Check result
    ctx.lock.acquire()
    try:
      self.assertEqual(expected, ctx.prioresult)

      all_order_ids = []

      for (num, entries) in ctx.num2ordertaskid.items():
        order_ids = [compat.fst(entry) for entry in entries]
        self.assertFalse(utils.FindDuplicates(order_ids),
                         msg="Order ID has been reused")
        all_order_ids.extend(order_ids)

        for task_id in [compat.snd(entry) for entry in entries]:
          self.assertEqual(task_id, num2taskid[num],
                           msg=("Task %s used different task IDs" % num))

      self.assertFalse(utils.FindDuplicates(all_order_ids),
                       msg="Order ID has been reused")
    finally:
      ctx.lock.release()

    self._CheckWorkerCount(wp, 1)
  finally:
    wp.TerminateWorkers()
    self._CheckWorkerCount(wp, 0)
def testParams(self):
  """Validate parameter declarations of every opcode class."""
  supported_by_all = set(["debug_level", "dry_run", "priority"])

  # The abstract base classes must never be registered as real opcodes
  self.assertTrue(opcodes_base.BaseOpCode not in opcodes.OP_MAPPING.values())
  self.assertTrue(opcodes.OpCode not in opcodes.OP_MAPPING.values())

  for cls in list(opcodes.OP_MAPPING.values()) + [opcodes.OpCode]:
    all_slots = cls.GetAllSlots()

    self.assertEqual(len(set(all_slots) & supported_by_all), 3,
                     msg=("Opcode %s doesn't support all base"
                          " parameters (%r)" % (cls.OP_ID,
                                                supported_by_all)))

    # All opcodes must have OP_PARAMS
    self.assert_(hasattr(cls, "OP_PARAMS"),
                 msg="%s doesn't have OP_PARAMS" % cls.OP_ID)

    param_names = [name for (name, _, _, _) in cls.GetAllParams()]

    self.assertEqual(all_slots, param_names)

    # Without inheritance
    self.assertEqual(cls.__slots__,
                     [name for (name, _, _, _) in cls.OP_PARAMS])

    # This won't work if parameters are converted to a dictionary
    duplicates = utils.FindDuplicates(param_names)
    self.assertFalse(duplicates,
                     msg=("Found duplicate parameters %r in %s" %
                          (duplicates, cls.OP_ID)))

    # Check parameter definitions
    for attr_name, aval, test, doc in cls.GetAllParams():
      self.assert_(attr_name)
      self.assertTrue(callable(test),
                      msg=("Invalid type check for %s.%s" %
                           (cls.OP_ID, attr_name)))
      self.assertTrue(doc is None or isinstance(doc, basestring))

      if callable(aval):
        # A callable default is evaluated lazily; its result, however,
        # must not itself be callable
        default_value = aval()
        self.assertFalse(callable(default_value),
                         msg=("Default value of %s.%s returned by function"
                              " is callable" % (cls.OP_ID, attr_name)))
      else:
        default_value = aval

      if aval is not ht.NoDefault and aval is not None:
        self.assertTrue(test(default_value),
                        msg=("Default value of %s.%s does not verify" %
                             (cls.OP_ID, attr_name)))

    # If any parameter has documentation, all others need to have it as well
    docs = [doc is not None for (_, _, _, doc) in cls.OP_PARAMS]
    self.assertTrue(not compat.any(docs) or compat.all(docs),
                    msg="%s does not document all parameters" % cls)
def testOpId(self):
  """Opcode IDs must be unique and all opcodes registered in OP_MAPPING."""
  op_list = opcodes._GetOpList()
  self.assertFalse(utils.FindDuplicates(cls.OP_ID for cls in op_list))
  self.assertEqual(len(op_list), len(opcodes.OP_MAPPING))
def testReplStatuses(self):
  """Confd reply status codes must be unique."""
  dups = utils.FindDuplicates(constants.CONFD_REPL_STATUSES)
  self.assertFalse(dups, msg="Duplicated confd reply status code")
def testReqs(self):
  """Confd request codes must be unique."""
  dups = utils.FindDuplicates(constants.CONFD_REQS)
  self.assertFalse(dups, msg="Duplicated confd request code")