def _fetch(self, inset=None): """ Fetch case runs from the server """ # If data initialized from the inset ---> we're done if Container._fetch(self, inset): return # Fetch test case runs from the server log.info("Fetching {0}'s case runs".format(self._identifier)) try: injects = self._teiid.run_case_runs(self.id) except teiid.TeiidNotConfigured: injects = self._server.TestRun.get_test_case_runs(self.id) except psycopg2.DatabaseError as error: log.debug("Failed to fetch data from Teiid: {0}".format(error)) injects = self._server.TestRun.get_test_case_runs(self.id) # Feed the TestRun.testcases container with the initial object # set if all cases are already cached (saving unnecesary fetch) testcaseids = [inject["case_id"] for inject in injects] if (not RunCases._is_cached(self._object.testcases) and TestCase._is_cached(testcaseids)): self._object.testcases._fetch([TestCase(id) for id in testcaseids]) # And finally create the initial object set self._current = set([ CaseRun(inject, testcaseinject=testcase) for inject in injects for testcase in self._object.testcases._items if int(inject["case_id"]) == testcase.id ]) self._original = set(self._current)
def _remove(self, bugs):
    """ Detach provided bugs from the test case run """
    log.info(u"Detaching {0} from {1}".format(
        listed(bugs), self._identifier))
    data = [bug.bug for bug in bugs]
    log.data(pretty(data))
    self._server.TestCaseRun.detach_bug(self.id, data)
def _remove(self, plans):
    """ Unlink provided plans from the test case """
    multicall = xmlrpclib.MultiCall(self._server)
    for plan in plans:
        log.info("Unlinking {0} from {1}".format(
            plan.identifier, self._identifier))
        multicall.TestCase.unlink_plan(self.id, plan.id)
    multicall()
def _fetch(self, inset=None): """ Fetch currently linked test cases from the server """ # If data initialized from the inset ---> we're done if Container._fetch(self, inset): return # Initialize all plan-case tags (skip when caching persistently # as this an additional/unnecessary call in that case) if config.get_cache_level() == config.CACHE_OBJECTS: log.info("Fetching tags for all {0}'s test cases".format( self._object.identifier)) for tag in self._server.TestPlan.get_all_cases_tags(self.id): Tag(tag) # Fetch test cases from the server log.info("Fetching {0}'s cases".format(self._identifier)) injects = self._server.TestPlan.get_test_cases(self.id) log.data("Fetched {0}".format(listed(injects, "inject"))) self._current = set([TestCase(inject) for inject in injects]) self._original = set(self._current) # Initialize case plans if not already cached if not PlanCasePlans._is_cached(self._object.caseplans): inset = [ CasePlan({ # Fake our own internal id from testplan & testcase "id": _idify([self._object.id, inject["case_id"]]), "case_id": inject["case_id"], "plan_id": self._object.id, "sortkey": inject["sortkey"] }) for inject in injects ] self._object.caseplans._fetch(inset)
def _remove(self, plans): """ Remove self as parent of given test plans """ log.info("Removing {1} as parent of {0}".format(self._identifier, listed([plan.identifier for plan in plans]))) for plan in plans: plan.parent = None plan.update()
def lock(self):
    """ Create the cache lock unless it already exists, set mode appropriately """
    try:
        # Attempt to extract the PID from the lock file
        lock = open(self._lock)
        pid = lock.readline().strip()
        lock.close()
        # Make sure the PID is sane (otherwise ignore it)
        try:
            pid = int(pid)
        except ValueError:
            log.warn("Malformed cache lock ({0}), ignoring".format(pid))
            raise IOError
        # Check that the process is still running
        if not os.path.exists("/proc/{0}".format(pid)):
            log.cache("Breaking stale lock (process {0} dead)".format(pid))
            raise IOError
        log.info("Found lock {0}, opening read-only".format(self._lock))
        self._mode = "read-only"
    except IOError:
        log.cache("Creating cache lock {0}".format(self._lock))
        lock = open(self._lock, "w")
        lock.write("{0}\n".format(os.getpid()))
        lock.close()
        self._mode = "read-write"
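# A minimal standalone sketch of the stale-lock detection used above:
# read the PID stored in a lock file and consider the lock stale when
# the PID is malformed or no /proc/<pid> entry exists (Linux only).
# The file path below is just an example.
import os

def lock_is_stale(path="/tmp/example-cache.lock"):
    """ Return True if the lock file holds a dead or malformed PID """
    try:
        with open(path) as lock:
            pid = int(lock.readline().strip())
    except (IOError, ValueError):
        return True
    return not os.path.exists("/proc/{0}".format(pid))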
def update(self): """ Update all modified mutable objects in the cache This method uses MultiCall to perform the update which can significantly speed up things when compared to updating each individual object separately. Note: The update is done in batches. The maximum number of objects updated at once is controlled by the global variable MULTICALL_MAX, by default set to 10 object per session.""" for klass in self._mutable + self._containers: modified = [mutable for mutable in klass._cache.values() if mutable._modified] if not modified: continue log.info("Found {0} in the {1} cache, updating...".format( listed(modified, "modified object"), klass.__name__)) for slice in sliced(modified, config.MULTICALL_MAX): multicall_start() for mutable in slice: mutable.update() multicall_end()
def _add(self, plans): """ Set self as parent of given test plans """ log.info("Setting {1} as parent of {0}".format( self._identifier, listed([plan.identifier for plan in plans]))) for plan in plans: plan.parent = TestPlan(self.id) plan.update()
def _fetch(self, inset=None): """ Fetch currently linked test cases from the server """ # If data initialized from the inset ---> we're done if Container._fetch(self, inset): return # Initialize all plan-case tags (skip when caching persistently # as this an additional/unnecessary call in that case) if config.get_cache_level() == config.CACHE_OBJECTS: log.info("Fetching tags for all {0}'s test cases".format(self._object.identifier)) for tag in self._server.TestPlan.get_all_cases_tags(self.id): Tag(tag) # Fetch test cases from the server log.info("Fetching {0}'s cases".format(self._identifier)) injects = self._server.TestPlan.get_test_cases(self.id) log.data("Fetched {0}".format(listed(injects, "inject"))) self._current = set([TestCase(inject) for inject in injects]) self._original = set(self._current) # Initialize case plans if not already cached if not PlanCasePlans._is_cached(self._object.caseplans): inset = [ CasePlan( { # Fake our own internal id from testplan & testcase "id": _idify([self._object.id, inject["case_id"]]), "case_id": inject["case_id"], "plan_id": self._object.id, "sortkey": inject["sortkey"], } ) for inject in injects ] self._object.caseplans._fetch(inset)
def _add(self, testcases):
    """ Add given test cases to the test run """
    # Short info about the action
    identifiers = [testcase.identifier for testcase in testcases]
    log.info("Adding {0} to {1}".format(
        listed(identifiers, "testcase", max=3),
        self._object.identifier))
    # Prepare data and push
    data = [testcase.id for testcase in testcases]
    log.data(pretty(data))
    try:
        self._server.TestRun.add_cases(self.id, data)
    # Handle duplicate entry errors by adding test cases one by one
    except xmlrpclib.Fault as error:
        if "Duplicate entry" not in unicode(error):
            raise
        log.warn(error)
        for id in data:
            try:
                self._server.TestRun.add_cases(self.id, id)
            except xmlrpclib.Fault:
                pass
    # RunCaseRuns will need update ---> erase current data
    self._object.caseruns._init()
def _fetch(self, inset=None): """ Fetch case runs from the server """ # If data initialized from the inset ---> we're done if Container._fetch(self, inset): return # Fetch test case runs from the server log.info("Fetching {0}'s case runs".format(self._identifier)) try: injects = self._teiid.run_case_runs(self.id) except teiid.TeiidNotConfigured: injects = self._server.TestRun.get_test_case_runs(self.id) except psycopg2.DatabaseError as error: log.debug("Failed to fetch data from Teiid: {0}".format(error)) injects = self._server.TestRun.get_test_case_runs(self.id) # Feed the TestRun.testcases container with the initial object # set if all cases are already cached (saving unnecesary fetch) testcaseids = [inject["case_id"] for inject in injects] if (not RunCases._is_cached(self._object.testcases) and TestCase._is_cached(testcaseids)): self._object.testcases._fetch([TestCase(id) for id in testcaseids]) # And finally create the initial object set self._current = set([CaseRun(inject, testcaseinject=testcase) for inject in injects for testcase in self._object.testcases._items if int(inject["case_id"]) == testcase.id]) self._original = set(self._current)
def update(self): """ Update all modified mutable objects in the cache This method uses MultiCall to perform the update which can significantly speed up things when compared to updating each individual object separately. Note: The update is done in batches. The maximum number of objects updated at once is controlled by the global variable MULTICALL_MAX, by default set to 10 object per session.""" for klass in self._mutable + self._containers: modified = [mutable for mutable in klass._cache.itervalues() if mutable._modified] if not modified: continue log.info("Found {0} in the {1} cache, updating...".format( listed(modified, "modified object"), klass.__name__)) for slice in sliced(modified, config.MULTICALL_MAX): multicall_start() for mutable in slice: mutable.update() multicall_end()
def _remove(self, plans): """ Remove self as parent of given test plans """ log.info("Removing {1} as parent of {0}".format( self._identifier, listed([plan.identifier for plan in plans]))) for plan in plans: plan.parent = None plan.update()
def _add(self, plans): """ Set self as parent of given test plans """ log.info("Setting {1} as parent of {0}".format(self._identifier, listed([plan.identifier for plan in plans]))) for plan in plans: plan.parent = TestPlan(self.id) plan.update()
def _add(self, components):
    """ Link provided components to the test plan """
    log.info(u"Linking {1} to {0}".format(
        self._identifier,
        listed([component.name for component in components])))
    data = [component.id for component in components]
    log.data(data)
    self._server.TestPlan.add_component(self.id, data)
def _fetch(self, inset=None): """ Fetch currently linked components from the server """ # If data initialized from the inset ---> we're done if Container._fetch(self, inset): return log.info("Fetching {0}'s components".format(self._identifier)) self._current = set([Component(inject) for inject in self._server.TestPlan.get_components(self.id)]) self._original = set(self._current)
def _fetch(self, inset=None): """ Fetch currently attached tags from the server """ # If data initialized from the inset ---> we're done if Container._fetch(self, inset): return log.info("Fetching {0}'s plans".format(self._identifier)) self._current = set([TestPlan(inject) for inject in self._server.TestCase.get_plans(self.id)]) self._original = set(self._current)
def _fetch(self, inset=None): """ Find all child test plans """ # If data initialized from the inset ---> we're done if Container._fetch(self, inset): return log.info("Fetching {0}'s child plans".format(self._identifier)) self._current = set(TestPlan.search(parent=self.id)) self._original = set(self._current)
def _add(self, bugs):
    """ Attach provided bugs to the test case run """
    log.info(u"Attaching {0} to {1}".format(
        listed(bugs), self._identifier))
    data = [{"bug_id": bug.bug,
             "bug_system_id": bug.system,
             "case_run_id": self.id}
            for bug in bugs]
    log.data(pretty(data))
    self._server.TestCaseRun.attach_bug(data)
    # Fetch the whole bug list again (to get the internal id)
    self._fetch()
def _add(self, cases):
    """ Link provided cases to the test plan """
    # Link provided cases on the server
    log.info("Linking {1} to {0}".format(
        self._identifier,
        listed([case.identifier for case in cases])))
    self._server.TestCase.link_plan([case.id for case in cases], self.id)
    # Add corresponding CasePlan objects to the PlanCasePlans container
    if PlanCasePlans._is_cached(self._object.caseplans):
        self._object.caseplans.add([
            CasePlan(testcase=case, testplan=self._object)
            for case in cases])
def _fetch(self, inset=None): """ Fetch test runs from the server """ # If data initialized from the inset ---> we're done if Container._fetch(self, inset): return log.info("Fetching testruns for {0}".format(self._identifier)) injects = self._server.TestPlan.get_test_runs(self.id) log.data(pretty(injects)) self._current = set([TestRun(inject) for inject in injects]) self._original = set(self._current)
def _fetch(self, inset=None): """ Fetch currently attached tags from the server """ # If data initialized from the inset ---> we're done if Container._fetch(self, inset): return log.info("Fetching tags for {0}".format(self._identifier)) injects = self._server.TestCase.get_tags(self.id) log.debug(pretty(injects)) self._current = set([Tag(inject) for inject in injects]) self._original = set(self._current)
def _remove(self, testcases):
    """ Remove given test cases from the test run """
    # Short info about the action
    identifiers = [testcase.identifier for testcase in testcases]
    log.info("Removing {0} from {1}".format(
        listed(identifiers, "testcase", max=3),
        self._object.identifier))
    data = [testcase.id for testcase in testcases]
    log.data(pretty(data))
    self._server.TestRun.remove_cases(self.id, data)
    # RunCaseRuns will need update ---> erase current data
    self._object.caseruns._init()
def _fetch(self, inset=None): """ Fetch currently attached tags from the server """ # If data initialized from the inset ---> we're done if Container._fetch(self, inset): return log.info("Fetching {0}'s plans".format(self._identifier)) self._current = set([ TestPlan(inject) for inject in self._server.TestCase.get_plans(self.id) ]) self._original = set(self._current)
def _fetch(self, inset=None): """ Fetch currently linked components from the server """ # If data initialized from the inset ---> we're done if Container._fetch(self, inset): return log.info("Fetching {0}'s components".format(self._identifier)) self._current = set([ Component(inject) for inject in self._server.TestPlan.get_components(self.id) ]) self._original = set(self._current)
def _remove(self, cases):
    """ Unlink provided cases from the test plan """
    # Unlink provided cases on the server
    multicall = xmlrpclib.MultiCall(self._server)
    for case in cases:
        log.info("Unlinking {0} from {1}".format(
            case.identifier, self._identifier))
        multicall.TestCase.unlink_plan(case.id, self.id)
    multicall()
    # Remove corresponding CasePlan objects from the PlanCasePlans container
    if PlanCasePlans._is_cached(self._object.caseplans):
        self._object.caseplans.remove([
            CasePlan(testcase=case, testplan=self._object)
            for case in cases])
def update(self): """ Update case plans with modified sortkey """ modified = [caseplan for caseplan in self if caseplan._modified] # Nothing to do if there are no sortkey changes if not modified: return # Update all modified caseplans in a single multicall log.info("Updating {0}'s case plans".format(self._identifier)) multicall = xmlrpclib.MultiCall(self._server) for caseplan in modified: caseplan._update(multicall) caseplan._modified = False multicall()
def update(self): """ Update modified case runs in multicall batches """ # Check for modified case runs modified = [caserun for caserun in self if caserun._modified] if not modified: return log.info("Updating {0}'s case runs".format(self._identifier)) # Update modified caseruns in slices for slice in sliced(modified, config.MULTICALL_MAX): multicall = xmlrpclib.MultiCall(self._server) for caserun in slice: caserun._update(multicall) caserun._modified = False multicall()
def multicall_end():
    """ Execute xmlrpc call queue and exit MultiCall mode """
    log.info("Ending multicall session, sending to the server...")
    response = Nitrate._multicall_proxy()
    log.data("Server response:")
    entries = 0
    for entry in response:
        log.data(pretty(entry))
        entries += 1
    Nitrate._multicall_proxy = None
    Nitrate._requests += 1
    log.info("Multicall session finished, {0} completed".format(
        listed(entries, "update")))
    return response
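# For reference, the queuing pattern behind the multicall helpers,
# shown with a plain xmlrpclib.MultiCall: calls are only recorded until
# the proxy itself is invoked, which sends them in a single request.
# The server URL and the case/plan ids below are made-up examples.
import xmlrpclib

server = xmlrpclib.ServerProxy("https://nitrate.example.com/xmlrpc/")
multicall = xmlrpclib.MultiCall(server)
multicall.TestCase.unlink_plan(1234, 5)   # hypothetical case/plan ids
multicall.TestCase.unlink_plan(1235, 5)
for entry in multicall():                 # one round trip for both calls
    print(entry)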
def _fetch(self, inset=None): """ Fetch case runs from the server """ # If data initialized from the inset ---> we're done if Container._fetch(self, inset): return # Fetch test case runs from the server log.info("Fetching {0}'s case runs".format(self._identifier)) try: injects = self._teiid.run_case_runs(self.id) except teiid.TeiidNotConfigured: injects = self._server.TestRun.get_test_case_runs(self.id) except psycopg2.DatabaseError, error: log.debug("Failed to fetch data from Teiid: {0}".format(error)) injects = self._server.TestRun.get_test_case_runs(self.id)
def setter(self, value):
    # Initialize the attribute unless already done
    if getattr(self, "_" + field) is NitrateNone:
        self._fetch()
    # Update only if changed
    if getattr(self, "_" + field) != value:
        setattr(self, "_" + field, value)
        log.info(u"Updating {0}'s {1} to '{2}'".format(
            self.identifier, field, value))
        # Remember modified state if caching
        if config.get_cache_level() != config.CACHE_NONE:
            self._modified = True
        # Save the changes immediately otherwise
        else:
            self._update()
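# Hedged sketch of how a setter closure like the one above is usually
# paired with a matching getter and wired up via property(); the
# _getter/_setter factory names and the 'summary' field are
# illustrative, not necessarily this module's exact wiring.
def _getter(field):
    def getter(self):
        # Fetch data from the server on first access
        if getattr(self, "_" + field) is NitrateNone:
            self._fetch()
        return getattr(self, "_" + field)
    return getter

# e.g. inside a class definition:
#     summary = property(_getter("summary"), _setter("summary"),
#                        doc="Test case summary")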
def _fetch(self, inset=None): """ Fetch test run cases from the server """ # If data initialized from the inset ---> we're done if Container._fetch(self, inset): return # Fetch attached test cases from the server log.info("Fetching {0}'s test cases".format(self._identifier)) try: injects = self._teiid.run_cases(self.id) except teiid.TeiidNotConfigured: injects = self._server.TestRun.get_test_cases(self.id) except psycopg2.DatabaseError as error: log.debug("Failed to fetch data from Teiid: {0}".format(error)) injects = self._server.TestRun.get_test_cases(self.id) self._current = set([TestCase(inject) for inject in injects]) self._original = set(self._current)
def _fetch(self, inset=None): """ Fetch case plans from the server """ # If data initialized from the inset ---> we're done if Container._fetch(self, inset): return # Fetch test case plans from the server using multicall log.info("Fetching case plans for {0}".format(self._identifier)) multicall = xmlrpclib.MultiCall(self._server) for testcase in self._object.testcases._items: multicall.TestCasePlan.get(testcase.id, self._object.id) injects = [inject for inject in multicall()] log.data(pretty(injects)) # And finally create the initial object set self._current = set([CasePlan(inject) for inject in injects]) self._original = set(self._current)
def _fetch(self, inject=None): """ Fetch user data from the server """ Nitrate._fetch(self, inject) if inject is None: # Search by id if self._id is not NitrateNone: try: log.info("Fetching user " + self.identifier) inject = self._server.User.filter({"id": self.id})[0] except IndexError: raise NitrateError( "Cannot find user for " + self.identifier) # Search by login elif self._login is not NitrateNone: try: log.info( "Fetching user for login '{0}'".format(self.login)) inject = self._server.User.filter( {"username": self.login})[0] except IndexError: raise NitrateError("No user found for login '{0}'".format( self.login)) # Search by email elif self._email is not NitrateNone: try: log.info("Fetching user for email '{0}'".format( self.email)) inject = self._server.User.filter({"email": self.email})[0] except IndexError: raise NitrateError("No user found for email '{0}'".format( self.email)) # Otherwise initialize to the current user else: log.info("Fetching the current user") inject = self._server.User.get_me() self._index("i-am-current-user") # Initialize data from the inject and index into cache log.debug("Initializing user UID#{0}".format(inject["id"])) log.data(pretty(inject)) self._inject = inject self._id = inject["id"] self._login = inject["username"] self._email = inject["email"] if inject["first_name"] and inject["last_name"]: self._name = inject["first_name"] + " " + inject["last_name"] else: self._name = None self._index(self.login, self.email)
def _fetch(self, inject=None): """ Get the missing test plan type data """ Nitrate._fetch(self, inject) # Directly fetch from the initial object dict if inject is not None: log.info("Processing PlanType ID#{0} inject".format(inject["id"])) # Search by test plan type id elif self._id is not NitrateNone: try: log.info("Fetching test plan type " + self.identifier) inject = self._server.TestPlan.get_plan_type(self.id) except xmlrpclib.Fault as error: log.debug(error) raise NitrateError( "Cannot find test plan type for " + self.identifier) # Search by test plan type name else: try: log.info(u"Fetching test plan type '{0}'".format(self.name)) inject = self._server.TestPlan.check_plan_type(self.name) except xmlrpclib.Fault as error: log.debug(error) raise NitrateError("PlanType '{0}' not found".format( self.name)) # Initialize data from the inject and index into cache log.debug("Initializing PlanType ID#{0}".format(inject["id"])) log.data(pretty(inject)) self._inject = inject self._id = inject["id"] self._name = inject["name"] self._index(self.name)
def _fetch(self, inject=None): """ Fetch version data from the server """ Nitrate._fetch(self, inject) # Directly fetch from the initial object dict if inject is not None: log.debug("Processing Version ID#{0} inject".format(inject["id"])) # Search by version id elif self._id is not NitrateNone: try: log.info("Fetching version {0}".format(self.identifier)) inject = self._server.Product.filter_versions( {'id': self.id})[0] except IndexError: raise NitrateError( "Cannot find version for {0}".format(self.identifier)) # Search by product and name else: try: log.info(u"Fetching version '{0}' of '{1}'".format( self.name, self.product.name)) inject = self._server.Product.filter_versions( {'product': self.product.id, 'value': self.name})[0] except IndexError: raise NitrateError( "Cannot find version for '{0}'".format(self.name)) # Initialize data from the inject and index into cache log.debug("Initializing Version ID#{0}".format(inject["id"])) log.data(pretty(inject)) self._inject = inject self._id = inject["id"] self._name = inject["value"] self._product = Product(inject["product_id"]) # Index by product name & version name (if product is cached) if self.product._name is not NitrateNone: self._index("{0}---in---{1}".format(self.name, self.product.name)) # Otherwise index by id only else: self._index()
def remove(self, items):
    """ Remove an item or a list of items from the container """
    # Convert to set representation
    if isinstance(items, list):
        items = set(items)
    else:
        items = set([items])
    # If there are any items to be removed
    remove_items = items.intersection(self._items)
    if remove_items:
        log.info("Removing {0} from {1}'s {2}".format(
            listed([item.identifier for item in remove_items],
                   self._class.__name__, max=10),
            self._object.identifier,
            self.__class__.__name__))
        self._items.difference_update(items)
        if config.get_cache_level() != config.CACHE_NONE:
            self._modified = True
        else:
            self._update()
def add(self, items):
    """ Add an item or a list of items to the container """
    # Convert to set representation
    if isinstance(items, list):
        items = set(items)
    else:
        items = set([items])
    # If there are any new items
    add_items = items - self._items
    if add_items:
        log.info("Adding {0} to {1}'s {2}".format(
            listed([item.identifier for item in add_items],
                   self._class.__name__, max=10),
            self._object.identifier,
            self.__class__.__name__))
        self._items.update(items)
        if config.get_cache_level() != config.CACHE_NONE:
            self._modified = True
        else:
            self._update()
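# Usage sketch for the container add()/remove() API above: with caching
# enabled the change is only marked as modified and pushed later (for
# example by update()), otherwise it is sent to the server right away.
# The test case id and tag names are made-up examples.
testcase = TestCase(1234)
testcase.tags.add(Tag("regression"))
testcase.tags.remove(Tag("draft"))
testcase.update()   # flush modified containers to the server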
def _fetch(self, inject=None): """ Fetch product data from the server """ Nitrate._fetch(self, inject) # Directly fetch from the initial object dict if inject is not None: log.debug("Initializing Product ID#{0}".format(inject["id"])) log.data(pretty(inject)) self._id = inject["id"] self._name = inject["name"] # Search by product id elif self._id is not NitrateNone: try: log.info("Fetching product " + self.identifier) inject = self._server.Product.filter({'id': self.id})[0] log.debug("Initializing product " + self.identifier) log.data(pretty(inject)) self._inject = inject self._name = inject["name"] except IndexError: raise NitrateError( "Cannot find product for " + self.identifier) # Search by product name else: try: log.info(u"Fetching product '{0}'".format(self.name)) inject = self._server.Product.filter({'name': self.name})[0] log.debug(u"Initializing product '{0}'".format(self.name)) log.data(pretty(inject)) self._inject = inject self._id = inject["id"] except IndexError: raise NitrateError( "Cannot find product for '{0}'".format(self.name)) # Index the fetched object into cache self._index(self.name)
def _fetch(self, inject=None): """ Fetch tag data from the server """ Nitrate._fetch(self, inject) # Directly fetch from the initial object dict if inject is not None: log.debug("Initializing Tag ID#{0}".format(inject["id"])) log.data(pretty(inject)) self._id = inject["id"] self._name = inject["name"] # Search by tag id elif self._id is not NitrateNone: try: log.info("Fetching tag " + self.identifier) inject = self._server.Tag.get_tags({'ids': [self.id]}) log.debug("Initializing tag " + self.identifier) log.data(pretty(inject)) self._inject = inject self._name = inject[0]["name"] except IndexError: raise NitrateError( "Cannot find tag for {0}".format(self.identifier)) # Search by tag name else: try: log.info(u"Fetching tag '{0}'".format(self.name)) inject = self._server.Tag.get_tags({'names': [self.name]}) log.debug(u"Initializing tag '{0}'".format(self.name)) log.data(pretty(inject)) self._inject = inject self._id = inject[0]["id"] except IndexError: raise NitrateError( "Cannot find tag '{0}'".format(self.name)) # Index the fetched object into cache self._index(self.name)