def multiupdate(cls, objects, verbose=True):
    first = None
    for obj in objects:
        if first is None:
            first = obj
        if first.proxy is not obj.proxy:
            raise RuntimeError(
                "multiupdate method can process only jobs from the same server"
            )
    if first is None:
        return set()  # nothing to do
    multicall = xmlrpc_client.MultiCall(first.proxy)
    for obj in objects:
        multicall.pck_status(obj.pck_id)
    multicall_iterator = multicall()
    goodObjects = set()
    for index in range(len(objects)):
        try:
            pck_status = multicall_iterator[index]
            obj = objects[index]
            obj.__setstatus__(pck_status)
            goodObjects.add(obj)
        except xmlrpc_client.Fault as e:
            if verbose:
                print("multicall exception raised: %s" % e, file=sys.stderr)
    return goodObjects
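# A minimal stdlib sketch of the per-result fault handling that multiupdate
# relies on (the endpoint URL and the packet ids are placeholders; pck_status
# is taken from the snippet above): MultiCall batches the queued calls into a
# single system.multicall request, and indexing the returned iterator
# re-raises the Fault only for the calls that failed, so the rest of the
# batch remains usable.
import sys
import xmlrpc.client as xmlrpc_client

proxy = xmlrpc_client.ServerProxy("http://localhost:8000")
multicall = xmlrpc_client.MultiCall(proxy)
multicall.pck_status("pck-1")
multicall.pck_status("pck-2")
results = multicall()
for index in range(2):
    try:
        print(results[index])
    except xmlrpc_client.Fault as error:
        print("call %d failed: %s" % (index, error), file=sys.stderr)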
def _remove(self, plans):
    """ Unlink provided plans from the test case """
    multicall = xmlrpclib.MultiCall(self._server)
    for plan in plans:
        log.info("Unlinking {0} from {1}".format(
            plan.identifier, self._identifier))
        multicall.TestCase.unlink_plan(self.id, plan.id)
    multicall()
def update(self):
    """ Update case plans with modified sortkey """
    modified = [caseplan for caseplan in self if caseplan._modified]
    # Nothing to do if there are no sortkey changes
    if not modified:
        return
    # Update all modified caseplans in a single multicall
    log.info("Updating {0}'s case plans".format(self._identifier))
    multicall = xmlrpclib.MultiCall(self._server)
    for caseplan in modified:
        caseplan._update(multicall)
        caseplan._modified = False
    multicall()
def update(self):
    """ Update modified case runs in multicall batches """
    # Check for modified case runs
    modified = [caserun for caserun in self if caserun._modified]
    if not modified:
        return
    log.info("Updating {0}'s case runs".format(self._identifier))
    # Update modified caseruns in slices
    for slice in sliced(modified, config.MULTICALL_MAX):
        multicall = xmlrpclib.MultiCall(self._server)
        for caserun in slice:
            caserun._update(multicall)
            caserun._modified = False
        multicall()
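# The sliced() helper and config.MULTICALL_MAX are not defined in the snippet
# above, so this is only a sketch under the assumption that sliced(items, n)
# yields consecutive chunks of at most n items, keeping each MultiCall batch
# at a bounded size.
def sliced(items, max_items):
    """ Yield successive slices of at most max_items elements """
    for start in range(0, len(items), max_items):
        yield items[start:start + max_items]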
def command(args):
    import re
    import sys

    import six.moves.xmlrpc_client as xmlrpclib

    from sr.tools import spending
    from sr.tools.trac import TracProxy

    try:
        root = spending.find_root()
    except spending.NotSpendingRepo:
        print("Please run in spending.git top level directory",
              file=sys.stderr)
        exit(1)

    # Sum up the recorded transactions per trac ticket
    spends = spending.load_transactions(root)
    spendsumgrp = {}
    for s in spends:
        if s.trac in spendsumgrp:
            spendsumgrp[s.trac] += float(s.cost)
        else:
            spendsumgrp[s.trac] = float(s.cost)

    # Fetch all open purchasing tickets in a single multicall
    server = TracProxy(anon=True)
    mserver = xmlrpclib.MultiCall(server)
    tickets = server.ticket.query("status!=closed&component=Purchasing")
    for ticket in tickets:
        mserver.ticket.get(ticket)

    # Extract the total cost from each ticket description
    costsumgrp = {}
    for ticket in mserver():
        match = re.search('Total cost: \xa3([0-9.]+)',
                          ticket[3]['description'])
        if match is None:
            print("Unable to determine cost for ticket " + str(ticket[0]) +
                  ". Invalid formatting")
            continue
        if ticket[0] in costsumgrp:
            costsumgrp[ticket[0]] += float(match.groups()[0])
        else:
            costsumgrp[ticket[0]] = float(match.groups()[0])

    # Report tickets whose recorded cost does not match the transactions
    for val in costsumgrp:
        if spendsumgrp[val] != costsumgrp[val]:
            print("Ticket " + str(val) + " does not match transactions")
            print("\tTicket cost: £" + str(costsumgrp[val]))
            print("\tTransactions: £" + str(spendsumgrp[val]))
def _remove(self, cases):
    """ Unlink provided cases from the test plan """
    # Unlink provided cases on the server
    multicall = xmlrpclib.MultiCall(self._server)
    for case in cases:
        log.info("Unlinking {0} from {1}".format(
            case.identifier, self._identifier))
        multicall.TestCase.unlink_plan(case.id, self.id)
    multicall()
    # Remove corresponding CasePlan objects from the PlanCasePlans container
    if PlanCasePlans._is_cached(self._object.caseplans):
        self._object.caseplans.remove([
            CasePlan(testcase=case, testplan=self._object)
            for case in cases])
def _fetch(self, inset=None):
    """ Fetch case plans from the server """
    # If data initialized from the inset ---> we're done
    if Container._fetch(self, inset):
        return
    # Fetch test case plans from the server using multicall
    log.info("Fetching case plans for {0}".format(self._identifier))
    multicall = xmlrpclib.MultiCall(self._server)
    for testcase in self._object.testcases._items:
        multicall.TestCasePlan.get(testcase.id, self._object.id)
    injects = [inject for inject in multicall()]
    log.data(pretty(injects))
    # And finally create the initial object set
    self._current = set([CasePlan(inject) for inject in injects])
    self._original = set(self._current)
def AddJobsBulk(self, *jobs):
    """Fast (batch) addition of jobs to the packet.

    Accepts an unlimited number of arguments; each argument is a
    dictionary whose keys and values match the parameters of the
    AddJob method."""
    multicall = xmlrpc_client.MultiCall(self.proxy)
    for job in jobs:
        if "files" in job:
            self.AddFiles(job["files"])
        parents = [pj.id for pj in job.get("parents", [])]
        pipe_parents = [pj.id for pj in job.get("pipe_parents", [])]
        multicall.pck_add_job(
            self.id, job["shell"], parents, pipe_parents,
            job.get("set_tag", None),
            job.get("tries", self.DEFAULT_TRIES_COUNT),
            job.get("max_err_len", None),
            job.get("retry_delay", None),
            job.get("pipe_fail", None),
            job.get("description", ""),
            job.get("notify_timeout", NOTIFICATION_TIMEOUT),
            job.get("max_working_time", KILL_JOB_DEFAULT_TIMEOUT),
            job.get("output_to_status", False),
            job.get("run_as", ""))
    return multicall()
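# Hypothetical usage sketch (the packet object `pck` and the shell commands
# are illustrative only): each positional argument mirrors the keyword
# parameters of AddJob, and the whole batch is sent to the server in a
# single pck_add_job multicall.
pck.AddJobsBulk(
    {"shell": "echo prepare", "tries": 3},
    {"shell": "echo run", "description": "main step"},
)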
def multicall_start():
    """ Enter MultiCall mode and queue following xmlrpc calls """
    log.info("Starting multicall session, gathering updates...")
    Nitrate._multicall_proxy = xmlrpclib.MultiCall(Nitrate()._server)
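# The matching teardown is not shown above, so this is only a sketch under
# the assumption that a multicall_end() counterpart exists: it dispatches
# everything queued on the shared proxy and leaves MultiCall mode.
def multicall_end():
    """ Dispatch queued xmlrpc calls and leave MultiCall mode """
    log.info("Ending multicall session, sending to the server...")
    response = Nitrate._multicall_proxy()
    Nitrate._multicall_proxy = None
    return response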
def Reset(self):
    """ Queue a reset_tag call for every tag and send them in one batch """
    multicall = xmlrpc_client.MultiCall(self.conn.proxy)
    for tag in self.tags:
        multicall.reset_tag(tag)
    return multicall()
def Check(self):
    """ Check all tags in a single multicall and store per-tag states """
    multicall = xmlrpc_client.MultiCall(self.conn.proxy)
    for tag in self.tags:
        multicall.check_tag(tag)
    multicall_iterator = multicall()
    self.states = dict(zip(self.tags, multicall_iterator))