def remoteSetUp(self):
    """Build and validate the pipeline, start watching its elements and bus.

    Creates the pipeline via createPipeline(), records the result on the
    "valid-pipeline" checklist item, and stops the remote test early when
    pipeline creation failed.  On success it seeds the element list,
    recursively watches the container and connects the bus message handler
    before chaining up to PythonDBusTest.remoteSetUp().
    """
    debug("%s", self.uuid)
    gst.log("%s" % self.uuid)
    # local variables
    # create the pipeline
    try:
        self.pipeline = self.createPipeline()
    # was a bare except: narrowed so SystemExit/KeyboardInterrupt pass through
    except Exception:
        exception("Error while creating pipeline")
        self.pipeline = None
    finally:
        # record the outcome even when creation raised
        self.validateStep("valid-pipeline", self.pipeline is not None)
        if self.pipeline is None:
            self.remoteStop()
            return
    factory = self.pipeline.get_factory()
    if factory is None:
        facname = "(no factory)"
    else:
        facname = factory.get_name()
    # (name, factoryname, parentname)
    self._elements = [(self.pipeline.get_name(), facname, "")]
    self._watchContainer(self.pipeline)
    # connect to bus
    self.bus = self.pipeline.get_bus()
    self.bus.add_signal_watch()
    self.bus.connect("message", self._busMessageHandlerCb)
    PythonDBusTest.remoteSetUp(self)
def isExpectedResult(self, checkitem, result, extra_info):
    """Return True when (checkitem, result) matches a known expected-failure pattern."""
    for pattern in self._expected_failure_patterns:
        results = pattern["results"]
        if checkitem not in results:
            continue
        allowed = results[checkitem]
        if str(result) not in allowed:
            if "None" in allowed:
                debug("This rule only matches skipped checks.")
            elif "0" in allowed:
                debug("This rule only matches failed checks.")
            else:
                warning("This rule doesn't match any checks: %s", pattern)
            continue
        # no argument constraints -> the pattern matches outright
        if "arguments" not in pattern:
            return True
        # every constrained argument must be present and hold an allowed value
        if all(k in self.arguments and self.arguments[k] in v
               for k, v in pattern["arguments"].items()):
            return True
    return False
def addSubTest(self, testclass, arguments, monitors=None, position=-1, instance_name=None):
    """
    testclass : a testclass to run next, can be a Scenario
    arguments : dictionnary of arguments
    monitors : list of (Monitor, monitorargs) to run the test with
    position : the position to insert the test in (-1 for last)
    instance_name : a human-readable name for the test.

    This method can be called several times in a row at any moment.
    """
    if instance_name is None:
        instance_name = "%u.%s" % (len(self._subtest_names), testclass.__test_name__)
    # filter out unused arguments in arguments for non-scenarios
    if issubclass(testclass, Scenario):
        args = copy(arguments)
    else:
        args = dict((validkey, arguments[validkey])
                    for validkey in testclass.getFullArgumentList()
                    if validkey in arguments.keys())
    debug("Appending subtest %r args:%r", testclass, args)
    entry = (testclass, args, monitors, instance_name)
    if position == -1:
        self._tests.append(entry)
    else:
        self._tests.insert(position, entry)
    self._subtest_names.append(instance_name)
def remoteTearDown(self):
    # Tear down the remote pipeline test: chain up first, then null the
    # pipeline and report errors/tags/elements gathered during the run.
    # Returns True when teardown completed, False when the base class
    # teardown already failed.
    if not PythonDBusTest.remoteTearDown(self):
        return False
    gst.log("Tearing Down")
    # unref pipeline and so forth
    if self._waitcb:
        gobject.source_remove(self._waitcb)
        self._waitcb = None
    if self.pipeline:
        self.pipeline.set_state(gst.STATE_NULL)
    self.validateStep("no-errors-seen", self._errors == [])
    if not self._errors == []:
        self.extraInfo("errors", self._errors)
    if not self._tags == {}:
        debug("Got tags %r", self._tags)
        # dbus-python marshalling workarounds follow; order matters here.
        for key, val in self._tags.iteritems():
            if isinstance(val, int):
                # make sure that only values < 2**31 (MAX_INT32) are ints
                # TODO : this is gonna screw up MASSIVELY with values > 2**63
                if val >= 2**31:
                    self._tags[key] = long(val)
        # FIXME : if the value is a list, the dbus python bindings screw up
        #
        # For the time being we remove the values of type list, but this is REALLY
        # bad.
        listval = [x for x in self._tags.keys() if type(self._tags[x]) == list]
        if listval:
            warning("Removing this from the taglist since they're list:%r", listval)
            for val in listval:
                del self._tags[val]
        self.extraInfo("tags", dbus.Dictionary(self._tags, signature="sv"))
    if not self._elements == []:
        self.extraInfo("elements-used", self._elements)
    return True
def setStdOutAndErr(self, stderr_out_path):
    """Redirect both stdout and stderr of the test to the same file path."""
    debug("New path: %s", stderr_out_path)
    # stdout and stderr share one destination
    self._stdout = stderr_out_path
    self._stderr = stderr_out_path
    self._ensureOutRedirection()
    redirector = self._redir_tty_thread
    if redirector is not None:
        redirector.setStdoutFile(stderr_out_path)
        redirector.setStderrFile(stderr_out_path)
def _generate(self):
    # Launch the external generator command as a shell subprocess in self.cwd.
    # NOTE(review): on success this function falls off the end and returns
    # None (the Popen object is bound to a local and never used here), while
    # the error path returns [] -- confirm the intended return value against
    # the caller; this block may be a truncated view of the full method.
    debug("Running generator command line in %r: %r" % (self.cwd, self.command))
    try:
        process = subprocess.Popen([self.command], stdin=None,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   cwd=self.cwd, shell=True,
                                   universal_newlines=True)
    except Exception, e:
        exception("Error running external generator: %r: %s", self.command, e)
        return []
def _videoSourcePadAdded(self, videoSource, pad):
    """Link the video source to the encoder, forcing self._videoCaps when present."""
    debug("pad %r, videoCaps:%r", pad, self._videoCaps)
    try:
        if not self._videoCaps:
            # no caps restriction requested
            self._videoSource.link(self._videoEncoder)
        else:
            self._videoSource.link(self._videoEncoder, gst.Caps(str(self._videoCaps)))
    finally:
        debug("done")
def _noMorePadsCb(self, dbin):
    """decodebin 'no-more-pads' handler: when non-raw streams remain, analyze and stop."""
    debug("no more pads")
    gst.log("no more pads")
    non_raw = [stream for stream in self._streams if not stream.raw]
    if non_raw:
        debug("we have non-raw streams, stopping")
        # FIXME : add post-checking
        self._analyzeDecodebin()
        self._validateStreams()
        gobject.idle_add(self.stop)
def callRemoteStart(self):
    """Invoke remoteStart() on the remote instance with the parsed arguments."""
    # call remote instance "remoteStart()"
    if not self._remoteinstance:
        return
    self.args = self._parse_test_arguments(self.args)
    # fixed typo in the log message: "argunents" -> "arguments"
    debug("Starting remote with arguments %s outputfiles %s",
          self.args, self.getOutputFiles())
    # asynchronous dbus call; completion arrives via the handlers below
    self._remoteinstance.remoteStart(self.args, self.getOutputFiles(),
                                     reply_handler=self._voidRemoteStartCallBackHandler,
                                     error_handler=self._voidRemoteStartErrBackHandler)
def create_temp_filenames(self, global_files):
    """Allocate temporary file names for this test's outputfiles.

    global_files : when True process only the outputfiles flagged "global",
    otherwise only the per-run ones.  Global files keep an already-assigned
    name; non-global names are (re)created every call.
    """
    oflist = self.getFullOutputFilesList()
    # iteritems avoids the double dict lookup of iterating keys then indexing
    for ofname, ofinfo in oflist.iteritems():
        if global_files != ofinfo["global"]:
            continue
        # idiom fix: "not x in y" -> "x not in y"
        if ofname not in self._outputfiles or not global_files:
            ofd, opath = self._testrun.get_temp_file(nameid=ofname)
            debug("created temp file name '%s' for outputfile '%s' [%s]",
                  opath, ofname, self.uuid)
            self._outputfiles[ofname] = opath
            # we only need the name; close the descriptor right away
            os.close(ofd)
def _findCoreFile(self):
    """Return the path of a core dump ('core' or 'core.<pid>') in the working directory, or None."""
    cwd = self.testrun.getWorkingDirectory()
    files = os.listdir(cwd)
    debug("files : %r", files)
    wanted = ("core", "core.%d" % self.test._pid)
    for fname in files:
        if fname in wanted:
            return os.path.join(cwd, fname)
    return None
def getIterationOutputFiles(self, iteration):
    """
    Return a dictionnary containing the outputfiles in the form of
    {outputfile-name: /path/to/file} for a specific iteration,
    or None when that iteration has no outputfiles.
    """
    if iteration in self._iteration_outputfiles:
        return self._iteration_outputfiles[iteration]
    debug("No outputfile for iteration %s", iteration)
    return None
def setTimeout(self, timeout):
    """
    Set the timeout period for running this test in seconds.
    Returns True if the timeout could be modified, else False.
    """
    debug("timeout : %d", timeout)
    if self._testtimeoutid:
        # a timeout is already armed: shift its deadline by the delta
        debug("updating timeout/timeouttime")
        self._testtimeouttime = self._testtimeouttime - self._timeout + timeout
    self._timeout = timeout
    return True
def extraInfo(self, key, value):
    """
    Give extra information obtained while running the tests.

    If key was already given, the new value will override the value
    previously given for the same key.

    Called by the test itself
    """
    debug("%s : %r", key, value)
    # later calls with the same key simply overwrite the stored value
    self._extraInfo[key] = value
def _testTimeoutCb(self):
    """GLib timeout callback: stop the test unless the deadline moved.

    Always returns False so the current source is removed; when the
    deadline was pushed back, a fresh source is armed for the remainder.
    """
    debug("timeout for %r", self)
    now = time.time()
    remaining = self._testtimeouttime - now
    if remaining > 0:
        debug("timeout must have changed in the meantime")
        self._testtimeoutid = gobject.timeout_add(int(remaining * 1000),
                                                  self._testTimeoutCb)
        return False
    self._testtimeoutid = 0
    self.stop()
    return False
def stopAllThreads(self):
    """Join every thread in self.threads, retrying until all have been joined."""
    debug("stopping all threads")
    joinedthreads = 0
    # re-joining an already-joined thread is a no-op, so the outer loop is safe
    while joinedthreads < len(self.threads):
        for thread in self.threads:
            debug("Trying to stop thread %r" % thread)
            try:
                thread.join()
                joinedthreads += 1
            # was a bare except: narrowed so SystemExit/KeyboardInterrupt
            # are not swallowed while shutting down
            except Exception:
                warning("what happened ??")
def addIterationOutputFile(self, key, value):
    """
    Report the location of an output file for a specific iteration
    """
    debug("%s : %s", key, value)
    # create the per-iteration dict on first use, then record the file
    self._iteration_outputfiles.setdefault(self._iteration, {})[key] = value
def tearDown(self):
    # Close the memcheck log: drop it when empty, report it otherwise.
    Monitor.tearDown(self)
    # self._logfile is an open file descriptor (or falsy when never opened)
    if self._logfile:
        os.close(self._logfile)
    if not os.path.getsize(self._logfilepath):
        # if log file is empty remove it
        debug("log file is empty, removing it")
        os.remove(self._logfilepath)
    else:
        # else report it
        self.setOutputFile("memcheck-log", self._logfilepath)
def callRemoteSetUp(self):
    """Invoke remoteSetUp() on the remote instance with the global arguments only."""
    # call remote instance "remoteSetUp()"
    if not self._remoteinstance:
        return
    # hoisted: getFullArgumentList() was called twice per argument
    fullargs = self.getFullArgumentList()
    args = dict((k, v) for k, v in self.args.items()
                if (k in fullargs and fullargs[k]["global"] == True))
    args = self._parse_test_arguments(args)
    # fixed typo in the log message: "argunents" -> "arguments"
    debug("Setting up remote with arguments %s outputfiles %s",
          args, self.getOutputFiles())
    # asynchronous dbus call; completion arrives via the handlers below
    self._remoteinstance.remoteSetUp(args, self.getOutputFiles(),
                                     reply_handler=self._voidRemoteSetUpCallBackHandler,
                                     error_handler=self._voidRemoteSetUpErrBackHandler)
def setAsyncSetupTimeout(self, timeout):
    """
    Set the timeout period for asynchronous test to startup in
    seconds.
    Returns True if the timeout could be modified, else False.
    """
    debug("timeout : %d", timeout)
    if self._asynctimeoutid:
        # a timeout is already armed: shift its deadline by the delta
        debug("updating timeout/timeouttime")
        self._asynctimeouttime -= (self._asynctimeout - timeout)
    self._asynctimeout = timeout
    return True
def quit(self):
    """
    Quit the client
    """
    debug("Quitting...")
    if self._running:
        self.stop()
    try:
        # let storage finish flushing, then have it call _exit when done
        self._storage.close(self._exit)
    # was a bare except: narrowed so SystemExit/KeyboardInterrupt still
    # propagate; on any storage failure we exit directly instead
    except Exception:
        self._exit()
def _elementAddedCb(self, container, element):
    """Record every element added to a watched container, recursing into bins."""
    debug("New element %r in container %r", element, container)
    factory = element.get_factory()
    factory_name = ""
    # idiom fix: "not x is None" -> "x is not None"
    if factory is not None:
        factory_name = factory.get_name()
    # add himself
    self._elements.append((element.get_name(), factory_name, container.get_name()))
    # if bin, add current and connect signal
    if isinstance(element, gst.Bin):
        self._watchContainer(element)
def tearDown(self): print 'Tearing down memcheck monitor' Monitor.tearDown(self) if self._logfile: os.close(self._logfile) if not os.path.getsize(self._logfilepath): # if log file is empty remove it debug("log file is empty, removing it") os.remove(self._logfilepath) else: # else report it print 'Reporting memcheck monitor log: ', self._logfilepath self.setOutputFile("memcheck-log", self._logfilepath)
def remoteTest(self):
    """Set the pipeline to its initial state and validate the state change."""
    # kickstart pipeline to initial state
    PythonDBusTest.remoteTest(self)
    debug("Setting pipeline to initial state %r", self.__pipeline_initial_state__)
    gst.log("Setting pipeline to initial state %r" % self.__pipeline_initial_state__)
    res = self.pipeline.set_state(self.__pipeline_initial_state__)
    debug("set_state returned %r", res)
    gst.log("set_state() returned %r" % res)
    # idiom fix: "not res == X" -> "res != X"
    self.validateStep("pipeline-change-state", res != gst.STATE_CHANGE_FAILURE)
    if res == gst.STATE_CHANGE_FAILURE:
        warning("Setting pipeline to initial state failed, stopping test")
        gst.warning("State change failed, stopping")
        self.stop()
def _initialize(self):
    """Cache each generator's length and accumulate the total combination count."""
    # figure out the length of all generators
    debug("initializing")
    rebuilt = {}
    for key, (gen, idx, _) in self.generators.iteritems():
        length = len(gen)
        if length:
            self.combinations *= length
        rebuilt[key] = [gen, idx, length]
    debug("self.combinations: %d" % self.combinations)
    self.generators = rebuilt
    self._initialized = True
def setWorkingDirectory(self, workdir):
    """
    Change the working directory. This can only be called when the
    TestRun isn't running.

    Returns True if the working directory was properly changed.
    Returns False if there was a problem.
    """
    # refuse changes while a run is in progress
    if self._running:
        return False
    debug("Changing workdir to %s", workdir)
    self._workingdir = workdir
    self._outputdir = os.path.join(workdir, "outputfiles")
    return True
def _stop(self, paths):
    """Compress and register per-iteration logs, then the global outputfiles. Returns True."""
    for desc, path in paths.iteritems():
        if os.path.getsize(path):
            # non-empty: compress and register for this iteration
            self.addIterationOutputFile(desc, self._compressFile(path))
        else:
            # if log file is empty remove it
            debug("log file is empty, removing it")
            os.remove(path)
    # Add global outputfiles
    for desc, path in self.getOutputFiles().iteritems():
        self.addIterationOutputFile(desc, self._compressFile(path, False))
    return True
def addMonitor(self, monitor, monitorargs=None):
    """
    Add a monitor to this test instance.

    Checks will be done to ensure that the monitor can be applied
    on this instance.

    Returns True if the monitor was applied succesfully.
    """
    debug("monitor:%r, args:%r", monitor, monitorargs)
    # check if monitor is valid
    if not isinstance(self, monitor.__applies_on__):
        warning("The given monitor cannot be applied on this test")
        return False
    self._monitors.append((monitor, monitorargs))
    # fixed: the success path previously fell off the end and returned None,
    # contradicting the documented "Returns True" contract
    return True
def get_private_session_bus():
    """
    Get the private dbus BusConnection to use in the client.

    Tests should NOT use this method
    """
    global private_bus, private_bus_pid, private_bus_address
    # idiom fix: "== None" -> "is None"
    if private_bus is None:
        if private_bus_pid:
            # cleanup a previously spawned private bus before starting anew
            kill_private_dbus()
        private_bus_address, private_bus_pid = spawn_session_dbus()[:2]
        debug("Creating BusConnection for address %s" % private_bus_address)
        gml = DBusGMainLoop()
        private_bus = BusConnection(private_bus_address, mainloop=gml)
    return private_bus
def setUp(self):
    # Spawn the remote test subprocess, wiring it to the private DBus
    # connection via PRIVATE_DBUS_ADDRESS.  Returns True on success,
    # False when the base setUp or the spawn failed.
    info("uuid:%s", self.uuid)
    if Test.setUp(self) == False:
        return False
    # get the remote launcher
    pargs = self._preargs
    pargs.extend(self.get_remote_launcher_args())
    # a string command line implies shell=True for Popen below
    shell = isinstance (pargs, basestring)
    cwd = self._testrun.getWorkingDirectory()
    self._environ["PRIVATE_DBUS_ADDRESS"] = self._bus_address
    info("Setting PRIVATE_DBUS_ADDRESS : %r" % self._bus_address)
    info("bus:%r" % self._bus)
    self._prepareArguments()
    if False:
        # useful to allow some time to run dbus-monitor on the private bus
        print("Setting PRIVATE_DBUS_ADDRESS : %r" % self._bus_address)
        time.sleep(5)
    # spawn the other process
    info("opening %r" % pargs)
    info("cwd %s" % cwd)
    try:
        # record the spawn time so startup latency can be measured later
        self._subprocessspawntime = time.time()
        self._process = subprocess.Popen(pargs, stdin = self._stdin,
                                         stdout = subprocess.PIPE,
                                         stderr = subprocess.PIPE,
                                         env=self._environ, shell = shell,
                                         cwd=cwd)
        self._ensureOutRedirection()
        self._pid = self._process.pid
    except:
        exception("Error starting the subprocess command ! %r", pargs)
        self.validateChecklistItem("dbus-process-spawned", False)
        return False
    debug("Subprocess created successfully [pid:%d]", self._pid)
    self.validateChecklistItem("dbus-process-spawned")
    # add a poller for the proces
    self._processpollid = gobject.timeout_add(500, self._pollSubProcess)
    # Don't forget to set a timeout for waiting for the connection
    return True