def getPerformanceEstimate(self, test):
    # Compare elapsed time so far with the expected performance figure to
    # estimate how far through the run this test is.
    expected = performance.getTestPerformance(test)
    if expected > 0:
        elapsed = self.timeMonitor.getElapsedTime(test)
        if elapsed >= 0:
            perc = (elapsed * 100) / expected
            return "\nReckoned to be " + str(int(perc)) + "% complete comparing elapsed time with expected performance.\n" + \
                   "(" + performance.getTimeDescription(elapsed) + " of " + performance.getTimeDescription(expected) + ")"
    return ""
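# A minimal standalone sketch of the estimate arithmetic above, with the
# framework plumbing (performance files, the time monitor) stripped away.
# The function name and the sample numbers are invented for illustration.
def completion_percentage(elapsed_secs, expected_secs):
    # Mirrors perc = (elapsed * 100) / expected in getPerformanceEstimate.
    return int((elapsed_secs * 100) / expected_secs)

# e.g. 30 seconds elapsed against an expected 120 seconds:
# completion_percentage(30, 120) == 25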
def forceOnPerformanceMachines(self):
    # An explicit "perf" option on the command line always forces this.
    if "perf" in self.optionMap:
        return 1
    # Also force tests whose expected run time exceeds the configured threshold.
    minTimeForce = plugins.getNumberOfSeconds(str(self.test.getConfigValue("min_time_for_performance_force")))
    if minTimeForce >= 0 and performance.getTestPerformance(self.test) > minTimeForce:
        return 1
    # If we haven't got a log file yet, we should do this so we collect performance reliably.
    logFile = self.test.getFileName(self.test.getConfigValue("log_file"))
    return logFile is None
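# A hypothetical standalone sketch of the same decision, with the optionMap,
# config lookups, and log-file check replaced by plain parameters; the names
# below are invented for illustration and are not part of the framework API.
def should_force(perf_option_given, expected_secs, min_time_force, log_file_exists):
    if perf_option_given:                  # explicit "perf" option always forces
        return True
    if min_time_force >= 0 and expected_secs > min_time_force:
        return True                        # expected run time exceeds the threshold
    return not log_file_exists             # no log file yet: force, so that
                                           # performance is collected reliably

# e.g. a test expected to take 60s, with min_time_for_performance_force=30:
# should_force(False, 60, 30, True) == True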