def onCvresults(self, *args, **kwargs):
    """Display the results of a cross-validation test comparing the
    user's current option settings against the SpamBayes defaults.

    Expects the HTTP request parameters in **kwargs: "how" (the submit
    button value, discarded), "TestToolsUI_source" (which message source
    to test against), and zero or more "<section>_<option>" overrides.

    Writes an HTML results page via self.write(); returns None.
    """
    del kwargs["how"]
    self._writePreamble("CV Test Results")
    nsets = options["TestToolsUI", "n"]

    # First run: every restorable option reset to its default.
    self.write("<p>Testing with defaults...</p>")
    saved = {}
    for opt in options.options(True):
        # Record the current value so it can be restored afterwards.
        # Options flagged do_not_restore are left alone — these are
        # predominately storage options, and at least the cache
        # directory ones may be needed later on.
        sect, opt = opt[1:].split(']', 1)
        saved[(sect, opt)] = options[(sect, opt)]
        if not options.no_restore(sect, opt):
            options.set(sect, opt, options.default(sect, opt))
    # The source selection applies to both runs, so set it before the
    # defaults run as well.
    options["TestToolsUI", "source"] = kwargs["TestToolsUI_source"]
    # XXX Cache this somewhere?  If the testing data isn't changing,
    # XXX and the user is running multiple tests, then it doesn't
    # XXX make much sense to rerun the 'default's test over and over
    # XXX again.
    cv_out, errors = self.timCV(nsets)
    defaults = self.rates(cv_out)

    # Second run: apply whatever option values came in with the request.
    self.write("<p>Testing with selected settings...</p>")
    for opt in options.options(True):
        sect, opt = opt[1:].split(']', 1)
        try:
            value = kwargs["%s_%s" % (sect, opt)]
        except KeyError:
            # Not supplied in the request — leave as the default.
            pass
        else:
            options.set(sect, opt, value)
    cv_out, errors = self.timCV(nsets)
    current = self.rates(cv_out)

    # Restore every option to the value it had before the test.
    options["TestToolsUI", "source"] = kwargs["TestToolsUI_source"]
    for opt in options.options(True):
        sect, opt = opt[1:].split(']', 1)
        options.set(sect, opt, saved[(sect, opt)])

    # Compare the two runs and render the result.
    comp, errors = self.compare(defaults, current)
    # XXX This is just what you'd get from running cmp.py
    # XXX at the moment - it could be prettied up a bit.
    comp = comp.read()
    box = self._buildBox('Cross-validation test', None,
                         cgi.escape(comp).replace("\n", "<br />"))
    self.write(box)
    self._writePostamble()
def onCvresults(self, *args, **kwargs):
    """Render a cross-validation comparison page: current settings
    versus the defaults, using the message source chosen in the form.
    """
    del kwargs["how"]
    self._writePreamble("CV Test Results")
    text = "Display the results of a cross-validation test with the " \
           "current settings against the defaults."
    nsets = options["TestToolsUI", "n"]

    # Pass 1 - snapshot every option, reset restorable ones to their
    # defaults, and run the baseline cross-validation.
    self.write("<p>Testing with defaults...</p>")
    previous = {}
    for raw in options.options(True):
        sect, name = raw[1:].split(']', 1)
        previous[(sect, name)] = options[(sect, name)]
        if not options.no_restore(sect, name):
            options.set(sect, name, options.default(sect, name))
    options["TestToolsUI", "source"] = kwargs["TestToolsUI_source"]
    cv_out, errors = self.timCV(nsets)
    defaults = self.rates(cv_out)

    # Pass 2 - overlay any option values supplied in the request and
    # run the cross-validation again with those settings.
    self.write("<p>Testing with selected settings...</p>")
    absent = object()
    for raw in options.options(True):
        sect, name = raw[1:].split(']', 1)
        value = kwargs.get("%s_%s" % (sect, name), absent)
        if value is not absent:
            options.set(sect, name, value)
    cv_out, errors = self.timCV(nsets)
    current = self.rates(cv_out)

    # Put every option back exactly as it was before the test.
    for raw in options.options(True):
        sect, name = raw[1:].split(']', 1)
        options.set(sect, name, previous[(sect, name)])

    # Compare the two runs and emit the (plain) diff as HTML.
    comp, errors = self.compare(defaults, current)
    comp = comp.read()
    box = self._buildBox('Cross-validation test', None,
                         cgi.escape(comp).replace("\n", "<br />"))
    self.write(box)
    self._writePostamble()
# These are the options that will be offered on the testing page.
# If the option is None, then the entry is a header and the following
# options will appear in a new box on the configuration page.
# These are also used to generate http request parameters and template
# fields/variables.
testtools_ini_map = (
##    ('General Options', None),
    # Put any general options that we wish to encourage people to test
    # here, for example:
#    ('Classifier', 'max_discriminators'),
    ('Experimental Options', None),
    )

# Dynamically add any current experimental/deprecated options
# (those whose names begin with "x-").
for opt in options.options(True):
    sect, opt = opt[1:].split(']', 1)
    if opt[:2].lower() == "x-":
        testtools_ini_map += ((sect, opt),)


class TestToolsUserInterface(ProxyUI.ProxyUserInterface):
    """Serves the HTML user interface for the test tools."""

    def onCv(self):
        """Serve the cross-validation test configuration page."""
        global testtools_ini_map
        self._writePreamble("CV Test")
        # Start from the standard configuration form template and strip
        # the rows this page does not use.
        configTable = self.html.configForm.clone()
        for unused_row in ("configTextRow1", "configTextRow2",
                           "configCbRow1", "configRow2", "blankRow"):
            delattr(configTable, unused_row)
from spambayes.Options import options

# These are the options that will be offered on the testing page.
# If the option is None, then the entry is a header and the following
# options will appear in a new box on the configuration page.
# These are also used to generate http request parameters and template
# fields/variables.
testtools_ini_map = (
##    ('General Options', None),
    # Put any general options that we wish to encourage people to test
    # here, for example:
#    ('Classifier', 'max_discriminators'),
    ('Experimental Options', None),
    )

# Dynamically add any current experimental/deprecated options.
for opt in options.options(True):
    _sect, _opt = opt[1:].split(']', 1)
    # Bug fix: test the option *name*, not the raw "[sect]opt" string.
    # The raw string always starts with "[", so the old test
    # (opt[:2].lower() == "x-") could never match and no experimental
    # option was ever added to the map.
    if _opt[:2].lower() == "x-":
        testtools_ini_map += ((_sect, _opt), )


class TestToolsUserInterface(ProxyUI.ProxyUserInterface):
    """Serves the HTML user interface for the test tools."""

    def onCv(self):
        """Serve the cross-validation test configuration page."""
        global testtools_ini_map
        self._writePreamble("CV Test")
        # Start from the standard configuration form template and strip
        # the rows this page does not use.
        configTable = self.html.configForm.clone()
        del configTable.configTextRow1
        del configTable.configTextRow2
        del configTable.configCbRow1
        del configTable.configRow2