Example 1
    def learnClassifier(self, examples):
        transformer = orange.DomainContinuizer()
        transformer.multinomialTreatment = orange.DomainContinuizer.NValues
        transformer.continuousTreatment = orange.DomainContinuizer.NormalizeBySpan
        transformer.classTreatment = orange.DomainContinuizer.Ignore
        newdomain = transformer(examples)
        newexamples = examples.translate(newdomain)
        #print newexamples[0]
        params = {}
        parameters = []
        self.learner.normalization = False  ## Normalization already done

        if self.svm_type in [1, 4]:  # Nu_SVC, Nu_SVR
            numOfNuValues = 9
            maxNu = max(self.maxNu(newexamples) - 1e-7, 0.0)
            parameters.append(
                ("nu", [i / 10.0 for i in range(1, numOfNuValues)
                        if i / 10.0 < maxNu] + [maxNu]))
        else:
            parameters.append(("C", [2**a for a in range(-5, 15, 2)]))
        if self.kernel_type == 2:  # RBF kernel
            parameters.append(("gamma", [2**a for a in range(-5, 5, 2)] + [0]))
        tunedLearner = orngWrap.TuneMParameters(object=self.learner,
                                                parameters=parameters,
                                                folds=self.folds)

        return SVMClassifierClassEasyWrapper(
            tunedLearner(newexamples, verbose=self.verbose), newdomain,
            examples)
Example 2
    def learn_classifier(self, data):
        transformer = preprocess.DomainContinuizer()
        transformer.multinomialTreatment = preprocess.DomainContinuizer.NValues
        transformer.continuousTreatment = \
            preprocess.DomainContinuizer.NormalizeBySpan
        transformer.classTreatment = preprocess.DomainContinuizer.Ignore
        newdomain = transformer(data)
        newexamples = data.translate(newdomain)

        parameters = []
        self.learner.normalization = False  # Normalization already done

        if self.svm_type in [SVMLearner.Nu_SVC, SVMLearner.Nu_SVR]:
            if self.svm_type == SVMLearner.Nu_SVC:
                max_nu = max(self.max_nu(newexamples) - 1e-7, 0.0)
            else:
                max_nu = 1.0
            parameters.append(("nu", [i / 10.0 for i in range(1, 9)
                                      if i / 10.0 < max_nu] + [max_nu]))
        else:
            parameters.append(("C", [2 ** a for a in  range(-5, 15, 2)]))
        if self.kernel_type == 2:  # RBF kernel
            parameters.append(
                ("gamma", [2 ** a for a in range(-5, 5, 2)] + [0])
            )

        import orngWrap
        tunedLearner = orngWrap.TuneMParameters(learner=self.learner,
                                                parameters=parameters,
                                                folds=self.folds)

        return tunedLearner(newexamples, verbose=self.verbose)
Example 3
    def tune_parameters(self, data, parameters=None, folds=5, verbose=0,
                        progress_callback=None):
        """Tune the ``parameters`` on the given ``data`` using
        internal cross validation.

        :param data: data for parameter tuning
        :type data: Orange.data.Table
        :param parameters: names of parameters to tune
            (default: ["nu", "C", "gamma"])
        :type parameters: list of strings
        :param folds: number of folds for internal cross validation
        :type folds: int
        :param verbose: set verbose output
        :type verbose: bool
        :param progress_callback: callback function for reporting progress
        :type progress_callback: callback function

        Here is an example of tuning the `gamma` parameter using
        3-fold cross validation. ::

            svm = Orange.classification.svm.SVMLearner()
            svm.tune_parameters(table, parameters=["gamma"], folds=3)

        """

        import orngWrap

        if parameters is None:
            parameters = ["nu", "C", "gamma"]

        searchParams = []
        normalization = self.normalization
        if normalization:
            data = self._normalize(data)
            self.normalization = False
        if self.svm_type in [SVMLearner.Nu_SVC, SVMLearner.Nu_SVR] \
                    and "nu" in parameters:
            if isinstance(data.domain.class_var, variable.Discrete):
                max_nu = max(self.max_nu(data) - 1e-7, 0.0)
            else:
                max_nu = 1.0
            searchParams.append(("nu", [i / 10.0 for i in range(1, 9) if \
                                        i / 10.0 < max_nu] + [max_nu]))
        elif "C" in parameters:
            searchParams.append(("C", [2 ** a for a in  range(-5, 15, 2)]))

        if self.kernel_type == 2 and "gamma" in parameters:  # RBF kernel
            searchParams.append(("gamma",
                                 [2 ** a for a in range(-5, 5, 2)] + [0])
                                )
        tunedLearner = orngWrap.TuneMParameters(
            object=self,
            parameters=searchParams,
            folds=folds,
            returnWhat=orngWrap.TuneMParameters.returnLearner,
            progressCallback=(progress_callback
                              if progress_callback else lambda i: None))
        tunedLearner(data, verbose=verbose)
        if normalization:
            self.normalization = normalization
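A minimal usage sketch for the tuning method above, assuming the Orange 2.x API it documents (`Orange.data.Table`, `Orange.classification.svm.SVMLearner`) and the bundled `voting` dataset; the dataset name and the assumption that tuning writes the best values back onto the learner are not part of the original snippet:

import Orange.data
import Orange.classification.svm

# Sketch: with the default RBF kernel only "gamma" is searched here.
table = Orange.data.Table("voting")
svm = Orange.classification.svm.SVMLearner()
svm.tune_parameters(table, parameters=["gamma"], folds=3)
# Assumes the tuner has set the best gamma on the learner; train with it.
classifier = svm(table)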
Example 4
    def tuneParameters(self,
                       examples,
                       parameters=None,
                       folds=5,
                       verbose=0,
                       progressCallback=None):
        """ Tune the parameters of the SVMLearner on the given examples using cross validation.

        :param examples: ExampleTable on which to tune the parameters
        :param parameters: if not set, defaults to ["nu", "C", "gamma"]
        :param folds: number of folds used for cross validation
        :param verbose: set verbose output
        :param progressCallback: a callback function to report progress

        Example::
            >>> svm = SVMLearner()
            >>> svm.tuneParameters(examples, parameters=["gamma"], folds=3)

        This code tunes the `gamma` parameter on `examples` using 3-fold cross validation.

        """
        import orngWrap
        parameters = ["nu", "C", "gamma"] if parameters is None else parameters
        searchParams = []
        normalization = self.normalization
        if normalization:
            examples = self._normalize(examples)
            self.normalization = False
        if self.svm_type == SVMLearner.Nu_SVC and "nu" in parameters:
            numOfNuValues = 9
            maxNu = max(self.maxNu(examples) - 1e-7, 0.0)
            searchParams.append(
                ("nu", [i / 10.0 for i in range(1, numOfNuValues)
                        if i / 10.0 < maxNu] + [maxNu]))
        elif "C" in parameters:
            searchParams.append(("C", [2**a for a in range(-5, 15, 2)]))
        if self.kernel_type == 2 and "gamma" in parameters:  # RBF kernel
            searchParams.append(
                ("gamma", [2**a for a in range(-5, 5, 2)] + [0]))
        tunedLearner = orngWrap.TuneMParameters(
            object=self,
            parameters=searchParams,
            folds=folds,
            returnWhat=orngWrap.TuneMParameters.returnLearner,
            progressCallback=(progressCallback
                              if progressCallback else lambda i: None))
        tunedLearner(examples, verbose=verbose)
        if normalization:
            self.normalization = normalization
Example 5
import orange, orngTree, orngStat, orngWrap

learner = orngTree.TreeLearner()
data = orange.ExampleTable("voting")
tuner = orngWrap.TuneMParameters(object=learner,
                                 parameters=[
                                     ("minSubset", [2, 5, 10, 20]),
                                     ("measure", [
                                         orange.MeasureAttribute_gainRatio(),
                                         orange.MeasureAttribute_gini()
                                     ])
                                 ],
                                 evaluate=orngStat.AUC)
classifier = tuner(data)
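As a brief follow-up to the snippet above (a sketch assuming the same Orange 2.x API and Python 2 style), the classifier returned by the tuner can be applied to examples like any other Orange classifier:

# Classify a few examples with the tuned tree and show the true class.
for example in data[:5]:
    print classifier(example), example.getclass()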