Example #1
    def applyLearner(self):
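        # Validate the estimator settings, build a naive Bayes learner with the
        # selected probability estimators, and send it to the "Learner" output.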
        self.warning(0)
        if float(self.m_estimator.m) < 0:
            self.warning(0, "Parameter m should be positive")
            self.learner = None

        elif float(self.windowProportion) < 0 or float(
                self.windowProportion) > 1:
            self.warning(
                0, "Window proportion for LOESS should be between 0.0 and 1.0")
            self.learner = None

        else:
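            # Settings are valid: construct the naive Bayes learner with the
            # chosen (conditional) probability estimators.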
            self.learner = orange.BayesLearner(
                name=self.name, adjustThreshold=self.adjustThreshold)
            self.learner.estimatorConstructor = self.estMethods[
                self.probEstimation][1]
            if self.condProbEstimation:
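                # The selected estimator handles discrete attributes; continuous
                # attributes use LOESS with the configured window and points.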
                self.learner.conditionalEstimatorConstructor = self.condEstMethods[
                    self.condProbEstimation][1]
                self.learner.conditionalEstimatorConstructorContinuous = orange.ConditionalProbabilityEstimatorConstructor_loess(
                    windowProportion=self.windowProportion,
                    nPoints=self.loessPoints)

            if self.preprocessor:
                self.learner = self.preprocessor.wrapLearner(self.learner)

        self.send("Learner", self.learner)
        self.applyData()
        self.changed = False
Example #2
    def setData(self, attr, data):
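        # Reset the widget, compute the distribution of `attr` (class-conditional
        # when a class variable is present), and redraw the rug, probability
        # curve and cut lines.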
        self.clearAll()
        self.attr, self.data = attr, data
        self.curCutPoints = []

        if not data or not attr:
            self.snapDecimals = 1
            self.probDist = None
            return

        if data.domain.classVar:
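            # Class variable present: build the attribute/class contingency and
            # try to fit a LOESS-smoothed conditional probability estimator.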
            self.contingency = orange.ContingencyAttrClass(attr, data)
            try:
                self.condProb = orange.ConditionalProbabilityEstimatorConstructor_loess(
                    self.contingency,
                    nPoints=50)
            except Exception:
                self.condProb = None
            self.probDist = None
            attrValues = self.contingency.keys()
        else:
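            # No class variable: fall back to the plain value distribution.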
            self.condProb = self.contingency = None
            self.probDist = orange.Distribution(attr, data)
            attrValues = self.probDist.keys()

        if attrValues:
            self.minVal, self.maxVal = min(attrValues), max(attrValues)
        else:
            self.minVal, self.maxVal = 0, 1
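        # Choose the number of decimals for snapping cut points from the span
        # of the attribute values.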
        mdist = self.maxVal - self.minVal
        if mdist > 1e-30:
            self.snapDecimals = -int(math.ceil(math.log(mdist, 10)) - 2)
        else:
            self.snapDecimals = 1

        self.baseCurveX = None

        self.plotRug(True)
        self.plotProbCurve(True)
        self.plotCutLines(True)

        self.updateLayout()
        self.replot()
Example #3
                j += 1
        print("beta:", -m.beta)

    #t = orange.ExampleTable('c:/proj/domains/voting.tab') # discrete
    t = orange.ExampleTable(
        r"E:\Development\Orange Datasets\UCI\shuttle.tab")  # discrete

    #t = orange.ExampleTable('c_cmc.tab') # continuous

    print("NAIVE BAYES")
    print("===========")
    bl = orange.BayesLearner()
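    # Laplace-corrected estimates for the unconditional probabilities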
    bl.estimatorConstructor = orange.ProbabilityEstimatorConstructor_Laplace()
    # windowProportion=0.5 raises the smoothing level; nPoints=10 keeps the
    # number of LOESS estimation points small
    bl.conditionalEstimatorConstructorContinuous = orange.ConditionalProbabilityEstimatorConstructor_loess(
        windowProportion=0.5, nPoints=10)
    c = bl(t)
    printmodel(t, c, printexamples=0)

    print("\n\nLOGISTIC REGRESSION")
    print("===================")
    c = orngLR_Jakulin.BasicLogisticLearner()(t)
    printmodel(t, c, printexamples=0)

    print("\n\nLINEAR SVM")
    print("==========")
    l = orngSVM.BasicSVMLearner()
    l.kernel = 0  # linear SVM
    l.for_nomogram = 1
    c = l(t)
    printmodel(t, c, printexamples=0)