attributes.remove(lr)
            else:
                attributes.remove(lr.getValueFrom.variable)
            new_domain = Orange.data.Domain(attributes, data.domain.class_var)
            new_domain.addmetas(data.domain.getmetas())
            data = data.select(new_domain)
            lr = learner.fit_model(data, weight)
        return lr


LogRegLearner = deprecated_members(
    {
        "removeSingular": "remove_singular",
        "weightID": "weight_id",
        "stepwiseLR": "stepwise_lr",
        "addCrit": "add_crit",
        "deleteCrit": "delete_crit",
        "numFeatures": "num_features",
        "removeMissing": "remove_missing",
    }
)(LogRegLearner)
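
Every entry on this page wraps a class with deprecated_members to keep old camelCase attribute names working after a rename to snake_case. The decorator below is only an illustrative sketch of that idea, not Orange's implementation (the real one also handles wrap_methods, in_place and attribute writes, which are omitted here)::

    def deprecated_members_sketch(name_map):
        def wrap(cls):
            original_getattr = getattr(cls, "__getattr__", None)

            def __getattr__(self, name):
                if name in name_map:
                    # Deprecated spelling requested: forward to the new name.
                    return getattr(self, name_map[name])
                if original_getattr is not None:
                    return original_getattr(self, name)
                raise AttributeError(name)

            cls.__getattr__ = __getattr__          # aliases read access only
            return cls
        return wrap

    class Example(object):
        def __init__(self):
            self.weight_id = 0

    Example = deprecated_members_sketch({"weightID": "weight_id"})(Example)
    assert Example().weightID == 0                 # old spelling still resolves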


class UnivariateLogRegLearner(Orange.classification.Learner):
    def __new__(cls, data=None, **argkw):
        self = Orange.classification.Learner.__new__(cls, **argkw)
        if data is not None:
            self.__init__(**argkw)
            return self.__call__(data)
        else:
            return self
Example #2
    def winner_index(self):
        """Return the index of the optimal object within the sequence of
        the candidates.
        
        :rtype: int
        """
        if self.best is not None:
            return self.best_index
        else:
            return None


BestOnTheFly = deprecated_members(
    {
        "callCompareOn1st": "call_compare_on_1st",
        "winnerIndex": "winner_index",
        "randomGenerator": "random_generator",
        "bestIndex": "best_index"
    },
    wrap_methods=["__init__"])(BestOnTheFly)


@deprecated_keywords({"callCompareOn1st": "call_compare_on_1st"})
def select_best(x, compare=cmp, seed=0, call_compare_on_1st=False):
    """Return the optimal object from list x. The function is used if the candidates
    are already in the list, so using the more complicated :obj:`BestOnTheFly` directly is
    not needed.

    To demonstrate the use of :obj:`BestOnTheFly` see the implementation of
    :obj:`selectBest`::
    
      def selectBest(x, compare=cmp, seed = 0, call_compare_on_1st = False):
                        (Orange.data.Table(newdomain, data), weight_id),
                        indices)

                    newStat = self.stat(res)[0]
                    newStats = [
                        self.stat(x)[0] for x in
                        Orange.evaluation.scoring.split_by_iterations(res)
                    ]
                    print "+", newStat, newdomain

                    ## If stat has increased (ie newStat is better than bestStat)
                    if cmp(newStat,
                           bestStat) == self.statsign and statc.wilcoxont(
                               oldStats, newStats)[1] < self.add_threshold:
                        bestStat, bestStats, bestAttr = newStat, newStats, attr
            if bestAttr:
                domain = Orange.data.Domain(domain.attributes + [bestAttr],
                                            classVar)
                oldStat, oldStats = bestStat, bestStats
                stop = False
                print "added", bestAttr.name

        return self.learner(Orange.data.Table(domain, data), weight_id)


StepwiseLearner = deprecated_members(
    {
        "removeThreshold": "remove_threshold",
        "addThreshold": "add_threshold"
    }, )(StepwiseLearner)
        if isinstance(aclass, (list, tuple)):
            self.classes[i] = aclass
            self.probabilities[i] = aprob
        elif type(aclass.value) == float:
            self.classes[i] = float(aclass)
            self.probabilities[i] = aprob
        else:
            self.classes[i] = int(aclass)
            self.probabilities[i] = aprob

    def __repr__(self):
        return str(self.__dict__)


TestedExample = deprecated_members({
    "iterationNumber": "iteration_number",
    "actualClass": "actual_class"
})(TestedExample)


def mt_vals(vals):
    """
    Substitution for the unpicklable lambda function for multi-target classifiers.
    """
    return [
        val if val.is_DK() else int(val) if val.variable.var_type
        == Orange.feature.Type.Discrete else float(val) for val in vals
    ]
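
The point of a named, module-level mt_vals instead of a lambda is picklability; this is standard-library behaviour, independent of Orange::

    import pickle

    pickle.dumps(mt_vals)            # a module-level function pickles by name
    try:
        pickle.dumps(lambda vals: vals)
    except Exception:
        pass                         # lambdas raise pickle.PicklingError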


class ExperimentResults(object):
    """
        """Set the result of the i-th classifier to the given values."""
        if isinstance(aclass, (list, tuple)):
            self.classes[i] = aclass
            self.probabilities[i] = aprob
        elif type(aclass.value)==float:
            self.classes[i] = float(aclass)
            self.probabilities[i] = aprob
        else:
            self.classes[i] = int(aclass)
            self.probabilities[i] = aprob

    def __repr__(self):
        return str(self.__dict__)

TestedExample = deprecated_members({"iterationNumber": "iteration_number",
                                    "actualClass": "actual_class"
                                    })(TestedExample)

def mt_vals(vals):
    """
    Substitution for the unpicklable lambda function for multi-target classifiers.
    """
    return [val if val.is_DK() else int(val) if val.variable.var_type == Orange.feature.Type.Discrete
                                            else float(val) for val in vals]

class ExperimentResults(object):
    """
    ``ExperimentResults`` stores results of one or more repetitions of
    some test (cross validation, repeated sampling...) under the same
    circumstances. Instances of this class are constructed by sampling
    and testing functions from module :obj:`Orange.evaluation.testing`
Example #6
        for i, var in enumerate(domain.features):
            j = i + 1 if self.intercept else i
            dict_model[var.name] = (coefficients[j], std_error[j],
                                    t_scores[j], p_vals[j])

        return LinearRegression(domain.class_var, domain, coefficients, F,
                 std_error=std_error, t_scores=t_scores, p_vals=p_vals,
                 dict_model=dict_model, fitted=fitted, residuals=residuals,
                 m=m, n=n, mu_y=mu_y, r2=r2, r2adj=r2adj, sst=sst, sse=sse,
                 ssr=ssr, std_coefficients=std_coefficients,
                 intercept=self.intercept)
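
For orientation, the per-coefficient statistics assembled above (standard errors, t-scores) are the usual ordinary-least-squares quantities. A generic numpy sketch of how they are conventionally computed (not Orange's code)::

    import numpy as np

    def ols_stats(X, y):
        """Return OLS coefficients, their standard errors and t-scores."""
        X = np.column_stack([np.ones(len(X)), X])    # intercept column first
        n, k = X.shape
        beta = np.linalg.lstsq(X, y)[0]
        residuals = y - X.dot(beta)
        sigma2 = residuals.dot(residuals) / (n - k)  # residual variance
        cov = sigma2 * np.linalg.inv(X.T.dot(X))     # covariance of estimates
        std_error = np.sqrt(np.diag(cov))
        return beta, std_error, beta / std_error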

deprecated_members({"ridgeLambda": "ridge_lambda",
                    "computeStats": "compute_stats",
                    "useVars": "use_vars",
                    "addSig": "add_sig",
                    "removeSig": "remove_sig",
                    }
                   , ["__init__"],
                   in_place=True)(LinearRegressionLearner)

class LinearRegression(Orange.classification.Classifier):

    """Linear regression predicts value of the response variable
    based on the values of independent variables.

    .. attribute:: F

        F-statistics of the model.

    .. attribute:: coefficients
            attributes = data.domain.features[:]
            if lr in attributes:
                attributes.remove(lr)
            else:
                attributes.remove(lr.getValueFrom.variable)
            new_domain = Orange.data.Domain(attributes, data.domain.class_var)
            new_domain.addmetas(data.domain.getmetas())
            data = data.select(new_domain)
            lr = learner.fit_model(data, weight)
        return lr

LogRegLearner = deprecated_members({
    "removeSingular": "remove_singular",
    "weightID": "weight_id",
    "stepwiseLR": "stepwise_lr",
    "addCrit": "add_crit",
    "deleteCrit": "delete_crit",
    "numFeatures": "num_features",
    "removeMissing": "remove_missing"
})(LogRegLearner)


class UnivariateLogRegLearner(Orange.classification.Learner):
    def __new__(cls, data=None, **argkw):
        self = Orange.classification.Learner.__new__(cls, **argkw)
        if data is not None:
            self.__init__(**argkw)
            return self.__call__(data)
        else:
            return self
Example #8
        return self.best

    def winner_index(self):
        """Return the index of the optimal object within the sequence of
        the candidates.
        
        :rtype: int
        """
        if self.best is not None:
            return self.best_index
        else:
            return None

BestOnTheFly = deprecated_members({"callCompareOn1st": "call_compare_on_1st",
                                   "winnerIndex": "winner_index",
                                   "randomGenerator": "random_generator",
                                   "bestIndex": "best_index"
                                   },
                                   wrap_methods=["__init__"])(BestOnTheFly)


@deprecated_keywords({"callCompareOn1st": "call_compare_on_1st"})
def select_best(x, compare=cmp, seed = 0, call_compare_on_1st = False):
    """Return the optimal object from list x. The function is used if the candidates
    are already in the list, so using the more complicated :obj:`BestOnTheFly` directly is
    not needed.

    To demonstrate the use of :obj:`BestOnTheFly` see the implementation of
    :obj:`selectBest`::
    
      def selectBest(x, compare=cmp, seed = 0, call_compare_on_1st = False):
          bs=BestOnTheFly(compare, seed, call_compare_on_1st)
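
The docstring example breaks off above. A minimal completion, assuming BestOnTheFly exposes a candidate() method next to the winner()/winner_index() accessors shown earlier (the method names are an assumption)::

    def select_best_sketch(x, compare=cmp, seed=0, call_compare_on_1st=False):
        bs = BestOnTheFly(compare, seed, call_compare_on_1st)
        for item in x:
            bs.candidate(item)   # offer each object to the on-the-fly selector
        return bs.winner()       # assumed to return the best object seen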
                setattr(self, name, value)
            self.__init__(**argkw)
            return self.__call__(data, weight_id)
        else:
            return self

    def findobj(self, name):
        import string
        names = string.split(name, ".")
        lastobj = self.learner
        for i in names[:-1]:
            lastobj = getattr(lastobj, i)
        return lastobj, names[-1]

TuneParameters = deprecated_members(
    {"returnWhat": "return_what",
     "object": "learner"},
    )(TuneParameters)


class Tune1Parameter(TuneParameters):

    """Class :obj:`Orange.optimization.Tune1Parameter` tunes a single parameter.
    
    .. attribute:: parameter
    
        The name of the parameter (or a list of names, if the same parameter is
        stored at multiple places - see the examples) to be tuned.
    
    .. attribute:: values
    
        A list of parameter's values to be tried.
            self._dirty = False
        except Exception, e:
            self.domain = None
            raise
            #self.domain = None

    def data(self):
        """
        Return :class:`Orange.data.Table` produced by the last executed query.
        """
        self.update()
        if self.exampleTable:
            return self.exampleTable
        return None

SQLReader = deprecated_members({"discreteNames": "discrete_names",
                                "metaName": "meta_names",
                                "className": "class_name"})(SQLReader)


class SQLWriter(object):
    """
    Establishes a connection with a database and provides the methods needed to create
    an appropriate table in the database and/or write the data from an :class:`Orange.data.Table`
    into the database.
    """
    def __init__(self, uri=None):
        """
        :param uri: Connection string (scheme://[user[:password]@]host[:port]/database[?parameters])
        :type uri: str
        """
        if uri is not None:
            self.connect(uri)
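
A minimal usage sketch relying only on the constructor and connect() shown above; the URI is a made-up example following the documented scheme://[user[:password]@]host[:port]/database form::

    writer = SQLWriter("postgres://user:secret@localhost:5432/mydb")

    # equivalent two-step form
    writer = SQLWriter()
    writer.connect("postgres://user:secret@localhost:5432/mydb")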
Example #11
        :type weight: :obj:`~Orange.feature.Descriptor`

        """
        if self.score:
            measure = self.score
        else:
            measure = Relief(m=5, k=10)

        measured = [(attr, measure(attr, data, None, weight))
                    for attr in data.domain.attributes]
        measured.sort(lambda x, y: cmp(x[1], y[1]))
        return [x[0] for x in measured]


OrderAttributes = deprecated_members({
    "measure": "score",
}, wrap_methods=[])(OrderAttributes)


class Distance(Score):
    """The :math:`1-D` distance is defined as information gain divided
    by joint entropy :math:`H_{CA}` (:math:`C` is the class variable
    and :math:`A` the feature):

    .. math::
        1-D(C,A) = \\frac{\\mathrm{Gain}(A)}{H_{CA}}
    """
    @deprecated_keywords({"aprioriDist": "apriori_dist"})
    def __new__(cls, attr=None, data=None, apriori_dist=None, weightID=None):
        self = Score.__new__(cls)
        if attr is not None and data is not None:
                domain = Orange.data.Domain(filter(lambda x: x!=bestAttr, domain.attributes), classVar)
                oldStat, oldStats = bestStat, bestStats
                stop = False
                print "removed", bestAttr.name

        bestStat, bestAttr = oldStat, None
        for attr in data.domain.attributes:
            if not attr in domain.attributes:
                newdomain = Orange.data.Domain(domain.attributes + [attr], classVar)
                res = Orange.evaluation.testing.test_with_indices([self.learner], (Orange.data.Table(newdomain, data), weight_id), indices)
                
                newStat = self.stat(res)[0]
                newStats = [self.stat(x)[0] for x in Orange.evaluation.scoring.split_by_iterations(res)] 
                print "+", newStat, newdomain

                ## If stat has increased (ie newStat is better than bestStat)
                if cmp(newStat, bestStat) == self.statsign and statc.wilcoxont(oldStats, newStats)[1] < self.add_threshold:
                    bestStat, bestStats, bestAttr = newStat, newStats, attr
        if bestAttr:
            domain = Orange.data.Domain(domain.attributes + [bestAttr], classVar)
            oldStat, oldStats = bestStat, bestStats
            stop = False
            print "added", bestAttr.name

    return self.learner(Orange.data.Table(domain, data), weight_id)

StepwiseLearner = deprecated_members(
                    {"removeThreshold": "remove_threshold",
                     "addThreshold": "add_threshold"},
                    )(StepwiseLearner)
            d = nd
        # normalizing multiplier
        sum = 0.0
        for i in d:
            sum += i[2]*i[2]*i[1]
        f = numpy.sqrt(distnorm/numpy.maximum(sum, 1e-6))  # lower-bound the denominator to avoid dividing by ~0
        # transform O
        k = 0
        for i in d:
            for j in range(i[1]):
                (ii,jj) = o[k][1]
                self.distances[ii,jj] = f*i[2]
                k += 1
        assert(len(o) == k)
        self.freshD = 0
        return effect
    
MDS = deprecated_members({"projectedDistances": "projected_distances",
                     "originalDistances": "original_distances",
                     "avgStress": "avg_stress",
                     "progressCallback": "progress_callback",
                     "getStress": "calc_stress",
                     "get_stress": "calc_stress",
                     "calcStress": "calc_stress",
                     "getDistance": "calc_distance",
                     "get_distance": "calc_distance",
                     "calcDistance": "calc_distance",
                     "Torgerson": "torgerson",
                     "SMACOFstep": "smacof_step",
                     "LSMT": "lsmt"})(MDS)
Example #14
            return self.__call__(data, weight_id)
        else:
            return self

    def findobj(self, name):
        import string
        names = string.split(name, ".")
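        # Walk every dotted component except the last, starting from
        # self.learner: "a.b.c" resolves to (self.learner.a.b, "c"), so the
        # caller can getattr/setattr the final attribute on that object.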
        lastobj = self.learner
        for i in names[:-1]:
            lastobj = getattr(lastobj, i)
        return lastobj, names[-1]


TuneParameters = deprecated_members(
    {
        "returnWhat": "return_what",
        "object": "learner"
    }, )(TuneParameters)


class Tune1Parameter(TuneParameters):
    """Class :obj:`Orange.optimization.Tune1Parameter` tunes a single parameter.
    
    .. attribute:: parameter
    
        The name of the parameter (or a list of names, if the same parameter is
        stored at multiple places - see the examples) to be tuned.
    
    .. attribute:: values
    
        A list of parameter's values to be tried.
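
A hedged usage sketch based only on the documented parameter and values attributes and the learner attribute introduced by the renaming in TuneParameters; SomeLearner and the parameter name "c" are hypothetical placeholders::

    tuner = Tune1Parameter(learner=SomeLearner(),   # hypothetical learner
                           parameter="c",           # hypothetical parameter name
                           values=[0.01, 0.1, 1.0, 10.0])
    classifier = tuner(data)   # learners are called on data, as in __new__ above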
Example #15
                coeff_p.append(c.coefficients)
            p_vals[nz] = (abs(coeff_p) > abs(coefficients)).sum(axis=0)
            p_vals[nz] /= float(self.n_perm)

        # dictionary of regression coefficients with standard errors
        # and p-values
        model = {}
        for i, var in enumerate(domain.attributes):
            model[var.name] = (coefficients[i], std_errors[i], p_vals[i])

        return LassoRegression(domain=domain, class_var=domain.class_var,
            coef0=coef0, coefficients=coefficients, std_errors=std_errors,
            p_vals=p_vals, model=model, mu_x=mu_x)

deprecated_members({"nBoot": "n_boot",
                    "nPerm": "n_perm"},
                   wrap_methods=["__init__"],
                   in_place=True)(LassoRegressionLearner)

class LassoRegression(Orange.classification.Classifier):
    """Lasso regression predicts the value of the response variable
    based on the values of independent variables.

    .. attribute:: coef0

        Intercept (sample mean of the response variable).    

    .. attribute:: coefficients

        Regression coefficients. 

    .. attribute:: std_errors
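
For reference, the textbook lasso objective (not quoted from Orange's documentation): the coefficients minimize the residual sum of squares subject to an L1 budget on their magnitudes.

.. math::
    \hat{\beta} = \arg\min_{\beta}
        \sum_{i=1}^{n} \Big( y_i - \beta_0 - \sum_{j=1}^{p} \beta_j x_{ij} \Big)^2
    \quad \text{subject to} \quad \sum_{j=1}^{p} |\beta_j| \le t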
Example #16
        sum = 0.0
        for i in d:
            sum += i[2] * i[2] * i[1]
        f = numpy.sqrt(distnorm / numpy.maximum(sum, 1e-6))  # lower-bound the denominator to avoid dividing by ~0
        # transform O
        k = 0
        for i in d:
            for j in range(i[1]):
                (ii, jj) = o[k][1]
                self.distances[ii, jj] = f * i[2]
                k += 1
        assert (len(o) == k)
        self.freshD = 0
        return effect

MDS = deprecated_members({
    "projectedDistances": "projected_distances",
    "originalDistances": "original_distances",
    "avgStress": "avg_stress",
    "progressCallback": "progress_callback",
    "getStress": "calc_stress",
    "get_stress": "calc_stress",
    "calcStress": "calc_stress",
    "getDistance": "calc_distance",
    "get_distance": "calc_distance",
    "calcDistance": "calc_distance",
    "Torgerson": "torgerson",
    "SMACOFstep": "smacof_step",
    "LSMT": "lsmt"
})(MDS)
Example #17
        if True or self.deflation_mode == "regression":
            # Estimate regression coefficient
            # Y = TQ' + E = X W(P'W)^-1Q' + E = XB + E
            # => B = W*Q' (p x q)
            coefs = dot(xRotations, Q.T)
            coefs = 1. / sigmaX.reshape((p, 1)) * \
                    coefs * sigmaY
        
        return {"mu_x": muX, "mu_y": muY, "sigma_x": sigmaX,
                "sigma_y": sigmaY, "T": T, "U":U, "W":U, 
                "C": C, "P":P, "Q":Q, "x_rotations": xRotations,
                "y_rotations": yRotations, "coefs": coefs}

deprecated_members({"nComp": "n_comp",
                    "deflationMode": "deflation_mode",
                    "maxIter": "max_iter"},
                   wrap_methods=["__init__"],
                   in_place=True)(PLSRegressionLearner)

class PLSRegression(Orange.classification.Classifier):
    """ Predict values of the response variables
    based on the values of independent variables.
    
    Basic notations:
    n - number of data instances
    p - number of independent variables
    q - number of response variables

    .. attribute:: T
    
        A n x n_comp numpy array of x-scores
            self._dirty = False
        except Exception, e:
            self.domain = None
            raise
            #self.domain = None

    def data(self):
        """
        Return :class:`Orange.data.Table` produced by the last executed query.
        """
        self.update()
        if self.exampleTable:
            return self.exampleTable
        return None

SQLReader = deprecated_members({"discreteNames": "discrete_names",
                                "metaName": "meta_names",
                                "className": "class_name"})(SQLReader)

class SQLWriter(object):
    """
    Establishes a connection with a database and provides the methods needed to create
    an appropriate table in the database and/or write the data from an :class:`Orange.data.Table`
    into the database.
    """
    def __init__(self, uri = None):
        """
        :param uri: Connection string (scheme://[user[:password]@]host[:port]/database[?parameters])
        :type uri: str
        """
        if uri is not None:
            self.connect(uri)
        :param weight: meta attribute that stores weights of instances
        :type weight: :obj:`~Orange.feature.Descriptor`

        """
        if self.score:
            measure = self.score
        else:
            measure = Relief(m=5, k=10)

        measured = [(attr, measure(attr, data, None, weight)) for attr in data.domain.attributes]
        measured.sort(lambda x, y: cmp(x[1], y[1]))
        return [x[0] for x in measured]

OrderAttributes = deprecated_members({
    "measure": "score",
}, wrap_methods=[])(OrderAttributes)

class Distance(Score):
    """The :math:`1-D` distance is defined as information gain divided
    by joint entropy :math:`H_{CA}` (:math:`C` is the class variable
    and :math:`A` the feature):

    .. math::
        1-D(C,A) = \\frac{\\mathrm{Gain}(A)}{H_{CA}}
    """

    @deprecated_keywords({"aprioriDist": "apriori_dist"})
    def __new__(cls, attr=None, data=None, apriori_dist=None, weightID=None):
        self = Score.__new__(cls)
        if attr is not None and data is not None: