        if InitialDistribution:
            return histo.convolution_estimation1(known_distribution,
                                                 InitialDistribution,
                                                 Estimator, NbIteration,
                                                 Weight, Penalty, Outside)
        else:
            return histo.convolution_estimation2(known_distribution,
                                                 MinInfBound, Estimator,
                                                 NbIteration, Weight,
                                                 Penalty, Outside)


# Extend the _FrequencyDistribution class
_FrequencyDistribution = interface.extend_class(_FrequencyDistribution,
                                                EstimateFunctions)


def Estimate(histo, itype, *args, **kargs):
    """Estimate function

    This function is a dispatcher to several estimate functions depending
    on the first argument and the type.

    :param histo: the input object (may be a histogram, sequence, compound, ...)
    :param itype: string giving the type of model to estimate.

    .. seealso::
        :func:`~openalea.stat_tool.estimate.EstimateFunctions.estimate_nonparametric`,
        :func:`~openalea.stat_tool.estimate.EstimateFunctions.estimate_parametric`,
        :func:`~openalea.stat_tool.estimate.EstimateFunctions.estimate_DiscreteMixture`,
        :func:`~openalea.stat_tool.simulate.Simulate`.
    """
    error.CheckArgumentsLength(args, 1)

    possible_types = [_DiscreteParametricModel, _DiscreteMixture,
                      _Compound, _Convolution]

    # filename
    if len(args) == 1:
        error.CheckType([args[0]], [str], arg_id=[1])
        result = _Convolution(args[0])
    # build from a list of distributions
    else:
        arguments = []
        # check that all arguments have a correct type
        for i, arg in enumerate(args):
            error.CheckType([arg], [possible_types], variable_pos=[i + 1])
            arguments.append(arg)
        result = _Convolution(arguments)

    return result


# Extend _Convolution
interface.extend_class(_Convolution, interface.StatInterface)

# Extend _ConvolutionData
interface.extend_class(_ConvolutionData, interface.StatInterface)
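
# Illustrative usage sketch (added for this fragment, not part of the original
# module). It assumes the builder above is exposed as `Convolution` and that a
# `Uniform(inf_bound, sup_bound)` constructor exists in
# openalea.stat_tool.distribution, as suggested by the distribution fragment
# below; the bounds are arbitrary example values.
def _convolution_example():
    from openalea.stat_tool.distribution import Uniform

    # convolution of two elementary discrete distributions
    conv = Convolution(Uniform(0, 5), Uniform(2, 8))
    # the builder also accepts a single filename argument, e.g.
    # Convolution("convolution.conv") for a hypothetical ASCII model file
    return conv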
:Author: Thomas Cokelaer <*****@*****.**>
:Revision: $Id$
"""
__version__ = "$Id$"

import interface
from openalea.stat_tool._stat_tool import _DiscreteDistributionData

# Extend the _DiscreteDistributionData class dynamically
interface.extend_class(_DiscreteDistributionData, interface.StatInterface)

__all__ = ["_DiscreteDistributionData",
           "Histogram"]


def Histogram(*args):
    """Construction of a frequency distribution from an object of type
    list(int) or from an ASCII file.

    In the file syntax, the frequencies *fi* for each possible value *i* are
    given in two columns. In the case of an argument of type list(int), it is
    simply assumed that each array element represents one data item.

    :param list(int) values: a list of integer values
    :param str filename: a valid filename in the proper format (see the syntax part)
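
    :Examples: (illustrative sketch added here; the values and the file name
        below are arbitrary)

        >>> histo = Histogram([1, 2, 2, 3, 3, 3])  # each element is one data item
        >>> histo = Histogram("frequencies.his")   # two-column value/frequency file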
    assert inf_bound <= sup_bound
    assert (sup_bound - inf_bound) < MAX_DIFF_BOUND

    param = D_DEFAULT
    proba = D_DEFAULT
    cumul_threshold = CUMUL_THRESHOLD

    return _DiscreteParametricModel(UNIFORM, inf_bound, sup_bound,
                                    param, proba, cumul_threshold)


def Multinomial():
    """to be done"""
    raise NotImplementedError("Multinomial not yet implemented")


# Extend _DiscreteParametricModel
interface.extend_class(_DiscreteParametricModel, interface.StatInterface)


# Cast functions

def ToDistribution(histo):
    """Cast an object of type `_DiscreteDistributionData` into an object of
    type `_Distribution`.

    :Parameters:
      * `histo` (_DiscreteDistributionData)

    :Returns:
        If the object `histo` contains a 'model' part, an object of type
        `_Distribution` is returned; otherwise no object is returned.
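
    :Examples: (illustrative sketch added here; `histo` stands for a
        hypothetical frequency distribution that carries a 'model' part,
        e.g. one returned by an estimation procedure)

        >>> dist = ToDistribution(histo)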
        error.CheckType([args[0]], [str], arg_id=[1])
        result = _DiscreteMixture(args[0])
    # build the lists of weights and distributions
    else:
        nb_param = len(args)
        if (nb_param % 2) != 0:
            raise TypeError("Number of parameters must be even")

        # Extract weights and distributions
        weights = []
        dists = []
        for i in xrange(nb_param / 2):
            weights.append(args[i * 2])
            error.CheckType([args[i * 2 + 1]], [types], arg_id=[i * 2 + 1])
            error.CheckType([args[i * 2]], [float], arg_id=[i * 2])
            dists.append(args[i * 2 + 1])

        result = _DiscreteMixture(weights, dists)

    return result


# Extend _DiscreteMixture
interface.extend_class(_DiscreteMixture, interface.StatInterface)

# Extend _DiscreteMixtureData
interface.extend_class(_DiscreteMixtureData, interface.StatInterface)

_DiscreteMixture.__doc__ = Mixture.__doc__
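
# Illustrative usage sketch (added for this fragment, not part of the original
# module). As in the builder above, weights (floats) and distributions
# alternate in the argument list; `Uniform` is assumed to come from
# openalea.stat_tool.distribution and the parameter values are arbitrary.
def _mixture_example():
    from openalea.stat_tool.distribution import Uniform

    # two-component mixture: 40% of the first distribution, 60% of the second
    mixt = Mixture(0.4, Uniform(0, 3), 0.6, Uniform(5, 9))
    return mixt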
    elif utype == "MovingAverage":
        error.CheckArgumentsLength(args, 1, 1)
        param = args[0]
        # todo: add CheckType for int and models

        # param is a list of floats or ints
        if isinstance(args[0], list):
            # todo: check that the sum equals 1
            return vec.moving_average_regression_values(explanatory, response,
                                                        param, Algorithm)
        # or a single distribution object
        # todo: test the compound, convolution and mixture cases
        else:
            error.CheckType([param], [[_DiscreteParametricModel,
                                       _DiscreteMixture,
                                       _Convolution,
                                       _Compound]])
            return vec.moving_average_regression_distribution(explanatory,
                                                              response,
                                                              param, Algorithm)

    elif utype in ["NearestNeighbors", "NearestNeighbours"]:
        error.CheckArgumentsLength(args, 1, 1)
        span = args[0]
        error.CheckType([span], [[float, int]])
        assert span >= STAT_MINIMUM_SPAN  # todo: check this assertion

        return vec.nearest_neighbours_regression(explanatory, response,
                                                 float(span), Weighting)

    else:
        raise TypeError("Bad Regression type. Must be in %s" % possible_types)


# Extend the _Regression class dynamically
interface.extend_class(_Regression, interface.StatInterface)
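
# Illustrative usage sketch (added for this fragment, not part of the original
# module). It assumes the dispatcher above is exposed as
# `Regression(vec, utype, explanatory_variable, response_variable, *args,
# **kargs)`, which is suggested by the calls above but not shown in this
# fragment; `vec` stands for an existing _Vectors object and the numeric
# values are arbitrary.
def _regression_example(vec):
    # moving-average regression with an explicit filter whose values sum to 1
    reg1 = Regression(vec, "MovingAverage", 1, 2, [0.25, 0.5, 0.25])
    # nearest-neighbour regression; the span must be at least STAT_MINIMUM_SPAN
    reg2 = Regression(vec, "NearestNeighbours", 1, 2, 0.3)
    return reg1, reg2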
    Threshold = kargs.get("Threshold", None)

    # filename
    if len(args) == 1:
        error.CheckType([args[0]], [str])
        result = _Compound(args[0])

    possible_types = [_DiscreteParametricModel, _DiscreteMixture,
                      _Compound, _Convolution]

    # build from two objects and an optional threshold
    if len(args) == 2:
        error.CheckType([args[0], args[1]],
                        [possible_types, possible_types],
                        variable_pos=[1, 2])
        if Threshold:
            result = _Compound([args[0], args[1]], Threshold)
        else:
            result = _Compound([args[0], args[1]])

    return result


# Extend _Compound
interface.extend_class(_Compound, interface.StatInterface)

# Extend _CompoundData
interface.extend_class(_CompoundData, interface.StatInterface)
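
# Illustrative usage sketch (added for this fragment, not part of the original
# module). It assumes the builder above is exposed as `Compound` and takes a
# sum distribution, an elementary distribution and an optional Threshold
# keyword; `Uniform` is assumed to come from openalea.stat_tool.distribution
# and all values are arbitrary.
def _compound_example():
    from openalea.stat_tool.distribution import Uniform

    sum_dist = Uniform(1, 4)
    elementary_dist = Uniform(0, 6)
    comp = Compound(sum_dist, elementary_dist)
    # the cumulative probability threshold can also be set explicitly
    comp_thresholded = Compound(sum_dist, elementary_dist, Threshold=0.9999)
    return comp, comp_thresholded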
        ret = _Vectors(obj, identifiers, InputTypes)
    else:
        # from a sequence
        index_variable = error.ParseKargs(kargs, "IndexVariable", False,
                                          [True, False])
        error.CheckType([index_variable], [bool], variable_pos=[2])
        ret = obj.build_vectors(index_variable)

    return ret


interface.extend_class(_Vectors, interface.StatInterface)


def VectorDistance(*args, **kargs):
    """Construction of an object of type `_VectorDistance` from variable types
    (and possibly weights) or from an ASCII file.

    The type `_VectorDistance` implements standardization procedures. The
    objective of standardization is to avoid dependence on the variable type
    (chosen among symbolic, ordinal, numeric and circular) and, for numeric
    variables, on the choice of the measurement units, by converting the
    original variables to unitless variables.

    :Parameters:
from enums import cluster_type
from enums import round_type

mode_type = round_type

__all__ = [
    "_DistanceMatrix",
    "_Cluster",
    "_Dendrogram",
    "Cluster",
    "Transcode",
    "Clustering",
    "ToDistanceMatrix",
    ]

# Extend classes dynamically
interface.extend_class(_DistanceMatrix, interface.StatInterface)
interface.extend_class(_Cluster, interface.StatInterface)
interface.extend_class(_Dendrogram, interface.StatInterface)


def Cluster(obj, utype, *args, **kargs):
    """Clustering of values.

    In the case of the clustering of values of a frequency distribution on the
    basis of an information measure criterion (argument `Information`), both
    the information measure ratio and the selected optimal step are given in
    the shell window.

    The clustering mode `Step` (and its variant `Information`) is naturally
    adapted to numeric variables, while the clustering mode `Limit` applies to
    both symbolic (nominal) and numeric variables. In the case of a symbolic