Example #1
def convertLimits(limits, form=None):
    if not isinstance(limits, list):
        raise ValueError("No way to convert limits because {0} is not a list.".format(limits))


    if not all(limits):
        return limits  ## return list because there is a None in the list

        
    if form is None:
        if len(limits) < 2:
            raise ValueError("List {0} does not have two or more elements".format(limits))
        if isinstance(limits[0], int):
            dist = rint(limits[0], limits[-1])
        elif isinstance(limits[0], float):
            dist = rfloat(limits[0], limits[-1])
        elif isinstance(limits[0], str):
            dist = limits
        else:
            raise ValueError("Not sure how to process list {0}.".format(limits))
    else:
        if form == "uniform":
            if len(limits) < 2:
                raise ValueError("List {0} does not have two or more elements".format(limits))
            if isinstance(limits[0], int):
                dist = rint(limits[0], limits[-1])
            elif isinstance(limits[0], float):
                dist = rfloat(limits[0], limits[-1])
            else:
                raise ValueError("Not sure how to process list {0}.".format(limits))
        else:
            raise ValueError("Form {0} is not supported.".format(form))

    return dist
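
The helpers rint and rfloat are not defined in this snippet; judging by how they are used (turning a pair of limits into a random distribution), they most likely wrap scipy.stats.randint and scipy.stats.uniform. A minimal sketch under that assumption:

# Hypothetical definitions of the rint/rfloat helpers used above
# (not part of the original project code).
from scipy.stats import randint, uniform

def rint(low, high):
    # discrete uniform distribution over the integers low..high (inclusive)
    return randint(low, high + 1)

def rfloat(low, high):
    # continuous uniform distribution over [low, high]
    return uniform(loc=low, scale=high - low)

# Example: an integer pair becomes an integer distribution over its range.
dist = convertLimits([1, 10])  # equivalent to rint(1, 10)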
Example #2
def opti_forest(data, n_jobs=1, n_iter=10):
    model = RandomForestRegressor()
    param_dist = {'n_estimators': rint(15, 30),
                  # 'criterion': ['mse', 'mae'],  # 'mae' is not available in scikit-learn 0.18, only from 0.19 on
                  'min_samples_split': rint(2, 10),
                  'min_samples_leaf': rint(1, 5),
                  'min_weight_fraction_leaf': [0.02],  # earlier tuning runs settled on this value
                  'max_features': [None],  # None (use all features) performed best
                  'min_impurity_split': [0.03, 0.02, 0.01, 0.04],  # superseded by min_impurity_decrease in later scikit-learn versions
                  "bootstrap": [True],  # bootstrap=False is incompatible with oob_score=True
                  "oob_score": [False]}

    X, y = getXY(data, data.keys())
    blu = rsearch(model, param_distributions=param_dist, n_iter=n_iter, n_jobs=n_jobs)
    blu.fit(X, y)
    print(blu.best_params_)
    print(blu.best_score_)
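
rsearch and getXY are project-specific helpers that are not shown in this snippet. Given the keyword arguments passed to it, rsearch almost certainly wraps sklearn.model_selection.RandomizedSearchCV; a minimal sketch under that assumption:

# Hypothetical rsearch helper (not part of the original project code).
from sklearn.model_selection import RandomizedSearchCV

def rsearch(model, param_distributions, n_iter=10, n_jobs=1):
    # RandomizedSearchCV accepts scipy distributions (objects exposing .rvs)
    # as well as plain lists, which is why param_dist can mix rint(...) with
    # fixed choices such as [0.02].
    return RandomizedSearchCV(model, param_distributions=param_distributions,
                              n_iter=n_iter, n_jobs=n_jobs)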
Example #3
File: tree.py Project: tgadf/pymva
def getDecisionTreeClassifierParams():
    treeParams = {
        "max_depth": [2, 4, 6, None],
        "max_features": ['auto', 'sqrt', 'log2', None],
        "min_impurity_decrease": rfloat(0.0, 0.25),
        "min_samples_leaf": rint(1, 10)
    }

    params = treeParams
    params["criterion"] = ["gini", "entropy"]

    param_grid = {}
    for param, dist in params.items():
        param_grid[param] = convertDistribution(dist)

    retval = {"dist": params, "grid": param_grid}
    return retval
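
convertDistribution is defined elsewhere in the pymva project. Since the function returns the original scipy distributions under "dist" and a derived "grid", a plausible reading is that it flattens each distribution into a finite list of candidate values (for GridSearchCV) while passing plain lists through unchanged. A hypothetical sketch:

# Hypothetical convertDistribution: lists stay as they are, scipy frozen
# distributions are sampled into a small sorted set of candidate values.
def convertDistribution(dist, num=5, seed=0):
    if isinstance(dist, (list, tuple)):
        return list(dist)
    samples = dist.rvs(size=num, random_state=seed)
    return sorted(set(samples.tolist()))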
Example #4
File: ensemble.py Project: tgadf/pymva
def getExtraTreesClassifierParams():
    treeParams = {"max_depth": [2, 4, 6, 8, None],
                  "max_features": ['auto', 'sqrt', 'log2', None],
                  "min_impurity_decrease": rfloat(0.0, 0.25),
                  "min_samples_leaf": rint(1, 10)}
    
    params = treeParams
    #params["bootstrap"] = [False, True]
    #params["criterion"] = ["mae", "mse"]
    params["n_estimators"] = [100]

    param_grid = {}
    for param, dist in params.items():
        param_grid[param] = convertDistribution(dist)

    retval = {"dist": params, "grid": param_grid}
    return retval
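
For context, the returned {"dist": ..., "grid": ...} pair lends itself to the two scikit-learn search strategies: "dist" (scipy distributions plus lists) suits RandomizedSearchCV, while the flattened "grid" suits GridSearchCV. The wiring below is illustrative, not code from pymva:

from sklearn.ensemble import ExtraTreesClassifier
from sklearn.model_selection import GridSearchCV, RandomizedSearchCV

params = getExtraTreesClassifierParams()
# random search draws from the distributions, grid search enumerates the lists
random_search = RandomizedSearchCV(ExtraTreesClassifier(),
                                   param_distributions=params["dist"], n_iter=20)
grid_search = GridSearchCV(ExtraTreesClassifier(), param_grid=params["grid"])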