import configuration_parser

from sklearn.kernel_ridge import KernelRidge


def get():
    config = configuration_parser.parse()
    alpha = config.getfloat(__name__, 'alpha')
    coef0 = config.getint(__name__, 'coef0')
    degree = config.getint(__name__, 'degree')
    gamma = config.getfloat(__name__, 'gamma')
    kernel = config.get(__name__, 'kernel')
    model = KernelRidge(alpha=alpha, coef0=coef0, degree=degree,
                        gamma=gamma, kernel=kernel, kernel_params=None)
    return model
import ast

import configuration_parser
from sklearn import tree


def get():
    config = configuration_parser.parse()
    # max_depth may be an integer or the literal None, so evaluate the
    # config string with ast.literal_eval instead of exec().
    max_depth = ast.literal_eval(config.get(__name__, 'max_depth'))
    min_samples_split = config.getint(__name__, 'min_samples_split')
    min_samples_leaf = config.getint(__name__, 'min_samples_leaf')
    criterion = config.get(__name__, 'split criterion')
    return tree.DecisionTreeRegressor(criterion=criterion,
                                      max_depth=max_depth,
                                      min_samples_split=min_samples_split,
                                      min_samples_leaf=min_samples_leaf)
import ast

import configuration_parser
import neurolab as nl


def get():
    config = configuration_parser.parse()
    minmax = ast.literal_eval(config.get(__name__, 'minmax'))
    size = ast.literal_eval(config.get(__name__, 'size'))
    epochs = config.getint(__name__, 'epochs')
    show = config.getboolean(__name__, 'show')
    goal = config.getfloat(__name__, 'goal')
    # Look up the transfer function class by name instead of exec()'ing a
    # generated statement; one instance is reused for every layer.
    transfer_cls = getattr(nl.trans, config.get(__name__, 'transfer_function'))
    transf = [transfer_cls()] * len(size)
    train = config.get(__name__, 'training_algorithm')
    # `model` is the network wrapper class defined elsewhere in this module.
    return model(nl.net.newff(minmax, size, transf), train, epochs, show, goal)
import ast

import configuration_parser
from sklearn.ensemble import AdaBoostRegressor
from sklearn.tree import DecisionTreeRegressor


def get():
    config = configuration_parser.parse()
    estimators = config.getint(__name__, 'estimators')
    lr = config.getfloat(__name__, 'learning rate')
    loss = config.get(__name__, 'loss function')
    # max_depth may be an integer or the literal None, so evaluate the
    # config string with ast.literal_eval instead of exec().
    max_depth = ast.literal_eval(config.get(__name__, 'max_depth'))
    min_samples_split = config.getint(__name__, 'min_samples_split')
    min_samples_leaf = config.getint(__name__, 'min_samples_leaf')
    return AdaBoostRegressor(DecisionTreeRegressor(max_depth=max_depth,
                                                   min_samples_split=min_samples_split,
                                                   min_samples_leaf=min_samples_leaf),
                             n_estimators=estimators,
                             loss=loss,
                             learning_rate=lr)
import ast

import configuration_parser
from sklearn.ensemble import RandomForestRegressor


def get():
    config = configuration_parser.parse()
    estimators = config.getint(__name__, 'estimators')
    # max_depth and max_leaf_nodes may be integers or the literal None, so
    # evaluate the config strings with ast.literal_eval instead of exec().
    max_depth = ast.literal_eval(config.get(__name__, 'max_depth'))
    min_samples_split = config.getint(__name__, 'min_samples_split')
    min_samples_leaf = config.getint(__name__, 'min_samples_leaf')
    max_leaf_nodes = ast.literal_eval(config.get(__name__, 'max_leaf_nodes'))
    jobs = config.getint(__name__, 'jobs')
    model = RandomForestRegressor(n_estimators=estimators,
                                  max_depth=max_depth,
                                  min_samples_split=min_samples_split,
                                  min_samples_leaf=min_samples_leaf,
                                  max_leaf_nodes=max_leaf_nodes,
                                  n_jobs=jobs)
    return model
import configuration_parser

from sklearn.kernel_ridge import KernelRidge


def get():
    config = configuration_parser.parse()
    alpha = config.getfloat(__name__, 'alpha')
    gamma = config.getfloat(__name__, 'gamma')
    kernel = config.get(__name__, 'kernel')
    return KernelRidge(alpha=alpha, gamma=gamma, kernel=kernel)
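# For reference, a minimal sketch of the kind of .conf sections the getters
# above read. The option names mirror the config.get/getint/getfloat calls in
# this file; the section headers (which stand in for each module's __name__)
# and all values are assumptions for illustration, not taken from the
# project's real default.conf.
#
#   [dtr_model]
#   split criterion = mse
#   max_depth = None
#   min_samples_split = 2
#   min_samples_leaf = 1
#
#   [krr_model]
#   alpha = 0.1
#   gamma = 0.5
#   kernel = rbf
#
#   [randomforest_model]
#   estimators = 100
#   max_depth = None
#   min_samples_split = 2
#   min_samples_leaf = 1
#   max_leaf_nodes = None
#   jobs = 1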
import configuration_parser
import importlib
import data_parser
import matplotlib
import sys
import numpy as np

if len(sys.argv) > 1:
    config = configuration_parser.parse(sys.argv[1])
else:
    config = configuration_parser.parse('default.conf')

parameter_names = ['model', 'data_path', 'save_path', 'Y', 'X',
                   'lwr_data_path', 'weights']
all_tests = config.get('AllTests', 'test_cases').split(',')

for case_name in all_tests:
    parameter_values = []
    for parameter in parameter_names:
        # Per-case settings override the AllTests defaults.
        if parameter == 'weights':
            if config.has_option(case_name, parameter):
                parameter_values.append(config.getboolean(case_name, parameter))
            else:
                parameter_values.append(config.getboolean('AllTests', parameter))
        else:
            if config.has_option(case_name, parameter):
                parameter_values.append(config.get(case_name, parameter))
            else:
                parameter_values.append(config.get('AllTests', parameter))
    model, data_path, save_path, y_data, x_data, lwr_data_path, weights = parameter_values
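# A minimal sketch (an assumption about the surrounding project, not code
# taken from it) of how the 'model' string resolved above could be handed off
# to the model modules defined earlier: importlib, imported at the top of this
# script, loads the named module and its get() factory builds the configured
# estimator. The helper name build_estimator is hypothetical.
def build_estimator(model_name):
    """Import the model module named in the config and build its estimator."""
    model_module = importlib.import_module(model_name)
    return model_module.get()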