Example #1
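The snippets below are listed without their import preamble. A minimal set of imports they appear to rely on is sketched here (an assumption, not part of the original listing; the normalize= and max_features='auto' arguments further suggest a scikit-learn release older than 1.0):

import numpy as np
from scipy.stats import randint as sp_randint
from sklearn import linear_model
from sklearn.ensemble import (BaggingClassifier, BaggingRegressor,
                              RandomForestClassifier)
from sklearn.linear_model import LogisticRegression
from sklearn.neighbors import KNeighborsClassifier, KNeighborsRegressor
from sklearn.preprocessing import PolynomialFeatures
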
def kNN(X_train, X_test, y_train, method=0, n_iter_search=1000):

	if method == 0:
		# Single k-NN classifier with fixed, hand-tuned hyperparameters.
		model = KNeighborsClassifier(algorithm='kd_tree', leaf_size=40, n_neighbors=4, p=2, weights='distance')

		model.fit(X_train, y_train.values.ravel())

		y_pred = model.predict(X_test)

	elif method == 1:
		# Bagging ensemble of k-NN classifiers, each member trained on half
		# of the samples and half of the features.
		knn = KNeighborsClassifier(algorithm='kd_tree', leaf_size=40, n_neighbors=4, p=2, weights='distance')

		model = BaggingClassifier(knn, n_estimators=100, max_samples=0.5, max_features=0.5, n_jobs=-1)

		model.fit(X_train, y_train.values.ravel())

		y_pred = model.predict(X_test)
		
	elif method == 2:
		# Randomized search over the k-NN hyperparameters themselves.
		model = KNeighborsClassifier()

		param_dist = {'n_neighbors': list(range(1, 31)),
		    'p': sp_randint(1, 4),
		    'algorithm': ['auto', 'ball_tree', 'kd_tree', 'brute'],
		    'weights': ['uniform', 'distance'],
		    'leaf_size': [10, 20, 25, 28, 30, 32, 35, 40, 60],
		    'n_jobs': [-1]}

		random_search = randomSearch(X_train, y_train, model, param_dist, n_iter_search)

		y_pred = random_search.predict(X_test)
		
	else:
		# Randomized search over the bagging wrapper around a fixed k-NN.
		knn = KNeighborsClassifier(algorithm='kd_tree', leaf_size=40, n_neighbors=4, p=2, weights='distance')

		model = BaggingClassifier(knn)

		# Note: float values for max_features must lie in (0, 1]; values such
		# as 1.2 would raise at fit time, so only valid fractions are kept.
		# oob_score=True also requires bootstrap=True, so it is not searched.
		param_dist = {'n_estimators': sp_randint(10, 200),
		    'max_features': [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0],
		    'bootstrap': [True, False],
		    'bootstrap_features': [True, False],
		    'n_jobs': [-1]}

		random_search = randomSearch(X_train, y_train, model, param_dist, n_iter_search)

		y_pred = random_search.predict(X_test)
		
	return y_pred
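
Every search branch above delegates to a randomSearch helper that the listing never shows. A plausible minimal sketch, assuming it wraps scikit-learn's RandomizedSearchCV and returns the fitted search object (the cv value is a guess):

from sklearn.model_selection import RandomizedSearchCV

def randomSearch(X_train, y_train, model, param_dist, n_iter_search):
    # Sample n_iter_search parameter combinations and keep the best
    # cross-validated estimator; the refitted search object exposes
    # .predict(), which is how the callers use it.
    search = RandomizedSearchCV(model, param_distributions=param_dist,
                                n_iter=n_iter_search, cv=5, n_jobs=-1)
    search.fit(X_train, y_train.values.ravel())
    return search
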
Example #2
def randomForest(X_train, X_test, y_train, search=False, n_iter_search=100):
	if search:
		model = RandomForestClassifier()

		# Note: placing a frozen sp_randint distribution inside a categorical
		# list would make the search sample the distribution object itself,
		# so the integer choices are enumerated explicitly instead.
		param_dist = {'n_estimators': sp_randint(200, 2000),
		    'max_depth': [3, None],
		    'max_features': list(range(1, 11)) + ['auto', 'log2', None],
		    'min_samples_split': sp_randint(2, 11),
		    'min_samples_leaf': [1, 2, 4],
		    'bootstrap': [True, False],
		    'criterion': ['gini', 'entropy'],
		    'n_jobs': [-1]}
		
		random_search = randomSearch(X_train, y_train, model, param_dist, n_iter_search)	
		
		y_pred = random_search.predict(X_test)
		
	else:
		# Fixed hyperparameters, presumably from an earlier randomized search.
		model = RandomForestClassifier(bootstrap=False, criterion='gini',
		    max_depth=None, max_features='auto', min_samples_leaf=4,
		    min_samples_split=4, n_estimators=508, n_jobs=-1)
		
		model.fit(X_train, y_train.values.ravel())
			
		y_pred = model.predict(X_test)
	
	return y_pred
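
Hypothetical usage of the two classifiers above, assuming y_train is a single-column pandas DataFrame (which is what the .values.ravel() calls imply) and the older scikit-learn release noted earlier:

from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split

data = load_iris(as_frame=True)
X_train, X_test, y_train, y_test = train_test_split(
    data.data, data.frame[['target']], random_state=0)

y_pred = randomForest(X_train, X_test, y_train)  # fixed hyperparameters
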
Example #3
def kNNRegression(X_train,
                  X_test,
                  y_train,
                  method=0,
                  degree=1,
                  n_iter_search=100):

    if degree > 1:
        # Fit the polynomial expansion on the training data only, then apply
        # the same transform to the test data.
        poly = PolynomialFeatures(degree)
        X_train = poly.fit_transform(X_train)
        X_test = poly.transform(X_test)

    if method == 0:
        model = KNeighborsRegressor(algorithm='ball_tree',
                                    leaf_size=28,
                                    n_jobs=-1,
                                    n_neighbors=22,
                                    p=1,
                                    weights='uniform')

        model.fit(X_train, y_train.values.ravel())

        y_pred = model.predict(X_test)

    elif method == 1:
        model = KNeighborsRegressor()

        param_dist = {
            "n_neighbors": list(range(1, 31)),
            "p": sp_randint(1, 4),
            "algorithm": ['auto', 'ball_tree', 'kd_tree', 'brute'],
            "weights": ['uniform', 'distance'],
            "leaf_size": [10, 20, 25, 28, 30, 32, 35, 40, 60],
            "n_jobs": [-1]
        }

        random_search = randomSearch(X_train, y_train, model, param_dist,
                                     n_iter_search)

        y_pred = random_search.predict(X_test)

    elif method == 2:
        knnReg = KNeighborsRegressor(algorithm='ball_tree',
                                     leaf_size=28,
                                     n_jobs=-1,
                                     n_neighbors=22,
                                     p=1,
                                     weights='uniform')

        model = BaggingRegressor(knnReg, n_jobs=-1)

        model.fit(X_train, y_train.values.ravel())

        y_pred = model.predict(X_test)

    else:
        raise ValueError("method must be 0, 1 or 2")

    return y_pred
Example #4

# Load a flow-shop instance and improve a random job order by local search.
matrix = readFromFile("filesFlowShopPermutation/Doc1.txt")
order = randomSearch.generateRandomOrder(matrix)
localSearch.firstImprovement(matrix, order)

########################################################################################################################
#                                                                                                                      #
#                                                 RANDOM SEARCH                                                        #
#                                                                                                                      #
########################################################################################################################
# Adjust the variables below as needed.

myMatrix = readFromFile("filesFlowShopPermutation/Doc5.txt")
tries = 100
randomSearch.randomSearch(myMatrix, tries)
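
The readFromFile, randomSearch, localSearch and simulatedAnnealing helpers come from modules that are not shown. For a permutation flow shop, randomSearch.randomSearch plausibly draws random job orders and keeps the one with the smallest makespan; a sketch under that assumption, taking matrix as a jobs-by-machines table of processing times (all names below are guesses):

import random

def makespan(matrix, order):
    # Completion-time recurrence for a permutation flow shop: a job starts
    # on machine m once the machine is free and the job has left machine m-1.
    finish = [0] * len(matrix[0])
    for job in order:
        for m in range(len(finish)):
            earlier = finish[m - 1] if m > 0 else 0
            finish[m] = max(finish[m], earlier) + matrix[job][m]
    return finish[-1]

def random_search(matrix, tries):
    jobs = list(range(len(matrix)))
    best_order, best_span = list(jobs), makespan(matrix, jobs)
    for _ in range(tries):
        random.shuffle(jobs)
        span = makespan(matrix, jobs)
        if span < best_span:
            best_order, best_span = list(jobs), span
    return best_order, best_span
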


########################################################################################################################
#                                                                                                                      #
#                                                 SIMULATED ANNEALING                                                  #
#                                                                                                                      #
########################################################################################################################
# Adjust the variables below as needed.

myMatrix = readFromFile("filesFlowShopPermutation/Doc5.txt")
alpha = 0.9
L = 100
finalT = 0.001
bestAnnealing = simulatedAnnealing.simulatedAnnealing(alpha, L, finalT, myMatrix)
localSearch.firstImprovement(myMatrix, bestAnnealing)
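
simulatedAnnealing.simulatedAnnealing(alpha, L, finalT, matrix) is not shown either. Given its signature, a plausible reading is swap-neighbourhood annealing with geometric cooling (T <- alpha * T), L moves per temperature level, stopping once T falls below finalT. A sketch under those assumptions, reusing the makespan helper above (the starting temperature is a guess):

import math
import random

def simulated_annealing(alpha, L, final_t, matrix, start_t=1000.0):
    order = list(range(len(matrix)))
    random.shuffle(order)
    span = makespan(matrix, order)
    best, best_span = list(order), span
    t = start_t
    while t > final_t:
        for _ in range(L):
            i, j = random.sample(range(len(order)), 2)
            order[i], order[j] = order[j], order[i]      # propose a swap
            new_span = makespan(matrix, order)
            delta = new_span - span
            if delta <= 0 or random.random() < math.exp(-delta / t):
                span = new_span                          # accept the move
                if span < best_span:
                    best, best_span = list(order), span
            else:
                order[i], order[j] = order[j], order[i]  # undo the swap
        t *= alpha                                       # geometric cooling
    return best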
Example #5
def linearRegression(X_train,
                     X_test,
                     y_train,
                     method=1,
                     degree=1,
                     n_iter_search=100):

    if degree > 1:
        # Fit the polynomial expansion on the training data only, then apply
        # the same transform to the test data.
        poly = PolynomialFeatures(degree)
        X_train = poly.fit_transform(X_train)
        X_test = poly.transform(X_test)

    if method == 0:
        model = linear_model.LinearRegression()

        model.fit(X_train, y_train)

        y_pred = model.predict(X_test)

    elif method == 1:
        model = linear_model.Ridge()

        model.fit(X_train, y_train)

        y_pred = model.predict(X_test)

    elif method == 2:
        model = linear_model.Lasso(alpha=0.0002704959730463137,
                                   fit_intercept=True,
                                   max_iter=1698,
                                   normalize=True,
                                   positive=False,
                                   selection='random',
                                   tol=0.001)

        model.fit(X_train, y_train)

        y_pred = model.predict(X_test)

    elif method == 3:
        model = linear_model.ElasticNet(alpha=0.0002465811075822604,
                                        fit_intercept=True,
                                        l1_ratio=0.9,
                                        max_iter=1508,
                                        normalize=True,
                                        positive=False,
                                        selection='random',
                                        tol=0.001)

        model.fit(X_train, y_train)

        y_pred = model.predict(X_test)
    elif method == 4:
        model = linear_model.Ridge()

        alphas = np.logspace(-10, -2, 200)

        param_dist = {
            'alpha': alphas,
            'fit_intercept': [True, False],
            'normalize': [True, False],
            'max_iter': sp_randint(500, 2000),
            'tol': [0.0001, 0.00009, 0.00011, 0.0005, 0.00001, 0.001],
            'solver': ['auto', 'svd', 'cholesky', 'lsqr', 'sparse_cg',
                       'sag', 'saga']
        }

        random_search = randomSearch(X_train, y_train, model, param_dist,
                                     n_iter_search)

        y_pred = random_search.predict(X_test)
    elif method == 5:
        model = linear_model.Lasso()

        alphas = np.logspace(-10, -2, 200)

        param_dist = {
            'alpha': alphas,
            'fit_intercept': [True, False],
            'normalize': [True, False],
            'max_iter': sp_randint(500, 2000),
            "tol": [0.0001, 0.00009, 0.00011, 0.0005, 0.00001, 0.001],
            'positive': [True, False],
            'selection': ['random', 'cyclic']
        }

        random_search = randomSearch(X_train, y_train, model, param_dist,
                                     n_iter_search)

        y_pred = random_search.predict(X_test)

    elif method == 6:
        model = linear_model.ElasticNet()

        alphas = np.logspace(-10, -2, 200)

        param_dist = {
            'alpha': alphas,
            'l1_ratio': [
                0, 0.1, 0.2, 0.25, 0.35, 0.4, 0.45, 0.5, 0.55, 0.6, 0.65,
                0.75, 0.8, 0.9, 1
            ],
            'fit_intercept': [True, False],
            'normalize': [True, False],
            'max_iter': sp_randint(500, 2000),
            'tol': [0.0001, 0.00009, 0.00011, 0.0005, 0.00001, 0.001],
            'positive': [True, False],
            'selection': ['random', 'cyclic']
        }

        random_search = randomSearch(X_train, y_train, model, param_dist,
                                     n_iter_search)

        y_pred = random_search.predict(X_test)

    elif method == 7:
        linReg = linear_model.ElasticNet(alpha=0.0002465811075822604,
                                         fit_intercept=True,
                                         l1_ratio=0.9,
                                         max_iter=1508,
                                         normalize=True,
                                         positive=False,
                                         selection='random',
                                         tol=0.001)

        model = BaggingRegressor(linReg, n_jobs=-1)

        model.fit(X_train, y_train.values.ravel())

        y_pred = model.predict(X_test)

    else:
        raise ValueError("method must be between 0 and 7")

    return y_pred
Example #6
def logisticRegression(X_train, X_test, y_train, method=1, degree=3, n_iter_search=1000):

	if degree > 1:
		# Fit the polynomial expansion on the training data only, then apply
		# the same transform to the test data.
		poly = PolynomialFeatures(degree)
		X_train = poly.fit_transform(X_train)
		X_test = poly.transform(X_test)
		
	if method == 0:
		model = LogisticRegression(tol=0.0001, intercept_scaling=0.9,
			solver='newton-cg', n_jobs=-1, multi_class='multinomial',
			max_iter=100, dual=False, C=10000)
		
		model.fit(X_train, y_train.values.ravel())
	
		y_pred = model.predict(X_test)
		
	elif method == 1:
		logReg = LogisticRegression(tol=0.0001, intercept_scaling=0.9,
			solver='newton-cg', n_jobs=-1, multi_class='multinomial',
			max_iter=100, dual=False, C=10000)

		model = BaggingClassifier(logReg, n_estimators=100, max_samples=0.5, max_features=0.5, n_jobs=-1)
		
		model.fit(X_train, y_train.values.ravel())
	
		y_pred = model.predict(X_test)
		
	elif method == 2:
		model = LogisticRegression()

		# Note: not every sampled combination is valid (e.g. penalty='l1' is
		# rejected by the 'newton-cg', 'lbfgs' and 'sag' solvers, and
		# dual=True only works with liblinear + l2), so some fits will fail.
		param_dist = {'penalty': ['l1', 'l2'],
		    'dual': [True, False],
		    'tol': [0.0001, 0.00009, 0.00011, 0.0005, 0.00001, 0.001],
		    'C': [0.001, 0.01, 0.1, 1, 10, 100, 1000, 10000],
		    'fit_intercept': [True, False],
		    'intercept_scaling': [1, 1.1, 2, 10, 0.5, 0.1, 0.9, 0, 1.2, 30, 3, 0.2, 0.01, 1.3, 0.08, 5, 15, 0.3, 0.2, 0.002],
		    'class_weight': ['balanced', None],
		    'solver': ['newton-cg', 'lbfgs', 'liblinear', 'sag', 'saga'],
		    'max_iter': [100, 105, 110, 140, 160, 180],
		    'multi_class': ['ovr', 'multinomial', 'auto'],
		    'n_jobs': [-1]}
		
		random_search = randomSearch(X_train, y_train, model, param_dist, n_iter_search)	
		
		y_pred = random_search.predict(X_test)
	
	else:
		logReg = LogisticRegression(tol=0.0001, intercept_scaling=0.9,
			solver='newton-cg', n_jobs=-1, multi_class='multinomial',
			max_iter=100, dual=False, C=10000)
		model = BaggingClassifier(logReg)

		# Note: float values for max_samples and max_features must lie in
		# (0, 1]; values such as 1.2 would raise at fit time, so only valid
		# fractions are kept. oob_score=True also requires bootstrap=True,
		# so it is not searched here.
		param_dist = {'n_estimators': sp_randint(10, 200),
		    'max_samples': [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0],
		    'max_features': [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0],
		    'bootstrap': [True, False],
		    'bootstrap_features': [True, False],
		    'n_jobs': [-1]}
		
		random_search = randomSearch(X_train, y_train, model, param_dist, n_iter_search)	
		
		y_pred = random_search.predict(X_test)	
		
	return y_pred
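
Because the method == 2 grid above still contains combinations scikit-learn rejects outright, the assumed RandomizedSearchCV-based helper would need failed fits to be scored rather than fatal (older scikit-learn releases raised on a failed fit by default), e.g.:

search = RandomizedSearchCV(model, param_distributions=param_dist,
                            n_iter=n_iter_search, error_score=np.nan)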