Example #1
        # The opening of this snippet is cut off in the source; the call below
        # (a second conv layer producing out_a_pool2/out_z_pool2) is a
        # reconstruction, and the function name is an assumption.
        out_a_pool2, out_z_pool2 = functions.convlayer(
            input=activ_pool1,
            Kernels=Kernels_2,
            stride=stride_2,
            padding=0,
            non_linearialty='ReLu')
        activ_pool2 = functions.poollayer(input=out_a_pool2,
                                          type_pool='max',
                                          pool_size=pool_size_2)
        z_pool2 = functions.poollayer(input=out_z_pool2,
                                      type_pool='max',
                                      pool_size=pool_size_2)

        zs, logits = functions.mlp(input=np.ravel(activ_pool2),
                                   weights=weights,
                                   biases=biases,
                                   num_hidden=num_hidden,
                                   sizes=sizes,
                                   non_linearialty='sigmoid',
                                   output_size=output_size)
        logits[-1] = functions.softmax(zs[-1])

        delta = backprop.cost_derivative(
            logits[-1], y_train) * backprop.softmax_grad(zs[-1])

        # Output-layer weight gradient: elementwise products that build the
        # outer product of delta with the previous layer's activations.
        der_beta = []
        for i in range(sizes[0]):
            der_beta.append(delta * logits[-2][i])
        der_beta = np.transpose(np.array(der_beta))

        q = 0
        for i in range(10):
            pass  # loop body truncated in the source
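The closing lines above apply the standard output-layer backpropagation rule, delta = cost_derivative(a, y) * softmax_grad(z). The helpers live in the project's own functions/backprop modules; the following is only a minimal sketch of what they plausibly compute, not the project's actual code:

import numpy as np

def softmax(z):
    e = np.exp(z - np.max(z))  # shift by the max for numerical stability
    return e / np.sum(e)

def cost_derivative(output, y):
    return output - y  # gradient of the quadratic cost w.r.t. the output

def softmax_grad(z):
    s = softmax(z)
    return s * (1.0 - s)  # elementwise (diagonal) part of the softmax Jacobian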
Example #2

print('KNN')

k_values = [1, 2, 5, 7, 10]

functions.knn(X_train, X_test, Y_train, Y_test, k_values, True, ['uniform', 'distance'],
              cfg.default.student_figures, 'knn')
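functions.knn is a project helper that also saves figures to cfg.default.student_figures; the sweep it presumably performs looks like the following sketch, assuming scikit-learn's KNeighborsRegressor underneath (knn_sweep and its body are hypothetical):

from sklearn.neighbors import KNeighborsRegressor
from sklearn.metrics import mean_squared_error

def knn_sweep(X_train, X_test, y_train, y_test, k_values, weight_options):
    # One regressor per (k, weighting) combination; report test MSE.
    for weights in weight_options:
        for k in k_values:
            model = KNeighborsRegressor(n_neighbors=k, weights=weights)
            model.fit(X_train, y_train)
            mse = mean_squared_error(y_test, model.predict(X_test))
            print(f'k={k}, weights={weights}: MSE={mse:.4f}')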

print('Decision Tree Regression')

max_depths = [1, 10, 30, 50, 100, 300]
min_weight_fraction_leafs = [.0, .125, .25, .375, .5]
min_samples_leaf = [1, 10, 100, 200]

functions.decision_tree(X_train, X_test, Y_train, Y_test, max_depths, min_weight_fraction_leafs, min_samples_leaf,
                        cfg.default.student_figures, 'dtree')
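Likewise, functions.decision_tree appears to grid over the three hyperparameter lists; a minimal sketch assuming scikit-learn's DecisionTreeRegressor (tree_sweep is hypothetical):

from itertools import product
from sklearn.tree import DecisionTreeRegressor
from sklearn.metrics import mean_squared_error

def tree_sweep(X_train, X_test, y_train, y_test,
               max_depths, min_weight_fractions, min_samples):
    # One regressor per hyperparameter combination; report test MSE.
    for depth, frac, leaf in product(max_depths, min_weight_fractions, min_samples):
        model = DecisionTreeRegressor(max_depth=depth,
                                      min_weight_fraction_leaf=frac,
                                      min_samples_leaf=leaf)
        model.fit(X_train, y_train)
        mse = mean_squared_error(y_test, model.predict(X_test))
        print(f'depth={depth}, frac={frac}, leaf={leaf}: MSE={mse:.4f}')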

print('MLP')

scaler = preprocessing.StandardScaler().fit(X_train)
X_train_scaled = scaler.transform(X_train)
X_test_scaled = scaler.transform(X_test)
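Note that the scaler is fitted on the training split only and then applied to both splits, which keeps test-set statistics out of preprocessing; standardized inputs also help the MLP below converge.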

max_iteration = 1000
solver = 'lbfgs'  # lbfgs, adam, sgd
alpha = [0.001, 0.0001, 0.00001]

list_hidden_layer_sizes = [[10], [5, 2, 5], [60, 20]]

functions.mlp(X_train_scaled, X_test_scaled, Y_train, Y_test, max_iteration,
              solver, alpha, list_hidden_layer_sizes,
              cfg.default.student_figures, 'mlp')
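functions.mlp presumably trains one network per (alpha, architecture) pair; a minimal sketch assuming scikit-learn's MLPRegressor (mlp_sweep is hypothetical):

from itertools import product
from sklearn.neural_network import MLPRegressor
from sklearn.metrics import mean_squared_error

def mlp_sweep(X_train, X_test, y_train, y_test, max_iter, solver, alphas, layouts):
    # One network per (alpha, hidden-layer layout) pair; report test MSE.
    for a, layers in product(alphas, layouts):
        model = MLPRegressor(hidden_layer_sizes=tuple(layers), solver=solver,
                             alpha=a, max_iter=max_iter)
        model.fit(X_train, y_train)
        mse = mean_squared_error(y_test, model.predict(X_test))
        print(f'alpha={a}, layers={layers}: MSE={mse:.4f}')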
Example #3
k_values = [1, 2, 5, 7, 10]

functions.knn(X_train, X_test, y_train, y_test, k_values, True,
              ['uniform', 'distance'], cfg.default.real_estate_figures, 'knn')

print('Decision Tree Regression')

max_depths = [1, 10, 30, 50, 100, 300]
min_weight_fraction_leafs = [.0, .125, .25, .375, .5]
min_samples_leaf = [1, 10, 100, 200]

functions.decision_tree(X_train, X_test, y_train, y_test, max_depths,
                        min_weight_fraction_leafs, min_samples_leaf,
                        cfg.default.real_estate_figures, 'dtree')

print('MLP')

scaler = preprocessing.StandardScaler().fit(X_train)
X_train_scaled = scaler.transform(X_train)
X_test_scaled = scaler.transform(X_test)

max_iteration = 1000
solver = 'lbfgs'  # lbfgs, adam, sgd
alpha = [0.001, 0.0001, 0.00001]

list_hidden_layer_sizes = [[10], [5, 2, 5], [60, 20]]

functions.mlp(X_train_scaled, X_test_scaled, y_train, y_test, max_iteration,
              solver, alpha, list_hidden_layer_sizes,
              cfg.default.real_estate_figures, 'mlp')