Example #1
    def run():

        x_tr = [[0.05, 0.1]]
        y_tr = [[0.01, 0.99]]

        inner_dim = [[2, 2, 2, 2]]
        init_weights = [np.asarray([np.asarray([[0.15, 0.25], [0.2, 0.3]]), np.asarray([[0.4, 0.5], [0.45, 0.55]])])]
        init_bias = [np.asarray([np.asarray([0.35, 0.35]), np.asarray([0.6, 0.6])])]

        activ_fun = [[ACTIVATION_DICT["sigmoid"],
                      ACTIVATION_DICT["sigmoid"],
                      ACTIVATION_DICT["sigmoid"],
                      ACTIVATION_DICT["sigmoid"]]]

        exp = ExperimentSettings()

        optimizer = OPTIMIZER_DICT['ADAMAX'](lr=0.5)

        hyp = HyperParameters(
            inner_dimension=inner_dim,
            init_weights=init_weights,
            init_bias=init_bias,
            activation_function=activ_fun,
            epochs=[1000],
            batch_size=[1],
            optimizer=[optimizer])

        print(NeuralNet.train_without_ms(x_tr, y_tr, x_tr, y_tr, hyp, exp))
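
The inputs, targets, initial weights and biases here are exactly those of the widely circulated step-by-step backpropagation worked example, which makes the run easy to verify by hand. A minimal NumPy sketch, separate from the library and assuming the layer convention sigmoid(x @ W + b) implied by the array shapes above, reproduces the expected first forward pass:

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

x = np.array([0.05, 0.1])
W1 = np.array([[0.15, 0.25], [0.2, 0.3]])
W2 = np.array([[0.4, 0.5], [0.45, 0.55]])
b1, b2 = np.array([0.35, 0.35]), np.array([0.6, 0.6])

h = sigmoid(x @ W1 + b1)   # hidden outputs: approx. [0.5933, 0.5969]
o = sigmoid(h @ W2 + b2)   # initial prediction: approx. [0.7514, 0.7729]
print(o)                   # training should pull this toward [0.01, 0.99]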
Example #2
    def run_validation(self, params, task=None):
        task = self.task if task is None else task
        # Collect one result per train/validation split of the k-fold CV
        results = []

        for fold in task.folds.folds:
            x_tr = task.inputs[fold.train_indexes]
            y_tr = task.targets[fold.train_indexes]
            x_val = task.inputs[fold.test_indexes]
            y_val = task.targets[fold.test_indexes]

            results.append({
                "score": NeuralNet.train_and_result(x_tr, y_tr, x_val, y_val, params),
                "params": params
            })

        return {
            "avg_tr_score": sum(r["score"]["tr_score"] for r in results) / len(results),
            "avg_ts_score": sum(r["score"]["ts_score"] for r in results) / len(results),
            "single_result": results,
            "params": params
        }
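
task.folds is project-specific, but the loop above is an ordinary k-fold evaluation: each fold supplies one train/validation split and the two scores are averaged over the folds. For comparison, a sketch of how equivalent index pairs can be produced with scikit-learn's KFold (an assumption about intent, not the project's own fold builder):

import numpy as np
from sklearn.model_selection import KFold

X = np.random.rand(20, 2)                       # hypothetical inputs
for train_idx, test_idx in KFold(n_splits=5).split(X):
    x_tr, x_val = X[train_idx], X[test_idx]     # same roles as fold.train_indexes / fold.test_indexes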
Example #3
def testNeuralNetWithGA():
    net = NeuralNet(2, 2, 1)
    t_model = utils.readTrainModel(os.path.join(utils.getResourcesPath(), 'logic_gates/NAND.txt'))
    Population.initPopulation = initPopulation
    Population.evolve = evolve
    p = Population(70, 9)
    p.initPopulation()
    p.evolve(net, t_model)

    print(net.getOutputs([0, 0]))
    print(net.getOutputs([0, 1]))
    print(net.getOutputs([1, 0]))
    print(net.getOutputs([1, 1]))
    print(net.getError(t_model))
    
#testPerceptron()
#testNeuralNet()
#testNeuralNetWithGA()
#numberRecognition()
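
For reference, NAND is true unless both inputs are on, so a successfully trained net should print values close to 1, 1, 1, 0 for the four getOutputs calls above. The truth table in plain Python:

for a, b in [(0, 0), (0, 1), (1, 0), (1, 1)]:
    print(a, b, "->", int(not (a and b)))       # 1, 1, 1, 0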
Example #4
def evaluate_and_plot(x_tr, y_tr, x_ts, y_ts, params):
    score = []
    for p in params:
        # Pair each hyperparameter set with its training result
        score.append((p,
                      NeuralNet.train_and_result(x_tr, y_tr, x_ts, y_ts, p,
                                                 print_plot=True)))

    # Use a context manager so the file is closed after dumping
    with open("score_best_net.p", "wb") as f:
        pickle.dump(score, f)
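
The pickled scores can be reloaded later for analysis; the minimal counterpart to the dump above:

import pickle

with open("score_best_net.p", "rb") as f:
    score = pickle.load(f)                      # list of (hyperparameters, result) tuples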
Example #5
def numberRecognition():
    net = NeuralNet(15, 2, 1)
    t_model = utils.readTrainModel(os.path.join(utils.getResourcesPath(), 'number_grids.txt'))
    net.learn(t_model)
    print(net.getOutputs([0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0]))
    print(net.getOutputs([1, 1, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1]))
    print(net.getOutputs([1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1]))
    print(net.getOutputs([1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1]))
    print(net.getOutputs([1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1]))
    print(net.getOutputs([1, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0]))
    print(net.getOutputs([1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0]))
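
The 15 inputs appear to encode a 3x5 pixel grid read row by row: the first five patterns look like the digits 1, 3, 4, 8 and 9, with noisier variants after them. A small sketch, assuming that encoding, renders a pattern for inspection:

import numpy as np

bits = [0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0]   # first pattern above
for row in np.asarray(bits).reshape(5, 3):              # 5 rows of 3 pixels
    print("".join("#" if b else "." for b in row))      # draws a '1' glyph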
Example #6
    def run():

        input_s = [[0]]
        target_s = [[0]]

        optimizers = [
            OPTIMIZER_DICT['SGD'](lr=5e-4),
            OPTIMIZER_DICT['SGD'](lr=5e-4, momentum=0.9),
            OPTIMIZER_DICT['SGD'](lr=5e-4, momentum=0.9, nesterov=True),
            OPTIMIZER_DICT['ADAM'](lr=5e-2),
            OPTIMIZER_DICT['ADAMAX'](lr=5e-2)
        ]

        ep = 200
        bs = 1

        for name, err_f in QUAD_FUNCTION_DICT.items():

            err_fun = err_f[0]
            gradient_rule = err_f[1]
            start_x, start_y = err_fun.starting_point
            exp = ExperimentSettings()

            def genhyp(optimizer):
                return HyperParameters(
                    init_weights=[np.asarray([[[start_x]]])],
                    init_bias=[np.asarray([[start_y]])],
                    epochs=[ep],
                    optimizer=[optimizer],
                    gradient_rule=[gradient_rule],
                    batch_size=[bs]
                )

            hyps = [genhyp(opt) for opt in optimizers]

            trained_weights = []
            trained_biases = []
            for hyp in hyps:
                trained_net = NeuralNet.train_without_ms(input_s, target_s, input_s, target_s, hyp, exp, save_weights=True)
                trained_weights.append(trained_net["trained_net"].saved_weights)
                trained_biases.append(trained_net["trained_net"].saved_bias)

            plot_contour(
                err_fun,
                weights=trained_weights,
                biases=trained_biases,
                resolution=150,
                save_name=name,
                legend=["GD", "GD - MOMENTUM", "GD - NESTEROV", "ADAM", "ADAMAX"]
            )
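
plot_contour is project code, but the idea is standard: draw level curves of the error surface and overlay each optimizer's saved (weight, bias) trajectory. A generic matplotlib sketch of the same picture, with a hypothetical quadratic surface standing in for a QUAD_FUNCTION_DICT entry:

import numpy as np
import matplotlib.pyplot as plt

def err(w, b):                                   # hypothetical stand-in error surface
    return w ** 2 + 10 * b ** 2

w, b = np.meshgrid(np.linspace(-2, 2, 150), np.linspace(-2, 2, 150))
plt.contour(w, b, err(w, b), levels=30)
path = np.array([(-1.8, 1.5), (-1.2, 0.4), (-0.5, 0.1), (-0.1, 0.0)])  # e.g. one weight/bias history
plt.plot(path[:, 0], path[:, 1], marker="o", label="GD")
plt.legend()
plt.savefig("contour_sketch.png")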
Example #7
def testNeuralNet():
    net = NeuralNet(2, 2, 1)
    net.t_sessions = 20000
    t_model = utils.readTrainModel(os.path.join(utils.getResourcesPath(), 'logic_gates/XOR.txt'))
    net.learn(t_model)
    print(net.getOutputs([0, 0]))
    print(net.getOutputs([0, 1]))
    print(net.getOutputs([1, 0]))
    print(net.getOutputs([1, 1]))
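
For XOR the targets are 0, 1, 1, 0, so after the 20000 training sessions the four calls above should print values near those.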
Example #8
    def run():

        input_s, target_s, inner_dim, activ_fun = Test1.init()

        params = dict(performance_function=mean_square_error,
                      select_function=min,
                      inner_dimension=inner_dim[0],
                      epochs=30,
                      batch_size=1,
                      activation_function=activ_fun[0],
                      first_activation=[],
                      learning_rate=0.01,
                      loss=LOSS_DICT["mse"],
                      lambda_regularization=0,
                      momentum=0,
                      task_type=TaskType.regression)

        res = NeuralNet.train_and_result(input_s, target_s, input_s, target_s, params)

        print(res)

        plt.show()
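
Unlike the examples that build a HyperParameters object, this one passes a plain dict straight to train_and_result. Note also that input_s and target_s serve as both the training and the test arguments, so the printed result is a training-set score.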
Example #9
    def double_cross_validation(self, n_workers, task=None):
        task = self.task if task is None else task
        if not isinstance(task.folds.folds[0].train_indexes, KFolds):
            raise ValueError(
                "Double cross validation requires nested folds: each external "
                "fold's train_indexes must itself be a KFolds instance")

        nested_res = {"nested_scores": [], "params": []}

        for ext_f in task.folds.folds:
            # Run the model selection over the internal folds
            internal_task = Task(task.inputs, task.targets,
                                 ext_f.train_indexes)
            res = self.run_grid_search(n_workers, internal_task)

            # Train the selected model and test over the external fold
            training_indexes = (ext_f.train_indexes.folds[0].train_indexes +
                                ext_f.train_indexes.folds[0].test_indexes)
            x_tr = [task.inputs[i] for i in training_indexes]
            y_tr = [task.targets[i] for i in training_indexes]
            x_val = [task.inputs[i] for i in ext_f.test_indexes]
            y_val = [task.targets[i] for i in ext_f.test_indexes]

            params = res["results_list"][res["best_score"][0]]["params"]
            nested_res["params"].append(params)
            nested_res["nested_scores"].append(
                NeuralNet.train_and_result(x_tr, y_tr, x_val, y_val, params))

        # Store the results
        score_list = []
        for score in nested_res["nested_scores"]:
            score_list.append(score["ts_score"])
        score_list = np.array(score_list)
        nested_res["mean"] = score_list.mean()
        nested_res["std"] = score_list.std()

        return nested_res
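
For comparison, the same double (nested) cross-validation pattern expressed with scikit-learn (an illustration, not the project's API): the inner folds drive model selection, the outer folds estimate the selected model's score.

import numpy as np
from sklearn.model_selection import GridSearchCV, KFold, cross_val_score
from sklearn.svm import SVR

X, y = np.random.rand(50, 2), np.random.rand(50)             # hypothetical data
inner, outer = KFold(n_splits=3), KFold(n_splits=5)
search = GridSearchCV(SVR(), {"C": [0.1, 1, 10]}, cv=inner)  # model selection on internal folds
scores = cross_val_score(search, X, y, cv=outer)             # score estimate on external folds
print(scores.mean(), scores.std())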
Example #10
    def run():

        input_s, target_s, test_input_s, inner_dim, activ_fun, data_frame = MicheliDataset.init()

        # plot_mds(target_s, "target")

        exp = ExperimentSettings(
            performance_function=LOSS_DICT["mee"].f,
            select_function=min
        )

        optimizer1 = OPTIMIZER_DICT['SGD'](lr=1e-3, momentum=0.5)
        optimizer2 = OPTIMIZER_DICT['ADAM']()
        optimizer3 = OPTIMIZER_DICT['ADAMAX']()

        hyp = HyperParameters(
            inner_dimension=inner_dim,
            activation_function=activ_fun,
            epochs=[250],
            batch_size=[10],
            optimizer=[optimizer3],
            task_type=[TaskType.regression],
            loss=[LOSS_DICT["mee"]],
            verbose=[1]
        )

        res = NeuralNet.train_without_ms(input_s[:-216], target_s[:-216], input_s[-216:], target_s[-216:], hyp, exp, 'SGD')

        # TRAINING plots
        net = res['trained_net']

        prediction = net.predict(input_s[:-216])
        plot_mds_all(target_s[:-216], prediction, "prediction_all")

        # TEST plots
        prediction_test = net.predict(input_s[-216:])
        plot_mds_all(target_s[-216:], prediction_test, "prediction_all_test")
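
LOSS_DICT["mee"] presumably denotes the mean Euclidean error, the usual metric for multi-output regression of this kind: the average L2 distance between predicted and target vectors. A stand-alone sketch of that metric, under that assumption:

import numpy as np

def mean_euclidean_error(y_true, y_pred):
    # Average Euclidean distance between each prediction/target pair
    diff = np.asarray(y_pred) - np.asarray(y_true)
    return float(np.mean(np.linalg.norm(diff, axis=1)))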
Example #11
    def run():

        input_s, target_s, _, _ = Test1.init()

        params = dict(inner_dimension=[2, 1])
        net = NeuralNet(len(input_s[0]), 0.01, 0, 0, LOSS_DICT["mse"])
        net.add_layer(2, activation_fun=ACTIVATION_DICT["linear"], use_bias=False, init_weights=np.asarray([[0.11, 0.12], [0.21, 0.08]]))
        net.add_layer(1, activation_fun=ACTIVATION_DICT["linear"], use_bias=False, init_weights=np.asarray([[0.14], [0.15]]))
        net.initialize()
        input_s = [[2, 3]]
        target_s = [[17]]
        net.fit(input_s, target_s, 1)
        DrawNN.draw(net, params, "net_graph1")
        net.fit(input_s, target_s, 1)
        DrawNN.draw(net, params, "net_graph2")

        plt.show()
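
With hand-set weights the starting point is easy to verify: assuming linear activations and no bias, the prediction is (x @ W1) @ W2, so the first forward pass for input [2, 3] yields about 0.191 against the target 17, and the two fit calls should move the drawn weights visibly. A quick check:

import numpy as np

x = np.array([2.0, 3.0])
W1 = np.array([[0.11, 0.12], [0.21, 0.08]])
W2 = np.array([[0.14], [0.15]])
print((x @ W1) @ W2)    # -> [0.191], far from the target 17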
Example #12
    def run():
        tr_dim = 30
        ts_dim = 10

        input_s = [[x / tr_dim] for x in range(tr_dim)]
        # target_s = [[np.sin(inp[0] * 15) + np.cos(inp[0] * 2)] for inp in input_s]
        # target_s = [[np.sin(inp[0])] for inp in input_s]
        target_s = [[inp[0] * 2 + 3] for inp in input_s]

        test_input_s = [[x / ts_dim] for x in range(ts_dim)]
        # test_target_s = [[np.sin(inp[0] * 15) + np.cos(inp[0] * 2)] for inp in test_input_s]
        # test_target_s = [[np.sin(inp[0])] for inp in test_input_s]
        test_target_s = [[inp[0] * 2 + 3] for inp in test_input_s]

        plot_sorted(input_s, target_s, "plot/plots/train")
        plot_sorted(test_input_s, test_target_s, "plot/plots/test")

        inner_dim = [[1]]
        activ_fun = [[ACTIVATION_DICT["linear"]]]

        optimizer1 = OPTIMIZER_DICT['SGD'](lr=0.5)
        optimizer2 = OPTIMIZER_DICT['ADAM'](lr=1)
        optimizer3 = OPTIMIZER_DICT['ADAMAX'](lr=1)

        exp = ExperimentSettings()

        # The three hyperparameter sets differ only in the optimizer
        def genhyp(optimizer):
            return HyperParameters(
                inner_dimension=inner_dim,
                activation_function=activ_fun,
                use_bias=[True],
                init_weights=[np.asarray([[[1.]]])],
                init_bias=[np.asarray([[1.]])],
                epochs=[200],
                optimizer=[optimizer],
                task_type=[TaskType.regression],
                batch_size=[10])

        hyps = [genhyp(opt) for opt in (optimizer1, optimizer2, optimizer3)]

        NeuralNet.generate_field_data(
            input_s,
            target_s,
            test_input_s,
            test_target_s,
            hyps,
            exp,
            legend=["GD", "GD - MOMENTUM", "GD - NESTEROV", "ADAM", "ADAMAX"]
        )
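
Since the targets follow y = 2x + 3 exactly and the net is a single linear unit with bias, the optimum is known in closed form: weight 2, bias 3. A quick check of the values all three optimizers should converge toward:

import numpy as np

xs = np.array([x / 30 for x in range(30)])
print(np.polyfit(xs, 2 * xs + 3, 1))    # -> [2. 3.]: the target weight and bias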