Ejemplo n.º 1
0
    def run():
        """Fit a mixed-sinusoid curve with two training configs and plot both."""
        n_train = 100
        n_test = 200

        # Scalar inputs in [0, 1); targets mix two sinusoids of the input.
        input_s = [[i / n_train] for i in range(n_train)]
        target_s = [[np.sin(row[0] * 15) + np.cos(row[0] * 2)] for row in input_s]

        test_input_s = [[i / n_test] for i in range(n_test)]
        test_target_s = [[np.sin(row[0] * 15) + np.cos(row[0] * 2)] for row in test_input_s]

        plot_sorted(input_s, target_s, "train")
        plot_sorted(test_input_s, test_target_s, "test")

        # Shared topology: two 30-unit tanh layers feeding a linear output.
        inner_dim = [[30, 30, 1]]
        activ_fun = [[ACTIVATION_DICT["tanh"],
                      ACTIVATION_DICT["tanh"],
                      ACTIVATION_DICT["linear"]]]

        exp = ExperimentSettings()

        # Two single-point grids that differ only in epochs / batch size.
        hyp = HyperParameters(inner_dimension=inner_dim,
                              activation_function=activ_fun,
                              epochs=[500],
                              batch_size=[5])
        hyp2 = HyperParameters(inner_dimension=inner_dim,
                               activation_function=activ_fun,
                               epochs=[100],
                               batch_size=[1])

        fold_s = KFolds.cross_validation_folds(
            data_indexes=list(range(len(target_s))), folds_number=2)
        task = Task(input_s, target_s, fold_s)

        res = ModelSelection(task, exp, hyp).run_grid_search(n_workers=1)
        res_with_regul = ModelSelection(task, exp, hyp2).run_grid_search(n_workers=1)

        def best_net(outcome):
            # Network trained in the first fold of the best-scoring config.
            best_idx = outcome["best_score"][0]
            return outcome["results_list"][best_idx]["single_result"][0]["score"]["trained_net"]

        prediction = best_net(res).predict(test_input_s)
        prediction_with_regul = best_net(res_with_regul).predict(test_input_s)

        plot_sorted(test_input_s, prediction, "result")
        plot_sorted(test_input_s, prediction_with_regul, "result_with_regul")

        plt.show()
Ejemplo n.º 2
0
    def run():
        """Train a tiny fixed-initialisation network on a single pattern and print the result."""
        # One training pattern; weights and biases are pinned so the run is
        # fully deterministic.
        x_tr = [[0.05, 0.1]]
        y_tr = [[0.01, 0.99]]

        inner_dim = [[2, 2, 2, 2]]
        init_weights = [np.asarray([
            np.asarray([[0.15, 0.25], [0.2, 0.3]]),
            np.asarray([[0.4, 0.5], [0.45, 0.55]]),
        ])]
        init_bias = [np.asarray([
            np.asarray([0.35, 0.35]),
            np.asarray([0.6, 0.6]),
        ])]

        # Sigmoid activation at every layer.
        activ_fun = [[ACTIVATION_DICT["sigmoid"]] * 4]

        exp = ExperimentSettings()
        optimizer = OPTIMIZER_DICT['ADAMAX'](lr=0.5)

        hyp = HyperParameters(inner_dimension=inner_dim,
                              init_weights=init_weights,
                              init_bias=init_bias,
                              activation_function=activ_fun,
                              epochs=[1000],
                              batch_size=[1],
                              optimizer=[optimizer])

        # Train directly, skipping model selection; test set == training set.
        print(NeuralNet.train_without_ms(x_tr, y_tr, x_tr, y_tr, hyp, exp))
Ejemplo n.º 3
0
    def run_nested():
        """Run a nested (double) cross validation over three optimizers and pickle the result.

        Output is written to cup_nested_res.p in the working directory.
        """
        input_s, target_s, test_input_s, inner_dim, activ_fun, data_frame = MicheliDataset.init()

        exp = ExperimentSettings(
            performance_function=LOSS_DICT["mse"].f,
            select_function=min  # lower MSE is better
        )

        # Candidate optimizers explored by the internal model selection.
        optimizer = [OPTIMIZER_DICT['SGD'](), OPTIMIZER_DICT['ADAMAX'](), OPTIMIZER_DICT['ADAM']()]

        hyp = HyperParameters(
            inner_dimension=inner_dim,
            activation_function=activ_fun,
            epochs=[250],
            batch_size=[1],
            optimizer=optimizer,
            task_type=[TaskType.classification]
        )

        # 3 external folds for assessment, 4 internal folds for selection.
        fold_s = KFolds.double_cross_validation_folds(
            data_indexes=list(range(len(target_s))),
            external_folds_dim=3,
            internal_folds_dim=4)
        task = Task(input_s, target_s, fold_s)
        mod_sel = ModelSelection(task, exp, hyp)

        res = mod_sel.double_cross_validation(n_workers=50)
        # Close the dump file deterministically (the original leaked the handle).
        with open("cup_nested_res.p", "wb") as dump_file:
            pickle.dump(res, dump_file)
Ejemplo n.º 4
0
    def run():
        """Grid-search MONK 3, pickle the CV results and collect the best-tying configs.

        Results go to monk3_validation_res.p in the working directory.
        """
        input_s, target_s, test_input_s, test_target_s, inner_dim, activ_fun = MonkTest.init(
            3)

        exp = ExperimentSettings(
            # performance_function=accuracy,
            # select_function=max,
        )

        optimizer = OPTIMIZER_DICT['ADAMAX']()

        # NOTE(review): MONK benchmarks are typically classification, but this
        # run is configured as regression -- confirm this is intentional.
        hyp = HyperParameters(inner_dimension=inner_dim,
                              activation_function=activ_fun,
                              epochs=[500],
                              batch_size=[1],
                              optimizer=[optimizer],
                              task_type=[TaskType.regression])

        fold_s = KFolds.cross_validation_folds(data_indexes=list(
            range(len(target_s))),
                                               folds_number=6)

        task = Task(input_s, target_s, fold_s)
        mod_sel = ModelSelection(task, exp, hyp)

        res = mod_sel.run_grid_search(n_workers=80)
        # Close the dump file deterministically (the original leaked the handle).
        with open("monk3_validation_res.p", "wb") as dump_file:
            pickle.dump(res, dump_file)

        # Parameter sets whose average test score ties the best score.
        best_nets = [r["params"] for r in res["results_list"]
                     if r["avg_ts_score"] == res["best_score"][1]]
Ejemplo n.º 5
0
    def __init__(self,
                 task,
                 exp_settings=None,
                 hyper_param=None):
        """Bind a task plus experiment and hyper-parameter settings to this selector.

        Args:
            task: the Task (data + folds) this selection runs over.
            exp_settings: ExperimentSettings; a fresh default is built when None.
            hyper_param: HyperParameters; a fresh default is built when None.
        """
        # Build defaults per call instead of sharing one mutable instance
        # created at definition time (classic mutable-default pitfall).
        if exp_settings is None:
            exp_settings = ExperimentSettings()
        if hyper_param is None:
            hyper_param = HyperParameters()

        self.param = exp_settings.params
        self.hyper_parameters = hyper_param
        self.task = task
Ejemplo n.º 6
0
 def genhyp(optimizer):
     """Build a single-point hyper-parameter grid around the given optimizer.

     Reads start_x, start_y, ep, gradient_rule and bs from the enclosing
     scope, so the starting weight/bias and schedule are fixed per run.
     """
     weights = [np.asarray([[[start_x]]])]
     biases = [np.asarray([[start_y]])]
     return HyperParameters(init_weights=weights,
                            init_bias=biases,
                            epochs=[ep],
                            optimizer=[optimizer],
                            gradient_rule=[gradient_rule],
                            batch_size=[bs])
Ejemplo n.º 7
0
    def run():
        """Minimal grid-search smoke test on the Test1 dataset."""
        input_s, target_s, inner_dim, activ_fun = Test1.init()

        settings = ExperimentSettings()
        params = HyperParameters(inner_dimension=inner_dim,
                                 activation_function=activ_fun)
        folds = KFolds.cross_validation_folds(
            data_indexes=list(range(len(target_s))), folds_number=2)

        search = ModelSelection(Task(input_s, target_s, folds),
                                settings,
                                params)
        outcome = search.run_grid_search(n_workers=1)

        print(outcome)
        plt.show()
Ejemplo n.º 8
0
    def run_grid_search(self, n_workers=0, task=None):
        """Run the grid search over every hyper-parameter combination.

        Args:
            n_workers: number of parallel worker processes; any value <= 1
                runs the validations sequentially in this process.
            task: optional Task to search over; defaults to self.task.

        Returns:
            dict with "results_list" (one entry per combination),
            "best_score" as an (index, score) pair selected by
            self.param["select_function"], and the "task" that was used.

        Raises:
            ValueError: if the folds describe a double cross validation
                (nested KFolds), which plain grid search cannot handle.
        """
        task = self.task if task is None else task

        if isinstance(task.folds.folds[0].train_indexes, KFolds):
            raise ValueError(
                "You can't run gridsearch over double cross validation settings"
            )

        params = HyperParameters.extraploate_hyperparameters(
            self.hyper_parameters, self.param)

        # Assign each combination a plot name and record the mapping in a
        # legend file so plots can be traced back to their parameters.
        with open("plot_legend.txt", "w") as f:
            f.write("nome file\t->\tparametri\n")
            for i, p in enumerate(params):
                f.write("\nplot_" + str(i) + "\t->\t" + str(p) + "\n")
                params[i]["name"] = "plot_" + str(i)

        if n_workers > 1:
            # nodes - number (and potentially description) of workers; if not
            #         given, ProcessingPool autodetects the processors.
            pool = ProcessingPool(nodes=n_workers)

            # Validate every combination in parallel over the same task.
            results = pool.map(self.run_validation, params,
                               [task] * len(params))
        else:
            # Sequential fallback (n_workers <= 1); the original used a
            # needless enumerate whose index was discarded.
            results = [self.run_validation(par, task) for par in params]

        score_list = [r["avg_ts_score"] for r in results]
        # select_function decides the metric direction (e.g. min for losses,
        # max for accuracies).
        most_valuable_res = self.param["select_function"](score_list)
        return {
            "results_list": results,
            "best_score":
            (score_list.index(most_valuable_res), most_valuable_res),
            "task": task
        }
Ejemplo n.º 9
0
    def run():
        """Grid-search MONK 2 with SGD, pickle the results and evaluate the best nets.

        Results go to monk2_validation_res.p in the working directory.
        """
        input_s, target_s, test_input_s, test_target_s, inner_dim, activ_fun = MonkTest.init(
            2)

        exp = ExperimentSettings(
            performance_function=accuracy,
            select_function=max,  # higher accuracy is better
        )

        # Only this SGD instance is searched; the unused ADAM/ADAMAX
        # instances the original also built were dead locals and are gone.
        optimizer1 = OPTIMIZER_DICT['SGD'](lr=0.001,
                                           momentum=0.1,
                                           nesterov=True)

        hyp = HyperParameters(inner_dimension=inner_dim,
                              activation_function=activ_fun,
                              epochs=[500],
                              batch_size=[1],
                              optimizer=[optimizer1],
                              task_type=[TaskType.classification])

        fold_s = KFolds.cross_validation_folds(data_indexes=list(
            range(len(target_s))),
                                               folds_number=2)

        task = Task(input_s, target_s, fold_s)
        mod_sel = ModelSelection(task, exp, hyp)

        res = mod_sel.run_grid_search(n_workers=1)
        # Close the dump file deterministically (the original leaked the handle).
        with open("monk2_validation_res.p", "wb") as dump_file:
            pickle.dump(res, dump_file)

        # Parameter sets whose average test score ties the best score.
        best_nets = [r["params"] for r in res["results_list"]
                     if r["avg_ts_score"] == res["best_score"][1]]

        print("Evaluating best net:")
        evaluate_and_plot(input_s, target_s, test_input_s, test_target_s,
                          best_nets)
Ejemplo n.º 10
0
    def run():
        """Grid-search the CUP data over three optimizers, pickle and plot the best nets.

        Results go to cup_validation_res.p in the working directory.
        """
        input_s, target_s, test_input_s, test_target_s, inner_dim, activ_fun, \
            data_frame, test_data_frame = MicheliModelSelection.init()

        exp = ExperimentSettings(
            performance_function=LOSS_DICT["mee"].f,
            select_function=min  # lower MEE is better
        )

        optimizer = [OPTIMIZER_DICT['SGD'](), OPTIMIZER_DICT['ADAMAX'](), OPTIMIZER_DICT['ADAM']()]

        hyp = HyperParameters(
            inner_dimension=inner_dim,
            activation_function=activ_fun,
            epochs=[250],
            batch_size=[1],
            optimizer=optimizer,
            task_type=[TaskType.classification]
        )

        fold_s = KFolds.cross_validation_folds(data_indexes=list(range(len(target_s))), folds_number=2)

        task = Task(input_s, target_s, fold_s)
        mod_sel = ModelSelection(task, exp, hyp)

        res = mod_sel.run_grid_search(n_workers=1)
        # Close the dump file deterministically (the original leaked the handle).
        with open("cup_validation_res.p", "wb") as dump_file:
            pickle.dump(res, dump_file)

        # Parameter sets whose average test score ties the best score.
        best_nets = [r["params"] for r in res["results_list"]
                     if r["avg_ts_score"] == res["best_score"][1]]

        evaluate_and_plot(input_s, target_s, test_input_s, test_target_s, best_nets)

        # BUG(fixed): the original then computed
        #   score = exp.params["performance_function"](prediction, target_s)
        # but `prediction` was never defined in this scope, so the call raised
        # NameError before plt.show(). Removed until a prediction for the
        # training inputs is actually produced here.

        plt.show()
Ejemplo n.º 11
0
    def run():
        """Hold out the last 216 samples, train once with ADAMAX, plot both splits."""
        input_s, target_s, test_input_s, inner_dim, activ_fun, data_frame = MicheliDataset.init()

        exp = ExperimentSettings(
            performance_function=LOSS_DICT["mee"].f,
            select_function=min  # lower MEE is better
        )

        # Only ADAMAX is used; the SGD/ADAM instances the original also
        # constructed were dead locals and have been removed.
        optimizer3 = OPTIMIZER_DICT['ADAMAX']()

        hyp = HyperParameters(
            inner_dimension=inner_dim,
            activation_function=activ_fun,
            epochs=[250],
            batch_size=[10],
            optimizer=[optimizer3],
            task_type=[TaskType.regression],
            loss=[LOSS_DICT["mee"]],
            verbose=[1]
        )

        # Train on everything except the last 216 rows, validate on those.
        # NOTE(review): the trailing 'SGD' label does not match the ADAMAX
        # optimizer actually configured -- confirm what that argument means.
        res = NeuralNet.train_without_ms(input_s[:-216], target_s[:-216], input_s[-216:], target_s[-216:], hyp, exp, 'SGD')

        # TRAINING plots
        net = res['trained_net']

        prediction = net.predict(input_s[:-216])
        plot_mds_all(target_s[:-216], prediction, "prediction_all")

        # TEST plots (held-out tail of the training data)
        prediction_test = net.predict(input_s[-216:])
        plot_mds_all(target_s[-216:], prediction_test, "prediction_all_test")
Ejemplo n.º 12
0
    def run():
        """Compare SGD/ADAM/ADAMAX on a 1-D linear fit and generate field data plots."""
        tr_dim = 30
        ts_dim = 10

        # y = 2x + 3 over evenly spaced points in [0, 1).
        input_s = [[x / tr_dim] for x in range(tr_dim)]
        target_s = [[inp[0] * 2 + 3] for inp in input_s]

        test_input_s = [[x / ts_dim] for x in range(ts_dim)]
        test_target_s = [[inp[0] * 2 + 3] for inp in test_input_s]

        plot_sorted(input_s, target_s, "plot/plots/train")
        plot_sorted(test_input_s, test_target_s, "plot/plots/test")

        # Single linear unit so every optimizer solves the same convex fit.
        inner_dim = [[1]]
        activ_fun = [[ACTIVATION_DICT["linear"]]]

        optimizers = [
            OPTIMIZER_DICT['SGD'](lr=0.5),
            OPTIMIZER_DICT['ADAM'](lr=1),
            OPTIMIZER_DICT['ADAMAX'](lr=1),
        ]

        exp = ExperimentSettings()

        # One hyper-parameter set per optimizer; everything else is shared.
        # The original repeated this block three times verbatim.
        hyps = [
            HyperParameters(
                inner_dimension=inner_dim,
                activation_function=activ_fun,
                use_bias=[True],
                init_weights=[np.asarray([[[1.]]])],
                init_bias=[np.asarray([[1.]])],
                epochs=[200],
                optimizer=[opt],
                task_type=[TaskType.regression],
                batch_size=[10])
            for opt in optimizers
        ]

        # BUG(fixed): the original legend listed five labels for the three
        # configurations actually passed; it now matches hyps one-to-one.
        NeuralNet.generate_field_data(
            input_s,
            target_s,
            test_input_s,
            test_target_s,
            hyps,
            exp,
            legend=["GD", "ADAM", "ADAMAX"]
        )