Example #1
    def test_play(self):
        population = self.tree_generator.init()

        # with cache
        start_time = time.time()
        details = {
            "population": population,
            "functions": self.functions,
            "evaluate": evaluate,
            "selection": self.selection,
            "crossover": self.crossover,
            "mutation": self.mutation,
            "config": self.config
        }
        play.play(details)
        end_time = time.time()
        print("GP run with cache: %2.2fsec\n" % (end_time - start_time))

        # without cache: disable the evaluator cache for the second run
        # (assumes the {"evaluator": {"use_cache": ...}} config layout;
        # note the already-evolved population from the first run is reused)
        self.config.setdefault("evaluator", {})["use_cache"] = False
        start_time = time.time()
        details = {
            "population": population,
            "functions": self.functions,
            "evaluate": evaluate,
            "selection": self.selection,
            "crossover": self.crossover,
            "mutation": self.mutation,
            "config": self.config
        }
        play.play(details)
        end_time = time.time()
        print("GP run without cache: %2.2fsec\n" % (end_time - start_time))
Example #2
        population = generator.init()

        start_time = time.time()
        details = play.play_details(
            population=population,
            evaluate=evaluate,
            functions=functions,
            selection=selection,
            crossover=crossover,
            mutation=mutation,
            print_func=print_func,
            stop_func=default_stop_func,
            config=config,
            editor=edit_trees,
        )
        play.play(details)

        import networkx as nx
        import matplotlib.pyplot as plt
        best = population.best_individuals[0]

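        # build a directed graph of the best individual's tree for plotting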
        graph = nx.DiGraph()
        traverse_tree(best.root, graph)
        labels = {n: d["label"] for n, d in graph.nodes(data=True)}

        # nx.graphviz_layout was removed in networkx 2.x; use nx.nx_agraph.graphviz_layout there
        pos = nx.graphviz_layout(graph, prog='dot')
        nx.draw(
            graph,
            pos,
            with_labels=True,
            labels=labels,
        )
        plt.show()
Example #3
        population = generator.init()

        start_time = time.time()
        details = play.play_details(
            population=population,
            evaluate=evaluate,
            functions=functions,
            selection=selection,
            crossover=crossover,
            mutation=mutation,
            print_func=print_func,
            stop_func=default_stop_func,
            config=config,
            editor=edit_trees,
        )
        play.play(details)

        import networkx as nx
        import matplotlib.pyplot as plt
        best = population.best_individuals[0]

        graph = nx.DiGraph()
        traverse_tree(best.root, graph)
        labels = {n: d["label"] for n, d in graph.nodes(data=True)}

        pos = nx.graphviz_layout(graph, prog='dot')
        nx.draw(graph,
                pos,
                with_labels=True,
                labels=labels,
                arrows=False)
        plt.show()
Example #4
def gp_predict(train_data, test_data, train_cat, xx, yy):
    # setup
    config = {
        "max_population": 800,
        "max_generation": 30,
        "stale_limit": 10,

        "tree_generation": {
            "tree_type": "CLASSIFICATION_TREE",
            "method": "RAMPED_HALF_AND_HALF_METHOD",
            "depth_ranges": [
                {"size": 1, "percentage": 1.0}
            ]
        },

        "evaluator": {"use_cache": True},

        "selection": {
            "method": "TOURNAMENT_SELECTION",
            "tournament_size": 100
        },

        "crossover": {
            "method": "POINT_CROSSOVER",
            "probability": 0.8
        },

        "mutation": {
            "methods": [
                "SUBTREE_MUTATION"
            ],
            "probability": 0.8
        },

        "function_nodes": [
            {
                "type": "CLASS_FUNCTION",
                "name": "GREATER_THAN",
                "arity": 2,

                "data_range": {
                    "lower_bound": -1.0,
                    "upper_bound": 1.0,
                    "decimal_places": 2,
                }
            },
            {
                "type": "CLASS_FUNCTION",
                "name": "LESS_THAN",
                "arity": 2,

                "data_range": {
                    "lower_bound": -1.0,
                    "upper_bound": 1.0,
                    "decimal_places": 2,
                }
            },
            {
                "type": "CLASS_FUNCTION",
                "name": "EQUALS",
                "arity": 2,

                "data_range": {
                    "lower_bound": -1.0,
                    "upper_bound": 1.0,
                    "decimal_places": 2
                }
            }
        ],

        "terminal_nodes": [
            {
                "type": "RANDOM_CONSTANT",
                "name": "category",
                "range": [
                    0.0,
                    1.0
                ]
            },
        ],

        "class_attributes": [
            "x",
            "y"
        ],

        "input_variables": [
            {"name": "x"},
            {"name": "y"}
        ],
        "response_variables": [{"name": "category"}]
    }

    # load data
    config["data"] = {}
    config["data"]["rows"] = len(train_data)
    config["data"]["x"] = []
    config["data"]["y"] = []
    config["data"]["category"] = train_cat
    for row in train_data:
        config["data"]["x"].append(row[0])
        config["data"]["y"].append(row[1])

    functions = GPFunctionRegistry("CLASSIFICATION")
    generator = TreeGenerator(config)

    # genetic operators
    selection = Selection(config)
    crossover = TreeCrossover(config)
    mutation = TreeMutation(config)

    # run the GP search over classification trees
    population = generator.init()

    details = play.play_details(
        population=population,
        evaluate=evaluate,
        functions=functions,
        selection=selection,
        crossover=crossover,
        mutation=mutation,
        print_func=print_func,
        stop_func=default_stop_func,
        config=config,
        editor=edit_trees,
    )
    play.play(details)

    best_tree = population.best_individuals[0]
    # gp_plot_dt(best_tree, True)

    # load test data
    config["data"] = {}
    config["data"]["rows"] = len(test_data)
    config["data"]["x"] = []
    config["data"]["y"] = []
    for row in test_data:
        config["data"]["x"].append(row[0])
        config["data"]["y"].append(row[1])

    # predict
    predicted = gp_eval.predict_tree(best_tree, functions, config)

    # load the mesh-grid points used to draw the decision contour
    config["data"] = {}
    config["data"]["rows"] = xx.shape[0] * xx.shape[1]
    config["data"]["x"] = np.reshape(xx, xx.shape[0] * xx.shape[1])
    config["data"]["y"] = np.reshape(yy, yy.shape[0] * yy.shape[1])

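    # evaluate the best tree at every grid point, then reshape back to the grid for contour plotting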
    contour = gp_eval.predict_tree(best_tree, functions, config)
    contour = np.array(contour)
    contour = contour.reshape(xx.shape)

    return predicted, contour
Example #5
def gp_benchmark_loop(config):
    try:
        # setup
        random.seed(config["random_seed"])  # VERY IMPORTANT!
        load_data(config, config["call_path"])
        json_store = JSONStore(config)
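        # json_store acts as the recorder passed to the genetic operators below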
        # functions = GPFunctionRegistry("SYMBOLIC_REGRESSION")
        generator = TreeGenerator(config)

        # genetic operators
        selection = Selection(config, recorder=json_store)
        crossover = TreeCrossover(config, recorder=json_store)
        mutation = TreeMutation(config, recorder=json_store)

        # setup the initial random population
        population = generator.init()

        # create play details
        details = play.play_details(
            population=population,
            functions=config["functions"],
            evaluate=evaluate,
            selection=selection,
            crossover=crossover,
            mutation=mutation,
            editor=edit_trees,
            stop_func=default_stop_func,
            # print_func=print_func,
            config=config,
            recorder=json_store)

        # run symbolic regression
        start_time = time.time()
        play.play(details)
        end_time = time.time()
        time_taken = end_time - start_time

        # print msg
        print("DONE -> pop: {0} cross: {1} mut: {2} seed: {3} [{4}s]".format(
            config["max_population"], config["crossover"]["probability"],
            config["mutation"]["probability"], config["random_seed"],
            round(time_taken, 2)))

        # log on completion
        if config.get("log_path", False):
            config.pop("data")
            msg = {
                "timestamp": time.mktime(datetime.now().timetuple()),
                "status": "DONE",
                "config": config,
                "runtime": time_taken,
                "best_score": population.find_best_individuals()[0].score,
                "best": str(population.find_best_individuals()[0])
            }
            log_path = os.path.expandvars(config["log_path"])
            with open(log_path, "a+") as log_file:
                log_file.write(json.dumps(msg) + "\n")

    except Exception as err_msg:
        import traceback
        traceback.print_exc()

        # log exception
        if config.get("log_path", False):
            msg = {
                "timestamp": time.mktime(datetime.now().timetuple()),
                "status": "ERROR",
                "config": config,
                "error": str(err_msg)  # exceptions are not JSON serializable
            }
            log_path = os.path.expandvars(config["log_path"])
            with open(log_path, "a+") as log_file:
                log_file.write(json.dumps(msg) + "\n")

        raise  # raise the exception

    return config
Example #6
def gp_predict(train_data, test_data, train_cat, xx, yy):
    # setup
    config = {
        "max_population":
        800,
        "max_generation":
        30,
        "stale_limit":
        10,
        "tree_generation": {
            "tree_type": "CLASSIFICATION_TREE",
            "method": "RAMPED_HALF_AND_HALF_METHOD",
            "depth_ranges": [{
                "size": 1,
                "percentage": 1.0
            }]
        },
        "evaluator": {
            "use_cache": True
        },
        "selection": {
            "method": "TOURNAMENT_SELECTION",
            "tournament_size": 100
        },
        "crossover": {
            "method": "POINT_CROSSOVER",
            "probability": 0.8
        },
        "mutation": {
            "methods": ["SUBTREE_MUTATION"],
            "probability": 0.8
        },
        "function_nodes": [{
            "type": "CLASS_FUNCTION",
            "name": "GREATER_THAN",
            "arity": 2,
            "data_range": {
                "lower_bound": -1.0,
                "upper_bound": 1.0,
                "decimal_places": 2,
            }
        }, {
            "type": "CLASS_FUNCTION",
            "name": "LESS_THAN",
            "arity": 2,
            "data_range": {
                "lower_bound": -1.0,
                "upper_bound": 1.0,
                "decimal_places": 2,
            }
        }, {
            "type": "CLASS_FUNCTION",
            "name": "EQUALS",
            "arity": 2,
            "data_range": {
                "lower_bound": -1.0,
                "upper_bound": 1.0,
                "decimal_places": 2
            }
        }],
        "terminal_nodes": [
            {
                "type": "RANDOM_CONSTANT",
                "name": "category",
                "range": [0.0, 1.0]
            },
        ],
        "class_attributes": ["x", "y"],
        "input_variables": [{
            "name": "x"
        }, {
            "name": "y"
        }],
        "response_variables": [{
            "name": "category"
        }]
    }

    # load data
    config["data"] = {}
    config["data"]["rows"] = len(train_data)
    config["data"]["x"] = []
    config["data"]["y"] = []
    config["data"]["category"] = train_cat
    for row in train_data:
        config["data"]["x"].append(row[0])
        config["data"]["y"].append(row[1])

    functions = GPFunctionRegistry("CLASSIFICATION")
    generator = TreeGenerator(config)

    # genetic operators
    selection = Selection(config)
    crossover = TreeCrossover(config)
    mutation = TreeMutation(config)

    # run the GP search over classification trees
    population = generator.init()

    details = play.play_details(
        population=population,
        evaluate=evaluate,
        functions=functions,
        selection=selection,
        crossover=crossover,
        mutation=mutation,
        print_func=print_func,
        stop_func=default_stop_func,
        config=config,
        editor=edit_trees,
    )
    play.play(details)

    best_tree = population.best_individuals[0]
    # gp_plot_dt(best_tree, True)

    # load test data
    config["data"] = {}
    config["data"]["rows"] = len(test_data)
    config["data"]["x"] = []
    config["data"]["y"] = []
    for row in test_data:
        config["data"]["x"].append(row[0])
        config["data"]["y"].append(row[1])

    # predict
    predicted = gp_eval.predict_tree(best_tree, functions, config)

    # load the mesh-grid points used to draw the decision contour
    config["data"] = {}
    config["data"]["rows"] = xx.shape[0] * xx.shape[1]
    config["data"]["x"] = np.reshape(xx, xx.shape[0] * xx.shape[1])
    config["data"]["y"] = np.reshape(yy, yy.shape[0] * yy.shape[1])

    contour = gp_eval.predict_tree(best_tree, functions, config)
    contour = np.array(contour)
    contour = contour.reshape(xx.shape)

    return predicted, contour
Example #7
def gp_benchmark_loop(config):
    try:
        # setup
        random.seed(config["random_seed"])  # VERY IMPORTANT!
        load_data(config, config["call_path"])
        json_store = JSONStore(config)
        # functions = GPFunctionRegistry("SYMBOLIC_REGRESSION")
        generator = TreeGenerator(config)

        # genetic operators
        selection = Selection(config, recorder=json_store)
        crossover = TreeCrossover(config, recorder=json_store)
        mutation = TreeMutation(config, recorder=json_store)

        # setup the initial random population
        population = generator.init()

        # create play details
        details = play.play_details(
            population=population,
            functions=config["functions"],
            evaluate=evaluate,
            selection=selection,
            crossover=crossover,
            mutation=mutation,
            editor=edit_trees,
            stop_func=default_stop_func,
            # print_func=print_func,
            config=config,
            recorder=json_store
        )

        # run symbolic regression
        start_time = time.time()
        play.play(details)
        end_time = time.time()
        time_taken = end_time - start_time

        # print msg
        print(
            "DONE -> pop: {0} cross: {1} mut: {2} seed: {3} [{4}s]".format(
                config["max_population"],
                config["crossover"]["probability"],
                config["mutation"]["probability"],
                config["random_seed"],
                round(time_taken, 2)
            )
        )

        # log on completion
        if config.get("log_path", False):
            config.pop("data")
            msg = {
                "timestamp": time.mktime(datetime.now().timetuple()),
                "status": "DONE",
                "config": config,
                "runtime": time_taken,
                "best_score": population.find_best_individuals()[0].score,
                "best": str(population.find_best_individuals()[0])
            }
            log_path = os.path.expandvars(config["log_path"])
            with open(log_path, "a+") as log_file:
                log_file.write(json.dumps(msg) + "\n")

    except Exception as err_msg:
        import traceback
        traceback.print_exc()

        # log exception
        if config.get("log_path", False):
            msg = {
                "timestamp": time.mktime(datetime.now().timetuple()),
                "status": "ERROR",
                "config": config,
                "error": str(err_msg)  # exceptions are not JSON serializable
            }
            log_path = os.path.expandvars(config["log_path"])
            with open(log_path, "a+") as log_file:
                log_file.write(json.dumps(msg) + "\n")

        raise  # raise the exception

    return config