Exemple #1
0
    "abcde", "", "E", "Hi", "Tom", "leprechaun", "zoomzoomzoom",
    "qwertyuiopasd", "GallopTrotCanter", "Quinona", "_abc"
]).reshape(-1, 1)
# Target outputs: one single-element row per training case; each case in X is
# a length-1 array holding the input value.
y = np.array([[target_function(case[0])] for case in X])

# Core instructions for the str and int stacks only.
_core_instructions = InstructionSet().register_core_by_stack({"str", "int"})


def _random_small_int():
    """ERC generator: uniform random integer in [0, 10]."""
    return random.randint(0, 10)


# Gene spawner: one input, no constant literals, small-int ERCs.
spawner = GeneSpawner(
    n_inputs=1,
    instruction_set=_core_instructions,
    literals=[],
    erc_generators=[_random_small_int],
)

if __name__ == "__main__":
    # Hyper-parameters for this run, gathered in one place.
    run_config = dict(
        spawner=spawner,
        population_size=300,
        max_generations=30,
        initial_genome_size=(10, 50),
        simplification_steps=500,
        parallelism=False,
        verbose=1,
    )
    est = PushEstimator(**run_config)

    # Evolve against the training data.
    est.fit(X=X, y=y)

    # Report the simplified best program and its per-case training errors.
    for label, value in (
        ("Best program found:", est.solution.program.pretty_str()),
        ("Errors:", est.score(X, y)),
    ):
        print(label)
        print(value)
Exemple #2
0
# Show which instructions the custom set exposes.
print(instruction_set.keys())

# Spawner sampling from the custom instruction set, with 2.0 as the only
# constant literal and no ephemeral random constants.
spawner = GeneSpawner(
    instruction_set=instruction_set,
    erc_generators=[],
    literals=[2.0],
)


# Estimator wired to a custom interpreter built from the same instruction set.
est = PushEstimator(
    spawner=spawner,
    interpreter=PushInterpreter(instruction_set),
    population_size=500,
    max_generations=20,
    simplification_steps=1000,
    verbose=2,
)


if __name__ == "__main__":
    # Route run logs to stdout so evolution progress is visible.
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s %(levelname)s %(message)s",
        stream=sys.stdout
    )
    est.fit(X, y)
    # Use the public `solution` attribute (as the other examples do) instead
    # of reaching into the private `_result` attribute.
    print(est.solution.program)
    print(est.predict(X))
    print(est.score(X, y))
Exemple #3
0
if __name__ == "__main__":
    est = PushEstimator(spawner=spawner,
                        simplification_steps=2000,
                        parallelism=False,
                        verbose=2)

    # Kick off the PushGP run: `.fit` evolves programs against the training
    # data, printing progress information as it goes.  After it prints
    # "End Run," it states whether or not a solution was found.
    # (These explanations were bare string literals — evaluated and discarded
    # at runtime — now converted to real comments.)
    est.fit(X=X, y=y)
    print()

    # `solution.program.pretty_str()` renders the best Push program found:
    # the exact solution if one exists, otherwise the best approximation.
    print("Best program found:")
    print(est.solution.program.pretty_str())

    # Score on held-out test data; an all-zero error vector means the evolved
    # program generalizes.
    print("Test errors:")
    print(est.score(testX, testy))
Exemple #4
0
         [equal_vectors() for _ in range(100)] + \
         [random_vectors() for _ in range(100)]
# Expected outputs for the held-out cases: one single-element row per test
# pair, computed from the two inputs of each case.
y_test = [[target_function(x[0], x[1])] for x in X_test]

# Gene spawner for the two-input vector problem.
spawner = GeneSpawner(
    n_inputs=2,
    instruction_set=InstructionSet().register_core_by_stack(
        {"int", "bool", "vector_int", "exec"}),
    # NOTE(review): these string literals look copied from a string-printing
    # example; no "str"/"char" stack is registered here, so confirm they are
    # intentional.
    literals=[" ", "\n"],
    erc_generators=[
        # ERC generator yielding a random boolean (True with probability 0.5).
        lambda: random.random() < 0.5,
    ],
)

if __name__ == "__main__":
    est = PushEstimator(search="GA",
                        population_size=500,
                        max_generations=150,
                        spawner=spawner,
                        simplification_steps=100,
                        verbose=2)

    # perf_counter is monotonic and high-resolution, so the measured runtime
    # is immune to system clock adjustments (unlike time.time()).
    start = time.perf_counter()
    est.fit(X=X_train, y=y_train)
    end = time.perf_counter()

    # Post-evolution summary: wall-clock runtime and total test error.
    print("========================================")
    print("post-evolution stats")
    print("========================================")
    print("Runtime: ", end - start)
    print("Test Error: ", np.sum(est.score(X_test, y_test)))
Exemple #5
0

# Spawner for a printing problem: text-oriented stacks plus stdout, space and
# newline as constant literals, and random characters as ERCs.
_stacks = {"int", "bool", "string", "char", "exec", "stdout"}
spawner = GeneSpawner(
    n_inputs=1,
    instruction_set=InstructionSet().register_core_by_stack(_stacks),
    literals=[" ", "\n"],
    erc_generators=[random_char],
)

if __name__ == "__main__":
    est = PushEstimator(
        search="GA",
        population_size=500,
        max_generations=150,
        spawner=spawner,
        simplification_steps=100,
        last_str_from_stdout=True,
        parallelism=True,
        verbose=2,
    )

    # Time the evolutionary run.
    start = time.time()
    est.fit(X=X_train, y=y_train)
    elapsed = time.time() - start

    # Summarize the run: errors, wall time, generations, and best genome.
    report = (
        ("train_error: ", est.solution.total_error),
        ("test_error: ", np.sum(est.score(X_test, y_test))),
        ("runtime: ", elapsed),
        ("final_generation: ", est.search.generation),
        ("best_genome: ", est.solution.genome),
    )
    for label, value in report:
        print(label, value)
Exemple #6
0
and then specify that the tap should print before each step by implementing the `pre` method of `Tap`.
"""


class MyCustomTap(Tap):
    """Tap that reports population statistics ahead of every search step."""

    def pre(self, id: str, args, kwargs, obj=None):
        """Print stats for the current best individual before the step runs."""
        algorithm = args[0]
        champion = algorithm.population.best()
        print()
        print("Generation:", algorithm.generation)
        print("Best Program:", champion.program.pretty_str())
        print("Best Error Vector:", champion.error_vector)
        print("Best Total Error:", champion.total_error)


# Attach the tap so its pre/post hooks fire around every
# SearchAlgorithm.step call during evolution.
TapManager.register("pyshgp.gp.search.SearchAlgorithm.step", MyCustomTap())
"""
Now let's kick off our PushGP run!

We will call the estimator's `.fit` method on our training data, and then call `.score` on our 
test data. If the test errors are all zero, we found a generalizing solution!
"""

if __name__ == "__main__":
    # Evolve on the training split, then report the program and test errors.
    est.fit(X=x_train, y=y_train)
    best_found = est.solution

    program_text = best_found.program.code.pretty_str()
    test_errors = est.score(x_test, y_test)
    print("Program:\n", program_text)
    print("Test errors:\n", test_errors)