Example #1
0
    def read_all_genotypes_per_generation(config_dir: str, results_dir: str,
                                          experiment_name: str):
        """Load every pickled individual/pattern of an experiment and group
        them into per-generation lists.

        Args:
            config_dir: Directory containing ``<experiment_name>.json``.
            results_dir: Root directory of experiment result folders.
            experiment_name: Name of the experiment to load.

        Returns:
            A ``Simulation`` wrapping the config, the full population, the
            population split by generation, the patterns, and the patterns
            split by generation.
        """
        import os, pickle

        print(f"Loading {experiment_name}")
        config = Configuration.from_json(os.path.join(
            config_dir, f"{experiment_name}.json"),
                                         validation=False)
        experiment_results = os.path.join(results_dir, experiment_name)

        # Read all genotypes first:
        population = []
        patterns = []
        files = find_filetype_recursivly(experiment_results, filetype=".obj")
        # Guard the progress percentage against an empty result directory
        # (the original divided by len(files) -> ZeroDivisionError).
        total = max(len(files), 1)
        for file_path in files:
            with open(file_path, "rb") as ptr:
                # NOTE(review): pickle.load is only safe on trusted, locally
                # produced experiment output — never on external data.
                obj = pickle.load(ptr)
            if isinstance(obj, Pattern):
                patterns.append(obj)
            else:
                population.append(obj)
            print(
                f" - Reading files, {(len(population) + len(patterns)) / total * 100:.0f}% complete"
                + "\t" * 3,
                end="\r",
            )
        print()

        # Map genotype -> generation. Build ID lookup tables once instead of
        # rescanning the whole population per identifier (was O(gens*ids*pop)).
        # setdefault keeps the FIRST individual per ID, matching the original
        # first-match-then-break behavior if IDs were ever duplicated.
        individuals_by_id = {}
        for ind in population:
            individuals_by_id.setdefault(ind.ID, ind)
        patterns_by_id = {}
        for pat in patterns:
            patterns_by_id.setdefault(pat.ID, pat)

        generations = []
        pattern_generations = []
        directory = os.path.join(experiment_results, "generations")
        gens = sorted(
            int(generation) for generation in os.listdir(directory)
            if isint(generation)
        )
        for gen in gens:
            gen_dir = os.path.join(directory, str(gen))
            generation = []
            p_generation = []
            for identifier in os.listdir(gen_dir):
                # Directory names are stored with stray quotes; strip them.
                identifier = identifier.strip("'")
                if identifier in individuals_by_id:
                    generation.append(individuals_by_id[identifier])
                elif identifier in patterns_by_id:
                    p_generation.append(patterns_by_id[identifier])
            generations.append(generation)
            pattern_generations.append(p_generation)
        print(" - population sorted into generations")
        return Simulation(config, population, generations, patterns,
                          pattern_generations)
    def setUpClass(cls):
        """Build a single pattern-net fixture and its serialized training args."""
        # Generate patterns and recombine them into exactly one network:
        cls.patterns = initialize_patterns(count=5)
        candidates = recombine.combine(cls.patterns,
                                       num_nets=1,
                                       min_size=3,
                                       max_size=5)
        cls.net = candidates[0]

        # Load the test configuration and mark it for pattern-net runs:
        cls.config = Configuration.from_json("./tests/fixtures/config.json")
        cls.config.type = "PatternNets"

        # Training parameters; server/device/job ids are all placeholder zeros.
        cls.epochs = 1
        placeholder_id = 0
        cls.training_args = (pickle.dumps(cls.net),
                             pickle.dumps(cls.config),
                             cls.epochs,
                             placeholder_id,
                             placeholder_id,
                             placeholder_id)
Example #3
0
    setproctitle.setproctitle("NAS-EVOLVE")
except ImportError:
    # setproctitle is an optional nicety (names the process for ps/top);
    # continue silently when it is not installed.
    pass
from firebase.upload import create_new_run, update_run

import src.single_net.main as ea_nas

if __name__ == "__main__":
    # Reading input arguments:
    # The program takes exactly one argument: the dataset config file path.
    # (The original tested `> 2`, so a *missing* argument crashed with an
    # IndexError below instead of the intended IOError.)
    if len(sys.argv) != 2:
        raise IOError("Program requires dataset config file.")
    if not os.path.isfile(sys.argv[1]):
        raise IOError("File {} does not exist!".format(sys.argv[1]))

    # Setting up config:
    config = Configuration.from_json(sys.argv[1])
    config.type = "ea-nas"
    run_id = create_new_run(config)
    if run_id:
        # Only override the results name when the upload service returned one.
        config.results_name = run_id
    print_config_stats(config)

    # Running the algorithm:
    # Track a coarse outcome label for the run.
    status = "Running"
    try:
        ea_nas.run(config)
        status = "Finished"
    except KeyboardInterrupt:
        # Manual stop (Ctrl-C) is an expected way to end a run.
        status = "Closed"
    except Exception as e:
        # NOTE(review): `status` and `e` are presumably reported after this
        # block (view is truncated here) — confirm the exception is logged.
        status = "Crashed"