def test_init_from_example_configs(self, tmpdir):
    current_directory = os.path.dirname(os.path.realpath(__file__))
    example_conf_path = os.path.join(current_directory, "..", "configurations")
    for conf_name in os.listdir(example_conf_path):
        print("Next configuration: " + conf_name)
        path = os.path.join(example_conf_path, conf_name)
        if os.path.isdir(path):
            print("Skipping, because it's a directory")
            continue
        if conf_name == "temp.json":
            print("Skipping, because it's a temp file")
            continue
        if "design" in conf_name:
            with open(path, "r") as read_file:
                design_space = json.load(read_file)
            c = ConfigReader.config_from_dict(sample_from_design_space(design_space))
        else:
            c = ConfigReader.config_from_file(path)
        if c.environment in ["ReacherMemory-v0"]:
            print("Skipping, because it's a Mujoco environment")
            continue
        Experiment(configuration=c,
                   result_path=tmpdir,
                   from_checkpoint=None,
                   processing_framework="mp")
def test_cnn_init_exp(self, tmpdir):
    config_location = os.path.join(os.getcwd(), "../configurations/cnn_ctrnn.json")
    config = ConfigReader.config_from_file(config_location)
    Experiment(configuration=config,
               result_path=tmpdir,
               from_checkpoint=None,
               processing_framework="dask")
def test_basic_init(self, tmpdir):
    config_location = os.path.join(os.getcwd(), "tests/basic_test_config.json")
    config = ConfigReader.config_from_file(config_location)
    assert config.brain.number_neurons == 2
    Experiment(configuration=config,
               result_path=tmpdir,
               from_checkpoint=None,
               processing_framework="dask")
def test_basic_init(self):
    config_location = os.path.join(os.getcwd(), "basic_test_config.json")
    config = ConfigReader.config_from_file(config_location)
    assert config.brain.number_neurons == 2
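# --- Hedged sketch (not part of the test above): reading the same value straight
# --- from the JSON file. The nested "brain"/"number_neurons" keys are an
# --- assumption inferred from the config.brain.number_neurons attribute path.
import json
import os

with open(os.path.join(os.getcwd(), "basic_test_config.json"), "r") as read_file:
    raw_config = json.load(read_file)
assert raw_config["brain"]["number_neurons"] == 2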
        # complex type and default
        self.add_argument("--result_path", type=os.path.abspath,
                          default=os.path.join("..", "CTRNN_Simulation_Results", "data",
                                               datetime.now().strftime("%Y-%m-%d_%H-%M-%S")))
        return self


if __name__ == "__main__":  # pragma: no cover
    """Everything outside this block is executed by every worker thread, while this
    block only runs on the main thread. Every object that is later passed to a worker
    must be pickle-able, which is why we initialise everything that is not pickle-able
    before this point. In particular, the DEAP toolbox's creator object is not pickle-able.
    """
    os.environ["CUDA_VISIBLE_DEVICES"] = ""
    args = TrainArgs(underscores_to_dashes=True).parse_args()

    experiment = Experiment(configuration=ConfigReader.config_from_file(args.configuration),
                            result_path=args.result_path,
                            from_checkpoint=args.from_checkpoint,
                            processing_framework=args.processing_framework,
                            number_of_workers=args.num_workers,
                            reset_hof=args.reset_hof,
                            write_final_checkpoint=args.write_final_checkpoint)

    os.mkdir(args.result_path)
    experiment.run()
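# --- Illustration (not part of train.py): the pickling constraint the docstring
# --- above describes. A minimal, self-contained sketch; the class names follow
# --- DEAP conventions and are not taken from this repository's code.
import pickle

from deap import base, creator

# Classes registered via creator.create() exist only in processes that executed
# this registration; workers must therefore register them before unpickling.
creator.create("FitnessMax", base.Fitness, weights=(1.0,))
creator.create("Individual", list, fitness=creator.FitnessMax)

individual = creator.Individual([0.1, 0.2, 0.3])
payload = pickle.dumps(individual)

# In a process without the creator.create() calls above, this load would raise
# an AttributeError because pickle cannot resolve creator.Individual.
restored = pickle.loads(payload)
assert list(restored) == [0.1, 0.2, 0.3]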
"resources/kanbans", description= "Operations related to kanban boards located in management module", ) kanban_model = api.model( "Kanban Model", { "name": fields.String(required=True, description="Kanban name"), "description": fields.String(required=True, description="Kanban description"), }, ) # Config file reader (from env variable) config = ConfigReader() @ns.route("/") class KanbansAll(Resource): """ Endpoints for kanbans """ @api.response(200, "Kanban boards fetched") @api.response(500, "Could not fetch Kanban boards info") def get(self): """Returns all kanban boards with info""" kanban_finder = KanbanFinder() logger.info("Fetching all kanbans info") try: all_kanbans_info_list = kanban_finder.return_all_kanabans_info( kanbans_directory=config.kanbans_directory) print(all_kanbans_info_list)
        # positional argument:
        self.add_argument("dir")
        return self


args = RenderArgs(underscores_to_dashes=True).parse_args()
logging.basicConfig(format="%(levelname)s: %(message)s", level=logging.INFO)

try:
    with open(os.path.join(args.dir, "Log.pkl"), "rb") as read_file_log:
        log = pickle.load(read_file_log)
except:
    # fall back to the JSON log if the pickled log is missing or cannot be read
    with open(os.path.join(args.dir, "Log.json"), "r") as read_file_log:
        log = json.load(read_file_log)

config = ConfigReader.config_from_file(os.path.join(args.dir, "Configuration.json"))

experiment = Experiment(configuration=config,
                        result_path="/tmp/not-used",
                        from_checkpoint=None,
                        processing_framework="sequential")

with open(os.path.join(args.dir, "HallOfFame.pickle"), "rb") as read_file_hof:
    # creator is needed to unpickle the HOF
    # creator is registered when loading the experiment
    try:
        hall_of_fame = pickle.load(read_file_hof)
    except AttributeError:
        # workaround to render experiments that were created before this PR was merged:
        # https://github.com/neuroevolution-ai/NeuroEvolution-CTRNN_new/pull/48
        # feel free to remove this workaround when experiments from before February 2021
        # are no longer relevant
        creator.create("FitnessMax", base.Fitness, weights=(1.0,))
def config() -> ExperimentCfg:
    current_directory = os.path.dirname(os.path.realpath(__file__))
    config_location = os.path.join(current_directory, "basic_test_config.json")
    global_config = ConfigReader.config_from_file(config_location)
    return global_config
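# --- Hypothetical usage sketch (not one of the repo's tests): how a test would
# --- consume the fixture above, assuming it is registered with @pytest.fixture
# --- (e.g. in conftest.py). The asserted value mirrors the existing tests.
def test_config_fixture_loads_basic_config(config):
    assert config.brain.number_neurons == 2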