def save_results(R_dict, individuals_fitness_per_generation):
    """Persist GA-multinode ensemble results and per-generation fitness.

    Parameters
    ----------
    R_dict : dict
        Ensemble evaluation results, stored through the results manager.
    individuals_fitness_per_generation : list
        Per-generation fitness records, pickled next to the results.

    Side effects: writes an entry into the shared ``metadata.json`` and a
    pickle file under ``$FCM/<results_loc>``.
    """
    meta_data_file = os.path.join(os.environ['FCM'], 'Examples', 'compute',
                                  'genetic_algorithm_multinode', 'results',
                                  'metadata.json')
    # 10**16 keeps both randint bounds integral -- a float upper bound
    # (the original 1e16) is a TypeError on Python >= 3.12.  Renamed from
    # `id` to avoid shadowing the builtin.
    result_id = str(random.randint(0, 10**16))
    results_loc = os.path.join(
        'Examples/compute/genetic_algorithm_multinode/results',
        main.args.dataset, result_id)
    comments = main.args.comment
    meta_data_result = manager_results.metadata_template(
        result_id, main.args.dataset, results_loc, comments)
    params = main.args.__dict__
    # Save dictionary of ensemble results
    manager_results.save_results(meta_data_file, meta_data_result, params,
                                 R_dict)
    # Additionally save ensemble results per generation (list)
    io.save_pickle(
        os.path.join(os.environ['FCM'], results_loc,
                     'individuals_fitness_per_generation.pkl'),
        individuals_fitness_per_generation)
# NOTE(review): this fragment references `ic0`/`c0`/`records` from an
# enclosing loop that is not visible here -- indentation reconstructed;
# confirm the save block's nesting level against the original script.

# Sweep the threshold th0 and every later model c1, evaluating the
# two-model chain [c0, c1] at each threshold.
for th0 in np.arange(0, 1 + step_th, step_th):
    for ic1 in range(ic0 + 1, len(models_paths)):
        c1 = models_paths[ic1]
        r = build_evaluate_chain([c0, c1], [th0])
        sysid = "%s-%f-%s" % (c0.split('/')[-1], th0, c1.split('/')[-1])
        print(sysid)
        records[sysid] = r

# Create the metadata entry
meta_data_file = os.path.join(os.environ['FCM'], 'Examples', 'compute',
                              'fully_connected_chain', 'results',
                              'metadata.json')
# 10**16 keeps both randint bounds integral -- a float upper bound (the
# original 1e16) is a TypeError on Python >= 3.12.  Renamed from `id`
# to avoid shadowing the builtin.
result_id = str(random.randint(0, 10**16))
results_loc = os.path.join('Examples/compute/fully_connected_chain/results',
                           dataset, result_id)
meta_data_result = manager_results.metadata_template(result_id, dataset,
                                                     results_loc,
                                                     args.comment)
# Get the dictionary of params
params = args.__dict__
# Save the results in the dataset folder, then reset the accumulator
# for the next sweep.
manager_results.save_results(meta_data_file, meta_data_result, params,
                             records)
records = {}
# Report progress for the current GA generation and persist results.
hvolume_current = compute_hvolume(obj)

# Info about current generation
print("Generation %d" % iteration)
print("Hyper-volume %f" % hvolume_current)
print("TIME: Seconds per generation: %f " % (time.time() - start))

# Save the results (local import kept in place to match original behavior)
import Examples.metadata_manager_results as manager_results
meta_data_file = os.path.join(os.environ['FCM'], 'Examples', 'compute',
                              'bagging_boosting_of_chains_GA', 'results',
                              'metadata.json')
# 10**16 keeps both randint bounds integral -- a float upper bound (the
# original 1e16) is a TypeError on Python >= 3.12.  Renamed from `id`
# to avoid shadowing the builtin.
result_id = str(random.randint(0, 10**16))
results_loc = os.path.join(
    'Examples/compute/bagging_boosting_of_chains_GA/results',
    main.args.dataset, result_id)
comments = main.args.comment
meta_data_result = manager_results.metadata_template(
    result_id, main.args.dataset, results_loc, comments)

# Save the ensemble evaluation results, merged into the accumulated
# results of previous generations.
R_dict_old.update(R_dict)
params = main.args.__dict__
manager_results.save_results(meta_data_file, meta_data_result, params,
                             R_dict_old)
io.save_pickle(
    os.path.join(os.environ['FCM'], results_loc,
                 'individuals_fitness_per_generation.pkl'),
    individuals_fitness_per_generation)
# NOTE(review): the first statements appear to be the body of an enclosing
# `for m in ...` loop whose header is not visible here -- indentation
# reconstructed; confirm nesting against the original script.
# NOTE(review): `sys` here is a project system object and `eval` a project
# evaluator module -- both shadow Python builtins/stdlib, but they are
# defined outside this fragment and cannot be renamed safely from here.
file = m
model = make.make_classifier(m, file)
sys.add_classifier(model)
classifiers_ids.append(model.id)

# Combine all registered classifiers under a single merger node and
# evaluate the assembled system on the test phase.
merger = make.make_merger("MERGER", classifiers_ids, merge_type=protocol)
sys.add_merger(merger)
sys.set_start(merger.id)
r = eval.evaluate(sys, sys.get_start(), phases=["test"])
results[generate_system_id(sys)] = r

# Save the evaluation results
import Examples.metadata_manager_results as manager_results
meta_data_file = os.path.join(os.environ['FCM'], 'Examples', 'compute',
                              'merger_combinations', 'results',
                              'metadata.json')
# 10**16 keeps both randint bounds integral -- a float upper bound (the
# original 1e16) is a TypeError on Python >= 3.12.  Renamed from `id`
# to avoid shadowing the builtin.
result_id = str(random.randint(0, 10**16))
results_loc = os.path.join('Examples/compute/merger_combinations/results',
                           dataset, result_id)
meta_data_result = manager_results.metadata_template(result_id, dataset,
                                                     results_loc, "")
# Save the ensemble evaluation results
params = args.__dict__
manager_results.save_results(meta_data_file, meta_data_result, params,
                             results)