def test_load_reload_huge(self, all_gps):
    """
    Test that a GP whose kernel matrices exceed the per-file size limit can
    be written to disk (pickle and json) and reloaded intact.

    :param all_gps: fixture dict of pre-built GaussianProcess objects,
        keyed by the multihyps flag.
    :return: None
    """
    test_gp = deepcopy(all_gps[False])
    test_gp.set_L_alpha()
    dummy_gp = deepcopy(test_gp)
    N_data = len(dummy_gp.training_data)
    # Reference copies of the kernel matrices to compare after reload.
    prev_ky_mat = deepcopy(dummy_gp.ky_mat)
    prev_l_mat = deepcopy(dummy_gp.l_mat)

    for model_format in ["pickle", "json"]:
        # A limit of N_data - 1 forces the "huge" code path, where ky_mat
        # is written to a separate .npy file instead of inline.
        dummy_gp.write_model("test_gp_write", model_format, N_data - 1)
        new_gp = GaussianProcess.from_file(f"test_gp_write.{model_format}")
        assert np.allclose(prev_ky_mat, new_gp.ky_mat)
        assert np.allclose(prev_l_mat, new_gp.l_mat)
        # The reloaded GP must own its training data, not alias the original.
        assert new_gp.training_data is not test_gp.training_data
        os.remove(f"test_gp_write.{model_format}")
        # Reset to a pristine copy so the next format starts clean.
        dummy_gp = deepcopy(test_gp)

    # Fix: no placeholders in the string, so the f-prefix was unnecessary.
    os.remove("test_gp_write_ky_mat.npy")
def restart(self):
    """
    Restore an interrupted on-the-fly MD run from files written previously.

    Recovers the atomic configuration (positions, velocities, forces) from
    the trajectory files, reloads the trained GP model from its pickle, and,
    when a mapped GP is in use, rebuilds the MGP from the saved grids.
    """
    # Recover atomic configuration: positions, velocities, forces.
    positions, self.nsteps = self.read_frame('positions.xyz', -1)
    self.atoms.set_positions(positions)
    self.atoms.set_velocities(self.read_frame('velocities.dat', -1)[0])
    self.atoms.calc.results['forces'] = self.read_frame('forces.dat', -1)[0]
    print('Last frame recovered')

    # Recover the FLARE calculator: the full GP (training set included) is
    # reloaded from the pickled model rather than replayed frame by frame.
    # (Removed: dead commented-out replay/DFT-frame code superseded by this.)
    self.atoms.calc.gp_model = GaussianProcess.from_file(
        self.restart_from + '/gp_model.pickle')

    if self.atoms.calc.use_mapping:
        # Point each 3-body map at the saved grid directory, then rebuild
        # the mapped GP from scratch (skip=False).
        for map_3 in self.atoms.calc.mgp_model.maps_3:
            map_3.load_grid = self.restart_from + '/'
        self.atoms.calc.build_mgp(skip=False)
        self.atoms.calc.mgp_updated = True
    print('GP and MGP ready')

    # NOTE(review): hard-coded reset; presumably a conservative default for
    # the lower-bound distance used elsewhere -- confirm against the driver.
    self.l_bound = 10
def test_load_and_reload(self, all_gps, validation_env, multihyps):
    """
    Round-trip a GP through pickle and json serialization and check that
    the reloaded model reproduces the original's predictions exactly.

    :param all_gps: fixture dict of GaussianProcess objects keyed by the
        multihyps flag.
    :param validation_env: environment fixture used for prediction checks.
    :param multihyps: key selecting which GP variant to test.
    """
    test_gp = all_gps[multihyps]

    test_gp.write_model('test_gp_write', 'pickle')
    new_gp = GaussianProcess.from_file('test_gp_write.pickle')
    # Fix: the force-component index d is 1-based (x=1, y=2, z=3), matching
    # the sibling copy of this test; d=0 is not a Cartesian component.
    for d in [1, 2, 3]:
        # Exact equality is intentional: serialization must be lossless.
        assert np.all(test_gp.predict(x_t=validation_env, d=d) ==
                      new_gp.predict(x_t=validation_env, d=d))
    os.remove('test_gp_write.pickle')

    test_gp.write_model('test_gp_write', 'json')
    with open('test_gp_write.json', 'r') as f:
        new_gp = GaussianProcess.from_dict(json.loads(f.readline()))
    for d in [1, 2, 3]:
        assert np.all(test_gp.predict(x_t=validation_env, d=d) ==
                      new_gp.predict(x_t=validation_env, d=d))
    os.remove('test_gp_write.json')

    # Unknown serialization formats must be rejected.
    with raises(ValueError):
        test_gp.write_model('test_gp_write', 'cucumber')
def test_otf_parser_from_checkpt(software):
    """
    Check that an OTF trajectory parsed from an output file can rebuild its
    GP, falling back to the checkpointed GP json when direct reconstruction
    fails, then archive and clean up the run's files.

    :param software: DFT code name; selects env vars and output filenames.
    """
    if not os.environ.get(cmd[software], False):
        pytest.skip(f"{cmd[software]} not found in environment:"
                    " Please install the code "
                    f" and set the {cmd[software]} env. "
                    "variable to point to the executable.")
    if software == "cp2k":
        pytest.skip()

    example = 1
    casename = name_list[example]
    log_name = f"{casename}_otf_{software}"
    output_name = f"{log_name}.out"
    otf_traj = OtfAnalysis(output_name)
    try:
        replicated_gp = otf_traj.make_gp()
    except Exception:
        # Fix: a bare `except:` also swallowed SystemExit/KeyboardInterrupt.
        # Catch Exception only, then retry seeded with the checkpointed GP.
        init_gp = GaussianProcess.from_file(log_name + "_gp.json")
        replicated_gp = otf_traj.make_gp(init_gp=init_gp)

    # Archive every file this run produced, then clean up the workspace.
    outdir = f"test_outputs_{software}"
    if not os.path.isdir(outdir):
        os.mkdir(outdir)
    for f in os.listdir("./"):
        if f"{casename}_otf_{software}" in f:
            shutil.move(f, outdir)
    cleanup(software, f"{casename}_otf_{software}")
def test_load_and_reload(self, all_gps, validation_env, multihyps):
    """
    Round-trip a GP through pickle and json serialization, check that the
    reloaded model reproduces the original's predictions exactly, and check
    that write_model auto-detects the format from the filename extension.

    :param all_gps: fixture dict of GaussianProcess objects keyed by the
        multihyps flag.
    :param validation_env: environment fixture used for prediction checks.
    :param multihyps: key selecting which GP variant to test.
    """
    test_gp = all_gps[multihyps]

    test_gp.write_model("test_gp_write", "pickle")
    new_gp = GaussianProcess.from_file("test_gp_write.pickle")
    for d in [1, 2, 3]:
        # Exact equality is intentional: serialization must be lossless.
        assert np.all(test_gp.predict(x_t=validation_env, d=d) ==
                      new_gp.predict(x_t=validation_env, d=d))
    try:
        os.remove("test_gp_write.pickle")
    except FileNotFoundError:
        # Fix: narrowed from a bare `except: pass` -- only a missing file
        # is tolerable here; any other error should fail the test.
        pass

    test_gp.write_model("test_gp_write", "json")
    with open("test_gp_write.json", "r") as f:
        new_gp = GaussianProcess.from_dict(json.loads(f.readline()))
    for d in [1, 2, 3]:
        assert np.all(test_gp.predict(x_t=validation_env, d=d) ==
                      new_gp.predict(x_t=validation_env, d=d))
    os.remove("test_gp_write.json")

    # Unknown serialization formats must be rejected.
    with raises(ValueError):
        test_gp.write_model("test_gp_write", "cucumber")

    # Test logic for auto-detecting format in write command.
    # (Renamed loop variable: `format` shadowed the builtin.)
    for fmt in ["json", "pickle"]:
        write_string = "format_write_test." + fmt
        if os.path.exists(write_string):
            os.remove(write_string)

        test_gp.write_model(write_string)
        assert os.path.exists(write_string)
        os.remove(write_string)