def test_generate_by_line():
    """Build a multi-element parameter set one call at a time and validate it.

    Defines specie, twobody, threebody and manybody groups on a
    ParameterHelper, assigns hyperparameters, constraints and cutoffs,
    then checks the exported dictionary with Parameters.check_instantiation.
    """
    pm = ParameterHelper(verbose="DEBUG")

    # One specie group per element.
    for element in ("O", "C", "H"):
        pm.define_group("specie", element, [element])

    # Two-body groups: a wildcard-style group and an explicit O-O pair.
    pm.define_group("twobody", "**", ["C", "H"])
    pm.define_group("twobody", "OO", ["O", "O"], atomic_str=True)

    # Three-body groups.
    pm.define_group("threebody", "***", ["O", "O", "C"])
    pm.define_group("threebody", "OOO", ["O", "O", "O"])

    # Many-body groups; several member lists map onto the same group name.
    for group, members in (
        ("1.5", ["C", "H"]),
        ("1.5", ["C", "O"]),
        ("1.5", ["O", "H"]),
        ("2", ["O", "O"]),
        ("2", ["H", "O"]),
        ("2.8", ["O", "O"]),
    ):
        pm.define_group("manybody", group, members)

    # Hyperparameters for every group defined above.
    for group, values in (
        ("**", [1, 0.5]),
        ("OO", [1, 0.5]),
        ("***", [1, 0.5]),
        ("OOO", [1, 0.5]),
        ("1.5", [1, 0.5, 1.5]),
        ("2", [1, 0.5, 2]),
        ("2.8", [1, 0.5, 2.8]),
    ):
        pm.set_parameters(group, values)

    # Constrain two of the many-body groups.
    pm.set_constraints("2", [True, False])
    pm.set_constraints("2.8", False)

    # Per-kernel cutoffs.
    pm.set_parameters("cutoff_twobody", 5)
    pm.set_parameters("cutoff_threebody", 4)
    pm.set_parameters("cutoff_manybody", 3)

    hm = pm.as_dict()
    Parameters.check_instantiation(hm["hyps"], hm["cutoffs"], hm["kernels"], hm)
def generate_hm(ntwobody, nthreebody, nmanybody=1, constraint=False, multihyps=True):
    """Generate a hyperparameter set for testing.

    Args:
        ntwobody: number of two-body groups to define (>1 adds an H-H group).
        nthreebody: number of three-body groups to define (>1 adds H-H-H).
        nmanybody: number of many-body groups to define.
        constraint: when not False, freeze part of the b1/t1 hyperparameters.
        multihyps: when False, build a plain (non-grouped) random helper.

    Returns:
        (hyps, hyps_mask_dict, cutoffs) from ParameterHelper.as_dict().
    """
    cutoff = 0.8

    if multihyps is False:
        # Plain case: one randomized set per requested kernel, no grouping.
        kernels = []
        parameters = {}
        for count, kernel, cut_key in (
            (ntwobody, "twobody", "cutoff_twobody"),
            (nthreebody, "threebody", "cutoff_threebody"),
            (nmanybody, "manybody", "cutoff_manybody"),
        ):
            if count > 0:
                kernels.append(kernel)
                parameters[cut_key] = cutoff
        pm = ParameterHelper(kernels=kernels, random=True, parameters=parameters)
        hm = pm.as_dict()
        return hm["hyps"], hm, hm["cutoffs"]

    # Multi-group case: wildcard groups first, then element-specific extras.
    pm = ParameterHelper(species=["H", "He"], parameters={"noise": 0.05})
    if ntwobody > 0:
        pm.define_group("twobody", "b1", ["*", "*"], parameters=random(2))
        pm.set_parameters("cutoff_twobody", cutoff)
    if nthreebody > 0:
        pm.define_group("threebody", "t1", ["*", "*", "*"], parameters=random(2))
        pm.set_parameters("cutoff_threebody", cutoff)
    if nmanybody > 0:
        pm.define_group("manybody", "manybody1", ["*", "*"], parameters=random(2))
        pm.set_parameters("cutoff_manybody", cutoff)
    if ntwobody > 1:
        pm.define_group("twobody", "b2", ["H", "H"], parameters=random(2))
    if nthreebody > 1:
        pm.define_group("threebody", "t2", ["H", "H", "H"], parameters=random(2))

    if constraint is not False:
        # Partially freeze the wildcard groups' hyperparameters.
        pm.set_constraints("b1", opt=[True, False])
        pm.set_constraints("t1", opt=[False, True])

    hm = pm.as_dict()
    return hm["hyps"], hm, hm["cutoffs"]
def test_generate_by_line():
    """Exercise the call-by-call ParameterHelper API.

    Groups, hyperparameters, constraints and cutoffs are registered one
    statement at a time; the resulting dictionary is then validated via
    Parameters.check_instantiation.
    """
    pm = ParameterHelper(verbose="DEBUG")

    # Species.
    pm.define_group("specie", "O", ["O"])
    pm.define_group("specie", "C", ["C"])
    pm.define_group("specie", "H", ["H"])

    # Bond (two-body) groups.
    pm.define_group("twobody", "**", ["C", "H"])
    pm.define_group("twobody", "OO", ["O", "O"], atomic_str=True)

    # Triplet (three-body) groups.
    pm.define_group("threebody", "***", ["O", "O", "C"])
    pm.define_group("threebody", "OOO", ["O", "O", "O"])

    # Many-body groups; repeated names accumulate members.
    pm.define_group("manybody", "1.5", ["C", "H"])
    pm.define_group("manybody", "1.5", ["C", "O"])
    pm.define_group("manybody", "1.5", ["O", "H"])
    pm.define_group("manybody", "2", ["O", "O"])
    pm.define_group("manybody", "2", ["H", "O"])
    pm.define_group("manybody", "2.8", ["O", "O"])

    # Hyperparameters per group.
    pm.set_parameters("**", [1, 0.5])
    pm.set_parameters("OO", [1, 0.5])
    pm.set_parameters("***", [1, 0.5])
    pm.set_parameters("OOO", [1, 0.5])
    pm.set_parameters("1.5", [1, 0.5, 1.5])
    pm.set_parameters("2", [1, 0.5, 2])
    pm.set_parameters("2.8", [1, 0.5, 2.8])

    # Optimization constraints on two many-body groups.
    pm.set_constraints("2", [True, False])
    pm.set_constraints("2.8", False)

    # Cutoffs per kernel.
    pm.set_parameters("cutoff_twobody", 5)
    pm.set_parameters("cutoff_threebody", 4)
    pm.set_parameters("cutoff_manybody", 3)

    hm = pm.as_dict()
    Parameters.check_instantiation(hm["hyps"], hm["cutoffs"], hm["kernels"], hm)
def generate_hm(ntwobody, nthreebody, nmanybody=1, constraint=False, multihyps=True):
    """Build a test hyperparameter mask via ParameterHelper.

    Args:
        ntwobody: how many two-body groups to create.
        nthreebody: how many three-body groups to create.
        nmanybody: how many many-body groups to create.
        constraint: when not False, apply optimization constraints to b1/t1.
        multihyps: when False, skip grouping and use a random plain helper.

    Returns:
        Tuple of (hyps, hyps_mask_dict, cutoffs).
    """
    cut_val = 0.8

    if multihyps is False:
        # No per-group masks: request randomized kernels directly.
        kernel_names = []
        helper_params = {}
        if ntwobody > 0:
            kernel_names.append('twobody')
            helper_params['cutoff_twobody'] = cut_val
        if nthreebody > 0:
            kernel_names.append('threebody')
            helper_params['cutoff_threebody'] = cut_val
        if nmanybody > 0:
            kernel_names.append('manybody')
            helper_params['cutoff_manybody'] = cut_val
        helper = ParameterHelper(kernels=kernel_names, random=True,
                                 parameters=helper_params)
        mask = helper.as_dict()
        return mask['hyps'], mask, mask['cutoffs']

    # Grouped case: wildcard groups, then optional H-specific extras.
    helper = ParameterHelper(species=['H', 'He'], parameters={'noise': 0.05})
    if ntwobody > 0:
        helper.define_group('twobody', 'b1', ['*', '*'], parameters=random(2))
        helper.set_parameters('cutoff_twobody', cut_val)
    if nthreebody > 0:
        helper.define_group('threebody', 't1', ['*', '*', '*'],
                            parameters=random(2))
        helper.set_parameters('cutoff_threebody', cut_val)
    if nmanybody > 0:
        helper.define_group('manybody', 'manybody1', ['*', '*'],
                            parameters=random(2))
        helper.set_parameters('cutoff_manybody', cut_val)
    if ntwobody > 1:
        helper.define_group('twobody', 'b2', ['H', 'H'], parameters=random(2))
    if nthreebody > 1:
        helper.define_group('threebody', 't2', ['H', 'H', 'H'],
                            parameters=random(2))

    if constraint is not False:
        # Freeze a subset of the wildcard-group hyperparameters.
        helper.set_constraints('b1', opt=[True, False])
        helper.set_constraints('t1', opt=[False, True])

    mask = helper.as_dict()
    return mask['hyps'], mask, mask['cutoffs']
def get_random_training_set(nenv, nstruc):
    """Create a random training set plus several hyperparameter masks.

    Generates five hyperparameter-mask variants:

    * multi-hyperparameters with two two-body and two three-body groups
    * the same set with per-group optimization constraints
    * the same set with the noise parameter also frozen
    * a single-group-per-kernel set (equivalent to no multihyps)
    * a plain set with no multihyps at all

    The seeded RNG makes every call deterministic; environments, force
    labels, structures and energy labels are stored in the module-level
    registries of flare.gp_algebra under the name "unit_test".

    Args:
        nenv: number of atomic environments (force labels) to generate.
        nstruc: number of structures (energy labels) to generate.

    Returns:
        (name, cutoffs, hyps_mask_list, energy_noise).
    """
    np.random.seed(0)

    cutoffs = {"twobody": 0.8, "threebody": 0.8}
    parameters = {
        "cutoff_twobody": 0.8,
        "cutoff_threebody": 0.8,
        "noise": 0.05
    }

    # Mask 1: 9 hyperparameters — two groups per kernel.
    pm = ParameterHelper(
        species=["H", "He"],
        kernels={
            "twobody": [["*", "*"], ["H", "H"]],
            "threebody": [["*", "*", "*"], ["H", "H", "H"]],
        },
        parameters=parameters,
        ones=True,
        random=False,
        verbose="DEBUG",
    )
    hyps_mask1 = pm.as_dict()

    # Mask 2: same 9 hyperparameters, only train indices 0, 2, 4, 6, 8.
    pm.set_constraints("twobody0", [True, True])
    pm.set_constraints("twobody1", [False, False])
    pm.set_constraints("threebody0", [True, True])
    pm.set_constraints("threebody1", [False, False])
    hyps_mask2 = pm.as_dict()

    # Mask 3: additionally freeze the noise, training only 0, 2, 4, 6.
    pm.set_constraints("noise", False)
    hyps_mask3 = pm.as_dict()

    # Mask 4: 5 hyperparameters — one group per kernel (same as no multihyps).
    pm = ParameterHelper(
        species=["H", "He"],
        kernels={
            "twobody": [["*", "*"]],
            "threebody": [["*", "*", "*"]]
        },
        parameters=parameters,
        ones=True,
        random=False,
        verbose="DEBUG",
    )
    hyps_mask4 = pm.as_dict()

    # Mask 5: 5 hyperparameters with no multihyps machinery.
    pm = ParameterHelper(
        kernels=["twobody", "threebody"],
        parameters=parameters,
        ones=True,
        random=False,
        verbose="DEBUG",
    )
    hyps_mask5 = pm.as_dict()

    hyps_mask_list = [
        hyps_mask1, hyps_mask2, hyps_mask3, hyps_mask4, hyps_mask5
    ]

    # Random training environments with 3-component force labels.
    cell = np.eye(3)
    unique_species = [0, 1]
    noa = 5
    training_data = []
    training_labels = []
    for _ in range(nenv):
        positions = np.random.uniform(-1, 1, [noa, 3])
        # Species codes are shifted to 1..len(unique_species).
        species = np.random.randint(0, len(unique_species), noa) + 1
        struc = Structure(cell, species, positions)
        training_data.append(AtomicEnvironment(struc, 1, cutoffs))
        training_labels.append(np.random.uniform(-1, 1, 3))
    training_labels = np.hstack(training_labels)

    # Random training structures with scalar energy labels.
    training_structures = []
    energy_labels = []
    for _ in range(nstruc):
        positions = np.random.uniform(-1, 1, [noa, 3])
        species = np.random.randint(0, len(unique_species), noa) + 1
        struc = Structure(cell, species, positions)
        struc_envs = [AtomicEnvironment(struc, n, cutoffs) for n in range(noa)]
        training_structures.append(struc_envs)
        energy_labels.append(np.random.uniform(-1, 1))
    energy_labels = np.array(energy_labels)

    # Register everything under a shared name in the global stores.
    name = "unit_test"
    flare.gp_algebra._global_training_data[name] = training_data
    flare.gp_algebra._global_training_labels[name] = training_labels
    flare.gp_algebra._global_training_structures[name] = training_structures
    flare.gp_algebra._global_energy_labels[name] = energy_labels

    energy_noise = 0.01

    return name, cutoffs, hyps_mask_list, energy_noise