Example #1
# __init__ of a 3D surface-plot helper class (the enclosing class definition
# is not part of this snippet). Assumed imports for the module:
#   import numpy as np
#   import matplotlib.pyplot as plt
#   from matplotlib import cm
#   from mpl_toolkits.mplot3d import Axes3D
#   from NeuralNetworks import NeuralNetworkUtilities as NN
def __init__(self, model_name, Instance=None, resolution=50):
    self.geoms = []
    # Set up the 3D figure for the surface plot
    self.figure = plt.figure()
    self.ax = Axes3D(self.figure)
    #self.ax.set_zlim(bottom=-1,top=5)
    plt.title(model_name)
    self.resolution = resolution
    # Angular (60-300 degrees) and radial (2-8) grid on which the surface is evaluated
    self.theta = np.linspace(60 * np.pi / 180, 300 * np.pi / 180,
                             self.resolution)
    self.rhs = np.linspace(2, 8, self.resolution)
    self.meshX, self.meshY = np.meshgrid(self.rhs,
                                         self.theta * 180 / np.pi)
    meshZ = np.zeros((len(self.theta), len(self.rhs)))
    self.surf = self.ax.plot_surface(self.meshX,
                                     self.meshY,
                                     meshZ,
                                     cmap=cm.magma,
                                     animated=True)
    self.model = model_name
    self.create_geoms()
    # Evaluation network; optionally copy the symmetry-function parameters
    # from an existing training instance
    self.Evaluation = NN.AtomicNeuralNetInstance()
    self.Evaluation.CalcDatasetStatistics = False
    self.Evaluation.TextOutput = False
    if Instance is not None:
        self.Evaluation._IsFromCheck = True
        self.Evaluation.Rs = Instance.Rs
        self.Evaluation.R_Etas = Instance.R_Etas
        self.Evaluation.NumberOfRadialFunctions = Instance.NumberOfRadialFunctions
        self.Evaluation.Etas = Instance.Etas
        self.Evaluation.Lambs = Instance.Lambs
        self.Evaluation.Zetas = Instance.Zetas
    self.Data = None
    self.ZData = []
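For reference, a standalone sketch of the evaluation grid built in this constructor (plain NumPy, same ranges as above; independent of the NeuralNetworks package):

import numpy as np

resolution = 50
# Angles from 60 to 300 degrees (stored in radians) and a radial coordinate from 2 to 8
theta = np.linspace(60 * np.pi / 180, 300 * np.pi / 180, resolution)
rhs = np.linspace(2, 8, resolution)
# meshgrid returns two (resolution, resolution) arrays: X holds the radii,
# Y the angles converted to degrees
meshX, meshY = np.meshgrid(rhs, theta * 180 / np.pi)
print(meshX.shape, meshY.shape)  # (50, 50) (50, 50)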
Example #2
# Assumed imports for this example:
#   import gc
#   import numpy as np
#   from NeuralNetworks import NeuralNetworkUtilities as _NN
#   from NeuralNetworks.data_generation import data_readers as _readers
def get_multi_data(model, data_files):
    """Evaluate a trained model on several QE MD data folders and return the
    per-atom predictions together with the per-atom reference energies."""
    predictions = []
    targets = []
    i = 0
    while i < len(data_files):
        data_file = data_files[i]
        try:
            print(data_file)
            # Load training instance
            Training = _NN.AtomicNeuralNetInstance()
            Training.TextOutput = False
            Training.CalcDatasetStatistics = False
            # Read the Quantum ESPRESSO MD run (energies converted from Ry to eV)
            Reader = _readers.QE_MD_Reader()
            Reader.E_conv_factor = 13.605698066
            Reader.read_folder(data_file)
            # Atomic reference energies (Ry) of both species, converted to eV
            offset = (
                -774.90203736 * Reader.nr_atoms_per_type[1] +
                Reader.nr_atoms_per_type[0] * -185.29265964) * 13.605698066
            Training.prepare_evaluation(model, Reader.nr_atoms_per_type)
            Training.create_eval_data(Reader.geometries)
            # Predicted energy per atom for every geometry in the folder
            temp = [
                pred[0] / sum(Reader.nr_atoms_per_type)
                for pred in Training.eval_dataset_energy(Training.EvalData)
            ]
            predictions += temp
            # Reference energy per atom with the atomic offset removed
            targets += list(
                np.divide(np.subtract(Reader.e_pot, offset),
                          sum(Reader.nr_atoms_per_type)))
            i += 1
            gc.collect()
        except Exception:
            # Stay on the same file and retry on failure
            print("Failed for file " + str(data_file) + " retrying...")

    return predictions, targets
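A minimal usage sketch for get_multi_data (not part of the original example; the model path and data folders are placeholders, and matplotlib is only used here for a parity plot):

import matplotlib.pyplot as plt

# Placeholder paths: a trained model directory plus folders readable by QE_MD_Reader
model = "/path/to/trained_model"
data_files = ["/path/to/md_run_1", "/path/to/md_run_2"]

predictions, targets = get_multi_data(model, data_files)

# Parity plot of predicted vs. reference energy per atom (eV)
plt.scatter(targets, predictions, s=5)
plt.xlabel("Reference energy per atom (eV)")
plt.ylabel("Predicted energy per atom (eV)")
plt.show()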
Example #3
from NeuralNetworks import NeuralNetworkUtilities
from NeuralNetworks.data_generation import data_readers
from os import listdir
from os.path import isfile, join
import numpy as np

input_reader = data_readers.SimpleInputReader()
mypath = "/home/afuchs/Documents/Validation_big/"
files = [f for f in listdir(mypath) if isfile(join(mypath, f))]

# Read every validation file and collect the reference energies
for file in files:
    input_reader.read(join(mypath, file), skip=3)
energies = np.asarray(input_reader.energies)

Training = NeuralNetworkUtilities.AtomicNeuralNetInstance()
Training.TextOutput = False
Training.CalcDatasetStatistics = False
# Restore the trained model (1 atom of the first type, 146 atoms of the second)
Training.prepare_evaluation(
    "/home/afuchs/Documents/NiAu_Training/multi_morse_smallds",
    nr_atoms_per_type=[1, 146])
#Training.create_eval_data(input_reader.geometries)
#out=Training.eval_dataset_energy(Training.EvalData)
# Evaluate the network for each geometry individually
out = []
for geometry in input_reader.geometries:
    out.append(Training.energy_for_geometry(geometry))
# Atomic reference energies (Ry) of both species, converted to eV
offset = (-774.90203736 * 146 + 1 * -185.29265964) * 13.605698066
ref = energies - offset
res = np.asarray(out)
# Shift both curves so that their respective minima lie at zero
min_ref = energies - min(energies)
min_res = res - min(res)
rel_error = []
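# (Not part of the original script) A simple check one could add at this
# point, assuming the network energies in min_res and the reference
# energies in min_ref refer to the same geometries in the same order:
#print("Mean absolute deviation of the shifted energies:",
#      np.mean(np.abs(min_res - min_ref)))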
# The lines below appear to belong to a separate script: they assume a list
# of data folders, data_files, as well as learning_rate and force (used
# further down) to be defined elsewhere.
# Scan every file in every data folder for the string "error"
for this in data_files:
    folder_files = [join(this, f) for f in listdir(this)]
    for myfile in folder_files:
        with open(myfile, "r") as handle:
            if "error" in handle.read():
                print("Error in :" + str(myfile))
print(data_files)

Multi = _NN.MultipleInstanceTraining()
Multi.GlobalLearningRate = learning_rate
for i in range(len(data_files)):
    data_file = data_files[i]
    print(data_file)

    # Load training instance
    Training = _NN.AtomicNeuralNetInstance()
    Training.IsPartitioned = False
    Training.IncludeMorse = True
    Training.CalcDatasetStatistics = True
    Training.UseForce = force
    # Default symmetry function set
    #Training.NumberOfRadialFunctions=15
    bohr2ang = 0.529177249
    #Training.Lambs=[1.0,-1.0]
    #Training.Zetas=[0.2,0.5,1,3,10]#[0.025,0.045,0.075,0.1,0.15,0.2,0.3,0.5,0.7,1,1.5,2,3,5,10,18,36,100]
    #Training.Etas=[0.01]
    # Centers of the radial symmetry functions (all zero here)
    Training.Rs = [
        0, 0, 0, 0, 0, 0, 0, 0
    ]  #,1.16674542, 1.81456625,2.3, 2.89256287, 4.53134823, 6.56226301, 6.92845869]
    # Widths of the radial symmetry functions, rescaled by bohr2ang**2
    Training.R_Etas = [
        0.4 / bohr2ang**2, 0.2 / bohr2ang**2, 0.1 / bohr2ang**2,