def predict(self, images, batch_size=32):
    """Predict energies and forces for a list of ASE images.

    Parameters
    ----------
    images : list
        Atoms objects to featurize and run through the trained network.
    batch_size : int, optional
        Currently unused; retained for backward compatibility with
        existing callers.

    Returns
    -------
    dict
        ``{"energy": [float, ...], "forces": [ndarray, ...]}`` in
        denormalized target units, one forces array per image. If
        ``images`` is empty, a warning is issued and ``images`` is
        returned unchanged.
    """
    if len(images) < 1:
        warnings.warn("No images found!", stacklevel=2)
        return images

    # Featurize with the descriptor used during training. Reference
    # energies/forces are not read from the images (r_energy/r_forces
    # False) since we are predicting them.
    a2d = AtomsToData(
        descriptor=self.train_dataset.descriptor,
        r_energy=False,
        r_forces=False,
        save_fps=True,
        fprimes=self.forcetraining,
        cores=1,
    )
    data_list = a2d.convert_all(images, disable_tqdm=True)
    # NOTE(review): unlike the other predict() in this file, no
    # feature_scaler.norm() is applied here — confirm the features from
    # this descriptor are already in the units the network was trained on.

    self.net.module.eval()
    collate_fn = DataCollater(train=False, forcetraining=self.forcetraining)

    predictions = {"energy": [], "forces": []}
    for data in data_list:
        collated = collate_fn([data])
        energy, forces = self.net.module(collated)
        # Detach and move to CPU before denorm/.numpy(): Tensor.numpy()
        # raises on CUDA tensors, and .cpu() is a no-op for CPU tensors.
        # This matches the handling in the sibling predict() method.
        energy = self.target_scaler.denorm(
            energy.detach().cpu(), pred="energy"
        ).tolist()
        forces = self.target_scaler.denorm(
            forces.detach().cpu(), pred="forces"
        ).numpy()
        predictions["energy"].extend(energy)
        predictions["forces"].append(forces)

    return predictions
def load_skorch(self):
    """Assemble the skorch ``NeuralNetRegressor`` around the loaded model.

    Reads hyperparameters from ``self.config["optim"]`` (with defaults)
    and wires in the training/validation collate functions, device,
    train split, and callbacks prepared elsewhere on this trainer.
    """
    # Route tensor conversion through the project-specific implementation.
    skorch.net.to_tensor = to_tensor

    optim_cfg = self.config["optim"]
    collater = DataCollater(train=True, forcetraining=self.forcetraining)

    self.net = NeuralNetRegressor(
        module=self.model,
        criterion=self.criterion,
        criterion__force_coefficient=optim_cfg.get("force_coefficient", 0),
        criterion__loss=optim_cfg.get("loss", "mse"),
        optimizer=self.optimizer,
        lr=optim_cfg.get("lr", 1e-1),
        batch_size=optim_cfg.get("batch_size", 32),
        max_epochs=optim_cfg.get("epochs", 100),
        iterator_train__collate_fn=collater,
        iterator_train__shuffle=True,
        iterator_valid__collate_fn=collater,
        iterator_valid__shuffle=False,
        device=self.device,
        train_split=self.split,
        callbacks=self.callbacks,
        verbose=self.config["cmd"].get("verbose", True),
    )
    print("Loading skorch trainer")
def load_model(self):
    """Instantiate the BPNN model and collaters; wrap for multi-GPU runs.

    Builds ``self.model`` from the element list and model config, then
    ``self.parallel_collater``; if GPUs were requested the model is
    wrapped in the project's ``DataParallel``.
    """
    element_indices = list_symbols_to_indices(self.elements)
    self.model = BPNN(
        elements=element_indices,
        input_dim=self.input_dim,
        **self.config["model"],
    )
    print("Loading model: {} parameters".format(self.model.num_params))

    train_collater = DataCollater(train=True, forcetraining=self.forcetraining)
    self.parallel_collater = ParallelCollater(self.gpus, train_collater)

    # Only wrap the model when at least one GPU was requested.
    if self.gpus > 0:
        self.model = DataParallel(
            self.model,
            output_device=self.output_device,
            num_gpus=self.gpus,
        )
def predict(self, images, disable_tqdm=True):
    """Predict denormalized energies and forces for a list of images.

    Parameters
    ----------
    images : list
        Atoms objects to featurize and evaluate.
    disable_tqdm : bool, optional
        Suppress progress bars during featurization and scaling.

    Returns
    -------
    dict
        ``{"energy": [float, ...], "forces": [ndarray, ...]}``; one
        forces array per image. Warns and returns ``images`` unchanged
        when the input is empty.
    """
    if not images:
        warnings.warn("No images found!", stacklevel=2)
        return images

    dataset_cfg = self.config["dataset"]
    self.descriptor = construct_descriptor(dataset_cfg["descriptor"])
    converter = AtomsToData(
        descriptor=self.descriptor,
        r_energy=False,
        r_forces=False,
        save_fps=dataset_cfg.get("save_fps", True),
        fprimes=self.forcetraining,
        cores=1,
    )
    data_list = converter.convert_all(images, disable_tqdm=disable_tqdm)
    # Normalize features in place with the scaler fit during training.
    self.feature_scaler.norm(data_list, disable_tqdm=disable_tqdm)

    self.net.module.eval()
    collater = DataCollater(train=False, forcetraining=self.forcetraining)

    predictions = {"energy": [], "forces": []}
    for sample in data_list:
        batch = collater([sample]).to(self.device)
        energy, forces = self.net.module([batch])
        # Detach + move to CPU before denormalizing and converting out.
        energy_out = self.target_scaler.denorm(
            energy.detach().cpu(), pred="energy"
        ).tolist()
        forces_out = self.target_scaler.denorm(
            forces.detach().cpu(), pred="forces"
        ).numpy()
        predictions["energy"].extend(energy_out)
        predictions["forces"].append(forces_out)

    return predictions