def save_checkpoint(path: str, model: MoleculeModel, scaler: StandardScaler = None, features_scaler: StandardScaler = None, args: TrainArgs = None) -> None:
    """
    Serializes a model checkpoint to disk.

    :param path: Path where the checkpoint will be saved.
    :param model: The :class:`~chemprop.models.model.MoleculeModel` to save.
    :param scaler: An optional :class:`~chemprop.data.scaler.StandardScaler` fitted on the data.
    :param features_scaler: An optional :class:`~chemprop.data.scaler.StandardScaler` fitted on the features.
    :param args: An optional :class:`~chemprop.args.TrainArgs` object containing the arguments the model was trained with.
    """
    # Older checkpoints stored args as an argparse Namespace, so convert
    # back from TrainArgs for backwards compatibility.
    if args is not None:
        args = Namespace(**args.as_dict())

    # Scalers are persisted as plain mean/std dicts rather than pickled objects.
    if scaler is None:
        data_scaler_state = None
    else:
        data_scaler_state = {'means': scaler.means, 'stds': scaler.stds}

    if features_scaler is None:
        features_scaler_state = None
    else:
        features_scaler_state = {'means': features_scaler.means, 'stds': features_scaler.stds}

    torch.save(
        {
            'args': args,
            'state_dict': model.state_dict(),
            'data_scaler': data_scaler_state,
            'features_scaler': features_scaler_state,
        },
        path,
    )
def save_checkpoint(path: str, model: MoleculeModel, scaler: StandardScaler = None, features_scaler: StandardScaler = None, args: TrainArgs = None) -> None:
    """
    Saves a model checkpoint.

    :param model: A :class:`~chemprop.models.model.MoleculeModel`.
    :param scaler: A :class:`~chemprop.data.scaler.StandardScaler` fitted on the data.
    :param features_scaler: A :class:`~chemprop.data.scaler.StandardScaler` fitted on the features.
    :param args: The :class:`~chemprop.args.TrainArgs` object containing the arguments the model was trained with.
    :param path: Path where checkpoint will be saved.
    """
    # Convert args to namespace for backwards compatibility with older
    # checkpoints that stored an argparse.Namespace instead of TrainArgs.
    if args is not None:
        args = Namespace(**args.as_dict())

    # Scalers are saved as plain mean/std dicts (not pickled objects) so a
    # checkpoint can be loaded without the original scaler instances.
    state = {
        'args': args,
        'state_dict': model.state_dict(),
        'data_scaler': {
            'means': scaler.means,
            'stds': scaler.stds
        } if scaler is not None else None,
        'features_scaler': {
            'means': features_scaler.means,
            'stds': features_scaler.stds
        } if features_scaler is not None else None
    }
    torch.save(state, path)
def save_checkpoint(
    path: str,
    model: MoleculeModel,
    scaler: StandardScaler = None,
    features_scaler: StandardScaler = None,
    atom_descriptor_scaler: StandardScaler = None,
    bond_feature_scaler: StandardScaler = None,
    args: TrainArgs = None,
) -> None:
    """
    Saves a model checkpoint.

    :param model: A :class:`~chemprop.models.model.MoleculeModel`.
    :param scaler: A :class:`~chemprop.data.scaler.StandardScaler` fitted on the data.
    :param features_scaler: A :class:`~chemprop.data.scaler.StandardScaler` fitted on the features.
    :param atom_descriptor_scaler: A :class:`~chemprop.data.scaler.StandardScaler` fitted on the atom descriptors.
    :param bond_feature_scaler: A :class:`~chemprop.data.scaler.StandardScaler` fitted on the bond features.
    :param args: The :class:`~chemprop.args.TrainArgs` object containing the arguments the model was trained with.
    :param path: Path where checkpoint will be saved.
    """

    def _scaler_state(s):
        # Persist a scaler as a plain mean/std dict; a missing scaler stays None.
        return None if s is None else {"means": s.means, "stds": s.stds}

    # Convert args to namespace for backwards compatibility with older
    # checkpoints that stored an argparse.Namespace instead of TrainArgs.
    if args is not None:
        args = Namespace(**args.as_dict())

    state = {
        "args": args,
        "state_dict": model.state_dict(),
        "data_scaler": _scaler_state(scaler),
        "features_scaler": _scaler_state(features_scaler),
        "atom_descriptor_scaler": _scaler_state(atom_descriptor_scaler),
        "bond_feature_scaler": _scaler_state(bond_feature_scaler),
    }
    torch.save(state, path)