def __init__(
    self,
    num_atoms,
    bond_feat_dim,
    num_targets,
    use_pbc=True,
    regress_forces=True,
    atom_embedding_size=64,
    num_graph_conv_layers=6,
    fc_feat_size=128,
    num_fc_layers=4,
    otf_graph=False,
    cutoff=6.0,
    num_gaussians=50,
):
    """Build a CGCNN model: fixed atom embeddings, a stack of graph
    convolutions, and a fully-connected readout head.

    Args:
        num_atoms: Number of atoms (forwarded to the base class).
        bond_feat_dim: Edge feature dimension fed to each conv layer.
        num_targets: Number of regression targets (forwarded to base).
        use_pbc: Whether periodic boundary conditions are used.
        regress_forces: Whether the model also regresses forces.
        atom_embedding_size: Node feature width after the embedding FC.
        num_graph_conv_layers: Number of CGCNNConv layers.
        fc_feat_size: Hidden width of the readout MLP.
        num_fc_layers: Total FC layers in the readout (>= 1).
        otf_graph: Whether graphs are built on the fly.
        cutoff: Radial cutoff used by convs and Gaussian smearing.
        num_gaussians: Number of Gaussian basis functions for distances.
    """
    super().__init__(num_atoms, bond_feat_dim, num_targets)
    self.regress_forces = regress_forces
    self.use_pbc = use_pbc
    self.cutoff = cutoff
    self.otf_graph = otf_graph

    # Fixed (non-trainable) CGCNN atom embeddings for atomic numbers
    # 1..100; row i holds the 92-dim descriptor of element i+1. Assigning
    # rows into a float tensor casts the source values to float32, so we
    # build it with an explicit float dtype to preserve that behavior.
    self.embedding = torch.tensor(
        [EMBEDDINGS[i] for i in range(1, 101)], dtype=torch.float
    )

    self.embedding_fc = nn.Linear(92, atom_embedding_size)

    self.convs = nn.ModuleList(
        [
            CGCNNConv(
                node_dim=atom_embedding_size,
                edge_dim=bond_feat_dim,
                cutoff=cutoff,
            )
            for _ in range(num_graph_conv_layers)
        ]
    )

    self.conv_to_fc = nn.Sequential(
        nn.Linear(atom_embedding_size, fc_feat_size), nn.Softplus()
    )

    # NOTE(review): self.fcs is only defined when num_fc_layers > 1 —
    # if the forward pass references it unconditionally this raises
    # AttributeError for num_fc_layers == 1. Confirm against forward().
    if num_fc_layers > 1:
        layers = []
        for _ in range(num_fc_layers - 1):
            layers.append(nn.Linear(fc_feat_size, fc_feat_size))
            layers.append(nn.Softplus())
        self.fcs = nn.Sequential(*layers)
    self.fc_out = nn.Linear(fc_feat_size, self.num_targets)

    # (fixed) self.cutoff was redundantly assigned a second time here.
    self.distance_expansion = GaussianSmearing(0.0, cutoff, num_gaussians)
def __init__(
    self,
    num_spherical: int,
    radial_basis: RadialBasis,
    cbf: dict,
    efficient: bool = False,
):
    """Set up the circular (cosine-angle) basis layer.

    Args:
        num_spherical: Number of basis functions.
        radial_basis: Radial basis module applied to distances.
        cbf: Basis config dict with a "name" key ("gaussian" or
            "spherical_harmonics") plus basis-specific hyperparameters.
            (Fixed: was annotated ``str`` but is indexed/copied as a dict.)
        efficient: Whether to use the efficient reformulation.

    Raises:
        ValueError: If ``cbf["name"]`` is not a known basis.
    """
    super().__init__()
    self.radial_basis = radial_basis
    self.efficient = efficient

    cbf_name = cbf["name"].lower()
    cbf_hparams = cbf.copy()
    del cbf_hparams["name"]

    if cbf_name == "gaussian":
        self.cosφ_basis = GaussianSmearing(
            start=-1, stop=1, num_gaussians=num_spherical, **cbf_hparams
        )
    elif cbf_name == "spherical_harmonics":
        # Zonal harmonics Y_l0 as sympy expressions in z = cos(phi).
        Y_lm = real_sph_harm(num_spherical, use_theta=False, zero_m_only=True)
        sph_funcs = []  # (num_spherical,)

        # Convert the sympy expressions to torch-callable functions.
        z = sym.symbols("z")
        modules = {"sin": torch.sin, "cos": torch.cos, "sqrt": torch.sqrt}
        m_order = 0  # only a single angle
        for l_degree in range(len(Y_lm)):  # num_spherical
            if l_degree == 0:
                # Y_00 is a constant, so the lambdified function returns a
                # scalar, not a tensor; wrap it to broadcast to z's shape.
                # Bind via default arg so the closure is explicit and safe.
                first_sph = sym.lambdify([z], Y_lm[l_degree][m_order], modules)
                sph_funcs.append(
                    lambda z, f=first_sph: torch.zeros_like(z) + f(z)
                )
            else:
                sph_funcs.append(
                    sym.lambdify([z], Y_lm[l_degree][m_order], modules)
                )
        self.cosφ_basis = lambda cosφ: torch.stack(
            [f(cosφ) for f in sph_funcs], dim=1
        )
    else:
        raise ValueError(f"Unknown cosine basis function '{cbf_name}'.")
def __init__(
    self,
    num_radial: int,
    cutoff: float,
    rbf: dict | None = None,
    envelope: dict | None = None,
):
    """Set up the radial basis: an envelope times a radial basis function.

    Args:
        num_radial: Number of radial basis functions.
        cutoff: Radial cutoff; distances are scaled by 1/cutoff to [0, 1].
        rbf: Config dict with "name" in {"gaussian", "spherical_bessel",
            "bernstein"} plus basis hyperparameters. Defaults to
            ``{"name": "gaussian"}``. (Fixed: was a mutable default arg.)
        envelope: Config dict with "name" in {"polynomial", "exponential"}
            plus envelope hyperparameters. Defaults to
            ``{"name": "polynomial", "exponent": 5}``. (Fixed: was a
            mutable default arg.)

    Raises:
        ValueError: If the envelope or RBF name is unknown.
    """
    super().__init__()
    # None sentinels replace mutable dict defaults; behavior is unchanged.
    if rbf is None:
        rbf = {"name": "gaussian"}
    if envelope is None:
        envelope = {"name": "polynomial", "exponent": 5}

    self.inv_cutoff = 1 / cutoff

    env_name = envelope["name"].lower()
    env_hparams = envelope.copy()
    del env_hparams["name"]

    if env_name == "polynomial":
        self.envelope = PolynomialEnvelope(**env_hparams)
    elif env_name == "exponential":
        self.envelope = ExponentialEnvelope(**env_hparams)
    else:
        raise ValueError(f"Unknown envelope function '{env_name}'.")

    rbf_name = rbf["name"].lower()
    rbf_hparams = rbf.copy()
    del rbf_hparams["name"]

    # RBFs get distances scaled to be in [0, 1]
    if rbf_name == "gaussian":
        self.rbf = GaussianSmearing(
            start=0, stop=1, num_gaussians=num_radial, **rbf_hparams
        )
    elif rbf_name == "spherical_bessel":
        self.rbf = SphericalBesselBasis(
            num_radial=num_radial, cutoff=cutoff, **rbf_hparams
        )
    elif rbf_name == "bernstein":
        self.rbf = BernsteinBasis(num_radial=num_radial, **rbf_hparams)
    else:
        raise ValueError(f"Unknown radial basis function '{rbf_name}'.")