Example #1
def too_close_atom_pairs(structure: Structure,
                         radius: float = 2.0,
                         too_close_criterion_factor: float = 0.7) -> bool:
    """Check whether too-close atomic pairs exist in the structure.

    A pair is flagged when its interatomic distance is shorter than the sum
    of the two elements' core radii (``rcore``) multiplied by
    ``too_close_criterion_factor``.

    Args:
        structure (Structure):
            Input structure.
        radius (float):
            Cutoff radius in angstroms used to collect candidate neighbors.
        too_close_criterion_factor (float):
            Factor multiplied with the sum of core radii to set the
            too-close threshold.

    Returns:
        True if at least one too-close pair is found, False otherwise.
    """

    distances = structure.get_all_neighbors(radius)
    for i, periodic_neighbors in enumerate(distances):
        elem1 = structure[i].species_string
        frac1 = structure[i].frac_coords
        for periodic_neighbor in periodic_neighbors:
            elem2 = periodic_neighbor.species_string
            frac2 = periodic_neighbor.frac_coords
            dist = periodic_neighbor.nn_distance
            rcore_sum = rcore[elem1] + rcore[elem2]
            if dist < rcore_sum * too_close_criterion_factor:
                logger.warning(
                    f"Element {elem1} (index: {i}) at {frac1} & Element {elem2}"
                    f" (index: {periodic_neighbor.index}) at {frac2} are too "
                    f"close with distance {round(dist, 4)}.")

                return True
    return False
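A minimal usage sketch (not part of the original source): it assumes the function above lives in the same module as `rcore`, a dict mapping element symbols to core radii in angstroms, and a standard `logging` logger; the radii values below are illustrative only.

import logging
from pymatgen.core import Lattice, Structure

logger = logging.getLogger(__name__)
rcore = {"Li": 1.2, "O": 0.7}  # hypothetical core radii in angstroms

# Two sites only ~0.15 A apart in a 3 A cubic cell -> expect True.
s = Structure(Lattice.cubic(3.0), ["Li", "O"], [[0, 0, 0], [0.05, 0, 0]])
print(too_close_atom_pairs(s))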
Example #2
    def test_get_all_neighbors_outside_cell(self):
        s = Structure(Lattice.cubic(2), ['Li', 'Li', 'Li', 'Si'],
                      [[3.1] * 3, [0.11] * 3, [-1.91] * 3, [0.5] * 3])
        all_nn = s.get_all_neighbors(0.2, True)
        for site, nns in zip(s, all_nn):
            for nn in nns:
                self.assertTrue(nn[0].is_periodic_image(s[nn[2]]))
                d = sum((site.coords - nn[0].coords) ** 2) ** 0.5
                self.assertAlmostEqual(d, nn[1])
        self.assertEqual(list(map(len, all_nn)), [2, 2, 2, 0])
Example #3
def neighbor_list_and_relative_vec_lattice(pos,
                                           lattice,
                                           r_max,
                                           self_interaction=True,
                                           r_min=1e-8):
    r"""
    Create the neighbor list (edge_index) and relative vectors (edge_attr)
    based on a radial cutoff and a periodic lattice.

    :param pos: torch.Tensor of Cartesian coordinates with shape (N, 3)
    :param lattice: 3x3 array-like of lattice vectors
    :param r_max: float, radial cutoff
    :param self_interaction: whether or not to include self edges
    :param r_min: float, numerical tolerance passed to get_all_neighbors

    :return: edge list of shape (2, num_edges), Tensor of relative vectors
        of shape (num_edges, 3)

    Edges are given by the convention
        edge_list[0] = source (convolution center)
        edge_list[1] = target (neighbor index)

    so the relative vectors follow the matching convention
        \vec{r}_{source, target}

    Relative vectors are given for each image of the neighbor atom within
    r_max.
    """
    N, _ = pos.shape
    structure = Structure(lattice, ['H'] * N, pos, coords_are_cartesian=True)

    nei_list = []
    geo_list = []

    neighbors = structure.get_all_neighbors(r_max,
                                            include_index=True,
                                            include_image=True,
                                            numerical_tol=r_min)
    for i, (site, neis) in enumerate(zip(structure, neighbors)):
        indices, cart = zip(*[(n.index, n.coords) for n in neis])
        cart = torch.tensor(cart)
        indices = torch.LongTensor([[i, target] for target in indices])
        dist = cart - torch.tensor(site.coords)
        if self_interaction:
            self_index = torch.LongTensor([[i, i]])
            indices = torch.cat([self_index, indices], dim=0)
            self_dist = torch.zeros(1, 3, dtype=dist.dtype)
            dist = torch.cat([self_dist, dist], dim=0)
        nei_list.append(indices)
        geo_list.append(dist)
    edge_index = torch.cat(nei_list, dim=0).transpose(1, 0)
    edge_attr = torch.cat(geo_list, dim=0)
    return edge_index, edge_attr
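A usage sketch for the function above (assumes `torch` and pymatgen's `Structure` are importable in the same module; the coordinates, lattice, and cutoff below are illustrative):

import torch

# Four atoms in a 5 A cubic cell; every atom has at least one neighbor
# within the 3 A cutoff, so no site ends up with an empty neighbor list.
pos = torch.tensor([[0.0, 0.0, 0.0],
                    [1.5, 0.0, 0.0],
                    [0.0, 1.5, 0.0],
                    [0.0, 0.0, 1.5]])
lattice = [[5.0, 0.0, 0.0], [0.0, 5.0, 0.0], [0.0, 0.0, 5.0]]

edge_index, edge_attr = neighbor_list_and_relative_vec_lattice(
    pos, lattice, r_max=3.0)
print(edge_index.shape)  # (2, num_edges), self edges included by default
print(edge_attr.shape)   # (num_edges, 3)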
Example #4
    def test_get_all_neighbors_and_get_neighbors(self):
        s = self.struct
        r = random.uniform(3, 6)
        all_nn = s.get_all_neighbors(r, True)
        for i in range(len(s)):
            self.assertEqual(len(all_nn[i]), len(s.get_neighbors(s[i], r)))

        for site, nns in zip(s, all_nn):
            for nn in nns:
                self.assertTrue(nn[0].is_periodic_image(s[nn[2]]))
                d = sum((site.coords - nn[0].coords)**2)**0.5
                self.assertAlmostEqual(d, nn[1])

        s = Structure(Lattice.cubic(1), ['Li'], [[0, 0, 0]])
        s.make_supercell([2, 2, 2])
        self.assertEqual(sum(map(len, s.get_all_neighbors(3))), 976)
Example #5
    def edge_features(self, structure: Structure, **kwargs):
        def expand_distance(distances, dmin=0, step=0.2, var=None):
            """
            Expand interatomic distances into a Gaussian basis.

            Parameters
            ----------
            distances: np.ndarray
              Interatomic distances to expand
            dmin: float
              Minimum interatomic distance (lower bound of the Gaussian
              centers); the upper bound is self.radius
            step: float
              Step size between Gaussian centers
            var: float
              Variance of the Gaussian filter; defaults to step
            """
            filter_ = np.arange(dmin, self.radius + step, step)
            if var is None:
                var = step

            return np.exp(-(distances[..., np.newaxis] - filter_)**2 / var**2)

        all_nbrs = structure.get_all_neighbors(self.radius, include_index=True)
        all_nbrs = [sorted(nbrs, key=lambda x: x[1]) for nbrs in all_nbrs]
        nbr_fea_idx, nbr_fea = [], []
        for nbr in all_nbrs:
            if len(nbr) < self.max_num_nbr:
                warnings.warn('Could not find enough neighbors to build the '
                              'graph. If this happens frequently, consider '
                              'increasing the radius.')
                nbr_fea_idx.append(
                    list(map(lambda x: x[2], nbr)) + [0] *
                    (self.max_num_nbr - len(nbr)))
                nbr_fea.append(
                    list(map(lambda x: x[1], nbr)) + [self.radius + 1.] *
                    (self.max_num_nbr - len(nbr)))
            else:
                nbr_fea_idx.append(
                    list(map(lambda x: x[2], nbr[:self.max_num_nbr])))
                nbr_fea.append(
                    list(map(lambda x: x[1], nbr[:self.max_num_nbr])))
        nbr_fea = np.array(nbr_fea)
        nbr_fea = expand_distance(nbr_fea)
        nbr_fea = torch.Tensor(nbr_fea)

        nbr_fea_idx = torch.LongTensor(nbr_fea_idx)

        return nbr_fea, nbr_fea_idx
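For reference, a standalone sketch of the Gaussian basis expansion performed by `expand_distance` above; the radius and step values are illustrative stand-ins for `self.radius` and the default `step`:

import numpy as np

radius, step = 8.0, 0.2
filter_ = np.arange(0.0, radius + step, step)   # Gaussian centers 0.0, 0.2, ..., 8.0

# Each scalar distance is mapped to one Gaussian activation per center.
distances = np.array([[1.0, 2.5], [3.2, 0.9]])  # e.g. shape (n_sites, max_num_nbr)
expanded = np.exp(-(distances[..., np.newaxis] - filter_) ** 2 / step ** 2)
print(expanded.shape)  # (2, 2, 41)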
Example #6
    def __getitem__(self, idx):
        # NOTE sites must be given in fractional co-ordinates
        cif_id, comp, target, cell, sites = self.df.iloc[idx]
        cif_id = str(cif_id)

        if self.use_cache:
            cache_path = os.path.join(self.cachedir, cif_id + ".pkl")

        if self.use_cache and os.path.exists(cache_path):
            with open(cache_path, "rb") as f:
                try:
                    pkl_data = pickle.load(f)
                except EOFError:
                    raise EOFError(f"Check {f} for issue")
            atom_fea = pkl_data[0]
            nbr_fea = pkl_data[1]
            self_fea_idx = pkl_data[2]
            nbr_fea_idx = pkl_data[3]

        else:
            cell, elems, coords = parse_cgcnn(cell, sites)
            # NOTE getting the primitive structure before constructing the
            # graph significantly harms the performance of this model.
            crystal = Structure(
                lattice=cell, species=elems, coords=coords, to_unit_cell=True
            )

            # atom features
            atom_fea = [atom.specie.symbol for atom in crystal]

            # neighbours
            all_nbrs = crystal.get_all_neighbors(self.radius, include_index=True)
            all_nbrs = [sorted(nbrs, key=lambda x: x[1]) for nbrs in all_nbrs]
            self_fea_idx, nbr_fea_idx, nbr_fea = [], [], []

            for i, nbr in enumerate(all_nbrs):
                # NOTE due to using a geometric learning library we do not
                # need to set a maximum number of neighbours but do so in
                # order to replicate the original code.
                if len(nbr) < self.max_num_nbr:
                    nbr_fea_idx.extend(list(map(lambda x: x[2], nbr)))
                    nbr_fea.extend(list(map(lambda x: x[1], nbr)))
                else:
                    nbr_fea_idx.extend(
                        list(map(lambda x: x[2], nbr[: self.max_num_nbr]))
                    )
                    nbr_fea.extend(list(map(lambda x: x[1], nbr[: self.max_num_nbr])))

                if len(nbr) == 0:
                    raise ValueError(
                        f"Isolated atom found in {cif_id} ({comp}) - "
                        "increase maximum radius or remove structure"
                    )
                self_fea_idx.extend([i] * min(len(nbr), self.max_num_nbr))

            nbr_fea = np.array(nbr_fea)

            if self.use_cache:
                with open(cache_path, "wb") as f:
                    pickle.dump((atom_fea, nbr_fea, self_fea_idx, nbr_fea_idx), f)

        nbr_fea = self.gdf.expand(nbr_fea)
        atom_fea = np.vstack([self.ari.get_fea(atom) for atom in atom_fea])

        atom_fea = torch.Tensor(atom_fea)
        nbr_fea = torch.Tensor(nbr_fea)
        self_fea_idx = torch.LongTensor(self_fea_idx)
        nbr_fea_idx = torch.LongTensor(nbr_fea_idx)

        if self.task == "regression":
            target = torch.Tensor([float(target)])
        elif self.task == "classification":
            target = torch.LongTensor([target])

        return (atom_fea, nbr_fea, self_fea_idx, nbr_fea_idx), target, comp, cif_id