Example 1
 def test_from_string(self):
     sp = DummySpecie.from_string("X")
     self.assertEqual(sp.oxi_state, 0)
     sp = DummySpecie.from_string("X2+")
     self.assertEqual(sp.oxi_state, 2)
     sp = DummySpecie.from_string("X2+spin=5")
     self.assertEqual(sp.oxi_state, 2)
     self.assertEqual(sp.spin, 5)
Example 2
 def test_get_el_sp(self):
     self.assertEqual(get_el_sp("Fe2+"), Specie("Fe", 2))
     self.assertEqual(get_el_sp("3"), Element.Li)
     self.assertEqual(get_el_sp("3.0"), Element.Li)
     self.assertEqual(get_el_sp("U"), Element.U)
     self.assertEqual(get_el_sp("X2+"), DummySpecie("X", 2))
     self.assertEqual(get_el_sp("Mn3+"), Specie("Mn", 3))
Example 3
    def from_dict(cls, d, lattice=None):
        """
        Create PeriodicSite from dict representation.

        Args:
            d (dict): dict representation of PeriodicSite
            lattice: Optional lattice to override lattice specified in d.
                Useful for ensuring all sites in a structure share the same
                lattice.

        Returns:
            PeriodicSite
        """
        atoms_n_occu = {}
        for sp_occu in d["species"]:
            if "oxidation_state" in sp_occu and Element.is_valid_symbol(
                    sp_occu["element"]):
                sp = Specie.from_dict(sp_occu)
            elif "oxidation_state" in sp_occu:
                sp = DummySpecie.from_dict(sp_occu)
            else:
                sp = Element(sp_occu["element"])
            atoms_n_occu[sp] = sp_occu["occu"]
        props = d.get("properties", None)
        lattice = lattice if lattice else Lattice.from_dict(d["lattice"])
        return cls(atoms_n_occu, d["abc"], lattice, properties=props)
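For orientation, a minimal round-trip sketch of the method above (not from the source; the pymatgen import paths below are assumptions and may differ between versions), showing the optional lattice override:

# Hedged sketch: serialize a PeriodicSite and rebuild it, optionally overriding the lattice.
from pymatgen.core.lattice import Lattice
from pymatgen.core.sites import PeriodicSite

lattice = Lattice.cubic(4.2)
site = PeriodicSite("Fe2+", [0.5, 0.5, 0.5], lattice)
d = site.as_dict()

same_site = PeriodicSite.from_dict(d)                             # lattice read from d
shared = PeriodicSite.from_dict(d, lattice=Lattice.cubic(4.3))    # override, e.g. to share one lattice
print(same_site == site, shared.lattice.a)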
Example 4
    def from_dict(cls, d, lattice=None):
        """
        Create PeriodicSite from dict representation.

        Args:
            d (dict): dict representation of PeriodicSite
            lattice: Optional lattice to override lattice specified in d.
                Useful for ensuring all sites in a structure share the same
                lattice.

        Returns:
            PeriodicSite
        """
        species = {}
        for sp_occu in d["species"]:
            if "oxidation_state" in sp_occu and Element.is_valid_symbol(
                    sp_occu["element"]):
                sp = Specie.from_dict(sp_occu)
            elif "oxidation_state" in sp_occu:
                sp = DummySpecie.from_dict(sp_occu)
            else:
                sp = Element(sp_occu["element"])
            species[sp] = sp_occu["occu"]
        props = d.get("properties", None)
        if props is not None:
            for key in props.keys():
                props[key] = json.loads(json.dumps(props[key], cls=MontyEncoder), cls=MontyDecoder)
        lattice = lattice if lattice else Lattice.from_dict(d["lattice"])
        return cls(species, d["abc"], lattice, properties=props)
Example 5
    def from_dict(cls, d, lattice=None):
        """
        Create PeriodicSite from dict representation.

        Args:
            d (dict): dict representation of PeriodicSite
            lattice: Optional lattice to override lattice specified in d.
                Useful for ensuring all sites in a structure share the same
                lattice.

        Returns:
            PeriodicSite
        """
        atoms_n_occu = {}
        for sp_occu in d["species"]:
            if "oxidation_state" in sp_occu and Element.is_valid_symbol(
                    sp_occu["element"]):
                sp = Specie.from_dict(sp_occu)
            elif "oxidation_state" in sp_occu:
                sp = DummySpecie.from_dict(sp_occu)
            else:
                sp = Element(sp_occu["element"])
            atoms_n_occu[sp] = sp_occu["occu"]
        props = d.get("properties", None)
        lattice = lattice if lattice else Lattice.from_dict(d["lattice"])
        return cls(atoms_n_occu, d["abc"], lattice, properties=props)
Example 6
def add_dummy_atoms(mol, ring_atom_ids):
    data = np.array([mol[i].coords for i in ring_atom_ids])
    centre = np.average(data, axis=0)

    # fit the atomic positions to a plane using least squares method, taken
    # from: https://gist.github.com/amroamroamro/1db8d69b4b65e8bc66a6#file-curve_fitting-py-L27
    A = np.c_[data[:, 0], data[:, 1], np.ones(data.shape[0])]
    C, _, _, _ = scipy.linalg.lstsq(A, data[:, 2])

    # calculate the normal unit vector
    normal = np.array(np.cross([1, 0, C[0]], [0, 1, C[1]]))
    unit = normal / scipy.linalg.norm(normal)

    # add the dummy atoms above and below the ring
    mol.append(DummySpecie('X-Bq'), centre + unit)
    mol.append(DummySpecie('X-Bq'), centre - unit)
    return mol
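The least-squares fit above solves z = a*x + b*y + c, so the in-plane directions are (1, 0, a) and (0, 1, b) and their cross product gives the plane normal. A NumPy-only sketch of that step with made-up points (not a real ring):

# Sketch of the plane-fit and normal computation used above, on made-up coordinates.
import numpy as np
import scipy.linalg

data = np.array([[1.0, 0.0, 0.5], [0.0, 1.0, 1.0], [-1.0, 0.0, -0.5], [0.0, -1.0, 0.1]])
A = np.c_[data[:, 0], data[:, 1], np.ones(len(data))]
(a, b, c), _, _, _ = scipy.linalg.lstsq(A, data[:, 2])   # best-fit plane z = a*x + b*y + c

normal = np.cross([1, 0, a], [0, 1, b])                  # equals (-a, -b, 1)
unit = normal / scipy.linalg.norm(normal)
print(unit)                                              # unit normal above/below the plane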
Example 7
    def transform_entries(self, entries, terminal_compositions):
        """
        Method to transform all entries to the composition coordinate in the
        terminal compositions. If the entry does not fall within the space
        defined by the terminal compositions, they are excluded. For example,
        Li3PO4 is mapped into a Li2O:1.5, P2O5:0.5 composition. The terminal
        compositions are represented by DummySpecies.

        Args:
            entries:
                Sequence of all input entries
            terminal_compositions:
                Terminal compositions of phase space.

        Returns:
            Sequence of TransformedPDEntries falling within the phase space.
        """
        new_entries = []
        if self.normalize_terminals:
            fractional_comp = [
                c.get_fractional_composition() for c in terminal_compositions
            ]
        else:
            fractional_comp = terminal_compositions

        # Map terminal compositions to unique dummy species.
        sp_mapping = collections.OrderedDict()
        for i, comp in enumerate(fractional_comp):
            sp_mapping[comp] = DummySpecie("X" + chr(102 + i))

        for entry in entries:
            try:
                rxn = Reaction(fractional_comp, [entry.composition])
                rxn.normalize_to(entry.composition)
                # We only allow reactions that have positive amounts of
                # reactants.
                if all([
                        rxn.get_coeff(comp) <= CompoundPhaseDiagram.amount_tol
                        for comp in fractional_comp
                ]):
                    newcomp = {
                        sp_mapping[comp]: -rxn.get_coeff(comp)
                        for comp in fractional_comp
                    }
                    newcomp = {
                        k: v
                        for k, v in newcomp.items()
                        if v > CompoundPhaseDiagram.amount_tol
                    }
                    transformed_entry = \
                        TransformedPDEntry(Composition(newcomp), entry)
                    new_entries.append(transformed_entry)
            except ReactionError:
                # If the reaction can't be balanced, the entry does not fall
                # into the phase space. We ignore them.
                pass
        return new_entries, sp_mapping
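As a concrete check of the Li3PO4 example in the docstring, a hedged sketch of the reaction-based mapping onto the terminal compositions (the import paths are assumptions and may differ between pymatgen versions):

# Sketch: Li3PO4 expressed in the Li2O/P2O5 terminal basis, as the docstring describes.
from pymatgen.core.composition import Composition
from pymatgen.analysis.reaction_calculator import Reaction

terminals = [Composition("Li2O"), Composition("P2O5")]
rxn = Reaction(terminals, [Composition("Li3PO4")])
rxn.normalize_to(Composition("Li3PO4"))
# Reactant coefficients are negative, so negate them to get the coordinates.
print({c.reduced_formula: -rxn.get_coeff(c) for c in terminals})   # {'Li2O': 1.5, 'P2O5': 0.5}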
Example 8
 def get_inter_sites_after_symmetry(struct_with_voronoi_vects):
     spa = SpacegroupAnalyzer(struct_with_voronoi_vects, symprec=0.001, angle_tolerance=1)
     stru = spa.get_symmetrized_structure()
     eqs = stru.equivalent_sites
     re_inters = []
     for i in eqs:
         if i[0].specie == DummySpecie():
             re_inters.append(list(i[0].frac_coords))
     re_inters.sort()
     return re_inters
Example 9
 def from_dict(cls, d):
     """
     Create Site from dict representation
     """
     atoms_n_occu = {}
     for sp_occu in d["species"]:
         if "oxidation_state" in sp_occu and Element.is_valid_symbol(
                 sp_occu["element"]):
             sp = Specie.from_dict(sp_occu)
         elif "oxidation_state" in sp_occu:
             sp = DummySpecie.from_dict(sp_occu)
         else:
             sp = Element(sp_occu["element"])
         atoms_n_occu[sp] = sp_occu["occu"]
     props = d.get("properties", None)
     return cls(atoms_n_occu, d["xyz"], properties=props)
Example 10
 def from_dict(cls, d):
     """
     Create Site from dict representation
     """
     atoms_n_occu = {}
     for sp_occu in d["species"]:
         if "oxidation_state" in sp_occu and Element.is_valid_symbol(
                 sp_occu["element"]):
             sp = Specie.from_dict(sp_occu)
         elif "oxidation_state" in sp_occu:
             sp = DummySpecie.from_dict(sp_occu)
         else:
             sp = Element(sp_occu["element"])
         atoms_n_occu[sp] = sp_occu["occu"]
     props = d.get("properties", None)
     return cls(atoms_n_occu, d["xyz"], properties=props)
Example 11
 def from_dict(cls, d: dict):
     """
     Create Site from dict representation
     """
     atoms_n_occu = {}
     for sp_occu in d["species"]:
         if "oxidation_state" in sp_occu and Element.is_valid_symbol(
                 sp_occu["element"]):
             sp = Specie.from_dict(sp_occu)
         elif "oxidation_state" in sp_occu:
             sp = DummySpecie.from_dict(sp_occu)
         else:
             sp = Element(sp_occu["element"])
         atoms_n_occu[sp] = sp_occu["occu"]
     props = d.get("properties", None)
     if props is not None:
         for key in props.keys():
             props[key] = json.loads(json.dumps(props[key], cls=MontyEncoder), cls=MontyDecoder)
     return cls(atoms_n_occu, d["xyz"], properties=props)
Example 12
    def __init__(
        self,
        base_structure: Structure,
        index: int,
        num_types: int,
        mapping_color_species: list[SpeciesLike] = None,
        composition_constraints=None,
        base_site_constraints=None,
        color_exchange=True,
        remove_superperiodic=True,
        remove_incomplete=True,
        verbose=True,
    ):
        self.base_structure = base_structure
        self.index = index
        self.num_types = num_types
        self.composition_constraints = composition_constraints

        # settings
        self.color_exchange = color_exchange
        self.remove_superperiodic = remove_superperiodic
        self.remove_incomplete = remove_incomplete

        list_reduced_HNF, rotations, translations = generate_symmetry_distinct_superlattices(
            index, base_structure, return_symops=True
        )
        self.list_reduced_HNF = list_reduced_HNF
        self.rotations = rotations
        self.translations = translations

        # self.site_constraints[i] is a list of allowed species at the i-th site in supercell
        self.site_constraints = None
        if base_site_constraints:
            assert len(base_site_constraints) == self.num_sites_base
            self.site_constraints = convert_site_constraints(base_site_constraints, self.index)

        if mapping_color_species and len(mapping_color_species) != self.num_types:
            raise ValueError("mapping_color_species must have num_types species.")
        if mapping_color_species is None:
            mapping_color_species = [DummySpecie(str(i)) for i in range(1, self.num_types + 1)]
        self.mapping_color_species = mapping_color_species

        self.verbose = verbose
Example 13
def get_lattice(kind):
    if kind == "hcp":
        latt = Lattice(
            np.array([[1, 0, 0], [0.5, np.sqrt(3) / 2, 0],
                      [0, 0, 2 * np.sqrt(6) / 3]]))
        coords = [[0, 0, 0], [1 / 3, 1 / 3, 0.5]]
    else:
        coords = [[0, 0, 0]]

    if kind == "sc":
        latt = Lattice(np.eye(3))
    elif kind == "fcc":
        latt = Lattice(np.array([[0, 1, 1], [1, 0, 1], [1, 1, 0]]))
    elif kind == "bcc":
        latt = Lattice(np.array([[-1, 1, 1], [1, -1, 1], [1, 1, -1]]))
    elif kind == "hex":
        latt = Lattice.hexagonal(1, 2 * np.sqrt(6) / 3)
    elif kind == "tet":
        latt = Lattice(np.diag([1, 1, 1.2]))

    struct = Structure(latt, [DummySpecie("X")] * len(coords), coords)
    return struct
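A brief usage sketch of the helper above; the DummySpecie placeholder keeps the reference structures element-agnostic:

# Sketch: build reference cells carrying only DummySpecie sites.
fcc = get_lattice("fcc")
hcp = get_lattice("hcp")
print(len(fcc), fcc.lattice.abc)   # 1 site on the primitive fcc lattice
print(len(hcp), hcp.lattice.abc)   # 2 sites for the hcp basis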
Example 14
def num_equivalent_clusters(structure: Structure,
                            inserted_atom_coords: Optional[list],
                            removed_atom_indices: Optional[list],
                            symprec: float = SYMMETRY_TOLERANCE,
                            angle_tolerance: float = ANGLE_TOL
                            ) -> Tuple[int, str]:
    """Calculate number of equivalent clusters in the structure.

    Args:
        structure (Structure):
            Supercell is assumed to be big enough.
        inserted_atom_coords (list):
        removed_atom_indices (list):
            Needs to begin from 0.
        symprec (float):
        angle_tolerance (float):
            Angle tolerance in degree used for identifying the space group.

    Returns:
        Tuple of (num_equivalent_clusters (int), point_group (str))
    """
    inserted_atom_coords = inserted_atom_coords or []
    removed_atom_indices = removed_atom_indices or []

    sga = SpacegroupAnalyzer(structure, symprec, angle_tolerance)
    num_symmop = len(sga.get_symmetry_operations())

    structure_with_cluster = structure.copy()
    for i in inserted_atom_coords:
        structure_with_cluster.append(DummySpecie(), i)
    structure_with_cluster.remove_sites(removed_atom_indices)

    sga_with_cluster = \
        SpacegroupAnalyzer(structure_with_cluster, symprec, angle_tolerance)
    sym_dataset = sga_with_cluster.get_symmetry_dataset()
    point_group = sym_dataset["pointgroup"]

    return int(num_symmop / num_symmetry_operation(point_group)), point_group
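The return value is the ratio of the host's symmetry-operation count to the order of the cluster's point group; e.g. 48 host operations over a cluster point group of order 6 gives 8 equivalent clusters. A hedged usage sketch (the cell below is hypothetical, the pymatgen import path may differ between versions, and the module's own helpers and tolerance constants are assumed to be in scope):

# Sketch: count equivalent positions of one hypothetical inserted interstitial.
from pymatgen.core import Lattice, Structure

host = Structure(Lattice.cubic(4.0), ["Na", "Cl"],
                 [[0.0, 0.0, 0.0], [0.5, 0.5, 0.5]])
n, point_group = num_equivalent_clusters(host,
                                         inserted_atom_coords=[[0.25, 0.25, 0.25]],
                                         removed_atom_indices=None)
print(n, point_group)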
Example 15
 def struct_with_voronoi_verts(structure):
     struct = structure.copy()
     lat = struct.lattice.matrix
     sites_cart = struct.cart_coords
     s = []
     for x in sites_cart:
         for i in range(-2, 4):
             for j in range(-2, 4):
                 for k in range(-2, 4):
                     s.append(x + i * lat[0] + j * lat[1] + k * lat[2])
     vor = Voronoi(s)
     verts = vor.vertices
     inters = []
     for i in verts:
         fracs = struct.lattice.get_fractional_coords(i)
         inbox = True
         for j in fracs:
             if j < 0 or j >= 1.0:
                 inbox = False
                 break
         if inbox:
             inters.append(list(fracs))
             struct.append(DummySpecie(), fracs)
     return struct, inters
Example 16
 def test_sort(self):
     r = sorted([Element.Fe, DummySpecie("X")])
     self.assertEqual(r, [DummySpecie("X"), Element.Fe])
     self.assertTrue(DummySpecie("X", 3) < DummySpecie("X", 4))
Example 17
    def _gen_input_file(self):
        """
        Generate the necessary struct_enum.in file for enumlib. See enumlib
        documentation for details.
        """
        coord_format = "{:.6f} {:.6f} {:.6f}"
        # Using symmetry finder, get the symmetrically distinct sites.
        fitter = SpacegroupAnalyzer(self.structure, self.symm_prec)
        symmetrized_structure = fitter.get_symmetrized_structure()
        logger.debug("Spacegroup {} ({}) with {} distinct sites".format(
            fitter.get_space_group_symbol(), fitter.get_space_group_number(),
            len(symmetrized_structure.equivalent_sites)))
        """
        Enumlib doesn't work when the number of species gets too large. To
        simplify matters, we generate the input file only with disordered sites
        and exclude the ordered sites from the enumeration. The fact that
        different disordered sites with the exact same species may belong to
        different equivalent sites is dealt with by having determined the
        spacegroup earlier and labelling the species differently.
        """

        # index_species and index_amounts store mappings between the indices
        # used in the enum input file, and the actual species and amounts.
        index_species = []
        index_amounts = []

        # Stores the ordered sites, which are not enumerated.
        ordered_sites = []
        disordered_sites = []
        coord_str = []
        for sites in symmetrized_structure.equivalent_sites:
            if sites[0].is_ordered:
                ordered_sites.append(sites)
            else:
                sp_label = []
                species = {k: v for k, v in sites[0].species_and_occu.items()}
                if sum(species.values()) < 1 - EnumlibAdaptor.amount_tol:
                    # Let us first add a dummy element for every single
                    # site whose total occupancies don't sum to 1.
                    species[DummySpecie("X")] = 1 - sum(species.values())
                for sp in species.keys():
                    if sp not in index_species:
                        index_species.append(sp)
                        sp_label.append(len(index_species) - 1)
                        index_amounts.append(species[sp] * len(sites))
                    else:
                        ind = index_species.index(sp)
                        sp_label.append(ind)
                        index_amounts[ind] += species[sp] * len(sites)
                sp_label = "/".join(["{}".format(i) for i in sorted(sp_label)])
                for site in sites:
                    coord_str.append("{} {}".format(
                        coord_format.format(*site.coords), sp_label))
                disordered_sites.append(sites)

        def get_sg_info(ss):
            finder = SpacegroupAnalyzer(Structure.from_sites(ss),
                                        self.symm_prec)
            return finder.get_space_group_number()

        curr_sites = list(itertools.chain.from_iterable(disordered_sites))
        min_sgnum = get_sg_info(curr_sites)
        logger.debug("Disordered sites have sgnum %d" % min_sgnum)
        # It could be that some of the ordered sites have a lower symmetry than
        # the disordered sites. So we consider the lowest symmetry sites as
        # disordered in our enumeration.
        self.ordered_sites = []
        to_add = []

        if self.check_ordered_symmetry:
            for sites in ordered_sites:
                temp_sites = list(curr_sites) + sites
                sgnum = get_sg_info(temp_sites)
                if sgnum < min_sgnum:
                    logger.debug("Adding {} to sites to be ordered. "
                                 "New sgnum {}".format(sites, sgnum))
                    to_add = sites
                    min_sgnum = sgnum

        for sites in ordered_sites:
            if sites == to_add:
                index_species.append(sites[0].specie)
                index_amounts.append(len(sites))
                sp_label = len(index_species) - 1
                logger.debug(
                    "Lowest symmetry {} sites are included in enum.".format(
                        sites[0].specie))
                for site in sites:
                    coord_str.append("{} {}".format(
                        coord_format.format(*site.coords), sp_label))
                disordered_sites.append(sites)
            else:
                self.ordered_sites.extend(sites)

        self.index_species = index_species

        lattice = self.structure.lattice

        output = [self.structure.formula, "bulk"]
        for vec in lattice.matrix:
            output.append(coord_format.format(*vec))
        output.append("{}".format(len(index_species)))
        output.append("{}".format(len(coord_str)))
        output.extend(coord_str)

        output.append("{} {}".format(self.min_cell_size, self.max_cell_size))
        output.append(str(self.enum_precision_parameter))
        output.append("partial")

        ndisordered = sum([len(s) for s in disordered_sites])

        base = int(ndisordered * reduce(lcm, [
            f.limit_denominator(ndisordered * self.max_cell_size).denominator
            for f in map(fractions.Fraction, index_amounts)
        ]))
        # base = ndisordered #10 ** int(math.ceil(math.log10(ndisordered)))
        # To get a reasonable number of structures, we fix concentrations to the
        # range expected in the original structure.
        total_amounts = sum(index_amounts)
        for amt in index_amounts:
            conc = amt / total_amounts
            if abs(conc * base - round(conc * base)) < 1e-5:
                output.append("{} {} {}".format(int(round(conc * base)),
                                                int(round(conc * base)), base))
            else:
                min_conc = int(math.floor(conc * base))
                output.append("{} {} {}".format(min_conc - 1, min_conc + 1,
                                                base))
        output.append("")
        logger.debug("Generated input file:\n{}".format("\n".join(output)))
        with open("struct_enum.in", "w") as f:
            f.write("\n".join(output))
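The `base` computed above is the smallest site count for which every species amount becomes (nearly) a whole number of atoms; each concentration is then written as a min/max/base triple. A standalone arithmetic sketch of that step with made-up amounts:

# Sketch of the base/concentration arithmetic written into struct_enum.in.
import fractions
import math
from functools import reduce
from math import gcd

def lcm(a, b):
    return a * b // gcd(a, b)

index_amounts = [0.5, 1.5, 2.0]   # made-up per-species site amounts
ndisordered, max_cell_size = 4, 2

base = int(ndisordered * reduce(lcm, [
    f.limit_denominator(ndisordered * max_cell_size).denominator
    for f in map(fractions.Fraction, index_amounts)
]))
total = sum(index_amounts)
for amt in index_amounts:
    conc = amt / total
    if abs(conc * base - round(conc * base)) < 1e-5:
        print(int(round(conc * base)), int(round(conc * base)), base)   # exact count
    else:
        min_conc = int(math.floor(conc * base))
        print(min_conc - 1, min_conc + 1, base)                         # small range around it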
Example 18
 def test_get_composition(self):
     comp = self.transformed_entry.composition
     expected_comp = Composition({DummySpecie('Xa'): 1,
                                  DummySpecie('Xb'): 1})
     self.assertEqual(comp, expected_comp, "Wrong composition!")
Example 19
def get_interstitial_sites(structure, dis_bar=2.0, num=2):
    """
    Get interstitial positions (IntrPoints) for a specific structure.
    Method:
    1st, collect all atoms in a 3x3x3 supercell.
    2nd, get all Voronoi cell corners by Voronoi analysis.
    3rd, take all the atoms and the Voronoi cell corners (DummySpecie atoms) in the central unit cell as a new unit cell to make a crystal.
    4th, keep nonequivalent DummySpecie atoms by space group symmetry analysis.
    5th, if the distances among some DummySpecie atoms are less than 'criter', keep just one of them.
    6th, screen the final DummySpecie atoms so that each one has different neighbours.

    Args:
        structure: the structure under consideration for obtaining the interstitial positions.
        dis_bar: distance criterion passed to screen_polys_dist when screening the candidate sites.
        num: passed to screen_polys when selecting the final candidates.
    """
    criter = 2.0
    struct = structure.copy()
    struct_orig = structure.copy()
    lat = struct.lattice.matrix

    def struct_with_voronoi_verts(structure):
        struct = structure.copy()
        sites_cart = struct.cart_coords
        s = []
        for x in sites_cart:
            for i in range(-2, 4):
                for j in range(-2, 4):
                    for k in range(-2, 4):
                        s.append(x + i * lat[0] + j * lat[1] + k * lat[2])
        vor = Voronoi(s)
        verts = vor.vertices
        inters = []
        for i in verts:
            fracs = struct.lattice.get_fractional_coords(i)
            inbox = True
            for j in fracs:
                if j < 0 or j >= 1.0:
                    inbox = False
                    break
            if inbox:
                inters.append(list(fracs))
                struct.append(DummySpecie(), fracs)
        return struct, inters

    def get_inter_sites_after_symmetry(struct_with_voronoi_vects):
        spa = SpacegroupAnalyzer(struct_with_voronoi_vects, symprec=0.001, angle_tolerance=1)
        stru = spa.get_symmetrized_structure()
        eqs = stru.equivalent_sites
        re_inters = []
        for i in eqs:
            if i[0].specie == DummySpecie():
                re_inters.append(list(i[0].frac_coords))
        re_inters.sort()
        return re_inters

    struct_withvv, inters = struct_with_voronoi_verts(struct)
    try:
        inters_sym = get_inter_sites_after_symmetry(struct_withvv)
    except Exception:
        inters_sym = inters
    polys_all = [polyhedral_largest(i,struct_orig) for i in inters_sym]
    polys_cut = [polyhedral_cut(i) for i in polys_all if i]
    polys = [i for i in polys_cut if i]
    OK = False
    while not OK:
        polys_dis = screen_polys_dist(polys,struct_orig,dis_bar)
        if polys == polys_dis:
            OK = True
        else:
            polys = polys_dis
    inter_sites = [i['frac_coord'] for i in screen_polys(polys_dis, num)]
    for i in inter_sites:
        struct_orig.append(DummySpecie(), i)
    struct_orig.to('poscar', 'POSCAR')
    return inter_sites
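A hedged usage sketch of the routine above (hypothetical rock-salt input; the pymatgen import paths may differ between versions, and the helpers it calls, such as polyhedral_largest and screen_polys, are assumed to be importable from the same module). Note that it also writes a POSCAR with the candidate sites appended as DummySpecie atoms:

# Sketch: candidate interstitial sites for a hypothetical rock-salt cell.
from pymatgen.core import Lattice, Structure

nacl = Structure.from_spacegroup("Fm-3m", Lattice.cubic(5.64),
                                 ["Na", "Cl"], [[0, 0, 0], [0.5, 0.5, 0.5]])
sites = get_interstitial_sites(nacl, dis_bar=2.0, num=2)
print(sites)   # fractional coordinates of the retained interstitial positions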
Example 20
 def test_safe_from_composition(self):
     c = Composition({"Xa": 1, "Fe": 1})
     self.assertEqual(DummySpecie.safe_from_composition(c).symbol, "Xb")
     self.assertEqual(DummySpecie.safe_from_composition(c, 1).symbol, "Xb")
Example 21
    def _get_structure(self, data, primitive):
        """
        Generate structure from part of the cif.
        """
        def parse_symbol(sym):
            # Common representations for elements/water in cif files
            # TODO: fix inconsistent handling of water
            special = {"D": "D", "Hw": "H", "Ow": "O", "Wat": "O",
                       "wat": "O", "OH": "", "OH2": ""}
            m = re.findall(r"w?[A-Z][a-z]*", sym)
            if m and m != "?":
                if sym in special:
                    v = special[sym]
                else:
                    v = special.get(m[0], m[0])
                if len(m) > 1 or (m[0] in special):
                    warnings.warn("{} parsed as {}".format(sym, v))
                return v

        lattice = self.get_lattice(data)
        self.symmetry_operations = self.get_symops(data)
        oxi_states = self.parse_oxi_states(data)

        coord_to_species = OrderedDict()

        def get_matching_coord(coord):
            keys = list(coord_to_species.keys())
            coords = np.array(keys)
            for op in self.symmetry_operations:
                c = op.operate(coord)
                inds = find_in_coord_list_pbc(coords, c, atol=self._site_tolerance)
                # can't use `if inds` here, because np.array([0]) evaluates
                # to False
                if len(inds):
                    return keys[inds[0]]
            return False

        ############################################################
        """
        This part of the code deals with handling formats of data as found in
        CIF files extracted from the Springer Materials/Pauling File
        databases, and that are different from standard ICSD formats.
        """

        # Check to see if "_atom_site_type_symbol" exists, as some test CIFs do
        # not contain this key.
        if "_atom_site_type_symbol" in data.data.keys():

            # Keep a track of which data row needs to be removed.
            # Example of a row: Nb,Zr '0.8Nb + 0.2Zr' .2a .m-3m 0 0 0 1 14
            # 'rhombic dodecahedron, Nb<sub>14</sub>'
            # Without this code, the above row in a structure would be parsed
            # as an ordered site with only Nb (since
            # CifParser would try to parse the first two characters of the
            # label "Nb,Zr") and occupancy=1.
            # However, this site is meant to be a disordered site with 0.8 of
            # Nb and 0.2 of Zr.
            idxs_to_remove = []

            for idx, el_row in enumerate(data["_atom_site_label"]):

                # CIF files from the Springer Materials/Pauling File have
                # switched the label and symbol. Thus, in the
                # above shown example row, '0.8Nb + 0.2Zr' is the symbol.
                # Below, we split the strings on ' + ' to
                # check if the length (or number of elements) in the label and
                # symbol are equal.
                if len(data["_atom_site_type_symbol"][idx].split(' + ')) > \
                        len(data["_atom_site_label"][idx].split(' + ')):

                    # Dictionary to hold extracted elements and occupancies
                    els_occu = {}

                    # parse symbol to get element names and occupancy and store
                    # in "els_occu"
                    symbol_str = data["_atom_site_type_symbol"][idx]
                    symbol_str_lst = symbol_str.split(' + ')
                    for elocc_idx in range(len(symbol_str_lst)):
                        # Remove any bracketed items in the string
                        symbol_str_lst[elocc_idx] = re.sub(r'\([0-9]*\)', '',
                            symbol_str_lst[elocc_idx].strip())

                        # Extract element name and its occupancy from the
                        # string, and store it as a
                        # key-value pair in "els_occ".
                        els_occu[str(re.findall(r'\D+', symbol_str_lst[
                            elocc_idx].strip())[1]).replace('<sup>', '')] = \
                            float('0' + re.findall(r'\.?\d+', symbol_str_lst[
                                elocc_idx].strip())[1])

                    x = str2float(data["_atom_site_fract_x"][idx])
                    y = str2float(data["_atom_site_fract_y"][idx])
                    z = str2float(data["_atom_site_fract_z"][idx])

                    coord = (x, y, z)
                    # Add each partially occupied element on the site coordinate
                    for et in els_occu:
                        match = get_matching_coord(coord)
                        if not match:
                            coord_to_species[coord] = Composition(
                                {parse_symbol(et): els_occu[parse_symbol(et)]})
                        else:
                            coord_to_species[match] += {
                                parse_symbol(et): els_occu[parse_symbol(et)]}
                    idxs_to_remove.append(idx)

            # Remove the original row by iterating over all keys in the CIF
            # data looking for lists, which indicates
            # multiple data items, one for each row, and remove items from the
            # list that corresponds to the removed row,
            # so that it's not processed by the rest of this function (which
            # would result in an error).
            for cif_key in data.data:
                if type(data.data[cif_key]) == list:
                    for id in sorted(idxs_to_remove, reverse=True):
                        del data.data[cif_key][id]

        ############################################################
        for i in range(len(data["_atom_site_label"])):
            try:
                # If site type symbol exists, use it. Otherwise, we use the
                # label.
                symbol = parse_symbol(data["_atom_site_type_symbol"][i])
            except KeyError:
                symbol = parse_symbol(data["_atom_site_label"][i])
            if not symbol:
                continue

            if oxi_states is not None:
                o_s = oxi_states.get(symbol, 0)
                # use _atom_site_type_symbol if possible for oxidation state
                if "_atom_site_type_symbol" in data.data.keys():
                    oxi_symbol = data["_atom_site_type_symbol"][i]
                    o_s = oxi_states.get(oxi_symbol, o_s)
                try:
                    el = Specie(symbol, o_s)
                except Exception:
                    el = DummySpecie(symbol, o_s)
            else:
                el = get_el_sp(symbol)

            x = str2float(data["_atom_site_fract_x"][i])
            y = str2float(data["_atom_site_fract_y"][i])
            z = str2float(data["_atom_site_fract_z"][i])

            try:
                occu = str2float(data["_atom_site_occupancy"][i])
            except (KeyError, ValueError):
                occu = 1

            if occu > 0:
                coord = (x, y, z)
                match = get_matching_coord(coord)
                if not match:
                    coord_to_species[coord] = Composition({el: occu})
                else:
                    coord_to_species[match] += {el: occu}

        sum_occu = [sum(c.values()) for c in coord_to_species.values()]
        if any([o > 1 for o in sum_occu]):
            warnings.warn("Some occupancies (%s) sum to > 1! If they are within "
                          "the tolerance, they will be rescaled." % str(sum_occu))

        allspecies = []
        allcoords = []

        if coord_to_species.items():
            for species, group in groupby(
                    sorted(list(coord_to_species.items()), key=lambda x: x[1]),
                    key=lambda x: x[1]):
                tmp_coords = [site[0] for site in group]

                coords = self._unique_coords(tmp_coords)

                allcoords.extend(coords)
                allspecies.extend(len(coords) * [species])

            # rescale occupancies if necessary
            for i, species in enumerate(allspecies):
                totaloccu = sum(species.values())
                if 1 < totaloccu <= self._occupancy_tolerance:
                    allspecies[i] = species / totaloccu

        if allspecies and len(allspecies) == len(allcoords):
            struct = Structure(lattice, allspecies, allcoords)
            struct = struct.get_sorted_structure()

            if primitive:
                struct = struct.get_primitive_structure()
                struct = struct.get_reduced_structure()
            return struct
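The Springer/Pauling File handling above extracts element names and occupancies from symbols like '0.8Nb + 0.2Zr' with two regex passes; a standalone sketch of just that parsing step, using the same regexes:

# Sketch: parse a Springer-style "_atom_site_type_symbol" such as "0.8Nb + 0.2Zr".
import re

symbol_str = "0.8Nb + 0.2Zr"
els_occu = {}
for part in symbol_str.split(" + "):
    part = re.sub(r"\([0-9]*\)", "", part.strip())                  # drop bracketed uncertainties
    element = re.findall(r"\D+", part)[1].replace("<sup>", "")      # second non-digit run, e.g. "Nb"
    occu = float("0" + re.findall(r"\.?\d+", part)[1])              # second number run, ".8" -> 0.8
    els_occu[element] = occu
print(els_occu)   # {'Nb': 0.8, 'Zr': 0.2}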
Example 22
 def test_safe_from_composition(self):
     c = Composition({'Xa': 1, 'Fe': 1})
     self.assertEqual(DummySpecie.safe_from_composition(c).symbol, 'Xb')
     self.assertEqual(DummySpecie.safe_from_composition(c, 1).symbol, 'Xb')
Example 23
 def test_from_string(self):
     sp = DummySpecie.from_string("X")
     self.assertEqual(sp.oxi_state, 0)
     sp = DummySpecie.from_string("X2+")
     self.assertEqual(sp.oxi_state, 2)
Example 24
    def _gen_input_file(self):
        """
        Generate the necessary struct_enum.in file for enumlib. See enumlib
        documentation for details.
        """
        coord_format = "{:.6f} {:.6f} {:.6f}"
        # Using symmetry finder, get the symmetrically distinct sites.
        fitter = SpacegroupAnalyzer(self.structure, self.symm_prec)
        symmetrized_structure = fitter.get_symmetrized_structure()
        logger.debug("Spacegroup {} ({}) with {} distinct sites".format(
            fitter.get_space_group_symbol(), fitter.get_space_group_number(),
            len(symmetrized_structure.equivalent_sites)))
        """
        Enumlib doesn't work when the number of species gets too large. To
        simplify matters, we generate the input file only with disordered sites
        and exclude the ordered sites from the enumeration. The fact that
        different disordered sites with the exact same species may belong to
        different equivalent sites is dealt with by having determined the
        spacegroup earlier and labelling the species differently.
        """

        # index_species and index_amounts store mappings between the indices
        # used in the enum input file, and the actual species and amounts.
        index_species = []
        index_amounts = []

        # Stores the ordered sites, which are not enumerated.
        ordered_sites = []
        disordered_sites = []
        coord_str = []
        for sites in symmetrized_structure.equivalent_sites:
            if sites[0].is_ordered:
                ordered_sites.append(sites)
            else:
                sp_label = []
                species = {k: v for k, v in sites[0].species.items()}
                if sum(species.values()) < 1 - EnumlibAdaptor.amount_tol:
                    # Let us first add a dummy element for every single
                    # site whose total occupancies don't sum to 1.
                    species[DummySpecie("X")] = 1 - sum(species.values())
                for sp in species.keys():
                    if sp not in index_species:
                        index_species.append(sp)
                        sp_label.append(len(index_species) - 1)
                        index_amounts.append(species[sp] * len(sites))
                    else:
                        ind = index_species.index(sp)
                        sp_label.append(ind)
                        index_amounts[ind] += species[sp] * len(sites)
                sp_label = "/".join(["{}".format(i) for i in sorted(sp_label)])
                for site in sites:
                    coord_str.append("{} {}".format(
                        coord_format.format(*site.coords), sp_label))
                disordered_sites.append(sites)

        def get_sg_info(ss):
            finder = SpacegroupAnalyzer(Structure.from_sites(ss),
                                        self.symm_prec)
            return finder.get_space_group_number()

        target_sgnum = get_sg_info(symmetrized_structure.sites)
        curr_sites = list(itertools.chain.from_iterable(disordered_sites))
        sgnum = get_sg_info(curr_sites)
        ordered_sites = sorted(ordered_sites, key=lambda sites: len(sites))
        logger.debug("Disordered sites have sg # %d" % sgnum)
        self.ordered_sites = []

        # progressively add ordered sites to our disordered sites
        # until we match the symmetry of our input structure
        if self.check_ordered_symmetry:
            while sgnum != target_sgnum and len(ordered_sites) > 0:
                sites = ordered_sites.pop(0)
                temp_sites = list(curr_sites) + sites
                new_sgnum = get_sg_info(temp_sites)
                if sgnum != new_sgnum:
                    logger.debug("Adding %s in enum. New sg # %d" %
                                 (sites[0].specie, new_sgnum))
                    index_species.append(sites[0].specie)
                    index_amounts.append(len(sites))
                    sp_label = len(index_species) - 1
                    for site in sites:
                        coord_str.append("{} {}".format(
                            coord_format.format(*site.coords), sp_label))
                    disordered_sites.append(sites)
                    curr_sites = temp_sites
                    sgnum = new_sgnum
                else:
                    self.ordered_sites.extend(sites)

        for sites in ordered_sites:
            self.ordered_sites.extend(sites)

        self.index_species = index_species

        lattice = self.structure.lattice

        output = [self.structure.formula, "bulk"]
        for vec in lattice.matrix:
            output.append(coord_format.format(*vec))
        output.append("%d" % len(index_species))
        output.append("%d" % len(coord_str))
        output.extend(coord_str)

        output.append("{} {}".format(self.min_cell_size, self.max_cell_size))
        output.append(str(self.enum_precision_parameter))
        output.append("full")

        ndisordered = sum([len(s) for s in disordered_sites])
        base = int(ndisordered * lcm(*[
            f.limit_denominator(ndisordered * self.max_cell_size).denominator
            for f in map(fractions.Fraction, index_amounts)
        ]))

        # This multiplicative factor of 10 is to prevent having too small bases
        # which can lead to rounding issues in the next step.
        # An old bug was that a base was set to 8, with a conc of 0.4:0.6. That
        # resulted in a range that overlaps and a conc of 0.5 satisfying this
        # enumeration. See Cu7Te5.cif test file.
        base *= 10

        # base = ndisordered #10 ** int(math.ceil(math.log10(ndisordered)))
        # To get a reasonable number of structures, we fix concentrations to the
        # range expected in the original structure.
        total_amounts = sum(index_amounts)
        for amt in index_amounts:
            conc = amt / total_amounts

            if abs(conc * base - round(conc * base)) < 1e-5:
                output.append("{} {} {}".format(int(round(conc * base)),
                                                int(round(conc * base)), base))
            else:
                min_conc = int(math.floor(conc * base))
                output.append("{} {} {}".format(min_conc - 1, min_conc + 1,
                                                base))
        output.append("")
        logger.debug("Generated input file:\n{}".format("\n".join(output)))
        with open("struct_enum.in", "w") as f:
            f.write("\n".join(output))
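The Cu7Te5 comment above can be made concrete: with base 8 and concentrations 0.4/0.6, the fallback branch emits the overlapping ranges 2..4 and 3..5 out of 8, which a 0.5/0.5 structure satisfies even though it is far from the target; multiplying the base by 10 pins the target concentrations exactly. A small arithmetic sketch with those numbers:

# Sketch: why the base is multiplied by 10 (cf. the Cu7Te5 comment above).
import math

def conc_line(conc, base):
    if abs(conc * base - round(conc * base)) < 1e-5:
        return int(round(conc * base)), int(round(conc * base)), base
    min_conc = int(math.floor(conc * base))
    return min_conc - 1, min_conc + 1, base

for base in (8, 80):
    print(base, conc_line(0.4, base), conc_line(0.6, base))
# base 8  -> (2, 4, 8) and (3, 5, 8): both admit 4 of 8, i.e. an unwanted 0.5/0.5 structure
# base 80 -> (32, 32, 80) and (48, 48, 80): the 0.4/0.6 concentrations are pinned exactly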
Example 25
    def structure_graph(self,
                        include_critical_points=("bond", "ring", "cage")):
        """
        A StructureGraph object describing bonding information
        in the crystal.
        Args:
            include_critical_points: add a DummySpecie for the critical
                points themselves; a list drawn from "nucleus", "bond",
                "ring", "cage", or set to None to disable.

        Returns: a StructureGraph
        """

        structure = self.structure.copy()

        point_idx_to_struct_idx = {}
        if include_critical_points:
            # atoms themselves don't have field information
            # so set to 0
            for prop in ("ellipticity", "laplacian", "field"):
                structure.add_site_property(prop, [0] * len(structure))
            for idx, node in self.nodes.items():
                cp = self.critical_points[node["unique_idx"]]
                if cp.type.value in include_critical_points:
                    specie = DummySpecie("X{}cp".format(cp.type.value[0]),
                                         oxidation_state=None)
                    structure.append(
                        specie,
                        node["frac_coords"],
                        properties={
                            "ellipticity": cp.ellipticity,
                            "laplacian": cp.laplacian,
                            "field": cp.field,
                        },
                    )
                    point_idx_to_struct_idx[idx] = len(structure) - 1

        edge_weight = "bond_length"
        edge_weight_units = "Å"

        sg = StructureGraph.with_empty_graph(
            structure,
            name="bonds",
            edge_weight_name=edge_weight,
            edge_weight_units=edge_weight_units,
        )

        edges = self.edges.copy()
        idx_to_delete = []
        # check for duplicate bonds
        for idx, edge in edges.items():
            unique_idx = self.nodes[idx]["unique_idx"]
            # only check edges representing bonds, not rings
            if self.critical_points[unique_idx].type == CriticalPointType.bond:
                if idx not in idx_to_delete:
                    for idx2, edge2 in edges.items():
                        if idx != idx2 and edge == edge2:
                            idx_to_delete.append(idx2)
                            warnings.warn(
                                "Duplicate edge detected, try re-running "
                                "critic2 with custom parameters to fix this. "
                                "Mostly harmless unless user is also "
                                "interested in rings/cages.")
                            logger.debug(
                                "Duplicate edge between points {} (unique point {}) "
                                "and {} ({}).".format(
                                    idx,
                                    self.nodes[idx]["unique_idx"],
                                    idx2,
                                    self.nodes[idx2]["unique_idx"],
                                ))
        # and remove any duplicate bonds present
        for idx in idx_to_delete:
            del edges[idx]

        for idx, edge in edges.items():
            unique_idx = self.nodes[idx]["unique_idx"]
            # only add edges representing bonds, not rings
            if self.critical_points[unique_idx].type == CriticalPointType.bond:

                from_idx = edge["from_idx"]
                to_idx = edge["to_idx"]

                # have to also check bond is between nuclei if non-nuclear
                # attractors not in structure
                skip_bond = False
                if include_critical_points and "nnattr" not in include_critical_points:
                    from_type = self.critical_points[self.nodes[from_idx]
                                                     ["unique_idx"]].type
                    to_type = self.critical_points[self.nodes[to_idx]
                                                   ["unique_idx"]].type
                    skip_bond = (from_type != CriticalPointType.nucleus) or (
                        to_type != CriticalPointType.nucleus)

                if not skip_bond:
                    from_lvec = edge["from_lvec"]
                    to_lvec = edge["to_lvec"]

                    relative_lvec = np.subtract(to_lvec, from_lvec)

                    # for edge case of including nnattrs in bonding graph when other critical
                    # points also included, indices may get mixed
                    struct_from_idx = point_idx_to_struct_idx.get(
                        from_idx, from_idx)
                    struct_to_idx = point_idx_to_struct_idx.get(to_idx, to_idx)

                    weight = self.structure.get_distance(struct_from_idx,
                                                         struct_to_idx,
                                                         jimage=relative_lvec)

                    crit_point = self.critical_points[unique_idx]

                    edge_properties = {
                        "field": crit_point.field,
                        "laplacian": crit_point.laplacian,
                        "ellipticity": crit_point.ellipticity,
                        "frac_coords": self.nodes[idx]["frac_coords"],
                    }

                    sg.add_edge(
                        struct_from_idx,
                        struct_to_idx,
                        from_jimage=from_lvec,
                        to_jimage=to_lvec,
                        weight=weight,
                        edge_properties=edge_properties,
                    )

        return sg
Example 26
    def _get_structure(self, data, primitive):
        """
        Generate structure from part of the cif.
        """
        def parse_symbol(sym):
            # Common representations for elements/water in cif files
            # TODO: fix inconsistent handling of water
            special = {
                "D": "D",
                "Hw": "H",
                "Ow": "O",
                "Wat": "O",
                "wat": "O",
                "OH": "",
                "OH2": ""
            }
            m = re.findall(r"w?[A-Z][a-z]*", sym)
            if m and m != "?":
                if sym in special:
                    v = special[sym]
                else:
                    v = special.get(m[0], m[0])
                if len(m) > 1 or (m[0] in special):
                    warnings.warn("{} parsed as {}".format(sym, v))
                return v

        lattice = self.get_lattice(data)

        # if magCIF, get magnetic symmetry moments and magmoms
        # else standard CIF, and use empty magmom dict
        if self.feature_flags["magcif_incommensurate"]:
            raise NotImplementedError(
                "Incommensurate structures not currently supported.")
        elif self.feature_flags["magcif"]:
            self.symmetry_operations = self.get_magsymops(data)
            magmoms = self.parse_magmoms(data, lattice=lattice)
        else:
            self.symmetry_operations = self.get_symops(data)
            magmoms = {}

        oxi_states = self.parse_oxi_states(data)

        coord_to_species = OrderedDict()
        coord_to_magmoms = OrderedDict()

        def get_matching_coord(coord):
            keys = list(coord_to_species.keys())
            coords = np.array(keys)
            for op in self.symmetry_operations:
                c = op.operate(coord)
                inds = find_in_coord_list_pbc(coords,
                                              c,
                                              atol=self._site_tolerance)
                # can't use `if inds` here, because np.array([0]) evaluates
                # to False
                if len(inds):
                    return keys[inds[0]]
            return False

        for i in range(len(data["_atom_site_label"])):
            try:
                # If site type symbol exists, use it. Otherwise, we use the
                # label.
                symbol = parse_symbol(data["_atom_site_type_symbol"][i])
            except KeyError:
                symbol = parse_symbol(data["_atom_site_label"][i])
            if not symbol:
                continue

            if oxi_states is not None:
                o_s = oxi_states.get(symbol, 0)
                # use _atom_site_type_symbol if possible for oxidation state
                if "_atom_site_type_symbol" in data.data.keys():
                    oxi_symbol = data["_atom_site_type_symbol"][i]
                    o_s = oxi_states.get(oxi_symbol, o_s)
                try:
                    el = Specie(symbol, o_s)
                except Exception:
                    el = DummySpecie(symbol, o_s)
            else:
                el = get_el_sp(symbol)

            x = str2float(data["_atom_site_fract_x"][i])
            y = str2float(data["_atom_site_fract_y"][i])
            z = str2float(data["_atom_site_fract_z"][i])
            magmom = magmoms.get(data["_atom_site_label"][i], Magmom(0))

            try:
                occu = str2float(data["_atom_site_occupancy"][i])
            except (KeyError, ValueError):
                occu = 1

            if occu > 0:
                coord = (x, y, z)
                match = get_matching_coord(coord)
                if not match:
                    coord_to_species[coord] = Composition({el: occu})
                    coord_to_magmoms[coord] = magmom
                else:
                    coord_to_species[match] += {el: occu}
                    coord_to_magmoms[
                        match] = None  # disordered magnetic not currently supported

        sum_occu = [sum(c.values()) for c in coord_to_species.values()]
        if any([o > 1 for o in sum_occu]):
            warnings.warn(
                "Some occupancies (%s) sum to > 1! If they are within "
                "the tolerance, they will be rescaled." % str(sum_occu))

        allspecies = []
        allcoords = []
        allmagmoms = []

        # check to see if magCIF file is disordered
        if self.feature_flags["magcif"]:
            for k, v in coord_to_magmoms.items():
                if v is None:
                    # Proposed solution to this is to instead store magnetic moments
                    # as Specie 'spin' property, instead of site property, but this
                    # introduces ambiguities for the end user (such as unintended use
                    # of `spin`, and Specie will have a fictitious oxidation state).
                    raise NotImplementedError(
                        'Disordered magnetic structures not currently supported.'
                    )

        if coord_to_species.items():
            for species, group in groupby(sorted(list(
                    coord_to_species.items()),
                                                 key=lambda x: x[1]),
                                          key=lambda x: x[1]):
                tmp_coords = [site[0] for site in group]
                tmp_magmom = [
                    coord_to_magmoms[tmp_coord] for tmp_coord in tmp_coords
                ]

                if self.feature_flags["magcif"]:
                    coords, magmoms = self._unique_coords(
                        tmp_coords, tmp_magmom)
                else:
                    coords, magmoms = self._unique_coords(tmp_coords)

                allcoords.extend(coords)
                allspecies.extend(len(coords) * [species])
                allmagmoms.extend(magmoms)

            # rescale occupancies if necessary
            for i, species in enumerate(allspecies):
                totaloccu = sum(species.values())
                if 1 < totaloccu <= self._occupancy_tolerance:
                    allspecies[i] = species / totaloccu

        if allspecies and len(allspecies) == len(allcoords) and len(
                allspecies) == len(allmagmoms):

            if self.feature_flags["magcif"]:
                struct = Structure(lattice,
                                   allspecies,
                                   allcoords,
                                   site_properties={"magmom": allmagmoms})
            else:
                struct = Structure(lattice, allspecies, allcoords)

            struct = struct.get_sorted_structure()

            if primitive:
                struct = struct.get_primitive_structure()
                struct = struct.get_reduced_structure()
            return struct
Example 27
 def test_cached(self):
     sp1 = DummySpecie("X", 2)
     sp2 = DummySpecie("X", 2)
Example 28
    def compute_composition_vector(self, composition_space):
        """
        Returns the composition vector of the organism, as a numpy array.

        Args:
            composition_space: the CompositionSpace of the search.
        """

        if composition_space.objective_function == 'epa':
            return None
        elif composition_space.objective_function == 'pd':
            # make a CompoundPhaseDiagram from the composition space endpoints
            pdentries = []
            for endpoint in composition_space.endpoints:
                pdentries.append(PDEntry(endpoint, -10))
            compound_pd = CompoundPhaseDiagram(pdentries,
                                               composition_space.endpoints)

            # transform the organism's composition
            transformed_entry = compound_pd.transform_entries(
                [PDEntry(self.composition, 10)], composition_space.endpoints)

            # get the transformed species and amounts
            if len(transformed_entry[0]) == 0:
                return None
            transformed_list = str(transformed_entry[0][0]).split()
            del transformed_list[0]
            popped = ''
            while popped != 'with':
                popped = transformed_list.pop()

            # separate the dummy species symbols from the amounts
            symbols = []
            amounts = []
            for entry in transformed_list:
                split_entry = entry.split('0+')
                symbols.append(split_entry[0])
                amounts.append(float(split_entry[1]))

            # make a dictionary mapping dummy species to amounts
            dummy_species_amounts = {}
            for i in range(len(symbols)):
                dummy_species_amounts[DummySpecie(symbol=symbols[i])] = \
                    amounts[i]

            # make Composition object with dummy species, get decomposition
            dummy_comp = Composition(dummy_species_amounts)
            decomp = compound_pd.get_decomposition(dummy_comp)

            # get amounts of the decomposition in terms of the (untransformed)
            # composition space endpoints
            formatted_decomp = {}
            for key in decomp:
                key_dict = key.as_dict()
                comp = Composition(key_dict['entry']['composition'])
                formatted_decomp[comp] = decomp[key]

            # make the composition vector
            composition_vector = []
            # because the random organism creator shuffles the endpoints
            composition_space.endpoints.sort()
            for endpoint in composition_space.endpoints:
                if endpoint in formatted_decomp:
                    composition_vector.append(formatted_decomp[endpoint])
                else:
                    composition_vector.append(0.0)
            return np.array(composition_vector)
Example 29
    def _add_dummy_species(structure, order_parameters):
        """
        :param structure: ordered Structure
        :param order_parameters: list of MagOrderParameterConstraints
        :return: A structure decorated with disordered
        DummySpecies on which to perform the enumeration.
        Note that the DummySpecies are superimposed onto
        the original sites, to make it easier to
        retrieve the original site after enumeration is
        performed (this approach is preferred over a simple
        mapping since multiple species may have the same
        DummySpecie, depending on the constraints specified).
        This approach can also preserve site properties even after
        enumeration.
        """

        dummy_struct = structure.copy()

        def generate_dummy_specie():
            """
            Generator which returns DummySpecie symbols Mma, Mmb, etc.
            """
            subscript_length = 1
            while True:
                for subscript in product(ascii_lowercase,
                                         repeat=subscript_length):
                    yield "Mm" + "".join(subscript)
                subscript_length += 1

        dummy_species_gen = generate_dummy_specie()

        # one dummy species for each order parameter constraint
        dummy_species_symbols = [
            next(dummy_species_gen) for i in range(len(order_parameters))
        ]
        dummy_species = [{
            DummySpecie(symbol, properties={'spin': Spin.up}):
            constraint.order_parameter,
            DummySpecie(symbol, properties={'spin': Spin.down}):
            1 - constraint.order_parameter
        } for symbol, constraint in zip(dummy_species_symbols,
                                        order_parameters)]

        sites_to_add = []

        for idx, site in enumerate(dummy_struct):
            satisfies_constraints = [
                c.satisfies_constraint(site) for c in order_parameters
            ]
            if satisfies_constraints.count(True) > 1:
                # site should either not satisfy any constraints, or satisfy
                # one constraint
                raise ValueError(
                    "Order parameter constraints conflict for site: {}, {}".
                    format(str(site.specie), site.properties))
            elif any(satisfies_constraints):
                dummy_specie_idx = satisfies_constraints.index(True)
                dummy_struct.append(dummy_species[dummy_specie_idx],
                                    site.coords, site.lattice)

        return dummy_struct
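The generate_dummy_specie generator above simply walks Mma, Mmb, ..., Mmz, Mmaa, ...; a standalone sketch of that naming scheme and of the spin-up/spin-down occupancy pair built for each constraint (the DummySpecie construction itself is left out, since the properties/spin call signature varies between pymatgen versions):

# Sketch: dummy-symbol naming scheme plus the occupancy split for one order parameter.
from itertools import islice, product
from string import ascii_lowercase

def generate_dummy_symbols():
    length = 1
    while True:
        for subscript in product(ascii_lowercase, repeat=length):
            yield "Mm" + "".join(subscript)
        length += 1

print(list(islice(generate_dummy_symbols(), 5)))   # ['Mma', 'Mmb', 'Mmc', 'Mmd', 'Mme']

order_parameter = 0.75
# each constrained site becomes a disordered mixture of a spin-up and a spin-down dummy
print({"Mma(spin up)": order_parameter, "Mma(spin down)": 1 - order_parameter})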
Example 30
 def test_safe_from_composition(self):
     c = Composition({'Xa': 1, 'Fe': 1})
     self.assertEqual(DummySpecie.safe_from_composition(c).symbol, 'Xb')
     self.assertEqual(DummySpecie.safe_from_composition(c, 1).symbol, 'Xb')
Example 31
 def test_cached(self):
     sp1 = DummySpecie("X", 2)
     sp2 = DummySpecie("X", 2)
     self.assertEqual(id(sp1), id(sp2))
Example 32
 def test_eq(self):
     self.assertFalse(DummySpecie("Xg") == DummySpecie("Xh"))
     self.assertFalse(DummySpecie("Xg") == DummySpecie("Xg", 3))
     self.assertTrue(DummySpecie("Xg", 3) == DummySpecie("Xg", 3))
Example 33
 def test_pickle(self):
     el1 = DummySpecie("X", 3)
     o = pickle.dumps(el1)
     self.assertEqual(el1, pickle.loads(o))
Example 34
 def test_smart_element_or_specie(self):
     self.assertEqual(smart_element_or_specie("Fe2+"), Specie("Fe", 2))
     self.assertEqual(smart_element_or_specie("3"), Element("Li"))
     self.assertEqual(smart_element_or_specie("U"), Element("U"))
     self.assertEqual(smart_element_or_specie("X2+"), DummySpecie("X", 2))
     self.assertEqual(smart_element_or_specie("Mn3+"), Specie("Mn", 3))
Example 35
 def test_sort(self):
     r = sorted([Element('Fe'), DummySpecie("X")])
     self.assertEqual(r, [DummySpecie("X"), Element('Fe')])