Example #1
    def test_get_mapping(self):
        sm = StructureMatcher(ltol=0.2, stol=0.3, angle_tol=5,
                              primitive_cell=False, scale=True,
                              attempt_supercell=False,
                              allow_subset=True)
        l = Lattice.orthorhombic(1, 2, 3)
        s1 = Structure(l, ['Ag', 'Si', 'Si'],
                       [[.7, .4, .5], [0, 0, 0.1], [0, 0, 0.2]])
        s1.make_supercell([2, 1, 1])
        s2 = Structure(l, ['Si', 'Si', 'Ag'],
                       [[0, 0.1, -0.95], [0, 0.1, 0], [-.7, .5, .375]])

        shuffle = [2, 0, 1, 3, 5, 4]
        s1 = Structure.from_sites([s1[i] for i in shuffle])
        # test the mapping
        s2.make_supercell([2, 1, 1])
        # equal sizes
        for i, x in enumerate(sm.get_mapping(s1, s2)):
            self.assertEqual(s1[x].species,
                             s2[i].species)

        del s1[0]
        # s1 is subset of s2
        for i, x in enumerate(sm.get_mapping(s2, s1)):
            self.assertEqual(s1[i].species,
                             s2[x].species)
        # s2 is smaller than s1
        del s2[0]
        del s2[1]
        self.assertRaises(ValueError, sm.get_mapping, s2, s1)
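For reference, a minimal, hedged sketch of the mapping semantics exercised above (structures and tolerances are illustrative, not taken from the test suite; import paths assume a reasonably recent pymatgen): get_mapping(superset, subset) returns, for each site of the subset, the index of the matching site in the superset.

from pymatgen.analysis.structure_matcher import StructureMatcher
from pymatgen.core import Lattice, Structure

sm = StructureMatcher(primitive_cell=False, scale=True,
                      attempt_supercell=False, allow_subset=True)
big = Structure(Lattice.cubic(4), ['Si', 'Si', 'Ag'],
                [[0, 0, 0], [0.5, 0.5, 0.5], [0.25, 0.25, 0.25]])
small = Structure.from_sites([big[2]])   # a one-site subset of `big`
print(sm.get_mapping(big, small))        # expected to print [2]: big[2] matches small[0]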
Example #2
    def setUp(self):
        self.structure = Structure.from_spacegroup("Fm-3m", Lattice.cubic(3.5),
                                                   ["Ni"], [[0, 0, 0]])
        lattice = Lattice.cubic(3.010)
        frac_coords = [[0.00000, 0.00000, 0.00000],
                       [0.00000, 0.50000, 0.50000],
                       [0.50000, 0.00000, 0.50000],
                       [0.50000, 0.50000, 0.00000],
                       [0.50000, 0.00000, 0.00000],
                       [0.50000, 0.50000, 0.50000],
                       [0.00000, 0.00000, 0.50000],
                       [0.00000, 0.50000, 0.00000]]
        species = ['Mg', 'Mg', 'Mg', 'Mg', 'O', 'O', 'O', 'O']
        self.MgO = Structure(lattice, species, frac_coords)

        slabs = generate_all_slabs(self.structure, max_index=2,
                                   min_slab_size=6.0, min_vacuum_size=15.0,
                                   max_normal_search=1, center_slab=True)
        self.slab_dict = {''.join([str(i) for i in slab.miller_index]):
                              slab for slab in slabs}
        self.asf_211 = AdsorbateSiteFinder(self.slab_dict["211"])
        self.asf_100 = AdsorbateSiteFinder(self.slab_dict["100"])
        self.asf_111 = AdsorbateSiteFinder(self.slab_dict["111"])
        self.asf_110 = AdsorbateSiteFinder(self.slab_dict["110"])
        self.asf_struct = AdsorbateSiteFinder(
            Structure.from_sites(self.slab_dict["111"].sites))
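As a hedged usage sketch of the fixture above (not part of the test), a finder built from a slab is typically queried with find_adsorption_sites; the values below are illustrative and the import paths assume a recent pymatgen.

from pymatgen.analysis.adsorption import AdsorbateSiteFinder
from pymatgen.core import Lattice, Structure
from pymatgen.core.surface import generate_all_slabs

bulk = Structure.from_spacegroup("Fm-3m", Lattice.cubic(3.5), ["Ni"], [[0, 0, 0]])
slabs = generate_all_slabs(bulk, max_index=1, min_slab_size=6.0, min_vacuum_size=15.0)
slab_111 = next(s for s in slabs if s.miller_index == (1, 1, 1))
asf = AdsorbateSiteFinder(slab_111)
sites = asf.find_adsorption_sites()   # dict keyed by 'ontop', 'bridge', 'hollow', 'all'
print({k: len(v) for k, v in sites.items()})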
Example #3
 def setUp(self):
     c1 = [[0.5] * 3, [0.9] * 3]
     c2 = [[0.5] * 3, [0.9, 0.1, 0.1]]
     s1 = Structure(Lattice.cubic(5), ['Si', 'Si'], c1)
     s2 = Structure(Lattice.cubic(5), ['Si', 'Si'], c2)
     structs = []
     for s in s1.interpolate(s2, 3, pbc=True):
         structs.append(Structure.from_sites(s.sites, to_unit_cell=True))
     self.structures = structs
     self.vis = MITNEBSet(self.structures)
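A hedged sketch of how a set like self.vis is normally consumed: MITNEBSet.write_input lays the images out as numbered image folders. The output directory name below is illustrative.

from pymatgen.core import Lattice, Structure
from pymatgen.io.vasp.sets import MITNEBSet

s1 = Structure(Lattice.cubic(5), ['Si', 'Si'], [[0.5] * 3, [0.9] * 3])
s2 = Structure(Lattice.cubic(5), ['Si', 'Si'], [[0.5] * 3, [0.9, 0.1, 0.1]])
images = [Structure.from_sites(s.sites, to_unit_cell=True)
          for s in s1.interpolate(s2, 3, pbc=True)]
vis = MITNEBSet(images)
vis.write_input("neb_run")   # hypothetical output directory: 00/, 01/, ... POSCARs plus shared inputs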
Example #4
    def test_write_inputs(self):
        c1 = [[0.5] * 3, [0.9] * 3]
        c2 = [[0.5] * 3, [0.9, 0.1, 0.1]]
        s1 = Structure(Lattice.cubic(5), ['Si', 'Si'], c1)
        s2 = Structure(Lattice.cubic(5), ['Si', 'Si'], c2)
        structs = []
        for s in s1.interpolate(s2, 3, pbc=True):
            structs.append(Structure.from_sites(s.sites, to_unit_cell=True))

        fc = self.vis._process_structures(structs)[2].frac_coords
        self.assertTrue(
            np.allclose(fc, [[0.5] * 3, [0.9, 1.033333, 1.0333333]]))
Example #5
    def get_extended_surface_mesh(self, repeat=(5, 5, 1)):
        """
        Gets an extended surface mesh to use for adsorption
        site finding by constructing a supercell of the surface sites.

        Args:
            repeat (3-tuple): repeat for getting extended surface mesh
        """
        surf_str = Structure.from_sites(self.surface_sites)
        surf_str.make_supercell(repeat)
        return surf_str
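A hedged usage sketch of the method above, assuming the from_bulk_and_miller convenience constructor of AdsorbateSiteFinder and a recent pymatgen; the repeat value is illustrative.

from pymatgen.analysis.adsorption import AdsorbateSiteFinder
from pymatgen.core import Lattice, Structure

bulk = Structure.from_spacegroup("Fm-3m", Lattice.cubic(3.5), ["Ni"], [[0, 0, 0]])
asf = AdsorbateSiteFinder.from_bulk_and_miller(bulk, (1, 1, 1))
mesh = asf.get_extended_surface_mesh(repeat=(3, 3, 1))   # 3x3 in-plane supercell of the surface sites
print(len(mesh), "sites in the extended surface mesh")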
Example #6
    def get_extended_surface_mesh(self, repeat=(5, 5, 1)):
        """
        Gets an extended surface mesh to use for adsorption
        site finding by constructing a supercell of the surface sites.

        Args:
            repeat (3-tuple): repeat for getting extended surface mesh
        """
        surf_str = Structure.from_sites(self.surface_sites)
        surf_str.make_supercell(repeat)
        return surf_str
Example #7
 def test_structure_is_base(self):
     """
     Check whether MigrationGraph can be constructed with structure_is_base = True
     """
     struct = Structure.from_file(f"{dir_path}/full_path_files/MnO2_full_Li.vasp")
     base_struct = Structure.from_sites([s for s in struct if str(s.specie) != "Li"])
     mg = MigrationGraph(
         structure=base_struct,
         migration_graph=self.fpm.migration_graph,
         structure_is_base=True,
     )
     self.assertTrue(mg.structure == self.fpm.structure)
Example #8
    def test_write_inputs(self):
        c1 = [[0.5] * 3, [0.9] * 3]
        c2 = [[0.5] * 3, [0.9, 0.1, 0.1]]
        s1 = Structure(Lattice.cubic(5), ['Si', 'Si'], c1)
        s2 = Structure(Lattice.cubic(5), ['Si', 'Si'], c2)
        structs = []
        for s in s1.interpolate(s2, 3, pbc=True):
            structs.append(Structure.from_sites(s.sites, to_unit_cell=True))

        fc = self.vis._process_structures(structs)[2].frac_coords
        self.assertTrue(
            np.allclose(fc, [[0.5] * 3, [0.9, 1.033333, 1.0333333]]))
Example #9
def add_vacuum_padding(slab, vacuum, hkl=(0, 0, 1)):
    """
    Add vacuum spacing to the given structure.
    Args:
        slab (Structure/Slab): structure/slab object to be padded
        vacuum (float): vacuum spacing in angstroms
        hkl (tuple/list): Miller index
    Returns:
         Structure object
    """
    min_z = np.min([fcoord[2] for fcoord in slab.frac_coords])
    slab.translate_sites(list(range(len(slab))), [0, 0, -min_z])
    a, b, c = slab.lattice.matrix
    z = [coord[2] for coord in slab.cart_coords]
    zmax = np.max(z)
    zmin = np.min(z)
    thickness = zmax - zmin
    new_c = c / np.linalg.norm(c) * (thickness + vacuum)
    new_lattice = Lattice(np.array([a, b, new_c]))
    new_sites = []
    for site in slab:
        new_sites.append(
            PeriodicSite(site.species_and_occu,
                         site.coords,
                         new_lattice,
                         properties=site.properties,
                         coords_are_cartesian=True))
    new_struct = Structure.from_sites(new_sites)
    # center the slab
    avg_z = np.average([fcoord[2] for fcoord in new_struct.frac_coords])
    new_struct.translate_sites(list(range(len(new_struct))),
                               [0, 0, 0.5 - avg_z])
    return Slab(new_struct.lattice,
                new_struct.species_and_occu,
                new_struct.frac_coords,
                hkl,
                Structure.from_sites(new_struct, to_unit_cell=True),
                shift=0,
                scale_factor=np.eye(3, dtype=int),
                site_properties=new_struct.site_properties)
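A minimal, hedged usage sketch for add_vacuum_padding as defined above; the slab is generated with pymatgen's SlabGenerator, all numerical values are illustrative, and the import paths assume a reasonably recent pymatgen while the function itself targets an older Slab API.

from pymatgen.core import Lattice, Structure
from pymatgen.core.surface import SlabGenerator

bulk = Structure(Lattice.cubic(3.5), ["Ni"], [[0, 0, 0]])
slab = SlabGenerator(bulk, (0, 0, 1), min_slab_size=6.0, min_vacuum_size=5.0).get_slab()
padded = add_vacuum_padding(slab, vacuum=15.0, hkl=(0, 0, 1))
print(padded.lattice.c, "angstrom c axis after padding")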
Example #10
def get_sym_migration_ion_sites(
    base_struct: Structure,
    inserted_struct: Structure,
    migrating_ion: str,
    symprec: float = 0.01,
    angle_tol: float = 5.0,
) -> Structure:
    """
    Take one inserted structure and map out all symmetry-equivalent copies of the migrating-ion
    sites with respect to the base structure. Each site carries over the properties of the
    corresponding site in the inserted structure (e.g. its insertion energy).

    Args:
        base_struct: the base (host) structure
        inserted_struct: the structure that contains the migrating ion
        migrating_ion: the name of the migrating species
        symprec: the symprec tolerance for the space group analysis
        angle_tol: the angle tolerance for the space group analysis

    Returns:
        Structure with only the migrating ion sites decorated with insertion energies.
    """
    wi_ = migrating_ion

    sa = SpacegroupAnalyzer(base_struct,
                            symprec=symprec,
                            angle_tolerance=angle_tol)
    # collect the migrating-ion sites from the inserted structure
    sym_migration_ion_sites = list(
        filter(
            lambda isite: isite.species_string == wi_,
            inserted_struct.sites,
        ))

    sym_migration_struct = Structure.from_sites(sym_migration_ion_sites)
    for op in sa.get_space_group_operations():
        struct_tmp = sym_migration_struct.copy()
        struct_tmp.apply_operation(symmop=op, fractional=True)
        for isite in struct_tmp.sites:
            if isite.species_string == wi_:
                sym_migration_struct.insert(
                    0,
                    wi_,
                    coords=np.mod(isite.frac_coords, 1.0),
                    properties=isite.properties,
                )

            # must clean up as you go or the number of sites explodes
            if len(sym_migration_struct) > 1:
                sym_migration_struct.merge_sites(
                    tol=SITE_MERGE_R,
                    mode="average")  # keeps removing duplicates
    return sym_migration_struct
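A hedged usage sketch for the function above; the file names are placeholders and the two structures are assumed to share the same host lattice.

from pymatgen.core import Structure

base = Structure.from_file("MnO2_base.cif")        # hypothetical host structure without Li
inserted = Structure.from_file("MnO2_one_Li.cif")  # hypothetical structure with one Li inserted
li_sites = get_sym_migration_ion_sites(base, inserted, migrating_ion="Li")
print(len(li_sites), "symmetry-equivalent Li sites")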
Example #11
    def from_dict(cls, d):
        lattice = Lattice.from_dict(d["lattice"])
        sites = [PeriodicSite.from_dict(sd, lattice) for sd in d["sites"]]
        s = Structure.from_sites(sites)

        return Interface(
            lattice=lattice,
            species=s.species_and_occu, coords=s.frac_coords,
            sub_plane=d["sub_plane"], film_plane=d["film_plane"],
            sub_init_cell=d["sub_init_cell"], film_init_cell=d["film_init_cell"],
            modified_sub_structure=d["modified_sub_structure"], modified_film_structure=d["modified_film_structure"],
            strained_sub_structure=d["strained_sub_structure"], strained_film_structure=d["strained_film_structure"],
            site_properties=s.site_properties, init_inplane_shift=d["init_inplane_shift"]
        )
Example #12
    def test3(self):
        l = Lattice.cubic(100)
        s1 = PeriodicSite('Li', [99.5, 0, 1], l, coords_are_cartesian=True)
        s2 = PeriodicSite('Sn', [99.5, 1, 0], l, coords_are_cartesian=True)
        s3 = PeriodicSite('Li', [0.5, 0, 0], l, coords_are_cartesian=True)

        s = Structure.from_sites([s1, s2, s3])
        coords = np.array(s.cart_coords)[:, None, :]
        cd = CoordData(coords, s)
        gtc = GaussianTripletColumns(rrange=[1, 10], c=0.5)
        f = gtc.make(cd)
        self.assertAlmostEqual(f[0, 0], 0)
        self.assertAlmostEqual(f[1, 8], -0.41849511)
        self.assertAlmostEqual(f[2, 8], 0.83699023)
        self.assertAlmostEqual(f[3, 32], -0.83699023)
Example #13
def write_structures(structures, file_name, format):
    import tempfile
    import zipfile
    from os.path import basename, join
    import os
    format_name, Writer = format
    file_type = format_name
    if len(structures) > 1:
        result_archive = zipfile.ZipFile(join(os.getcwd(),
                                              basename(file_name) + '.zip'),
                                         mode='w')
        for name, structure in structures.items():
            sorted_sites = list(
                sorted(structure.sites.copy(),
                       key=lambda site: site.specie.symbol))
            sorted_structure = Structure.from_sites(sorted_sites)
            with tempfile.NamedTemporaryFile() as tmpfile:
                Writer(sorted_structure).write_file(tmpfile.name)
                result_archive.write(tmpfile.name,
                                     arcname='{}.{}'.format(name, file_type))
        result_archive.close()
        write_message('Archive file: {0}'.format(
            join(os.getcwd(),
                 basename(file_name) + '.zip')),
                      level=DEBUG)
    else:
        # There is only one structure; write just that one
        structure_key = list(structures.keys())[0]
        sorted_sites = list(
            sorted(structures[structure_key].sites.copy(),
                   key=lambda site: site.specie.symbol))
        sorted_structure = Structure.from_sites(sorted_sites)
        p = Writer(sorted_structure)
        fname = join(os.getcwd(), '{}.{}'.format(structure_key, file_type))
        p.write_file(fname)
        write_message('Output file: {0}'.format(fname), level=DEBUG)
Example #14
def clusters_from_structure(structure, rcut, elements):
    """
    Takes a pymatgen structure and converts it to a graph object, returning the
    set of connected clusters.

    Args:

        - structure (Structure): pymatgen structure object to set up the graph from
        - rcut (float): cut-off radius for node-node connections when forming clusters
        - elements ({str, str, ...}): set of element strings to include when setting up the graph

    Returns:

        - clusters ({Cluster}): set of clusters

    """

    symbols = set([species for species in structure.symbol_set])

    if elements.issubset(structure.symbol_set):

        all_elements = set([species for species in structure.symbol_set])
        remove_elements = [x for x in all_elements if x not in elements]

        structure.remove_species(remove_elements)
        folded_structure = Structure.from_sites(structure.sites, to_unit_cell=True)

        nodes = nodes_from_structure(folded_structure, rcut, get_halo=True)
        set_fort_nodes(nodes)

        clusters = set()

        uc_nodes = set([node for node in nodes if node.labels["Halo"] == False])

        while uc_nodes:
            node = uc_nodes.pop()
            if node.labels["Halo"] == False:
                cluster = Cluster({node})
                cluster.grow_cluster()
                uc_nodes.difference_update(cluster.nodes)
                clusters.add(cluster)
                set_cluster_periodic(cluster)

        return clusters
    else:
        raise ValueError(
            "The element set fed to 'clusters_from_structure' is not a subset of the elements in the structure"
        )
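A hedged usage sketch, assuming the surrounding package supplies the helpers used above (nodes_from_structure, Cluster, set_fort_nodes, set_cluster_periodic); the input file and cut-off are placeholders.

from pymatgen.core import Structure

struct = Structure.from_file("LiFePO4.cif")   # hypothetical input file
li_clusters = clusters_from_structure(struct, rcut=3.0, elements={"Li"})
print(len(li_clusters), "Li clusters found")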
Example #15
def makeNewPos(specs, frac_coords, new_latt, dim):
    """
    Make a new POSCAR file using input species, coordinates, and lattice.
    Used in conjunction with functions "getNewLattice","alignMono",
    "alignChains"
    
    inputs
    --------
        specs (list):       List of species associated with each coordinate
        
        frac_coords (list): List of fractional coordinates of each species,
                            relative to the new lattice
                            
        new_latt (list):    List of vectors representing the new lattice 
                            vectors of the low-dimensional material
                            
        dim (int):  Number of periodic directions in the crystal
        
    outputs
    --------
        new_struct (Structure): Structure object for low-dimensional material
                                with non-periodic directions being 
                                orthogonal to the periodic directions
    
    """

    a, b, c = magni(new_latt[0]), magni(new_latt[1]), magni(new_latt[2])
    if dim == 2:
        ang = getAngle(new_latt[0], new_latt[1]) / 2
        new_latt = [[np.cos(ang) * a, -np.sin(ang) * a, 0],
                    [np.cos(ang) * b, np.sin(ang) * b, 0], [0, 0, c]]
    elif dim == 1:
        new_latt = [[a, 0, 0], [0, b, 0], [0, 0, c]]
    new_sites = []
    for i, coord in enumerate(frac_coords):
        p = PeriodicSite(species=Element(specs[i]),
                         lattice=Lattice(new_latt),
                         coords=np.dot(coord, new_latt),
                         coords_are_cartesian=True)
        new_sites.append(p)

    new_struct = Structure.from_sites(new_sites)
    return (new_struct)
Example #16
    def __init__(self, supercell_matrix, cluster_expansion):
        """
        Args:
            supercell_matrix: array describing the supercell, e.g. [[1,0,0],[0,1,0],[0,0,1]]
            cluster_expansion: ClusterExpansion object
        """
        self.basis = cluster_expansion.basis
        self.supercell_matrix = np.array(supercell_matrix)
        self.prim_to_supercell = np.linalg.inv(self.supercell_matrix)
        self.cluster_expansion = cluster_expansion

        self.supercell = cluster_expansion.structure.copy()
        self.supercell.make_supercell(self.supercell_matrix)
        self.size = int(round(np.abs(np.linalg.det(self.supercell_matrix))))

        self.bits = get_bits(self.supercell)
        self.nbits = np.array([len(b)-1 for b in self.bits])
        self.fcoords = np.array(self.supercell.frac_coords)

        self._generate_mappings()

        if self.cluster_expansion.use_ewald:
            #lazily generate the difficult ewald parts
            self.ewald_inds = []
            ewald_sites = []
            for bits, s in zip(self.bits, self.supercell):
                inds = np.zeros(max(self.nbits) + 1) - 1
                for i, b in enumerate(bits):
                    if b == 'Vacancy':
                        #inds.append(-1)
                        continue
                    inds[i] = len(ewald_sites)
                    ewald_sites.append(PeriodicSite(b, s.frac_coords, s.lattice))
                self.ewald_inds.append(inds)
            self.ewald_inds = np.array(self.ewald_inds, dtype=int)
            self._ewald_structure = Structure.from_sites(ewald_sites)
            self._ewald_matrix = None
            self._partial_ems = None
            self._all_ewalds = None
            self._range = np.arange(len(self.nbits))
        else:
            self._all_ewalds = np.zeros((0, 0, 0), dtype=float)
            self.ewald_inds = np.zeros((0, 0), dtype=int)
Example #17
    def _calculate_energies(self):
        energies = np.zeros((len(self.structure), len(self.structure)))

        dm = self.structure.distance_matrix

        non_dup_sites = []
        blocked = []
        for i, site in enumerate(self.structure):
            if i in blocked:
                continue
            non_dup_sites.append(site)
            blocked.extend(np.where(dm[i] < 0.001)[0])

        cs = self.ce.supercell_from_structure(
            Structure.from_sites(non_dup_sites))
        sm = StructureMatcher(primitive_cell=False,
                              attempt_supercell=False,
                              allow_subset=True,
                              scale=True,
                              comparator=OrderDisorderElementComparator())

        aligned = sm.get_s2_like_s1(cs.supercell, self.structure)
        dists = aligned.lattice.get_all_distances(aligned.frac_coords,
                                                  cs.supercell.frac_coords)
        lw_mapping = np.argmin(dists, axis=-1)

        bits = cs.bits
        exp_inds = np.where(
            np.sum(aligned.distance_matrix < 0.001, axis=-1) > 1)[0]

        for i in exp_inds:
            for j in exp_inds:
                if lw_mapping[i] == lw_mapping[j]:
                    continue
                occu = np.copy(cs.nbits)
                occu[lw_mapping[i]] = bits[lw_mapping[i]].index(
                    str(aligned[i].specie))
                occu[lw_mapping[j]] = bits[lw_mapping[j]].index(
                    str(aligned[j].specie))
                energies[i][j] = np.dot(cs.corr_from_occupancy(occu),
                                        self.ecis)

        return energies
Example #18
 def setUp(self):
     self.structure = Structure.from_spacegroup("Fm-3m", Lattice.cubic(3.5),
                                                ["Ni"], [[0, 0, 0]])
     slabs = generate_all_slabs(self.structure,
                                max_index=2,
                                min_slab_size=6.0,
                                min_vacuum_size=15.0,
                                max_normal_search=1,
                                center_slab=True)
     self.slab_dict = {
         ''.join([str(i) for i in slab.miller_index]): slab
         for slab in slabs
     }
     self.asf_211 = AdsorbateSiteFinder(self.slab_dict["211"])
     self.asf_100 = AdsorbateSiteFinder(self.slab_dict["100"])
     self.asf_111 = AdsorbateSiteFinder(self.slab_dict["111"])
     self.asf_110 = AdsorbateSiteFinder(self.slab_dict["110"])
     self.asf_struct = AdsorbateSiteFinder(
         Structure.from_sites(self.slab_dict["111"].sites))
Example #19
def slab_from_file(hkl, filename):
    """
    Reads in a structure from file and returns a Slab object.
    Useful for reading in 2D/substrate structures from file.
    Args:
         hkl: miller index of the slab in the input file.
         filename: structure file in any format
                   supported by pymatgen
    Returns:
         Slab object
    """
    slab_input = Structure.from_file(filename)
    return Slab(slab_input.lattice,
                slab_input.species_and_occu,
                slab_input.frac_coords,
                hkl,
                Structure.from_sites(slab_input, to_unit_cell=True),
                shift=0,
                scale_factor=np.eye(3, dtype=int),
                site_properties=slab_input.site_properties)
Example #20
    def test2(self):
        l = Lattice.cubic(100)
        s1 = PeriodicSite('Li', [99.5, 0, 1.1], l, coords_are_cartesian=True)
        s2 = PeriodicSite('Sn', [99.5, 1, 0], l, coords_are_cartesian=True)
        s3 = PeriodicSite('Li', [0.5, 0, 0], l, coords_are_cartesian=True)
        s4 = PeriodicSite('Li', [59.5, 0, 1.1], l, coords_are_cartesian=True)
        s5 = PeriodicSite('Sn', [59.5, 1, 0], l, coords_are_cartesian=True)
        s6 = PeriodicSite('Li', [60.5, 0, 0], l, coords_are_cartesian=True)
        s = Structure.from_sites([s1, s2, s3, s4, s5, s6])
        coords = np.array(s.cart_coords)[:, None, :]
        cd = CoordData(coords, s)
        gtc = GaussianTripletColumns(rrange=np.arange(1, 1.1, 1), c=0.5)
        f = gtc.make(cd)
        self.assertAlmostEqual(f[9, 0], 0)
        self.assertAlmostEqual(f[10, 1], -0.36030969)
        self.assertAlmostEqual(f[11, 1], 0.79268132)
        self.assertAlmostEqual(f[12, 4], -0.64481206)
        self.assertAlmostEqual(f[13, 4], 1.36543144)

        self.assertAlmostEqual(np.sum(f), 0)
Example #21
def slab_from_file(structure, hkl):
    """
    Reads in a structure from file and returns a Slab object.

    Args:
         structure (str): structure file in any format supported by pymatgen
         hkl (tuple): Miller index of the slab in the input file.

    Returns:
         Slab object
    """
    slab_input = Structure.from_file(structure)
    return Slab(slab_input.lattice,
                slab_input.species_and_occu,
                slab_input.frac_coords,
                hkl,
                Structure.from_sites(slab_input, to_unit_cell=True),
                shift=0,
                scale_factor=np.eye(3, dtype=int),
                site_properties=slab_input.site_properties)
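A hedged usage sketch for slab_from_file as defined directly above (the file name and Miller index are placeholders).

slab = slab_from_file("POSCAR_2D", (0, 0, 1))   # hypothetical structure file
print(slab.miller_index, len(slab), "sites")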
Example #22
def slab_from_file(hkl, filename):
    """
    Reads in a structure from file and returns a Slab object.
    Useful for reading in 2D/substrate structures from file.
    Args:
         hkl: miller index of the slab in the input file.
         filename: structure file in any format
                   supported by pymatgen
    Returns:
         Slab object
    """
    slab_input = Structure.from_file(filename)
    return Slab(slab_input.lattice,
                slab_input.species_and_occu,
                slab_input.frac_coords,
                hkl,
                Structure.from_sites(slab_input, to_unit_cell=True),
                shift=0,
                scale_factor=np.eye(3, dtype=int),
                site_properties=slab_input.site_properties)
Example #23
def sublattice_iterations(options):
    sublattice_composition = options['lattice']
    structure = options['structure']
    sublattice_species = list(sublattice_composition.keys())
    remaining_site_collection = list(
        filter(lambda site: site.specie.symbol not in sublattice_species,
               structure.sites))

    structures = {}
    sublattice_structure_mapping = {}
    for sublattice, mole_fractions in sublattice_composition.items():
        sublattice_site_collection = list(
            filter(lambda site: site.specie.symbol == sublattice,
                   structure.sites))
        # initial_structure = structure
        sublattice_structure = Structure(
            structure.lattice,
            [site.specie for site in sublattice_site_collection],
            [site.frac_coords for site in sublattice_site_collection])
        if options['sqs']:
            sublattice_structures, decompositions, iterations = do_sqs_iterations(
                sublattice_structure,
                mole_fractions,
                options['weights'],
                iterations=options['iterations'],
                prefix=colored('{0} => '.format(sublattice), color='magenta'),
                verbosity=options['verbosity'],
                parallel=options['parallel'],
                output_structures=options['output'],
                objective=options['objective'])
            print_result(options, decompositions[0], options['verbosity'])
        elif options['dosqs']:
            main_sum_weight, anisotropy_weights = options['anisotropy']
            sublattice_structures, decompositions, iterations = do_dosqs_iterations(
                sublattice_structure,
                mole_fractions,
                options['weights'],
                main_sum_weight,
                anisotropy_weights,
                iterations=options['iterations'],
                prefix=colored('{0} => '.format(sublattice), color='magenta'),
                verbosity=options['verbosity'],
                parallel=options['parallel'],
                output_structures=options['output'])
            print_result(options, decompositions[0], options['verbosity'])
        # Merge the two sublattices
        # map sites to collections
        sublattice_structure_mapping[sublattice] = sublattice_structures
        write_message('{0} structures found on sublattice {1}'.format(
            len(sublattice_structures), sublattice),
                      level=DEBUG)

    structure_list = []
    for sl, sc in sublattice_structure_mapping.items():
        for i, s in enumerate(sc):
            structure_list.append((i, sl))
    from itertools import combinations

    strc_count = 0
    for i, combination in enumerate(
            combinations(structure_list, len(sublattice_structure_mapping))):
        subl_spec = [tup[1] for tup in combination]
        if len(set(subl_spec)) == len(subl_spec):
            collection = remaining_site_collection.copy()
            name = '{0}'.format(strc_count)
            for j, sl in combination:
                structure = sublattice_structure_mapping[sl][j]
                spec_set = set(
                    [site.specie.symbol for site in structure.sites])
                collection.extend([site for site in structure.sites])
                name += '-{1}-{0}'.format(''.join(spec_set), j)
            structures[name] = Structure.from_sites(collection)
            strc_count += 1

    return structures
Example #24
 def _make_structure(self):
     """ """
     self.structure = Structure.from_sites(self.sites)
     return
Example #25
def ewald_correction(ce, supmat, ecis):
    """
     Decompose electrostatic interactions in a supercell into 2 body
     and point cluster terms.
    """

    print("Making up all ewald interactions for supercell:", supmat)
    cs = ce.supercell_from_matrix(supmat)

    ew_str = Structure.from_sites(
        [PeriodicSite('H+', s.frac_coords, s.lattice) for s in cs.supercell])
    H = EwaldSummation(ew_str, eta=ce.eta).total_energy_matrix

    # Ewald energy E_ew = (q+r)*H*(q+r)'. The factor of 1/2 is already absorbed into H;
    # this is a crude but effective way to obtain H.
    supbits = cs.bits
    r = np.array([GetIonChg(bits[-1]) for bits in supbits])
    chg_bits = [[GetIonChg(bit) - GetIonChg(bits[-1]) for bit in bits[:-1]]
                for bits in supbits]
    H_r = np.dot(H, r)
    ewald_0 = np.dot(r, np.dot(H, r))

    ewald_clusters = []
    ewald_interactions = []
    bit_inds = get_bit_inds(cs.supercell)
    #bit_inds is 1 based

    if not ce.use_inv_r:
        eci_ew = ecis[-1]
        for i in range(len(bit_inds)):
            for j in range(len(bit_inds)):
                if i != j:
                    for k in range(len(bit_inds[i])):
                        for l in range(len(bit_inds[j])):
                            bit_a = bit_inds[i][k]
                            bit_b = bit_inds[j][l]
                            ewald_clusters.append([bit_a, bit_b])
                            ewald_interactions.append(eci_ew * chg_bits[i][k] *
                                                      chg_bits[j][l] * H[i][j])
                else:
                    for k in range(len(bit_inds[i])):
                        bit = bit_inds[i][k]
                        ewald_clusters.append([bit])
                        ewald_interactions.append(
                            eci_ew * (chg_bits[i][k]**2 * H[i][i] +
                                      H_r[i] * chg_bits[i][k] * 2))
    else:
        #When using inv_r, an independent ewald sum is generated for each specie-specie pair, and the sums are
        #considered components of corr, and also the original ewald summation.
        N_sp = sum([
            len(site.species_and_occu)
            for site in cs.cluster_expansion.structure
        ])
        N_eweci = 1 + N_sp + N_sp * (
            N_sp -
            1) // 2  #original ewald term, then species, then specie pairs.
        eci_ew = ecis[-N_eweci:]

        equiv_sites = []
        for sc, inds in cs.cluster_indices:
            if len(sc.bits) > 1:
                break
            equiv_sites.append(inds[:, 0])

        sp_list = []
        sp_id = 0
        for sublat in equiv_sites:
            sublat_sp_list = []
            for specie_id in bit_inds[sublat[0]]:
                sublat_sp_list.append(sp_id)
                sp_id += 1
            sp_list.extend([sublat_sp_list] * len(sublat))

        for i in range(len(bit_inds)):
            for j in range(len(bit_inds)):
                if i != j:
                    for k in range(len(bit_inds[i])):
                        for l in range(len(bit_inds[j])):
                            bit_a = bit_inds[i][k]
                            bit_b = bit_inds[j][l]
                            ewald_clusters.append([bit_a, bit_b])
                            id_a = sp_list[i][k]
                            id_b = sp_list[j][l]

                            id_abpair = id_a * (
                                2 * N_sp - id_a - 1
                            ) // 2 + id_b - id_a - 1  # Serial id of a,b pair in eci_ew list.
                            ewald_interactions.append(
                                eci_ew[1 + N_sp + id_abpair] *
                                (chg_bits[i][k] * chg_bits[j][l] * H[i][j]))
                else:  #Point terms
                    for k in range(len(bit_inds[i])):
                        bit = bit_inds[i][k]
                        ewald_clusters.append([bit])
                        id_bit = 1 + sp_list[i][k]
                        point_eci = eci_ew[id_bit] * chg_bits[i][k]**2 * H[i][i]
                        for m in range(len(bit_inds)):
                            id_a = id_bit
                            id_b = sp_list[m][-1]  #id of the reference specie
                            id_abpair = id_a * (2 * N_sp - id_a -
                                                1) // 2 + id_b - id_a - 1
                            #Calculate H_r term with weight!
                            point_eci += 2 * chg_bits[i][k] * H[i][m] * r[
                                m] * eci_ew[1 + N_sp + id_abpair]
                        ewald_interactions.append(point_eci)

    return ewald_clusters, ewald_interactions
Example #26
 def film(self):
     """
     Return the film (Structure) of the interface.
     """
     return Structure.from_sites(self.film_sites)
Example #27
 def substrate(self):
     """
     Return the substrate (Structure) of the interface.
     """
     return Structure.from_sites(self.substrate_sites)
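A hedged sketch of how the two properties above (film from Example #26 and substrate from this one) are typically consumed; `interface` stands for an instance of the Interface class they belong to and is not constructed here.

def summarize_interface(interface):
    """Return the number of sites in the film and substrate halves of an Interface."""
    film = interface.film              # Structure built from interface.film_sites
    substrate = interface.substrate    # Structure built from interface.substrate_sites
    return len(film), len(substrate)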
Example #28
def get_aligned_lattices(slab_sub, slab_2d, max_area=200,
                         max_mismatch=0.05,
                         max_angle_diff=1, r1r2_tol=0.2, best_match='area'):
    """
    Given the two slab structures and the alignment parameters, return
    slab structures with lattices that are aligned with respect to each
    other.
    """

    # get the matching substrate and 2D material lattices
    uv_substrate, uv_mat2d = get_matching_lattices(
                                                slab_sub, slab_2d,
                                                max_area=max_area,
                                                max_mismatch=max_mismatch,
                                                max_angle_diff=max_angle_diff,
                                                r1r2_tol=r1r2_tol,
                                                best_match=best_match)
    if not uv_substrate and not uv_mat2d:
        print("no matching u and v, trying adjusting the parameters")
        return None, None
        #sys.exit()

    substrate = Structure.from_sites(slab_sub)
    mat2d = Structure.from_sites(slab_2d)

    # map the initial slabs to the newly found matching lattices
    substrate_latt = Lattice(np.array(
            [
                uv_substrate[0][:],
                uv_substrate[1][:],
                substrate.lattice.matrix[2, :]
            ]))
    # to avoid numerical issues with find_mapping
    mat2d_fake_c = mat2d.lattice.matrix[2, :] / np.linalg.norm(
            mat2d.lattice.matrix[2, :]) * 5.0
    mat2d_latt = Lattice(np.array(
            [
                uv_mat2d[0][:],
                uv_mat2d[1][:],
                mat2d_fake_c
            ]))

    mat2d_latt_fake = Lattice(np.array(
            [
                mat2d.lattice.matrix[0, :],
                mat2d.lattice.matrix[1, :],
                mat2d_fake_c
            ]))

    # Supercell matrix for primitive lattices -> match lattices
    _, __, scell_1 = substrate.lattice.find_mapping(substrate_latt,
                                                      ltol=0.05,
                                                      atol=max_angle_diff)
    _, __, scell_2 = mat2d_latt_fake.find_mapping(mat2d_latt,
                                                    ltol=0.05,
                                                    atol=max_angle_diff)
    scell_1[2] = np.array([0, 0, 1])
    substrate.make_supercell(scell_1)
    scell_2[2] = np.array([0, 0, 1])
    mat2d.make_supercell(scell_2)

    # modify the substrate lattice
    lmap = Lattice(np.array(
            [
                substrate.lattice.matrix[0, :],
                substrate.lattice.matrix[1, :],
                mat2d.lattice.matrix[2, :]
            ]))
    mat2d.lattice = lmap

    return substrate, mat2d
Example #29
def _get_mc_structs(SCLst,
                    CE,
                    ecis,
                    merge_sublats=None,
                    TLst=[500, 1500, 10000],
                    compaxis=None,
                    outdir='vasp_run'):
    '''This function checks the previous calculation directories when called. If there are no previous
       calculations, it generates the initial pool. If there are previous calculations, it adds newly
       sampled structures based on the structure selection rule.
       For CE sampling using MC, use three sets of temperatures; merge this with the LocalOrdering code.
       ce_file: directory of the CE MSON data file
       outdir: directory to write outputs
       SCLst: a list containing enumerated SC and RO pairs.
       TLst: temperature list to do MC enumeration on
       useX: a list of compounds whose components we may want to calculate.
       compaxis: a list of compound names. If specified, the program will calculate the composition as a
                 compound ratio, but this is usually not used since we don't think it is necessary or
                 applicable in complex disordered rocksalt systems.
    '''
    print('#### MC Initialization ####')
    Prim = CE.structure
    print('SM type:', CE.sm_type)
    calculated_structures = {}

    if os.path.isdir(outdir):
        print("Checking previously enumerated structures.")
        for root, dirs, files in os.walk(outdir):
            if _was_generated(files):
                parentdir = os.path.join(*root.split(os.sep)[0:-1])
                with open(os.path.join(parentdir,
                                       'composition_by_site')) as RO_file:
                    RO_old = json.load(RO_file)
                    RO_old_string = json.dumps(RO_old)
                if RO_old_string not in calculated_structures:
                    calculated_structures[RO_old_string] = []
                calculated_structures[RO_old_string].append(
                    Poscar.from_file(os.path.join(root, 'POSCAR')).structure)
                # struct_id = int(root.split(os.sep)[-1])
    else:
        print("No previous calculations, generating the initial pool.")

    mc_structs = {}
    if compaxis:
        ro_axis_strings = {}

    sc_ro_pair_id = 0

    if merge_sublats is None:
        sublat_list = [[i] for i in range(len(Prim))]
    else:
        sublat_list = merge_sublats

    for SC, RO, sublats_WorthToExpand in SCLst:
        #len(RO)==len(sublats_WorthToExpand)==len(merge_sublats) if merge_sublats else ==len(SC)

        print("Processing composition:\n", RO, '\nSupercell:\n', SC,
              '\nsize:\n', int(round(np.abs(np.linalg.det(SC)))))
        clusSC = CE.supercell_from_matrix(SC)

        Bits = clusSC.bits
        scs = int(round(np.abs(np.linalg.det(SC))))
        # generate a list of groups of sites to swap between! We have known which sites are partially occupied,
        # so we only need to figure out how pymatgen make a group of supercell sites from a primitive cell site.
        # Looks like it simply just replicate sites one by one!
        # indGrps=[list(range(i*scs,(i+1)*scs)) for i in range(len(RO)) if sites_WorthToExpand[i]];

        indGrps = [list(range(i*scs*len(sublat),(i+1)*scs*len(sublat))) for i,sublat in enumerate(sublat_list)\
                  if sublats_WorthToExpand[i]]

        RO_int = [{
            specie: int(round(RO[i][specie] * scs * len(sublat)))
            for specie in RO[i]
        } for i, sublat in enumerate(sublat_list)]
        #species will now be swapped within a 'sublattice', instead of the replicates of a site.

        #Note: indGrps should be generated from a clusSC supercell!!!!!
        #print('indGrps',indGrps,'RO',RO)

        # Replace species according to RO
        randSites = []
        for i, sublat in enumerate(sublat_list):
            for s_id in sublat:
                site = Prim[s_id]
                randSite = PeriodicSite(RO[i],
                                        site.frac_coords,
                                        Prim.lattice,
                                        properties=site.properties)
                randSites.append(randSite)
        randStr = Structure.from_sites(randSites)

        # Get electrostatics enumeration guess
        order = OrderDisorderedStructureTransformation(algo=2)

        randStr.make_supercell(SC)

        randStr = order.apply_transformation(randStr)

        #print('randStr:\n',randStr,'\nce prim:\n',CE.structure)
        # Simulated annealing for better guess at ground state
        # You may want to change the number of MC flips for each temperature

        init_occu = clusSC.occu_from_structure(randStr)
        #print("Starting occupation:", init_occu)
        sa_occu = simulated_anneal(ecis=ecis,
                                   cluster_supercell=clusSC,
                                   occu=init_occu,
                                   ind_groups=indGrps,
                                   n_loops=20000,
                                   init_T=5100,
                                   final_T=100,
                                   n_steps=20)
        print("MC ground state acquired, analyzing composition.")

        # Axis decomposition
        if compaxis:
            axis = _axis_decompose(compaxis, RO_int)
        #convert frac occupation back to integers.
        sp_list = []
        for sublat_occu in RO_int:
            sp_list.extend(sublat_occu.values())
        _gcd = GCD_List(sp_list)
        RO_reduced_int = [{sp: sublat_occu[sp] // _gcd
                           for sp in sublat_occu} for sublat_occu in RO_int]

        #Reduce occupation numbers by GCD.
        RO_string = json.dumps(RO_reduced_int)
        print("Reduced occupation:", RO_string)
        if RO_string not in mc_structs:
            mc_structs[RO_string] = []

        #REFERED AXIS DECOMPOSITION IS WRONG. FIX THIS!

        if compaxis:
            axis_string = json.dumps(axis)
            if RO_string not in ro_axis_strings:
                ro_axis_strings[RO_string] = axis_string
            print("Axis composition: ", axis_string)

        # Add approximate ground state to set of MC structures
        # Format as (structure, temperature) - for ground state, temperature is "0"
        # print("GS structure:",clusSC.structure_from_occu(sa_occu))
        mc_structs[RO_string].append((clusSC.structure_from_occu(sa_occu), 0))
        print("MC GS added to the preset.")

        for T in TLst:
            print("Doing MC sampling under T = {}K".format(T))
            # Equilibration run
            # Play around with the number of MC flips in the run - the current number is very arbitrary
            # We can try to implement VO et.al's sampling method here!

            occu, _, _, _ = run_T(ecis=ecis,
                                  cluster_supercell=clusSC,
                                  occu=deepcopy(sa_occu),
                                  T=T,
                                  n_loops=100000,
                                  ind_groups=indGrps,
                                  n_rand=2,
                                  check_unique=False)

            # Production run
            # Same comment about number of flips - very arbitrary right now
            occu, min_occu, min_e, rand_occu = run_T(ecis=ecis,
                                                     cluster_supercell=clusSC,
                                                     occu=deepcopy(occu),
                                                     T=T,
                                                     n_loops=200000,
                                                     ind_groups=indGrps,
                                                     n_rand=6,
                                                     check_unique=True)

            # Check that returned random structures
            # are all different
            # Save best structure and a few random structures from the production run
            mc_structs[RO_string].append(
                (clusSC.structure_from_occu(min_occu), T))
            for rand, rand_e in rand_occu:
                mc_structs[RO_string].append(
                    (clusSC.structure_from_occu(rand), SC))

        sc_ro_pair_id += 1

    # Deduplicate - first compared to previously calculated structures, then compared to structures within this run
    print('Deduplicating random structures.')

    unique_structs = {}
    unqCnt = 0
    sm = StructureMatcher(ltol=0.3,
                          stol=0.3,
                          angle_tol=5,
                          comparator=ElementComparator())
    for RO_string, structs in mc_structs.items():
        if RO_string not in unique_structs:
            unique_structs[RO_string] = []

        for struct, matrix in structs:
            unique = True
            if RO_string in calculated_structures:
                for ostruct in calculated_structures[RO_string]:
                    if sm.fit(struct, ostruct):
                        unique = False
                        break
            if unique:
                for ostruct, matrix in unique_structs[RO_string]:
                    if sm.fit(struct, ostruct):
                        unique = False
                        break
            if unique:
                try:
                    #Check if structure matcher works for this structure. If not, abandon.
                    cs = CE.supercell_from_matrix(matrix)
                    corr = cs.corr_from_structure(struct)
                    unique_structs[RO_string].append((struct, matrix))
                    unqCnt += 1
                except:
                    continue

    print('Obtained %d unique occupied random structures.' % unqCnt)

    if compaxis:
        return unique_structs, ro_axis_strings
    else:
        return unique_structs, None
Example #30
 def structure_from_occu(self, occu):
     sites = []
     for b, o, s in zip(self.bits, occu, self.supercell):
         if b[o] != 'Vacancy':
             sites.append(PeriodicSite(b[o], s.frac_coords, self.supercell.lattice))
     return Structure.from_sites(sites)
Example #31
def get_structure_type(structure, write_poscar_from_cluster=False):
    """
    This is a topology-scaling algorithm used to describe the
    periodicity of bonded clusters in a bulk structure.

    Args:
        structure (structure): Pymatgen structure object to classify.
        write_poscar_from_cluster (bool): Set to True to write a
            POSCAR from the sites in the cluster.

    Returns:
        string. 'molecular' (0D), 'chain', 'layered', 'heterogeneous'
            (intercalated 3D), or 'conventional' (3D)
    """

    # The conventional standard structure is much easier to work
    # with.

    structure = SpacegroupAnalyzer(
        structure).get_conventional_standard_structure()

    # Noble gases don't have well-defined bonding radii.
    if len([
            e for e in structure.composition
            if e.symbol in ['He', 'Ne', 'Ar', 'Kr', 'Xe']
    ]) != 0:
        type = 'noble gas'
    else:
        if len(structure.sites) < 45:
            structure.make_supercell(2)

        # Create a dict of sites as keys and lists of their
        # bonded neighbors as values.
        sites = structure.sites
        bonds = {}
        for site in sites:
            bonds[site] = []

        for i in range(len(sites)):
            site_1 = sites[i]
            for site_2 in sites[i + 1:]:
                if (site_1.distance(site_2) < float(
                        Element(site_1.specie).atomic_radius +
                        Element(site_2.specie).atomic_radius) * 1.1):
                    bonds[site_1].append(site_2)
                    bonds[site_2].append(site_1)

        # Assimilate all bonded atoms in a cluster; terminate
        # when it stops growing.
        cluster_terminated = False
        while not cluster_terminated:
            original_cluster_size = len(bonds[sites[0]])
            for site in bonds[sites[0]]:
                bonds[sites[0]] += [
                    s for s in bonds[site] if s not in bonds[sites[0]]
                ]
            if len(bonds[sites[0]]) == original_cluster_size:
                cluster_terminated = True

        original_cluster = bonds[sites[0]]

        if len(bonds[sites[0]]) == 0:  # i.e. the cluster is a single atom.
            type = 'molecular'
        elif len(bonds[sites[0]]) == len(sites):  # i.e. all atoms are bonded.
            type = 'conventional'
        else:
            # If the cluster's composition is not equal to the
            # structure's overall composition, it is a heterogeneous
            # compound.
            cluster_composition_dict = {}
            for site in bonds[sites[0]]:
                if Element(site.specie) in cluster_composition_dict:
                    cluster_composition_dict[Element(site.specie)] += 1
                else:
                    cluster_composition_dict[Element(site.specie)] = 1
            uniform = True
            if len(cluster_composition_dict):
                cmp = Composition.from_dict(cluster_composition_dict)
                if cmp.reduced_formula != structure.composition.reduced_formula:
                    uniform = False
            if not uniform:
                type = 'heterogeneous'
            else:
                # Make a 2x2x2 supercell and recalculate the
                # cluster's new size. If the new cluster size is
                # the same as the old size, it is a non-periodic
                # molecule. If it is 2x as big, it's a 1D chain.
                # If it's 4x as big, it is a layered material.
                old_cluster_size = len(bonds[sites[0]])
                structure.make_supercell(2)
                sites = structure.sites
                bonds = {}
                for site in sites:
                    bonds[site] = []

                for i in range(len(sites)):
                    site_1 = sites[i]
                    for site_2 in sites[i + 1:]:
                        if (site_1.distance(site_2) < float(
                                Element(site_1.specie).atomic_radius +
                                Element(site_2.specie).atomic_radius) * 1.1):
                            bonds[site_1].append(site_2)
                            bonds[site_2].append(site_1)

                cluster_terminated = False
                while not cluster_terminated:
                    original_cluster_size = len(bonds[sites[0]])
                    for site in bonds[sites[0]]:
                        bonds[sites[0]] += [
                            s for s in bonds[site] if s not in bonds[sites[0]]
                        ]
                    if len(bonds[sites[0]]) == original_cluster_size:
                        cluster_terminated = True

                if len(bonds[sites[0]]) != 4 * old_cluster_size:
                    type = 'molecular'
                else:
                    type = 'layered'

    if write_poscar_from_cluster:
        Structure.from_sites(original_cluster).to('POSCAR', 'POSCAR')

    return type
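A hedged usage sketch for get_structure_type; the CIF name is a placeholder, and for a van der Waals layered material the expected classification would be 'layered'.

from pymatgen.core import Structure

struct = Structure.from_file("MoS2.cif")   # hypothetical layered material
print(get_structure_type(struct))          # e.g. 'layered'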
Example #32
    def test_supercell_subsets(self):
        sm = StructureMatcher(ltol=0.2, stol=0.3, angle_tol=5,
                              primitive_cell=False, scale=True,
                              attempt_supercell=True, allow_subset=True,
                              supercell_size='volume')
        sm_no_s = StructureMatcher(ltol=0.2, stol=0.3, angle_tol=5,
                                   primitive_cell=False, scale=True,
                                   attempt_supercell=True, allow_subset=False,
                                   supercell_size='volume')
        l = Lattice.orthorhombic(1, 2, 3)
        s1 = Structure(l, ['Ag', 'Si', 'Si'],
                       [[.7, .4, .5], [0, 0, 0.1], [0, 0, 0.2]])
        s1.make_supercell([2, 1, 1])
        s2 = Structure(l, ['Si', 'Si', 'Ag'],
                       [[0, 0.1, -0.95], [0, 0.1, 0], [-.7, .5, .375]])

        shuffle = [0, 2, 1, 3, 4, 5]
        s1 = Structure.from_sites([s1[i] for i in shuffle])

        # test when s1 is exact supercell of s2
        result = sm.get_s2_like_s1(s1, s2)
        for a, b in zip(s1, result):
            self.assertTrue(a.distance(b) < 0.08)
            self.assertEqual(a.species, b.species)

        self.assertTrue(sm.fit(s1, s2))
        self.assertTrue(sm.fit(s2, s1))
        self.assertTrue(sm_no_s.fit(s1, s2))
        self.assertTrue(sm_no_s.fit(s2, s1))

        rms = (0.048604032430991401, 0.059527539448807391)
        self.assertTrue(np.allclose(sm.get_rms_dist(s1, s2), rms))
        self.assertTrue(np.allclose(sm.get_rms_dist(s2, s1), rms))

        # test when the supercell is a subset of s2
        subset_supercell = s1.copy()
        del subset_supercell[0]
        result = sm.get_s2_like_s1(subset_supercell, s2)
        self.assertEqual(len(result), 6)
        for a, b in zip(subset_supercell, result):
            self.assertTrue(a.distance(b) < 0.08)
            self.assertEqual(a.species, b.species)

        self.assertTrue(sm.fit(subset_supercell, s2))
        self.assertTrue(sm.fit(s2, subset_supercell))
        self.assertFalse(sm_no_s.fit(subset_supercell, s2))
        self.assertFalse(sm_no_s.fit(s2, subset_supercell))

        rms = (0.053243049896333279, 0.059527539448807336)
        self.assertTrue(np.allclose(sm.get_rms_dist(subset_supercell, s2), rms))
        self.assertTrue(np.allclose(sm.get_rms_dist(s2, subset_supercell), rms))

        # test when s2 (once made a supercell) is a subset of s1
        s2_missing_site = s2.copy()
        del s2_missing_site[1]
        result = sm.get_s2_like_s1(s1, s2_missing_site)
        for a, b in zip((s1[i] for i in (0, 2, 4, 5)), result):
            self.assertTrue(a.distance(b) < 0.08)
            self.assertEqual(a.species, b.species)

        self.assertTrue(sm.fit(s1, s2_missing_site))
        self.assertTrue(sm.fit(s2_missing_site, s1))
        self.assertFalse(sm_no_s.fit(s1, s2_missing_site))
        self.assertFalse(sm_no_s.fit(s2_missing_site, s1))

        rms = (0.029763769724403633, 0.029763769724403987)
        self.assertTrue(np.allclose(sm.get_rms_dist(s1, s2_missing_site), rms))
        self.assertTrue(np.allclose(sm.get_rms_dist(s2_missing_site, s1), rms))
Example #33
def genacomp(initialstructure,
             savedir,
             A1='Co',
             A2='Mn',
             fixspecies='Ni',
             initiallayers=9):
    # step 1 - load structure and halve it (allows easier symmetrisation of surfaces)
    # initialstructure = 'C:/Users/Bud/Desktop/test/104vac11/sup141Co4Mnsurfsub/POSCAR'
    obby = Structure.from_file(initialstructure)
    # A1 = 'Co'
    # A2 = 'Mn'
    # fixspecies = 'Ni'
    # initiallayers = 5
    cdim = []
    for element in obby:
        cdim.append(element.coords[2])
    cdim.sort()
    # TODO - Add this improved method to the dyna package. it seems to be more flexible.
    listy = []
    ranvar = 0.01
    while len(listy) != initiallayers:
        listy = [
            list(g) for k, g in itt.groupby(cdim, partial(the_key, ranvar))
        ]
        ranvar = ranvar * 1.01
        print(ranvar)
    keepcount = math.ceil(initiallayers / 2)

    # need to halve the listy here
    listy = listy[0:keepcount]

    flat_list = [item for sublist in listy for item in sublist]
    setter = list(set(flat_list))
    cutstru = []
    for c in setter:
        for k in obby:
            print(k)
            if c == k.coords[2]:
                print('yes')
                cutstru.append(k)

    newstruc = Structure.from_sites(cutstru)
    # now that structure has been halved-ish, it'll be ideal to count the number of sites!
    print('total number of sites = ' + str(newstruc.num_sites))

    ### Time to generate all possible combinations! - detect pre-present defects
    counter = 0
    it = 0
    fixlist = []
    while it < newstruc.species.__len__():
        if newstruc.species[it].name == fixspecies:
            print('species found - ' + fixspecies + ' @ ' +
                  str(newstruc.frac_coords[it]))
            fixlist.append(newstruc[it])
            counter += 1
        it += 1
    newstruc.remove_species([fixspecies])

    # find A1 and A2 total count!
    metal = 0
    it = 0
    itlist = []
    while it < newstruc.species.__len__():
        if newstruc.species[it].name == A1:
            print('species found - ' + A1 + ' @ ' +
                  str(newstruc.frac_coords[it]))
            itlist.append(newstruc[it])
            metal += 1
        elif newstruc.species[it].name == A2:
            print('species found - ' + A2 + ' @ ' +
                  str(newstruc.frac_coords[it]))
            itlist.append(newstruc[it])
            metal += 1
        it += 1

    print('total changeable sites is ' + str(metal))
    if metal > 7:
        print("that's a lot of sites do you want to proceed")
        proceed = input('y/n')
        if proceed != 'y':
            print('good idea, your computer will have been sad')
            exit()

    # Need to now split this list into a numerous lists of all possible combos.
    # step 1, generate the pure A1 metal if it isn't already
    changestru = newstruc.copy()
    for i in itlist:
        i.species = A1
        changestru.remove(i)

    allset = list(itt.product([A1, A2], repeat=metal))
    county = 0
    for combo in allset:
        iterstru = changestru.copy()
        i = 0
        while i < len(itlist):
            itlist[i].species = combo[i]
            i += 1
        makelist = itlist + fixlist + list(changestru)
        # this is 1 half of the structure.
        savenew = Structure.from_sites(makelist)

        # making the other side
        cdim = []
        for element in makelist:
            cdim.append(element.coords[2])
        cdim.sort()

        listy = []
        ranvar = 0.01
        while len(listy) != math.ceil(initiallayers / 2):
            listy = [
                list(g) for k, g in itt.groupby(cdim, partial(the_key, ranvar))
            ]
            ranvar = ranvar * 1.01
            print(ranvar)
        # need to halve the listy here: drop the topmost layer group so that the
        # mirrored copy plus this half add up to `initiallayers` layers again
        listy.pop()
        flat_list = [item for sublist in listy for item in sublist]
        setter = list(set(flat_list))
        cutstru = []
        for c in setter:
            for k in makelist:
                print(k)
                if c == k.coords[2]:
                    print('yes')
                    cutstru.append(k)
        strr = Structure.from_sites(cutstru, to_unit_cell=True)
        strr.apply_operation(
            SymmOp.reflection((0, 0, 1),
                              origin=(strr.lattice.a / 2, strr.lattice.b / 2,
                                      strr.lattice.c / 2)))

        newest = list(strr) + list(savenew)
        both = Structure.from_sites(newest)
        for element in both:
            if element.coords[2] < 0:
                element.coords[2] = abs(element.coords[2])
                element.frac_coords[2] = abs(element.frac_coords[2])
            if element.frac_coords[2] < 0:
                element.frac_coords[2] = abs(element.frac_coords[2])
        both.sort()
        os.makedirs(savedir + '/' + A2 + str(int(both.composition.get(A2))) +
                    '_' + str(county),
                    exist_ok=True)
        both.to(filename=(savedir + "/" + A2 +
                          str(int(both.composition.get(A2))) + '_' +
                          str(county) + '/POSCAR'))
        county += 1
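
The function above depends on a helper `the_key(tolerance, value)` that is not
included in the excerpt; judging from how it is combined with
`functools.partial` and `itertools.groupby`, it most likely buckets z
coordinates into layers of width `tolerance`. Below is a minimal, purely
hypothetical reconstruction of that helper, followed by an equally hypothetical
call to `genacomp` (the POSCAR path and output directory are placeholders).

def the_key(tolerance, value):
    # Hypothetical reconstruction: map a z coordinate to a bin index of width
    # `tolerance`, so itertools.groupby collects coordinates belonging to the
    # same atomic layer.  The original helper may be implemented differently.
    return round(value / tolerance)

# Hypothetical call with placeholder paths:
# genacomp('POSCAR', './compositions', A1='Co', A2='Mn',
#          fixspecies='Ni', initiallayers=9)
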
Example No. 34
0
    def test_supercell_subsets(self):
        sm = StructureMatcher(ltol=0.2,
                              stol=0.3,
                              angle_tol=5,
                              primitive_cell=False,
                              scale=True,
                              attempt_supercell=True,
                              allow_subset=True,
                              supercell_size='volume')
        sm_no_s = StructureMatcher(ltol=0.2,
                                   stol=0.3,
                                   angle_tol=5,
                                   primitive_cell=False,
                                   scale=True,
                                   attempt_supercell=True,
                                   allow_subset=False,
                                   supercell_size='volume')
        l = Lattice.orthorhombic(1, 2, 3)
        s1 = Structure(l, ['Ag', 'Si', 'Si'],
                       [[.7, .4, .5], [0, 0, 0.1], [0, 0, 0.2]])
        s1.make_supercell([2, 1, 1])
        s2 = Structure(l, ['Si', 'Si', 'Ag'],
                       [[0, 0.1, -0.95], [0, 0.1, 0], [-.7, .5, .375]])

        shuffle = [0, 2, 1, 3, 4, 5]
        s1 = Structure.from_sites([s1[i] for i in shuffle])

        #test when s1 is exact supercell of s2
        result = sm.get_s2_like_s1(s1, s2)
        for a, b in zip(s1, result):
            self.assertTrue(a.distance(b) < 0.08)
            self.assertEqual(a.species_and_occu, b.species_and_occu)

        self.assertTrue(sm.fit(s1, s2))
        self.assertTrue(sm.fit(s2, s1))
        self.assertTrue(sm_no_s.fit(s1, s2))
        self.assertTrue(sm_no_s.fit(s2, s1))

        rms = (0.048604032430991401, 0.059527539448807391)
        self.assertTrue(np.allclose(sm.get_rms_dist(s1, s2), rms))
        self.assertTrue(np.allclose(sm.get_rms_dist(s2, s1), rms))

        #test when the supercell is a subset of s2
        subset_supercell = s1.copy()
        del subset_supercell[0]
        result = sm.get_s2_like_s1(subset_supercell, s2)
        self.assertEqual(len(result), 6)
        for a, b in zip(subset_supercell, result):
            self.assertTrue(a.distance(b) < 0.08)
            self.assertEqual(a.species_and_occu, b.species_and_occu)

        self.assertTrue(sm.fit(subset_supercell, s2))
        self.assertTrue(sm.fit(s2, subset_supercell))
        self.assertFalse(sm_no_s.fit(subset_supercell, s2))
        self.assertFalse(sm_no_s.fit(s2, subset_supercell))

        rms = (0.053243049896333279, 0.059527539448807336)
        self.assertTrue(np.allclose(sm.get_rms_dist(subset_supercell, s2),
                                    rms))
        self.assertTrue(np.allclose(sm.get_rms_dist(s2, subset_supercell),
                                    rms))

        #test when s2 (once made a supercell) is a subset of s1
        s2_missing_site = s2.copy()
        del s2_missing_site[1]
        result = sm.get_s2_like_s1(s1, s2_missing_site)
        for a, b in zip((s1[i] for i in (0, 2, 4, 5)), result):
            self.assertTrue(a.distance(b) < 0.08)
            self.assertEqual(a.species_and_occu, b.species_and_occu)

        self.assertTrue(sm.fit(s1, s2_missing_site))
        self.assertTrue(sm.fit(s2_missing_site, s1))
        self.assertFalse(sm_no_s.fit(s1, s2_missing_site))
        self.assertFalse(sm_no_s.fit(s2_missing_site, s1))

        rms = (0.029763769724403633, 0.029763769724403987)
        self.assertTrue(np.allclose(sm.get_rms_dist(s1, s2_missing_site), rms))
        self.assertTrue(np.allclose(sm.get_rms_dist(s2_missing_site, s1), rms))
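
As a usage note, the combination exercised in this test (attempt_supercell
together with allow_subset) is what lets structures of different sizes, and
with missing sites, still be matched. A minimal standalone sketch, assuming a
recent pymatgen API and an illustrative NaCl-like cell rather than the
structures from the test:

from pymatgen.core import Lattice, Structure
from pymatgen.analysis.structure_matcher import StructureMatcher

sm = StructureMatcher(primitive_cell=False, scale=True,
                      attempt_supercell=True, allow_subset=True,
                      supercell_size='volume')
base = Structure(Lattice.cubic(3.0), ['Na', 'Cl'],
                 [[0, 0, 0], [0.5, 0.5, 0.5]])
big = base.copy()
big.make_supercell([2, 1, 1])
del big[0]                          # drop one site -> subset of the supercell
print(sm.fit(base, big))            # expected True: supercell + subset matching
print(sm.get_rms_dist(base, big))   # expected ~ (0.0, 0.0) for this exact match
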
Example No. 35
0
    def __init__(self,
                 structure=None,
                 percolating_species=None,
                 cutoff=None,
                 static_species=None,
                 flip_sequence=None,
                 formula_units=1,
                 sublattices=None,
                 bonds=None,
                 sort_sites=False,
                 **kwargs):
        """
        Args:
          The constructor takes as arguments the same keys expected in
          the JSON input file (doc object doc string).

        """

        self.input_dict = kwargs

        if structure is None:
            raise KeyError("No structure file specified.")
        else:
            self.structure_path = structure
            self.input_structure = Poscar.from_file(
                self.structure_path).structure

        if sort_sites:
            idx = np.lexsort(
                np.array([s.coords for s in self.input_structure]).T)
            sites = [self.input_structure[i] for i in idx]
            self.input_structure = Structure.from_sites(sites)

        self.percolating_species = percolating_species

        self.cutoff = cutoff
        self.static_species = [] if static_species is None else static_species
        self.flip_sequence = [] if flip_sequence is None else flip_sequence
        self.formula_units = formula_units

        self.sublattices = {}
        if sublattices is not None:
            for sl in sublattices:
                sl_dict = sublattices[sl]
                self.sublattices[sl] = Sublattice(self.input_structure,
                                                  **sl_dict)

        self.bonds = {}
        if bonds is not None:
            for b in bonds:
                bond = Bond.from_dict(b)
                # dictionary, so that the following syntax is valid:
                # if Bond('A', 'B') in self.bonds:
                #    rules = self.bonds[Bond('A', 'B')].bond_rules
                self.bonds[bond] = bond

        # reduce structure to sites only from sublattices that are not
        # ignored
        active_sites = []
        for sl in self.sublattices:
            if not self.sublattices[sl].ignore:
                active_sites.extend(self.sublattices[sl].sites)
        active_sites.sort()
        self.structure = Structure.from_sites(
            [self.input_structure[i] for i in active_sites])
        if len(active_sites) < self.input_structure.num_sites:
            inactive_sites = [
                i for i in range(self.input_structure.num_sites)
                if i not in active_sites
            ]
            self.static_sites = Structure.from_sites(
                [self.input_structure[i] for i in inactive_sites])
        else:
            self.static_sites = None

        # assign a sublattice label to each site in the reduced structure
        self.site_labels = ["" for i in active_sites]
        for sl in self.sublattices:
            for s in self.sublattices[sl].sites:
                if s in active_sites:
                    self.site_labels[active_sites.index(s)] = sl
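
The `sort_sites` branch in the constructor orders sites with `np.lexsort` over
their Cartesian coordinates (the last key is primary, so z, then y, then x). A
small standalone sketch of the same trick on a throwaway structure, independent
of the class above; the NaCl-like cell and its coordinates are illustrative
only:

import numpy as np
from pymatgen.core import Lattice, Structure

# lexsort takes keys as rows; with the (N, 3) coordinate array transposed,
# the last row (z) is the primary sort key, followed by y and then x.
struct = Structure(Lattice.cubic(4.0), ['Na', 'Cl', 'Na', 'Cl'],
                   [[0.5, 0.5, 0.5], [0.0, 0.0, 0.5],
                    [0.5, 0.0, 0.0], [0.0, 0.5, 0.0]])
idx = np.lexsort(np.array([s.coords for s in struct]).T)
sorted_struct = Structure.from_sites([struct[i] for i in idx])
print([tuple(s.frac_coords) for s in sorted_struct])
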