Example No. 1
    class Output(object):
        """
        Output manager.
        """
        def __init__(self):
            self.drone = SimpleVaspToComputedEntryDrone(inc_structure=True)
            self.queen = BorgQueen(self.drone)

        def get_results(self, workdir):
            """
            Get energy and structure obtained by the solver program.

            Parameters
            ----------
            workdir : str
                Path to the working directory.

            Returns
            -------
            phys : namedtuple("energy", "structure")
                Total energy and atomic structure.
                The energy is measured in units of eV
                and coordinates are measured in units of Angstrom.
            """
            # Read results from files in output_dir and calculate values
            Phys = namedtuple("PhysValues", ("energy", "structure"))
            self.queen.serial_assimilate(workdir)
            results = self.queen.get_data()[-1]
            return Phys(np.float64(results.energy), results.structure)
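A minimal usage sketch of the Output helper above. The "./output" path is a placeholder for a directory containing a finished VASP run, and the imports used in the snippet (namedtuple, numpy as np, and the pymatgen drone/queen classes) are assumed to be in scope:

output = Output()
phys = output.get_results("./output")  # "./output" is a hypothetical working directory
print("total energy (eV):", phys.energy)
print("final structure:", phys.structure.formula)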
Example No. 2
def get_energies(rootdir, reanalyze, verbose, detailed, sort):
    """
    Doc string.
    """
    if verbose:
        FORMAT = "%(relativeCreated)d msecs : %(message)s"
        logging.basicConfig(level=logging.INFO, format=FORMAT)

    if not detailed:
        drone = SimpleVaspToComputedEntryDrone(inc_structure=True)
    else:
        drone = VaspToComputedEntryDrone(inc_structure=True,
                                         data=["filename",
                                               "initial_structure"])

    ncpus = multiprocessing.cpu_count()
    logging.info("Detected {} cpus".format(ncpus))
    queen = BorgQueen(drone, number_of_drones=ncpus)
    if os.path.exists(SAVE_FILE) and not reanalyze:
        msg = "Using previously assimilated data from {}.".format(SAVE_FILE) \
            + " Use -f to force re-analysis."
        queen.load_data(SAVE_FILE)
    else:
        if ncpus > 1:
            queen.parallel_assimilate(rootdir)
        else:
            queen.serial_assimilate(rootdir)
        msg = "Analysis results saved to {} for faster ".format(SAVE_FILE) + \
              "subsequent loading."
        queen.save_data(SAVE_FILE)

    entries = queen.get_data()
    if sort == "energy_per_atom":
        entries = sorted(entries, key=lambda x: x.energy_per_atom)
    elif sort == "filename":
        entries = sorted(entries, key=lambda x: x.data["filename"])

    all_data = []
    for e in entries:
        if not detailed:
            delta_vol = "{:.2f}".format(e.data["delta_volume"] * 100)
        else:
            delta_vol = e.structure.volume / \
                e.data["initial_structure"].volume - 1
            delta_vol = "{:.2f}".format(delta_vol * 100)
        all_data.append((e.data["filename"].replace("./", ""),
                         re.sub(r"\s+", "", e.composition.formula),
                         "{:.5f}".format(e.energy),
                         "{:.5f}".format(e.energy_per_atom),
                         delta_vol))
    if len(all_data) > 0:
        headers = ("Directory", "Formula", "Energy", "E/Atom", "% vol chg")
        t = PrettyTable(headers)
        t.align["Directory"] = "l"
        for d in all_data:
            t.add_row(d)
        print(t)
        print(msg)
    else:
        print("No valid vasp run found.")
Example No. 3
 def test_load_data(self):
     with warnings.catch_warnings():
         warnings.simplefilter("ignore")
         drone = VaspToComputedEntryDrone()
         queen = BorgQueen(drone)
         queen.load_data(os.path.join(test_dir, "assimilated.json"))
         self.assertEqual(len(queen.get_data()), 1)
Example No. 4
 def test_get_data(self):
     with warnings.catch_warnings():
         warnings.simplefilter("ignore")
         drone = VaspToComputedEntryDrone()
         self.queen = BorgQueen(drone, test_dir, 1)
         data = self.queen.get_data()
         self.assertEqual(len(data), 7)
Example No. 5
 def test_load_data(self):
     with warnings.catch_warnings():
         warnings.simplefilter("ignore")
         drone = VaspToComputedEntryDrone()
         queen = BorgQueen(drone)
         queen.load_data(os.path.join(test_dir, "assimilated.json"))
         self.assertEqual(len(queen.get_data()), 1)
Example No. 6
 def run_task(self, fw_spec):
     """
     go through the measurement job dirs and
     put the measurement jobs in the database
     """
     drone = MPINTVaspToDbTaskDrone(**self.get("dbase_params", {}))
     queen = BorgQueen(drone)  # , number_of_drones=ncpus)
     queen.serial_assimilate(self["measure_dir"])
     return FWAction()
Example No. 7
 def run_task(self, fw_spec):
     """
     go through the measurement job dirs and 
     put the measurement jobs in the database
     """
     drone = MPINTVaspToDbTaskDrone(**self.get("dbase_params", {}))
     queen = BorgQueen(drone)  # , number_of_drones=ncpus)
     queen.serial_assimilate(self["measure_dir"])
     return FWAction()
Example No. 8
 def setUpClass(cls):
     try:
         drone = VaspToDbTaskDrone(database="creator_unittest")
         queen = BorgQueen(drone)
         queen.serial_assimilate(os.path.join(test_dir, "db_test", "success_mp_aflow"))
         cls.conn = MongoClient()
         cls.qe = QueryEngine(database="creator_unittest")
     except ConnectionFailure:
         cls.qe = None
         cls.conn = None
Example No. 9
def get_energies(rootdir, reanalyze, verbose, pretty):
    if verbose:
        FORMAT = "%(relativeCreated)d msecs : %(message)s"
        logging.basicConfig(level=logging.INFO, format=FORMAT)
    drone = GaussianToComputedEntryDrone(inc_structure=True,
                                         parameters=['filename'])
    ncpus = multiprocessing.cpu_count()
    logging.info('Detected {} cpus'.format(ncpus))
    queen = BorgQueen(drone, number_of_drones=ncpus)
    if os.path.exists(save_file) and not reanalyze:
        msg = ('Using previously assimilated data from {}. '
               'Use -f to force re-analysis.').format(save_file)
        queen.load_data(save_file)
    else:
        queen.parallel_assimilate(rootdir)
        msg = 'Results saved to {} for faster reloading.'.format(save_file)
        queen.save_data(save_file)

    entries = queen.get_data()
    entries = sorted(entries, key=lambda x: x.parameters['filename'])
    all_data = [(e.parameters['filename'].replace("./", ""),
                 re.sub(r"\s+", "", e.composition.formula),
                 "{}".format(e.parameters['charge']),
                 "{}".format(e.parameters['spin_mult']),
                 "{:.5f}".format(e.energy), "{:.5f}".format(e.energy_per_atom),
                 ) for e in entries]
    headers = ("Directory", "Formula", "Charge", "Spin Mult.", "Energy",
               "E/Atom")
    print(tabulate(all_data, headers=headers))
    print("")
    print(msg)
Example No. 10
 def setUpClass(cls):
     try:
         drone = VaspToDbTaskDrone(database="qetransmuter_unittest")
         queen = BorgQueen(drone)
         queen.serial_assimilate(
             os.path.join(test_dir, 'db_test', 'success_mp_aflow'))
         cls.conn = MongoClient()
         cls.qe = QueryEngine(database="qetransmuter_unittest")
     except ConnectionFailure:
         cls.qe = None
         cls.conn = None
Example No. 11
    class Output(object):
        def __init__(self):
            self.drone = SimpleVaspToComputedEntryDrone(inc_structure=True)
            self.queen = BorgQueen(self.drone)

        def get_results(self, output_dir):
            # Read results from files in output_dir and calculate values
            Phys = namedtuple("PhysValues", ("energy", "structure"))
            self.queen.serial_assimilate(output_dir)
            results = self.queen.get_data()[-1]
            return Phys(np.float64(results.energy), results.structure)
Example No. 12
    def _extract_MP_data(self, MP_data_filename):

        drone = VaspToComputedEntryDrone()
        queen = BorgQueen(drone, "dummy", 1)

        queen.load_data(MP_data_filename)
        computed_entries = queen.get_data()

        del drone
        del queen

        return computed_entries
Example No. 13
    def _extract_MP_data(self, MP_data_filename):

        drone = VaspToComputedEntryDrone()
        queen = BorgQueen(drone, "dummy", 1)

        queen.load_data(MP_data_filename)
        computed_entries = queen.get_data()

        del drone
        del queen

        return computed_entries 
Example No. 14
def get_energies(rootdir, reanalyze, verbose, pretty, detailed, sort):
    if verbose:
        FORMAT = "%(relativeCreated)d msecs : %(message)s"
        logging.basicConfig(level=logging.INFO, format=FORMAT)

    if not detailed:
        drone = SimpleVaspToComputedEntryDrone(inc_structure=True)
    else:
        drone = VaspToComputedEntryDrone(
            inc_structure=True, data=["filename", "initial_structure"])

    ncpus = multiprocessing.cpu_count()
    logging.info("Detected {} cpus".format(ncpus))
    queen = BorgQueen(drone, number_of_drones=ncpus)
    if os.path.exists(save_file) and not reanalyze:
        msg = "Using previously assimilated data from {}.".format(save_file) \
            + " Use -f to force re-analysis."
        queen.load_data(save_file)
    else:
        queen.parallel_assimilate(rootdir)
        msg = "Analysis results saved to {} for faster ".format(save_file) + \
              "subsequent loading."
        queen.save_data(save_file)

    entries = queen.get_data()
    if sort == "energy_per_atom":
        entries = sorted(entries, key=lambda x: x.energy_per_atom)
    elif sort == "filename":
        entries = sorted(entries, key=lambda x: x.data["filename"])

    all_data = []
    for e in entries:
        if not detailed:
            delta_vol = "{:.2f}".format(e.data["delta_volume"] * 100)
        else:
            delta_vol = e.structure.volume / \
                e.data["initial_structure"].volume - 1
            delta_vol = "{:.2f}".format(delta_vol * 100)
        all_data.append(
            (e.data["filename"].replace("./", ""),
             re.sub(r"\s+", "",
                    e.composition.formula), "{:.5f}".format(e.energy),
             "{:.5f}".format(e.energy_per_atom), delta_vol))
    if len(all_data) > 0:
        headers = ("Directory", "Formula", "Energy", "E/Atom", "% vol chg")
        if pretty:
            from prettytable import PrettyTable
            t = PrettyTable(headers)
            t.align["Directory"] = "l"
            for d in all_data:
                t.add_row(d)
            print(t)
        else:
            print(str_aligned(all_data, headers))
        print(msg)
    else:
        print("No valid vasp run found.")
Example No. 15
    def __init__(self, rootdir):
        """read vasp output via drone and extract all data

        :param rootdir: root directory containing collection of VASP dirs
        :type rootdir: str
        """
        from pymatgen.apps.borg.hive import SimpleVaspToComputedEntryDrone
        from pymatgen.apps.borg.queen import BorgQueen
        self.rootdir = rootdir
        self.drone = SimpleVaspToComputedEntryDrone(inc_structure=True)
        self.queen = BorgQueen(
            self.drone, rootdir,
            1)  # TODO: make sure uw2_si2 also works in parallel
        self.data = self.queen.get_data()
Example No. 16
class AbstractVaspDirCollParser(six.with_metaclass(abc.ABCMeta, object)):
    """Abstract base class for parsers of a collection of VASP directories

    To implement a new parser, inherit from this class and
    define the :meth:`compile` method.
    """

    def __init__(self, rootdir):
        """read vasp output via drone and extract all data

        :param rootdir: root directory containing collection of VASP dirs
        :type rootdir: str
        """
        from pymatgen.apps.borg.hive import SimpleVaspToComputedEntryDrone
        from pymatgen.apps.borg.queen import BorgQueen
        self.rootdir = rootdir
        self.drone = SimpleVaspToComputedEntryDrone(inc_structure=True)
        self.queen = BorgQueen(self.drone, rootdir, 1) # TODO: make sure uw2_si2 also works in parallel
        self.data = self.queen.get_data()

    def find_entry_for_directory(self, regex, oszicar=True):
        """returns the computed entry for a VASP directory matching the regex"""
        # scan in reverse alpha-numeric order under the assumption that
        # directories with the highest (local) index correspond to final VaspRun
        for entry in reversed(self.data):
            if fnmatch.fnmatch(entry.data['filename'], regex):
                if oszicar and not entry.energy < 1e10: continue
                return entry

    @abc.abstractmethod
    def compile(self):
        """compile the extracted data into a reduced dataset to be contributed"""
        return
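The class docstring above says a new parser is written by inheriting from AbstractVaspDirCollParser and defining compile(). A small hypothetical sketch of such a subclass (the class name and the reduced dataset it returns are illustrative, not from the original project):

class EnergyCollParser(AbstractVaspDirCollParser):
    """Hypothetical parser: reduce each assimilated entry to (filename, energy per atom)."""

    def compile(self):
        # self.data holds the ComputedEntry objects gathered by __init__
        return [(e.data['filename'], e.energy_per_atom) for e in self.data]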
Example No. 17
 def test_get_data(self):
     with warnings.catch_warnings():
         warnings.simplefilter("ignore")
         drone = VaspToComputedEntryDrone()
         self.queen = BorgQueen(drone, test_dir, 1)
         data = self.queen.get_data()
         self.assertEqual(len(data), 11)
Example No. 18
class AbstractVaspDirCollParser(six.with_metaclass(abc.ABCMeta, object)):
    """Abstract base class for parsers of a collection of VASP directories

    To implement a new parser, inherit from this class and
    define the :meth:`compile` method.
    """
    def __init__(self, rootdir):
        """read vasp output via drone and extract all data

        :param rootdir: root directory containing collection of VASP dirs
        :type rootdir: str
        """
        from pymatgen.apps.borg.hive import SimpleVaspToComputedEntryDrone
        from pymatgen.apps.borg.queen import BorgQueen
        self.rootdir = rootdir
        self.drone = SimpleVaspToComputedEntryDrone(inc_structure=True)
        self.queen = BorgQueen(
            self.drone, rootdir,
            1)  # TODO: make sure uw2_si2 also works in parallel
        self.data = self.queen.get_data()

    def find_entry_for_directory(self, regex, oszicar=True):
        """returns the computed entry for a VASP directory matching the regex"""
        # scan in reverse alpha-numeric order under the assumption that
        # directories with the highest (local) index correspond to final VaspRun
        for entry in reversed(self.data):
            if fnmatch.fnmatch(entry.data['filename'], regex):
                if oszicar and not entry.energy < 1e10: continue
                return entry

    @abc.abstractmethod
    def compile(self):
        """compile the extracted data into a reduced dataset to be contributed"""
        return
Example No. 19
    def energy(self, spch_config):
        """ Calculate total energy of the space charge model"""

        structure = spch_config.structure.get_sorted_structure()

        # if len(spinel_config.calc_history) >= 20:
        #    print("truncate calc_history")
        #    del spinel_config.calc_history[0:10]
        # calc_history = spinel_config.calc_history
        # if calc_history:
        #    # Try to avoid doing dft calculation for the same structure.
        #    # Assuming that calc_history is a list of ComputedStructureEntries
        #    for i in range(len(calc_history)):
        #        if self.matcher.fit(structure, calc_history[i].structure):
        #            print("match found in history")
        #            return calc_history[i].energy
        # print("before poscar")
        if self.selective_dynamics:
            seldyn_arr = [[True, True, True] for i in range(len(structure))]
            for specie in self.selective_dynamics:
                indices = structure.indices_from_symbol(specie)
                for i in indices:
                    seldyn_arr[i] = [False, False, False]
        else:
            seldyn_arr = None

        poscar = Poscar(structure=structure, selective_dynamics=seldyn_arr)
        # print("before vaspinput")
        vaspinput = self.base_vaspinput
        vaspinput.update({"POSCAR": poscar})
        exitcode = self.vasp_run.submit(vaspinput, os.getcwd() + "/output")
        # print("vasp exited with exit code", exitcode)
        if exitcode != 0:
            print("something went wrong")
            sys.exit(1)
        queen = BorgQueen(self.drone)
        # print(os.getcwd())
        queen.serial_assimilate("./output")
        # print(queen.get_data())
        # results = self.queen.get_data()[-1]
        results = queen.get_data()[-1]
        # calc_history.append(results)
        spch_config.structure = results.structure
        # print(results.energy)
        # sys.stdout.flush()

        return np.float64(results.energy)
Example No. 20
def get_e_v(path):
    """
    uses pymatgen drone and borgqueen classes to get energy and 
    volume data from the given directory path.
    """
    volumes = []
    energies = []
    drone = MPINTVaspDrone(inc_structure=True)
    bg = BorgQueen(drone)
    # bg.parallel_assimilate(path)
    bg.serial_assimilate(path)
    allentries = bg.get_data()
    for e in allentries:
        if e:
            energies.append(e.energy)
            volumes.append(e.structure.lattice.volume)
    return (volumes, energies)
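The (volumes, energies) lists returned by get_e_v are typically plotted or fed into an equation-of-state fit. A small illustrative sketch, assuming matplotlib is installed and "calc_dir" is a placeholder path to a set of volume-scan VASP runs:

import matplotlib.pyplot as plt

volumes, energies = get_e_v("calc_dir")  # "calc_dir" is a hypothetical directory of runs
plt.plot(volumes, energies, "o-")
plt.xlabel("Volume (Angstrom^3)")
plt.ylabel("Energy (eV)")
plt.savefig("e_v.png")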
Example No. 21
def get_e_v(path):
    """
    uses pymatgen drone and borgqueen classes to get energy and 
    volume data from the given directory path.
    """
    volumes = []
    energies = []
    drone = MPINTVaspDrone(inc_structure=True)
    bg = BorgQueen(drone)
    # bg.parallel_assimilate(path)
    bg.serial_assimilate(path)
    allentries = bg.get_data()
    for e in allentries:
        if e:
            energies.append(e.energy)
            volumes.append(e.structure.lattice.volume)
    return (volumes, energies)
Example No. 22
 def __init__(self, rootdir):
     """read vasp output via drone and extract all data
     
     :param rootdir: root directory containing collection of VASP dirs
     :type rootdir: str
     """
     self.rootdir = rootdir
     self.drone = SimpleVaspToComputedEntryDrone(inc_structure=True)
     self.queen = BorgQueen(self.drone, rootdir, 1) # TODO: make sure uw2_si2 also works in parallel
     self.data = self.queen.get_data()
Example No. 23
    def energy(self, HAp_config, save_history=False):
        """ Calculate total energy of the space charge model"""

        structure = HAp_config.structure.get_sorted_structure()
        if save_history and self.matcher is not None:
            calc_history = HAp_config.calc_history
            # Try to avoid doing dft calculation for the same structure.
            # Assuming that calc_history is a list of ComputedStructureEntries
            for i in range(len(calc_history)):
                if self.matcher.fit(structure, calc_history[i].structure0):
                    print("match found in history")
                    return calc_history[i].energy

        if self.selective_dynamics:
            seldyn_arr = [[True, True, True] for i in range(len(structure))]
            for specie in self.selective_dynamics:
                indices = structure.indices_from_symbol(specie)
                for i in indices:
                    seldyn_arr[i] = [False, False, False]
        else:
            seldyn_arr = None

        poscar = Poscar(structure=structure, selective_dynamics=seldyn_arr)
        vaspinput = self.base_vaspinput
        vaspinput.update({"POSCAR": poscar})
        exitcode = self.vasp_run.submit(vaspinput, os.getcwd() + "/output")
        if exitcode != 0:
            print("something went wrong")
            sys.exit(1)
        queen = BorgQueen(self.drone)
        queen.serial_assimilate("./output")
        results = queen.get_data()[-1]

        if save_history:
            results.structure0 = HAp_config.structure
            HAp_config.calc_history.append(results)

        HAp_config.structure = results.structure
        return np.float64(results.energy)
Example No. 24
class BorgQueenTest(unittest.TestCase):
    def setUp(self):
        drone = VaspToComputedEntryDrone()
        self.queen = BorgQueen(drone, test_dir, 1)

    def test_get_data(self):
        data = self.queen.get_data()
        self.assertEqual(len(data), 8)

    def test_load_data(self):
        drone = VaspToComputedEntryDrone()
        queen = BorgQueen(drone)
        queen.load_data(os.path.join(test_dir, "assimilated.json"))
        self.assertEqual(len(queen.get_data()), 1)
Example No. 25
class BorgQueenTest(unittest.TestCase):

    def setUp(self):
        drone = VaspToComputedEntryDrone()
        self.queen = BorgQueen(drone, test_dir, 1)

    def test_get_data(self):
        data = self.queen.get_data()
        self.assertEqual(len(data), 8)

    def test_load_data(self):
        drone = VaspToComputedEntryDrone()
        queen = BorgQueen(drone)
        queen.load_data(os.path.join(test_dir, "assimilated.json"))
        self.assertEqual(len(queen.get_data()), 1)
Example No. 26
class BorgQueenTest(unittest.TestCase):
    def setUp(self):
        warnings.simplefilter("ignore")

    def tearDown(self):
        warnings.simplefilter("default")

    def test_get_data(self):
        drone = VaspToComputedEntryDrone()
        self.queen = BorgQueen(drone, PymatgenTest.TEST_FILES_DIR, 1)
        data = self.queen.get_data()
        self.assertEqual(len(data), 12)

    def test_load_data(self):
        drone = VaspToComputedEntryDrone()
        queen = BorgQueen(drone)
        queen.load_data(os.path.join(PymatgenTest.TEST_FILES_DIR, "assimilated.json"))
        self.assertEqual(len(queen.get_data()), 1)
Example No. 27
def getEhull(new=''):
    drone = VaspToComputedEntryDrone()
    queen = BorgQueen(drone, './', 4)
    entriesorig = queen.get_data()
    queen.load_data(
        os.path.join(os.path.dirname(__file__), '../ML/data/missingels.json'))
    entriesextra = queen.get_data()

    if new != '':
        compat = MaterialsProjectCompatibility(check_potcar=False)
        entriesorig = compat.process_entries(entriesorig)

    for entry in entriesorig:
        name = entry.name
        line = re.findall('[A-Z][^A-Z]*',
                          name.replace('(', '').replace(')', ''))

    searchset = set(re.sub(r'\d', ' ', ' '.join(line)).split())
    entries = filter(
        lambda e: set(
            re.sub(r'\d', ' ',
                   str(e.composition).replace(' ', '')).split()) == searchset,
        entriesorig)

    entriesextra = filter(
        lambda e: set(
            re.sub(r'\d', ' ',
                   str(e.composition).replace(' ', '')).split()) & searchset,
        entriesextra)

    a = MPRester("YOUR_API_KEY")  # placeholder; use your own Materials Project API key

    all_entries = a.get_entries_in_chemsys(
        set(searchset)) + list(entries) + list(entriesextra)

    pd = PhaseDiagram(all_entries)  #,None

    for e in pd.stable_entries:
        if e.entry_id is None:
            reaction = pd.get_equilibrium_reaction_energy(e)
            return str(reaction) + ' None'

    for e in pd.unstable_entries:
        decomp, e_above_hull = pd.get_decomp_and_e_above_hull(e)
        pretty_decomp = [("{}:{}".format(k.composition.reduced_formula,
                                         k.entry_id), round(v, 2))
                         for k, v in decomp.items()]
        if e.entry_id is None:
            return str(e_above_hull) + ' ' + str(pretty_decomp)
Example No. 28
 def test_load_data(self):
     drone = VaspToComputedEntryDrone()
     queen = BorgQueen(drone)
     queen.load_data(os.path.join(PymatgenTest.TEST_FILES_DIR, "assimilated.json"))
     self.assertEqual(len(queen.get_data()), 1)
Example No. 29
 def test_get_data(self):
     drone = VaspToComputedEntryDrone()
     self.queen = BorgQueen(drone, PymatgenTest.TEST_FILES_DIR, 1)
     data = self.queen.get_data()
     self.assertEqual(len(data), 12)
Example No. 30
    def test_assimilate(self):
        simulate = True if VaspToDbTaskDroneTest.conn is None else False
        drone = VaspToDbTaskDrone(database="creator_unittest",
                                  simulate_mode=simulate)
        queen = BorgQueen(drone)
        queen.serial_assimilate(os.path.join(test_dir, 'db_test'))
        data = queen.get_data()
        self.assertEqual(len(data), 5)
        if VaspToDbTaskDroneTest.conn:
            db = VaspToDbTaskDroneTest.conn["creator_unittest"]
            data = db.tasks.find()
            self.assertEqual(data.count(), 5)
            warnings.warn("Actual db insertion mode.")

        for d in data:
            dir_name = d['dir_name']
            if dir_name.endswith("killed_mp_aflow"):
                self.assertEqual(d['state'], "killed")
                self.assertFalse(d['is_hubbard'])
                self.assertEqual(d['pretty_formula'], "SiO2")
            elif dir_name.endswith("stopped_mp_aflow"):
                self.assertEqual(d['state'], "stopped")
                self.assertEqual(d['pretty_formula'], "ThFe5P3")
            elif dir_name.endswith("success_mp_aflow"):
                self.assertEqual(d['state'], "successful")
                self.assertEqual(d['pretty_formula'], "TbZn(BO2)5")
                self.assertAlmostEqual(d['output']['final_energy'],
                                       -526.66747274, 4)
            elif dir_name.endswith("Li2O_aflow"):
                self.assertEqual(d['state'], "successful")
                self.assertEqual(d['pretty_formula'], "Li2O")
                self.assertAlmostEqual(d['output']['final_energy'],
                                       -14.31446494, 6)
                self.assertEqual(len(d["calculations"]), 2)
            elif dir_name.endswith("Li2O"):
                self.assertEqual(d['state'], "successful")
                self.assertEqual(d['pretty_formula'], "Li2O")
                self.assertAlmostEqual(d['output']['final_energy'],
                                       -14.31337758, 6)
                self.assertEqual(len(d["calculations"]), 1)
                self.assertEqual(len(d["custodian"]), 1)
                self.assertEqual(len(d["custodian"][0]["corrections"]), 1)

        if VaspToDbTaskDroneTest.conn:
            warnings.warn("Testing query engine mode.")
            qe = QueryEngine(database="creator_unittest")
            self.assertEqual(qe.query().count(), 5)
            #Test mappings by query engine.
            for r in qe.query(criteria={"pretty_formula": "Li2O"},
                              properties=["dir_name", "energy",
                                          "calculations"]):
                if r["dir_name"].endswith("Li2O_aflow"):
                    self.assertAlmostEqual(r['energy'], -14.31446494, 4)
                    self.assertEqual(len(r["calculations"]), 2)
                elif r["dir_name"].endswith("Li2O"):
                    self.assertAlmostEqual(r['energy'],
                                           -14.31337758, 4)
                    self.assertEqual(len(r["calculations"]), 1)

            # Test query one.
            d = qe.query_one(criteria={"pretty_formula": "TbZn(BO2)5"},
                             properties=["energy"])
            self.assertAlmostEqual(d['energy'], -526.66747274, 4)

            d = qe.get_entries_in_system(["Li", "O"])
            self.assertEqual(len(d), 2)
            self.assertIsInstance(d[0], ComputedEntry)

            s = qe.get_structure_from_id(d[0].entry_id)
            self.assertIsInstance(s, Structure)
            self.assertEqual(s.formula, "Li2 O1")
Example No. 31
 def test_load_data(self):
     drone = VaspToComputedEntryDrone()
     queen = BorgQueen(drone)
     queen.load_data(os.path.join(test_dir, "assimilated.json"))
     self.assertEqual(len(queen.get_data()), 1)
Example No. 32
    def get_phase_diagram_data(self):
        """
        Returns grand potential phase diagram data to external plot
        Assumes openelement specific element equals None
        :return: Data to external plot
        """
        open_elements_specific = None
        open_element_all = Element(self.open_element)
        mpr = MPRester(settings.apiKey)

        drone = VaspToComputedEntryDrone()
        queen = BorgQueen(drone, rootpath=".")
        entries = queen.get_data()

        # Get data to make phase diagram
        mp_entries = mpr.get_entries_in_chemsys(self.system,
                                                compatible_only=True)

        entries.extend(mp_entries)

        compat = MaterialsProjectCompatibility()
        entries = compat.process_entries(entries)
        #explanation_output = open("explain.txt",'w')
        #entries_output = open("entries.txt", 'w')
        compat.explain(entries[0])
        #print(entries, file=entries_output)

        if open_elements_specific:
            gcpd = GrandPotentialPhaseDiagram(entries, open_elements_specific)
            self.plot_phase_diagram(gcpd, False)
            self.analyze_phase_diagram(gcpd)

        if open_element_all:
            pd = PhaseDiagram(entries)
            chempots = pd.get_transition_chempots(open_element_all)
            # print(chempots)
            #all_gcpds = list()
            toplot = []
            # dic = {}
            for idx in range(len(chempots)):
                if idx == len(chempots) - 1:
                    avgchempot = chempots[idx] - 0.1
                else:
                    avgchempot = 0.5 * (chempots[idx] + chempots[idx + 1])
                gcpd = GrandPotentialPhaseDiagram(
                    entries, {open_element_all: avgchempot}, pd.elements)

                # min_chempot = None if idx == len(
                #     chempots) - 1 else chempots[idx + 1]
                # max_chempot = chempots[idx]
                #gcpd = GrandPotentialPhaseDiagram(entries, {open_element_all: max_chempot}, pd.elements)

                toplot.append(self.get_grand_potential_phase_diagram(gcpd))
                # toplot.append(max_chempot)

                #self.plot_phase_diagram(gcpd, False)
                #print({open_element_all: max_chempot})

        all_phase_diagrams = toplot
        # print(all_phase_diagrams)

        number_of_phase_diagrams = len(all_phase_diagrams)

        #pd3 = PhaseDiagram(entries)

        chempot_list = pd.get_transition_chempots(open_element_all)
        pd_index = 0

        chempots_range_of_each_phase = {}
        for particular_phase_diagram in all_phase_diagrams:
            chempot = chempot_list[pd_index]

            if pd_index != number_of_phase_diagrams - 1:
                next_chempot = chempot_list[pd_index + 1]
            else:
                next_chempot = chempot_list[pd_index] - 2.0
            chempot_range = [chempot, next_chempot]

            phases_list = particular_phase_diagram[0]

            for phase in phases_list:
                if phase in chempots_range_of_each_phase.keys():
                    chempots_range_of_each_phase[phase][1] = next_chempot.copy(
                    )
                else:
                    chempots_range_of_each_phase[phase] = chempot_range.copy()

            pd_index = pd_index + 1

        return chempots_range_of_each_phase
Example No. 33
    vasprun = runner(
        Solver=solver,
        nprocs=nprocs_per_vasp,
        comm=MPI.COMM_SELF,
    )
    baseinput = VaspInput.from_directory(
        "baseinput")  # (os.path.join(os.path.dirname(__file__), "baseinput"))
    ltol = 0.1
    matcher = StructureMatcher(ltol=ltol, primitive_cell=False)
    matcher_base = StructureMatcher(ltol=ltol,
                                    primitive_cell=False,
                                    stol=0.5,
                                    allow_subset=True)  # ,
    # comparator=FrameworkComparator(), ignored_species=["Pt","Zr"])
    drone = SimpleVaspToComputedEntryDrone(inc_structure=True)
    queen = BorgQueen(drone)
    model = dft_HAp(
        calcode="VASP",
        vasp_run=vasprun,
        base_vaspinput=baseinput,
        matcher_base=matcher_base,
        queen=queen,
        matcher=matcher,
    )
    # matcher=matcher, matcher_site=matcher_site, queen=queen, selective_dynamics=["Pt"])

    myrank = comm.Get_rank()
    os.mkdir(str(myrank))
    os.chdir(str(myrank))
    for i in range(len(reps)):
        if i % nreplicas == myrank:
Example No. 34
    def test_assimilate(self):
        """Borg assimilation code.
        This takes too long for a unit test!
        """
        simulate = True if VaspToDbTaskDroneTest.conn is None else False
        drone = VaspToDbTaskDrone(database="creator_unittest",
                                  simulate_mode=simulate,
                                  parse_dos=True, compress_dos=1)
        queen = BorgQueen(drone)
        queen.serial_assimilate(os.path.join(test_dir, 'db_test'))
        data = queen.get_data()
        self.assertEqual(len(data), 6)
        if VaspToDbTaskDroneTest.conn:
            db = VaspToDbTaskDroneTest.conn["creator_unittest"]
            data = db.tasks.find()
            self.assertEqual(data.count(), 6)
            warnings.warn("Actual db insertion mode.")

        for d in data:
            dir_name = d['dir_name']
            if dir_name.endswith("killed_mp_aflow"):
                self.assertEqual(d['state'], "killed")
                self.assertFalse(d['is_hubbard'])
                self.assertEqual(d['pretty_formula'], "SiO2")
            elif dir_name.endswith("stopped_mp_aflow"):
                self.assertEqual(d['state'], "stopped")
                self.assertEqual(d['pretty_formula'], "ThFe5P3")
            elif dir_name.endswith("success_mp_aflow"):
                self.assertEqual(d['state'], "successful")
                self.assertEqual(d['pretty_formula'], "TbZn(BO2)5")
                self.assertAlmostEqual(d['output']['final_energy'],
                                       -526.66747274, 4)
            elif dir_name.endswith("Li2O_aflow"):
                self.assertEqual(d['state'], "successful")
                self.assertEqual(d['pretty_formula'], "Li2O")
                self.assertAlmostEqual(d['output']['final_energy'],
                                       -14.31446494, 6)
                self.assertEqual(len(d["calculations"]), 2)
                self.assertEqual(d['input']['is_lasph'], False)
                self.assertEqual(d['input']['xc_override'], None)
                self.assertEqual(d["oxide_type"], "oxide")
            elif dir_name.endswith("Li2O"):
                self.assertEqual(d['state'], "successful")
                self.assertEqual(d['pretty_formula'], "Li2O")
                self.assertAlmostEqual(d['output']['final_energy'],
                                       -14.31337758, 6)
                self.assertEqual(len(d["calculations"]), 1)
                self.assertEqual(len(d["custodian"]), 1)
                self.assertEqual(len(d["custodian"][0]["corrections"]), 1)
            elif dir_name.endswith("Li2O_aflow_lasph"):
                self.assertEqual(d['state'], "successful")
                self.assertEqual(d['pretty_formula'], "Li2O")
                self.assertAlmostEqual(d['output']['final_energy'],
                                       -13.998171, 6)
                self.assertEqual(len(d["calculations"]), 2)
                self.assertEqual(d['input']['is_lasph'], True)
                self.assertEqual(d['input']['xc_override'], "PS")

        if VaspToDbTaskDroneTest.conn:
            warnings.warn("Testing query engine mode.")
            qe = QueryEngine(database="creator_unittest")
            self.assertEqual(qe.query().count(), 6)
            #Test mappings by query engine.
            for r in qe.query(criteria={"pretty_formula": "Li2O"},
                              properties=["dir_name", "energy",
                                          "calculations", "input"]):
                if r["dir_name"].endswith("Li2O_aflow"):
                    self.assertAlmostEqual(r['energy'], -14.31446494, 4)
                    self.assertEqual(len(r["calculations"]), 2)
                    self.assertEqual(r["input"]["is_lasph"], False)
                    self.assertEqual(r['input']['xc_override'], None)
                    self.assertEqual(d["oxide_type"], "oxide")
                elif r["dir_name"].endswith("Li2O"):
                    self.assertAlmostEqual(r['energy'],
                                           -14.31337758, 4)
                    self.assertEqual(len(r["calculations"]), 1)
                    self.assertEqual(r["input"]["is_lasph"], False)
                    self.assertEqual(r['input']['xc_override'], None)

            # Test lasph
            e = qe.get_entries({"dir_name":{"$regex":"lasph"}})
            self.assertEqual(len(e), 1)
            self.assertEqual(e[0].parameters["is_lasph"], True)
            self.assertEqual(e[0].parameters["xc_override"], "PS")

            # Test query one.
            d = qe.query_one(criteria={"pretty_formula": "TbZn(BO2)5"},
                             properties=["energy"])
            self.assertAlmostEqual(d['energy'], -526.66747274, 4)

            d = qe.get_entries_in_system(["Li", "O"])
            self.assertEqual(len(d), 3)
            self.assertIsInstance(d[0], ComputedEntry)
            self.assertEqual(d[0].data["oxide_type"], "oxide")

            s = qe.get_structure_from_id(d[0].entry_id)
            self.assertIsInstance(s, Structure)
            self.assertEqual(s.formula, "Li2 O1")

            self.assertIsInstance(qe.get_dos_from_id(d[0].entry_id), CompleteDos)
Example No. 35
 def test_load_data(self):
     drone = VaspToComputedEntryDrone()
     queen = BorgQueen(drone)
     queen.load_data(os.path.join(test_dir, "assimilated.json"))
     self.assertEqual(len(queen.get_data()), 1)
Example No. 36
 def setUp(self):
     drone = VaspToComputedEntryDrone()
     self.queen = BorgQueen(drone, test_dir, 1)
Example No. 37
def get_energies(rootdir, reanalyze, verbose, quick, sort, fmt):
    """
    Get energies of all vaspruns in directory (nested).
    Args:
        rootdir (str): Root directory.
        reanalyze (bool): Whether to ignore saved results and reanalyze
        verbose (bool): Verbose mode or not.
        quick (bool): Whether to perform a quick analysis (using OSZICAR
            instead of vasprun.xml).
        sort (bool): Whether to sort the results in ascending order.
        fmt (str): tablefmt passed to tabulate.
    """
    if verbose:
        logformat = "%(relativeCreated)d msecs : %(message)s"
        logging.basicConfig(level=logging.INFO, format=logformat)

    if quick:
        drone = SimpleVaspToComputedEntryDrone(inc_structure=True)
    else:
        drone = VaspToComputedEntryDrone(
            inc_structure=True, data=["filename", "initial_structure"])

    ncpus = multiprocessing.cpu_count()
    logging.info("Detected {} cpus".format(ncpus))
    queen = BorgQueen(drone, number_of_drones=ncpus)
    if os.path.exists(SAVE_FILE) and not reanalyze:
        msg = ("Using previously assimilated data from {}.".format(SAVE_FILE) +
               " Use -r to force re-analysis.")
        queen.load_data(SAVE_FILE)
    else:
        if ncpus > 1:
            queen.parallel_assimilate(rootdir)
        else:
            queen.serial_assimilate(rootdir)
        msg = ("Analysis results saved to {} for faster ".format(SAVE_FILE) +
               "subsequent loading.")
        queen.save_data(SAVE_FILE)

    entries = queen.get_data()
    if sort == "energy_per_atom":
        entries = sorted(entries, key=lambda x: x.energy_per_atom)
    elif sort == "filename":
        entries = sorted(entries, key=lambda x: x.data["filename"])

    all_data = []
    for e in entries:
        if quick:
            delta_vol = "NA"
        else:
            delta_vol = e.structure.volume / e.data[
                "initial_structure"].volume - 1
            delta_vol = "{:.2f}".format(delta_vol * 100)
        all_data.append((
            e.data["filename"].replace("./", ""),
            re.sub(r"\s+", "", e.composition.formula),
            "{:.5f}".format(e.energy),
            "{:.5f}".format(e.energy_per_atom),
            delta_vol,
        ))
    if len(all_data) > 0:
        headers = ("Directory", "Formula", "Energy", "E/Atom", "% vol chg")
        print(tabulate(all_data, headers=headers, tablefmt=fmt))
        print("")
        print(msg)
    else:
        print("No valid vasp run found.")
        os.unlink(SAVE_FILE)
    return 0
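A hedged example of calling this helper directly; the argument values below are illustrative (in the real script they come from the command-line parser), and SAVE_FILE is assumed to be the module-level cache path used above:

get_energies(rootdir=".", reanalyze=False, verbose=False,
             quick=True, sort="energy_per_atom", fmt="simple")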
Example No. 38
def get_energies(rootdir, reanalyze, verbose, detailed, sort, fmt):
    """
    Doc string.
    """
    if verbose:
        logformat = "%(relativeCreated)d msecs : %(message)s"
        logging.basicConfig(level=logging.INFO, format=logformat)

    if not detailed:
        drone = SimpleVaspToComputedEntryDrone(inc_structure=True)
    else:
        drone = VaspToComputedEntryDrone(
            inc_structure=True, data=["filename", "initial_structure"])

    ncpus = multiprocessing.cpu_count()
    logging.info("Detected {} cpus".format(ncpus))
    queen = BorgQueen(drone, number_of_drones=ncpus)
    if os.path.exists(SAVE_FILE) and not reanalyze:
        msg = "Using previously assimilated data from {}.".format(SAVE_FILE) \
              + " Use -r to force re-analysis."
        queen.load_data(SAVE_FILE)
    else:
        if ncpus > 1:
            queen.parallel_assimilate(rootdir)
        else:
            queen.serial_assimilate(rootdir)
        msg = "Analysis results saved to {} for faster ".format(SAVE_FILE) + \
              "subsequent loading."
        queen.save_data(SAVE_FILE)

    entries = queen.get_data()
    if sort == "energy_per_atom":
        entries = sorted(entries, key=lambda x: x.energy_per_atom)
    elif sort == "filename":
        entries = sorted(entries, key=lambda x: x.data["filename"])

    all_data = []
    for e in entries:
        if not detailed:
            delta_vol = "{:.2f}".format(e.data["delta_volume"] * 100)
        else:
            delta_vol = e.structure.volume / \
                        e.data["initial_structure"].volume - 1
            delta_vol = "{:.2f}".format(delta_vol * 100)
        all_data.append(
            (e.data["filename"].replace("./", ""),
             re.sub(r"\s+", "",
                    e.composition.formula), "{:.5f}".format(e.energy),
             "{:.5f}".format(e.energy_per_atom), delta_vol))
    if len(all_data) > 0:
        headers = ("Directory", "Formula", "Energy", "E/Atom", "% vol chg")
        print(tabulate(all_data, headers=headers, tablefmt=fmt))
        print("")
        print(msg)
    else:
        print("No valid vasp run found.")
        os.unlink(SAVE_FILE)
Example No. 39
 def setUp(self):
     drone = VaspToComputedEntryDrone()
     self.queen = BorgQueen(drone, test_dir, 1)
Example No. 40
    def submit_vasp_directory(self,
                              rootdir,
                              authors,
                              projects=None,
                              references='',
                              remarks=None,
                              master_data=None,
                              master_history=None,
                              created_at=None,
                              ncpus=None):
        """
        Assimilates all vasp run directories beneath a particular
        directory using BorgQueen to obtain structures, and then submits them
        to the Materials Project as SNL files. VASP-related metadata like
        initial structure and final energies are automatically incorporated.

        .. note::

            As of now, this MP REST feature is open only to a select group of
            users. Opening up submissions to all users is being planned for
            the future.

        Args:
            rootdir (str): Rootdir to start assimilating VASP runs from.
            authors: *List* of {"name":'', "email":''} dicts,
                *list* of Strings as 'John Doe <*****@*****.**>',
                or a single String with commas separating authors. The same
                list of authors should apply to all runs.
            projects ([str]): List of Strings ['Project A', 'Project B'].
                This applies to all structures.
            references (str): A String in BibTeX format. Again, this applies to
                all structures.
            remarks ([str]): List of Strings ['Remark A', 'Remark B']
            master_data (dict): A free form dict. Namespaced at the root
                level with an underscore, e.g. {"_materialsproject":<custom
                data>}. This data is added to all structures detected in the
                directory, in addition to other vasp data on a per structure
                basis.
            master_history: A master history to be added to all entries.
            created_at (datetime): A datetime object
            ncpus (int): Number of cpus to use in using BorgQueen to
                assimilate. Defaults to None, which means serial.
        """
        from pymatgen.apps.borg.hive import VaspToComputedEntryDrone
        from pymatgen.apps.borg.queen import BorgQueen
        drone = VaspToComputedEntryDrone(
            inc_structure=True, data=["filename", "initial_structure"])
        queen = BorgQueen(drone, number_of_drones=ncpus)
        queen.parallel_assimilate(rootdir)

        structures = []
        metadata = []
        histories = []
        for e in queen.get_data():
            structures.append(e.structure)
            m = {
                "_vasp": {
                    "parameters": e.parameters,
                    "final_energy": e.energy,
                    "final_energy_per_atom": e.energy_per_atom,
                    "initial_structure": e.data["initial_structure"].as_dict()
                }
            }
            if "history" in e.parameters:
                histories.append(e.parameters["history"])
            if master_data is not None:
                m.update(master_data)
            metadata.append(m)
        if master_history is not None:
            histories = master_history * len(structures)

        return self.submit_structures(structures,
                                      authors,
                                      projects=projects,
                                      references=references,
                                      remarks=remarks,
                                      data=metadata,
                                      histories=histories,
                                      created_at=created_at)
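A hedged usage sketch of this method, assuming it is exposed on pymatgen's MPRester as the surrounding code suggests; the root directory, author string, and project name below are placeholders:

from pymatgen.ext.matproj import MPRester

with MPRester("key") as mpr:
    # rootdir, authors and projects are hypothetical values for illustration
    submission_ids = mpr.submit_vasp_directory(
        rootdir="path/to/vasp_runs",
        authors="John Doe <john.doe@example.com>",
        projects=["Project A"],
        ncpus=4,
    )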
Example No. 41
from __future__ import division, unicode_literals, print_function

from mpinterfaces.database import MPINTVaspToDbTaskDrone
from pymatgen.apps.borg.queen import BorgQueen
#import multiprocessing

additional_fields = {"author":"kiran"} #"doi":"10.1063/1.4865107"
drone = MPINTVaspToDbTaskDrone(host="127.0.0.1", port=27017,
                               database="vasp", collection="collection_name",
                               user="******", password="******",
                               additional_fields=additional_fields)

ncpus = 4 #multiprocessing.cpu_count()
queen = BorgQueen(drone, number_of_drones=ncpus)
queen.parallel_assimilate('path_to_vasp_calculation_folders')
Example No. 42
    def test_assimilate(self):
        """Borg assimilation code.
        This takes too long for a unit test!
        """
        simulate = True if VaspToDbTaskDroneTest.conn is None else False
        drone = VaspToDbTaskDrone(database="creator_unittest",
                                  simulate_mode=simulate,
                                  parse_dos=True,
                                  compress_dos=1)
        queen = BorgQueen(drone)
        queen.serial_assimilate(os.path.join(test_dir, 'db_test'))
        data = queen.get_data()
        self.assertEqual(len(data), 6)
        if VaspToDbTaskDroneTest.conn:
            db = VaspToDbTaskDroneTest.conn["creator_unittest"]
            data = db.tasks.find()
            self.assertEqual(data.count(), 6)
            warnings.warn("Actual db insertion mode.")

        for d in data:
            dir_name = d['dir_name']
            if dir_name.endswith("killed_mp_aflow"):
                self.assertEqual(d['state'], "killed")
                self.assertFalse(d['is_hubbard'])
                self.assertEqual(d['pretty_formula'], "SiO2")
            elif dir_name.endswith("stopped_mp_aflow"):
                self.assertEqual(d['state'], "stopped")
                self.assertEqual(d['pretty_formula'], "ThFe5P3")
            elif dir_name.endswith("success_mp_aflow"):
                self.assertEqual(d['state'], "successful")
                self.assertEqual(d['pretty_formula'], "TbZn(BO2)5")
                self.assertAlmostEqual(d['output']['final_energy'],
                                       -526.66747274, 4)
            elif dir_name.endswith("Li2O_aflow"):
                self.assertEqual(d['state'], "successful")
                self.assertEqual(d['pretty_formula'], "Li2O")
                self.assertAlmostEqual(d['output']['final_energy'],
                                       -14.31446494, 6)
                self.assertEqual(len(d["calculations"]), 2)
                self.assertEqual(d['input']['is_lasph'], False)
                self.assertEqual(d['input']['xc_override'], None)
            elif dir_name.endswith("Li2O"):
                self.assertEqual(d['state'], "successful")
                self.assertEqual(d['pretty_formula'], "Li2O")
                self.assertAlmostEqual(d['output']['final_energy'],
                                       -14.31337758, 6)
                self.assertEqual(len(d["calculations"]), 1)
                self.assertEqual(len(d["custodian"]), 1)
                self.assertEqual(len(d["custodian"][0]["corrections"]), 1)
            elif dir_name.endswith("Li2O_aflow_lasph"):
                self.assertEqual(d['state'], "successful")
                self.assertEqual(d['pretty_formula'], "Li2O")
                self.assertAlmostEqual(d['output']['final_energy'], -13.998171,
                                       6)
                self.assertEqual(len(d["calculations"]), 2)
                self.assertEqual(d['input']['is_lasph'], True)
                self.assertEqual(d['input']['xc_override'], "PS")

        if VaspToDbTaskDroneTest.conn:
            warnings.warn("Testing query engine mode.")
            qe = QueryEngine(database="creator_unittest")
            self.assertEqual(qe.query().count(), 6)
            #Test mappings by query engine.
            for r in qe.query(
                    criteria={"pretty_formula": "Li2O"},
                    properties=["dir_name", "energy", "calculations",
                                "input"]):
                if r["dir_name"].endswith("Li2O_aflow"):
                    self.assertAlmostEqual(r['energy'], -14.31446494, 4)
                    self.assertEqual(len(r["calculations"]), 2)
                    self.assertEqual(r["input"]["is_lasph"], False)
                    self.assertEqual(r['input']['xc_override'], None)
                elif r["dir_name"].endswith("Li2O"):
                    self.assertAlmostEqual(r['energy'], -14.31337758, 4)
                    self.assertEqual(len(r["calculations"]), 1)
                    self.assertEqual(r["input"]["is_lasph"], False)
                    self.assertEqual(r['input']['xc_override'], None)

            #Test lasph
            e = qe.get_entries({"dir_name": {"$regex": "lasph"}})
            self.assertEqual(len(e), 1)
            self.assertEqual(e[0].parameters["is_lasph"], True)
            self.assertEqual(e[0].parameters["xc_override"], "PS")

            # Test query one.
            d = qe.query_one(criteria={"pretty_formula": "TbZn(BO2)5"},
                             properties=["energy"])
            self.assertAlmostEqual(d['energy'], -526.66747274, 4)

            d = qe.get_entries_in_system(["Li", "O"])
            self.assertEqual(len(d), 3)
            self.assertIsInstance(d[0], ComputedEntry)

            s = qe.get_structure_from_id(d[0].entry_id)
            self.assertIsInstance(s, Structure)
            self.assertEqual(s.formula, "Li2 O1")

            self.assertIsInstance(qe.get_dos_from_id(d[0].entry_id),
                                  CompleteDos)
Example No. 43
from pymatgen.ext.matproj import MPRester
from pymatgen.apps.borg.hive import VaspToComputedEntryDrone
from pymatgen.apps.borg.queen import BorgQueen
from pymatgen.entries.compatibility import MaterialsProjectCompatibility
from pymatgen.analysis.phase_diagram import PhaseDiagram
from pymatgen.analysis.phase_diagram import PDPlotter

# Assimilate VASP calculations into ComputedEntry objects. Let's assume that
# the calculations are for a series of new LixSnySz phases whose phase
# stability we want to determine.
drone = VaspToComputedEntryDrone()
queen = BorgQueen(drone, rootpath=".")
entries = queen.get_data()

# Obtain all existing Li-Sn-S phases using the Materials Project REST API
with MPRester("key") as m:
    mp_entries = m.get_entries_in_chemsys(["Li", "Sn", "S"])

# Combined entry from calculated run with Materials Project entries
entries.extend(mp_entries)

# Process entries using the MaterialsProjectCompatibility
compat = MaterialsProjectCompatibility()
entries = compat.process_entries(entries)

# Generate and plot the Li-Sn-S phase diagram
pd = PhaseDiagram(entries)
plotter = PDPlotter(pd)
plotter.show()
Example No. 44
    def get_phase_diagram_data(self):
        """
        Returns grand potential phase diagram data to external plot
        Assumes openelement specific element equals None
        :return: Data to external plot
        """
        open_elements_specific = None
        open_element_all = Element(self.open_element)
        mpr = MPRester("key")

        # import do dados dos arquivos tipo vasp
        drone = VaspToComputedEntryDrone()
        queen = BorgQueen(drone, rootpath=".")
        entries = queen.get_data()

        # Get data to make phase diagram
        mp_entries = mpr.get_entries_in_chemsys(self.system,
                                                compatible_only=True)

        entries.extend(mp_entries)

        compat = MaterialsProjectCompatibility()
        entries = compat.process_entries(entries)
        #explanation_output = open("explain.txt",'w')
        entries_output = open("entries.txt", 'w')
        compat.explain(entries[0])
        print(entries, file=entries_output)
        #print(entries)

        if open_elements_specific:
            gcpd = GrandPotentialPhaseDiagram(entries, open_elements_specific)
            self.plot_phase_diagram(gcpd, False)
            self.analyze_phase_diagram(gcpd)

        if open_element_all:
            pd = PhaseDiagram(entries)
            chempots = pd.get_transition_chempots(open_element_all)
            #print(chempots)
            #all_gcpds = list()
            toplot = []
            # dic = {}
            for idx in range(len(chempots)):
                if idx == len(chempots) - 1:
                    avgchempot = chempots[idx] - 0.1
                else:
                    avgchempot = 0.5 * (chempots[idx] + chempots[idx + 1])
                gcpd = GrandPotentialPhaseDiagram(
                    entries, {open_element_all: avgchempot}, pd.elements)
                # toplot.append(self.get_grand_potential_phase_diagram(gcpd))

                min_chempot = None if idx == len(chempots) - 1 else chempots[
                    idx + 1]
                max_chempot = chempots[idx]
                #gcpd = GrandPotentialPhaseDiagram(entries, {open_element_all: max_chempot}, pd.elements)

                toplot.append(self.get_grand_potential_phase_diagram(gcpd))
                #toplot.append(max_chempot)

                #self.plot_phase_diagram(gcpd, False)
                #print({open_element_all: max_chempot})

            # Data to plot phase diagram
            return toplot
Example No. 45
def get_energies(rootdir, reanalyze, verbose, detailed,
                 sort, formulaunit, debug, hull, threshold, args, templatestructure):

    """
    Doc string.
    """
    ion_list = 'Novalue'
    ave_key_list = 'Novalue'
    threscount = 0

    if (verbose and not debug):
        FORMAT = "%(relativeCreated)d msecs : %(message)s"
        logging.basicConfig(level=logging.INFO, format=FORMAT)

    elif debug:
        logging.basicConfig(level=logging.DEBUG)

    if not detailed:
        drone = SimpleVaspToComputedEntryDrone(inc_structure=True)
    else:
        drone = VaspToComputedEntryDrone(inc_structure=True,
                                         data=["filename",
                                               "initial_structure"])



    ncpus = multiprocessing.cpu_count()
    logging.info("Detected {} cpus".format(ncpus))
    queen = BorgQueen(drone, number_of_drones=ncpus)


    if os.path.exists(SAVE_FILE) and not reanalyze:
        msg = "Using previously assimilated data from {}.".format(SAVE_FILE) \
            + " Use -f to force re-analysis."
        queen.load_data(SAVE_FILE)
    else:
        if ncpus > 1:
            queen.parallel_assimilate(rootdir)
        else:
            queen.serial_assimilate(rootdir)
        msg = "Analysis results saved to {} for faster ".format(SAVE_FILE) + \
              "subsequent loading."
        queen.save_data(SAVE_FILE)

    entries = queen.get_data()
    if sort == "energy_per_atom":
        entries = sorted(entries, key=lambda x: x.energy_per_atom)
    elif sort == "filename":
        entries = sorted(entries, key=lambda x: x.data["filename"])

    # logging.debug('First Energy entry is {}'.format(entries[0]))

    base_energy = entries[0].energy
    logging.debug('Type of entries is: {}'.format(type(entries)))
    logging.debug('First element of entries is: {}'.format(entries[0]))

    # logging.debug('First Energy entry structure is {}'.format(entries[0].structure))

    xy_direction = int(args.XYdirection)
    tolerance = float(args.tolerance)

    if args.template:
        template_site_info = Na12(['Co', 'Mn'], ['Na'], templatestructure,
                                  templatestructure, XY_Direction=xy_direction,
                                  tol=tolerance)
        logging.debug('Template structure site info is: {}'.format(template_site_info))

    all_data = []
    energy_diff = []

    threshold = float(threshold)

    Structure_info_dict = {}
    check_ion_seq = [args.dupion]


    for e in entries:

        if not detailed:
            delta_vol = "{:.2f}".format(e.data["delta_volume"] * 100)
        else:
            delta_vol = e.structure.volume / \
                e.data["initial_structure"].volume - 1
            delta_vol = "{:.2f}".format(delta_vol * 100)


        entry_path = e.data['filename'].rsplit('/', 1)[0]

        entry_site_info = Na12(['Co', 'Mn'], ['Na'], e.structure, e.structure,
                               XY_Direction=xy_direction, tol=tolerance)

        logging.debug('Total Na site: {}'.format(entry_site_info['Total_Na_Site']))

        # Fractional coordinates of the Na sites in the relaxed (CONTCAR) structure
        na_sites_fcoords = [site.frac_coords for site in e.data['CONTCAR_Structure']
                            if site.specie.symbol == 'Na']
        na_sites_fcoords_list_tuple = [tuple(coord) for coord in na_sites_fcoords]

        entry_data = [rootdir, e.data["filename"].replace("./", ""),
                      re.sub(r"\s+", "", e.composition.formula),
                      "{:.5f}".format(e.energy),
                      "{:.5f}".format(1000 * (e.energy - base_energy) / int(formulaunit)),
                      "{:.5f}".format(e.energy_per_atom),
                      delta_vol, e.parameters['run_type']]
        if args.nupdown:
            entry_data.extend([e.data['NUPDOWN'], e.data['ISMEAR']])
        entry_data.append(na_sites_fcoords_list_tuple)


        if args.structure:
            entry_data.extend([entry_site_info['Total_Na_Site'],
                               entry_site_info['Na2_Site'],
                               entry_site_info['Na1_Mn_Site'],
                               entry_site_info['Na1_Co_Site'],
                               entry_site_info['Na1_Mn_Co_Site']])

        if args.template:
            entry_data.extend([template_site_info['Total_Na_Site'],
                               template_site_info['Na2_Site'],
                               template_site_info['Na1_Mn_Site'],
                               template_site_info['Na1_Co_Site'],
                               template_site_info['Na1_Mn_Co_Site']])

        logging.debug(e.data)
        if args.duplicate:
            Duplicate, Duplicate_Entry, Structure_info_dict = check_ex(
                check_ion_seq, Structure_info_dict, e, args.tolerance)
            entry_data.extend([Duplicate, Duplicate_Entry])


        if args.ion_list:
            if args.ion_list[0] == "All":
                ion_list = None
            else:
                (start, end) = [int(i) for i in re.split("-", args.ion_list[0])]
                ion_list = list(range(start, end + 1))
            # Read magnetization data from the run directory of this entry.
            magdata = get_magnetization(entry_path, ion_list)
            entry_data.extend(magdata)

        if args.ion_avg_list:
            ave_mag_data, ave_key_list = get_ave_magnetization(entry_path,args.ion_avg_list)
            entry_data.extend(ave_mag_data)

        all_data.append(entry_data)
        if threshold != 0 and float(entry_data[4]) < threshold:
            threscount += 1

        energy_diff.append("{:.5f}".format(1000*(e.energy-base_energy)/int(formulaunit)))


    # if len(all_data) > 0:
    #     headers = ("Directory", "Formula", "Energy", "Energy Diff (meV)/F.U.","E/Atom", "% vol chg")
    #     t = PrettyTable(headers)
    #     t.align["Directory"] = "l"
    #     for d in all_data:
    #         logging.debug('data row in all data is: \n {}'.format(d))
    #         t.add_row(d)
    #     print(t)
    #     print(msg)
    # else:
    #     print("No valid vasp run found.")

    if hull:
        print('Analyzing group: {}\n'.format(rootdir))
        print('Energy above hull is: \n')
        print(energy_diff)

    logging.info('In group: {}, number of entries fall in threshold is {}'.format(rootdir,threscount))
    all_data.append([])

    return all_data
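The tabulation left commented out above can be restored with prettytable. The sketch below is illustrative only and assumes the default entry_data layout (no --nupdown, --structure, --template, --duplicate or ion-list options), so the header list is an assumption rather than the script's fixed output.

# Hedged sketch: tabulate the rows returned by get_energies(). The headers
# assume the default entry_data layout without optional flags.
from prettytable import PrettyTable


def print_energy_table(all_data):
    headers = ("Root dir", "Directory", "Formula", "Energy",
               "E diff (meV)/F.U.", "E/Atom", "% vol chg", "Run type",
               "Na frac coords")
    table = PrettyTable(headers)
    table.align["Directory"] = "l"
    for row in all_data:
        if row:  # skip the empty separator row appended at the end
            table.add_row(row)
    print(table)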
Esempio n. 46
0
    def submit_vasp_directory(self, rootdir, authors, projects=None,
                              references='', remarks=None, master_data=None,
                              master_history=None, created_at=None,
                              ncpus=None):
        """
        Assimilates all VASP run directories beneath a particular
        directory using BorgQueen to obtain structures, and then submits them
        to the Materials Project as SNL files. VASP-related metadata such as
        the initial structure and final energies are automatically incorporated.

        .. note::

            As of now, this MP REST feature is open only to a select group of
            users. Opening up submissions to all users is being planned for
            the future.

        Args:
            rootdir:
                Rootdir to start assimilating VASP runs from.
            authors:
                *List* of {"name":'', "email":''} dicts,
                *list* of Strings as 'John Doe <*****@*****.**>',
                or a single String with commas separating authors. The same
                list of authors should apply to all runs.
            projects:
                List of Strings ['Project A', 'Project B']. This applies to
                all structures.
            references:
                A String in BibTeX format. Again, this applies to all
                structures.
            remarks:
                List of Strings ['Remark A', 'Remark B']
            master_data:
                A free-form dict. Namespaced at the root level with an
                underscore, e.g. {"_materialsproject": <custom data>}. This
                data is added to all structures detected in the directory,
                in addition to other VASP data on a per-structure basis.
            master_history:
                A list of history dicts applied to every submitted structure.
            created_at:
                A datetime object.
            ncpus:
                Number of cpus to use when running BorgQueen assimilation.
        """
        drone = VaspToComputedEntryDrone(inc_structure=True,
                                         data=["filename",
                                               "initial_structure"])
        queen = BorgQueen(drone, number_of_drones=ncpus)
        queen.parallel_assimilate(rootdir)

        structures = []
        metadata = []
        # TODO: Get histories from the data.
        for e in queen.get_data():
            structures.append(e.structure)
            m = {
                "_vasp": {
                    "parameters": e.parameters,
                    "final_energy": e.energy,
                    "final_energy_per_atom": e.energy_per_atom,
                    "initial_structure": e.data["initial_structure"].to_dict
                }
            }
            if master_data is not None:
                m.update(master_data)
            metadata.append(m)
        histories = None
        if master_history is not None:
            histories = master_history * len(structures)
        return self.submit_structures(
            structures, authors, projects=projects, references=references,
            remarks=remarks, data=metadata, histories=histories,
            created_at=created_at)
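A hedged usage sketch for the method above, assuming it is exposed on the legacy MPRester client and that the submitter has access to the restricted SNL submission endpoint; the API key, directory and author details are placeholders.

# Hedged usage sketch: placeholder API key, path and author details.
from pymatgen.ext.matproj import MPRester

with MPRester("YOUR_API_KEY") as mpr:
    submitted = mpr.submit_vasp_directory(
        rootdir="path/to/vasp_runs",
        authors=[{"name": "Jane Doe", "email": "jane@example.com"}],
        projects=["Project A"],
        remarks=["test submission"],
        ncpus=4)
    print(submitted)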
Esempio n. 47
0
def extract_json_data():
    """
    Routine tries to read VASP data into pymatgen objects, and
    then extracts only the relevant data. This is then written to json,
    allowing the voluminous OUTCAR and vasprun.xml files to be discarded.
    """

    try:
        o = Outcar('OUTCAR')
        found_outcar = True
    except Exception:
        print('OUTCAR file missing or not readable')
        found_outcar = False

    try:
        vr = Vasprun('vasprun.xml')
        found_vasprun = True
    except Exception:
        print('vasprun.xml file missing or not readable')
        found_vasprun = False


    dictionary_data  = {}

    if found_outcar:
        dictionary_data['OUTCAR'] = o.as_dict()

    if found_vasprun:

        try:
            # Try to extract a ComputedEntry object using pymatgen's
            # borg drone/queen machinery.
            drone = VaspToComputedEntryDrone()
            queen = BorgQueen(drone, './', 1)
            entry = queen.get_data()[0]

            dictionary_data['ComputedEntry'] = entry.as_dict()
        except Exception:
            print('ComputedEntry COULD NOT BE EXTRACTED BY PYMATGEN...')


        """
        #  Do not extract DOS in run_data.json; this is too memory intensive!
        try:
            dictionary_data['DOS'] = vr.complete_dos.as_dict()
            pymatgen_dos_success = True
        except:
            print('DOS COULD NOT BE EXTRACTED BY PYMATGEN...')
            pymatgen_dos_success = False
        """


        relaxation_data = []
        for step in vr.ionic_steps:

            data_dict = {}
            for key in ['forces','structure','stress']:
                if key in step:
                    data_dict[key] = step[key]

            data_dict['electronic'] = step['electronic_steps'][-1]

            relaxation_data.append(data_dict)

        dictionary_data['relaxation'] = relaxation_data 


    if found_outcar or found_vasprun:
        pmg_dump(dictionary_data, 'run_data.json')

    return    
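Reading the file back is straightforward; the sketch below assumes run_data.json was written by the routine above and that monty's loadfn can decode it.

# Hedged sketch: reload run_data.json produced by extract_json_data().
from monty.serialization import loadfn

run_data = loadfn('run_data.json')
if 'relaxation' in run_data:
    # Inspect the last ionic step; 'electronic' holds its final electronic step.
    last_step = run_data['relaxation'][-1]
    print(sorted(last_step.keys()))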
Esempio n. 48
0
# coding: utf-8
# Copyright (c) Henniggroup.
# Distributed under the terms of the MIT License.

from __future__ import division, print_function, unicode_literals, \
    absolute_import

from pymatgen.apps.borg.queen import BorgQueen

from mpinterfaces.database import MPINTVaspToDbTaskDrone

# import multiprocessing

additional_fields = {"author": "kiran"}  # "doi":"10.1063/1.4865107"
drone = MPINTVaspToDbTaskDrone(host="127.0.0.1",
                               port=27017,
                               database="vasp",
                               collection="collection_name",
                               user="******",
                               password="******",
                               additional_fields=additional_fields)

ncpus = 4  # multiprocessing.cpu_count()
queen = BorgQueen(drone, number_of_drones=ncpus)
queen.parallel_assimilate('path_to_vasp_calculation_folders')
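As a follow-up, the inserted task documents can be checked with pymongo; the sketch below reuses the placeholder connection values from the snippet above and is only illustrative.

# Hedged sketch: verify the inserted documents with pymongo (placeholder values).
from pymongo import MongoClient

client = MongoClient("127.0.0.1", 27017)
db = client["vasp"]
# Authentication may be required depending on how the MongoDB server is set up.
print(db["collection_name"].count_documents({"author": "kiran"}))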