Example No. 1
def get_energies(rootdir, reanalyze, verbose, pretty):
    if verbose:
        FORMAT = "%(relativeCreated)d msecs : %(message)s"
        logging.basicConfig(level=logging.INFO, format=FORMAT)
    drone = GaussianToComputedEntryDrone(inc_structure=True,
                                         parameters=['filename'])
    ncpus = multiprocessing.cpu_count()
    logging.info('Detected {} cpus'.format(ncpus))
    queen = BorgQueen(drone, number_of_drones=ncpus)
    if os.path.exists(save_file) and not reanalyze:
        msg = 'Using previously assimilated data from {}. ' \
              'Use -f to force re-analysis.'.format(save_file)
        queen.load_data(save_file)
    else:
        queen.parallel_assimilate(rootdir)
        msg = 'Results saved to {} for faster reloading.'.format(save_file)
        queen.save_data(save_file)

    entries = queen.get_data()
    entries = sorted(entries, key=lambda x: x.parameters['filename'])
    all_data = [(e.parameters['filename'].replace("./", ""),
                 re.sub("\s+", "", e.composition.formula),
                 "{}".format(e.parameters['charge']),
                 "{}".format(e.parameters['spin_mult']),
                 "{:.5f}".format(e.energy), "{:.5f}".format(e.energy_per_atom),
                 ) for e in entries]
    headers = ("Directory", "Formula", "Charge", "Spin Mult.", "Energy",
               "E/Atom")
    print(tabulate(all_data, headers=headers))
    print("")
    print(msg)
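The snippet above is not self-contained: it assumes module-level imports and a `save_file` path for the cached assimilation results. A minimal preamble along these lines would make it runnable; the cache filename is only a placeholder, not taken from the original source.

import logging
import multiprocessing
import os
import re

from tabulate import tabulate

from pymatgen.apps.borg.hive import GaussianToComputedEntryDrone
from pymatgen.apps.borg.queen import BorgQueen

# Cache for assimilated results; the filename is a placeholder.
save_file = "gau_data.gz"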
Example No. 2
def get_energies(rootdir, reanalyze, verbose, detailed, sort):
    """
    Doc string.
    """
    if verbose:
        FORMAT = "%(relativeCreated)d msecs : %(message)s"
        logging.basicConfig(level=logging.INFO, format=FORMAT)

    if not detailed:
        drone = SimpleVaspToComputedEntryDrone(inc_structure=True)
    else:
        drone = VaspToComputedEntryDrone(inc_structure=True,
                                         data=["filename",
                                               "initial_structure"])

    ncpus = multiprocessing.cpu_count()
    logging.info("Detected {} cpus".format(ncpus))
    queen = BorgQueen(drone, number_of_drones=ncpus)
    if os.path.exists(SAVE_FILE) and not reanalyze:
        msg = "Using previously assimilated data from {}.".format(SAVE_FILE) \
            + " Use -f to force re-analysis."
        queen.load_data(SAVE_FILE)
    else:
        if ncpus > 1:
            queen.parallel_assimilate(rootdir)
        else:
            queen.serial_assimilate(rootdir)
        msg = "Analysis results saved to {} for faster ".format(SAVE_FILE) + \
              "subsequent loading."
        queen.save_data(SAVE_FILE)

    entries = queen.get_data()
    if sort == "energy_per_atom":
        entries = sorted(entries, key=lambda x: x.energy_per_atom)
    elif sort == "filename":
        entries = sorted(entries, key=lambda x: x.data["filename"])

    all_data = []
    for e in entries:
        if not detailed:
            delta_vol = "{:.2f}".format(e.data["delta_volume"] * 100)
        else:
            delta_vol = e.structure.volume / \
                e.data["initial_structure"].volume - 1
            delta_vol = "{:.2f}".format(delta_vol * 100)
        all_data.append((e.data["filename"].replace("./", ""),
                         re.sub("\s+", "", e.composition.formula),
                         "{:.5f}".format(e.energy),
                         "{:.5f}".format(e.energy_per_atom),
                         delta_vol))
    if len(all_data) > 0:
        headers = ("Directory", "Formula", "Energy", "E/Atom", "% vol chg")
        t = PrettyTable(headers)
        t.align["Directory"] = "l"
        for d in all_data:
            t.add_row(d)
        print(t)
        print(msg)
    else:
        print("No valid vasp run found.")
Example No. 3
def get_energies(rootdir, reanalyze, verbose, pretty, detailed, sort):
    if verbose:
        FORMAT = "%(relativeCreated)d msecs : %(message)s"
        logging.basicConfig(level=logging.INFO, format=FORMAT)

    if not detailed:
        drone = SimpleVaspToComputedEntryDrone(inc_structure=True)
    else:
        drone = VaspToComputedEntryDrone(
            inc_structure=True, data=["filename", "initial_structure"])

    ncpus = multiprocessing.cpu_count()
    logging.info("Detected {} cpus".format(ncpus))
    queen = BorgQueen(drone, number_of_drones=ncpus)
    if os.path.exists(save_file) and not reanalyze:
        msg = "Using previously assimilated data from {}.".format(save_file) \
            + " Use -f to force re-analysis."
        queen.load_data(save_file)
    else:
        if ncpus > 1:
            queen.parallel_assimilate(rootdir)
        else:
            queen.serial_assimilate(rootdir)
        msg = "Analysis results saved to {} for faster ".format(save_file) + \
              "subsequent loading."
        queen.save_data(save_file)

    entries = queen.get_data()
    if sort == "energy_per_atom":
        entries = sorted(entries, key=lambda x: x.energy_per_atom)
    elif sort == "filename":
        entries = sorted(entries, key=lambda x: x.data["filename"])

    all_data = []
    for e in entries:
        if not detailed:
            delta_vol = "{:.2f}".format(e.data["delta_volume"] * 100)
        else:
            delta_vol = e.structure.volume / \
                e.data["initial_structure"].volume - 1
            delta_vol = "{:.2f}".format(delta_vol * 100)
        all_data.append(
            (e.data["filename"].replace("./", ""),
             re.sub("\s+", "",
                    e.composition.formula), "{:.5f}".format(e.energy),
             "{:.5f}".format(e.energy_per_atom), delta_vol))
    if len(all_data) > 0:
        headers = ("Directory", "Formula", "Energy", "E/Atom", "% vol chg")
        if pretty:
            from prettytable import PrettyTable
            t = PrettyTable(headers)
            t.align["Directory"] = "l"
            for d in all_data:
                t.add_row(d)
            print(t)
        else:
            print(str_aligned(all_data, headers))
        print(msg)
    else:
        print("No valid vasp run found.")
Example No. 4
def get_energies(rootdir, reanalyze, verbose, detailed, sort, fmt):
    """
    Doc string.
    """
    if verbose:
        logformat = "%(relativeCreated)d msecs : %(message)s"
        logging.basicConfig(level=logging.INFO, format=logformat)

    if not detailed:
        drone = SimpleVaspToComputedEntryDrone(inc_structure=True)
    else:
        drone = VaspToComputedEntryDrone(
            inc_structure=True, data=["filename", "initial_structure"])

    ncpus = multiprocessing.cpu_count()
    logging.info("Detected {} cpus".format(ncpus))
    queen = BorgQueen(drone, number_of_drones=ncpus)
    if os.path.exists(SAVE_FILE) and not reanalyze:
        msg = "Using previously assimilated data from {}.".format(SAVE_FILE) \
              + " Use -r to force re-analysis."
        queen.load_data(SAVE_FILE)
    else:
        if ncpus > 1:
            queen.parallel_assimilate(rootdir)
        else:
            queen.serial_assimilate(rootdir)
        msg = "Analysis results saved to {} for faster ".format(SAVE_FILE) + \
              "subsequent loading."
        queen.save_data(SAVE_FILE)

    entries = queen.get_data()
    if sort == "energy_per_atom":
        entries = sorted(entries, key=lambda x: x.energy_per_atom)
    elif sort == "filename":
        entries = sorted(entries, key=lambda x: x.data["filename"])

    all_data = []
    for e in entries:
        if not detailed:
            delta_vol = "{:.2f}".format(e.data["delta_volume"] * 100)
        else:
            delta_vol = e.structure.volume / \
                        e.data["initial_structure"].volume - 1
            delta_vol = "{:.2f}".format(delta_vol * 100)
        all_data.append(
            (e.data["filename"].replace("./", ""),
             re.sub(r"\s+", "",
                    e.composition.formula), "{:.5f}".format(e.energy),
             "{:.5f}".format(e.energy_per_atom), delta_vol))
    if len(all_data) > 0:
        headers = ("Directory", "Formula", "Energy", "E/Atom", "% vol chg")
        print(tabulate(all_data, headers=headers, tablefmt=fmt))
        print("")
        print(msg)
    else:
        print("No valid vasp run found.")
        os.unlink(SAVE_FILE)
Example No. 5
def get_energies(rootdir, reanalyze, verbose, quick, sort, fmt):
    """
    Get energies of all vaspruns in directory (nested).
    Args:
        rootdir (str): Root directory.
        reanalyze (bool): Whether to ignore saved results and reanalyze
        verbose (bool): Verbose mode or not.
        quick (bool): Whether to perform a quick analysis (using OSZICAR
            instead of vasprun.xml).
        sort (str): Sort criterion, either "energy_per_atom" or "filename".
        fmt (str): tablefmt passed to tabulate.
    """
    if verbose:
        logformat = "%(relativeCreated)d msecs : %(message)s"
        logging.basicConfig(level=logging.INFO, format=logformat)

    if quick:
        drone = SimpleVaspToComputedEntryDrone(inc_structure=True)
    else:
        drone = VaspToComputedEntryDrone(
            inc_structure=True, data=["filename", "initial_structure"])

    ncpus = multiprocessing.cpu_count()
    logging.info("Detected {} cpus".format(ncpus))
    queen = BorgQueen(drone, number_of_drones=ncpus)
    if os.path.exists(SAVE_FILE) and not reanalyze:
        msg = ("Using previously assimilated data from {}.".format(SAVE_FILE) +
               " Use -r to force re-analysis.")
        queen.load_data(SAVE_FILE)
    else:
        if ncpus > 1:
            queen.parallel_assimilate(rootdir)
        else:
            queen.serial_assimilate(rootdir)
        msg = ("Analysis results saved to {} for faster ".format(SAVE_FILE) +
               "subsequent loading.")
        queen.save_data(SAVE_FILE)

    entries = queen.get_data()
    if sort == "energy_per_atom":
        entries = sorted(entries, key=lambda x: x.energy_per_atom)
    elif sort == "filename":
        entries = sorted(entries, key=lambda x: x.data["filename"])

    all_data = []
    for e in entries:
        if quick:
            delta_vol = "NA"
        else:
            delta_vol = e.structure.volume / e.data[
                "initial_structure"].volume - 1
            delta_vol = "{:.2f}".format(delta_vol * 100)
        all_data.append((
            e.data["filename"].replace("./", ""),
            re.sub(r"\s+", "", e.composition.formula),
            "{:.5f}".format(e.energy),
            "{:.5f}".format(e.energy_per_atom),
            delta_vol,
        ))
    if len(all_data) > 0:
        headers = ("Directory", "Formula", "Energy", "E/Atom", "% vol chg")
        print(tabulate(all_data, headers=headers, tablefmt=fmt))
        print("")
        print(msg)
    else:
        print("No valid vasp run found.")
        os.unlink(SAVE_FILE)
    return 0
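A plausible command-line wrapper for this variant is sketched below. The `-r` flag matches the re-analysis hint printed by the function; the remaining flag names and defaults are assumptions for illustration.

import argparse

if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Summarize energies of VASP runs found under a directory.")
    parser.add_argument("rootdir", nargs="?", default=".",
                        help="Root directory to scan for runs.")
    parser.add_argument("-r", "--reanalyze", action="store_true",
                        help="Ignore saved data and re-assimilate.")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Verbose logging.")
    parser.add_argument("-q", "--quick", action="store_true",
                        help="Quick analysis based on OSZICAR only.")
    parser.add_argument("-s", "--sort", choices=["energy_per_atom", "filename"],
                        default="energy_per_atom", help="Sort criterion.")
    parser.add_argument("--fmt", default="simple",
                        help="tablefmt string passed to tabulate.")
    args = parser.parse_args()
    get_energies(args.rootdir, args.reanalyze, args.verbose, args.quick,
                 args.sort, args.fmt)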
Example No. 6
def get_energies(rootdir, reanalyze, verbose, detailed,
                 sort, formulaunit, debug, hull, threshold, args, templatestructure):
    """
    Doc string.
    """
    ion_list = 'Novalue'
    ave_key_list = 'Novalue'
    threscount = 0
    if verbose and not debug:
        FORMAT = "%(relativeCreated)d msecs : %(message)s"
        logging.basicConfig(level=logging.INFO, format=FORMAT)

    elif debug:
        logging.basicConfig(level=logging.DEBUG)

    if not detailed:
        drone = SimpleVaspToComputedEntryDrone(inc_structure=True)
    else:
        drone = VaspToComputedEntryDrone(inc_structure=True,
                                         data=["filename",
                                               "initial_structure"])

    ncpus = multiprocessing.cpu_count()
    logging.info("Detected {} cpus".format(ncpus))
    queen = BorgQueen(drone, number_of_drones=ncpus)


    if os.path.exists(SAVE_FILE) and not reanalyze:
        msg = "Using previously assimilated data from {}.".format(SAVE_FILE) \
            + " Use -f to force re-analysis."
        queen.load_data(SAVE_FILE)
    else:
        if ncpus > 1:
            queen.parallel_assimilate(rootdir)
        else:
            queen.serial_assimilate(rootdir)
        msg = "Analysis results saved to {} for faster ".format(SAVE_FILE) + \
              "subsequent loading."
        queen.save_data(SAVE_FILE)

    entries = queen.get_data()
    if sort == "energy_per_atom":
        entries = sorted(entries, key=lambda x: x.energy_per_atom)
    elif sort == "filename":
        entries = sorted(entries, key=lambda x: x.data["filename"])

    # logging.debug('First Energy entry is {}'.format(entries[0]))

    base_energy = entries[0].energy
    logging.debug('Type of entries is: {}'.format(type(entries)))
    logging.debug('First Element of Entries is:{}'.format(entries[0]))

    # logging.debug('First Energy entry structure is {}'.format(entries[0].structure))

    xy_direction = int(args.XYdirection)
    tolerance = float(args.tolerance)


    if args.template:

        template_site_info = Na12(['Co', 'Mn'], ['Na'], templatestructure, templatestructure,
                                  XY_Direction=xy_direction, tol=tolerance)
        logging.debug('Template structure site info is: {}'.format(template_site_info))

    all_data = []
    energy_diff = []

    threshold = float(threshold)

    Structure_info_dict = {}
    check_ion_seq = [args.dupion]


    for e in entries:

        if not detailed:
            delta_vol = "{:.2f}".format(e.data["delta_volume"] * 100)
        else:
            delta_vol = e.structure.volume / \
                e.data["initial_structure"].volume - 1
            delta_vol = "{:.2f}".format(delta_vol * 100)


        entry_path = e.data['filename'].rsplit('/',1)[0]

        entry_site_info = Na12(['Co', 'Mn'], ['Na'], e.structure, e.structure,
                               XY_Direction=xy_direction, tol=tolerance)

        logging.debug('Total Na site: {}'.format(entry_site_info['Total_Na_Site']))

        #Coordination extraction part
        # na_layer_site_fcoords = [site._fcoords for site in s if site.specie.symbol == "Na"]
        # if 'Cif_Structure' in e.data.keys():
        #     na_sites_fcoords = [site._fcoords for site in e.data['Cif_Structure'] if site.specie.symbol == 'Na']
        #     na_sites_fcoords_list_tuple = [tuple(coord) for coord in na_sites_fcoords]

        na_sites_fcoords = [site.frac_coords for site in e.data['CONTCAR_Structure']
                            if site.specie.symbol == 'Na']
        na_sites_fcoords_list_tuple = [tuple(coord) for coord in na_sites_fcoords]

        if args.nupdown:
            entry_data = [rootdir, e.data["filename"].replace("./", ""),
                          re.sub(r"\s+", "", e.composition.formula),
                          "{:.5f}".format(e.energy),
                          "{:.5f}".format(1000 * (e.energy - base_energy) / int(formulaunit)),
                          "{:.5f}".format(e.energy_per_atom),
                          delta_vol, e.parameters['run_type'],
                          e.data['NUPDOWN'], e.data['ISMEAR'], na_sites_fcoords_list_tuple]
        else:
            entry_data = [rootdir, e.data["filename"].replace("./", ""),
                          re.sub(r"\s+", "", e.composition.formula),
                          "{:.5f}".format(e.energy),
                          "{:.5f}".format(1000 * (e.energy - base_energy) / int(formulaunit)),
                          "{:.5f}".format(e.energy_per_atom),
                          delta_vol, e.parameters['run_type'], na_sites_fcoords_list_tuple]


        if args.structure:
            entry_data.extend([entry_site_info['Total_Na_Site'], entry_site_info['Na2_Site'],
                               entry_site_info['Na1_Mn_Site'], entry_site_info['Na1_Co_Site'],
                               entry_site_info['Na1_Mn_Co_Site']])

        if args.template:
            entry_data.extend([template_site_info['Total_Na_Site'], template_site_info['Na2_Site'],
                               template_site_info['Na1_Mn_Site'], template_site_info['Na1_Co_Site'],
                               template_site_info['Na1_Mn_Co_Site']])

        # sitelist = ['Existed','Duplicate_Entry']
        logging.debug(e.data)
        if args.duplicate:
            # filename.rsplit('/',2)[-2]

            Duplicate, Duplicate_Entry, Structure_info_dict = check_ex(check_ion_seq, Structure_info_dict,
                                                                       e, args.tolerance)
            entry_data.extend([Duplicate, Duplicate_Entry])


        if args.ion_list:
            if args.ion_list[0] == "All":
                ion_list = None
            else:
                (start, end) = [int(i) for i in re.split("-", args.ion_list[0])]
                ion_list = list(range(start, end + 1))
            magdata = get_magnetization(entry_path, ion_list)
            entry_data.extend(magdata)

        if args.ion_avg_list:
            ave_mag_data, ave_key_list = get_ave_magnetization(entry_path,args.ion_avg_list)
            entry_data.extend(ave_mag_data)

        all_data.append(entry_data)
        if threshold != 0 and float(entry_data[4]) < threshold:
            threscount += 1

        energy_diff.append("{:.5f}".format(1000*(e.energy-base_energy)/int(formulaunit)))


    # if len(all_data) > 0:
    #     headers = ("Directory", "Formula", "Energy", "Energy Diff (meV)/F.U.","E/Atom", "% vol chg")
    #     t = PrettyTable(headers)
    #     t.align["Directory"] = "l"
    #     for d in all_data:
    #         logging.debug('data row in all data is: \n {}'.format(d))
    #         t.add_row(d)
    #     print(t)
    #     print(msg)
    # else:
    #     print("No valid vasp run found.")

    if hull:
        print('Analyzing group: {}\n'.format(rootdir))
        print('Energy above hull is:\n')
        print(energy_diff)

    logging.info('In group: {}, number of entries within the threshold is {}'.format(rootdir, threscount))
    all_data.append([])

    return all_data