def plot_line_neighbors(self, site_index, radius, num=200, max_nn=10, fontsize=12, **kwargs):
    """
    Plot (interpolated) density/potential in real space along the lines connecting
    an atom specified by ``site_index`` and all neighbors within a sphere of given ``radius``.

    .. warning::

        This routine can produce lots of plots! Be careful with the value of ``radius``.
        See also ``max_nn``.

    Args:
        site_index: Index of the atom in the structure.
        radius: Radius of the sphere in Angstrom.
        num: Number of points sampled along the line.
        max_nn: By default, only the first ``max_nn`` neighbors are shown.
        fontsize: Legend and title fontsize.

    Return: |matplotlib-Figure|
    """
    site = self.structure[site_index]
    nn_list = self.structure.get_neighbors(site, radius, include_index=True)
    if not nn_list:
        cprint("Zero neighbors found for radius %s Ang. Returning None." % radius, "yellow")
        return None

    # Sort sites by distance.
    nn_list = list(sorted(nn_list, key=lambda t: t[1]))

    if max_nn is not None and len(nn_list) > max_nn:
        cprint("For radius %s, found %s neighbors but only the first max_nn %s sites are shown." %
               (radius, len(nn_list), max_nn), "yellow")
        nn_list = nn_list[:max_nn]

    # Get grid of axes.
    nrows, ncols = len(nn_list), 1
    ax_list, fig, plt = get_axarray_fig_plt(None, nrows=nrows, ncols=ncols,
                                            sharex=True, sharey=True, squeeze=True)
    # np.array handles the single-axis case returned when squeeze=True.
    ax_list = np.array(ax_list).ravel()

    interpolator = self.get_interpolator()

    for i, (nn, ax) in enumerate(zip(nn_list, ax_list)):
        nn_site, nn_dist, nn_sc_index = nn
        title = "%s, %s, dist=%.3f A" % (nn_site.species_string, str(nn_site.frac_coords), nn_dist)

        r = interpolator.eval_line(site.frac_coords, nn_site.frac_coords, num=num, kpoint=None)
        for ispden in range(self.nspden):
            ax.plot(r.dist, r.values[ispden],
                    label=latexlabel_ispden(ispden, self.nspden) if i == 0 else None)

        ax.set_title(title, fontsize=fontsize)
        ax.grid(True)

        if i == nrows - 1:
            ax.set_xlabel("Distance from site_index %s [Angstrom]" % site_index)
        ax.set_ylabel(self.latex_label)
        if self.nspden > 1:
            ax.legend(loc="best", fontsize=fontsize, shadow=True)

    return fig
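# Usage sketch (illustrative, not from the source): plot the density along the
# lines connecting atom 0 to its neighbors within 3 Angstrom. The file name is
# hypothetical and `ncfile.density` assumes the method is exposed by a density
# field object.
#
#   from abipy import abilab
#   with abilab.abiopen("out_DEN.nc") as ncfile:
#       fig = ncfile.density.plot_line_neighbors(site_index=0, radius=3.0, max_nn=5)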
def make_open_notebook(self, nbpath=None, foreground=False):
    """
    Generate a jupyter notebook and open it in the browser.

    Args:
        nbpath: If nbpath is None, a temporary file is created.
        foreground: By default, jupyter is executed in background and stdout, stderr are
            redirected to devnull. Use foreground to run the process in foreground.

    Return: system exit code.

    Raise: RuntimeError if jupyter is not in $PATH.
    """
    nbpath = self.write_notebook(nbpath=nbpath)

    if foreground:
        cmd = "jupyter notebook %s" % nbpath
        return os.system(cmd)
    else:
        # Note: shell=False so the command must not contain shell redirections.
        # stdout/stderr are silenced by passing DEVNULL to Popen instead.
        cmd = "jupyter notebook %s" % nbpath
        print("Executing:", cmd)

        import subprocess
        try:
            from subprocess import DEVNULL  # py3k
        except ImportError:
            DEVNULL = open(os.devnull, "wb")

        process = subprocess.Popen(cmd.split(), shell=False, stdout=DEVNULL, stderr=DEVNULL)
        cprint("pid: %s" % str(process.pid), "yellow")
        return 0
def final_structures(self):
    """List of final |Structure|."""
    if self.run_completed:
        return self._get_structures("footer")
    else:
        cprint("Cannot extract final structures from file.\n %s" % self.filepath, "red")
        return []
def get_dataframe(self, with_geo=True, with_dims=True, abspath=False, funcs=None):
    """
    Return a |pandas-DataFrame| with the most important results and the filenames as index.

    Args:
        with_geo: True if structure info should be added to the dataframe.
        with_dims: True if dimensions should be added.
        abspath: True if paths in index should be absolute. Default: Relative to getcwd().
        funcs: Function or list of functions to execute to add more data to the DataFrame.
            Each function receives a |GsrFile| object and returns a tuple (key, value)
            where key is a string with the name of column and value is the value to be inserted.
    """
    rows, row_names = [], []
    for label, abo in self.items():
        row_names.append(label)
        d = OrderedDict()

        if with_dims:
            dims_dataset, spg_dataset = abo.get_dims_spginfo_dataset()
            if len(dims_dataset) > 1:
                cprint("Multiple datasets are not supported. ARGH!", "yellow")
            d.update(dims_dataset[1])

        # Add info on structure.
        if with_geo and abo.run_completed:
            d.update(abo.final_structure.get_dict4pandas(with_spglib=True))

        # Execute functions.
        if funcs is not None:
            d.update(self._exec_funcs(funcs, abo))
        rows.append(d)

    # Keep absolute paths only if abspath, consistently with the docstring.
    row_names = row_names if abspath else self._to_relpaths(row_names)
    return pd.DataFrame(rows, index=row_names, columns=list(rows[0].keys()))
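# Usage sketch (illustrative): extend the dataframe with a custom column via
# ``funcs``. `robot` and the `energy` attribute accessed in the lambda are
# assumptions for the example.
#
#   df = robot.get_dataframe(with_geo=False,
#                            funcs=lambda afile: ("energy_eV", float(afile.energy)))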
@classmethod
def from_files(cls, filenames, labels=None, abspath=False):
    """
    Build a Robot from a list of ``filenames``.
    If labels is None, labels are automatically generated from absolute paths.

    Args:
        abspath: True if paths in index should be absolute. Default: Relative to getcwd().
    """
    filenames = list_strings(filenames)
    from abipy.abilab import abiopen
    filenames = [f for f in filenames if cls.class_handles_filename(f)]
    items = []
    for i, f in enumerate(filenames):
        try:
            abifile = abiopen(f)
        except Exception as exc:
            cprint("Exception while opening file: `%s`" % str(f), "red")
            cprint(exc, "red")
            abifile = None

        if abifile is not None:
            label = abifile.filepath if labels is None else labels[i]
            items.append((label, abifile))

    new = cls(*items)
    if labels is None and not abspath:
        new.trim_paths(start=None)
    return new
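# Usage sketch (illustrative): build a robot from a list of files and loop over
# the (label, file) pairs. GsrRobot and the file names are assumptions.
#
#   from abipy.abilab import GsrRobot
#   robot = GsrRobot.from_files(["run1/out_GSR.nc", "run2/out_GSR.nc"])
#   for label, gsr in robot.items():
#       print(label, gsr.energy)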
def read_doses(self):
    """
    Return an |AttrDict| with the DOSes available in the file.
    Empty dict if DOSes are not available.
    """
    if "gruns_nomega" not in self.rootgrp.dimensions:
        cprint("File `%s` does not contain ph-DOSes, returning empty dict" % self.path, "yellow")
        return {}

    # Read q-point sampling used to compute DOSes.
    qptrlatt = self.read_value("gruns_qptrlatt")
    shifts = self.read_value("gruns_shiftq")
    qsampling = KSamplingInfo.from_kptrlatt(qptrlatt, shifts, kptopt=1)
    frac_coords_ibz = self.read_value("gruns_qibz")
    weights = self.read_value("gruns_wtq")
    qpoints = IrredZone(self.structure.reciprocal_lattice, frac_coords_ibz,
                        weights=weights, names=None, ksampling=qsampling)

    # DOSes are in 1/Hartree.
    d = AttrDict(wmesh=self.read_value("gruns_omega_mesh") * abu.Ha_eV, qpoints=qpoints)

    for dos_name in _ALL_DOS_NAMES:
        dos_idos = self.read_value(dos_name)
        dos_idos[0] *= abu.eV_Ha  # Convert the DOS to 1/eV. IDOS values are not changed.
        d[dos_name] = dos_idos

    return d
def wrapper(*args, **kwargs):
    parser = build_bench_main_parser()
    options = parser.parse_args()

    # loglevel is bound to the string value obtained from the command line argument.
    # Convert to upper case to allow the user to specify --loglevel=DEBUG or --loglevel=debug
    import logging
    numeric_level = getattr(logging, options.loglevel.upper(), None)
    if not isinstance(numeric_level, int):
        raise ValueError('Invalid log level: %s' % options.loglevel)
    logging.basicConfig(level=numeric_level)

    bench_monkey_patch_options(options)

    # Instantiate the manager.
    from abipy.abilab import TaskManager
    options.manager = TaskManager.as_manager(options.manager)

    flow = main(options)
    if flow is None:
        return 0

    if options.validate:
        # Validate inputs and return.
        retcode = 0
        for task in flow.iflat_tasks():
            v = task.input.abivalidate()
            if v.retcode != 0:
                cprint(v, color="red")
                retcode += v.retcode
        print("input validation retcode: %d" % retcode)
        return retcode

    if options.scheduler:
        return flow.make_scheduler().start()

    return 0
def abinb_mkstemp(force_abinb_workdir=False, use_relpath=False, **kwargs):
    """
    Invoke mkstemp with kwargs, return the (fd, name) of the temporary file.
    kwargs are passed to ``mkstemp`` except for ``dir`` if we are inside a jupyter notebook.

    Args:
        force_abinb_workdir: True to create the file inside the abipy notebook workdir
            even if we are not running inside a jupyter notebook.
        use_relpath: Return relative path (os.path.relpath) if True else absolute (default).
            Relative paths are required if we are going to use the temporary file in
            notebooks or in web browsers. In this case, the caller is responsible for
            calling the function with the correct flag.

    .. example::

        _, filename = abinb_mkstemp(suffix="." + ext, text=True)
    """
    if in_notebook() or force_abinb_workdir:
        d = kwargs.pop("dir", None)
        if d is not None:
            cprint("Files should be created inside abipy_nbworkdir if we are inside jupyter "
                   "or force_abinb_workdir", "yellow")
        fd, path = tempfile.mkstemp(dir=get_abinb_workdir(), **kwargs)
    else:
        fd, path = tempfile.mkstemp(**kwargs)

    if use_relpath:
        path = os.path.relpath(path)

    return fd, path
def write_cube(self, filename=None, spin="total"):
    """Write density in CUBE format."""
    if filename is None:
        filename = self.basename.replace(".nc", ".cube")
        cprint("Writing density in CUBE format to file: %s" % filename, "yellow")

    return self.density.export_to_cube(filename, spin=spin)
def make_and_open_notebook(self, nbpath=None, foreground=False):  # pragma: no cover
    """
    Generate a jupyter_ notebook and open it in the browser.

    Args:
        nbpath: If nbpath is None, a temporary file is created.
        foreground: By default, jupyter is executed in background and stdout, stderr are
            redirected to devnull. Use foreground to run the process in foreground.

    Return: system exit code.

    Raise: `RuntimeError` if jupyter_ is not in $PATH.
    """
    nbpath = self.write_notebook(nbpath=nbpath)

    if which("jupyter") is None:
        raise RuntimeError("Cannot find jupyter in $PATH. Install it with "
                           "`conda install jupyter` or `pip install jupyter`")

    if foreground:
        return os.system("jupyter notebook %s" % nbpath)
    else:
        fd, tmpname = tempfile.mkstemp(text=True)
        print(tmpname)
        cmd = "jupyter notebook %s" % nbpath
        print("Executing:", cmd)
        print("stdout and stderr redirected to %s" % tmpname)
        import subprocess
        process = subprocess.Popen(cmd.split(), shell=False, stdout=fd, stderr=fd)
        cprint("pid: %s" % str(process.pid), "yellow")
        return 0
def build_abinit_input_from_file(options, **abivars):
    """
    Build and return an AbinitInput instance from the input file in options.filepath.
    abivars are optional variables that will be added to the input.
    """
    from abipy.abio.abivars import AbinitInputFile
    abifile = AbinitInputFile(options.filepath)
    pseudos = get_pseudotable(options)
    jdtset = options.jdtset

    # Get vars from input.
    abi_kwargs = abifile.datasets[jdtset - 1].get_vars()
    if abifile.ndtset != 1:
        cprint("# Input file contains %s datasets, will select jdtset index %s:" %
               (abifile.ndtset, jdtset), "yellow")
        abi_kwargs["jdtset"] = jdtset

    # Add input abivars (if any).
    abi_kwargs.update(abivars)

    return abilab.AbinitInput(abifile.structure, pseudos, pseudo_dir=None, comment=None,
                              decorators=None, abi_args=None, abi_kwargs=abi_kwargs, tags=None)
def get_dims_dataframe(self, with_time=True, index=None):
    """
    Build and return |pandas-DataFrame| with the dimensions of the calculation.

    Args:
        with_time: True if walltime and cputime should be added.
        index: Index of the dataframe. Use relative paths of files if None.
    """
    rows, my_index = [], []
    for i, abo in enumerate(self.abifiles):
        try:
            dims_dataset, spg_dataset = abo.get_dims_spginfo_dataset()
        except Exception as exc:
            cprint("Exception while trying to get dimensions from %s\n%s" % (abo.relpath, str(exc)), "yellow")
            continue

        for dtindex, dims in dims_dataset.items():
            dims = dims.copy()
            dims.update({"dtset": dtindex})
            # Add walltime and cputime in seconds.
            if with_time:
                dims.update(OrderedDict([(k, getattr(abo, k)) for k in
                    ("overall_cputime", "proc0_cputime", "overall_walltime", "proc0_walltime")]))
            rows.append(dims)
            my_index.append(abo.relpath if index is None else index[i])

    return pd.DataFrame(rows, index=my_index, columns=list(rows[0].keys()))
def plot_unfolded(self, kbounds, klabels, ylims=None, dist_tol=1e-12, verbose=0,
                  colormap="afmhot", facecolor="black", ax=None, fontsize=12, **kwargs):
    r"""
    Plot unfolded band structure with spectral weights.

    Args:
        kbounds: Reduced coordinates of the vertices of the k-path.
        klabels: List of strings with the labels of the vertices of the k-path,
            e.g. ``klabels = ["$\Gamma$", "L"]``.
        ylims: Set the data limits for the y-axis. Accept tuple e.g. ``(left, right)``
            or scalar e.g. ``left``. If left (right) is None, default values are used.
        dist_tol: A point is considered to be on the path if its distance from the line
            is less than dist_tol.
        verbose: Verbosity level.
        colormap: Have a look at the colormaps here and decide which one you like:
            http://matplotlib.sourceforge.net/examples/pylab_examples/show_colormaps.html
        facecolor: Background color of the axes.
        ax: |matplotlib-Axes| or None if a new figure should be created.
        fontsize: Legend and title fontsize.

    Returns: |matplotlib-Figure|
    """
    cart_bounds = [self.pc_lattice.reciprocal_lattice.get_cartesian_coords(c)
                   for c in np.reshape(kbounds, (-1, 3))]
    uf_cart = self.uf_kpoints.get_cart_coords()

    p = find_points_along_path(cart_bounds, uf_cart, dist_tol)
    if len(p.ikfound) == 0:
        cprint("Warning: find_points_along_path returned zero points along the path. "
               "Try to increase dist_tol.", "yellow")
        return None

    if verbose:
        uf_frac_coords = np.reshape([k.frac_coords for k in self.uf_kpoints], (-1, 3))
        fcoords = uf_frac_coords[p.ikfound]
        print("Found %d points along input k-path" % len(fcoords))
        print("k-points of path in reduced coordinates:")
        print(fcoords)

    fact = 8.0
    e0 = self.ebands.fermie
    ax, fig, plt = get_ax_fig_plt(ax=ax)
    ax.set_facecolor(facecolor)

    xs = np.tile(p.dist_list, self.nband)
    marker_spin = {0: "^", 1: "v"} if self.nss == 2 else {0: "o"}
    for spin in range(self.nss):
        ys = self.uf_eigens[spin, p.ikfound, :] - e0
        ws = self.uf_weights[spin, p.ikfound, :]
        s = ax.scatter(xs, ys.T, s=fact * ws.T, c=ws.T,
                       marker=marker_spin[spin],
                       label=None if self.nss == 1 else "spin %s" % spin,
                       linewidth=1, edgecolors='none', cmap=plt.get_cmap(colormap))
        plt.colorbar(s, ax=ax, orientation='vertical')

    ax.set_xticks(p.path_ticks, minor=False)
    ax.set_xticklabels(klabels, fontdict=None, minor=False, size=kwargs.pop("klabel_size", "large"))
    ax.grid(True)
    ax.set_ylabel('Energy (eV)')
    set_axlims(ax, ylims, "y")
    if self.nss == 2:
        ax.legend(loc="best", fontsize=fontsize, shadow=True)

    return fig
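# Usage sketch (illustrative): unfold along the Gamma-X segment. `fold2bloch`
# stands for an object exposing plot_unfolded; the values are hypothetical.
#
#   kbounds = [0.0, 0.0, 0.0, 0.5, 0.0, 0.0]   # vertices in reduced coordinates
#   klabels = [r"$\Gamma$", "X"]
#   fig = fold2bloch.plot_unfolded(kbounds, klabels, dist_tol=1e-12, colormap="afmhot")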
def get_interpolated_ebands_plotter(self, vertices_names=None, knames=None, line_density=20,
                                    ngkpt=None, shiftk=(0, 0, 0), kpoints=None, **kwargs):
    """
    Interpolate the band energies and build an |ElectronBandsPlotter| with the results.

    Args:
        vertices_names: Used to specify the k-path for the interpolated QP band structure.
            It's a list of tuples, each tuple is of the form (kfrac_coords, kname)
            where kfrac_coords are the reduced coordinates of the k-point and kname is
            a string with the name of the k-point. Each point represents a vertex of the k-path.
            ``line_density`` defines the density of the sampling.
            If None, the k-path is automatically generated according to the point group of the system.
        knames: List of strings with the k-point labels defining the k-path.
            It has precedence over ``vertices_names``.
        line_density: Number of points in the smallest segment of the k-path.
            Used with ``vertices_names``.
        ngkpt: Mesh divisions. Used if bands should be interpolated in the IBZ.
        shiftk: Shifts for k-meshes. Used with ngkpt.
        kpoints: |KpointList| object taken e.g. from a previous ElectronBands.
            Has precedence over vertices_names and line_density.

    Return: |ElectronBandsPlotter| object.
    """
    diff_str = self.has_different_structures()
    if diff_str:
        cprint(diff_str, "yellow")

    # Need KpointList object (assume same structures in Robot).
    nc0 = self.abifiles[0]
    if kpoints is None:
        if ngkpt is not None:
            # IBZ sampling.
            kpoints = IrredZone.from_ngkpt(nc0.structure, ngkpt, shiftk, kptopt=1, verbose=0)
        else:
            # K-path.
            if knames is not None:
                kpoints = Kpath.from_names(nc0.structure, knames, line_density=line_density)
            else:
                if vertices_names is None:
                    vertices_names = [(k.frac_coords, k.name) for k in nc0.structure.hsym_kpoints]
                kpoints = Kpath.from_vertices_and_names(nc0.structure, vertices_names,
                                                        line_density=line_density)

    plotter = ElectronBandsPlotter()
    for label, abiwan in self.items():
        plotter.add_ebands(label, abiwan.interpolate_ebands(kpoints=kpoints))

    return plotter
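# Usage sketch (illustrative): interpolate along a k-path specified by labels
# for all files in the robot. `robot`, the k-point names and the gridplot call
# are assumptions for the example.
#
#   plotter = robot.get_interpolated_ebands_plotter(knames=["G", "X", "L"], line_density=30)
#   plotter.gridplot()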
def handle_overwrite(path, options):
    """Exit 1 if file ``path`` exists and not options.force else return path."""
    name_parts = os.path.splitext(path)
    print("Writing %s file:" % name_parts[-1].replace(".", "").upper())
    if os.path.exists(path) and not options.force:
        cprint("Cannot overwrite pre-existent file. Use `-f` option.", "red")
        sys.exit(1)
    return path
def check_ordered_structure(structure):
    """Print a warning and sys.exit 1 if structure is disordered."""
    if not structure.is_ordered:
        cprint("""
Cannot handle disordered structure with fractional site occupancies.
Use OrderDisorderedStructureTransformation or EnumerateStructureTransformation
to build an appropriate supercell from partial occupancies.""", color="magenta")
        sys.exit(1)
def interpolate_ebands(self, vertices_names=None, line_density=20, ngkpt=None,
                       shiftk=(0, 0, 0), kpoints=None):
    """
    Build new |ElectronBands| object by interpolating the KS Hamiltonian with Wannier functions.
    Supports k-path via (vertices_names, line_density), IBZ mesh defined by ngkpt and shiftk
    or input list of kpoints.

    Args:
        vertices_names: Used to specify the k-path for the interpolated QP band structure.
            List of tuples, each tuple is of the form (kfrac_coords, kname) where
            kfrac_coords are the reduced coordinates of the k-point and kname is a string
            with the name of the k-point. Each point represents a vertex of the k-path.
            ``line_density`` defines the density of the sampling. If None, the k-path is
            automatically generated according to the point group of the system.
        line_density: Number of points in the smallest segment of the k-path.
            Used with ``vertices_names``.
        ngkpt: Mesh divisions. Used if bands should be interpolated in the IBZ.
        shiftk: Shifts for k-meshes. Used with ngkpt.
        kpoints: |KpointList| object taken e.g. from a previous ElectronBands.
            Has precedence over vertices_names and line_density.

    Returns: |ElectronBands| object with Wannier-interpolated energies.
    """
    # Need KpointList object.
    if kpoints is None:
        if ngkpt is not None:
            # IBZ sampling.
            kpoints = IrredZone.from_ngkpt(self.structure, ngkpt, shiftk, kptopt=1, verbose=0)
        else:
            # K-path.
            if vertices_names is None:
                vertices_names = [(k.frac_coords, k.name) for k in self.structure.hsym_kpoints]
            kpoints = Kpath.from_vertices_and_names(self.structure, vertices_names,
                                                    line_density=line_density)

    nk = len(kpoints)
    eigens = np.zeros((self.nsppol, nk, self.mwan))

    # Interpolate the Hamiltonian for each k-point and spin.
    start = time.time()
    write_warning = True
    for spin in range(self.nsppol):
        num_wan = self.nwan_spin[spin]
        for ik, kpt in enumerate(kpoints):
            oeigs = self.hwan.eval_sk(spin, kpt.frac_coords)
            eigens[spin, ik, :num_wan] = oeigs
            if num_wan < self.mwan:
                # May have different number of wannier functions if nsppol == 2.
                # Here I use the last value to fill the eigens matrix (not very clean but oh well).
                eigens[spin, ik, num_wan:self.mwan] = oeigs[-1]
                if write_warning:
                    cprint("Different number of wannier functions for spin. "
                           "Filling last bands with oeigs[-1]", "yellow")
                    write_warning = False

    print("Interpolation completed in %.3f [s]" % (time.time() - start))
    occfacts = np.zeros_like(eigens)

    return ElectronBands(self.structure, kpoints, eigens, self.ebands.fermie,
                         occfacts, self.ebands.nelect, self.nspinor, self.nspden,
                         smearing=self.ebands.smearing)
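# Usage sketch (illustrative): interpolate on an automatically generated k-path
# and plot the bands. `abiwan` stands for the object providing this method.
#
#   ebands = abiwan.interpolate_ebands(line_density=30)
#   ebands.plot()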
def main():

    def str_examples():
        return """\
Usage example:

    abicheck.py
"""

    def show_examples_and_exit(err_msg=None, error_code=1):
        """Display the usage of the script."""
        sys.stderr.write(str_examples())
        if err_msg:
            sys.stderr.write("Fatal Error\n" + err_msg + "\n")
        sys.exit(error_code)

    parser = argparse.ArgumentParser(epilog=str_examples(),
                                     formatter_class=argparse.RawDescriptionHelpFormatter)

    parser.add_argument('--loglevel', default="ERROR", type=str,
                        help="set the loglevel. Possible values: CRITICAL, ERROR (default), WARNING, INFO, DEBUG")

    parser.add_argument('-V', '--version', action='version',
                        version="%(prog)s version " + abilab.__version__)

    parser.add_argument('-v', '--verbose', default=0, action='count',  # -vv --> verbose=2
                        help='verbose, can be supplied multiple times to increase verbosity.')

    # Parse the command line.
    try:
        options = parser.parse_args()
    except Exception:
        show_examples_and_exit(error_code=1)

    # loglevel is bound to the string value obtained from the command line argument.
    # Convert to upper case to allow the user to specify --loglevel=DEBUG or --loglevel=debug
    import logging
    numeric_level = getattr(logging, options.loglevel.upper(), None)
    if not isinstance(numeric_level, int):
        raise ValueError('Invalid log level: %s' % options.loglevel)
    logging.basicConfig(level=numeric_level)

    errmsg = abilab.abicheck(verbose=options.verbose)
    if errmsg:
        cprint(errmsg, "red")
    else:
        print()
        cprint("Abipy requirements are properly configured", "green")

    return len(errmsg)
def ebands(self):
    """
    |ElectronBands| object with the single-particle energies used to compute the screening.
    """
    ebands = ElectronBands.from_file(self.filepath)
    # FIXME
    cprint("Setting Fermi energy to zero since `fermie_energy` is not initialized in Abinit v8.2", "yellow")
    ebands.fermie = 0
    return ebands
def write_xsf(self, filename=None):
    """Write density in XSF format (xcrysden_)."""
    if filename is None:
        filename = self.basename.replace(".nc", ".xsf")
        cprint("Writing density in XSF format to file: %s" % filename, "yellow")

    return self.density.export(filename)
def write_chgcar(self, filename=None):
    """
    Write density in CHGCAR format. Return :class:`ChgCar` instance.
    """
    if filename is None:
        filename = self.basename.replace(".nc", "_CHGCAR")
        cprint("Writing density in CHGCAR format to file: %s" % filename, "yellow")

    return self.density.to_chgcar(filename=filename)
def parse_trials(s):
    """Convert a comma-separated string of trial names into a list. "all" selects all trials."""
    if s == "all":
        return DojoReport.ALL_TRIALS
    trials = s.split(",")
    unknowns = [t for t in trials if t not in DojoReport.ALL_TRIALS]
    if unknowns:
        cprint("The following names are not valid PseudoDojo trials", "red")
        print(str(unknowns))
        raise SystemExit()
    return trials
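# Usage sketch (illustrative): the trial names below are hypothetical examples.
#
#   parse_trials("all")                   # -> DojoReport.ALL_TRIALS
#   parse_trials("deltafactor,gbrv_bcc")  # -> ["deltafactor", "gbrv_bcc"] if both are valid trials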
def remove_disordered(structures, paths):
    """Remove disordered structures and print warning message."""
    slist = []
    for s, p in zip(structures, paths):
        if not s.is_ordered:
            cprint("Removing disordered structure: %s found in %s" % (s.formula, p), "magenta")
        else:
            slist.append(s)

    return slist
def get_eos_fits_dataframe(self, eos_names="murnaghan"):
    """
    Fit energy as function of volume to get the equation of state,
    equilibrium volume, bulk modulus and its derivative with respect to pressure.

    Args:
        eos_names: String or list of strings with EOS names.
            For the list of available models, see pymatgen.analysis.eos.

    Return:
        (fits, dataframe) namedtuple.
        fits is a list of ``EOSFit`` objects.
        dataframe is a |pandas-DataFrame| with the final results.
    """
    # Read volumes and energies from the GSR files.
    energies, volumes = [], []
    for label, gsr in self.items():
        energies.append(float(gsr.energy))
        volumes.append(float(gsr.structure.volume))

    # Order data by volumes if needed.
    if np.any(np.diff(volumes) < 0):
        ves = sorted(zip(volumes, energies), key=lambda t: t[0])
        volumes = [t[0] for t in ves]
        energies = [t[1] for t in ves]

    # Note that eos.fit expects lengths in Angstrom, and energies in eV.
    # I'm also monkey-patching the plot method.
    from pymatgen.analysis.eos import EOS
    if eos_names == "all":
        # Use all the available models.
        eos_names = [n for n in EOS.MODELS if n not in ("deltafactor", "numerical_eos")]
    else:
        eos_names = list_strings(eos_names)

    fits, index, rows = [], [], []
    for eos_name in eos_names:
        try:
            fit = EOS(eos_name=eos_name).fit(volumes, energies)
        except Exception as exc:
            cprint("EOS %s raised exception:\n%s" % (eos_name, str(exc)), "red")
            continue

        # Replace plot with plot_ax method.
        fit.plot = fit.plot_ax
        fits.append(fit)
        index.append(eos_name)
        rows.append(OrderedDict([(aname, getattr(fit, aname))
                                 for aname in ("v0", "e0", "b0_GPa", "b1")]))

    dataframe = pd.DataFrame(rows, index=index, columns=list(rows[0].keys()) if rows else None)
    return dict2namedtuple(fits=fits, dataframe=dataframe)
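# Usage sketch (illustrative): fit the E(V) points in the robot with two EOS
# models and inspect the results. `robot` is an assumption; the model names
# exist in pymatgen.analysis.eos.
#
#   r = robot.get_eos_fits_dataframe(eos_names=["murnaghan", "birch_murnaghan"])
#   print(r.dataframe)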
def abicomp_dfpt2_scf(options):
    """
    Compare DFPT SCF cycles.
    """
    paths = options.paths
    f0 = abilab.AbinitOutputFile(paths[0])
    figures = f0.compare_d2de_scf_cycles(paths[1:])
    if not figures:
        cprint("Cannot find DFPT-SCF sections in output files.", "yellow")

    return 0
def plot_graphs():
    for task in tasks:
        if hasattr(task, "inspect"):
            try:
                task.inspect()
            except Exception as exc:
                cprint("%s: inspect method raised %s" % (task, exc), color="blue")
        else:
            cprint("Task %s does not provide an inspect method" % task, color="blue")
def abicomp_gs_scf(options):
    """
    Compare ground-state SCF cycles.
    """
    paths = options.paths
    f0 = abilab.AbinitOutputFile(paths[0])
    figures = f0.compare_gs_scf_cycles(paths[1:])
    if not figures:
        cprint("Cannot find GS-SCF sections in output files.", "yellow")

    return 0
def make_and_open_notebook(options):
    """
    Generate a jupyter notebook and open it in the browser. Return system exit code.

    Raise: RuntimeError if jupyter is not in $PATH.
    """
    import os
    import nbformat
    nbf = nbformat.v4
    nb = nbf.new_notebook()

    nb.cells.extend([
        nbf.new_markdown_cell("## This is an auto-generated notebook for %s" %
                              os.path.relpath(options.filepath)),
        nbf.new_code_cell("""\
from __future__ import print_function, division, unicode_literals, absolute_import

%matplotlib notebook
import numpy as np
#import seaborn as sns
#sns.set(context='notebook', style='darkgrid', palette='deep',
#        font='sans-serif', font_scale=1, color_codes=False, rc=None)
from abipy import abilab\
"""),
        nbf.new_code_cell("abifile = abilab.abiopen('%s')" % options.filepath)
    ])

    import io, tempfile
    _, nbpath = tempfile.mkstemp(prefix="abinb_", suffix='.ipynb', dir=os.getcwd(), text=True)

    with io.open(nbpath, 'wt', encoding="utf8") as f:
        nbformat.write(nb, f)

    if which("jupyter") is None:
        raise RuntimeError("Cannot find jupyter in PATH. Install it with `pip install jupyter`")

    if options.foreground:
        return os.system("jupyter notebook %s" % nbpath)
    else:
        fd, tmpname = tempfile.mkstemp(text=True)
        print(tmpname)
        cmd = "jupyter notebook %s" % nbpath
        print("Executing:", cmd)
        print("stdout and stderr redirected to %s" % tmpname)
        import subprocess
        process = subprocess.Popen(cmd.split(), shell=False, stdout=fd, stderr=fd)
        cprint("pid: %s" % str(process.pid), "yellow")
        return 0
def check_pseudo_path(path, verbose=0):
    """
    Check a pseudopotential given the filepath. Warnings are printed to stdout.
    Return 0 if success.
    """
    pseudo = dojopseudo_from_file(path)
    if pseudo is None:
        cprint("[%s] Pseudo.from_file returned None. Something wrong in file!" % path, "red")
        return 1

    return check_pseudo(pseudo, verbose=verbose)
def vesta_open(self, temp=300):  # pragma: no cover
    """
    Visualize thermal displacement ellipsoids at temperature `temp` (Kelvin)
    with the Vesta_ application.
    """
    filepath = self.write_cif_file(filepath=None, temp=temp)
    cprint("Writing structure + Debye-Waller tensor in CIF format for T = %s (K) to file: %s" %
           (temp, filepath), "green")
    cprint("In the Vesta GUI, select: Properties -> Atoms -> Show as displacement ellipsoids.",
           "green")
    from abipy.iotools import Visualizer
    visu = Visualizer.from_name("vesta")

    return visu(filepath)()
def yield_figs(self, **kwargs):  # pragma: no cover
    """
    This function *generates* a predefined list of matplotlib figures with minimal input from the user.
    """
    yield self.interpolate_ebands().plot(show=False)
    yield self.hwan.plot(show=False)
    if kwargs.get("verbose"):
        linestyle_dict = {"Interpolated": dict(linewidth=0, color="red", marker="o")}
        yield self.get_plotter_from_ebands(self.ebands).combiplot(linestyle_dict=linestyle_dict, show=False)
    else:
        cprint("Use verbose option to compare ab-initio points with interpolated values", "yellow")
def gbrv_rundb(options):
    """Build flow and run it."""
    dbpath = os.path.abspath(options.path)
    retcode = 0

    # Get list of jobs to execute.
    with FileLock(dbpath):
        outdb = GbrvOutdb.from_file(dbpath)
        jobs = outdb.find_jobs_torun(options.max_njobs)
        if not jobs:
            cprint("Nothing to do, returning 0", "yellow")
            return 0

    gbrv_factory = GbrvCompoundsFactory(xc=outdb["xc_name"])

    # Build workdir.
    s = "-".join(job.formula for job in jobs)
    m = hashlib.md5()
    m.update(s.encode("utf-8"))  # hashlib requires bytes.
    workdir = os.path.join(os.getcwd(), "GBRV_OUTDB_" + jobs[0].formula + "_" +
                           jobs[-1].formula + "_" + m.hexdigest())
    #workdir = os.path.join(os.getcwd(), "GBRV_OUTDB_" + s)

    flow = GbrvCompoundsFlow(workdir=workdir)
    for job in jobs:
        #for accuracy in ("low", "normal", "high"):
        #for accuracy in ("high",):
        for accuracy in ("normal", "high"):
            ecut = max(p.hint_for_accuracy(accuracy).ecut for p in job.pseudos)
            pawecutdg = max(p.hint_for_accuracy(accuracy).pawecutdg for p in job.pseudos)
            if ecut <= 0.0:
                raise RuntimeError("Pseudos do not have hints")
            # Increase by 10 since many pseudos only have ppgen_hints
            #ecut += 10
            work = gbrv_factory.relax_and_eos_work(accuracy, job.pseudos, job.formula,
                                                   job.struct_type, ecut=ecut, pawecutdg=pawecutdg)

            # Attach the database to the work to trigger the storage of the results.
            flow.register_work(work.set_outdb(dbpath))

    print("Working in:", flow.workdir)
    flow.build_and_pickle_dump()  # abivalidate=options.dry_run
    if options.dry_run:
        return 0

    # Run the flow with the scheduler (enable smart_io).
    flow.use_smartio()
    retcode += flow.make_scheduler().start()

    return retcode
def plot_hints(self, with_soc=False, **kwargs):
    """
    Build a dataframe with the last deltafactor/GBRV results and the normal-accuracy hints,
    then plot the distribution of the recommended ecut values grouped by element symbol.
    """
    # Build pandas dataframe with results.
    rows = []
    for p in self:
        if not p.has_dojo_report:
            cprint("Cannot find dojo_report in %s" % p.basename, "magenta")
            continue
        report = p.dojo_report

        row = {att: getattr(p, att) for att in ("basename", "symbol", "Z", "Z_val", "l_max")}

        # Get deltafactor data with/without SOC.
        df_dict = report.get_last_df_results(with_soc=with_soc)
        row.update(df_dict)
        for struct_type in ["fcc", "bcc"]:
            gbrv_dict = report.get_last_gbrv_results(struct_type, with_soc=with_soc)
            row.update(gbrv_dict)

        # Get the hints.
        hint = p.hint_for_accuracy(accuracy="normal")
        row.update(dict(ecut=hint.ecut, pawecutdg=hint.pawecutdg))

        rows.append(row)

    import pandas as pd
    frame = pd.DataFrame(rows)

    def print_frame(x):
        with pd.option_context('display.max_rows', len(x),
                               'display.max_columns', len(list(x.keys()))):
            print(x)

    print_frame(frame)

    # Create axes.
    #import matplotlib.pyplot as plt
    import seaborn as sns
    ax, fig, plt = get_ax_fig_plt(ax=None)

    #order = sort_symbols_by_Z(set(frame["element"]))

    # Box plot.
    ax = sns.boxplot(x="symbol", y="ecut", data=frame, ax=ax,
                     #order=order,
                     whis=np.inf, color="c")

    # Add in points to show each observation.
    sns.stripplot(x="symbol", y="ecut", data=frame, ax=ax,
                  #order=order,
                  jitter=True, size=5, color=".3", linewidth=0)

    sns.despine(left=True)
    ax.set_ylabel("ecut")
    ax.grid(True)

    return fig
def ifc(self):
    """
    The interatomic force constants calculated by anaddb.
    The following anaddb variables should be used in the run:
    ``ifcflag``, ``natifc``, ``atifc``, ``ifcout``.
    Return None if the netcdf_ file does not contain the IFCs.
    """
    try:
        return InteratomicForceConstants.from_file(self.filepath)
    except Exception as exc:
        print(exc)
        cprint("Interatomic force constants have not been calculated. Returning None", "red")
        return None
def abicomp_time(options):
    """
    Analyze/plot the timing data of single or multiple runs.
    """
    paths = options.paths
    from abipy.abio.timer import AbinitTimerParser

    if len(options.paths) == 1 and os.path.isdir(paths[0]):
        # Scan directory tree.
        top = options.paths[0]
        print("Walking directory tree from top:", top, "Looking for file extension:", options.ext)
        parser, paths_found, okfiles = AbinitTimerParser.walk(top=top, ext=options.ext)

        if not paths_found:
            cprint("Empty file list!", color="magenta")
            return 1

        print("Found %d files\n" % len(paths_found))
        if okfiles != paths_found:
            badfiles = [f for f in paths_found if f not in okfiles]
            cprint("Cannot parse timing data from the following files:", color="magenta")
            for bad in badfiles:
                print(bad)
    else:
        # Parse list of files.
        parser = AbinitTimerParser()
        okfiles = parser.parse(options.paths)

        if okfiles != options.paths:
            badfiles = [f for f in options.paths if f not in okfiles]
            cprint("Cannot parse timing data from the following files:", color="magenta")
            for bad in badfiles:
                print(bad)

    if parser is None:
        cprint("Cannot analyze timing data. parser is None", color="magenta")
        return 1

    print(parser.summarize())

    if options.verbose:
        for timer in parser.timers():
            print(timer.get_dataframe())

    if options.ipython:
        cprint("Invoking ipython shell. Use parser to access the object inside ipython", color="blue")
        import IPython
        IPython.start_ipython(argv=[], user_ns={"parser": parser})
    elif options.notebook:
        parser.make_and_open_notebook(foreground=options.foreground)
    else:
        parser.plot_all()

    return 0
def find_oncv_output(path):
    """
    Fix possible error in the specification of the filename when we want a `.out` file.
    Return the path of the output file.
    """
    if path.endswith(".out"):
        return path
    root, _ = os.path.splitext(path)
    new_path = root + ".out"
    if not os.path.exists(new_path):
        raise ValueError("Cannot find either %s or %s" % (path, new_path))
    cprint("Maybe you meant %s" % new_path, "yellow")
    return new_path
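# Usage sketch (illustrative, hypothetical file names): the extension is
# replaced with `.out` when needed, with a warning printed for the user.
#
#   find_oncv_output("Si.out")   # -> "Si.out"
#   find_oncv_output("Si.in")    # -> "Si.out" if that file exists, else ValueError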
def exit_now():
    """
    Function used to test if we have to exit from the infinite loop below.

    Return: 0 to keep looping, != 0 if we must exit (> 0 if some error occurred).
    """
    if flow.all_ok:
        cprint("Flow reached all_ok", "green")
        return -1
    if any(st.is_critical for st in before_task2stat.values()):
        cprint(boxed("Found tasks with critical status"), "red")
        return 1

    return 0
def abiview_skw(options):
    """
    Interpolate energies in k-space along a k-path with star-function methods.
    Note that the interpolation will likely fail if there are symmetry-equivalent k-points
    in the input set of k-points so it's recommended to call this method with energies
    obtained in the IBZ. Accept any file with ElectronBands e.g. GSR.nc, WFK.nc, ...
    """
    ebands = abilab.ElectronBands.as_ebands(options.filepath)
    if not ebands.kpoints.is_ibz:
        cprint("SKW interpolator should be called with energies in the IBZ", "yellow")
    r = ebands.interpolate(lpratio=options.lpratio, line_density=options.line_density,
                           verbose=options.verbose)
    r.ebands_kpath.plot()
    return 0
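# Usage sketch (illustrative): the same steps performed by this handler, in
# plain Python. The file name is hypothetical.
#
#   from abipy import abilab
#   ebands = abilab.ElectronBands.as_ebands("out_GSR.nc")
#   r = ebands.interpolate(lpratio=5, line_density=20)
#   r.ebands_kpath.plot()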