Example #1
Score: 0
class OutNcFile(AbinitNcFile):
    """
    Class representing the _OUT.nc file containing the dataset results
    produced at the end of the run. The netcdf variables can be accessed
    via instance attribute e.g. ``outfile.ecut``. Provides integration with ipython_.
    """

    # TODO: This object is deprecated
    def __init__(self, filepath):
        super().__init__(filepath)
        self.reader = NetcdfReader(filepath)
        # Lazy cache: varname --> value. None means "not read from file yet".
        self._varscache = {k: None for k in self.reader.rootgrp.variables}

    def __dir__(self):
        """Ipython integration: expose netcdf variable names for tab-completion."""
        return sorted(self._varscache.keys())

    def __getattribute__(self, name):
        """Regular attribute lookup with fallback to the netcdf variables."""
        try:
            return super().__getattribute__(name)
        except AttributeError:
            # Look in self._varscache. Use super().__getattribute__ here to
            # avoid recursing back into this method.
            varscache = super().__getattribute__("_varscache")
            if name not in varscache:
                # from None: the swallowed AttributeError above is noise.
                raise AttributeError("Cannot find attribute %s" % name) from None
            reader = super().__getattribute__("reader")
            if varscache[name] is None:
                varscache[name] = reader.read_value(name)
            return varscache[name]

    @lazy_property
    def params(self):
        """dict with parameters that might be subject to convergence studies."""
        return {}

    def close(self):
        """Close the netcdf file."""
        self.reader.close()

    def get_allvars(self):
        """
        Read all netcdf_ variables present in the file.
        Return dictionary varname --> value
        """
        for k, v in self._varscache.items():
            if v is not None: continue
            self._varscache[k] = self.reader.read_value(k)
        return self._varscache
Example #2
Score: 0
File: ddb.py — Project: Npikeulg/abipy
 def from_ec_nc_file(cls, ec_nc_file, tensor_type='relaxed_ion'):
     """
     Build the object from a netcdf file produced by an elastic-constants run.

     Args:
         ec_nc_file: Path to the netcdf file.
         tensor_type: Which pair of tensors to read. One of 'relaxed_ion',
             'clamped_ion' or 'relaxed_ion_stress_corrected'.

     Returns:
         New instance with ``elastic_tensor`` and ``compliance_tensor`` read
         from the file (structure is not available here and is set to None).

     Raises:
         ValueError: If ``tensor_type`` is not one of the allowed values.
     """
     allowed = ('relaxed_ion', 'clamped_ion', 'relaxed_ion_stress_corrected')
     # Validate before opening the file so bad input never touches the disk.
     if tensor_type not in allowed:
         raise ValueError(
             'tensor_type "{0}" not allowed'.format(tensor_type))

     # The netcdf variable names follow the pattern <kind>_constants_<tensor_type>,
     # so a single parameterized read replaces the three branches.
     with NetcdfReader(ec_nc_file) as nc_reader:
         ec = np.array(
             nc_reader.read_variable('elastic_constants_' + tensor_type))
         compl = np.array(
             nc_reader.read_variable('compliance_constants_' + tensor_type))

     #TODO: add the structure object!
     return cls(elastic_tensor=ec,
                compliance_tensor=compl,
                structure=None,
                additional_info={'tensor_type': tensor_type})
Example #3
Score: 0
File: outputs.py — Project: gmatteo/abipy
class OutNcFile(AbinitNcFile):
    """
    Class representing the _OUT.nc file containing the dataset results
    produced at the end of the run. The netcdf variables can be accessed
    via instance attribute e.g. ``outfile.ecut``. Provides integration with ipython_.
    """
    def __init__(self, filepath):
        super(OutNcFile, self).__init__(filepath)
        self.reader = NetcdfReader(filepath)
        # One cache slot per netcdf variable; None until first access.
        self._varscache = dict.fromkeys(self.reader.rootgrp.variables)

    def __dir__(self):
        """Ipython integration."""
        return sorted(list(self._varscache))

    def __getattribute__(self, name):
        # Bind the parent lookup once; it is used for the fallback path too.
        parent_lookup = super(OutNcFile, self).__getattribute__
        try:
            return parent_lookup(name)
        except AttributeError:
            # Not a regular attribute: treat `name` as a netcdf variable.
            cache = parent_lookup("_varscache")
            if name not in cache:
                raise AttributeError("Cannot find attribute %s" % name)
            if cache[name] is None:
                cache[name] = parent_lookup("reader").read_value(name)
            return cache[name]

    @lazy_property
    def params(self):
        """:class:`OrderedDict` with parameters that might be subject to convergence studies."""
        return {}

    def close(self):
        """Close the file."""
        self.reader.close()

    def get_allvars(self):
        """
        Read all netcdf_ variables present in the file.
        Return dictionary varname --> value
        """
        for varname, value in self._varscache.items():
            if value is None:
                self._varscache[varname] = self.reader.read_value(varname)
        return self._varscache
Example #4
Score: 0
    def get_ibz(self, ngkpt=None, shiftk=None, kptopt=None, qpoint=None, workdir=None, manager=None):
        """
        This function, computes the list of points in the IBZ and the corresponding weights.
        It should be called with an input file that contains all the mandatory variables required by ABINIT.

        Args:
            ngkpt: Number of divisions for the k-mesh (default None i.e. use ngkpt from self)
            shiftk: Shiftks (default None i.e. use shiftk from self)
            kptopt: Option for the k-point generation (default None i.e. use kptopt from self)
            qpoint: qpoint in reduced coordinates. Used to shift the k-mesh (default None i.e no shift)
            workdir: Working directory of the fake task used to compute the ibz. Use None for temporary dir.
            manager: :class:`TaskManager` of the task. If None, the manager is initialized from the config file.

        Returns:
            `namedtuple` with attributes:
                points: `ndarray` with points in the IBZ in reduced coordinates.
                weights: `ndarray` with weights of the points.

        .. warning::

            Multiple datasets are ignored. Only the list of k-points for dataset 1 are returned.
        """
        if self.ndtset != 1:
            raise RuntimeError("get_ibz cannot be used if the input contains more than one dataset")

        # Avoid modifications in self.
        inp = self.split_datasets()[0].deepcopy()

        # The magic value that makes ABINIT print the ibz and then stop.
        inp.prtkpt = -2

        # Override the k-mesh parameters only when explicitly requested.
        if ngkpt is not None: inp.ngkpt = ngkpt
        if shiftk is not None:
            # Normalize to a (nshiftk, 3) array so nshiftk can be derived from it.
            inp.shiftk = np.reshape(shiftk, (-1,3))
            inp.nshiftk = len(inp.shiftk)

        if kptopt is not None:
            inp.kptopt = kptopt

        if qpoint is not None:
            inp.qptn, inp.nqpt = qpoint, 1

        # Build a Task to run Abinit in a shell subprocess
        task = AbinitTask.temp_shell_task(inp, workdir=workdir, manager=manager)
        task.start_and_wait(autoparal=False)

        # Read the list of k-points from the netcdf file.
        try:
            with NetcdfReader(os.path.join(task.workdir, "kpts.nc")) as r:
                ibz = collections.namedtuple("ibz", "points weights")
                return ibz(points=r.read_value("reduced_coordinates_of_kpoints"),
                           weights=r.read_value("kpoint_weights"))

        except Exception as exc:
            # Try to understand if it's a problem with the Abinit input.
            report = task.get_event_report()
            if report.errors: raise self.Error(str(report))
            raise exc
Example #5
Score: 0
class OutNcFile(AbinitNcFile):
    """
    Class representing the _OUT.nc file containing the dataset results
    produced at the end of the run. The netcdf variables can be accessed
    via instance attribute e.g. `outfile.ecut`. Provides integration with ipython.
    """
    def __init__(self, filepath):
        super(OutNcFile, self).__init__(filepath)
        self.reader = NetcdfReader(filepath)
        # Placeholder entry for every netcdf variable; filled on first access.
        self._varscache = {varname: None for varname in self.reader.rootgrp.variables}

    def __dir__(self):
        """Ipython integration."""
        names = self._varscache.keys()
        return sorted(list(names))

    def __getattribute__(self, name):
        try:
            return super(OutNcFile, self).__getattribute__(name)
        except AttributeError:
            # Fall back to the netcdf-variable cache; go through the parent
            # __getattribute__ so this method is not re-entered.
            varscache = super(OutNcFile, self).__getattribute__("_varscache")
            if name not in varscache:
                raise AttributeError("Cannot find attribute %s" % name)
            if varscache[name] is None:
                reader = super(OutNcFile, self).__getattribute__("reader")
                varscache[name] = reader.read_value(name)
            return varscache[name]

    def close(self):
        """Close the underlying netcdf reader."""
        self.reader.close()

    def get_allvars(self):
        """
        Read all netcdf variables present in the file.
        Return dictionary varname --> value
        """
        pending = [k for k, v in self._varscache.items() if v is None]
        for k in pending:
            self._varscache[k] = self.reader.read_value(k)
        return self._varscache
Example #6
Score: 0
 def __init__(self, filepath):
     """Open `filepath` with a NetcdfReader and build the lazy variable cache."""
     super(OutNcFile, self).__init__(filepath)
     self.reader = NetcdfReader(filepath)
     # None marks a variable that has not been read from the file yet.
     self._varscache = {k: None for k in self.reader.rootgrp.variables}
Example #7
Score: 0
File: outputs.py — Project: gmatteo/abipy
 def __init__(self, filepath):
     """Open the netcdf file and prepare the lazy variable cache."""
     super(OutNcFile, self).__init__(filepath)
     self.reader = NetcdfReader(filepath)
     # dict.fromkeys gives every variable a None placeholder until first read.
     self._varscache = dict.fromkeys(self.reader.rootgrp.variables)