Example #1
def get_qpts(inp, ngqpt, manager=None, workdir="."):
    """Run a fake ABINIT task that generates the q-point mesh, then return the q-points and the estimated memory (in Mbytes)."""

    qpt_vars = dict(
        ngkpt=ngqpt,
        shiftk=[0., 0., 0.],
        nshiftk=1,
        prtvol=-2,
    )

    w = _setup_faketask('qpt', manager=manager, workdir=workdir)
    fake_input = inp.deepcopy()
    fake_input.set_variables(**qpt_vars)
    fake_task = w.register(fake_input)
    w.allocate()
    # Wait for completion so that the output files exist before reading them.
    w.start(wait=True)

    nc = NetcdfReader(fake_task.opath_from_ext('OUT'))
    qpts = nc.read_variable('kpt')[:].reshape((-1, 3))
    # Parse the estimated memory (in Mbytes) from the ABINIT log file.
    mem = 0
    for line in fake_task.log_file.readlines():
        if "P This job should need less than" in line:
            mem = float(line.split()[7])
            break

    w.rmtree()
    return qpts, mem
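A minimal usage sketch for get_qpts; here scf_input stands for a hypothetical AbinitInput-like object that already carries the structure, pseudos and mandatory variables:

# Hypothetical call: scf_input and the workdir are illustration values.
qpts, mem_mb = get_qpts(scf_input, ngqpt=[4, 4, 4], workdir="/tmp/qpt_run")
print("%d q-points, estimated memory: %.1f Mb" % (len(qpts), mem_mb))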
Example #2
    def read(self):
        """
        Walk the ``<ps_name>_df_run_full`` tree, read the delta factor and the
        total energy for each ecut, and store them in the pseudo's dojo report.
        """
        tree = os.walk(self.ps_name+'_df_run_full')
        for dirs in tree:
            file_name = os.path.join(dirs[0], 'deltadata.txt')
            if os.path.isfile(file_name):
                with open(file_name, 'r') as f:
                    lines = f.readlines()
                try:
                    df = float(lines[0].split()[3])
                except ValueError:
                    print('warning', lines[0].split()[3])
                    df = abs(complex(lines[0].split()[3]))
                # dirs[0] is <base>/<subdir>; the GSR file of the matching
                # ground-state run lives in <base>/t3/outdata.
                base = os.path.dirname(dirs[0])
                out_file = os.path.join(base, 't3', 'outdata', 'out_GSR.nc')
                out = NetcdfReader(out_file)
                # read_value may return a scalar or an array depending on the file.
                if not isinstance(out.read_value('ecut'), collections.abc.Iterable):
                    ecut = out.read_value('ecut')
                    etotal = out.read_value('etotal')
                elif not isinstance(out.read_value('ecut')[0], collections.abc.Iterable):
                    ecut = out.read_value('ecut')[0]
                    etotal = out.read_value('etotal')[0]
                else:
                    raise TypeError('unexpected shape for ecut')
                self.etotal_data.update({ecut: etotal})
                self.df_data.update({ecut: df})
        self.pseudo.dojo_report['delta_factor'] = self.df_data
        self.pseudo.dojo_report['total_energy'] = self.etotal_data
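The scalar-or-array checks around read_value recur in this and later examples. A small helper, sketched here (not part of the original code) under the assumption that read_value returns either a plain scalar or an array, makes the intent explicit:

import collections.abc

def read_scalar(reader, varname):
    """Return varname as a plain scalar, unwrapping a 1-element array if needed."""
    value = reader.read_value(varname)
    if not isinstance(value, collections.abc.Iterable):
        return value
    if not isinstance(value[0], collections.abc.Iterable):
        return value[0]
    raise TypeError('unexpected shape for %s' % varname)

With it, the branches above collapse to ecut = read_scalar(out, 'ecut') and etotal = read_scalar(out, 'etotal').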
Example #3
def check_hm(flow, n=9):
    # Works 0..n-1 hold the SCF tasks; work n holds the DOS tasks.
    tasks = []
    for work in flow[:n]:
        for task in work:
            tasks.append(task)
    for i, task in enumerate(flow[n]):
        dos = task.outdir.has_abiext('DOS')
        if dos:
            ncdf = NetcdfReader(task.outdir.has_abiext('GSR'))
            acell = 2 * ncdf.read_variable('primitive_vectors')[0][1]
            # tasks[i] is assumed to be the SCF task matching the i-th DOS task.
            cycle = GroundStateScfCycle.from_file(tasks[i].output_file.path)
            moment = cycle.last_iteration['magn']
            etotal = cycle.last_etotal
            gapup, gapdn = is_hm(dos)
            name = tasks[i]._name.split('_')
            alloy = name[0]
            phase = name[1]
            print('%s\t%s\t%f\t%f\t%f\t%f\t%f' % (alloy, phase, acell, etotal, moment, gapup, gapdn))
Example #4
def check_hm(flow):
    # Works 0..8 hold the SCF tasks; work 9 holds the DOS tasks.
    tasks = []
    for work in flow[:9]:
        for task in work:
            tasks.append(task)
    for i, task in enumerate(flow[9]):
        dos = task.outdir.has_abiext('DOS')
        if dos:
            print(tasks[i]._name)
            ncdf = NetcdfReader(task.outdir.has_abiext('GSR'))
            acell = 2 * ncdf.read_variable('primitive_vectors')[0][1]
            cycle = GroundStateScfCycle.from_file(tasks[i].output_file.path)
            moment = cycle.last_iteration['magn']
            etotal = cycle.last_etotal
            print('  acell:' + str(acell * 0.529177249))  # Bohr -> Angstrom
            print('  momen:' + str(moment))
            print('  etotl:' + str(etotal))
            is_hm(dos)
Example #5
def get_all_kpoints(inp, manager=None, workdir="."):
    """Run a fake task with kptopt=3 to get the full (unsymmetrized) list of k-points and the estimated memory (in Mbytes)."""

    kpt_vars = dict(
        prtvol=-1,
        kptopt=3,
    )

    w = _setup_faketask('kpt', manager=manager, workdir=workdir)
    fake_input = inp.deepcopy()
    fake_input.set_variables(**kpt_vars)
    fake_task = w.register(fake_input)
    w.allocate()
    w.start(wait=True)

    nc = NetcdfReader(fake_task.opath_from_ext('OUT'))
    kpts = nc.read_variable('kpt')[:].reshape((-1, 3))
    # Parse the estimated memory (in Mbytes) from the ABINIT log file.
    mem = 0
    for line in fake_task.log_file.readlines():
        if "P This job should need less than" in line:
            mem = float(line.split()[7])
            break

    w.rmtree()
    return kpts, mem
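Usage mirrors get_qpts from Example #1, but kptopt=3 makes ABINIT generate the full Brillouin-zone mesh without symmetry reduction; scf_input is again a hypothetical input object:

kpts, mem_mb = get_all_kpoints(scf_input, workdir="/tmp/kpt_run")
print("%d k-points in the full BZ, ~%.1f Mb" % (len(kpts), mem_mb))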
Example #6
    def create_tasks(self, wfk_file, scr_input):
        """
        Create the SCR tasks and register them in self.

        Args:
            wfk_file: Path to the ABINIT WFK file to use for the computation of the screening.
            scr_input: Input for the screening calculation.
        """
        assert len(self) == 0
        wfk_file = self.wfk_file = os.path.abspath(wfk_file)

        # Build a temporary work in the tmpdir that will use a shell manager
        # to run ABINIT in order to get the list of q-points for the screening.
        shell_manager = self.manager.to_shell_manager(mpi_procs=1)

        w = Work(workdir=self.tmpdir.path_join("_qptdm_run"),
                 manager=shell_manager)

        fake_input = scr_input.deepcopy()
        fake_task = w.register(fake_input)
        w.allocate()
        w.build()

        # Create the symbolic link and add the magic value
        # nqptdm = -1 to the input to get the list of q-points.
        fake_task.inlink_file(wfk_file)
        fake_task.strategy.add_extra_abivars({"nqptdm": -1})
        fake_task.start_and_wait()

        # Parse the section with the q-points
        #qpoints = yaml_read_kpoints(fake_task.log_file.path, doc_tag="!Qptdms")
        from pymatgen.io.abinitio.netcdf import NetcdfReader
        with NetcdfReader(fake_task.outdir.has_abiext("qpts.nc")) as reader:
            qpoints = reader.read_value("qibz")
        #print("qpoints)
        #w.rmtree()

        # Now we can register the task for the different q-points
        for qpoint in qpoints:
            qptdm_input = scr_input.deepcopy()
            qptdm_input.set_variables(nqptdm=1, qptdm=qpoint)

            self.register(qptdm_input, manager=self.manager)

        self.allocate()
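The with-statement form of NetcdfReader used above closes the file automatically. A standalone sketch of the same read, with a hypothetical path (the variable name "qibz" follows the example):

from pymatgen.io.abinitio.netcdf import NetcdfReader

with NetcdfReader("_qptdm_run/outdata/qpts.nc") as reader:
    qibz = reader.read_value("qibz")  # q-points of the IBZ in reduced coordinates
print("%d q-points" % len(qibz))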
Example #7
    def read_convergence_data(self, data_dir):
        gwrun = os.path.join(data_dir, 'out_SIGRES.nc')
        scfrunout = os.path.join(data_dir, 'out_OUT.nc')
        scfruneig = os.path.join(data_dir, 'out_EIG.nc')
        results = {}
        if os.path.isfile(gwrun):
            # Return the gap at Gamma from the GW run.
            data = NetcdfReader(gwrun)
            data.print_tree()
            # read_value may return a scalar or an array depending on the file.
            if not isinstance(data.read_value('ecuteps'), collections.abc.Iterable):
                ecuteps = data.read_value('ecuteps')
            elif not isinstance(data.read_value('ecuteps')[0], collections.abc.Iterable):
                ecuteps = data.read_value('ecuteps')[0]
            else:
                raise TypeError('unexpected shape for ecuteps')
            if not isinstance(data.read_value('sigma_nband'), collections.abc.Iterable):
                sigma_nband = data.read_value('sigma_nband')
            elif not isinstance(data.read_value('sigma_nband')[0], collections.abc.Iterable):
                sigma_nband = data.read_value('sigma_nband')[0]
            else:
                raise TypeError('unexpected shape for sigma_nband')
            gwgap = data.read_value('egwgap')[0][0]
            #gwgap = min(data.read_value('egwgap')[0])
            if not isinstance(gwgap, float):
                raise TypeError('unexpected type for egwgap')
            results = {'ecuteps': float(Ha_to_eV * ecuteps),
                       'nbands': int(sigma_nband),
                       'gwgap': float(gwgap)}
            data.close()
            return results
        if os.path.isfile(scfruneig):
            # Return the lowest and highest eigenvalue at Gamma from the SCF run.
            data = NetcdfReader(scfruneig)
            out = NetcdfReader(scfrunout)
            if data.read_value('Eigenvalues')[0][0][-1] < 2.0:  # crude way to select only the scf results
                if not isinstance(out.read_value('ecut'), collections.abc.Iterable):
                    ecut = out.read_value('ecut')
                elif not isinstance(out.read_value('ecut')[0], collections.abc.Iterable):
                    ecut = out.read_value('ecut')[0]
                else:
                    raise TypeError('unexpected shape for ecut')
                results = {'ecut': Ha_to_eV * ecut,
                           'min': data.read_value('Eigenvalues')[0][0][0] * Ha_to_eV,
                           'max': data.read_value('Eigenvalues')[0][0][-1] * Ha_to_eV,
                           'full_width': (data.read_value('Eigenvalues')[0][0][-1] -
                                          data.read_value('Eigenvalues')[0][0][0]) * Ha_to_eV}
                data.close()
                out.close()
            return results
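A sketch of how the method might be called; obj stands for an instance of the (unshown) class defining it, and the directory is an illustration value. Which branch runs depends on whether the GW or the SCF output file is present:

res = obj.read_convergence_data('work_0/t5/outdata')
# GW run  -> {'ecuteps': ..., 'nbands': ..., 'gwgap': ...}  (eV)
# SCF run -> {'ecut': ..., 'min': ..., 'max': ..., 'full_width': ...}  (eV)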
Example #8
    def get_ibz(self,
                ngkpt=None,
                shiftk=None,
                kptopt=None,
                qpoint=None,
                workdir=None,
                manager=None):
        """
        This function, computes the list of points in the IBZ and the corresponding weights.
        It should be called with an input file that contains all the mandatory variables required by ABINIT.

        Args:
            ngkpt: Number of divisions for the k-mesh (default None i.e. use ngkpt from self)
            shiftk: Shiftks (default None i.e. use shiftk from self)
            qpoint: qpoint in reduced coordinates. Used to shift the k-mesh (default None i.e no shift)
            workdir: Working directory of the fake task used to compute the ibz. Use None for temporary dir.
            manager: :class:`TaskManager` of the task. If None, the manager is initialized from the config file.

        Returns:
            `namedtuple` with attributes:
                points: `ndarray` with points in the IBZ in reduced coordinates.
                weights: `ndarray` with weights of the points.

        .. warning::

            Multiple datasets are ignored. Only the list of k-points for dataset 1 are returned.
        """
        if self.ndtset != 1:
            raise RuntimeError(
                "get_ibz cannot be used if the input contains more than one dataset"
            )

        # Avoid modifications in self.
        inp = self.split_datasets()[0].deepcopy()

        # The magic value that makes ABINIT print the ibz and then stop.
        inp.prtkpt = -2

        if ngkpt is not None: inp.ngkpt = ngkpt
        if shiftk is not None:
            inp.shiftk = np.reshape(shiftk, (-1, 3))
            inp.nshiftk = len(inp.shiftk)

        if kptopt is not None:
            inp.kptopt = kptopt

        if qpoint is not None:
            inp.qptn, inp.nqpt = qpoint, 1

        # Build a Task to run Abinit in a shell subprocess
        task = AbinitTask.temp_shell_task(inp,
                                          workdir=workdir,
                                          manager=manager)
        task.start_and_wait(autoparal=False)

        # Read the list of k-points from the netcdf file.
        try:
            with NetcdfReader(os.path.join(task.workdir, "kpts.nc")) as r:
                ibz = collections.namedtuple("ibz", "points weights")
                return ibz(
                    points=r.read_value("reduced_coordinates_of_kpoints"),
                    weights=r.read_value("kpoint_weights"))

        except Exception as exc:
            # Try to understand if it's a problem with the Abinit input.
            report = task.get_event_report()
            if report.errors: raise self.Error(str(report))
            raise exc
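A usage sketch for get_ibz; the mesh and shift are arbitrary illustration values, and the input must contain a single dataset, as the RuntimeError above enforces:

ibz = scf_input.get_ibz(ngkpt=[8, 8, 8], shiftk=[0.5, 0.5, 0.5])
for point, weight in zip(ibz.points, ibz.weights):
    print(point, weight)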