Example #1
    def __init__(self, structure, struct_type, pseudo, ecut=None, pawecutdg=None, ngkpt=(8, 8, 8),
                 spin_mode="unpolarized", toldfe=1.e-9, smearing="fermi_dirac:0.001 Ha",
                 accuracy="normal", ecutsm=0.05, chksymbreak=0,
                 workdir=None, manager=None, **kwargs):
        """
        Build a :class:`Work` for the computation of the relaxed lattice parameter.

        Args:
            structure: :class:`Structure` object.
            struct_type: Either "fcc" or "bcc".
            pseudo: String with the name of the pseudopotential file or :class:`Pseudo` object.
            ecut: Cutoff energy in Hartree
            ngkpt: MP divisions.
            spin_mode: Spin polarization mode.
            toldfe: Tolerance on the energy (Ha)
            smearing: Smearing technique.
            workdir: String specifying the working directory.
            manager: :class:`TaskManager` responsible for the submission of the tasks.
        """
        super(GbrvRelaxAndEosWork, self).__init__(workdir=workdir, manager=manager)
        self.struct_type = struct_type
        self.accuracy = accuracy

        # nband must be large enough to accommodate fractional occupancies.
        fband = kwargs.pop("fband", None)
        self._pseudo = Pseudo.as_pseudo(pseudo)
        nband = gbrv_nband(self.pseudo)

        # Set extra_abivars.
        self.extra_abivars = dict(
            ecut=ecut,
            pawecutdg=pawecutdg,
            toldfe=toldfe,
            prtwf=0,
            #ecutsm=0.5,
            nband=nband,
            #paral_kgb=paral_kgb
        )
                                       
        self.extra_abivars.update(**kwargs)
        self.ecut = ecut
        self.smearing = Smearing.as_smearing(smearing)

        # Kpoint sampling: shiftk depends on struct_type
        shiftk = {"fcc": [0, 0, 0], "bcc": [0.5, 0.5, 0.5]}.get(struct_type)
        #ngkpt = (1,1,1)
        self.ksampling = KSampling.monkhorst(ngkpt, chksymbreak=chksymbreak, shiftk=shiftk)
        self.spin_mode = SpinMode.as_spinmode(spin_mode)
        relax_algo = RelaxationMethod.atoms_and_cell()

        #self.relax_input = RelaxStrategy(structure, pseudo, self.ksampling, relax_algo, 
        #                                 accuracy=accuracy, spin_mode=spin_mode, smearing=smearing, **self.extra_abivars)

        inp = abilab.AbinitInput(structure, pseudo)
        inp.add_abiobjects(self.ksampling, relax_algo, self.spin_mode, self.smearing)
        inp.set_vars(self.extra_abivars)

        # Register structure relaxation task.
        self.relax_task = self.register_relax_task(inp)
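For clarity, here is a standalone sketch of the bookkeeping done in this constructor: the k-point shift is picked from struct_type, and any caller-supplied **kwargs override the default ABINIT variables. The numerical values below are illustrative only, not taken from a real run.

# Illustrative values only; in the real constructor nband comes from gbrv_nband(pseudo).
extra_abivars = dict(ecut=12.0, pawecutdg=None, toldfe=1e-9, prtwf=0, nband=20)
extra_abivars.update({"fband": 2, "prtwf": 1})   # **kwargs win over the defaults
shiftk = {"fcc": [0, 0, 0], "bcc": [0.5, 0.5, 0.5]}.get("bcc")
print(extra_abivars["prtwf"], shiftk)            # -> 1 [0.5, 0.5, 0.5]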
Example #2
    def work_for_pseudo(self, pseudo, accuracy="normal", kppa=6750, ecut=None, pawecutdg=None,
                        toldfe=1.e-9, smearing="fermi_dirac:0.1 eV", workdir=None, manager=None, **kwargs):
        """
        Returns a :class:`Work` object from the given pseudopotential.

        Args:
            kwargs: Extra variables passed to Abinit.

        .. note::

            0.001 Rydberg is the value used with WIEN2K
        """
        pseudo = Pseudo.as_pseudo(pseudo)
        symbol = pseudo.symbol

        if pseudo.ispaw and pawecutdg is None:
            raise ValueError("pawecutdg must be specified for PAW calculations.")

        try:
            cif_path = self.get_cif_path(symbol)
        except Exception as exc:
            raise self.Error(str(exc))

        # Include spin polarization for O, Cr and Mn (treated as antiferromagnetic;
        # Mn is actually ferrimagnetic) and for Fe, Co and Ni (ferromagnetic).
        spin_mode = "unpolarized"

        if symbol in ["Fe", "Co", "Ni"]:
            spin_mode = "polarized"
            if symbol == "Fe":
                kwargs['spinat'] = 2 * [(0, 0, 2.3)]
            if symbol == "Co":
                kwargs['spinat'] = 2 * [(0, 0, 1.2)]
            if symbol == "Ni":
                kwargs['spinat'] = 4 * [(0, 0, 0.6)]

        if symbol in ["O", "Cr", "Mn"]:
            spin_mode = "afm"
            if symbol == 'O':
                kwargs['spinat'] = [(0, 0, 1.5), (0, 0, 1.5), (0, 0, -1.5), (0, 0, -1.5)]
            elif symbol == 'Cr':
                kwargs['spinat'] = [(0, 0, 1.5), (0, 0, -1.5)]
            elif symbol == 'Mn':
                kwargs['spinat'] = [(0, 0, 2.0), (0, 0, 1.9), (0, 0, -2.0), (0, 0, -1.9)]

        # DO NOT CHANGE THE STRUCTURE REPORTED IN THE CIF FILE.
        structure = Structure.from_file(cif_path, primitive=False)

        # Magnetic elements:
        # Start from previous SCF run to avoid getting trapped in local minima 
        connect = symbol in ("Fe", "Co", "Ni", "Cr", "Mn", "O", "Zn", "Cu")

        return DeltaFactorWork(
            structure, pseudo, kppa, connect,
            spin_mode=spin_mode, toldfe=toldfe, smearing=smearing,
            accuracy=accuracy, ecut=ecut, pawecutdg=pawecutdg, ecutsm=0.5,
            workdir=workdir, manager=manager, **kwargs)
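The per-element branches above can be summarised as a small lookup. The following self-contained restatement reproduces the same (spin_mode, spinat) choices and can be run on its own; it is a reading aid, not part of the original API.

def magnetic_setup(symbol):
    """Return (spin_mode, spinat) mirroring the branches in work_for_pseudo."""
    ferro = {"Fe": 2 * [(0, 0, 2.3)], "Co": 2 * [(0, 0, 1.2)], "Ni": 4 * [(0, 0, 0.6)]}
    afm = {"O":  [(0, 0, 1.5), (0, 0, 1.5), (0, 0, -1.5), (0, 0, -1.5)],
           "Cr": [(0, 0, 1.5), (0, 0, -1.5)],
           "Mn": [(0, 0, 2.0), (0, 0, 1.9), (0, 0, -2.0), (0, 0, -1.9)]}
    if symbol in ferro: return "polarized", ferro[symbol]
    if symbol in afm:   return "afm", afm[symbol]
    return "unpolarized", None

print(magnetic_setup("Fe"))   # ('polarized', [(0, 0, 2.3), (0, 0, 2.3)])
print(magnetic_setup("Si"))   # ('unpolarized', None)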
Example #3
    def add_pseudo(self, pseudo):
        """Add a pseudo to the Dojo."""
        pseudo = Pseudo.as_pseudo(pseudo)

        dojo_report = DojoReport.from_file(pseudo.filepath)

        # Construct the flow 
        flow_workdir = os.path.join(self.workdir, pseudo.name)
        flow = AbinitFlow(workdir=flow_workdir, manager=self.manager, pickle_protocol=0)

        # Construct the flow according to the info found in the dojo report.
        if not pseudo.has_hints:
            # We need the hints in order to run the other tests
            factory = PPConvergenceFactory()
            ecut_work = factory.work_for_pseudo(pseudo, ecut_slice=slice(4, None, 1), nlaunch=4)
            flow.register_work(ecut_work)

        else:
            # Hints are available --> construct a flow for the different trials.
            dojo_trial = "deltafactor"
            if dojo_trial in self.trials:
                # Do we have this element in the deltafactor database?
                #if not df_database().has_symbol(pseudo.symbol):
                #    logger.warning("Cannot find %s in deltafactor database." % pseudo.symbol)

                delta_factory = DeltaFactory()
                kppa = 6750  # 6750 is the value used in the deltafactor code.
                kppa = 1     # FIXME: this overrides the standard value above.

                for accuracy in self.accuracies:
                    if dojo_report.has_trial(dojo_trial, accuracy): continue
                    ecut, pawecutdg = self._ecut_pawecutdg(pseudo, accuracy)
                    work = delta_factory.work_for_pseudo(pseudo, accuracy=accuracy, kppa=kppa, ecut=ecut, pawecutdg=pawecutdg)

                    logger.info("Adding work for %s with accuracy %s" % (dojo_trial, accuracy))
                    work.set_dojo_accuracy(accuracy)
                    flow.register_work(work)

            # Test if GBRV tests are wanted.
            gbrv_structs = [s.split("_")[1] for s in self.trials if s.startswith("gbrv_")]

            if gbrv_structs:
                gbrv_factory = GbrvFactory()
                for struct_type in gbrv_structs:
                    dojo_trial = "gbrv_" + struct_type
                    for accuracy in self.accuracies:
                        if dojo_report.has_trial(dojo_trial, accuracy): continue
                        ecut, pawecutdg = self._ecut_pawecutdg(pseudo, accuracy)
                        work = gbrv_factory.relax_and_eos_work(pseudo, struct_type, ecut=ecut, pawecutdg=pawecutdg)

                        logger.info("Adding work for %s with accuracy %s" % (dojo_trial, accuracy))
                        work.set_dojo_accuracy(accuracy)
                        flow.register_work(work)

        flow.allocate()
        self.pseudos.append(pseudo)
        self.flows.append(flow)
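The heart of the loops above is "skip any (trial, accuracy) combination that is already present in the dojo report". A toy version, with a plain dict standing in for the DojoReport (hypothetical data, for illustration only):

report = {"deltafactor": {"normal": {"dfact_meV": 5.0}}}   # pretend "normal" is done

def has_trial(report, trial, accuracy):
    return accuracy in report.get(trial, {})

for accuracy in ("low", "normal", "high"):
    if has_trial(report, "deltafactor", accuracy):
        continue                                  # already computed -> no new work
    print("would register a deltafactor work with accuracy", accuracy)
# -> only "low" and "high" are registered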
Example #4
    def work_for_pseudo(self, pseudo, **kwargs):
        """
        Create a :class:`Flow` for phonon calculations:

            1) One workflow for the GS run.

            2) nqpt workflows for phonon calculations. Each workflow contains
               nirred tasks where nirred is the number of irreducible phonon perturbations
               for that particular q-point.

            the kwargs are passed to scf_hp_inputs
        """
        try:
            qpt = kwargs['qpt']
        except KeyError:
            raise ValueError('A phonon test needs to specify a q-point.')

        kwargs.pop('accuracy')

        pseudo = Pseudo.as_pseudo(pseudo)

        structure_or_cif = self.get_cif_path(pseudo.symbol)

        if not isinstance(structure_or_cif, Structure):
            # Assume CIF file
            structure = Structure.from_file(structure_or_cif, primitive=False)
        else:
            structure = structure_or_cif

        nat = len(structure)
        report = pseudo.read_dojo_report()
        ecut_str = '%.1f' % kwargs['ecut']
        #print(ecut_str)
        #print(report['deltafactor'][float(ecut_str)].keys())

        try:
            v0 = nat * report['deltafactor'][ecut_str]['v0']
        except KeyError:
            try:
                v0 = nat * report['deltafactor'][float(ecut_str)]['v0']
            except KeyError:
                # The deltafactor calculation at this ecut has not been done yet, so the phonon work cannot be created.
                return None

        structure.scale_lattice(v0)
        all_inps = self.scf_ph_inputs(pseudos=[pseudo], structure=structure, **kwargs)
        scf_input, ph_inputs = all_inps[0], all_inps[1:]

        work = build_oneshot_phononwork(scf_input=scf_input, ph_inputs=ph_inputs, work_class=PhononDojoWork)
        #print('after build_oneshot_phonon')
        #print(work)
        work.set_dojo_trial(qpt)
        #print(scf_input.keys())
        work.ecut = scf_input['ecut']
        work._pseudo = pseudo
        return work
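The nested try/except above copes with dojo reports whose deltafactor section is keyed either by a '%.1f' string or by a plain float. A self-contained illustration of that fallback with a toy report (the numbers are made up):

report = {"deltafactor": {32.0: {"v0": 20.1}}}   # toy report keyed by a float
nat, ecut_str = 2, "%.1f" % 32

try:
    v0 = nat * report["deltafactor"][ecut_str]["v0"]
except KeyError:
    try:
        v0 = nat * report["deltafactor"][float(ecut_str)]["v0"]
    except KeyError:
        v0 = None            # deltafactor not computed at this ecut -> no phonon work
print(v0)                    # 40.2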
Example #5
    def __init__(self, pseudo, ecut_slice, nlaunch, atols_mev,
                 toldfe=1.e-8, spin_mode="polarized", acell=(8, 9, 10), 
                 smearing="fermi_dirac:0.1 eV", max_niter=300, workdir=None, manager=None):
        """
        Args:
            pseudo: string or :class:`Pseudo` instance
            ecut_slice: List of cutoff energies or slice object (mainly used for infinite iterations).
            nlaunch: Number of tasks to launch initially.
            atols_mev: List of absolute tolerances in meV (3 entries corresponding to the accuracies ["low", "normal", "high"]).
            spin_mode: Defines how the electronic spin will be treated.
            acell: Lengths of the periodic box in Bohr.
            smearing: :class:`Smearing` instance or string in the form "mode:tsmear". Default: Fermi-Dirac with T=0.1 eV.
            max_niter: Maximum number of iterations.
            workdir: Working directory.
            manager: :class:`TaskManager` object.
        """
        super(PseudoConvergence, self).__init__(workdir, manager)

        self._pseudo = Pseudo.as_pseudo(pseudo)
        self.nlaunch = nlaunch; assert nlaunch > 0
        self.atols_mev = atols_mev
        self.toldfe = toldfe
        self.spin_mode = SpinMode.as_spinmode(spin_mode)
        self.acell = acell
        self.smearing = Smearing.as_smearing(smearing)
        self.max_niter = max_niter; assert max_niter > 0
        self.ecut_slice = ecut_slice; assert isinstance(ecut_slice, slice)

        self.ecuts = []

        if self.pseudo.ispaw:
            raise NotImplementedError("PAW convergence tests are not supported yet")

        for i in range(self.nlaunch):
            ecut = ecut_slice.start + i * ecut_slice.step
            #if self.ecut_slice.stop is not None and ecut > self.ecut_slice.stop: continue
            self.add_task_with_ecut(ecut)
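With the ecut_slice=slice(4, None, 1) and nlaunch=4 passed by the PPConvergenceFactory call in Example 3, the launch loop above produces one task per cutoff on a regular mesh; a quick standalone check:

ecut_slice, nlaunch = slice(4, None, 1), 4
ecuts = [ecut_slice.start + i * ecut_slice.step for i in range(nlaunch)]
print(ecuts)   # [4, 5, 6, 7] -> add_task_with_ecut is called once per value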
Example #6
    def relax_and_eos_work(self, pseudo, struct_type, ecut=None, pawecutdg=None, ref="ae", **kwargs):
        """
        Returns a :class:`Work` object from the given pseudopotential.

        Args:
            kwargs: Extra variables passed to Abinit.

        .. note::

            GBRV tests are done with the following parameters:

                - No spin polarization for the structural relaxation
                  (spin is used only for the magnetic moments, for which the spin-unpolarized structures are kept).
                - All calculations are done with an 8x8x8 k-point grid and 0.002 Ry Fermi-Dirac smearing.
        """
        pseudo = Pseudo.as_pseudo(pseudo)

        if pseudo.ispaw and pawecutdg is None:
            raise ValueError("pawecutdg must be specified for PAW calculations.")

        structure = self.make_ref_structure(pseudo.symbol, struct_type=struct_type, ref=ref)
 
        return GbrvRelaxAndEosWork(structure, struct_type, pseudo,
                                   ecut=ecut, pawecutdg=pawecutdg, **kwargs)
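As a consistency check, the 0.002 Ry Fermi-Dirac smearing quoted in the note equals the "fermi_dirac:0.001 Ha" default of GbrvRelaxAndEosWork in Example 1, since 1 Ha = 2 Ry; the calling pattern of this factory is shown in Examples 3 and 7.

# 0.002 Ry expressed in Hartree (1 Ha = 2 Ry).
tsmear_ry = 0.002
tsmear_ha = tsmear_ry / 2
print(tsmear_ha)   # 0.001 -> matches "fermi_dirac:0.001 Ha" in Example 1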
Example #7
def build_flow(pseudo, options):
    """Build the flow, returns None if no calculation must be performed.""" 
    pseudo = Pseudo.as_pseudo(pseudo)

    workdir = pseudo.basename + "_DOJO"
    if os.path.exists(workdir): 
        warn("Directory %s already exists" % workdir)
        return None
        #raise ValueError("%s exists" % workdir)

    flow = abilab.Flow(workdir=workdir, manager=options.manager)

    extra_abivars = {
            "mem_test": 0,
            "fband": 2,
            "nstep": 100,
            "paral_kgb": options.paral_kgb
            #"nsym": 1,
    }

    report = pseudo.read_dojo_report()
    #print(report)
    #hints = report["hints"]

    # Build ecut mesh.
    try:
        ppgen_ecut = int(report["ppgen_hints"]["high"]["ecut"])

        ecut_list = copy.copy(report["ecuts"])
    except KeyError:
        print('New pseudo without a report from the generator; the convergence study starts from 16 Ha.')
        report["ppgen_hints"] = {}
        report["ppgen_hints"]["high"] = {} 
        report["ppgen_hints"]["high"]["ecut"] = 16.0
        report["ecuts"] = [16.0, 20.0, 24.0]
        pseudo.write_dojo_report(report)
        ppgen_ecut = int(report["ppgen_hints"]["high"]["ecut"])
        ecut_list = copy.copy(report["ecuts"])

    #if 'extend' in options:
    #    next_ecut = max(ecut_list) + 2
    #    ecut_list.append(next_ecut)

    #if 'new-ecut' in options:
    #    ecut_list.append(options['new-ecut'])

    add_ecuts = False
    if add_ecuts:
        #dense_right = np.linspace(ppgen_ecut, ppgen_ecut + 10, num=6)
        #dense_left = np.linspace(ppgen_ecut-8, ppgen_ecut, num=4, endpoint=False)
        #coarse_high = np.linspace(ppgen_ecut + 15, ppgen_ecut + 40, num=4)

        dense_right = np.arange(ppgen_ecut, ppgen_ecut + 6*2, step=2)
        dense_left = np.arange(max(ppgen_ecut-6, 2), ppgen_ecut, step=2)
        coarse_high = np.arange(ppgen_ecut + 15, ppgen_ecut + 35, step=5)

        ecut_list = list(dense_left) + list(dense_right) + list(coarse_high)

    # Computation of the deltafactor.
    if "df" in options.trials:
        #FIXME
        #factory = DeltaFactory(xc=pseudo.xc)
        if os.path.isfile('LDA'):
            factory = DeltaFactory(xc='LDA')
        else:
            factory = DeltaFactory()
        for ecut in ecut_list:
            if "deltafactor" in report and ecut in report["deltafactor"].keys(): continue
            pawecutdg = 2 * ecut if pseudo.ispaw else None
            # Build and register the workflow.
            work = factory.work_for_pseudo(pseudo, kppa=6750, ecut=ecut, pawecutdg=pawecutdg, **extra_abivars)
            flow.register_work(work, workdir='WDF' + str(ecut))

    # GBRV tests.
    if "gbrv" in options.trials:
        gbrv_factory = GbrvFactory()
        gbrv_structs = ("fcc", "bcc")
        for struct_type in gbrv_structs:
            dojo_trial = "gbrv_" + struct_type
            for ecut in ecut_list:
                if dojo_trial in report and ecut in report[dojo_trial].keys(): continue
                pawecutdg = 2 * ecut if pseudo.ispaw else None
                # FIXME: ntime is kept small because the structure relaxation goes bananas after a few steps.
                work = gbrv_factory.relax_and_eos_work(pseudo, struct_type, ecut=ecut, ntime=5, pawecutdg=pawecutdg, **extra_abivars)
                flow.register_work(work, workdir="GBRV_" + struct_type + str(ecut))

    # PHONON test
    if "phonon" in options.trials:
        phonon_factory = DFPTPhononFactory()
        for ecut in ecut_list:
            str_ecut = '%.1f' % ecut
            if "phonon" in report and str_ecut in report["phonon"].keys(): continue
            kppa = 1000
            pawecutdg = 2 * ecut if pseudo.ispaw else None
            work = phonon_factory.work_for_pseudo(pseudo, accuracy="high", kppa=kppa, ecut=ecut, pawecutdg=pawecutdg,
                                                  tolwfr=1.e-20, smearing="fermi_dirac:0.0005", qpt=[0,0,0], mem_test=0)
            if work is not None:
                flow.register_work(work, workdir='GammaPhononsAt'+str(ecut))
            else:
                warn('cannot create GammaPhononsAt' + str(ecut) + ' work, factory returned None')

    # PHONON without ASR test
    if "phwoa" in options.trials:
        phonon_factory = DFPTPhononFactory()
        for ecut in [ecut_list[0], ecut_list[-1]]:
            str_ecut = '%.1f' % ecut
            if "phwoa" in report and str_ecut in report["phwoa"].keys(): continue
            print('phwoa')
            kppa = 1000
            pawecutdg = 2 * ecut if pseudo.ispaw else None
            work = phonon_factory.work_for_pseudo(pseudo, accuracy="high", kppa=kppa, ecut=ecut, pawecutdg=pawecutdg,
                                                  tolwfr=1.e-20, smearing="fermi_dirac:0.0005", qpt=[0,0,0], rfasr=0)
            if work is not None:
                flow.register_work(work, workdir='GammaPhononsAt'+str(ecut)+'WOA')
            else:
                warn('cannot create GammaPhononsAt' + str(ecut) + 'WOA work, factory returned None')


    if len(flow) > 0:
        return flow.allocate()
    else:
        # Empty flow since all trials have been already performed.
        return None
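The (currently disabled) add_ecuts branch densifies the mesh around ppgen_ecut and appends a coarse high-ecut tail. A standalone view of the arrays it would generate, using an illustrative ppgen_ecut of 30 Ha:

import numpy as np

ppgen_ecut = 30   # illustrative value only
dense_right = np.arange(ppgen_ecut, ppgen_ecut + 6 * 2, step=2)
dense_left = np.arange(max(ppgen_ecut - 6, 2), ppgen_ecut, step=2)
coarse_high = np.arange(ppgen_ecut + 15, ppgen_ecut + 35, step=5)
print(dense_left.tolist() + dense_right.tolist() + coarse_high.tolist())
# [24, 26, 28, 30, 32, 34, 36, 38, 40, 45, 50, 55, 60]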
Example #8
def main():
    def str_examples():
        examples = """
                   Usage Example:\n
                   ppdojo_run.py Si.psp8  => Build pseudo_dojo flow for Si.fhi\n
                   """
        return examples

    def show_examples_and_exit(error_code=1):
        """Display the usage of the script."""
        #sys.stderr.write(str_examples()+'\n')
        print(str_examples())
        sys.exit(error_code)

    parser = argparse.ArgumentParser(epilog=str_examples())

    parser.add_argument('-m', '--manager', type=str, default=None,  help="Manager file")
    parser.add_argument('-d', '--dry-run', default=False, action="store_true", help="Dry run, build the flow without submitting it")
    parser.add_argument('--paral-kgb', type=int, default=0,  help="Paral_kgb input variable.")
    parser.add_argument('-p', '--plot', default=False, action="store_true", help="Plot convergence when the flow is done")
    parser.add_argument('-n', '--new-ecut', type=int, default=None, action="store", help="Extend the ecut grid with the new-ecut point")

    def parse_trials(s):
        if s == "all": return ["df", "gbrv", "phonon", "phowa"]
        return s.split(",")

    parser.add_argument('--trials', default="all",  type=parse_trials, help=("List of tests e.g. --trials=df,gbrv,phonon,phwoa\n"
                        "  df:     test the deltafactor against the all-electron reference\n"
                        "  gbrv:   test the fcc and bcc lattice parameters against the AE reference\n"
                        "  phonon: test the convergence of the phonon modes at gamma\n"
                        "  phwoa:  test violation of the acoustic sum rule (without enforcing it) at the min and max ecut\n"))

    parser.add_argument('--loglevel', default="ERROR", type=str,
                        help="set the loglevel. Possible values: CRITICAL, ERROR (default), WARNING, INFO, DEBUG")

    parser.add_argument('path', help='pseudopotential file.')

    # Create the parsers for the sub-commands
    #subparsers = parser.add_subparsers(dest='command', help='sub-command help', description="Valid subcommands")
    # Subparser for single command.
    #p_build = subparsers.add_parser('build', help="Build dojo.")

    try:
        options = parser.parse_args()
    except:
        show_examples_and_exit(1)

    # loglevel is bound to the string value obtained from the command line argument. 
    # Convert to upper case to allow the user to specify --loglevel=DEBUG or --loglevel=debug
    import logging
    numeric_level = getattr(logging, options.loglevel.upper(), None)
    if not isinstance(numeric_level, int):
        raise ValueError('Invalid log level: %s' % options.loglevel)
    logging.basicConfig(level=numeric_level)

    options.manager = abilab.TaskManager.from_user_config() if options.manager is None else \
                      abilab.TaskManager.from_file(options.manager)

    if os.path.isfile(options.path):
        # Operate on a single pseudo.
        flow = build_flow(options.path, options)
        if flow is None: 
            warn("DOJO_REPORT is already computed for pseudo %s." % options.path)
            return 0
        if options.dry_run:
            flow.build_and_pickle_dump()
        else:
            # Run the flow with the scheduler.
            #print("nlaunch: %d" % flow.rapidfire())
            flow.make_scheduler().start()

    else:
        # Gather all pseudos starting from the current working directory and run the flows iteratively.
        table = PeriodicTable()
        all_symbols = set(element.symbol for element in table.all_elements)
        #all_symbols = ["H"]
        #print(os.listdir(options.path))

        #print("here", os.path.basename(os.path.dirname(options.path)))
        #print("here", options.path)
        if os.path.basename(os.path.dirname(options.path)) in all_symbols:
            #print("here")
            dirs = [options.path]
        else:
            dirs = [os.path.join(options.path, d) for d in os.listdir(options.path) if d in all_symbols]
        print(dirs)

        pseudos = []
        for d in dirs:
            #print(d)
            pseudos.extend(os.path.join(d, p) for p in os.listdir(d) if p.endswith(".psp8"))

        if not pseudos:
            warn("Empty list of pseudos")
            return 0

        nflows, nlaunch = 0, 0
        #exc_filename = "allscheds_exceptions.log"
        #if os.path.exists(exc_filename):
        #    raise RuntimeError("File %s already exists, remove it before running the script" % exc_filename)
        #exc_log = open(exc_filename, "w")
        exc_log = sys.stderr

        for pseudo in pseudos:
            pseudo = Pseudo.as_pseudo(pseudo)
            report = pseudo.dojo_report
            if "version" not in report: continue

            flow = build_flow(pseudo, options)
            if flow is None: 
                warn("DOJO_REPORT is already computed for pseudo %s." % pseudo.basename)
                continue

            #if os.path.exists(flow.workdir) or nflows >= 2: continue
            nflows += 1

            try:
                flow.make_scheduler().start()
            except Exception as exc:
                # Log exception and proceed with the next pseudo.
                exc_log.write(str(exc))

            new_report = pseudo.read_dojo_report()
            new_report.plot_deltafactor_convergence()
            new_report.plot_gbrv_convergence()
            new_report.plot_phonon_convergence()

            #with open(pseudo.basename + "sched.stdout", "w") as sched_stdout, \
            #     open(pseudo.basename + "sched.stderr", "w") as sched_stderr: 
            #    with RedirectStdStreams(stdout=sched_stdout, stderr=sched_stderr):
            #        try:
            #            flow.make_scheduler().start()
            #        except Exception as exc:
            #            # Log exception and proceed with the next pseudo.
            #            exc_log.write(str(exc))

        #exc_log.close()
        #print("nlaunch: %d" % nlaunch)
        #print("nflows: %d" % nflows)

    return 0
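A quick standalone check of the --trials parser defined in main(): "all" expands to the four known trial names, anything else is treated as a comma-separated list.

def parse_trials(s):
    if s == "all": return ["df", "gbrv", "phonon", "phwoa"]
    return s.split(",")

print(parse_trials("all"))       # ['df', 'gbrv', 'phonon', 'phwoa']
print(parse_trials("df,gbrv"))   # ['df', 'gbrv']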
Example #9
    def pseudos(self):
        return [Pseudo.as_pseudo(ref_file("14si.pspnc"))]
Example #10
    def __init__(self, structure, pseudo, kppa, connect,
                 ecut=None, pawecutdg=None, ecutsm=0.5,
                 spin_mode="polarized", toldfe=1.e-9, smearing="fermi_dirac:0.1 eV",
                 accuracy="normal", chksymbreak=0, workdir=None, manager=None, **kwargs):
        """
        Build a :class:`Work` for the computation of the deltafactor.

        Args:   
            structure: :class:`Structure` object
            pseudo: String with the name of the pseudopotential file or :class:`Pseudo` object.
            kppa: Number of k-points per atom.
            connect: True if the SCF run should be initialized from the previous run.
            spin_mode: Spin polarization mode.
            toldfe: Tolerance on the energy (Ha)
            smearing: Smearing technique.
            workdir: String specifying the working directory.
            manager: :class:`TaskManager` responsible for the submission of the tasks.
        """
        super(DeltaFactorWork, self).__init__(workdir=workdir, manager=manager)

        self._pseudo = Pseudo.as_pseudo(pseudo)

        spin_mode = SpinMode.as_spinmode(spin_mode)
        smearing = Smearing.as_smearing(smearing)

        # Compute the number of bands from the pseudo and the spin-polarization.
        # Add 6 bands to account for smearing.
        #nval = structure.num_valence_electrons(self.pseudo)
        #spin_fact = 2 if spin_mode.nsppol == 2 else 1
        #nband = int(nval / spin_fact) + 6

        # Set extra_abivars
        self.ecut, self.pawecutdg = ecut, pawecutdg

        extra_abivars = dict(
            ecut=ecut,
            pawecutdg=pawecutdg,
            ecutsm=ecutsm,
            toldfe=toldfe,
            #nband=nband,
            prtwf=0 if not connect else 1,
            #paral_kgb=paral_kgb,
            chkprim=0,
            nstep=200,
            #mem_test=0,
        )

        extra_abivars.update(**kwargs)
        self._input_structure = structure
        v0 = structure.volume

        # From 94% to 106% of the equilibrium volume.
        self.volumes = v0 * np.arange(94, 108, 2) / 100.

        for vol in self.volumes:
            new_lattice = structure.lattice.scale(vol)

            new_structure = Structure(new_lattice, structure.species, structure.frac_coords)

            ksampling = KSampling.automatic_density(new_structure, kppa, chksymbreak=chksymbreak)

            #scf_input = ScfStrategy(new_structure, self.pseudo, ksampling,
            #                        accuracy=accuracy, spin_mode=spin_mode,
            #                        smearing=smearing, **extra_abivars)

            scf_input = abilab.AbinitInput(structure=new_structure, pseudos=self.pseudo)
            scf_input.add_abiobjects(ksampling, smearing, spin_mode)
            scf_input.set_vars(extra_abivars)

            self.register_scf_task(scf_input)

        if connect:
            logger.info("Connecting SCF tasks using previous WFK file")
            middle = len(self.volumes) // 2
            filetype = "WFK"
            for i, task in enumerate(self[:middle]):
                task.add_deps({self[i + 1]: filetype})

            for i, task in enumerate(self[middle+1:]):
                task.add_deps({self[middle + i]: filetype})
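The equation-of-state mesh built above spans 94% to 106% of the equilibrium volume in 2% steps, i.e. seven SCF tasks; it can be checked in isolation with a toy v0:

import numpy as np

v0 = 40.0                                   # toy equilibrium volume
volumes = v0 * np.arange(94, 108, 2) / 100.0
print(len(volumes), volumes.tolist())       # 7 scaled volumes -> 7 SCF tasks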