def __init__(self, structure, spec, option=None):
    """
    Set up a single G0W0 work for ``structure``.

    Args:
        structure: structure object the GW work is built for.
        spec: specification object/dict controlling the calculation.
        option: optional dict of converged parameters; a ``'nbands'`` entry is
            renamed to ``'nscf_nbands'`` and any ``'gap'`` entry is dropped.

    Raises:
        RuntimeError: if the ABINIT_PS / ABINIT_PS_EXT environment variables
            are not set (needed to locate the pseudopotential files).
    """
    self.structure = structure
    self.spec = spec
    if option is not None:
        # Work on a copy so the caller's dict is not mutated (the original
        # code popped keys from the caller's object in place).
        option = dict(option)
        option.pop('gap', None)
        # Rename 'nbands' -> 'nscf_nbands'; KeyError here means the caller
        # passed an option dict without the mandatory 'nbands' entry.
        option['nscf_nbands'] = option.pop('nbands')
    self.option = option
    print('option:', option)
    self.bands_fac = 1
    self.tests = self.__class__.get_defaults_tests()
    self.convs = self.__class__.get_defaults_convs()
    self.response_models = self.__class__.get_response_models()
    # The work counts as fully converged when every convergence parameter
    # appears in `option`.
    if self.option is None:
        self.all_converged = False
    else:
        self.all_converged = len(self.option) == len(self.convs)
    path_add = '.conv' if self.all_converged else ''
    self.work_dir = s_name(self.structure) + path_add
    try:
        abi_pseudo = os.environ['ABINIT_PS_EXT']
        abi_pseudo_dir = os.environ['ABINIT_PS']
    except KeyError as exc:
        # Previously these were silently set to None, which made the
        # os.path.join below fail later with an opaque TypeError.
        raise RuntimeError(
            'ABINIT_PS and ABINIT_PS_EXT environment variables must be set '
            'to locate the pseudopotential files') from exc
    pseudos = []
    for element in self.structure.composition.element_composition:
        pseudos.append(os.path.join(abi_pseudo_dir, str(element) + abi_pseudo))
    self.pseudo_table = PseudoTable(pseudos)
def test_nc_pseudos(self):
    """Test norm-conserving pseudopotentials"""
    # All fixture pseudos are for silicon (Z=14, 4 valence electrons),
    # so the per-pseudo invariants below are the same for every file.
    for symbol, pseudos in self.nc_pseudos.items():
        for pseudo in pseudos:
            assert repr(pseudo)
            assert str(pseudo)
            self.assertTrue(pseudo.isnc)
            self.assertFalse(pseudo.ispaw)
            self.assertEqual(pseudo.Z, 14)
            self.assertEqual(pseudo.symbol, symbol)
            self.assertEqual(pseudo.Z_val, 4)
            self.assertGreaterEqual(pseudo.nlcc_radius, 0.0)
            # Test pickle
            self.serialize_with_pickle(pseudo, test_eq=False)
            # Test MSONable
            self.assertMSONable(pseudo)

    # HGH pseudos
    pseudo = self.Si_hgh
    self.assertFalse(pseudo.has_nlcc)
    self.assertEqual(pseudo.l_max, 1)
    self.assertEqual(pseudo.l_local, 0)
    assert not pseudo.supports_soc
    assert self.Si_hgh.md5 is not None
    # Equality must at least be reflexive.
    assert self.Si_hgh == self.Si_hgh

    # TM pseudos
    pseudo = self.Si_pspnc
    self.assertTrue(pseudo.has_nlcc)
    self.assertEqual(pseudo.l_max, 2)
    self.assertEqual(pseudo.l_local, 2)
    assert not pseudo.supports_soc
    # Different pseudo files for the same element must compare unequal.
    assert self.Si_hgh != self.Si_pspnc

    # FHI pseudos
    pseudo = self.Si_fhi
    self.assertFalse(pseudo.has_nlcc)
    self.assertEqual(pseudo.l_max, 3)
    self.assertEqual(pseudo.l_local, 2)
    assert not pseudo.supports_soc

    # Test PseudoTable.
    table = PseudoTable(self.nc_pseudos["Si"])
    assert repr(table)
    assert str(table)
    self.assertTrue(table.allnc)
    self.assertTrue(not table.allpaw)
    self.assertFalse(not table.is_complete)
    assert len(table) == 3
    # Indexing by Z and selecting by symbol must both return all 3 Si pseudos.
    assert len(table[14]) == 3
    assert len(table.select_symbols("Si")) == 3
    assert table.zlist == [14]

    # Test pickle
    self.serialize_with_pickle(table, test_eq=False)
def __init__(self, pseudos, pseudo_dir="", structure=None, ndtset=1, comment="", decorators=None): """ Args: pseudos: String or list of string with the name of the pseudopotential files. pseudo_dir: Name of the directory where the pseudopotential files are located. structure: file with the structure, :class:`Structure` object or dictionary with ABINIT geo variable ndtset: Number of datasets. comment: Optional string with a comment that will be placed at the beginning of the file. decorators: List of `AbinitInputDecorator` objects. """ # Dataset[0] contains the global variables common to the different datasets # Dataset[1:ndtset+1] stores the variables specific to the different datasets. self._ndtset = ndtset self._datasets = [] for i in range(ndtset + 1): dt0 = None if i > 0: dt0 = self._datasets[0] self._datasets.append(Dataset(index=i, dt0=dt0)) self._datasets[0]["ndtset"] = ndtset # Setup of the pseudopotential files. if isinstance(pseudos, PseudoTable): self._pseudos = pseudos elif all(isinstance(p, Pseudo) for p in pseudos): self._pseudos = PseudoTable(pseudos) else: # String(s) pseudo_dir = os.path.abspath(pseudo_dir) pseudo_paths = [ os.path.join(pseudo_dir, p) for p in list_strings(pseudos) ] missing = [p for p in pseudo_paths if not os.path.exists(p)] if missing: raise self.Error( "Cannot find the following pseudopotential files:\n%s" % str(missing)) self._pseudos = PseudoTable(pseudo_paths) if structure is not None: self.set_structure(structure) if comment is not None: self.set_comment(comment) self._decorators = [] if not decorators else decorators
def __init__(self, structure, pseudos, pseudo_dir="", ndtset=1):
    """
    Args:
        structure: file with the structure, |Structure| object or dictionary with ABINIT geo variable.
            Accepts also list of objects that can be converted to Structure object.
            In this case, however, ndtset must be equal to the length of the list.
        pseudos: String or list of string with the name of the pseudopotential files,
            a |Pseudo| object, a list of |Pseudo| objects or a |PseudoTable|.
        pseudo_dir: Name of the directory where the pseudopotential files are located.
        ndtset: Number of datasets.

    Raises:
        ValueError: if ndtset <= 0, or if a list of structures whose length
            differs from ndtset is given.
    """
    # Setup of the pseudopotential files.
    if isinstance(pseudos, Pseudo):
        # Single pseudo: wrap it in a list.
        pseudos = [pseudos]

    elif isinstance(pseudos, PseudoTable):
        # Already a table: use it as is.
        pass

    elif all(isinstance(p, Pseudo) for p in pseudos):
        pseudos = PseudoTable(pseudos)

    else:
        # String(s): resolve the filenames relative to pseudo_dir.
        pseudo_dir = os.path.abspath(pseudo_dir)
        pseudo_paths = [os.path.join(pseudo_dir, p) for p in list_strings(pseudos)]

        missing = [p for p in pseudo_paths if not os.path.exists(p)]
        if missing:
            raise self.Error(f"Cannot find the following pseudopotential files:\n{missing}")

        pseudos = PseudoTable(pseudo_paths)

    # Build the list of BasicAbinitInput objects.
    if ndtset <= 0:
        raise ValueError(f"ndtset {ndtset} cannot be <=0")

    if not isinstance(structure, (list, tuple)):
        # One structure replicated over all datasets.
        self._inputs = [BasicAbinitInput(structure=structure, pseudos=pseudos)
                        for _ in range(ndtset)]
    else:
        # One structure per dataset. This was a bare `assert`, which is
        # stripped under `python -O`; raise explicitly instead.
        if len(structure) != ndtset:
            raise ValueError(
                f"ndtset {ndtset} must equal the number of structures {len(structure)}")
        self._inputs = [BasicAbinitInput(structure=s, pseudos=pseudos)
                        for s in structure]
def __init__(self, structure, spec, option=None):
    """
    Set up a single G0W0 work for ``structure``.

    Args:
        structure: structure object the GW work is built for.
        spec: specification object/dict controlling the calculation.
        option: optional dict of converged parameters; when it contains a
            value for every convergence parameter the work is flagged as
            fully converged.

    Raises:
        RuntimeError: if the ABINIT_PS / ABINIT_PS_EXT environment variables
            are not set (needed to locate the pseudopotential files).
    """
    self.structure = structure
    self.spec = spec
    self.option = option
    self.bands_fac = 1
    self.tests = self.__class__.get_defaults_tests()
    self.convs = self.__class__.get_defaults_convs()
    self.response_models = self.__class__.get_response_models()
    # The work counts as fully converged when every convergence parameter
    # appears in `option`.
    if self.option is None:
        self.all_converged = False
    else:
        self.all_converged = len(self.option) == len(self.convs)
    path_add = '.conv' if self.all_converged else ''
    self.work_dir = s_name(self.structure) + path_add
    try:
        abi_pseudo = os.environ['ABINIT_PS_EXT']
        abi_pseudo_dir = os.environ['ABINIT_PS']
    except KeyError as exc:
        # Previously these were silently set to None, which made the
        # os.path.join below fail later with an opaque TypeError.
        raise RuntimeError(
            'ABINIT_PS and ABINIT_PS_EXT environment variables must be set '
            'to locate the pseudopotential files') from exc
    pseudos = []
    for element in self.structure.composition.element_composition:
        pseudos.append(os.path.join(abi_pseudo_dir, str(element) + abi_pseudo))
    self.pseudo_table = PseudoTable(pseudos)
def __init__(
        self,
        structure,
        pseudos,
        pseudo_dir=None,
        comment=None,
        abi_args=None,
        abi_kwargs=None,
):
    """
    Args:
        structure: Parameters defining the crystalline structure.
            Accepts |Structure| object, file with structure (CIF, netcdf file, ...) or
            dictionary with ABINIT geo variables.
        pseudos: Pseudopotentials to be used for the calculation. Accepts: string or list of strings
            with the name of the pseudopotential files, list of |Pseudo| objects
            or |PseudoTable| object.
        pseudo_dir: Name of the directory where the pseudopotential files are located.
        comment: Optional string with a comment that will be placed at the beginning of the file.
        abi_args: list of tuples (key, value) with the initial set of variables. Default: Empty
        abi_kwargs: Dictionary with the initial set of variables. Default: Empty
    """
    # Internal dict with variables. Insertion order is preserved (dicts are
    # ordered since Python 3.7) so variables stay grouped by `topics` as we
    # fill the input.
    abi_args = [] if abi_args is None else abi_args
    for key, _ in abi_args:
        self._check_varname(key)

    abi_kwargs = {} if abi_kwargs is None else abi_kwargs
    for key in abi_kwargs:
        self._check_varname(key)

    # Positional (key, value) pairs first, then the keyword variables;
    # later duplicates overwrite earlier ones in the dict() call.
    args = list(abi_args)
    args.extend(abi_kwargs.items())
    self._vars = dict(args)

    self.set_structure(structure)

    if pseudo_dir is not None:
        pseudo_dir = os.path.abspath(pseudo_dir)
        if not os.path.exists(pseudo_dir):
            raise self.Error(f"Directory {pseudo_dir} does not exist")
        pseudos = [os.path.join(pseudo_dir, p) for p in list_strings(pseudos)]

    try:
        self._pseudos = PseudoTable.as_table(pseudos).get_pseudos_for_structure(self.structure)
    except ValueError as exc:
        # Re-raise as the module's Error type, keeping the original chain.
        raise self.Error(str(exc)) from exc

    if comment is not None:
        self.set_comment(comment)
def num_valence_electrons(structure, pseudos):
    """
    Return the number of valence electrons in ``structure``.

    Args:
        pseudos: List of |Pseudo| objects or list of filenames.
    """
    table = PseudoTable.as_table(pseudos)
    nval = sum(
        table.pseudo_with_symbol(site.specie.symbol).Z_val
        for site in structure
    )
    # Report a plain int when the total is integral, otherwise keep the float.
    return int(nval) if int(nval) == nval else nval
def num_valence_electrons(self, pseudos):
    """
    Return the number of valence electrons.

    Args:
        pseudos: List of :class:`Pseudo` objects or list of filenames.
    """
    table = PseudoTable.as_table(pseudos)
    # Sum Z_val of the pseudo associated with each site's species.
    return sum(
        table.pseudo_with_symbol(site.species_string).Z_val
        for site in self
    )
def get_pseudos(top):
    """
    Find pseudopotential files (``.psp8``) within ``top``, skipping directories
    whose name starts with ``_``, and return a :class:`PseudoTable` object
    sorted by atomic number Z. Files that cannot be parsed are skipped with
    a warning.
    """
    from monty.os.path import find_exts
    from pymatgen.io.abinit.pseudos import PseudoTable, Pseudo

    exts = ("psp8",)
    pseudos = []
    for path in find_exts(top, exts, exclude_dirs="_*"):
        try:
            pseudos.append(Pseudo.from_file(path))
        except Exception as exc:
            from warnings import warn
            # `path` is a plain string here; the previous code accessed
            # `path.filepath`, which raised AttributeError inside the
            # exception handler and masked the real parse error.
            warn("Exception in pseudo %s:\n%s" % (path, exc))

    return PseudoTable(pseudos).sort_by_z()
def test_methods(self):
    """Test PseudoTable methods"""
    # Three different Si pseudo formats (TM, HGH, FHI) -> one table.
    table = PseudoTable(
        ref_files("14si.pspnc", "14si.4.hgh", "14-Si.LDA.fhi"))
    assert str(table)
    assert len(table) == 3
    for pseudo in table:
        assert pseudo.isnc
    assert table.allnc and not table.allpaw
    assert table.zlist == [14]

    # Data persistence
    self.serialize_with_pickle(table, test_eq=False)
    d = table.as_dict()
    PseudoTable.from_dict(d)
    self.assertMSONable(table)

    # Selecting by symbol must return an object of the same class holding
    # all three Si pseudos.
    selected = table.select_symbols("Si")
    assert len(selected) == len(
        table) and selected.__class__ is table.__class__

    # pseudos_with_symbols must reject a lookup that matches more than one
    # pseudo per symbol.
    with self.assertRaises(ValueError):
        table.pseudos_with_symbols("Si")
class SingleAbinitGWWork:
    """
    GW flow for Abinit: builds a single G0W0 flow (SCF -> NSCF -> screening
    -> sigma) for one structure, either as a convergence study or as a final
    run with previously converged parameters.
    """
    # Plasmon-pole / response-function models supported for the screening.
    RESPONSE_MODELS = ["cd", "godby", "hybersten", "linden", "farid"]
    # Parameter ranges used in 'test' mode.
    TESTS = {
        'ecuteps': {
            'test_range': (10, 14),
            'method': 'direct',
            'control': "gap",
            'level': "sigma"
        },
        'nscf_nbands': {
            'test_range': (30, 40),
            'method': 'set_bands',
            'control': "gap",
            'level': "nscf"
        },
        'response_model': {
            'test_range': RESPONSE_MODELS,
            'method': 'direct',
            'control': 'gap',
            'level': 'screening'
        }
    }
    # scf level test are run independently, the last value will be used in the nscf and sigma tests
    # 'test': {'test_range': (1, 2, 3), 'method': 'direct', 'control': "e_ks_max", 'level': "scf"},
    # Parameter ranges used in 'converge' mode.
    CONVS = {
        'ecut': {
            'test_range': (50, 48, 46, 44),
            'method': 'direct',
            'control': "e_ks_max",
            'level': "scf"
        },
        'ecuteps': {
            'test_range': (4, 6, 8, 10, 12),
            'method': 'direct',
            'control': "gap",
            'level': "sigma"
        },
        'nscf_nbands': {
            'test_range': (5, 10, 15, 20),
            'method': 'set_bands',
            'control': "gap",
            'level': "nscf"
        }
    }

    def __init__(self, structure, spec, option=None):
        """
        Args:
            structure: structure object the GW work is built for.
            spec: specification object controlling the calculation.
            option: optional dict of converged parameters; 'nbands' is
                renamed to 'nscf_nbands' and 'gap' is dropped.
        """
        self.structure = structure
        self.spec = spec
        if option is not None:
            # NOTE(review): this mutates the caller's dict in place, and
            # raises KeyError if 'nbands' is missing — confirm intended.
            option.pop('gap', None)
            option['nscf_nbands'] = option['nbands']
            option.pop('nbands', None)
        self.option = option
        print('option:', option)
        self.bands_fac = 1
        self.tests = self.__class__.get_defaults_tests()
        self.convs = self.__class__.get_defaults_convs()
        self.response_models = self.__class__.get_response_models()
        # Fully converged when `option` provides one value per CONVS entry.
        if self.option is None:
            self.all_converged = False
        elif len(self.option) == len(self.convs):
            self.all_converged = True
        else:
            self.all_converged = False
        path_add = '.conv' if self.all_converged else ''
        self.work_dir = s_name(self.structure) + path_add
        try:
            abi_pseudo = os.environ['ABINIT_PS_EXT']
            abi_pseudo_dir = os.environ['ABINIT_PS']
        except KeyError:
            # NOTE(review): with the env vars unset these stay None and the
            # os.path.join below raises TypeError — confirm intended.
            abi_pseudo = None
            abi_pseudo_dir = None
        pseudos = []
        for element in self.structure.composition.element_composition:
            pseudo = os.path.join(abi_pseudo_dir, str(element) + abi_pseudo)
            pseudos.append(pseudo)
        self.pseudo_table = PseudoTable(pseudos)

    @classmethod
    def get_defaults_tests(cls):
        # Deep copy so callers can mutate their ranges without touching the
        # class-level defaults.
        return copy.deepcopy(cls.TESTS)

    @classmethod
    def get_defaults_convs(cls):
        return copy.deepcopy(cls.CONVS)

    @classmethod
    def get_response_models(cls):
        return copy.deepcopy(cls.RESPONSE_MODELS)

    def get_electrons(self, structure):
        """
        Method for retrieving the number of valence electrons
        """
        electrons = 0

        for element in structure.species:
            pseudo = self.pseudo_table.pseudo_with_symbol(element.symbol)
            electrons += pseudo.Z_val
        return electrons

    def get_bands(self, structure):
        """
        Method for retrieving the standard number of bands
        """
        # Two electrons per band (non-spin-polarized) plus one extra band
        # per atom — TODO confirm the intent of the +len(structure) buffer.
        bands = self.get_electrons(structure) / 2 + len(structure)
        return int(bands)

    def get_work_dir(self):
        # Per-test working directory: <name>_<test>_<value>; plain <name>
        # once everything is converged.
        name = s_name(self.structure)
        if not self.all_converged:
            return str(name) + '_' + str(self.option['test']) + '_' + str(
                self.option['value'])
        else:
            return str(name)

    def create(self):
        """
        create single abinit G0W0 flow
        """
        # manager = 'slurm' if 'ceci' in self.spec['mode'] else 'shell'
        # an AbiStructure object has an overwritten version of get_sorted_structure that sorts according to Z
        # this could also be pulled into the constructor of Abistructure
        # abi_structure = self.structure.get_sorted_structure()
        from abipy import abilab
        # Preserve the 'item' attribute across the class swap / re-sort.
        item = copy.copy(self.structure.item)
        self.structure.__class__ = abilab.Structure
        self.structure = self.structure.get_sorted_structure_z()
        self.structure.item = item
        abi_structure = self.structure
        manager = TaskManager.from_user_config()
        # Initialize the flow.
        flow = Flow(self.work_dir, manager, pickle_protocol=0)
        # kpoint grid defined over density 40 > ~ 3 3 3
        if self.spec['converge'] and not self.all_converged:
            # (2x2x2) gamma centered mesh for the convergence test on nbands and ecuteps
            # if kp_in is present in the specs a kp_in X kp_in x kp_in mesh is used for the convergence study
            print('== here ===')
            print(self.spec.__class__)
            # NOTE(review): the json.dumps result is discarded — presumably
            # a leftover debug print; confirm and remove.
            json.dumps(self.spec.data, indent=2)
            if 'kp_in' in self.spec.data.keys():
                if self.spec['kp_in'] > 9:
                    print(
                        'WARNING:\nkp_in should be < 13 to generate an n x n x n mesh\nfor larger values a grid with '
                        'density kp_in will be generated')
                kppa = self.spec['kp_in']
            else:
                kppa = 2
        else:
            # use the specified density for the final calculation with the converged nbands and ecuteps of other
            # stand alone calculations
            kppa = self.spec['kp_grid_dens']
        gamma = True
        # 'standard' parameters for stand alone calculation
        scf_nband = self.get_bands(self.structure) + 20
        # additional bands to accommodate for nbdbuf and a bit extra
        nscf_nband = [10 * self.get_bands(self.structure)]
        nksmall = None
        ecuteps = [8]
        extra_abivars = dict()
        # read user defined extra abivars from file 'extra_abivars' should be dictionary
        extra_abivars.update(read_extra_abivars())
        # self.bands_fac = 0.5 if 'gwcomp' in extra_abivars.keys() else 1
        # self.convs['nscf_nbands']['test_range'] =
        # tuple([self.bands_fac*x for x in self.convs['nscf_nbands']['test_range']])
        ecut = extra_abivars.pop('ecut', 44)
        ecutsigx = extra_abivars.pop('ecutsigx', 44)
        # if npfft is too large or if npfft changes between the nscf calcualtion and the screening / sigma calulations
        # strange things can happen
        if 'npfft' not in extra_abivars:
            extra_abivars['npfft'] = 3
        if ecutsigx > ecut:
            raise RuntimeError('ecutsigx can not be larger than ecut')
        if ecutsigx < max(ecuteps):
            raise RuntimeError('ecutsigx < ecuteps this is not realistic')
        response_models = ['godby']
        if 'ppmodel' in extra_abivars.keys():
            response_models = [extra_abivars.pop('ppmodel')]
        # Merge converged values from `option` into the abivars; ecuteps and
        # nscf_nbands are handled separately below.
        if self.option is not None:
            for k in self.option.keys():
                if k == 'ecut':
                    ecut = self.option[k]
                if k in ['ecuteps', 'nscf_nbands']:
                    pass
                else:
                    extra_abivars.update({k: self.option[k]})
        # Resume state (grid index / completion flag) from a previous run.
        try:
            grid = read_grid_from_file(s_name(self.structure) + ".full_res")['grid']
            all_done = read_grid_from_file(
                s_name(self.structure) + ".full_res")['all_done']
            workdir = os.path.join(s_name(self.structure), 'w' + str(grid))
        except (IOError, OSError):
            grid = 0
            all_done = False
            workdir = None
        if not all_done:
            if (self.spec['test'] or self.spec['converge']) and not self.all_converged:
                if self.spec['test']:
                    print('| setting test calculation')
                    tests = SingleAbinitGWWork(self.structure, self.spec).tests
                    response_models = []
                else:
                    if grid == 0:
                        print('| setting convergence calculations for grid 0')
                        # tests = SingleAbinitGWWorkFlow(self.structure, self.spec).convs
                        tests = self.convs
                    else:
                        print('| extending grid')
                        # tests = expand(SingleAbinitGWWorkFlow(self.structure, self.spec).convs, grid)
                        tests = expand(self.convs, grid)
                ecuteps = []
                nscf_nband = []
                for test in tests:
                    if tests[test]['level'] == 'scf':
                        if self.option is None:
                            extra_abivars.update(
                                {test + '_s': tests[test]['test_range']})
                        elif test in self.option:
                            extra_abivars.update({test: self.option[test]})
                        else:
                            extra_abivars.update(
                                {test + '_s': tests[test]['test_range']})
                    else:
                        for value in tests[test]['test_range']:
                            if test == 'nscf_nbands':
                                nscf_nband.append(
                                    value * self.get_bands(self.structure))
                                # scr_nband takes nscf_nbands if not specified
                                # sigma_nband takes scr_nbands if not specified
                            if test == 'ecuteps':
                                ecuteps.append(value)
                            if test == 'response_model':
                                response_models.append(value)
            elif self.all_converged:
                print(
                    '| setting up for testing the converged values at the high kp grid '
                )
                # add a bandstructure and dos calculation
                if os.path.isfile('bands'):
                    nksmall = -30
                    # negative value > only bandstructure
                else:
                    nksmall = 30
                # in this case a convergence study has already been performed.
                # The resulting parameters are passed as option
                # Use the converged value plus one extra grid step for each
                # of ecuteps and nscf_nbands.
                ecuteps = [
                    self.option['ecuteps'], self.option['ecuteps'] +
                    self.convs['ecuteps']['test_range'][1] -
                    self.convs['ecuteps']['test_range'][0]
                ]
                nscf_nband = [
                    self.option['nscf_nbands'], self.option['nscf_nbands'] +
                    self.convs['nscf_nbands']['test_range'][1] -
                    self.convs['nscf_nbands']['test_range'][0]
                ]
                # for option in self.option:
                #    if option not in ['ecuteps', 'nscf_nband']:
                #        extra_abivars.update({option + '_s': self.option[option]})
        else:
            print('| all is done for this material')
            return
        logger.info('ecuteps : %s ' % str(ecuteps))
        logger.info('extra : %s ' % str(extra_abivars))
        logger.info('nscf_nb : %s ' % str(nscf_nband))
        inputs = g0w0_convergence_inputs(abi_structure,
                                         self.pseudo_table,
                                         kppa,
                                         nscf_nband,
                                         ecuteps,
                                         ecutsigx,
                                         scf_nband,
                                         ecut,
                                         accuracy="normal",
                                         spin_mode="unpolarized",
                                         smearing=None,
                                         response_models=response_models,
                                         charge=0.0,
                                         gw_qprange=2,
                                         gamma=gamma,
                                         nksmall=nksmall,
                                         extra_abivars=extra_abivars)
        work = G0W0Work(scf_inputs=inputs[0],
                        nscf_inputs=inputs[1],
                        scr_inputs=inputs[2],
                        sigma_inputs=inputs[3])
        # work = g0w0_extended_work(abi_structure, self.pseudo_table, kppa, nscf_nband, ecuteps, ecutsigx, scf_nband,
        # accuracy="normal", spin_mode="unpolarized", smearing=None, response_models=response_models,
        # charge=0.0, sigma_nband=None, scr_nband=None, gamma=gamma, nksmall=nksmall, **extra_abivars)
        print(workdir)
        flow.register_work(work, workdir=workdir)
        return flow.allocate()

    def create_job_file(self, serial=True):
        """
        Create the jobfile for starting all schedulers manually
        serial = True creates a list that can be submitted as job that runs all schedulers as a batch job
        (the job header needs to be added)
        serial = False creates a list that can be used to start all schedulers on the frontend in the background
        """
        # Append to the shared collection file; each work contributes its
        # own scheduler command.
        job_file = open("job_collection", mode='a')
        if serial:
            job_file.write(
                str('abirun.py ' + self.work_dir + ' scheduler > ' +
                    self.work_dir + '.log\n'))
            # Remove the (large) screening file once the work is done.
            job_file.write(
                str('rm ' + self.work_dir + '/w*/t*/outdata/out_SCR\n'))
        else:
            job_file.write(
                str('nohup abirun.py ' + self.work_dir + ' scheduler > ' +
                    self.work_dir + '.log & \n'))
            job_file.write(str('sleep 2\n'))
        job_file.close()
wlevs = np.log(np.linalg.eigvals(curly_U)).imag LOGGER.debug("loop eigenvalues:\n{}".format(wlevs / (2 * np.pi))) inner_loop_sum += sum(wlevs) / (2 * np.pi) string_phases.append(inner_loop_sum) string_sum = sum(string_phases) LOGGER.debug( "time info: {} seconds to do all svds".format(time.time() - after_overlap_time)) for string, val in zip(strings, string_phases): LOGGER.info("{}, {}: {}".format(string[0].frac_coords[0], string[0].frac_coords[1], val)) LOGGER.info("average across strings: {}".format(string_sum / len(strings))) # Electronic part completed # Computing Ionic Part # NEEDS TESTING # local_pspdir = '/home/john/Documents/research/pseudo/oncvpsp/sr_0.4/pbe' #local_pspdir = '/home/john/Documents/research/pseudo/oncvpsp/sr_0.4/pbesol' local_pspdir = '/mnt/home/jbonini/psps/oncvpsp/nc-sr-03_lda_standard' onc_psp_table = PseudoTable.from_dir(local_pspdir, exts=('psp8', )) occ_fact = 2 if wfc0.nsppol == 1 else 1 final_pol = get_spont_pol(wfc0.structure, wfc1.structure, onc_psp_table, [0., 0., occ_fact * string_sum / len(strings)], overlaps.rspace_trans[0]) LOGGER.info("Final polarization = {} C/m^2".format(final_pol)) wfc0.close() wfc1.close() LOGGER.debug("time info: {} seconds total".format(time.time() - true_start_time))
class SingleAbinitGWWork:
    """
    GW flow for Abinit: builds a single G0W0 flow (SCF -> NSCF -> screening
    -> sigma) for one structure, either as a convergence study or as a final
    run with previously converged parameters.
    """
    # Plasmon-pole / response-function models supported for the screening.
    RESPONSE_MODELS = ["cd", "godby", "hybersten", "linden", "farid"]
    # Parameter ranges used in 'test' mode.
    TESTS = {'ecuteps': {'test_range': (10, 14), 'method': 'direct', 'control': "gap", 'level': "sigma"},
             'nscf_nbands': {'test_range': (30, 40), 'method': 'set_bands', 'control': "gap", 'level': "nscf"},
             'response_model': {'test_range': RESPONSE_MODELS, 'method': 'direct', 'control': 'gap',
                                'level': 'screening'}}
    # scf level test are run independently, the last value will be used in the nscf and sigma tests
    # 'test': {'test_range': (1, 2, 3), 'method': 'direct', 'control': "e_ks_max", 'level': "scf"},
    # Parameter ranges used in 'converge' mode.
    CONVS = {'ecut': {'test_range': (50, 48, 46, 44), 'method': 'direct', 'control': "e_ks_max", 'level': "scf"},
             'ecuteps': {'test_range': (4, 8, 12, 16, 20), 'method': 'direct', 'control': "gap", 'level': "sigma"},
             'nscf_nbands': {'test_range': (5, 10, 20, 30), 'method': 'set_bands', 'control': "gap",
                             'level': "nscf"}}

    def __init__(self, structure, spec, option=None):
        """
        Args:
            structure: structure object the GW work is built for.
            spec: specification object controlling the calculation.
            option: optional dict of converged parameters.
        """
        self.structure = structure
        self.spec = spec
        self.option = option
        self.bands_fac = 1
        self.tests = self.__class__.get_defaults_tests()
        self.convs = self.__class__.get_defaults_convs()
        self.response_models = self.__class__.get_response_models()
        # Fully converged when `option` provides one value per CONVS entry.
        if self.option is None:
            self.all_converged = False
        elif len(self.option) == len(self.convs):
            self.all_converged = True
        else:
            self.all_converged = False
        path_add = '.conv' if self.all_converged else ''
        self.work_dir = s_name(self.structure)+path_add
        try:
            abi_pseudo = os.environ['ABINIT_PS_EXT']
            abi_pseudo_dir = os.environ['ABINIT_PS']
        except KeyError:
            # NOTE(review): with the env vars unset these stay None and the
            # os.path.join below raises TypeError — confirm intended.
            abi_pseudo = None
            abi_pseudo_dir = None
        pseudos = []
        for element in self.structure.composition.element_composition:
            pseudo = os.path.join(abi_pseudo_dir, str(element) + abi_pseudo)
            pseudos.append(pseudo)
        self.pseudo_table = PseudoTable(pseudos)

    @classmethod
    def get_defaults_tests(cls):
        # Deep copy so callers can mutate the ranges without touching the
        # class-level defaults.
        return copy.deepcopy(cls.TESTS)

    @classmethod
    def get_defaults_convs(cls):
        return copy.deepcopy(cls.CONVS)

    @classmethod
    def get_response_models(cls):
        return copy.deepcopy(cls.RESPONSE_MODELS)

    def get_electrons(self, structure):
        """
        Method for retrieving the number of valence electrons
        """
        electrons = 0

        for element in structure.species:
            pseudo = self.pseudo_table.pseudo_with_symbol(element.symbol)
            electrons += pseudo.Z_val
        return electrons

    def get_bands(self, structure):
        """
        Method for retrieving the standard number of bands
        """
        # Two electrons per band (non-spin-polarized) plus one extra band
        # per atom — TODO confirm the intent of the +len(structure) buffer.
        bands = self.get_electrons(structure) / 2 + len(structure)
        return int(bands)

    def get_work_dir(self):
        # Per-test working directory: <name>_<test>_<value>; plain <name>
        # once everything is converged.
        name = s_name(self.structure)
        if not self.all_converged:
            return str(name)+'_'+str(self.option['test'])+'_'+str(self.option['value'])
        else:
            return str(name)

    def create(self):
        """
        create single abinit G0W0 flow
        """
        # manager = 'slurm' if 'ceci' in self.spec['mode'] else 'shell'
        # an AbiStructure object has an overwritten version of get_sorted_structure that sorts according to Z
        # this could also be pulled into the constructor of Abistructure
        # abi_structure = self.structure.get_sorted_structure()
        from abipy import abilab
        # Preserve the 'item' attribute across the class swap / re-sort.
        item = copy.copy(self.structure.item)
        self.structure.__class__ = abilab.Structure
        self.structure = self.structure.get_sorted_structure_z()
        self.structure.item = item
        abi_structure = self.structure
        manager = TaskManager.from_user_config()
        # Initialize the flow.
        flow = Flow(self.work_dir, manager, pickle_protocol=0)
        # flow = Flow(self.work_dir, manager)
        # kpoint grid defined over density 40 > ~ 3 3 3
        if self.spec['converge'] and not self.all_converged:
            # (2x2x2) gamma centered mesh for the convergence test on nbands and ecuteps
            # if kp_in is present in the specs a kp_in X kp_in x kp_in mesh is used for the convergence study
            if 'kp_in' in self.spec.data.keys():
                if self.spec['kp_in'] > 9:
                    print('WARNING:\nkp_in should be < 13 to generate an n x n x n mesh\nfor larger values a grid with '
                          'density kp_in will be generated')
                kppa = self.spec['kp_in']
            else:
                kppa = 2
        else:
            # use the specified density for the final calculation with the converged nbands and ecuteps of other
            # stand alone calculations
            kppa = self.spec['kp_grid_dens']
        gamma = True
        # 'standard' parameters for stand alone calculation
        scf_nband = self.get_bands(self.structure) + 20
        # additional bands to accommodate for nbdbuf and a bit extra
        nscf_nband = [10 * self.get_bands(self.structure)]
        nksmall = None
        ecuteps = [8]
        extra_abivars = dict()
        # read user defined extra abivars from file 'extra_abivars' should be dictionary
        extra_abivars.update(read_extra_abivars())
        # self.bands_fac = 0.5 if 'gwcomp' in extra_abivars.keys() else 1
        # self.convs['nscf_nbands']['test_range'] =
        # tuple([self.bands_fac*x for x in self.convs['nscf_nbands']['test_range']])
        ecut = extra_abivars.pop('ecut', 44)
        ecutsigx = extra_abivars.pop('ecutsigx', 44)
        # NOTE(review): "largen" is a typo for "larger" in this message.
        if ecutsigx > ecut:
            raise RuntimeError('ecutsigx can not be largen than ecut')
        if ecutsigx < max(ecuteps):
            raise RuntimeError('ecutsigx < ecuteps this is not realistic')
        response_models = ['godby']
        if 'ppmodel' in extra_abivars.keys():
            response_models = [extra_abivars.pop('ppmodel')]
        # Merge converged values from `option` into the abivars; ecuteps and
        # nscf_nbands are handled separately below.
        if self.option is not None:
            for k in self.option.keys():
                if k == 'ecut':
                    ecut = self.option[k]
                if k in ['ecuteps', 'nscf_nbands']:
                    pass
                else:
                    extra_abivars.update({k: self.option[k]})
        # Resume state (grid index / completion flag) from a previous run.
        try:
            grid = read_grid_from_file(s_name(self.structure)+".full_res")['grid']
            all_done = read_grid_from_file(s_name(self.structure)+".full_res")['all_done']
            workdir = os.path.join(s_name(self.structure), 'w'+str(grid))
        except (IOError, OSError):
            grid = 0
            all_done = False
            workdir = None
        if not all_done:
            if (self.spec['test'] or self.spec['converge']) and not self.all_converged:
                if self.spec['test']:
                    print('| setting test calculation')
                    tests = SingleAbinitGWWork(self.structure, self.spec).tests
                    response_models = []
                else:
                    if grid == 0:
                        print('| setting convergence calculations for grid 0')
                        # tests = SingleAbinitGWWorkFlow(self.structure, self.spec).convs
                        tests = self.convs
                    else:
                        print('| extending grid')
                        # tests = expand(SingleAbinitGWWorkFlow(self.structure, self.spec).convs, grid)
                        tests = expand(self.convs, grid)
                ecuteps = []
                nscf_nband = []
                for test in tests:
                    if tests[test]['level'] == 'scf':
                        if self.option is None:
                            extra_abivars.update({test + '_s': tests[test]['test_range']})
                        elif test in self.option:
                            extra_abivars.update({test: self.option[test]})
                        else:
                            extra_abivars.update({test + '_s': tests[test]['test_range']})
                    else:
                        for value in tests[test]['test_range']:
                            if test == 'nscf_nbands':
                                nscf_nband.append(value * self.get_bands(self.structure))
                                # scr_nband takes nscf_nbands if not specified
                                # sigma_nband takes scr_nbands if not specified
                            if test == 'ecuteps':
                                ecuteps.append(value)
                            if test == 'response_model':
                                response_models.append(value)
            elif self.all_converged:
                print('| setting up for testing the converged values at the high kp grid ')
                # add a bandstructure and dos calculation
                if os.path.isfile('bands'):
                    nksmall = -30
                    # negative value > only bandstructure
                else:
                    nksmall = 30
                # in this case a convergence study has already been performed.
                # The resulting parameters are passed as option
                # Use the converged value plus one extra grid step for each
                # of ecuteps and nscf_nbands.
                ecuteps = [self.option['ecuteps'], self.option['ecuteps'] +
                           self.convs['ecuteps']['test_range'][1] -
                           self.convs['ecuteps']['test_range'][0]]
                nscf_nband = [self.option['nscf_nbands'], self.option['nscf_nbands'] +
                              self.convs['nscf_nbands']['test_range'][1] -
                              self.convs['nscf_nbands']['test_range'][0]]
                # for option in self.option:
                #    if option not in ['ecuteps', 'nscf_nband']:
                #        extra_abivars.update({option + '_s': self.option[option]})
        else:
            print('| all is done for this material')
            return
        logger.info('ecuteps : %s ' % str(ecuteps))
        logger.info('extra : %s ' % str(extra_abivars))
        logger.info('nscf_nb : %s ' % str(nscf_nband))
        inputs = g0w0_convergence_inputs(abi_structure, self.pseudo_table, kppa, nscf_nband, ecuteps, ecutsigx,
                                         scf_nband, ecut, accuracy="normal", spin_mode="unpolarized", smearing=None,
                                         response_models=response_models, charge=0.0, sigma_nband=None,
                                         scr_nband=None, gamma=gamma, nksmall=nksmall, extra_abivars=extra_abivars)
        work = G0W0Work(scf_inputs=inputs[0], nscf_inputs=inputs[1], scr_inputs=inputs[2], sigma_inputs=inputs[3])
        # work = g0w0_extended_work(abi_structure, self.pseudo_table, kppa, nscf_nband, ecuteps, ecutsigx, scf_nband,
        # accuracy="normal", spin_mode="unpolarized", smearing=None, response_models=response_models,
        # charge=0.0, sigma_nband=None, scr_nband=None, gamma=gamma, nksmall=nksmall, **extra_abivars)
        print(workdir)
        flow.register_work(work, workdir=workdir)
        return flow.allocate()

    def create_job_file(self, serial=True):
        """
        Create the jobfile for starting all schedulers manually
        serial = True creates a list that can be submitted as job that runs all schedulers as a batch job
        (the job header needs to be added)
        serial = False creates a list that can be used to start all schedulers on the frontend in the background
        """
        # Append to the shared collection file; each work contributes its
        # own scheduler command.
        job_file = open("job_collection", mode='a')
        if serial:
            job_file.write(str('abirun.py ' + self.work_dir + ' scheduler > ' + self.work_dir + '.log\n'))
            # Remove the (large) screening file once the work is done.
            job_file.write(str('rm ' + self.work_dir + '/w*/t*/outdata/out_SCR\n'))
        else:
            job_file.write(str('nohup abirun.py ' + self.work_dir + ' scheduler > ' + self.work_dir + '.log & \n'))
            job_file.write(str('sleep 2\n'))
        job_file.close()
def pseudos(*filenames):
    """Return a PseudoTable constructed from the input filenames located in tests/data/pseudos."""
    return PseudoTable([pseudo(fname) for fname in filenames])
def scf_ph_inputs(structure, options):
    """
    Construct the input files for a phonon calculation:
    one GS (ground-state) input plus one DFPT input per q-point.

    Args:
        structure: structure object the inputs are built for.
        options: dict of ABINIT variables merged on top of the defaults.

    Returns:
        List of inputs (GS first, then one per q-point) from ``split_datasets``.

    Raises:
        KeyError: if the ABINIT_PS / ABINIT_PS_EXT environment variables
            are not set.
    """
    # Locate the pseudopotential files from the environment.
    abi_pseudo = os.environ['ABINIT_PS_EXT']
    abi_pseudo_dir = os.environ['ABINIT_PS']
    pseudos = []
    for element in structure.composition.element_composition:
        pseudos.append(os.path.join(abi_pseudo_dir, str(element) + abi_pseudo))
    pseudos = PseudoTable(pseudos)

    print('ks:\n', structure.calc_ksampling(4))

    # Include the k-point-path boundary points among the q-points.
    qptbounds = structure.calc_kptbounds()
    qptbounds = np.reshape(qptbounds, (-1, 3))

    # Default list of q-points for the phonon calculation (flat, 3 per row).
    # NOTE(review): the 2.50000000E+00 entry below looks like a typo for
    # 2.50000000E-01 — confirm against the original input deck.
    qpoints = [
        0.00000000E+00, 0.00000000E+00, 0.00000000E+00,
        2.50000000E-01, 0.00000000E+00, 0.00000000E+00,
        2.50000000E-01, 0.00000000E+00, 2.50000000E+00,
        5.00000000E-01, 0.00000000E+00, 0.00000000E+00,
        2.50000000E-01, 2.50000000E-01, 0.00000000E+00,
        5.00000000E-01, 2.50000000E-01, 0.00000000E+00,
        -2.50000000E-01, 2.50000000E-01, 0.00000000E+00,
        5.00000000E-01, 5.00000000E-01, 0.00000000E+00,
        0.00000000E+00, 0.00000000E+00, 2.50000000E-01,
        -2.50000000E-01, 5.00000000E-01, 2.50000000E-01,
    ]
    # (Removed an unused second q-point list that was never referenced.)

    qpoints = np.reshape(qpoints, (-1, 3))
    qpoints = unique_rows(np.concatenate((qpoints, qptbounds), axis=0))

    # A local file named 'qpoints' overrides the defaults entirely.
    if os.path.isfile('qpoints'):
        with open('qpoints', 'r') as f:
            qpoints = np.reshape(ast.literal_eval(f.read()), (-1, 3))

    # Global variables used both for the GS and the DFPT run.
    global_vars = dict(istwfk='*1',
                       ecut=16.0,
                       ngkpt=[8, 8, 8],
                       shiftk=[0, 0, 0],
                       paral_kgb=0,
                       nstep=200)

    global_vars.update(options)
    to_vecs(global_vars)

    # Dataset 1 is the GS run; datasets 2..N+1 are the phonon perturbations.
    inp = abilab.AbiInput(pseudos=pseudos, ndtset=1 + len(qpoints))
    inp.set_structure(structure)
    inp.set_variables(**global_vars)
    inp[1].set_variables(tolwfr=1.0e-18, prtden=1, paral_kgb=1)

    for i, qpt in enumerate(qpoints):
        # Response-function calculation for phonons.
        inp[i + 2].set_variables(
            tolvrs=1.0e-10,
            kptopt=3,
            iscf=5,
            rfphon=1,  # Will consider phonon-type perturbation
            nqpt=1,    # One wavevector is to be considered
            qpt=qpt,   # The wavevector of this dataset's perturbation
        )

        # rfatpol 1 1  # Only the first atom is displaced
        # rfdir 1 0 0  # Along the first reduced coordinate axis
        # kptopt 2     # Automatic generation of k points, taking

    # Split input into gs_inp and ph_inputs
    return inp.split_datasets()