def inner():
    inputs = get_top_level_insb_inputs()
    inputs['fp_run_workflow'] = QuantumEspressoFirstPrinciplesRun
    inputs['fp_run'] = get_qe_specific_fp_run_inputs()
    inputs['code_tbmodels'] = orm.Code.get_from_string('tbmodels')
    inputs['model_evaluation_workflow'] = BandDifferenceModelEvaluation
    inputs['model_evaluation'] = {
        'code_bands_inspect': orm.Code.get_from_string('bands_inspect')
    }
    inputs['wannier_parameters'] = orm.Dict(
        dict=dict(
            num_wann=14,
            num_bands=36,
            dis_num_iter=1000,
            num_iter=0,
            spinors=True,
        )
    )
    inputs['wannier_projections'] = orm.List(
        list=['In : s; px; py; pz', 'Sb : px; py; pz']
    )
    inputs['wannier'] = dict(
        code=code_wannier90, metadata=get_metadata_singlecore()
    )
    inputs['symmetries'] = orm.SinglefileData(
        file=str((test_data_dir / 'symmetries.hdf5').resolve())
    )
    inputs['slice_reference_bands'] = orm.List(list=list(range(12, 26)))
    inputs['slice_tb_model'] = orm.List(
        list=[0, 2, 3, 1, 5, 6, 4, 7, 9, 10, 8, 12, 13, 11]
    )
    return inputs
def test_validate_scale_factors(ctx):
    """Test the `validate_scale_factors` validator."""
    assert eos.validate_scale_factors(None, ctx) is None
    assert eos.validate_scale_factors(orm.List(list=[0.98, 1, 1.02]), ctx) is None

    assert eos.validate_scale_factors(
        orm.List(list=[0, 1]), ctx
    ) == 'need at least 3 scaling factors.'
def test_validate_distances(ctx):
    """Test the `validate_distances` validator."""
    assert dissociation.validate_distances(None, ctx) is None
    assert dissociation.validate_distances(orm.List(list=[0.98, 1, 1.02]), ctx) is None
    assert dissociation.validate_distances(orm.List(list=[0]), ctx) == 'need at least 2 distances.'

    assert dissociation.validate_distances(
        orm.List(list=[-1, -2, -2]), ctx
    ) == 'distances must be positive.'
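# A minimal sketch of the validator pattern the two tests above exercise: an
# AiiDA input validator receives the value and a port context, and returns
# ``None`` for valid input or an error string otherwise. The body below is an
# illustrative assumption inferred from the expected messages, not the
# project's actual implementation.
def _sketch_validate_distances(value, _ctx):
    if value is None:
        return None
    distances = value.get_list()
    if len(distances) < 2:
        return 'need at least 2 distances.'
    if any(distance <= 0 for distance in distances):
        return 'distances must be positive.'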
def test_both(configure_with_daemon):  # pylint: disable=unused-argument
    """
    Test setting both the attribute of a Dict and a plain input.
    """
    res, node = run_get_node(
        AddInputsWorkChain,
        sub_process=EchoDictValue,
        added_input_values=orm.List(list=[1., 2.]),
        added_input_keys=orm.List(list=['x', 'a:b.c'])
    )
    assert node.is_finished
    assert 'x' in res
    assert 'c' in res
    assert res['x'].value == 1
    assert res['c'].value == 2
def test_nested(configure_with_daemon):  # pylint: disable=unused-argument
    """
    Test setting more complicated nested inputs.
    """
    res, node = run_get_node(
        AddInputsWorkChain,
        sub_process=EchoNestedValues,
        added_input_values=orm.List(list=[1., 2.]),
        added_input_keys=orm.List(list=['x.y', 'a.b.c.d:e.f'])
    )
    assert node.is_finished
    assert 'y' in res
    assert 'f' in res
    assert res['y'].value == 1
    assert res['f'].value == 2
def test_basic(
    configure_with_daemon,  # pylint: disable=unused-argument
    echo_process  # pylint: disable=redefined-outer-name
):
    """
    Basic test, adding a single input in a List.
    """
    res, node = run_get_node(
        AddInputsWorkChain,
        sub_process=echo_process,
        added_input_values=orm.List(list=[1.]),
        added_input_keys=orm.List(list=['x']),
    )
    assert node.is_finished_ok
    assert 'result' in res
    assert res['result'].value == 1
def test_dict(configure_with_daemon):  # pylint: disable=unused-argument
    """
    Test setting an attribute of a nested Dict.
    """
    res, node = run_get_node(
        AddInputsWorkChain,
        sub_process=EchoDictValue,
        inputs={'x': orm.Float(1)},
        added_input_values=orm.List(list=[2.]),
        added_input_keys=orm.List(list=['a:b.c'])
    )
    assert node.is_finished
    assert 'x' in res
    assert 'c' in res
    assert res['x'].value == 1
    assert res['c'].value == 2
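# The ``added_input_keys`` strings used in the tests above follow a two-part
# convention: dots address (possibly nested) input ports, and an optional
# colon separates the port path from a path inside a ``Dict`` value, so
# 'a:b.c' means attribute 'b.c' of the Dict passed to port 'a'. A hedged
# sketch of that decomposition; the work chain's actual parsing may differ.
def _split_input_key(key):
    port_path, _, attr_path = key.partition(':')
    return port_path.split('.'), attr_path.split('.') if attr_path else []

assert _split_input_key('x') == (['x'], [])
assert _split_input_key('a:b.c') == (['a'], ['b', 'c'])
assert _split_input_key('a.b.c.d:e.f') == (['a', 'b', 'c', 'd'], ['e', 'f'])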
def test_vasp_hf_wannier_input(
    configure_with_daemon,  # pylint: disable=unused-argument
    assert_finished,
    get_insb_input  # pylint: disable=redefined-outer-name
):
    """
    Runs the workflow that calculates Wannier90 inputs from VASP + hybrids
    on InSb with a coarse grid.
    """
    from aiida import orm
    from aiida.engine import run_get_node

    from aiida_tbextraction.fp_run.wannier_input import VaspWannierInput

    kpoints_mesh = orm.KpointsData()
    kpoints_mesh.set_kpoints_mesh([2, 2, 2])

    wannier_projections = orm.List()
    wannier_projections.extend(['In : s; px; py; pz', 'Sb : px; py; pz'])

    result, node = run_get_node(
        VaspWannierInput,
        kpoints_mesh=kpoints_mesh,
        wannier_parameters=orm.Dict(
            dict=dict(num_wann=14, num_bands=36, spinors=True)
        ),
        wannier_projections=wannier_projections,
        **get_insb_input
    )
    assert node.is_finished_ok
    assert all(
        key in result
        for key in ['wannier_input_folder', 'wannier_parameters', 'wannier_bands']
    )
    folder_list = result['wannier_input_folder'].get_folder_list()
    assert all(
        filename in folder_list
        for filename in ['wannier90.amn', 'wannier90.mmn', 'wannier90.eig']
    )
def test_strained_fp_tb(
    configure_with_daemon,  # pylint: disable=unused-argument
    get_optimize_fp_tb_input,
):
    """
    Run the DFT tight-binding optimization workflow with strain on an InSb
    sample for three strain values.
    """
    from aiida.engine import run
    from aiida import orm

    from aiida_tbextraction.optimize_strained_fp_tb import OptimizeStrainedFirstPrinciplesTightBinding

    inputs = get_optimize_fp_tb_input()

    inputs['strain_kind'] = orm.Str('three_five.Biaxial001')
    inputs['strain_parameters'] = orm.Str('InSb')

    strain_list = [-0.1, 0, 0.1]
    inputs['strain_strengths'] = orm.List(list=strain_list)

    inputs['symmetry_repr_code'] = orm.Code.get_from_string('symmetry_repr')

    result = run(OptimizeStrainedFirstPrinciplesTightBinding, **inputs)
    print(result)
    for value in strain_list:
        # Output link labels cannot contain '.' or '-', so the strain value
        # is mangled into a suffix: e.g. -0.1 becomes '_m_0_dot_1'.
        suffix = '_{}'.format(value).replace('.', '_dot_').replace('-', 'm_')
        assert all(
            key + suffix in result
            for key in ['cost_value', 'tb_model', 'window']
        )
def run_tb(self):
    """
    Runs the workflow which creates the tight-binding model.
    """
    # check for wannier_settings from wannier_input workflow
    inputs = self.exposed_inputs(TightBindingCalculation)
    self.report(
        "Merging 'wannier_settings' from input and wannier_input workflow."
    )
    wannier_namespace_inputs = inputs.pop('wannier', {})

    # merge settings
    wannier_settings_explicit = wannier_namespace_inputs.pop(
        'settings', orm.Dict()
    )
    try:
        wannier_settings_from_wf = self.ctx.fp_run.outputs.wannier_settings
        wannier_namespace_inputs['settings'] = merge_nested_dict(
            dict_primary=wannier_settings_explicit,
            dict_secondary=wannier_settings_from_wf
        )
    except NotExistent:
        wannier_namespace_inputs['settings'] = wannier_settings_explicit

    # prefer wannier_projections from wannier_input workflow if it exists
    wannier_projections_explicit = wannier_namespace_inputs.pop(
        'projections', None
    )
    try:
        wannier_namespace_inputs['projections'] = self.ctx.fp_run.outputs.wannier_projections
    except NotExistent:
        wannier_namespace_inputs['projections'] = wannier_projections_explicit

    # get slice_idx for tight-binding calculation
    slice_idx = self.inputs.get('slice_tb_model', None)
    if slice_idx is not None:
        inputs['slice_idx'] = slice_idx

    # get automatic guess for windows if needed
    wannier_bands = self.ctx.fp_run.outputs.wannier_bands
    wannier_parameters = self.ctx.fp_run.outputs.wannier_parameters
    if self.inputs.guess_windows:
        wannier_parameters = add_initial_window_inline(
            wannier_parameters=wannier_parameters,
            wannier_bands=wannier_bands,
            slice_reference_bands=self.inputs.get(
                'slice_reference_bands',
                orm.List(list=list(range(wannier_bands.get_bands().shape[1])))
            )
        )[1]

    self.report("Starting TightBindingCalculation workflow.")
    return ToContext(
        tbextraction_calc=self.submit(
            TightBindingCalculation,
            wannier=dict(
                kpoints=wannier_bands,
                parameters=wannier_parameters,
                local_input_folder=self.ctx.fp_run.outputs.wannier_input_folder,
                **wannier_namespace_inputs
            ),
            **inputs
        )
    )
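# A hedged sketch of the ``merge_nested_dict`` semantics relied on above:
# entries from ``dict_primary`` (the explicit user input) win over
# ``dict_secondary`` (the workflow output) at every nesting level. The
# project's actual helper also handles ``orm.Dict`` wrapping; this plain-dict
# version is illustrative only.
def _merge_nested_dict_sketch(dict_primary, dict_secondary):
    merged = dict(dict_secondary)
    for key, value in dict_primary.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key] = _merge_nested_dict_sketch(value, merged[key])
        else:
            merged[key] = value
    return merged

assert _merge_nested_dict_sketch(
    {'a': {'x': 1}}, {'a': {'x': 0, 'y': 2}, 'b': 3}
) == {'a': {'x': 1, 'y': 2}, 'b': 3}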
def test_base_data_type_change(self, temp_dir):
    """
    Base Data type strings changed.
    Example: Bool: 'data.base.Bool.' -> 'data.bool.Bool.'
    """
    # Test content
    test_content = ('Hello', 6, -1.2399834e12, False)
    test_types = ()
    for node_type in ['str', 'int', 'float', 'bool']:
        add_type = ('data.{}.{}.'.format(node_type, node_type.capitalize()),)
        test_types += add_type

    # List of nodes to be exported
    export_nodes = []

    # Create list of base type nodes
    nodes = [
        cls(val).store()
        for val, cls in zip(test_content, (orm.Str, orm.Int, orm.Float, orm.Bool))
    ]
    export_nodes.extend(nodes)

    # Collect uuids for created nodes
    uuids = [n.uuid for n in nodes]

    # Create List() and insert already created nodes into it
    list_node = orm.List()
    list_node.set_list(nodes)
    list_node.store()
    list_node_uuid = list_node.uuid
    export_nodes.append(list_node)

    # Export nodes
    filename = os.path.join(temp_dir, 'export.aiida')
    export(export_nodes, filename=filename, silent=True)

    # Clean the database
    self.reset_database()

    # Import nodes again
    import_data(filename, silent=True)

    # Check whether types are correctly imported
    nlist = orm.load_node(list_node_uuid)  # List
    for uuid, list_value, refval, reftype in zip(uuids, nlist.get_list(), test_content, test_types):
        # Str, Int, Float, Bool
        base = orm.load_node(uuid)
        # Check value/content
        self.assertEqual(base.value, refval)
        # Check type
        msg = "type of node ('{}') is not updated according to db schema v0.4".format(base.node_type)
        self.assertEqual(base.node_type, reftype, msg=msg)

        # List
        # Check value
        self.assertEqual(list_value, refval)

    # Check List type
    msg = "type of node ('{}') is not updated according to db schema v0.4".format(nlist.node_type)
    self.assertEqual(nlist.node_type, 'data.list.List.', msg=msg)
def strain_inputs(configure, strain_kind, strain_parameters, sample):
    import pymatgen
    from aiida import orm

    structure = orm.StructureData()
    structure.set_pymatgen(pymatgen.Structure.from_file(sample('POSCAR')))
    return dict(
        structure=structure,
        strain_kind=orm.Str(strain_kind),
        strain_parameters=orm.Str(strain_parameters),
        strain_strengths=orm.List(list=[-0.2, -0.1, 0., 0.1, 0.2])
    )
def test_list_projections(  #pylint: disable=too-many-locals
    fixture_sandbox, generate_calc_job, generate_common_inputs_gaas, file_regression
):
    """Test a `Wannier90Calculation` where the projections are specified as a list."""
    inputs = generate_common_inputs_gaas(inputfolder_seedname='aiida')
    inputs['projections'] = orm.List(list=['random', 'Ga:s'])

    calc_info = generate_calc_job(
        folder=fixture_sandbox, entry_point_name=ENTRY_POINT_NAME, inputs=inputs
    )

    cmdline_params = ['aiida']
    local_copy_list = [
        (val, val) for val in (
            'UNK00001.1', 'UNK00002.1', 'UNK00003.1', 'UNK00004.1',
            'UNK00005.1', 'UNK00006.1', 'UNK00007.1', 'UNK00008.1',
            'aiida.mmn', 'aiida.amn'
        )
    ]
    retrieve_list = [
        "aiida" + suffix for suffix in (
            '.wout', '.werr', '.r2mn', '_band.dat', '_band.agr', '_band.kpt',
            '.bxsf', '_w.xsf', '_w.cube', '_centres.xyz', '_hr.dat',
            '_tb.dat', '_r.dat', '.bvec', '_wsvec.dat', '_qc.dat',
            '_dos.dat', '_htB.dat', '_u.mat', '_u_dis.mat', '.vdw',
            '_band_proj.dat', '_band.labelinfo.dat'
        )
    ]
    retrieve_temporary_list = []

    # Check the attributes of the returned `CalcInfo`
    assert isinstance(calc_info, datastructures.CalcInfo)
    code_info = calc_info.codes_info[0]
    assert code_info.cmdline_params == cmdline_params
    # ignore UUID - keep only second and third entry
    local_copy_res = [tup[1:] for tup in calc_info.local_copy_list]
    assert sorted(local_copy_res) == sorted(local_copy_list)
    assert sorted(calc_info.retrieve_list) == sorted(retrieve_list)
    assert sorted(calc_info.retrieve_temporary_list) == sorted(retrieve_temporary_list)
    assert calc_info.remote_symlink_list == []

    with fixture_sandbox.open('aiida.win') as handle:
        input_written = handle.read()

    # Checks on the files written to the sandbox folder as raw input
    assert fixture_sandbox.get_content_list() == ['aiida.win']
    file_regression.check(input_written, encoding='utf-8', extension='.win')
def get_initial_window_inline(wannier_bands, slice_reference_bands):
    """
    InlineCalculation which returns the automatic guess for the window
    based on the Wannier bands.

    Arguments
    ---------
    wannier_bands : aiida.orm.data.array.bands.BandsData
        Bands calculated for the Wannier run.
    slice_reference_bands : aiida.orm.data.list.List
        Indices of the reference bands which should be considered.
    """
    return orm.List(
        list=guess_window(
            wannier_bands=wannier_bands,
            slice_reference_bands=slice_reference_bands
        )
    )
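# The window produced here is consumed as a four-element list; elsewhere in
# this collection the initial_window values [-4.5, -4, 6.5, 16] line up with
# Wannier90's dis_win_min, dis_froz_min, dis_froz_max and dis_win_max
# parameters (compare the WindowSearch builder with the tight-binding test).
# A sketch of the ordering such a window must satisfy; this is an assumption
# inferred from those values, not the project's actual validity check.
def _window_is_ordered(window):
    dis_win_min, dis_froz_min, dis_froz_max, dis_win_max = window
    return dis_win_min <= dis_froz_min <= dis_froz_max <= dis_win_max

assert _window_is_ordered([-4.5, -4, 6.5, 16])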
def test_get_scale_factors(generate_workchain, generate_eos_inputs, scaling_inputs, expected):
    """Test the ``EquationOfStateWorkChain.get_scale_factors`` method."""
    inputs = generate_eos_inputs()

    # This conditional and conversion is necessary because for `aiida-core<2.0` the `list` type is not automatically
    # serialized to a `List` node. Once we require `aiida-core>=2.0`, this can be removed. The reason we couldn't
    # already simply turn the ``scaling_inputs`` into a ``orm.List`` is that during the parametrization done by pytest
    # no AiiDA profile will have been loaded yet and so creating a node will raise an exception.
    if 'scale_factors' in scaling_inputs and isinstance(scaling_inputs['scale_factors'], list):
        scaling_inputs['scale_factors'] = orm.List(list=scaling_inputs['scale_factors'])

    inputs.update(scaling_inputs)
    process = generate_workchain('common_workflows.eos', inputs)
    assert process.get_scale_factors() == expected
def define(cls, spec):
    # yapf: disable
    super(EnumCalculation, cls).define(spec)
    spec.input('metadata.options.resources', valid_type=dict,
               default={'num_machines': 1, 'num_mpiprocs_per_machine': 1}, non_db=True)
    spec.input('metadata.options.parser_name', valid_type=str, default='ce.genenum', non_db=True)
    spec.input('metadata.options.input_filename', valid_type=str, default='aiida.json', non_db=True)
    spec.input('metadata.options.output_filename', valid_type=str, default='aiida.out', non_db=True)
    spec.input('structure', valid_type=orm.StructureData,
               help='prototype structure to expand')
    spec.input('pbc', valid_type=orm.List, default=orm.List(list=[True, True, True]))
    spec.input('chemical_symbols', valid_type=orm.List,
               help='A list of N elements, one per site, where each element lists the possible chemical symbols of that site.')
    spec.input('min_volume', valid_type=orm.Int, default=orm.Int(1))
    spec.input('max_volume', valid_type=orm.Int, default=orm.Int(1),
               help='If None, no hnf cells are considered.')
    spec.input('concentration_restrictions', required=False, valid_type=orm.Dict,
               help='Dict indicating the allowed concentration of each element.')
    spec.output('enumerate_structures', valid_type=StructureSet,
                help='The set of enumerated structures produced by the process.')
    spec.output('number_of_structures', valid_type=orm.Int,
                help='Number of enumerated structures.')
    spec.exit_code(100, 'ERROR_MISSING_OUTPUT_FILES',
                   message='Calculation did not produce all expected output files.')
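# Hedged usage sketch for the spec above (requires a loaded AiiDA profile; the
# code label, structure and site symbols are illustrative assumptions, not
# values taken from this project).
from aiida import orm
from aiida.engine import submit

def submit_enum_example(structure):
    builder = EnumCalculation.get_builder()
    builder.code = orm.load_code('genenum@localhost')  # assumed code label
    builder.structure = structure  # an existing orm.StructureData
    # One entry per site; each entry lists the species allowed on that site.
    builder.chemical_symbols = orm.List(list=[['Au', 'Cu'], ['Au', 'Cu']])
    builder.max_volume = orm.Int(4)
    return submit(builder)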
def inner():
    kpoints_mesh = orm.KpointsData()
    kpoints_mesh.set_kpoints_mesh([2, 2, 2])

    kpoints = orm.KpointsData()
    kpoints.set_kpoints(
        [[x, x, x] for x in np.linspace(0, 0.5, 6)],
        labels=((0, 'G'), (5, 'M'))
    )

    return {
        'structure': insb_structure,
        'kpoints_mesh': kpoints_mesh,
        'kpoints': kpoints,
        'wannier_parameters': orm.Dict(
            dict=dict(
                num_wann=14,
                num_bands=36,
                spinors=True,
                dis_num_iter=1000,
                num_iter=0
            )
        ),
        'wannier_projections': orm.List(
            list=['In : s; px; py; pz', 'Sb : px; py; pz']
        ),
    }
def _construct_builder(self, **kwargs) -> engine.ProcessBuilder:
    """Construct a process builder based on the provided keyword arguments.

    The keyword arguments will have been validated against the input generator specification.
    """
    # pylint: disable=too-many-branches,too-many-statements,too-many-locals
    structure = kwargs['structure']
    engines = kwargs['engines']
    protocol = kwargs['protocol']
    spin_type = kwargs['spin_type']
    relax_type = kwargs['relax_type']
    electronic_type = kwargs['electronic_type']
    magnetization_per_site = kwargs.get('magnetization_per_site', None)
    threshold_forces = kwargs.get('threshold_forces', None)
    reference_workchain = kwargs.get('reference_workchain', None)

    builder = self.process_class.get_builder()

    if relax_type == RelaxType.POSITIONS:
        relaxation_schema = 'relax'
    elif relax_type == RelaxType.NONE:
        relaxation_schema = 'relax'
        builder.relax.perform = orm.Bool(False)
    else:
        raise ValueError('relaxation type `{}` is not supported'.format(relax_type.value))

    pymatgen_struct = structure.get_pymatgen()
    ortho_dict = None
    if pymatgen_struct.ntypesp <= 1:
        # pass the structure through a transform to generate orthorhombic structure if possible/needed.
        new = ortho_struct(structure)
        newstruct = new.get('outstruct')
        ortho_dict = new.get('outdict')
        newstruct.store()
        builder.structure = newstruct
    else:
        builder.structure = structure

    # for now apply simple stupid heuristic : atoms < 200 -> cubic, else -> linear.
    import copy
    if len(builder.structure.sites) <= 200:
        inputdict = copy.deepcopy(self.get_protocol(protocol)['inputdict_cubic'])
    else:
        inputdict = copy.deepcopy(self.get_protocol(protocol)['inputdict_linear'])

    # adapt hgrid to the strain
    if reference_workchain is not None and reference_workchain.is_finished_ok:
        logfile = reference_workchain.outputs.bigdft_logfile.logfile
        if isinstance(logfile, list):
            hgrids = logfile[0].get('dft').get('hgrids')
        else:
            hgrids = logfile.get('dft').get('hgrids')
        inputdict['dft']['hgrids'] = hgrids[0] * builder.structure.cell_lengths[0] / \
            reference_workchain.inputs.structure.cell_lengths[0]

    # Soon : Use inputActions
    if electronic_type is ElectronicType.METAL:
        if 'mix' not in inputdict:
            inputdict['mix'] = {}
        inputdict['mix'].update({
            'iscf': 17,
            'itrpmax': 200,
            'rpnrm_cv': 1.E-12,
            'norbsempty': 120,
            'tel': 0.01,
            'alphamix': 0.8,
            'alphadiis': 1.0
        })
    if spin_type is SpinType.NONE:
        inputdict['dft'].update({'nspin': 1})
    elif spin_type is SpinType.COLLINEAR:
        inputdict['dft'].update({'nspin': 2})

    psp = []
    if ortho_dict is not None:
        inputdict = BigDFTParameters.set_inputfile(
            inputdict['dft']['hgrids'], ortho_dict, inputdict, psp=psp, units='angstroem')
    else:
        # use HGH pseudopotentials instead of default ones from BigDFT, if the user does not specify new ones.
        # This may be moved to the plugin if we decide to make it the default behavior.
        for elem in pymatgen_struct.types_of_specie:
            BigDFTParameters.set_psp(elem.name, psp)
        inputdict['kpt'] = BigDFTParameters.set_kpoints(len(builder.structure.sites))
        if pymatgen_struct.ntypesp <= 1:
            inputdict['dft'].update(
                BigDFTParameters.set_spin(builder.structure.sites[0].kind_name,
                                          len(builder.structure.sites)))
    if magnetization_per_site:
        for (i, atom) in enumerate(inputdict['posinp']['positions']):
            atom['IGSpin'] = int(magnetization_per_site[i])

    # correctly set kpoints from protocol fast and moderate. If precise, use
    # the ones from set_inputfile/set_kpt
    if self.get_protocol(protocol).get('kpoints_distance'):
        inputdict['kpt'] = {
            'method': 'auto',
            'kptrlen': self.get_protocol(protocol).get('kpoints_distance')
        }

    if psp:
        import os
        builder.pseudos = orm.List()
        psprel = [os.path.normpath(os.path.relpath(i)) for i in psp]
        builder.pseudos.extend(psprel)

    builder.parameters = BigDFTParameters(dict=inputdict)
    builder.code = orm.load_code(engines[relaxation_schema]['code'])
    run_opts = {'options': engines[relaxation_schema]['options']}
    builder.run_opts = orm.Dict(dict=run_opts)

    if threshold_forces is not None:
        builder.relax.threshold_forces = orm.Float(threshold_forces)

    return builder
def create_builder():
    builder = OptimizeFirstPrinciplesTightBinding.get_builder()

    # Add the input structure
    builder.structure = orm.StructureData()
    builder.structure.set_ase(read_vasp('inputs/POSCAR'))

    # Specify that QuantumEspressoFirstPrinciplesRun should be used to run the first-principles calculations
    builder.fp_run_workflow = QuantumEspressoFirstPrinciplesRun

    # Set the inputs for the QuantumEspressoFirstPrinciplesRun workflow
    common_qe_parameters = orm.Dict(
        dict=dict(
            CONTROL=dict(etot_conv_thr=1e-3),
            SYSTEM=dict(noncolin=True, lspinorb=True, nbnd=36, ecutwfc=30),
        )
    )

    pseudo_dir = pathlib.Path(__file__).parent.absolute() / 'inputs' / 'pseudos'
    pseudos = {
        'In':
        orm.UpfData(file=str(pseudo_dir / 'In.rel-pbe-dn-kjpaw_psl.1.0.0.UPF')),
        'Sb':
        orm.UpfData(file=str(pseudo_dir / 'Sb.rel-pbe-n-kjpaw_psl.1.0.0.UPF'))
    }

    # We use the same general Quantum ESPRESSO parameters for the
    # scf, nscf, and bands calculations. The calculation type and
    # k-points will be set by the workflow.
    repeated_pw_inputs = {
        'pseudos': pseudos,
        'parameters': common_qe_parameters,
        'metadata': METADATA_PW,
        'code': CODE_PW
    }

    builder.fp_run = {
        'scf': repeated_pw_inputs,
        'bands': {
            'pw': repeated_pw_inputs
        },
        'to_wannier': {
            'nscf': repeated_pw_inputs,
            'pw2wannier': {
                'code': CODE_PW2WANNIER,
                'metadata': METADATA_PW2WANNIER
            },
            'wannier': {
                'code': CODE_WANNIER,
                'metadata': METADATA_WANNIER
            }
        }
    }

    # Setting the k-points for the reference bandstructure
    kpoints_list = []
    kvals = np.linspace(0, 0.5, 20, endpoint=False)
    kvals_rev = np.linspace(0.5, 0, 20, endpoint=False)
    for k in kvals_rev:
        kpoints_list.append((k, k, 0))  # Z to Gamma
    for k in kvals:
        kpoints_list.append((0, k, k))  # Gamma to X
    for k in kvals:
        kpoints_list.append((k, 0.5, 0.5))  # X to L
    for k in kvals_rev:
        kpoints_list.append((k, k, k))  # L to Gamma
    for k in np.linspace(0, 0.375, 21, endpoint=True):
        kpoints_list.append((k, k, 2 * k))  # Gamma to K
    builder.kpoints = orm.KpointsData()
    builder.kpoints.set_kpoints(
        kpoints_list,
        labels=[(i * 20, label)
                for i, label in enumerate(['Z', 'G', 'X', 'L', 'G', 'K'])]
    )

    # Setting the k-points mesh used to run the SCF and Wannier calculations
    builder.kpoints_mesh = orm.KpointsData()
    builder.kpoints_mesh.set_kpoints_mesh([6, 6, 6])

    builder.wannier.code = CODE_WANNIER
    builder.code_tbmodels = orm.Code.get_from_string('tbmodels')

    # Setting the workflow to evaluate the tight-binding models
    builder.model_evaluation_workflow = BandDifferenceModelEvaluation
    # Setting the additional inputs for the model evaluation workflow
    builder.model_evaluation = dict(
        code_bands_inspect=orm.Code.get_from_string('bands_inspect')
    )

    # Set the initial energy window value
    builder.initial_window = orm.List(list=[-1, 3, 10, 18])

    # Tolerance for the energy window.
    builder.window_tol = orm.Float(1.5)
    # Tolerance for the 'cost_value'.
    builder.cost_tol = orm.Float(0.3)
    # The tolerances are set higher than might be appropriate for a 'production'
    # run to make the example run more quickly.

    # Setting the parameters for Wannier90
    builder.wannier_parameters = orm.Dict(
        dict=dict(
            num_wann=14,
            num_bands=36,
            dis_num_iter=100,
            num_iter=0,
            spinors=True,
            # exclude_bands=range(1, )
        )
    )
    # Choose the Wannier90 trial orbitals
    builder.wannier_projections = orm.List(
        list=['In : s; pz; px; py', 'Sb : pz; px; py']
    )
    # Set the resource requirements for the Wannier90 run
    builder.wannier.metadata = METADATA_WANNIER

    # Set the symmetry file
    builder.symmetries = orm.SinglefileData(
        file=os.path.abspath('inputs/symmetries.hdf5')
    )

    # Pick the relevant bands from the reference calculation
    builder.slice_reference_bands = orm.List(list=list(range(12, 26)))

    return builder
def window_search_builder(test_data_dir, code_wannier90):  # pylint: disable=too-many-locals,useless-suppression
    """
    Sets up the process builder for window_search tests, and adds the inputs.
    """
    builder = WindowSearch.get_builder()

    input_folder = orm.FolderData()
    input_folder_path = test_data_dir / 'wannier_input_folder'
    for filename in os.listdir(input_folder_path):
        input_folder.put_object_from_file(
            path=str((input_folder_path / filename).resolve()), key=filename
        )
    builder.wannier.local_input_folder = input_folder

    builder.wannier.code = code_wannier90
    builder.code_tbmodels = orm.Code.get_from_string('tbmodels')

    builder.model_evaluation_workflow = BandDifferenceModelEvaluation
    # print(builder.model_evaluation.dynamic)
    builder.model_evaluation = {
        'code_bands_inspect': orm.Code.get_from_string('bands_inspect'),
    }
    builder.reference_bands = read(test_data_dir / 'bands.hdf5')

    initial_window = orm.List()
    initial_window.extend([-4.5, -4, 6.5, 16])
    builder.initial_window = initial_window
    builder.window_tol = orm.Float(1.5)

    a = 3.2395  # pylint: disable=invalid-name
    structure = orm.StructureData()
    structure.set_pymatgen_structure(
        pymatgen.Structure(
            lattice=[[0, a, a], [a, 0, a], [a, a, 0]],
            species=['In', 'Sb'],
            coords=[[0] * 3, [0.25] * 3]
        )
    )
    builder.structure = structure
    wannier_parameters = orm.Dict(
        dict=dict(
            num_wann=14,
            num_bands=36,
            dis_num_iter=1000,
            num_iter=0,
            spinors=True,
            mp_grid=[6, 6, 6],
        )
    )
    builder.wannier.parameters = wannier_parameters
    builder.wannier.metadata.options = {
        'resources': {
            'num_machines': 1,
            'tot_num_mpiprocs': 1
        },
        'withmpi': False
    }
    builder.symmetries = orm.SinglefileData(
        file=str((test_data_dir / 'symmetries.hdf5').resolve())
    )
    slice_idx = orm.List()
    slice_idx.extend([0, 2, 3, 1, 5, 6, 4, 7, 9, 10, 8, 12, 13, 11])
    builder.slice_idx = slice_idx

    k_values = [
        x if x <= 0.5 else -1 + x
        for x in np.linspace(0, 1, 6, endpoint=False)
    ]
    k_points = [
        list(reversed(k)) for k in itertools.product(k_values, repeat=3)
    ]
    wannier_bands = orm.BandsData()
    wannier_bands.set_kpoints(k_points)
    # Just let every energy window be valid.
    wannier_bands.set_bands(np.array([[0] * 14] * len(k_points)))
    builder.wannier_bands = wannier_bands
    return builder
def _to_input_list(self, x):
    input_list = orm.List()
    input_list.extend(x)
    return {self.input_key: input_list}
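# A hedged standalone sketch of the helper above, with ``input_key`` passed
# explicitly ('added_input_values' is an illustrative key). Note that creating
# the ``orm.List`` node requires a loaded AiiDA profile.
from aiida import orm

def to_input_list(x, input_key):
    # Wrap a plain iterable in an (unstored) List node, keyed under the
    # configured input name.
    input_list = orm.List()
    input_list.extend(x)
    return {input_key: input_list}

# to_input_list([1., 2.], 'added_input_values')
# -> {'added_input_values': <List: [1.0, 2.0]>}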
def get_builder(self,
                structure: StructureData,
                calc_engines: Dict[str, Any],
                *,
                protocol: str = None,
                relax_type: RelaxType = RelaxType.ATOMS,
                electronic_type: ElectronicType = ElectronicType.METAL,
                spin_type: SpinType = SpinType.NONE,
                magnetization_per_site: List[float] = None,
                threshold_forces: float = None,
                threshold_stress: float = None,
                previous_workchain=None,
                **kwargs) -> engine.ProcessBuilder:
    """Return a process builder for the corresponding workchain class with inputs set according to the protocol.

    :param structure: the structure to be relaxed.
    :param calc_engines: a dictionary containing the computational resources for the relaxation.
    :param protocol: the protocol to use when determining the workchain inputs.
    :param relax_type: the type of relaxation to perform.
    :param electronic_type: the electronic character that is to be used for the structure.
    :param spin_type: the spin polarization type to use for the calculation.
    :param magnetization_per_site: a list with the initial spin polarization for each site. Float or integer in
        units of electrons. If not defined, the builder will automatically define the initial magnetization if and
        only if `spin_type != SpinType.NONE`.
    :param threshold_forces: target threshold for the forces in eV/Å.
    :param threshold_stress: target threshold for the stress in eV/Å^3.
    :param previous_workchain: a <Code>RelaxWorkChain node.
    :param kwargs: any inputs that are specific to the plugin.
    :return: an `aiida.engine.processes.ProcessBuilder` instance ready to be submitted.
    """
    # pylint: disable=too-many-locals, too-many-branches, too-many-statements
    protocol = protocol or self.get_default_protocol_name()
    super().get_builder(structure,
                        calc_engines,
                        protocol=protocol,
                        relax_type=relax_type,
                        electronic_type=electronic_type,
                        spin_type=spin_type,
                        magnetization_per_site=magnetization_per_site,
                        threshold_forces=threshold_forces,
                        threshold_stress=threshold_stress,
                        previous_workchain=previous_workchain,
                        **kwargs)

    builder = self.process_class.get_builder()

    if relax_type == RelaxType.ATOMS:
        relaxation_schema = 'relax'
    elif relax_type == RelaxType.NONE:
        relaxation_schema = 'relax'
        builder.relax.perform = orm.Bool(False)
    else:
        raise ValueError('relaxation type `{}` is not supported'.format(relax_type.value))

    pymatgen_struct = structure.get_pymatgen()
    ortho_dict = None
    if pymatgen_struct.ntypesp <= 1:
        # pass the structure through a transform to generate orthorhombic structure if possible/needed.
        new = ortho_struct(structure)
        newstruct = new.get('outstruct')
        ortho_dict = new.get('outdict')
        newstruct.store()
        builder.structure = newstruct
    else:
        builder.structure = structure

    # for now apply simple stupid heuristic : atoms < 200 -> cubic, else -> linear.
    import copy
    if len(builder.structure.sites) <= 200:
        inputdict = copy.deepcopy(self.get_protocol(protocol)['inputdict_cubic'])
    else:
        inputdict = copy.deepcopy(self.get_protocol(protocol)['inputdict_linear'])

    # adapt hgrid to the strain
    if previous_workchain is not None and previous_workchain.is_finished_ok:
        logfile = previous_workchain.outputs.bigdft_logfile.logfile
        if isinstance(logfile, list):
            hgrids = logfile[0].get('dft').get('hgrids')
        else:
            hgrids = logfile.get('dft').get('hgrids')
        inputdict['dft']['hgrids'] = hgrids[0] * builder.structure.cell_lengths[0] / \
            previous_workchain.inputs.structure.cell_lengths[0]

    # Soon : Use inputActions
    if electronic_type is ElectronicType.METAL:
        if 'mix' not in inputdict:
            inputdict['mix'] = {}
        inputdict['mix'].update({
            'iscf': 17,
            'itrpmax': 200,
            'rpnrm_cv': 1.E-12,
            'norbsempty': 120,
            'tel': 0.01,
            'alphamix': 0.8,
            'alphadiis': 1.0
        })
    if spin_type is SpinType.NONE:
        inputdict['dft'].update({'nspin': 1})
    elif spin_type is SpinType.COLLINEAR:
        inputdict['dft'].update({'nspin': 2})

    psp = []
    if ortho_dict is not None:
        inputdict = BigDFTParameters.set_inputfile(
            inputdict['dft']['hgrids'], ortho_dict, inputdict, psp=psp, units='angstroem')
    else:
        # use HGH pseudopotentials instead of default ones from BigDFT, if the user does not specify new ones.
        # This may be moved to the plugin if we decide to make it the default behavior.
        for elem in pymatgen_struct.types_of_specie:
            BigDFTParameters.set_psp(elem.name, psp)
        inputdict['kpt'] = BigDFTParameters.set_kpoints(len(builder.structure.sites))
        if pymatgen_struct.ntypesp <= 1:
            inputdict['dft'].update(
                BigDFTParameters.set_spin(builder.structure.sites[0].kind_name,
                                          len(builder.structure.sites)))
    if magnetization_per_site:
        for (i, atom) in enumerate(inputdict['posinp']['positions']):
            atom['IGSpin'] = int(magnetization_per_site[i])

    if psp:
        import os
        builder.pseudos = orm.List()
        psprel = [os.path.relpath(i) for i in psp]
        builder.pseudos.extend(psprel)

    builder.parameters = BigDFTParameters(dict=inputdict)
    builder.code = orm.load_code(calc_engines[relaxation_schema]['code'])
    run_opts = {'options': calc_engines[relaxation_schema]['options']}
    builder.run_opts = orm.Dict(dict=run_opts)

    if threshold_forces is not None:
        builder.relax.threshold_forces = orm.Float(threshold_forces)

    return builder
def inner(window_values, slice_, symmetries):
    builder = RunWindow.get_builder()

    input_folder = orm.FolderData()
    input_folder_path = test_data_dir / 'wannier_input_folder'
    for filename in os.listdir(input_folder_path):
        input_folder.put_object_from_file(
            path=str((input_folder_path / filename).resolve()), key=filename
        )
    builder.wannier.local_input_folder = input_folder
    builder.wannier.code = code_wannier90
    builder.code_tbmodels = orm.Code.get_from_string('tbmodels')

    builder.model_evaluation_workflow = BandDifferenceModelEvaluation
    builder.reference_bands = read(test_data_dir / 'bands.hdf5')
    builder.model_evaluation = {
        'code_bands_inspect': orm.Code.get_from_string('bands_inspect'),
    }
    window = orm.List(list=window_values)
    builder.window = window

    k_values = [
        x if x <= 0.5 else -1 + x
        for x in np.linspace(0, 1, 6, endpoint=False)
    ]
    k_points = [
        list(reversed(k)) for k in itertools.product(k_values, repeat=3)
    ]
    wannier_kpoints = orm.KpointsData()
    wannier_kpoints.set_kpoints(k_points)
    builder.wannier.kpoints = wannier_kpoints

    wannier_bands = orm.BandsData()
    wannier_bands.set_kpoints(k_points)
    # Just let every energy window be valid.
    wannier_bands.set_bands(
        np.array([[-20] * 10 + [-0.5] * 7 + [0.5] * 7 + [20] * 12] * len(k_points))
    )
    builder.wannier_bands = wannier_bands

    a = 3.2395  # pylint: disable=invalid-name
    structure = orm.StructureData()
    structure.set_pymatgen_structure(
        pymatgen.Structure(
            lattice=[[0, a, a], [a, 0, a], [a, a, 0]],
            species=['In', 'Sb'],
            coords=[[0] * 3, [0.25] * 3]
        )
    )
    builder.structure = structure
    wannier_parameters = orm.Dict(
        dict=dict(
            num_wann=14,
            num_bands=36,
            dis_num_iter=1000,
            num_iter=0,
            spinors=True,
            mp_grid=[6, 6, 6],
        )
    )
    builder.wannier.parameters = wannier_parameters
    builder.wannier.metadata.options = {
        'resources': {
            'num_machines': 1,
            'tot_num_mpiprocs': 1
        },
        'withmpi': False
    }
    if symmetries:
        builder.symmetries = orm.SinglefileData(
            file=str(test_data_dir / 'symmetries.hdf5')
        )
    if slice_:
        slice_idx = orm.List()
        slice_idx.extend([0, 2, 3, 1, 5, 6, 4, 7, 9, 10, 8, 12, 13, 11])
        builder.slice_idx = slice_idx
    return builder
def test_tbextraction(
    configure_with_daemon, test_data_dir, slice_, symmetries, code_wannier90
):  # pylint: disable=unused-argument
    """
    Run the tight-binding calculation workflow, optionally including
    symmetrization and slicing of orbitals.
    """
    builder = TightBindingCalculation.get_builder()

    wannier_input_folder = orm.FolderData()
    wannier_input_folder_path = test_data_dir / 'wannier_input_folder'
    for filename in os.listdir(wannier_input_folder_path):
        wannier_input_folder.put_object_from_file(
            path=str((wannier_input_folder_path / filename).resolve()),
            key=filename
        )
    builder.wannier.local_input_folder = wannier_input_folder
    builder.wannier.code = code_wannier90
    builder.code_tbmodels = orm.Code.get_from_string('tbmodels')

    k_values = [
        x if x <= 0.5 else -1 + x
        for x in np.linspace(0, 1, 6, endpoint=False)
    ]
    k_points = [
        list(reversed(k)) for k in itertools.product(k_values, repeat=3)
    ]
    wannier_kpoints = orm.KpointsData()
    wannier_kpoints.set_kpoints(k_points)
    builder.wannier.kpoints = wannier_kpoints

    a = 3.2395  # pylint: disable=invalid-name
    structure = orm.StructureData()
    structure.set_pymatgen_structure(
        pymatgen.Structure(
            lattice=[[0, a, a], [a, 0, a], [a, a, 0]],
            species=['In', 'Sb'],
            coords=[[0] * 3, [0.25] * 3]
        )
    )
    builder.structure = structure
    builder.wannier.parameters = orm.Dict(
        dict=dict(
            num_wann=14,
            num_bands=36,
            dis_num_iter=1000,
            num_iter=0,
            dis_win_min=-4.5,
            dis_win_max=16.,
            dis_froz_min=-4,
            dis_froz_max=6.5,
            spinors=True,
            mp_grid=[6, 6, 6]
        )
    )
    builder.wannier.metadata.options = {
        'resources': {
            'num_machines': 1,
            'tot_num_mpiprocs': 1
        },
        'withmpi': False
    }
    if symmetries:
        builder.symmetries = orm.SinglefileData(
            file=str(test_data_dir / 'symmetries.hdf5')
        )
    if slice_:
        slice_idx = orm.List()
        slice_idx.extend([0, 2, 3, 1, 5, 6, 4, 7, 9, 10, 8, 12, 13, 11])
        builder.slice_idx = slice_idx

    result, node = run_get_node(builder)
    assert node.is_finished_ok
    assert 'tb_model' in result
def run_window_search(self):
    """
    Runs the workflow which creates the optimized tight-binding model.
    """
    # check for wannier_settings from wannier_input workflow
    inputs = self.exposed_inputs(WindowSearch)
    self.report(
        "Merging 'wannier_settings' from input and wannier_input workflow."
    )
    fp_run_outputs = self.ctx.fp_run.outputs
    wannier_namespace_inputs = inputs.pop('wannier', {})

    with contextlib.suppress(NotExistent, KeyError):
        wannier_settings_explicit = wannier_namespace_inputs['settings']
        wannier_settings_from_wf = fp_run_outputs.wannier_settings
        wannier_namespace_inputs['settings'] = merge_nested_dict(
            dict_primary=wannier_settings_explicit,
            dict_secondary=wannier_settings_from_wf
        )

    with contextlib.suppress(NotExistent):
        wannier_namespace_inputs['projections'] = fp_run_outputs.wannier_projections

    wannier_namespace_inputs['parameters'] = fp_run_outputs.wannier_parameters
    wannier_namespace_inputs['local_input_folder'] = fp_run_outputs.wannier_input_folder

    # slice reference bands if necessary
    reference_bands = fp_run_outputs.bands
    slice_reference_bands = self.inputs.get('slice_reference_bands', None)
    if slice_reference_bands is not None:
        reference_bands = slice_bands_inline(
            bands=reference_bands, slice_idx=slice_reference_bands
        )

    # get slice_idx for window_search
    slice_idx = self.inputs.get('slice_tb_model', None)
    if slice_idx is not None:
        inputs['slice_idx'] = slice_idx

    self.report('Get or guess initial window.')
    wannier_bands = fp_run_outputs.wannier_bands
    initial_window = self.inputs.get('initial_window', None)
    if initial_window is None:
        initial_window = get_initial_window_inline(
            wannier_bands=wannier_bands,
            slice_reference_bands=self.inputs.get(
                'slice_reference_bands',
                orm.List(list=list(range(wannier_bands.get_bands().shape[1])))
            )
        )

    self.report("Starting WindowSearch workflow.")
    return ToContext(
        window_search=self.submit(
            WindowSearch,
            reference_bands=reference_bands,
            wannier=wannier_namespace_inputs,
            wannier_bands=wannier_bands,
            initial_window=initial_window,
            **inputs
        )
    )
def test_lua(aiida_profile, fixture_sandbox, generate_calc_job, fixture_code,
             generate_structure, generate_kpoints_mesh, generate_basis,
             generate_param, generate_psml_data, generate_lua_file,
             generate_lua_folder, file_regression):
    """
    Test that a single calculation is submitted with the right content of the
    aiida.fdf file.
    """
    entry_point_name = 'siesta.siesta'

    psml = generate_psml_data('Si')

    lua_script = generate_lua_file()
    lua_folder = generate_lua_folder()
    lua_parameters = {
        'number_of_internal_images_in_path': 5,
        'neb_spring_constant': 0.45,
        'neb_image_file_prefix': "image-"
    }
    lua_retrieve_list = ['NEB.results']

    inputs = {
        'code': fixture_code(entry_point_name),
        'structure': generate_structure(),
        'kpoints': generate_kpoints_mesh(2),
        'parameters': generate_param(),
        'pseudos': {
            'Si': psml,
            'SiDiff': psml
        },
        'lua': {
            'script': lua_script,
            'input_files': lua_folder,
            'parameters': orm.Dict(dict=lua_parameters),
            'retrieve_list': orm.List(list=lua_retrieve_list)
        },
        'metadata': {
            'options': {
                'resources': {'num_machines': 1},
                'max_wallclock_seconds': 1800,
                'withmpi': False,
            }
        }
    }

    calc_info = generate_calc_job(fixture_sandbox, entry_point_name, inputs)

    list_lua_fold = lua_folder.list_object_names()
    local_copy_list = [
        (psml.uuid, psml.filename, 'Si.psml'),
        (psml.uuid, psml.filename, 'SiDiff.psml'),
        (lua_script.uuid, lua_script.filename, lua_script.filename),
        (lua_folder.uuid, list_lua_fold[0], list_lua_fold[0]),
        (lua_folder.uuid, list_lua_fold[1], list_lua_fold[1])
    ]
    retrieve_list = [
        'BASIS_ENTHALPY', 'MESSAGES', 'time.json', 'aiida.out', 'aiida.xml',
        '*.ion.xml', 'NEB.results'
    ]

    assert sorted(calc_info.local_copy_list) == sorted(local_copy_list)
    assert sorted(calc_info.retrieve_list) == sorted(retrieve_list)

    with fixture_sandbox.open('aiida.fdf') as handle:
        input_written = handle.read()
    file_regression.check(input_written, encoding='utf-8', extension='.fdf')

    with fixture_sandbox.open('config.lua') as conflua_handle:
        conflua_input_written = conflua_handle.read()
    file_regression.check(conflua_input_written, encoding='utf-8', extension='.lua')
def inner():
    inputs = get_fp_tb_inputs()
    inputs['initial_window'] = orm.List(list=[-4.5, -4, 6.5, 16])
    inputs['window_tol'] = orm.Float(1.5)
    return inputs