def init_calculation(self):
    """Create input dictionary for the calculation, deal with restart (later?)"""
    # count number of previous calculations
    self.ctx.running_calc += 1

    # set the structure
    self.ctx.inputs.structure = self.inputs.structure

    # deal with oxidation states
    if self.ctx.running_calc > 1 and self.ctx.try_oxi:
        self.report('Trying to guess oxidation states')
        self.ctx.inputs.guess_oxistates = Bool(True)
        self.ctx.inputs.high_spin_preferred = Bool(self.ctx.high_spin_preferred)

    # set metadata
    label = self.inputs.metadata.get('label', 'CRYSTAL calc')
    description = self.inputs.metadata.get('description', '')
    self.ctx.inputs.metadata = AttributeDict({
        'options': self.ctx.options,
        'label': '{} [{}]'.format(label, self.ctx.running_calc),
        'description': description
    })
def launch_phono3py(cutoff_energy=350, is_nac=False):
    """Launch calculation."""
    structure, forces_config, nac_config, phonon_settings = get_settings(
        cutoff_energy, is_nac)
    Phono3pyWorkChain = WorkflowFactory("phonopy.phono3py")
    builder = Phono3pyWorkChain.get_builder()
    builder.structure = structure
    builder.calculator_settings = Dict(dict={
        "forces": forces_config,
        "nac": nac_config
    })
    builder.run_phono3py = Bool(False)
    builder.remote_phono3py = Bool(False)
    builder.code_string = Str("phonopy@nancy")
    builder.phonon_settings = Dict(dict=phonon_settings)
    builder.symmetry_tolerance = Float(1e-5)
    builder.options = Dict(dict=forces_config["options"])
    dim = phonon_settings["supercell_matrix"]
    kpoints_mesh = forces_config["kpoints_mesh"]
    label = "ZnTe phono3py %dx%dx%d kpt %dx%dx%d PBEsol %d eV" % (
        tuple(dim) + tuple(kpoints_mesh) + (cutoff_energy, ))
    builder.metadata.label = label
    builder.metadata.description = label
    future = submit(builder)
    print(label)
    print(future)
    print("Running workchain with pk={}".format(future.pk))
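A minimal invocation sketch for the launcher above; both keyword values simply restate the function's own defaults.

if __name__ == "__main__":
    launch_phono3py(cutoff_energy=350, is_nac=False)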
def test_create(self):
    a = Float()
    # Check that initial value is zero
    self.assertAlmostEqual(a.value, 0.0)

    f = Float(6.0)
    self.assertAlmostEqual(f.value, 6.)
    self.assertAlmostEqual(f, Float(6.0))

    i = Int()
    self.assertAlmostEqual(i.value, 0)
    i = Int(6)
    self.assertAlmostEqual(i.value, 6)
    self.assertAlmostEqual(f, i)

    b = Bool()
    self.assertAlmostEqual(b.value, False)
    b = Bool(False)
    self.assertAlmostEqual(b.value, False)
    self.assertAlmostEqual(b.value, get_false_node())
    b = Bool(True)
    self.assertAlmostEqual(b.value, True)
    self.assertAlmostEqual(b.value, get_true_node())

    s = Str()
    self.assertAlmostEqual(s.value, '')
    s = Str('Hello')
    self.assertAlmostEqual(s.value, 'Hello')
def get_shear_relax_builder(self,
                            shear_strain_ratio: float,
                            additional_relax_pks: list = None):
    """
    Get relax builder for shear introduced relax twinboundary structure.

    Args:
        shear_strain_ratio (float): Shear strain ratio.
        additional_relax_pks (list): Pks of additional relax calculations;
            when given, the last one is used as the restart point.
    """
    twinboundary_analyzer = self.get_twinboundary_analyzer(
        additional_relax_pks=additional_relax_pks)
    cell = twinboundary_analyzer.get_shear_cell(
        shear_strain_ratio=shear_strain_ratio, is_standardize=False)
    # in order to get rotation matrix
    std = StandardizeCell(cell=cell)
    std_cell = std.get_standardized_cell(to_primitive=True,
                                         no_idealize=False,
                                         no_sort=True)
    if additional_relax_pks is None:
        rlx_pk = self.get_pks()['relax_pk']
    else:
        rlx_pk = additional_relax_pks[-1]
    rlx_node = load_node(rlx_pk)
    builder = rlx_node.get_builder_restart()

    # fix kpoints
    mesh, offset = map(np.array, builder.kpoints.get_kpoints_mesh())
    orig_mesh = np.abs(
        np.dot(np.linalg.inv(self._standardize.transformation_matrix),
               mesh).astype(int))
    orig_offset = np.round(np.abs(
        np.dot(np.linalg.inv(std.transformation_matrix), offset)),
                           decimals=2)
    std_mesh = np.abs(
        np.dot(std.transformation_matrix, orig_mesh).astype(int))
    std_offset = np.round(np.abs(
        np.dot(std.transformation_matrix, orig_offset)),
                          decimals=2)
    kpt = KpointsData()
    kpt.set_kpoints_mesh(std_mesh, offset=std_offset)
    builder.kpoints = kpt

    # fix structure
    builder.structure = get_aiida_structure(cell=std_cell)

    # fix relax conf
    builder.relax.convergence_max_iterations = Int(100)
    builder.relax.positions = Bool(True)
    builder.relax.shape = Bool(False)
    builder.relax.volume = Bool(False)
    builder.relax.convergence_positions = Float(1e-4)
    builder.relax.force_cutoff = \
        Float(AiidaRelaxWorkChain(node=rlx_node).get_max_force())

    builder.metadata.label = "tbr:{} rlx:{} shr:{} std:{}".format(
        self._pk, rlx_node.pk, shear_strain_ratio, True)
    builder.metadata.description = \
        "twinboundary_relax_pk:{} relax_pk:{} " \
        "shear_strain_ratio:{} standardize:{}".format(
            self._pk, rlx_node.pk, shear_strain_ratio, True)

    return builder
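A hedged usage sketch: `twinboundary_relax` stands for whatever object exposes this method (a hypothetical variable name), and 0.05 is an arbitrary strain ratio.

from aiida.engine import submit

builder = twinboundary_relax.get_shear_relax_builder(shear_strain_ratio=0.05)
submit(builder)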
def test_create(self): """Creating basic data objects.""" term_a = Float() # Check that initial value is zero self.assertAlmostEqual(term_a.value, 0.0) float_ = Float(6.0) self.assertAlmostEqual(float_.value, 6.) self.assertAlmostEqual(float_, Float(6.0)) int_ = Int() self.assertAlmostEqual(int_.value, 0) int_ = Int(6) self.assertAlmostEqual(int_.value, 6) self.assertAlmostEqual(float_, int_) bool_ = Bool() self.assertAlmostEqual(bool_.value, False) bool_ = Bool(False) self.assertAlmostEqual(bool_.value, False) self.assertAlmostEqual(bool_.value, get_false_node()) bool_ = Bool(True) self.assertAlmostEqual(bool_.value, True) self.assertAlmostEqual(bool_.value, get_true_node()) str_ = Str() self.assertAlmostEqual(str_.value, '') str_ = Str('Hello') self.assertAlmostEqual(str_.value, 'Hello')
def run_two_volumes(self):
    self.report("run_two_volumes")
    for strain, future_name in zip((0.99, 1.01), ('minus', 'plus')):
        Workflow = WorkflowFactory('vasp.relax')
        builder = Workflow.get_builder()
        for key in self.ctx.inputs:
            builder[key] = self.ctx.inputs[key]
        if 'label' in self.ctx.inputs.metadata:
            label = self.ctx.inputs.metadata['label'] + " " + future_name
            builder.metadata['label'] = label
        if 'description' in self.ctx.inputs.metadata:
            description = self.ctx.inputs.metadata['description']
            description += " " + future_name
            builder.metadata['description'] = description
        builder.structure = self.ctx['structure_%s' % future_name]
        relax = AttributeDict()
        relax.perform = Bool(True)
        relax.force_cutoff = Float(1e-8)
        relax.positions = Bool(True)
        relax.shape = Bool(True)
        relax.volume = Bool(False)
        relax.convergence_on = Bool(False)
        builder.relax = relax
        future = self.submit(builder)
        self.to_context(**{future_name: future})
def test_expose(self):
    res = launch.run(
        ParentExposeWorkChain,
        a=Int(1),
        sub_1={
            'b': Float(2.3),
            'c': Bool(True)
        },
        sub_2={
            'b': Float(1.2),
            'sub_3': {
                'c': Bool(False)
            }
        },
    )
    self.assertEqual(
        res, {
            'a': Float(2.2),
            'sub_1': {
                'b': Float(2.3),
                'c': Bool(True)
            },
            'sub_2': {
                'b': Float(1.2),
                'sub_3': {
                    'c': Bool(False)
                }
            }
        })
def define(cls, spec):
    super(ElasticWorkChain, cls).define(spec)
    spec.input('structure', valid_type=StructureData)
    spec.input('symmetric_strains_only', valid_type=Bool,
               default=lambda: Bool(True))
    spec.input('skip_input_relax', valid_type=Bool,
               default=lambda: Bool(False))
    spec.input('strain_magnitudes', valid_type=List,
               default=lambda: List(list=[-0.01, -0.005, 0.005, 0.01]))
    spec.input('clean_workdir', valid_type=Bool, default=lambda: Bool(True))
    spec.expose_inputs(PwRelaxWorkChain, namespace='initial_relax',
                       exclude=('structure', 'clean_workdir'))
    spec.expose_inputs(PwRelaxWorkChain, namespace='elastic_relax',
                       exclude=('structure', 'clean_workdir'))

    spec.outline(
        cls.relax_input_structure,
        cls.get_relaxed_structure_stress,
        cls.get_deformed_structures,
        cls.compute_deformed_structures,
        cls.gather_computed_stresses,
        cls.fit_elastic_tensor,
        # NOTE: may wish to add a check of elastic constant quality
        cls.set_outputs
    )

    spec.output('equilibrium_structure', valid_type=StructureData)
    spec.output('elastic_outputs', valid_type=ArrayData)
    spec.output('symmetry_mapping', valid_type=Dict)
    spec.exit_code(401, 'ERROR_SUB_PROCESS_FAILED_RELAX',
                   message='one of the PwRelaxWorkChain subprocesses failed')
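A hedged submission sketch for ElasticWorkChain, assuming `structure` is a StructureData node prepared elsewhere; populating the exposed initial_relax and elastic_relax namespaces is elided.

from aiida.engine import submit
from aiida.orm import Bool, List

builder = ElasticWorkChain.get_builder()
builder.structure = structure
builder.symmetric_strains_only = Bool(True)
builder.strain_magnitudes = List(list=[-0.01, -0.005, 0.005, 0.01])
# ... fill builder.initial_relax and builder.elastic_relax here ...
submit(builder)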
def test_nested_expose(self):
    res = launch.run(
        GrandParentExposeWorkChain,
        sub=dict(
            sub=dict(
                a=Int(1),
                sub_1={
                    'b': Float(2.3),
                    'c': Bool(True)
                },
                sub_2={
                    'b': Float(1.2),
                    'sub_3': {
                        'c': Bool(False)
                    }
                },
            )))
    self.assertEqual(
        res, {
            'sub': {
                'sub': {
                    'a': Float(2.2),
                    'sub_1': {
                        'b': Float(2.3),
                        'c': Bool(True)
                    },
                    'sub_2': {
                        'b': Float(1.2),
                        'sub_3': {
                            'c': Bool(False)
                        }
                    }
                }
            }
        })
def define(cls, spec):
    super().define(spec)
    spec.input('success', valid_type=Bool)
    spec.input('through_return', valid_type=Bool, default=lambda: Bool(False))
    spec.input('through_exit_code', valid_type=Bool,
               default=lambda: Bool(False))
    spec.exit_code(cls.EXIT_STATUS, 'EXIT_STATUS', cls.EXIT_MESSAGE)
    spec.outline(
        if_(cls.should_return_out_of_outline)(return_(cls.EXIT_STATUS)),
        cls.failure,
        cls.success
    )
    spec.output(cls.OUTPUT_LABEL, required=False)
def define(cls, spec):
    super(PhonopyWorkChain, cls).define(spec)
    spec.input('structure', valid_type=StructureData, required=True)
    spec.input('phonon_settings', valid_type=Dict, required=True)
    spec.input('displacement_dataset', valid_type=Dict, required=False)
    spec.input('immigrant_calculation_folders', valid_type=Dict,
               required=False)
    spec.input('calculation_nodes', valid_type=Dict, required=False)
    spec.input('calculator_settings', valid_type=Dict, required=False)
    spec.input('code_string', valid_type=Str, required=False)
    spec.input('options', valid_type=Dict, required=False)
    spec.input('symmetry_tolerance', valid_type=Float, required=False,
               default=Float(1e-5))
    spec.input('dry_run', valid_type=Bool, required=False,
               default=Bool(False))
    spec.input('run_phonopy', valid_type=Bool, required=False,
               default=Bool(False))
    spec.input('remote_phonopy', valid_type=Bool, required=False,
               default=Bool(False))

    spec.outline(
        cls.initialize_supercell_phonon_calculation,
        if_(cls.import_calculations)(
            if_(cls.import_calculations_from_files)(
                cls.read_force_and_nac_calculations_from_files,
            ),
            if_(cls.import_calculations_from_nodes)(
                cls.read_calculation_data_from_nodes,
            ),
            cls.check_imported_supercell_structures,
        ).else_(
            cls.run_force_and_nac_calculations,
        ),
        if_(cls.dry_run)(
            cls.postprocess_of_dry_run,
        ).else_(
            cls.create_force_sets,
            if_(cls.is_nac)(cls.create_nac_params),
            if_(cls.run_phonopy)(
                if_(cls.remote_phonopy)(
                    cls.run_phonopy_remote,
                    cls.collect_data,
                ).else_(
                    cls.create_force_constants,
                    cls.run_phonopy_in_workchain,
                ))))

    spec.output('force_constants', valid_type=ArrayData, required=False)
    spec.output('primitive', valid_type=StructureData, required=False)
    spec.output('supercell', valid_type=StructureData, required=False)
    spec.output('force_sets', valid_type=ArrayData, required=False)
    spec.output('nac_params', valid_type=ArrayData, required=False)
    spec.output('thermal_properties', valid_type=XyData, required=False)
    spec.output('band_structure', valid_type=BandsData, required=False)
    spec.output('dos', valid_type=XyData, required=False)
    spec.output('pdos', valid_type=XyData, required=False)
    spec.output('phonon_setting_info', valid_type=Dict, required=True)
def setUpClass(cls, *args, **kwargs):
    super().setUpClass(*args, **kwargs)
    from aiida.orm import Data, Bool, Float, Int

    cls.node_base = Data().store()
    cls.node_bool_true = Bool(True).store()
    cls.node_bool_false = Bool(False).store()
    cls.node_float = Float(1.0).store()
    cls.node_int = Int(1).store()
def test_return_out_of_outline(self):
    result, node = launch.run.get_node(PotentialFailureWorkChain,
                                       success=Bool(True),
                                       through_return=Bool(True))
    self.assertEqual(node.exit_status, PotentialFailureWorkChain.EXIT_STATUS)
    self.assertEqual(node.is_finished, True)
    self.assertEqual(node.is_finished_ok, False)
    self.assertEqual(node.is_failed, True)
    self.assertNotIn(PotentialFailureWorkChain.OUTPUT_LABEL,
                     node.get_outgoing().all_link_labels())
def main(code_string, incar, kmesh, structure, potential_family,
         potential_mapping, options):
    """Main method to setup the calculation."""
    # First, we need to fetch the AiiDA datatypes which will
    # house the inputs to our calculation
    dict_data = DataFactory('dict')
    kpoints_data = DataFactory('array.kpoints')

    # Then, we set the workchain we would like to call
    workchain = WorkflowFactory('vasp.master')

    # And finally, we declare the options, settings and input containers
    settings = AttributeDict()
    inputs = AttributeDict()

    # Organize settings
    settings.parser_settings = {
        'output_params': ['total_energies', 'maximum_force']
    }

    # Set inputs for the following WorkChain execution
    # Set code
    inputs.code = Code.get_from_string(code_string)
    # Set structure
    inputs.structure = structure
    # Set k-points grid density
    kpoints = kpoints_data()
    kpoints.set_kpoints_mesh(kmesh)
    inputs.kpoints = kpoints
    # Set parameters
    inputs.parameters = dict_data(dict=incar)
    # Set potentials and their mapping
    inputs.potential_family = Str(potential_family)
    inputs.potential_mapping = dict_data(dict=potential_mapping)
    # Set options
    inputs.options = dict_data(dict=options)
    # Set settings
    inputs.settings = dict_data(dict=settings)
    # Set workchain related inputs, in this case, give more explicit output to report
    inputs.verbose = Bool(True)
    # Master, convergence and relaxation related parameters that are passed to the master,
    # convergence and relaxation workchain, respectively
    # Turn off relaxation
    relax = AttributeDict()
    relax.perform = Bool(False)
    inputs.relax = relax
    # Extract electronic band structure
    inputs.extract_bands = Bool(True)
    # Submit the requested workchain with the supplied inputs
    submit(workchain, **inputs)
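A hedged invocation sketch for main() above; the code label, INCAR tags, k-mesh, potential family and scheduler options are illustrative placeholders, and `structure` stands for a StructureData node prepared elsewhere.

if __name__ == '__main__':
    OPTIONS = AttributeDict()
    OPTIONS.resources = {'num_machines': 1, 'num_mpiprocs_per_machine': 8}
    OPTIONS.max_wallclock_seconds = 3600
    INCAR = {'encut': 350, 'ismear': 0, 'sigma': 0.05}
    main('vasp@mycluster', INCAR, [4, 4, 4], structure, 'PBE.54',
         {'Si': 'Si'}, OPTIONS)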
def test_failing_workchain_through_exit_code(self):
    result, node = launch.run.get_node(PotentialFailureWorkChain,
                                       success=Bool(False),
                                       through_exit_code=Bool(True))
    self.assertEqual(node.exit_status, PotentialFailureWorkChain.EXIT_STATUS)
    self.assertEqual(node.exit_message, PotentialFailureWorkChain.EXIT_MESSAGE)
    self.assertEqual(node.is_finished, True)
    self.assertEqual(node.is_finished_ok, False)
    self.assertEqual(node.is_failed, True)
    self.assertNotIn(PotentialFailureWorkChain.OUTPUT_LABEL,
                     node.get_outgoing().all_link_labels())
def test_successful_workchain_through_exit_code(self):
    result, node = launch.run.get_node(PotentialFailureWorkChain,
                                       success=Bool(True),
                                       through_exit_code=Bool(True))
    self.assertEqual(node.exit_status, 0)
    self.assertEqual(node.is_finished, True)
    self.assertEqual(node.is_finished_ok, True)
    self.assertEqual(node.is_failed, False)
    self.assertIn(PotentialFailureWorkChain.OUTPUT_LABEL,
                  node.get_outgoing().all_link_labels())
    self.assertEqual(
        node.get_outgoing().get_node_by_label(
            PotentialFailureWorkChain.OUTPUT_LABEL),
        PotentialFailureWorkChain.OUTPUT_VALUE)
def test_vasp_hybrid_bands(
        configure_with_daemon,  # pylint: disable=unused-argument
        assert_finished,
        wait_for,
        get_insb_input  # pylint: disable=redefined-outer-name
):
    """
    Runs the VASP + hybrids reference bands workflow with InSb, on a very
    coarse grid.
    """
    from aiida.orm import Bool, load_node
    from aiida.plugins import DataFactory
    from aiida.engine import submit
    from aiida_tbextraction.fp_run.reference_bands import VaspReferenceBands

    KpointsData = DataFactory('array.kpoints')
    kpoints_mesh = KpointsData()
    kpoints_mesh.set_kpoints_mesh([2, 2, 2])
    kpoints = KpointsData()
    kpoints.set_kpoints_path([('G', (0, 0, 0), 'M', (0.5, 0.5, 0.5))])

    pk = submit(
        VaspReferenceBands,
        merge_kpoints=Bool(True),
        kpoints=kpoints,
        kpoints_mesh=kpoints_mesh,
        **get_insb_input
    ).pk
    wait_for(pk)
    assert_finished(pk)

    result = load_node(pk).get_outputs_dict()
    assert 'bands' in result
    assert (result['bands'].get_bands().shape ==
            (len(kpoints.get_kpoints()), 36))
def main(code_string, datafiles, parameters):
    """Main method to setup the calculation."""
    # First, we need to fetch the AiiDA datatypes which will
    # house the inputs to our calculation
    dict_data = DataFactory('dict')

    # Then, we set the workchain we would like to call
    workchain = WorkflowFactory('logger.gc_example')

    # Set inputs for the following WorkChain execution
    inputs = AttributeDict()
    # inputs.metadata = {'options': {'resources': {'num_machines': 1, 'num_mpiprocs_per_machine': 1},
    #                                'parser_name': 'logger',
    #                                'withmpi': False,
    #                                'output_filename': 'logger.out'}}
    # Set code
    inputs.code = Code.get_from_string(code_string)
    # Set datafiles
    inputs.datafiles = datafiles
    # Set parameters
    inputs.parameters = dict_data(dict=parameters)
    # Set workchain related inputs, in this case, give more explicit output to report
    inputs.verbose = Bool(True)
    # Submit the requested workchain with the supplied inputs
    run(workchain, **inputs)
def define(cls, spec):
    super(FormchkCalculation, cls).define(spec)
    spec.input(
        'parent_calc_folder',
        valid_type=RemoteData,
        required=True,
        help='the folder of a calculation containing the .chk file'
    )
    spec.input(
        'chk_name',
        valid_type=Str,
        required=False,
        default=lambda: Str(cls.DEFAULT_INPUT_FILE),
        help="name of the checkpoint file"
    )
    spec.input(
        'retrieve_fchk',
        valid_type=Bool,
        required=False,
        default=lambda: Bool(False),
        help="retrieve the fchk file"
    )

    # Turn mpi off by default
    spec.input('metadata.options.withmpi', valid_type=bool, default=False)
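A minimal builder sketch for FormchkCalculation, assuming a stored Gaussian calculation with pk GAUSSIAN_PK (hypothetical) whose remote folder holds the .chk file, and a 'formchk@mycluster' code label.

from aiida.engine import submit
from aiida.orm import Bool, load_code, load_node

builder = FormchkCalculation.get_builder()
builder.code = load_code('formchk@mycluster')  # hypothetical code label
builder.parent_calc_folder = load_node(GAUSSIAN_PK).outputs.remote_folder
builder.retrieve_fchk = Bool(True)
builder.metadata.options.resources = {'num_machines': 1, 'tot_num_mpiprocs': 1}
submit(builder)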
def launch_workflow(code, calculation, kpoints_mesh, clean_workdir,
                    max_num_machines, max_wallclock_seconds, with_mpi, daemon):
    """Run the `PhBaseWorkChain` for a previously completed `PwCalculation`."""
    from aiida.orm import Bool, Dict
    from aiida.plugins import WorkflowFactory
    from aiida_quantumespresso.utils.resources import get_default_options

    inputs = {
        'ph': {
            'code': code,
            'qpoints': kpoints_mesh,
            'parent_folder': calculation.outputs.remote_folder,
            'parameters': Dict(dict={'INPUTPH': {}}),
            'metadata': {
                'options': get_default_options(max_num_machines,
                                               max_wallclock_seconds,
                                               with_mpi),
            }
        }
    }

    if clean_workdir:
        inputs['clean_workdir'] = Bool(True)

    launch.launch_process(WorkflowFactory('quantumespresso.ph.base'), daemon,
                          **inputs)
def launch_workflow(code, datum, kpoints_mesh, clean_workdir,
                    max_num_machines, max_wallclock_seconds, with_mpi, daemon):
    """Run the `MatdynBaseWorkChain` for a previously completed `Q2rCalculation`."""
    from aiida.orm import Bool
    from aiida.plugins import WorkflowFactory
    from aiida_quantumespresso.utils.resources import get_default_options

    inputs = {
        'matdyn': {
            'code': code,
            'kpoints': kpoints_mesh,
            'force_constants': datum,
            'metadata': {
                'options': get_default_options(max_num_machines,
                                               max_wallclock_seconds,
                                               with_mpi),
            }
        }
    }

    if clean_workdir:
        inputs['clean_workdir'] = Bool(True)

    launch.launch_process(WorkflowFactory('quantumespresso.matdyn.base'),
                          daemon, **inputs)
def define(cls, spec):
    super(CubegenCalculation, cls).define(spec)
    spec.input(
        "parameters",
        valid_type=Dict,
        required=True,
        help='dictionary containing entries for cubes to be printed.')
    spec.input(
        'parent_calc_folder',
        valid_type=RemoteData,
        required=True,
        help='the folder of a calculation containing the .fchk file')
    spec.input(
        "stencil",
        valid_type=SinglefileData,
        required=False,
        help="In case of npts=-1, use this cube specification.",
    )
    spec.input(
        'retrieve_cubes',
        valid_type=Bool,
        required=False,
        default=lambda: Bool(False),
        help='should the cubes be retrieved?')
    spec.input(
        "gauss_memdef",
        valid_type=Int,
        required=False,
        default=lambda: Int(1024),
        help="Set the GAUSS_MEMDEF env variable to set the max memory in MB.")

    # Turn mpi off by default
    spec.input('metadata.options.withmpi', valid_type=bool, default=False)
    spec.input(
        "metadata.options.parser_name",
        valid_type=str,
        default=cls.DEFAULT_PARSER,
        non_db=True,
    )

    spec.inputs.dynamic = True
    spec.outputs.dynamic = True

    # Exit codes
    spec.exit_code(
        300,
        "ERROR_NO_RETRIEVED_FOLDER",
        message="The retrieved folder could not be accessed.",
    )
    spec.exit_code(
        301,
        "ERROR_NO_RETRIEVED_TEMPORARY_FOLDER",
        message="The retrieved temporary folder could not be accessed.",
    )
def test_identifier_sub_classes(self):
    """
    The sub_classes keyword argument should allow to narrow the scope
    of the query based on the orm class
    """
    node_bool = Bool(True).store()
    node_float = Float(0.0).store()
    node_int = Int(1).store()

    param_type_normal = NodeParamType()
    param_type_scoped = NodeParamType(
        sub_classes=('aiida.data:bool', 'aiida.data:float'))

    # For the base NodeParamType all node types should be matched
    self.assertEqual(
        param_type_normal.convert(str(node_bool.pk), None, None).uuid,
        node_bool.uuid)
    self.assertEqual(
        param_type_normal.convert(str(node_float.pk), None, None).uuid,
        node_float.uuid)
    self.assertEqual(
        param_type_normal.convert(str(node_int.pk), None, None).uuid,
        node_int.uuid)

    # The scoped NodeParamType should only match Bool and Float
    self.assertEqual(
        param_type_scoped.convert(str(node_bool.pk), None, None).uuid,
        node_bool.uuid)
    self.assertEqual(
        param_type_scoped.convert(str(node_float.pk), None, None).uuid,
        node_float.uuid)

    # The Int should not be found and raise
    with self.assertRaises(click.BadParameter):
        param_type_scoped.convert(str(node_int.pk), None, None)
def define(cls, spec):
    super(SelfConsistentHubbardWorkChain, cls).define(spec)
    spec.input('structure', valid_type=StructureData)
    spec.input('hubbard_u', valid_type=ParameterData)
    spec.input('tolerance', valid_type=Float, default=Float(0.1))
    spec.input('max_iterations', valid_type=Int, default=Int(5))
    spec.input('is_insulator', valid_type=Bool, required=False)
    spec.input('meta_convergence', valid_type=Bool, default=Bool(False))
    spec.expose_inputs(PwBaseWorkChain, namespace='scf',
                       exclude=('structure',))
    spec.expose_inputs(HpWorkChain, namespace='hp',
                       exclude=('parent_calculation',))
    spec.outline(
        cls.setup,
        cls.validate_inputs,
        if_(cls.should_run_recon)(
            cls.run_recon,
            cls.inspect_recon,
        ),
        while_(cls.should_run_iteration)(
            if_(cls.should_run_relax)(
                cls.run_relax,
                cls.inspect_relax,
            ),
            if_(cls.is_metal)(
                cls.run_scf_smearing
            ).elif_(cls.is_magnetic)(
                cls.run_scf_smearing,
                cls.run_scf_fixed_magnetic
            ).else_(
                cls.run_scf_fixed
            ),
            cls.run_hp,
            cls.inspect_hp,
        ),
        cls.run_results,
    )
def test_simple_kill_through_process(self):
    """
    Run the workchain for one step and then kill it. This should have the
    workchain and its children end up in the KILLED state.
    """
    runner = get_manager().get_runner()
    process = TestWorkChainAbortChildren.MainWorkChain(
        inputs={'kill': Bool(True)})

    @gen.coroutine
    def run_async():
        yield run_until_waiting(process)

        process.kill()

        with self.assertRaises(plumpy.KilledError):
            yield process.future()

    runner.schedule(process)
    runner.loop.run_sync(lambda: run_async())

    child = process.node.get_outgoing(
        link_type=LinkType.CALL_WORK).first().node
    self.assertEqual(child.is_finished_ok, False)
    self.assertEqual(child.is_excepted, False)
    self.assertEqual(child.is_killed, True)

    self.assertEqual(process.node.is_finished_ok, False)
    self.assertEqual(process.node.is_excepted, False)
    self.assertEqual(process.node.is_killed, True)
def launch_workflow(code, calculation, clean_workdir, max_num_machines,
                    max_wallclock_seconds, with_mpi, daemon):
    """Run the `Q2rBaseWorkChain` for a previously completed `PhCalculation`."""
    from aiida.orm import Bool
    from aiida.plugins import WorkflowFactory
    from aiida_quantumespresso.utils.resources import get_default_options

    expected_process_type = 'aiida.calculations:quantumespresso.ph'
    if calculation.process_type != expected_process_type:
        raise click.BadParameter(
            'The input calculation node has a process_type: {}; should be {}'.
            format(calculation.process_type, expected_process_type))

    inputs = {
        'q2r': {
            'code': code,
            'parent_folder': calculation.outputs.remote_folder,
            'metadata': {
                'options': get_default_options(max_num_machines,
                                               max_wallclock_seconds,
                                               with_mpi),
            }
        }
    }

    if clean_workdir:
        inputs['clean_workdir'] = Bool(True)

    launch.launch_process(WorkflowFactory('quantumespresso.q2r.base'), daemon,
                          **inputs)
def run_relax(self):
    """
    Run the SiestaBaseWorkChain to relax the input structure
    """
    inputs = dict(self.ctx.inputs)

    # Final input preparation, wrapping dictionaries in Dict nodes.
    # The code and options were set above; the pseudos were set in
    # 'ctx.inputs', so they are in 'inputs' already.
    inputs['kpoints'] = self.ctx.kpoints_mesh
    inputs['basis'] = Dict(dict=inputs['basis'])
    inputs['structure'] = self.ctx.structure_initial_primitive
    inputs['parameters'] = Dict(dict=inputs['parameters'])
    inputs['settings'] = Dict(dict=inputs['settings'])
    inputs['options'] = Dict(dict=inputs['options'])
    inputs['clean_workdir'] = Bool(False)
    inputs['max_iterations'] = Int(20)

    running = self.submit(SiestaBaseWorkChain, **inputs)
    self.report('launched SiestaBaseWorkChain<{}> in relaxation mode'.format(
        running.pk))

    return ToContext(workchain_relax=running)
def example(code, formchk_pk):
    builder = CubegenCalculation.get_builder()
    builder.parent_calc_folder = load_node(formchk_pk).outputs.remote_folder
    builder.code = code
    builder.parameters = Dict(
        dict={
            "homo": {
                "kind": "MO=Homo",
                "npts": -2,
            },
            "density": {
                "kind": "Density=SCF",
                "npts": -2,
            },
        })
    builder.retrieve_cubes = Bool(True)
    builder.metadata.options.resources = {
        "tot_num_mpiprocs": 1,
        "num_machines": 1,
    }
    builder.metadata.options.max_wallclock_seconds = 5 * 60

    print("Submitted calculation...")
    run(builder)
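An invocation sketch for example() above; the code label and formchk pk are hypothetical placeholders.

from aiida.orm import load_code

example(load_code('cubegen@mycluster'), formchk_pk=1234)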
def test_sqs_process(ce_sqs_code):
    prim = bulk('Au')
    structure = StructureData(ase=prim)
    chemical_symbols = List(list=[['Au', 'Pd']])

    # set up calculation
    inputs = {
        'code': ce_sqs_code,
        'structure': structure,
        'chemical_symbols': chemical_symbols,
        'pbc': List(list=[True, True, True]),
        'cutoffs': List(list=[5.0]),
        'max_size': Int(8),
        'include_smaller_cells': Bool(True),
        'n_steps': Int(2000),
        'target_concentrations': Dict(dict={
            'Au': 0.5,
            'Pd': 0.5
        }),
        'metadata': {
            'options': {
                'max_wallclock_seconds': 30,
            },
        }
    }

    result = run(CalculationFactory('ce.gensqs'), **inputs)
    assert 'sqs' in result
    assert 'cluster_space' in result

    sqs = result['sqs'].get_ase()
    assert sqs.get_number_of_atoms() == 8
def launch_workflow(code, structure, pseudo_family, kpoints_distance, ecutwfc,
                    ecutrho, hubbard_u, hubbard_v, hubbard_file_pk,
                    starting_magnetization, smearing,
                    automatic_parallelization, clean_workdir, max_num_machines,
                    max_wallclock_seconds, with_mpi, daemon):
    """Run a `PwBaseWorkChain`."""
    from aiida.orm import Bool, Float, Str, Dict
    from aiida.plugins import WorkflowFactory
    from aiida_quantumespresso.utils.resources import (
        get_default_options, get_automatic_parallelization_options)

    builder = WorkflowFactory('quantumespresso.pw.base').get_builder()

    parameters = {
        'SYSTEM': {
            'ecutwfc': ecutwfc,
            'ecutrho': ecutrho,
        },
    }

    try:
        hubbard_file = validate.validate_hubbard_parameters(
            structure, parameters, hubbard_u, hubbard_v, hubbard_file_pk)
    except ValueError as exception:
        raise click.BadParameter(str(exception))

    try:
        validate.validate_starting_magnetization(structure, parameters,
                                                 starting_magnetization)
    except ValueError as exception:
        raise click.BadParameter(str(exception))

    try:
        validate.validate_smearing(parameters, smearing)
    except ValueError as exception:
        raise click.BadParameter(str(exception))

    builder.pw.code = code
    builder.pw.structure = structure
    builder.pw.parameters = Dict(dict=parameters)
    builder.pseudo_family = Str(pseudo_family)
    builder.kpoints_distance = Float(kpoints_distance)

    if hubbard_file:
        builder.hubbard_file = hubbard_file

    if automatic_parallelization:
        automatic_parallelization = get_automatic_parallelization_options(
            max_num_machines, max_wallclock_seconds)
        builder.automatic_parallelization = Dict(
            dict=automatic_parallelization)
    else:
        builder.pw.metadata.options = get_default_options(
            max_num_machines, max_wallclock_seconds, with_mpi)

    if clean_workdir:
        builder.clean_workdir = Bool(True)

    launch.launch_process(builder, daemon)