'writecoorstep': True, 'xml:write': True, }) settings = ParameterData(dict={}) # default basis basis = ParameterData( dict={ # 'pao-energy-shift': '100 meV', # '%block pao-basis-sizes': """ # Si DZP """, '%block pao-basis': block_pao_basis_content, }) max_iterations = Int(5) # upload pseudos folder = './siesta-files' pseudo_family = 'example-siesta-pps' pseudo_family_desc = "Siesta Pseudopotentials for example structures" files_found, files_uploaded = psf.upload_psf_family(folder, pseudo_family, pseudo_family_desc, stop_if_existing=False) pseudo_family = Str('example-siesta-pps') print("PSF files found: {}. New files uploaded: {}").format( files_found, files_uploaded)
def define(cls, spec): super(WorkChain, cls).define(spec) spec.input("a", valid_type=NumericType) spec.input("b", valid_type=NumericType) spec.input("c", valid_type=NumericType) spec.outline( cls.sum, cls.prod ) def sum(self): self.ctx.sum = self.inputs.a + self.inputs.b def prod(self): self.out(self.ctx.sum * self.inputs.c) if __name__ == '__main__': two = Int(2) three = Int(3) four = Int(4) print "WORKFUNCTION:" simpledata = add_multiply_wf(two, three, four) print "output pk:", simpledata.pk print "output value:", simpledata.value print(run(AddMultiplyWf, a=two, b=three, c=four))
def incr_inline(inp):
    """Return the input node's value incremented by two, keyed 'res'."""
    incremented = Int(inp.value + 2)
    return {'res': incremented}
def main(): # Submitting the Calculations print "Submitting {} calculations to the daemon".format( number_calculations) code = Code.get_from_string(codename) expected_results_calculations = {} for counter in range(1, number_calculations + 1): inputval = counter parameters = ParameterData(dict={'value': inputval}) template = ParameterData( dict={ ## The following line adds a significant sleep time. ## I set it to 1 second to speed up tests ## I keep it to a non-zero value because I want ## To test the case when AiiDA finds some calcs ## in a queued state #'cmdline_params': ["{}".format(counter % 3)], # Sleep time 'cmdline_params': ["1"], 'input_file_template': "{value}", # File just contains the value to double 'input_file_name': 'value_to_double.txt', 'output_file_name': 'output.txt', 'retrieve_temporary_files': ['triple_value.tmp'] }) calc = code.new_calc() calc.set_max_wallclock_seconds(5 * 60) # 5 min calc.set_resources({"num_machines": 1}) calc.set_withmpi(False) calc.set_parser_name('simpleplugins.templatereplacer.test.doubler') calc.use_parameters(parameters) calc.use_template(template) calc.store_all() print "[{}] created calculation {}, pk={}".format( counter, calc.uuid, calc.dbnode.pk) expected_results_calculations[calc.pk] = { 'value': inputval * 2, 'retrieved_temporary_files': { 'triple_value.tmp': str(inputval * 3) } } calc.submit() print "[{}] calculation submitted.".format(counter) # Submitting the Workchains print "Submitting {} workchains to the daemon".format(number_workchains) expected_results_workchains = {} for index in range(1, number_workchains + 1): inp = Int(index) future = submit(ParentWorkChain, inp=inp) expected_results_workchains[future.pid] = index * 2 calculation_pks = sorted(expected_results_calculations.keys()) workchains_pks = sorted(expected_results_workchains.keys()) pks = calculation_pks + workchains_pks print "Wating for end of execution..." 
start_time = time.time() exited_with_timeout = True while time.time() - start_time < timeout_secs: time.sleep(15) # Wait a few seconds # Print some debug info, both for debugging reasons and to avoid # that the test machine is shut down because there is no output print "#" * 78 print "####### TIME ELAPSED: {} s".format(time.time() - start_time) print "#" * 78 print "Output of 'verdi calculation list -a':" try: print subprocess.check_output( ["verdi", "calculation", "list", "-a"], stderr=subprocess.STDOUT, ) except subprocess.CalledProcessError as e: print "Note: the command failed, message: {}".format(e.message) print "Output of 'verdi work list':" try: print subprocess.check_output( ['verdi', 'work', 'list'], stderr=subprocess.STDOUT, ) except subprocess.CalledProcessError as e: print "Note: the command failed, message: {}".format(e.message) print "Output of 'verdi daemon status':" try: print subprocess.check_output( ["verdi", "daemon", "status"], stderr=subprocess.STDOUT, ) except subprocess.CalledProcessError as e: print "Note: the command failed, message: {}".format(e.message) if jobs_have_finished(pks): print "Calculation terminated its execution" exited_with_timeout = False break if exited_with_timeout: print_daemon_log() print "" print "Timeout!! Calculation did not complete after {} seconds".format( timeout_secs) sys.exit(2) else: if (validate_calculations(expected_results_calculations) and validate_workchains(expected_results_workchains)): print_daemon_log() print "" print "OK, all calculations have the expected parsed result" sys.exit(0) else: print_daemon_log() print "" print "ERROR! Some return values are different from the expected value" sys.exit(3)
PhononPhono3py = WorkflowFactory('phonopy.phonon3') # Chose how to run the calculation run_by_deamon = False if not run_by_deamon: result = run( PhononPhono3py, structure=structure, es_settings=es_settings, ph_settings=ph_settings, # Optional settings # pressure=Float(0), optimize=Bool(False), use_nac=Bool(False), chunks=Int(120), # set the number of maximum simultaneous calculations cutoff=Float(4.0), # calculate_fc=Bool(False), # set true to calculate 2nd & 3rd order force constants # recover=load_node(81309), # set workchain to recover data_sets=load_node(81481) # load previous data ) print(result) else: future = submit( PhononPhono3py, structure=structure, es_settings=es_settings, ph_settings=ph_settings, # Optional settings # pressure=Float(0),
def wf_without_kwargs():
    """Zero-argument workfunction returning the constant integer node 4."""
    result = Int(4)
    return result
# Scheduler options (unused directly; superseded by the 'options'
# ParameterData inside `inputs` below).
options = {
    'resources': {
        'num_machines': 1,
        'tot_num_mpiprocs': 1,
    },
    'max_wallclock_seconds': 1800,
}

# Gamma-point-only k-point mesh.
kpoints = KpointsData()
kpoints.set_kpoints_mesh([1, 1, 1])

inputs = {
    'code': Code.get_from_string('VASP.5.4.4@Raichu'),
    'structure': load_node(888),
    'kpoints': kpoints,
    'parameters': ParameterData(dict={}),
    'settings': ParameterData(dict={}),
    'pseudo_family': Str('vasp-pbe'),
    'options': ParameterData(dict={
        'max_wallclock_seconds': 3600,
        'max_memory_kb': 10000000,
        'resources': {'num_machines': 1},
    }),
    'max_iterations': Int(1),
}

process = VaspCalculation.process()
# running = run(process, **inputs)
running = run(VASPBaseWorkchain, **inputs)
def test_get_process(self):
    # Submit a dummy process to the ticking engine and obtain its future.
    # (Previous comment said "Test cancelling a future" -- copy-pasted from
    # test_cancel; it did not describe this test.)
    future = self.ticking_engine.submit(DummyProcess, inputs={'a': Int(5)})
def test_submit(self):
    """Submit a DummyProcess, drive the engine to completion and check
    that the process reports having run."""
    future = self.ticking_engine.submit(DummyProcess, inputs={'a': Int(5)})
    self._tick_till_finished()
    outputs = future.result()
    self.assertTrue(outputs['ran'].value)
def main(): # Submitting the Calculations print "Submitting {} calculations to the daemon".format( number_calculations) code = Code.get_from_string(codename) expected_results_calculations = {} for counter in range(1, number_calculations + 1): inputval = counter calc, expected_result = submit_calculation(code=code, counter=counter, inputval=inputval) expected_results_calculations[calc.pk] = expected_result # Submitting the Workchains print "Submitting {} workchains to the daemon".format(number_workchains) expected_results_workchains = {} for index in range(1, number_workchains + 1): inp = Int(index) future = submit(ParentWorkChain, inp=inp) expected_results_workchains[future.pid] = index * 2 calculation_pks = sorted(expected_results_calculations.keys()) workchains_pks = sorted(expected_results_workchains.keys()) pks = calculation_pks + workchains_pks print "Wating for end of execution..." start_time = time.time() exited_with_timeout = True while time.time() - start_time < timeout_secs: time.sleep(15) # Wait a few seconds # Print some debug info, both for debugging reasons and to avoid # that the test machine is shut down because there is no output print "#" * 78 print "####### TIME ELAPSED: {} s".format(time.time() - start_time) print "#" * 78 print "Output of 'verdi calculation list -a':" try: print subprocess.check_output( ["verdi", "calculation", "list", "-a"], stderr=subprocess.STDOUT, ) except subprocess.CalledProcessError as e: print "Note: the command failed, message: {}".format(e.message) print "Output of 'verdi work list':" try: print subprocess.check_output( ['verdi', 'work', 'list'], stderr=subprocess.STDOUT, ) except subprocess.CalledProcessError as e: print "Note: the command failed, message: {}".format(e.message) print "Output of 'verdi daemon status':" try: print subprocess.check_output( ["verdi", "daemon", "status"], stderr=subprocess.STDOUT, ) except subprocess.CalledProcessError as e: print "Note: the command failed, message: {}".format(e.message) 
if jobs_have_finished(pks): print "Calculation terminated its execution" exited_with_timeout = False break if exited_with_timeout: print_daemon_log() print "" print "Timeout!! Calculation did not complete after {} seconds".format( timeout_secs) sys.exit(2) else: # create cached calculations -- these should be FINISHED immediately cached_calcs = [] for counter in range(1, number_calculations + 1): inputval = counter calc, expected_result = create_cache_calc(code=code, counter=counter, inputval=inputval) cached_calcs.append(calc) expected_results_calculations[calc.pk] = expected_result if (validate_calculations(expected_results_calculations) and validate_workchains(expected_results_workchains) and validate_cached(cached_calcs)): print_daemon_log() print "" print "OK, all calculations have the expected parsed result" sys.exit(0) else: print_daemon_log() print "" print "ERROR! Some return values are different from the expected value" sys.exit(3)
} es_settings = ParameterData(dict=settings_dict) QHAPhonopy = WorkflowFactory('phonopy.qha') # Chose how to run the calculation run_by_deamon = False if not run_by_deamon: result = run( QHAPhonopy, structure=structure, es_settings=es_settings, ph_settings=ph_settings, # Optional settings num_expansions=Int(10), use_nac=Bool(False), ) print(result) else: future = submit( QHAPhonopy, structure=structure, es_settings=es_settings, ph_settings=ph_settings, # Optional settings num_expansions=Int(10), use_nac=Bool(False), )
def test_hashes_different(self):
    """Runs with different inputs must yield different node hashes."""
    _, first_pid = run(return_input, inp=Int(2), _return_pid=True)
    _, second_pid = run(return_input, inp=Int(3), _return_pid=True)
    first = load_node(first_pid)
    second = load_node(second_pid)
    self.assertNotEqual(first.get_hash(), second.get_hash())
def execute(args): """ The main execution of the script, which will run some preliminary checks on the command line arguments before passing them to the workchain and running it """ try: code = Code.get_from_string(args.codename) except NotExistent as exception: print "Execution failed: could not retrieve the code '{}'".format( args.codename) print "Exception report: {}".format(exception) return try: structure = load_node(args.structure) except NotExistent as exception: print "Execution failed: failed to load the node for the given structure pk '{}'".format( args.structure) print "Exception report: {}".format(exception) return if not isinstance(structure, StructureData): print "The provided pk {} for the structure does not correspond to StructureData, aborting...".format( args.parent_calc) return kpoints = KpointsData() kpoints.set_kpoints_mesh(args.kpoints) parameters = { 'CONTROL': { 'restart_mode': 'from_scratch', 'calculation': 'scf', 'tstress': True, }, 'SYSTEM': { 'ecutwfc': 40., 'ecutrho': 320., }, 'ELECTRONS': { 'conv_thr': 1.e-10, } } settings = {} options = { 'resources': { 'num_machines': 1 }, 'max_wallclock_seconds': args.max_wallclock_seconds, } run( PwBaseWorkChain, code=code, structure=structure, pseudo_family=Str(args.pseudo_family), kpoints=kpoints, parameters=ParameterData(dict=parameters), settings=ParameterData(dict=settings), options=ParameterData(dict=options), max_iterations=Int(args.max_iterations), )
'mesh': [20, 20, 20], 'symmetry_precision': 1e-5, 'code': 'phono3py@stern_in', 'machine': machine_dict }) Phono3pyDist = WorkflowFactory('phonopy.phono3py_dist') # Chose how to run the calculation run_by_deamon = False if not run_by_deamon: result = run( Phono3pyDist, structure=structure, parameters=ph_settings, force_sets=load_node(81481), # load phonon3 WorkChain output data_set gp_chunks=Int( 2) # number of computers in which distribute the calculations ) print(result) else: future = submit( Phono3pyDist, structure=structure, parameters=ph_settings, force_sets=load_node(81481), # load phonon3 WorkChain output data_set gp_chunks=Int(2)) print future print('Running workchain with pk={}'.format(future.pid))
# Phonopy/phono3py run settings: 2x2x2 supercell of the FCC primitive cell.
ph_settings = ParameterData(dict={
    'supercell': [[2, 0, 0],
                  [0, 2, 0],
                  [0, 0, 2]],
    'primitive': [[0.0, 0.5, 0.5],
                  [0.5, 0.0, 0.5],
                  [0.5, 0.5, 0.0]],
    'distance': 0.01,
    'mesh': [20, 20, 20],
    'symmetry_precision': 1e-5,
    'code': 'phono3py@stern_in',
    'machine': machine_dict
})

Phono3pyDist = WorkflowFactory('phonopy.phono3py_dist')

# Chose how to run the calculation: in-process or via the daemon.
run_by_deamon = False

if not run_by_deamon:
    result = run(
        Phono3pyDist,
        structure=structure,
        parameters=ph_settings,
        force_sets=load_node(81481),  # load force sets from phonon3 workchain
        gp_chunks=Int(8))
    print(result)
else:
    future = submit(
        Phono3pyDist,
        structure=structure,
        parameters=ph_settings,
        force_sets=load_node(81481),  # load force sets from phonon3 workchain
        gp_chunks=Int(8))
    print('Running workchain with pk={}'.format(future.pid))
def test_cancel(self):
    """Cancelling a future before the process runs must mark it cancelled."""
    future = self.ticking_engine.submit(DummyProcess, inputs={'a': Int(5)})
    # The engine has not ticked yet, so the future is still running.
    self.assertTrue(future.running())
    future.cancel()
    self.assertTrue(future.cancelled())
def simple_wf():
    """Return two fixed integer outputs under the keys 'a' and 'b'."""
    outputs = {
        'a': Int(6),
        'b': Int(7),
    }
    return outputs
def check(self):
    """Assert the sub-workchain stored in the context produced value 5."""
    expected = Int(5)
    assert self.ctx.subwc.out.value == expected
def nested_tester():
    """Report the active process id and its calculation node pk as Int nodes."""
    active_pid = ProcessStack.get_active_process_id()
    active_calc_pk = ProcessStack.get_active_process_calc_node().pk
    return {
        'pid': Int(active_pid),
        'node_pk': Int(active_calc_pk),
    }
def run(self):
    """Emit the constant integer 5 under the output link 'value'."""
    result = Int(5)
    self.out("value", result)
def execute(args): """ The main execution of the script, which will run some preliminary checks on the command line arguments before passing them to the workchain and running it """ try: code = Code.get_from_string(args.codename) except NotExistent as exception: print "Execution failed: could not retrieve the code '{}'".format(args.codename) print "Exception report: {}".format(exception) return alat = 10.0 # angstrom cell = [[alat, 0., 0.,], [0., alat, 0.,], [0., 0., alat,], ] # Water molecule # One of the H atoms is sligthy moved s = StructureData(cell=cell) s.append_atom(position=(0.000,0.000,0.00),symbols=['O']) s.append_atom(position=(0.757,0.586,0.00),symbols=['H']) s.append_atom(position=(-0.780,0.600,0.00),symbols=['H']) structure = s kpoints = KpointsData() kpoints.set_kpoints_mesh(args.kpoints) parameters = { 'meshcutoff': '80.000 Ry', 'dm:numberpulay': 4, 'dm:mixingweight': 0.2, 'dm:tolerance': 1.e-3, 'max-scfiterations': 30, 'scf-must-converge': True, 'geometry-must-converge': True, 'electronic-temperature': '25 meV', 'md-typeofrun': 'CG', 'md-numcgsteps': 6, 'md-maxcgdispl': '0.1 Ang', 'md-maxforcetol': '0.03 eV/Ang', 'xml:write': True } basis = { 'pao-energy-shift': '300 meV', 'pao-basis-size': 'DZP' } settings = {} options = { 'resources': { 'num_machines': 1 }, 'max_wallclock_seconds': args.max_wallclock_seconds, } run( SiestaBaseWorkChain, code=code, structure=structure, pseudo_family=Str(args.pseudo_family), kpoints=kpoints, parameters=ParameterData(dict=parameters), settings=ParameterData(dict=settings), options=ParameterData(dict=options), basis=ParameterData(dict=basis), max_iterations=Int(args.max_iterations), )
from aiida import load_dbenv
load_dbenv()

from aiida.orm.data.base import Int
from aiida.work.workfunction import workfunction as wf


# Define the workfunction
@wf
def sum(a, b):
    # NOTE(review): this shadows the builtin `sum`; consider renaming.
    """Return the sum of the two input nodes."""
    return a + b


# Run it with some input
r = sum(Int(4), Int(5))
print(r)
def execute(args): """ The main execution of the script, which will run some preliminary checks on the command line arguments before passing them to the workchain and running it """ try: code = Code.get_from_string(args.codename) except NotExistent as exception: print "Execution failed: could not retrieve the code '{}'".format(args.codename) print "Exception report: {}".format(exception) return try: structure = load_node(args.structure) except NotExistent as exception: print "Execution failed: failed to load the node for the given structure pk '{}'".format(args.structure) print "Exception report: {}".format(exception) return if not isinstance(structure, StructureData): print "The provided pk {} for the structure does not correspond to StructureData, aborting...".format(args.parent_calc) return kpoints = KpointsData() kpoints.set_kpoints_mesh(args.kpoints) parameters = { 'xc:functional': 'LDA', 'xc:authors': 'CA', 'spinpolarized': True, 'meshcutoff': '40.000 Ry', 'dm:numberpulay': 4, 'dm:mixingweight': 0.3, 'dm:tolerance': 1.e-3, 'max-scfiterations': 3, 'scf-must-converge': True, 'Solution-method': 'diagon', 'electronic-temperature': '25 meV', 'md-typeofrun': 'CG', 'md-numcgsteps': 0, 'md-maxcgdispl': '0.1 Ang', 'md-maxforcetol': '0.04 eV/Ang', 'writeforces': True, 'writecoorstep': True } basis = { 'pao-energy-shift': '300 meV', '%block pao-basis-sizes': """ Si DZP """, } settings = {} options = { 'resources': { 'num_machines': 1 }, 'max_wallclock_seconds': args.max_wallclock_seconds, } run( SiestaBaseWorkChain, code=code, structure=structure, pseudo_family=Str(args.pseudo_family), kpoints=kpoints, parameters=ParameterData(dict=parameters), settings=ParameterData(dict=settings), options=ParameterData(dict=options), basis=ParameterData(dict=basis), max_iterations=Int(args.max_iterations), )
self.out("r2", self.inputs.a) class F1WaitFor(WorkChain): @classmethod def define(cls, spec): super(F1WaitFor, cls).define(spec) spec.dynamic_input() spec.dynamic_output() spec.outline(cls.s1, cls.s2) def s1(self): p2 = async (long_running, a=self.inputs.inp) self.ctx.a = 1 self.ctx.r2 = p2.result() def s2(self): print("a={}".format(self.ctx.a)) print("r2={}".format(self.ctx.r2)) self.out("r1", self.ctx.r2['r2']) if __name__ == '__main__': five = Int(5) r1 = f1(five) run(F1, inp=five) R1 = run(F1WaitFor, inp=five)['r1']
def execute(args): """ The main execution of the script, which will run some preliminary checks on the command line arguments before passing them to the workchain and running it """ try: code = Code.get_from_string(args.codename) except NotExistent as exception: print "Execution failed: could not retrieve the code '{}'".format( args.codename) print "Exception report: {}".format(exception) return try: structure = load_node(args.structure) except NotExistent as exception: print "Execution failed: failed to load the node for the given structure pk '{}'".format( args.structure) print "Exception report: {}".format(exception) return if not isinstance(structure, StructureData): print "The provided pk {} for the structure does not correspond to StructureData, aborting...".format( args.parent_calc) return kpoints = KpointsData() kpoints.set_kpoints_mesh(args.kpoints) parameters = { 'xc-functional': 'LDA', 'xc-authors': 'CA', 'mesh-cutoff': '100.000 Ry', 'max-scfiterations': 30, 'dm-numberpulay': 4, 'dm-mixingweight': 0.1, 'dm-tolerance': 1.e-4, 'md-typeofrun': 'cg', 'md-numcgsteps': 8, 'md-maxcgdispl': '0.200 bohr', 'md-maxforcetol': '0.020 eV/Ang', 'geometry-must-converge': True, 'xml-write': True } # default basis basis = {} settings = {} options = { 'resources': { 'num_machines': 1 }, 'max_wallclock_seconds': args.max_wallclock_seconds, } run( SiestaBaseWorkChain, code=code, structure=structure, pseudo_family=Str(args.pseudo_family), kpoints=kpoints, parameters=ParameterData(dict=parameters), settings=ParameterData(dict=settings), options=ParameterData(dict=options), basis=ParameterData(dict=basis), max_iterations=Int(args.max_iterations), )
ThermalPhono3py = WorkflowFactory('phonopy.thermal') # Chose how to run the calculation run_by_deamon = False if not run_by_deamon: result = run( ThermalPhono3py, structure=structure, es_settings=es_settings, ph_settings=ph_settings, # Optional settings # pressure=Float(0), # Pressure at which the optimization takes place (no effect if optimize = False) optimize=Bool(False), use_nac=Bool(False), chunks=Int(120), # set the number of maximum simultaneous calculations initial_cutoff=Float(2.0), # Initial cutoff in Angstrom step=Float(1.0), # Step cutoff in Angstrom gp_chunks=Int( 1) # Number of computers in distributed phono3py calculation ) print(result) else: future = submit( ThermalPhono3py, structure=structure, es_settings=es_settings, ph_settings=ph_settings, # Optional settings # pressure=Float(0), optimize=Bool(True),
def execute(args): """ The main execution of the script, which will run some preliminary checks on the command line arguments before passing them to the workchain and running it """ try: code = Code.get_from_string(args.codename) except NotExistent as exception: print "Execution failed: could not retrieve the code '{}'".format(args.codename) print "Exception report: {}".format(exception) return try: structure = load_node(args.structure) except: # # Slightly distorted structure # alat = 5.430 # angstrom cell = [[0.5*alat, 0.5*alat, 0.,], [0., 0.5*alat, 0.5*alat,], [0.5*alat, 0., 0.5*alat,], ] # Si # This was originally given in the "ScaledCartesian" format # structure = StructureData(cell=cell) structure.append_atom(position=(0.000*alat,0.000*alat,0.000*alat),symbols=['Si']) structure.append_atom(position=(0.250*alat,0.245*alat,0.250*alat),symbols=['Si']) #print "Execution failed: failed to load the node for the given structure pk '{}'".format(args.structure) #print "Exception report: {}".format(exception) #return if not isinstance(structure, StructureData): print "The provided pk {} for the structure does not correspond to StructureData, aborting...".format(args.parent_calc) return kpoints = KpointsData() kpoints.set_kpoints_mesh(args.kpoints) bandskpoints = KpointsData() bandskpoints.set_cell(structure.cell, structure.pbc) bandskpoints.set_kpoints_path(kpoint_distance = 0.05) parameters = { 'xc-functional': 'LDA', 'xc-authors': 'CA', 'spinpolarized': False, 'meshcutoff': '150.0 Ry', 'max-scfiterations': 50, 'dm-numberpulay': 4, 'dm-mixingweight': 0.3, 'dm-tolerance': 1.e-4, 'Solution-method': 'diagon', 'electronic-temperature': '25 meV', 'md-typeofrun': 'cg', 'md-numcgsteps': 10, 'md-maxcgdispl': '0.1 Ang', 'md-maxforcetol': '0.04 eV/Ang' } # default basis basis = { 'pao-energy-shift': '100 meV', '%block pao-basis-sizes': """ Si DZP """, } settings = {} options = { 'resources': { 'num_machines': 1 }, 'max_wallclock_seconds': args.max_wallclock_seconds, } run( 
SiestaBaseWorkChain, code=code, structure=structure, pseudo_family=Str(args.pseudo_family), kpoints=kpoints, bandskpoints=bandskpoints, parameters=ParameterData(dict=parameters), settings=ParameterData(dict=settings), options=ParameterData(dict=options), basis=ParameterData(dict=basis), max_iterations=Int(args.max_iterations), )
def run(self):
    """Emit twice the input value under the output link 'output'."""
    doubled = self.inputs.inp.value * 2
    self.out('output', Int(doubled))