def run_get_node(process, inputs_dict):
    """
    Run a process and return the resulting calculation node, compatibly with
    both aiida v0.12 and v1.0.0.

    The special inputs "options", "label" and "description" are converted
    to/from their underscore-prefixed variants as required by each version.

    :param process: a process
    :param inputs_dict: a dictionary of inputs
    :type inputs_dict: dict
    :return: the calculation Node
    """
    if aiida_version() < cmp_version("1.0.0a1"):
        # Legacy API: special inputs carry a leading underscore
        for name in ("options", "label", "description"):
            if name in inputs_dict:
                inputs_dict["_" + name] = inputs_dict.pop(name)
        instance = process.new_instance(inputs=inputs_dict)
        instance.run_until_complete()
        return instance.calc

    # Modern API: strip any leading underscore and delegate to the launcher
    from aiida.work.launch import run_get_node  # pylint: disable=import-error
    for name in ("_options", "_label", "_description"):
        if name in inputs_dict:
            inputs_dict[name[1:]] = inputs_dict.pop(name)
    _, node = run_get_node(process, **inputs_dict)
    return node
def run_calculation(code, counter, inputval):
    """
    Run a calculation through the Process layer.

    :param code: the Code to run the calculation with
    :param counter: index used only in the progress message
    :param inputval: the input value for the calculation
    :return: tuple of (calculation node, expected result)
    """
    process, inputs, expected_result = create_calculation_process(
        code=code, inputval=inputval)
    # run_get_node returns (results, node); only the node is needed here,
    # so bind the unused results to ``_``
    _, calc = run_get_node(process, **inputs)
    # Parenthesized single-argument print works under both Python 2 and 3,
    # unlike the original ``print "..."`` statement form
    print("[{}] ran calculation {}, pk={}".format(counter, calc.uuid, calc.pk))
    return calc, expected_result
# Example: launching a WorkChain with ``run_get_node`` and ``run_get_pid``.
# Both launchers run the process and return the results dict together with
# the calculation node or the pid, respectively.
from aiida.orm.data.int import Int
from aiida.work.launch import run_get_node, run_get_pid
from aiida.work.workchain import WorkChain


class AddAndMultiplyWorkChain(WorkChain):
    # Workchain body elided in this example snippet
    ...


# Inputs are passed as keyword arguments wrapped in aiida ``Int`` nodes
result, node = run_get_node(AddAndMultiplyWorkChain, a=Int(1), b=Int(2), c=Int(3))
result, pid = run_get_pid(AddAndMultiplyWorkChain, a=Int(1), b=Int(2), c=Int(3))
# Example: launching a workfunction with ``run_get_node`` and ``run_get_pid``.
# Inputs may be supplied positionally or as keyword arguments.
from aiida.work.launch import run_get_node, run_get_pid
from aiida.work.workfunctions import workfunction

a = 1
b = 2


@workfunction
def add(a, b):
    # Return the sum of the two inputs
    return a + b


# Passing inputs as arguments
result, node = run_get_node(add, a, b)
result, pid = run_get_pid(add, a, b)

# Passing inputs as keyword arguments
result, node = run_get_node(add, a=a, b=b)
result, pid = run_get_pid(add, a=a, b=b)
def launch(expression, code, use_calculations, use_workfunctions, sleep,
           timeout, modulo, dry_run, daemon):
    """
    Evaluate the expression in Reverse Polish Notation in both a normal way and by procedurally generating
    a workchain that encodes the sequence of operators and gets the stack of operands as an input. Multiplications
    are modelled by a 'while_' construct and addition will be performed by an addition or a subtraction,
    depending on the sign, branched by the 'if_' construct. Powers will be simulated by nested workchains.

    The script will generate a file containing the workchains with the procedurally generated outlines. Unless the
    dry run option is specified, the workchain will also be run and its output compared with the normal evaluation
    of the expression. If the two values match the script will be exited with a zero exit status, otherwise a non-zero
    exit status will be returned, indicating failure.

    In addition to normal rules of Reverse Polish Notation, the following restrictions to the expression apply:

    \b
    * Only integers are supported
    * Only the addition, multiplication and power operators (+, * and ^, respectively) are supported
    * Every intermediate result should be an integer, so no raising a number to a negative power
    * Operands for power operator are limited to the range [1, 3]
    * Expression has only a single sequence of numbers followed by single continuous sequence of operators

    If no expression is specified, a random one will be generated that adheres to these rules
    """
    import importlib
    import sys
    import time
    import uuid
    from aiida.orm import Code
    from aiida.orm.data.int import Int
    from aiida.orm.data.str import Str
    from aiida.work.launch import run_get_node, submit
    from expression import generate, validate, evaluate
    from workchain import generate_outlines, format_outlines, write_workchain

    # Running actual calculations requires a real Code node
    if use_calculations and not isinstance(code, Code):
        raise click.BadParameter(
            'if you specify the -C flag, you have to specify a code as well')

    # Fall back to a randomly generated (valid) expression
    if expression is None:
        expression = generate()

    valid, error = validate(expression)
    if not valid:
        click.echo("the expression '{}' is invalid: {}".format(
            expression, error))
        sys.exit(1)

    # Unique module filename so repeated runs do not collide
    filename = 'polish_{}.py'.format(str(uuid.uuid4().hex))
    evaluated = evaluate(expression, modulo)
    outlines, stack = generate_outlines(expression)
    outlines_string = format_outlines(outlines, use_calculations,
                                      use_workfunctions)
    # NOTE(review): the returned workchain_filename is never used below;
    # the import relies on ``filename`` instead — confirm intentional
    workchain_filename = write_workchain(outlines_string, filename=filename)

    click.echo('Expression: {}'.format(expression))
    if not dry_run:
        try:
            # Import the just-generated workchain module by its dotted path
            workchain_module = 'polish_workchains.{}'.format(
                filename.replace('.py', ''))
            workchains = importlib.import_module(workchain_module)
        except ImportError:
            click.echo(
                'could not import the {} module'.format(workchain_module))
            sys.exit(1)

        inputs = {'modulo': Int(modulo), 'operands': Str(' '.join(stack))}
        if code:
            inputs['code'] = code

        if daemon:
            # Submit to the daemon and poll until terminated or timed out
            workchain = submit(workchains.Polish00WorkChain, **inputs)
            start_time = time.time()
            timed_out = True
            while time.time() - start_time < timeout:
                time.sleep(sleep)
                if workchain.is_terminated:
                    timed_out = False
                    break

            if timed_out:
                click.secho('Failed: ', fg='red', bold=True, nl=False)
                click.secho(
                    'the workchain<{}> did not finish in time and the operation timed out'
                    .format(workchain.pk), bold=True)
                sys.exit(1)

            try:
                result = workchain.out.result
            except AttributeError:
                click.secho('Failed: ', fg='red', bold=True, nl=False)
                click.secho(
                    'the workchain<{}> did not return a result output node'.
                    format(workchain.pk), bold=True)
                sys.exit(1)
        else:
            # Run in the current interpreter, blocking until completion
            results, workchain = run_get_node(workchains.Polish00WorkChain,
                                              **inputs)
            result = results['result']

    click.echo('Evaluated : {}'.format(evaluated))
    if not dry_run:
        click.echo('Workchain : {} <{}>'.format(result, workchain.pk))

        # Exit status reflects whether the workchain reproduced the value
        if result != evaluated:
            click.secho('Failed: ', fg='red', bold=True, nl=False)
            click.secho(
                'the workchain result did not match the evaluated value',
                bold=True)
            sys.exit(1)
        else:
            click.secho('Success: ', fg='green', bold=True, nl=False)
            click.secho(
                'the workchain accurately reproduced the evaluated value',
                bold=True)
            sys.exit(0)
def main():
    """Integration driver: submit calculations and workchains, poll the
    daemon until everything finishes (or a timeout), then validate the
    results and exit with a status code (0 ok, 2 timeout, 3 mismatch).
    """
    expected_results_calculations = {}
    expected_results_workchains = {}
    # NOTE(review): ``codename`` and the various counts/timeouts are
    # module-level names defined outside this view
    code = Code.get_from_string(codename)

    # Submitting the Calculations the old way, creating and storing a JobCalc first and submitting it
    print "Submitting {} old style calculations to the daemon".format(
        number_calculations)
    for counter in range(1, number_calculations + 1):
        inputval = counter
        calc, expected_result = submit_calculation(code=code,
                                                   counter=counter,
                                                   inputval=inputval)
        expected_results_calculations[calc.pk] = expected_result

    # Submitting the Calculations the new way directly through the launchers
    print "Submitting {} new style calculations to the daemon".format(
        number_calculations)
    for counter in range(1, number_calculations + 1):
        inputval = counter
        calc, expected_result = launch_calculation(code=code,
                                                   counter=counter,
                                                   inputval=inputval)
        expected_results_calculations[calc.pk] = expected_result

    # Submitting the Workchains
    print "Submitting {} workchains to the daemon".format(number_workchains)
    for index in range(number_workchains):
        inp = Int(index)
        # NOTE(review): run_get_node runs in-process rather than submitting
        # to the daemon, despite the message above — confirm intentional
        result, node = run_get_node(NestedWorkChain, inp=inp)
        expected_results_workchains[node.pk] = index

    print "Submitting a workchain with 'submit'."
    builder = NestedWorkChain.get_builder()
    input_val = 4
    builder.inp = Int(input_val)
    proc = submit(builder)
    expected_results_workchains[proc.pk] = input_val

    print "Submitting a workchain with a nested input namespace."
    value = Int(-12)
    # NOTE(review): this pk is never recorded in expected_results_workchains,
    # so this submission is not validated later — confirm intentional
    pk = submit(NestedInputNamespace, foo={'bar': {'baz': value}}).pk

    print "Submitting a workchain with a dynamic non-db input."
    value = [4, 2, 3]
    pk = submit(DynamicNonDbInput, namespace={'input': value}).pk
    expected_results_workchains[pk] = value

    print "Submitting a workchain with a dynamic db input."
    value = 9
    pk = submit(DynamicDbInput, namespace={'input': Int(value)}).pk
    expected_results_workchains[pk] = value

    print "Submitting a workchain with a mixed (db / non-db) dynamic input."
    value_non_db = 3
    value_db = Int(2)
    pk = submit(DynamicMixedInput, namespace={
        'inputs': {
            'input_non_db': value_non_db,
            'input_db': value_db
        }
    }).pk
    # NOTE(review): adds a plain int to an Int node — relies on Int
    # supporting arithmetic with python ints
    expected_results_workchains[pk] = value_non_db + value_db

    print("Submitting the serializing workchain")
    pk = submit(SerializeWorkChain, test=Int).pk
    expected_results_workchains[pk] = ObjectLoader().identify_object(Int)

    print "Submitting the ListEcho workchain."
    list_value = List()
    list_value.extend([1, 2, 3])
    pk = submit(ListEcho, list=list_value).pk
    expected_results_workchains[pk] = list_value

    print "Submitting a WorkChain which contains a workfunction."
    value = Str('workfunction test string')
    pk = submit(WorkFunctionRunnerWorkChain, input=value).pk
    expected_results_workchains[pk] = value

    print "Submitting a WorkChain which contains an InlineCalculation."
    value = Str('test_string')
    pk = submit(InlineCalcRunnerWorkChain, input=value).pk
    expected_results_workchains[pk] = value

    calculation_pks = sorted(expected_results_calculations.keys())
    workchains_pks = sorted(expected_results_workchains.keys())
    pks = calculation_pks + workchains_pks

    # NOTE(review): typo "Wating" in the runtime string — left as-is here
    print "Wating for end of execution..."
    start_time = time.time()
    exited_with_timeout = True
    while time.time() - start_time < timeout_secs:
        time.sleep(15)  # Wait a few seconds
        # Print some debug info, both for debugging reasons and to avoid
        # that the test machine is shut down because there is no output
        print "#" * 78
        print "####### TIME ELAPSED: {} s".format(time.time() - start_time)
        print "#" * 78
        print "Output of 'verdi calculation list -a':"
        try:
            print subprocess.check_output(
                ["verdi", "calculation", "list", "-a"],
                stderr=subprocess.STDOUT,
            )
        except subprocess.CalledProcessError as e:
            print "Note: the command failed, message: {}".format(e.message)

        print "Output of 'verdi work list':"
        try:
            print subprocess.check_output(
                ['verdi', 'work', 'list', '-a', '-p1'],
                stderr=subprocess.STDOUT,
            )
        except subprocess.CalledProcessError as e:
            print "Note: the command failed, message: {}".format(e.message)

        print "Output of 'verdi daemon status':"
        try:
            print subprocess.check_output(
                ["verdi", "daemon", "status"],
                stderr=subprocess.STDOUT,
            )
        except subprocess.CalledProcessError as e:
            print "Note: the command failed, message: {}".format(e.message)

        if jobs_have_finished(pks):
            print "Calculation terminated its execution"
            exited_with_timeout = False
            break

    if exited_with_timeout:
        print_daemon_log()
        print ""
        print "Timeout!! Calculation did not complete after {} seconds".format(
            timeout_secs)
        sys.exit(2)
    else:
        # create cached calculations -- these should be FINISHED immediately
        cached_calcs = []
        for counter in range(1, number_calculations + 1):
            calc, expected_result = create_cache_calc(code=code,
                                                      counter=counter,
                                                      inputval=counter)
            cached_calcs.append(calc)
            expected_results_calculations[calc.pk] = expected_result
        # new style cached calculations, with 'run'
        with enable_caching():
            for counter in range(1, number_calculations + 1):
                calc, expected_result = run_calculation(code=code,
                                                        counter=counter,
                                                        inputval=counter)
                cached_calcs.append(calc)
                expected_results_calculations[calc.pk] = expected_result
        # Final verdict: all three validation passes must succeed
        if (validate_calculations(expected_results_calculations)
                and validate_workchains(expected_results_workchains)
                and validate_cached(cached_calcs)):
            print_daemon_log()
            print ""
            print "OK, all calculations have the expected parsed result"
            sys.exit(0)
        else:
            print_daemon_log()
            print ""
            print "ERROR! Some return values are different from the expected value"
            sys.exit(3)
def launch(code, structure, pseudo_family, kpoints, max_num_machines,
           max_wallclock_seconds, daemon, ecutwfc, ecutrho, hubbard_u,
           hubbard_v, hubbard_file_pk, starting_magnetization, smearing, mode):
    """
    Run a PwCalculation for a given input structure.

    The calculation is either submitted to the daemon (``daemon`` truthy) or
    run in the current interpreter, in which case a summary of the output
    nodes is echoed afterwards.
    """
    from aiida.orm.data.parameter import ParameterData
    from aiida.orm.data.upf import get_pseudos_from_structure
    from aiida.orm.utils import CalculationFactory
    from aiida.work.launch import run_get_node, submit
    from aiida_quantumespresso.utils.resources import get_default_options

    PwCalculation = CalculationFactory('quantumespresso.pw')

    # Base pw.x input parameters; the validators below may extend these
    parameters = {
        'CONTROL': {
            'calculation': mode,
        },
        'SYSTEM': {
            'ecutwfc': ecutwfc,
            'ecutrho': ecutrho,
        },
    }

    # Each validator raises ValueError on bad input, which is surfaced to the
    # CLI as a BadParameter. NOTE(review): ``exception.message`` is a
    # Python-2-ism; under Python 3 use str(exception)
    try:
        hubbard_file = validate.validate_hubbard_parameters(
            structure, parameters, hubbard_u, hubbard_v, hubbard_file_pk)
    except ValueError as exception:
        raise click.BadParameter(exception.message)

    try:
        validate.validate_starting_magnetization(structure, parameters,
                                                 starting_magnetization)
    except ValueError as exception:
        raise click.BadParameter(exception.message)

    try:
        validate.validate_smearing(parameters, smearing)
    except ValueError as exception:
        raise click.BadParameter(exception.message)

    inputs = {
        'code': code,
        'structure': structure,
        'pseudo': get_pseudos_from_structure(structure, pseudo_family),
        'kpoints': kpoints,
        'parameters': ParameterData(dict=parameters),
        'settings': ParameterData(dict={}),
        'options': get_default_options(max_num_machines,
                                       max_wallclock_seconds),
    }

    if hubbard_file:
        inputs['hubbard_file'] = hubbard_file

    if daemon:
        # Hand off to the daemon and return immediately
        calculation = submit(PwCalculation, **inputs)
        click.echo('Submitted {}<{}> to the daemon'.format(
            PwCalculation.__name__, calculation.pk))
    else:
        # Blocking run, then print a table of the output links
        click.echo(
            'Running a pw.x calculation in the {} mode... '.format(mode))
        results, calculation = run_get_node(PwCalculation, **inputs)
        click.echo('PwCalculation<{}> terminated with state: {}'.format(
            calculation.pk, calculation.get_state()))
        click.echo('\n{link:25s} {node}'.format(link='Output link',
                                                node='Node pk and type'))
        click.echo('{s}'.format(s='-' * 60))
        for link, node in sorted(calculation.get_outputs(also_labels=True)):
            click.echo('{:25s} <{}> {}'.format(link, node.pk,
                                               node.__class__.__name__))
def test_run_get_node(self):
    """Verify that run_get_node returns a Calculation node for DummyProcess."""
    result, node = run_get_node(DummyProcess, a=Int(2), b=Str('test'))
    self.assertIsInstance(node, aiida.orm.Calculation)