def test_code_delete_one_force(self):
    """Verify that ``delete --force`` removes the code from the database."""
    outcome = self.cli_runner.invoke(delete, [str(self.code.pk), '--force'])
    self.assertIsNone(outcome.exception, outcome.output)
    # After deletion the code must no longer be resolvable by label.
    from aiida.orm import Code
    with self.assertRaises(NotExistent):
        Code.get_from_string('code')
def generate_inputs(structure, es_settings, type=None, pressure=0.0):
    """Dispatch calculation-input generation to the plugin of the configured code.

    :param structure: structure node passed through to the plugin-specific generator
    :param es_settings: settings node; ``es_settings.dict.code`` is either a
        mapping of calculation type to code string or a single code string
    :param type: calculation type key used to pick a per-type code (``None``
        falls back to the generic code entry)
    :param pressure: pressure forwarded to the generator
    :raises InputValidationError: if no code is provided for this type
    """
    # Prefer a per-type code entry; fall back to the single generic code.
    # NOTE: narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
    # are no longer swallowed here.
    try:
        plugin = Code.get_from_string(
            es_settings.dict.code[type]).get_attr('input_plugin')
    except Exception:
        try:
            plugin = Code.get_from_string(
                es_settings.dict.code).get_attr('input_plugin')
        except InputValidationError:
            raise InputValidationError(
                'No code provided for {} calculation type'.format(type))

    if plugin in ['vasp.vasp']:
        return generate_vasp_params(structure, es_settings, type=type,
                                    pressure=pressure)
    elif plugin in ['quantumespresso.pw']:
        return generate_qe_params(structure, es_settings, type=type,
                                  pressure=pressure)
    elif plugin in ['lammps.force', 'lammps.optimize', 'lammps.md']:
        return generate_lammps_params(structure, es_settings, type=type,
                                      pressure=pressure)
    else:
        # Unsupported plugin: report and stop the script (pre-existing behavior).
        print('No supported plugin')
        exit()
def get_immigrant_builder(calculation_folder, calculator_settings, calc_type=None):
    """Build a VASP immigrant calculation builder from an existing folder.

    Only the ``vasp.vasp`` plugin is supported; any other code raises.
    """
    # Pick the code: per-type entry when a calc_type is given, generic otherwise.
    if calc_type:
        code = Code.get_from_string(
            calculator_settings[calc_type]['code_string'])
    else:
        code = Code.get_from_string(calculator_settings['code_string'])

    if code.attributes['input_plugin'] not in ['vasp.vasp']:
        raise RuntimeError("Code could not be found.")

    if calc_type is None:
        settings_dict = calculator_settings.get_dict()
    else:
        settings_dict = calculator_settings[calc_type]

    params = {
        'metadata': {'options': settings_dict['options']},
        'settings': settings_dict,
    }
    # Potential settings are optional; forward them only when present.
    for key in ('potential_family', 'potential_mapping'):
        if key in settings_dict:
            params[key] = settings_dict[key]

    calc_cls = CalculationFactory('vasp.vasp')
    _, builder = calc_cls.immigrant(code, calculation_folder, **params)
    builder.metadata['options']['parser_name'] = 'vasp.vasp'
    return builder
def test_delete_one(self):
    """Verify that ``delete`` with a single pk removes the code."""
    result = self.runner.invoke(delete, [str(self.code.pk)])
    # Pass the CLI output as the failure message so a failing run shows
    # why the command errored (consistent with test_code_delete_one_force).
    self.assertIsNone(result.exception, result.output)
    with self.assertRaises(NotExistent):
        from aiida.orm import Code
        Code.get_from_string('code')
def create_displacement_calculations(self):
    """Create and submit one force calculation per displaced supercell.

    Python 2 legacy workflow step: builds displaced supercells with phonopy,
    stores the data sets in the workchain context, then submits one
    calculation per supercell and returns a ``ToContext`` mapping labels to
    the submitted futures.
    """
    print 'test2!', self.ctx  # NOTE(review): leftover debug output
    structures = create_supercells_with_displacements_using_phonopy(
        self.inputs.structure, self.inputs.ph_settings)
    print 'test!'  # NOTE(review): leftover debug output
    # Keep the phonopy data sets and the displacement count in the context.
    self.ctx.data_sets = structures.pop('data_sets')
    self.ctx.number_of_displacements = len(structures)
    # Map input plugin names onto the corresponding input generators.
    generate_inputs = {
        'quantumespresso.pw': generate_qe_params,
        'vasp.vasp': generate_vasp_params
    }
    ############### FOR TESTING ###############
    # 1) Load data from nodes
    if False:  #For test
        from aiida.orm import load_node
        nodes = [461200, 461205, 461210, 461215]  # VASP
        labels = [
            'structure_1', 'structure_0', 'structure_3', 'structure_2'
        ]
        for pk, label in zip(nodes, labels):
            future = load_node(pk)
            self.ctx._content[label] = future
        return
    calcs = {}
    for label, structure in structures.iteritems():
        print label, structure  # NOTE(review): leftover debug output
        print self.inputs.es_settings.dict.code
        # plugin = self.inputs.code.get_attr('input_plugin')
        # Resolve the input plugin from the configured code; fall back to
        # the dedicated forces code when the generic lookup fails.
        # NOTE(review): bare except hides all errors from the first lookup.
        try:
            plugin = Code.get_from_string(
                self.inputs.es_settings.dict.code).get_attr('input_plugin')
            # plugin = self.inputs.es_settings.dict.code.get_attr('input_plugin')
        except:
            plugin = Code.get_from_string(
                self.inputs.es_settings.dict.code_forces).get_attr(
                    'input_plugin')
            # plugin = self.inputs.es_settings.dict.code_forces.get_attr('input_plugin')
        JobCalculation, calculation_input = generate_inputs[plugin](
            structure, self.inputs.machine, self.inputs.es_settings)
        calculation_input._label = label
        future = submit(JobCalculation, **calculation_input)
        calcs[label] = future
    return ToContext(**calcs)
def exists(self):
    """Returns True if the code exists, returns False otherwise."""
    from aiida.common import NotExistent, MultipleObjectsError
    full_label = "{}@{}".format(self.label, self.computer.name)
    try:
        Code.get_from_string(full_label)
    except MultipleObjectsError:
        # Several codes match the label — it certainly exists.
        return True
    except NotExistent:
        return False
    return True
def main(zeopp_code_label, raspa_code_label):
    """
    Prepare inputs and submit the Isotherm workchain.
    Usage: verdi run run_HTSMultiTWorkChain_HKUST-1_2comp.py zeopp@teslin raspa37@teslin
    """
    builder = HTSMultiTempWorkChain.get_builder()

    builder.metadata.label = "test"

    # Codes for the two steps: RASPA (GCMC) and zeo++ (geometry analysis).
    builder.raspa_base.raspa.code = Code.get_from_string(raspa_code_label)
    builder.zeopp.code = Code.get_from_string(zeopp_code_label)
    builder.zeopp.atomic_radii = SinglefileData(file=os.path.abspath('../aiida_matdis/data/UFF.rad'))

    # Shared scheduler options for both codes.
    options = {
        "resources": {
            "num_machines": 1,
            "tot_num_mpiprocs": 1,
        },
        "max_wallclock_seconds": 1 * 60 * 60,
        "withmpi": False,
    }

    builder.raspa_base.raspa.metadata.options = options
    builder.zeopp.metadata.options = options

    builder.structure = CifData(file=os.path.abspath('../aiida_matdis/data/HKUST-1.cif'), label="hkust1")

    # Two-component Xe/Kr mixture.
    builder.mixture = Dict(dict={
        'comp1': {
            'name': 'xenon',
            'molfraction': 0.20
        },
        'comp2': {
            'name': 'krypton',
            'molfraction': 0.80
        },
    })

    # Workchain parameters, reduced for a quick test run (see per-key notes).
    builder.parameters = Dict(
        dict={
            'ff_framework': 'UFF',  # Default: UFF
            'temperature_list': [273, 298],  # (K) Note: higher temperature will have less adsorbate and it is faster
            'zeopp_volpo_samples': 100,  # Default: 1e5 *NOTE: default is good for standard real-case!
            'zeopp_sa_samples': 100,  # Default: 1e5 *NOTE: default is good for standard real-case!
            'zeopp_block_samples': 100,  # Default: 100
            'raspa_widom_cycles': 500,  # Default: 1e5
            'raspa_gcmc_init_cycles': 500,  # Default: 1e3
            'raspa_gcmc_prod_cycles': 500,  # Default: 1e4
            'pressure_list': [0.1, 1.0],
            'lcd_max': 15.0,
            'pld_min': 3.5,
            'probe_based': False,
        })

    run(builder)
def main(raspa_code_label, cp2k_code_label):
    """Prepare inputs and submit the work chain."""
    print("Testing BindingSite work chain (FF + DFT) for CO2 in Zn-MOF-74 ...")
    print("[NOTE: this test will run on 4 cpus and take ca. 10 minutes]")

    builder = BindingSiteWorkChain.get_builder()
    builder.metadata.label = "test"
    # Codes for the two stages: RASPA (force field) and CP2K (DFT).
    builder.raspa_base.raspa.code = Code.get_from_string(raspa_code_label)
    builder.cp2k_base.cp2k.code = Code.get_from_string(cp2k_code_label)
    builder.raspa_base.raspa.metadata.options = {
        "resources": {
            "num_machines": 1,
            "tot_num_mpiprocs": 1,
        },
        "max_wallclock_seconds": 1 * 10 * 60,
    }
    # CP2K gets 4 MPI procs (hence the "4 cpus" note above).
    builder.cp2k_base.cp2k.metadata.options = {
        "resources": {
            "num_machines": 1,
            "tot_num_mpiprocs": 4,
        },
        "max_wallclock_seconds": 1 * 10 * 60,
    }
    builder.structure = CifData(file=os.path.abspath('data/Zn-MOF-74.cif'), label="Zn-MOF-74")
    builder.molecule = Str('co2')
    builder.parameters = Dict(
        dict={
            "ff_framework": "UFF",  # (str) Forcefield of the structure.
            "mc_steps": int(10),  # (int) Number of MC cycles.
            "temperature_list": [300, 150],
        })
    builder.protocol_tag = Str('test')
    builder.cp2k_base.cp2k.parameters = Dict(dict={
        # Lowering CP2K default setting for a faster test calculation
        'FORCE_EVAL': {
            'DFT': {
                'SCF': {
                    'EPS_SCF': 1.0E-4,
                    'OUTER_SCF': {
                        'EPS_SCF': 1.0E-4,
                    },
                },
            },
        },
        'MOTION': {
            'GEO_OPT': {
                'MAX_ITER': 5
            }
        },
    })
    run(builder)
def exists(self):
    """Returns True if the code exists, returns False otherwise."""
    from aiida.common import MultipleObjectsError, NotExistent
    # An empty label cannot match anything.
    if not self.label:
        return False
    try:
        Code.get_from_string(
            f"{self.label}@{self.inp_computer.selected_computer.label}")
    except NotExistent:
        return False
    except MultipleObjectsError:
        # Ambiguous label: at least one such code exists.
        return True
    return True
def get_code(self, code_id):
    """
    Get a Computer object with given identifier, that can either be
    the numeric ID (pk), or the label (if unique).

    .. note:: Since all command line arguments get converted to string types, we
        cannot assess the intended type (an integer pk or a string label) from the
        type of the variable code_id. If the code_id can be converted into an integer
        we will assume the value corresponds to a pk. This means, however, that if there
        would be another code, with a label directly equivalent to the string value of that
        pk, that this code can not be referenced by its label, as the other code, corresponding
        to the integer pk, will get matched first.
    """
    # NOTE(review): Python 2 code (``print >>``, ``e.message``); exits the
    # process with status 1 on any lookup failure instead of raising.
    from aiida.common.exceptions import NotExistent, MultipleObjectsError, InputValidationError
    from aiida.orm import Code as AiidaOrmCode

    try:
        # Interpret the identifier as an integer pk first (see note above).
        pk = int(code_id)
        try:
            return AiidaOrmCode.get(pk=pk)
        except (NotExistent, MultipleObjectsError, InputValidationError) as e:
            print >> sys.stderr, e.message
            sys.exit(1)
    except ValueError:
        # Not an integer: treat the identifier as a (unique) label.
        try:
            return AiidaOrmCode.get_from_string(code_id)
        except (NotExistent, MultipleObjectsError) as e:
            print >> sys.stderr, e.message
            sys.exit(1)
def generate_calculation_lammps(self, structure, parameters, type='optimize', pressure=0.0):
    """Create and store a LAMMPS calculation for the given structure.

    Returns ``None`` for the unsupported ``born_charges`` type; otherwise a
    stored calculation node ready for submission.
    """
    # Born-charge runs are not handled by the LAMMPS plugin.
    if type == 'born_charges':
        return None

    code = Code.get_from_string(parameters['code'])
    calc = code.new_calc(max_wallclock_seconds=3600,
                         resources=parameters['resources'])
    calc.label = "test lammps calculation"
    calc.description = "A much longer description"
    calc.use_code(code)
    calc.use_structure(structure)
    calc.use_potential(ParameterData(dict=parameters['potential']))

    if type == 'optimize':
        # Inject the target pressure (kb) into the plugin parameters.
        lammps_opts = dict(parameters['parameters'])
        lammps_opts['pressure'] = pressure
        calc.use_parameters(ParameterData(dict=lammps_opts))

    calc.store_all()
    return calc
def launch_aiida_bulk_modulus(structure, code_string, resources,
                              label="AlN VASP relax calculation"):
    """Submit a ``vasp_bm.bulkmodulus`` workflow for the given structure.

    :param structure: structure node to relax
    :param code_string: code label@computer string for the VASP code
    :param resources: scheduler resources dict
    :param label: metadata label and description for the workflow node
    :return: the submitted workflow node
    """
    # VASP INCAR settings for the relaxation.
    incar_dict = {
        'PREC': 'Accurate',
        'EDIFF': 1e-8,
        'NELMIN': 5,
        'NELM': 100,
        'ENCUT': 500,
        'IALGO': 38,
        'ISMEAR': 0,
        'SIGMA': 0.01,
        'GGA': 'PS',
        'LREAL': False,
        'LCHARG': False,
        'LWAVE': False,
    }
    kpoints = KpointsData()
    kpoints.set_kpoints_mesh([6, 6, 4], offset=[0, 0, 0.5])
    options = {'resources': resources, 'max_wallclock_seconds': 3600 * 10}
    potential_family = 'PBE.54'
    potential_mapping = {'Al': 'Al', 'N': 'N'}
    # Ask the parser to extract energies, forces and stress.
    parser_settings = {
        'add_energies': True,
        'add_forces': True,
        'add_stress': True
    }
    code = Code.get_from_string(code_string)
    Workflow = WorkflowFactory('vasp_bm.bulkmodulus')
    builder = Workflow.get_builder()
    builder.code = code
    builder.parameters = Dict(dict=incar_dict)
    builder.structure = structure
    builder.settings = Dict(dict={'parser_settings': parser_settings})
    builder.potential_family = Str(potential_family)
    builder.potential_mapping = Dict(dict=potential_mapping)
    builder.kpoints = kpoints
    builder.options = Dict(dict=options)
    builder.metadata.label = label
    builder.metadata.description = label
    builder.clean_workdir = Bool(False)
    # Relaxation settings: positions, shape and volume all free, with a
    # volume-convergence loop of at most 2 iterations.
    builder.relax = Bool(True)
    builder.force_cutoff = Float(1e-8)
    builder.steps = Int(10)
    builder.positions = Bool(True)
    builder.shape = Bool(True)
    builder.volume = Bool(True)
    builder.convergence_on = Bool(True)
    builder.convergence_volume = Float(1e-8)
    builder.convergence_max_iterations = Int(2)
    builder.verbose = Bool(True)
    node = submit(builder)
    return node
def get_lapw_calculation(self, lapw_structure, lapw_parameters, lapw_kpoint):
    """Create, store and configure an exciting LAPW calculation node."""
    params = self.get_parameters()

    # Resolve the code and put the calculation on its remote computer.
    code = Code.get_from_string(params['lapw_codename'])
    calc = CalculationFactory('exciting.exciting')(
        computer=code.get_remote_computer())

    calc.set_max_wallclock_seconds(params['max_wallclock_seconds'])
    calc.set_resources({"num_machines": params['num_machines']})
    calc.store()

    # Wire up all inputs after storing (legacy workflow API).
    calc.use_code(code)
    calc.use_structure(lapw_structure)
    calc.use_lapwbasis_from_family(params['lapwbasis_family'])
    calc.use_parameters(lapw_parameters)
    calc.use_kpoints(lapw_kpoint)
    return calc
def main(pot_family, import_from, queue, code, computer, no_import):
    """Assemble and submit a ``vasp.base`` workflow for a silicon test system."""
    load_dbenv_if_not_loaded()
    from aiida.orm import WorkflowFactory, Code
    from aiida.work import submit

    # Optionally import the POTCAR family before running.
    if not no_import:
        click.echo('importing POTCAR files...')
        with cli_spinner():
            import_pots(import_from, pot_family)

    inputs = AttributeDict()
    inputs.structure = create_structure_Si()
    inputs.kpoints = create_kpoints()
    inputs.incar = create_params_simple()
    inputs.code = Code.get_from_string('{}@{}'.format(code, computer))
    inputs.potcar_family = get_data_node('str', pot_family)
    inputs.potcar_mapping = get_data_node('parameter', dict={'Si': 'Si'})

    # Scheduler options: one machine, 4 MPI processes.
    options = AttributeDict()
    options.queue_name = queue
    options.resources = {'num_machines': 1, 'num_mpiprocs_per_machine': 4}
    inputs.options = get_data_node('parameter', dict=options)

    submit(WorkflowFactory('vasp.base'), **inputs)
def main(code_string, datafiles, parameters):
    """Main method to setup the calculation."""
    # Resolve the AiiDA data type and workchain entry points.
    dict_data = DataFactory('dict')
    workchain = WorkflowFactory('logger.gc_example')

    # Collect every workchain input on a single attribute dictionary.
    inputs = AttributeDict()
    inputs.code = Code.get_from_string(code_string)
    inputs.datafiles = datafiles
    inputs.parameters = dict_data(dict=parameters)
    # Request more explicit output in the workchain report.
    inputs.verbose = Bool(True)

    # Submit the requested workchain with the supplied inputs
    run(workchain, **inputs)
def main(codelabel):
    """Run failing calculation"""
    try:
        code = Code.get_from_string(codelabel)
    except NotExistent:
        print("The code '{}' does not exist".format(codelabel))
        sys.exit(1)

    print("Testing CP2K failure...")

    # Deliberately invalid CP2K input: an unknown GLOBAL keyword.
    parameters = Dict(dict={'GLOBAL': {'FOO_BAR_QUUX': 42}})
    options = {
        "resources": {
            "num_machines": 1,
            "num_mpiprocs_per_machine": 1,
        },
        "max_wallclock_seconds": 1 * 2 * 60,
    }

    print("Submitted calculation...")
    try:
        run(Cp2kCalculation,
            parameters=parameters,
            code=code,
            metadata={'options': options})
        # Reaching this point means the broken input was not detected.
        print("ERROR!")
        print("CP2K failure was not recognized")
        sys.exit(3)
    except OutputParsingError:
        print("CP2K failure correctly recognized")
        sys.exit(0)
def test_duplicate(cmd_test_env):
    """Test the duplicate command"""
    from aiida.orm import Computer, Code
    _ = cmd_test_env
    runner = CliRunner()

    # Default suffix ('fw') gets the default fireworks scheduler.
    runner.invoke(duplicate_fe, ["-Y", "localhost"], catch_exceptions=False)
    default_dup = Computer.get(label='localhost-fw')
    assert default_dup
    assert default_dup.scheduler_type == "fireworks_scheduler.default"

    # A custom suffix is honoured.
    runner.invoke(duplicate_fe, ["-Y", "localhost", "--suffix", 'fe'],
                  catch_exceptions=False)
    assert Computer.get(label='localhost-fe')

    # --include-codes duplicates the codes onto the new computer too.
    runner.invoke(duplicate_fe,
                  ["-Y", "localhost", "--suffix", 'fc', '--include-codes'],
                  catch_exceptions=False)
    assert Computer.get(label='localhost-fc')
    assert Code.get_from_string("bash@localhost-fc")

    # --job-should-keep-env selects the keep-environment scheduler.
    runner.invoke(
        duplicate_fe,
        ["-Y", "localhost", "--job-should-keep-env", "--suffix", 'env'],
        catch_exceptions=False)
    env_dup = Computer.get(label='localhost-env')
    assert env_dup
    assert env_dup.scheduler_type == "fireworks_scheduler.keepenv"
def get_pw_calculation(self, pw_structure, pw_parameters, pw_kpoint):
    """Create, store and configure a Quantum ESPRESSO pw.x calculation node."""
    params = self.get_parameters()

    # Resolve the code and put the calculation on its remote computer.
    code = Code.get_from_string(params['pw_codename'])
    calc = CalculationFactory('quantumespresso.pw')(
        computer=code.get_remote_computer())

    calc.set_max_wallclock_seconds(params['max_wallclock_seconds'])
    calc.set_resources({"num_machines": params['num_machines']})
    calc.store()

    # Wire up all inputs after storing (legacy workflow API).
    calc.use_code(code)
    calc.use_structure(pw_structure)
    calc.use_pseudos_from_family(params['pseudo_family'])
    calc.use_parameters(pw_parameters)
    calc.use_kpoints(pw_kpoint)
    return calc
def inner(inputs=None, settings=None):
    """Run a mock VASP calculation and return ``(results, node)``."""
    from aiida.plugins import CalculationFactory
    from aiida.engine import run

    # Make the mock code usable: store it and authorize its computer.
    mock_vasp.store()
    create_authinfo(computer=mock_vasp.computer, store=True)
    kpoints, _ = vasp_kpoints

    run_inputs = AttributeDict()
    run_inputs.code = Code.get_from_string('mock-vasp@localhost')
    run_inputs.structure = vasp_structure
    run_inputs.parameters = vasp_params
    run_inputs.kpoints = kpoints
    run_inputs.potential = get_data_class(
        'vasp.potcar').get_potcars_from_structure(
            structure=run_inputs.structure,
            family_name=POTCAR_FAMILY_NAME,
            mapping=POTCAR_MAP)
    run_inputs.metadata = {
        'options': {
            'withmpi': False,
            'queue_name': 'None',
            'resources': {
                'num_machines': 1,
                'num_mpiprocs_per_machine': 1
            },
            'max_wallclock_seconds': 3600
        }
    }
    # Caller-supplied inputs override the defaults above.
    if inputs is not None:
        run_inputs.update(inputs)

    return run.get_node(CalculationFactory('vasp.vasp'), **run_inputs)
def test_code_list(self):
    """Check filtering of the code list by input plugin and computer."""
    # Make sure a second code 'code2' exists (create it on first run).
    from aiida.orm import Code
    try:
        code = Code.get_from_string('code2')
    except NotExistent:
        code = Code(
            input_plugin_name='simpleplugins.templatereplacer',
            remote_computer_exec=[self.comp, '/remote/abs/path'],
        )
        code.label = 'code2'
        code.store()

    # Filter on the arithmetic plugin: only the first code should match.
    result = self.runner.invoke(code_list, [
        '-A', '-a', '-o',
        '--input-plugin=simpleplugins.arithmetic.add',
        '--computer={}'.format(self.comp.name)
    ])
    self.assertIsNone(result.exception)
    self.assertTrue(
        str(self.code.pk) in result.output,
        'PK of first code should be included')
    self.assertTrue('code2' not in result.output,
                    'label of second code should not be included')
    self.assertTrue('comp' in result.output,
                    'computer name should be included')
def main(codelabel):
    """Example usage: verdi run thistest.py cp2k@localhost"""
    print("Testing CP2K multistage workchain on H2O")
    print(">>> Using 'singlepoint' tag, with no output structure")
    code = Code.get_from_string(codelabel)

    # Water molecule centred in a vacuum-padded box.
    atoms = ase.build.molecule('H2O')
    atoms.center(vacuum=2.0)
    structure = StructureData(ase=atoms)

    options = {
        "resources": {
            "num_machines": 1,
            "num_mpiprocs_per_machine": 1,
        },
        "max_wallclock_seconds": 1 * 3 * 60,
    }
    run(Cp2kMultistageWorkChain,
        structure=structure,
        protocol_tag=Str('singlepoint'),
        cp2k_base={
            'cp2k': {
                'code': code,
                'metadata': {'options': options}
            }
        })
def generate_calculation(self, structure, parameters, type='optimize'):
    """Dispatch calculation generation to the generator for the code's plugin."""
    code = Code.get_from_string(parameters['code'])
    # The plugin family is the first component of the input-plugin string.
    plugin = code.get_attrs()['input_plugin'].split('.')[0]
    pressure = self.get_attribute('pressure')

    dispatch = {
        'lammps': self.generate_calculation_lammps,
        'vasp': self.generate_calculation_vasp,
        'quantumespresso': self.generate_calculation_qe,
    }
    generator = dispatch.get(plugin)
    if generator is None:
        # Unknown plugin: report and terminate the workflow.
        self.append_to_report(
            'The plugin: {}, is not implemented in this workflow'.format(
                plugin))
        self.next(self.exit)
        return None
    return generator(structure, parameters, type=type, pressure=pressure)
def wannrun(self):
    """Workflow step: run wannier90 using the settings/data produced by the
    preceding amn step, then finish the workflow.

    Legacy workflow API: retrieves the amn calculation recorded in the
    ``amnstep`` attribute, builds a ``vasp.wannier`` calculation from its
    outputs, attaches it, records it as the ``wannier_run`` result and
    advances to ``exit``.
    """
    params = self.get_parameters()
    # Locate the amn calculation stored by the previous step.
    amnstep = self.get_attributes()['amnstep']
    prev = self.get_step_calculations(self.amnrun)
    amncalc = prev.get(uuid=amnstep['uuid'])
    self.append_to_report('{}: retrieved amn calculation'.format(
        params.get('name')))
    # Build the wannier calculation from the amn outputs.
    calc = CalculationFactory('vasp.wannier')()
    code = Code.get_from_string(params['wannier_x'])
    calc.use_code(code)
    calc.set_computer(code.get_computer())
    calc.use_settings(amncalc.inp.wannier_settings)
    calc.use_data(amncalc.out.wannier_data)
    calc.label = params.get('name') + ': wannier run'
    calc.set_resources({'num_machines': 1})
    # Reuse the queue of the amn calculation.
    calc.set_queue_name(amncalc.get_queue_name())
    calc.description = calc.label
    calc.store_all()
    calc.set_extras({'experiment': 'tight-binding'})
    self.group.add_nodes(calc)
    self.attach_calculation(calc)
    self.add_result('wannier_run', calc)
    self.append_to_report(
        '{}: starting wannier run, PK={}, uuid={}'.format(
            params.get('name'), calc.pk, calc.uuid))
    # Remember this step's calculation for potential later steps.
    self.add_attributes({'wannstep': {'pk': calc.pk, 'uuid': calc.uuid}})
    self.next(self.exit)
def prepare_code(codename, codelocation, computername, workdir):
    """Ensure the requested KKR-family code exists on the given computer,
    creating computer and code nodes as needed."""
    # The code needs a computer to live on; create/load it first.
    comp = prepare_computer(computername, workdir)

    # Map each known code name onto (executable, input plugin).
    known_codes = {
        'kkrhost': ('kkr.x', 'kkr.kkr'),
        'voronoi': ('voronoi.exe', 'kkr.voro'),
        'kkrimp': ('kkrflex.exe', 'kkr.kkrimp'),
    }
    if codename not in known_codes:
        raise ValueError('unknown codename')
    execname, pluginname = known_codes[codename]

    # Create the code only if it is not yet in the database.
    from aiida.orm import Code
    from aiida.common.exceptions import NotExistent
    try:
        Code.get_from_string(codename + '@' + computername)
    except NotExistent:
        code = Code()
        code.label = codename
        code.description = ''
        code.set_remote_computer_exec((comp, codelocation + execname))
        code.set_input_plugin_name(pluginname)
        if codename == 'voronoi':
            # Voronoi expects its element database next to the run directory.
            code.set_prepend_text('ln -s ' + codelocation + 'ElementDataBase .')
        code.store()
def start(self):
    '''
    prepare, store and attach the selfconsistent run
    to get the charge density.
    '''
    params = self.get_parameters()
    # Configure a VASP self-consistent calculation through the maker helper.
    maker = VaspMaker(structure=params['structure'], calc_cls='vasp.scf')
    maker.rewrite_settings(**params['settings'])
    kp = params['kpmesh']
    maker.kpoints.set_kpoints_mesh(kp)
    maker.code = Code.get_from_string(params['vasp'])
    maker.queue = params['queue']
    # Fixed resource footprint: 4 machines x 2 MPI procs each.
    maker.resources['num_machines'] = 4
    maker.resources['num_mpiprocs_per_machine'] = 2
    maker.label = params.get('name') + ': sc run'
    calc = maker.new()
    calc.description = '{}: selfconsistent run'.format(params.get('name'))
    calc.store_all()
    calc.set_extras({'experiment': 'tight-binding'})
    self.group.add_nodes(calc)
    self.attach_calculation(calc)
    self.append_to_report(
        '{}: starting selfconsistent run, PK={}, uuid={}'.format(
            params.get('name'), calc.pk, calc.uuid))
    # Remember this step's calculation for the following window run.
    self.add_attributes({'scstep': {'pk': calc.pk, 'uuid': calc.uuid}})
    self.next(self.winrun)
def get_code(entry_point, computer_name='localhost'):
    """Get local code.

    Sets up code for given entry point on given computer.

    :param entry_point: Entry point of calculation plugin
    :param computer_name: Name of (local) computer
    :return: The code node
    :rtype: :py:class:`aiida.orm.Code`
    """
    from aiida.orm import Code
    from aiida.common.exceptions import NotExistent

    computer = get_computer(computer_name)

    # Map the entry point onto its executable name.
    if entry_point not in executables:
        raise KeyError(
            "Entry point {} not recognized. Allowed values: {}".format(
                entry_point, executables.keys()))
    executable = executables[entry_point]

    try:
        return Code.get_from_string('{}@{}'.format(executable, computer_name))
    except NotExistent:
        # First use: create and store the code node.
        path = get_path_to_executable(executable)
        code = Code(
            input_plugin_name=entry_point,
            remote_computer_exec=[computer, path],
        )
        code.label = executable
        code.store()
        return code
def test_kkrflex_writeout_wc(self):
    """
    simple Cu noSOC, FP, lmax2 full example using scf workflow
    """
    from aiida.orm import Code, load_node
    from aiida.plugins import DataFactory
    from masci_tools.io.kkr_params import kkrparams
    from aiida_kkr.workflows.gf_writeout import kkr_flex_wc
    from numpy import array
    import os

    Dict = DataFactory('dict')
    StructureData = DataFactory('structure')

    # prepare computer and code (needed so that
    prepare_code(kkr_codename, codelocation, computername, workdir)

    # here we create a parameter node for the workflow input (workflow specific parameter) and adjust the convergence criterion.
    # NOTE(review): wfd (workflow defaults) is retrieved but never used below.
    wfd = kkr_flex_wc.get_wf_defaults()

    options = {'queue_name': queuename, 'resources': {"num_machines": 1},
               'max_wallclock_seconds': 5 * 60,
               'custom_scheduler_commands': '', 'use_mpi': False}
    options = Dict(dict=options)

    # The scf-workflow needs also the voronoi and KKR codes to be able to run the calulations
    KKRCode = Code.get_from_string(kkr_codename + '@' + computername)

    # Impurity description: Cu (Z=29) at the central layer within Rcut.
    imp_info = Dict(dict={'Rcut': 2.5533, 'ilayer_center': 0, 'Zimp': [29.]})

    label = 'GF_writeout Cu bulk'
    descr = 'GF_writeout workflow for Cu bulk'

    # Import a pre-computed KKR calculation to use its remote folder as parent.
    from aiida.tools.importexport import import_data
    import_data('files/db_dump_kkrcalc.tar.gz')
    kkr_calc_remote = load_node(
        '3058bd6c-de0b-400e-aff5-2331a5f5d566').outputs.remote_folder

    # create process builder to set parameters
    builder = kkr_flex_wc.get_builder()
    builder.metadata.description = descr
    builder.metadata.label = label
    builder.kkr = KKRCode
    builder.options = options
    builder.remote_data = kkr_calc_remote
    builder.impurity_info = imp_info

    # now run calculation
    from aiida.engine import run
    out = run(builder)

    # Check that the workflow reports success and no errors.
    n = out['workflow_info']
    n = n.get_dict()
    assert n.get('successful')
    assert n.get('list_of_errors') == []

    d = out['GF_host_remote']
    assert isinstance(d, DataFactory('remote'))

    # All expected kkrflex_* output files must have been retrieved.
    kkrflex_calc = load_node(n.get('pk_flexcalc'))
    kkrflex_retrieved = kkrflex_calc.outputs.retrieved
    for name in 'tmat green atominfo intercell_cmoms intercell_ref'.split():
        assert 'kkrflex_' + name in kkrflex_retrieved.list_object_names()
def test_kkr_from_kkr(self):
    """
    continue KKR calculation after a previous KKR calculation
    instead of starting from voronoi
    """
    from aiida.orm import Code, load_node
    from aiida.plugins import DataFactory
    from masci_tools.io.kkr_params import kkrparams
    from aiida_kkr.calculations.kkr import KkrCalculation
    Dict = DataFactory('dict')

    # load necessary files from db_dump files
    from aiida.tools.importexport import import_data
    import_data('files/db_dump_kkrcalc.tar.gz')
    kkr_calc = load_node('3058bd6c-de0b-400e-aff5-2331a5f5d566')

    # prepare computer and code (needed so that
    prepare_code(kkr_codename, codelocation, computername, workdir)

    # extract KKR parameter (add missing values)
    params_node = kkr_calc.inputs.parameters

    # load code from database and create new voronoi calculation
    code = Code.get_from_string(kkr_codename + '@' + computername)

    options = {'resources': {'num_machines': 1, 'tot_num_mpiprocs': 1},
               'queue_name': queuename}

    # Continue from the previous KKR run via its remote folder; dry_run so
    # the test only prepares inputs without executing the code.
    builder = KkrCalculation.get_builder()
    builder.code = code
    builder.metadata.options = options
    builder.parameters = params_node
    builder.parent_folder = kkr_calc.outputs.remote_folder
    builder.metadata.dry_run = True

    from aiida.engine import run
    run(builder)
def test_strained_fp_tb(
        configure_with_daemon,  # pylint: disable=unused-argument
        get_optimize_fp_tb_input,  # pylint: disable=redefined-outer-name
):
    """
    Run the DFT tight-binding optimization workflow with strain on an InSb sample for three strain values.
    """
    from aiida.engine import run
    from aiida.orm import Code
    from aiida.orm import Str, List
    from aiida_tbextraction.optimize_strained_fp_tb import OptimizeStrainedFirstPrinciplesTightBinding

    # Three strain values: compressive, none, tensile.
    strain_list = [-0.1, 0., 0.1]
    strain_strengths = List()
    strain_strengths.extend(strain_list)

    inputs = get_optimize_fp_tb_input
    inputs['strain_kind'] = Str('three_five.Biaxial001')
    inputs['strain_parameters'] = Str('InSb')
    inputs['strain_strengths'] = strain_strengths
    inputs['symmetry_repr_code'] = Code.get_from_string('symmetry_repr')

    result = run(OptimizeStrainedFirstPrinciplesTightBinding, **inputs)
    print(result)

    # Every strain value must produce a cost value, a TB model and a window.
    for value in strain_list:
        suffix = '_{}'.format(value).replace('.', '_dot_')
        for key in ('cost_value', 'tb_model', 'window'):
            assert key + suffix in result
def main(raspa_code_label):
    """Prepare inputs and submit the work chain."""
    builder = SimAnnealingWorkChain.get_builder()
    builder.metadata.label = "test"
    builder.raspa_base.raspa.code = Code.get_from_string(raspa_code_label)
    builder.raspa_base.raspa.metadata.options = {
        "resources": {
            "num_machines": 1,
            "tot_num_mpiprocs": 1,
        },
        "max_wallclock_seconds": 1 * 60 * 60,
    }
    builder.structure = CifData(file=os.path.abspath('data/HKUST-1.cif'),
                                label="HKUST-1")
    builder.molecule = Str('co2')
    builder.parameters = Dict(
        dict={
            "ff_framework": "UFF",  # (str) Forcefield of the structure.
            "temperature_list": [
                300, 200, 100
            ],  # (list) List of decreasing temperatures for the annealing.
            "mc_steps": int(10),  # (int) Number of MC cycles.
            "number_of_molecules": 3  # (int) Number of molecules loaded in the framework.
        })
    run(builder)
#!/usr/bin/env python #from aiida.backends.utils import load_dbenv #load_dbenv() codename = 'QE5.4@daint-gpu' from aiida.orm import Code code = Code.get_from_string(codename) from aiida.orm import DataFactory StructureData = DataFactory('structure') alat = 5.4 # angstrom cell = [[alat, 0.10, 0.20,], [0.30, alat, 0.40,], [0.50, 0.60, alat,], ] s = StructureData(cell=cell) s.append_atom(position=(0.,0.,0.),symbols='Si') s.append_atom(position=(alat/2.,alat/2.,0.),symbols='Si') s.append_atom(position=(alat/2.,0.,alat/2.),symbols='Si') s.append_atom(position=(0.,alat/2.,alat/2.),symbols='Si') s.append_atom(position=(alat/4.,alat/4.,alat/4.),symbols='Si') s.append_atom(position=(3.*alat/4.,3.*alat/4.,alat/4.),symbols='Si') s.append_atom(position=(3.*alat/4.,alat/4.,3.*alat/4.),symbols='Si') s.append_atom(position=(alat/4.,3.*alat/4.,3.*alat/4.),symbols='Si') ParameterData = DataFactory('parameter') parameters = ParameterData(dict={ 'CONTROL': { 'calculation': 'relax', 'restart_mode': 'from_scratch', 'wf_collect': True, 'tprnfor': True, 'tstress': True, },