Example #1
 def define(cls, spec):
     super(BaseRestartWorkChain, cls).define(spec)
     spec.input('max_iterations', valid_type=Int, default=Int(5),
         help='maximum number of iterations the workchain will restart the calculation to finish successfully')
     spec.input('clean_workdir', valid_type=Bool, default=Bool(False),
         help='if True, work directories of all called calculation will be cleaned at the end of execution')
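     # Exit codes for the known failure modes of the restart cycle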
     spec.exit_code(100, 'ERROR_ITERATION_RETURNED_NO_CALCULATION',
         message='the run_calculation step did not successfully add a calculation node to the context')
     spec.exit_code(101, 'ERROR_MAXIMUM_ITERATIONS_EXCEEDED',
         message='the maximum number of iterations was exceeded')
     spec.exit_code(102, 'ERROR_UNEXPECTED_CALCULATION_STATE',
         message='the calculation finished with an unexpected calculation state')
     spec.exit_code(103, 'ERROR_SECOND_CONSECUTIVE_SUBMISSION_FAILURE',
         message='the calculation failed to submit, twice in a row')
     spec.exit_code(104, 'ERROR_SECOND_CONSECUTIVE_UNHANDLED_FAILURE',
         message='the calculation failed for an unknown reason, twice in a row')
Example #2
    def define(cls, spec):
        super(PdosWorkChain, cls).define(spec)

        spec.input("cp2k_code", valid_type=Code)
        spec.input("slabsys_structure", valid_type=StructureData)
        spec.input("mol_structure", valid_type=StructureData)
        spec.input("pdos_lists", valid_type=List)
        spec.input("mgrid_cutoff", valid_type=Int, default=Int(600))
        spec.input("wfn_file_path", valid_type=Str, default=Str(""))
        spec.input("elpa_switch", valid_type=Bool, default=Bool(True))

        spec.input("overlap_code", valid_type=Code)
        spec.input("overlap_params", valid_type=ParameterData)

        spec.outline(cls.run_scfs, cls.run_overlap)

        spec.dynamic_output()
Example #3
    def define(cls, spec):
        super(QHAPhonopy, cls).define(spec)
        spec.input("structure", valid_type=StructureData)
        spec.input("ph_settings", valid_type=ParameterData)
        spec.input("es_settings", valid_type=ParameterData)
        # Optional arguments
        spec.input("num_expansions",
                   valid_type=Int,
                   required=False,
                   default=Int(10))
        spec.input("use_nac",
                   valid_type=Bool,
                   required=False,
                   default=Bool(True))

        spec.outline(cls.get_gruneisen_prediction,
                     cls.create_unit_cell_expansions, cls.calculate_qha)
Example #4
    def get_data_sets(self):

        print('cutoff: {}'.format(self.ctx.cutoff))

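        # Submit the PhononPhono3py workchain and wait for it under the 'anharmonic' context key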
        future = submit(
            PhononPhono3py,
            structure=self.ctx.final_structure,
            ph_settings=self.inputs.ph_settings,
            es_settings=self.inputs.es_settings,
            optimize=Bool(False),
            cutoff=Float(self.ctx.cutoff),
            chunks=self.inputs.chunks,
            data_sets=self.ctx.input_data_sets,
            # data_sets=load_node(81481), #  Test purposes only
        )

        print('start phonon3 (pk={})'.format(self.pid))
        return ToContext(anharmonic=future)
Example #5
    def create_unit_cell_expansions(self):

        print('start Gruneisen (pk={})'.format(self.pid))
        prediction = self.ctx.gruneisen.out.prediction
        stress_range = prediction.dict.stress_range
        stress_delta = stress_range[-1] - stress_range[0]

        stress_samples = np.linspace(stress_range[0] - stress_delta * 0.5,
                                     stress_range[-1] + stress_delta * 0.5,
                                     self.inputs.num_expansions)

        print(prediction.dict.stress_range)
        print(prediction.dict.volume_range)

        # For testing
        if __testing__:
            self.ctx._content['phonon_0'] = load_node(19218)
            self.ctx._content['phonon_1'] = load_node(19221)
            self.ctx._content['phonon_2'] = load_node(19224)
            self.ctx._content['phonon_3'] = load_node(19227)
            self.ctx._content['phonon_4'] = load_node(19230)
            self.ctx._content['phonon_5'] = load_node(19233)
            self.ctx._content['phonon_6'] = load_node(19236)
            self.ctx._content['phonon_7'] = load_node(19239)
            self.ctx._content['phonon_8'] = load_node(19242)
            self.ctx._content['phonon_9'] = load_node(19245)
            return

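        # Launch one PhononPhonopy workchain per stress sample and collect the futures in the context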
        calcs = {}
        for i, stress in enumerate(stress_samples):

            future = submit(PhononPhonopy,
                            structure=self.inputs.structure,
                            ph_settings=self.inputs.ph_settings,
                            es_settings=self.inputs.es_settings,
                            pressure=Float(stress),
                            optimize=Bool(True),
                            use_nac=self.inputs.use_nac)

            calcs['phonon_{}'.format(i)] = future
            print('phonon workchain: {} {}'.format('phonon_{}'.format(i),
                                                   future.pid))

        return ToContext(**calcs)
Example #6
 def define(cls, spec):
     super(STMWorkChain, cls).define(spec)
     
     spec.input("cp2k_code", valid_type=Code)
     spec.input("structure", valid_type=StructureData)
     spec.input("cell", valid_type=ArrayData)
     spec.input("mgrid_cutoff", valid_type=Int, default=Int(600))
     spec.input("wfn_file_path", valid_type=Str, default=Str(""))
     spec.input("elpa_switch", valid_type=Bool, default=Bool(True))
     
     spec.input("stm_code", valid_type=Code)
     spec.input("stm_params", valid_type=ParameterData)
     
     spec.outline(
         cls.run_scf_diag,
         cls.run_stm,
     )
     
     spec.dynamic_output()
Example #7
 def define(cls, spec):
     """
     Workfunction definition
     """
     super(PwRestartWf, cls).define(spec)
     spec.input("codename", valid_type=BaseType)
     spec.input("pseudo_family", valid_type=Str)
     spec.input("calculation_set", valid_type=ParameterData
                )  # custom_scheduler_commands,  resources,...
     spec.input("settings", valid_type=ParameterData)
     spec.input("structure", valid_type=StructureData)
     spec.input("kpoints", valid_type=KpointsData)
     spec.input("gamma", valid_type=Bool, default=Bool(0), required=False)
     spec.input("parameters", valid_type=ParameterData)
     spec.input("parameters_nscf", valid_type=ParameterData, required=False)
     spec.input("parent_folder", valid_type=RemoteData, required=False)
     spec.outline(cls.pwbegin,
                  while_(cls.pw_should_continue)(cls.pw_continue, ),
                  cls.report_wf)
     spec.dynamic_output()
Example #8
 def define(cls, spec):
     super(PwBandsWorkChain, cls).define(spec)
     spec.expose_inputs(PwRelaxWorkChain,
                        namespace='relax',
                        exclude=('structure', 'clean_workdir'))
     spec.expose_inputs(PwBaseWorkChain,
                        namespace='scf',
                        exclude=('structure', 'clean_workdir', 'kpoints'))
     spec.expose_inputs(PwBaseWorkChain,
                        namespace='bands',
                        exclude=('structure', 'clean_workdir'))
     spec.input('structure', valid_type=StructureData)
     spec.input('clean_workdir', valid_type=Bool, default=Bool(False))
     spec.input('group', valid_type=Str, required=False)
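     # Outline: optional relaxation, SeeK-path primitive cell and band path, SCF, then the bands calculation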
     spec.outline(
         cls.setup,
         if_(cls.should_do_relax)(
             cls.run_relax,
             cls.inspect_relax,
         ),
         cls.run_seekpath,
         cls.run_scf,
         cls.inspect_scf,
         cls.run_bands,
         cls.inspect_bands,
         cls.results,
     )
     spec.exit_code(401,
                    'ERROR_SUB_PROCESS_FAILED_RELAX',
                    message='the PwRelaxWorkChain sub process failed')
     spec.exit_code(402,
                    'ERROR_SUB_PROCESS_FAILED_SCF',
                    message='the scf PwBaseWorkChain sub process failed')
     spec.exit_code(403,
                    'ERROR_SUB_PROCESS_FAILED_BANDS',
                    message='the bands PwBaseWorkChain sub process failed')
     spec.output('primitive_structure', valid_type=StructureData)
     spec.output('seekpath_parameters', valid_type=ParameterData)
     spec.output('scf_parameters', valid_type=ParameterData)
     spec.output('band_parameters', valid_type=ParameterData)
     spec.output('band_structure', valid_type=BandsData)
Example #9
    def test_simple_kill_through_node(self):
        """
        Run the workchain for one step and then kill it by calling kill
        on the underlying WorkCalculation node. This should have the
        workchain end up in the ABORTED state.
        """
        engine = TickingEngine()
        future = engine.submit(TestWorkChainAbortChildren.MainWorkChain, {'kill': Bool(True)})

        while not future.done():
            engine.tick()

        child = future.process.calc.get_outputs(link_type=LinkType.CALL)[0]
        self.assertEquals(child.has_finished_ok(), False)
        self.assertEquals(child.has_failed(), False)
        self.assertEquals(child.has_aborted(), True)

        self.assertEquals(future.process.calc.has_finished_ok(), False)
        self.assertEquals(future.process.calc.has_failed(), False)
        self.assertEquals(future.process.calc.has_aborted(), True)
        engine.shutdown()
Example #10
 def define(cls, spec):
     super(PhBaseWorkChain, cls).define(spec)
     spec.input('code', valid_type=Code)
     spec.input('qpoints', valid_type=KpointsData)
     spec.input('parent_folder', valid_type=RemoteData)
     spec.input('parameters', valid_type=ParameterData, required=False)
     spec.input('settings', valid_type=ParameterData, required=False)
     spec.input('options', valid_type=ParameterData, required=False)
     spec.input('only_initialization', valid_type=Bool, default=Bool(False))
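     # Restart loop: keep launching calculations while should_run_calculation returns True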
     spec.outline(
         cls.setup,
         cls.validate_inputs,
         while_(cls.should_run_calculation)(
             cls.prepare_calculation,
             cls.run_calculation,
             cls.inspect_calculation,
         ),
         cls.results,
     )
     spec.output('output_parameters', valid_type=ParameterData)
     spec.output('remote_folder', valid_type=RemoteData)
     spec.output('retrieved', valid_type=FolderData)
Example #11
    def run_relax(self):
        """
        Run the SiestaBaseWorkChain to relax the input structure
        """
        self.report('Running run_relax')
        inputs = dict(self.ctx.inputs)

        # Final input preparation, wrapping dictionaries in ParameterData nodes.
        # The code and options were already set above, and the pseudos were set
        # in 'ctx.inputs', so they are present in 'inputs' already.
        
        inputs['kpoints'] = self.ctx.kpoints_mesh
        inputs['basis'] = ParameterData(dict=inputs['basis'])
        inputs['structure'] = self.ctx.structure_initial_primitive
        inputs['parameters'] = ParameterData(dict=inputs['parameters'])
        inputs['settings'] = ParameterData(dict=inputs['settings'])
        inputs['clean_workdir'] = Bool(False)
        inputs['max_iterations'] = Int(20)
        
        running = submit(SiestaBaseWorkChain, **inputs)
        self.report('launched SiestaBaseWorkChain<{}> in relaxation mode'.format(running.pid))
        
        return ToContext(workchain_relax=running)
Example #12
    def run_siesta(self):
        """
        Run the SiestaBaseWorkChain to calculate the input structure
        """
        self.report('Running run_siesta')

        rsi_inputs = dict(self.ctx.rsi_inputs)

        # Get the remote folder of the last calculation in the previous workchain

        rsi_inputs['kpoints'] = self.ctx.kpoints_mesh
        rsi_inputs['basis'] = ParameterData(dict=rsi_inputs['basis'])
        rsi_inputs['structure'] = self.ctx.structure_supercell
        rsi_inputs['parameters'] = ParameterData(dict=rsi_inputs['parameters'])
        rsi_inputs['settings'] = ParameterData(dict=rsi_inputs['settings'])
        rsi_inputs['clean_workdir'] = Bool(False)
        rsi_inputs['max_iterations'] = Int(20)
        
        running = submit(SiestaBaseWorkChain, **rsi_inputs)
        self.report('launched SiestaBaseWorkChain<{}> in run-Siesta mode'.format(running.pid))
        
        return ToContext(workchain_siesta=running)
Example #13
def launch(code, calculation, kpoints, max_num_machines, max_wallclock_seconds,
           daemon, clean_workdir):
    """
    Run the HpWorkChain for a completed Hubbard PwCalculation
    """
    from aiida.orm.data.base import Bool
    from aiida.orm.data.parameter import ParameterData
    from aiida.orm.utils import WorkflowFactory
    from aiida.work.launch import run, submit
    from aiida_quantumespresso.utils.resources import get_default_options

    HpBaseWorkChain = WorkflowFactory('quantumespresso.hp.base')

    parameters = {'INPUTHP': {}}

    inputs = {
        'code': code,
        'parent_calculation': calculation,
        'qpoints': kpoints,
        'parameters': ParameterData(dict=parameters),
        'options': ParameterData(dict=get_default_options(max_num_machines, max_wallclock_seconds)),
        'clean_workdir': Bool(clean_workdir),
    }

    if daemon:
        workchain = submit(HpBaseWorkChain, **inputs)
        click.echo('Submitted {}<{}> to the daemon'.format(
            HpBaseWorkChain.__name__, workchain.pk))
    else:
        run(HpBaseWorkChain, **inputs)
Example #14
 def define(cls, spec):
     super(PhBaseWorkChain, cls).define(spec)
     spec.input('code', valid_type=Code)
     spec.input('qpoints', valid_type=KpointsData)
     spec.input('parent_folder', valid_type=RemoteData)
     spec.input('parameters', valid_type=ParameterData, required=False)
     spec.input('settings', valid_type=ParameterData, required=False)
     spec.input('options', valid_type=ParameterData, required=False)
     spec.input('only_initialization', valid_type=Bool, default=Bool(False))
     spec.outline(
         cls.setup,
         cls.validate_inputs,
         while_(cls.should_run_calculation)(
             cls.prepare_calculation,
             cls.run_calculation,
             cls.inspect_calculation,
         ),
         cls.results,
     )
     spec.exit_code(402, 'ERROR_CALCULATION_INVALID_INPUT_FILE',
         message='the calculation failed because it had an invalid input file')
     spec.output('output_parameters', valid_type=ParameterData)
     spec.output('remote_folder', valid_type=RemoteData)
     spec.output('retrieved', valid_type=FolderData)
Example #15
        'potential': potential,
        'machine': machine_dict
    }

    es_settings = ParameterData(dict=settings_dict)

OptimizeStructure = WorkflowFactory('phonopy.optimize')

# Choose how to run the calculation
run_by_deamon = False
if not run_by_deamon:
    results = run(
        OptimizeStructure,
        structure=structure,
        es_settings=es_settings,
        # Optional settings
        pressure=Float(0.0),
        max_iterations=Int(3),
        tolerance_forces=Float(1e-5),
        tolerance_stress=Float(1e-2),
        standarize_cell=Bool(True))
else:
    future = submit(
        OptimizeStructure,
        structure=structure,
        es_settings=es_settings,
        # Optional settings
        # pressure=Float(10.0),
    )

    print('Running workchain with pk={}'.format(future.pid))
Example #16
    def define(cls, spec):
        """Workfunction definition

        Keyword arguments:
        restart_options_pw -- PW specific restart options (required)
        restart_options_gw -- GW specific restart options (required)
        codename_pw -- PW code name (required)
        codename_p2y -- P2Y code name (required)
        codename_yambo -- Yambo code name (required)
        pseudo_family -- pseudo name (required)
        calculation_set_pw -- scheduler settings {'resources':{...}}  for PW calculation (required)
        calculation_set_pw_nscf -- PW NSCF specific scheduler settings {'resources':{...}}  for PW calculation (required)
        calculation_set_p2y -- scheduler settings {'resources':{...}} for P2Y conversion (required)
        calculation_set_yambo -- scheduler settings {'resources':{...}} for Yambo calculation (required)
        settings_pw -- plugin settings for PW  (required)
        settings_pw_nscf -- PW NSCF specific  plugin settings  (required)
        settings_p2y -- settings for P2Y { "ADDITIONAL_RETRIEVE_LIST":[], 'INITIALISE':True}  (optional)
        settings_yambo -- settings for yambo { "ADDITIONAL_RETRIEVE_LIST":[] } (optional)
        structure -- Structure (required)
        kpoint_pw -- kpoints (optional)
        gamma_pw -- Whether it is a gamma point calculation (optional)
        parameters_pw -- PW SCF parameters (required)
        parameters_pw_nscf -- PW NSCF parameters (optional)
        parameters_p2y --  (required)
        parameters_yambo -- Parameters for Yambo (required)
        parent_folder -- Parent calculation (optional)
        previous_yambo_workchain -- Parent workchain (Yambo) (optional)
        to_set_qpkrange -- whether to set the QPkrange, override with defaults (optional)
        to_set_bands -- whether to set the bands, override with defaults (optional)
        bands_groupname --  (optional)
        """
        super(YamboWorkflow, cls).define(spec)
        spec.input("restart_options_pw", valid_type=ParameterData, required=False)
        spec.input("restart_options_gw", valid_type=ParameterData, required=False)
        spec.input("codename_pw", valid_type=Str)
        spec.input("codename_p2y", valid_type=Str)
        spec.input("codename_yambo", valid_type=Str)
        spec.input("pseudo_family", valid_type=Str)
        spec.input("calculation_set_pw", valid_type=ParameterData) # custom_scheduler_commands,  resources,...
        spec.input("calculation_set_pw_nscf", valid_type=ParameterData,required=False) # custom_scheduler_commands,  resources,...
        spec.input("calculation_set_p2y", valid_type=ParameterData)
        spec.input("calculation_set_yambo", valid_type=ParameterData)
        spec.input("settings_pw", valid_type=ParameterData)
        spec.input("settings_pw_nscf", valid_type=ParameterData,required=False)
        spec.input("settings_p2y", valid_type=ParameterData)
        spec.input("settings_yambo", valid_type=ParameterData)
        spec.input("structure", valid_type=StructureData)
        spec.input("kpoint_pw", valid_type=KpointsData)
        spec.input("kpoint_pw_nscf", valid_type=KpointsData,required=False)
        spec.input("gamma_pw", valid_type=Bool, default=Bool(0), required=False )
        spec.input("parameters_pw", valid_type=ParameterData)
        spec.input("parameters_pw_nscf", valid_type=ParameterData,required=False)
        spec.input("parameters_p2y", valid_type=ParameterData)
        spec.input("parameters_yambo", valid_type=ParameterData)
        spec.input("parent_folder", valid_type=RemoteData,required=False)
        spec.input("previous_yambo_workchain", valid_type=Str,required=False)
        spec.input("to_set_qpkrange", valid_type=Bool,required=False, default=Bool(0) )
        spec.input("to_set_bands", valid_type=Bool,required=False, default=Bool(0) )
        spec.input("bands_groupname", valid_type=Str, required=False)
        spec.outline(
            cls.start_workflow,
            while_(cls.can_continue)(
                cls.perform_next,
            ),
            cls.report_wf
        )
        spec.dynamic_output()
Example #17
 def define(cls, spec):
     super(TestWorkChainAbortChildren.MainWorkChain, cls).define(spec)
     spec.input('kill', default=Bool(False))
     spec.outline(cls.start, cls.check)
Example #18
 def test_int_conversion(self):
     from aiida.orm.data.base import Bool
     for val in [True, False]:
         self.assertEqual(int(val), int(Bool(val)))
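A minimal usage sketch (illustrative only, assuming the AiiDA 0.x API used throughout these examples, where Bool lives in aiida.orm.data.base):

# Illustrative sketch, not taken from the examples above.
from aiida.orm.data.base import Bool

clean_workdir = Bool(False)    # database-storable node wrapping a Python boolean
print(int(clean_workdir))      # 0 -- the int() conversion exercised in Example #18

# Typically passed as a typed workchain input, e.g.:
#     spec.input('clean_workdir', valid_type=Bool, default=Bool(False))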
Example #19
    def validate_inputs(self):
        """
        Validate inputs that might depend on each other and cannot be validated by the spec. Also define the
        dictionary `inputs` in the context, which will contain the inputs for the calculation that will be launched
        in the `run_calculation` step.
        """
        self.ctx.inputs = AttributeDict({
            'code': self.inputs.code,
            'structure': self.inputs.structure,
            'parameters': self.inputs.parameters.get_dict()
        })

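        # Make sure a CONTROL card exists and default the calculation type to 'scf'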
        if 'CONTROL' not in self.ctx.inputs.parameters:
            self.ctx.inputs.parameters['CONTROL'] = {}

        if 'calculation' not in self.ctx.inputs.parameters['CONTROL']:
            self.ctx.inputs.parameters['CONTROL']['calculation'] = 'scf'

        if 'parent_folder' in self.inputs:
            self.ctx.inputs.parent_folder = self.inputs.parent_folder
            self.ctx.inputs.parameters['CONTROL']['restart_mode'] = 'restart'
        else:
            self.ctx.inputs.parameters['CONTROL'][
                'restart_mode'] = 'from_scratch'

        if 'settings' in self.inputs:
            self.ctx.inputs.settings = self.inputs.settings.get_dict()
        else:
            self.ctx.inputs.settings = {}

        if 'options' in self.inputs:
            self.ctx.inputs.options = self.inputs.options.get_dict()
        else:
            self.ctx.inputs.options = {}

        if 'vdw_table' in self.inputs:
            self.ctx.inputs.vdw_table = self.inputs.vdw_table

        # Either automatic_parallelization or options has to be specified
        if not any([
                key in self.inputs
                for key in ['options', 'automatic_parallelization']
        ]):
            return self.exit_codes.ERROR_INVALID_INPUT_RESOURCES

        # If automatic parallelization is not enabled, we better make sure that the options satisfy minimum requirements
        if 'automatic_parallelization' not in self.inputs:
            num_machines = self.ctx.inputs.options.get('resources', {}).get(
                'num_machines', None)
            max_wallclock_seconds = self.ctx.inputs.options.get(
                'max_wallclock_seconds', None)

            if num_machines is None or max_wallclock_seconds is None:
                return self.exit_codes.ERROR_INVALID_INPUT_RESOURCES_UNDERSPECIFIED

        # Either a KpointsData with given mesh/path, or a desired distance between k-points should be specified
        if all([
                key not in self.inputs
                for key in ['kpoints', 'kpoints_distance']
        ]):
            return self.exit_codes.ERROR_INVALID_INPUT_KPOINTS

        try:
            self.ctx.inputs.kpoints = self.inputs.kpoints
        except AttributeError:
            structure = self.inputs.structure
            distance = self.inputs.kpoints_distance
            force_parity = self.inputs.get('kpoints_force_parity', Bool(False))
            self.ctx.inputs.kpoints = create_kpoints_from_distance(
                structure, distance, force_parity)

        # Validate the inputs related to pseudopotentials
        structure = self.inputs.structure
        pseudos = self.inputs.get('pseudos', None)
        pseudo_family = self.inputs.get('pseudo_family', None)

        try:
            self.ctx.inputs.pseudo = validate_and_prepare_pseudos_inputs(
                structure, pseudos, pseudo_family)
        except ValueError as exception:
            self.report('{}'.format(exception))
            return self.exit_codes.ERROR_INVALID_INPUT_PSEUDO_POTENTIALS
Example #20
es_settings = ParameterData(dict=settings_dict)

PhononPhono3py = WorkflowFactory('phonopy.phonon3')

# Choose how to run the calculation
run_by_deamon = False
if not run_by_deamon:
    result = run(
        PhononPhono3py,
        structure=structure,
        es_settings=es_settings,
        ph_settings=ph_settings,
        # Optional settings
        # pressure=Float(0),
        optimize=Bool(False),
        use_nac=Bool(True))

    print(result)
else:
    future = submit(
        PhononPhono3py,
        structure=structure,
        es_settings=es_settings,
        ph_settings=ph_settings,
        # Optional settings
        # pressure=Float(0),
        optimize=Bool(False),
        use_nac=Bool(True))

    print(future)
Example #21
    parent_calc = None
    if args.parent:
        parent_calc = load_node(int(args.parent)) #1791 
    if args.yamboconfig:
        yambo_parameters = read_yambo_json(filename=args.yamboconfig)    
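    # Assemble the keyword arguments for the YamboWorkflow run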
    kwargs = {
        "codename_pw": Str(args.pwcode),
        "codename_p2y": Str(args.precode),
        "codename_yambo": Str(args.yambocode),
        "pseudo_family": Str(args.pseudo),
        "calculation_set_pw": ParameterData(dict=calculation_set_pw),
        "calculation_set_p2y": ParameterData(dict=calculation_set_p2y),
        "calculation_set_yambo": ParameterData(dict=calculation_set_yambo),
        "settings_pw": settings_pw,
        "settings_p2y": settings_p2y,
        "settings_yambo": settings_yambo,
        "input_pw": ParameterData(dict={}),
        "structure": structure,
        "kpoint_pw": kpoints,
        "gamma_pw": Bool(False),
        "parameters_pw": ParameterData(dict=pw_parameters),
        "parameters_pw_nscf": ParameterData(dict=pw_nscf_parameters),
        "parameters_p2y": ParameterData(dict=yambo_parameters),
        "parameters_yambo": ParameterData(dict=yambo_parameters),
    }
    if parent_calc:
        kwargs["parent_folder"] = parent_calc.out.remote_folder
    if args.parent_workchain:
        kwargs["previous_yambo_workchain"] = Str(args.parent_workchain)
    full_result = run(YamboWorkflow, **kwargs)
    print("Workflow submitted:", full_result)
                 }

es_settings = ParameterData(dict=settings_dict)

QHAPhonopy = WorkflowFactory('phonopy.qha')

# Choose how to run the calculation
run_by_deamon = False
if not run_by_deamon:
    result = run(QHAPhonopy,
                 structure=structure,
                 es_settings=es_settings,
                 ph_settings=ph_settings,
                 # Optional settings
                 num_expansions=Int(10),
                 use_nac=Bool(True),
                 )

    print (result)
else:
    future = submit(QHAPhonopy,
                    structure=structure,
                    es_settings=es_settings,
                    ph_settings=ph_settings,
                    # Optional settings
                    num_expansions=Int(10),
                    use_nac=Bool(True),
                    )

    print(future)
    print('Running workchain with pk={}'.format(future.pid))
Example #23
    es_settings = ParameterData(dict=settings_dict)

QHAPhonopy = WorkflowFactory('phonopy.qha')

# Choose how to run the calculation
run_by_deamon = False
if not run_by_deamon:
    result = run(
        QHAPhonopy,
        structure=structure,
        es_settings=es_settings,
        ph_settings=ph_settings,
        # Optional settings
        num_expansions=Int(10),
        use_nac=Bool(False),
    )

    print(result)
else:
    future = submit(
        QHAPhonopy,
        structure=structure,
        es_settings=es_settings,
        ph_settings=ph_settings,
        # Optional settings
        num_expansions=Int(10),
        use_nac=Bool(False),
    )

    print(future)
Example #24
def launch(code_pw, code_hp, structure, pseudo_family, kpoints, qpoints,
           ecutwfc, ecutrho, hubbard_u, starting_magnetization,
           automatic_parallelization, clean_workdir, max_num_machines,
           max_wallclock_seconds, daemon, meta_convergence, is_insulator,
           parallelize_atoms):
    """
    Run the SelfConsistentHubbardWorkChain for a given input structure
    """
    from aiida.orm.data.base import Bool, Str
    from aiida.orm.data.parameter import ParameterData
    from aiida.orm.utils import WorkflowFactory
    from aiida.work.launch import run, submit
    from aiida_quantumespresso.utils.resources import get_default_options

    SelfConsistentHubbardWorkChain = WorkflowFactory(
        'quantumespresso.hp.hubbard')

    parameters = {
        'SYSTEM': {
            'ecutwfc': ecutwfc,
            'ecutrho': ecutrho,
            'lda_plus_u': True,
        },
    }

    parameters_hp = {'INPUTHP': {}}

    options = get_default_options(max_num_machines, max_wallclock_seconds)

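    # Map the (kind, U) pairs onto a dict and check that every kind exists in the structure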
    structure_kinds = structure.get_kind_names()
    hubbard_u_kinds = [kind for kind, value in hubbard_u]
    hubbard_u = {kind: value for kind, value in hubbard_u}

    if not set(hubbard_u_kinds).issubset(structure_kinds):
        raise click.BadParameter(
            'the kinds in the specified starting Hubbard U values {} are not a strict subset of the kinds in the structure {}'
            .format(hubbard_u_kinds, structure_kinds),
            param_hint='hubbard_u')

    if starting_magnetization:

        parameters['SYSTEM']['nspin'] = 2

        for kind, magnetization in starting_magnetization:

            if kind not in structure_kinds:
                raise click.BadParameter(
                    'the provided structure does not contain the kind {}'.
                    format(kind),
                    param_hint='starting_magnetization')

            parameters['SYSTEM'].setdefault('starting_magnetization',
                                            {})[kind] = magnetization

    inputs = {
        'structure': structure,
        'hubbard_u': ParameterData(dict=hubbard_u),
        'meta_convergence': Bool(meta_convergence),
        'is_insulator': Bool(is_insulator),
        'scf': {
            'code': code_pw,
            'pseudo_family': Str(pseudo_family),
            'kpoints': kpoints,
            'parameters': ParameterData(dict=parameters),
            'options': ParameterData(dict=options)
        },
        'hp': {
            'code': code_hp,
            'qpoints': qpoints,
            'parameters': ParameterData(dict=parameters_hp),
            'options': ParameterData(dict=options),
            'parallelize_atoms': Bool(parallelize_atoms),
        }
    }

    if daemon:
        workchain = submit(SelfConsistentHubbardWorkChain, **inputs)
        click.echo('Submitted {}<{}> to the daemon'.format(
            SelfConsistentHubbardWorkChain.__name__, workchain.pk))
    else:
        run(SelfConsistentHubbardWorkChain, **inputs)
Example #25
    es_settings = ParameterData(dict=settings_dict)

PhononPhonopy = WorkflowFactory('phonopy.phonon')

# Choose how to run the calculation
run_by_deamon = False
if not run_by_deamon:
    result = run(
        PhononPhonopy,
        structure=structure,
        es_settings=es_settings,
        ph_settings=ph_settings,
        # Optional settings
        pressure=Float(0.0),
        optimize=Bool(False),
        use_nac=Bool(False),
    )

    print(result)
else:
    future = submit(
        PhononPhonopy,
        structure=structure,
        es_settings=es_settings,
        ph_settings=ph_settings,
        # Optional settings
        pressure=Float(0),
        optimize=Bool(False),
        use_nac=Bool(False),
    )
Example #26
def launch(code, structure, pseudo_family, kpoints, max_num_machines,
           max_wallclock_seconds, daemon, ecutwfc, ecutrho, hubbard_u,
           hubbard_v, hubbard_file_pk, starting_magnetization, smearing,
           automatic_parallelization, clean_workdir):
    """
    Run the PwBaseWorkChain for a given input structure
    """
    from aiida.orm.data.base import Bool, Str
    from aiida.orm.data.parameter import ParameterData
    from aiida.orm.utils import WorkflowFactory
    from aiida.work.launch import run, submit
    from aiida_quantumespresso.utils.resources import get_default_options, get_automatic_parallelization_options

    PwBaseWorkChain = WorkflowFactory('quantumespresso.pw.base')

    parameters = {
        'SYSTEM': {
            'ecutwfc': ecutwfc,
            'ecutrho': ecutrho,
        },
    }

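    # Validate the Hubbard, starting magnetization and smearing inputs before building the workchain inputs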
    try:
        hubbard_file = validate.validate_hubbard_parameters(
            structure, parameters, hubbard_u, hubbard_v, hubbard_file_pk)
    except ValueError as exception:
        raise click.BadParameter(exception.message)

    try:
        validate.validate_starting_magnetization(structure, parameters,
                                                 starting_magnetization)
    except ValueError as exception:
        raise click.BadParameter(exception.message)

    try:
        validate.validate_smearing(parameters, smearing)
    except ValueError as exception:
        raise click.BadParameter(exception.message)

    inputs = {
        'code': code,
        'structure': structure,
        'pseudo_family': Str(pseudo_family),
        'kpoints': kpoints,
        'parameters': ParameterData(dict=parameters),
    }

    if automatic_parallelization:
        automatic_parallelization = get_automatic_parallelization_options(
            max_num_machines, max_wallclock_seconds)
        inputs['automatic_parallelization'] = ParameterData(
            dict=automatic_parallelization)
    else:
        options = get_default_options(max_num_machines, max_wallclock_seconds)
        inputs['options'] = ParameterData(dict=options)

    if clean_workdir:
        inputs['clean_workdir'] = Bool(True)

    if daemon:
        workchain = submit(PwBaseWorkChain, **inputs)
        click.echo('Submitted {}<{}> to the daemon'.format(
            PwBaseWorkChain.__name__, workchain.pk))
    else:
        run(PwBaseWorkChain, **inputs)
Example #27
    # CODE INDEPENDENT
    machine_dict = {
        'resources': {
            'num_machines': 1,
            'parallel_env': 'mpi*',
            'tot_num_mpiprocs': 16
        },
        'max_wallclock_seconds': 30 * 60,
    }

    machine = ParameterData(dict=machine_dict)

    results = run(
        FrozenPhonon,
        structure=structure,
        machine=machine,
        es_settings=es_settings,
        ph_settings=ph_settings,
        # Optional settings
        pressure=Float(10),
        optimize=Bool(0))

    # Check results
    print(results)

    print(results['force_constants'].get_array('force_constants'))

    print(results['force_constants'].pk)
    print(results['phonon_properties'].pk)
    print(results['dos'].pk)
Example #28
    es_settings = ParameterData(dict=settings_dict)

PhononPhono3py = WorkflowFactory('phonopy.phonon3')

# Choose how to run the calculation
run_by_deamon = False
if not run_by_deamon:
    result = run(
        PhononPhono3py,
        structure=structure,
        es_settings=es_settings,
        ph_settings=ph_settings,
        # Optional settings
        # pressure=Float(0),
        optimize=Bool(False),
        use_nac=Bool(False),
        chunks=Int(120),  # set the number of maximum simultaneous calculations
        cutoff=Float(4.0),
        # calculate_fc=Bool(False),  # set true to calculate 2nd & 3rd order force constants
        # recover=load_node(81309), # set workchain to recover
        data_sets=load_node(81481)  # load previous data
    )

    print(result)
else:
    future = submit(
        PhononPhono3py,
        structure=structure,
        es_settings=es_settings,
        ph_settings=ph_settings,
Example #29
    def iterate(self):
        """This function  creates (and on subsequent iteration updates) the parameters and submits subworkflows  for each parameter step"""
        self.report("Convergence iteration {}".format(str(self.ctx.iteration)))
        loop_items = range(1, self.ctx.loop_length + 1)
        if self.ctx.very_first:
            loop_items = range(self.ctx.loop_length)
            self.ctx.very_first = False
        self.report('will run four calculations in parallel')
        outs = {}
        # Extras: restart_options_pw, restart_options_gw, calculation_set_pw_nscf, settings_pw_nscf, kpoint_pw_nscf, kpoint_pw,
        extra_rs = {}
        extra_wf = {}
        if 'calculation_set_pw_nscf' in self.inputs.keys():
            extra_wf[
                'calculation_set_pw_nscf'] = self.inputs.calculation_set_pw_nscf
        if 'settings_pw_nscf' in self.inputs.keys():
            extra_wf['settings_pw_nscf'] = self.inputs.settings_pw_nscf
        if 'restart_options_pw' in self.inputs.keys():
            extra_wf['restart_options_pw'] = self.inputs.restart_options_pw
        if 'restart_options_gw' in self.inputs.keys():
            extra_wf['restart_options_gw'] = self.inputs.restart_options_gw
            extra_rs['restart_options'] = self.inputs.restart_options_gw

        if 'kpoints' != self.ctx.variable_to_converge:
            self.report("this is not a K-point convergence ")
            for num in loop_items:  # includes 0 because of starting point
                # There is a bug here, for Bands we might end up
                # with a situation like BandsRnXP  %   12 | 12  % where both
                # are equal at the very start, this is due to the fact that we need
                # to be able to support BSE calculations too.
                # TODO
                if loop_items[0] == 0:
                    if 'bands' == self.ctx.variable_to_converge:
                        self.init_parameters(num + 1)  # see above comment
                    else:
                        self.init_parameters(num)
                else:
                    if 'bands' == self.ctx.variable_to_converge:
                        self.update_parameters(num + 1)
                    else:
                        self.update_parameters(num)
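                # Reuse an existing P2Y parent from the context if available; otherwise run P2Y from the NSCF parent first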
                try:
                    p2y_done = self.ctx.p2y_parent_folder
                except AttributeError:
                    self.report(
                        'no preceding yambo parent, will run P2Y from NSCF parent first'
                    )
                    p2y_res = submit(
                        YamboRestartWf,
                        precode=self.inputs.precode,
                        yambocode=self.inputs.yambocode,
                        parameters=self.inputs.parameters_p2y,
                        calculation_set=self.ctx.calculation_set_p2y,
                        parent_folder=self.inputs.parent_nscf_folder,
                        settings=self.inputs.settings_p2y,
                        **extra_rs)
                    self.ctx.skip_prescf = True
                    #self.ctx.iteration-=1
                    self.ctx.very_first = True  #  There was a bug  because of this.
                    return ToContext(missing_p2y_parent=p2y_res)
                self.report('running from preceding yambo/p2y calculation')
                future = submit(YamboRestartWf,
                                precode=self.inputs.precode,
                                yambocode=self.inputs.yambocode,
                                parameters=self.ctx.parameters,
                                calculation_set=self.inputs.calculation_set,
                                parent_folder=self.ctx.p2y_parent_folder,
                                settings=self.inputs.settings,
                                **extra_rs)
                outs['r' + str(num)] = future
            self.ctx.iteration = self.ctx.iteration + 1
            return ToContext(**outs)
        else:
            # run yambowf, four times. with a different  nscf kpoint starting mesh
            self.report("  K-point convergence commencing")
            mesh = False
            for num in loop_items:  # includes 0 because of starting point
                if loop_items[0] == 0:
                    self.init_parameters(num)
                #else:
                #    self.update_parameters(num)  # Not necessary, k-point variation is done at the PBE level via self.ctx.distance_kpoints
                def get_kpoints():
                    self.ctx.distance_kpoints = self.ctx.distance_kpoints * 0.9  # 10% change
                    kpoints = KpointsData()
                    kpoints.set_cell_from_structure(self.inputs.structure)
                    kpoints.set_kpoints_mesh_from_density(
                        distance=self.ctx.distance_kpoints, force_parity=True)
                    return kpoints

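                # Shrink the k-point distance until the resulting mesh differs from the previous one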
                while True:
                    kpoints = get_kpoints()
                    if mesh == kpoints.get_kpoints_mesh():  # deduplicate
                        continue
                    else:
                        break
                mesh = kpoints.get_kpoints_mesh()
                if 'parent_scf_folder' in self.inputs.keys():
                    extra_wf['parent_folder'] = self.inputs.parent_scf_folder
                if 'QPkrange' not in self.ctx.parameters.get_dict().keys():
                    extra_wf['to_set_qpkrange'] = Bool(1)
                if 'BndsRnXp' not in self.ctx.parameters.get_dict().keys(
                ) or 'GbndRnge' not in self.ctx.parameters.get_dict().keys():
                    extra_wf['to_set_bands'] = Bool(1)
                extra_wf['calculation_set_p2y'] = self.ctx.calculation_set_p2y
                extra_wf['calculation_set_pw'] = self.inputs.calculation_set_pw
                extra_wf['settings_p2y'] = self.inputs.settings_p2y
                extra_wf['settings_pw'] = self.ctx.settings_pw
                future = submit(
                    YamboWorkflow,
                    codename_pw=self.inputs.pwcode,
                    codename_p2y=self.inputs.precode,
                    codename_yambo=self.inputs.yambocode,
                    pseudo_family=self.inputs.pseudo,
                    calculation_set_yambo=self.inputs.calculation_set,
                    settings_yambo=self.inputs.settings,
                    structure=self.inputs.structure,
                    kpoint_pw=kpoints,
                    parameters_pw=self.ctx.parameters_pw,
                    parameters_pw_nscf=self.ctx.parameters_pw_nscf,
                    parameters_p2y=self.inputs.parameters_p2y,
                    parameters_yambo=self.ctx.parameters,
                    **extra_wf)
                outs['r' + str(num)] = future
                self.ctx.conv_elem['kpoints'].append(self.ctx.distance_kpoints)
            self.ctx.iteration = self.ctx.iteration + 1
            return ToContext(**outs)
        return outs
Example #30
}

es_settings = ParameterData(dict=settings_dict)

GruneisenPhonopy = WorkflowFactory('phonopy.gruneisen')

# Choose how to run the calculation
run_by_deamon = False
if not run_by_deamon:
    result = run(
        GruneisenPhonopy,
        structure=structure,
        es_settings=es_settings,
        ph_settings=ph_settings,
        # Optional settings
        # pressure=Float(0),
        use_nac=Bool(True))

    print(result)
else:
    future = submit(
        GruneisenPhonopy,
        structure=structure,
        es_settings=es_settings,
        ph_settings=ph_settings,
        # Optional settings
        # pressure=Float(0),
        use_nac=Bool(True))

    print(future)
    print('Running workchain with pk={}'.format(future.pid))