Example #1
 def parse(**extra_settings):
     """Run the parser using default settings updated with extra_settings."""
     load_dbenv_if_not_loaded()
     from aiida.orm import CalculationFactory, DataFactory
     from aiida_vasp.parsers.vasp import VaspParser
     calc = CalculationFactory('vasp.vasp')()
     settings_dict = {
         'parser_settings': {
             'add_bands': True,
             'output_params': ['fermi_level']
         }
     }
     settings_dict.update(extra_settings)
     calc.use_settings(DataFactory('parameter')(dict=settings_dict))
     parser = VaspParser(calc=calc)
     retrieved = DataFactory('folder')()
     fldr = "basic"
     if "folder" in extra_settings:
         fldr = extra_settings["folder"]
     xml_file_path = xml_path(fldr)
     tmp_file_path = str(tmpdir.join('vasprun.xml'))
     #tmp_file_path = os.path.realpath(os.path.join(
     #    __file__, '../../../test_data/tmp/vasprun.xml'))
     xml_truncate(request.param, xml_file_path, tmp_file_path)
     retrieved.add_path(tmp_file_path, '')
     success, nodes = parser.parse_with_retrieved({'retrieved': retrieved})
     nodes = dict(nodes)
     return success, nodes
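A minimal usage sketch for the fixture closure above, assuming the enclosing pytest fixture returns parse and is requested under the hypothetical name vasp_parse; the test name and assertions are illustrative only.

def test_parse_defaults(vasp_parse):
    # Hypothetical test body: run the parser on the default 'basic' folder
    # and check that it reports success and returns a dict of output nodes.
    success, nodes = vasp_parse(folder='basic')
    assert success
    assert isinstance(nodes, dict)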
Example #2
 def setUp(self):
     super(TestProcessBuilder, self).setUp()
     self.assertIsNone(Process.current())
     self.calculation_class = CalculationFactory(
         'simpleplugins.templatereplacer')
     self.process_class = self.calculation_class.process()
     self.builder = self.process_class.get_builder()
Example #3
 def _init_defaults(self, *args, **kwargs):  # pylint: disable=unused-argument
     """Set default values"""
     calcname = kwargs.get('calc_cls', 'vasp.vasp')
     if isinstance(calcname, (str, unicode)):
         self.calc_cls = CalculationFactory(calcname)
     else:
         self.calc_cls = calcname
     self.label = kwargs.get('label', 'unlabeled')
     self._computer = kwargs.get('computer')
     self._code = kwargs.get('code')
     self._parameters = kwargs.get('parameters',
                                   self.calc_cls.new_parameters())
     self._set_default_structure(kwargs.get('structure'))
     self._paw_fam = kwargs.get('paw_family', 'PBE')
     self._paw_def = kwargs.get('paw_map')
     self._paws = {}
     self._set_default_paws()
     self._kpoints = kwargs.get('kpoints', self.calc_cls.new_kpoints())
     self.kpoints = self._kpoints
     self._charge_density = kwargs.get('charge_density', None)
     self._wavefunctions = kwargs.get('wavefunctions', None)
     self._wannier_parameters = kwargs.get('wannier_parameters', None)
     self._wannier_data = kwargs.get('wannier_data', None)
     self._recipe = None
     self._queue = kwargs.get('queue')
     self._resources = kwargs.get('resources', {})
Example #4
    def run_zeopp(self):

        self.report("Running workchain for structure {}".format(
            self.inputs.structure.filename))

        label = "zeopp"
        inputs = {}
        inputs['_label'] = label
        inputs['_description'] = "Sampling accessible pore surface with zeo++"
        inputs['code'] = self.inputs.zeopp_code
        inputs['structure'] = self.inputs.structure

        NetworkParameters = DataFactory('zeopp.parameters')
        network_dict = {
            'cssr': True,
            'ha': True,
            'vsa': [1.8, 1.8, 1000],
            'sa': [1.8, 1.8, 1000],
        }
        inputs['parameters'] = NetworkParameters(dict=network_dict)
        inputs['_options'] = self.default_options

        NetworkCalculation = CalculationFactory('zeopp.network')
        future = submit(NetworkCalculation.process(), **inputs)
        self.report(
            "pk: {} | Submitted zeo++ calculation for structure {}".format(
                future.pid, self.inputs.structure.filename))

        return ToContext(**{label: Outputs(future)})
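For context, the step methods in Examples #4, #26, #29 and #30 hand results to one another through the workchain context: each returns ToContext(<label>=Outputs(future)), and the next step reads self.ctx.<label>. Below is a sketch of the outline that would chain them; the class name, the stub bodies and the import path (pre-1.0 aiida.work API) are assumptions, and inputs/error handling are omitted.

from aiida.work.workchain import WorkChain  # assumed import path (0.x API)


class SurfaceTopologyWorkChain(WorkChain):  # illustrative name
    @classmethod
    def define(cls, spec):
        super(SurfaceTopologyWorkChain, cls).define(spec)
        spec.outline(
            cls.run_zeopp,            # stores outputs in self.ctx.zeopp
            cls.run_pore_surface,     # reads self.ctx.zeopp
            cls.run_distance_matrix,  # reads self.ctx.pore_surface
            cls.run_rips_complex,     # reads self.ctx.distance_matrix
        )

    def run_zeopp(self):
        pass  # body as in Example #4

    def run_pore_surface(self):
        pass  # body as in Example #26

    def run_distance_matrix(self):
        pass  # body as in Example #29

    def run_rips_complex(self):
        pass  # body as in Example #30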
Example #5
def main():
    DifferenceCalculation = CalculationFactory('wait.wait')
    builder = DifferenceCalculation.get_builder()
    builder.code = Code.get_from_string('wait')
    builder.options = {'resources': {'num_machines': 1}, 'withmpi': False}

    node = submit(builder)
    print(node.pk)
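A sketch of how such a script is typically invoked; the __main__ guard and the profile-loading calls are assumptions, since the original snippet shows only main() itself.

if __name__ == '__main__':
    # Assumed entry point: make sure the database environment is loaded
    # before main() touches the ORM (or run the script via `verdi run`).
    from aiida import is_dbenv_loaded, load_dbenv
    if not is_dbenv_loaded():
        load_dbenv()
    main()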
Example #6
    def test_with_subclasses(self, computer):

        extra_name = self.__class__.__name__ + "/test_with_subclasses"
        calc_params = {
            'computer': computer,
            'resources': {
                'num_machines': 1,
                'num_mpiprocs_per_machine': 1
            }
        }

        TemplateReplacerCalc = CalculationFactory(
            'simpleplugins.templatereplacer')
        ParameterData = DataFactory('parameter')

        a1 = JobCalculation(**calc_params).store()
        # To query only these nodes later
        a1.set_extra(extra_name, True)
        a2 = TemplateReplacerCalc(**calc_params).store()
        # To query only these nodes later
        a2.set_extra(extra_name, True)
        a3 = Data().store()
        a3.set_extra(extra_name, True)
        a4 = ParameterData(dict={'a': 'b'}).store()
        a4.set_extra(extra_name, True)
        a5 = Node().store()
        a5.set_extra(extra_name, True)
        # I don't set the extras, just to be sure that the filtering works
        # The filtering is needed because other tests will put stuff in the DB
        a6 = JobCalculation(**calc_params)
        a6.store()
        a7 = Node()
        a7.store()

        # Query by calculation
        results = list(JobCalculation.query(dbextras__key=extra_name))
        # a3, a4, a5 should not be found because they are not JobCalculations.
        # a6, a7 should not be found because they do not have the extra set.
        self.assertEquals(set([i.pk for i in results]), set([a1.pk, a2.pk]))

        # Same query, but by the generic Node class
        results = list(Node.query(dbextras__key=extra_name))
        self.assertEquals(set([i.pk for i in results]),
                          set([a1.pk, a2.pk, a3.pk, a4.pk, a5.pk]))

        # Same query, but by the Data class
        results = list(Data.query(dbextras__key=extra_name))
        self.assertEquals(set([i.pk for i in results]), set([a3.pk, a4.pk]))

        # Same query, but by the ParameterData subclass
        results = list(ParameterData.query(dbextras__key=extra_name))
        self.assertEquals(set([i.pk for i in results]), set([a4.pk]))

        # Same query, but by the TemplateReplacerCalc subclass
        results = list(TemplateReplacerCalc.query(dbextras__key=extra_name))
        self.assertEquals(set([i.pk for i in results]), set([a2.pk]))
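The deprecated Node.query interface used above has a QueryBuilder equivalent; a hedged sketch for the JobCalculation case (filter syntax recalled from the 0.x QueryBuilder documentation, so verify it against the installed version; the extra name below is only a stand-in for the one built in the test).

from aiida.orm import JobCalculation
from aiida.orm.querybuilder import QueryBuilder

extra_name = 'TestQueryWithSubclasses/test_with_subclasses'  # stand-in value
qb = QueryBuilder()
# Match JobCalculation and its subclasses that carry the marker extra.
qb.append(JobCalculation, filters={'extras': {'has_key': extra_name}})
pks = {node.pk for (node,) in qb.iterall()}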
Example #7
 def inner(calculation_string, code_string, single_core=True):
     from aiida.orm import CalculationFactory
     process = CalculationFactory(calculation_string).process()
     builder = process.get_builder()
     inputs_setup(
         builder,
         code_string=code_string,
         single_core=single_core
     )
     return builder
Example #8
 def test_existing_calculations(self):
     """
     Test listing all preinstalled calculations
     """
     calculations = all_plugins('calculations')
     self.assertIsInstance(calculations, list)
     for i in calculations:
         self.assertTrue(
             issubclass(CalculationFactory(i), JobCalculation),
             'Calculation plugin class {} is not subclass of JobCalculation'.format(
                 CalculationFactory(i)))
Example #9
    def __init__(self, calculation):
        """
        Initialize Parser instance
        """
        CryBasicCalculation = CalculationFactory('crystal17.basic')
        CryMainCalculation = CalculationFactory('crystal17.main')
        # check for valid input
        if not isinstance(calculation,
                          (CryBasicCalculation, CryMainCalculation)):
            raise OutputParsingError(
                "Can only parse CryBasicCalculation or CryMainCalculation")

        super(CryBasicParser, self).__init__(calculation)
Example #10
    def parse(**extra_settings):
        """Run the parser using default settings updated with extra_settings."""
        from aiida.orm import CalculationFactory, DataFactory
        calc = CalculationFactory('vasp.vasp')()
        settings_dict = {'pymatgen_parser': {'parse_potcar_file': False, 'exception_on_bad_xml': request.param}}
        settings_dict.update(extra_settings)
        calc.use_settings(DataFactory('parameter')(dict=settings_dict))
        parser = PymatgenParser(calc=calc)
        retrieved = DataFactory('folder')()
        retrieved.add_path(vasprun_path, '')

        success, nodes = parser.parse_with_retrieved({'retrieved': retrieved})
        nodes = dict(nodes)
        return success, nodes
Example #11
    def setUp(self):
        self.calc = CalculationFactory('vasp.vasp5')()
        Common.import_paw()

        larray = np.array([[0, .5, .5],
                           [.5, 0, .5],
                           [.5, .5, 0]])
        alat = 6.058
        self.structure = DataFactory('structure')(cell=larray*alat)
        self.structure.append_atom(position=[0, 0, 0], symbols='In')
        self.structure.append_atom(position=[.25, .25, .25], symbols='As')

        cifpath = realpath(join(dirname(__file__),
                                'data', 'EntryWithCollCode43360.cif'))
        self.cif = DataFactory('cif').get_or_create(cifpath)[0]
Example #12
 def setUp(self):
     """Set up test environment"""
     self.calc_cls = CalculationFactory('vasp.scf')
     self.code = Code()
     self.code.set_computer(self.computer)
     self.code.set_remote_computer_exec((self.computer, '/bin/foo'))
     Common.import_paw()
Example #13
    def fill_repo(self):
        from aiida.orm import JobCalculation, CalculationFactory, Data, DataFactory

        extra_name = self.__class__.__name__ + "/test_with_subclasses"
        calc_params = {
            'computer': self.computer,
            'resources': {
                'num_machines': 1,
                'num_mpiprocs_per_machine': 1
            }
        }

        TemplateReplacerCalc = CalculationFactory(
            'simpleplugins.templatereplacer')
        ParameterData = DataFactory('parameter')

        a1 = JobCalculation(**calc_params).store()
        # To query only these nodes later
        a1.set_extra(extra_name, True)
        a2 = TemplateReplacerCalc(**calc_params).store()
        # To query only these nodes later
        a2.set_extra(extra_name, True)
        a3 = Data().store()
        a3.set_extra(extra_name, True)
        a4 = ParameterData(dict={'a': 'b'}).store()
        a4.set_extra(extra_name, True)
        a5 = Node().store()
        a5.set_extra(extra_name, True)
        # I don't set the extras, just to be sure that the filtering works
        # The filtering is needed because other tests will put stuff in the DB
        a6 = JobCalculation(**calc_params)
        a6.store()
        a7 = Node()
        a7.store()
Example #14
    def get_pw_calculation(self, pw_structure, pw_parameters, pw_kpoint):

        params = self.get_parameters()

        pw_codename = params['pw_codename']
        num_machines = params['num_machines']
        max_wallclock_seconds = params['max_wallclock_seconds']
        pseudo_family = params['pseudo_family']

        code = Code.get_from_string(pw_codename)
        computer = code.get_remote_computer()

        QECalc = CalculationFactory('quantumespresso.pw')

        calc = QECalc(computer=computer)
        calc.set_max_wallclock_seconds(max_wallclock_seconds)
        calc.set_resources({"num_machines": num_machines})
        calc.store()

        calc.use_code(code)

        calc.use_structure(pw_structure)
        calc.use_pseudos_from_family(pseudo_family)
        calc.use_parameters(pw_parameters)
        calc.use_kpoints(pw_kpoint)

        return calc
Example #15
 def __init__(self, calc):
     """
     Initialize the instance of CiffilterParser
     """
     self._supported_calculation_class = CalculationFactory(
         'codtools.ciffilter')
     super(CiffilterParser, self).__init__(calc)
Example #16
    def get_lapw_calculation(self, lapw_structure, lapw_parameters,
                             lapw_kpoint):

        params = self.get_parameters()

        lapw_codename = params['lapw_codename']
        num_machines = params['num_machines']
        max_wallclock_seconds = params['max_wallclock_seconds']
        lapwbasis_family = params['lapwbasis_family']

        code = Code.get_from_string(lapw_codename)
        computer = code.get_remote_computer()

        LAPWCalc = CalculationFactory('exciting.exciting')

        calc = LAPWCalc(computer=computer)
        calc.set_max_wallclock_seconds(max_wallclock_seconds)
        calc.set_resources({"num_machines": num_machines})
        calc.store()

        calc.use_code(code)

        calc.use_structure(lapw_structure)
        calc.use_lapwbasis_from_family(lapwbasis_family)
        calc.use_parameters(lapw_parameters)
        calc.use_kpoints(lapw_kpoint)

        return calc
Example #17
def parse_nac(aiida_env):
    """Give the parsing result of a retrieved NAC calculation (emulated)."""
    from aiida.orm import CalculationFactory, DataFactory
    calc = CalculationFactory('vasp.vasp')()
    calc.use_settings(DataFactory('parameter')(dict={'pymatgen_parser': {'parse_potcar_file': False, 'exception_on_bad_xml': False}}))
    parser = PymatgenParser(calc=calc)
    retrieved = DataFactory('folder')()
    retrieved.add_path(data_path('born_effective_charge', 'vasprun.xml'), '')
    retrieved.add_path(data_path('born_effective_charge', 'OUTCAR'), '')

    def parse():
        success, nodes = parser.parse_with_retrieved({'retrieved': retrieved})
        nodes = dict(nodes)
        return success, nodes

    return parse
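A usage sketch, assuming parse_nac is registered as a pytest fixture so the test receives the returned closure directly; the test name and assertions are illustrative.

def test_parse_nac(parse_nac):
    # Hypothetical consumer: calling the fixture's return value runs the
    # parser on the emulated NAC retrieval set up above.
    success, nodes = parse_nac()
    assert success
    assert isinstance(nodes, dict)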
Example #18
 def run_ddec_point_charges(self):
     """Compute ddec point charges from precomputed charge-density."""
     charge_density = self.ctx.charge_density_calc['remote_folder']
     #options['prepend_text'] = "export OMP_NUM_THREADS=12"
     inputs = {
         'code'                   : self.inputs.ddec_code,
         'parameters'             : self.inputs.ddec_parameters,
         'charge_density_folder'  : charge_density,
         '_options'               : self.inputs.ddec_options.get_dict(),
         '_label'                 : "run_pointcharges_ddec",
     }
     # Create the calculation process and launch it
     DdecCalculation = CalculationFactory('ddec')
     process = DdecCalculation.process()
     future  = submit(process, **inputs)
     self.report("pk: {} | Running ddec to compute point charges based on the charge-density")
     return ToContext(ddec_calc=Outputs(future))
Example #19
def make_inputs(incar, structure, kpoints, settings, codename, queue_name,
                num_procs):
    load_dbenv_if_not_loaded()
    from aiida.orm import CalculationFactory, DataFactory
    potcar_cls = get_data_cls('vasp.potcar')
    vasp_calc_proc = CalculationFactory('vasp.vasp').process()
    inputs = vasp_calc_proc.get_inputs_template()

    set_std_inputs(inputs, codename, queue_name, num_procs)
    inputs.kpoints = kpoints
    inputs.structure = structure
    inputs.potential = potcar_cls.get_potcars_from_structure(
        family_name='PBE', structure=inputs.structure, mapping=POTCAR_MAP)
    inputs.settings = DataFactory('parameter')(dict=settings)
    inputs.parameters = DataFactory('parameter')(dict=incar)

    return inputs
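The returned inputs template would then be passed to the same submit(...) helper used in the other examples; a hedged sketch, where the code label, queue name, dictionaries and the submit import path are placeholders or assumptions.

from aiida.orm import CalculationFactory
from aiida.work.launch import submit  # assumed import path (0.12-era API)

# 'structure' and 'kpoints' are placeholders for nodes built elsewhere.
inputs = make_inputs(
    incar={'encut': 500, 'ediff': 1e-6},  # illustrative INCAR dict
    structure=structure,
    kpoints=kpoints,
    settings={'parser_settings': {'add_bands': True}},
    codename='vasp@mycluster',            # placeholder code label
    queue_name='normal',                  # placeholder queue
    num_procs=16,
)
future = submit(CalculationFactory('vasp.vasp').process(), **inputs)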
Example #20
 def setUp(self):
     self.calc_cls = CalculationFactory('vasp.base.BasicCalculation')
     Common.import_paw()
     Paw = DataFactory('vasp.paw')
     self.code = Code()
     self.code.set_computer(self.computer)
     self.code.set_remote_computer_exec((self.computer, '/bin/foo'))
     self.paw_in = Paw.load_paw(element='In')[0]
     self.paw_as = Paw.load_paw(element='As')[0]
     self.tmp, self.tmpf = tempfile.mkstemp()
Example #21
 def test_existing_calculations(self):
     """Test listing all preinstalled calculations """
     from aiida.orm.calculation.job import JobCalculation
     calcs = pl.existing_plugins(JobCalculation,
                                 'aiida.orm.calculation.job',
                                 suffix='Calculation')
     self.assertIsInstance(calcs, list)
     from aiida.orm import CalculationFactory
     for i in calcs:
         self.assertTrue(issubclass(CalculationFactory(i), JobCalculation))
Example #22
    def test_existing_calculations(self):
        """
        Test listing all preinstalled calculations
        """
        entry_points = get_entry_points('aiida.calculations')
        self.assertIsInstance(entry_points, list)

        for entry_point in entry_points:
            cls = CalculationFactory(entry_point.name)
            self.assertTrue(issubclass(cls, Calculation),
                'Calculation plugin class {} is not subclass of {}'.format(cls, Calculation))
Example #23
 def setUp(self):
     self.calc_cls = CalculationFactory('vasp.amn')
     self.code = Code()
     self.code.set_computer(self.computer)
     self.code.set_remote_computer_exec((self.computer, '/bin/foo'))
     Common.import_paw()
     self.tmpd, self.tmpf = tempfile.mkstemp()
     self.wdat = Common.wdat()
     self.wdat.add_file(self.tmpf, 'test1')
     self.wdat.add_file(self.tmpf, 'test2')
     self.wdat._make_archive()
Example #24
def get_phrestart_wfs_with_parameters(wf_params,
                ignored_keys=['codename','calculation_set','settings',
                'input|max_restarts','input|clean_workdir',
                'parameters|INPUTPH|alpha_mix(1)','parameters|INPUTPH|niter_ph',
                'parameters|INPUTPH|nmix_ph','parameters|INPUTPH|fildrho',
                'parameters|INPUTPH|fildvscf']):
    """
    Find all PhrestartWorkflow already run with the same parameters.
    :param wf_params: a dictionary with all the parameters (can contain
        dictionaries, structure and kpoints)
    :param ignored_keys: list of keys of wf_params that are ignored in the 
        comparison (a '|' means descending into a sub-dictionary)
    :return: the list of workflows.
    """
    from copy import deepcopy
    from aiida.workflows.user.epfl_theos.dbimporters.utils import objects_are_equal
    from aiida.workflows.user.epfl_theos.dbimporters.utils import get_farthest_node
    PhCalculation = CalculationFactory('quantumespresso.ph')
    the_params = deepcopy(wf_params)
    replace_all_parameterdata_with_dict(the_params)
    take_out_keys_from_dictionary(the_params,ignored_keys)
    qpoints_ref = the_params.pop('qpoints',None)
    input_pw_calc_ref = the_params.pop('pw_calculation')
    if qpoints_ref:
        try:
            qpoints_ref = qpoints_ref.get_kpoints_mesh()
        except AttributeError:
            qpoints_ref = qpoints_ref.get_kpoints()
    
    wfs_ph = get_wfs_with_parameter(input_pw_calc_ref,'PhrestartWorkflow')
    wfs = []
    for wf_ph in wfs_ph:
        if ('ph_folder' in wf_ph.get_results() or 'last_clean_calc' in wf_ph.get_attributes()):
            params = deepcopy(wf_ph.get_parameters())
            replace_all_parameterdata_with_dict(params)
            take_out_keys_from_dictionary(params,ignored_keys+['pw_calculation'])
            qpoints = params.pop('qpoints',None)
            input_ph_calc = params.pop('ph_calculation',None)                
            if qpoints:
                try:
                    qpoints = qpoints.get_kpoints_mesh()
                except AttributeError:
                    qpoints = qpoints.get_kpoints()
            if input_ph_calc:
                qph = get_farthest_node(input_ph_calc,PhCalculation)
            
            if (objects_are_equal(qpoints,qpoints_ref)
                and objects_are_equal(the_params,params)
                and (input_ph_calc is None or 
                     (qph.count()==1 and qph[0].inp.parent_calc_folder.inp.remote_folder.pk==input_pw_calc_ref.pk))):
                wfs.append(wf_ph)
    
    return wfs
Example #25
def test_prepare_and_validate(new_database, new_workdir):
    """test preparation of inputs"""
    code = get_main_code(new_workdir)

    inparams = {"scf.k_points": (8, 8)}

    from aiida.orm import DataFactory, CalculationFactory
    StructureData = DataFactory('structure')

    atoms = crystal(symbols=[12, 8],
                    basis=[[0, 0, 0], [0.5, 0.5, 0.5]],
                    spacegroup=225,
                    cellpar=[4.21, 4.21, 4.21, 90, 90, 90])
    instruct = StructureData(ase=atoms)

    from aiida_crystal17.workflows.symmetrise_3d_struct import (
        run_symmetrise_3d_structure)
    instruct, settings = run_symmetrise_3d_structure(instruct)

    calc_cls = CalculationFactory('crystal17.main')
    calc_cls.prepare_and_validate(inparams, instruct, settings, flattened=True)
Example #26
    def run_pore_surface(self):

        zeopp_out = self.ctx.zeopp

        label = "pore_surface"
        inputs = {}
        inputs['_label'] = label
        inputs['_description'] = ("Subsampling pore surface "
                                  "& formation of supercell")
        inputs['code'] = self.inputs.pore_surface_code
        inputs['parameters'] = get_pore_surface_parameters(
            zeopp_out['output_parameters'])
        inputs['surface_sample'] = zeopp_out['surface_sample_vsa']
        inputs['structure'] = zeopp_out['structure_cssr']
        inputs['_options'] = self.default_options

        PoreSurfaceCalculation = CalculationFactory('phtools.surface')
        future = submit(PoreSurfaceCalculation.process(), **inputs)
        self.report("pk: {} | Submitted pore_surface for structure {}".format(
            future.pid, inputs['structure']))

        return ToContext(**{label: Outputs(future)})
Example #27
    def test_process_type_with_entry_point(self):
        """
        For a process with a registered entry point, the process_type will be its formatted entry point string
        """
        from aiida.orm import CalculationFactory, Code

        code = Code()
        code.set_remote_computer_exec((self.computer, '/bin/true'))
        code.store()

        parameters = ParameterData(dict={})
        template = ParameterData(dict={})
        options = {
            'resources': {
                'num_machines': 1,
                'tot_num_mpiprocs': 1
            },
            'max_wallclock_seconds': 1,
        }

        inputs = {
            'code': code,
            'parameters': parameters,
            'template': template,
            'options': options,
        }

        entry_point = 'simpleplugins.templatereplacer'
        calculation = CalculationFactory(entry_point)
        job_process = calculation.process()
        process = job_process(inputs=inputs)

        expected_process_type = 'aiida.calculations:{}'.format(entry_point)
        self.assertEqual(process.calc.process_type, expected_process_type)

        # Verify that load_process_class on the calculation node returns the original entry point class
        recovered_process = process.calc.load_process_class()
        self.assertEqual(recovered_process, calculation)
Example #28
def generate_calc():
    from aiida.orm import Code, Computer, CalculationFactory
    from aiida.common.datastructures import calc_states

    CustomCalc = CalculationFactory('simpleplugins.templatereplacer')

    computer = Computer.get("localhost")

    calc = CustomCalc(computer=computer, withmpi=True)
    calc.set_resources({"num_machines": 1, "num_mpiprocs_per_machine": 1})
    calc.store()
    calc._set_state(calc_states.FINISHED)

    return calc
Example #29
    def run_distance_matrix(self):

        pore_surface_out = self.ctx.pore_surface

        label = "distance_matrix"
        inputs = {}
        inputs['_label'] = label
        inputs['_description'] = ("Computing the distance matrix "
                                  "for surface point cloud")
        inputs['code'] = self.inputs.distance_matrix_code
        inputs['surface_sample'] = pore_surface_out['surface_sample']
        inputs['cell'] = pore_surface_out['cell']
        inputs['_options'] = self.default_options

        self.out("pore_surface", pore_surface_out['surface_sample'])

        DistanceMatrixCalculation = CalculationFactory('phtools.dmatrix')
        future = submit(DistanceMatrixCalculation.process(), **inputs)
        self.report(
            "pk: {} | Submitted distance_matrix for structure {}".format(
                future.pid, self.inputs.structure.filename))

        return ToContext(**{label: Outputs(future)})
Example #30
    def run_rips_complex(self):
        distance_matrix_out = self.ctx.distance_matrix

        label = "rips_complex"
        inputs = {}
        inputs['_label'] = label
        inputs['_description'] = ("Computing the Rips complex "
                                  "for the surface point cloud")
        inputs['code'] = self.inputs.rips_code
        #inputs['distance_matrix'] = distance_matrix_out['distance_matrix']
        inputs['remote_folder'] = distance_matrix_out['remote_folder']

        Parameters = DataFactory('gudhi.rdm')
        inputs['parameters'] = Parameters(dict={'max-edge-length': 4.2})
        inputs['_options'] = self.default_options

        RipsDistanceMatrixCalculation = CalculationFactory('gudhi.rdm')
        future = submit(RipsDistanceMatrixCalculation.process(), **inputs)
        self.report(
            "pk: {} | Submitted rips calculation for structure {}".format(
                future.pid, self.inputs.structure.filename))

        return ToContext(**{label: Outputs(future)})