Code example #1
def test_result_pass_serialization(water, result_input, res_success):
    res_in = ResultInput(molecule=water, **result_input)
    assert isinstance(res_in.dict(), dict)
    assert isinstance(res_in.json(), str)

    res_out = Result(molecule=water, **result_input, **res_success)
    assert isinstance(res_out.dict(), dict)
    assert isinstance(res_out.json(), str)
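For orientation, here is a minimal sketch (not part of the test suite above) of the dict/JSON round trip those assertions exercise; it assumes ResultInput is importable from qcelemental.models and uses a throwaway helium molecule in place of the water fixture:

from qcelemental.models import ResultInput

inp = ResultInput(
    molecule={"symbols": ["He"], "geometry": [0, 0, 0]},
    driver="energy",
    model={"method": "hf", "basis": "sto-3g"},
)
blob = inp.json()                    # serialize the model to a JSON string
again = ResultInput.parse_raw(blob)  # rebuild an equivalent model from the string
assert again.model.method == "hf"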
Code example #2
def run_cfour(name, molecule, options, **kwargs):
    """QCDB API to QCEngine connection for CFOUR."""

    resi = ResultInput(
        **{
            'driver': inspect.stack()[1][3],
            'extras': {
                'qcdb:options': copy.deepcopy(options),
            },
            'model': {
                'method': name,
                'basis': '(auto)',
            },
            'molecule': molecule.to_schema(dtype=2),
            'provenance': provenance_stamp(__name__),
        })

    jobrec = qcng.compute(resi, "qcdb-cfour", raise_error=True).dict()

    hold_qcvars = jobrec['extras'].pop('qcdb:qcvars')
    jobrec['qcvars'] = {
        key: qcel.Datum(**dval)
        for key, dval in hold_qcvars.items()
    }

    return jobrec
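As a side note, a small sketch of the qcel.Datum objects that the comprehension above builds from each stored qcvar; the label, units, and value below are arbitrary placeholders, not guaranteed keys of the returned record:

import qcelemental as qcel

# Datum bundles a label, units, and a value; after run_cfour returns,
# jobrec['qcvars'] maps qcvar names to objects of this shape.
d = qcel.Datum("CURRENT ENERGY", "Eh", -1.0)
print(d.label, d.units, d.data)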
Code example #3
    def compute_energy(self, molecule: 'psi4.core.Molecule', wfn: 'psi4.core.Wavefunction' = None) -> float:
        """Compute dispersion energy based on engine, dispersion level, and parameters in `self`.

        Parameters
        ----------
        molecule : psi4.core.Molecule
            System for which to compute empirical dispersion correction.
        wfn : psi4.core.Wavefunction, optional
            Wavefunction on which to set QCVariables.

        Returns
        -------
        float
            Dispersion energy [Eh].

        Notes
        -----
        DISPERSION CORRECTION ENERGY
            Always set. Overridden in SCF finalization, but that only changes for "-3C" methods.
        <self.fctldash> DISPERSION CORRECTION ENERGY
            Set if `fctldash` is nonempty.

        """
        if self.engine in ['dftd3', 'mp2d']:
            resi = ResultInput(
                **{
                    'driver': 'energy',
                    'model': {
                        'method': self.fctldash,
                        'basis': '(auto)',
                    },
                    'keywords': {
                        'level_hint': self.dashlevel,
                        'params_tweaks': self.dashparams,
                        'dashcoeff_supplement': self.dashcoeff_supplement,
                        'verbose': 1,
                    },
                    'molecule': molecule.to_schema(dtype=2),
                    'provenance': p4util.provenance_stamp(__name__),
                })
            jobrec = qcng.compute(resi, self.engine, raise_error=True)

            dashd_part = float(jobrec.extras['qcvars']['DISPERSION CORRECTION ENERGY'])
            if wfn is not None:
                for k, qca in jobrec.extras['qcvars'].items():
                    if 'CURRENT' not in k:
                        wfn.set_variable(k, p4util.plump_qcvar(qca, k))

            if self.fctldash in ['hf3c', 'pbeh3c']:
                gcp_part = gcp.run_gcp(molecule, self.fctldash, verbose=False, dertype=0)
                dashd_part += gcp_part

            return dashd_part

        else:
            ene = self.disp.compute_energy(molecule)
            core.set_variable('DISPERSION CORRECTION ENERGY', ene)
            if self.fctldash:
                core.set_variable('{} DISPERSION CORRECTION ENERGY'.format(self.fctldash), ene)
            return ene
Code example #4
    def compute(self, input_model: 'ResultInput',
                config: 'JobConfig') -> 'Result':
        self.found(raise_error=True)

        verbose = 1
        _print_helper(f'[1] {self.name} RESULTINPUT PRE-PLANT',
                      input_model.dict(), verbose >= 3)

        input_data = self.qcdb_build_input(input_model, config)
        input_model = ResultInput(**input_data)

        _print_helper(f'[2] {self.name} RESULTINPUT PRE-ENGINE',
                      input_model.dict(), verbose >= 4)

        output_model = Psi4Harness.compute(self,
                                           input_model=input_model,
                                           config=config)

        _print_helper(f'[3] {self.name} RESULT POST-ENGINE',
                      output_model.dict(), verbose >= 4)

        # ???
        if not output_model.success:
            return output_model

        _print_helper(f'[4a] {self.name} RESULT POST-HARVEST',
                      output_model.dict(), verbose >= 5)

        output_model = self.qcdb_post_parse_output(input_model, output_model)

        _print_helper(f'[4] {self.name} RESULT POST-POST-HARVEST',
                      output_model.dict(), verbose >= 2)

        return output_model
Code example #5
def run_psi4(name, molecule, options, **kwargs):
    """QCDB API to QCEngine connection for Psi4."""

    resi = ResultInput(
        **{
            'driver': inspect.stack()[1][3],
            'extras': {
                'qcdb:options': copy.deepcopy(options),
            },
            'model': {
                'method': name,
                'basis': '(auto)',
            },
            'molecule': molecule.to_schema(dtype=2),
            'provenance': provenance_stamp(__name__),
        })

    jobrec = qcng.compute(resi, "qcdb-psi4", raise_error=True).dict()
    hold_qcvars = jobrec['extras'].pop('qcdb:qcvars')
    jobrec['qcvars'] = {
        key: qcel.Datum(**dval)
        for key, dval in hold_qcvars.items()
    }
    pp.pprint(jobrec)
    print(jobrec.keys())
    print(jobrec['success'])
    return jobrec
Code example #6
def test_compute_bad_models(program, model):
    if not testing.has_program(program):
        pytest.skip("Program '{}' not found.".format(program))

    inp = ResultInput(molecule=qcng.get_molecule("hydrogen"), driver="energy", model=model)

    with pytest.raises(ValueError) as exc:
        ret = qcng.compute(inp, program, raise_error=True)
Code example #7
def test_compute_gradient(program, model):
    if not testing.has_program(program):
        pytest.skip("Program '{}' not found.".format(program))

    inp = ResultInput(molecule=qcng.get_molecule("hydrogen"), driver="gradient", model=model)
    ret = qcng.compute(inp, program, raise_error=True)

    assert ret.success is True
    assert isinstance(ret.return_result, list)
Code example #8
def test_compute_bad_models(program, model):
    if not testing.has_program(program):
        pytest.skip("Program '{}' not found.".format(program))

    adriver = model.pop("driver", "energy")
    amodel = model
    inp = ResultInput(molecule=qcng.get_molecule("hydrogen"), driver=adriver, model=amodel)

    with pytest.raises(qcng.exceptions.InputError) as exc:
        ret = qcng.compute(inp, program, raise_error=True)
Code example #9
def data_arg_helper(data_arg: str) -> 'ResultInput':
    """
    Converts the data argument of run and run-procedure commands to a ResultInput for compute.

    Parameters
    ----------
    data_arg: str
        Either a data blob or file name or '-' for STDIN

    Returns
    -------
    ResultInput
        An input for compute.
    """
    if data_arg == "-":
        return ResultInput.parse_raw(sys.stdin.read())
    elif os.path.isfile(data_arg):
        return ResultInput.parse_file(data_arg)
    else:
        return ResultInput.parse_raw(data_arg)
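A hedged usage sketch of the helper above; the JSON blob is illustrative and simply exercises the final parse_raw branch:

import json

blob = json.dumps({
    "molecule": {"symbols": ["He"], "geometry": [0, 0, 0]},
    "driver": "energy",
    "model": {"method": "hf", "basis": "sto-3g"},
})
inp = data_arg_helper(blob)    # neither '-' nor an existing file, so parsed as a data blob
assert inp.driver == "energy"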
Code example #10
def test_run_psi4(tmp_path):
    """Tests qcengine run with psi4 and JSON input"""
    def check_result(stdout):
        output = json.loads(stdout)
        assert output["provenance"]["creator"].lower() == "psi4"
        assert output["success"] is True

    inp = ResultInput(molecule=get_molecule("hydrogen"),
                      driver="energy",
                      model={"method": "hf", "basis": "6-31G"})

    args = ["run", "psi4", inp.json()]
    check_result(run_qcengine_cli(args))

    args = ["run", "psi4", os.path.join(tmp_path, "input.json")]
    with util.disk_files({"input.json": inp.json()}, {}, cwd=tmp_path):
        check_result(run_qcengine_cli(args))

    args = ["run", "psi4", "-"]
    check_result(run_qcengine_cli(args, stdin=inp.json()))
Code example #11
File: runner.py  Project: nuwandesilva/qcdb
def run_gamess(name, molecule, options, **kwargs):
    #def run_gamess2(name, molecule, options, **kwargs):

    resi = ResultInput(
        **{
            'driver': inspect.stack()[1][3],
            'extras': {
                'qcdb:options': copy.deepcopy(options),
            },
            'model': {
                'method': name,
                'basis': '(auto)',
            },
            'molecule': molecule.to_schema(dtype=2),
            'provenance': provenance_stamp(__name__),
        })

    if 'efpfrag' in kwargs:
        resi.extras['efpfrag'] = kwargs['efpfrag']

    if 'number_of_atoms_in_frag_x' in kwargs:
        resi.extras['number_of_atoms_in_frag_x'] = kwargs[
            'number_of_atoms_in_frag_x']
        print('run_gamess: number_of_atoms_in_frag_x = ',
              resi.extras['number_of_atoms_in_frag_x'])


#    if 'efpfrag1' in kwargs:
#       resi.extras['efpfrag1'] = kwargs['efpfrag1']
#
#    if 'efpfrag2' in kwargs:
#        resi.extras['efpfrag2'] = kwargs['efpfrag2']

    jobrec = qcng.compute(resi, "qcdb-gamess", raise_error=True).dict()
    hold_qcvars = jobrec['extras'].pop('qcdb:qcvars')
    jobrec['qcvars'] = {
        key: qcel.Datum(**dval)
        for key, dval in hold_qcvars.items()
    }
    return jobrec
Code example #12
    def compute_gradient(self, molecule: 'psi4.core.Molecule',
                         wfn: 'psi4.core.Wavefunction' = None) -> 'psi4.core.Matrix':
        """Compute dispersion gradient based on engine, dispersion level, and parameters in `self`.

        Parameters
        ----------
        molecule : psi4.core.Molecule
            System for which to compute empirical dispersion correction.
        wfn : psi4.core.Wavefunction, optional
            Wavefunction on which to set QCVariables.

        Returns
        -------
        psi4.core.Matrix
            (nat, 3) dispersion gradient [Eh/a0].

        """
        if self.engine in ['dftd3', 'mp2d']:
            resi = ResultInput(
                **{
                    'driver': 'gradient',
                    'model': {
                        'method': self.fctldash,
                        'basis': '(auto)',
                    },
                    'keywords': {
                        'level_hint': self.dashlevel,
                        'params_tweaks': self.dashparams,
                        'dashcoeff_supplement': self.dashcoeff_supplement,
                        'verbose': 1,
                    },
                    'molecule': molecule.to_schema(dtype=2),
                    'provenance': p4util.provenance_stamp(__name__),
                })
            jobrec = qcng.compute(resi, self.engine, raise_error=True)

            dashd_part = core.Matrix.from_array(
                np.array(jobrec.extras['qcvars']['DISPERSION CORRECTION GRADIENT']).reshape(-1, 3))
            if wfn is not None:
                for k, qca in jobrec.extras['qcvars'].items():
                    if 'CURRENT' not in k:
                        wfn.set_variable(k, p4util.plump_qcvar(qca, k))

            if self.fctldash in ['hf3c', 'pbeh3c']:
                gcp_part = gcp.run_gcp(molecule, self.fctldash, verbose=False, dertype=1)
                dashd_part.add(gcp_part)

            return dashd_part
        else:
            return self.disp.compute_gradient(molecule)
Code example #13
def test_repr_result():

    result = ResultInput(**{
        "driver": "gradient",
        "model": {
            "method": "UFF"
        },
        "molecule": {
            "symbols": ["He"],
            "geometry": [0, 0, 0]
        }
    })
    assert "UFF" in str(result)
    assert "UFF" in repr(result)
Code example #14
def test_compute_gradient(program, model):
    if not testing.has_program(program):
        pytest.skip("Program '{}' not found.".format(program))

    inp = ResultInput(molecule=qcng.get_molecule("hydrogen"),
                      driver="gradient",
                      model=model,
                      extras={"mytag": "something"})
    ret = qcng.compute(inp, program, raise_error=True)

    assert ret.success is True
    assert isinstance(ret.return_result, np.ndarray)
    assert len(ret.return_result.shape) == 2
    assert ret.return_result.shape[1] == 3
    assert "mytag" in ret.extras, ret.extras
Code example #15
def test_psi4_ref_switch():
    inp = ResultInput(
        **{
            "molecule": {
                "symbols": ["Li"],
                "geometry": [0, 0, 0],
                "molecular_multiplicity": 2
            },
            "driver": "energy",
            "model": {
                "method": "B3LYP",
                "basis": "sto-3g"
            },
            "keywords": {
                "scf_type": "df"
            }
        })

    ret = qcng.compute(inp, "psi4", raise_error=True, return_dict=False)

    assert ret.success is True
    assert ret.properties.calcinfo_nalpha == 2
    assert ret.properties.calcinfo_nbeta == 1
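For reference, a hedged, self-contained variant of the same run showing a few more fields on the returned Result object (field names come from the QCSchema Result model; running it assumes a working Psi4 installation, and the printed values are not asserted here):

import qcengine as qcng
from qcelemental.models import ResultInput

inp = ResultInput(
    molecule={"symbols": ["Li"], "geometry": [0, 0, 0], "molecular_multiplicity": 2},
    driver="energy",
    model={"method": "B3LYP", "basis": "sto-3g"},
    keywords={"scf_type": "df"},
)
ret = qcng.compute(inp, "psi4", raise_error=True, return_dict=False)

print(ret.return_result)                  # total energy in Eh for an 'energy' driver
print(ret.properties.return_energy)       # the same value, via the properties block
print(ret.model.method, ret.model.basis)  # echoes the requested model
print(ret.provenance.creator)             # harness that ran the job, e.g. "Psi4"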
Code example #16
def test_driverenum_derivative_int(water, result_input):
    res = ResultInput(molecule=water, **result_input)

    assert res.driver == 'gradient'
    assert res.driver.derivative_int() == 1
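A minimal sketch, independent of the fixtures above, of how the driver enum's derivative_int maps the three analytic-derivative drivers (it assumes qcelemental's ResultInput as in the other examples):

from qcelemental.models import ResultInput

for driver, order in [("energy", 0), ("gradient", 1), ("hessian", 2)]:
    inp = ResultInput(
        molecule={"symbols": ["He"], "geometry": [0, 0, 0]},
        driver=driver,
        model={"method": "hf", "basis": "sto-3g"},
    )
    assert inp.driver.derivative_int() == order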
Code example #17
def test_result_sparsity(water, result_input, res_success):
    res_in = ResultInput(molecule=water, **result_input)
    assert set(res_in.dict()["model"].keys()) == {"method", "basis"}
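A hedged illustration of the sparsity behaviour being tested: model keys that were never supplied are dropped from .dict(), so only "method" and "basis" survive. This assumes the pydantic-style serialization used by qcelemental and constructs the input directly rather than through the fixtures:

from qcelemental.models import ResultInput

inp = ResultInput(
    molecule={"symbols": ["He"], "geometry": [0, 0, 0]},
    driver="energy",
    model={"method": "hf", "basis": "sto-3g"},
)
# Only the keys actually provided for the model survive serialization.
assert set(inp.dict()["model"].keys()) == {"method", "basis"}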
Code example #18
def vpt2(name, **kwargs):
    """Perform vibrational second-order perturbation computation through
    Cfour to get anharmonic frequencies. This version uses c4 for the disp
    and pt2 but gets gradients from p4.

    :type c4full: :ref:`boolean <op_py_boolean>`
    :param c4full: ``'on'`` || |dl| ``'off'`` |dr|

        Indicates whether, when *name* is a Cfour method and *mode* is a
        sow/reap approach, sown files are direct ZMAT files and FJOBARC
        files are expected at reap, so that only Cfour, not
        Cfour-through-Psi4, is needed for the distributed jobs.

    .. caution:: Some features are not yet implemented. Buy a developer a coffee.

       - Presently uses all gradients. Could mix in analytic 2nd-derivs.

       - Collect results.

       - Manage scratch / subdir better.

       - Allow CFOUR_BASIS

       - Consider forcing some tighter convcrit, c4 and p4

       - mixed ang/bohr signals

       - error by converting to ang in psi?

       - Expand CURRENT DIPOLE XYZ beyond SCF

       - Remember additional FJOBARC record TOTENER2 if EXCITE .ne. NONE

       - switch C --> S/R with recovery using shelf

    """
    from . import endorsed_plugins
    kwargs = kwargs_lower(kwargs)

    #    if 'options' in kwargs:
    #        driver_helpers.set_options(kwargs.pop('options'))
    #
    #    # Bounce if name is function
    #    if hasattr(name, '__call__'):
    #        return name(energy, kwargs.pop('label', 'custom function'), ptype='energy', **kwargs)
    #
    #    # Allow specification of methods to arbitrary order
    lowername = name.lower()
    package = get_package(lowername, kwargs)
    #    lowername, level = driver_helpers._parse_arbitrary_order(lowername)
    #    if level:
    #        kwargs['level'] = level

    # Make sure the molecule the user provided is the active one
    molecule = kwargs.pop('molecule', get_active_molecule())
    molecule.update_geometry()

    #    if len(pe.nu_options.scroll) == 0:
    #        #print('EMPTY OPT')
    #        pe.load_nu_options()

    # -----
    verbose = kwargs.pop('verbose', 0)
    scratch_messy = kwargs.pop('scratch_messy', True)  # TODO

    kwgs = {'accession': kwargs['accession'], 'verbose': verbose}

    #    optstash = p4util.OptionsState(
    #        ['BASIS'])

    # Option mode of operation- whether vpt2 run in one job or files farmed out
    if 'vpt2_mode' not in kwargs:
        if 'mode' in kwargs:
            kwargs['vpt2_mode'] = kwargs['mode']
            del kwargs['mode']
        else:
            kwargs['vpt2_mode'] = 'continuous'

    # Switches for route through code- S/R or continuous & Psi4 or Cfour gradients
    isSowReap = True if kwargs['vpt2_mode'].lower() == 'sowreap' else False
    #!BR#    isC4notP4 = bool(re.match('cfour', lowername)) or bool(re.match('c4-', lowername))
    isC4notP4 = False  # TODO until intf_psi4 hooked up to qcng
    isC4fully = True if ('c4full' in kwargs
                         and yes.match(str(kwargs['c4full'])) and isC4notP4
                         and isSowReap) else False
    print('isSowReap=', isSowReap, 'isC4notP4=', isC4notP4, 'isC4fully=',
          isC4fully)

    cfourharness = qcng.get_program('qcdb-cfour')
    config = qcng.config.get_config(local_options={"ncores": 2})

    # Save submission directory and basis set
    current_directory = os.getcwd()
    #    user_basis = core.get_global_option('BASIS')

    # Open data persistence shelf - vital for sowreap, checkpoint for continuous
    #    shelf = shelve.open(current_directory + '/' + os.path.splitext(core.outfile_name())[0] + '.shelf', writeback=True)
    shelf = shelve.open(current_directory + '/vpt2scratch.shelf',
                        writeback=True)

    # Cfour keywords to request vpt2 analysis through findif gradients
    c000_opts = RottenOptions()
    pe.load_options(c000_opts)
    c000_opts.require('CFOUR', 'VIBRATION', 'FINDIF', **kwgs)
    c000_opts.require('CFOUR', 'FREQ_ALGORITHM', 'PARALLEL', **kwgs)
    c000_opts.require('CFOUR', 'ANH_ALGORITHM', 'PARALLEL', **kwgs)
    c000_opts.require('CFOUR', 'ANHARMONIC', 'VPT2', **kwgs)
    c000_opts.require('CFOUR', 'FD_PROJECT', 'OFF', **kwgs)

    # When a Psi4 method is requested for vpt2, a skeleton of
    #   computations in Cfour is still required to hang the gradients
    #   upon. The skeleton is as cheap as possible (integrals only
    #   & sto-3g) and set up here.
    #!BR#    if isC4notP4:
    #!BR#        skelname = lowername
    #!BR#    else:
    if True:
        skelname = 'c4-scf'
#        core.set_global_option('BASIS', 'STO-3G')
#    P4  'c4-scf'/'cfour'CALC_LEVEL      lowername  # temporary
#    C4  lowername                       cfour{}  # temporary

    if 'status' not in shelf:
        shelf['status'] = 'initialized'
        shelf['linkage'] = os.getpid()
        shelf['zmat'] = {
        }  # Cfour-generated ZMAT files with finite difference geometries
        shelf['fjobarc'] = {
        }  # Cfour- or Psi4-generated ascii files with packaged gradient results
        shelf['results'] = {}  # models.Result
        shelf.sync()
    else:
        pass
        # how decide whether to use. keep precedent of intco.dat in mind

#    # Construct and move into directory job scratch / cfour scratch / harm
#    psioh = core.IOManager.shared_object()
#    psio = core.IO.shared_object()
#    os.chdir(psioh.get_default_path())  # psi_scratch
#    cfour_tmpdir = kwargs['path'] if 'path' in kwargs else \
#        'psi.' + str(os.getpid()) + '.' + psio.get_default_namespace() + \
#        '.cfour.' + str(uuid.uuid4())[:8]
#    if not os.path.exists(cfour_tmpdir):
#        os.mkdir(cfour_tmpdir)
#    os.chdir(cfour_tmpdir)  # psi_scratch/cfour
#    if not os.path.exists('harm'):
#        os.mkdir('harm')
#    os.chdir('harm')  # psi_scratch/cfour/harm

#    psioh.set_specific_retention(32, True)  # temporary, to track p4 scratch
#shelf['status'] = 'anharm_jobs_sown'  # temporary to force backtrack
    print('STAT', shelf['status'])  # temporary

    resi = ResultInput(
        **{
            'driver': 'energy',  # to prevent qcdb imposition of analytic hessian
            'extras': {
                'qcdb:options': copy.deepcopy(c000_opts),  #pe.nu_options),
            },
            'model': {
                'method': 'c4-scf',  #'hf',
                #'basis': '6-31g',
                'basis': 'sto-3g',
            },
            'molecule': molecule.to_schema(dtype=2),
            'provenance': provenance_stamp(__name__),
        })

    # Generate the ZMAT input file in scratch
    cfourrec = cfourharness.qcdb_build_input(resi, config)
    shelf['genbas'] = cfourrec['infiles']['GENBAS']
    shelf['zmat']['000-000'] = cfourrec['infiles']['ZMAT']
    shelf.sync()

    #    with open('ZMAT', 'w') as handle:
    #        cfour_infile = write_zmat(skelname, 1)
    #        handle.write(cfour_infile)
    #    print('\n====== Begin ZMAT input for CFOUR ======')
    #    print(open('ZMAT', 'r').read())
    #    print('======= End ZMAT input for CFOUR =======\n')

    # Check existing shelf consistent with generated ZMAT, store
    #    if ('000-000' in shelf['zmat']) and (shelf['zmat']['000-000'] != cfour_infile):
    #        diff = difflib.Differ().compare(shelf['zmat']['000-000'].splitlines(), cfour_infile.splitlines())
    #        raise ValidationError("""Input file translated to Cfour ZMAT does not match ZMAT stored in shelf.\n\n""" +
    #            '\n'.join(list(diff)))

    # Reset basis after Cfour skeleton seeded
    #    core.set_global_option('BASIS', user_basis)

    if shelf['status'] == 'initialized':
        print('{:_^45}'.format('  VPT2 Setup: Harmonic  '))

        # Generate the displacements that will form the harmonic freq
        scrkwgs = {
            'scratch_directory': config.scratch_directory,
            'scratch_messy': True,
            'scratch_suffix': '_000'
        }
        success, dexe = qcng.util.execute(['xjoda'], cfourrec['infiles'], [],
                                          **scrkwgs)
        partial = dexe['stdout']

        scrkwgs.update({
            'scratch_name': Path(dexe['scratch_directory']).name,
            'scratch_exist_ok': True
        })
        scrkwgs.update({'scratch_messy': scratch_messy})
        success, dexe = qcng.util.execute(['xsymcor'], {}, ['zmat*'],
                                          **scrkwgs)
        partial += dexe['stdout']

        print(partial)  # partial.out

        # Read the displacements that will form the harmonic freq
        zmats0N = ['000-' + item[-3:] for item in dexe['outfiles']['zmat*']]
        for zm_2 in zmats0N:
            _, zm2 = zm_2.split('-')
            shelf['zmat'][zm_2] = dexe['outfiles']['zmat*']['zmat' + zm2]
            shelf.sync()
            print(f'  CFOUR scratch file zmat{zm2} for {zm_2} has been read\n')
            #print('%s\n' % shelf['zmat'][zm_2])

        # S/R: Write distributed input files for harmonic freq
        if isSowReap:
            os.chdir(current_directory)
            inputSansMol = p4util.format_currentstate_for_input(gradient,
                                                                lowername,
                                                                allButMol=True,
                                                                **kwargs)
            for zm12 in zmats0N:
                zm1, zm2 = zm12.split('-')

                ifile = vpt2_sow_files(zm12, shelf['linkage'], isC4notP4,
                                       isC4fully, shelf['zmat'][zm12],
                                       inputSansMol, shelf['genbas'])

                with open('VPT2-' + zm12 + '.in', 'w') as handle:
                    handle.write(ifile)

            msg = vpt2_instructions('harmonic', current_directory, zmats0N)
            core.print_out(msg)
            print(msg)

        shelf['status'] = 'harm_jobs_sown'

        # S/R: Pause for distributed calculations
        if isSowReap:
            shelf.close()
            return 0.0

    if shelf['status'] == 'harm_jobs_sown':
        zmats0N = [
            item for item in sorted(shelf['zmat'].keys())
            if (item[:3] == '000' and item[-3:] != '000')
        ]

        # S/R: Check that distributed calcs all completed correctly
        if isSowReap:
            msg = vpt2_instructions('harmonic', current_directory, zmats0N)
            core.print_out(msg)
            isOk, msg = sown_jobs_status(
                current_directory, 'VPT2', zmats0N, reap_job_validate,
                shelf['linkage'],
                ['CURRENT ENERGY', 'CURRENT DIPOLE', 'CURRENT GRADIENT'])
            core.print_out(msg)
            print(msg)
            if not isOk:
                shelf.close()
                return 0.0

        # Collect all results from gradients forming the harmonic freq
        for zm12 in zmats0N:
            zm1, zm2 = zm12.split('-')
            if zm12 not in shelf['fjobarc']:
                print('{:_^45}'.format(f'  VPT2 Computation: {zm12}  '))

                fjobarc = vpt2_reaprun_files(
                    zm12,
                    shelf['linkage'],
                    isSowReap,
                    isC4notP4,
                    isC4fully,
                    shelf['zmat']
                    [zm12],  #current_directory, psioh.get_default_path(), cfour_tmpdir,
                    lowername,
                    kwargs,
                    shelf['genbas'],
                    config,
                    package,
                    scratch_messy=scratch_messy)
                shelf['fjobarc'][zm12] = fjobarc
                shelf.sync()
        shelf['status'] = 'harm_jobs_reaped'

    if shelf['status'] == 'harm_jobs_reaped':
        zmats0N = [
            item for item in sorted(shelf['zmat'].keys())
            if (item[:3] == '000' and item[-3:] != '000')
        ]

        print('{:_^45}'.format('  VPT2 Results: Harmonic  '))
        for k, v in shelf.items():
            print('   {:_^20}'.format(k))
            #pp.pprint(v)

        #scrkwgs = {'scratch_directory': config.scratch_directory, 'scratch_messy': True, 'scratch_suffix': '_000'}
        #scrkwgs.update({'scratch_name': Path(dexe['scratch_directory']).name, 'scratch_exist_ok': True})
        #scrkwgs.update({'scratch_messy': scratch_messy})

        # Process the gradients into harmonic freq
        scrkwgs = {
            'scratch_directory': config.scratch_directory,
            'scratch_messy': True,
            'scratch_suffix': '_000med'
        }
        success, dexe = qcng.util.execute(['xjoda'], {
            'ZMAT': shelf['zmat']['000-000'],
            'GENBAS': shelf['genbas']
        }, [], **scrkwgs)
        harmout = dexe['stdout']

        scrkwgs.update({
            'scratch_name': Path(dexe['scratch_directory']).name,
            'scratch_exist_ok': True
        })
        success, dexe = qcng.util.execute(['xsymcor'], {}, [], **scrkwgs)
        print('xsymcor', success)
        harmout += dexe['stdout']

        for zm12 in zmats0N:
            zm1, zm2 = zm12.split('-')
            success, dexe = qcng.util.execute(
                ['xja2fja'], {'FJOBARC': shelf['fjobarc'][zm12]}, [],
                **scrkwgs)
            print(zm12, 'xja2fja', success)
            harmout += dexe['stdout']

            success, dexe = qcng.util.execute(['xsymcor'], {}, [], **scrkwgs)
            print(zm12, 'xsymcor', success)
            harmout += dexe['stdout']

        success, dexe = qcng.util.execute(['xjoda'], {}, [], **scrkwgs)
        print('xjoda', success)
        harmout += dexe['stdout']

        for zm in Path(dexe['scratch_directory']).glob('zmat*'):
            print('Removing', zm)
            os.remove(zm)

        scrkwgs.update({'scratch_messy': scratch_messy})
        success, dexe = qcng.util.execute(['xcubic'], {}, ['zmat*'], **scrkwgs)
        print('xcubic', success)
        harmout += dexe['stdout']
        #print('HARMOUT')
        #print(harmout)

        pp.pprint(shelf['zmat'].keys())
        pp.pprint(shelf['fjobarc'].keys())

        #        os.chdir(psioh.get_default_path() + cfour_tmpdir + '/harm')  # psi_scratch/cfour/harm
        #        harmout = run_cfour_module('xjoda')
        #        harmout += run_cfour_module('xsymcor')
        #        for zm12 in zmats0N:
        #            zm1, zm2 = zm12.split('-')
        #            with open('FJOBARC', 'w') as handle:
        #                handle.write(shelf['fjobarc'][zm12])
        #            harmout += run_cfour_module('xja2fja')
        #            harmout += run_cfour_module('xsymcor')
        #            shutil.move('FJOBARC', 'fja.' + zm12)
        #            try:
        #                os.remove('zmat' + zm2)
        #            except OSError:
        #                pass
        #        harmout += run_cfour_module('xjoda')
        #        harmout += run_cfour_module('xcubic')
        #        core.print_out(harmout)
        #        with open('harm.out', 'w') as handle:
        #            handle.write(harmout)

        # Generate displacements along harmonic normal modes
        #zmatsN0 = [item[-3:] for item in sorted(shelf['zmat'].keys()) if (item[:3] == '000' and item[-3:] != '000')]
        for fl, contents in dexe['outfiles']['zmat*'].items():
            zmN_ = fl[-3:] + '-000'
            shelf['zmat'][zmN_] = contents
            shelf.sync()
            print(f'  CFOUR scratch file {fl} for {zmN_} has been read\n')
            #print('%s\n' % shelf['zmat'][zm12])

        zmatsN0 = [
            item for item in sorted(shelf['zmat'].keys())
            if (item[:3] != '000' and item[-3:] == '000')
        ]
        for zmN0 in zmatsN0:
            zm1, _ = zmN0.split('-')

            # Collect displacements along the normal coordinates generated by the harmonic freq.
            #   Further harmonic freqs are to be run at each of these to produce quartic force field.
            #   To carry these out, generate displacements for findif by gradient at each displacement.

            scrkwgs = {
                'scratch_directory': config.scratch_directory,
                'scratch_messy': True,
                'scratch_suffix': f'_{zmN0}'
            }
            success, dexe = qcng.util.execute(['xjoda'], {
                'ZMAT': shelf['zmat'][zmN0],
                'GENBAS': shelf['genbas']
            }, [], **scrkwgs)

            scrkwgs.update({
                'scratch_name': Path(dexe['scratch_directory']).name,
                'scratch_exist_ok': True
            })
            scrkwgs.update({'scratch_messy': scratch_messy})
            success, dexe = qcng.util.execute(['xsymcor'], {}, ['zmat*'],
                                              **scrkwgs)

            for fl, contents in dexe['outfiles']['zmat*'].items():
                zm12 = zm1 + '-' + fl[-3:]
                shelf['zmat'][zm12] = contents
                shelf.sync()
                print('  CFOUR scratch file %s for %s has been read\n' %
                      (fl, zm12))
                #print('%s\n' % shelf['zmat'][zm12])


#        zmatsN0 = [item[-3:] for item in sorted(glob.glob('zmat*'))]
#        os.chdir('..')  # psi_scratch/cfour
#        for zm1 in zmatsN0:
#            zm12 = zm1 + '-000'
#            with open(psioh.get_default_path() + cfour_tmpdir + '/harm/zmat' + zm1, 'r') as handle:
#                shelf['zmat'][zm12] = handle.read()
#                shelf.sync()
#                core.print_out('  CFOUR scratch file %s for %s has been read\n' % ('zmat' + zm1, zm12))
#                core.print_out('%s\n' % shelf['zmat'][zm12])
#
#            # Collect displacements along the normal coordinates generated by the harmonic freq.
#            #   Further harmonic freqs are to be run at each of these to produce quartic force field.
#            #   To carry these out, generate displacements for findif by gradient at each displacement.
#            if os.path.exists(zm1):
#                shutil.rmtree(zm1)
#            os.mkdir(zm1)
#            os.chdir(zm1)  # psi_scratch/cfour/004
#            with open('ZMAT', 'w') as handle:
#                handle.write(shelf['zmat'][zm12])
#            shutil.copy2('../harm/GENBAS', 'GENBAS')  # ln -s $ecpdir/ECPDATA $j/ECPDATA
#            with open('partial.out', 'w') as handle:
#                handle.write(run_cfour_module('xjoda'))
#                handle.write(run_cfour_module('xsymcor'))
#
#            # Read the displacements that will form the anharmonic freq
#            zmatsNN = [item[-3:] for item in sorted(glob.glob('zmat*'))]
#            for zm2 in zmatsNN:
#                zm12 = zm1 + '-' + zm2
#                with open(psioh.get_default_path() + cfour_tmpdir + '/' + zm1 + '/zmat' + zm2, 'r') as handle:
#                    shelf['zmat'][zm12] = handle.read()
#                    shelf.sync()
#                    core.print_out('  CFOUR scratch file %s for %s has been read\n' % ('zmat' + zm2, zm12))
#                    core.print_out('%s\n' % shelf['zmat'][zm12])
#            os.chdir('..')  # psi_scratch/cfour

        zmatsNN = [
            item for item in sorted(shelf['zmat'].keys())
            if (item[:3] != '000' and item[-3:] != '000')
        ]

        # S/R: Write distributed input files for anharmonic freq
        if isSowReap:
            os.chdir(current_directory)
            inputSansMol = p4util.format_currentstate_for_input(gradient,
                                                                lowername,
                                                                allButMol=True,
                                                                **kwargs)
            for zm12 in zmatsNN:
                zm1, zm2 = zm12.split('-')

                ifile = vpt2_sow_files(zm12, shelf['linkage'], isC4notP4,
                                       isC4fully, shelf['zmat'][zm12],
                                       inputSansMol, shelf['genbas'])
                # GENBAS needed here

                with open('VPT2-' + zm12 + '.in', 'w') as handle:
                    handle.write(ifile)

            msg = vpt2_instructions('anharmonic', current_directory, zmatsNN)
            core.print_out(msg)
            print(msg)

        shelf['status'] = 'anharm_jobs_sown'

        # S/R: Pause for distributed calculations
        if isSowReap:
            shelf.close()
            return 0.0

    if shelf['status'] == 'anharm_jobs_sown':
        zmatsNN = [
            item for item in sorted(shelf['zmat'].keys())
            if (item[:3] != '000' and item[-3:] != '000')
        ]

        # S/R: Check that distributed calcs all completed correctly
        if isSowReap:
            msg = vpt2_instructions('anharmonic', current_directory, zmatsNN)
            core.print_out(msg)
            isOk, msg = sown_jobs_status(
                current_directory, 'VPT2', zmatsNN, reap_job_validate,
                shelf['linkage'],
                ['CURRENT ENERGY', 'CURRENT DIPOLE', 'CURRENT GRADIENT'])
            core.print_out(msg)
            print(msg)
            if not isOk:
                shelf.close()
                return 0.0

        # Collect all results from gradients forming the anharmonic freq
        for zmNN in zmatsNN:
            zm1, zm2 = zmNN.split('-')
            if zmNN not in shelf['fjobarc']:
                print('{:_^45}'.format(f'  VPT2 Computation: {zmNN}'))

                fjobarc = vpt2_reaprun_files(zmNN,
                                             shelf['linkage'],
                                             isSowReap,
                                             isC4notP4,
                                             isC4fully,
                                             shelf['zmat'][zmNN],
                                             lowername,
                                             kwargs,
                                             shelf['genbas'],
                                             config,
                                             package,
                                             scratch_messy=scratch_messy)
                shelf['fjobarc'][zmNN] = fjobarc
                shelf.sync()
        shelf['status'] = 'anharm_jobs_reaped'

    if shelf['status'] == 'anharm_jobs_reaped':
        zmats0N = [
            item for item in sorted(shelf['zmat'].keys())
            if (item[:3] == '000' and item[-3:] != '000')
        ]
        zmatsN0 = [
            item for item in sorted(shelf['zmat'].keys())
            if (item[:3] != '000' and item[-3:] == '000')
        ]
        zmatsNN = [
            item for item in sorted(shelf['zmat'].keys())
            if (item[:3] != '000' and item[-3:] != '000')
        ]

        print('{:_^45}'.format('  VPT2 Results: Harmonic  '))

        # Process the gradients into harmonic freq
        scrkwgs = {
            'scratch_directory': config.scratch_directory,
            'scratch_messy': True,
            'scratch_suffix': '_000final'
        }
        success, dexe = qcng.util.execute(['xjoda'], {
            'ZMAT': shelf['zmat']['000-000'],
            'GENBAS': shelf['genbas']
        }, [], **scrkwgs)
        anharmout = dexe['stdout']

        scrkwgs.update({
            'scratch_name': Path(dexe['scratch_directory']).name,
            'scratch_exist_ok': True
        })
        success, dexe = qcng.util.execute(['xsymcor'], {}, [], **scrkwgs)
        anharmout += dexe['stdout']

        for zm12 in zmats0N:
            zm1, zm2 = zm12.split('-')
            success, dexe = qcng.util.execute(
                ['xja2fja'], {'FJOBARC': shelf['fjobarc'][zm12]}, [],
                **scrkwgs)
            anharmout += dexe['stdout']
            success, dexe = qcng.util.execute(['xsymcor'], {}, [], **scrkwgs)
            anharmout += dexe['stdout']

        success, dexe = qcng.util.execute(['xjoda'], {}, [], **scrkwgs)
        anharmout += dexe['stdout']
        scrkwgs.update({'scratch_messy': scratch_messy})
        success, dexe = qcng.util.execute(['xcubic'], {},
                                          ['zmat*', 'JOBARC', 'JAINDX'],
                                          as_binary=['JOBARC', 'JAINDX'],
                                          **scrkwgs)
        anharmout += dexe['stdout']

        jobarc0 = dexe['outfiles']['JOBARC']
        jaindx0 = dexe['outfiles']['JAINDX']

        # Process the gradients into harmonic freq at each normco displaced point
        os.chdir('..')  # psi_scratch/cfour
        for zm1_ in zmatsN0:
            zm1, _ = zm1_.split('-')

            scrkwgs = {
                'scratch_directory': config.scratch_directory,
                'scratch_messy': True,
                'scratch_suffix': f'_{zm1}final'
            }
            success, dexe = qcng.util.execute(['xjoda'], {
                'ZMAT': shelf['zmat'][zm1_],
                'GENBAS': shelf['genbas']
            }, [], **scrkwgs)
            anharmout = dexe['stdout']

            scrkwgs.update({
                'scratch_name': Path(dexe['scratch_directory']).name,
                'scratch_exist_ok': True
            })
            success, dexe = qcng.util.execute(['xsymcor'], {}, [], **scrkwgs)
            anharmout += dexe['stdout']

            for zm12 in [
                    item for item in zmatsNN
                    if (item[:3] == zm1 and item[-3:] != '000')
            ]:
                _, zm2 = zm12.split('-')
                print(zm12, shelf['fjobarc'][zm12])

                success, dexe = qcng.util.execute(
                    ['xja2fja'], {'FJOBARC': shelf['fjobarc'][zm12]}, [],
                    **scrkwgs)
                anharmout += dexe['stdout']

                success, dexe = qcng.util.execute(['xsymcor'], {}, [],
                                                  **scrkwgs)
                anharmout += dexe['stdout']

                os.remove(Path(dexe['scratch_directory']) / 'FJOBARC')

            success, dexe = qcng.util.execute(['xjoda'], {}, [], **scrkwgs)
            anharmout += dexe['stdout']

            scrkwgs.update({'scratch_messy': scratch_messy})
            success, dexe = qcng.util.execute(['xja2fja'], {}, ['FJOBARC'],
                                              **scrkwgs)
            anharmout += dexe['stdout']
            shelf['fjobarc'][zm1_] = dexe['outfiles']['FJOBARC']
            shelf.sync()

            print('PARTIAL', zm1_, '\n', anharmout)

        # Process the harmonic freqs at normco displacements into anharmonic freq
        print('{:_^45}'.format('  VPT2 Results: Anharmonic  '))

        pprint.pprint(shelf['zmat'].keys())
        pprint.pprint(shelf['fjobarc'].keys())

        scrkwgs = {
            'scratch_directory': config.scratch_directory,
            'scratch_messy': True,
            'scratch_suffix': '_000anharm'
        }
        success, dexe = qcng.util.execute(['ls'], {
            'JOBARC': jobarc0,
            'JAINDX': jaindx0
        }, [],
                                          as_binary=['JOBARC', 'JAINDX'],
                                          **scrkwgs)
        anharmout = ''
        scrkwgs.update({
            'scratch_name': Path(dexe['scratch_directory']).name,
            'scratch_exist_ok': True
        })

        for zm1_ in zmatsN0:
            zm1, _ = zm1_.split('-')
            success, dexe = qcng.util.execute(
                ['xja2fja'], {'FJOBARC': shelf['fjobarc'][zm1_]}, [],
                **scrkwgs)
            print(zm1_, 'xja2fja', success)
            anharmout += dexe['stdout']

            success, dexe = qcng.util.execute(['xcubic'], {}, [], **scrkwgs)
            print(zm1_, 'xcubic', success)
            anharmout += dexe['stdout']

        print(anharmout)  # anharm.out

        shelf['status'] = 'vpt2_completed'

    # Finish up
    shelf.close()
Code example #19
def vpt2_reaprun_files(item, linkage, isSowReap, isC4notP4, isC4fully, zmat,
                       lowername, kwargs, genbas, config, package,
                       scratch_messy):
    """Provided with the particular displacement number *item* and the
    associated *zmat* file with geometry and *linkage*, returns the
    FJOBARC contents. Depending on the mode settings of *isC4notP4*,
    *isSowReap*, and *isC4fully*, either runs (using *lowername* and
    *kwargs*) or reaps contents. *outdir* is where psi4 was invoked,
    *scrdir* is the psi4 scratch directory, and *c4scrdir* is Cfour
    scratch directory within.

    """
    # Extract qcel.models.Molecule at findif orientation
    zmmol = harvest_zmat(zmat)

    # Cfour S/R Direct for gradients
    if isC4fully:
        with open('VPT2-' + item + '.fja', 'r') as handle:
            fjobarc = handle.read()


#!BR#    # Cfour for gradients
#!BR#    elif isC4notP4:
#!BR#
#!BR#        # S/R: Reap results from output file
#!BR#        if isSowReap:
#!BR#            isOk, msg, results = reap_job_validate(outdir, 'VPT2', item, linkage,
#!BR#                ['CURRENT ENERGY', 'CURRENT DIPOLE', 'CURRENT GRADIENT', 'CURRENT MOLECULE'])
#!BR#            if not isOk:
#!BR#                raise ValidationError(msg)
#!BR#
#!BR#            fje = results['CURRENT ENERGY']
#!BR#            fjgrd = results['CURRENT GRADIENT']
#!BR#            fjdip = [item / constants.dipmom_au2debye for item in results['CURRENT DIPOLE']]
#!BR#            c4mol = qcdb.Molecule(results['CURRENT MOLECULE'])
#!BR#            c4mol.update_geometry()
#!BR#
#!BR#        # C: Run the job and collect results
#!BR#        else:
#!BR#            # Prepare Cfour skeleton calc directory
#!BR#            {'ZMAT': zmat, 'GENBAS': shelf['genbas']},
#!BR#            os.chdir(scrdir + c4scrdir)  # psi_scratch/cfour
#!BR#            if os.path.exists('scr.' + item):
#!BR#                shutil.rmtree('scr.' + item)
#!BR#            os.mkdir('scr.' + item)
#!BR#            os.chdir('scr.' + item)  # psi_scratch/cfour/scr.000-004
#!BR#            with open('ZMAT', 'w') as handle:
#!BR#                handle.write(zmat)
#!BR#            shutil.copy2('../harm/GENBAS', 'GENBAS')
#!BR#
#!BR#            #os.chdir(scrdir + '/scr.' + item)
#!BR#            #run_cfour_module('xja2fja')
#!BR#            #with open('FJOBARC', 'r') as handle:
#!BR#            #    fjobarc = handle.read()
#!BR#
#!BR#            # Run Cfour calc using ZMAT & GENBAS in scratch, outdir redirects to outfile
#!BR#            os.chdir(outdir)  # current_directory
#!BR#            core.get_active_molecule().set_name('blank_molecule_psi4_yo')
#!BR#            energy('cfour', path=c4scrdir + '/scr.' + item)
#!BR##            os.chdir(scrdir + '/scr.' + item)
#!BR#
#!BR#            fje = core.variable('CURRENT ENERGY')
#!BR#            fjgrd = p4util.mat2arr(core.get_gradient())
#!BR#            fjdip = [core.variable('CURRENT DIPOLE X') / constants.dipmom_au2debye,
#!BR#                     core.variable('CURRENT DIPOLE Y') / constants.dipmom_au2debye,
#!BR#                     core.variable('CURRENT DIPOLE Z') / constants.dipmom_au2debye]
#!BR#            # TODO switch DIPOLE to vector
#!BR#            c4mol = qcdb.Molecule(core.get_active_molecule().create_psi4_string_from_molecule())
#!BR#            c4mol.update_geometry()
#!BR#
#!BR#        # Get map btwn ZMAT and C4 orientation, then use it, grad and dipole to forge FJOBARC file
#!BR#        fjobarc = format_fjobarc(fje,
#!BR#            *backtransform(chgeMol=zmmol, permMol=c4mol), gradient=fjgrd, dipole=fjdip)

# Psi4 for gradients
    else:

        # Run Cfour skeleton calc and extract qcdb.Molecule at needed C4 orientation
        scrkwgs = {
            'scratch_directory': config.scratch_directory,
            'scratch_messy': True,
            'scratch_suffix': f'_{item}skel'
        }
        success, dexe = qcng.util.execute(['xjoda'], {
            'ZMAT': zmat,
            'GENBAS': genbas
        }, [], **scrkwgs)
        skel = dexe['stdout']

        scrkwgs.update({
            'scratch_name': Path(dexe['scratch_directory']).name,
            'scratch_exist_ok': True
        })
        success, dexe = qcng.util.execute(['xvmol'], {}, [], **scrkwgs)
        skel += dexe['stdout']

        scrkwgs.update({'scratch_messy': scratch_messy})
        success, dexe = qcng.util.execute(['xvmol2ja'], {},
                                          ['JOBARC', 'JAINDX'],
                                          as_binary=['JOBARC', 'JAINDX'],
                                          **scrkwgs)
        skel += dexe['stdout']

        print(skel)  # partial.out

        print(
            f"""  CFOUR scratch file 'JOBARC (binary)' for {item} has been read"""
        )
        c4mol = jajo2mol(
            getrec([b'COORD   ', b'ATOMCHRG', b'MAP2ZMAT', b'IFLAGS  '],
                   dexe['outfiles']['JOBARC'], dexe['outfiles']['JAINDX']))

        # S/R: Reap results from output file
        if isSowReap:
            isOk, msg, results = reap_job_validate(
                outdir, 'VPT2', item, linkage,
                ['CURRENT ENERGY', 'CURRENT DIPOLE', 'CURRENT GRADIENT'])
            if not isOk:
                raise ValidationError(msg)

            fje = results['CURRENT ENERGY']
            fjgrd = results['CURRENT GRADIENT']
            fjdip = [
                item / constants.dipmom_au2debye
                for item in results['CURRENT DIPOLE']
            ]

        # C: Run the job and collect results
        else:

            resi = ResultInput(
                **{
                    'driver': 'gradient',
                    'extras': {
                        'qcdb:options': copy.deepcopy(pe.nu_options),
                    },
                    'model': {
                        'method': lowername,
                        'basis': '(auto)',
                    },
                    'molecule': zmmol,
                    'provenance': provenance_stamp(__name__),
                })
            res = qcng.compute(resi, 'qcdb-' + package, raise_error=True)
            #pp.pprint(res.dict())

            fje = res.extras['qcdb:qcvars']['CURRENT ENERGY']['data']
            fjgrd = res.extras['qcdb:qcvars']['CURRENT GRADIENT']['data']
            #au2debye = qcel.constants.get('dipmom_au2debye', return_tuple=True).data
            # TODO switch DIPOLE to vector
            fjdip = [
                float(res.extras['qcdb:qcvars']['CURRENT DIPOLE X']['data']) /
                qcel.constants.dipmom_au2debye,
                float(res.extras['qcdb:qcvars']['CURRENT DIPOLE Y']['data']) /
                qcel.constants.dipmom_au2debye,
                float(res.extras['qcdb:qcvars']['CURRENT DIPOLE Z']['data']) /
                qcel.constants.dipmom_au2debye
            ]

        # Transform results into C4 orientation (defined by c4mol) & forge FJOBARC file
        fjobarc = format_fjobarc(
            fje,
            *backtransform(chgeMol=zmmol,
                           permMol=c4mol,
                           chgeGrad=fjgrd,
                           chgeDip=fjdip))

    return fjobarc
Code example #20
def unpack_single_task_spec(storage, meta, molecules):
    """Transforms a metadata compute packet into an expanded
    QC Schema for multiple runs.

    Parameters
    ----------
    storage : DBSocket
        A live connection to the current database.
    meta : dict
        A JSON description of the metadata involved with the computation
    molecules : list of str, dict
        A list of molecule ID's or full JSON molecules associated with the run.

    Returns
    -------
    ret : tuple(list, list)
        A list of ResultInput tasks (None for molecules that could not be found)
        and a list of any errors that occurred.

    Examples
    --------

    >>> meta = {
        "procedure": "single",
        "driver": "energy",
        "method": "HF",
        "basis": "sto-3g",
        "keywords": "default",
        "program": "psi4",
    }

    >>> molecules = [{"geometry": [0, 0, 0], "symbols" : ["He"]}]

    >>> unpack_single_task_spec(storage, meta, molecules)

    """

    # Get the required molecules
    raw_molecules_query = storage.get_add_molecules_mixed(molecules)

    # Pull out the needed keywords
    if meta["keywords"] is None:
        keyword_set = {}
    else:
        keyword_set = storage.get_add_keywords_mixed([meta["keywords"]
                                                      ])["data"][0]
        keyword_set = keyword_set["values"]

    # Create the "universal header"
    task_meta = json.dumps({
        "driver": meta["driver"],
        "keywords": keyword_set,
        "model": {
            "method": meta["method"],
            "basis": meta["basis"]
        },
        "extras": {
            "_qcfractal_tags": {
                "program": meta["program"],
                "keywords": meta["keywords"]
            }
        }
    })

    tasks = []
    for mol in raw_molecules_query["data"]:
        if mol is None:
            tasks.append(None)
            continue

        data = json.loads(task_meta)
        data["molecule"] = mol

        tasks.append(ResultInput(**data))

    return tasks, []
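A storage-free sketch of the same expansion pattern, with the database lookups replaced by literal placeholder values for the keyword set and molecule:

import json

from qcelemental.models import ResultInput

# One "universal header" serialized once, then copied per molecule.
task_meta = json.dumps({
    "driver": "energy",
    "keywords": {},
    "model": {"method": "HF", "basis": "sto-3g"},
    "extras": {"_qcfractal_tags": {"program": "psi4", "keywords": "default"}},
})

molecules = [{"symbols": ["He"], "geometry": [0, 0, 0]}]

tasks = []
for mol in molecules:
    data = json.loads(task_meta)
    data["molecule"] = mol
    tasks.append(ResultInput(**data))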
Code example #21
def test_result_sparsity(water, result_input, res_success):
    res_in = ResultInput(molecule=water, **result_input)
    assert set(
        res_in.dict()["properties"].keys()) == {"scf_one_electron_energy"}