Esempio n. 1
0
def test_compute_wavefunction(fractal_compute_server):
    """Run an HF/sto-3g energy on H2 through the server and check that the
    requested wavefunction protocol data (orbitals, basis) comes back."""

    # Wavefunction protocols require a sufficiently recent Psi4.
    psiver = qcng.get_program("psi4").get_version()
    if parse_version(psiver) < parse_version("1.4a2.dev160"):
        pytest.skip("Must use a modern version of Psi4 to execute")

    # Build a client
    client = ptl.FractalClient(fractal_compute_server)

    # Add a hydrogen molecule: two H atoms on the x-axis, 1 bohr apart
    hydrogen = ptl.Molecule.from_data([[1, 0, 0, -0.5], [1, 0, 0, 0.5]], dtype="numpy", units="bohr")

    # Ask the server to compute a new computation, keeping orbitals and eigenvalues
    meta = client.add_compute(
        program="psi4",
        driver="energy",
        method="HF",
        basis="sto-3g",
        molecule=hydrogen,
        protocols={"wavefunction": "orbitals_and_eigenvalues"},
    )

    fractal_compute_server.await_results()
    assert len(fractal_compute_server.list_current_tasks()) == 0

    result = client.query_results(id=meta.ids)[0]
    assert result.wavefunction

    # Single-key access returns the raw array (2 AOs x 2 MOs for H2/sto-3g)
    orbitals = result.get_wavefunction("orbitals_a")
    assert isinstance(orbitals, np.ndarray)
    assert orbitals.shape == (2, 2)

    # Multi-key access returns a dict keyed by the requested fields
    fetched = result.get_wavefunction(["orbitals_a", "basis"])
    assert fetched.keys() == {"orbitals_a", "basis"}
Esempio n. 2
0
def test_qchem_logfile_parser(test_case):
    """Parse a bare Q-Chem log file and compare it against the stored reference."""

    # Fetch the stored log file for this case
    case_data = qchem_logonly_info.get_test_data(test_case)
    logfiles = {"dispatch.out": case_data["qchem.out"]}

    # Logfile-only parsing is expected to emit a warning
    with pytest.warns(Warning):
        parsed = qcng.get_program("qchem", check=False).parse_logfile(logfiles).dict()
    parsed["stdout"] = None

    reference = qcel.models.AtomicResult.parse_raw(case_data["output.json"]).dict()

    # Drop any provenance keys the reference does not carry
    for extra_key in list(parsed["provenance"]):
        if extra_key not in reference["provenance"]:
            parsed["provenance"].pop(extra_key)

    # The reference molecule is dense; keep only the keys the sparse parsed one has
    reference["molecule"] = {
        k: v for k, v in reference["molecule"].items() if k in parsed["molecule"]
    }

    ok, message = compare_recursive(reference, parsed, return_message=True, forgive=qchem_forgive)
    assert ok, message
Esempio n. 3
0
def test_qchem_logfile_parser_qcscr(test_case):
    """Parse Q-Chem scratch outfiles and compare them against the stored reference."""

    # Deserialize the stored scratch files for this case
    case_data = qchem_info.get_test_data(test_case)
    scratch_files = qcel.util.deserialize(case_data["outfiles.msgpack"], "msgpack-ext")

    # Parsing is expected to emit a warning
    with pytest.warns(Warning):
        parsed = qcng.get_program("qchem", check=False).parse_logfile(scratch_files).dict()
    parsed["stdout"] = None

    reference = qcel.models.AtomicResult.parse_raw(case_data["output.json"]).dict()

    # Drop any provenance keys the reference does not carry
    for extra_key in list(parsed["provenance"]):
        if extra_key not in reference["provenance"]:
            parsed["provenance"].pop(extra_key)

    reference["stdout"] = None

    # The reference molecule is dense; keep only the keys the sparse parsed one has
    reference["molecule"] = {
        k: v for k, v in reference["molecule"].items() if k in parsed["molecule"]
    }

    # Normalize method case before comparison
    reference["model"]["method"] = reference["model"]["method"].lower()

    ok, message = compare_recursive(reference, parsed, return_message=True, forgive=qchem_forgive)
    assert ok, message
Esempio n. 4
0
def test_qchem_logfile_parser_qcscr(test_case):
    """Parse Q-Chem scratch outfiles and compare against the reference output,
    with an explicit forgive list for provenance fields."""

    # Deserialize the stored scratch files for this case
    case_data = qchem_info.get_test_data(test_case)
    scratch_files = qcel.util.deserialize(case_data["outfiles.msgpack"], "msgpack-ext")

    # Parsing is expected to emit a warning
    with pytest.warns(Warning):
        parsed = qcng.get_program("qchem", check=False).parse_logfile(scratch_files).dict()
    parsed["stdout"] = None

    reference = qcel.models.AtomicResult.parse_raw(case_data["output.json"]).dict()

    # Drop any provenance keys the reference does not carry
    for extra_key in list(parsed["provenance"]):
        if extra_key not in reference["provenance"]:
            parsed["provenance"].pop(extra_key)

    reference["stdout"] = None

    # compare_recursive.forgive can be used once QCEL#174 is released
    parsed["molecule"].pop("connectivity")
    reference["molecule"].pop("connectivity")

    # Normalize method case before comparison
    reference["model"]["method"] = reference["model"]["method"].lower()

    ok, message = compare_recursive(
        reference,
        parsed,
        return_message=True,
        forgive=[
            "root.molecule.provenance.version", "root.provenance.version",
            "root.provenance.routine"
        ],
    )
    assert ok, message
Esempio n. 5
0
def test_local_options_memory_gib(program, model, keywords, memory_trickery, request):
    """Ensure memory handling implemented in harness (if applicable).

    For available harnesses, run minimal calc at specific total node memory, both through runtime
      config alone and with clashing (and non-QCEngine-like) keyword spec. Check memory quantity
      shows up in ``TaskConfig``.
    For ``managed-memory``-active harnesses, check that memory registers in output.

    New Harness Instructions
    ------------------------
    * Make sure minimal calc is in _canonical_methods above.
    * If ``managed_memory=True`` in harness, add regex to ``stdout_ref`` below to check that memory
      is specifiable.
    * If this test doesn't work, implement or adjust ``config.memory`` in your harness.

    """
    if not has_program(program):
        pytest.skip(f"Program '{program}' not found.")

    harness = qcng.get_program(program)
    molecule = _get_molecule(program, model["method"])

    # If no program-specific override exists, the whole trickery dict is merged in as keywords,
    # deliberately clashing with the runtime memory config below.
    addl_keywords = memory_trickery.get(program, memory_trickery)
    use_keywords = {**keywords, **addl_keywords}

    #  <<  Config

    # 1.555 GiB: an odd value so the program-specific printouts below are unambiguous.
    config = qcng.config.get_config(
        hostname="something",
        task_config={
            "ncores": 1,
            "nnodes": 1,
            "memory": 1.555,
        },
    )

    #  <<  Run

    inp = AtomicInput(molecule=molecule, driver="energy", model=model, keywords=use_keywords)
    ret = qcng.compute(inp, program, raise_error=True, local_options=config.dict())
    pprint.pprint(ret.dict(), width=200)
    assert ret.success is True

    #  <<  Reference

    # Per-program stdout patterns proving the 1.555 GiB setting reached the program.
    stdout_ref = {  # 1.555 GiB = 208708567 quad-words
        "cfour": "Allocated    1592 MB of main memory",
        "gamess": "208000000 WORDS OF MEMORY AVAILABLE",
        "nwchem": r"total    =  2087085\d\d doubles =   1592.3 Mbytes",  # doubles is quad-words. Mbytes is MiB
        "psi4": "1592 MiB Core",
    }

    #  <<  Test

    # The runtime config must win over any clashing keyword spec.
    assert config.ncores == 1
    assert pytest.approx(config.memory, 0.1) == 1.555

    if harness._defaults["managed_memory"] is True:
        assert re.search(stdout_ref[program], ret.stdout), f"Memory pattern not found: {stdout_ref[program]}"
Esempio n. 6
0
def test_terachem_input_formatter(test_case):
    """Check that a TeraChem input file can be built from stored input data."""
    case_data = terachem_info.get_test_data(test_case)
    atomic_input = qcel.models.AtomicInput.parse_raw(case_data["input.json"])

    # TODO add actual comparison of generated input file
    built = qcng.get_program("terachem", check=False).build_input(atomic_input, qcng.get_config())
    assert built.keys() >= {"commands", "infiles"}
Esempio n. 7
0
 def check_available(program: str, optimiser: str) -> bool:
     """
     Confirm ahead of run time that both the requested program and
     optimiser harnesses are installed, via qcengine's lookups.
     """
     qcng.get_program(name=program, check=True)
     qcng.get_procedure(name=optimiser)
     return True
Esempio n. 8
0
def test_molpro_input_formatter(test_case):
    """Build a Molpro input from stored data; only checks that it runs."""

    # Fetch the stored input for this case
    case_data = molpro_info.get_test_data(test_case)
    result_input = qcel.models.ResultInput.parse_raw(case_data["input.json"])

    # Just test that it runs for now
    built = qcng.get_program('molpro').build_input(result_input, qcng.get_config())
    assert built.keys() >= {"commands", "infiles"}
Esempio n. 9
0
def test_local_options_ncores(program, model, keywords, ncores):
    """Ensure multithreading implemented in harness (if applicable) or multithreaded runs don't
    break harness (if inapplicable).

    For available harnesses, run minimal calc with single and multiple cores; check ncores count
      shows up in ``TaskConfig``.
    For ``thread_parallel``-active harnesses, check ncores count registers in output.

    New Harness Instructions
    ------------------------
    * Make sure minimal calc is in _canonical_methods above.
    * If ``thread_parallel=True`` in harness, add regex to ``stdout_ref`` below to check ncores the
      program sees.
    * If this test doesn't work, implement or adjust ``config.ncores`` in your harness.

    """
    if not has_program(program):
        pytest.skip(f"Program '{program}' not found.")

    harness = qcng.get_program(program)
    molecule = _get_molecule(program, model["method"])

    #  <<  Config

    # Request the parametrized core count on a single node.
    config = qcng.config.get_config(
        hostname="something",
        task_config={
            "ncores": ncores,
            "nnodes": 1,
        },
    )

    #  <<  Run

    inp = AtomicInput(molecule=molecule, driver="energy", model=model, keywords=keywords)
    ret = qcng.compute(inp, program, raise_error=True, local_options=config.dict())
    pprint.pprint(ret.dict(), width=200)
    assert ret.success is True

    #  <<  Reference

    # Per-program stdout patterns proving the thread count reached the program.
    stdout_ref = {
        "cfour": rf"Running with {ncores} threads/proc",
        "gamess": rf"MEMDDI DISTRIBUTED OVER\s+{ncores} PROCESSORS",
        # "gamess": rf"PARALLEL VERSION RUNNING ON\s+{ncores} PROCESSORS IN\s+1 NODES",  # no line for serial
        # nwchem is node_parallel only
        "psi4": rf"Threads:\s+{ncores}",
    }

    #  <<  Test

    assert config.ncores == ncores
    assert config.nnodes == 1

    if harness._defaults["thread_parallel"] is True:
        assert re.search(stdout_ref[program], ret.stdout), f"Thread pattern not found: {stdout_ref[program]}"
Esempio n. 10
0
def test_terachem_output_parser(test_case):
    """Parse stored TeraChem output and compare it to the reference result."""

    # Fetch the stored output data for this case
    case_data = terachem_info.get_test_data(test_case)
    result_input = qcel.models.ResultInput.parse_raw(case_data["input.json"])

    parsed = qcng.get_program('terachem', check=False).parse_output(case_data, result_input)
    reference = qcel.models.Result.parse_raw(case_data["output.json"])

    assert compare_recursive(reference.dict(), parsed.dict())
Esempio n. 11
0
def test_terachem_output_parser(test_case):
    """Parse stored TeraChem output and compare it to the reference,
    forgiving fields that legitimately differ."""

    # Fetch the stored output data for this case
    case_data = terachem_info.get_test_data(test_case)
    atomic_input = qcel.models.AtomicInput.parse_raw(case_data["input.json"])

    parsed = qcng.get_program("terachem", check=False).parse_output(case_data, atomic_input).dict()
    reference = qcel.models.AtomicResult.parse_raw(case_data["output.json"]).dict()

    # Forgiving molecule since it is now sparse
    assert compare_recursive(reference, parsed, forgive={"stdout", "provenance", "molecule"})
Esempio n. 12
0
def test_entos_input_formatter_template(test_case):
    """Build an entos input from stored data using a custom template string."""

    # Fetch the stored input for this case
    case_data = entos_info.get_test_data(test_case)
    result_input = qcel.models.ResultInput.parse_raw(case_data["input.json"])

    # TODO add actual comparison of generated input file
    built = qcng.get_program('entos', check=False).build_input(
        result_input, qcng.get_config(), template="Test template")
    assert built.keys() >= {"commands", "infiles"}
Esempio n. 13
0
def is_program_new_enough(program, version_feature_introduced):
    """Check whether *program* is usable through QCEngine at a sufficiently
    recent version.

    Returns True only when the program is registered with QCEngine,
    locatable in the environment, has a parseable version, and that
    normalized version is >= *version_feature_introduced*.
    """
    if program not in qcng.list_available_programs():
        return False

    found_version = qcng.get_program(program).get_version()
    return parse_version(found_version) >= parse_version(version_feature_introduced)
Esempio n. 14
0
def test_molpro_input_formatter(test_case):
    """Build a Molpro input from stored data; only checks that it runs."""

    # Fetch the stored input for this case
    case_data = molpro_info.get_test_data(test_case)
    result_input = qcel.models.ResultInput.parse_raw(case_data["input.json"])

    # Just test that it runs for now
    # TODO add actual comparison
    built = qcng.get_program('molpro').build_input(result_input, qcng.get_config())
    assert built.keys() >= {"commands", "infiles"}
Esempio n. 15
0
def test_entos_output_parser(test_case):
    """Parse stored entos output and compare it to the reference (minus provenance)."""

    # Fetch the stored output data for this case
    case_data = entos_info.get_test_data(test_case)
    atomic_input = qcel.models.AtomicInput.parse_raw(case_data["input.json"])

    parsed = qcng.get_program("entos", check=False).parse_output(case_data, atomic_input).dict()
    reference = qcel.models.AtomicResult.parse_raw(case_data["output.json"]).dict()

    # Provenance is environment-dependent, so drop it from both sides
    parsed.pop("provenance", None)
    reference.pop("provenance", None)

    ok = compare_recursive(reference, parsed)
    assert ok, (parsed, reference)
Esempio n. 16
0
def test_qchem_output_parser(test_case):
    """Parse stored Q-Chem outfiles and compare to the reference (minus provenance)."""

    # Fetch the stored input and scratch files for this case
    case_data = qchem_info.get_test_data(test_case)
    atomic_input = qcel.models.AtomicInput.parse_raw(case_data["input.json"])
    scratch_files = qcel.util.deserialize(case_data["outfiles.msgpack"], "msgpack-ext")

    parsed = qcng.get_program("qchem", check=False).parse_output(scratch_files, atomic_input).dict()
    reference = qcel.models.AtomicResult.parse_raw(case_data["output.json"]).dict()

    # Provenance is environment-dependent, so drop it from both sides
    parsed.pop("provenance", None)
    reference.pop("provenance", None)

    ok, message = compare_recursive(reference, parsed, return_message=True)
    assert ok, message
Esempio n. 17
0
def test_molpro_output_parser(test_case):
    """Parse stored Molpro output data and compare it to the reference result."""

    # Get output file data
    data = molpro_info.get_test_data(test_case)
    inp = qcel.models.ResultInput.parse_raw(data["input.json"])

    output = qcng.get_program('molpro').parse_output(data, inp).dict()
    output.pop("provenance", None)

    output_ref = qcel.models.Result.parse_raw(data["output.json"]).dict()
    output_ref.pop("provenance", None)

    # TODO add `skip` to compare_recursive
    # Request the diff message so a failure reports what differed
    # (the previous `assert check, check` only printed the bool).
    check, message = compare_recursive(output_ref, output, return_message=True)
    assert check, message
Esempio n. 18
0
def test_terachem_executer(test_case):
    """Run TeraChem on a stored input and compare return_result to the reference.

    Gradients get a looser tolerance than energies.
    """

    # Get input file data
    data = terachem_info.get_test_data(test_case)
    inp = qcel.models.ResultInput.parse_raw(data["input.json"])

    # Just test that it runs for now
    result = qcng.get_program('terachem').compute(inp, qcng.get_config())
    assert result.success is True  # identity check per PEP 8, not `== True`

    # Reference output for this case
    output_ref = qcel.models.Result.parse_raw(data["output.json"])

    # The assert above guarantees success, so no extra guard is needed.
    # Gradients are noisier than energies, hence the looser tolerance.
    atol = 1e-3 if result.driver == "gradient" else 1e-6
    assert compare_recursive(output_ref.return_result, result.return_result, atol=atol)
Esempio n. 19
0
def test_qchem_logfile_parser(test_case):
    """Parse a bare Q-Chem log file and compare against the reference,
    forgiving version fields in provenance."""

    # Fetch the stored log file for this case
    case_data = qchem_logonly_info.get_test_data(test_case)
    logfiles = {"dispatch.out": case_data["qchem.out"]}

    # Logfile-only parsing is expected to emit a warning
    with pytest.warns(Warning):
        parsed = qcng.get_program("qchem", check=False).parse_logfile(logfiles).dict()
    parsed["stdout"] = None

    reference = qcel.models.AtomicResult.parse_raw(case_data["output.json"]).dict()

    # Drop any provenance keys the reference does not carry
    for extra_key in list(parsed["provenance"]):
        if extra_key not in reference["provenance"]:
            parsed["provenance"].pop(extra_key)

    ok, message = compare_recursive(
        reference,
        parsed,
        return_message=True,
        forgive=[
            "root.molecule.provenance.version",
            "root.provenance.version"
        ])
    assert ok, message
Esempio n. 20
0
def test_protocol_native(program, model, keywords, native):
    """Ensure native_files protocol implemented in harness.

    For harnesses, run minimal gradient calc with different protocol settings; check expected
      native/DSL files show up in ``AtomicResult``.

    New Harness Instructions
    ------------------------
    * Make sure minimal calc is in _canonical_methods above. This uses gradient for more files.
    * Add regex to ``input_ref`` & ``all_ref`` below to check content of input and any other file.
    * If this test doesn't work, implement or adjust ``native_files`` in your harness.

    """
    if not has_program(program):
        pytest.skip(f"Program '{program}' not found.")

    harness = qcng.get_program(program)
    molecule = _get_molecule(program, model["method"])

    #  <<  Config

    # `native` is the parametrized protocol setting: "none", "input", or "all".
    protocols = {
        "native_files": native,
    }
    config = qcng.config.get_config(
        hostname="something",
        local_options={
            "ncores": 1,
            "nnodes": 1,
        },
    )

    #  <<  Run

    inp = AtomicInput(molecule=molecule,
                      driver="gradient",
                      model=model,
                      keywords=keywords,
                      protocols=protocols)
    ret = qcng.compute(inp,
                       program,
                       raise_error=True,
                       local_options=config.dict())
    pprint.pprint(ret.dict(), width=200)
    assert ret.success is True

    #  <<  Reference

    # Per-program patterns expected inside the returned "input" native file.
    input_ref = {
        "cfour": rf"\*CFOUR\(BASIS=6-31G",
        "dftd3": rf"1.000000     1.261000     1.703000",
        "gamess": rf"\$basis gbasis=n31 ngauss=6 \$end",
        "gcp": rf"level HF3C",
        "mctc-gcp": rf"level DFT/SV",
        "mp2d": rf"--TT_a1=0.944 --TT_a2=0.48",
        "nwchem": rf"H library 6-31G",
        "psi4": rf'"driver": "gradient",',
    }

    # Per-program (filename, pattern) pairs expected among the "all" native files.
    all_ref = {
        "cfour": ("GRD", rf"1.0+\s+0.0+\s+0.0+\s+0.03"),
        "dftd3": ("dftd3_geometry.xyz", rf"H\s+0.0+\s+0.0+\s+0.34"),
        "gamess": ("gamess.dat", rf"CLOSED SHELL ORBITALS --- GENERATED AT"),
        "gcp": ("gcp_geometry.xyz", rf"H\s+0.0+\s+0.0+\s+0.34"),
        "mctc-gcp": ("gcp_geometry.xyz", rf"H\s+0.0+\s+0.0+\s+0.34"),
        "mp2d": ("mp2d_geometry", rf"H\s+0.0+\s+0.0+\s+0.34"),
        "nwchem": ("nwchem.grad", rf"0.0, 0.0, 0.03"),
        "psi4": ("psi4.grad", rf"1.0+\s+(-?)0.0+\s+(-?)0.0+\s+0.03"),
    }

    #  <<  Test

    if native == "none":
        assert ret.native_files is None
    elif native == "input":
        assert list(ret.native_files.keys()) == ["input"]

    # stdout/stderr belong in their own fields, never duplicated into native_files.
    if ret.native_files:
        assert "stdout" not in ret.native_files, f"Stdout found in native_files -- clean up the harness"
        assert "stderr" not in ret.native_files, f"Stderr found in native_files -- clean up the harness"
    assert "outfiles" not in ret.extras, f"Outfiles found in extras -- clean up the harness"

    if native in ["input", "all"]:
        assert re.search(
            input_ref[program], ret.native_files["input"]
        ), f"Input file pattern not found: {input_ref[program]}"
    if native == "all" and program != "psi4":  # allow psi4 once native_files PR merged
        fl, snip = all_ref[program]
        assert re.search(snip, ret.native_files[fl]
                         ), f"Ancillary file pattern not found in {fl}: {snip}"
Esempio n. 21
0
def test_local_options_scratch(program, model, keywords):
    """Ensure scratch handling implemented in harness (if applicable).

    For available harnesses, run minimal calc at specific scratch directory name (randomly generated
      during test) and skip scratch clean-up. Check scratch settings show up in ``TaskConfig``.
    For ``scratch``-active harnesses, check that an expected file is written to and left behind in
      scratch directory. Check any scratch-related printing in output.

    New Harness Instructions
    ------------------------
    * Make sure minimal calc is in _canonical_methods above.
    * If ``scratch=True`` in harness, add single file (preferrably output) glob to ``scratch_sample``
      below to check that program scratch is directable.
    * If ``scratch=True`` in harness, if scratch directory mentioned in output, add regex to
      ``stdout_ref`` below to check that program scratch is directable. Otherwise, add an
      always-passing regex.
    * If this test doesn't work, implement or adjust ``config.scratch_directory`` and
      ``config.scratch_messy`` in your harness.

    """
    if not has_program(program):
        pytest.skip(f"Program '{program}' not found.")

    harness = qcng.get_program(program)
    molecule = _get_molecule(program, model["method"])

    #  <<  Config

    # Fresh per-test directory; suffix lets the assertion below verify it was honored.
    scratch_directory = tempfile.mkdtemp(suffix="_" + program)

    # scratch_messy=True skips clean-up so leftover files can be inspected below.
    config = qcng.config.get_config(
        hostname="something",
        local_options={
            "scratch_directory": scratch_directory,
            "scratch_messy": True,
        },
    )

    #  <<  Run

    inp = AtomicInput(molecule=molecule,
                      driver="energy",
                      model=model,
                      keywords=keywords)
    ret = qcng.compute(inp,
                       program,
                       raise_error=True,
                       local_options=config.dict())
    pprint.pprint(ret.dict(), width=200)
    assert ret.success is True

    #  <<  Reference

    # Per-program stdout patterns; "freebie" entries always match since those
    # programs never print the scratch path.
    stdout_ref = {
        "cfour": "University of Florida",  # freebie
        "dftd3": "Grimme",  # freebie
        "gamess": "IOWA STATE UNIVERSITY",  # freebie
        "gcp": "Grimme",  # freebie
        "mctc-gcp": "Grimme",  # freebie
        "mp2d": "Beran",  # freebie
        "nwchem": "E. Apra",  # freebie
        "psi4": rf"Scratch directory: {scratch_directory}/tmp\w+_psi_scratch/",
    }

    # a scratch file (preferrably output) expected after job if scratch not cleaned up
    scratch_sample = {
        "cfour": "*/NEWFOCK",
        "dftd3": "*/dftd3_geometry.xyz",  # no outfiles
        "gamess": "*/gamess.dat",
        "gcp": "*/gcp_geometry.xyz",  # no outfiles
        "mctc-gcp": "*/gcp_geometry.xyz",  # no outfiles
        "mp2d": "*/mp2d_geometry",  # no outfiles
        "nwchem": "*/nwchem.db",
        "psi4": "*/psi.*.35",
    }

    #  <<  Test

    assert config.scratch_directory.endswith(program)

    if harness._defaults["scratch"] is True:
        # Exactly one leftover sample file proves the program used our directory.
        sample_file = list(
            Path(scratch_directory).glob(scratch_sample[program]))
        assert len(
            sample_file
        ) == 1, f"Scratch sample not found: {scratch_sample[program]} in {scratch_directory}"

        assert re.search(
            stdout_ref[program],
            ret.stdout), f"Scratch pattern not found: {stdout_ref[program]}"
Esempio n. 22
0
        temporary_directory,
        f"{model['method'].lower()}_{molecule_name}_{driver}")
    os.makedirs(folder_name, exist_ok=True)

    input_model = {
        "molecule": qcng.get_molecule(molecule_name),
        "keywords": keywords,
        "model": model,
        "driver": driver,
    }
    input_model = qcel.models.ResultInput(**input_model)

    with open(os.path.join(folder_name, "input.json"), "w") as handle:
        handle.write(serialize(input_model, 'json'))

    # Save generated infiles
    prog = qcng.get_program(program)
    inputs = prog.build_input(input_model, config)
    with open(os.path.join(folder_name, "infiles.msgpack"), "wb") as handle:
        handle.write(serialize(inputs["infiles"], 'msgpack-ext'))

    # Save outfiles
    ret = prog.execute(inputs)
    with open(os.path.join(folder_name, "outfiles.msgpack"), "wb") as handle:
        handle.write(serialize(ret[1]["outfiles"], 'msgpack-ext'))

    # Save result
    result = prog.parse_output(ret[1]["outfiles"], input_model)
    with open(os.path.join(folder_name, "output.json"), "w") as handle:
        handle.write(serialize(result, 'json'))
Esempio n. 23
0
import os
import qcengine as qcng
import qcelemental as qcel

# Point the harness at a locally running TeraChem protobuf server.
os.environ["TERACHEM_PBS_HOST"] = "127.0.0.1"
os.environ["TERACHEM_PBS_PORT"] = "11111"

prog = qcng.get_program("terachem_pbs")

# Water geometry.
mol = qcel.models.Molecule.from_data("""
    O  0.0  0.000  -0.129
    H  0.0 -1.494  1.027
    H  0.0  1.494  1.027
""")

# Minimal PBE0/6-31g energy request.
inp = qcel.models.AtomicInput(
    molecule=mol,
    driver="energy",
    model={"method": "pbe0", "basis": "6-31g"},
)

ret = prog.compute(inp)
print(ret)
Esempio n. 24
0
def vpt2(name, **kwargs):
    """Perform vibrational second-order perturbation computation through
    Cfour to get anharmonic frequencies. This version uses c4 for the disp
    and pt2 but gets gradients from p4.

    :type c4full: :ref:`boolean <op_py_boolean>`
    :param c4full: ``'on'`` || |dl| ``'off'`` |dr|

        Indicates whether when *name* indicates a Cfour method and *mode*
        indicates a sow/reap approach, sown files are direct ZMAT files
        and FJOBARC files are expected to reap, so that Cfour only, not
        Cfour-through-Psi4, is needed for distributed jobs.

    .. caution:: Some features are not yet implemented. Buy a developer a coffee.

       - Presently uses all gradients. Could mix in analytic 2nd-derivs.

       - Collect resutls.

       - Manage scratch / subdir better.

       - Allow CFOUR_BASIS

       - Consider forcing some tighter convcrit, c4 and p4

       - mixed ang/bohr signals

       - error by converting to ang in psi?

       - Expand CURRENT DIPOLE XYZ beyond SCF

       - Remember additional FJOBARC record TOTENER2 if EXCITE .ne. NONE

       - switch C --> S/R with recovery using shelf

    """
    from . import endorsed_plugins
    kwargs = kwargs_lower(kwargs)

    #    if 'options' in kwargs:
    #        driver_helpers.set_options(kwargs.pop('options'))
    #
    #    # Bounce if name is function
    #    if hasattr(name, '__call__'):
    #        return name(energy, kwargs.pop('label', 'custom function'), ptype='energy', **kwargs)
    #
    #    # Allow specification of methods to arbitrary order
    lowername = name.lower()
    package = get_package(lowername, kwargs)
    #    lowername, level = driver_helpers._parse_arbitrary_order(lowername)
    #    if level:
    #        kwargs['level'] = level

    # Make sure the molecule the user provided is the active one
    molecule = kwargs.pop('molecule', get_active_molecule())
    molecule.update_geometry()

    #    if len(pe.nu_options.scroll) == 0:
    #        #print('EMPTY OPT')
    #        pe.load_nu_options()

    # -----
    verbose = kwargs.pop('verbose', 0)
    scratch_messy = kwargs.pop('scratch_messy', True)  # TODO

    kwgs = {'accession': kwargs['accession'], 'verbose': verbose}

    #    optstash = p4util.OptionsState(
    #        ['BASIS'])

    # Option mode of operation- whether vpt2 run in one job or files farmed out
    if not ('vpt2_mode' in kwargs):
        if ('mode' in kwargs):
            kwargs['vpt2_mode'] = kwargs['mode']
            del kwargs['mode']
        else:
            kwargs['vpt2_mode'] = 'continuous'

    # Switches for route through code- S/R or continuous & Psi4 or Cfour gradients
    isSowReap = True if kwargs['vpt2_mode'].lower() == 'sowreap' else False
    #!BR#    isC4notP4 = bool(re.match('cfour', lowername)) or bool(re.match('c4-', lowername))
    isC4notP4 = False  # TODO until intf_psi4 hooked up to qcng
    isC4fully = True if ('c4full' in kwargs
                         and yes.match(str(kwargs['c4full'])) and isC4notP4
                         and isSowReap) else False
    print('isSowReap=', isSowReap, 'isC4notP4=', isC4notP4, 'isC4fully=',
          isC4fully)

    cfourharness = qcng.get_program('qcdb-cfour')
    config = qcng.config.get_config(local_options={"ncores": 2})

    # Save submission directory and basis set
    current_directory = os.getcwd()
    #    user_basis = core.get_global_option('BASIS')

    # Open data persistence shelf- vital for sowreap, checkpoint for continuouw
    #    shelf = shelve.open(current_directory + '/' + os.path.splitext(core.outfile_name())[0] + '.shelf', writeback=True)
    shelf = shelve.open(current_directory + '/vpt2scratch.shelf',
                        writeback=True)

    # Cfour keywords to request vpt2 analysis through findif gradients
    c000_opts = RottenOptions()
    pe.load_options(c000_opts)
    c000_opts.require('CFOUR', 'VIBRATION', 'FINDIF', **kwgs)
    c000_opts.require('CFOUR', 'FREQ_ALGORITHM', 'PARALLEL', **kwgs)
    c000_opts.require('CFOUR', 'ANH_ALGORITHM', 'PARALLEL', **kwgs)
    c000_opts.require('CFOUR', 'ANHARMONIC', 'VPT2', **kwgs)
    c000_opts.require('CFOUR', 'FD_PROJECT', 'OFF', **kwgs)

    # When a Psi4 method is requested for vpt2, a skeleton of
    #   computations in Cfour is still required to hang the gradients
    #   upon. The skeleton is as cheap as possible (integrals only
    #   & sto-3g) and set up here.
    #!BR#    if isC4notP4:
    #!BR#        skelname = lowername
    #!BR#    else:
    if True:
        skelname = 'c4-scf'
#        core.set_global_option('BASIS', 'STO-3G')
#    P4  'c4-scf'/'cfour'CALC_LEVEL      lowername  # temporary
#    C4  lowername                       cfour{}  # temporary

    if 'status' not in shelf:
        shelf['status'] = 'initialized'
        shelf['linkage'] = os.getpid()
        shelf['zmat'] = {
        }  # Cfour-generated ZMAT files with finite difference geometries
        shelf['fjobarc'] = {
        }  # Cfour- or Psi4-generated ascii files with packaged gradient results
        shelf['results'] = {}  # models.Result
        shelf.sync()
    else:
        pass
        # how decide whether to use. keep precedent of intco.dat in mind

#    # Construct and move into directory job scratch / cfour scratch / harm
#    psioh = core.IOManager.shared_object()
#    psio = core.IO.shared_object()
#    os.chdir(psioh.get_default_path())  # psi_scratch
#    cfour_tmpdir = kwargs['path'] if 'path' in kwargs else \
#        'psi.' + str(os.getpid()) + '.' + psio.get_default_namespace() + \
#        '.cfour.' + str(uuid.uuid4())[:8]
#    if not os.path.exists(cfour_tmpdir):
#        os.mkdir(cfour_tmpdir)
#    os.chdir(cfour_tmpdir)  # psi_scratch/cfour
#    if not os.path.exists('harm'):
#        os.mkdir('harm')
#    os.chdir('harm')  # psi_scratch/cfour/harm

#    psioh.set_specific_retention(32, True)  # temporary, to track p4 scratch
#shelf['status'] = 'anharm_jobs_sown'  # temporary to force backtrack
    print('STAT', shelf['status'])  # temporary

    resi = ResultInput(
        **{
            'driver':
            'energy',  # to prevent qcdb imposition of analytic hessian
            'extras': {
                'qcdb:options': copy.deepcopy(c000_opts),  #pe.nu_options),
            },
            'model': {
                'method': 'c4-scf',  #'hf',
                #'basis': '6-31g',
                'basis': 'sto-3g',
            },
            'molecule': molecule.to_schema(dtype=2),
            'provenance': provenance_stamp(__name__),
        })

    # Generate the ZMAT input file in scratch
    cfourrec = cfourharness.qcdb_build_input(resi, config)
    shelf['genbas'] = cfourrec['infiles']['GENBAS']
    shelf['zmat']['000-000'] = cfourrec['infiles']['ZMAT']
    shelf.sync()

    #    with open('ZMAT', 'w') as handle:
    #        cfour_infile = write_zmat(skelname, 1)
    #        handle.write(cfour_infile)
    #    print('\n====== Begin ZMAT input for CFOUR ======')
    #    print(open('ZMAT', 'r').read())
    #    print('======= End ZMAT input for CFOUR =======\n')

    # Check existing shelf consistent with generated ZMAT, store
    #    if ('000-000' in shelf['zmat']) and (shelf['zmat']['000-000'] != cfour_infile):
    #        diff = difflib.Differ().compare(shelf['zmat']['000-000'].splitlines(), cfour_infile.splitlines())
    #        raise ValidationError("""Input file translated to Cfour ZMAT does not match ZMAT stored in shelf.\n\n""" +
    #            '\n'.join(list(diff)))

    # Reset basis after Cfour skeleton seeded
    #    core.set_global_option('BASIS', user_basis)

    if shelf['status'] == 'initialized':
        print('{:_^45}'.format('  VPT2 Setup: Harmonic  '))

        # Generate the displacements that will form the harmonic freq
        scrkwgs = {
            'scratch_directory': config.scratch_directory,
            'scratch_messy': True,
            'scratch_suffix': '_000'
        }
        success, dexe = qcng.util.execute(['xjoda'], cfourrec['infiles'], [],
                                          **scrkwgs)
        partial = dexe['stdout']

        scrkwgs.update({
            'scratch_name': Path(dexe['scratch_directory']).name,
            'scratch_exist_ok': True
        })
        scrkwgs.update({'scratch_messy': scratch_messy})
        success, dexe = qcng.util.execute(['xsymcor'], {}, ['zmat*'],
                                          **scrkwgs)
        partial += dexe['stdout']

        print(partial)  # partial.out

        # Read the displacements that will form the harmonic freq
        zmats0N = ['000-' + item[-3:] for item in dexe['outfiles']['zmat*']]
        for zm_2 in zmats0N:
            _, zm2 = zm_2.split('-')
            shelf['zmat'][zm_2] = dexe['outfiles']['zmat*']['zmat' + zm2]
            shelf.sync()
            print(f'  CFOUR scratch file zmat{zm2} for {zm_2} has been read\n')
            #print('%s\n' % shelf['zmat'][zm_2])

        # S/R: Write distributed input files for harmonic freq
        if isSowReap:
            os.chdir(current_directory)
            inputSansMol = p4util.format_currentstate_for_input(gradient,
                                                                lowername,
                                                                allButMol=True,
                                                                **kwargs)
            for zm12 in zmats0N:
                zm1, zm2 = zm12.split('-')

                ifile = vpt2_sow_files(zm12, shelf['linkage'], isC4notP4,
                                       isC4fully, shelf['zmat'][zm12],
                                       inputSansMol, shelf['genbas'])

                with open('VPT2-' + zm12 + '.in', 'w') as handle:
                    handle.write(ifile)

            msg = vpt2_instructions('harmonic', current_directory, zmats0N)
            core.print_out(msg)
            print(msg)

        shelf['status'] = 'harm_jobs_sown'

        # S/R: Pause for distributed calculations
        if isSowReap:
            shelf.close()
            return 0.0

    if shelf['status'] == 'harm_jobs_sown':
        zmats0N = [
            item for item in sorted(shelf['zmat'].keys())
            if (item[:3] == '000' and item[-3:] != '000')
        ]

        # S/R: Check that distributed calcs all completed correctly
        if isSowReap:
            msg = vpt2_instructions('harmonic', current_directory, zmats0N)
            core.print_out(msg)
            isOk, msg = sown_jobs_status(
                current_directory, 'VPT2', zmats0N, reap_job_validate,
                shelf['linkage'],
                ['CURRENT ENERGY', 'CURRENT DIPOLE', 'CURRENT GRADIENT'])
            core.print_out(msg)
            print(msg)
            if not isOk:
                shelf.close()
                return 0.0

        # Collect all results from gradients forming the harmonic freq
        for zm12 in zmats0N:
            zm1, zm2 = zm12.split('-')
            if zm12 not in shelf['fjobarc']:
                print('{:_^45}'.format(f'  VPT2 Computation: {zm12}  '))

                fjobarc = vpt2_reaprun_files(
                    zm12,
                    shelf['linkage'],
                    isSowReap,
                    isC4notP4,
                    isC4fully,
                    shelf['zmat']
                    [zm12],  #current_directory, psioh.get_default_path(), cfour_tmpdir,
                    lowername,
                    kwargs,
                    shelf['genbas'],
                    config,
                    package,
                    scratch_messy=scratch_messy)
                shelf['fjobarc'][zm12] = fjobarc
                shelf.sync()
        shelf['status'] = 'harm_jobs_reaped'

    if shelf['status'] == 'harm_jobs_reaped':
        zmats0N = [
            item for item in sorted(shelf['zmat'].keys())
            if (item[:3] == '000' and item[-3:] != '000')
        ]

        print('{:_^45}'.format('  VPT2 Results: Harmonic  '))
        for k, v in shelf.items():
            print('   {:_^20}'.format(k))
            #pp.pprint(v)

        #scrkwgs = {'scratch_directory': config.scratch_directory, 'scratch_messy': True, 'scratch_suffix': '_000'}
        #scrkwgs.update({'scratch_name': Path(dexe['scratch_directory']).name, 'scratch_exist_ok': True})
        #scrkwgs.update({'scratch_messy': scratch_messy})

        # Process the gradients into harmonic freq
        scrkwgs = {
            'scratch_directory': config.scratch_directory,
            'scratch_messy': True,
            'scratch_suffix': '_000med'
        }
        success, dexe = qcng.util.execute(['xjoda'], {
            'ZMAT': shelf['zmat']['000-000'],
            'GENBAS': shelf['genbas']
        }, [], **scrkwgs)
        harmout = dexe['stdout']

        scrkwgs.update({
            'scratch_name': Path(dexe['scratch_directory']).name,
            'scratch_exist_ok': True
        })
        success, dexe = qcng.util.execute(['xsymcor'], {}, [], **scrkwgs)
        print('xsymcor', success)
        harmout += dexe['stdout']

        for zm12 in zmats0N:
            zm1, zm2 = zm12.split('-')
            success, dexe = qcng.util.execute(
                ['xja2fja'], {'FJOBARC': shelf['fjobarc'][zm12]}, [],
                **scrkwgs)
            print(zm12, 'xja2fja', success)
            harmout += dexe['stdout']

            success, dexe = qcng.util.execute(['xsymcor'], {}, [], **scrkwgs)
            print(zm12, 'xsymcor', success)
            harmout += dexe['stdout']

        success, dexe = qcng.util.execute(['xjoda'], {}, [], **scrkwgs)
        print('xjoda', success)
        harmout += dexe['stdout']

        for zm in Path(dexe['scratch_directory']).glob('zmat*'):
            print('Removing', zm)
            os.remove(zm)

        scrkwgs.update({'scratch_messy': scratch_messy})
        success, dexe = qcng.util.execute(['xcubic'], {}, ['zmat*'], **scrkwgs)
        print('xcubic', success)
        harmout += dexe['stdout']
        #print('HARMOUT')
        #print(harmout)

        pp.pprint(shelf['zmat'].keys())
        pp.pprint(shelf['fjobarc'].keys())

        #        os.chdir(psioh.get_default_path() + cfour_tmpdir + '/harm')  # psi_scratch/cfour/harm
        #        harmout = run_cfour_module('xjoda')
        #        harmout += run_cfour_module('xsymcor')
        #        for zm12 in zmats0N:
        #            zm1, zm2 = zm12.split('-')
        #            with open('FJOBARC', 'w') as handle:
        #                handle.write(shelf['fjobarc'][zm12])
        #            harmout += run_cfour_module('xja2fja')
        #            harmout += run_cfour_module('xsymcor')
        #            shutil.move('FJOBARC', 'fja.' + zm12)
        #            try:
        #                os.remove('zmat' + zm2)
        #            except OSError:
        #                pass
        #        harmout += run_cfour_module('xjoda')
        #        harmout += run_cfour_module('xcubic')
        #        core.print_out(harmout)
        #        with open('harm.out', 'w') as handle:
        #            handle.write(harmout)

        # Generate displacements along harmonic normal modes
        #zmatsN0 = [item[-3:] for item in sorted(shelf['zmat'].keys()) if (item[:3] == '000' and item[-3:] != '000')]
        for fl, contents in dexe['outfiles']['zmat*'].items():
            zmN_ = fl[-3:] + '-000'
            shelf['zmat'][zmN_] = contents
            shelf.sync()
            print(f'  CFOUR scratch file {fl} for {zmN_} has been read\n')
            #print('%s\n' % shelf['zmat'][zm12])

        zmatsN0 = [
            item for item in sorted(shelf['zmat'].keys())
            if (item[:3] != '000' and item[-3:] == '000')
        ]
        for zmN0 in zmatsN0:
            zm1, _ = zmN0.split('-')

            # Collect displacements along the normal coordinates generated by the harmonic freq.
            #   Further harmonic freqs are to be run at each of these to produce quartic force field.
            #   To carry these out, generate displacements for findif by gradient at each displacement.

            scrkwgs = {
                'scratch_directory': config.scratch_directory,
                'scratch_messy': True,
                'scratch_suffix': f'_{zmN0}'
            }
            success, dexe = qcng.util.execute(['xjoda'], {
                'ZMAT': shelf['zmat'][zmN0],
                'GENBAS': shelf['genbas']
            }, [], **scrkwgs)

            scrkwgs.update({
                'scratch_name': Path(dexe['scratch_directory']).name,
                'scratch_exist_ok': True
            })
            scrkwgs.update({'scratch_messy': scratch_messy})
            success, dexe = qcng.util.execute(['xsymcor'], {}, ['zmat*'],
                                              **scrkwgs)

            for fl, contents in dexe['outfiles']['zmat*'].items():
                zm12 = zm1 + '-' + fl[-3:]
                shelf['zmat'][zm12] = contents
                shelf.sync()
                print('  CFOUR scratch file %s for %s has been read\n' %
                      (fl, zm12))
                #print('%s\n' % shelf['zmat'][zm12])


#        zmatsN0 = [item[-3:] for item in sorted(glob.glob('zmat*'))]
#        os.chdir('..')  # psi_scratch/cfour
#        for zm1 in zmatsN0:
#            zm12 = zm1 + '-000'
#            with open(psioh.get_default_path() + cfour_tmpdir + '/harm/zmat' + zm1, 'r') as handle:
#                shelf['zmat'][zm12] = handle.read()
#                shelf.sync()
#                core.print_out('  CFOUR scratch file %s for %s has been read\n' % ('zmat' + zm1, zm12))
#                core.print_out('%s\n' % shelf['zmat'][zm12])
#
#            # Collect displacements along the normal coordinates generated by the harmonic freq.
#            #   Further harmonic freqs are to be run at each of these to produce quartic force field.
#            #   To carry these out, generate displacements for findif by gradient at each displacement.
#            if os.path.exists(zm1):
#                shutil.rmtree(zm1)
#            os.mkdir(zm1)
#            os.chdir(zm1)  # psi_scratch/cfour/004
#            with open('ZMAT', 'w') as handle:
#                handle.write(shelf['zmat'][zm12])
#            shutil.copy2('../harm/GENBAS', 'GENBAS')  # ln -s $ecpdir/ECPDATA $j/ECPDATA
#            with open('partial.out', 'w') as handle:
#                handle.write(run_cfour_module('xjoda'))
#                handle.write(run_cfour_module('xsymcor'))
#
#            # Read the displacements that will form the anharmonic freq
#            zmatsNN = [item[-3:] for item in sorted(glob.glob('zmat*'))]
#            for zm2 in zmatsNN:
#                zm12 = zm1 + '-' + zm2
#                with open(psioh.get_default_path() + cfour_tmpdir + '/' + zm1 + '/zmat' + zm2, 'r') as handle:
#                    shelf['zmat'][zm12] = handle.read()
#                    shelf.sync()
#                    core.print_out('  CFOUR scratch file %s for %s has been read\n' % ('zmat' + zm2, zm12))
#                    core.print_out('%s\n' % shelf['zmat'][zm12])
#            os.chdir('..')  # psi_scratch/cfour

        zmatsNN = [
            item for item in sorted(shelf['zmat'].keys())
            if (item[:3] != '000' and item[-3:] != '000')
        ]

        # S/R: Write distributed input files for anharmonic freq
        if isSowReap:
            os.chdir(current_directory)
            inputSansMol = p4util.format_currentstate_for_input(gradient,
                                                                lowername,
                                                                allButMol=True,
                                                                **kwargs)
            for zm12 in zmatsNN:
                zm1, zm2 = zm12.split('-')

                ifile = vpt2_sow_files(zm12, shelf['linkage'], isC4notP4,
                                       isC4fully, shelf['zmat'][zm12],
                                       inputSansMol, shelf['genbas'])
                # GENBAS needed here

                with open('VPT2-' + zm12 + '.in', 'w') as handle:
                    handle.write(ifile)

            msg = vpt2_instructions('anharmonic', current_directory, zmatsNN)
            core.print_out(msg)
            print(msg)

        shelf['status'] = 'anharm_jobs_sown'

        # S/R: Pause for distributed calculations
        if isSowReap:
            shelf.close()
            return 0.0

    if shelf['status'] == 'anharm_jobs_sown':
        zmatsNN = [
            item for item in sorted(shelf['zmat'].keys())
            if (item[:3] != '000' and item[-3:] != '000')
        ]

        # S/R: Check that distributed calcs all completed correctly
        if isSowReap:
            msg = vpt2_instructions('anharmonic', current_directory, zmatsNN)
            core.print_out(msg)
            isOk, msg = sown_jobs_status(
                current_directory, 'VPT2', zmatsNN, reap_job_validate,
                shelf['linkage'],
                ['CURRENT ENERGY', 'CURRENT DIPOLE', 'CURRENT GRADIENT'])
            core.print_out(msg)
            print(msg)
            if not isOk:
                shelf.close()
                return 0.0

        # Collect all results from gradients forming the anharmonic freq
        for zmNN in zmatsNN:
            zm1, zm2 = zmNN.split('-')
            if zmNN not in shelf['fjobarc']:
                print('{:_^45}'.format(f'  VPT2 Computation: {zmNN}'))

                fjobarc = vpt2_reaprun_files(zmNN,
                                             shelf['linkage'],
                                             isSowReap,
                                             isC4notP4,
                                             isC4fully,
                                             shelf['zmat'][zmNN],
                                             lowername,
                                             kwargs,
                                             shelf['genbas'],
                                             config,
                                             package,
                                             scratch_messy=scratch_messy)
                shelf['fjobarc'][zmNN] = fjobarc
                shelf.sync()
        shelf['status'] = 'anharm_jobs_reaped'

    if shelf['status'] == 'anharm_jobs_reaped':
        zmats0N = [
            item for item in sorted(shelf['zmat'].keys())
            if (item[:3] == '000' and item[-3:] != '000')
        ]
        zmatsN0 = [
            item for item in sorted(shelf['zmat'].keys())
            if (item[:3] != '000' and item[-3:] == '000')
        ]
        zmatsNN = [
            item for item in sorted(shelf['zmat'].keys())
            if (item[:3] != '000' and item[-3:] != '000')
        ]

        print('{:_^45}'.format('  VPT2 Results: Harmonic  '))

        # Process the gradients into harmonic freq
        scrkwgs = {
            'scratch_directory': config.scratch_directory,
            'scratch_messy': True,
            'scratch_suffix': '_000final'
        }
        success, dexe = qcng.util.execute(['xjoda'], {
            'ZMAT': shelf['zmat']['000-000'],
            'GENBAS': shelf['genbas']
        }, [], **scrkwgs)
        anharmout = dexe['stdout']

        scrkwgs.update({
            'scratch_name': Path(dexe['scratch_directory']).name,
            'scratch_exist_ok': True
        })
        success, dexe = qcng.util.execute(['xsymcor'], {}, [], **scrkwgs)
        anharmout += dexe['stdout']

        for zm12 in zmats0N:
            zm1, zm2 = zm12.split('-')
            success, dexe = qcng.util.execute(
                ['xja2fja'], {'FJOBARC': shelf['fjobarc'][zm12]}, [],
                **scrkwgs)
            anharmout += dexe['stdout']
            success, dexe = qcng.util.execute(['xsymcor'], {}, [], **scrkwgs)
            anharmout += dexe['stdout']

        success, dexe = qcng.util.execute(['xjoda'], {}, [], **scrkwgs)
        anharmout += dexe['stdout']
        scrkwgs.update({'scratch_messy': scratch_messy})
        success, dexe = qcng.util.execute(['xcubic'], {},
                                          ['zmat*', 'JOBARC', 'JAINDX'],
                                          as_binary=['JOBARC', 'JAINDX'],
                                          **scrkwgs)
        anharmout += dexe['stdout']

        jobarc0 = dexe['outfiles']['JOBARC']
        jaindx0 = dexe['outfiles']['JAINDX']

        # Process the gradients into harmonic freq at each normco displaced point
        os.chdir('..')  # psi_scratch/cfour
        for zm1_ in zmatsN0:
            zm1, _ = zm1_.split('-')

            scrkwgs = {
                'scratch_directory': config.scratch_directory,
                'scratch_messy': True,
                'scratch_suffix': f'_{zm1}final'
            }
            success, dexe = qcng.util.execute(['xjoda'], {
                'ZMAT': shelf['zmat'][zm1_],
                'GENBAS': shelf['genbas']
            }, [], **scrkwgs)
            anharmout = dexe['stdout']

            scrkwgs.update({
                'scratch_name': Path(dexe['scratch_directory']).name,
                'scratch_exist_ok': True
            })
            success, dexe = qcng.util.execute(['xsymcor'], {}, [], **scrkwgs)
            anharmout += dexe['stdout']

            for zm12 in [
                    item for item in zmatsNN
                    if (item[:3] == zm1 and item[-3:] != '000')
            ]:
                _, zm2 = zm12.split('-')
                print(zm12, shelf['fjobarc'][zm12])

                success, dexe = qcng.util.execute(
                    ['xja2fja'], {'FJOBARC': shelf['fjobarc'][zm12]}, [],
                    **scrkwgs)
                anharmout += dexe['stdout']

                success, dexe = qcng.util.execute(['xsymcor'], {}, [],
                                                  **scrkwgs)
                anharmout += dexe['stdout']

                os.remove(Path(dexe['scratch_directory']) / 'FJOBARC')

            success, dexe = qcng.util.execute(['xjoda'], {}, [], **scrkwgs)
            anharmout += dexe['stdout']

            scrkwgs.update({'scratch_messy': scratch_messy})
            success, dexe = qcng.util.execute(['xja2fja'], {}, ['FJOBARC'],
                                              **scrkwgs)
            anharmout += dexe['stdout']
            shelf['fjobarc'][zm1_] = dexe['outfiles']['FJOBARC']
            shelf.sync()

            print('PARTIAL', zm1_, '\n', anharmout)

        # Process the harmonic freqs at normco displacements into anharmonic freq
        print('{:_^45}'.format('  VPT2 Results: Anharmonic  '))

        pprint.pprint(shelf['zmat'].keys())
        pprint.pprint(shelf['fjobarc'].keys())

        scrkwgs = {
            'scratch_directory': config.scratch_directory,
            'scratch_messy': True,
            'scratch_suffix': '_000anharm'
        }
        success, dexe = qcng.util.execute(['ls'], {
            'JOBARC': jobarc0,
            'JAINDX': jaindx0
        }, [],
                                          as_binary=['JOBARC', 'JAINDX'],
                                          **scrkwgs)
        anharmout = ''
        scrkwgs.update({
            'scratch_name': Path(dexe['scratch_directory']).name,
            'scratch_exist_ok': True
        })

        for zm1_ in zmatsN0:
            zm1, _ = zm1_.split('-')
            success, dexe = qcng.util.execute(
                ['xja2fja'], {'FJOBARC': shelf['fjobarc'][zm1_]}, [],
                **scrkwgs)
            print(zm1_, 'xja2fja', success)
            anharmout += dexe['stdout']

            success, dexe = qcng.util.execute(['xcubic'], {}, [], **scrkwgs)
            print(zm1_, 'xcubic', success)
            anharmout += dexe['stdout']

        print(anharmout)  # anharm.out

        shelf['status'] = 'vpt2_completed'

    # Finish up
    shelf.close()
Esempio n. 25
0
    def __init__(
        self,
        client: "FractalClient",
        queue_client: "BaseAdapter",
        logger: Optional[logging.Logger] = None,
        max_tasks: int = 200,
        queue_tag: Optional[Union[str, List[str]]] = None,
        manager_name: str = "unlabeled",
        update_frequency: Union[int, float] = 2,
        verbose: bool = True,
        server_error_retries: Optional[int] = 1,
        stale_update_limit: Optional[int] = 10,
        cores_per_task: Optional[int] = None,
        memory_per_task: Optional[float] = None,
        nodes_per_task: Optional[int] = None,
        cores_per_rank: Optional[int] = 1,
        scratch_directory: Optional[str] = None,
        retries: Optional[int] = 2,
        configuration: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Build a queue manager that pulls tasks from a Fractal server and runs them via a queue adapter.

        Parameters
        ----------
        client : FractalClient
            A FractalClient connected to a server
        queue_client : BaseAdapter
            The DBAdapter class for queue abstraction
        logger : Optional[logging.Logger], optional
            A logger for the QueueManager
        max_tasks : int, optional
            The maximum number of tasks to hold at any given time
        queue_tag : str, optional
            Allows managers to pull from specific tags
        manager_name : str, optional
            The cluster the manager belongs to
        update_frequency : Union[int, float], optional
            The frequency to check for new tasks in seconds
        verbose : bool, optional
            Whether or not to have the manager be verbose (logger level debug and up)
        server_error_retries : Optional[int], optional
            How many times finished jobs are attempted to be pushed to the server
            in the event of a server communication error.
            After number of attempts, the failed jobs are dropped from this manager and considered "stale"
            Set to `None` to keep retrying
        stale_update_limit : Optional[int], optional
            Number of stale update attempts to keep around
            If this limit is ever hit, the manager initiates a shutdown as best it can
            since communication with the server has gone wrong too many times.
            Set to `None` for unlimited
        cores_per_task : Optional[int], optional
            How many CPU cores per computation task to allocate for QCEngine
            None indicates "use however many you can detect"
        memory_per_task : Optional[float], optional
            How much memory, in GiB, per computation task to allocate for QCEngine
            None indicates "use however much you can consume"
        nodes_per_task : Optional[int], optional
            How many nodes to use per task. Used only for node-parallel tasks
        cores_per_rank: Optional[int], optional
            How many CPUs per rank of an MPI application. Used only for node-parallel tasks
        scratch_directory : Optional[str], optional
            Scratch directory location to do QCEngine compute
            None indicates "wherever the system default is"
        retries : Optional[int], optional
            Number of retries that QCEngine will attempt for RandomErrors detected when running
            its computations. After this many attempts (or on any other type of error), the
            error will be raised.
        configuration : Optional[Dict[str, Any]], optional
            A JSON description of the settings used to create this object for the database.
        """

        # Setup logging: honor a caller-supplied logger, else fall back to a named module logger
        if logger:
            self.logger = logger
        else:
            self.logger = logging.getLogger("QueueManager")

        # Unique manager identity: cluster label + hostname + a fresh random UUID,
        # joined into a single "-"-separated name used when talking to the server.
        self.name_data = {
            "cluster": manager_name,
            "hostname": socket.gethostname(),
            "uuid": str(uuid.uuid4())
        }
        self._name = self.name_data["cluster"] + "-" + self.name_data[
            "hostname"] + "-" + self.name_data["uuid"]

        # Per-task resource settings, forwarded verbatim to the queue adapter / QCEngine
        self.client = client
        self.cores_per_task = cores_per_task
        self.memory_per_task = memory_per_task
        self.nodes_per_task = nodes_per_task or 1  # default to a single node when unset
        self.scratch_directory = scratch_directory
        self.retries = retries
        self.cores_per_rank = cores_per_rank
        self.configuration = configuration
        # Wrap the raw queue client in an adapter that injects the resource settings above
        self.queue_adapter = build_queue_adapter(
            queue_client,
            logger=self.logger,
            cores_per_task=self.cores_per_task,
            memory_per_task=self.memory_per_task,
            nodes_per_task=self.nodes_per_task,
            scratch_directory=self.scratch_directory,
            cores_per_rank=self.cores_per_rank,
            retries=self.retries,
            verbose=verbose,
        )
        self.max_tasks = max_tasks
        self.queue_tag = queue_tag
        self.verbose = verbose

        # Running statistics; None resource limits are recorded as 0 here
        self.statistics = QueueStatistics(
            max_concurrent_tasks=self.max_tasks,
            cores_per_task=(cores_per_task or 0),
            memory_per_task=(memory_per_task or 0),
            update_frequency=update_frequency,
        )

        # Scheduler/loop bookkeeping, populated later by the run machinery
        self.scheduler = None
        self.update_frequency = update_frequency
        self.periodic = {}
        self.active = 0
        self.exit_callbacks = []

        # Server response/stale job handling (see docstring for the retry semantics)
        self.server_error_retries = server_error_retries
        self.stale_update_limit = stale_update_limit
        self._stale_updates_tracked = 0
        self._stale_payload_tracking = []
        self.n_stale_jobs = 0

        # QCEngine data: programs/procedures available on this host
        self.available_programs = qcng.list_available_programs()
        self.available_procedures = qcng.list_available_procedures()

        # Display a warning if there are non-node-parallel programs and >1 node_per_task
        if self.nodes_per_task > 1:
            for name in self.available_programs:
                program = qcng.get_program(name)
                if not program.node_parallel:
                    self.logger.warning(
                        "Program {} is not node parallel,"
                        " but manager will use >1 node per task".format(name))

        # Print out configuration
        self.logger.info("QueueManager:")
        self.logger.info("    Version:         {}\n".format(
            get_information("version")))

        if self.verbose:
            self.logger.info("    Name Information:")
            self.logger.info("        Cluster:     {}".format(
                self.name_data["cluster"]))
            self.logger.info("        Hostname:    {}".format(
                self.name_data["hostname"]))
            self.logger.info("        UUID:        {}\n".format(
                self.name_data["uuid"]))

        self.logger.info("    Queue Adapter:")
        self.logger.info("        {}\n".format(self.queue_adapter))

        if self.verbose:
            self.logger.info("    QCEngine:")
            self.logger.info("        Version:        {}".format(
                qcng.__version__))
            self.logger.info("        Task Cores:     {}".format(
                self.cores_per_task))
            self.logger.info("        Task Mem:       {}".format(
                self.memory_per_task))
            self.logger.info("        Task Nodes:     {}".format(
                self.nodes_per_task))
            self.logger.info("        Cores per Rank: {}".format(
                self.cores_per_rank))
            self.logger.info("        Scratch Dir:    {}".format(
                self.scratch_directory))
            self.logger.info("        Programs:       {}".format(
                self.available_programs))
            self.logger.info("        Procedures:     {}\n".format(
                self.available_procedures))

        # DGAS Note: Not super happy about how this if/else turned out. Looking for alternatives.
        if self.connected():
            # Pull server info and clamp max_tasks to the server's query limit
            self.server_info = client.server_information()
            self.server_name = self.server_info["name"]
            self.server_version = self.server_info["version"]
            self.server_query_limit = self.server_info["query_limit"]
            if self.max_tasks > self.server_query_limit:
                self.max_tasks = self.server_query_limit
                self.logger.warning(
                    "Max tasks was larger than server query limit of {}, reducing to match query limit."
                    .format(self.server_query_limit))
            self.heartbeat_frequency = self.server_info["heartbeat_frequency"]

            # Tell the server we are up and running (registers this manager via a "startup" operation)
            payload = self._payload_template()
            payload["data"]["operation"] = "startup"
            payload["data"]["configuration"] = self.configuration

            self.client._automodel_request("queue_manager", "put", payload)

            if self.verbose:
                self.logger.info("    Connected:")
                self.logger.info("        Version:     {}".format(
                    self.server_version))
                self.logger.info("        Address:     {}".format(
                    self.client.address))
                self.logger.info("        Name:        {}".format(
                    self.server_name))
                self.logger.info("        Queue tag:   {}".format(
                    self.queue_tag))
                self.logger.info("        Username:    {}\n".format(
                    self.client.username))

        else:
            # Offline mode: no handshake; some actions (task pull/push) will not be available
            self.logger.info("    QCFractal server information:")
            self.logger.info(
                "        Not connected, some actions will not be available")
Esempio n. 26
0
def get_master_frame(
        kmol: "qcelemental.models.Molecule", scratch_directory
) -> Tuple["qcelemental.models.Molecule", Dict[str, str]]:
    """Do whatever it takes to figure out the GAMESS master frame by which ``kmol`` can be run with full symmetry.

    Parameters
    ----------
    kmol
        Molecule to be re-expressed in the GAMESS master frame.
    scratch_directory
        Scratch directory handed to the GAMESS harness for the trial
        ``exetyp=check`` runs in the non-Abelian branch.

    Returns
    -------
    mf_kmol : qcelemental.models.Molecule
        ``kmol`` aligned into the GAMESS master frame.
    data : dict
        ``{"unique": <list of symmetry-unique atom indices>,
        "symmetry_card": <GAMESS $DATA symmetry line>}``.

    Raises
    ------
    RuntimeError
        If GAMESS rejects every attempted symmetry specification, so no
        master frame could be extracted.
    """

    harness = qcng.get_program("gamess")

    # want the full frame-independent symmetry, so allow reorientation to Psi4 master frame
    qmol = Molecule.from_schema(kmol.dict() | {
        "fix_com": False,
        "fix_orientation": False
    })
    pgn, naxis = _get_symmetry_card(qmol.full_point_group_with_n(),
                                    qmol.full_pg_n())

    # run exetyp=check asserting full symmetry to extract master frame from GAMESS
    # * fix_*=F so harness returns the internal GAMESS frame, not the naive input frame
    # * uses an arbitrary UHF/6-31G model
    # * most common failure mode is high-symmetry or wrong-quadrant geometry when GAMESS generates too many atoms
    #   * haven't found a reliable programmatic path out that uses full internal symmetry, so fall back to C2v, then C1
    internal_symmetry_card = f"{pgn} {naxis}".strip()

    if not all(kmol.real):
        # TODO is this the best way to handle ghosts?
        # * the C1 early return is to avoid the qcng.compute that uses coord=prinaxis that can't take ghost specification
        # * is the gamess master_frame the input frame for C1? have tentatively assumed so
        data = {
            "unique": list(range(len(kmol.symbols))),
            "symmetry_card": "C1",
        }
        return kmol, data

    # Try the full symmetry first, then progressively lower-symmetry fallbacks.
    # `atres` stays None until a GAMESS check run succeeds; if every attempt
    # fails we raise rather than hitting a NameError below.
    atres = None
    for symmetry_card in [internal_symmetry_card, "Cnv 2", "C1"]:
        naive_kmol = kmol.copy(
            update={
                "fix_symmetry": symmetry_card,
                "atom_labels": list(range(len(kmol.symbols))),
                "fix_com": False,
                "fix_orientation": False,
            })

        atin = qcel.models.AtomicInput(
            **{
                "driver": "energy",
                "keywords": {
                    "contrl__exetyp": "check",
                    "contrl__scftyp": "uhf",
                    "basis__ngauss": 6,
                },
                "model": {
                    "method": "hf",
                    "basis": "n31",
                },
                "molecule": naive_kmol,
            })

        try:
            atres = qcng.compute(atin,
                                 "gamess",
                                 local_options={
                                     "nnodes": 1,
                                     "ncores": 1,
                                     "memory": 1
                                 },
                                 raise_error=True)
        except qcng.exceptions.UnknownError as e:
            # Recognize the "generated too many atoms" failure signature in the
            # GAMESS output; any other UnknownError is unexpected and re-raised.
            mobj = re.search(
                # fmt: off
                r"^\s+" + r"AFTER PRINCIPAL AXIS TRANSFORMATION, THE PROGRAM" +
                r"\s*" + r"^\s+" +
                r"HAS CHOSEN THE FOLLOWING ATOMS AS BEING UNIQUE:" + r"\s*" +
                r"((?:\s+([A-Za-z]\w*)\s+\d+\.\d+\s+[-+]?\d+\.\d+\s+[-+]?\d+\.\d+\s+[-+]?\d+\.\d+\s*\n)+)"
                + r"^\s+" + r"EXECUTION OF GAMESS TERMINATED -ABNORMALLY-",
                # fmt: on
                str(e),
                re.MULTILINE | re.IGNORECASE,
            )
            if mobj:
                # Known recoverable failure -- fall through to the next,
                # lower-symmetry card.
                pass
            else:
                raise e
        else:
            break

    if atres is None:
        # All symmetry cards (including C1) failed the check run; without a
        # successful result there is no master frame to align against.
        raise RuntimeError(
            f"GAMESS could not determine a master frame for molecule with symmetry cards "
            f"{[internal_symmetry_card, 'Cnv 2', 'C1']}")

    # `mf_kmol` and `mf_qmol` are now in GAMESS master frame
    # * all atoms present (as natural for Molecule classes), and we don't know which atoms are unique
    mf_kmol, _ = kmol.align(atres.molecule,
                            atoms_map=False,
                            mols_align=True,
                            run_mirror=True,
                            verbose=0)

    # Sanity-check that alignment preserved the point group and geometry
    # (NRE compared loosely since GAMESS prints limited precision).
    mf_qmol = Molecule.from_schema(mf_kmol.dict())
    assert (
        mf_qmol.full_point_group_with_n() == qmol.full_point_group_with_n()
    ), f"{mf_qmol.full_point_group_with_n()} (mf) != {qmol.full_point_group_with_n()} (in)"
    assert mf_qmol.full_pg_n() == qmol.full_pg_n(
    ), f"{mf_qmol.full_pg_n()} (mf) != {qmol.full_pg_n()} (in)"
    assert abs(mf_qmol.nuclear_repulsion_energy() -
               qmol.nuclear_repulsion_energy()) < 1.0e-3, "NRE"

    # nunique machinery in psi4/qcdb.Molecule class works within Abelian point groups, so start there
    d2h_subgroup = qmol.point_group().symbol()
    d2h_unique = [mf_qmol.unique(i) for i in range(mf_qmol.nunique())]

    if mf_qmol.get_full_point_group() == d2h_subgroup:
        # Abelian! home free
        full_pg_unique = d2h_unique

    else:
        # `possibly_nonunique` are atom indices that could be redundant with post-D2h symmetry operations
        # * formed from D2h unique less the first index for each element
        d2h_unique_by_element = defaultdict(list)
        for i in d2h_unique:
            d2h_unique_by_element[mf_qmol.symbol(i)].append(i)
        possibly_nonunique = []
        for k, v in d2h_unique_by_element.items():
            possibly_nonunique.extend(v[1:])

        # `trials` are a brute-force set of all possible atom indices, one or more of which must be the post-D2h unique list
        trials = []
        for drop in range(len(possibly_nonunique) + 1):
            for aa in itertools.combinations(possibly_nonunique, drop):
                trial = sorted(list(set(d2h_unique) - set(aa)))
                trials.append(trial)

        # GAMESS-format geometry lines, one per atom (header is first 3 lines)
        all_atom_lines = mf_kmol.to_string(dtype="gamess").splitlines()[3:]

        for selected_atoms in trials:
            selected_atom_lines = "\n".join([
                ln for iln, ln in enumerate(all_atom_lines)
                if iln in selected_atoms
            ])
            inp = _get_exetype_check_input(
                selected_atoms,
                symmetry_card,
                mf_kmol.molecular_charge,
                mf_kmol.molecular_multiplicity,
                selected_atom_lines,
            )

            # run exetyp=check asserting full symmetry to find a unique list that doesn't generate overlapping atoms
            # * can't use AtomicInput into usual harness b/c partial atom list not expressible in Molecule
            gamessrec = {
                "infiles": {
                    "gamess.inp": inp,
                },
                "command": [which("rungms"), "gamess", "00", "1"],
                "scratch_directory": scratch_directory,
                "scratch_messy": False,
            }
            success, dexe = harness.execute(gamessrec)
            # pprint.pprint(dexe, width=200)

            if "THERE ARE ATOMS LESS THAN   0.100 APART, QUITTING..." not in dexe[
                    "stdout"]:
                # "THE NUCLEAR REPULSION ENERGY IS       12.7621426235"
                break

        # NOTE(review): if every trial overlaps, this keeps the last trial
        # unchallenged -- matches historical behavior, but may deserve a raise.
        full_pg_unique = selected_atoms

    data = {
        "unique": full_pg_unique,
        "symmetry_card": symmetry_card,
    }

    return mf_kmol, data