Example no. 1
def test_queue_error(fractal_compute_server):

    client = portal.FractalClient(fractal_compute_server.get_address())

    hooh = portal.data.get_molecule("hooh.json").to_json()
    del hooh["connectivity"]
    mol_ret = client.add_molecules({"hooh": hooh})

    ret = client.add_compute("rdkit", "UFF", "", "energy", None,
                             mol_ret["hooh"])
    queue_id = ret["submitted"][0]

    # Pull out a special iteration on the queue manager
    fractal_compute_server.update_tasks()
    assert len(fractal_compute_server.list_current_tasks()) == 1

    fractal_compute_server.await_results()
    assert len(fractal_compute_server.list_current_tasks()) == 0

    db = fractal_compute_server.objects["storage_socket"]
    ret = db.get_queue({"status": "ERROR"})["data"]

    assert len(ret) == 1
    assert "connectivity graph" in ret[0]["error"]
    fractal_compute_server.objects["storage_socket"].queue_mark_complete(
        [queue_id])
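All of these examples assume a `fractal_compute_server` pytest fixture that provides a temporary QCFractal server with an attached worker (in the upstream suites it is typically provided by `qcfractal.testing`). A minimal stand-in is sketched below; it assumes a QCFractal release that ships `FractalSnowflake` and is not the fixture the original tests use.

# Hedged sketch of a stand-in fixture (assumption: FractalSnowflake is available
# and runs an in-process worker; the real suites use qcfractal.testing fixtures).
import pytest
from qcfractal import FractalSnowflake

@pytest.fixture
def fractal_compute_server():
    server = FractalSnowflake(max_workers=2)
    yield server
    server.stop()

Example no. 2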
def test_queue_compute_mixed_molecule(fractal_compute_server):

    client = ptl.FractalClient(fractal_compute_server)

    mol1 = ptl.Molecule.from_data("He 0 0 0\nHe 0 0 2.1")
    mol_ret = client.add_molecules([mol1])

    mol2 = ptl.Molecule.from_data("He 0 0 0\nHe 0 0 2.2")

    ret = client.add_compute("RDKIT",
                             "UFF",
                             "",
                             "energy",
                             None, [mol1, mol2, bad_id1],
                             full_return=True)
    assert len(ret.data.ids) == 3
    assert ret.data.ids[2] is None
    assert len(ret.data.submitted) == 2
    assert len(ret.data.existing) == 0

    # Let the server work through the queued tasks
    fractal_compute_server.await_results()

    db = fractal_compute_server.objects["storage_socket"]

    ret = client.add_compute("rdkit", "UFF", "", "energy", None,
                             [mol_ret[0], bad_id2])
    assert len(ret.ids) == 2
    assert ret.ids[1] is None
    assert len(ret.submitted) == 0
    assert len(ret.existing) == 1
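The `bad_id1` and `bad_id2` names used above (and again in a later example) are module-level placeholders for record ids that cannot exist on the server; their definitions are not part of this excerpt. A hedged sketch of such placeholders is shown below; the concrete values are hypothetical and may differ from the upstream test module.

# Hypothetical placeholders for ids that are guaranteed not to exist.
bad_id1 = "99999000"
bad_id2 = "99999001"

Example no. 3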
def test_queue_error(fractal_compute_server):
    reset_server_database(fractal_compute_server)

    client = ptl.FractalClient(fractal_compute_server)

    hooh = ptl.data.get_molecule("hooh.json").copy(
        update={"connectivity": None})
    compute_ret = client.add_compute("rdkit", "UFF", "", "energy", None, hooh)

    # Pull out a special iteration on the queue manager
    fractal_compute_server.update_tasks()
    assert len(fractal_compute_server.list_current_tasks()) == 1

    fractal_compute_server.await_results()
    assert len(fractal_compute_server.list_current_tasks()) == 0

    # Pull from database, raw JSON
    db = fractal_compute_server.objects["storage_socket"]
    queue_ret = db.get_queue(status="ERROR")["data"]
    result = db.get_results(id=compute_ret.ids)['data'][0]

    assert len(queue_ret) == 1
    # TODO: task.error is not used anymore
    # assert "connectivity graph" in queue_ret[0].error.error_message
    assert result['status'] == 'ERROR'

    # Force a complete mark and test
    fractal_compute_server.objects["storage_socket"].queue_mark_complete(
        [queue_ret[0].id])
    result = db.get_results(id=compute_ret.ids)['data'][0]
    assert result['status'] == 'COMPLETE'
Example no. 4
def test_queue_duplicate_procedure(fractal_compute_server):

    client = portal.FractalClient(fractal_compute_server.get_address())

    hooh = portal.data.get_molecule("hooh.json").to_json()
    mol_ret = client.add_molecules({"hooh": hooh})

    geometric_options = {
        "options": None,
        "qc_meta": {
            "driver": "gradient",
            "method": "UFF",
            "basis": "",
            "options": None,
            "program": "rdkit"
        },
    }

    ret = client.add_procedure("optimization", "geometric", geometric_options,
                               mol_ret["hooh"])
    assert len(ret["submitted"]) == 1
    assert len(ret["completed"]) == 0

    # Let the server work through the queued tasks
    fractal_compute_server.await_results()

    db = fractal_compute_server.objects["storage_socket"]

    ret = client.add_procedure("optimization", "geometric", geometric_options,
                               mol_ret["hooh"])
    assert len(ret["submitted"]) == 0
    assert len(ret["completed"]) == 1
Example no. 5
def test_compute_wavefunction(fractal_compute_server):

    psiver = qcng.get_program("psi4").get_version()
    if parse_version(psiver) < parse_version("1.4a2.dev160"):
        pytest.skip("Must be used a modern version of Psi4 to execute")

    # Build a client
    client = ptl.FractalClient(fractal_compute_server)

    # Add a hydrogen molecule
    hydrogen = ptl.Molecule.from_data([[1, 0, 0, -0.5], [1, 0, 0, 0.5]], dtype="numpy", units="bohr")

    # Ask the server to compute a new computation
    r = client.add_compute(
        program="psi4",
        driver="energy",
        method="HF",
        basis="sto-3g",
        molecule=hydrogen,
        protocols={"wavefunction": "orbitals_and_eigenvalues"},
    )

    fractal_compute_server.await_results()
    assert len(fractal_compute_server.list_current_tasks()) == 0

    result = client.query_results(id=r.ids)[0]
    assert result.wavefunction

    r = result.get_wavefunction("orbitals_a")
    assert isinstance(r, np.ndarray)
    assert r.shape == (2, 2)

    r = result.get_wavefunction(["orbitals_a", "basis"])
    assert r.keys() == {"orbitals_a", "basis"}
Example no. 6
def test_optimization_dataset(fractal_compute_server):

    client = ptl.FractalClient(fractal_compute_server)

    ds = ptl.collections.OptimizationDataset("testing", client=client)

    opt_spec = {"program": "geometric"}
    qc_spec = {"driver": "gradient", "method": "UFF", "program": "rdkit"}
    ds.add_specification("test", opt_spec, qc_spec)

    hooh1 = ptl.data.get_molecule("hooh.json")
    hooh2 = hooh1.copy(
        update={"geometry": hooh1.geometry + np.array([0, 0, 0.2])})

    ds.add_entry("hooh1", hooh1)
    ds.add_entry("hooh1-2", hooh1)
    ds.add_entry("hooh2", hooh2)

    ds.compute("test")
    fractal_compute_server.await_results()

    ds.query("test")
    assert ds.status().loc["COMPLETE", "test"] == 3

    assert ds.counts().loc["hooh1", "test"] >= 4

    final_energy = 0.00011456853977485626
    for idx, row in ds.df["test"].items():
        assert pytest.approx(row.get_final_energy(), abs=1.e-5) == final_energy

    opt = ds.get_record("hooh1", "test")
    assert pytest.approx(opt.get_final_energy(), abs=1.e-5) == final_energy
Example no. 7
def test_task_molecule_no_orientation(data, fractal_compute_server):
    """
    Molecule orientation should not change on compute
    """

    # Reset database each run
    reset_server_database(fractal_compute_server)

    client = ptl.FractalClient(fractal_compute_server)

    mol = ptl.Molecule(symbols=["H", "H"],
                       geometry=[0, 0, 0, 0, 5, 0],
                       connectivity=[(0, 1, 1)])

    mol_id = client.add_molecules([mol])[0]

    program, method, basis = data
    ret = client.add_compute(program, method, basis, "energy", None, [mol_id])

    # Manually handle the compute
    fractal_compute_server.await_results()

    # Check for the single result
    ret = client.query_results(id=ret.submitted)
    assert len(ret) == 1
    assert ret[0].status == "COMPLETE"
    assert ret[0].molecule == mol_id

    # Make sure no other molecule was added
    ret = client.query_molecules(molecular_formula=["H2"])
    assert len(ret) == 1
    assert ret[0].id == mol_id
Example no. 8
def test_procedure_optimization_protocols(fractal_compute_server):

    # Add a hydrogen molecule
    hydrogen = ptl.Molecule.from_data([[1, 0, 0, -0.673], [1, 0, 0, 0.673]], dtype="numpy", units="bohr")
    client = fractal_compute_server.client()

    # Add compute
    options = {
        "keywords": None,
        "qc_spec": {"driver": "gradient", "method": "HF", "basis": "sto-3g", "program": "psi4"},
        "protocols": {"trajectory": "final"},
    }

    # Ask the server to compute a new computation
    r = client.add_procedure("optimization", "geometric", options, [hydrogen])
    assert len(r.ids) == 1
    compute_key = r.ids[0]

    # Manually handle the compute
    fractal_compute_server.await_results()
    assert len(fractal_compute_server.list_current_tasks()) == 0

    # Query result and check against our manual pull
    proc = client.query_procedures(id=r.ids)[0]
    assert proc.status == "COMPLETE"

    assert len(proc.trajectory) == 1
    assert len(proc.energies) > 1
Example no. 9
def test_optimization_submissions_with_constraints(fractal_compute_server):
    """
    Make sure that the constraints are added to the optimization and enforced.
    """
    client = FractalClient(fractal_compute_server)
    ethane = Molecule.from_file(get_data("ethane.sdf"), "sdf")
    factory = OptimizationDatasetFactory()
    dataset = OptimizationDataset(
        dataset_name="Test optimizations with constraint",
        description="Test optimization dataset with constraints",
        tagline="Testing optimization datasets")
    # add just mm spec
    dataset.add_qc_spec(method="openff-1.0.0",
                        basis="smirnoff",
                        program="openmm",
                        spec_name="default",
                        spec_description="mm default spec",
                        overwrite=True)
    # build some constraints
    constraints = Constraints()
    constraints.add_set_constraint(constraint_type="dihedral",
                                   indices=[2, 0, 1, 5],
                                   value=60,
                                   bonded=True)
    constraints.add_freeze_constraint(constraint_type="distance",
                                      indices=[0, 1],
                                      bonded=True)
    # add the molecule
    attributes = factory.create_cmiles_metadata(ethane)
    index = ethane.to_smiles()
    dataset.add_molecule(index=index,
                         molecule=ethane,
                         attributes=attributes,
                         constraints=constraints)
    # now add a mock url so we can submit the data
    dataset.metadata.long_description_url = "https://test.org"

    # submit the dataset
    dataset.submit(client=client, await_result=False)

    fractal_compute_server.await_results()

    # make sure all of the results are complete
    ds = client.get_collection("OptimizationDataset", dataset.dataset_name)
    record = ds.get_record(ds.df.index[0], "default")
    assert "constraints" in record.keywords
    assert record.status.value == "COMPLETE"
    assert record.error is None
    assert len(record.trajectory) > 1

    # now make sure the constraints worked
    final_molecule = record.get_final_molecule()
    assert final_molecule.measure((2, 0, 1, 5)) == pytest.approx(60)
    assert final_molecule.measure((0, 1)) == pytest.approx(
        record.get_initial_molecule().measure((0, 1)))
Example no. 10
def test_service_torsiondrive_service_incomplete(fractal_compute_server,
                                                 torsiondrive_fixture):
    hooh = ptl.data.get_molecule("hooh.json")
    hooh.geometry[0] += 0.00031

    spin_up_test, client = torsiondrive_fixture
    ret = spin_up_test(run_service=False)

    # Check the blank
    result = client.query_procedures(id=ret.ids)[0]
    assert len(result.final_energy_dict) == 0
    assert len(result.optimization_history) == 0
    assert result.status == "INCOMPLETE"

    # Update the service, but no compute
    fractal_compute_server.update_services()
    result = client.query_procedures(id=ret.ids)[0]
    status = result.detailed_status()
    assert result.status == "RUNNING"
    assert status["incomplete_tasks"] == 1

    fractal_compute_server.await_results()

    # Take a compute step
    fractal_compute_server.await_services(max_iter=1)
    result = client.query_procedures(id=ret.ids)[0]
    status = result.detailed_status()
    assert status["total_points"] == 4
    assert status["computed_points"] == 3
    assert status["complete_tasks"] >= 3
    assert status["incomplete_tasks"] == 0
    assert len(result.final_energy_dict) == 1  # One complete
    assert len(result.optimization_history) == 3  # Three spawned
    assert result.minimum_positions["[-90]"] == 0
    assert result.status == "RUNNING"

    # Repeat compute step checking for updates
    fractal_compute_server.await_services(max_iter=1)
    result = client.query_procedures(id=ret.ids)[0]
    assert len(result.final_energy_dict) == 3
    assert len(result.optimization_history) == 4
    assert result.minimum_positions["[-90]"] == 0
    assert result.status == "RUNNING"

    # Finalize
    fractal_compute_server.await_services(max_iter=6)
    result = client.query_procedures(id=ret.ids)[0]
    assert len(result.final_energy_dict) == 4
    assert len(result.optimization_history) == 4
    assert result.minimum_positions["[-90]"] == 2
    assert result.status == "COMPLETE"
Example no. 11
def test_compute_queue_stack(fractal_compute_server):

    # Build a client
    client = ptl.FractalClient(fractal_compute_server)

    # Add a hydrogen and helium molecule
    hydrogen = ptl.Molecule.from_data([[1, 0, 0, -0.5], [1, 0, 0, 0.5]], dtype="numpy", units="bohr")
    helium = ptl.Molecule.from_data([[2, 0, 0, 0.0]], dtype="numpy", units="bohr")

    hydrogen_mol_id, helium_mol_id = client.add_molecules([hydrogen, helium])

    kw = ptl.models.KeywordSet(**{"values": {"e_convergence": 1.0e-8}})
    kw_id = client.add_keywords([kw])[0]

    # Add compute
    compute_args = {"driver": "energy", "method": "HF", "basis": "sto-3g", "keywords": kw_id, "program": "psi4"}

    # Ask the server to compute a new computation
    r = client.add_compute("psi4", "HF", "sto-3g", "energy", kw_id, [hydrogen_mol_id, helium])
    assert len(r.ids) == 2

    r2 = client.add_compute(**compute_args, molecule=[hydrogen_mol_id, helium])
    assert len(r2.ids) == 2
    assert len(r2.submitted) == 0
    assert set(r2.ids) == set(r.ids)

    # Manually handle the compute
    fractal_compute_server.await_results()
    assert len(fractal_compute_server.list_current_tasks()) == 0

    # Query result and check against our manual pull
    results_query = {
        "program": "psi4",
        "molecule": [hydrogen_mol_id, helium_mol_id],
        "method": compute_args["method"],
        "basis": compute_args["basis"],
    }
    results = client.query_results(**results_query, status=None)

    assert len(results) == 2
    for r in results:
        assert r.provenance.creator.lower() == "psi4"
        if r.molecule == hydrogen_mol_id:
            assert pytest.approx(-1.0660263371078127, 1e-5) == r.properties.scf_total_energy
        elif r.molecule == helium_mol_id:
            assert pytest.approx(-2.807913354492941, 1e-5) == r.properties.scf_total_energy
        else:
            raise KeyError("Returned unexpected Molecule ID.")

    assert "RHF Reference" in results[0].get_stdout()
Example no. 12
def test_compute_reactiondataset_keywords(fractal_compute_server):

    client = ptl.FractalClient(fractal_compute_server)

    mol1 = ptl.Molecule.from_data("He 0 0 -1.1\n--\nHe 0 0 1.1")

    # Build a dataset
    ds = ptl.collections.ReactionDataset("dataset_options",
                                         client,
                                         ds_type="ie")
    ds.set_default_program("Psi4")

    ds.add_ie_rxn("He2", mol1)
    ds.add_keywords("direct",
                    "psi4",
                    ptl.models.KeywordSet(values={"scf_type": "direct"}),
                    default=True)
    ds.add_keywords("df", "psi4",
                    ptl.models.KeywordSet(values={"scf_type": "df"}))

    ds.save()
    ds = client.get_collection("reactiondataset", "dataset_options")

    # Compute, should default to direct options
    r = ds.compute("SCF", "STO-3G")
    fractal_compute_server.await_results()
    assert ds.query("SCF", "STO-3G")
    assert pytest.approx(0.39323818102293856, 1.e-5) == ds.df.loc["He2",
                                                                  "SCF/sto-3g"]

    r = ds.compute("SCF", "sto-3g", keywords="df")
    fractal_compute_server.await_results()
    assert ds.query("SCF", "sto-3g", keywords="df") == "SCF/sto-3g-df"
    assert pytest.approx(0.38748602675524185,
                         1.e-5) == ds.df.loc["He2", "SCF/sto-3g-df"]

    assert ds.list_history().shape[0] == 2
    assert ds.list_history(keywords="DF").shape[0] == 1
    assert ds.list_history(keywords="DIRECT").shape[0] == 1

    # Check saved history
    ds = client.get_collection("reactiondataset", "dataset_options")
    assert ds.list_history().shape[0] == 2
    assert {"df", "direct"} == set(ds.list_history().reset_index()["keywords"])

    # Check keywords
    kw = ds.get_keywords("df", "psi4")
    assert kw.values["scf_type"] == "df"
Example no. 13
def test_torsiondrive_run(fractal_compute_server):

    # Cannot use this fixture without these services. Also cannot use `mark` and `fixture` decorators
    pytest.importorskip("torsiondrive")
    pytest.importorskip("geometric")
    pytest.importorskip("rdkit")

    client = portal.FractalClient(fractal_compute_server)

    # Add a HOOH
    hooh = {
        'symbols': ['H', 'O', 'O', 'H'],
        'geometry': [
            1.84719633, 1.47046223, 0.80987166, 1.3126021, -0.13023157,
            -0.0513322, -1.31320906, 0.13130216, -0.05020593, -1.83756335,
            -1.48745318, 0.80161212
        ],
        'name': 'HOOH',
        'connectivity': [[0, 1, 1], [1, 2, 1], [2, 3, 1]],
    }
    mol_ret = client.add_molecules({"hooh": hooh})

    # Geometric options
    instance_options = {
        "torsiondrive_meta": {
            "dihedrals": [[0, 1, 2, 3]],
            "grid_spacing": [90]
        },
        "optimization_meta": {
            "program": "geometric",
            "coordsys": "tric",
        },
        "qc_meta": {
            "driver": "gradient",
            "method": "UFF",
            "basis": "",
            "options": "none",
            "program": "rdkit",
        },
    }

    ret = client.add_service("torsiondrive", [mol_ret["hooh"]],
                             instance_options)
    fractal_compute_server.await_results()
    assert len(fractal_compute_server.list_current_tasks()) == 0
Example no. 14
def test_procedure_task_error(fractal_compute_server):
    client = portal.FractalClient(fractal_compute_server.get_address())

    ret = client.add_compute("rdkit", "cookiemonster", "", "energy", None, [{
        "geometry": [0, 0, 0],
        "symbols": ["He"]
    }])

    # Manually handle the compute
    fractal_compute_server.await_results()

    # Check for error
    ret = client.check_tasks({"id": ret["submitted"][0]})

    assert len(ret) == 1
    assert ret[0]["status"] == "ERROR"
    assert "run_rdkit" in ret[0]["error"]
Example no. 15
def test_task_client_restart(fractal_compute_server):
    client = ptl.FractalClient(fractal_compute_server)

    mol = ptl.models.Molecule(**{"geometry": [0, 0, 1], "symbols": ["He"]})
    # Cookiemonster is an invalid method
    ret = client.add_compute("rdkit", "cookiemonster", "", "energy", None, [mol])

    # Manually handle the compute
    fractal_compute_server.await_results()

    tasks = client.query_tasks(base_result=ret.submitted)[0]
    assert tasks.status == "ERROR"

    upd = client.modify_tasks("restart", ret.submitted)
    assert upd.n_updated == 1

    tasks = client.query_tasks(base_result=ret.submitted)[0]
    assert tasks.status == "WAITING"
Example no. 16
def test_dataset_compute_gradient(fractal_compute_server):
    client = ptl.FractalClient(fractal_compute_server)

    # Build a dataset
    ds = ptl.collections.Dataset("ds_gradient",
                                 client,
                                 default_program="psi4",
                                 default_driver="gradient",
                                 default_units="hartree")

    ds.add_entry("He1", ptl.Molecule.from_data("He -1 0 0\n--\nHe 0 0 1"))
    ds.add_entry("He2", ptl.Molecule.from_data("He -1.1 0 0\n--\nHe 0 0 1.1"))

    contrib = {
        "name": "Gradient",
        "theory_level": "pseudo-random values",
        "values": {
            "He1": [0.03, 0, 0.02, -0.02, 0, -0.03],
            "He2": [0.03, 0, 0.02, -0.02, 0, -0.03]
        },
        "units": "hartree"
    }
    ds.add_contributed_values(contrib)
    ds.save()

    ds = client.get_collection("dataset", "ds_gradient")

    # Compute
    ds.compute("HF", "sto-3g")
    fractal_compute_server.await_results()

    ds.query("HF", "sto-3g", as_array=True)

    # Test out some statistics
    stats = ds.statistics("MUE", "HF/sto-3g", "Gradient")
    assert pytest.approx(stats.mean(), 1.e-5) == 0.00984176986312362

    stats = ds.statistics("UE", "HF/sto-3g", "Gradient")
    assert pytest.approx(stats.loc["He1"].mean(), 1.e-5) == 0.01635020639
    assert pytest.approx(stats.loc["He2"].mean(), 1.e-5) == 0.00333333333

    assert ds.list_history().shape[0] == 1
    assert ds.get_history().shape[0] == 1
Example no. 17
def reactiondataset_dftd3_fixture_fixture(fractal_compute_server,
                                          tmp_path_factory, request):
    ds_name = "He_DFTD3"
    client = ptl.FractalClient(fractal_compute_server)

    try:
        ds = client.get_collection("ReactionDataset", ds_name)
    except KeyError:
        testing.check_has_module("psi4")
        testing.check_has_module("dftd3")

        ds = ptl.collections.ReactionDataset(ds_name, client, ds_type="ie")

        # Add a helium dimer to the DB
        HeDimer = ptl.Molecule.from_data([[2, 0, 0, -4.123], [2, 0, 0, 4.123]],
                                         dtype="numpy",
                                         units="bohr",
                                         frags=[1])
        ds.add_ie_rxn("HeDimer", HeDimer, attributes={"r": 4})
        ds.set_default_program("psi4")
        ds.add_keywords("scf_default",
                        "psi4",
                        ptl.models.KeywordSet(values={}),
                        default=True)

        ds.save()

        ncomp1 = ds.compute("B3LYP-D3", "6-31g")
        assert len(ncomp1.ids) == 4
        assert len(ncomp1.submitted) == 4

        ncomp2 = ds.compute("B3LYP-D3(BJ)", "6-31g")
        assert len(ncomp2.ids) == 4
        assert len(ncomp2.submitted) == 2

        fractal_compute_server.await_results()

        build_dataset_fixture_view(ds, fractal_compute_server)

    ds = handle_dataset_fixture_params(client, "ReactionDataset", ds,
                                       fractal_compute_server, request)

    yield client, ds
Example no. 18
def test_queue_duplicate_compute(fractal_compute_server):

    client = portal.FractalClient(fractal_compute_server.get_address())

    hooh = portal.data.get_molecule("hooh.json").to_json()
    mol_ret = client.add_molecules({"hooh": hooh})

    ret = client.add_compute("rdkit", "UFF", "", "energy", None,
                             mol_ret["hooh"])
    assert len(ret["submitted"]) == 1
    assert len(ret["completed"]) == 0

    # Let the server work through the queued tasks
    fractal_compute_server.await_results()

    db = fractal_compute_server.objects["storage_socket"]

    ret = client.add_compute("rdkit", "UFF", "", "energy", None,
                             mol_ret["hooh"])
    assert len(ret["submitted"]) == 0
    assert len(ret["completed"]) == 1
Example no. 19
def test_task_error(fractal_compute_server):
    client = ptl.FractalClient(fractal_compute_server)

    mol = ptl.models.Molecule(**{"geometry": [0, 0, 0], "symbols": ["He"]})
    # Cookiemonster is an invalid method
    ret = client.add_compute("rdkit", "cookiemonster", "", "energy", None, [mol])

    # Manually handle the compute
    fractal_compute_server.await_results()

    # Check for error
    results = client.query_results(id=ret.submitted)
    assert len(results) == 1
    assert results[0].status == "ERROR"

    assert "connectivity" in results[0].get_error().error_message

    # Check manager
    m = fractal_compute_server.storage.get_managers()["data"]
    assert len(m) == 1
    assert m[0]["failures"] > 0
    assert m[0]["completed"] > 0
def test_queue_duplicate_procedure(fractal_compute_server):

    client = ptl.FractalClient(fractal_compute_server)

    hooh = ptl.data.get_molecule("hooh.json")
    mol_ret = client.add_molecules([hooh])

    geometric_options = {
        "keywords": None,
        "qc_spec": {
            "driver": "gradient",
            "method": "UFF",
            "basis": "",
            "keywords": None,
            "program": "rdkit"
        },
    }

    ret = client.add_procedure("optimization", "geometric", geometric_options,
                               [mol_ret[0], bad_id1])
    assert len(ret.ids) == 2
    assert ret.ids[1] is None
    assert len(ret.submitted) == 1
    assert len(ret.existing) == 0

    # Let the server work through the queued tasks
    fractal_compute_server.await_results()

    db = fractal_compute_server.objects["storage_socket"]

    ret2 = client.add_procedure("optimization", "geometric", geometric_options,
                                [bad_id1, hooh])
    assert len(ret2.ids) == 2
    assert ret2.ids[0] is None
    assert len(ret2.submitted) == 0
    assert len(ret2.existing) == 1

    assert ret.ids[0] == ret2.ids[1]
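Example no. 21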
def test_queue_duplicate_compute(fractal_compute_server):
    reset_server_database(fractal_compute_server)

    client = ptl.FractalClient(fractal_compute_server)

    hooh = ptl.data.get_molecule("hooh.json")
    mol_ret = client.add_molecules([hooh])

    ret = client.add_compute("rdkit", "UFF", "", "energy", None, mol_ret)
    assert len(ret.ids) == 1
    assert len(ret.existing) == 0

    # Wait for the compute to execute
    fractal_compute_server.await_results()

    db = fractal_compute_server.objects["storage_socket"]

    # Should catch duplicates both ways
    ret = client.add_compute("RDKIT", "uff", None, "energy", None, mol_ret)
    assert len(ret.ids) == 1
    assert len(ret.existing) == 1

    ret = client.add_compute("rdkit", "uFf", "", "energy", None, mol_ret)
    assert len(ret.ids) == 1
    assert len(ret.existing) == 1

    # Multiple queries
    assert len(client.query_results(program="RDKIT")) == 1
    assert len(client.query_results(program="RDKit")) == 1

    assert len(client.query_results(method="UFF")) == 1
    assert len(client.query_results(method="uff")) == 1

    assert len(client.query_results(basis=None)) == 1
    assert len(client.query_results(basis="")) == 1

    assert len(client.query_results(keywords=None)) == 1
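A hedged addendum to the example above: since every spelling variant maps onto the same stored record, querying by the ids returned from the last add_compute call should also give exactly one result. This assertion is not in the original test.

# Hedged addendum, reusing `ret` from the last add_compute call above.
assert len(client.query_results(id=ret.ids)) == 1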
Example no. 22
def test_compute_reactiondataset_dftd3(fractal_compute_server):

    client = ptl.FractalClient(fractal_compute_server)
    ds_name = "He_DFTD3"
    ds = ptl.collections.ReactionDataset(ds_name, client, ds_type="ie")

    # Add a helium dimer to the DB
    HeDimer = ptl.Molecule.from_data([[2, 0, 0, -4.123], [2, 0, 0, 4.123]],
                                     dtype="numpy",
                                     units="bohr",
                                     frags=[1])
    ds.add_ie_rxn("HeDimer", HeDimer, attributes={"r": 4})
    ds.set_default_program("psi4")

    ds.save()

    ncomp1 = ds.compute("B3LYP-D3", "6-31G")
    assert len(ncomp1.ids) == 4
    assert len(ncomp1.submitted) == 4

    ncomp2 = ds.compute("B3LYP-D3(BJ)", "6-31G")
    assert len(ncomp2.ids) == 4
    assert len(ncomp2.submitted) == 2

    fractal_compute_server.await_results()
    assert ds.query("B3LYP", "6-31G")
    assert ds.query("B3LYP-D3", "6-31G")
    assert ds.query("B3LYP-D3(BJ)", "6-31G")

    for key, value in {
            "B3LYP/6-31g": -0.002135,
            "B3LYP-D3/6-31g": -0.005818,
            "B3LYP-D3(BJ)/6-31g": -0.005636
    }.items():

        assert pytest.approx(value, 1.e-3) == ds.df.loc["HeDimer", key]
def test_procedure_optimization(fractal_compute_server):

    # Add a hydrogen molecule
    hydrogen = ptl.Molecule.from_data([[1, 0, 0, -0.672], [1, 0, 0, 0.672]],
                                      dtype="numpy",
                                      units="bohr")
    client = ptl.FractalClient(fractal_compute_server.get_address(""))
    mol_ret = client.add_molecules([hydrogen])

    kw = ptl.models.KeywordSet(
        values={"scf_properties": ["quadrupole", "wiberg_lowdin_indices"]})
    kw_id = client.add_keywords([kw])[0]

    # Add compute
    options = {
        "keywords": None,
        "qc_spec": {
            "driver": "gradient",
            "method": "HF",
            "basis": "sto-3g",
            "keywords": kw_id,
            "program": "psi4"
        },
    }

    # Ask the server to compute a new computation
    r = client.add_procedure("optimization", "geometric", options, mol_ret)
    assert len(r.ids) == 1
    compute_key = r.ids[0]

    # Manually handle the compute
    fractal_compute_server.await_results()
    assert len(fractal_compute_server.list_current_tasks()) == 0

    # Query result and check against our manual pull
    query1 = client.query_procedures(procedure="optimization",
                                     program="geometric")
    query2 = client.query_procedures(id=compute_key)

    for query in [query1, query2]:
        assert len(query) == 1
        opt_result = query[0]

        assert isinstance(opt_result.provenance.creator, str)
        assert isinstance(str(opt_result), str)  # Check that repr runs
        assert pytest.approx(-1.117530188962681,
                             1e-5) == opt_result.get_final_energy()

        # Check pulls
        traj = opt_result.get_trajectory()
        assert len(traj) == len(opt_result.energies)

        assert np.array_equal(opt_result.get_final_molecule().symbols,
                              ["H", "H"])

        # Check individual elements
        for ind in range(len(opt_result.trajectory)):
            # Check keywords went through
            assert traj[ind].provenance.creator.lower() == "psi4"
            assert "SCF QUADRUPOLE XY" in traj[ind].extras["qcvars"]
            assert "WIBERG_LOWDIN_INDICES" in traj[ind].extras["qcvars"]

            # Make sure extra was popped
            assert "_qcfractal_tags" not in traj[ind].extras

            raw_energy = traj[ind].properties.return_energy
            assert pytest.approx(raw_energy, 1.e-5) == opt_result.energies[ind]

        # Check result stdout
        assert "RHF Reference" in traj[0].get_stdout()

        assert opt_result.get_molecular_trajectory(
        )[0].id == opt_result.initial_molecule
        assert opt_result.get_molecular_trajectory(
        )[-1].id == opt_result.final_molecule

        # Check stdout
        assert "internal coordinates" in opt_result.get_stdout()

    # Check that duplicates are caught
    r = client.add_procedure("optimization", "geometric", options,
                             [mol_ret[0]])
    assert len(r.ids) == 1
    assert len(r.existing) == 1
Example no. 24
def test_optimization_submissions_with_pcm(fractal_compute_server):
    """Test submitting an Optimization dataset to a snowflake server with PCM."""

    client = FractalClient(fractal_compute_server)

    program = "psi4"
    if not has_program(program):
        pytest.skip(f"Program '{program}' not found.")

    # use a single small molecule due to the extra time PCM takes
    molecules = Molecule.from_smiles("C")

    factory = OptimizationDatasetFactory(driver="gradient")
    factory.add_qc_spec(method="hf",
                        basis="sto-3g",
                        program=program,
                        spec_name="default",
                        spec_description="test",
                        implicit_solvent=PCMSettings(units="au",
                                                     medium_Solvent="water"),
                        overwrite=True)

    dataset = factory.create_dataset(
        dataset_name=f"Test optimizations info with pcm water",
        molecules=molecules,
        description="Test optimization dataset",
        tagline="Testing optimization datasets",
    )

    with pytest.raises(DatasetInputError):
        dataset.submit(client=client, await_result=False)

    # now add a mock url so we can submit the data
    dataset.metadata.long_description_url = "https://test.org"

    # now submit again
    dataset.submit(client=client, await_result=False)

    fractal_compute_server.await_results()

    # make sure all of the results are complete
    ds = client.get_collection("OptimizationDataset", dataset.dataset_name)

    # check the metadata
    meta = Metadata(**ds.data.metadata)
    assert meta == dataset.metadata

    # check the provenance
    assert dataset.provenance == ds.data.provenance

    # check the qc spec
    for qc_spec in dataset.qc_specifications.values():
        spec = ds.data.specs[qc_spec.spec_name]

        assert spec.description == qc_spec.spec_description
        assert spec.qc_spec.driver == dataset.driver
        assert spec.qc_spec.method == qc_spec.method
        assert spec.qc_spec.basis == qc_spec.basis
        assert spec.qc_spec.program == qc_spec.program

        # check the keywords
        keywords = client.query_keywords(spec.qc_spec.keywords)[0]

        assert keywords.values["maxiter"] == dataset.maxiter
        assert keywords.values["scf_properties"] == dataset.scf_properties

        # query the dataset
        ds.query(qc_spec.spec_name)

        for index in ds.df.index:
            record = ds.df.loc[index].default
            assert record.status.value == "COMPLETE"
            assert record.error is None
            assert len(record.trajectory) > 1
            result = record.get_trajectory()[0]
            assert "CURRENT DIPOLE X" in result.extras["qcvars"].keys()
            assert "SCF QUADRUPOLE XX" in result.extras["qcvars"].keys()
            # make sure the PCM result was captured
            assert result.extras["qcvars"]["PCM POLARIZATION ENERGY"] < 0
Example no. 25
def test_optimization_submissions(fractal_compute_server, specification):
    """Test submitting an Optimization dataset to a snowflake server."""

    client = FractalClient(fractal_compute_server)

    qc_spec, driver = specification
    program = qc_spec["program"]
    if not has_program(program):
        pytest.skip(f"Program '{program}' not found.")

    molecules = Molecule.from_file(get_data("butane_conformers.pdb"), "pdb")

    factory = OptimizationDatasetFactory(driver=driver)
    factory.add_qc_spec(**qc_spec,
                        spec_name="default",
                        spec_description="test",
                        overwrite=True)

    dataset = factory.create_dataset(
        dataset_name=f"Test optimizations info {program}, {driver}",
        molecules=molecules[:2],
        description="Test optimization dataset",
        tagline="Testing optimization datasets",
    )

    with pytest.raises(DatasetInputError):
        dataset.submit(client=client, await_result=False)

    # now add a mock url so we can submit the data
    dataset.metadata.long_description_url = "https://test.org"

    # now submit again
    dataset.submit(client=client, await_result=False)

    fractal_compute_server.await_results()

    # make sure all of the results are complete
    ds = client.get_collection("OptimizationDataset", dataset.dataset_name)

    # check the metadata
    meta = Metadata(**ds.data.metadata)
    assert meta == dataset.metadata

    # check the provenance
    assert dataset.provenance == ds.data.provenance

    # check the qc spec
    for qc_spec in dataset.qc_specifications.values():
        spec = ds.data.specs[qc_spec.spec_name]

        assert spec.description == qc_spec.spec_description
        assert spec.qc_spec.driver == dataset.driver
        assert spec.qc_spec.method == qc_spec.method
        assert spec.qc_spec.basis == qc_spec.basis
        assert spec.qc_spec.program == qc_spec.program

        # check the keywords
        keywords = client.query_keywords(spec.qc_spec.keywords)[0]

        assert keywords.values["maxiter"] == dataset.maxiter
        assert keywords.values["scf_properties"] == dataset.scf_properties

        # query the dataset
        ds.query(qc_spec.spec_name)

        for index in ds.df.index:
            record = ds.df.loc[index].default
            assert record.status.value == "COMPLETE"
            assert record.error is None
            assert len(record.trajectory) > 1
            # if we used psi4 make sure the properties were captured
            if program == "psi4":
                result = record.get_trajectory()[0]
                assert "CURRENT DIPOLE X" in result.extras["qcvars"].keys()
                assert "SCF QUADRUPOLE XX" in result.extras["qcvars"].keys()
Example no. 26
def test_basic_submissions_wavefunction(fractal_compute_server):
    """
    Test submitting a basic dataset with a wavefunction protocol and make sure it is executed.
    """
    # only a psi4 test
    if not has_program("psi4"):
        pytest.skip(f"Program psi4 not found.")

    client = FractalClient(fractal_compute_server)
    molecules = Molecule.from_file(get_data("butane_conformers.pdb"), "pdb")

    factory = BasicDatasetFactory(driver="energy")
    factory.clear_qcspecs()
    factory.add_qc_spec(method="hf",
                        basis="sto-6g",
                        program="psi4",
                        spec_name="default",
                        spec_description="wavefunction spec",
                        store_wavefunction="orbitals_and_eigenvalues")

    dataset = factory.create_dataset(
        dataset_name=f"Test single points with wavefunction",
        molecules=molecules,
        description="Test basics dataset",
        tagline="Testing single point datasets with wavefunction",
    )
    # now add a mock url so we can submit the data
    dataset.metadata.long_description_url = "https://test.org"

    # submit the dataset
    dataset.submit(client=client, await_result=False)

    fractal_compute_server.await_results()

    # make sure all of the results are complete
    ds = client.get_collection("Dataset", dataset.dataset_name)

    # check the metadata
    meta = Metadata(**ds.data.metadata)
    assert meta == dataset.metadata

    assert ds.data.description == dataset.description
    assert ds.data.tagline == dataset.dataset_tagline
    assert ds.data.tags == dataset.dataset_tags

    # check the provenance
    assert dataset.provenance == ds.data.provenance

    # check the qc spec
    assert ds.data.default_driver == dataset.driver

    # get the most recently run spec
    for specification in ds.data.history:
        driver, program, method, basis, spec_name = specification
        spec = dataset.qc_specifications[spec_name]
        assert driver == dataset.driver
        assert program == spec.program
        assert method == spec.method
        assert basis == spec.basis

    for spec in dataset.qc_specifications.values():
        query = ds.get_records(
            method=spec.method,
            basis=spec.basis,
            program=spec.program,
        )
        for index in query.index:
            result = query.loc[index].record
            assert result.status.value.upper() == "COMPLETE"
            assert result.error is None
            assert result.return_result is not None
            basis = result.get_wavefunction("basis")
            assert basis.name.lower() == "sto-6g"
            orbitals = result.get_wavefunction("orbitals_a")
            assert orbitals.shape is not None
Example no. 27
def test_basic_submissions_single_spec(fractal_compute_server, specification):
    """Test submitting a basic dataset to a snowflake server."""

    client = FractalClient(fractal_compute_server)

    qc_spec, driver = specification

    program = qc_spec["program"]
    if not has_program(program):
        pytest.skip(f"Program '{program}' not found.")

    molecules = Molecule.from_file(get_data("butane_conformers.pdb"), "pdb")

    factory = BasicDatasetFactory(driver=driver)
    factory.add_qc_spec(**qc_spec,
                        spec_name="default",
                        spec_description="testing the single points",
                        overwrite=True)

    dataset = factory.create_dataset(
        dataset_name=f"Test single points info {program}, {driver}",
        molecules=molecules,
        description="Test basics dataset",
        tagline="Testing single point datasets",
    )

    with pytest.raises(DatasetInputError):
        dataset.submit(client=client, await_result=False)

    # now add a mock url so we can submit the data
    dataset.metadata.long_description_url = "https://test.org"

    # now submit again
    dataset.submit(client=client, await_result=False)

    fractal_compute_server.await_results()

    # make sure all of the results are complete
    ds = client.get_collection("Dataset", dataset.dataset_name)

    # check the metadata
    meta = Metadata(**ds.data.metadata)
    assert meta == dataset.metadata

    assert ds.data.description == dataset.description
    assert ds.data.tagline == dataset.dataset_tagline
    assert ds.data.tags == dataset.dataset_tags

    # check the provenance
    assert dataset.provenance == ds.data.provenance

    # check the qc spec
    assert ds.data.default_driver == dataset.driver

    # get the most recently run spec
    for specification in ds.data.history:
        driver, program, method, basis, spec_name = specification
        spec = dataset.qc_specifications[spec_name]
        assert driver == dataset.driver
        assert program == spec.program
        assert method == spec.method
        assert basis == spec.basis
        break
    else:
        raise RuntimeError(
            f"The requested compute was not found in the history {ds.data.history}"
        )

    for spec in dataset.qc_specifications.values():
        query = ds.get_records(
            method=spec.method,
            basis=spec.basis,
            program=spec.program,
        )
        for index in query.index:
            result = query.loc[index].record
            assert result.status.value.upper() == "COMPLETE"
            assert result.error is None
            assert result.return_result is not None
Example no. 28
def test_adding_compute(fractal_compute_server, dataset_data):
    """
    Test adding new compute to each of the dataset types using non-psi4 programs.
    """
    client = FractalClient(fractal_compute_server)
    mol = Molecule.from_smiles("CO")
    factory_type, dataset_type = dataset_data
    # make and clear out the qc specs
    factory = factory_type()
    factory.clear_qcspecs()
    factory.add_qc_spec(method="openff-1.0.0",
                        basis="smirnoff",
                        program="openmm",
                        spec_name="default",
                        spec_description="default spec for openff")
    dataset = factory.create_dataset(
        dataset_name=f"Test adding compute to {factory_type}",
        molecules=mol,
        description=f"Testing adding compute to a {dataset_type} dataset",
        tagline="tests for adding compute.")

    # now add a mock url so we can submit the data
    dataset.metadata.long_description_url = "https://test.org"

    # submit the dataset
    dataset.submit(client=client, await_result=False)
    # make sure that the compute has finished
    fractal_compute_server.await_results()
    fractal_compute_server.await_services(max_iter=50)

    # now let's make a dataset with new compute and submit it
    # transfer the metadata to compare the elements
    compute_dataset = dataset_type(dataset_name=dataset.dataset_name,
                                   metadata=dataset.metadata)
    compute_dataset.clear_qcspecs()
    # now add the new compute spec
    compute_dataset.add_qc_spec(method="uff",
                                basis=None,
                                program="rdkit",
                                spec_name="rdkit",
                                spec_description="rdkit basic spec")

    # make sure the dataset has no molecules and submit it
    assert compute_dataset.dataset == {}
    compute_dataset.submit(client=client)
    # make sure that the compute has finished
    fractal_compute_server.await_results()
    fractal_compute_server.await_services(max_iter=50)

    # make sure all of the results are complete
    ds = client.get_collection(dataset.dataset_type, dataset.dataset_name)

    # check the metadata
    meta = Metadata(**ds.data.metadata)
    assert meta == dataset.metadata

    assert ds.data.description == dataset.description
    assert ds.data.tagline == dataset.dataset_tagline
    assert ds.data.tags == dataset.dataset_tags

    # check the provenance
    assert dataset.provenance == ds.data.provenance

    # update all specs into one dataset
    dataset.add_qc_spec(**compute_dataset.qc_specifications["rdkit"].dict())
    # get the most recently run spec
    if dataset.dataset_type == "DataSet":
        for specification in ds.data.history:
            driver, program, method, basis, spec_name = specification
            spec = dataset.qc_specifications[spec_name]
            assert driver == dataset.driver
            assert program == spec.program
            assert method == spec.method
            assert basis == spec.basis

        for spec in dataset.qc_specifications.values():
            query = ds.get_records(
                method=spec.method,
                basis=spec.basis,
                program=spec.program,
            )
            for index in query.index:
                result = query.loc[index].record
                assert result.status.value.upper() == "COMPLETE"
                assert result.error is None
                assert result.return_result is not None
    else:
        # check the qc spec
        for qc_spec in dataset.qc_specifications.values():
            spec = ds.data.specs[qc_spec.spec_name]

            assert spec.description == qc_spec.spec_description
            assert spec.qc_spec.driver == dataset.driver
            assert spec.qc_spec.method == qc_spec.method
            assert spec.qc_spec.basis == qc_spec.basis
            assert spec.qc_spec.program == qc_spec.program

            # check the keywords
            keywords = client.query_keywords(spec.qc_spec.keywords)[0]

            assert keywords.values["maxiter"] == dataset.maxiter
            assert keywords.values["scf_properties"] == dataset.scf_properties

            # query the dataset
            ds.query(qc_spec.spec_name)

            for index in ds.df.index:
                record = ds.df.loc[index].default
                # this may take some time, so make sure it completed with no error
                assert record.status.value == "COMPLETE", print(record.dict())
                assert record.error is None
Example no. 29
def test_adding_specifications(fractal_compute_server):
    """
    Test adding specifications to datasets.
    Here we are testing multiple scenarios:
    1) Adding an identical specification to a dataset
    2) Adding a spec with the same name as another but with different options
    3) Overwriting a spec that was added but never used.
    """
    client = FractalClient(fractal_compute_server)
    mol = Molecule.from_smiles("CO")
    # make a dataset
    factory = OptimizationDatasetFactory()
    opt_dataset = factory.create_dataset(
        dataset_name="Specification error check",
        molecules=mol,
        description="test adding new compute specs to datasets",
        tagline="test adding new compute specs")
    opt_dataset.clear_qcspecs()
    # add a new mm spec
    opt_dataset.add_qc_spec(method="openff-1.0.0",
                            basis="smirnoff",
                            program="openmm",
                            spec_description="default openff spec",
                            spec_name="openff-1.0.0")

    opt_dataset.metadata.long_description_url = "https://test.org"
    # submit the optimizations and let the compute run
    opt_dataset.submit(client=client, await_result=False)
    fractal_compute_server.await_results()
    fractal_compute_server.await_services()

    # grab the collection
    ds = client.get_collection(opt_dataset.dataset_type,
                               opt_dataset.dataset_name)

    # now try to add the specification again; this should return True
    assert opt_dataset.add_dataset_specification(
        spec=opt_dataset.qc_specifications["openff-1.0.0"],
        opt_spec=opt_dataset.optimization_procedure.get_optimzation_spec(),
        collection=ds) is True

    # now change part of the spec but keep the name the same
    opt_dataset.clear_qcspecs()
    opt_dataset.add_qc_spec(method="openff-1.2.1",
                            basis="smirnoff",
                            spec_name="openff-1.0.0",
                            program="openmm",
                            spec_description="openff-1.2.1 with wrong name.")

    # now try and add this specification with the same name but different settings
    with pytest.raises(QCSpecificationError):
        opt_dataset.add_dataset_specification(
            spec=opt_dataset.qc_specifications["openff-1.0.0"],
            opt_spec=opt_dataset.optimization_procedure.get_optimzation_spec(),
            collection=ds)

    # now add a new specification but no compute and make sure it is overwritten
    opt_dataset.clear_qcspecs()
    opt_dataset.add_qc_spec(method="ani1x",
                            basis=None,
                            program="torchani",
                            spec_name="ani",
                            spec_description="a ani spec")
    assert opt_dataset.add_dataset_specification(
        spec=opt_dataset.qc_specifications["ani"],
        opt_spec=opt_dataset.optimization_procedure.get_optimzation_spec(),
        collection=ds) is True

    # now change the spec slightly and add again
    opt_dataset.clear_qcspecs()
    opt_dataset.add_qc_spec(method="ani1ccx",
                            basis=None,
                            program="torchani",
                            spec_name="ani",
                            spec_description="a ani spec")
    assert opt_dataset.add_dataset_specification(
        spec=opt_dataset.qc_specifications["ani"],
        opt_spec=opt_dataset.optimization_procedure.get_optimzation_spec(),
        collection=ds) is True
Example no. 30
def test_basic_submissions_single_pcm_spec(fractal_compute_server):
    """Test submitting a basic dataset to a snowflake server with pcm water in the specification."""

    client = FractalClient(fractal_compute_server)

    program = "psi4"
    if not has_program(program):
        pytest.skip(f"Program '{program}' not found.")

    molecules = Molecule.from_file(get_data("butane_conformers.pdb"), "pdb")

    factory = BasicDatasetFactory(driver="energy")
    factory.add_qc_spec(method="hf",
                        basis="sto-3g",
                        program=program,
                        spec_name="default",
                        spec_description="testing the single points with pcm",
                        implicit_solvent=PCMSettings(units="au",
                                                     medium_Solvent="water"),
                        overwrite=True)

    # only use one molecule due to the time it takes to run with pcm
    dataset = factory.create_dataset(
        dataset_name=f"Test single points with pcm water",
        molecules=molecules[0],
        description="Test basics dataset with pcm water",
        tagline="Testing single point datasets with pcm water",
    )

    with pytest.raises(DatasetInputError):
        dataset.submit(client=client, await_result=False)

    # now add a mock url so we can submit the data
    dataset.metadata.long_description_url = "https://test.org"

    # now submit again
    dataset.submit(client=client, await_result=False)

    fractal_compute_server.await_results()

    # make sure all of the results are complete
    ds = client.get_collection("Dataset", dataset.dataset_name)

    # check the metadata
    meta = Metadata(**ds.data.metadata)
    assert meta == dataset.metadata

    assert ds.data.description == dataset.description
    assert ds.data.tagline == dataset.dataset_tagline
    assert ds.data.tags == dataset.dataset_tags

    # check the provenance
    assert dataset.provenance == ds.data.provenance

    # check the qc spec
    assert ds.data.default_driver == dataset.driver

    # get the most recently run spec
    for specification in ds.data.history:
        driver, program, method, basis, spec_name = specification
        spec = dataset.qc_specifications[spec_name]
        assert driver == dataset.driver
        assert program == spec.program
        assert method == spec.method
        assert basis == spec.basis
        break
    else:
        raise RuntimeError(
            f"The requested compute was not found in the history {ds.data.history}"
        )

    for spec in dataset.qc_specifications.values():
        query = ds.get_records(
            method=spec.method,
            basis=spec.basis,
            program=spec.program,
        )
        for index in query.index:
            result = query.loc[index].record
            assert result.status.value.upper() == "COMPLETE"
            assert result.error is None
            assert result.return_result is not None
            # make sure the PCM result was captured
            assert result.extras["qcvars"]["PCM POLARIZATION ENERGY"] < 0