Code example #1
def test_create_torsiondrive_dataset():
    """
    Make sure we can correctly make a dataset using the scan enumerator.
    """
    factory = TorsiondriveDatasetFactory()
    scan_filter = workflow_components.ScanEnumerator()
    scan_filter.add_torsion_scan(smarts="[*:1]~[*:2]-[#8:3]-[#1:4]",
                                 scan_range=(-90, 90),
                                 scan_increment=10)
    factory.add_workflow_components(scan_filter)
    conformer_generator = workflow_components.StandardConformerGenerator(
        max_conformers=1)
    factory.add_workflow_components(conformer_generator)
    mols = Molecule.from_file(get_data("tautomers_small.smi"),
                              "smi",
                              allow_undefined_stereo=True)
    dataset = factory.create_dataset(dataset_name="test name",
                                     molecules=mols,
                                     description="Force field test",
                                     tagline="A test dataset",
                                     processors=1)

    assert dataset.n_molecules > 0
    assert dataset.n_records > 0
    for entry in dataset.dataset.values():
        assert entry.keywords.dihedral_ranges == [(-90, 90)]
        assert entry.keywords.grid_spacing == [10]
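
These test functions come from a pytest suite and omit their imports. A minimal, hedged sketch of the imports they appear to rely on is given below; the openff-qcsubmit and openff-toolkit paths are standard, while the module paths for the test helpers get_data and has_program are assumptions.

import pytest
from openff.toolkit.topology import Molecule
from qcportal import FractalClient  # legacy QCPortal/QCFractal client used by these tests

from openff.qcsubmit import workflow_components
from openff.qcsubmit.common_structures import Metadata
from openff.qcsubmit.exceptions import DatasetInputError
from openff.qcsubmit.factories import TorsiondriveDatasetFactory

# assumed locations of the test helpers used in these snippets
from openff.qcsubmit.utils import get_data    # path to a packaged test data file
from qcengine.testing import has_program      # True if the QC program is installed
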
Code example #2
def test_torsiondrive_scan_keywords(fractal_compute_server):
    """
    Test running torsiondrives with unique keyword settings which overwrite the global grid spacing and scan range.
    """

    client = FractalClient(fractal_compute_server)
    molecules = Molecule.from_smiles("CO")
    factory = TorsiondriveDatasetFactory()
    scan_enum = workflow_components.ScanEnumerator()
    scan_enum.add_torsion_scan(smarts="[*:1]~[#6:2]-[#8:3]~[*:4]")
    factory.add_workflow_components(scan_enum)
    factory.clear_qcspecs()
    factory.add_qc_spec(method="openff_unconstrained-1.1.0",
                        basis="smirnoff",
                        program="openmm",
                        spec_description="scan range test",
                        spec_name="openff-1.1.0")
    dataset = factory.create_dataset(
        dataset_name="Torsiondrive scan keywords",
        molecules=molecules,
        description="Testing scan keywords which overwrite the global settings",
        tagline="Testing scan keywords which overwrite the global settings")

    # now set the keywords
    keys = list(dataset.dataset.keys())
    entry = dataset.dataset[keys[0]]
    entry.keywords = {"grid_spacing": [5], "dihedral_ranges": [(-10, 10)]}

    # now submit
    dataset.submit(client=client)
    fractal_compute_server.await_services(max_iter=50)

    # make sure all of the results are complete
    ds = client.get_collection("TorsionDriveDataset", dataset.dataset_name)

    # get the entry
    record = ds.get_record(ds.df.index[0], "openff-1.1.0")
    assert record.keywords.grid_spacing == [5]
    assert record.keywords.grid_spacing != dataset.grid_spacing
    assert record.keywords.dihedral_ranges == [(-10, 10)]
    assert record.keywords.dihedral_ranges != dataset.dihedral_ranges
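
The override above touches only the first entry; the same pattern extends to every entry before submission. A minimal sketch, continuing from the dataset built above:

# apply the tighter scan settings to all entries rather than only the first
for entry in dataset.dataset.values():
    entry.keywords = {"grid_spacing": [5], "dihedral_ranges": [(-10, 10)]}
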
Code example #3
def test_torsiondrive_submissions(fractal_compute_server, specification):
    """
    Test submitting a torsiondrive dataset and computing it.
    """

    client = FractalClient(fractal_compute_server)

    qc_spec, driver = specification
    program = qc_spec["program"]
    if not has_program(program):
        pytest.skip(f"Program '{program}' not found.")

    molecules = Molecule.from_smiles("CO")

    factory = TorsiondriveDatasetFactory(driver=driver)
    factory.add_qc_spec(**qc_spec,
                        spec_name="default",
                        spec_description="test",
                        overwrite=True)

    dataset = factory.create_dataset(
        dataset_name=f"Test torsiondrives info {program}, {driver}",
        molecules=molecules,
        description="Test torsiondrive dataset",
        tagline="Testing torsiondrive datasets",
    )

    # force a metadata validation error
    dataset.metadata.long_description = None

    with pytest.raises(DatasetInputError):
        dataset.submit(client=client)

    # re-add the description so we can submit the data
    dataset.metadata.long_description = "Test basics dataset"

    # now submit again
    dataset.submit(client=client)

    fractal_compute_server.await_services(max_iter=50)

    # make sure all of the results are complete
    ds = client.get_collection("TorsionDriveDataset", dataset.dataset_name)

    # check the metadata
    meta = Metadata(**ds.data.metadata)
    assert meta == dataset.metadata

    # check the provenance
    assert dataset.provenance == ds.data.provenance

    # check the qc spec
    for qc_spec in dataset.qc_specifications.values():
        spec = ds.data.specs[qc_spec.spec_name]

        assert spec.description == qc_spec.spec_description
        assert spec.qc_spec.driver == dataset.driver
        assert spec.qc_spec.method == qc_spec.method
        assert spec.qc_spec.basis == qc_spec.basis
        assert spec.qc_spec.program == qc_spec.program

        # check the keywords
        keywords = client.query_keywords(spec.qc_spec.keywords)[0]

        assert keywords.values["maxiter"] == qc_spec.maxiter
        assert keywords.values["scf_properties"] == qc_spec.scf_properties

        # query the dataset
        ds.query(qc_spec.spec_name)

        for index in ds.df.index:
            record = ds.df.loc[index].default
            # this will take some time so make sure it is running with no error
            assert record.status.value == "COMPLETE", record.dict()
            assert record.error is None
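            # a full 360 degree scan at the factory's default 15 degree grid spacing gives 360 / 15 = 24 points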
            assert len(record.final_energy_dict) == 24
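
The specification parameter used by the last test is not shown here. As an illustration only, it unpacks to a (qc_spec, driver) pair, so the parametrization above the test might look like the sketch below; the exact methods, bases, and ids used by the original suite are assumptions.

# illustrative (qc_spec, driver) pairs; not the original suite's parametrization
@pytest.mark.parametrize(
    "specification",
    [
        pytest.param(
            ({"method": "openff-1.0.0", "basis": "smirnoff", "program": "openmm"}, "gradient"),
            id="SMIRNOFF openff-1.0.0 gradient",
        ),
        pytest.param(
            ({"method": "hf", "basis": "3-21g", "program": "psi4"}, "gradient"),
            id="PSI4 hf/3-21g gradient",
        ),
    ],
)
def test_torsiondrive_submissions(fractal_compute_server, specification):
    ...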