Example #1
def test_generate_opt_in():
    """
    Test generating the optimize.in file with various input settings.
    """
    fb = ForceBalanceOptimizer(penalty_type="L1", max_iterations=150)

    # make sure an error is raised if the targets were not set
    with temp_directory():
        with pytest.raises(TargetNotSetError):
            fb.generate_optimize_in(
                priors={"test": 1.23},
                fitting_targets={"AbInitio_SMIRNOFF": ["job1", "job2"]})

        # now set them and run again
        fb.set_optimization_target(AbInitio_SMIRNOFF())
        fb.generate_optimize_in(
            priors={"test": 1.23},
            fitting_targets={"AbInitio_SMIRNOFF": ["job1", "job2"]})

        # now load in the file and check the attributes
        with open("optimize.in") as opt_in:
            data = opt_in.readlines()
            assert "   test :  1.23\n" in data
            assert "penalty_type L1\n" in data
            assert "maxstep 150\n" in data
            assert "type AbInitio_SMIRNOFF\n" in data
            assert "name job1\n" in data
            assert "name job2\n" in data
Example #2
def test_optimizer_explicit():
    """
    Run the optimizer process in the main thread to make sure it works.
    """
    biphenyl = Molecule.from_file(file_path=get_data("biphenyl.sdf"),
                                  file_format="sdf")
    # now make the schema
    schema = get_fitting_schema(molecules=biphenyl)
    result = TorsionDriveCollectionResult.parse_file(
        get_data("biphenyl.json.xz"))
    schema.update_with_results(results=result)
    # now submit to the executor
    execute = Executor()
    # we don't need the server here
    # put a task in the opt queue then kill it
    execute.total_tasks = 1
    execute.opt_queue.put(schema.tasks[0])
    with temp_directory():
        execute.optimizer()
        # find the task in the finished queue
        task = execute.finished_tasks.get()
        result_schema = execute.update_fitting_schema(task=task,
                                                      fitting_schema=schema)
        smirks = result_schema.tasks[0].final_smirks
        # make sure they have been updated
        for smirk in smirks:
            for term in smirk.terms.values():
                assert float(term.k.split()[0]) != 1e-5
Example #3
def test_adding_params_parameterize_flag():
    """
    Test adding new smirks patterns with cosmetic attributes.
    """

    ff = ForceFieldEditor(forcefield_name="openff-1.0.0.offxml")
    # add an atom smirks for boron
    boron = AtomSmirks(smirks="[#5:1]",
                       parameterize={"epsilon"},
                       atoms={(0,)},
                       epsilon=0.04,
                       rmin_half=3)
    # add boron with the flag
    ff.add_smirks(smirks=[boron, ], parameterize=True)
    with temp_directory():
        ff.forcefield.to_file(filename="boron.offxml")

        # this should fail because the cosmetic flag was added
        with pytest.raises(SMIRNOFFSpecError):
            _ = ForceField("boron.offxml", allow_cosmetic_attributes=False)

        boron_ff = ForceField("boron.offxml", allow_cosmetic_attributes=True)
        # now look for the parameter we added
        boron_param = boron_ff.get_parameter_handler(
            "vdW"
        ).parameters["[#5:1]"]
        # now make sure it has the param flag
        param_dict = boron_param.__dict__
        assert param_dict["_cosmetic_attribs"] == ["parameterize"]
        assert param_dict["_parameterize"] == "epsilon"
Example #4
def test_forcebalance_collect_results():
    """
    Test collecting results from a successful run that updated the parameters.
    """
    workflow = biphenyl_workflow(target=AbInitio_SMIRNOFF)
    # first make sure the target smirks are set to the default value
    target_smirks = workflow.target_smirks
    for smirk in target_smirks:
        for param in smirk.terms.values():
            # starting value
            assert param.k == "1.048715180139 * mole**-1 * kilocalorie"

    # set up the dummy output folder
    with temp_directory():
        # copy the file over
        shutil.copy(get_data("complete.out"), "optimize.out")
        results_folder = os.path.join("result", "optimize")
        os.makedirs(results_folder, exist_ok=True)
        ff_path = os.path.join(results_folder, "bespoke.offxml")
        shutil.copy(get_data("bespoke.offxml"), ff_path)
        fb = ForceBalanceOptimizer()
        result_workflow = fb.collect_results(schema=workflow)
        # make sure the smirks have been updated
        new_smirks = result_workflow.final_smirks
        for smirk in new_smirks:
            for param in smirk.terms.values():
                assert param.k != "1.048715180139 * mole**-1 * kilocalorie"
Example #5
def test_torsionprofile_metadata():
    """
    Make sure that when using the torsionprofile target we make the metadata.json file.
    """
    from openff.qcsubmit.serializers import deserialize
    torsion_target = TorsionProfile_SMIRNOFF()
    target_schema = biphenyl_target(target=torsion_target)
    # now load in a scan result we have saved
    result_data = TorsionDriveCollectionResult.parse_file(
        get_data("biphenyl.json.xz"))
    # now try and update the results
    target_schema.update_with_results(results=result_data)
    assert target_schema.ready_for_fitting is True
    # now try and prep for fitting
    with temp_directory():
        torsion_target.prep_for_fitting(fitting_target=target_schema)
        # we should only have one torsion drive to do here
        folders = os.listdir(".")
        assert len(folders) == 1
        target_files = os.listdir(folders[0])
        assert "molecule.pdb" in target_files
        assert "scan.xyz" in target_files
        assert "molecule.mol2" in target_files
        assert "qdata.txt" in target_files
        assert "metadata.json" in target_files

        metadata = deserialize(
            file_name=os.path.join(folders[0], "metadata.json"))
        # now make sure the json is complete
        entry = target_schema.tasks[0]
        assert entry.dihedrals[0] == tuple(metadata["dihedrals"][0])
        for data in entry.reference_data():
            assert data.extras["dihedral_angle"] in metadata[
                "torsion_grid_ids"]
Example #6
def test_serializer_round_trips(serializer):
    """
    Test serializing data to and from file with no compression.
    """
    # get data in a dict format
    data = deserialize(get_data("settings_with_workflow.json"))
    file_name = "settings_with_workflow" + serializer
    # now export to file and back
    with temp_directory():
        serialize(serializable=data, file_name=file_name, compression=None)
        deserialized_data = deserialize(file_name=file_name)
        assert data == deserialized_data
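
The serializer argument here is supplied by a pytest parametrization that the snippet does not show; a plausible sketch, assuming JSON and YAML are the supported formats:

import pytest


@pytest.mark.parametrize("serializer", [".json", ".yaml"])
def test_serializer_round_trips(serializer):
    ...
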
Example #7
def test_abinitio_fitting_prep_no_gradient():
    """
    Test preparing for fitting using the abinitio target.
    """

    torsion_target = AbInitio_SMIRNOFF()
    torsion_target.fit_gradient = False
    target_schema = biphenyl_target(target=torsion_target)
    biphenyl = Molecule.from_file(file_path=get_data("biphenyl.sdf"),
                                  file_format="sdf")
    # now load in a scan result we have saved
    result_data = TorsionDriveCollectionResult.parse_file(
        get_data("biphenyl.json.xz"))
    # now try and update the results
    target_schema.update_with_results(results=result_data)
    assert target_schema.ready_for_fitting is True
    # now try and prep for fitting
    with temp_directory():
        torsion_target.prep_for_fitting(fitting_target=target_schema)
        # we should only have one torsion drive to do here
        folders = os.listdir(".")
        assert len(folders) == 1
        target_files = os.listdir(folders[0])
        assert "molecule.pdb" in target_files
        assert "scan.xyz" in target_files
        assert "molecule.mol2" in target_files
        assert "qdata.txt" in target_files
        # now we need to make sure the pdb order was not changed
        mol = Molecule.from_file(os.path.join(folders[0], "molecule.pdb"),
                                 file_format="pdb")
        isomorphic, atom_map = Molecule.are_isomorphic(biphenyl,
                                                       mol,
                                                       return_atom_map=True)
        assert isomorphic is True
        assert atom_map == dict((i, i) for i in range(biphenyl.n_atoms))

        # also make sure charges are in the mol2 file
        mol = Molecule.from_file(os.path.join(folders[0], "molecule.mol2"),
                                 "mol2")
        assert mol.partial_charges is not None

        # make sure the scan coords and energies match
        qdata_file = os.path.join(folders[0], "qdata.txt")
        coords, energies, gradients = read_qdata(qdata_file=qdata_file)
        # make sure no gradients were written
        assert not gradients
        reference_data = target_schema.tasks[0].reference_data()
        for i, (coord, energy) in enumerate(zip(coords, energies)):
            # find the reference data
            data = reference_data[i]
            assert data.energy == energy
            assert coord == data.molecule.geometry.flatten().tolist()
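
read_qdata is another project helper; ForceBalance's qdata.txt stores one COORDS, ENERGY and (optionally) GRADIENT record per frame. A minimal parser sketch, assuming that record layout:

def read_qdata(qdata_file):
    # parse flat coordinate lists, energies and gradients from qdata.txt
    coords, energies, gradients = [], [], []
    with open(qdata_file) as qdata:
        for line in qdata:
            tokens = line.split()
            if not tokens:
                continue
            if tokens[0] == "COORDS":
                coords.append([float(value) for value in tokens[1:]])
            elif tokens[0] == "ENERGY":
                energies.append(float(tokens[1]))
            elif tokens[0] == "GRADIENT":
                gradients.append([float(value) for value in tokens[1:]])
    return coords, energies, gradients
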
Example #8
def test_loading_forcefields():
    """
    Test that loading the forcefield always strips out any constraints.
    """

    # load in the initial FF with constraints
    ff = ForceFieldEditor(forcefield_name="openff-1.0.0.offxml")

    with temp_directory():
        # write out the ff
        ff.forcefield.to_file(filename="bespoke.offxml")
        # read the file and look for the constraints tag
        new_ff = ForceField("bespoke.offxml")
        assert "Constraints" not in new_ff._parameter_handlers
Example #9
def test_compression_serialization_round_trip_file_name(
        serialization, compression):
    """
    Test all of the different serialization and compression combinations.
    Here the compression is in the file name.
    """
    # get data in a dict format
    data = deserialize(get_data("settings_with_workflow.json"))
    file_name = "".join(
        ["settings_with_workflow", ".", serialization, ".", compression])
    # now export the file and read back
    with temp_directory():
        serialize(serializable=data, file_name=file_name, compression=None)
        deserialized_data = deserialize(file_name=file_name)
        assert data == deserialized_data
Example #10
def test_forcebalance_collect_result_error():
    """
    Test trying to collect the result when the workflow has an error.
    """
    workflow = biphenyl_workflow(target=AbInitio_SMIRNOFF)
    # we need to set up a dummy folder with the error
    with temp_directory():
        # copy the file over
        shutil.copy(get_data("error.out"), "optimize.out")
        results_folder = os.path.join("result", "optimize")
        os.makedirs(results_folder, exist_ok=True)
        ff_path = os.path.join(results_folder, "bespoke.offxml")
        shutil.copy(get_data("bespoke.offxml"), ff_path)
        fb = ForceBalanceOptimizer()
        result_workflow = fb.collect_results(schema=workflow)
        assert result_workflow.status == Status.ConvergenceError
Example #11
def test_workflow_export_import():
    """
    Test exporting and importing a workflow.
    """

    workflow = WorkflowFactory()
    # add fb and a target with non standard settings
    fb = ForceBalanceOptimizer(
        penalty_type="L1",
        optimization_targets=[AbInitio_SMIRNOFF(fragmentation=False)])
    workflow.set_optimizer(optimizer=fb)

    with temp_directory():
        workflow.export_workflow(file_name="test.json")
        # now read it back in
        workflow2 = WorkflowFactory.parse_file("test.json")
        assert workflow.dict() == workflow2.dict()
Example #12
def test_forcebalance_readoutput(output):
    """
    Test reading the output of a forcebalance run.
    """
    file_name, status = output
    with temp_directory():
        # copy the file over
        shutil.copy(get_data(file_name), "optimize.out")
        # now we have to make some dummy folders
        results_folder = os.path.join("result", "optimize")
        os.makedirs(results_folder, exist_ok=True)
        with open(os.path.join(results_folder, "bespoke.offxml"), "w") as xml:
            xml.write("test")
        fb = ForceBalanceOptimizer()
        result = fb.read_output()
        assert result["status"] == status
        assert "bespoke.offxml" in result["forcefield"]
Example #13
def test_forcebalance_optimize(optimization_target):
    """
    Test running the full optimization stage for a simple biphenyl system using different targets.
    The data has been extracted from qcarchive.
    """
    from openff.qcsubmit.results import TorsionDriveCollectionResult
    workflow = biphenyl_workflow(target=optimization_target)
    with temp_directory():
        # load the computed results and add them to the workflow
        torsiondrive_result = TorsionDriveCollectionResult.parse_file(
            get_data("biphenyl.json.xz"))
        workflow.update_with_results(results=torsiondrive_result)
        # set up the optimizer
        fb = ForceBalanceOptimizer()
        result = fb.optimize(schema=workflow)
        assert result.status == Status.Complete
        new_smirks = result.target_smirks
        for smirk in new_smirks:
            for param in smirk.terms.values():
                assert param.k != "1e-05 * mole**-1 * kilocalorie"
Example #14
def test_exporting_settings_no_workflow(file_type, factory_type):
    """
    Test exporting the settings to different file types.
    """

    with temp_directory():
        factory = factory_type()

        changed_attrs = {"maxiter": 400, "priority":  "super_high", "compute_tag": "test tag"}
        for attr, value in changed_attrs.items():
            setattr(factory, attr, value)

        file_name = "test." + file_type

        factory.export_settings(file_name=file_name)

        with open(file_name) as f:
            data = f.read()
            for value in changed_attrs.values():
                assert str(value) in data
Example #15
def test_adding_new_smirks_types(smirks):
    """
    Test adding new smirks to a forcefield with and without the parameterize flag.
    """

    param, param_id = smirks
    ff = ForceFieldEditor("openff-1.0.0.offxml")
    # now make some new smirks pattern
    ff.add_smirks(smirks=[param, ], parameterize=True)
    # now make sure it was added under the correct parameter handler
    with temp_directory():
        ff.forcefield.to_file(filename="bespoke.offxml")

        new_ff = ForceField("bespoke.offxml", allow_cosmetic_attributes=True)
        parameter = new_ff.get_parameter_handler(
            param.type.value).parameters[param.smirks]
        param_dict = parameter.__dict__
        assert param_dict["_cosmetic_attribs"] == ["parameterize"]
        assert set(param_dict["_parameterize"].split()) == param.parameterize
        # make sure the id is correct
        assert param_id in parameter.id
Example #16
def test_export_workflow_only(file_type, factory_type):
    """
    Test exporting the workflow only from the factory.
    """

    with temp_directory():
        factory = factory_type()

        conformer_gen = workflow_components.StandardConformerGenerator()
        conformer_gen.max_conformers = 100

        factory.add_workflow_component(conformer_gen)

        file_name = "workflow." + file_type
        factory.export_workflow(file_name)

        with open(file_name) as workflow:
            data = workflow.read()
            assert "method" not in data
            assert "basis" not in data
            assert "tag" not in data
Example #17
def test_torsiondrivedataset_export(public_client):
    """
    Make sure that the torsiondrive datasets can be exported.
    """

    with temp_directory():
        result = TorsionDriveCollectionResult.from_server(
            client=public_client,
            spec_name="default",
            dataset_name="TorsionDrive Paper",
            include_trajectory=False,
            final_molecule_only=True)

        result.export_results("dataset.json")

        result2 = TorsionDriveCollectionResult.parse_file("dataset.json")

        assert result.dict(exclude={"collection"}) == result2.dict(
            exclude={"collection"})
        for molecule in result.collection:
            assert molecule in result2.collection
Example #18
def test_basicdataset_export_round_trip(public_client):
    """
    Test basic dataset round tripping to file.
    """

    with temp_directory():
        result = BasicCollectionResult.from_server(
            client=public_client,
            dataset_name="OpenFF Gen 2 Opt Set 1 Roche",
            spec_name="default",
            method="b3lyp-d3bj",
            basis="dzvp",
        )

        result.export_results("dataset.json")

        result2 = BasicCollectionResult.parse_file("dataset.json")

        assert result.dict(exclude={"collection"}) == result2.dict(
            exclude={"collection"})
        for molecule in result.collection:
            assert molecule in result2.collection
Example #19
def test_optimization_export_round_trip_compression(public_client,
                                                    compression):
    """Test exporting the results to file and back."""

    with temp_directory():
        result = OptimizationCollectionResult.from_server(
            client=public_client,
            spec_name="default",
            dataset_name="OpenFF Gen 2 Opt Set 1 Roche",
            include_trajectory=False,
            final_molecule_only=True)
        file_name = "dataset.json"
        result.export_results(filename=file_name, compression=compression)

        # now load the dataset back in
        if compression is not None:
            name = "".join([file_name, ".", compression])
        else:
            name = file_name
        result2 = OptimizationCollectionResult.parse_file(name)

        assert result.dict(exclude={"collection"}) == result2.dict(
            exclude={"collection"})
Example #20
def test_exporting_settings_workflow(file_type, factory_type):
    """
    Test exporting the settings and a workflow to the different file types.
    """

    with temp_directory():

        factory = factory_type()
        changed_attrs = {"maxiter": 400, "priority":  "super_high", "compute_tag": "test tag"}
        for attr, value in changed_attrs.items():
            setattr(factory, attr, value)

        conformer_gen = workflow_components.StandardConformerGenerator()
        conformer_gen.max_conformers = 100
        factory.add_workflow_component(conformer_gen)

        file_name = "test." + file_type

        factory.export_settings(file_name=file_name)

        with open(file_name) as f:
            data = f.read()
            assert conformer_gen.component_name in data
            assert str(conformer_gen.max_conformers) in data