Example 1
    def activate_client(
        self,
        client: Optional[FractalClient],
    ) -> Tuple["FractalServer", FractalClient]:
        """
        Activate the connection to the chosen qcarchive instance or spin up a snowflake when requested.

        Parameters
        ----------

        Notes
        -----
            This can be a snowflake server or a local qcarchive instance error cycling should still work.
        """
        if isinstance(client, FractalClient):
            # we cannot get the server from the client instance, so we just return its server information
            return client.server_information(), client
        else:
            from qcfractal import FractalSnowflake

            # TODO fix to spin up workers with settings
            server = FractalSnowflake(max_workers=self.max_workers)
            print(server)
            client = server.client()

            return server, client
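
A minimal usage sketch for the method above. The owning class name (`Submitter` here) is hypothetical; only the `max_workers` attribute and the two return branches are taken from the snippet itself.

# Hypothetical caller; `Submitter` stands in for whatever class defines
# activate_client and carries the `max_workers` attribute used above.
submitter = Submitter(max_workers=2)

# Passing None spins up a local FractalSnowflake and returns it with its client.
server, client = submitter.activate_client(None)

# Passing an existing FractalClient reuses that connection; the first element
# is then the client's server information rather than a server object.
info, client = submitter.activate_client(client)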
Example 2
def test_cached_fractal_client_snowflake():

    from qcfractal import FractalSnowflake

    snowflake = FractalSnowflake(start_server=False)
    client = cached_fractal_client(snowflake.client().address)

    assert client is not None
Example 3
import pytest


@pytest.fixture
def empty_client():
    pytest.importorskip("qcfractal.interface")
    import qcfractal.interface as ptl
    from qcfractal import FractalSnowflake

    with FractalSnowflake() as server:
        yield ptl.FractalClient(server)
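
With the generator above registered as a pytest fixture, a test could consume it like this; the test name and assertion are illustrative only.

def test_snowflake_client_responds(empty_client):
    # The fixture yields a FractalClient bound to a temporary snowflake server;
    # server_information() should answer with basic metadata about that server.
    info = empty_client.server_information()
    assert isinstance(info, dict)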
Example 4
import pytest


@pytest.fixture
def fractal_client(postgres_server):
    pytest.importorskip("qcfractal.postgres_harness")
    from qcfractal import FractalSnowflake, interface as ptl

    with FractalSnowflake(
        max_workers=1,
        storage_project_name="test_psiresp",
        storage_uri=postgres_server.database_uri(),
        reset_database=False,
        start_server=False,
    ) as server:
        yield ptl.FractalClient(server)
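
The `postgres_server` fixture itself is not shown. A plausible sketch, assuming qcfractal's `TemporaryPostgres` harness; the fixture scope, constructor argument, and `stop()` call follow older qcfractal releases and are assumptions here.

import pytest


@pytest.fixture(scope="session")
def postgres_server():
    # Throwaway PostgreSQL instance serving as the snowflake's storage backend.
    # NOTE: database_name and stop() are assumed from older qcfractal releases.
    from qcfractal.postgres_harness import TemporaryPostgres

    postgres = TemporaryPostgres(database_name="test_psiresp")
    try:
        yield postgres
    finally:
        postgres.stop()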
Example 5
import qcportal as ptl
from qcfractal import FractalSnowflake
import pandas as pd
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-d", "--dry-run", action="store_true")
args = parser.parse_args()

SNOWFLAKE = args.dry_run
if SNOWFLAKE:
    snowflake = FractalSnowflake()
    client = snowflake.client()
else:
    client = ptl.FractalClient.from_file()
print(client)

# The new subset you want to add.
dataset_name = "TODO"
ds = ptl.collections.ReactionDataset(dataset_name, client=client)

# Add the citation for the source paper.
ds.data.metadata["citations"] = [
    ptl.models.Citation(
        bibtex="""
TODO
""",
        acs_citation="TODO",
        url="TODO",
        doi="TODO",
    )
]
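
The script is truncated at this point; a typical continuation would persist the collection with the standard qcportal `Collection.save()` call, sketched below.

# Write the collection to whichever server the client points at: the local
# snowflake on a dry run, the configured QCArchive server otherwise.
ds.save()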
Example 6
    def execute_with_snowflake(self,
                               input_paths,
                               output_directory,
                               season,
                               ncores=None,
                               memory=None,
                               dataset_name='Benchmark Scratch',
                               delete_existing=False,
                               keep_existing=True,
                               recursive=False):
        """Execute optimizations from the given SDF molecules locally on this host.

        Optimizations are performed in series for the molecules given,
        with `ncores` and `memory` setting the resource constraints for each optimization.

        Parameters
        ----------
        input_paths : iterable of Path-like
            Paths to SDF files or directories; if directories, all SDF files they contain are loaded, recursively.
        output_directory : str
            Directory path to deposit exported data.
        season : str
            Benchmark season identifier. Indicates the mix of compute specs to utilize.
        ncores : int
            Number of concurrent cores to use for each optimization.
        memory : float
            Amount of memory to allocate to each optimization.
        dataset_name : str
            Dataset name to extract from the QCFractal server.
        delete_existing : bool (False)
            If True, delete existing directory if present.
        keep_existing : bool (True)
            If True, keep existing files in the export directory.
            Files corresponding to server data will not be re-exported.
            Relies *only* on the filepaths of existing files to determine a match.
        recursive : bool
            If True, recursively load SDFs from any directories given in `input_paths`.
    
        """
        from openff.qcsubmit.factories import OptimizationDatasetFactory

        # fail early if output_directory already exists and we aren't deleting it
        if os.path.isdir(output_directory):
            if delete_existing:
                shutil.rmtree(output_directory)
            elif keep_existing:
                pass
            else:
                raise Exception(
                    f'Output directory {output_directory} already exists. '
                    'Specify `delete_existing=True` to remove, or `keep_existing=True` to tolerate'
                )

        # get paths to submit, using output directory contents to inform choice
        # for the given specs, if *any* expected output files are not present, we submit corresponding input file
        if keep_existing:
            in_out_path_map = self._source_specs_output_paths(
                input_paths,
                SEASONS[season],
                output_directory,
                recursive=recursive)

            input_paths = []
            for input_file, output_files in in_out_path_map.items():
                if not all(map(os.path.exists, output_files)):
                    input_paths.append(input_file)

        from time import sleep

        from tqdm import trange
        from qcfractal import FractalSnowflake

        # start up Snowflake
        server = FractalSnowflake(max_workers=ncores)

        client = server.client()
        fractal_uri = server.get_address()

        # submit molecules
        self.submit_molecules(fractal_uri,
                              input_paths,
                              season,
                              dataset_name=dataset_name)

        df = self.get_optimization_status(fractal_uri,
                                          dataset_name,
                                          client=client)
        progbar = trange(df.size)

        complete = 0
        while not self.stop:
            df = self.get_optimization_status(fractal_uri,
                                              dataset_name,
                                              client=client)

            # write out what we can
            self.export_molecule_data(fractal_uri,
                                      output_directory,
                                      dataset_name=dataset_name,
                                      delete_existing=False,
                                      keep_existing=True)

            # break if complete
            complete_i = df.applymap(
                lambda x: x.status.value == 'COMPLETE').sum().sum()
            progbar.update(complete_i - complete)
            complete = complete_i
            if complete == df.size:
                break
            sleep(10)

        # one final export, just in case some completed since last write
        self.export_molecule_data(fractal_uri,
                                  output_directory,
                                  dataset_name=dataset_name,
                                  delete_existing=False,
                                  keep_existing=True)

        # stop the server and all its worker processes
        server.stop()
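
A hypothetical invocation of the method above. The `executor` object and the season label are placeholders; only the parameter names and the `SEASONS[season]` lookup come from the snippet itself.

# `executor` is an instance of whatever class defines execute_with_snowflake;
# the season string must be a valid key of its SEASONS mapping (placeholder here).
executor.execute_with_snowflake(
    input_paths=["molecules/"],         # directory of input SDF files
    output_directory="results",         # exported data lands here
    season="1:1",                       # placeholder season identifier
    ncores=4,
    delete_existing=False,
    keep_existing=True,
)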