Example #1
import argparse

import boutvecma
import easyvvuq as uq
import matplotlib.pyplot as plt
from dask_jobqueue import SLURMCluster

if __name__ == "__main__":

    parser = argparse.ArgumentParser(description="EasyVVUQ applied to BOUT++")
    parser.add_argument(
        "--batch",
        "-b",
        help="Run on a batch (SLURM) system",
        action="store_true",
        default=False,
    )
    args = parser.parse_args()

    campaign = uq.CampaignDask(name="Conduction.")
    print(f"Running in {campaign.campaign_dir}")
    encoder = boutvecma.BOUTEncoder(
        template_input="models/conduction/data/BOUT.inp")
    decoder = boutvecma.BOUTDecoder(variables=["T"])
    params = {
        "conduction:chi": {
            "type": "float",
            "min": 0.0,
            "max": 1e3,
            "default": 1.0
        },
        "T:scale": {
            "type": "float",
            "min": 0.0,
            "max": 1e3,
Example #2
parser.add_argument("--local", "-l", action='store_true', default=False)
args = parser.parse_args()

if args.local:
    print('Running locally')
    from dask.distributed import Client, LocalCluster
else:
    print('Running using SLURM')
    from dask.distributed import Client
    from dask_jobqueue import SLURMCluster

if __name__ == '__main__':  ### This is needed if you are using a local cluster; see https://github.com/dask/dask/issues/3877#issuecomment-425692984

    time_start = time.time()
    # Set up a fresh campaign called "fusion_pce."
    my_campaign = uq.CampaignDask(name='fusion_pce.')

    # Define parameter space
    params = {
        "Qe_tot": {
            "type": "float",
            "min": 1.0e6,
            "max": 50.0e6,
            "default": 2e6
        },
        "H0": {
            "type": "float",
            "min": 0.00,
            "max": 1.0,
            "default": 0
        },
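The excerpt stops inside the parameter dictionary; the local/SLURM branch at the top only selects which Dask classes to import. Below is a hedged sketch of how the matching cluster and client are then typically built and used, following the same pattern as the next example; the queue name and resource sizes are placeholders.

    # ... remaining parameters, encoder/decoder, app, and sampler setup ...

    if args.local:
        cluster = LocalCluster(threads_per_worker=1)
    else:
        # placeholder SLURM queue and resources
        cluster = SLURMCluster(queue='some_queue', cores=8, memory='8 GB', processes=8)
        cluster.scale(32)
    client = Client(cluster)

    # run every prepared run directory through the Dask client
    import os
    cmd = f"{os.getcwd()}/fusion_model.py fusion_in.json"  # same driver as the MC example below
    my_campaign.apply_for_each_run_dir(
        uq.actions.ExecuteLocal(cmd, interpret='python3'), client)
    client.close()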
Example #3
import os
import time

import numpy as np
import easyvvuq as uq


def run_MC_case(n_mc, local=True, dask=True, batch_size=os.cpu_count()):
    """
    Inputs:
        n_mc: the number of MC samples
        local: if using Dask, whether to use the local option (True)
        dask: whether to use dask (True)
        batch_size: for the non-Dask option, number of cases to run in parallel (default: os.cpu_count())
    Outputs:
        results_df: pandas DataFrame containing inputs to and outputs from the model
        my_campaign: EasyVVUQ MC campaign object
        my_sampler: EasyVVUQ RandomSampler object
    """

    times = np.zeros(9)

    time_start = time.time()
    time_start_whole = time_start
    # Set up a fresh campaign called "fusion_mc."
    if dask:
        my_campaign = uq.CampaignDask(name='fusion_mc.')
    else:
        my_campaign = uq.Campaign(name='fusion_mc.')

    # Define parameter space
    params = define_params()

    # Create an encoder and decoder for PCE test app
    encoder = uq.encoders.GenericEncoder(template_fname='fusion.template',
                                         delimiter='$',
                                         target_filename='fusion_in.json')

    decoder = uq.decoders.SimpleCSV(
        target_filename="output.csv",
        output_columns=["te", "ne", "rho", "rho_norm"])

    # Add the app (automatically set as current app)
    my_campaign.add_app(name="fusion",
                        params=params,
                        encoder=encoder,
                        decoder=decoder)

    time_end = time.time()
    times[1] = time_end - time_start
    print('Time for phase 1 = %.3f' % (times[1]))

    time_start = time.time()
    # Create the sampler
    vary = define_vary()

    # Associate a sampler with the campaign
    my_sampler = uq.sampling.RandomSampler(vary=vary, max_num=n_mc)
    my_campaign.set_sampler(my_sampler)

    # Will draw all (of the finite set of samples)
    my_campaign.draw_samples()
    print('Number of samples = %s' % my_campaign.get_active_sampler().count)

    time_end = time.time()
    times[2] = time_end - time_start
    print('Time for phase 2 = %.3f' % (times[2]))

    time_start = time.time()
    # Create and populate the run directories
    my_campaign.populate_runs_dir()

    time_end = time.time()
    times[3] = time_end - time_start
    print('Time for phase 3 = %.3f' % (times[3]))

    time_start = time.time()
    # Run the cases
    cwd = os.getcwd().replace(' ', '\\ ')  # deal with ' ' in the path
    cmd = f"{cwd}/fusion_model.py fusion_in.json"

    if dask:
        if local:
            print('Running locally')
            import multiprocessing.popen_spawn_posix
            from dask.distributed import Client, LocalCluster
            cluster = LocalCluster(threads_per_worker=1)
            client = Client(cluster)
        else:
            print('Running using SLURM')
            from dask.distributed import Client
            from dask_jobqueue import SLURMCluster
            cluster = SLURMCluster(
                queue='p.tok.openmp',
                cores=8,
                memory='8 GB',
                processes=8,
                job_extra=[
                    '--qos=p.tok.openmp.2h',
                    '--mail-type=end',
                    '[email protected]',
                    '-t 2:00:00',
                ],
            )
            cluster.scale(32)
            print(cluster)
            print(cluster.job_script())
            client = Client(cluster)
        print(client)
        my_campaign.apply_for_each_run_dir(
            uq.actions.ExecuteLocal(cmd, interpret='python3'), client)

        client.close()
        client.shutdown()
    else:  # in case there is a problem with dask
        execution = my_campaign.apply_for_each_run_dir(
            uq.actions.ExecuteLocalV2(cmd, interpret='python3'),
            batch_size=batch_size)
        execution.start()
        while my_campaign.get_active_sampler().count != execution.progress()['finished']:
            print(execution.progress())
            time.sleep(1)
        print(execution.progress())

    time_end = time.time()
    times[4] = time_end - time_start
    print('Time for phase 4 = %.3f' % (times[4]))

    time_start = time.time()
    # Collate the results
    my_campaign.collate()
    results_df = my_campaign.get_collation_result()

    return results_df, my_campaign, my_sampler
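A short usage sketch for the function above; the sample count is arbitrary and the local Dask option is assumed.

if __name__ == '__main__':
    # 500 Monte Carlo samples, executed through a local Dask cluster
    results_df, my_campaign, my_sampler = run_MC_case(500, local=True, dask=True)
    print(results_df.describe())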
Example #4
import os
import easyvvuq as uq
import chaospy as cp
from dask.distributed import Client
from dask_jobqueue import SLURMCluster

if __name__ == '__main__':
    # Set up a fresh campaign called "coffee_pce"
    my_campaign = uq.CampaignDask(name='coffee_pce')
    # Define parameter space
    params = {
        "temp_init": {"type": "float", "min": 0.0, "max": 100.0, "default": 95.0},
        "kappa": {"type": "float", "min": 0.0, "max": 0.1, "default": 0.025},
        "t_env": {"type": "float", "min": 0.0, "max": 40.0, "default": 15.0},
        "out_file": {"type": "string", "default": "output.csv"}
    }
    # Create an encoder, decoder and collater for PCE test app
    encoder = uq.encoders.GenericEncoder(
        template_fname='cooling.template',
        delimiter='$',
        target_filename='cooling_in.json')

    decoder = uq.decoders.SimpleCSV(target_filename="output.csv",
                                    output_columns=["te"])

    collater = uq.collate.AggregateSamples(average=False)

    # Add the app (automatically set as current app)
    my_campaign.add_app(name="cooling",
                        params=params,
                        encoder=encoder,
                        decoder=decoder,
                        collater=collater)
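The listing ends here. A hedged sketch of how such a PCE campaign is commonly finished on SLURM follows; the input distributions, polynomial order, queue settings, and the cooling_model.py driver are assumptions rather than the original code.

    # assumed uniform input distributions and polynomial order
    vary = {
        "kappa": cp.Uniform(0.025, 0.075),
        "t_env": cp.Uniform(15.0, 25.0),
    }
    my_sampler = uq.sampling.PCESampler(vary=vary, polynomial_order=3)
    my_campaign.set_sampler(my_sampler)
    my_campaign.draw_samples()
    my_campaign.populate_runs_dir()

    # placeholder SLURM queue and resources; adapt to the target machine
    cluster = SLURMCluster(queue='some_queue', cores=8, memory='8 GB', processes=8)
    cluster.scale(16)
    client = Client(cluster)

    # cooling_model.py is an assumed driver script for the cooling app
    cmd = f"{os.getcwd()}/cooling_model.py cooling_in.json"
    my_campaign.apply_for_each_run_dir(
        uq.actions.ExecuteLocal(cmd, interpret='python3'), client)
    client.close()

    # collate results and run the PCE analysis on the "te" output column
    my_campaign.collate()
    analysis = uq.analysis.PCEAnalysis(sampler=my_sampler, qoi_cols=["te"])
    my_campaign.apply_analysis(analysis)
    results = my_campaign.get_last_analysis()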