Example 1
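A wrapper around SynthSimulationWorkspace.calculate that adds a short random delay before delegating the computation.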
def calculate(config, seed):
    """
    Calculate the fine and coarse samples and extract their results.
    :param config: dictionary containing the simulation configuration
    :param seed: random seed for the sample
    :return: result of SynthSimulationWorkspace.calculate
    """
    sleep(random() / 10)  # simulate a short, randomized workload
    return SynthSimulationWorkspace.calculate(config, seed)
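These snippets omit their import headers. A minimal sketch of the imports they share, assuming the sampler, storage, pool and moment classes come from the mlmc package (whose exact module paths are not shown here):

import os
import shutil
from random import random
from time import sleep

import numpy as np
import scipy.stats as stats
import yaml

# Sampler, Legendre, Memory, SampleStorageHDF, OneProcessPool, ProcessPool,
# ThreadPool, SamplingPoolPBS, QuantityEstimate, Estimate and
# SynthSimulationWorkspace are imported from the mlmc package; the exact
# module paths depend on the installed mlmc version.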
Example 2
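A thread-pool test: a SynthSimulationWorkspace is sampled through a Sampler backed by a four-worker ThreadPool with in-memory storage, further samples are added until a target variance is reached, and the first Legendre moments are checked.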
def thread_test():
    np.random.seed(3)
    n_moments = 5
    distr = stats.norm(loc=1, scale=2)

    step_range = [0.01, 0.001, 0.0001]

    os.chdir(os.path.dirname(os.path.realpath(__file__)))
    work_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                            '_test_tmp')
    if os.path.exists(work_dir):
        shutil.rmtree(work_dir)
    os.makedirs(work_dir)
    shutil.copyfile('synth_sim_config.yaml',
                    os.path.join(work_dir, 'synth_sim_config.yaml'))

    simulation_config = {
        "config_yaml": os.path.join(work_dir, 'synth_sim_config.yaml')
    }
    simulation_factory = SynthSimulationWorkspace(simulation_config)

    sample_storage = Memory()
    sampling_pool = ThreadPool(4, work_dir=work_dir)

    # Plan and compute samples
    sampler = Sampler(sample_storage=sample_storage,
                      sampling_pool=sampling_pool,
                      sim_factory=simulation_factory,
                      step_range=step_range)

    true_domain = distr.ppf([0.0001, 0.9999])
    moments_fn = Legendre(n_moments, true_domain)

    sampler.set_initial_n_samples()
    # sampler.set_initial_n_samples([1000])
    sampler.schedule_samples()
    sampler.ask_sampling_pool_for_samples()

    sampler.target_var_adding_samples(1e-4, moments_fn, sleep=20)
    print("collected samples ", sampler._n_created_samples)

    means, vars = sampler.estimate_moments(moments_fn)

    print("means ", means)
    print("vars ", vars)
    assert means[0] == 1
    assert np.isclose(means[1], 0, atol=1e-2)
    assert vars[0] == 0
    sampler.schedule_samples()
    sampler.ask_sampling_pool_for_samples()

    storage = sampler.sample_storage
    results = storage.sample_pairs()
Example 3
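A PBS run method: samples are stored in an HDF5 file and scheduled through SamplingPoolPBS with a cluster-specific configuration, either freshly generated or renewed from failed samples; moments are then estimated with QuantityEstimate.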
    def run(self, renew=False):
        np.random.seed(3)
        n_moments = 5
        distr = stats.norm(loc=1, scale=2)
        step_range = [0.01, 0.001]

        # Set work dir
        os.chdir(os.path.dirname(os.path.realpath(__file__)))
        shutil.copyfile('synth_sim_config.yaml',
                        os.path.join(self.work_dir, 'synth_sim_config.yaml'))

        simulation_config = {
            "config_yaml": os.path.join(self.work_dir, 'synth_sim_config.yaml')
        }
        simulation_factory = SynthSimulationWorkspace(simulation_config)

        file_path = os.path.join(self.work_dir,
                                 "mlmc_{}.hdf5".format(len(step_range)))
        if self.clean and os.path.exists(file_path):
            os.remove(file_path)

        sample_storage = SampleStorageHDF(file_path=file_path,
                                          append=self.append)
        sampling_pool = SamplingPoolPBS(job_weight=20000000,
                                        work_dir=self.work_dir,
                                        clean=self.clean)

        pbs_config = dict(
            n_cores=1,
            n_nodes=1,
            select_flags=['cgroups=cpuacct'],
            mem='128mb',
            queue='charon_2h',
            home_dir='/storage/liberec3-tul/home/martin_spetlik/',
            pbs_process_file_dir=
            '/auto/liberec3-tul/home/martin_spetlik/MLMC_new_design/src/mlmc',
            python='python3',
            env_setting=[
                'cd {work_dir}', 'module load python36-modules-gcc',
                'source env/bin/activate',
                'pip3 install /storage/liberec3-tul/home/martin_spetlik/MLMC_new_design',
                'module use /storage/praha1/home/jan-hybs/modules',
                'module load python36-modules-gcc', 'module list'
            ])

        sampling_pool.pbs_common_setting(flow_3=True, **pbs_config)

        # Plan and compute samples
        sampler = Sampler(sample_storage=sample_storage,
                          sampling_pool=sampling_pool,
                          sim_factory=simulation_factory,
                          step_range=step_range)

        true_domain = distr.ppf([0.0001, 0.9999])
        moments_fn = Legendre(n_moments, true_domain)

        if renew:
            sampler.ask_sampling_pool_for_samples()
            sampler.renew_failed_samples()
            sampler.ask_sampling_pool_for_samples()
        else:
            sampler.set_initial_n_samples([12, 6])
            # sampler.set_initial_n_samples([1000])
            sampler.schedule_samples()
            sampler.ask_sampling_pool_for_samples()

        q_estimator = QuantityEstimate(sample_storage=sample_storage,
                                       moments_fn=moments_fn,
                                       sim_steps=step_range)

        # target_var = 1e-3
        # sleep = 0
        # add_coef = 0.1
        #
        # # @TODO: test
        # # New estimation according to already finished samples
        # variances, n_ops = q_estimator.estimate_diff_vars_regression(sampler._n_scheduled_samples)
        # n_estimated = new_estimator.estimate_n_samples_for_target_variance(target_var, variances, n_ops,
        #                                                                    n_levels=sampler.n_levels)
        # # Loop until number of estimated samples is greater than the number of scheduled samples
        # while not sampler.process_adding_samples(n_estimated, sleep, add_coef):
        #     # New estimation according to already finished samples
        #     variances, n_ops = q_estimator.estimate_diff_vars_regression(sampler._n_scheduled_samples)
        #     n_estimated = new_estimator.estimate_n_samples_for_target_variance(target_var, variances, n_ops,
        #                                                                        n_levels=sampler.n_levels)

        # print("collected samples ", sampler._n_created_samples)
        means, vars = q_estimator.estimate_moments(moments_fn)

        print("means ", means)
        print("vars ", vars)
Example 4
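A parametrized pytest case that runs the same MLMC setup across combinations of simulation factory, sample storage (in-memory or HDF5) and sampling pool (single- or multi-process); the snippet opens with the HDF5 storage factory used in the parameter list.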
def hdf_storage_factory():
    work_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), '_test_tmp')
    if os.path.exists(work_dir):
        shutil.rmtree(work_dir)
    os.makedirs(work_dir)

    # Create sample storages
    return SampleStorageHDF(file_path=os.path.join(work_dir, "mlmc_test.hdf5"))


@pytest.mark.parametrize("test_case", [(SynthSimulationForTests(simulation_config), Memory(), OneProcessPool()),
                                       (SynthSimulationForTests(simulation_config), Memory(), ProcessPool(4)),
                                       # (SynthSimulationForTests(simulation_config), Memory(), ThreadPool(4)),
                                       (SynthSimulationForTests(simulation_config), hdf_storage_factory(), OneProcessPool()),
                                       (SynthSimulationForTests(simulation_config), hdf_storage_factory(), ProcessPool(4)),
                                       #(SynthSimulationForTests(simulation_config), hdf_storage_factory(), ThreadPool(4)),
                                       (SynthSimulationWorkspace(simulation_config_workspace), Memory(), OneProcessPool(work_dir=work_dir)),
                                       (SynthSimulationWorkspace(simulation_config_workspace), Memory(), ProcessPool(4, work_dir=work_dir)),
                                       #(SynthSimulationWorkspace(simulation_config), storage_memory,  ThreadPool(4, work_dir=work_dir)),
                                       (SynthSimulationWorkspace(simulation_config_workspace), hdf_storage_factory(), OneProcessPool(work_dir=work_dir)),
                                       (SynthSimulationWorkspace(simulation_config_workspace), hdf_storage_factory(), ProcessPool(4, work_dir=work_dir)),
                                       #(SynthSimulationWorkspace(simulation_config), hdf_storage_factory(),  ThreadPool(4, work_dir=work_dir))
                                       ]
                         )
def test_mlmc(test_case):
    np.random.seed(1234)
    n_moments = 5
    step_range = [[0.1], [0.001]]

    simulation_factory, sample_storage, sampling_pool = test_case

    if simulation_factory.need_workspace:
        # assumed body: copy the simulation config into the work dir,
        # mirroring the other workspace-based examples
        os.chdir(os.path.dirname(os.path.realpath(__file__)))
        shutil.copyfile('synth_sim_config.yaml',
                        os.path.join(work_dir, 'synth_sim_config.yaml'))
Example 5
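A five-level PBS sampler test: level step sizes are interpolated geometrically between the two entries of step_range, samples are scheduled through SamplingPoolPBS, and moments of a selected quantity are estimated with Estimate.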
def test_sampler_pbs(work_dir, clean=False, debug=False):
    np.random.seed(3)
    n_moments = 5
    distr = stats.norm(loc=1, scale=2)
    step_range = [0.5, 0.01]
    n_levels = 5

    # if clean:
    #     if os.path.isdir(work_dir):
    #         shutil.rmtree(work_dir, ignore_errors=True)
    os.makedirs(work_dir, mode=0o775, exist_ok=True)

    assert step_range[0] > step_range[1]
    level_parameters = []
    for i_level in range(n_levels):
        if n_levels == 1:
            level_param = 1
        else:
            level_param = i_level / (n_levels - 1)
        level_parameters.append(
            [step_range[0]**(1 - level_param) * step_range[1]**level_param])

    failed_fraction = 0
    simulation_config = dict(distr='norm',
                             complexity=2,
                             nan_fraction=failed_fraction,
                             sim_method='_sample_fn')

    with open(os.path.join(work_dir, 'synth_sim_config.yaml'), "w") as file:
        yaml.dump(simulation_config, file, default_flow_style=False)

    simulation_config = {
        "config_yaml": os.path.join(work_dir, 'synth_sim_config.yaml')
    }
    simulation_factory = SynthSimulationWorkspace(simulation_config)

    hdf_file = os.path.join(work_dir, "mlmc_{}.hdf5".format(len(step_range)))
    if clean and os.path.exists(hdf_file):
        os.remove(hdf_file)

    if clean and os.path.exists(os.path.join(work_dir, "output")):
        shutil.rmtree(os.path.join(work_dir, "output"), ignore_errors=True)

    sample_storage = SampleStorageHDF(file_path=hdf_file)
    sampling_pool = SamplingPoolPBS(work_dir=work_dir, clean=clean)
    #sampling_pool = OneProcessPool()

    shutil.copyfile(
        os.path.join(work_dir, 'synth_sim_config.yaml'),
        os.path.join(sampling_pool._output_dir, 'synth_sim_config.yaml'))

    pbs_config = dict(
        n_cores=1,
        n_nodes=1,
        select_flags=['cgroups=cpuacct'],
        mem='2Gb',
        queue='charon',
        pbs_name='flow123d',
        walltime='72:00:00',
        optional_pbs_requests=[],  # e.g. ['#PBS -m ae', ...]
        home_dir='/auto/liberec3-tul/home/martin_spetlik/',
        python='python3',
        env_setting=[
            'cd $MLMC_WORKDIR',
            'module load python36-modules-gcc',
            'source env/bin/activate',
            # 'pip3 install /storage/liberec3-tul/home/martin_spetlik/MLMC_new_design',
            'module use /storage/praha1/home/jan-hybs/modules',
            'module load python36-modules-gcc',
            'module load flow123d',
            'module list'
        ])

    sampling_pool.pbs_common_setting(flow_3=True, **pbs_config)

    # Plan and compute samples
    sampler = Sampler(sample_storage=sample_storage,
                      sampling_pool=sampling_pool,
                      sim_factory=simulation_factory,
                      level_parameters=level_parameters)

    true_domain = distr.ppf([0.0001, 0.9999])
    moments_fn = Legendre(n_moments, true_domain)

    sampler.set_initial_n_samples([1e7, 5e6, 1e6, 5e5, 1e4])
    #sampler.set_initial_n_samples([1e1, 1e1, 1e1, 1e1, 1e1])
    #sampler.set_initial_n_samples([4, 4, 4, 4, 4])
    sampler.schedule_samples()
    n_running = sampler.ask_sampling_pool_for_samples()

    quantity = mlmc.quantity.make_root_quantity(
        storage=sample_storage, q_specs=sample_storage.load_result_format())
    length = quantity['length']
    time = length[1]
    location = time['10']
    value_quantity = location[0]

    estimator = Estimate(quantity=value_quantity,
                         sample_storage=sample_storage,
                         moments_fn=moments_fn)

    # target_var = 1e-3
    # sleep = 0
    # add_coef = 0.1
    #
    # # @TODO: test
    # # New estimation according to already finished samples
    # variances, n_ops = q_estimator.estimate_diff_vars_regression(sampler._n_scheduled_samples)
    # n_estimated = new_estimator.estimate_n_samples_for_target_variance(target_var, variances, n_ops,
    #                                                                    n_levels=sampler.n_levels)
    # # Loop until number of estimated samples is greater than the number of scheduled samples
    # while not sampler.process_adding_samples(n_estimated, sleep, add_coef):
    #     # New estimation according to already finished samples
    #     variances, n_ops = q_estimator.estimate_diff_vars_regression(sampler._n_scheduled_samples)
    #     n_estimated = new_estimator.estimate_n_samples_for_target_variance(target_var, variances, n_ops,
    #                                                                        n_levels=sampler.n_levels)

    #print("collected samples ", sampler._n_created_samples)
    means, vars = estimator.estimate_moments(moments_fn)
Example 6
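A multi-process variant of the thread-pool test: samples are computed by a four-worker ProcessPool and moments are estimated with QuantityEstimate.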
def multiprocess_test():
    np.random.seed(3)
    n_moments = 5
    distr = stats.norm(loc=1, scale=2)
    step_range = [0.01, 0.001]  #, 0.001, 0.0001]

    os.chdir(os.path.dirname(os.path.realpath(__file__)))
    work_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                            '_test_tmp')
    if os.path.exists(work_dir):
        shutil.rmtree(work_dir)
    os.makedirs(work_dir)
    shutil.copyfile('synth_sim_config.yaml',
                    os.path.join(work_dir, 'synth_sim_config.yaml'))

    simulation_config = {
        "config_yaml": os.path.join(work_dir, 'synth_sim_config.yaml')
    }
    simulation_factory = SynthSimulationWorkspace(simulation_config)

    sample_storage = Memory()
    sampling_pool = ProcessPool(4, work_dir=work_dir)

    # Plan and compute samples
    sampler = Sampler(sample_storage=sample_storage,
                      sampling_pool=sampling_pool,
                      sim_factory=simulation_factory,
                      step_range=step_range)

    true_domain = distr.ppf([0.0001, 0.9999])
    moments_fn = Legendre(n_moments, true_domain)

    sampler.set_initial_n_samples()
    #sampler.set_initial_n_samples([1000])
    sampler.schedule_samples()
    sampler.ask_sampling_pool_for_samples()

    q_estimator = QuantityEstimate(sample_storage=sample_storage,
                                   moments_fn=moments_fn,
                                   sim_steps=step_range)

    target_var = 1e-4
    sleep = 0
    add_coef = 0.1

    # # @TODO: test
    # # New estimation according to already finished samples
    # variances, n_ops = q_estimator.estimate_diff_vars_regression(sampler._n_scheduled_samples)
    # n_estimated = new_estimator.estimate_n_samples_for_target_variance(target_var, variances, n_ops,
    #                                                                    n_levels=sampler.n_levels)
    # # Loop until number of estimated samples is greater than the number of scheduled samples
    # while not sampler.process_adding_samples(n_estimated, sleep, add_coef):
    #     # New estimation according to already finished samples
    #     variances, n_ops = q_estimator.estimate_diff_vars_regression(sampler._n_scheduled_samples)
    #     n_estimated = new_estimator.estimate_n_samples_for_target_variance(target_var, variances, n_ops,
    #                                                                        n_levels=sampler.n_levels)

    print("collected samples ", sampler._n_scheduled_samples)
    means, vars = q_estimator.estimate_moments(moments_fn)

    print("means ", means)
    print("vars ", vars)
    assert means[0] == 1
    assert np.isclose(means[1], 0, atol=1e-2)
    assert vars[0] == 0
    sampler.schedule_samples()
    sampler.ask_sampling_pool_for_samples()

    storage = sampler.sample_storage
    results = storage.sample_pairs()