Code Example #1
def test_mlmc(test_case):
    np.random.seed(1234)
    n_moments = 5
    step_range = [[0.1], [0.001]]

    simulation_factory, sample_storage, sampling_pool = test_case

    if simulation_factory.need_workspace:
        os.chdir(os.path.dirname(os.path.realpath(__file__)))
        shutil.copyfile('synth_sim_config.yaml', os.path.join(work_dir, 'synth_sim_config.yaml'))

    sampler = Sampler(sample_storage=sample_storage, sampling_pool=sampling_pool, sim_factory=simulation_factory,
                      level_parameters=step_range)

    true_domain = distr.ppf([0.0001, 0.9999])
    moments_fn = Legendre(n_moments, true_domain)
    # moments_fn = Monomial(n_moments, true_domain)
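    # Descriptive note (not in the original): the moment basis is built on the
    # 0.01%-99.99% quantile range of the sampled distribution (true_domain above),
    # so nearly all sample values fall inside its domain.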

    sampler.set_initial_n_samples([10, 10])
    # sampler.set_initial_n_samples([10000])
    sampler.schedule_samples()
    sampler.ask_sampling_pool_for_samples()

    target_var = 1e-3
    sleep = 0
    add_coef = 0.1

    quantity = make_root_quantity(sample_storage, q_specs=simulation_factory.result_format())

    length = quantity['length']
    time = length[1]
    location = time['10']
    value_quantity = location[0]
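    # Descriptive note (not in the original): the chained indexing above narrows the
    # root quantity to a single scalar -- result quantity 'length', index 1 (time),
    # '10' (location), 0 (value component) -- which is what the estimator works on.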

    estimator = mlmc.estimator.Estimate(value_quantity, sample_storage, moments_fn)

    # New estimation according to already finished samples
    variances, n_ops = estimator.estimate_diff_vars_regression(sampler._n_scheduled_samples)
    n_estimated = mlmc.estimator.estimate_n_samples_for_target_variance(target_var, variances, n_ops,
                                                                       n_levels=sampler.n_levels)

    # Loop until number of estimated samples is greater than the number of scheduled samples
    while not sampler.process_adding_samples(n_estimated, sleep, add_coef):
        # New estimation according to already finished samples
        variances, n_ops = estimator.estimate_diff_vars_regression(sampler._n_scheduled_samples)
        n_estimated = mlmc.estimator.estimate_n_samples_for_target_variance(target_var, variances, n_ops,
                                                                           n_levels=sampler.n_levels)

    means, vars = estimator.estimate_moments(moments_fn)
    assert means[0] == 1
    assert vars[0] == 0
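These snippets are shown without their import blocks, and names such as work_dir, distr, step_range, ref_means and ref_vars come from module-level fixtures of the original test files. A minimal sketch of the imports the examples appear to rely on (the exact module paths are assumptions based on the GeoMop/MLMC package layout, not copied from the original sources):

import os
import shutil
import time as t

import numpy as np
import scipy.stats as stats
import yaml

import mlmc.estimator
import mlmc.quantity
from mlmc.sampler import Sampler                        # assumed path
from mlmc.sample_storage import Memory                  # assumed path
from mlmc.sample_storage_hdf import SampleStorageHDF    # assumed path
from mlmc.sampling_pool import OneProcessPool, ProcessPool, ThreadPool  # assumed path
from mlmc.sampling_pool_pbs import SamplingPoolPBS      # assumed path
from mlmc.moments import Legendre, Monomial             # assumed path
from mlmc.quantity.quantity import make_root_quantity   # assumed path
from mlmc.estimator import Estimate                     # assumed path
# plus QuantityEstimate and the simulation classes (SynthSimulation,
# SynthSimulationWorkspace, SynthSimulationForTests, FlowSim, Flow123d_WGC2020)
# from their respective modules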
Code Example #2
File: test_sampling_pools.py Project: GeoMop/MLMC
def test_sampling_pools(sampling_pool, simulation_factory):
    n_moments = 5
    np.random.seed(123)
    t.sleep(5)

    work_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                            '_test_tmp')
    if os.path.exists(work_dir):
        shutil.rmtree(work_dir)
    os.makedirs(work_dir)

    if simulation_factory.need_workspace:
        os.chdir(os.path.dirname(os.path.realpath(__file__)))
        shutil.copyfile('synth_sim_config_test.yaml',
                        os.path.join(work_dir, 'synth_sim_config.yaml'))

    sample_storage = SampleStorageHDF(file_path=os.path.join(
        work_dir, "mlmc_{}.hdf5".format(len(step_range))))
    # Plan and compute samples
    sampler = Sampler(sample_storage=sample_storage,
                      sampling_pool=sampling_pool,
                      sim_factory=simulation_factory,
                      level_parameters=step_range)

    true_domain = distr.ppf([0.0001, 0.9999])
    moments_fn = Legendre(n_moments, true_domain)

    sampler.set_initial_n_samples([10, 10, 10])
    sampler.schedule_samples()
    sampler.ask_sampling_pool_for_samples()

    quantity = mlmc.quantity.make_root_quantity(
        storage=sample_storage, q_specs=sample_storage.load_result_format())
    length = quantity['length']
    time = length[1]
    location = time['10']
    value_quantity = location[0]

    estimator = Estimate(quantity=value_quantity,
                         sample_storage=sample_storage,
                         moments_fn=moments_fn)
    means, vars = estimator.estimate_moments(moments_fn)

    assert means[0] == 1
    assert vars[0] == 0
    assert np.allclose(np.array(ref_means), np.array(means), atol=1e-5)
    assert np.allclose(np.array(ref_vars), np.array(vars), atol=1e-5)

    if sampling_pool._debug:
        assert 'output' in next(os.walk(work_dir))[1]
Code Example #3
File: test_quantity_concept.py Project: GeoMop/MLMC
    def _create_sampler(self, step_range, clean=False, memory=False):
        # Set work dir
        os.chdir(os.path.dirname(os.path.realpath(__file__)))
        work_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                '_test_tmp')
        if clean:
            if os.path.exists(work_dir):
                shutil.rmtree(work_dir)
            os.makedirs(work_dir)

        # Create simulations
        failed_fraction = 0.1
        distr = stats.norm()
        simulation_config = dict(distr=distr,
                                 complexity=2,
                                 nan_fraction=failed_fraction,
                                 sim_method='_sample_fn')
        simulation_factory = SynthSimulationForTests(simulation_config)

        # shutil.copyfile('synth_sim_config.yaml', os.path.join(work_dir, 'synth_sim_config.yaml'))
        # simulation_config = {"config_yaml": os.path.join(work_dir, 'synth_sim_config.yaml')}
        # simulation_workspace = SynthSimulationWorkspace(simulation_config)

        # Create sample storages
        if memory:
            sample_storage = Memory()
        else:
            sample_storage = SampleStorageHDF(
                file_path=os.path.join(work_dir, "mlmc_test.hdf5"))
        # Create sampling pools
        sampling_pool = OneProcessPool()
        # sampling_pool_dir = OneProcessPool(work_dir=work_dir)

        if clean:
            if sampling_pool._output_dir is not None:
                if os.path.exists(work_dir):
                    shutil.rmtree(work_dir)
                os.makedirs(work_dir)
            if simulation_factory.need_workspace:
                os.chdir(os.path.dirname(os.path.realpath(__file__)))
                shutil.copyfile(
                    'synth_sim_config.yaml',
                    os.path.join(work_dir, 'synth_sim_config.yaml'))

        sampler = Sampler(sample_storage=sample_storage,
                          sampling_pool=sampling_pool,
                          sim_factory=simulation_factory,
                          level_parameters=step_range)

        return sampler, simulation_factory
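A hypothetical use of this helper from a test method of the same class (the method name and argument values below are illustrative, not taken from the original file):

    def test_quantity_moments(self):
        # Two-level setup with a fresh work dir and HDF5 storage
        sampler, simulation_factory = self._create_sampler(step_range=[[0.1], [0.01]], clean=True)
        sampler.set_initial_n_samples([10, 10])
        sampler.schedule_samples()
        sampler.ask_sampling_pool_for_samples()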
Code Example #4
    def setup_config(self, clean):
        """
        Simulation-dependent configuration.
        :param clean: bool, if True remove existing files
        :return: mlmc.sampler instance
        """
        # Set pbs config, flow123d, gmsh, ..., random fields are set in simulation class
        self.set_environment_variables()

        # Create Pbs sampling pool
        sampling_pool = self.create_sampling_pool()

        simulation_config = {
            'work_dir': self.work_dir,
            'env': dict(flow123d=self.flow123d, gmsh=self.gmsh, gmsh_version=1),  # The Environment.
            'yaml_file': os.path.join(self.work_dir, '01_conductivity.yaml'),
            'geo_file': os.path.join(self.work_dir, 'square_1x1.geo'),
            'fields_params': dict(model='exp', sigma=4, corr_length=0.1),
            'field_template': "!FieldElementwise {mesh_data_file: \"$INPUT_DIR$/%s\", field_name: %s}"
        }
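        # Descriptive note (not in the original): the two %s placeholders in
        # 'field_template' are format slots later filled with a mesh data file name
        # and a field name; $INPUT_DIR$ appears to be a Flow123d run-time placeholder.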

        # Create simulation factory
        simulation_factory = FlowSim(config=simulation_config, clean=clean)

        # Create HDF sample storage
        sample_storage = SampleStorageHDF(
            file_path=os.path.join(self.work_dir, "mlmc_{}.hdf5".format(self.n_levels)),
            # append=self.append
        )

        # Create sampler, it manages sample scheduling and so on
        sampler = Sampler(sample_storage=sample_storage,
                          sampling_pool=sampling_pool,
                          sim_factory=simulation_factory,
                          level_parameters=self.level_parameters)

        return sampler
Code Example #5
def test_sampler():
    # Create simulations
    failed_fraction = 0.1
    distr = stats.norm()
    simulation_config = dict(distr=distr,
                             complexity=2,
                             nan_fraction=failed_fraction,
                             sim_method='_sample_fn')
    simulation = SynthSimulation(simulation_config)
    storage = Memory()
    sampling_pool = OneProcessPool()

    step_range = [[0.1], [0.01], [0.001]]

    sampler = Sampler(sample_storage=storage,
                      sampling_pool=sampling_pool,
                      sim_factory=simulation,
                      level_parameters=step_range)

    assert len(sampler._level_sim_objects) == len(step_range)
    for step, level_sim in zip(step_range, sampler._level_sim_objects):
        assert step[0] == level_sim.config_dict['fine']['step']

    init_samples = list(np.ones(len(step_range)) * 10)

    sampler.set_initial_n_samples(init_samples)
    assert np.allclose(sampler._n_target_samples, init_samples)
    assert 0 == sampler.ask_sampling_pool_for_samples()
    sampler.schedule_samples()
    assert np.allclose(sampler._n_scheduled_samples, init_samples)

    n_estimated = np.array([100, 50, 20])
    sampler.process_adding_samples(n_estimated, 0, 0.1)
    assert np.allclose(sampler._n_target_samples,
                       init_samples + (n_estimated * 0.1),
                       atol=1)
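The final assertion above encodes simple arithmetic: with add_coef=0.1, each level's target count is expected to grow by roughly 10 % of the newly estimated sample counts. A standalone sketch of that expectation (plain NumPy, not part of the original test):

import numpy as np

init_samples = np.array([10, 10, 10])
n_estimated = np.array([100, 50, 20])
add_coef = 0.1

# Expected target counts after process_adding_samples: 10 + 0.1 * n_estimated
expected = init_samples + add_coef * n_estimated
print(expected)  # [20. 15. 12.] -- the test accepts this up to atol=1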
Code Example #6
File: process.py Project: GeoMop/MLMC
    def setup_config(self, step_range, clean):
        """
        Simulation-dependent configuration.
        :param step_range: simulation step range; its length is the number of levels
        :param clean: bool, if True remove existing files
        :return: mlmc.sampler instance
        """
        # Set pbs config, flow123d, gmsh, ..., random fields are set in simulation class
        self.set_environment_variables()

        # Create Pbs sampling pool
        sampling_pool = self.create_pbs_sampling_pool()

        #sampling_pool = OneProcessPool(work_dir=self.work_dir)  # Everything runs in one process
        #sampling_pool = ProcessPool(n_processes=4, work_dir=self.work_dir)  # Simulations run in different processes

        simulation_config = {
            'work_dir': self.work_dir,
            'env': dict(flow123d=self.flow123d, gmsh=self.gmsh, gmsh_version=1),  # The Environment.
            'yaml_file': os.path.join(self.work_dir, '01_conductivity.yaml'),
            # The template with a mesh and field placeholders
            'sim_param_range': step_range,  # Range of the MLMC simulation parameter, here the mesh step.
            'geo_file': os.path.join(self.work_dir, 'square_1x1.geo'),
            # The file with simulation geometry (independent of the step)
            # 'field_template': "!FieldElementwise {mesh_data_file: \"${INPUT}/%s\", field_name: %s}"
            'field_template': "!FieldElementwise {mesh_data_file: \"$INPUT_DIR$/%s\", field_name: %s}"
        }

        print()
        # Create simulation factory
        simulation_factory = FlowSim(config=simulation_config, clean=clean)

        # Create HDF sample storage
        sample_storage = SampleStorageHDF(
            file_path=os.path.join(self.work_dir, "mlmc_{}.hdf5".format(len(step_range))),
            append=self.append)

        # Create sampler, it manages sample scheduling and so on
        sampler = Sampler(sample_storage=sample_storage, sampling_pool=sampling_pool, sim_factory=simulation_factory,
                          level_parameters=step_range)

        return sampler
Code Example #7
    def setup_config(self, n_levels, clean):
        """
        # TODO: specify, what should be done here.
        - creation of Simulation
        - creation of Sampler
        - hdf file ?
        - why step_range must be here ?


        Simulation-dependent configuration.
        :param n_levels: int, number of MLMC levels
        :param clean: bool, if True remove existing files
        :return: mlmc.sampler instance
        """
        self.set_environment_variables()

        sampling_pool = self.create_sampling_pool()

        # Create simulation factory
        simulation_factory = Flow123d_WGC2020(config=self.config_dict,
                                              clean=clean)

        # Create HDF sample storage, possibly remove old one
        hdf_file = os.path.join(self.work_dir, "wgc2020_mlmc.hdf5")
        if self.clean:
            # Remove HFD5 file
            if os.path.exists(hdf_file):
                os.remove(hdf_file)
        sample_storage = SampleStorageHDF(file_path=hdf_file,
                                          append=self.append)

        # Create sampler, it manages sample scheduling and so on
        # The length of level_parameters must match the number of MLMC levels (at least 1).
        sampler = Sampler(sample_storage=sample_storage,
                          sampling_pool=sampling_pool,
                          sim_factory=simulation_factory,
                          level_parameters=[1])

        return sampler
Code Example #8
File: process_debug.py Project: GeoMop/MLMC
    def run(self, renew=False):
        np.random.seed(3)
        n_moments = 5
        failed_fraction = 0

        # work_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), '_test_tmp')
        # if os.path.exists(work_dir):
        #     shutil.rmtree(work_dir)
        # os.makedirs(work_dir)

        distr = stats.norm()
        step_range = [0.1, 0.001]

        # User configure and create simulation instance
        simulation_config = dict(distr=distr,
                                 complexity=2,
                                 nan_fraction=failed_fraction,
                                 sim_method='_sample_fn')
        #simulation_config = {"config_yaml": 'synth_sim_config.yaml'}
        simulation_factory = SynthSimulation(simulation_config)

        # Path to the HDF5 sample storage file
        hdf_file = os.path.join(self.work_dir,
                                "mlmc_{}.hdf5".format(len(step_range)))
        if self.clean and os.path.exists(hdf_file):
            # Remove the old storage so sampling starts from scratch
            os.remove(hdf_file)

        sample_storage = SampleStorageHDF(file_path=hdf_file,
                                          append=self.append)
        sampling_pool = OneProcessPool()

        # Plan and compute samples
        sampler = Sampler(sample_storage=sample_storage,
                          sampling_pool=sampling_pool,
                          sim_factory=simulation_factory,
                          step_range=step_range)

        true_domain = distr.ppf([0.0001, 0.9999])
        moments_fn = Legendre(n_moments, true_domain)
        # moments_fn = Monomial(n_moments, true_domain)

        if renew:
            sampler.ask_sampling_pool_for_samples()
            sampler.renew_failed_samples()
            sampler.ask_sampling_pool_for_samples()
        else:
            sampler.set_initial_n_samples([12, 6])
            # sampler.set_initial_n_samples([1000])
            sampler.schedule_samples()
            sampler.ask_sampling_pool_for_samples()

        q_estimator = QuantityEstimate(sample_storage=sample_storage,
                                       moments_fn=moments_fn,
                                       sim_steps=step_range)
        #
        target_var = 1e-4
        sleep = 0
        add_coef = 0.1

        # # @TODO: test
        # # New estimation according to already finished samples
        # variances, n_ops = q_estimator.estimate_diff_vars_regression(sampler._n_scheduled_samples)
        # n_estimated = new_estimator.estimate_n_samples_for_target_variance(target_var, variances, n_ops,
        #                                                                    n_levels=sampler.n_levels)
        #
        # # Loop until number of estimated samples is greater than the number of scheduled samples
        # while not sampler.process_adding_samples(n_estimated, sleep, add_coef):
        #     # New estimation according to already finished samples
        #     variances, n_ops = q_estimator.estimate_diff_vars_regression(sampler._n_scheduled_samples)
        #     n_estimated = new_estimator.estimate_n_samples_for_target_variance(target_var, variances, n_ops,
        #                                                                        n_levels=sampler.n_levels)

        print("collected samples ", sampler._n_scheduled_samples)
        means, vars = q_estimator.estimate_moments(moments_fn)

        print("means ", means)
        print("vars ", vars)
        assert means[0] == 1
        assert np.isclose(means[1], 0, atol=1e-2)
        assert vars[0] == 0
Code Example #9
File: process_debug.py Project: GeoMop/MLMC
    def run(self, renew=False):
        np.random.seed(3)
        n_moments = 5
        distr = stats.norm(loc=1, scale=2)
        step_range = [0.01, 0.001]

        # Set work dir
        os.chdir(os.path.dirname(os.path.realpath(__file__)))
        shutil.copyfile('synth_sim_config.yaml',
                        os.path.join(self.work_dir, 'synth_sim_config.yaml'))

        simulation_config = {
            "config_yaml": os.path.join(self.work_dir, 'synth_sim_config.yaml')
        }
        simulation_factory = SynthSimulationWorkspace(simulation_config)

        # Path to the HDF5 sample storage file
        file_path = os.path.join(self.work_dir,
                                 "mlmc_{}.hdf5".format(len(step_range)))
        if self.clean and os.path.exists(file_path):
            os.remove(file_path)

        sample_storage = SampleStorageHDF(file_path=file_path,
                                          append=self.append)
        sampling_pool = SamplingPoolPBS(job_weight=20000000,
                                        work_dir=self.work_dir,
                                        clean=self.clean)

        pbs_config = dict(
            n_cores=1,
            n_nodes=1,
            select_flags=['cgroups=cpuacct'],
            mem='128mb',
            queue='charon_2h',
            home_dir='/storage/liberec3-tul/home/martin_spetlik/',
            pbs_process_file_dir='/auto/liberec3-tul/home/martin_spetlik/MLMC_new_design/src/mlmc',
            python='python3',
            env_setting=[
                'cd {work_dir}', 'module load python36-modules-gcc',
                'source env/bin/activate',
                'pip3 install /storage/liberec3-tul/home/martin_spetlik/MLMC_new_design',
                'module use /storage/praha1/home/jan-hybs/modules',
                'module load python36-modules-gcc', 'module list'
            ])

        sampling_pool.pbs_common_setting(flow_3=True, **pbs_config)

        # Plan and compute samples
        sampler = Sampler(sample_storage=sample_storage,
                          sampling_pool=sampling_pool,
                          sim_factory=simulation_factory,
                          step_range=step_range)

        true_domain = distr.ppf([0.0001, 0.9999])
        moments_fn = Legendre(n_moments, true_domain)

        if renew:
            sampler.ask_sampling_pool_for_samples()
            sampler.renew_failed_samples()
            sampler.ask_sampling_pool_for_samples()
        else:
            sampler.set_initial_n_samples([12, 6])
            # sampler.set_initial_n_samples([1000])
            sampler.schedule_samples()
            sampler.ask_sampling_pool_for_samples()

        q_estimator = QuantityEstimate(sample_storage=sample_storage,
                                       moments_fn=moments_fn,
                                       sim_steps=step_range)

        # target_var = 1e-3
        # sleep = 0
        # add_coef = 0.1
        #
        # # @TODO: test
        # # New estimation according to already finished samples
        # variances, n_ops = q_estimator.estimate_diff_vars_regression(sampler._n_scheduled_samples)
        # n_estimated = new_estimator.estimate_n_samples_for_target_variance(target_var, variances, n_ops,
        #                                                                    n_levels=sampler.n_levels)
        # # Loop until number of estimated samples is greater than the number of scheduled samples
        # while not sampler.process_adding_samples(n_estimated, sleep, add_coef):
        #     # New estimation according to already finished samples
        #     variances, n_ops = q_estimator.estimate_diff_vars_regression(sampler._n_scheduled_samples)
        #     n_estimated = new_estimator.estimate_n_samples_for_target_variance(target_var, variances, n_ops,
        #                                                                        n_levels=sampler.n_levels)

        # print("collected samples ", sampler._n_created_samples)
        means, vars = q_estimator.estimate_moments(moments_fn)

        print("means ", means)
        print("vars ", vars)
Code Example #10
def test_sampler_pbs(work_dir, clean=False, debug=False):
    np.random.seed(3)
    n_moments = 5
    distr = stats.norm(loc=1, scale=2)
    step_range = [0.5, 0.01]
    n_levels = 5

    # if clean:
    #     if os.path.isdir(work_dir):
    #         shutil.rmtree(work_dir, ignore_errors=True)
    os.makedirs(work_dir, mode=0o775, exist_ok=True)

    assert step_range[0] > step_range[1]
    level_parameters = []
    for i_level in range(n_levels):
        if n_levels == 1:
            level_param = 1
        else:
            level_param = i_level / (n_levels - 1)
        level_parameters.append(
            [step_range[0]**(1 - level_param) * step_range[1]**level_param])
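    # Note (not in the original test): for n_levels > 1 the loop above interpolates
    # the mesh step geometrically between step_range[0] and step_range[1], i.e. it is
    # equivalent to
    #   level_parameters = [[s] for s in np.geomspace(step_range[0], step_range[1], n_levels)]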

    failed_fraction = 0
    simulation_config = dict(distr='norm',
                             complexity=2,
                             nan_fraction=failed_fraction,
                             sim_method='_sample_fn')

    with open(os.path.join(work_dir, 'synth_sim_config.yaml'), "w") as file:
        yaml.dump(simulation_config, file, default_flow_style=False)

    simulation_config = {
        "config_yaml": os.path.join(work_dir, 'synth_sim_config.yaml')
    }
    simulation_factory = SynthSimulationWorkspace(simulation_config)

    if clean and os.path.exists(
            os.path.join(work_dir, "mlmc_{}.hdf5".format(len(step_range)))):
        os.remove(
            os.path.join(work_dir, "mlmc_{}.hdf5".format(len(step_range))))

    if clean and os.path.exists(os.path.join(work_dir, "output")):
        shutil.rmtree(os.path.join(work_dir, "output"), ignore_errors=True)

    sample_storage = SampleStorageHDF(file_path=os.path.join(
        work_dir, "mlmc_{}.hdf5".format(len(step_range))))
    sampling_pool = SamplingPoolPBS(work_dir=work_dir, clean=clean)
    #sampling_pool = OneProcessPool()

    shutil.copyfile(
        os.path.join(work_dir, 'synth_sim_config.yaml'),
        os.path.join(sampling_pool._output_dir, 'synth_sim_config.yaml'))

    pbs_config = dict(
        n_cores=1,
        n_nodes=1,
        select_flags=['cgroups=cpuacct'],
        mem='2Gb',
        queue='charon',
        pbs_name='flow123d',
        walltime='72:00:00',
        optional_pbs_requests=[],  # e.g. ['#PBS -m ae', ...]
        home_dir='/auto/liberec3-tul/home/martin_spetlik/',
        python='python3',
        env_setting=[
            'cd $MLMC_WORKDIR',
            'module load python36-modules-gcc',
            'source env/bin/activate',
            # 'pip3 install /storage/liberec3-tul/home/martin_spetlik/MLMC_new_design',
            'module use /storage/praha1/home/jan-hybs/modules',
            'module load python36-modules-gcc',
            'module load flow123d',
            'module list'
        ])
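    # Descriptive note (not in the original): the 'env_setting' entries are shell
    # commands used to prepare the environment of each PBS job (module loads,
    # virtualenv activation) before the scheduled samples run.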

    sampling_pool.pbs_common_setting(flow_3=True, **pbs_config)

    # Plan and compute samples
    sampler = Sampler(sample_storage=sample_storage,
                      sampling_pool=sampling_pool,
                      sim_factory=simulation_factory,
                      level_parameters=level_parameters)

    true_domain = distr.ppf([0.0001, 0.9999])
    moments_fn = Legendre(n_moments, true_domain)

    sampler.set_initial_n_samples([1e7, 5e6, 1e6, 5e5, 1e4])
    #sampler.set_initial_n_samples([1e1, 1e1, 1e1, 1e1, 1e1])
    #sampler.set_initial_n_samples([4, 4, 4, 4, 4])
    sampler.schedule_samples()
    n_running = sampler.ask_sampling_pool_for_samples()

    quantity = mlmc.quantity.make_root_quantity(
        storage=sample_storage, q_specs=sample_storage.load_result_format())
    length = quantity['length']
    time = length[1]
    location = time['10']
    value_quantity = location[0]

    estimator = Estimate(quantity=value_quantity,
                         sample_storage=sample_storage,
                         moments_fn=moments_fn)

    # target_var = 1e-3
    # sleep = 0
    # add_coef = 0.1
    #
    # # @TODO: test
    # # New estimation according to already finished samples
    # variances, n_ops = q_estimator.estimate_diff_vars_regression(sampler._n_scheduled_samples)
    # n_estimated = new_estimator.estimate_n_samples_for_target_variance(target_var, variances, n_ops,
    #                                                                    n_levels=sampler.n_levels)
    # # Loop until number of estimated samples is greater than the number of scheduled samples
    # while not sampler.process_adding_samples(n_estimated, sleep, add_coef):
    #     # New estimation according to already finished samples
    #     variances, n_ops = q_estimator.estimate_diff_vars_regression(sampler._n_scheduled_samples)
    #     n_estimated = new_estimator.estimate_n_samples_for_target_variance(target_var, variances, n_ops,
    #                                                                        n_levels=sampler.n_levels)

    #print("collected samples ", sampler._n_created_samples)
    means, vars = estimator.estimate_moments(moments_fn)
Code Example #11
File: development_tests.py Project: GeoMop/MLMC
def multiproces_sampler_test():
    np.random.seed(3)
    n_moments = 5

    failed_fraction = 0.1
    distr = stats.norm(loc=1, scale=2)

    step_range = [[0.01], [0.001], [0.0001]]

    # Create simulation instance
    simulation_config = dict(distr=distr,
                             complexity=2,
                             nan_fraction=failed_fraction,
                             sim_method='_sample_fn')
    simulation_factory = SynthSimulation(simulation_config)

    sample_storage = Memory()
    sampling_pool = ProcessPool(4)

    # Plan and compute samples
    sampler = Sampler(sample_storage=sample_storage,
                      sampling_pool=sampling_pool,
                      sim_factory=simulation_factory,
                      level_parameters=step_range)

    true_domain = distr.ppf([0.0001, 0.9999])
    moments_fn = Legendre(n_moments, true_domain)

    sampler.set_initial_n_samples()
    #sampler.set_initial_n_samples([1000])
    sampler.schedule_samples()
    sampler.ask_sampling_pool_for_samples()

    q_estimator = QuantityEstimate(sample_storage=sample_storage,
                                   moments_fn=moments_fn,
                                   sim_steps=step_range)
    #
    target_var = 1e-4
    sleep = 0
    add_coef = 0.1

    # # @TODO: test
    # # New estimation according to already finished samples
    # variances, n_ops = q_estimator.estimate_diff_vars_regression(sampler._n_scheduled_samples)
    # n_estimated = new_estimator.estimate_n_samples_for_target_variance(target_var, variances, n_ops,
    #                                                                    n_levels=sampler.n_levels)
    #
    # # Loop until number of estimated samples is greater than the number of scheduled samples
    # while not sampler.process_adding_samples(n_estimated, sleep, add_coef):
    #     # New estimation according to already finished samples
    #     variances, n_ops = q_estimator.estimate_diff_vars_regression(sampler._n_scheduled_samples)
    #     n_estimated = new_estimator.estimate_n_samples_for_target_variance(target_var, variances, n_ops,
    #                                                                        n_levels=sampler.n_levels)
    #
    #     print("n estimated ", n_estimated)

    print("collected samples ", sampler._n_scheduled_samples)
    means, vars = q_estimator.estimate_moments(moments_fn)

    print("means ", means)
    print("vars ", vars)
    assert means[0] == 1
    assert np.isclose(means[1], 0, atol=5 * 1e-2)
    assert vars[0] == 0
    sampler.schedule_samples()
    sampler.ask_sampling_pool_for_samples()

    storage = sampler.sample_storage
    results = storage.sample_pairs()
Code Example #12
File: development_tests.py Project: GeoMop/MLMC
def thread_test():
    np.random.seed(3)
    n_moments = 5
    distr = stats.norm(loc=1, scale=2)

    step_range = [0.01, 0.001, 0.0001]

    os.chdir(os.path.dirname(os.path.realpath(__file__)))
    work_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                            '_test_tmp')
    if os.path.exists(work_dir):
        shutil.rmtree(work_dir)
    os.makedirs(work_dir)
    shutil.copyfile('synth_sim_config.yaml',
                    os.path.join(work_dir, 'synth_sim_config.yaml'))

    simulation_config = {
        "config_yaml": os.path.join(work_dir, 'synth_sim_config.yaml')
    }
    simulation_factory = SynthSimulationWorkspace(simulation_config)

    sample_storage = Memory()
    sampling_pool = ThreadPool(4, work_dir=work_dir)

    # Plan and compute samples
    sampler = Sampler(sample_storage=sample_storage,
                      sampling_pool=sampling_pool,
                      sim_factory=simulation_factory,
                      step_range=step_range)

    true_domain = distr.ppf([0.0001, 0.9999])
    moments_fn = Legendre(n_moments, true_domain)

    sampler.set_initial_n_samples()
    # sampler.set_initial_n_samples([1000])
    sampler.schedule_samples()
    sampler.ask_sampling_pool_for_samples()

    sampler.target_var_adding_samples(1e-4, moments_fn, sleep=20)
    print("collected samples ", sampler._n_created_samples)

    means, vars = sampler.estimate_moments(moments_fn)

    print("means ", means)
    print("vars ", vars)
    assert means[0] == 1
    assert np.isclose(means[1], 0, atol=1e-2)
    assert vars[0] == 0
    sampler.schedule_samples()
    sampler.ask_sampling_pool_for_samples()

    storage = sampler.sample_storage
    results = storage.sample_pairs()
Code Example #13
File: development_tests.py Project: GeoMop/MLMC
def multiprocess_test():
    np.random.seed(3)
    n_moments = 5
    distr = stats.norm(loc=1, scale=2)
    step_range = [0.01, 0.001]  #, 0.001, 0.0001]

    os.chdir(os.path.dirname(os.path.realpath(__file__)))
    work_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                            '_test_tmp')
    if os.path.exists(work_dir):
        shutil.rmtree(work_dir)
    os.makedirs(work_dir)
    shutil.copyfile('synth_sim_config.yaml',
                    os.path.join(work_dir, 'synth_sim_config.yaml'))

    simulation_config = {
        "config_yaml": os.path.join(work_dir, 'synth_sim_config.yaml')
    }
    simulation_factory = SynthSimulationWorkspace(simulation_config)

    sample_storage = Memory()
    sampling_pool = ProcessPool(4, work_dir=work_dir)

    # Plan and compute samples
    sampler = Sampler(sample_storage=sample_storage,
                      sampling_pool=sampling_pool,
                      sim_factory=simulation_factory,
                      step_range=step_range)

    true_domain = distr.ppf([0.0001, 0.9999])
    moments_fn = Legendre(n_moments, true_domain)

    sampler.set_initial_n_samples()
    #sampler.set_initial_n_samples([1000])
    sampler.schedule_samples()
    sampler.ask_sampling_pool_for_samples()

    q_estimator = QuantityEstimate(sample_storage=sample_storage,
                                   moments_fn=moments_fn,
                                   sim_steps=step_range)

    target_var = 1e-4
    sleep = 0
    add_coef = 0.1

    # # @TODO: test
    # # New estimation according to already finished samples
    # variances, n_ops = q_estimator.estimate_diff_vars_regression(sampler._n_scheduled_samples)
    # n_estimated = new_estimator.estimate_n_samples_for_target_variance(target_var, variances, n_ops,
    #                                                                    n_levels=sampler.n_levels)
    # # Loop until number of estimated samples is greater than the number of scheduled samples
    # while not sampler.process_adding_samples(n_estimated, sleep, add_coef):
    #     # New estimation according to already finished samples
    #     variances, n_ops = q_estimator.estimate_diff_vars_regression(sampler._n_scheduled_samples)
    #     n_estimated = new_estimator.estimate_n_samples_for_target_variance(target_var, variances, n_ops,
    #                                                                        n_levels=sampler.n_levels)

    print("collected samples ", sampler._n_scheduled_samples)
    means, vars = q_estimator.estimate_moments(moments_fn)

    print("means ", means)
    print("vars ", vars)
    assert means[0] == 1
    assert np.isclose(means[1], 0, atol=1e-2)
    assert vars[0] == 0
    sampler.schedule_samples()
    sampler.ask_sampling_pool_for_samples()

    storage = sampler.sample_storage
    results = storage.sample_pairs()