def generate(self, box, n, sampling=None):
    """Fill self._list with n parameter samples drawn from box.

    Sampling is performed on the I/O (root) process only; the resulting
    list is then broadcast over the MPI communicator so that every rank
    holds the same samples. An empty box yields n empty tuples instead.
    """
    if len(box) == 0:
        # Degenerate parameter space: store n empty parameter tuples.
        for _ in range(n):
            self._list.append(tuple())
        return
    if is_io_process():
        # Default to uniform sampling; a tuple of per-component
        # distributions is wrapped into a single composite one.
        if sampling is None:
            sampling = UniformDistribution()
        elif isinstance(sampling, tuple):
            assert len(sampling) == len(box)
            sampling = CompositeDistribution(sampling)
        self._list = sampling.sample(box, n)
    # Every rank takes part in the broadcast from the root process.
    self._list = is_io_process.mpi_comm.bcast(self._list, root=0)
def _generate_time_sampling(self, **kwargs):
    """Build the distribution used to sample time instants.

    An explicit "time_sampling" keyword argument takes precedence;
    otherwise a uniform distribution is used. The continuous
    distribution is discretized on multiples of the EIM time step.
    """
    try:
        time_sampling = kwargs["time_sampling"]
    except KeyError:
        time_sampling = UniformDistribution()
    return DiscreteDistribution(time_sampling, (self.EIM_approximation.dt, ))
def initialize_set(cardinality, name):
    """Generate and save the training or testing parameter set.

    Training sets use equispaced sampling; testing sets use uniform
    random sampling. The generated subset is saved into the "sets"
    folder under the given name. NOTE(review): mu_range is read from
    the enclosing scope — confirm it is defined before calling.
    """
    assert name in ("training_set", "testing_set")
    subset = ParameterSpaceSubset()
    sampling = EquispacedDistribution() if name == "training_set" else UniformDistribution()
    subset.generate(mu_range, cardinality, sampling=sampling)
    subset.save("sets", name)
def _generate_time_sampling(self, **kwargs):
    """Build the distribution used to sample time instants.

    An explicit "time_sampling" keyword argument takes precedence;
    otherwise a uniform distribution is used. The time step is read
    from the truth problem's monitor settings when present, falling
    back to the EIM approximation's own (non-None) dt.
    """
    try:
        time_sampling = kwargs["time_sampling"]
    except KeyError:
        time_sampling = UniformDistribution()
    try:
        # Prefer the monitor time step of the underlying truth problem;
        # a missing "monitor" or "time_step_size" key triggers the fallback.
        stepping_parameters = self.EIM_approximation.truth_problem._time_stepping_parameters
        time_step = stepping_parameters["monitor"]["time_step_size"]
    except KeyError:
        assert self.EIM_approximation.dt is not None
        time_step = self.EIM_approximation.dt
    return DiscreteDistribution(time_sampling, (time_step, ))
def test_sampling_composite_uniform():
    """Plot per-component histograms of a composite uniform sampling.

    Draws n samples from the 2D box with an independent uniform
    distribution per component and compares each marginal against
    scipy.stats.uniform.

    Bug fix: the original indexed each box interval with the *builtin
    functions* min and max (box[i][min], box[i][max]), which raises
    TypeError — intervals are (lower, upper) pairs, so the correct
    indices are 0 and 1.
    """
    parameter_space_subset = ParameterSpaceSubset()
    parameter_space_subset.generate(
        box, n, sampling=(UniformDistribution(), UniformDistribution()))
    # scipy.stats.uniform is parameterized by loc (lower bound) and
    # scale (interval width).
    plot(0, box, parameter_space_subset, bins, stats.uniform,
         loc=box[0][0], scale=box[0][1] - box[0][0])
    plot(1, box, parameter_space_subset, bins, stats.uniform,
         loc=box[1][0], scale=box[1][1] - box[1][0])
    plt.show()
def generate(self, box, n, sampling=None):
    """Fill self._list with n parameter samples drawn from box.

    The actual sampling is wrapped in a callable handed to
    parallel_generate together with self.mpi_comm, which decides where
    it runs and how the result is shared. An empty box yields n empty
    tuples instead.
    """
    if len(box) == 0:
        # Degenerate parameter space: store n empty parameter tuples.
        for _ in range(n):
            self._list.append(tuple())
        return
    # Default to uniform sampling; a tuple of per-component
    # distributions is wrapped into a single composite one.
    if sampling is None:
        sampling = UniformDistribution()
    elif isinstance(sampling, tuple):
        assert len(sampling) == len(box)
        sampling = CompositeDistribution(sampling)

    def draw_samples():
        # Deferred so that parallel_generate controls execution.
        return sampling.sample(box, n)

    self._list = parallel_generate(draw_samples, self.mpi_comm)