Example #1
def test_sampling_composite_beta_generator():
    parameter_space_subset = ParameterSpaceSubset()
    parameter_space_subset.generate(box,
                                    n,
                                    sampling=(DrawFrom(random.beta, a=2, b=5),
                                              DrawFrom(random.beta, a=5, b=1)))
    plot(0,
         box,
         parameter_space_subset,
         bins,
         stats.beta,
         a=2,
         b=5,
         loc=box[0][min],
         scale=box[0][max] - box[0][min])
    plot(1,
         box,
         parameter_space_subset,
         bins,
         stats.beta,
         a=5,
         b=1,
         loc=box[1][min],
         scale=box[1][max] - box[1][min])
    plt.show()
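Examples #1 and #6 through #10 are variations of the same sampling test; they all rely on module-level fixtures (`box`, `n`, `bins`, the `plot` helper and the imported distributions) that are not part of the excerpts. The following is a minimal sketch of that setup, reconstructed from the calls above; the import paths, the values of `box`, `n`, `bins` and the body of `plot` are assumptions, not code from the original test file.

# Hypothetical module-level setup for the sampling tests (reconstructed).
import matplotlib.pyplot as plt
from numpy import random
from scipy import stats
from rbnics.sampling import ParameterSpaceSubset                  # assumed import path
from rbnics.sampling.distributions import (                       # assumed import path
    DrawFrom, EquispacedDistribution, LogUniformDistribution, UniformDistribution)

min, max = 0, 1              # assumption: the test module shadows the builtins with tuple indices
box = [(1., 2.), (3., 4.)]   # hypothetical two-component parameter box
n = 10000                    # number of parameters to sample
bins = 50                    # histogram bins for the visual check

def plot(component, box, parameter_space_subset, bins, distribution, **kwargs):
    # Overlay the empirical histogram of one parameter component on the density
    # of the expected distribution (shape/loc/scale come in via kwargs).
    # Assumption: iterating the subset yields tuples of parameter components.
    values = [mu[component] for mu in parameter_space_subset]
    plt.figure(component)
    plt.hist(values, bins=bins, density=True)
    a, b = box[component][min], box[component][max]
    xs = [a + i / 100. * (b - a) for i in range(101)]
    plt.plot(xs, [distribution.pdf(x, **kwargs) for x in xs])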
Example #2
    def __init__(self, folder_prefix):
        # I/O
        self.folder_prefix = folder_prefix
        self.folder = Folders()

        # $$ OFFLINE DATA STRUCTURES $$ #
        # Maximum reduced order space dimension to be used for the stopping criterion in the basis selection
        self.Nmax = 0
        # Tolerance to be used for the stopping criterion in the basis selection
        self.tol = 0.
        # Training set
        self.training_set = ParameterSpaceSubset()
        # I/O
        self.folder["training_set"] = os.path.join(self.folder_prefix,
                                                   "training_set")

        # $$ ERROR ANALYSIS AND SPEEDUP ANALYSIS DATA STRUCTURES $$ #
        # Testing set
        self.testing_set = ParameterSpaceSubset()
        # I/O
        self.folder["testing_set"] = os.path.join(self.folder_prefix,
                                                  "testing_set")
        self.folder["error_analysis"] = os.path.join(self.folder_prefix,
                                                     "error_analysis")
        self.folder["speedup_analysis"] = os.path.join(self.folder_prefix,
                                                       "speedup_analysis")
Example #3
 def initialize_testing_set(self,
                            ntest,
                            enable_import=False,
                            sampling=None,
                            **kwargs):
     import_successful = EIMApproximationReductionMethod.initialize_testing_set(
         self, ntest, enable_import, sampling, **kwargs)
     # Initialize time testing set
     time_testing_set = ParameterSpaceSubset()
     # Test if can import
     time_import_successful = False
     if enable_import:
         time_import_successful = time_testing_set.load(
             self.folder["testing_set"],
             "time_testing_set") and (len(time_testing_set) == ntest)
     if not time_import_successful:
         time_sampling = self._generate_time_sampling(**kwargs)
         time_testing_set.generate([(0., self.EIM_approximation.T)], ntest,
                                   time_sampling)
         # Export
         time_testing_set.save(self.folder["testing_set"],
                               "time_testing_set")
     # Combine both sets into one
     self._combine_sets(self.testing_set, time_testing_set)
     # Return
     assert time_import_successful == import_successful
     return import_successful
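`_generate_time_sampling` and `_combine_sets` are helpers defined elsewhere in the class. Purely as an illustration of the combination step, and consistent with the `mu["mu"]` / `mu["t"]` lookups in the next example, the pairing presumably looks roughly like the standalone sketch below (not the actual in-place helper).

def combine_sets_sketch(parameter_set, time_set):
    # Pair the i-th sampled parameter with the i-th sampled time instant.
    # Assumptions: both subsets have the same cardinality, entries of the
    # one-dimensional time set are 1-tuples, and combined entries expose
    # "mu" and "t".
    assert len(parameter_set) == len(time_set)
    return [{"mu": mu, "t": t[0]} for (mu, t) in zip(parameter_set, time_set)]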
Example #4
 def initialize_training_set(self,
                             ntrain,
                             enable_import=True,
                             sampling=None,
                             **kwargs):
     import_successful = EIMApproximationReductionMethod.initialize_training_set(
         self, ntrain, enable_import, sampling, **kwargs)
     # Initialize time training set
     time_training_set = ParameterSpaceSubset()
     # Test if can import
     time_import_successful = False
     if enable_import:
         time_import_successful = time_training_set.load(
             self.folder["training_set"],
             "time_training_set") and (len(time_training_set) == ntrain)
     if not time_import_successful:
         time_sampling = self._generate_time_sampling(**kwargs)
         time_training_set.generate([(0., self.EIM_approximation.T)],
                                    ntrain, time_sampling)
         # Export
         time_training_set.save(self.folder["training_set"],
                                "time_training_set")
     # Combine both sets into one
     self._combine_sets(self.training_set, time_training_set)
     # Also initialize the map from parameter values to snapshots container index
     self._training_set_parameters_to_snapshots_container_index = {
         (mu["mu"], mu["t"]): mu_index
         for (mu_index, mu) in enumerate(self.training_set)
     }
     # Return
     assert time_import_successful == import_successful
     return import_successful
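The dictionary comprehension above maps each combined (parameter value, time instant) pair to its position in the training set, so snapshots can later be retrieved without scanning the whole set. A hedged usage sketch (the function name is hypothetical):

def snapshot_index(reduction_method, mu, t):
    # Return the snapshot container index for a given parameter value and
    # time instant, using the map built in initialize_training_set above.
    return reduction_method._training_set_parameters_to_snapshots_container_index[(mu, t)]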
Example #5
 def initialize_testing_set(self,
                            ntest,
                            enable_import=False,
                            sampling=None,
                            **kwargs):
     import_successful = EIMApproximationReductionMethod.initialize_testing_set(
         self, ntest, enable_import, sampling, **kwargs)
     # Initialize time testing set
     time_testing_set = ParameterSpaceSubset()
     # Test if can import
     time_import_successful = False
     if enable_import:
         time_import_successful = time_testing_set.load(
             self.folder["testing_set"],
             "time_testing_set") and (len(time_testing_set) == ntest)
     if not time_import_successful:
         time_sampling = self._generate_time_sampling(**kwargs)
         try:
             t0 = self.EIM_approximation.truth_problem._time_stepping_parameters[
                 "monitor"]["initial_time"]
         except KeyError:
             t0 = self.t0
         T = self.EIM_approximation.T
         time_testing_set.generate([(t0, T)], ntest, time_sampling)
         # Export
         time_testing_set.save(self.folder["testing_set"],
                               "time_testing_set")
     # Combine both sets into one
     self._combine_sets(self.testing_set, time_testing_set)
     # Return
     assert time_import_successful == import_successful
     return import_successful
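This variant differs from Example #3 only in how the initial time is chosen: it prefers the monitored initial time of the truth problem and falls back to `self.t0` when that entry is absent. A hypothetical shape for the dictionary read here (only the nested "monitor"/"initial_time" entry is actually required; the other key names are illustrative assumptions):

# Hypothetical content of truth_problem._time_stepping_parameters.
time_stepping_parameters = {
    "initial_time": 0.,
    "time_step_size": 0.1,
    "final_time": 1.,
    "monitor": {"initial_time": 0.5},   # time from which snapshots are monitored
}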
Example #6
def test_sampling_uniform():
    parameter_space_subset = ParameterSpaceSubset()
    parameter_space_subset.generate(box, n, sampling=UniformDistribution())
    plot(0,
         box,
         parameter_space_subset,
         bins,
         stats.uniform,
         loc=box[0][min],
         scale=box[0][max] - box[0][min])
    plot(1,
         box,
         parameter_space_subset,
         bins,
         stats.uniform,
         loc=box[1][min],
         scale=box[1][max] - box[1][min])
    plt.show()
Example #7
def test_sampling_default():
    parameter_space_subset = ParameterSpaceSubset()
    parameter_space_subset.generate(box, n)
    plot(0,
         box,
         parameter_space_subset,
         bins,
         stats.uniform,
         loc=box[0][min],
         scale=box[0][max] - box[0][min])
    plot(1,
         box,
         parameter_space_subset,
         bins,
         stats.uniform,
         loc=box[1][min],
         scale=box[1][max] - box[1][min])
    plt.show()
Example #8
def test_sampling_equispaced_generator():
    parameter_space_subset = ParameterSpaceSubset()
    parameter_space_subset.generate(box, n, sampling=EquispacedDistribution())
    plot(0,
         box,
         parameter_space_subset,
         bins,
         stats_equispaced,
         loc=box[0][min],
         scale=box[0][max] - box[0][min])
    plot(1,
         box,
         parameter_space_subset,
         bins,
         stats_equispaced,
         loc=box[1][min],
         scale=box[1][max] - box[1][min])
    plt.show()
Example #9
def test_sampling_composite_equispaced_and_log_uniform():
    parameter_space_subset = ParameterSpaceSubset()
    parameter_space_subset.generate(box,
                                    n,
                                    sampling=(EquispacedDistribution(),
                                              LogUniformDistribution()))
    plot(0,
         box,
         parameter_space_subset,
         bins,
         stats_equispaced,
         loc=box[0][min],
         scale=box[0][max] - box[0][min])
    plot(1,
         box,
         parameter_space_subset,
         bins,
         stats_loguniform,
         loc=box[1][min],
         scale=box[1][max] - box[1][min])
    plt.show()
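`stats_equispaced` and `stats_loguniform` are reference helpers from the same test module, not shown in these excerpts; given how `plot` uses its distribution argument, they presumably expose a scipy-like `pdf`. A hypothetical reconstruction of the densities they would encode:

import math

class stats_equispaced_sketch(object):
    # Equispaced samples cover [loc, loc + scale] uniformly, so the reference
    # density is flat, the same as stats.uniform.
    @staticmethod
    def pdf(x, loc=0., scale=1.):
        return 1. / scale if loc <= x <= loc + scale else 0.

class stats_loguniform_sketch(object):
    # Log-uniform (reciprocal) density on [loc, loc + scale], assuming loc > 0.
    @staticmethod
    def pdf(x, loc=0., scale=1.):
        a, b = loc, loc + scale
        return 1. / (x * math.log(b / a)) if a <= x <= b else 0.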
Example #10
def test_sampling_composite_log_uniform_and_beta():
    parameter_space_subset = ParameterSpaceSubset()
    parameter_space_subset.generate(box,
                                    n,
                                    sampling=(LogUniformDistribution(),
                                              DrawFrom(random.beta, a=2, b=5)))
    plot(0,
         box,
         parameter_space_subset,
         bins,
         stats_loguniform,
         loc=box[0][min],
         scale=box[0][max] - box[0][min])
    plot(1,
         box,
         parameter_space_subset,
         bins,
         stats.beta,
         a=2,
         b=5,
         loc=box[1][min],
         scale=box[1][max] - box[1][min])
    plt.show()
Example #11
def initialize_set(cardinality, name):
    set_ = ParameterSpaceSubset()
    assert name in ("training_set", "testing_set")
    if name == "training_set":
        sampling = EquispacedDistribution()
    elif name == "testing_set":
        sampling = UniformDistribution()
    set_.generate(mu_range, cardinality, sampling=sampling)
    set_.save("sets", name)
Example #12
class GreedySelectedParametersList(object):
    def __init__(self):
        self.parameter_space_subset = ParameterSpaceSubset()

    def save(self, directory, filename):
        self.parameter_space_subset.save(directory, filename)

    def load(self, directory, filename):
        return self.parameter_space_subset.load(directory, filename)

    def append(self, element):
        self.parameter_space_subset.append(element)

    def closest(self, M, mu):
        output = GreedySelectedParametersList()
        output.parameter_space_subset = self.parameter_space_subset.closest(
            M, mu)
        return output

    @overload
    def __getitem__(self, key: int):
        return self.parameter_space_subset[key]

    @overload
    def __getitem__(self, key: slice):
        output = GreedySelectedParametersList()
        output.parameter_space_subset = self.parameter_space_subset[key]
        return output

    def __iter__(self):
        return iter(self.parameter_space_subset)

    def __len__(self):
        return len(self.parameter_space_subset)

    def __str__(self):
        return str(self.parameter_space_subset)
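A hedged usage sketch of this wrapper during a greedy loop; the selection criterion shown (picking the first parameter not yet selected) is a stand-in, not the actual error-estimator-driven choice.

# Hypothetical greedy loop built on top of GreedySelectedParametersList.
# Assumes initialize_set(...) from Example #11 has already populated the
# "sets" folder, and uses get_set(...) from Example #15 below.
greedy_selected_parameters = GreedySelectedParametersList()
training_set = get_set("training_set")
for _ in range(5):
    # Stand-in criterion: take the first parameter that was not selected yet
    # (a real greedy method would maximize an error estimator here).
    mu = next(m for m in training_set
              if m not in list(greedy_selected_parameters))
    greedy_selected_parameters.append(mu)
greedy_selected_parameters.save("sets", "greedy_selected_parameters")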
Example #13
import os
from abc import ABCMeta, abstractmethod
from rbnics.sampling import ParameterSpaceSubset   # assumed import path
from rbnics.utils.io import Folders                # assumed import path


class ReductionMethod(object, metaclass=ABCMeta):
    def __init__(self, folder_prefix):
        # I/O
        self.folder_prefix = folder_prefix
        self.folder = Folders()

        # $$ OFFLINE DATA STRUCTURES $$ #
        # Maximum reduced order space dimension to be used for the stopping criterion in the basis selection
        self.Nmax = 0
        # Tolerance to be used for the stopping criterion in the basis selection
        self.tol = 0.
        # Training set
        self.training_set = ParameterSpaceSubset()
        # I/O
        self.folder["training_set"] = os.path.join(self.folder_prefix,
                                                   "training_set")

        # $$ ERROR ANALYSIS AND SPEEDUP ANALYSIS DATA STRUCTURES $$ #
        # Testing set
        self.testing_set = ParameterSpaceSubset()
        # I/O
        self.folder["testing_set"] = os.path.join(self.folder_prefix,
                                                  "testing_set")
        self.folder["error_analysis"] = os.path.join(self.folder_prefix,
                                                     "error_analysis")
        self.folder["speedup_analysis"] = os.path.join(self.folder_prefix,
                                                       "speedup_analysis")

    # OFFLINE: set maximum reduced space dimension (stopping criterion)
    def set_Nmax(self, Nmax, **kwargs):
        self.Nmax = Nmax

    # OFFLINE: set tolerance (stopping criterion)
    def set_tolerance(self, tol, **kwargs):
        self.tol = tol

    # OFFLINE: set the elements in the training set.
    def initialize_training_set(self,
                                mu_range,
                                ntrain,
                                enable_import=True,
                                sampling=None,
                                **kwargs):
        # Create I/O folder
        self.folder["training_set"].create()
        # Test if can import
        import_successful = False
        if enable_import:
            try:
                self.training_set.load(self.folder["training_set"],
                                       "training_set")
            except OSError:
                import_successful = False
            else:
                import_successful = (len(self.training_set) == ntrain)
        if not import_successful:
            self.training_set.generate(mu_range, ntrain, sampling)
            # Export
            self.training_set.save(self.folder["training_set"], "training_set")
        return import_successful

    # ERROR ANALYSIS: set the elements in the testing set.
    def initialize_testing_set(self,
                               mu_range,
                               ntest,
                               enable_import=False,
                               sampling=None,
                               **kwargs):
        # Create I/O folder
        self.folder["testing_set"].create()
        # Test if can import
        import_successful = False
        if enable_import:
            try:
                self.testing_set.load(self.folder["testing_set"],
                                      "testing_set")
            except OSError:
                import_successful = False
            else:
                import_successful = (len(self.testing_set) == ntest)
        if not import_successful:
            self.testing_set.generate(mu_range, ntest, sampling)
            # Export
            self.testing_set.save(self.folder["testing_set"], "testing_set")
        return import_successful

    # Perform the offline phase of the reduced order model
    @abstractmethod
    def offline(self):
        raise NotImplementedError(
            "Please implement the offline phase of the reduced order model.")

    # Initialize data structures required for the offline phase
    def _init_offline(self):
        pass

    # Finalize data structures required after the offline phase
    def _finalize_offline(self):
        pass

    # Compute the error of the reduced order approximation with respect to the full order one
    # over the testing set
    @abstractmethod
    def error_analysis(self, N_generator=None, filename=None, **kwargs):
        raise NotImplementedError(
            "Please implement the error analysis of the reduced order model.")

    # Initialize data structures required for the error analysis phase
    def _init_error_analysis(self, **kwargs):
        pass

    # Finalize data structures required after the error analysis phase
    def _finalize_error_analysis(self, **kwargs):
        pass

    # Compute the speedup analysis of the reduced order approximation with respect to the full order one
    # over the testing set
    @abstractmethod
    def speedup_analysis(self, N_generator=None, filename=None, **kwargs):
        raise NotImplementedError(
            "Please implement the speedup analysis of the reduced order model."
        )

    # Initialize data structures required for the speedup analysis phase
    def _init_speedup_analysis(self, **kwargs):
        pass

    # Finalize data structures required after the speedup analysis phase
    def _finalize_speedup_analysis(self, **kwargs):
        pass
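`ReductionMethod` is abstract, so a concrete reduction method has to provide `offline`, `error_analysis` and `speedup_analysis`. A minimal, purely illustrative subclass with placeholder bodies (not RBniCS code), just enough to instantiate the class and exercise the training and testing set initialization above:

class DummyReductionMethod(ReductionMethod):
    # Illustrative subclass: the three abstract methods are satisfied with
    # placeholders so that the base-class machinery can be exercised.
    def offline(self):
        self._init_offline()
        for mu in self.training_set:
            pass   # placeholder: solve the truth problem at mu and enrich the basis
        self._finalize_offline()

    def error_analysis(self, N_generator=None, filename=None, **kwargs):
        pass       # placeholder: compare reduced and truth solutions over self.testing_set

    def speedup_analysis(self, N_generator=None, filename=None, **kwargs):
        pass       # placeholder: time reduced vs. truth solves over self.testing_set

An instance would then call initialize_training_set(mu_range, ntrain) before offline(), and initialize_testing_set(mu_range, ntest) before either analysis.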
Example #14
 def __init__(self):
     self.parameter_space_subset = ParameterSpaceSubset()
Example #15
def get_set(name):
    set_ = ParameterSpaceSubset()
    set_.load("sets", name)
    return set_
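Together with `initialize_set` from Example #11, this gives a simple persist/reload round trip; a hedged usage sketch (the cardinalities are illustrative, and `mu_range` is the module-level range assumed by Example #11):

# Hypothetical round trip using the helpers from Examples #11 and #15.
import os
os.makedirs("sets", exist_ok=True)    # ensure the output folder exists before saving
initialize_set(100, "training_set")   # equispaced samples over mu_range
initialize_set(50, "testing_set")     # uniformly random samples over mu_range
training_set = get_set("training_set")
testing_set = get_set("testing_set")
print(len(training_set), len(testing_set))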