Example No. 1
    def __init__(self, solveFullProblem):
        self.radix = 'sobolgfunction'

        self.numDims = 8
        self.solveFullProblem = solveFullProblem

        if self.solveFullProblem:
            self.effectiveDims = self.numDims
        else:
            self.effectiveDims = 4

        # --------------------------------------------------------
        # set distributions of the input parameters
        # --------------------------------------------------------
        builder = ProbabilisticSpaceSGpp(self.effectiveDims)
        self.params = builder.uniform()

        # define input space
        self.rv_trans = define_homogeneous_input_space('uniform',
                                                       self.effectiveDims,
                                                       ranges=[0, 1])

        # --------------------------------------------------------
        # simulation function
        # --------------------------------------------------------
        bs = [1, 2, 5, 10, 20, 50, 100, 500]

        def g(x, a):
            return (np.abs(4 * x - 2) + a) / (a + 1)

        def f(xs, bs, **kws):
            return np.prod([g(x, b) for x, b in zip(xs, bs)])

        if solveFullProblem:
            self.simulation = lambda x, **kws: f(x, bs, **kws)
        else:
            self.simulation = lambda x, **kws: f(
                np.append(x, 0.5 * np.ones(len(bs) - len(x))), bs, **kws)

        # --------------------------------------------------------
        # analytic reference values
        # --------------------------------------------------------
        def vari(i):
            return 1. / (3 * (1 + bs[i])**2)

        def var():
            return np.prod([vari(i) + 1.0 for i in range(self.numDims)]) - 1.0

        self.var = var()

        def sobol_index(ixs):
            return np.prod([vari(i) for i in ixs]) / self.var

        self.sobol_indices = {}
        for k in range(self.numDims):
            for perm in combinations(list(range(self.numDims)), r=k + 1):
                self.sobol_indices[tuple(perm)] = sobol_index(perm)

        self.total_effects = computeTotalEffects(self.sobol_indices)
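For reference, the analytic values set up in this constructor follow the standard Sobol G-function formulas used in the code above: each factor g_i(x) = (|4x - 2| + b_i) / (1 + b_i) on U[0, 1] contributes a partial variance V_i = 1 / (3 (1 + b_i)^2), the total variance is V = prod_i (1 + V_i) - 1, and the Sobol index of an index set u is S_u = prod_{i in u} V_i / V. The following standalone sketch reproduces those reference values with plain NumPy; the helper names are illustrative and not part of the class above.

import numpy as np
from itertools import combinations

bs = [1, 2, 5, 10, 20, 50, 100, 500]

def partial_variance(b):
    # V_i = 1 / (3 * (1 + b_i)^2) for one G-function factor
    return 1.0 / (3.0 * (1.0 + b) ** 2)

# total variance V = prod_i (1 + V_i) - 1
V = np.prod([1.0 + partial_variance(b) for b in bs]) - 1.0

# Sobol index of an index set u: S_u = prod_{i in u} V_i / V
sobol_indices = {}
for k in range(len(bs)):
    for perm in combinations(range(len(bs)), r=k + 1):
        sobol_indices[perm] = np.prod([partial_variance(bs[i]) for i in perm]) / V

print(V, sum(sobol_indices.values()))  # the indices sum to ~1 by construction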
Example No. 2
    def run_adaptive_sparse_grid(self, gridType, level, maxGridSize, refinement,
                                 boundaryLevel=None, isFull=False, out=False,
                                 plot=False):

        test_samples, test_values = self.getTestSamples()

        # ----------------------------------------------------------
        # define the learner
        # ----------------------------------------------------------
        uqManager = TestEnvironmentSG().buildSetting(self.params,
                                                     self.simulation,
                                                     level,
                                                     gridType,
                                                     deg=20,
                                                     maxGridSize=maxGridSize,
                                                     isFull=isFull,
                                                     adaptive=refinement,
                                                     adaptPoints=10,
                                                     adaptRate=0.05,
                                                     epsilon=1e-10,
                                                     boundaryLevel=boundaryLevel)
        # ----------------------------------------------
        # first run
        while uqManager.hasMoreSamples():
            uqManager.runNextSamples()

        # ----------------------------------------------------------
        # specify ASGC estimator
        # ----------------------------------------------------------
        analysis = ASGCAnalysisBuilder().withUQManager(uqManager)\
                                        .withAnalyticEstimationStrategy()\
                                        .andGetResult()
        analysis.setVerbose(False)
        # ----------------------------------------------------------
        # expectation values and variances
        sg_mean, sg_var = analysis.mean(), analysis.var()
        stats = {}
        iterations = uqManager.getKnowledge().getAvailableIterations()
        for k, iteration in enumerate(iterations):
            # ----------------------------------------------------------
            # estimated anova decomposition
            anova = analysis.getAnovaDecomposition(iteration=iteration,
                                                   nk=len(self.params))
            # estimate the l2 error
            grid, alpha = uqManager.getKnowledge().getSparseGridFunction(iteration=iteration)
            test_values_pred = evalSGFunction(grid, alpha, test_samples)
            l2test, l1test, maxErrorTest, meanError, varError = \
                self.getErrors(test_values, test_values_pred,
                               sg_mean[iteration][0], sg_var[iteration][0])
            # ----------------------------------------------------------
            # main effects
            sobol_indices = anova.getSobolIndices()
            total_effects = computeTotalEffects(sobol_indices)

            print("-" * 60)
            print("iteration=%i, N=%i" % (iteration, grid.getSize()))
            print("E[x] = %g ~ %g (err=%g)" % (self.E_ana[0], sg_mean[iteration]["value"],
                                               np.abs(self.E_ana[0] - sg_mean[iteration]["value"])))
            print("V[x] = %g ~ %g (err=%g)" % (self.V_ana[0], sg_var[iteration]["value"],
                                               np.abs(self.V_ana[0] - sg_var[iteration]["value"])))

            stats[grid.getSize()] = {'num_model_evaluations': grid.getSize(),
                                     'l2test': l2test,
                                     'l1test': l1test,
                                     'maxErrorTest': maxErrorTest,
                                     'mean_error': meanError,
                                     'var_error': varError,
                                     'mean_estimated': sg_mean[iteration]["value"],
                                     'var_estimated': sg_var[iteration]["value"],
                                     'sobol_indices_estimated': sobol_indices,
                                     'total_effects_estimated': total_effects}

            if plot:
                self.plotResultsSG(grid, alpha, level,
                                   maxGridSize, refinement,
                                   iteration, out)

        if out:
            # store results
            filename = os.path.join(self.pathResults,
                                    "%s_%s_d%i_%s_l%i_Nmax%i_r%s_N%i.pkl" % (self.radix,
                                                                             "sg" if not isFull else "fg",
                                                                             self.numDims,
                                                                             grid.getTypeAsString(),
                                                                             level,
                                                                             maxGridSize,
                                                                             refinement,
                                                                             grid.getSize()))
            fd = open(filename, "wb")
            pkl.dump({'surrogate': 'sg',
                      'model': "full" if self.numDims == 4 else "reduced",
                      'num_dims': self.numDims,
                      'grid_type': grid.getTypeAsString(),
                      'level': level,
                      'max_grid_size': maxGridSize,
                      'is_full': isFull,
                      'refinement': refinement,
                      'mean_analytic': self.E_ana[0],
                      'var_analytic': self.V_ana[0],
                      'results': stats},
                     fd)
            fd.close()
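computeTotalEffects is imported from the surrounding test utilities and is not shown in these examples. Assuming the Sobol indices are stored as a dict keyed by dimension-index tuples (as in Example No. 1), a minimal sketch of what such a helper could look like is given below; the actual implementation used here may differ.

def compute_total_effects(sobol_indices):
    # Hypothetical stand-in for computeTotalEffects: the total effect of
    # dimension i is the sum of all Sobol indices whose index set contains i.
    total_effects = {}
    for index_set, value in sobol_indices.items():
        for i in index_set:
            total_effects[i] = total_effects.get(i, 0.0) + value
    return total_effects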
Example No. 3
    def run_regular_sparse_grid(self, gridType, level, maxGridSize,
                                boundaryLevel=1,
                                isFull=False,
                                out=False,
                                plot=False):
        np.random.seed(1234567)

        test_samples, test_values = self.getTestSamples()

        stats = {}
        while True:
            print("-" * 80)
            print("level = %i" % level)
            uqManager = TestEnvironmentSG().buildSetting(self.params,
                                                         self.simulation,
                                                         level,
                                                         gridType,
                                                         deg=20,
                                                         maxGridSize=maxGridSize,
                                                         isFull=isFull,
                                                         boundaryLevel=boundaryLevel)

            if uqManager.sampler.getSize() > maxGridSize:
                print("DONE: %i > %i" % (uqManager.sampler.getSize(), maxGridSize))
                break

            # ----------------------------------------------
            # first run
            while uqManager.hasMoreSamples():
                uqManager.runNextSamples()

            # ----------------------------------------------------------
            # specify ASGC estimator
            analysis = ASGCAnalysisBuilder().withUQManager(uqManager)\
                                            .withAnalyticEstimationStrategy()\
                                            .andGetResult()

            analysis.setVerbose(False)
            # ----------------------------------------------------------
            # expectation values and variances
            sg_mean, sg_var = analysis.mean(), analysis.var()

            # ----------------------------------------------------------
            # estimate the l2 error
            grid, alpha = uqManager.getKnowledge().getSparseGridFunction()
            test_values_pred = evalSGFunction(grid, alpha, test_samples)
            l2test, l1test, maxErrorTest, meanError, varError = \
                self.getErrors(test_values, test_values_pred,
                               sg_mean["value"], sg_var["value"])
            print("-" * 60)
            print("test:  |.|_2 = %g" % l2test)
            print("E[x] = %g ~ %g (err=%g)" % (self.E_ana[0], sg_mean["value"],
                                               np.abs(self.E_ana[0] - sg_mean["value"])))
            print("V[x] = %g ~ %g (err=%g)" % (self.V_ana[0], sg_var["value"],
                                               np.abs(self.V_ana[0] - sg_var["value"])))
            # ----------------------------------------------------------
            # estimated anova decomposition
            if self.inputSpace != "sgde":
                anova = analysis.getAnovaDecomposition(nk=len(self.params))
                sobol_indices = anova.getSobolIndices()
                total_effects = computeTotalEffects(sobol_indices)
            else:
                sobol_indices = {}
                total_effects = {}
            # ----------------------------------------------------------
            stats[level] = {'num_model_evaluations': grid.getSize(),
                            'l2test': l2test,
                            'l1test': l1test,
                            'maxErrorTest': maxErrorTest,
                            'mean_error': meanError,
                            'var_error': varError,
                            'mean_estimated': sg_mean["value"],
                            'var_estimated': sg_var["value"],
                            'sobol_indices_estimated': sobol_indices,
                            'total_effects_estimated': total_effects}

            if plot:
                self.plotResultsSG(grid, alpha, level, maxGridSize, False, 0, out)
            level += 1

        if out:
            # store results
            filename = os.path.join(self.pathResults,
                                    "%s_%s_d%i_%s_Nmax%i_r%i_N%i.pkl" % (self.radix,
                                                                         "sg" if not isFull else "fg",
                                                                         self.numDims,
                                                                         grid.getTypeAsString(),
                                                                         maxGridSize,
                                                                         False,
                                                                         grid.getSize()))
            fd = open(filename, "wb")
            pkl.dump({'surrogate': 'sg',
                      'num_dims': self.numDims,
                      'grid_type': grid.getTypeAsString(),
                      'max_grid_size': maxGridSize,
                      'level': level,
                      'boundaryLevel': boundaryLevel,
                      'is_full': isFull,
                      'refinement': False,
                      'mean_analytic': self.E_ana[0],
                      'var_analytic': self.V_ana[0],
                      'results': stats},
                     fd)
            fd.close()
Example No. 4
    def run_pce(self,
                expansion="total_degree",
                sampling_strategy="leja",
                maxNumSamples=3000,
                out=False,
                plot=False):
        np.random.seed(1234567)

        test_samples, test_values = self.getTestSamples(dtype="prob")
        test_samples = test_samples.T

        stats = {}
        degree_1d = 1
        while True:
            # define pce
            pce = PolynomialChaosExpansion()
            pce.set_random_variable_transformation(self.rv_trans, FULL_TENSOR_BASIS)
            pce.set_orthonormal(True)

            builder = PCEBuilderHeat(self.numDims)
            builder.define_expansion(pce, expansion, degree_1d)

            num_samples = num_terms = pce.num_terms()

            if num_samples > maxNumSamples:
                print("DONE: %i > %i" % (num_samples, maxNumSamples))
                break

            if sampling_strategy == "gauss":
                quadrature_strategy = builder.define_full_tensor_samples("uniform", self.rv_trans, expansion)
            elif sampling_strategy == "fekete":
                samples = 2 * np.random.random((self.numDims, 30000)) - 1.
                quadrature_strategy = builder.define_approximate_fekete_samples(samples, pce, self.rv_trans)
                num_samples = int(num_samples * 1.0)
            elif sampling_strategy == "leja":
                samples = 2 * np.random.random((self.numDims, 30000)) - 1.
                quadrature_strategy = builder.define_approximate_leja_samples(samples, pce, self.rv_trans)
                num_samples = int(num_samples * 1.0)
            elif sampling_strategy == "gauss_leja":
                quadrature_strategy = builder.define_full_tensor_samples("uniform", self.rv_trans, expansion)
                samples = quadrature_strategy.get_quadrature_samples((degree_1d + 1) ** self.numDims, degree_1d + 1)
                quadrature_strategy = builder.define_approximate_leja_samples(samples, pce, self.rv_trans)
                num_samples = int((self.numDims - 1) * num_terms)
            else:
                raise AttributeError("sampling strategy '%s' is unknown" % sampling_strategy)

            samples = quadrature_strategy.get_quadrature_samples(num_samples, degree_1d)
            train_samples, train_values = builder.eval_samples(samples, self.rv_trans, self.simulation)

            # compute coefficients of pce
            _, residual, _, cond_preconditioned = \
                compute_coefficients(pce, train_samples, train_values, "christoffel")
            _, _, train_values_pred = eval_pce(pce, train_samples)
            l2train = np.sqrt(np.mean((train_values - train_values_pred) ** 2))
            _, _, test_values_pred = eval_pce(pce, test_samples)
            l2test, l1test, maxErrorTest, meanError, varError = \
                self.getErrors(test_values, test_values_pred,
                               pce.mean(), pce.variance())
            ###################################################################################################
            print("-" * 60)
            print("degree = %i, #terms = %i, #samples = %i" % (degree_1d, num_terms, num_samples))
            print("train: |.|_2 = %g (res=%g)" % (l2train, residual))
            print("test:  |.|_2 = %g" % l2test)
            print("cond:  %g" % cond_preconditioned)
            print("E[x] = %g ~ %g (err=%g)" % (self.E_ana[0], pce.mean(),
                                               np.abs(self.E_ana[0] - pce.mean())))
            print("V[x] = %g ~ %g (err=%g)" % (self.V_ana[0], pce.variance(),
                                               np.abs(self.V_ana[0] - pce.variance())))

            # get sobol indices
            sobol_indices = builder.getSortedSobolIndices(pce)
            total_effects = computeTotalEffects(sobol_indices)

            stats[num_samples] = {"sobol_indices_estimated": sobol_indices,
                                  "total_effects_estimated": total_effects,
                                  "var_estimated": pce.variance(),
                                  "mean_estimated": pce.mean(),
                                  'num_model_evaluations': num_samples,
                                  'degree_1d': degree_1d,
                                  'num_terms': num_terms,
                                  'l2train': l2train,
                                  'l2test': l2test,
                                  'l1test': l1test,
                                  'maxErrorTest': maxErrorTest,
                                  'mean_error': meanError,
                                  'var_error': varError,
                                  'cond_vand': cond_preconditioned}

            if plot:
                self.plotResultsPCE(pce, train_samples, expansion, sampling_strategy,
                                    num_samples, degree_1d, out)

            degree_1d = 2 * degree_1d + 1  # next degree in the sequence 1, 3, 7, 15, ...

        if out:
            # store results
            filename = os.path.join(self.pathResults,
                                    "%s_pce_d%i_%s_%s_N%i.pkl" % (self.radix,
                                                                  self.numDims,
                                                                  expansion,
                                                                  sampling_strategy,
                                                                  num_samples))
            fd = open(filename, "wb")
            pkl.dump({'surrogate': 'pce',
                      'num_dims': self.numDims,
                      'sampling_strategy': sampling_strategy,
                      'max_num_samples': maxNumSamples,
                      'expansion': expansion,
                      'mean_analytic': self.E_ana[0],
                      'var_analytic': self.V_ana[0],
                      'results': stats},
                     fd)
            fd.close()
Example No. 5
    def run_pce(self,
                expansion="total_degree",
                sampling_strategy="leja",
                degree_1d=2,
                out=False):
        np.random.seed(1234567)

        # define pce
        pce = PolynomialChaosExpansion()
        pce.set_random_variable_transformation(self.rv_trans,
                                               FULL_TENSOR_BASIS)
        pce.set_orthonormal(True)

        builder = PCEBuilderHeat(self.effectiveDims)
        builder.define_expansion(pce, expansion, degree_1d)

        num_samples = num_terms = pce.num_terms()
        if sampling_strategy == "full_tensor":
            quadrature_strategy = builder.define_full_tensor_samples(
                "uniform", self.rv_trans, expansion)
        elif sampling_strategy == "fekete":
            samples = 2 * np.random.random((self.effectiveDims, 30000)) - 1.
            quadrature_strategy = builder.define_approximate_fekete_samples(
                samples, pce, self.rv_trans)
            num_samples = int(num_samples * 1.6)
        elif sampling_strategy == "leja":
            samples = 2 * np.random.random((self.effectiveDims, 50000)) - 1.
            quadrature_strategy = builder.define_approximate_leja_samples(
                samples, pce, self.rv_trans)
            num_samples = 73  # 233  # int(num_samples * 1.6)
        else:
            raise AttributeError("sampling strategy '%s' is unknown" %
                                 sampling_strategy)

        samples = quadrature_strategy.get_quadrature_samples(
            num_samples, degree_1d)
        train_samples, train_values = builder.eval_samples(
            samples, self.rv_trans, self.simulation)

        samples = np.random.random((self.effectiveDims, 1000))
        test_samples, test_values = builder.eval_samples(
            samples, self.rv_trans, self.simulation)

        # compute coefficients of pce
        compute_coefficients(pce, train_samples, train_values, "christoffel")

        _, _, train_values_pred = eval_pce(pce, train_samples)
        l2train = np.sqrt(np.mean((train_values - train_values_pred)**2))
        print("train: |.|_2 = %g" % l2train)
        _, _, test_values_pred = eval_pce(pce, test_samples)
        l2test = np.sqrt(np.mean((test_values - test_values_pred)**2))
        print("test:  |.|_2 = %g" % l2test)
        ###################################################################################################
        print("-" * 60)
        print("#terms = %i" % num_terms)
        print("V[x] = %g ~ %g" % (self.var, pce.variance()))

        # get sobol indices
        sobol_indices = builder.getSortedSobolIndices(pce)
        total_effects = computeTotalEffects(sobol_indices)
        print(total_effects)
        if out:
            # store results
            filename = os.path.join(
                "results", "%s_pce_d%i_%s_deg%i_M%i_N%i.pkl" %
                (self.radix, self.effectiveDims, sampling_strategy, degree_1d,
                 num_terms, num_samples))
            fd = open(filename, "wb")
            pkl.dump(
                {
                    'surrogate': 'pce',
                    'model': "full" if self.effectiveDims == 4 else "reduced",
                    'num_dims': self.effectiveDims,
                    'sampling_strategy': sampling_strategy,
                    'degree_1d': degree_1d,
                    'expansion': "total_degree",
                    'num_model_evaluations': num_samples,
                    'num_terms': num_terms,
                    'l2test': l2test,
                    'l2train': l2train,
                    'var_estimated': pce.variance(),
                    'var_analytic': self.var,
                    'sobol_indices_analytic': self.sobol_indices,
                    'sobol_indices_estimated': sobol_indices,
                    'total_effects_analytic': self.total_effects,
                    'total_effects_estimated': total_effects
                }, fd)
            fd.close()

        return sobol_indices, num_samples
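Example No. 5 compares pce.variance() against the analytic self.var of the G-function. As an independent sanity check, the same variance can be estimated by plain Monte Carlo; the standalone sketch below is illustrative only and not part of the original test code (the helper name and seed are arbitrary).

import numpy as np

bs = np.array([1, 2, 5, 10, 20, 50, 100, 500], dtype=float)

def g_function(x, bs):
    # product of the G-function factors along the last axis
    return np.prod((np.abs(4.0 * x - 2.0) + bs) / (bs + 1.0), axis=-1)

rng = np.random.default_rng(0)
samples = rng.random((200000, len(bs)))      # uniform samples on [0, 1]^8
mc_var = g_function(samples, bs).var()
analytic_var = np.prod([1.0 + 1.0 / (3.0 * (1.0 + b) ** 2) for b in bs]) - 1.0
print(mc_var, analytic_var)                  # should agree to a few digits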
Example No. 6
    def run_sparse_grids(self,
                         gridType,
                         level,
                         maxGridSize,
                         isFull,
                         refinement=None,
                         out=False):
        # ----------------------------------------------------------
        # define the learner
        # ----------------------------------------------------------
        uqManager = TestEnvironmentSG().buildSetting(self.params,
                                                     self.simulation,
                                                     level,
                                                     gridType,
                                                     deg=10,
                                                     maxGridSize=maxGridSize,
                                                     isFull=isFull,
                                                     adaptive=refinement,
                                                     adaptPoints=3,
                                                     epsilon=1e-3)

        # ----------------------------------------------
        # first run
        while uqManager.hasMoreSamples():
            uqManager.runNextSamples()

        # ----------------------------------------------------------
        # specify ASGC estimator
        # ----------------------------------------------------------
        analysis = ASGCAnalysisBuilder().withUQManager(uqManager)\
                                        .withAnalyticEstimationStrategy()\
                                        .andGetResult()

        # ----------------------------------------------------------
        # expectation values and variances
        sg_mean, sg_var = analysis.mean(), analysis.var()

        print("-" * 60)
        print("V[x] = %g ~ %s" % (self.var, sg_var))

        iterations = uqManager.getKnowledge().getAvailableIterations()
        stats = [None] * len(iterations)
        for k, iteration in enumerate(iterations):
            # ----------------------------------------------------------
            # estimated anova decomposition
            anova = analysis.getAnovaDecomposition(iteration=iteration,
                                                   nk=len(self.params))

            # estimate the l2 error
            test_samples = np.random.random((1000, self.effectiveDims))
            test_values = np.ndarray(1000)
            for i, sample in enumerate(test_samples):
                test_values[i] = self.simulation(sample)
            grid, alpha = uqManager.getKnowledge().getSparseGridFunction()
            test_values_pred = evalSGFunction(grid, alpha, test_samples)
            l2test = np.sqrt(np.mean((test_values - test_values_pred)**2))
            # ----------------------------------------------------------
            # main effects
            sobol_indices = anova.getSobolIndices()
            total_effects = computeTotalEffects(sobol_indices)

            stats[k] = {
                'num_model_evaluations': grid.getSize(),
                'l2test': l2test,
                'var_estimated': sg_var[0],
                'var_analytic': self.var,
                'sobol_indices_estimated': sobol_indices,
                'total_effects_estimated': total_effects
            }

        if out:
            # store results
            filename = os.path.join(
                "results", "%s_%s_d%i_%s_l%i_Nmax%i_%s_N%i.pkl" %
                (self.radix, "sg" if not isFull else "fg", self.effectiveDims,
                 grid.getTypeAsString(), level, maxGridSize, refinement,
                 grid.getSize()))
            fd = open(filename, "wb")
            pkl.dump(
                {
                    'surrogate': 'sg',
                    'model': "full" if self.effectiveDims == 4 else "reduced",
                    'num_dims': self.effectiveDims,
                    'grid_type': grid.getTypeAsString(),
                    'level': level,
                    'max_grid_size': maxGridSize,
                    'is_full': isFull,
                    'refinement': refinement,
                    'sobol_indices_analytic': self.sobol_indices,
                    'total_effects_analytic': self.total_effects,
                    'results': stats
                }, fd)
            fd.close()

        return sobol_indices, grid.getSize()
Example No. 7
    def __init__(self):
        self.radix = 'ishigami'

        # --------------------------------------------------------
        # set distributions of the input parameters
        # --------------------------------------------------------
        builder = ParameterBuilder()
        up = builder.defineUncertainParameters()
        up.new().isCalled('x').withUniformDistribution(-np.pi, np.pi)
        up.new().isCalled('y').withUniformDistribution(-np.pi, np.pi)
        up.new().isCalled('z').withUniformDistribution(-np.pi, np.pi)
        self.params = builder.andGetResult()
        self.numDims = self.params.getStochasticDim()

        # --------------------------------------------------------
        # simulation function
        # --------------------------------------------------------
        def f(xs, a=7, b=0.1, **kws):
            x1, x2, x3 = xs
            return np.sin(x1) + a * np.sin(x2)**2 + b * x3**4 * np.sin(x1)

        self.simulation = f

        # --------------------------------------------------------
        # analytic reference values
        # --------------------------------------------------------

        def var(a=7, b=0.1):
            return a * a / 8. + b * np.pi**4 / 5. + b * b * np.pi**8 / 18. + 0.5

        def vi(i, a=7, b=0.1):
            if i == 0:
                return b * np.pi**4 / 5. + b * b * np.pi**8 / 50. + 0.5
            elif i == 1:
                return a * a / 8.
            else:
                return 0.0

        def vij(i, j, a=7, b=0.1):
            if i == 0 and j == 2:
                return 8 * b * b * np.pi**8 / 225.
            else:
                return 0.0

        def vijk(i, j, k):
            return 0.0

        def sobol_index(perm):
            if len(perm) == 1:
                return vi(perm[0]) / var()
            elif len(perm) == 2:
                return vij(perm[0], perm[1]) / var()
            elif len(perm) == 3:
                return vijk(perm[0], perm[1], perm[2]) / var()
            else:
                raise AttributeError("len of perm must be in {1, 2, 3}")

        self.var = var()
        self.sobol_indices = {}
        for k in range(self.numDims):
            for perm in combinations(list(range(self.numDims)), r=k + 1):
                self.sobol_indices[tuple(perm)] = sobol_index(perm)

        self.total_effects = computeTotalEffects(self.sobol_indices)
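The analytic Ishigami values defined above can be cross-checked in isolation: with a = 7 and b = 0.1 the closed-form partial variances are V_1 = b*pi^4/5 + b^2*pi^8/50 + 1/2, V_2 = a^2/8, and V_13 = 8*b^2*pi^8/225, all other terms vanish, and the three corresponding Sobol indices sum to one. A standalone sketch of that check (plain NumPy, illustrative only):

import numpy as np

a, b = 7.0, 0.1
V = a**2 / 8. + b * np.pi**4 / 5. + b**2 * np.pi**8 / 18. + 0.5
S1 = (b * np.pi**4 / 5. + b**2 * np.pi**8 / 50. + 0.5) / V
S2 = (a**2 / 8.) / V
S13 = (8 * b**2 * np.pi**8 / 225.) / V
print(S1, S2, S13, S1 + S2 + S13)  # the sum should be 1.0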