def test_HighDensityRegionAlgorithm1D(self):
    """1D check of the HDR algorithm: contour views and outlier indices."""
    # Reproducible minimum-volume level-set estimation by sampling.
    ot.RandomGenerator.SetSeed(0)
    sampling_size = 500
    ot.ResourceMap.SetAsBool(
        "Distribution-MinimumVolumeLevelSetBySampling", True)
    ot.ResourceMap.Set(
        "Distribution-MinimumVolumeLevelSetSamplingSize",
        str(sampling_size))
    # Dataset: 100 standard-normal points with a fixed seed.
    ot.RandomGenerator.SetSeed(1976)
    data = ot.Normal().getSample(100)
    # Kernel-smoothing fit of the sample density.
    smoother = ot.KernelSmoothing()
    fitted = smoother.build(data)
    hdr = othdr.HighDensityRegionAlgorithm(data, fitted)
    hdr.run()
    # Draw contour/inliers/outliers variants of the plot.
    otv.View(hdr.draw())
    otv.View(hdr.draw(drawInliers=True))
    otv.View(hdr.draw(drawOutliers=False))
    # The detected outliers are fixed by the seeds above.
    detected = hdr.computeIndices()
    assert_equal(detected, [16, 24, 33, 49, 71, 84])
def testBaseFunctionalityMean(self):
    """Round-trip check: project(lift(coeffs)) recovers the coefficients.

    Draws random Karhunen-Loeve coefficients, lifts them to a field, a
    sample and a process sample, projects each representation back, and
    checks the recovered coefficients match the originals to 7 places.
    """
    n_modes = self.AKLR1.getSizeModes()
    randVect = ot.ComposedDistribution([ot.Normal()] * n_modes)
    ot.RandomGenerator_SetSeed(68173484786)
    randPoint = randVect.getRealization()
    print('random point is', randPoint)
    ot.RandomGenerator_SetSeed(681348445786)
    randSample = randVect.getSample(10)
    # Lift the coefficients into the different representations.
    field_pt = self.AKLR1.liftAsField(randPoint)
    smpl_pt = self.AKLR1.liftAsSample(randPoint)
    procsamp_samp = self.AKLR1.liftAsProcessSample(randSample)
    print(smpl_pt[0])
    # Project each representation back to coefficient space.
    coeffs_field_pt = self.AKLR1.project(field_pt)
    coeffs_smpl_pt = self.AKLR1.project(smpl_pt)
    coeffs_procsamp_samp = self.AKLR1.project(procsamp_samp)
    print('The modes are as follows,', self.AKLR1.__mode_count__)
    # Field round trip: ot.Point objects are directly iterable, so no
    # list() materialization is needed around zip().
    for i, (a, b) in enumerate(zip(randPoint, coeffs_field_pt)):
        msg = 'assertAlmostEqual Failed for element {} of list, with values {} and {}'.format(
            i, a, b)
        print('a_field:', a, 'b_field:', b)
        self.assertAlmostEqual(a, b, 7, msg)
    print('From coeffs to fields to coeffs OK')
    # Sample round trip.
    for i, (a, b) in enumerate(zip(randPoint, coeffs_smpl_pt)):
        msg = 'assertAlmostEqual Failed for element {} of list, with values {} and {}'.format(
            i, a, b)
        print('a_sample:', a, 'b_sample:', b)
        self.assertAlmostEqual(a, b, 7, msg)
    print('From coeffs to samples to coeffs OK')
    # Process-sample round trip: one coefficient vector per realization.
    for j in range(randSample.getSize()):
        pt_j = randSample[j]
        pt_proc = coeffs_procsamp_samp[j]
        for i, (a, b) in enumerate(zip(pt_j, pt_proc)):
            msg = 'assertAlmostEqual Failed for element {} of list, with values {} and {}'.format(
                i, a, b)
            self.assertAlmostEqual(a, b, 7, msg)
    print('From coeffs to process samples to coeffs OK')
    print('Tests Passed!')
#! /usr/bin/env python # coding: utf-8 from __future__ import print_function import openturns as ot import openturns.testing import persalys import os myStudy = persalys.Study('myStudy') # data filename = 'données.csv' ot.RandomGenerator_SetSeed(0) ot.Normal(3).getSample(10).exportToCSVFile(filename) inColumns = [0, 2] # Model 1 model = persalys.DataModel('myDataModel', filename, inColumns) myStudy.add(model) print(model) # Model 2 model2 = persalys.SymbolicPhysicalModel( 'SM', [persalys.Input('A'), persalys.Input('B')], [persalys.Output('S')], ['A+B+2']) myStudy.add(model2) importedDOE = persalys.ImportedDesignOfExperiment('doeI', model2, filename, inColumns) myStudy.add(importedDOE)
model_2D = ot.ExponentialModel([1, 1], [1]) ##Now finally let's get our two processes and the ditribution. ### The 1D Gaussian process process_1D = ot.GaussianProcess(model_1D, mesh_1D) ### The 2D Gaussian process process_2D = ot.GaussianProcess(model_2D, mesh_2D) ### The normal distribution: scalar_distribution = ot.Normal() ## Now the we have our processes and distributions, let's first evaluate the function ## without any use of a wrapper or anything. #### First get fields and samples from our processes and distributions ot.RandomGenerator_SetSeed(888) field_1D = process_1D.getRealization() field_2D = process_2D.getRealization() scalar_0 = [scalar_distribution.getRealization()] print('For field 1D:\n', field_1D, '\n') print('For field 2D:\n', field_2D, '\n') print('For scalar :\n', scalar_0, '\n') output_dummy_0 = dummyFunction2Wrap(field_2D, field_1D, scalar_0) print('Output is:\n', output_dummy_0) ## Now that we have our processes defined, our realizations and the corresponding output ## we can create our aggregated object, wrap our function, and check if it behaves accordingly ### For that we will first have to do the Karhunen-Loeve decomposition of the processes. algo_kl_process_1D = ot.KarhunenLoeveP1Algorithm(