def test_design_function(self):

    obs = TestSpaceTimeKroneckerElement.SimulatedObservationStructure()
    spde_space = SphereMeshSPDE(level=1)

    spde_time = LatticeSPDE.construct(
        dimension_specification=[(23, 27, 5)],
        basis_function=WendlandC4Basis(),
        overlap_factor=2.5)

    design = SpaceTimeKroneckerDesign(
        observationstructure=obs,
        spatial_model=spde_space,
        alpha=2,
        temporal_model=spde_time,
        H=1.01)

    # Get and check indices of nonzero design elements
    A_space = spde_space.build_A(obs.location_polar_coordinates())
    observation_indices, vertex_indices = A_space.sorted_indices().nonzero()
    numpy.testing.assert_equal(observation_indices, [0, 0, 0, 1, 1, 1])
    self.assertEqual((6, ), vertex_indices.shape)

    # Vertices of observations 0 and 1
    # (one row per vertex, one column per coordinate)
    vertices0 = spde_space.triangulation.points[vertex_indices[0:3], :]
    vertices1 = spde_space.triangulation.points[vertex_indices[3:6], :]

    # Multiply vertices by the weights from A and sum to get cartesian locations
    testpoint0 = A_space[0, vertex_indices[0:3]] * vertices0
    testpoint1 = A_space[1, vertex_indices[3:6]] * vertices1

    # Check results correspond to original polar coordinates
    numpy.testing.assert_almost_equal(cartesian_to_polar2d(testpoint0), [[15.0, -7.0]])
    numpy.testing.assert_almost_equal(cartesian_to_polar2d(testpoint1), [[5.0, 100.0]])

    # So the function with those indices set should give required values,
    # provided it's evaluated at time index == 24
    state_vector = numpy.zeros((210, ))
    state_vector[42 + vertex_indices[0:3]] = 40.0
    state_vector[42 + vertex_indices[3:6]] = 28.0
    numpy.testing.assert_almost_equal(design.design_function(state_vector), [40.0, 28.0])

    # But if we change to a time far away then we get nothing
    state_vector = numpy.zeros((210, ))
    state_vector[(42 * 4) + vertex_indices[0:3]] = 40.0
    state_vector[(42 * 4) + vertex_indices[3:6]] = 28.0
    numpy.testing.assert_almost_equal(design.design_function(state_vector), [0.0, 0.0])
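# Illustrative sketch (not part of the original test): the indexing above assumes the
# Kronecker state vector is ordered in blocks of the 42 spatial basis functions, one block
# per temporal node, with temporal nodes at 23..27 (hence 5 * 42 = 210 entries). Under that
# assumption an observation at time 24 reads from the block at offset 42, while the last
# node (27) sits at offset 42 * 4, which is why the second case evaluates to zero.
def _sketch_kronecker_state_offset(time_node, temporal_nodes=(23, 24, 25, 26, 27), n_spatial=42):
    """Offset of the spatial block for a given temporal node (illustrative assumption only)."""
    return temporal_nodes.index(time_node) * n_spatial

# _sketch_kronecker_state_offset(24) == 42 and _sketch_kronecker_state_offset(27) == 42 * 4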
def test_wrap(self):

    # Model time resolution
    node_spacing = 1.0 / 12.0

    # Model start/end bounds as decimal years
    model_start = 1850.0
    model_end = 1851.0

    # Number of model nodes: 12 nodes within the year
    n_nodes = 12

    element = AnnualKroneckerElement(n_triangulation_divisions=1,
                                     alpha=2,
                                     starttime=model_start,
                                     endtime=model_end,
                                     n_nodes=n_nodes,
                                     overlap_factor=2.5,
                                     H=1.0,
                                     wrap_dimensions=[True, ])

    # Check unit diagonal basis function values when observations fall at the beginning
    # of a year and at each node of the spatial triangulation
    node_locations_space = cartesian_to_polar2d(element.spatial_model.triangulation.points)
    node_locations_time = element.temporal_model.lattice.nodes

    obs = TestSpaceTimeKroneckerElement.SimulatedObservationStructure(node_locations_space, datetime(1849, 1, 1))
    numpy.testing.assert_almost_equal(numpy.ones(obs.number_of_observations()),
                                      element.element_design(obs).design_matrix().diagonal())

    obs = TestSpaceTimeKroneckerElement.SimulatedObservationStructure(node_locations_space, datetime(1850, 1, 1))
    numpy.testing.assert_almost_equal(numpy.ones(obs.number_of_observations()),
                                      element.element_design(obs).design_matrix().diagonal())

    obs = TestSpaceTimeKroneckerElement.SimulatedObservationStructure(node_locations_space, datetime(1851, 1, 1))
    numpy.testing.assert_almost_equal(numpy.ones(obs.number_of_observations()),
                                      element.element_design(obs).design_matrix().diagonal())
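# Illustrative sketch (an assumption, not the AnnualKroneckerElement implementation): with
# wrap_dimensions=[True, ], times outside the modelled year are expected to be mapped back
# into [model_start, model_end), so the start of 1849, 1850 and 1851 all coincide with the
# same temporal node and therefore all give unit diagonal design values above.
def _sketch_wrap_time(t, start=1850.0, end=1851.0):
    """Map a decimal-year time onto the wrapped model interval (illustrative only)."""
    period = end - start
    return start + ((t - start) % period)

# _sketch_wrap_time(1849.0) == 1850.0 and _sketch_wrap_time(1851.0) == 1850.0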
def test_build_A_space(self):

    spde = SphereMeshSPDE(level=1, sparse_format=SPARSEFORMAT)

    design = SeasonalElementDesign(
        TestSeasonalElement.SimulatedObservationStructure(),
        spde,
        n_harmonics=3,
        include_local_mean=True)

    A = design.build_A_space()

    # Check sparse format
    self.assertEqual(SPARSEFORMAT, A.getformat())

    # Shape should be number of obs x number of vertices (basis functions)
    self.assertEqual((2, 42), A.shape)

    # Two triangles means 6 points are non-zero
    self.assertEqual(6, len(A.nonzero()[0]))

    # Get and check indices of nonzero design elements
    observation_indices, vertex_indices = A.sorted_indices().nonzero()
    numpy.testing.assert_equal(observation_indices, [0, 0, 0, 1, 1, 1])
    self.assertEqual((6, ), vertex_indices.shape)

    # Vertices of observations 0 and 1
    # (one row per vertex, one column per coordinate)
    vertices0 = spde.triangulation.points[vertex_indices[0:3], :]
    vertices1 = spde.triangulation.points[vertex_indices[3:6], :]

    # Multiply vertices by the weights from A and sum to get cartesian locations
    testpoint0 = A[0, vertex_indices[0:3]] * vertices0
    testpoint1 = A[1, vertex_indices[3:6]] * vertices1

    # Check results correspond to original polar coordinates
    numpy.testing.assert_almost_equal(cartesian_to_polar2d(testpoint0), [[15.0, -7.0]])
    numpy.testing.assert_almost_equal(cartesian_to_polar2d(testpoint1), [[5.0, 100.0]])
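# Illustrative property behind the testpoint reconstruction above (an assumption about
# build_A_space, not asserted by the original test): each row of A is expected to hold
# barycentric-style weights for the three vertices of the enclosing triangle, so the
# weights sum to one and weighting the vertex positions recovers the observation location.
def _sketch_check_row_weights_sum_to_one(A, row=0):
    """Sum the nonzero weights in one row of a scipy sparse design matrix (illustrative only)."""
    row_weights = numpy.asarray(A.getrow(row).todense()).ravel()
    numpy.testing.assert_almost_equal(row_weights.sum(), 1.0)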
def test_mini_world_large_and_local(self):

    # Use a number of time steps
    number_of_simulated_time_steps = 30

    # Large-scale spatial variability
    simulated_large_variation = 10.0

    # Local variability
    simulated_local_variation = 1.0

    # Iterations to use
    number_of_solution_iterations = 5

    # Build system

    # Large-scale factor
    element_large = SpaceTimeKroneckerElement(
        n_triangulation_divisions=1,
        alpha=2,
        starttime=0,
        endtime=number_of_simulated_time_steps + 1,
        n_nodes=number_of_simulated_time_steps + 2,
        overlap_factor=2.5,
        H=1)
    initial_hyperparameters_large = SpaceTimeSPDEHyperparameters(
        space_log_sigma=0.0,
        space_log_rho=numpy.log(numpy.radians(5.0)),
        time_log_rho=numpy.log(1.0 / 365.0))
    component_large = SpaceTimeComponent(
        ComponentStorage_InMemory(element_large, initial_hyperparameters_large),
        SpaceTimeComponentSolutionStorage_InMemory())

    # And a local process
    component_local = SpatialComponent(
        ComponentStorage_InMemory(
            LocalElement(n_triangulation_divisions=3),
            LocalHyperparameters(log_sigma=0.0, log_rho=numpy.log(numpy.radians(5.0)))),
        SpatialComponentSolutionStorage_InMemory())

    analysis_system = AnalysisSystem([component_large, component_local],
                                     ObservationSource.TMEAN,
                                     log=StringIO())
    # analysis_system = AnalysisSystem([ component_large ], ObservationSource.TMEAN)
    # analysis_system = AnalysisSystem([ component_local ], ObservationSource.TMEAN)

    # use fixed locations from icosahedron
    fixed_locations = cartesian_to_polar2d(
        MeshIcosahedronSubdivision.build(3).points)

    # random measurement at each location
    numpy.random.seed(8976)
    field_basis = simulated_large_variation * numpy.random.randn(fixed_locations.shape[0])

    # some time function that varies over a year
    time_basis = numpy.cos(
        numpy.linspace(0.1, 1.75 * numpy.pi, number_of_simulated_time_steps))

    # kronecker product of the two
    large_scale_process = numpy.kron(field_basis, numpy.expand_dims(time_basis, 1))

    # Random local changes where mean change at each time is zero
    # local_process = simulated_local_variation * numpy.random.randn(large_scale_process.shape[0], large_scale_process.shape[1])
    # local_process -= numpy.tile(local_process.mean(axis=1), (local_process.shape[1], 1)).T
    local_process = numpy.zeros(large_scale_process.shape)
    somefield = simulated_local_variation * numpy.random.randn(1, large_scale_process.shape[1])
    somefield -= somefield.ravel().mean()
    local_process[10, :] = somefield
    local_process[11, :] = -somefield

    # Add the two processes
    measurement = large_scale_process + local_process

    # Simulated inputs
    simulated_input_loader = SimulatedInputLoader(fixed_locations, measurement, 0.001)

    # Simulate evaluation of this time index
    simulated_time_indices = range(number_of_simulated_time_steps)

    # All systems linear so single update should be ok
    analysis_system.update([simulated_input_loader], simulated_time_indices)

    # Get all results
    result = numpy.zeros(measurement.shape)
    for t in range(number_of_simulated_time_steps):
        result[t, :] = analysis_system.evaluate_expected_value(
            'MAP',
            SimulatedObservationStructure(t, fixed_locations, None, None),
            flag='POINTWISE')

    disparity_large_scale = (numpy.abs(result - large_scale_process)).ravel().max()
    # print 'large scale disparity: ', disparity_large_scale

    disparity_overall = (numpy.abs(result - measurement)).ravel().max()
    # print 'overall disparity: ', disparity_overall

    numpy.testing.assert_almost_equal(result, measurement, decimal=4)
    self.assertTrue(disparity_overall < 1E-4)
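# Small shape sketch of the simulated-field construction used above (values illustrative):
# numpy.kron of a length-N field vector with a column vector of T time weights produces a
# (T, N) array, i.e. one row per time step and one column per fixed location.
_sketch_field = numpy.array([2.0, -1.0, 3.0])                   # one value per location (N = 3)
_sketch_times = numpy.expand_dims(numpy.array([0.5, 1.0]), 1)   # T = 2 time weights as a column
_sketch_sim = numpy.kron(_sketch_field, _sketch_times)          # shape (2, 3): time x location
# _sketch_sim[1, :] equals _sketch_field, since the second time weight is 1.0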
def test_mini_world_noiseless(self):

    number_of_simulated_time_steps = 1

    # Build system
    element = SeasonalElement(n_triangulation_divisions=3,
                              n_harmonics=5,
                              include_local_mean=True)
    hyperparameters = SeasonalHyperparameters(n_spatial_components=6,
                                              common_log_sigma=0.0,
                                              common_log_rho=0.0)

    component = SpaceTimeComponent(
        ComponentStorage_InMemory(element, hyperparameters),
        SpaceTimeComponentSolutionStorage_InMemory())
    analysis_system = AnalysisSystem([component], ObservationSource.TMEAN, log=StringIO())

    # use fixed locations from icosahedron
    fixed_locations = cartesian_to_polar2d(
        MeshIcosahedronSubdivision.build(3).points)

    # random measurement at each location
    numpy.random.seed(8976)
    field_basis = numpy.random.randn(fixed_locations.shape[0])
    # print(field_basis.shape)
    # time_basis = numpy.array(harmonics_list)

    # some time function that varies over a year
    # decimal_years = numpy.array([datetime_to_decimal_year(epoch_plus_days(step)) for step in range(number_of_simulated_time_steps)])
    time_basis = numpy.cos(
        numpy.linspace(0.1, 1.75 * numpy.pi, number_of_simulated_time_steps))

    # kronecker product of the two
    # print(numpy.expand_dims(time_basis, 1))
    measurement = numpy.kron(field_basis, numpy.expand_dims(time_basis, 1))
    # print(measurement.shape)

    # Simulated inputs
    simulated_input_loader = SimulatedInputLoader(fixed_locations, measurement, 0.0001)

    # Simulate evaluation of this time index
    simulated_time_indices = range(number_of_simulated_time_steps)

    # Iterate
    for iteration in range(5):
        analysis_system.update([simulated_input_loader], simulated_time_indices)

    # Get all results
    result = numpy.zeros(measurement.shape)
    for t in range(number_of_simulated_time_steps):
        result[t, :] = analysis_system.evaluate_expected_value(
            'MAP',
            SimulatedObservationStructure(t, fixed_locations, None, None),
            flag='POINTWISE')

    # Should be very close to original because specified noise is low
    numpy.testing.assert_almost_equal(result, measurement)
    max_disparity = (numpy.abs(result - measurement)).ravel().max()
    self.assertTrue(max_disparity < 1E-5)

    # test output gridding, pointwise limit
    outputstructure = OutputRectilinearGridStructure(
        2,
        epoch_plus_days(2),
        latitudes=numpy.linspace(-60., 60., num=5),
        longitudes=numpy.linspace(-90., 90, num=10))
    pointwise_result = analysis_system.evaluate_expected_value(
        'MAP', outputstructure, 'POINTWISE')
    pointwise_limit_result = analysis_system.evaluate_expected_value(
        'MAP', outputstructure, 'GRID_CELL_AREA_AVERAGE', [1, 1], 10)
    numpy.testing.assert_array_almost_equal(pointwise_result, pointwise_limit_result)
def test_mini_world_noiseless(self):

    # Use a number of time steps
    number_of_simulated_time_steps = 1

    # Build system
    element = SpaceTimeFactorElement(
        n_triangulation_divisions=3,
        alpha=2,
        starttime=0,
        endtime=number_of_simulated_time_steps + 1,
        n_nodes=number_of_simulated_time_steps + 2,
        overlap_factor=2.5,
        H=1)
    initial_hyperparameters = SpaceTimeSPDEHyperparameters(
        space_log_sigma=0.0,
        space_log_rho=numpy.log(numpy.radians(45.0)),
        time_log_rho=numpy.log(3.0 / 365.0))
    component = SpaceTimeComponent(
        ComponentStorage_InMemory(element, initial_hyperparameters),
        SpaceTimeComponentSolutionStorage_InMemory())
    analysis_system = AnalysisSystem([component], ObservationSource.TMEAN, log=StringIO())

    # use fixed locations from icosahedron
    fixed_locations = cartesian_to_polar2d(
        MeshIcosahedronSubdivision.build(3).points)

    # random measurement at each location
    numpy.random.seed(8976)
    field_basis = 10.0 * numpy.random.randn(fixed_locations.shape[0])

    # some time function that varies over a year
    time_basis = numpy.cos(
        numpy.linspace(0.1, 1.75 * numpy.pi, number_of_simulated_time_steps))

    # kronecker product of the two
    measurement = numpy.kron(field_basis, numpy.expand_dims(time_basis, 1))

    # Simulated inputs
    simulated_input_loader = SimulatedInputLoader(fixed_locations, measurement, 0.01)

    # Simulate evaluation of this time index
    simulated_time_indices = range(number_of_simulated_time_steps)

    # Iterate
    for iteration in range(5):
        analysis_system.update([simulated_input_loader], simulated_time_indices)

    # Get all results
    result = numpy.zeros(measurement.shape)
    for t in range(number_of_simulated_time_steps):
        result[t, :] = analysis_system.evaluate_expected_value(
            'MAP',
            SimulatedObservationStructure(t, fixed_locations, None, None),
            flag='POINTWISE')

    # Should be very close to original because specified noise is low
    numpy.testing.assert_almost_equal(result, measurement)
    max_disparity = (numpy.abs(result - measurement)).ravel().max()
    self.assertTrue(max_disparity < 1E-5)
def demo_non_stationary():

    full_resolution_level = 5
    neighbourhood_level = 2

    full_spde = SphereMeshViewGlobal(level=full_resolution_level)
    active_triangles = full_spde.neighbours_at_level(neighbourhood_level, 0)
    n_regions = full_spde.n_triangles_at_level(neighbourhood_level)

    merge_method = 'new'

    if merge_method == 'old':

        local_spdes = []
        local_hyperparameters = []

        for region_index in range(n_regions):

            local_spdes.append(
                SphereMeshViewSuperTriangle(full_resolution_level,
                                            neighbourhood_level,
                                            region_index))

            hyperparameters = numpy.array(
                [numpy.float64(region_index), numpy.float64(region_index)])
            hyperparameters = numpy.log(
                numpy.concatenate([
                    numpy.random.uniform(1.0, 3.0, 1),
                    numpy.random.uniform(5.0, 30.0, 1) * numpy.pi / 180.
                ]))
            # hyperparameters = numpy.log( numpy.concatenate( [numpy.ones(1), numpy.random.uniform(15.0,45.0, 1) *numpy.pi/180.] ) )
            # hyperparameters = numpy.array([2.0, 3.0])
            # hyperparameters = numpy.log([2.0, numpy.pi/4])

            local_hyperparameters.append(hyperparameters)

        global_hyperparameters, global_sigma_design, global_rho_design = full_spde.merge_local_parameterisations(
            local_spdes, local_hyperparameters, merge_method='exp_average')

        log_sigmas = global_sigma_design.dot(global_hyperparameters)
        log_rhos = global_rho_design.dot(global_hyperparameters)

    elif merge_method == 'new':

        sigma_accumulator = None
        rho_accumulator = None
        contribution_counter = None

        for region_index in range(n_regions):

            local_spde = SphereMeshViewSuperTriangle(full_resolution_level,
                                                     neighbourhood_level,
                                                     region_index)

            local_hyperparameters = hyperparameters = numpy.log(
                numpy.concatenate([
                    numpy.random.uniform(1.0, 5.0, 1),
                    numpy.random.uniform(10.0, 45.0, 1) * numpy.pi / 180.
                ]))

            accumulators = SphereMeshViewGlobal.accumulate_local_parameterisations(
                sigma_accumulator, rho_accumulator, contribution_counter,
                local_spde, local_hyperparameters)
            sigma_accumulator, rho_accumulator, contribution_counter = accumulators

        log_sigmas, log_rhos = SphereMeshViewGlobal.finalise_local_parameterisation_sigma_rho(
            sigma_accumulator, rho_accumulator, contribution_counter)

    # print global_hyperparameters, global_sigma_design, global_rho_design

    import matplotlib.pyplot as plt
    from eustace.analysis.mesh.geometry import cartesian_to_polar2d

    polar_coords = cartesian_to_polar2d(full_spde.triangulation.points)

    plt.figure()
    plt.scatter(polar_coords[:, 1],
                polar_coords[:, 0],
                c=255. * log_sigmas / numpy.max(numpy.abs(log_sigmas)),
                linewidth=0.0,
                s=8.0)

    plt.figure()
    plt.scatter(polar_coords[:, 1],
                polar_coords[:, 0],
                c=255. * log_rhos / numpy.max(numpy.abs(log_rhos)),
                linewidth=0.0,
                s=8.0)
    # plt.show()

    # numpy.testing.assert_almost_equal( log_sigmas, 2.0 * numpy.ones(full_spde.triangulation.points.shape[0]) )
    # numpy.testing.assert_almost_equal( log_rhos, 3.0 * numpy.ones(full_spde.triangulation.points.shape[0]) )

    from eustace.analysis.advanced_standard.components.storage_inmemory import ComponentStorage_InMemory
    from eustace.analysis.advanced_standard.components.storage_inmemory import SpatialComponentSolutionStorage_InMemory
    from eustace.analysis.advanced_standard.components.spatialdelayed import DelayedSpatialComponent
    from eustace.analysis.advanced_standard.elements.local_view import NonStationaryLocal, ExpandedLocalHyperparameters
    from eustace.analysis.advanced_standard.elements.local import LocalElement, LocalHyperparameters

    nonstationary_component = DelayedSpatialComponent(
        ComponentStorage_InMemory(
            NonStationaryLocal(full_resolution_level),
            ExpandedLocalHyperparameters(log_sigma=log_sigmas, log_rho=log_rhos)),
        SpatialComponentSolutionStorage_InMemory())

    # nonstationary_component = DelayedSpatialComponent(
    #     ComponentStorage_InMemory(LocalElement(full_resolution_level), LocalHyperparameters(log_sigma = hyperparameters[0], log_rho = hyperparameters[1])),
    #     SpatialComponentSolutionStorage_InMemory())

    # print log_sigmas, log_rhos

    # plt.figure()
    # plt.scatter(polar_coords[:,1], polar_coords[:,0], c = 255.* process_sample / numpy.max(numpy.abs(process_sample)), linewidth = 0.0, s = 8.0 )
    # plt.figure()
    # plt.imshow( numpy.asarray( Q.todense() ) )

    # setup an output grid
    out_lats = numpy.linspace(-89.5, 89.5, 180)
    out_lons = numpy.linspace(-179.5, 179.5, 360)
    out_lons, out_lats = numpy.meshgrid(out_lons, out_lats)
    out_coords = numpy.vstack([out_lats.ravel(), out_lons.ravel()]).T

    design_matrix = nonstationary_component.storage.element.spde.build_A(out_coords)

    # setup solver for sampling
    from eustace.analysis.advanced_standard.linalg.extendedcholmodwrapper import ExtendedCholmodWrapper
    Q = nonstationary_component.storage.element.element_prior(
        nonstationary_component.storage.hyperparameters).prior_precision()
    factor = ExtendedCholmodWrapper.cholesky(Q)

    # draw samples, project onto output grid and plot
    random_values = numpy.random.normal(0.0, 1.0, (Q.shape[0], 1))
    process_sample = factor.solve_backward_substitution(random_values)
    out_values = design_matrix.dot(process_sample)

    plt.figure()
    plt.scatter(out_coords[:, 1],
                out_coords[:, 0],
                c=255. * out_values / numpy.max(numpy.abs(out_values)),
                linewidth=0.0,
                s=8.0)

    random_values = numpy.random.normal(0.0, 1.0, (Q.shape[0], 1))
    process_sample = factor.solve_backward_substitution(random_values)
    out_values = design_matrix.dot(process_sample)

    plt.figure()
    plt.scatter(out_coords[:, 1],
                out_coords[:, 0],
                c=255. * out_values / numpy.max(numpy.abs(out_values)),
                linewidth=0.0,
                s=8.0)

    random_values = numpy.random.normal(0.0, 1.0, (Q.shape[0], 1))
    process_sample = factor.solve_backward_substitution(random_values)
    out_values = design_matrix.dot(process_sample)

    plt.figure()
    plt.scatter(out_coords[:, 1],
                out_coords[:, 0],
                c=255. * out_values / numpy.max(numpy.abs(out_values)),
                linewidth=0.0,
                s=8.0)

    plt.show()
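# Why backward substitution yields a prior sample in the demo above (an assumption about the
# Cholesky convention used by ExtendedCholmodWrapper): if Q = L L^T and z ~ N(0, I), then
# x = L^{-T} z has covariance L^{-T} L^{-1} = Q^{-1}, i.e. x is a draw from N(0, Q^{-1}).
# A minimal dense sketch of the same idea using numpy/scipy only (illustrative, not the
# sparse CHOLMOD path used in the demo):
def _sketch_dense_prior_sample(Q_dense):
    """Draw one sample from N(0, Q_dense^{-1}) via a dense Cholesky factor (illustrative only)."""
    from scipy.linalg import solve_triangular
    L = numpy.linalg.cholesky(Q_dense)                # Q = L L^T with L lower triangular
    z = numpy.random.normal(0.0, 1.0, Q_dense.shape[0])
    return solve_triangular(L.T, z, lower=False)      # solves L^T x = z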