def test_evaluate_pointlike_limit_expected_value(self):

    A = AnalysisSystem([
        TestAnalysisSystem.FirstDummyComponent(),
        TestAnalysisSystem.SecondDummyComponent()
    ], 'B', 'C')

    # Check that only valid flags are accepted
    self.assertRaises(ValueError, A.evaluate_expected_value, 'A', 'MAP', 'C')
    self.assertRaises(ValueError, A.evaluate_expected_value, 'POINTWISE', 'B', 'C')

    # Check computations only for MAP and posterior marginal STD:
    # area averages over small grid cells should match pointwise evaluation
    for field in GLOBAL_FIELD_OUTPUT_FLAGS[:2]:
        pointwise_result = A.evaluate_expected_value(
            field, self.structure, 'POINTWISE')
        pointwise_limit_result = A.evaluate_expected_value(
            field, self.structure, 'GRID_CELL_AREA_AVERAGE', [1, 1], 10)
        numpy.testing.assert_array_equal(pointwise_result,
                                         pointwise_limit_result)

    self.assertRaises(ValueError, A.evaluate_expected_value,
                      GLOBAL_FIELD_OUTPUT_FLAGS[2], self.structure, 'POINTWISE')
    self.assertRaises(ValueError, A.evaluate_expected_value,
                      GLOBAL_FIELD_OUTPUT_FLAGS[2], self.structure, 'POINTWISE',
                      [2, 3], 3)
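# Aside: the 'GRID_CELL_AREA_AVERAGE' versus 'POINTWISE' equality asserted above
# rests on a general property: the average of a smooth field over a shrinking
# cell converges to the field value at the cell centre. A minimal standalone
# numpy sketch of that property (names here are illustrative only, not part of
# the AnalysisSystem API):
def demo_cell_average_pointwise_limit():

    import numpy

    def cell_average(field, centre, half_width, n_samples=1001):
        # Approximate the mean of the field over [centre - half_width, centre + half_width]
        points = numpy.linspace(centre - half_width, centre + half_width, n_samples)
        return field(points).mean()

    field = lambda x: numpy.sin(numpy.radians(x))  # a smooth test field
    centre = 42.0
    for half_width in [10.0, 1.0, 0.1]:
        # The discrepancy shrinks with the cell size
        print(half_width, abs(cell_average(field, centre, half_width) - field(centre)))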
def test_mini_world_local(self):

    # Local component
    local_component = SpatialComponent(
        ComponentStorage_InMemory(
            LocalElement(n_triangulation_divisions=1),
            LocalHyperparameters(log_sigma=0.0, log_rho=numpy.log(1.0))),
        SpatialComponentSolutionStorage_InMemory(),
        compute_uncertainties=True, method='APPROXIMATED')

    # Analysis system using the specified components, for the Tmean observable
    analysis_system = AnalysisSystem([local_component],
                                     ObservationSource.TMEAN, log=StringIO())

    # Simulated inputs
    simulated_input_loader = SimulatedInputLoader()

    # Simulate evaluation of this time index
    simulated_time_indices = [0]

    # Update with data
    analysis_system.update([simulated_input_loader], simulated_time_indices)

    # Check state vector directly
    statevector = analysis_system.components[
        0].solutionstorage.partial_state_read(0).ravel()

    # These are the nodes where observations were put (see SimulatedObservationSource above)
    # - check they correspond to within 3 times the stated noise level
    self.assertAlmostEqual(20.0, statevector[12], delta=0.3)
    self.assertAlmostEqual(-15.0, statevector[17], delta=0.3)
    self.assertAlmostEqual(5.0, statevector[41], delta=0.3)

    # Also check entire state vector within outer bounds set by obs
    self.assertTrue(all(statevector < 20.0))
    self.assertTrue(all(statevector > -15.0))

    # And check output corresponds too
    # (evaluate result on output structure same as input)
    simulated_output_structure = SimulatedObservationStructure(0)
    result = analysis_system.evaluate_expected_value(
        'MAP', simulated_output_structure, flag='POINTWISE')
    numpy.testing.assert_almost_equal(statevector[[12, 17, 41]], result)

    # Test output gridding: pointwise limit
    outputstructure = OutputRectilinearGridStructure(
        2, epoch_plus_days(2),
        latitudes=numpy.linspace(-89.875, 89.875, num=10),
        longitudes=numpy.linspace(-179.875, 179.875, num=20))
    pointwise_result = analysis_system.evaluate_expected_value(
        'MAP', outputstructure, 'POINTWISE')
    pointwise_limit_result = analysis_system.evaluate_expected_value(
        'MAP', outputstructure, 'GRID_CELL_AREA_AVERAGE', [1, 1], 3)
    numpy.testing.assert_array_almost_equal(pointwise_result,
                                            pointwise_limit_result)
def test_mini_world_geography_based_mock_data(self):
    """Testing on a simple mock data file, with mock covariate values"""

    # GENERATING OBSERVATIONS
    # Simulated locations: they sit exactly on the grid points of the covariate datafile
    locations = numpy.array([[0.0, 0.0], [0.0, 0.5], [0.05, 0.0]])

    # Simulated measurements: simple linear relation of the form y = 2*x
    measurement = numpy.array([2., 2., 2.])

    # Simulated errors
    uncorrelatederror = 0.1 * numpy.ones(measurement.shape)

    # Simulated inputs
    simulated_input_loader = SimulatedInputLoader(locations, measurement,
                                                  uncorrelatederror)

    # Simulate evaluation of this time index
    simulated_time_indices = [0]

    # GENERATING THE MODEL
    # Local component
    geography_covariate_element = GeographyBasedElement(
        self.covariate_file.name, 'lat', 'lon', 'covariate', 1.0)
    geography_covariate_element.load()
    geography_based_component = SpatialComponent(
        ComponentStorage_InMemory(
            geography_covariate_element,
            CovariateHyperparameters(-0.5 * numpy.log(10.))),
        SpatialComponentSolutionStorage_InMemory())

    # GENERATING THE ANALYSIS
    # Analysis system using the specified components, for the Tmean observable
    analysis_system = AnalysisSystem([geography_based_component],
                                     ObservationSource.TMEAN, log=StringIO())

    # Update with data
    analysis_system.update([simulated_input_loader], simulated_time_indices)

    # Check state vector directly
    statevector = analysis_system.components[
        0].solutionstorage.partial_state_read(0).ravel()

    # This is the node where observations were put (see SimulatedObservationSource above)
    # - check it corresponds to within 3 times the stated noise level
    self.assertAlmostEqual(2., statevector[0], delta=0.3)

    # Also check entire state vector within outer bounds set by obs
    self.assertTrue(all(statevector < 2.0))

    # And check output corresponds too
    # (evaluate result on output structure same as input)
    simulated_output_structure = SimulatedObservationStructure(
        0, locations, None, None)
    result = analysis_system.evaluate_expected_value(
        'MAP', simulated_output_structure, flag='POINTWISE')
    numpy.testing.assert_almost_equal(
        statevector[0] * numpy.ones(len(measurement)), result)
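# Aside: the behaviour above (statevector[0] close to, but below, 2.0) is what
# a MAP estimate with a zero-mean prior produces. A compact standalone sketch
# of the same linear-Gaussian solve, assuming the mock covariate equals 1.0 at
# each location (consistent with y = 2*x and y = 2) and reading the prior
# variance as exp(2 * (-0.5 * log(10))) = 0.1; both readings are assumptions
# for illustration, not taken from the component internals:
def demo_map_covariate_estimate():

    import numpy

    covariate = numpy.ones(3)   # assumed covariate value at the 3 locations
    y = numpy.full(3, 2.0)      # observations, y = 2 * covariate
    obs_variance = 0.1 ** 2     # stated uncorrelated error, squared
    prior_variance = 0.1        # assumed from CovariateHyperparameters(-0.5 * log(10))

    # Posterior precision and MAP estimate for a single coefficient
    precision = covariate.dot(covariate) / obs_variance + 1.0 / prior_variance
    map_estimate = covariate.dot(y) / obs_variance / precision
    print(map_estimate)         # about 1.94: close to 2, pulled slightly toward 0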
def main():

    print('Advanced standard example using a few days of EUSTACE data')
    parser = argparse.ArgumentParser(description='Advanced standard example using a few days of EUSTACE data')
    parser.add_argument('outpath', help='directory where the output should be redirected')
    parser.add_argument('--json_descriptor', default=None, help='a json descriptor containing the covariates to include in the climatology model')
    parser.add_argument('--land_biases', action='store_true', help='include insitu land homogenization bias terms')
    parser.add_argument('--global_biases', action='store_true', help='include global satellite bias terms')
    parser.add_argument('--n_iterations', type=int, default=5, help='number of solving iterations')
    args = parser.parse_args()

    # Input data path
    basepath = '/work/scratch/eustace/rawbinary3'

    # Days to process
    time_indices = range(int(days_since_epoch(datetime(2006, 2, 1))), int(days_since_epoch(datetime(2006, 2, 2))))

    # Sources to use
    sources = ['surfaceairmodel_land', 'surfaceairmodel_ocean', 'surfaceairmodel_ice', 'insitu_land', 'insitu_ocean']

    # SETUP
    # Setup for the seasonal core: climatology covariates setup read from file
    seasonal_setup = {
        'n_triangulation_divisions': 5,
        'n_harmonics': 4,
        'n_spatial_components': 6,
        'amplitude': 2.,
        'space_length_scale': 5.,    # length scale in units of degrees
    }
    grandmean_amplitude = 15.0

    # Setup for the large-scale component
    spacetime_setup = {
        'n_triangulation_divisions': 2,
        'alpha': 2,
        'starttime': 0,
        'endtime': 10.,
        'n_nodes': 2,
        'overlap_factor': 2.5,
        'H': 1,
        'amplitude': 1.,
        'space_length_scale': 15.0,  # length scale in units of degrees
        'time_length_scale': 15.0,   # length scale in units of days
    }
    bias_amplitude = .9

    # Setup for the local component
    local_setup = {
        'n_triangulation_divisions': 6,
        'amplitude': 2.,
        'space_length_scale': 2.,    # length scale in units of degrees
    }
    globalbias_amplitude = 15.0

    # CLIMATOLOGY COMPONENT: combining the seasonal core along with latitude harmonics, altitude and coastal effects
    if args.json_descriptor is not None:
        loader = LoadCovariateElement(args.json_descriptor)
        loader.check_keys()
        covariate_elements, covariate_hyperparameters = loader.load_covariates_and_hyperparameters()
        print('The following fields have been added as covariates of the climatology model')
        print(loader.data.keys())
    else:
        covariate_elements, covariate_hyperparameters = [], []

    climatology_element = CombinationElement(
        [SeasonalElement(n_triangulation_divisions=seasonal_setup['n_triangulation_divisions'],
                         n_harmonics=seasonal_setup['n_harmonics'],
                         include_local_mean=True),
         GrandMeanElement()] + covariate_elements)
    climatology_hyperparameters = CombinationHyperparameters(
        [SeasonalHyperparameters(n_spatial_components=seasonal_setup['n_spatial_components'],
                                 common_log_sigma=numpy.log(seasonal_setup['amplitude']),
                                 common_log_rho=numpy.log(numpy.radians(seasonal_setup['space_length_scale']))),
         CovariateHyperparameters(numpy.log(grandmean_amplitude))] + covariate_hyperparameters)
    climatology_component = SpaceTimeComponent(
        ComponentStorage_InMemory(climatology_element, climatology_hyperparameters),
        SpaceTimeComponentSolutionStorage_InMemory(),
        compute_uncertainties=True, method='APPROXIMATED',
        compute_sample=True, sample_size=definitions.GLOBAL_SAMPLE_SHAPE[3])

    # LARGE SCALE (Kronecker product) COMPONENT: combining large-scale trends with bias terms accounting for homogenization effects
    if args.land_biases:
        bias_element, bias_hyperparameters = [InsituLandBiasElement(BREAKPOINTS_FILE)], [CovariateHyperparameters(numpy.log(bias_amplitude))]
        print('Adding bias terms for insitu land homogenization')
    else:
        bias_element, bias_hyperparameters = [], []

    large_scale_element = CombinationElement(
        [SpaceTimeKroneckerElement(n_triangulation_divisions=spacetime_setup['n_triangulation_divisions'],
                                   alpha=spacetime_setup['alpha'],
                                   starttime=spacetime_setup['starttime'],
                                   endtime=spacetime_setup['endtime'],
                                   n_nodes=spacetime_setup['n_nodes'],
                                   overlap_factor=spacetime_setup['overlap_factor'],
                                   H=spacetime_setup['H'])] + bias_element)
    large_scale_hyperparameters = CombinationHyperparameters(
        [SpaceTimeSPDEHyperparameters(space_log_sigma=numpy.log(spacetime_setup['amplitude']),
                                      space_log_rho=numpy.log(numpy.radians(spacetime_setup['space_length_scale'])),
                                      time_log_rho=numpy.log(spacetime_setup['time_length_scale']))] + bias_hyperparameters)
    large_scale_component = SpaceTimeComponent(
        ComponentStorage_InMemory(large_scale_element, large_scale_hyperparameters),
        SpaceTimeComponentSolutionStorage_InMemory(),
        compute_uncertainties=True, method='APPROXIMATED',
        compute_sample=True, sample_size=definitions.GLOBAL_SAMPLE_SHAPE[3])

    # LOCAL COMPONENT: combining local-scale variations with global satellite bias terms
    # (distinct names from the insitu land bias terms above, to avoid shadowing)
    if args.global_biases:
        global_bias_elements = [BiasElement(groupname, 1) for groupname in GLOBAL_BIASES_GROUP_LIST]
        global_bias_hyperparameters = [CovariateHyperparameters(numpy.log(globalbias_amplitude)) for index in range(len(GLOBAL_BIASES_GROUP_LIST))]
        print('Adding global bias terms for all the surfaces')
    else:
        global_bias_elements, global_bias_hyperparameters = [], []

    local_scale_element = CombinationElement(
        [LocalElement(n_triangulation_divisions=local_setup['n_triangulation_divisions'])] + global_bias_elements)
    local_scale_hyperparameters = CombinationHyperparameters(
        [LocalHyperparameters(log_sigma=numpy.log(local_setup['amplitude']),
                              log_rho=numpy.log(numpy.radians(local_setup['space_length_scale'])))] + global_bias_hyperparameters)
    local_component = SpatialComponent(
        ComponentStorage_InMemory(local_scale_element, local_scale_hyperparameters),
        SpatialComponentSolutionStorage_InMemory(),
        compute_uncertainties=True, method='APPROXIMATED',
        compute_sample=True, sample_size=definitions.GLOBAL_SAMPLE_SHAPE[3])

    # Analysis system using the specified components, for the Tmean observable
    print('Analysing inputs')
    analysis_system = AnalysisSystem(
        [climatology_component, large_scale_component, local_component],
        ObservationSource.TMEAN)

    # Object to load raw binary inputs at time indices
    inputloaders = [AnalysisSystemInputLoaderRawBinary_Sources(basepath, source, time_indices) for source in sources]

    for iteration in range(args.n_iterations):
        print('Iteration {}'.format(iteration))

        # Update with data
        analysis_system.update(inputloaders, time_indices)

    print('Computing outputs')

    # Produce an output for each time index
    for time_index in time_indices:

        # Get date for output
        outputdate = inputloaders[0].datetime_at_time_index(time_index)
        print('Evaluating output grid: {}'.format(outputdate))

        # Configure output grid
        outputstructure = OutputRectilinearGridStructure(
            time_index, outputdate,
            latitudes=numpy.linspace(-90. + definitions.GLOBAL_FIELD_RESOLUTION / 2.,
                                     90. - definitions.GLOBAL_FIELD_RESOLUTION / 2.,
                                     num=definitions.GLOBAL_FIELD_SHAPE[1]),
            longitudes=numpy.linspace(-180. + definitions.GLOBAL_FIELD_RESOLUTION / 2.,
                                      180. - definitions.GLOBAL_FIELD_RESOLUTION / 2.,
                                      num=definitions.GLOBAL_FIELD_SHAPE[2]))

        # Evaluate expected value and uncertainty at these locations, each field once
        print('Evaluating: MAP')
        result_expected_value = analysis_system.evaluate_expected_value('MAP', outputstructure, 'GRID_CELL_AREA_AVERAGE', [1, 1], 1000)
        print('Evaluating: post_STD')
        result_expected_uncertainties = analysis_system.evaluate_expected_value('post_STD', outputstructure, 'GRID_CELL_AREA_AVERAGE', [1, 1], 1000)

        print('Evaluating: climatology fraction')
        climatology_fraction = analysis_system.evaluate_climatology_fraction(outputstructure, [1, 1], 1000)

        print('Evaluating: the sample')
        sample = analysis_system.evaluate_projected_sample(outputstructure)

        # Make output filename
        pathname = 'eustace_example_output_{0:04d}{1:02d}{2:02d}.nc'.format(outputdate.year, outputdate.month, outputdate.day)
        pathname = os.path.join(args.outpath, pathname)
        print('Saving: {}'.format(pathname))

        # Save results
        filebuilder = FileBuilderGlobalField(
            pathname, time_index,
            'Infilling Example', 'UNVERSIONED', definitions.TAS.name, '',
            'Example data only',
            'eustace.analysis.advanced_standard.examples.example_eustace_few_days', '')
        filebuilder.add_global_field(definitions.TAS, result_expected_value.reshape(definitions.GLOBAL_FIELD_SHAPE))
        filebuilder.add_global_field(definitions.TASUNCERTAINTY, result_expected_uncertainties.reshape(definitions.GLOBAL_FIELD_SHAPE))
        filebuilder.add_global_field(definitions.TAS_CLIMATOLOGY_FRACTION, climatology_fraction.reshape(definitions.GLOBAL_FIELD_SHAPE))

        # One ensemble member per sample column: the sampled deviation added to the expected value
        for index in range(definitions.GLOBAL_SAMPLE_SHAPE[3]):
            variable = copy.deepcopy(definitions.TASENSEMBLE)
            variable.name = variable.name + '_' + str(index)
            selected_sample = sample[:, index].ravel() + result_expected_value
            filebuilder.add_global_field(variable, selected_sample.reshape(definitions.GLOBAL_FIELD_SHAPE))

        filebuilder.save_and_close()

    print('Complete')
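# Usage note: given the argparse configuration above, this example would be
# invoked along the lines of
#
#   python example_eustace_few_days.py /path/to/output --land_biases --global_biases --n_iterations 5
#
# where the script filename is inferred here from the module path recorded in
# the output metadata ('eustace.analysis.advanced_standard.examples.example_eustace_few_days').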
def test_mini_world_altitude_with_latitude(self):
    """Testing using altitude as a covariate"""

    # GENERATING OBSERVATIONS
    # Simulated locations: they sit exactly on the grid points of the covariate datafile
    DEM = Dataset(self.altitude_datafile)
    latitude = DEM.variables['lat'][:]
    longitude = DEM.variables['lon'][:]
    altitude = DEM.variables['dem'][:]
    indices = numpy.stack(
        (numpy.array([1, 3, 5, 7, 8, 9, 10, 11]),
         numpy.array([0, 0, 0, 0, 0, 0, 0, 0])),
        axis=1)
    selected_location = []
    altitude_observations = []
    for couple in indices:
        selected_location.append([
            latitude[couple[0], couple[1]], longitude[couple[0], couple[1]]
        ])
        altitude_observations.append(altitude[couple[0], couple[1]])
    DEM.close()
    locations = numpy.array(selected_location)

    # Simulated model is y = z + a*cos(2x) + b*sin(2x) + c*cos(4x) + d*sin(4x),
    # with z = altitude, x = latitude and a = b = c = d = 0
    slope = 1e-3
    measurement = slope * numpy.array(altitude_observations)

    # Simulated errors
    uncorrelatederror = 0.1 * numpy.ones(measurement.shape)

    # Simulated inputs
    simulated_input_loader = SimulatedInputLoader(locations, measurement,
                                                  uncorrelatederror)

    # Simulate evaluation of this time index
    simulated_time_indices = [0]

    # GENERATING THE MODEL
    # Local component
    geography_covariate_element = GeographyBasedElement(
        self.altitude_datafile, 'lat', 'lon', 'dem', 1.0)
    geography_covariate_element.load()
    combined_element = CombinationElement(
        [geography_covariate_element, LatitudeHarmonicsElement()])
    combined_hyperparameters = CombinationHyperparameters([
        CovariateHyperparameters(-0.5 * numpy.log(10.)),
        CombinationHyperparameters([
            CovariateHyperparameters(-0.5 * numpy.log(p))
            for p in [10.0, 10.0, 10.0, 10.0]
        ])
    ])
    combined_component = SpatialComponent(
        ComponentStorage_InMemory(combined_element, combined_hyperparameters),
        SpatialComponentSolutionStorage_InMemory())

    # GENERATING THE ANALYSIS
    # Analysis system using the specified components, for the Tmean observable
    analysis_system = AnalysisSystem([combined_component],
                                     ObservationSource.TMEAN, log=StringIO())

    # Update with data
    analysis_system.update([simulated_input_loader], simulated_time_indices)

    # Check state vector directly
    statevector = analysis_system.components[
        0].solutionstorage.partial_state_read(0).ravel()

    # These are the nodes where observations were put (see SimulatedObservationSource above)
    # - check they correspond to within 3 times the stated noise level
    self.assertAlmostEqual(slope, statevector[0], delta=0.3)
    self.assertAlmostEqual(0., statevector[1], delta=0.3)
    self.assertAlmostEqual(0., statevector[2], delta=0.3)
    self.assertAlmostEqual(0., statevector[3], delta=0.3)
    self.assertAlmostEqual(0., statevector[4], delta=0.3)

    # And check output corresponds too
    # (evaluate result on output structure same as input)
    simulated_output_structure = SimulatedObservationStructure(
        0, locations, None, None)
    result = analysis_system.evaluate_expected_value(
        'MAP', simulated_output_structure, flag='POINTWISE')
    expected = statevector[0] * numpy.array(altitude_observations) \
        + statevector[1] * LatitudeFunction(numpy.cos, 2.0).compute(locations[:, 0]).ravel() \
        + statevector[2] * LatitudeFunction(numpy.sin, 2.0).compute(locations[:, 0]).ravel() \
        + statevector[3] * LatitudeFunction(numpy.cos, 4.0).compute(locations[:, 0]).ravel() \
        + statevector[4] * LatitudeFunction(numpy.sin, 4.0).compute(locations[:, 0]).ravel()
    numpy.testing.assert_almost_equal(expected, result)

    # Test output gridding: pointwise limit
    outputstructure = OutputRectilinearGridStructure(
        2, epoch_plus_days(2),
        latitudes=numpy.linspace(-60., 60., num=5),
        longitudes=numpy.linspace(-90., 90., num=10))
    pointwise_result = analysis_system.evaluate_expected_value(
        'MAP', outputstructure, 'POINTWISE')
    pointwise_limit_result = analysis_system.evaluate_expected_value(
        'MAP', outputstructure, 'GRID_CELL_AREA_AVERAGE', [1, 1], 10)
    numpy.testing.assert_array_almost_equal(pointwise_result,
                                            pointwise_limit_result)
def test_mini_world_altitude(self):
    """Testing using altitude as a covariate"""

    # GENERATING OBSERVATIONS
    # Simulated locations: they sit exactly on the grid points of the covariate datafile
    DEM = Dataset(self.altitude_datafile)
    latitude = DEM.variables['lat'][:]
    longitude = DEM.variables['lon'][:]
    altitude = DEM.variables['dem'][:]
    indices = numpy.stack(
        (numpy.array([1, 3, 267, 80, 10, 215, 17, 120]),
         numpy.array([2, 256, 9, 110, 290, 154, 34, 151])),
        axis=1)
    selected_location = []
    altitude_observations = []
    for couple in indices:
        selected_location.append([
            latitude[couple[0], couple[1]], longitude[couple[0], couple[1]]
        ])
        altitude_observations.append(altitude[couple[0], couple[1]])
    DEM.close()
    locations = numpy.array(selected_location)

    # Simulated measurements: simple linear relation of the form y = pi*x
    measurement = numpy.pi * numpy.array(altitude_observations)

    # Simulated errors
    uncorrelatederror = 0.1 * numpy.ones(measurement.shape)

    # Simulated inputs
    simulated_input_loader = SimulatedInputLoader(locations, measurement,
                                                  uncorrelatederror)

    # Simulate evaluation of this time index
    simulated_time_indices = [0]

    # GENERATING THE MODEL
    # Local component
    geography_covariate_element = GeographyBasedElement(
        self.altitude_datafile, 'lat', 'lon', 'dem', 1.0)
    geography_covariate_element.load()
    geography_based_component = SpatialComponent(
        ComponentStorage_InMemory(
            geography_covariate_element,
            CovariateHyperparameters(-0.5 * numpy.log(10.))),
        SpatialComponentSolutionStorage_InMemory())

    # GENERATING THE ANALYSIS
    # Analysis system using the specified components, for the Tmean observable
    analysis_system = AnalysisSystem([geography_based_component],
                                     ObservationSource.TMEAN, log=StringIO())

    # Update with data
    analysis_system.update([simulated_input_loader], simulated_time_indices)

    # Check state vector directly
    statevector = analysis_system.components[
        0].solutionstorage.partial_state_read(0).ravel()

    # This is the node where observations were put (see SimulatedObservationSource above)
    # - check it corresponds to within 3 times the stated noise level
    self.assertAlmostEqual(numpy.pi, statevector[0], delta=0.3)

    # Also check entire state vector within outer bounds set by obs
    self.assertTrue(all(statevector < numpy.pi))

    # And check output corresponds too
    # (evaluate result on output structure same as input)
    simulated_output_structure = SimulatedObservationStructure(
        0, locations, None, None)
    result = analysis_system.evaluate_expected_value(
        'MAP', simulated_output_structure, flag='POINTWISE')
    numpy.testing.assert_almost_equal(
        statevector[0] * numpy.array(altitude_observations), result)
def test_mini_world_latitude_harmonics(self):
    """Testing on a simple mock data file using latitude harmonics"""

    # GENERATING OBSERVATIONS
    # Simulated locations: they sit exactly on the grid points of the covariate datafile
    locations = numpy.array([[0.0, 0.0], [0.25, 0.5], [0.5, 0.0]])

    # Simulated model is y = a*cos(2x) + b*sin(2x) + c*cos(4x) + d*sin(4x)
    # with x = latitude, so we expect a = c = 1 and b = d = 0
    measurement = LatitudeFunction(numpy.cos, 2.0).compute(
        locations[:, 0]).ravel() + LatitudeFunction(
            numpy.cos, 4.0).compute(locations[:, 0]).ravel()

    # Simulated errors
    uncorrelatederror = 0.1 * numpy.ones(measurement.shape)

    # Simulated inputs
    simulated_input_loader = SimulatedInputLoader(locations, measurement,
                                                  uncorrelatederror)

    # Simulate evaluation of this time index
    simulated_time_indices = [0]

    latitude_harmonics_component = SpatialComponent(
        ComponentStorage_InMemory(
            LatitudeHarmonicsElement(),
            CombinationHyperparameters([
                CovariateHyperparameters(-0.5 * numpy.log(p))
                for p in [10.0, 10.0, 10.0, 10.0]
            ])),
        SpatialComponentSolutionStorage_InMemory())

    # Analysis system using the specified components, for the Tmean observable
    analysis_system = AnalysisSystem([latitude_harmonics_component],
                                     ObservationSource.TMEAN, log=StringIO())

    # GENERATING THE ANALYSIS
    # Update with data
    analysis_system.update([simulated_input_loader], simulated_time_indices)

    # Check state vector directly
    statevector = analysis_system.components[
        0].solutionstorage.partial_state_read(0).ravel()

    # These are the nodes where observations were put (see SimulatedObservationSource above)
    # - check they correspond to within 3 times the stated noise level
    self.assertAlmostEqual(1., statevector[0], delta=0.3)
    self.assertAlmostEqual(1., statevector[2], delta=0.3)
    self.assertAlmostEqual(0., statevector[1], delta=0.3)
    self.assertAlmostEqual(0., statevector[3], delta=0.3)

    # Also check entire state vector within outer bounds set by obs
    self.assertTrue(all(statevector < 1.0))

    # And check output corresponds too
    # (evaluate result on output structure same as input)
    simulated_output_structure = SimulatedObservationStructure(
        0, locations, None, None)
    result = analysis_system.evaluate_expected_value(
        'MAP', simulated_output_structure, flag='POINTWISE')
    expected = statevector[0] * LatitudeFunction(numpy.cos, 2.0).compute(locations[:, 0]).ravel() \
        + statevector[1] * LatitudeFunction(numpy.sin, 2.0).compute(locations[:, 0]).ravel() \
        + statevector[2] * LatitudeFunction(numpy.cos, 4.0).compute(locations[:, 0]).ravel() \
        + statevector[3] * LatitudeFunction(numpy.sin, 4.0).compute(locations[:, 0]).ravel()
    numpy.testing.assert_almost_equal(expected, result)
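# Aside: the coefficients this test expects can also be recovered with an
# ordinary least-squares fit against the four harmonic basis functions. A
# standalone numpy sketch, using a denser latitude set than the three-point
# test so the system is overdetermined, and assuming the harmonics are cos/sin
# of 2x and 4x with x the latitude in radians (an assumption about
# LatitudeHarmonicsElement made for illustration):
def demo_latitude_harmonics_fit():

    import numpy

    x = numpy.radians(numpy.linspace(-60.0, 60.0, 20))
    design = numpy.column_stack([numpy.cos(2 * x), numpy.sin(2 * x),
                                 numpy.cos(4 * x), numpy.sin(4 * x)])
    y = numpy.cos(2 * x) + numpy.cos(4 * x)

    # Least-squares recovery of the harmonic coefficients
    coefficients, _, _, _ = numpy.linalg.lstsq(design, y, rcond=None)
    print(coefficients)  # approximately [1, 0, 1, 0], i.e. a = c = 1, b = d = 0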
def test_mini_world_large_and_local(self):

    # Use a number of time steps
    number_of_simulated_time_steps = 30

    # Large-scale spatial variability
    simulated_large_variation = 10.0

    # Local variability
    simulated_local_variation = 1.0

    # Iterations to use
    number_of_solution_iterations = 5

    # Build system
    # Large-scale factor
    element_large = SpaceTimeKroneckerElement(
        n_triangulation_divisions=1,
        alpha=2,
        starttime=0,
        endtime=number_of_simulated_time_steps + 1,
        n_nodes=number_of_simulated_time_steps + 2,
        overlap_factor=2.5,
        H=1)
    initial_hyperparameters_large = SpaceTimeSPDEHyperparameters(
        space_log_sigma=0.0,
        space_log_rho=numpy.log(numpy.radians(5.0)),
        time_log_rho=numpy.log(1.0 / 365.0))
    component_large = SpaceTimeComponent(
        ComponentStorage_InMemory(element_large,
                                  initial_hyperparameters_large),
        SpaceTimeComponentSolutionStorage_InMemory())

    # And a local process
    component_local = SpatialComponent(
        ComponentStorage_InMemory(
            LocalElement(n_triangulation_divisions=3),
            LocalHyperparameters(log_sigma=0.0,
                                 log_rho=numpy.log(numpy.radians(5.0)))),
        SpatialComponentSolutionStorage_InMemory())

    analysis_system = AnalysisSystem([component_large, component_local],
                                     ObservationSource.TMEAN, log=StringIO())
    # analysis_system = AnalysisSystem([ component_large ], ObservationSource.TMEAN)
    # analysis_system = AnalysisSystem([ component_local ], ObservationSource.TMEAN)

    # Use fixed locations from icosahedron
    fixed_locations = cartesian_to_polar2d(
        MeshIcosahedronSubdivision.build(3).points)

    # Random measurement at each location
    numpy.random.seed(8976)
    field_basis = simulated_large_variation * numpy.random.randn(
        fixed_locations.shape[0])

    # Some time function that varies over a year
    time_basis = numpy.cos(
        numpy.linspace(0.1, 1.75 * numpy.pi, number_of_simulated_time_steps))

    # Kronecker product of the two
    large_scale_process = numpy.kron(field_basis,
                                     numpy.expand_dims(time_basis, 1))

    # Random local changes where mean change at each time is zero
    # local_process = simulated_local_variation * numpy.random.randn(large_scale_process.shape[0], large_scale_process.shape[1])
    # local_process -= numpy.tile(local_process.mean(axis=1), (local_process.shape[1], 1)).T
    local_process = numpy.zeros(large_scale_process.shape)
    somefield = simulated_local_variation * numpy.random.randn(
        1, large_scale_process.shape[1])
    somefield -= somefield.ravel().mean()
    local_process[10, :] = somefield
    local_process[11, :] = -somefield

    # Add the two processes
    measurement = large_scale_process + local_process

    # Simulated inputs
    simulated_input_loader = SimulatedInputLoader(fixed_locations,
                                                  measurement, 0.001)

    # Simulate evaluation of this time index
    simulated_time_indices = range(number_of_simulated_time_steps)

    # All systems linear so single update should be ok
    analysis_system.update([simulated_input_loader], simulated_time_indices)

    # Get all results
    result = numpy.zeros(measurement.shape)
    for t in range(number_of_simulated_time_steps):
        result[t, :] = analysis_system.evaluate_expected_value(
            'MAP',
            SimulatedObservationStructure(t, fixed_locations, None, None),
            flag='POINTWISE')

    disparity_large_scale = (numpy.abs(result -
                                       large_scale_process)).ravel().max()
    # print('large scale disparity: ', disparity_large_scale)

    disparity_overall = (numpy.abs(result - measurement)).ravel().max()
    # print('overall disparity: ', disparity_overall)

    numpy.testing.assert_almost_equal(result, measurement, decimal=4)
    self.assertTrue(disparity_overall < 1E-4)
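# Aside: the simulated large-scale field above is rank one in space-time. A
# standalone shape sketch of the numpy.kron construction used here, with tiny
# dimensions for readability:
def demo_kronecker_field_shape():

    import numpy

    n_locations, n_times = 4, 3
    field_basis = numpy.arange(1.0, n_locations + 1)  # spatial pattern, shape (4,)
    time_basis = numpy.cos(numpy.linspace(0.1, 1.75 * numpy.pi, n_times))

    # kron of shape (4,) with shape (3, 1) yields shape (3, 4):
    # field[t, i] = time_basis[t] * field_basis[i], so every location follows
    # the same time curve scaled by its spatial basis value
    field = numpy.kron(field_basis, numpy.expand_dims(time_basis, 1))
    print(field.shape)  # (3, 4)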
def test_mini_world_noiseless(self):

    number_of_simulated_time_steps = 1

    # Build system
    element = SeasonalElement(n_triangulation_divisions=3,
                              n_harmonics=5,
                              include_local_mean=True)
    hyperparameters = SeasonalHyperparameters(n_spatial_components=6,
                                              common_log_sigma=0.0,
                                              common_log_rho=0.0)
    component = SpaceTimeComponent(
        ComponentStorage_InMemory(element, hyperparameters),
        SpaceTimeComponentSolutionStorage_InMemory())

    analysis_system = AnalysisSystem([component], ObservationSource.TMEAN,
                                     log=StringIO())

    # Use fixed locations from icosahedron
    fixed_locations = cartesian_to_polar2d(
        MeshIcosahedronSubdivision.build(3).points)

    # Random measurement at each location
    numpy.random.seed(8976)
    field_basis = numpy.random.randn(fixed_locations.shape[0])

    # Some time function that varies over a year
    time_basis = numpy.cos(
        numpy.linspace(0.1, 1.75 * numpy.pi, number_of_simulated_time_steps))

    # Kronecker product of the two
    measurement = numpy.kron(field_basis, numpy.expand_dims(time_basis, 1))

    # Simulated inputs
    simulated_input_loader = SimulatedInputLoader(fixed_locations,
                                                  measurement, 0.0001)

    # Simulate evaluation of this time index
    simulated_time_indices = range(number_of_simulated_time_steps)

    # Iterate
    for iteration in range(5):
        analysis_system.update([simulated_input_loader],
                               simulated_time_indices)

    # Get all results
    result = numpy.zeros(measurement.shape)
    for t in range(number_of_simulated_time_steps):
        result[t, :] = analysis_system.evaluate_expected_value(
            'MAP',
            SimulatedObservationStructure(t, fixed_locations, None, None),
            flag='POINTWISE')

    # Should be very close to original because specified noise is low
    numpy.testing.assert_almost_equal(result, measurement)
    max_disparity = (numpy.abs(result - measurement)).ravel().max()
    self.assertTrue(max_disparity < 1E-5)

    # Test output gridding: pointwise limit
    outputstructure = OutputRectilinearGridStructure(
        2, epoch_plus_days(2),
        latitudes=numpy.linspace(-60., 60., num=5),
        longitudes=numpy.linspace(-90., 90., num=10))
    pointwise_result = analysis_system.evaluate_expected_value(
        'MAP', outputstructure, 'POINTWISE')
    pointwise_limit_result = analysis_system.evaluate_expected_value(
        'MAP', outputstructure, 'GRID_CELL_AREA_AVERAGE', [1, 1], 10)
    numpy.testing.assert_array_almost_equal(pointwise_result,
                                            pointwise_limit_result)
def test_mini_world_noiseless(self):

    # Use a number of time steps
    number_of_simulated_time_steps = 1

    # Build system
    element = SpaceTimeFactorElement(
        n_triangulation_divisions=3,
        alpha=2,
        starttime=0,
        endtime=number_of_simulated_time_steps + 1,
        n_nodes=number_of_simulated_time_steps + 2,
        overlap_factor=2.5,
        H=1)
    initial_hyperparameters = SpaceTimeSPDEHyperparameters(
        space_log_sigma=0.0,
        space_log_rho=numpy.log(numpy.radians(45.0)),
        time_log_rho=numpy.log(3.0 / 365.0))
    component = SpaceTimeComponent(
        ComponentStorage_InMemory(element, initial_hyperparameters),
        SpaceTimeComponentSolutionStorage_InMemory())

    analysis_system = AnalysisSystem([component], ObservationSource.TMEAN,
                                     log=StringIO())

    # Use fixed locations from icosahedron
    fixed_locations = cartesian_to_polar2d(
        MeshIcosahedronSubdivision.build(3).points)

    # Random measurement at each location
    numpy.random.seed(8976)
    field_basis = 10.0 * numpy.random.randn(fixed_locations.shape[0])

    # Some time function that varies over a year
    time_basis = numpy.cos(
        numpy.linspace(0.1, 1.75 * numpy.pi, number_of_simulated_time_steps))

    # Kronecker product of the two
    measurement = numpy.kron(field_basis, numpy.expand_dims(time_basis, 1))

    # Simulated inputs
    simulated_input_loader = SimulatedInputLoader(fixed_locations,
                                                  measurement, 0.01)

    # Simulate evaluation of this time index
    simulated_time_indices = range(number_of_simulated_time_steps)

    # Iterate
    for iteration in range(5):
        analysis_system.update([simulated_input_loader],
                               simulated_time_indices)

    # Get all results
    result = numpy.zeros(measurement.shape)
    for t in range(number_of_simulated_time_steps):
        result[t, :] = analysis_system.evaluate_expected_value(
            'MAP',
            SimulatedObservationStructure(t, fixed_locations, None, None),
            flag='POINTWISE')

    # Should be very close to original because specified noise is low
    numpy.testing.assert_almost_equal(result, measurement)
    max_disparity = (numpy.abs(result - measurement)).ravel().max()
    self.assertTrue(max_disparity < 1E-5)
def main():

    print('EUSTACE example using HadCRUT4 monthly data')

    # Input data path
    input_basepath = os.path.join(WORKSPACE_PATH, 'data/incoming/HadCRUT4.5.0.0')

    # Input filenames
    input_filenames = [
        'hadcrut4_median_netcdf.nc',
        'hadcrut4_uncorrelated_supplementary.nc',
        'hadcrut4_blended_uncorrelated.nc'
    ]

    # Months to process
    time_indices = range(2)

    # Climatology component
    climatology_component = SpaceTimeComponent(
        ComponentStorage_InMemory(
            SeasonalElement(n_triangulation_divisions=5, n_harmonics=5, include_local_mean=True),
            SeasonalHyperparameters(n_spatial_components=6, common_log_sigma=1.0, common_log_rho=0.0)),
        SpaceTimeComponentSolutionStorage_InMemory())

    # Number of factors for large scale (factor analysis) component and initial hyperparameters
    n_factors = 5
    factors = []
    factor_hyperparameters = []
    for factor_index in range(n_factors):

        factor_hyperparameters.append(
            SpaceTimeSPDEHyperparameters(
                space_log_sigma=0.0,
                space_log_rho=numpy.log(10.0 * numpy.pi / 180 + 25.0 * numpy.pi / 180 * (n_factors - factor_index) / n_factors),
                time_log_rho=numpy.log(1 / 12.0 + 6 / 12.0 * (n_factors - factor_index) / n_factors)))

        factors.append(
            SpaceTimeFactorElement(n_triangulation_divisions=5, alpha=2, starttime=0, endtime=36, overlap_factor=2.5, H=1))

    # Large scale (factor analysis) component
    large_scale_component = SpaceTimeComponent(
        ComponentStorage_InMemory(CombinationElement(factors), CombinationHyperparameters(factor_hyperparameters)),
        SpaceTimeComponentSolutionStorage_InMemory())

    # Local component
    local_component = SpatialComponent(
        ComponentStorage_InMemory(
            LocalElement(n_triangulation_divisions=4),
            LocalHyperparameters(log_sigma=0.0, log_rho=numpy.log(10.0 * numpy.pi / 180))),
        SpatialComponentSolutionStorage_InMemory())

    print('Analysing inputs')

    # Analysis system using the specified components, for the Tmean observable
    analysis_system = AnalysisSystem(
        [climatology_component, large_scale_component, local_component],
        ObservationSource.TMEAN)

    # Make filelist
    input_filelist = [os.path.join(input_basepath, filename) for filename in input_filenames]

    # Object to load HadCRUT4 inputs at time indices
    inputloader = AnalysisSystemInputLoaderHadCRUT4(input_filelist)

    # Update with data
    analysis_system.update([inputloader], time_indices)

    print('Computing outputs')

    # Produce an output for each time index
    for time_index in time_indices:

        # Make output filename
        outputdate = inputloader.datetime_at_time_index(time_index)
        pathname = 'example_output_{0:04d}{1:02d}.nc'.format(outputdate.year, outputdate.month)
        print('Saving: {}'.format(pathname))

        # Configure output grid
        outputstructure = OutputRectilinearGridStructure(
            time_index, outputdate,
            latitudes=numpy.linspace(-87.5, 87.5, num=36),
            longitudes=numpy.linspace(-177.5, 177.5, num=72))

        # Evaluate expected value at these locations
        result_expected_value = analysis_system.evaluate_expected_value(outputstructure)

        # Save results
        filebuilder = FileBuilderHadCRUT4ExampleOutput(pathname, outputstructure)
        filebuilder.add_global_field(TAS_ANOMALY, result_expected_value.reshape(1, 36, 72))
        filebuilder.save_and_close()

    print('Complete')
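# Aside: the factor hyperparameter schedule above assigns each successive
# factor a shorter spatial and temporal length scale. A standalone
# reproduction of the arithmetic, assuming spatial rho is in radians and
# temporal rho in years (consistent with the 1/12 monthly step used here;
# both unit readings are assumptions for illustration):
def demo_factor_length_scale_schedule():

    import numpy

    n_factors = 5
    for factor_index in range(n_factors):
        space_rho = 10.0 * numpy.pi / 180 + 25.0 * numpy.pi / 180 * (n_factors - factor_index) / n_factors
        time_rho = 1 / 12.0 + 6 / 12.0 * (n_factors - factor_index) / n_factors
        # factor 0: 35 degrees and 7 months ... factor 4: 15 degrees and 2.2 months
        print(factor_index, numpy.degrees(space_rho), time_rho * 12)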