def test_get_array(self):
    p = CombinationHyperparameters([
        CovariateHyperparameters(23.6),
        LocalHyperparameters(log_sigma=0.1, log_rho=1.2)
    ])
    numpy.testing.assert_equal([23.6, 0.1, 1.2], p.get_array())
def test_get_element_ranges(self):
    p = CombinationHyperparameters([
        CovariateHyperparameters(23.6),
        LocalHyperparameters(log_sigma=0.1, log_rho=1.2),
        CovariateHyperparameters(24.7)
    ])
    self.assertEqual([[0], [1, 2], [3]], p.get_element_ranges())
def test_set_array(self):
    p = CombinationHyperparameters([
        CovariateHyperparameters(23.6),
        LocalHyperparameters(log_sigma=0.1, log_rho=1.2),
        CovariateHyperparameters(24.7)
    ])
    p.set_array(numpy.array([1.4, 1.5, 1.6, 1.7]))
    self.assertEqual(3, len(p.elementparameters))
    self.assertEqual(1.4, p.elementparameters[0].value)
    self.assertEqual(1.5, p.elementparameters[1].log_sigma)
    self.assertEqual(1.6, p.elementparameters[1].log_rho)
    self.assertEqual(1.7, p.elementparameters[2].value)
    numpy.testing.assert_equal([1.4, 1.5, 1.6, 1.7], p.get_array())
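# A minimal usage sketch (not part of the test suite) of the flattened-vector
# interface the three tests above exercise, grounded only in what those tests
# assert: get_array() concatenates each element's parameters in element order,
# get_element_ranges() reports the index span each element occupies in that
# vector, and set_array() scatters a vector back into the elements.
#
#     p = CombinationHyperparameters([
#         CovariateHyperparameters(23.6),
#         LocalHyperparameters(log_sigma=0.1, log_rho=1.2)
#     ])
#     p.get_array()           # [23.6, 0.1, 1.2]
#     p.get_element_ranges()  # [[0], [1, 2]]
#     p.set_array(numpy.array([1.0, 2.0, 3.0]))
#     p.get_array()           # [1.0, 2.0, 3.0]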
def __init__(self):

    # Number of factors for large scale (factor analysis) component and initial hyperparameters
    n_factors = 5
    factors = []
    factor_hyperparameters = []
    for factor_index in range(n_factors):

        factor_hyperparameters.append(
            SpaceTimeSPDEHyperparameters(
                space_log_sigma=0.0,
                space_log_rho=numpy.log(10.0 * numpy.pi / 180 + 25.0 * numpy.pi / 180 * (n_factors - factor_index) / n_factors),
                time_log_rho=numpy.log(1 / 12.0 + 6 / 12.0 * (n_factors - factor_index) / n_factors)))

        factors.append(
            SpaceTimeFactorElement(n_triangulation_divisions=5,
                                   alpha=2,
                                   starttime=0,
                                   endtime=36,
                                   overlap_factor=2.5,
                                   H=1))

    super(LargeScaleDefinition, self).__init__(
        CombinationElement(factors),
        CombinationHyperparameters(factor_hyperparameters))
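# To see the initial length-scale schedule the loop above encodes, the same
# expressions can be evaluated standalone (plain numpy, purely illustrative):
# the spatial scale shrinks from 35 to 15 degrees, and the temporal scale from
# 7/12 to 2.2/12, as the factor index increases.
import numpy

n_factors = 5
for factor_index in range(n_factors):
    space_rho = 10.0 * numpy.pi / 180 + 25.0 * numpy.pi / 180 * (n_factors - factor_index) / n_factors
    time_rho = 1 / 12.0 + 6 / 12.0 * (n_factors - factor_index) / n_factors
    print('factor %d: space %.1f degrees, time %.3f' % (factor_index, numpy.degrees(space_rho), time_rho))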
def __init__(self, bias_terms=False, global_biases_group_list=[], local_hyperparameter_file=None):

    setup = ShortScaleSetup(local_hyperparameter_file)

    bias_elements, bias_hyperparameters = [], []
    if bias_terms:
        for groupname in global_biases_group_list:
            if (groupname == 'surfaceairmodel_land_global') or (groupname == 'surfaceairmodel_ocean_global'):
                bias_elements.append(BiasElement(groupname, 1))
                bias_hyperparameters.append(
                    CovariateHyperparameters(numpy.log(setup.bias_settings.bias_amplitude)))
            elif (groupname == 'surfaceairmodel_ice_global'):
                bias_elements.append(BiasElement(groupname, 2))
                bias_hyperparameters.append(
                    CovariateHyperparameters(numpy.log(setup.bias_settings.bias_amplitude)))

    local_hyperparameters = ExpandedLocalHyperparameters(log_sigma=None, log_rho=None)
    local_hyperparameters.values_from_npy_savefile(local_hyperparameter_file)

    super(NonStationaryLocalDefinition, self).__init__(
        CombinationElement(
            [NonStationaryLocal(setup.local_settings.n_triangulation_divisions)] + bias_elements),
        CombinationHyperparameters([local_hyperparameters] + bias_hyperparameters))
def test_init(self):
    p = CombinationHyperparameters(
        [CovariateHyperparameters(23.6), CovariateHyperparameters(22.9)])
    self.assertEqual(2, len(p.elementparameters))
    self.assertEqual(23.6, p.elementparameters[0].value)
    self.assertEqual(22.9, p.elementparameters[1].value)
def test_prior_number_of_state_parameters(self):
    h = CombinationHyperparameters(
        [CovariateHyperparameters(0.1), CovariateHyperparameters(0.2)])
    priorlist = [
        CovariatePrior(h.elementparameters[0], number_of_state_parameters=4),
        CovariatePrior(h.elementparameters[1], number_of_state_parameters=5)
    ]
    prior = CombinationPrior(h, priorlist)
    self.assertEqual(9, prior.prior_number_of_state_parameters())
def test_element_prior(self):
    hyperparameters = CombinationHyperparameters([
        CovariateHyperparameters(23.6),
        LocalHyperparameters(log_sigma=0.1, log_rho=1.2)
    ])
    prior = CombinationElement([GrandMeanElement(), LocalElement(0)]).element_prior(hyperparameters)
    self.assertTrue(isinstance(prior, CombinationPrior))
    self.assertEqual(2, len(prior.priorlist))
    self.assertTrue(isinstance(prior.priorlist[0], CovariatePrior))
    self.assertTrue(isinstance(prior.priorlist[1], LocalPrior))
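# Reading the test above as a sketch of the pattern it asserts (comments only,
# inferred from the test rather than from the library internals):
# CombinationElement.element_prior takes a CombinationHyperparameters whose
# elements line up one-to-one with the combination's elements, and returns a
# CombinationPrior whose priorlist holds the matching per-element prior type:
#
#     GrandMeanElement + CovariateHyperparameters -> CovariatePrior
#     LocalElement     + LocalHyperparameters     -> LocalPrior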
def __init__(self, bias_terms=False, global_biases_group_list=[]):

    setup = LocalSetup()

    if bias_terms:
        bias_elements = [BiasElement(groupname, 1) for groupname in global_biases_group_list]
        bias_hyperparameters = [CovariateHyperparameters(numpy.log(setup.bias_amplitude))
                                for index in range(len(global_biases_group_list))]
    else:
        bias_elements, bias_hyperparameters = [], []

    super(LocalDefinition, self).__init__(
        CombinationElement([LocalElement(setup.n_triangulation_divisions)] + bias_elements),
        CombinationHyperparameters(
            [LocalHyperparameters(numpy.log(setup.amplitude),
                                  numpy.log(numpy.radians(setup.space_length_scale)))] + bias_hyperparameters))
def __init__(self, bias_terms=False, global_biases_group_list=[], local_hyperparameter_file=None):

    setup = NonStationaryLocalSetup()

    if bias_terms:
        bias_elements = [BiasElement(groupname, 1) for groupname in global_biases_group_list]
        bias_hyperparameters = [CovariateHyperparameters(numpy.log(setup.bias_amplitude))
                                for index in range(len(global_biases_group_list))]
    else:
        bias_elements, bias_hyperparameters = [], []

    local_hyperparameters = ExpandedLocalHyperparameters(log_sigma=None, log_rho=None)
    local_hyperparameters.values_from_npy_savefile(local_hyperparameter_file)

    super(NonStationaryLocalDefinition, self).__init__(
        CombinationElement([NonStationaryLocal(setup.n_triangulation_divisions)] + bias_elements),
        CombinationHyperparameters([local_hyperparameters] + bias_hyperparameters))
def test_element_prior(self):
    element = LatitudeHarmonicsElement()
    prior = element.element_prior(
        CombinationHyperparameters(
            [CovariateHyperparameters(c) for c in [1.0, 1.1, 1.2, 1.3]]))
    self.assertIsInstance(prior, LatitudeHarmonicsPrior)
    # As an example, check the precision - it should be diagonal with entries exp(-2 x hyperparameter)
    precision = prior.prior_precision()
    self.assertEqual(SPARSEFORMAT, precision.getformat())
    self.assertEqual(4, precision.nnz)
    self.assertAlmostEqual(numpy.exp(-2.0), precision[0, 0])
    self.assertAlmostEqual(numpy.exp(-2.2), precision[1, 1])
    self.assertAlmostEqual(numpy.exp(-2.4), precision[2, 2])
    self.assertAlmostEqual(numpy.exp(-2.6), precision[3, 3])
def __init__(self):

    setup = SlowSetupT2M1()

    model_elements = CombinationElement([
        AnnualKroneckerElement(setup.n_triangulation_divisions, setup.alpha,
                               setup.starttime, setup.endtime, setup.n_nodes,
                               setup.overlap_factor, setup.H),
    ])

    model_hyperparameters = CombinationHyperparameters([
        SpaceTimeSPDEHyperparameters(
            numpy.log(setup.amplitude),
            numpy.log(numpy.radians(setup.space_length_scale)),
            numpy.log(setup.time_length_scale)),
    ])

    super(TestComponentDefinition, self).__init__(model_elements, model_hyperparameters)
def __init__(self, bias_terms=False, breakpoints_file=None):

    setup = MidScaleSetup()

    if bias_terms:
        bias_element = [
            InsituLandBiasElement(breakpoints_file, apply_policy=True, cut_value=3)
        ]
        bias_hyperparameters = [
            CovariateHyperparameters(numpy.log(setup.bias_settings.bias_amplitude))
        ]
    else:
        bias_element, bias_hyperparameters = [], []

    super(LargeScaleDefinition, self).__init__(
        CombinationElement([
            SpaceTimeKroneckerElement(
                setup.midscale_settings.n_triangulation_divisions,
                setup.midscale_settings.alpha,
                setup.midscale_settings.starttime,
                setup.midscale_settings.endtime,
                setup.midscale_settings.n_nodes,
                setup.midscale_settings.overlap_factor,
                setup.midscale_settings.H),
            AnnualKroneckerElement(
                setup.slow_settings.n_triangulation_divisions,
                setup.slow_settings.alpha,
                setup.slow_settings.starttime,
                setup.slow_settings.endtime,
                setup.slow_settings.n_nodes,
                setup.slow_settings.overlap_factor,
                setup.slow_settings.H),
        ] + bias_element),
        CombinationHyperparameters([
            SpaceTimeSPDEHyperparameters(
                numpy.log(setup.midscale_settings.amplitude),
                numpy.log(numpy.radians(setup.midscale_settings.space_length_scale)),
                numpy.log(setup.midscale_settings.time_length_scale)),
            SpaceTimeSPDEHyperparameters(
                numpy.log(setup.slow_settings.amplitude),
                numpy.log(numpy.radians(setup.slow_settings.space_length_scale)),
                numpy.log(setup.slow_settings.time_length_scale)),
        ] + bias_hyperparameters))
def test_prior_precision_derivative(self):
    h = CombinationHyperparameters([
        CovariateHyperparameters(-0.5 * numpy.log(23.6)),
        CovariateHyperparameters(-0.5 * numpy.log(88.9))
    ])
    priorlist = [
        CovariatePrior(h.elementparameters[0], number_of_state_parameters=2),
        CovariatePrior(h.elementparameters[1], number_of_state_parameters=3)
    ]
    prior = CombinationPrior(h, priorlist)
    dQ = prior.prior_precision_derivative(1)
    self.assertEqual(SPARSEFORMAT, dQ.getformat())
    self.assertEqual((5, 5), dQ.shape)
    self.assertEqual(3, dQ.nnz)
    self.assertAlmostEqual(-2.0 * 88.9, dQ[2, 2])
    self.assertAlmostEqual(-2.0 * 88.9, dQ[3, 3])
    self.assertAlmostEqual(-2.0 * 88.9, dQ[4, 4])
def test_prior_precision(self):
    h = CombinationHyperparameters([
        CovariateHyperparameters(-0.5 * numpy.log(23.6)),
        CovariateHyperparameters(-0.5 * numpy.log(88.9))
    ])
    priorlist = [
        CovariatePrior(h.elementparameters[0], number_of_state_parameters=2),
        CovariatePrior(h.elementparameters[1], number_of_state_parameters=3)
    ]
    prior = CombinationPrior(h, priorlist)
    Q = prior.prior_precision()
    self.assertEqual(SPARSEFORMAT, Q.getformat())
    numpy.testing.assert_almost_equal(
        Q.todense(),
        [[23.6, 0.0, 0.0, 0.0, 0.0],
         [0.0, 23.6, 0.0, 0.0, 0.0],
         [0.0, 0.0, 88.9, 0.0, 0.0],
         [0.0, 0.0, 0.0, 88.9, 0.0],
         [0.0, 0.0, 0.0, 0.0, 88.9]])
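# A standalone numerical check of the parameterisation the two tests above
# (and the latitude-harmonics test earlier) imply; this is inferred from the
# expected values in the tests rather than taken from the library code. A
# hyperparameter theta contributes a diagonal precision exp(-2 * theta), so
# choosing theta = -0.5 * log(q) yields precision q, and the derivative
# d/dtheta exp(-2 * theta) = -2 * exp(-2 * theta) reproduces the -2.0 * 88.9
# entries in the derivative test.
import numpy

theta = -0.5 * numpy.log(88.9)
print(numpy.exp(-2.0 * theta))         # 88.9: the diagonal of Q
print(-2.0 * numpy.exp(-2.0 * theta))  # -177.8: the diagonal of dQ/dtheta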
def __init__(self, covariates_descriptor):

    if covariates_descriptor is not None:
        loader = LoadCovariateElement(covariates_descriptor)
        loader.check_keys()
        covariate_elements, covariate_hyperparameters = loader.load_covariates_and_hyperparameters()
        print('The following fields have been added as covariates of the climatology model')
        print(loader.data.keys())
    else:
        covariate_elements, covariate_hyperparameters = [], []

    setup = ClimatologySetup()

    super(ClimatologyDefinition, self).__init__(
        CombinationElement(
            [SeasonalElement(setup.n_triangulation_divisions,
                             setup.n_harmonics,
                             include_local_mean=True),
             GrandMeanElement()] + covariate_elements),
        CombinationHyperparameters(
            [SeasonalHyperparameters(setup.n_spatial_components,
                                     numpy.log(setup.amplitude),
                                     numpy.log(numpy.radians(setup.space_length_scale))),
             CovariateHyperparameters(numpy.log(setup.grandmean_amplitude))] + covariate_hyperparameters))
def __init__(self, bias_terms=False, global_biases_group_list=[]):

    setup = ShortScaleSetup()

    if bias_terms:
        bias_elements = [
            BiasElement(groupname, 1) for groupname in global_biases_group_list
        ]
        bias_hyperparameters = [
            CovariateHyperparameters(numpy.log(setup.bias_settings.bias_amplitude))
            for index in range(len(global_biases_group_list))
        ]
    else:
        bias_elements, bias_hyperparameters = [], []

    super(PureBiasComponentDefinition, self).__init__(
        CombinationElement(bias_elements),
        CombinationHyperparameters(bias_hyperparameters))
def test_mini_world_latitude_harmonics(self):
    """Testing on a simple mock data file using latitude harmonics"""

    # GENERATING OBSERVATIONS
    # Simulated locations: they sit exactly on the grid points of the covariate datafile
    locations = numpy.array([[0.0, 0.0], [0.25, 0.5], [0.5, 0.0]])

    # Simulated model is y = a*cos(2x) + b*sin(2x) + c*cos(4x) + d*sin(4x) with x = latitude,
    # so we expect a=c=1, b=d=0
    measurement = LatitudeFunction(numpy.cos, 2.0).compute(locations[:, 0]).ravel() \
                + LatitudeFunction(numpy.cos, 4.0).compute(locations[:, 0]).ravel()

    # Simulated errors
    uncorrelatederror = 0.1 * numpy.ones(measurement.shape)

    # Simulated inputs
    simulated_input_loader = SimulatedInputLoader(locations, measurement, uncorrelatederror)

    # Simulate evaluation of this time index
    simulated_time_indices = [0]

    latitude_harmonics_component = SpatialComponent(
        ComponentStorage_InMemory(
            LatitudeHarmonicsElement(),
            CombinationHyperparameters([
                CovariateHyperparameters(-0.5 * numpy.log(p))
                for p in [10.0, 10.0, 10.0, 10.0]
            ])),
        SpatialComponentSolutionStorage_InMemory())

    # Analysis system using the specified components, for the Tmean observable
    analysis_system = AnalysisSystem([latitude_harmonics_component],
                                     ObservationSource.TMEAN,
                                     log=StringIO())

    # GENERATING THE ANALYSIS
    # Update with data
    analysis_system.update([simulated_input_loader], simulated_time_indices)

    # Check state vector directly
    statevector = analysis_system.components[0].solutionstorage.partial_state_read(0).ravel()

    # These are the nodes where observations were put (see SimulatedObservationSource above)
    # - check they correspond to within 3 times the stated noise level
    self.assertAlmostEqual(1., statevector[0], delta=0.3)
    self.assertAlmostEqual(1., statevector[2], delta=0.3)
    self.assertAlmostEqual(0., statevector[1], delta=0.3)
    self.assertAlmostEqual(0., statevector[3], delta=0.3)

    # Also check entire state vector within outer bounds set by obs
    self.assertTrue(all(statevector < 1.0))

    # And check output corresponds too
    # (evaluate result on output structure same as input)
    simulated_output_structure = SimulatedObservationStructure(0, locations, None, None)
    result = analysis_system.evaluate_expected_value('MAP', simulated_output_structure, flag='POINTWISE')
    expected = statevector[0] * LatitudeFunction(numpy.cos, 2.0).compute(locations[:, 0]).ravel() \
             + statevector[1] * LatitudeFunction(numpy.sin, 2.0).compute(locations[:, 0]).ravel() \
             + statevector[2] * LatitudeFunction(numpy.cos, 4.0).compute(locations[:, 0]).ravel() \
             + statevector[3] * LatitudeFunction(numpy.sin, 4.0).compute(locations[:, 0]).ravel()
    numpy.testing.assert_almost_equal(expected, result)
def __init__(self, bias_terms=False, global_biases_group_list=[]):

    setup = ShortScaleSetup()

    bias_elements, bias_hyperparameters = [], []
    if bias_terms:
        for groupname in global_biases_group_list:
            #if (groupname == 'surfaceairmodel_land_global') or (groupname == 'surfaceairmodel_ocean_global'):
                #bias_elements.append( BiasElement(groupname, 1) )
                #bias_hyperparameters.append( CovariateHyperparameters(numpy.log(setup.bias_settings.bias_amplitude)) )
            if (groupname == 'surfaceairmodel_land_global'):
                # global mean term
                bias_elements.append(BiasElement(groupname, 1))
                bias_hyperparameters.append(
                    CovariateHyperparameters(numpy.log(setup.bias_settings.bias_amplitude)))
                # spatial bias term
                bias_elements.append(
                    SpatialBiasElement(groupname, setup.spatial_bias_settings.n_triangulation_divisions))
                bias_hyperparameters.append(
                    LocalHyperparameters(
                        numpy.log(setup.spatial_bias_settings.spatial_bias_amplitutde),
                        numpy.log(numpy.radians(setup.spatial_bias_settings.spatial_bias_length_scale))))
            elif (groupname == 'surfaceairmodel_ocean_global'):
                # global mean term
                bias_elements.append(BiasElement(groupname, 1))
                bias_hyperparameters.append(
                    CovariateHyperparameters(numpy.log(setup.bias_settings.bias_amplitude)))
            elif (groupname == 'surfaceairmodel_ice_global'):
                # hemispheric term
                bias_elements.append(BiasElement(groupname, 2))
                bias_hyperparameters.append(
                    CovariateHyperparameters(numpy.log(setup.bias_settings.bias_amplitude)))
                # spatial bias term
                bias_elements.append(
                    SpatialBiasElement(groupname, setup.spatial_bias_settings.n_triangulation_divisions))
                bias_hyperparameters.append(
                    LocalHyperparameters(
                        numpy.log(setup.spatial_bias_settings.spatial_bias_amplitutde),
                        numpy.log(numpy.radians(setup.spatial_bias_settings.spatial_bias_length_scale))))

    super(LocalDefinition, self).__init__(
        CombinationElement(
            [LocalElement(setup.local_settings.n_triangulation_divisions)] + bias_elements),
        CombinationHyperparameters(
            [LocalHyperparameters(
                numpy.log(setup.local_settings.amplitude),
                numpy.log(numpy.radians(setup.local_settings.space_length_scale)))] + bias_hyperparameters))
def main():

    print 'Advanced standard example using a few days of EUSTACE data'

    parser = argparse.ArgumentParser(description='Advanced standard example using a few days of EUSTACE data')
    parser.add_argument('outpath', help='directory where the output should be redirected')
    parser.add_argument('--json_descriptor', default=None,
                        help='a json descriptor containing the covariates to include in the climatology model')
    parser.add_argument('--land_biases', action='store_true', help='include insitu land homogenization bias terms')
    parser.add_argument('--global_biases', action='store_true', help='include global satellite bias terms')
    parser.add_argument('--n_iterations', type=int, default=5, help='number of solving iterations')
    args = parser.parse_args()

    # Input data path
    basepath = os.path.join('/work/scratch/eustace/rawbinary3')

    # Days to process
    #time_indices = range(int(days_since_epoch(datetime(2006, 2, 1))), int(days_since_epoch(datetime(2006, 2, 2))))
    #time_indices = range(int(days_since_epoch(datetime(1906, 2, 1))), int(days_since_epoch(datetime(1906, 2, 2))))
    date_list = [datetime(2006, 1, 1) + relativedelta(days=k) for k in range(3)]
    #backwards_list = [date_list[i] for i in range(11, -1, -1)]
    #date_list = backwards_list
    time_indices = [int(days_since_epoch(date)) for date in date_list]

    # Sources to use
    sources = ['surfaceairmodel_land', 'surfaceairmodel_ocean', 'surfaceairmodel_ice', 'insitu_land', 'insitu_ocean']
    sources = ['insitu_land', 'insitu_ocean']
    #sources = ['surfaceairmodel_land']

    # CLIMATOLOGY COMPONENT: combining the seasonal core along with latitude harmonics, altitude and coastal effects
    if args.json_descriptor is not None:
        loader = LoadCovariateElement(args.json_descriptor)
        loader.check_keys()
        covariate_elements, covariate_hyperparameters = loader.load_covariates_and_hyperparameters()
        print('The following fields have been added as covariates of the climatology model')
        print(loader.data.keys())
    else:
        covariate_elements, covariate_hyperparameters = [], []

    #climatology_element = CombinationElement( [SeasonalElement(n_triangulation_divisions=2, n_harmonics=2, include_local_mean=False), GrandMeanElement()]+covariate_elements)
    #climatology_hyperparameters = CombinationHyperparameters( [SeasonalHyperparameters(n_spatial_components=2, common_log_sigma=0.0, common_log_rho=0.0), CovariateHyperparameters(numpy.log(15.0))] + covariate_hyperparameters )
    climatology_element = CombinationElement([
        GrandMeanElement(),
    ] + covariate_elements)
    climatology_hyperparameters = CombinationHyperparameters([
        CovariateHyperparameters(numpy.log(15.0)),
    ] + covariate_hyperparameters)
    #climatology_element = SeasonalElement(n_triangulation_divisions=2, n_harmonics=2, include_local_mean=False)
    #climatology_hyperparameters = SeasonalHyperparameters(n_spatial_components=2, common_log_sigma=0.0, common_log_rho=0.0)

    climatology_component = SpaceTimeComponent(
        ComponentStorage_InMemory(climatology_element, climatology_hyperparameters),
        SpaceTimeComponentSolutionStorage_InMemory(),
        compute_uncertainties=True, method='APPROXIMATED')

    # LARGE SCALE (kronecker product) COMPONENT: combining large scale trends with bias terms accounting for homogenization effects
    if args.land_biases:
        bias_element, bias_hyperparameters = [InsituLandBiasElement(BREAKPOINTS_FILE)], [CovariateHyperparameters(numpy.log(.9))]
        print('Adding bias terms for insitu land homogenization')
    else:
        bias_element, bias_hyperparameters = [], []

    large_scale_element = CombinationElement([
        SpaceTimeKroneckerElement(n_triangulation_divisions=2,
                                  alpha=2,
                                  starttime=-30,
                                  endtime=365 * 1 + 30,
                                  n_nodes=12 * 1 + 2,
                                  overlap_factor=2.5,
                                  H=1)
    ] + bias_element)
    large_scale_hyperparameters = CombinationHyperparameters([
        SpaceTimeSPDEHyperparameters(space_log_sigma=0.0,
                                     space_log_rho=numpy.log(numpy.radians(15.0)),
                                     time_log_rho=numpy.log(15.0))
    ] + bias_hyperparameters)
    large_scale_component = SpaceTimeComponent(
        ComponentStorage_InMemory(large_scale_element, large_scale_hyperparameters),
        SpaceTimeComponentSolutionStorage_InMemory(),
        compute_uncertainties=True, method='APPROXIMATED')

    # LOCAL COMPONENT: combining local scale variations with global satellite bias terms
    if args.global_biases:
        bias_elements = [BiasElement(groupname, 1) for groupname in GLOBAL_BIASES_GROUP_LIST]
        bias_hyperparameters = [CovariateHyperparameters(numpy.log(15.0)) for index in range(3)]
        print('Adding global bias terms for all the surfaces')
    else:
        bias_elements, bias_hyperparameters = [], []

    n_triangulation_divisions_local = 7
    local_log_sigma = numpy.log(5)
    local_log_rho = numpy.log(numpy.radians(5.0))
    local_element = NonStationaryLocal(n_triangulation_divisions=n_triangulation_divisions_local)
    n_local_nodes = local_element.spde.n_latent_variables()
    local_scale_element = CombinationElement([local_element] + bias_elements)
    local_hyperparameters = ExpandedLocalHyperparameters(
        log_sigma=numpy.repeat(local_log_sigma, n_local_nodes),
        log_rho=numpy.repeat(local_log_rho, n_local_nodes))
    local_scale_hyperparameters = CombinationHyperparameters([local_hyperparameters] + bias_hyperparameters)
    local_component = DelayedSpatialComponent(
        ComponentStorage_InMemory(local_scale_element, local_scale_hyperparameters),
        SpatialComponentSolutionStorage_InMemory(),
        compute_uncertainties=True, method='APPROXIMATED')

    print "hyperparameter storage:", local_component.storage.hyperparameters

    print 'Analysing inputs'

    # Analysis system using the specified components, for the Tmean observable
    ##analysis_system = AnalysisSystem(
    ##    [climatology_component, large_scale_component, local_component],
    ##    ObservationSource.TMEAN)
    analysis_system = OptimizationSystem(
        [climatology_component, local_component],
        ObservationSource.TMEAN)

    # Object to load raw binary inputs at time indices
    inputloaders = [
        AnalysisSystemInputLoaderRawBinary_Sources(basepath, source, time_indices)
        for source in sources
    ]

    for iteration in range(args.n_iterations):

        message = 'Iteration {}'.format(iteration)
        print(message)

        # Update with data
        analysis_system.update(inputloaders, time_indices)

        ##################################################

        # Optimize local model hyperparameters
        # Loop over local regions, generate optimization systems, fit hyperparameters and save

        # split spde and bias models for local component into two components
        global_spde_sub_component_definition = ComponentStorage_InMemory(
            CombinationElement([local_element]),
            CombinationHyperparameters([local_hyperparameters]))
        global_spde_sub_component_storage_solution = SpatialComponentSolutionStorage_InMemory()
        global_spde_sub_component = DelayedSpatialComponent(
            global_spde_sub_component_definition,
            global_spde_sub_component_storage_solution)

        bias_sub_component_definition = ComponentStorage_InMemory(
            CombinationElement(bias_elements),
            CombinationHyperparameters(bias_hyperparameters))
        bias_sub_component_storage_solution = SpatialComponentSolutionStorage_InMemory()
        bias_sub_component = DelayedSpatialComponent(
            bias_sub_component_definition,
            bias_sub_component_storage_solution)

        element_optimisation_flags = [True, False, False, False]  # one spde, three biases

        for time_key in time_indices:
            split_states_time(local_component, global_spde_sub_component,
                              bias_sub_component, element_optimisation_flags, time_key)

        # Define subregions and extract their states
        neighbourhood_level = 1
        n_subregions = global_spde_sub_component.storage.element_read().combination[0].spde.n_triangles_at_level(neighbourhood_level)

        hyperparameter_file_template = "local_hyperparameters.%i.%i.%i.npy"

        fit_hyperparameters = True
        optimization_component_index = 2
        if fit_hyperparameters:
            for region_index in range(n_subregions):

                # Setup model for local subregion of neighbours with super triangle
                view_flags = [True, ]
                region_element = CombinationElement([
                    LocalSubRegion(n_triangulation_divisions_local, neighbourhood_level, region_index)
                ])
                region_hyperparameters = ExtendedCombinationHyperparameters([
                    LocalHyperparameters(log_sigma=local_log_sigma, log_rho=local_log_rho)
                ])
                region_component_storage_solution = SpatialComponentSolutionStorage_InMemory()
                region_sub_component = DelayedSpatialComponent(
                    ComponentStorage_InMemory(region_element, region_hyperparameters),
                    region_component_storage_solution)

                for time_key in time_indices:
                    print "region_index, time_key:", region_index, time_key
                    extract_local_view_states_time(global_spde_sub_component, region_sub_component, view_flags, time_key)

                print "running optimization for region:", region_index
                region_optimization_system = OptimizationSystem(
                    [climatology_component, bias_sub_component, region_sub_component],
                    ObservationSource.TMEAN)

                for time_key in time_indices:
                    region_optimization_system.update_component_time(inputloaders, optimization_component_index, time_key)

                # commented version that works for few days of inputs
                #region_optimization_system.components[optimization_component_index].component_solution().optimize()
                #region_optimization_system.components[optimization_component_index].storage.hyperparameters.get_array()
                #hyperparameter_file = os.path.join(args.outpath, hyperparameter_file_template % (n_triangulation_divisions_local, neighbourhood_level, region_index))
                #region_sub_component.storage.hyperparameters.values_to_npy_savefile(hyperparameter_file)

                # replaced with version for full processing based on a json dump of input files
                # - need to generate the input_descriptor dict
                hyperparameter_file = os.path.join(
                    args.outpath,
                    hyperparameter_file_template % (n_triangulation_divisions_local, neighbourhood_level, region_index))
                region_optimization_system.process_inputs(input_descriptor, optimization_component_index, time_indices)
                region_optimization_system.optimize_component(
                    optimization_component_index,
                    hyperparameter_storage_file=hyperparameter_file)

                fitted_hyperparameters_converted = region_sub_component.storage.hyperparameters.get_array()
                fitted_hyperparameters_converted[0] = numpy.exp(fitted_hyperparameters_converted[0])
                fitted_hyperparameters_converted[1] = numpy.exp(fitted_hyperparameters_converted[1]) * 180.0 / numpy.pi
                print 'fitted_hyperparameters_converted:', fitted_hyperparameters_converted

        # Setup model for the super triangle without neighbours for hyperparameter merging
        region_spdes = []
        region_hyperparameter_values = []
        for region_index in range(n_subregions):

            # Redefine the region sub component as a supertriangle rather than a neighbourhood
            region_element = CombinationElement([
                LocalSuperTriangle(n_triangulation_divisions_local, neighbourhood_level, region_index)
            ])
            region_hyperparameters = ExtendedCombinationHyperparameters([
                LocalHyperparameters(log_sigma=local_log_sigma, log_rho=local_log_rho)
            ])
            region_component_storage_solution = SpatialComponentSolutionStorage_InMemory()
            region_sub_component = DelayedSpatialComponent(
                ComponentStorage_InMemory(region_element, region_hyperparameters),
                region_component_storage_solution)

            # Read the optimized hyperparameters
            hyperparameter_file = os.path.join(
                args.outpath,
                hyperparameter_file_template % (n_triangulation_divisions_local, neighbourhood_level, region_index))
            region_sub_component.storage.hyperparameters.values_from_npy_savefile(hyperparameter_file)

            # Append the spde model and hyperparameters to their lists for merging
            region_spdes.append(region_element.combination[0].spde)
            region_hyperparameter_values.append(region_sub_component.storage.hyperparameters.get_array())

        # merge and save hyperparameters
        full_spde = local_element.spde
        new_hyperparameter_values, global_sigma_design, global_rho_design = full_spde.merge_local_parameterisations(
            region_spdes, region_hyperparameter_values, merge_method='exp_average')

        local_hyperparameters.set_array(new_hyperparameter_values)
        hyperparameter_file_merged = "merged_hyperparameters.%i.%i.npy" % (n_triangulation_divisions_local, neighbourhood_level)
        local_hyperparameters.values_to_npy_savefile(os.path.join(args.outpath, hyperparameter_file_merged))

        # Refit local model with the optimized hyperparameters
        analysis_system.update_component(inputloaders, 1, time_indices)

        ##################################################

    print 'Computing outputs'

    # Produce an output for each time index
    for time_index in time_indices:

        # Get date for output
        outputdate = inputloaders[0].datetime_at_time_index(time_index)
        print 'Evaluating output grid: ', outputdate

        # Configure output grid
        outputstructure = OutputRectilinearGridStructure(
            time_index, outputdate,
            latitudes=numpy.linspace(-89.875, 89.875, num=definitions.GLOBAL_FIELD_SHAPE[1]),
            longitudes=numpy.linspace(-179.875, 179.875, num=definitions.GLOBAL_FIELD_SHAPE[2]))
        # print 'Size of grid : ', outputstructure.number_of_observations()

        # Evaluate expected value at these locations
        result_expected_value = analysis_system.evaluate_expected_value('MAP', outputstructure, 'POINTWISE')
        result_expected_uncertainties = analysis_system.evaluate_expected_value('post_STD', outputstructure, 'POINTWISE')

        # Make output filename
        pathname = 'eustace_example_output_{0:04d}{1:02d}{2:02d}.nc'.format(outputdate.year, outputdate.month, outputdate.day)
        pathname = os.path.join(args.outpath, pathname)
        print 'Saving: ', pathname

        # Save results
        filebuilder = FileBuilderGlobalField(
            pathname, time_index, 'Infilling Example', 'UNVERSIONED', definitions.TAS.name, '',
            'Example data only',
            'eustace.analysis.advanced_standard.examples.example_eustace_few_days', '')
        filebuilder.add_global_field(definitions.TAS, result_expected_value.reshape(definitions.GLOBAL_FIELD_SHAPE))
        filebuilder.add_global_field(definitions.TASUNCERTAINTY, result_expected_uncertainties.reshape(definitions.GLOBAL_FIELD_SHAPE))
        filebuilder.save_and_close()

    print 'Complete'
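# Summary of the local hyperparameter optimisation loop above, restated from
# the code itself (comments only, no new behaviour):
#
#   1. After updating the analysis system, split the local component's state
#      into an SPDE sub-component and a bias sub-component (split_states_time),
#      so the SPDE part can be treated on its own.
#   2. For each of the n_subregions super-triangles at neighbourhood_level,
#      extract a local view of the SPDE states (extract_local_view_states_time),
#      build an OptimizationSystem around a LocalSubRegion element, fit its
#      LocalHyperparameters and save them to local_hyperparameters.<i>.<j>.<k>.npy.
#   3. Re-read each fitted file through a LocalSuperTriangle view and merge the
#      regional parameterisations back onto the full SPDE with
#      merge_local_parameterisations(merge_method='exp_average'), saving the
#      merged vector to merged_hyperparameters.<i>.<j>.npy.
#   4. Refit the local model with the merged hyperparameters
#      (analysis_system.update_component) before evaluating outputs.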
def main():

    print 'EUSTACE example using HadCRUT4 monthly data'

    # Input data path
    input_basepath = os.path.join(WORKSPACE_PATH, 'data/incoming/HadCRUT4.5.0.0')

    # Input filenames
    input_filenames = [
        'hadcrut4_median_netcdf.nc',
        'hadcrut4_uncorrelated_supplementary.nc',
        'hadcrut4_blended_uncorrelated.nc'
    ]

    # Months to process
    time_indices = range(2)

    # Climatology component
    climatology_component = SpaceTimeComponent(
        ComponentStorage_InMemory(
            SeasonalElement(n_triangulation_divisions=5, n_harmonics=5, include_local_mean=True),
            SeasonalHyperparameters(n_spatial_components=6, common_log_sigma=1.0, common_log_rho=0.0)),
        SpaceTimeComponentSolutionStorage_InMemory())

    # Number of factors for large scale (factor analysis) component and initial hyperparameters
    n_factors = 5
    factors = []
    factor_hyperparameters = []
    for factor_index in range(n_factors):

        factor_hyperparameters.append(
            SpaceTimeSPDEHyperparameters(
                space_log_sigma=0.0,
                space_log_rho=numpy.log(10.0 * numpy.pi / 180 + 25.0 * numpy.pi / 180 * (n_factors - factor_index) / n_factors),
                time_log_rho=numpy.log(1 / 12.0 + 6 / 12.0 * (n_factors - factor_index) / n_factors)))

        factors.append(SpaceTimeFactorElement(n_triangulation_divisions=5, alpha=2, starttime=0, endtime=36, overlap_factor=2.5, H=1))

    # Large scale (factor analysis) component
    large_scale_component = SpaceTimeComponent(
        ComponentStorage_InMemory(CombinationElement(factors), CombinationHyperparameters(factor_hyperparameters)),
        SpaceTimeComponentSolutionStorage_InMemory())

    # Local component
    local_component = SpatialComponent(
        ComponentStorage_InMemory(
            LocalElement(n_triangulation_divisions=4),
            LocalHyperparameters(log_sigma=0.0, log_rho=numpy.log(10.0 * numpy.pi / 180))),
        SpatialComponentSolutionStorage_InMemory())

    print 'Analysing inputs'

    # Analysis system using the specified components, for the Tmean observable
    analysis_system = AnalysisSystem(
        [climatology_component, large_scale_component, local_component],
        ObservationSource.TMEAN)

    # Make filelist
    input_filelist = [os.path.join(input_basepath, filename) for filename in input_filenames]

    # Object to load HadCRUT4 inputs at time indices
    inputloader = AnalysisSystemInputLoaderHadCRUT4(input_filelist)

    # Update with data
    analysis_system.update([inputloader], time_indices)

    print 'Computing outputs'

    # Produce an output for each time index
    for time_index in time_indices:

        # Make output filename
        outputdate = inputloader.datetime_at_time_index(time_index)
        pathname = 'example_output_{0:04d}{1:02d}.nc'.format(outputdate.year, outputdate.month)
        print 'Saving: ', pathname

        # Configure output grid
        outputstructure = OutputRectilinearGridStructure(
            time_index, outputdate,
            latitudes=numpy.linspace(-87.5, 87.5, num=36),
            longitudes=numpy.linspace(-177.5, 177.5, num=72))

        # Evaluate expected value at these locations
        result_expected_value = analysis_system.evaluate_expected_value(outputstructure)

        # Save results
        filebuilder = FileBuilderHadCRUT4ExampleOutput(pathname, outputstructure)
        filebuilder.add_global_field(TAS_ANOMALY, result_expected_value.reshape(1, 36, 72))
        filebuilder.save_and_close()

    print 'Complete'
def main():

    print 'Advanced standard example using a few days of EUSTACE data'

    parser = argparse.ArgumentParser(description='Advanced standard example using a few days of EUSTACE data')
    parser.add_argument('outpath', help='directory where the output should be redirected')
    parser.add_argument('--json_descriptor', default=None,
                        help='a json descriptor containing the covariates to include in the climatology model')
    parser.add_argument('--land_biases', action='store_true', help='include insitu land homogenization bias terms')
    parser.add_argument('--global_biases', action='store_true', help='include global satellite bias terms')
    parser.add_argument('--n_iterations', type=int, default=5, help='number of solving iterations')
    args = parser.parse_args()

    # Input data path
    basepath = os.path.join('/work/scratch/eustace/rawbinary3')

    # Days to process
    time_indices = range(int(days_since_epoch(datetime(2006, 2, 1))), int(days_since_epoch(datetime(2006, 2, 2))))

    # Sources to use
    sources = ['surfaceairmodel_land', 'surfaceairmodel_ocean', 'surfaceairmodel_ice', 'insitu_land', 'insitu_ocean']

    # SETUP
    # setup for the seasonal core: climatology covariates setup read from file
    seasonal_setup = {'n_triangulation_divisions': 5,
                      'n_harmonics': 4,
                      'n_spatial_components': 6,
                      'amplitude': 2.,
                      'space_length_scale': 5.,  # length scale in units of degrees
                      }
    grandmean_amplitude = 15.0

    # setup for the large scale component
    spacetime_setup = {'n_triangulation_divisions': 2,
                       'alpha': 2,
                       'starttime': 0,
                       'endtime': 10.,
                       'n_nodes': 2,
                       'overlap_factor': 2.5,
                       'H': 1,
                       'amplitude': 1.,
                       'space_length_scale': 15.0,  # length scale in units of degrees
                       'time_length_scale': 15.0,   # length scale in units of days
                       }
    bias_amplitude = .9

    # setup for the local component
    local_setup = {'n_triangulation_divisions': 6,
                   'amplitude': 2.,
                   'space_length_scale': 2.,  # length scale in units of degrees
                   }
    globalbias_amplitude = 15.0

    # CLIMATOLOGY COMPONENT: combining the seasonal core along with latitude harmonics, altitude and coastal effects
    if args.json_descriptor is not None:
        loader = LoadCovariateElement(args.json_descriptor)
        loader.check_keys()
        covariate_elements, covariate_hyperparameters = loader.load_covariates_and_hyperparameters()
        print('The following fields have been added as covariates of the climatology model')
        print(loader.data.keys())
    else:
        covariate_elements, covariate_hyperparameters = [], []

    climatology_element = CombinationElement(
        [SeasonalElement(n_triangulation_divisions=seasonal_setup['n_triangulation_divisions'],
                         n_harmonics=seasonal_setup['n_harmonics'],
                         include_local_mean=True),
         GrandMeanElement()] + covariate_elements)
    climatology_hyperparameters = CombinationHyperparameters(
        [SeasonalHyperparameters(n_spatial_components=seasonal_setup['n_spatial_components'],
                                 common_log_sigma=numpy.log(seasonal_setup['amplitude']),
                                 common_log_rho=numpy.log(numpy.radians(seasonal_setup['space_length_scale']))),
         CovariateHyperparameters(numpy.log(grandmean_amplitude))] + covariate_hyperparameters)
    climatology_component = SpaceTimeComponent(
        ComponentStorage_InMemory(climatology_element, climatology_hyperparameters),
        SpaceTimeComponentSolutionStorage_InMemory(),
        compute_uncertainties=True, method='APPROXIMATED',
        compute_sample=True, sample_size=definitions.GLOBAL_SAMPLE_SHAPE[3])

    # LARGE SCALE (kronecker product) COMPONENT: combining large scale trends with bias terms accounting for homogenization effects
    if args.land_biases:
        bias_element, bias_hyperparameters = [InsituLandBiasElement(BREAKPOINTS_FILE)], [CovariateHyperparameters(numpy.log(bias_amplitude))]
        print('Adding bias terms for insitu land homogenization')
    else:
        bias_element, bias_hyperparameters = [], []

    large_scale_element = CombinationElement(
        [SpaceTimeKroneckerElement(n_triangulation_divisions=spacetime_setup['n_triangulation_divisions'],
                                   alpha=spacetime_setup['alpha'],
                                   starttime=spacetime_setup['starttime'],
                                   endtime=spacetime_setup['endtime'],
                                   n_nodes=spacetime_setup['n_nodes'],
                                   overlap_factor=spacetime_setup['overlap_factor'],
                                   H=spacetime_setup['H'])] + bias_element)
    large_scale_hyperparameters = CombinationHyperparameters(
        [SpaceTimeSPDEHyperparameters(space_log_sigma=numpy.log(spacetime_setup['amplitude']),
                                      space_log_rho=numpy.log(numpy.radians(spacetime_setup['space_length_scale'])),
                                      time_log_rho=numpy.log(spacetime_setup['time_length_scale']))] + bias_hyperparameters)
    large_scale_component = SpaceTimeComponent(
        ComponentStorage_InMemory(large_scale_element, large_scale_hyperparameters),
        SpaceTimeComponentSolutionStorage_InMemory(),
        compute_uncertainties=True, method='APPROXIMATED',
        compute_sample=True, sample_size=definitions.GLOBAL_SAMPLE_SHAPE[3])

    # LOCAL COMPONENT: combining local scale variations with global satellite bias terms
    if args.global_biases:
        bias_elements = [BiasElement(groupname, 1) for groupname in GLOBAL_BIASES_GROUP_LIST]
        bias_hyperparameters = [CovariateHyperparameters(numpy.log(globalbias_amplitude)) for index in range(len(GLOBAL_BIASES_GROUP_LIST))]
        print('Adding global bias terms for all the surfaces')
    else:
        bias_elements, bias_hyperparameters = [], []

    local_scale_element = CombinationElement(
        [LocalElement(n_triangulation_divisions=local_setup['n_triangulation_divisions'])] + bias_elements)
    local_scale_hyperparameters = CombinationHyperparameters(
        [LocalHyperparameters(log_sigma=numpy.log(local_setup['amplitude']),
                              log_rho=numpy.log(numpy.radians(local_setup['space_length_scale'])))] + bias_hyperparameters)
    local_component = SpatialComponent(
        ComponentStorage_InMemory(local_scale_element, local_scale_hyperparameters),
        SpatialComponentSolutionStorage_InMemory(),
        compute_uncertainties=True, method='APPROXIMATED',
        compute_sample=True, sample_size=definitions.GLOBAL_SAMPLE_SHAPE[3])

    # Analysis system using the specified components, for the Tmean observable
    print 'Analysing inputs'
    analysis_system = AnalysisSystem(
        [climatology_component, large_scale_component, local_component],
        ObservationSource.TMEAN)

    # Object to load raw binary inputs at time indices
    inputloaders = [
        AnalysisSystemInputLoaderRawBinary_Sources(basepath, source, time_indices)
        for source in sources
    ]

    for iteration in range(args.n_iterations):

        message = 'Iteration {}'.format(iteration)
        print(message)

        # Update with data
        analysis_system.update(inputloaders, time_indices)

    print 'Computing outputs'

    # Produce an output for each time index
    for time_index in time_indices:

        # Get date for output
        outputdate = inputloaders[0].datetime_at_time_index(time_index)
        print 'Evaluating output grid: ', outputdate

        # Configure output grid
        outputstructure = OutputRectilinearGridStructure(
            time_index, outputdate,
            latitudes=numpy.linspace(-90. + definitions.GLOBAL_FIELD_RESOLUTION / 2.,
                                     90. - definitions.GLOBAL_FIELD_RESOLUTION / 2.,
                                     num=definitions.GLOBAL_FIELD_SHAPE[1]),
            longitudes=numpy.linspace(-180. + definitions.GLOBAL_FIELD_RESOLUTION / 2.,
                                      180. - definitions.GLOBAL_FIELD_RESOLUTION / 2.,
                                      num=definitions.GLOBAL_FIELD_SHAPE[2]))

        # Evaluate expected value and uncertainty at these locations (each field evaluated once)
        print 'Evaluating: MAP'
        result_expected_value = analysis_system.evaluate_expected_value('MAP', outputstructure, 'GRID_CELL_AREA_AVERAGE', [1, 1], 1000)
        print 'Evaluating: post_STD'
        result_expected_uncertainties = analysis_system.evaluate_expected_value('post_STD', outputstructure, 'GRID_CELL_AREA_AVERAGE', [1, 1], 1000)

        print 'Evaluating: climatology fraction'
        climatology_fraction = analysis_system.evaluate_climatology_fraction(outputstructure, [1, 1], 1000)

        print 'Evaluating: the sample'
        sample = analysis_system.evaluate_projected_sample(outputstructure)

        # Make output filename
        pathname = 'eustace_example_output_{0:04d}{1:02d}{2:02d}.nc'.format(outputdate.year, outputdate.month, outputdate.day)
        pathname = os.path.join(args.outpath, pathname)
        print 'Saving: ', pathname

        # Save results
        filebuilder = FileBuilderGlobalField(
            pathname, time_index, 'Infilling Example', 'UNVERSIONED', definitions.TAS.name, '',
            'Example data only',
            'eustace.analysis.advanced_standard.examples.example_eustace_few_days', '')
        filebuilder.add_global_field(definitions.TAS, result_expected_value.reshape(definitions.GLOBAL_FIELD_SHAPE))
        filebuilder.add_global_field(definitions.TASUNCERTAINTY, result_expected_uncertainties.reshape(definitions.GLOBAL_FIELD_SHAPE))
        filebuilder.add_global_field(definitions.TAS_CLIMATOLOGY_FRACTION, climatology_fraction.reshape(definitions.GLOBAL_FIELD_SHAPE))

        for index in range(definitions.GLOBAL_SAMPLE_SHAPE[3]):
            variable = copy.deepcopy(definitions.TASENSEMBLE)
            variable.name = variable.name + '_' + str(index)
            selected_sample = sample[:, index].ravel() + result_expected_value
            filebuilder.add_global_field(variable, selected_sample.reshape(definitions.GLOBAL_FIELD_SHAPE))

        filebuilder.save_and_close()

    print 'Complete'
def test_mini_world_altitude_with_latitude(self):
    """Testing using altitude as a covariate"""

    # GENERATING OBSERVATIONS
    # Simulated locations: they sit exactly on the grid points of the covariate datafile
    DEM = Dataset(self.altitude_datafile)
    latitude = DEM.variables['lat'][:]
    longitude = DEM.variables['lon'][:]
    altitude = DEM.variables['dem'][:]
    indices = numpy.stack(
        (numpy.array([1, 3, 5, 7, 8, 9, 10, 11]),
         numpy.array([0, 0, 0, 0, 0, 0, 0, 0])),
        axis=1)
    selected_location = []
    altitude_observations = []
    for couple in indices:
        selected_location.append([latitude[couple[0], couple[1]], longitude[couple[0], couple[1]]])
        altitude_observations.append(altitude[couple[0], couple[1]])
    DEM.close()
    locations = numpy.array(selected_location)

    # Simulated model is y = s*z + a*cos(2x) + b*sin(2x) + c*cos(4x) + d*sin(4x),
    # with z = altitude, x = latitude and s = 1e-3, so we expect a=b=c=d=0
    slope = 1e-3
    measurement = slope * numpy.array(altitude_observations)

    # Simulated errors
    uncorrelatederror = 0.1 * numpy.ones(measurement.shape)

    # Simulated inputs
    simulated_input_loader = SimulatedInputLoader(locations, measurement, uncorrelatederror)

    # Simulate evaluation of this time index
    simulated_time_indices = [0]

    # GENERATING THE MODEL
    # Local component
    geography_covariate_element = GeographyBasedElement(self.altitude_datafile, 'lat', 'lon', 'dem', 1.0)
    geography_covariate_element.load()
    combined_element = CombinationElement([geography_covariate_element, LatitudeHarmonicsElement()])
    combined_hyperparamters = CombinationHyperparameters([
        CovariateHyperparameters(-0.5 * numpy.log(10.)),
        CombinationHyperparameters([
            CovariateHyperparameters(-0.5 * numpy.log(p)) for p in [10.0, 10.0, 10.0, 10.0]
        ])
    ])
    combined_component = SpatialComponent(
        ComponentStorage_InMemory(combined_element, combined_hyperparamters),
        SpatialComponentSolutionStorage_InMemory())

    # GENERATING THE ANALYSIS
    # Analysis system using the specified components, for the Tmean observable
    analysis_system = AnalysisSystem([combined_component], ObservationSource.TMEAN, log=StringIO())

    # Update with data
    analysis_system.update([simulated_input_loader], simulated_time_indices)

    # Check state vector directly
    statevector = analysis_system.components[0].solutionstorage.partial_state_read(0).ravel()

    # These are the nodes where observations were put (see SimulatedObservationSource above)
    # - check they correspond to within 3 times the stated noise level
    self.assertAlmostEqual(slope, statevector[0], delta=0.3)
    self.assertAlmostEqual(0., statevector[1], delta=0.3)
    self.assertAlmostEqual(0., statevector[2], delta=0.3)
    self.assertAlmostEqual(0., statevector[3], delta=0.3)
    self.assertAlmostEqual(0., statevector[4], delta=0.3)

    # And check output corresponds too
    # (evaluate result on output structure same as input)
    simulated_output_structure = SimulatedObservationStructure(0, locations, None, None)
    result = analysis_system.evaluate_expected_value('MAP', simulated_output_structure, flag='POINTWISE')
    expected = statevector[0] * numpy.array(altitude_observations) \
             + statevector[1] * LatitudeFunction(numpy.cos, 2.0).compute(locations[:, 0]).ravel() \
             + statevector[2] * LatitudeFunction(numpy.sin, 2.0).compute(locations[:, 0]).ravel() \
             + statevector[3] * LatitudeFunction(numpy.cos, 4.0).compute(locations[:, 0]).ravel() \
             + statevector[4] * LatitudeFunction(numpy.sin, 4.0).compute(locations[:, 0]).ravel()
    numpy.testing.assert_almost_equal(expected, result)

    # test output gridding, pointwise limit
    outputstructure = OutputRectilinearGridStructure(
        2, epoch_plus_days(2),
        latitudes=numpy.linspace(-60., 60., num=5),
        longitudes=numpy.linspace(-90., 90, num=10))
    pointwise_result = analysis_system.evaluate_expected_value('MAP', outputstructure, 'POINTWISE')
    pointwise_limit_result = analysis_system.evaluate_expected_value('MAP', outputstructure, 'GRID_CELL_AREA_AVERAGE', [1, 1], 10)
    numpy.testing.assert_array_almost_equal(pointwise_result, pointwise_limit_result)
def __init__(self, covariates_descriptor):

    if covariates_descriptor is not None:
        loader = LoadCovariateElement(covariates_descriptor)
        loader.check_keys()
        covariate_elements, covariate_hyperparameters = loader.load_covariates_and_hyperparameters()
        print('The following fields have been added as covariates of the climatology model')
        print(loader.data.keys())
    else:
        covariate_elements, covariate_hyperparameters = [], []

    setup = MovingClimatologySetup()

    model_elements = CombinationElement([
        AnnualKroneckerElement(
            setup.seasonal_spline_settings.n_triangulation_divisions,
            setup.seasonal_spline_settings.alpha,
            setup.seasonal_spline_settings.starttime,
            setup.seasonal_spline_settings.endtime,
            setup.seasonal_spline_settings.n_nodes,
            setup.seasonal_spline_settings.overlap_factor,
            setup.seasonal_spline_settings.H,
            setup.seasonal_spline_settings.wrap_dimensions),
        #SeasonalElement(setup.seasonal_settings.n_triangulation_divisions,
        #                setup.seasonal_settings.n_harmonics,
        #                include_local_mean=setup.seasonal_settings.include_local_mean),
        GrandMeanElement(),
        LatitudeSplineElement(
            setup.latitude_settings.alpha,
            setup.latitude_settings.n_nodes,
            setup.latitude_settings.overlap_factor,
            setup.latitude_settings.H),
    ] + covariate_elements)

    seasonal_hyperparameters = SeasonalHyperparameters(
        setup.seasonal_settings.n_spatial_components,
        numpy.log(setup.seasonal_settings.amplitude),
        numpy.log(numpy.radians(setup.seasonal_settings.space_length_scale)))

    seasonal_spline_hyperparameters = SpaceTimeSPDEHyperparameters(
        numpy.log(setup.seasonal_spline_settings.amplitude),
        numpy.log(numpy.radians(setup.seasonal_spline_settings.space_length_scale)),
        numpy.log(setup.seasonal_spline_settings.time_length_scale))

    seasonal_params = zip(
        numpy.log(setup.seasonal_settings.harmonic_amplitudes),
        numpy.log(numpy.radians(setup.seasonal_settings.harmonic_length_scales)))
    seasonal_hyperparameters.set_array([val for pair in seasonal_params for val in pair])

    model_hyperparameters = CombinationHyperparameters([
        seasonal_spline_hyperparameters,
        #seasonal_hyperparameters,
        CovariateHyperparameters(numpy.log(setup.covariate_settings.grandmean_amplitude)),
        #SpaceTimeSPDEHyperparameters(numpy.log(setup.slow_settings.amplitude),
        #                             numpy.log(numpy.radians(setup.slow_settings.space_length_scale)),
        #                             numpy.log(setup.slow_settings.time_length_scale)),
        LocalHyperparameters(
            numpy.log(setup.latitude_settings.amplitude),
            numpy.log(setup.latitude_settings.length_scale))
    ] + covariate_hyperparameters)

    super(ClimatologyDefinition, self).__init__(model_elements, model_hyperparameters)