Example #1
0
 def test_split_states_time(self):
     
     global_biases_group_list = ['global bias',]
     
     full_component_definition = example_eustace.LocalDefinition(bias_terms = True, global_biases_group_list = global_biases_group_list)
     full_component_storage_solution = SpatialComponentSolutionStorage_InMemory()
     full_component = DelayedSpatialComponent(full_component_definition, full_component_storage_solution)
     
     local_sub_component_definition = example_optimization.PureLocalComponentDefinition()
     local_sub_component_storage_solution = SpatialComponentSolutionStorage_InMemory()
     local_sub_component = DelayedSpatialComponent(local_sub_component_definition, local_sub_component_storage_solution)
     
     bias_sub_component_definition = example_optimization.PureBiasComponentDefinition(bias_terms = True, global_biases_group_list = global_biases_group_list)
     bias_sub_component_storage_solution = SpatialComponentSolutionStorage_InMemory()
     bias_sub_component = DelayedSpatialComponent(bias_sub_component_definition, bias_sub_component_storage_solution)
     
     element_optimisation_flags = [True, False]
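     # The flags above presumably mark which elements of the full combination belong to the
     # local sub-component (here the local SPDE element) as opposed to the global bias element.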
     time_key = 0
     
     state_length = full_component.storage.element_read().element_prior(full_component.storage.hyperparameters_read()).prior_number_of_state_parameters()
     random_state = numpy.random.normal(0.0, 1.0, state_length)
     full_component.solutionstorage.partial_state_write(random_state, time_key)
     
     example_optimization.split_states_time( full_component, local_sub_component, bias_sub_component, element_optimisation_flags, time_key )
     
     full_state = full_component.solutionstorage.partial_state_read( time_key )
     
     (local_target_state, bias_target_state) = full_component.storage.element_read().element_prior(full_component.storage.hyperparameters_read()).element_states(full_state)
     
     numpy.testing.assert_almost_equal( local_target_state, local_sub_component.solutionstorage.partial_state_read(time_key) )
     numpy.testing.assert_almost_equal( bias_target_state, bias_sub_component.solutionstorage.partial_state_read(time_key) )
Example #2
0
    def atest_process_observations_compute_uncertainties(self):

        # Our test system is:
        #
        # ( [ 2.0  0.0 ]  +  [ -1.5  0.0 ] [ 5.0  0.0 ] [ -1.5  2.2 ] ) x = [ -1.5  0.0 ] [ 5.0  0.0 ] [ 7.0 - 2.0 ]
        # ( [ 0.0  2.0 ]     [  2.2  3.3 ] [ 0.0  5.0 ] [  0.0  3.3 ] )     [  2.2  3.3 ] [ 0.0  5.0 ] [ 9.0 - 3.0 ]
        #
        # [ 13.25 -16.5  ] x = [ -37.5 ]
        # [-16.5   80.65 ]     [ 154.0 ]
        #
        # => x = [ -0.60697861 ]
        #        [  1.78530506 ]
        #
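        # Illustrative check of the worked example above (not executed as part of the test):
        #   numpy.linalg.solve(numpy.array([[13.25, -16.5], [-16.5, 80.65]]),
        #                      numpy.array([-37.5, 154.0]))
        #   ~= numpy.array([-0.60697861, 1.78530506])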

        c = DelayedSpatialComponent(
            ComponentStorage_InMemory(
                TestDelayedSpatialComponentSolution.TestElement(),
                CovariateHyperparameters(-0.5 * numpy.log(2.0))),
            DelayedSpatialComponentSolutionStorage_Files(),
            compute_uncertainties=True)
        s = c.component_solution()
        self.assertIsInstance(s, DelayedSpatialComponentSolution)
        self.assertTrue(s.compute_uncertainties)
        test_offset = numpy.array([2.0, 3.0])
        s.process_observations(
            TestDelayedSpatialComponentSolution.TestObservations(t=21),
            test_offset[0:1])
        s.update_time_step()
        s.process_observations(
            TestDelayedSpatialComponentSolution.TestObservations(t=532),
            test_offset[1:2])
        s.update_time_step()
        s.update()

        # This mimics the last iteration of model solving, so marginal variances should have been stored
        expected_marginal_std = numpy.sqrt(
            numpy.diag(
                numpy.linalg.inv(numpy.array([[13.25, -16.5], [-16.5,
                                                               80.65]]))))
        numpy.testing.assert_array_almost_equal(
            s.solutionstorage.state_marginal_std, expected_marginal_std)

        # Now we compute the projection of marginal variances onto the given observations
        for time in [532]:
            # Projected uncertainty at time t should be the design matrix for that time applied to the expected marginal std
            expected_projection = TestDelayedSpatialComponentSolution.TestDesign(
                t=time).design_function(expected_marginal_std)
            numpy.testing.assert_almost_equal(
                s.solution_observation_expected_uncertainties(
                    TestDelayedSpatialComponentSolution.TestObservations(
                        t=time)), expected_projection)
Example #3
0
 def test_extract_local_view_states_time(self):
     
     # Define model components for full global and local views
     global_sub_component_definition = example_optimization.PureLocalComponentDefinition()
     global_sub_component_storage_solution = SpatialComponentSolutionStorage_InMemory()
     global_component = DelayedSpatialComponent(global_sub_component_definition, global_sub_component_storage_solution)
     
     # Assign a random state
     time_key = 0
     
     state_length = global_component.storage.element_read().element_prior(global_component.storage.hyperparameters_read()).prior_number_of_state_parameters()
     random_state = numpy.random.normal(0.0, 1.0, state_length)
     global_component.solutionstorage.partial_state_write(random_state, time_key)
     
     # Define subregions and extract their states
     neighbourhood_level = 0
     
     n_subregions = global_component.storage.element_read().combination[0].spde.n_triangles_at_level(neighbourhood_level)
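     # n_subregions is assumed to equal the number of super-triangles at the chosen
     # neighbourhood level, each defining one local view of the global SPDE.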
     
     # split states for each local model
     setup = example_eustace.LocalSetup()
     
     subregion_component_list = []
     for region_index in range(n_subregions):
         print(region_index)
         view_flags = [True,]
         region_component_definition = example_optimization.LocalViewDefinition( neighbourhood_level, region_index )
         region_component_storage_solution = SpatialComponentSolutionStorage_InMemory()
         region_component = DelayedSpatialComponent(region_component_definition, region_component_storage_solution)
         
         print "extracting state"
         example_optimization.extract_local_view_states_time( global_component, region_component, view_flags, time_key )
         
         subregion_component_list.append(region_component)
     
     # reconstruct the full state by averaging over states for local splits
     state_accumulator = numpy.zeros( state_length )
     n_contributors_accumulator = numpy.zeros( state_length )
     
     for region_index in range(n_subregions):
         region_state_vector = subregion_component_list[region_index].solutionstorage.partial_state_read( time_key )
         full_state_indices = subregion_component_list[region_index].storage.element_read().combination[0].spde.active_vertex_indices
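         # active_vertex_indices is assumed to map entries of the regional state vector to
         # their positions in the full state vector; shared vertices are averaged below.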
         state_accumulator[full_state_indices] += region_state_vector
         n_contributors_accumulator[full_state_indices] += 1.0
         
     reconstructed_state = state_accumulator / n_contributors_accumulator
     
     # assert that the reconstructed state matches the original full component state vector
     numpy.testing.assert_almost_equal( reconstructed_state, global_component.solutionstorage.partial_state_read(time_key) )
Example #4
0
    def __init__(self,
                 storage_climatology,
                 storage_large_scale,
                 storage_local_bias,
                 storage_region_spde,
                 covariates_descriptor,
                 insitu_biases=False,
                 breakpoints_file=None,
                 global_biases=False,
                 global_biases_group_list=[],
                 compute_uncertainties=False,
                 method='EXACT',
                 compute_sample=False,
                 sample_size=definitions.GLOBAL_SAMPLE_SHAPE[3],
                 neighbourhood_level=0,
                 region_index=0,
                 regionspec='LocalSubRegion'):

        # initialise the OptimizationSystem
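        # Component order (as passed below): climatology, large-scale, global bias terms,
        # and the regional SPDE view handled as a delayed spatial component.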
        super(RegionOptimizationSystem_EUSTACE,
              self).__init__(components=[
                  SpaceTimeComponent(
                      ClimatologyDefinition(covariates_descriptor),
                      storage_climatology, True, compute_uncertainties, method,
                      compute_sample, sample_size),
                  SpaceTimeComponent(
                      LargeScaleDefinition(insitu_biases, breakpoints_file),
                      storage_large_scale, True, compute_uncertainties, method,
                      compute_sample, sample_size),
                  SpatialComponent(
                      PureBiasComponentDefinition(global_biases,
                                                  global_biases_group_list),
                      storage_local_bias, compute_uncertainties, method,
                      compute_sample, sample_size),
                  DelayedSpatialComponent(
                      LocalViewDefinition(neighbourhood_level, region_index,
                                          regionspec), storage_region_spde,
                      compute_uncertainties, method, compute_sample,
                      sample_size)
              ],
                             observable=ObservationSource.TMEAN)
Example #5
0
def demo_non_stationary():

    full_resolution_level = 5
    neighbourhood_level = 2

    full_spde = SphereMeshViewGlobal(level=full_resolution_level)

    active_triangles = full_spde.neighbours_at_level(neighbourhood_level, 0)

    n_regions = full_spde.n_triangles_at_level(neighbourhood_level)

    merge_method = 'new'
    if merge_method == 'old':
        local_spdes = []
        local_hyperparameters = []

        for region_index in range(n_regions):

            local_spdes.append(
                SphereMeshViewSuperTriangle(full_resolution_level,
                                            neighbourhood_level, region_index))

            hyperparameters = numpy.log(
                numpy.concatenate([
                    numpy.random.uniform(1.0, 3.0, 1),
                    numpy.random.uniform(5.0, 30.0, 1) * numpy.pi / 180.
                ]))
            #hyperparameters = numpy.log( numpy.concatenate( [numpy.ones(1), numpy.random.uniform(15.0,45.0, 1) *numpy.pi/180.] ) )
            #hyperparameters = numpy.array([2.0, 3.0])

            #hyperparameters = numpy.log([2.0, numpy.pi/4])

            local_hyperparameters.append(hyperparameters)

        global_hyperparameters, global_sigma_design, global_rho_design = full_spde.merge_local_parameterisations(
            local_spdes, local_hyperparameters, merge_method='exp_average')

        log_sigmas = global_sigma_design.dot(global_hyperparameters)
        log_rhos = global_rho_design.dot(global_hyperparameters)

    elif merge_method == 'new':

        sigma_accumulator = None
        rho_accumulator = None
        contribution_counter = None
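        # In this path the per-region parameterisations are presumably accumulated vertex by
        # vertex and averaged at finalisation using the contribution counts.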

        for region_index in range(n_regions):
            local_spde = SphereMeshViewSuperTriangle(full_resolution_level,
                                                     neighbourhood_level,
                                                     region_index)
            local_hyperparameters = numpy.log(
                numpy.concatenate([
                    numpy.random.uniform(1.0, 5.0, 1),
                    numpy.random.uniform(10.0, 45.0, 1) * numpy.pi / 180.
                ]))

            accumulators = SphereMeshViewGlobal.accumulate_local_parameterisations(
                sigma_accumulator, rho_accumulator, contribution_counter,
                local_spde, local_hyperparameters)

            sigma_accumulator, rho_accumulator, contribution_counter = accumulators

        log_sigmas, log_rhos = SphereMeshViewGlobal.finalise_local_parameterisation_sigma_rho(
            sigma_accumulator, rho_accumulator, contribution_counter)

    #print global_hyperparameters, global_sigma_design, global_rho_design

    import matplotlib.pyplot as plt
    from eustace.analysis.mesh.geometry import cartesian_to_polar2d
    polar_coords = cartesian_to_polar2d(full_spde.triangulation.points)

    plt.figure()
    plt.scatter(polar_coords[:, 1],
                polar_coords[:, 0],
                c=255. * log_sigmas / numpy.max(numpy.abs(log_sigmas)),
                linewidth=0.0,
                s=8.0)

    plt.figure()
    plt.scatter(polar_coords[:, 1],
                polar_coords[:, 0],
                c=255. * log_rhos / numpy.max(numpy.abs(log_rhos)),
                linewidth=0.0,
                s=8.0)

    #plt.show()

    #numpy.testing.assert_almost_equal( log_sigmas, 2.0 * numpy.ones(full_spde.triangulation.points.shape[0]) )
    #numpy.testing.assert_almost_equal( log_rhos, 3.0 * numpy.ones(full_spde.triangulation.points.shape[0]) )

    from eustace.analysis.advanced_standard.components.storage_inmemory import ComponentStorage_InMemory
    from eustace.analysis.advanced_standard.components.storage_inmemory import SpatialComponentSolutionStorage_InMemory
    from eustace.analysis.advanced_standard.components.spatialdelayed import DelayedSpatialComponent
    from eustace.analysis.advanced_standard.elements.local_view import NonStationaryLocal, ExpandedLocalHyperparameters
    from eustace.analysis.advanced_standard.elements.local import LocalElement, LocalHyperparameters

    nonstationary_component = DelayedSpatialComponent(
        ComponentStorage_InMemory(
            NonStationaryLocal(full_resolution_level),
            ExpandedLocalHyperparameters(log_sigma=log_sigmas,
                                         log_rho=log_rhos)),
        SpatialComponentSolutionStorage_InMemory())

    #nonstationary_component = DelayedSpatialComponent(
    #ComponentStorage_InMemory(LocalElement(full_resolution_level), LocalHyperparameters(log_sigma = hyperparameters[0], log_rho = hyperparameters[1])),
    #SpatialComponentSolutionStorage_InMemory())

    #print log_sigmas, log_rhos

    #plt.figure()
    #plt.scatter(polar_coords[:,1], polar_coords[:,0], c = 255.* process_sample / numpy.max(numpy.abs(process_sample)), linewidth = 0.0, s = 8.0 )

    #plt.figure()
    #plt.imshow( numpy.asarray( Q.todense() ) )

    # setup an output grid
    out_lats = numpy.linspace(-89.5, 89.5, 180)
    out_lons = numpy.linspace(-179.5, 179.5, 360)
    out_lons, out_lats = numpy.meshgrid(out_lons, out_lats)
    out_coords = numpy.vstack([out_lats.ravel(), out_lons.ravel()]).T

    design_matrix = nonstationary_component.storage.element.spde.build_A(
        out_coords)

    # setup solver for sampling
    from eustace.analysis.advanced_standard.linalg.extendedcholmodwrapper import ExtendedCholmodWrapper
    Q = nonstationary_component.storage.element.element_prior(
        nonstationary_component.storage.hyperparameters).prior_precision()
    factor = ExtendedCholmodWrapper.cholesky(Q)
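    # Sampling sketch: with Q factorised as L * L^T, solving L^T x = z for z ~ N(0, I)
    # gives x ~ N(0, Q^-1); solve_backward_substitution is assumed to perform that solve.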

    # draw samples, project onto output grid and plot
    random_values = numpy.random.normal(0.0, 1.0, (Q.shape[0], 1))
    process_sample = factor.solve_backward_substitution(random_values)
    out_values = design_matrix.dot(process_sample)
    plt.figure()
    plt.scatter(out_coords[:, 1],
                out_coords[:, 0],
                c=255. * out_values / numpy.max(numpy.abs(out_values)),
                linewidth=0.0,
                s=8.0)

    random_values = numpy.random.normal(0.0, 1.0, (Q.shape[0], 1))
    process_sample = factor.solve_backward_substitution(random_values)
    out_values = design_matrix.dot(process_sample)
    plt.figure()
    plt.scatter(out_coords[:, 1],
                out_coords[:, 0],
                c=255. * out_values / numpy.max(numpy.abs(out_values)),
                linewidth=0.0,
                s=8.0)

    random_values = numpy.random.normal(0.0, 1.0, (Q.shape[0], 1))
    process_sample = factor.solve_backward_substitution(random_values)
    out_values = design_matrix.dot(process_sample)
    plt.figure()
    plt.scatter(out_coords[:, 1],
                out_coords[:, 0],
                c=255. * out_values / numpy.max(numpy.abs(out_values)),
                linewidth=0.0,
                s=8.0)

    plt.show()
Example #6
0
    def test_process_observations_no_uncertainties(self):

        # Our test system for the first time step (key 21) is:
        #
        # ( [ 2.0  0.0 ]  +  [ -1.5 ] [ 5.0 ] [ -1.5 2.2 ] ) x = [ -1.5 ] [ 5.0 ] [ 7.0 - 2.0 ]
        # ( [ 0.0  2.0 ]     [ 2.2  ]                      )     [  2.2 ]
        #
        # [ 13.25 -16.5  ] x = [ -37.5 ]
        # [-16.5   26.2 ]      [ 55.0  ]
        #
        # => x = [-1.00133511 ]
        #        [ 1.46862483 ]

        # Our test system for the second time step (key 532) is:
        #
        # ( [ 2.0  0.0 ]  +  [ 0.0 ] [ 5.0 ] [ 0.0 3.3 ] ) x = [  0.0 ] [ 5.0 ] [ 9.0 - 3.0 ]
        # ( [ 0.0  2.0 ]     [ 3.3  ]                    )     [  3.3 ]
        #
        # [ 2.0   0.0  ] x = [ 0.0  ]
        # [ 0.0   56.45 ]     [ 99.0 ]
        #
        # => x = [ 0.         ]
        #        [ 1.75376439 ]
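        # Illustrative checks of the two systems above (not executed by the test):
        #   numpy.linalg.solve([[13.25, -16.5], [-16.5, 26.2]], [-37.5, 55.0]) ~= [-1.00133511, 1.46862483]
        #   numpy.linalg.solve([[2.0, 0.0], [0.0, 56.45]], [0.0, 99.0])        ~= [0.0, 1.75376439]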

        for component_storage_class in DelayedSpatialComponentSolutionStorage_Files, SpatialComponentSolutionStorage_InMemory:

            c = DelayedSpatialComponent(
                ComponentStorage_InMemory(
                    TestDelayedSpatialComponentSolution.TestElement(),
                    CovariateHyperparameters(-0.5 * numpy.log(2.0))),
                component_storage_class())
            c.solutionstorage.statefiledictionary_read = None
            c.solutionstorage.statefiledictionary_write = {
                21: 'state_test.A.pickle',
                532: 'state_test.B.pickle'
            }
            c.solutionstorage.measurementfiledictionary_write = c.solutionstorage.measurementfiledictionary_read = {
                21: 'measurement_test.A.pickle',
                532: 'measurement_test.B.pickle'
            }

            s = c.component_solution()
            self.assertIsInstance(s, DelayedSpatialComponentSolution)
            self.assertFalse(s.compute_uncertainties)
            test_offset = numpy.array([2.0, 3.0])
            c.solutionstorage.state_time_index = 21
            c.solutionstorage.measurement_time_index_write = 21
            s.process_observations(
                TestDelayedSpatialComponentSolution.TestObservations(t=21),
                test_offset[0:1])
            s.update_time_step()
            c.solutionstorage.state_time_index = 532
            c.solutionstorage.measurement_time_index_write = 532
            s.process_observations(
                TestDelayedSpatialComponentSolution.TestObservations(t=532),
                test_offset[1:2])
            s.update_time_step()
            s.update()

            c.solutionstorage.statefiledictionary_read = c.solutionstorage.statefiledictionary_write  # now enable reading from the previously written files

            numpy.testing.assert_almost_equal(
                s.solutionstorage.partial_state_read(21),
                numpy.array([-1.00133511, 1.46862483]))
            numpy.testing.assert_almost_equal(
                s.solutionstorage.partial_state_read(532),
                numpy.array([0.0, 1.75376439]))

            # No marginal variances should have been computed at all
            self.assertEqual(
                None, s.solutionstorage.partial_state_marginal_std_read(21))

            for time, expected_array in zip([21, 532], [
                    numpy.array([-1.5 * -1.00133511 + 2.2 * 1.46862483]),
                    numpy.array([3.3 * 1.75376439])
            ]):
                # Observation at time t=t* should be design matrix for that time multiplied by expected state
                numpy.testing.assert_almost_equal(
                    s.solution_observation_expected_value(
                        TestDelayedSpatialComponentSolution.TestObservations(
                            t=time)), expected_array)

                # Here we are considering a generic model-solving iteration: no marginal variances have been stored, so we expect 0. as the observation uncertainties
                numpy.testing.assert_array_equal(
                    s.solution_observation_expected_uncertainties(
                        TestDelayedSpatialComponentSolution.TestObservations(
                            t=time)), 0.)
Example #7
0
def main():

    print('Advanced standard example using a few days of EUSTACE data')
    parser = argparse.ArgumentParser(
        description='Advanced standard example using a few days of EUSTACE data'
    )
    parser.add_argument('outpath',
                        help='directory where the output should be redirected')
    parser.add_argument(
        '--json_descriptor',
        default=None,
        help=
        'a json descriptor containing the covariates to include in the climatology model'
    )
    parser.add_argument('--land_biases',
                        action='store_true',
                        help='include insitu land homogenization bias terms')
    parser.add_argument('--global_biases',
                        action='store_true',
                        help='include global satellite bias terms')
    parser.add_argument('--n_iterations',
                        type=int,
                        default=5,
                        help='number of solving iterations')
    args = parser.parse_args()

    # Input data path
    basepath = '/work/scratch/eustace/rawbinary3'

    # Days to process
    #time_indices = range(int(days_since_epoch(datetime(2006, 2, 1))), int(days_since_epoch(datetime(2006, 2, 2))))
    #time_indices = range(int(days_since_epoch(datetime(1906, 2, 1))), int(days_since_epoch(datetime(1906, 2, 2))))

    date_list = [
        datetime(2006, 1, 1) + relativedelta(days=k) for k in range(3)
    ]

    #backwards_list = [date_list[i] for i in range(11, -1, -1)]
    #date_list = backwards_list

    time_indices = [int(days_since_epoch(date)) for date in date_list]

    # Sources to use
    #sources = [ 'surfaceairmodel_land', 'surfaceairmodel_ocean', 'surfaceairmodel_ice', 'insitu_land', 'insitu_ocean' ]  # full source list
    sources = ['insitu_land', 'insitu_ocean']
    #sources = [ 'surfaceairmodel_land' ]
    # CLIMATOLOGY COMPONENT: combining the seasonal core along with latitude harmonics, altitude and coastal effects

    if args.json_descriptor is not None:
        loader = LoadCovariateElement(args.json_descriptor)
        loader.check_keys()
        covariate_elements, covariate_hyperparameters = loader.load_covariates_and_hyperparameters(
        )
        print(
            'The following fields have been added as covariates of the climatology model'
        )
        print(loader.data.keys())
    else:
        covariate_elements, covariate_hyperparameters = [], []

    #climatology_element = CombinationElement( [SeasonalElement(n_triangulation_divisions=2, n_harmonics=2, include_local_mean=False), GrandMeanElement()]+covariate_elements)
    #climatology_hyperparameters = CombinationHyperparameters( [SeasonalHyperparameters(n_spatial_components=2, common_log_sigma=0.0, common_log_rho=0.0), CovariateHyperparameters(numpy.log(15.0))] + covariate_hyperparameters )

    climatology_element = CombinationElement([
        GrandMeanElement(),
    ] + covariate_elements)
    climatology_hyperparameters = CombinationHyperparameters([
        CovariateHyperparameters(numpy.log(15.0)),
    ] + covariate_hyperparameters)

    #climatology_element =SeasonalElement(n_triangulation_divisions=2, n_harmonics=2, include_local_mean=False)
    #climatology_hyperparameters = SeasonalHyperparameters(n_spatial_components=2, common_log_sigma=0.0, common_log_rho=0.0)

    climatology_component = SpaceTimeComponent(
        ComponentStorage_InMemory(climatology_element,
                                  climatology_hyperparameters),
        SpaceTimeComponentSolutionStorage_InMemory(),
        compute_uncertainties=True,
        method='APPROXIMATED')

    # LARGE SCALE (kronecker product) COMPONENT: combining large scale trends with bias terms accounting for homogeneization effects

    if args.land_biases:
        bias_element, bias_hyperparameters = [
            InsituLandBiasElement(BREAKPOINTS_FILE)
        ], [CovariateHyperparameters(numpy.log(.9))]
        print('Adding bias terms for insitu land homogenization')
    else:
        bias_element, bias_hyperparameters = [], []

    large_scale_element = CombinationElement([
        SpaceTimeKroneckerElement(n_triangulation_divisions=2,
                                  alpha=2,
                                  starttime=-30,
                                  endtime=365 * 1 + 30,
                                  n_nodes=12 * 1 + 2,
                                  overlap_factor=2.5,
                                  H=1)
    ] + bias_element)
    large_scale_hyperparameters = CombinationHyperparameters([
        SpaceTimeSPDEHyperparameters(space_log_sigma=0.0,
                                     space_log_rho=numpy.log(
                                         numpy.radians(15.0)),
                                     time_log_rho=numpy.log(15.0))
    ] + bias_hyperparameters)
    large_scale_component = SpaceTimeComponent(
        ComponentStorage_InMemory(large_scale_element,
                                  large_scale_hyperparameters),
        SpaceTimeComponentSolutionStorage_InMemory(),
        compute_uncertainties=True,
        method='APPROXIMATED')

    # LOCAL COMPONENT: combining local scale variations with global satellite bias terms

    if args.global_biases:
        bias_elements = [
            BiasElement(groupname, 1) for groupname in GLOBAL_BIASES_GROUP_LIST
        ]
        bias_hyperparameters = [
            CovariateHyperparameters(numpy.log(15.0)) for index in range(3)
        ]
        print('Adding global bias terms for all the surfaces')
    else:
        bias_elements, bias_hyperparameters = [], []

    n_triangulation_divisions_local = 7
    local_log_sigma = numpy.log(5)
    local_log_rho = numpy.log(numpy.radians(5.0))
    local_element = NonStationaryLocal(
        n_triangulation_divisions=n_triangulation_divisions_local)
    n_local_nodes = local_element.spde.n_latent_variables()
    local_scale_element = CombinationElement([local_element] + bias_elements)
    local_hyperparameters = ExpandedLocalHyperparameters(
        log_sigma=numpy.repeat(local_log_sigma, n_local_nodes),
        log_rho=numpy.repeat(local_log_rho, n_local_nodes))
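    # ExpandedLocalHyperparameters is assumed to hold one (log_sigma, log_rho) pair per SPDE
    # node; here every node starts from the same initial values before regional optimisation.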
    local_scale_hyperparameters = CombinationHyperparameters(
        [local_hyperparameters] + bias_hyperparameters)
    local_component = DelayedSpatialComponent(
        ComponentStorage_InMemory(local_scale_element,
                                  local_scale_hyperparameters),
        SpatialComponentSolutionStorage_InMemory(),
        compute_uncertainties=True,
        method='APPROXIMATED')
    print "hyperparameter storage:", local_component.storage.hyperparameters
    print 'Analysing inputs'

    # Analysis system using the specified components, for the Tmean observable
    ##analysis_system = AnalysisSystem(
    ##    [ climatology_component, large_scale_component, local_component ],
    ##    ObservationSource.TMEAN)

    analysis_system = OptimizationSystem(
        [climatology_component, local_component], ObservationSource.TMEAN)
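    # Note: only the climatology and local components are included in this reduced system;
    # the large-scale component defined above is constructed but not used here.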

    # Object to load raw binary inputs at time indices
    inputloaders = [
        AnalysisSystemInputLoaderRawBinary_Sources(basepath, source,
                                                   time_indices)
        for source in sources
    ]

    for iteration in range(args.n_iterations):

        message = 'Iteration {}'.format(iteration)
        print(message)

        # Update with data
        analysis_system.update(inputloaders, time_indices)

    ##################################################

    # Optimize local model hyperparameters

    # Loop over local regions, generate optimization systems, fit hyperparameters and save

    # split spde and bias models for local component into two components
    global_spde_sub_component_definition = ComponentStorage_InMemory(
        CombinationElement([local_element]),
        CombinationHyperparameters([local_hyperparameters]))
    global_spde_sub_component_storage_solution = SpatialComponentSolutionStorage_InMemory(
    )
    global_spde_sub_component = DelayedSpatialComponent(
        global_spde_sub_component_definition,
        global_spde_sub_component_storage_solution)

    bias_sub_component_definition = ComponentStorage_InMemory(
        CombinationElement(bias_elements),
        CombinationHyperparameters(bias_hyperparameters))
    bias_sub_component_storage_solution = SpatialComponentSolutionStorage_InMemory(
    )
    bias_sub_component = DelayedSpatialComponent(
        bias_sub_component_definition, bias_sub_component_storage_solution)

    element_optimisation_flags = [True, False, False,
                                  False]  # one spde, three biases

    for time_key in time_indices:
        split_states_time(local_component, global_spde_sub_component,
                          bias_sub_component, element_optimisation_flags,
                          time_key)

    # Define subregions and extract their states
    neighbourhood_level = 1

    n_subregions = global_spde_sub_component.storage.element_read(
    ).combination[0].spde.n_triangles_at_level(neighbourhood_level)
    hyperparameter_file_template = "local_hyperparameters.%i.%i.%i.npy"
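    # Template fields: (n_triangulation_divisions_local, neighbourhood_level, region_index),
    # matching the arguments used when the per-region files are written and read back below.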

    fit_hyperparameters = True
    optimization_component_index = 2
    if fit_hyperparameters:
        for region_index in range(n_subregions):
            # Set up the model for the local subregion of neighbours around the super triangle
            view_flags = [
                True,
            ]
            region_element = CombinationElement([
                LocalSubRegion(n_triangulation_divisions_local,
                               neighbourhood_level, region_index)
            ])
            region_hyperparameters = ExtendedCombinationHyperparameters([
                LocalHyperparameters(log_sigma=local_log_sigma,
                                     log_rho=local_log_rho)
            ])
            region_component_storage_solution = SpatialComponentSolutionStorage_InMemory(
            )
            region_sub_component = DelayedSpatialComponent(
                ComponentStorage_InMemory(region_element,
                                          region_hyperparameters),
                region_component_storage_solution)

            for time_key in time_indices:
                print "region_index, time_key:", region_index, time_key
                extract_local_view_states_time(global_spde_sub_component,
                                               region_sub_component,
                                               view_flags, time_key)

            print "running optimization for region:", region_index

            region_optimization_system = OptimizationSystem([
                climatology_component, bias_sub_component, region_sub_component
            ], ObservationSource.TMEAN)

            for time_key in time_indices:
                region_optimization_system.update_component_time(
                    inputloaders, optimization_component_index, time_key)

            # commented-out version that works for a few days of inputs
            #region_optimization_system.components[optimization_component_index].component_solution().optimize()
            #region_optimization_system.components[optimization_component_index].storage.hyperparameters.get_array()
            #hyperparameter_file = os.path.join(args.outpath, hyperparameter_file_template % (n_triangulation_divisions_local, neighbourhood_level, region_index) )
            #region_sub_component.storage.hyperparameters.values_to_npy_savefile( hyperparameter_file )

            # replaced with a version for full processing based on a json dump of the input files - the input_descriptor dict still needs to be generated
            hyperparameter_file = os.path.join(
                args.outpath, hyperparameter_file_template %
                (n_triangulation_divisions_local, neighbourhood_level,
                 region_index))
            region_optimization_system.process_inputs(
                input_descriptor, optimization_component_index, time_indices)
            region_optimization_system.optimize_component(
                optimization_component_index,
                hyperparameter_storage_file=hyperparameter_file)

            fitted_hyperparameters_converted = region_sub_component.storage.hyperparameters.get_array(
            )
            fitted_hyperparameters_converted[0] = numpy.exp(
                fitted_hyperparameters_converted[0])
            fitted_hyperparameters_converted[1] = numpy.exp(
                fitted_hyperparameters_converted[1]) * 180.0 / numpy.pi
            print('fitted_hyperparameters_converted: {}'.format(fitted_hyperparameters_converted))

    # Setup model for the super triangle without neighbours for hyperparameter merging
    region_spdes = []
    region_hyperparameter_values = []
    for region_index in range(n_subregions):
        # Redefine the region sub component as a supertriangle rather than a neighbourhood
        region_element = CombinationElement([
            LocalSuperTriangle(n_triangulation_divisions_local,
                               neighbourhood_level, region_index)
        ])
        region_hyperparameters = ExtendedCombinationHyperparameters([
            LocalHyperparameters(log_sigma=local_log_sigma,
                                 log_rho=local_log_rho)
        ])
        region_component_storage_solution = SpatialComponentSolutionStorage_InMemory(
        )
        region_sub_component = DelayedSpatialComponent(
            ComponentStorage_InMemory(region_element, region_hyperparameters),
            region_component_storage_solution)

        # Read the optimized hyperparameters
        hyperparameter_file = os.path.join(
            args.outpath,
            hyperparameter_file_template % (n_triangulation_divisions_local,
                                            neighbourhood_level, region_index))
        region_sub_component.storage.hyperparameters.values_from_npy_savefile(
            hyperparameter_file)

        # Append the spde model and hyperparameters to their lists for merging
        region_spdes.append(region_element.combination[0].spde)
        region_hyperparameter_values.append(
            region_sub_component.storage.hyperparameters.get_array())

    # merge and save hyperparameters
    full_spde = local_element.spde
    new_hyperparameter_values, global_sigma_design, global_rho_design = full_spde.merge_local_parameterisations(
        region_spdes, region_hyperparameter_values, merge_method='exp_average')
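    # merge_local_parameterisations presumably combines the per-region hyperparameter fits
    # into a single per-node parameterisation, averaging overlaps in exponential space
    # ('exp_average') before the merged values are written back to local_hyperparameters.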

    local_hyperparameters.set_array(new_hyperparameter_values)
    hyperparameter_file_merged = "merged_hyperparameters.%i.%i.npy" % (
        n_triangulation_divisions_local, neighbourhood_level)
    local_hyperparameters.values_to_npy_savefile(
        os.path.join(args.outpath, hyperparameter_file_merged))

    # Refit local model with the optimized hyperparameters
    analysis_system.update_component(inputloaders, 1, time_indices)

    ##################################################

    print('Computing outputs')

    # Produce an output for each time index
    for time_index in time_indices:

        # Get date for output
        outputdate = inputloaders[0].datetime_at_time_index(time_index)
        print('Evaluating output grid: {}'.format(outputdate))

        #Configure output grid
        outputstructure = OutputRectilinearGridStructure(
            time_index,
            outputdate,
            latitudes=numpy.linspace(-89.875,
                                     89.875,
                                     num=definitions.GLOBAL_FIELD_SHAPE[1]),
            longitudes=numpy.linspace(-179.875,
                                      179.875,
                                      num=definitions.GLOBAL_FIELD_SHAPE[2]))

        # print 'Size of grid : ', outputstructure.number_of_observations()

        # Evaluate expected value at these locations
        result_expected_value = analysis_system.evaluate_expected_value(
            'MAP', outputstructure, 'POINTWISE')
        result_expected_uncertainties = analysis_system.evaluate_expected_value(
            'post_STD', outputstructure, 'POINTWISE')

        # Make output filename
        pathname = 'eustace_example_output_{0:04d}{1:02d}{2:02d}.nc'.format(
            outputdate.year, outputdate.month, outputdate.day)
        pathname = os.path.join(args.outpath, pathname)
        print('Saving: {}'.format(pathname))

        # Save results
        filebuilder = FileBuilderGlobalField(
            pathname, time_index, 'Infilling Example', 'UNVERSIONED',
            definitions.TAS.name, '', 'Example data only',
            'eustace.analysis.advanced_standard.examples.example_eustace_few_days',
            '')
        filebuilder.add_global_field(
            definitions.TAS,
            result_expected_value.reshape(definitions.GLOBAL_FIELD_SHAPE))
        filebuilder.add_global_field(
            definitions.TASUNCERTAINTY,
            result_expected_uncertainties.reshape(
                definitions.GLOBAL_FIELD_SHAPE))
        filebuilder.save_and_close()

    print('Complete')