Example #1
    def test_set_array(self):

        p = SpaceTimeSPDEHyperparameters(space_log_sigma=0.3,
                                         space_log_rho=0.4,
                                         time_log_rho=0.5)
        p.set_array(numpy.array([23.2, 22.1, 20.5]))
        self.assertEqual(20.5, p.time_log_rho)
        self.assertEqual(23.2, p.space_log_sigma)
        self.assertEqual(22.1, p.space_log_rho)
Example #2
    def __init__(self):

        # Number of factors for large scale (factor analysis) component and initial hyperparameters
        n_factors = 5
        factors = []
        factor_hyperparameters = []
        for factor_index in range(n_factors):

            factor_hyperparameters.append(
                SpaceTimeSPDEHyperparameters(
                    space_log_sigma=0.0,
                    space_log_rho=numpy.log(10.0 * numpy.pi / 180 +
                                            25.0 * numpy.pi / 180 *
                                            (n_factors - factor_index) /
                                            n_factors),
                    time_log_rho=numpy.log(1 / 12.0 + 6 / 12.0 *
                                           (n_factors - factor_index) /
                                           n_factors)))

            factors.append(
                SpaceTimeFactorElement(n_triangulation_divisions=5,
                                       alpha=2,
                                       starttime=0,
                                       endtime=36,
                                       overlap_factor=2.5,
                                       H=1))

        super(LargeScaleDefinition, self).__init__(
            CombinationElement(factors),
            CombinationHyperparameters(factor_hyperparameters))
Example #3
    def test_get_array(self):

        numpy.testing.assert_equal([0.3, 0.4, 0.5],
                                   SpaceTimeSPDEHyperparameters(
                                       space_log_sigma=0.3,
                                       space_log_rho=0.4,
                                       time_log_rho=0.5).get_array())
Example #4
    def test_init(self):

        p = SpaceTimeSPDEHyperparameters(space_log_sigma=0.2,
                                         space_log_rho=0.8,
                                         time_log_rho=0.7)
        self.assertEqual(0.2, p.space_log_sigma)
        self.assertEqual(0.8, p.space_log_rho)
        self.assertEqual(0.7, p.time_log_rho)
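
The tests in Examples #1, #3 and #4 pin down a small container interface: three named log-hyperparameters, plus get_array/set_array packing in the order (space_log_sigma, space_log_rho, time_log_rho). A minimal sketch consistent with those tests is shown below; it is inferred from the tests for illustration only and is not the actual EUSTACE implementation.

import numpy

class SpaceTimeSPDEHyperparameters(object):
    # Hypothetical sketch: container for the three log-scale hyperparameters used above.

    def __init__(self, space_log_sigma, space_log_rho, time_log_rho):
        self.space_log_sigma = space_log_sigma
        self.space_log_rho = space_log_rho
        self.time_log_rho = time_log_rho

    def get_array(self):
        # Pack parameters in the order checked by test_get_array
        return numpy.array([self.space_log_sigma, self.space_log_rho, self.time_log_rho])

    def set_array(self, values):
        # Unpack in the same order, as test_set_array expects
        self.space_log_sigma, self.space_log_rho, self.time_log_rho = values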
Example #5
    def test_prior_precision_derivative(self):

        dQ_0 = self.prior.prior_precision_derivative(0)
        dQ_1 = self.prior.prior_precision_derivative(1)
        dQ_2 = self.prior.prior_precision_derivative(2)

        self.assertEqual(SPARSEFORMAT, dQ_0.getformat())
        self.assertEqual(SPARSEFORMAT, dQ_1.getformat())
        self.assertEqual(SPARSEFORMAT, dQ_2.getformat())
        self.assertEqual((210, 210), dQ_0.shape)
        self.assertEqual((210, 210), dQ_1.shape)
        self.assertEqual((210, 210), dQ_2.shape)

        # Numerical derivative
        numerical = [[], [], []]
        epsilon = 0.0001
        for parameter_index in range(3):

            for sign_index, sign in enumerate([-epsilon, +epsilon]):

                parameter_vector = numpy.array(
                    [numpy.log(1.0),
                     numpy.log(1.1),
                     numpy.log(1.2)])
                parameter_vector[parameter_index] += sign

                Q_numerical = SpaceTimeKroneckerPrior(
                    hyperparameters=SpaceTimeSPDEHyperparameters(
                        parameter_vector[0], parameter_vector[1],
                        parameter_vector[2]),
                    spatial_model=SphereMeshSPDE(level=1),
                    alpha=2,
                    temporal_model=LatticeSPDE.construct(
                        dimension_specification=[(23, 27, 5)],
                        basis_function=WendlandC4Basis(),
                        overlap_factor=2.5),
                    H=1.01).prior_precision()

                numerical[parameter_index].append(Q_numerical)

        numerical_dQ0 = ((numerical[0][1]) -
                         (numerical[0][0])) / (2.0 * epsilon)
        numerical_dQ1 = ((numerical[1][1]) -
                         (numerical[1][0])) / (2.0 * epsilon)
        numerical_dQ2 = ((numerical[2][1]) -
                         (numerical[2][0])) / (2.0 * epsilon)

        # Check numerical derivative corresponds to computed one
        numpy.testing.assert_almost_equal(dQ_0.todense(),
                                          numerical_dQ0.todense())
        numpy.testing.assert_almost_equal(dQ_1.todense(),
                                          numerical_dQ1.todense())
        numpy.testing.assert_almost_equal(dQ_2.todense(),
                                          numerical_dQ2.todense(),
                                          decimal=7)
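
The derivative test above checks each analytic precision derivative against a central difference: perturb one log-hyperparameter by plus and minus epsilon, rebuild the precision matrix, and form (Q(+epsilon) - Q(-epsilon)) / (2 * epsilon). A minimal, self-contained sketch of that central-difference pattern, with a hypothetical scalar function standing in for prior_precision, is:

import numpy

def central_difference(f, x, index, epsilon=1.0e-4):
    # Numerical derivative of f with respect to x[index] via central differences
    x_plus = numpy.array(x, dtype=float)
    x_minus = numpy.array(x, dtype=float)
    x_plus[index] += epsilon
    x_minus[index] -= epsilon
    return (f(x_plus) - f(x_minus)) / (2.0 * epsilon)

# Example: f(x) = x0**2 * x1 has df/dx0 = 2 * x0 * x1 = 12 at x = (3, 2)
f = lambda x: x[0] ** 2 * x[1]
assert abs(central_difference(f, [3.0, 2.0], 0) - 12.0) < 1.0e-6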
Example #6
    def setUp(self):

        self.prior = SpaceTimeFactorPrior(
            hyperparameters=SpaceTimeSPDEHyperparameters(numpy.log(1.0), numpy.log(1.1), numpy.log(1.2)),
            spatial_model=SphereMeshSPDE(level=1),
            alpha=2,
            temporal_model=LatticeSPDE.construct(
                dimension_specification=[(23, 27, 5)],
                basis_function=WendlandC4Basis(),
                overlap_factor=2.5),
            H=1.01)
Example #7
    def test_element_prior(self):

        element = SpaceTimeKroneckerElement(n_triangulation_divisions=1,
                                            alpha=2,
                                            starttime=23,
                                            endtime=27,
                                            n_nodes=5,
                                            overlap_factor=2.5,
                                            H=1.2)
        prior = element.element_prior(
            SpaceTimeSPDEHyperparameters(0.0, 1.0, 1.0))
        self.assertTrue(isinstance(prior, SpaceTimeKroneckerPrior))
Example #8
    def test_mini_world_large_and_local(self):

        # Use a number of time steps
        number_of_simulated_time_steps = 30

        # Large-scale spatial variability
        simulated_large_variation = 10.0

        # Local variability
        simulated_local_variation = 1.0

        # Iterations to use
        number_of_solution_iterations = 5

        # Build system

        # Large-scale factor
        element_large = SpaceTimeKroneckerElement(
            n_triangulation_divisions=1,
            alpha=2,
            starttime=0,
            endtime=number_of_simulated_time_steps + 1,
            n_nodes=number_of_simulated_time_steps + 2,
            overlap_factor=2.5,
            H=1)
        initial_hyperparameters_large = SpaceTimeSPDEHyperparameters(
            space_log_sigma=0.0,
            space_log_rho=numpy.log(numpy.radians(5.0)),
            time_log_rho=numpy.log(1.0 / 365.0))

        component_large = SpaceTimeComponent(
            ComponentStorage_InMemory(element_large,
                                      initial_hyperparameters_large),
            SpaceTimeComponentSolutionStorage_InMemory())

        # And a local process
        component_local = SpatialComponent(
            ComponentStorage_InMemory(
                LocalElement(n_triangulation_divisions=3),
                LocalHyperparameters(log_sigma=0.0,
                                     log_rho=numpy.log(numpy.radians(5.0)))),
            SpatialComponentSolutionStorage_InMemory())

        analysis_system = AnalysisSystem([component_large, component_local],
                                         ObservationSource.TMEAN,
                                         log=StringIO())
        # analysis_system = AnalysisSystem([ component_large ], ObservationSource.TMEAN)
        # analysis_system = AnalysisSystem([ component_local ], ObservationSource.TMEAN)

        # use fixed locations from icosahedron
        fixed_locations = cartesian_to_polar2d(
            MeshIcosahedronSubdivision.build(3).points)

        # random measurement at each location
        numpy.random.seed(8976)
        field_basis = simulated_large_variation * numpy.random.randn(
            fixed_locations.shape[0])

        # some time function that varies over a year
        time_basis = numpy.cos(
            numpy.linspace(0.1, 1.75 * numpy.pi,
                           number_of_simulated_time_steps))

        # kronecker product of the two
        large_scale_process = numpy.kron(field_basis,
                                         numpy.expand_dims(time_basis, 1))

        # Random local changes where mean change at each time is zero
        # local_process = simulated_local_variation * numpy.random.randn(large_scale_process.shape[0], large_scale_process.shape[1])
        # local_process -= numpy.tile(local_process.mean(axis=1), (local_process.shape[1], 1)).T

        local_process = numpy.zeros(large_scale_process.shape)
        somefield = simulated_local_variation * numpy.random.randn(
            1, large_scale_process.shape[1])
        somefield -= somefield.ravel().mean()
        local_process[10, :] = somefield
        local_process[11, :] = -somefield

        # Add the two processes
        measurement = large_scale_process + local_process

        # Simulated inputs
        simulated_input_loader = SimulatedInputLoader(fixed_locations,
                                                      measurement, 0.001)

        # Simulate evaluation of this time index
        simulated_time_indices = range(number_of_simulated_time_steps)

        # All systems linear so single update should be ok
        analysis_system.update([simulated_input_loader],
                               simulated_time_indices)

        # Get all results
        result = numpy.zeros(measurement.shape)
        for t in range(number_of_simulated_time_steps):
            result[t, :] = analysis_system.evaluate_expected_value(
                'MAP',
                SimulatedObservationStructure(t, fixed_locations, None, None),
                flag='POINTWISE')

        disparity_large_scale = (numpy.abs(result -
                                           large_scale_process)).ravel().max()
        # print 'large scale disparity: ', disparity_large_scale

        disparity_overall = (numpy.abs(result - measurement)).ravel().max()
        # print 'overall disparity: ', disparity_overall

        numpy.testing.assert_almost_equal(result, measurement, decimal=4)
        self.assertTrue(disparity_overall < 1E-4)
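
In the test above (and again in Example #9 below), the simulated large-scale signal is built as the outer product of a spatial pattern and a temporal pattern via numpy.kron, yielding a (time, location) matrix. A small sketch with hypothetical shapes (n_locations and n_times are illustration values, not from the original) shows the construction:

import numpy

n_locations = 4
n_times = 3
field_basis = numpy.random.randn(n_locations)                    # spatial pattern, shape (4,)
time_basis = numpy.cos(numpy.linspace(0.0, numpy.pi, n_times))   # temporal pattern, shape (3,)

# kron of the spatial vector with the (n_times, 1) temporal column gives a
# (n_times, n_locations) field where measurement[t, n] == field_basis[n] * time_basis[t]
measurement = numpy.kron(field_basis, numpy.expand_dims(time_basis, 1))
assert measurement.shape == (n_times, n_locations)
assert numpy.allclose(measurement, numpy.outer(time_basis, field_basis))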
Example #9
    def test_mini_world_noiseless(self):

        # Use a number of time steps
        number_of_simulated_time_steps = 1

        # Build system
        element = SpaceTimeFactorElement(
            n_triangulation_divisions=3,
            alpha=2,
            starttime=0,
            endtime=number_of_simulated_time_steps + 1,
            n_nodes=number_of_simulated_time_steps + 2,
            overlap_factor=2.5,
            H=1)
        initial_hyperparameters = SpaceTimeSPDEHyperparameters(
            space_log_sigma=0.0,
            space_log_rho=numpy.log(numpy.radians(45.0)),
            time_log_rho=numpy.log(3.0 / 365.0))
        component = SpaceTimeComponent(
            ComponentStorage_InMemory(element, initial_hyperparameters),
            SpaceTimeComponentSolutionStorage_InMemory())
        analysis_system = AnalysisSystem([component],
                                         ObservationSource.TMEAN,
                                         log=StringIO())

        # use fixed locations from icosahedron
        fixed_locations = cartesian_to_polar2d(
            MeshIcosahedronSubdivision.build(3).points)

        # random measurement at each location
        numpy.random.seed(8976)
        field_basis = 10.0 * numpy.random.randn(fixed_locations.shape[0])

        # some time function that varies over a year
        time_basis = numpy.cos(
            numpy.linspace(0.1, 1.75 * numpy.pi,
                           number_of_simulated_time_steps))

        # kronecker product of the two
        measurement = numpy.kron(field_basis, numpy.expand_dims(time_basis, 1))

        # Simulated inputs
        simulated_input_loader = SimulatedInputLoader(fixed_locations,
                                                      measurement, 0.01)

        # Simulate evaluation of this time index
        simulated_time_indices = range(number_of_simulated_time_steps)

        # Iterate
        for iteration in range(5):
            analysis_system.update([simulated_input_loader],
                                   simulated_time_indices)

        # Get all results
        result = numpy.zeros(measurement.shape)
        for t in range(number_of_simulated_time_steps):
            result[t, :] = analysis_system.evaluate_expected_value(
                'MAP',
                SimulatedObservationStructure(t, fixed_locations, None, None),
                flag='POINTWISE')

        # Should be very close to original because specified noise is low
        numpy.testing.assert_almost_equal(result, measurement)
        max_disparity = (numpy.abs(result - measurement)).ravel().max()
        self.assertTrue(max_disparity < 1E-5)
Example #10
def main():

    print('EUSTACE example using HadCRUT4 monthly data')

    # Input data path
    input_basepath = os.path.join(WORKSPACE_PATH, 'data/incoming/HadCRUT4.5.0.0')

    # Input filenames
    input_filenames = [
        'hadcrut4_median_netcdf.nc',
        'hadcrut4_uncorrelated_supplementary.nc',
        'hadcrut4_blended_uncorrelated.nc' ]

    # Months to process
    time_indices = range(2)

    # Climatology component
    climatology_component = SpaceTimeComponent(ComponentStorage_InMemory(SeasonalElement(n_triangulation_divisions=5, n_harmonics=5, include_local_mean=True),
                                                                         SeasonalHyperparameters(n_spatial_components=6, common_log_sigma=1.0, common_log_rho=0.0)),
                                               SpaceTimeComponentSolutionStorage_InMemory())

    # Number of factors for large scale (factor analysis) component and initial hyperparameters
    n_factors = 5
    factors = [ ]
    factor_hyperparameters = [ ]
    for factor_index in range(n_factors):

        factor_hyperparameters.append( SpaceTimeSPDEHyperparameters(
                space_log_sigma=0.0,
                space_log_rho=numpy.log(10.0 * numpy.pi/180 + 25.0 * numpy.pi/180 *(n_factors - factor_index) / n_factors),
                time_log_rho=numpy.log(1/12.0 + 6/12.0*(n_factors - factor_index) / n_factors)) )

        factors.append( SpaceTimeFactorElement(n_triangulation_divisions=5, alpha=2, starttime=0, endtime=36, overlap_factor=2.5, H=1) )

    # Large scale (factor analysis) component
    large_scale_component = SpaceTimeComponent(ComponentStorage_InMemory(CombinationElement(factors), CombinationHyperparameters(factor_hyperparameters)),
                                               SpaceTimeComponentSolutionStorage_InMemory())

    # Local component
    local_component = SpatialComponent(ComponentStorage_InMemory(LocalElement(n_triangulation_divisions=4), 
                                                                 LocalHyperparameters(log_sigma=0.0, log_rho=numpy.log(10.0 * numpy.pi/180))),
                                       SpatialComponentSolutionStorage_InMemory())

    print('Analysing inputs')

    # Analysis system using the specified components, for the Tmean observable
    analysis_system = AnalysisSystem(
        [ climatology_component, large_scale_component, local_component ],
        ObservationSource.TMEAN)

    # Make filelist
    input_filelist = [ os.path.join(input_basepath, filename) for filename in input_filenames ]

    # Object to load HadCRUT4 inputs at time indices
    inputloader = AnalysisSystemInputLoaderHadCRUT4(input_filelist)

    # Update with data
    analysis_system.update([ inputloader ], time_indices)

    print('Computing outputs')

    # Produce an output for each time index
    for time_index in time_indices:

        # Make output filename
        outputdate = inputloader.datetime_at_time_index(time_index)
        pathname = 'example_output_{0:04d}{1:02d}.nc'.format(outputdate.year, outputdate.month)
        print('Saving:', pathname)

        # Configure output grid
        outputstructure = OutputRectilinearGridStructure(
            time_index, outputdate,
            latitudes=numpy.linspace(-87.5, 87.5, num=36),
            longitudes=numpy.linspace(-177.5, 177.5, num=72))

        # Evaluate expected value at these locations
        result_expected_value = analysis_system.evaluate_expected_value(outputstructure)

        # Save results
        filebuilder = FileBuilderHadCRUT4ExampleOutput(pathname, outputstructure)
        filebuilder.add_global_field(TAS_ANOMALY, result_expected_value.reshape(1,36,72))
        filebuilder.save_and_close()

    print('Complete')
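
The snippet above defines main() but does not show a module entry point. If it were run as a standalone script (and assuming the required imports and constants such as WORKSPACE_PATH are defined in the same module), a standard guard would invoke it:

if __name__ == '__main__':
    main()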