Example #1
    def test_pga(self):
        sa = SA(period=1e-50, damping=5)
        pga = PGA()

        cormo = JB2009CorrelationModel(vs30_clustering=False)
        corma = cormo._get_correlation_matrix(self.SITECOL, sa)
        corma2 = cormo._get_correlation_matrix(self.SITECOL, pga)
        self.assertTrue((corma == corma2).all())

        cormo = JB2009CorrelationModel(vs30_clustering=True)
        corma = cormo._get_correlation_matrix(self.SITECOL, sa)
        corma2 = cormo._get_correlation_matrix(self.SITECOL, pga)
        self.assertTrue((corma == corma2).all())
Example #2
    def test_correlation_with_total_stddev(self):
        mean1 = 10
        mean2 = 14
        inter = 1e-300
        intra1 = 0.2
        intra2 = 1.6
        p1 = Point(0, 0)
        p2 = Point(0, 0.3)
        sites = [
            Site(p1, mean1, False, inter, intra1),
            Site(p2, mean2, False, inter, intra2)
        ]
        self.sites = SiteCollection(sites)

        numpy.random.seed(41)
        cormo = JB2009CorrelationModel(vs30_clustering=False)
        gsim = FakeGSIMTotalStdDev(self)
        gsim.expect_same_sitecol = False
        with self.assertRaises(CorrelationButNoInterIntraStdDevs):
            ground_motion_fields(self.rupture,
                                 self.sites, [self.imt1],
                                 gsim,
                                 truncation_level=None,
                                 realizations=6000,
                                 correlation_model=cormo)
Example #3
    def test_rupture_site_filtering(self):
        mean = 10
        inter = 2
        intra = 3
        points = [Point(0, 0), Point(0, 0.05)]
        sites = [Site(point, mean, False, inter, intra) for point in points]
        self.sites = SiteCollection(sites)

        def rupture_site_filter(rupture_sites):
            [(rupture, sites)] = rupture_sites
            yield rupture, sites.filter(sites.mesh.lats == 0)

        self.gsim.expect_same_sitecol = False

        numpy.random.seed(37)
        cormo = JB2009CorrelationModel(vs30_clustering=False)
        gmfs = ground_motion_fields(self.rupture,
                                    self.sites, [self.imt1],
                                    self.gsim,
                                    truncation_level=None,
                                    realizations=1,
                                    correlation_model=cormo,
                                    rupture_site_filter=rupture_site_filter)

        s1gmf, s2gmf = gmfs[self.imt1]
        numpy.testing.assert_array_equal(s2gmf, 0)
        numpy.testing.assert_array_almost_equal(s1gmf, 11.1852253)
Example #4
    def test_no_correlation_mean_and_intra_respected(self):
        mean1 = 10
        mean2 = 14
        inter = 1e-300
        intra1 = 0.2
        intra2 = 1.6
        p1 = Point(0, 0)
        p2 = Point(0, 0.3)
        sites = [
            Site(p1, mean1, False, inter, intra1),
            Site(p2, mean2, False, inter, intra2)
        ]
        self.sites = SiteCollection(sites)

        numpy.random.seed(41)
        cormo = JB2009CorrelationModel(vs30_clustering=False)
        s1_intensity, s2_intensity = ground_motion_fields(
            self.rupture,
            self.sites,
            [self.imt1],
            self.gsim,
            truncation_level=None,
            realizations=6000,
            correlation_model=cormo,
        )[self.imt1]

        self.assertAlmostEqual(s1_intensity.mean(), mean1, delta=1e-3)
        self.assertAlmostEqual(s2_intensity.mean(), mean2, delta=1e-3)
        self.assertAlmostEqual(s1_intensity.std(), intra1, delta=2e-3)
        self.assertAlmostEqual(s2_intensity.std(), intra2, delta=1e-2)
Example #5
    def test_no_truncation(self):
        mean = 10
        inter = 1e-300
        intra = 3
        points = [
            Point(0, 0),
            Point(0, 0.05),
            Point(0.06, 0.025),
            Point(0, 1.0),
            Point(-10, -10)
        ]
        sites = [Site(point, mean, False, inter, intra) for point in points]
        self.sites = SiteCollection(sites)

        numpy.random.seed(23)
        cormo = JB2009CorrelationModel(vs30_clustering=False)
        corma = cormo._get_correlation_matrix(self.sites, self.imt1)
        gmfs = ground_motion_fields(self.rupture,
                                    self.sites, [self.imt1],
                                    self.gsim,
                                    truncation_level=None,
                                    realizations=6000,
                                    correlation_model=cormo)

        sampled_corma = numpy.corrcoef(gmfs[self.imt1])
        assert_allclose(corma, sampled_corma, rtol=0, atol=0.02)
Example #6
    def test_with_intra_and_inter(self):
        mean = 10
        inter = 2
        intra = 3
        sites = SiteCollection([Site(Point(0, 0), mean, False, inter, intra)])

        self.gsim.expect_same_sitecol = False

        numpy.random.seed(37)
        cormo = JB2009CorrelationModel(vs30_clustering=False)

        distribution = scipy.stats.norm()
        eps_intra = distribution.rvs(size=(1, 1))
        eps_intra = numpy.array(
            cormo.apply_correlation(sites, self.imt1, eps_intra))[0]

        eps_inter = distribution.rvs(size=(1, 1))

        gmf = ground_motion_field_with_residuals(
            self.rupture,
            sites,
            self.imt1,
            self.gsim,
            truncation_level=None,
            intra_residual_epsilons=eps_intra,
            inter_residual_epsilons=eps_inter)

        numpy.testing.assert_array_almost_equal(gmf, 11.1852253)
Example #7
    def test_period_one_and_above(self):
        cormo = JB2009CorrelationModel(vs30_clustering=False)
        cormo2 = JB2009CorrelationModel(vs30_clustering=True)
        imt = SA(period=1.0, damping=5)
        corma = cormo._get_correlation_matrix(self.SITECOL, imt)
        aaae(corma, [[1, 0.2730787, 1, 0.2730787],
                     [0.2730787, 1, 0.2730787, 0.07457198],
                     [1, 0.2730787, 1, 0.2730787],
                     [0.2730787, 0.07457198, 0.2730787, 1]])
        corma2 = cormo2._get_correlation_matrix(self.SITECOL, imt)
        self.assertTrue((corma == corma2).all())

        imt = SA(period=10.0, damping=5)
        corma = cormo._get_correlation_matrix(self.SITECOL, imt)
        aaae(corma, [[1, 0.56813402, 1, 0.56813402],
                     [0.56813402, 1, 0.56813402, 0.32277627],
                     [1, 0.56813402, 1, 0.56813402],
                     [0.56813402, 0.32277627, 0.56813402, 1]])
        corma2 = cormo2._get_correlation_matrix(self.SITECOL, imt)
        self.assertTrue((corma == corma2).all())
Example #8
    def test_filtered_sitecol(self):
        filtered = self.SITECOL.filtered([0, 2])
        numpy.random.seed(13)
        cormo = JB2009CorrelationModel(vs30_clustering=False)
        intra_residuals_sampled = numpy.random.normal(size=(2, 5))
        intra_residuals_correlated = cormo.apply_correlation(
            filtered, PGA(), intra_residuals_sampled)
        aaae(intra_residuals_correlated,
             [[-0.71239066, 0.75376638, -0.04450308, 0.45181234, 1.34510171],
              [0.51816327, 1.36481251, 0.86016437, 1.48732124, -1.01860545]],
             decimal=6)
Example #9
    def test_clustered(self):
        cormo = JB2009CorrelationModel(vs30_clustering=True)
        imt = SA(period=0.001, damping=5)
        corma = cormo._get_correlation_matrix(self.SITECOL, imt)
        aaae(corma, [[1, 0.44046654, 1, 0.44046654],
                     [0.44046654, 1, 0.44046654, 0.19401077],
                     [1, 0.44046654, 1, 0.44046654],
                     [0.44046654, 0.19401077, 0.44046654, 1]])

        imt = SA(period=0.5, damping=5)
        corma = cormo._get_correlation_matrix(self.SITECOL, imt)
        aaae(corma, [[1, 0.36612758, 1, 0.36612758],
                     [0.36612758, 1, 0.36612758, 0.1340494],
                     [1, 0.36612758, 1, 0.36612758],
                     [0.36612758, 0.1340494, 0.36612758, 1]])
Example #10
    def test_no_clustering(self):
        cormo = JB2009CorrelationModel(vs30_clustering=False)
        imt = SA(period=0.1, damping=5)
        corma = cormo._get_correlation_matrix(self.SITECOL, imt)
        aaae(corma, [[1, 0.03823366, 1, 0.03823366],
                     [0.03823366, 1, 0.03823366, 0.00146181],
                     [1, 0.03823366, 1, 0.03823366],
                     [0.03823366, 0.00146181, 0.03823366, 1]])

        imt = SA(period=0.95, damping=5)
        corma = cormo._get_correlation_matrix(self.SITECOL, imt)
        aaae(corma, [[1, 0.26107857, 1, 0.26107857],
                     [0.26107857, 1, 0.26107857, 0.06816202],
                     [1, 0.26107857, 1, 0.26107857],
                     [0.26107857, 0.06816202, 0.26107857, 1]])
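The expected matrices in Examples #7, #9, and #10 all follow the Jayaram & Baker (2009) model, in which the correlation between two sites decays exponentially with their separation distance h (in km): rho(h) = exp(-3h / b(T)), where the range b depends on the spectral period T and on the vs30_clustering flag. A minimal sketch of that formula, assuming a site spacing of roughly 11.12 km inferred from the test values above:

import numpy as np

def jb2009_range_km(period, vs30_clustering=False):
    # Correlation range b(T) in km, following Jayaram & Baker (2009).
    if period >= 1.0:
        return 22.0 + 3.7 * period
    return 40.7 - 15.0 * period if vs30_clustering else 8.5 + 17.2 * period

def jb2009_rho(h_km, period, vs30_clustering=False):
    # Spatial correlation coefficient rho(h) = exp(-3 * h / b(T)).
    return np.exp(-3.0 * h_km / jb2009_range_km(period, vs30_clustering))

# Reproduces the off-diagonal entries for neighbouring sites ~11.12 km apart:
print(jb2009_rho(11.12, 1.0))                          # ~0.273  (Example #7)
print(jb2009_rho(11.12, 0.001, vs30_clustering=True))  # ~0.440  (Example #9)
print(jb2009_rho(11.12, 0.1))                          # ~0.038  (Example #10)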
Example #11
    def test(self):
        numpy.random.seed(13)
        cormo = JB2009CorrelationModel(vs30_clustering=False)
        intra_residuals_sampled = numpy.random.normal(size=(3, 100000))
        intra_residuals_correlated = cormo.apply_correlation(
            self.SITECOL, PGA(), intra_residuals_sampled)
        inferred_corrcoef = numpy.corrcoef(intra_residuals_correlated)
        mean = intra_residuals_correlated.mean()
        std = intra_residuals_correlated.std()
        self.assertAlmostEqual(mean, 0, delta=0.002)
        self.assertAlmostEqual(std, 1, delta=0.002)

        actual_corrcoef = cormo._get_correlation_matrix(self.SITECOL, PGA())
        numpy.testing.assert_almost_equal(inferred_corrcoef,
                                          actual_corrcoef,
                                          decimal=2)
Example #12
    def setUp(self):
        self.imt = "SA(0.15)"

        points = [Point(0, 0), Point(10, 10), Point(20, 20)]
        sites = [Site(point, 10, False, 2, 3, id=i)
                 for i, point in enumerate(points)]
        self.sites = models.SiteCollection(sites)

        assets = [mock.Mock] * 5
        self.sites_assets = ((0, assets[0:1]),
                             (1, assets[1:]),
                             (2, assets[2:]))

        self.gsims = mock.Mock()
        self.gsims.__getitem__ = mock.Mock(return_value=mock.Mock())
        self.cormo = JB2009CorrelationModel(vs30_clustering=False)
Example #13
from collections import OrderedDict
from shapely import wkt
from openquake.hazardlib.geo.point import Point
from openquake.hazardlib.geo.surface import PlanarSurface
from openquake.hazardlib.geo.geodetic import geodetic_distance
from openquake.hazardlib.correlation import JB2009CorrelationModel
from openquake.hazardlib.site import Site, SiteCollection
from openquake.hazardlib.imt import from_string
from openquake.hazardlib.gsim import get_available_gsims
from openquake.hazardlib.gsim.base import ContextMaker
from openquake.hazardlib.sourceconverter import RuptureConverter
from openquake.hazardlib import nrml
from smtk.residuals.gmpe_residuals import Residuals


DEFAULT_CORRELATION = JB2009CorrelationModel(False)
GSIM_LIST = get_available_gsims()


def build_planar_surface(geometry):
    """
    Builds the planar rupture surface from the openquake.nrmllib.models
    instance
    """
    # Read geometry from wkt
    geom = wkt.loads(geometry.wkt)
    top_left = Point(geom.xy[0][0],
                     geom.xy[1][0],
                     geometry.upper_seismo_depth)
    top_right = Point(geom.xy[0][1],
                      geom.xy[1][1],
Example #14
    def test(self):
        cormo = JB2009CorrelationModel(vs30_clustering=False)
        lt = cormo.get_lower_triangle_correlation_matrix(self.SITECOL, PGA())
        aaae(lt, [[1.0, 0.0, 0.0],
                  [1.97514806e-02, 9.99804920e-01, 0.0],
                  [1.97514806e-02, 5.42206860e-20, 9.99804920e-01]])
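The matrix returned by get_lower_triangle_correlation_matrix is a lower-triangular (Cholesky-type) factor of the site-to-site correlation matrix; multiplying independent standard-normal residuals by it yields spatially correlated residuals, which is conceptually what apply_correlation does in the earlier examples. A minimal NumPy sketch with an invented 3x3 correlation matrix:

import numpy as np

# Hypothetical correlation matrix for three sites; in the tests above it would
# come from cormo._get_correlation_matrix(self.SITECOL, PGA()).
corma = np.array([[1.0, 0.5, 0.2],
                  [0.5, 1.0, 0.5],
                  [0.2, 0.5, 1.0]])

# Lower-triangular factor of the correlation matrix.
lt = np.linalg.cholesky(corma)

# Correlating iid standard-normal residuals: the sample correlation of the
# result approaches corma as the number of realizations grows.
np.random.seed(13)
iid = np.random.normal(size=(3, 100000))
correlated = lt @ iid
print(np.corrcoef(correlated).round(2))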
Example #15
def main(var, r, voi, rand, cor_model, vs_corr):
    """
    Main program for computing spatial correlation
    
    INPUTS: 
    var- variables dictionary from initialize function. Contains 
    M,N,K,site_collection_SM, site_collection_station
    uncertaintydata, data, location_lon/lat_g
    r - radius
    voi- variable of interest, i.e. PGA
    rand- array of random variables
    cor_model- JB2009 or GA2010
    vs_corr- boolean to determine if Vs30 are correlated. See
    JB2009
    intensity_factor- factor for non-native data
    OUT: cor- grid of spatially correlated epsilon
    data- grid of ShakeMap data
    data_new- data with added spatial correlation
    grid_arr- array for storing grid indices for multiple realizations
    mu_arr- array for storing Sig21.T*Sig11inv for multiple realizations
    sigma_arr- array for storing sigma for multiple realizations 
    """
    start = time.time()    
    OL_time = 0
    IL_time = 0

    M = var['M']
    N = var['N']
    K = var['K']


    if cor_model == 'JB2009':
        CM = JB2009CorrelationModel(vs_corr)
    else:
        CM = GA2010CorrelationModel()

    # Initialize vector where data_new will be stored
    X = np.zeros([M*N,1])

    # Initialize vectors for storing data for multiple realizations
    grid_arr = [None] * (M*N)
    mu_arr = [None] * (M*N)
    sigma_arr = np.zeros([M*N,1])
    rand_arr = np.zeros([M*N,1])
    
    # Get spacing of horizontal and vertical points
    ld  = set_up_grid_dist(M,N,var['site_collection_SM'])
    pre_loop_time = time.time() - start

    for i in range(0,M):
        OL_start = time.time()
        
        # Find the number of points within the radius horizontally and vertically for each row
        vhva = calc_vert_hor(i, r, ld['l'], ld['d'])

        # Calculate the full distance matrix for each row
        dist = calc_full_dist(i, vhva['vert'], vhva['hor'], N, var['site_collection_SM'])
        first_time_per_row = 1
        OL_time += time.time() - OL_start

        for j in range(0,N):
            IL_start = time.time()
            num = i*N+j
            
            # Find the reduced distance matrix 
            dist_calc = reduce_distance(j, vhva['vert'], vhva['hor'], vhva['added_vert'], N, dist['distance_matrix'], dist['grid_indices'])

            # Include stations in distance matrix and find the indices of the points within the radius
            out = inc_stations(j, i, N, K, r, var['site_collection_SM'], var['site_collection_station'], 
                               dist_calc['dist_mat'], X, dist_calc['inc_ind'])

            if np.size(dist_calc['inc_indices']) == 1:
                # no conditioning points, correlation value is random
                X[num] = rand[num]

                # Store for multiple realizations
                grid_arr [num] = np.zeros(0)
                mu_arr   [num] = np.zeros(0)
                sigma_arr[num] = 1
                rand_arr [num] = X[num]
            else:
                # Check if reduced distance matrix is full distance matrix
                if ((vhva['vert'] == 1 and dist_calc['num_indices'] == vhva['hor'] + 1) or
                    (vhva['vert'] != 1 and dist_calc['num_indices'] == 2*vhva['hor'] + 1)) and \
                        np.size(out['inc_sta_indices']) == 0:
                    # If this is the first full distance matrix per row, calculate base case
                    if first_time_per_row == 1:
                        base = calculate_corr(out['dist_mat'], voi, CM)
                        first_time_per_row = 0

                    mu  = base['Sig12'].T*base['Sig11inv']*out['x']
                    rand_num = rand[num]
                    X[num] = mu+rand_num*base['R']

                    # Store for multiple realizations
                    grid_arr [num] = dist_calc['inc_ind'][0:-1]
                    mu_arr   [num] = base['Sig12'].T*base['Sig11inv']
                    sigma_arr[num] = base['R']
                    rand_arr [num] = rand_num

                else:
                    other = calculate_corr(out['dist_mat'], voi, CM)
                    mu = other['Sig12'].T*other['Sig11inv']*out['x']
                    rand_num = rand[num]
                    X[num] = mu+rand_num*other['R']

                    # Store for multiple realizations                                                                             
                    grid_arr [num] = dist_calc['inc_ind'][0:-1]
                    mu_arr   [num] = other['Sig12'].T*other['Sig11inv']
                    sigma_arr[num] = other['R']
                    rand_arr [num] = rand_num

            IL_time += time.time() - IL_start
            if np.mod(i*N+j,5000) == 0:
                print 'Finishing step:', i*N+j

    DATA = var['data']
    COR = np.reshape(X, [M,N]) #units epsilon

    X = np.multiply(COR, var['uncertaintydata']) # ln(pctg)
    DATA_NEW = np.multiply(DATA,np.exp(X))

    end = time.time() - start
    print 'Total Time', end
    print 'Pre loop Time', pre_loop_time
    print 'Inner loop time', IL_time
    print 'Outer loop time', OL_time

    return {'cor':COR, 'data':DATA, 'data_new':DATA_NEW, 'grid_arr':grid_arr, 'mu_arr':mu_arr, 'sigma_arr':sigma_arr}
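The core of the inner loop above is a sequential conditional-Gaussian update: the epsilon at a new grid point is drawn with mean Sig12.T * Sig11inv * x, conditioned on already-simulated neighbours and station data, and with standard deviation R, the conditional standard deviation. The exact contents of calculate_corr's return value are not shown in this snippet, so the following is only a sketch of that single step with invented toy inputs:

import numpy as np

def conditional_step(Sig11, Sig12, x, eps):
    # One sequential-simulation step, as sketched from the loop above:
    #   Sig11 - correlation matrix of the conditioning (already simulated) points
    #   Sig12 - correlation vector between conditioning points and the new point
    #   x     - epsilon values already assigned to the conditioning points
    #   eps   - a fresh standard-normal draw for the new point
    Sig11inv = np.linalg.inv(Sig11)
    mu = Sig12 @ Sig11inv @ x                    # conditional mean
    R = np.sqrt(1.0 - Sig12 @ Sig11inv @ Sig12)  # conditional std dev
    return mu + eps * R

# Toy usage with two conditioning points:
Sig11 = np.array([[1.0, 0.5],
                  [0.5, 1.0]])
Sig12 = np.array([0.8, 0.3])
x = np.array([0.4, -1.2])
print(conditional_step(Sig11, Sig12, x, eps=0.7))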