Example #1
    def test_matrices(self):

        # distance matrix
        lons = numpy.array([84., 84., 84., 85.5, 85.5, 85.5, 87., 87., 87.])
        lats = numpy.array([26., 27.5, 29., 26., 27.5, 29., 26., 27.5, 29.])
        dmatrix = geo.geodetic.distance_matrix(lons, lats)
        aae(dmatrix.sum(), 18539.6131407)

        # spatial correlation
        sca = spatial_correlation_array(dmatrix, imts, 'yes')
        aae(sca.sum(), 36.000370229)

        # spatial covariance
        std = numpy.array([(0.5, 0.52, 0.64, 0.73)] * 9, imt_dt)  # 9 sites
        scov = spatial_covariance_array([std[n] for n in imt_dt.names], sca)
        aae(scov.sum(), 13.166200147)

        # cross correlation
        ccor = cross_correlation_matrix(imts, 'yes')
        aae(ccor.sum(), 10.49124788)

        # cholesky decomposition
        L = cholesky(scov, ccor)
        self.assertEqual(L.shape, (36, 36))
        aae(L.sum(), 30.5121263)

        # intensity
        val = numpy.array(
            [(5.38409665, 3.9383686, 3.55435415, 4.37692394)] * 9, imt_dt)

        shakemap = numpy.zeros(9, shakemap_dt)  # 9 sites
        shakemap['lon'] = lons
        shakemap['lat'] = lats
        shakemap['vs30'] = numpy.array([301.17] * 9)
        shakemap['val'] = val
        shakemap['std'] = std
        _, gmfs = to_gmfs(
            shakemap, 'yes', 'no', site_effects=False, trunclevel=3,
            num_gmfs=2, seed=42)
        # shape (N, E, M)
        aae(gmfs[..., 0].sum(axis=0), [0.3708301, 0.5671011])  # PGA

        _, gmfs = to_gmfs(
            shakemap, 'yes', 'yes', site_effects=True, trunclevel=3,
            num_gmfs=2, seed=42)
        aae(gmfs[..., 0].sum(axis=0), [0.5127146, 0.7800232])  # PGA
        aae(gmfs[..., 2].sum(axis=0), [0.4932519, 0.6731384])  # SA(1.0)

        # disable spatial correlation
        _, gmfs = to_gmfs(
            shakemap, 'no', 'no', site_effects=False,
            trunclevel=3, num_gmfs=2, seed=42)
        # shape (N, E, M)
        aae(gmfs[..., 0].sum(axis=0), [0.370832, 0.5670994])  # PGA

        _, gmfs = to_gmfs(
            shakemap, 'no', 'yes', site_effects=True,
            trunclevel=3, num_gmfs=2, seed=42)
        aae(gmfs[..., 0].sum(axis=0), [0.5127171, 0.7800206])  # PGA
        aae(gmfs[..., 2].sum(axis=0), [0.4932519, 0.6731384])  # SA(1.0)
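A short dimension-bookkeeping sketch may make the assertions above easier to follow. The only assumption is that the module-level imt_dt carries four IMTs, which the length-4 std/val tuples and the asserted (36, 36) Cholesky shape imply; everything else is read directly off the test.

# Dimension bookkeeping for the test above (a sketch, not part of the test).
N = 9   # sites, the length of lons/lats
M = 4   # IMTs, the length of the std/val tuples (len(imt_dt.names))
E = 2   # sampled ground motion fields (num_gmfs)

# dmatrix holds the site-to-site distances, shape (N, N) == (9, 9).
# cholesky(scov, ccor) assembles the covariance over every (site, IMT)
# pair, hence the asserted shape (N*M, N*M) == (36, 36).
# to_gmfs returns gmfs with shape (N, E, M) == (9, 2, 4); summing over
# axis=0 (sites) leaves one value per sampled field, which is why the
# expected PGA values are length-2 arrays like [0.3708301, 0.5671011].
assert (N * M, N * M) == (36, 36)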
Example #2
    def test_matrices(self):

        # distance matrix
        lons = numpy.array([84., 84., 84., 85.5, 85.5, 85.5, 87., 87., 87.])
        lats = numpy.array([26., 27.5, 29., 26., 27.5, 29., 26., 27.5, 29.])
        dmatrix = geo.geodetic.distance_matrix(lons, lats)
        aae(dmatrix.sum(), 18539.6131407)

        # spatial correlation
        sca = spatial_correlation_array(dmatrix, imts, 'spatial')
        aae(sca.sum(), 36.000370229)

        # spatial covariance
        std = numpy.array([(0.5, 0.52, 0.64, 0.73)] * 9, imt_dt)  # 9 sites
        scov = spatial_covariance_array([std[n] for n in imt_dt.names], sca)
        aae(scov.sum(), 13.166200147)

        # cross correlation
        ccor = cross_correlation_matrix(imts, 'cross')
        aae(ccor.sum(), 10.49124788)

        # cholesky decomposition
        L = cholesky(scov, ccor)
        self.assertEqual(L.shape, (36, 36))
        aae(L.sum(), 30.5121263)

        # intensity
        val = numpy.array([(5.38409665, 3.9383686, 3.55435415, 4.37692394)] *
                          9, imt_dt)

        shakemap = numpy.zeros(9, shakemap_dt)  # 9 sites
        shakemap['lon'] = lons
        shakemap['lat'] = lats
        shakemap['vs30'] = numpy.array([301.17] * 9)
        shakemap['val'] = val
        shakemap['std'] = std
        gmfs = to_gmfs(shakemap,
                       crosscorr='corr',
                       site_effects=False,
                       trunclevel=3,
                       num_gmfs=2,
                       seed=42)
        # shape (R, N, E, M)
        aae(gmfs[..., 0].sum(axis=1), [[0.3708301, 0.5671011]])  # PGA

        gmfs = to_gmfs(shakemap,
                       crosscorr='cross',
                       site_effects=True,
                       trunclevel=3,
                       num_gmfs=2,
                       seed=42)
        aae(gmfs[..., 0].sum(axis=1), [[0.4101717, 0.6240185]])  # PGA
        aae(gmfs[..., 2].sum(axis=1), [[0.3946015, 0.5385107]])  # SA(1.0)
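This variant of to_gmfs takes a single crosscorr keyword and returns the array directly rather than an (imts, gmfs) pair, with a leading realization axis as the # shape (R, N, E, M) comment notes. A small shape sketch, under the same 9-site/4-IMT/2-field assumptions as above (R = 1 is read off the single-element nested expected lists):

# Shape sketch for the variant above (R realizations, N sites, E fields, M IMTs).
R, N, E, M = 1, 9, 2, 4
# gmfs.shape == (R, N, E, M) == (1, 9, 2, 4)
# gmfs[..., 0] selects PGA           -> shape (R, N, E)
# .sum(axis=1) sums over the sites   -> shape (R, E) == (1, 2),
# which is why the expected values are nested lists such as
# [[0.3708301, 0.5671011]].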
Example #3
    def read_shakemap(self, haz_sitecol, assetcol):
        """
        Enabled only if there is a shakemap_id parameter in the job.ini.
        Download, unzip, parse USGS shakemap files and build a corresponding
        set of GMFs which are then filtered with the hazard site collection
        and stored in the datastore.
        """
        oq = self.oqparam
        E = oq.number_of_ground_motion_fields
        oq.risk_imtls = oq.imtls or self.datastore.parent['oqparam'].imtls
        logging.info('Getting/reducing shakemap')
        with self.monitor('getting/reducing shakemap'):
            # for instance for the test case_shakemap the haz_sitecol
            # has sids in range(0, 26) while sitecol.sids is
            # [8, 9, 10, 11, 13, 15, 16, 17, 18];
            # the total assetcol has 26 assets on the total sites
            # and the reduced assetcol has 9 assets on the reduced sites
            smap = oq.shakemap_id if oq.shakemap_id else numpy.load(
                oq.inputs['shakemap'])
            sitecol, shakemap, discarded = get_sitecol_shakemap(
                smap, oq.imtls, haz_sitecol,
                oq.asset_hazard_distance['default'],
                oq.discard_assets)
            if len(discarded):
                self.datastore['discarded'] = discarded
            assetcol.reduce_also(sitecol)

        logging.info('Building GMFs')
        with self.monitor('building/saving GMFs'):
            imts, gmfs = to_gmfs(
                shakemap, oq.spatial_correlation, oq.cross_correlation,
                oq.site_effects, oq.truncation_level, E, oq.random_seed,
                oq.imtls)
            save_gmf_data(self.datastore, sitecol, gmfs, imts)
        return sitecol, assetcol
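For reference, the to_gmfs call above maps one-to-one onto oqparam attributes read from the job.ini. Below is a minimal standalone sketch of an equivalent call; the parameter values are illustrative assumptions, the shakemap structured array would come from get_sitecol_shakemap (or be built by hand as in the test_matrices examples), and the import path may differ between engine versions.

# from openquake.hazardlib.shakemap import to_gmfs   # import path is an assumption
imts, gmfs = to_gmfs(
    shakemap,       # structured array with lon/lat/vs30/val/std fields
    'yes',          # oq.spatial_correlation
    'yes',          # oq.cross_correlation
    True,           # oq.site_effects
    3,              # oq.truncation_level
    100,            # E = oq.number_of_ground_motion_fields
    42,             # oq.random_seed
    imtls)          # oq.imtls, the intensity measure types and levels
# gmfs has shape (N, E, M) and is what save_gmf_data stores per site.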
Example #4
File: base.py, Project: cbworden/oq-engine
    def read_shakemap(self, haz_sitecol, assetcol):
        """
        Enabled only if there is a shakemap_id parameter in the job.ini.
        Download, unzip, parse USGS shakemap files and build a corresponding
        set of GMFs which are then filtered with the hazard site collection
        and stored in the datastore.
        """
        oq = self.oqparam
        E = oq.number_of_ground_motion_fields
        oq.risk_imtls = oq.imtls or self.datastore.parent['oqparam'].imtls
        extra = self.riskmodel.get_extra_imts(oq.risk_imtls)
        if extra:
            logging.warning('There are risk functions for not available IMTs '
                            'which will be ignored: %s' % extra)

        logging.info('Getting/reducing shakemap')
        with self.monitor('getting/reducing shakemap'):
            smap = oq.shakemap_id if oq.shakemap_id else numpy.load(
                oq.inputs['shakemap'])
            sitecol, shakemap = get_sitecol_shakemap(
                smap, oq.imtls, haz_sitecol, oq.asset_hazard_distance
                or oq.region_grid_spacing)
            assetcol = assetcol.reduce_also(sitecol)

        logging.info('Building GMFs')
        with self.monitor('building/saving GMFs'):
            gmfs = to_gmfs(shakemap, oq.cross_correlation, oq.site_effects,
                           oq.truncation_level, E, oq.random_seed, oq.imtls)
            save_gmf_data(self.datastore, sitecol, gmfs)
            events = numpy.zeros(E, readinput.stored_event_dt)
            events['eid'] = numpy.arange(E, dtype=U64)
            self.datastore['events'] = events
        return sitecol, assetcol
Example #5
    def read_shakemap(self):
        """
        Enabled only if there is a shakemap_id parameter in the job.ini.
        Download, unzip, parse USGS shakemap files and build a corresponding
        set of GMFs which are then filtered with the hazard site collection
        and stored in the datastore.
        """
        oq = self.oqparam
        E = oq.number_of_ground_motion_fields
        haz_sitecol = self.datastore.parent['sitecol']

        logging.info('Getting/reducing shakemap')
        with self.monitor('getting/reducing shakemap'):
            smap = oq.shakemap_id if oq.shakemap_id else numpy.load(
                oq.inputs['shakemap'])
            self.sitecol, shakemap = get_sitecol_shakemap(
                smap, haz_sitecol, oq.asset_hazard_distance
                or oq.region_grid_spacing)

        logging.info('Building GMFs')
        with self.monitor('building/saving GMFs'):
            gmfs = to_gmfs(shakemap, oq.site_effects, oq.truncation_level, E,
                           oq.random_seed)
            save_gmf_data(self.datastore, self.sitecol, gmfs)
            events = numpy.zeros(E, readinput.stored_event_dt)
            events['eid'] = numpy.arange(E, dtype=U64)
            self.datastore['events'] = events
Example #6
def mean_gmf(shakemap):
    gmfs = to_gmfs(shakemap,
                   site_effects=True,
                   trunclevel=3,
                   num_gmfs=10,
                   seed=42)
    return [gmfs[..., i].mean() for i in range(len(imts))]
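A brief usage sketch for the helper above, assuming shakemap is a structured array built as in the test_matrices examples and imts is the module-level list of intensity measure types (four of them in these tests):

means = mean_gmf(shakemap)       # one mean ground-motion value per IMT
assert len(means) == len(imts)   # == 4 in the tests above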
Example #7
File: base.py, Project: gem/oq-engine
    def read_shakemap(self, haz_sitecol, assetcol):
        """
        Enabled only if there is a shakemap_id parameter in the job.ini.
        Download, unzip, parse USGS shakemap files and build a corresponding
        set of GMFs which are then filtered with the hazard site collection
        and stored in the datastore.
        """
        oq = self.oqparam
        E = oq.number_of_ground_motion_fields
        oq.risk_imtls = oq.imtls or self.datastore.parent['oqparam'].imtls
        extra = self.riskmodel.get_extra_imts(oq.risk_imtls)
        if extra:
            logging.warning('There are risk functions for not available IMTs '
                            'which will be ignored: %s' % extra)

        logging.info('Getting/reducing shakemap')
        with self.monitor('getting/reducing shakemap'):
            smap = oq.shakemap_id if oq.shakemap_id else numpy.load(
                oq.inputs['shakemap'])
            sitecol, shakemap, discarded = get_sitecol_shakemap(
                smap, oq.imtls, haz_sitecol,
                oq.asset_hazard_distance['default'],
                oq.discard_assets)
            if len(discarded):
                self.datastore['discarded'] = discarded
            assetcol = assetcol.reduce_also(sitecol)

        logging.info('Building GMFs')
        with self.monitor('building/saving GMFs'):
            imts, gmfs = to_gmfs(
                shakemap, oq.spatial_correlation, oq.cross_correlation,
                oq.site_effects, oq.truncation_level, E, oq.random_seed,
                oq.imtls)
            save_gmf_data(self.datastore, sitecol, gmfs, imts)
        return sitecol, assetcol
Example #8
    def read_shakemap(self, haz_sitecol, assetcol):
        """
        Enabled only if there is a shakemap_id parameter in the job.ini.
        Download, unzip, parse USGS shakemap files and build a corresponding
        set of GMFs which are then filtered with the hazard site collection
        and stored in the datastore.
        """
        oq = self.oqparam
        E = oq.number_of_ground_motion_fields
        oq.risk_imtls = oq.imtls or self.datastore.parent['oqparam'].imtls
        logging.info('Getting/reducing shakemap')
        with self.monitor('getting/reducing shakemap'):
            # for instance for the test case_shakemap the haz_sitecol
            # has sids in range(0, 26) while sitecol.sids is
            # [8, 9, 10, 11, 13, 15, 16, 17, 18];
            # the total assetcol has 26 assets on the total sites
            # and the reduced assetcol has 9 assets on the reduced sites
            smap = oq.shakemap_id if oq.shakemap_id else numpy.load(
                oq.inputs['shakemap'])
            sitecol, shakemap, discarded = get_sitecol_shakemap(
                smap, oq.imtls, haz_sitecol,
                oq.asset_hazard_distance['default'],
                oq.discard_assets)
            if len(discarded):
                self.datastore['discarded'] = discarded
            assetcol.reduce_also(sitecol)

        logging.info('Building GMFs')
        with self.monitor('building/saving GMFs'):
            imts, gmfs = to_gmfs(
                shakemap, oq.spatial_correlation, oq.cross_correlation,
                oq.site_effects, oq.truncation_level, E, oq.random_seed,
                oq.imtls)
            save_gmf_data(self.datastore, sitecol, gmfs, imts)
        return sitecol, assetcol
Example #9
def mean_std(shakemap, site_effects):
    _, gmfs = to_gmfs(shakemap,
                      'yes',
                      'yes',
                      site_effects,
                      trunclevel=3,
                      num_gmfs=1000,
                      seed=42)
    return gmfs.mean(axis=1), numpy.log(gmfs).std(axis=1)
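Since this version of to_gmfs returns an (imts, gmfs) pair with gmfs of shape (N, E, M) (see the # shape (N, E, M) comments in the tests above), the axis=1 reductions collapse the 1000 sampled fields into per-site, per-IMT statistics. An illustration with a dummy array of the same layout:

import numpy

# Dummy stand-in for gmfs: N=9 sites, E=1000 sampled fields, M=4 IMTs.
gmfs = numpy.random.lognormal(size=(9, 1000, 4))
mean, logstd = gmfs.mean(axis=1), numpy.log(gmfs).std(axis=1)
assert mean.shape == logstd.shape == (9, 4)   # one value per (site, IMT)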
Example #10
File: base.py, Project: jotru/oq-engine
    def read_shakemap(self, haz_sitecol, assetcol):
        """
        Enabled only if there is a shakemap_id parameter in the job.ini.
        Download, unzip, parse USGS shakemap files and build a corresponding
        set of GMFs which are then filtered with the hazard site collection
        and stored in the datastore.
        """
        oq = self.oqparam
        E = oq.number_of_ground_motion_fields
        oq.risk_imtls = oq.imtls or self.datastore.parent['oqparam'].imtls
        logging.info('Getting/reducing shakemap')
        with self.monitor('getting/reducing shakemap'):
            # for instance for the test case_shakemap the haz_sitecol
            # has sids in range(0, 26) while sitecol.sids is
            # [8, 9, 10, 11, 13, 15, 16, 17, 18];
            # the total assetcol has 26 assets on the total sites
            # and the reduced assetcol has 9 assets on the reduced sites
            smap = oq.shakemap_id if oq.shakemap_id else numpy.load(
                oq.inputs['shakemap'])
            sitecol, shakemap, discarded = get_sitecol_shakemap(
                smap, oq.imtls, haz_sitecol,
                oq.asset_hazard_distance['default'], oq.discard_assets)
            if len(discarded):
                self.datastore['discarded'] = discarded
            assetcol.reduce_also(sitecol)

        logging.info('Building GMFs')
        with self.monitor('building/saving GMFs'):
            imts, gmfs = to_gmfs(shakemap, oq.spatial_correlation,
                                 oq.cross_correlation, oq.site_effects,
                                 oq.truncation_level, E, oq.random_seed,
                                 oq.imtls)
            N, E, M = gmfs.shape
            events = numpy.zeros(E, rupture.events_dt)
            events['id'] = numpy.arange(E, dtype=U32)
            self.datastore['events'] = events
            # convert into an array of dtype gmf_data_dt
            lst = [(sitecol.sids[s], ei) + tuple(gmfs[s, ei])
                   for s in numpy.arange(N, dtype=U32)
                   for ei, event in enumerate(events)]
            oq.hazard_imtls = {imt: [0] for imt in imts}
            data = numpy.array(lst, oq.gmf_data_dt())
            create_gmf_data(self.datastore, len(imts), data=data)
        return sitecol, assetcol
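The list comprehension above flattens the (N, E, M) array into one record per (site, event) pair: sid, event id, then one ground-motion value per IMT, for N * E records in total. A self-contained sketch of that row layout with dummy data (the real dtype is built by oq.gmf_data_dt() and is not reproduced here):

import numpy

N, E, M = 9, 2, 4                              # sites, events, IMTs
sids = numpy.arange(N, dtype=numpy.uint32)
gmfs = numpy.zeros((N, E, M))                  # dummy ground-motion values
lst = [(sids[s], ei) + tuple(gmfs[s, ei])
       for s in range(N) for ei in range(E)]
assert len(lst) == N * E                       # one row per (site, event)
assert len(lst[0]) == 2 + M                    # sid, eid, then M gmvs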
Example #11
def mean_gmf(shakemap):
    _, gmfs = to_gmfs(
        shakemap, 'yes', 'yes', site_effects=True, trunclevel=3,
        num_gmfs=10, seed=42)
    return [gmfs[..., i].mean() for i in range(len(imts))]
Example #12
    def test_matrices(self):

        # distance matrix
        lons = numpy.array([84., 84., 84., 85.5, 85.5, 85.5, 87., 87., 87.])
        lats = numpy.array([26., 27.5, 29., 26., 27.5, 29., 26., 27.5, 29.])
        dmatrix = geo.geodetic.distance_matrix(lons, lats)
        aae(dmatrix.sum(), 18539.6131407)

        # spatial correlation
        sca = spatial_correlation_array(dmatrix, imts, 'yes')
        aae(sca.sum(), 36.000370229)

        # spatial covariance
        std = numpy.array([(0.5, 0.52, 0.64, 0.73)] * 9, imt_dt)  # 9 sites
        scov = spatial_covariance_array([std[n] for n in imt_dt.names], sca)
        aae(scov.sum(), 13.166200147)

        # cross correlation
        ccor = cross_correlation_matrix(imts, 'yes')
        aae(ccor.sum(), 10.49124788)

        # cholesky decomposition
        L = cholesky(scov, ccor)
        self.assertEqual(L.shape, (36, 36))
        aae(L.sum(), 30.5121263)

        # intensity
        val = numpy.array([(5.38409665, 3.9383686, 3.55435415, 4.37692394)] *
                          9, imt_dt)

        shakemap = numpy.zeros(9, shakemap_dt)  # 9 sites
        shakemap['lon'] = lons
        shakemap['lat'] = lats
        shakemap['vs30'] = numpy.array([301.17] * 9)
        shakemap['val'] = val
        shakemap['std'] = std
        _, gmfs = to_gmfs(shakemap,
                          'yes',
                          'no',
                          site_effects=False,
                          trunclevel=3,
                          num_gmfs=2,
                          seed=42)
        # shape (N, E, M)
        aae(gmfs[..., 0].sum(axis=0), [0.3708301, 0.5671011])  # PGA

        _, gmfs = to_gmfs(shakemap,
                          'yes',
                          'yes',
                          site_effects=True,
                          trunclevel=3,
                          num_gmfs=2,
                          seed=42)
        aae(gmfs[..., 0].sum(axis=0), [0.5127146, 0.7800232])  # PGA
        aae(gmfs[..., 2].sum(axis=0), [0.4932519, 0.6731384])  # SA(1.0)

        # disable spatial correlation
        _, gmfs = to_gmfs(shakemap,
                          'no',
                          'no',
                          site_effects=False,
                          trunclevel=3,
                          num_gmfs=2,
                          seed=42)
        # shape (N, E, M)
        aae(gmfs[..., 0].sum(axis=0), [0.370832, 0.5670994])  # PGA

        _, gmfs = to_gmfs(shakemap,
                          'no',
                          'yes',
                          site_effects=True,
                          trunclevel=3,
                          num_gmfs=2,
                          seed=42)
        aae(gmfs[..., 0].sum(axis=0), [0.5127171, 0.7800206])  # PGA
        aae(gmfs[..., 2].sum(axis=0), [0.4932519, 0.6731384])  # SA(1.0)

        # set stddev to zero
        shakemap['std'] = 0
        with self.assertRaises(ValueError) as ctx:
            to_gmfs(shakemap,
                    'no',
                    'yes',
                    site_effects=True,
                    trunclevel=3,
                    num_gmfs=2,
                    seed=42)
        self.assertIn('stddev==0 for IMT=PGA', str(ctx.exception))