def test_get_mean_and_stddevs_good(self):
     """
     Tests the full execution of the GMPE tables for valid data
     """
     gsim = GMPETable(gmpe_table=self.TABLE_FILE)
     rctx = RuptureContext()
     rctx.mag = 6.0
     dctx = DistancesContext()
     # Test values at the given distances and those outside range
     dctx.rjb = np.array([0.5, 1.0, 10.0, 100.0, 500.0])
     sctx = SitesContext()
     sctx.vs30 = 1000. * np.ones(5)
     stddevs = [const.StdDev.TOTAL]
     expected_mean = np.array([2.0, 2.0, 1.0, 0.5, 1.0E-20])
     expected_sigma = 0.25 * np.ones(5)
     # PGA
     mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx,
                                             imt_module.PGA(), stddevs)
     np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5)
     np.testing.assert_array_almost_equal(sigma[0], expected_sigma, 5)
     # SA
     mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx,
                                             imt_module.SA(1.0), stddevs)
     np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5)
     np.testing.assert_array_almost_equal(sigma[0], 0.4 * np.ones(5), 5)
     # PGV
     mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx,
                                             imt_module.PGV(), stddevs)
     np.testing.assert_array_almost_equal(np.exp(mean), 10. * expected_mean,
                                          5)
     np.testing.assert_array_almost_equal(sigma[0], expected_sigma, 5)
 def test_get_mean_and_stddevs(self):
     """
     Tests mean and standard deviations without amplification
     """
     gsim = GMPETable(gmpe_table=self.TABLE_FILE)
     rctx = RuptureContext()
     rctx.mag = 6.0
     dctx = DistancesContext()
     # Test values at the given distances and those outside range
     dctx.rjb = np.array([0.5, 1.0, 10.0, 100.0, 500.0])
     sctx = SitesContext()
     stddevs = [const.StdDev.TOTAL]
     expected_mean = np.array([2.0, 2.0, 1.0, 0.5, 1.0E-20])
     # PGA
     mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx,
                                             imt_module.PGA(), stddevs)
     np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5)
     np.testing.assert_array_almost_equal(sigma[0], 0.5 * np.ones(5), 5)
     # SA
     mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx,
                                             imt_module.SA(1.0), stddevs)
     np.testing.assert_array_almost_equal(np.exp(mean), expected_mean, 5)
     np.testing.assert_array_almost_equal(sigma[0], 0.8 * np.ones(5), 5)
     # PGV
     mean, sigma = gsim.get_mean_and_stddevs(sctx, rctx, dctx,
                                             imt_module.PGV(), stddevs)
     np.testing.assert_array_almost_equal(np.exp(mean), 10. * expected_mean,
                                          5)
     np.testing.assert_array_almost_equal(sigma[0], 0.5 * np.ones(5), 5)
Example #3
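The amplification and interpolation tests below compare against values computed with a small midpoint helper that is not defined in this snippet. A minimal sketch, assuming it simply returns the arithmetic mean of its two arguments:

def midpoint(low, high):
    # Hypothetical helper: arithmetic mean of the two bracketing table
    # values (e.g. the amplification factors at 400 m/s and 1000 m/s)
    return (low + high) / 2.0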
 def test_get_amplification_factors(self):
     """
     Tests the amplification tables
     """
     ctx = RuptureContext()
     ctx.rake = 45.0
     ctx.mag = 6.0
     # Takes distances at the values found in the table (not checking
     # distance interpolation)
     ctx.rjb = np.copy(self.amp_table.distances[:, 0, 0])
     # Test Vs30 of 700.0 m/s, the midpoint between the 400 m/s and
     # 1000 m/s values specified in the table
     ctx.vs30 = 700.0 * np.ones_like(ctx.rjb)
     stddevs = [const.StdDev.TOTAL]
     expected_mean = np.ones_like(ctx.rjb)
     # Check PGA and PGV
     mean_amp, sigma_amp = self.amp_table.get_amplification_factors(
         imt_module.PGA(), ctx, ctx.rjb, stddevs)
     np.testing.assert_array_almost_equal(
         mean_amp,
         midpoint(1.0, 1.5) * expected_mean)
     np.testing.assert_array_almost_equal(sigma_amp[0], 0.9 * expected_mean)
     mean_amp, sigma_amp = self.amp_table.get_amplification_factors(
         imt_module.PGV(), ctx, ctx.rjb, stddevs)
     np.testing.assert_array_almost_equal(
         mean_amp,
         midpoint(1.0, 0.5) * expected_mean)
     np.testing.assert_array_almost_equal(sigma_amp[0], 0.9 * expected_mean)
     # Sa (0.5)
     mean_amp, sigma_amp = self.amp_table.get_amplification_factors(
         imt_module.SA(0.5), ctx, ctx.rjb, stddevs)
     np.testing.assert_array_almost_equal(
         mean_amp,
         midpoint(1.0, 2.0) * expected_mean)
     np.testing.assert_array_almost_equal(sigma_amp[0], 0.9 * expected_mean)
 def test_retreival_tables_good_no_interp(self):
     """
     Tests the retrieval of the IML tables for 'good' conditions without
     applying magnitude interpolations
     """
     gsim = GMPETable(gmpe_table=self.TABLE_FILE)
     # PGA
     np.testing.assert_array_almost_equal(
         gsim._return_tables(6.0, imt_module.PGA(), "IMLs"),
         np.array([2., 1., 0.5]))
     # PGV
     np.testing.assert_array_almost_equal(
         gsim._return_tables(6.0, imt_module.PGV(), "IMLs"),
         np.array([20., 10., 5.]), 5)
     # SA(1.0)
     np.testing.assert_array_almost_equal(
         gsim._return_tables(6.0, imt_module.SA(1.0), "IMLs"),
         np.array([2.0, 1., 0.5]))
     # Also for standard deviations
     np.testing.assert_array_almost_equal(
         gsim._return_tables(6.0, imt_module.PGA(), "Total"),
         0.5 * np.ones(3))
     np.testing.assert_array_almost_equal(
         gsim._return_tables(6.0, imt_module.SA(1.0), "Total"),
         0.8 * np.ones(3))
Example #5
 def test_retreival_tables_good_interp(self):
     """
     Tests the retrieval of the IML tables for 'good' conditions with
     magnitude interpolations
     """
     gsim = GMPETable(gmpe_table=self.TABLE_FILE)
     expected_table_pgv = np.array([midpoint(20., 40.),
                                    midpoint(10., 20.),
                                    midpoint(5., 10.)])
     np.testing.assert_array_almost_equal(
         gsim._return_tables(6.5, imt_module.PGV(), "IMLs"),
         expected_table_pgv,
         5)
     expected_table_sa1 = np.array([midpoint(2., 4.),
                                    midpoint(1., 2.),
                                    midpoint(0.5, 1.)])
     np.testing.assert_array_almost_equal(
         gsim._return_tables(6.5, imt_module.SA(1.0), "IMLs"),
         expected_table_sa1)
Example #6
from shakelib.gmpe.nga_east import NGAEast

home_dir = os.path.dirname(os.path.abspath(__file__))
data_dir = os.path.join(home_dir, 'nga_east_data')

stddev_types = [StdDev.TOTAL]
gmpe = NGAEast()

dx = base.DistancesContext()
dx.rrup = np.logspace(-1, np.log10(2000), 100)

rx = base.RuptureContext()
sx = base.SitesContext()

IMTS = [imt.PGA(), imt.PGV(), imt.SA(0.3), imt.SA(1.0), imt.SA(3.0)]

MAGS = [3, 5, 6, 7]

VS30 = [180, 380, 760, 2000]


def update_results():
    # To build the data for testing
    result = {}
    for i in IMTS:
        ikey = str(i)
        result[ikey] = {}
        for mag in MAGS:
            rx.mag = mag
            result[ikey][str(mag)] = {}
Example #7
def _get_extent_from_multigmpe(rupture, config=None):
    """
    Use MultiGMPE to determine extent
    """
    (clon, clat) = _rupture_center(rupture)
    origin = rupture.getOrigin()
    if config is not None:
        gmpe = MultiGMPE.from_config(config)
        gmice = get_object_from_config('gmice', 'modeling', config)
        if imt.SA in gmice.DEFINED_FOR_INTENSITY_MEASURE_TYPES:
            default_imt = imt.SA(1.0)
        elif imt.PGV in gmice.DEFINED_FOR_INTENSITY_MEASURE_TYPES:
            default_imt = imt.PGV()
        else:
            default_imt = imt.PGA()
    else:
        # Put in some default values for conf
        config = {
            'extent': {
                'mmi': {
                    'threshold': 4.5,
                    'mindist': 100,
                    'maxdist': 1000
                }
            }
        }

        # Generic GMPEs choices based only on active vs stable
        # as defaults...
        stable = is_stable(origin.lon, origin.lat)
        if not stable:
            ASK14 = AbrahamsonEtAl2014()
            CB14 = CampbellBozorgnia2014()
            CY14 = ChiouYoungs2014()
            gmpes = [ASK14, CB14, CY14]
            site_gmpes = None
            weights = [1/3.0, 1/3.0, 1/3.0]
            gmice = WGRW12()
        else:
            Fea96 = FrankelEtAl1996MwNSHMP2008()
            Tea97 = ToroEtAl1997MwNSHMP2008()
            Sea02 = SilvaEtAl2002MwNSHMP2008()
            C03 = Campbell2003MwNSHMP2008()
            TP05 = TavakoliPezeshk2005MwNSHMP2008()
            AB06p = AtkinsonBoore2006Modified2011()
            Pea11 = PezeshkEtAl2011()
            Atk08p = Atkinson2008prime()
            Sea01 = SomervilleEtAl2001NSHMP2008()
            gmpes = [Fea96, Tea97, Sea02, C03,
                     TP05, AB06p, Pea11, Atk08p, Sea01]
            site_gmpes = [AB06p]
            weights = [0.16, 0.0, 0.0, 0.17, 0.17, 0.3, 0.2, 0.0, 0.0]
            gmice = AK07()

        gmpe = MultiGMPE.from_list(
            gmpes, weights, default_gmpes_for_site=site_gmpes)
        default_imt = imt.SA(1.0)

    min_mmi = config['extent']['mmi']['threshold']
    sd_types = [const.StdDev.TOTAL]

    # Distance context
    dx = DistancesContext()
    # This imposes the minimum/maximum distances given by the 'mindist'
    # and 'maxdist' config settings (100 and 1000 km by default)
    d_min = config['extent']['mmi']['mindist']
    d_max = config['extent']['mmi']['maxdist']
    dx.rjb = np.logspace(np.log10(d_min), np.log10(d_max), 2000)
    # Details don't matter for this; assuming vertical surface rupturing fault
    # with epicenter at the surface.
    dx.rrup = dx.rjb
    dx.rhypo = dx.rjb
    dx.repi = dx.rjb
    dx.rx = np.zeros_like(dx.rjb)
    dx.ry0 = np.zeros_like(dx.rjb)
    dx.rvolc = np.zeros_like(dx.rjb)

    # Sites context
    sx = SitesContext()
    # Set to soft soil conditions
    sx.vs30 = np.full_like(dx.rjb, 180)
    sx = MultiGMPE.set_sites_depth_parameters(sx, gmpe)
    sx.vs30measured = np.full_like(sx.vs30, False, dtype=bool)
    sx = Sites._addDepthParameters(sx)
    sx.backarc = np.full_like(sx.vs30, False, dtype=bool)

    # Rupture context
    rx = RuptureContext()
    rx.mag = origin.mag
    rx.rake = 0.0
    # From WC94...
    rx.width = 10**(-0.76 + 0.27*rx.mag)
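    # e.g. for a magnitude 7.0 event this gives 10 ** (-0.76 + 0.27 * 7.0),
    # or roughly 13.5 km of down-dip rupture width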
    rx.dip = 90.0
    rx.ztor = origin.depth
    rx.hypo_depth = origin.depth

    gmpe_imt_mean, _ = gmpe.get_mean_and_stddevs(
        sx, rx, dx, default_imt, sd_types)

    # Convert to MMI
    gmpe_to_mmi, _ = gmice.getMIfromGM(gmpe_imt_mean, default_imt)

    # Largest distance at which the predicted MMI still exceeds the
    # threshold; this sets the extent radius
    dists_exceed_mmi = dx.rjb[gmpe_to_mmi > min_mmi]
    if len(dists_exceed_mmi):
        mindist_km = np.max(dists_exceed_mmi)
    else:
        mindist_km = d_min

    # Get a projection
    proj = OrthographicProjection(clon - 4, clon + 4, clat + 4, clat - 4)
    if isinstance(rupture, (QuadRupture, EdgeRupture)):
        ruptx, rupty = proj(
            rupture.lons[~np.isnan(rupture.lons)],
            rupture.lats[~np.isnan(rupture.lats)]
        )
    else:
        ruptx, rupty = proj(clon, clat)

    xmin = np.nanmin(ruptx) - mindist_km
    ymin = np.nanmin(rupty) - mindist_km
    xmax = np.nanmax(ruptx) + mindist_km
    ymax = np.nanmax(rupty) + mindist_km

    # Put a limit on range of aspect ratio
    dx = xmax - xmin
    dy = ymax - ymin
    ar = dy / dx
    if ar > 1.2:
        # Inflate x
        dx_target = dy / 1.2
        ddx = dx_target - dx
        xmax = xmax + ddx / 2
        xmin = xmin - ddx / 2
    if ar < 0.83:
        # Inflate y
        dy_target = dx * 0.83
        ddy = dy_target - dy
        ymax = ymax + ddy / 2
        ymin = ymin - ddy / 2

    lonmin, latmin = proj(np.array([xmin]), np.array([ymin]), reverse=True)
    lonmax, latmax = proj(np.array([xmax]), np.array([ymax]), reverse=True)

    #
    # Round coordinates to the nearest minute -- that should make the
    # output grid register with common grid resolutions (60c, 30c,
    # 15c, 7.5c)
    #
    logging.debug("Extent: %f, %f, %f, %f" %
                  (lonmin, lonmax, latmin, latmax))
    return (_round_coord(lonmin[0]), _round_coord(lonmax[0]),
            _round_coord(latmin[0]), _round_coord(latmax[0]))
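The _round_coord helper called above is not included in this snippet; a minimal sketch, assuming it just rounds a coordinate to the nearest arc-minute as the comment describes:

def _round_coord(coord):
    # Hypothetical helper: round a longitude/latitude value to the nearest
    # arc-minute (1/60 degree) so the extent registers with common grids
    arcmin = 1.0 / 60.0
    return np.round(coord / arcmin) * arcmin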
Example #8
    def get_mean_and_stddevs(self, sites, rx, dists, imt, stddev_types):
        # List of GMPE weights, which is the product of the branch weights
        # for the seed models versus the NGA East resampled models and the
        # weights for the individual GMPEs as defined by Petersen et al. (2019)
        #
        # Note that the NGA East resampled models are a function of spectral
        # period.
        #
        # NGA East Seeds (1/3)
        #     ├── B_bca10d (0.06633), wts = 0.333 * 0.06633 = 0.02208789
        #     ├── B_ab95 (0.02211), wts = 0.333 * 0.02211 = 0.00736263
        #     ...
        # NGA East Resampled or "USGS" (2/3)
        #     ├── Model 1 (0.1009 for PGA), wts = 0.667 * 0.1009 = 0.0673003
        #     ├── Model 2 (0.1606 for PGA), wts = 0.667 * 0.1606 = 0.1071202
        #     ...
        #
        wts = [0] * len(self.gmpes)

        # Is IMT PGA or PGV?
        is_pga = imt == IMT.PGA()
        is_pgv = imt == IMT.PGV()

        # Is magnitude less than 4? If so, we will need to set it to 4.0 and
        # then extrapolate the tables at the end.
        # But... in the brave new world of new OpenQuake, sites, rx, and
        # dists are all exactly the same object, so when we copy rx to
        # rup, and change it, as well as when we change the distances
        # below, we need to do it all in rup, then pass rup as all three
        # contexts when we call the gmpe.get_mean_and_stddevs()
        rup = copy.deepcopy(rx)
        if rup.mag < 4.0:
            is_small_mag = True
            delta_mag = rup.mag - 4.0
            rup.mag = 4.0
        else:
            is_small_mag = False

        for i, tp in enumerate(self.ALL_TABLE_PATHS):
            if 'usgs' in tp:
                # Get model number from i-th path using regex
                mod_num = int(re.search(r'\d+', tp).group())
                coefs = np.array(self.NGA_EAST_USGS.iloc[mod_num - 1])
                # Is the IMT PGA, PGV, or SA?
                if is_pga:
                    iweight = coefs[-2]
                elif is_pgv:
                    iweight = coefs[-1]
                else:
                    # For SA, need to interpolate; we'll use log-period and
                    # linear-weight interpolation.
                    iweight = np.interp(
                        np.log(imt.period), np.log(self.per_array),
                        coefs[self.per_idx_start:self.per_idx_end])
                wts[i] = self.NGA_EAST_USGS_WEIGHT * iweight
            else:
                # Strip off the cruft to get the string we need to match
                str_match = tp.replace('nga_east_', '').replace('.hdf5', '')
                matched = self.NGA_EAST_SEEDS[
                    self.NGA_EAST_SEEDS['model'] == str_match]
                if len(matched):
                    iweight = matched.iloc[0, 1]
                    wts[i] = self.NGA_EAST_SEED_WEIGHT * iweight

        total_gmpe_weights = self.sigma_weights * wts

        if not np.allclose(np.sum(total_gmpe_weights), 1.0):
            raise ValueError('Weights must sum to 1.0.')

        mean = np.full_like(sites.vs30, 0)
        stddevs = []
        for i in range(len(stddev_types)):
            stddevs.append(np.full_like(sites.vs30, 0))

        # Apply the maximum distance to rup.rrup. Values beyond MAX_RRUP are
        # flagged below, so record the mask before clipping in place.
        beyond_max_rrup = rup.rrup > MAX_RRUP
        rup.rrup = np.clip(rup.rrup, 0, MAX_RRUP)

        #
        # Some models don't have PGV terms, so we will make PSA for them
        # and then use the conditional conversion to get PGV
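        # (AbrahamsonBhasin2020 is a conditional model that predicts PGV
        # from spectral acceleration at a magnitude-dependent reference
        # period, obtained below via getTref())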
        #
        if is_pgv:
            ab2020 = AbrahamsonBhasin2020(rup.mag)
            vimt = IMT.SA(ab2020.getTref())

        # Loop over gmpes
        for i, gm in enumerate(self.gmpes):
            if is_pgv:
                # PGV requested: check whether this GMPE has a PGV table
                try:
                    _ = _return_tables(gm, rup.mag, imt, "IMLs")
                except KeyError:
                    #
                    # No table for PGV, compute vimt, then convert to PGV
                    #
                    vmean, vstddevs = gm.get_mean_and_stddevs(
                        rup, rup, rup, vimt, stddev_types)
                    tmean, tstddevs = ab2020.getPGVandSTDDEVS(
                        vmean, vstddevs, stddev_types, rup.rrup, rup.vs30)
                except Exception:
                    logging.error("Unexpected error: %s", sys.exc_info()[0])
                else:
                    #
                    # Table exists for PGV, proceed normally
                    #
                    tmean, tstddevs = gm.get_mean_and_stddevs(
                        rup, rup, rup, imt, stddev_types)
            else:
                tmean, tstddevs = gm.get_mean_and_stddevs(
                    rup, rup, rup, imt, stddev_types)

            mean += tmean * total_gmpe_weights[i]
            for j, sd in enumerate(tstddevs):
                stddevs[j] += sd * total_gmpe_weights[i]

        # Flag values at distances beyond the range for which NGA East
        # was defined
        mean[beyond_max_rrup] = -999.0

        # Do we need to extrapolate for small magnitude factor?
        if is_small_mag:
            if is_pga:
                slopes = np.interp(np.log(rup.rrup), np.log(self.SMALL_M_DIST),
                                   self.SMALL_M_SLOPE_PGA)
            elif is_pgv:
                slopes = np.interp(np.log(rup.rrup), np.log(self.SMALL_M_DIST),
                                   self.SMALL_M_SLOPE_PGV)
            else:
                interp_obj = RectBivariateSpline(np.log(self.SMALL_M_DIST),
                                                 np.log(self.SMALL_M_PER),
                                                 self.SMALL_M_SLOPE,
                                                 kx=1,
                                                 ky=1)
                slopes = interp_obj.ev(np.log(rup.rrup), np.log(imt.period))
            mean = mean + slopes * delta_mag

        return mean, stddevs
Example #9
    def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
        # List of GMPE weights, which is the product of the branch weights
        # for the seed models versus the NGA East resampled models and the
        # weights for the individual GMPEs as defined by Petersen et al. (2019)
        #
        # Note that the NGA East resampled models are a function of spectral
        # period.
        #
        # NGA East Seeds (1/3)
        #     ├── B_bca10d (0.06633), wts = 0.333 * 0.06633 = 0.02208789
        #     ├── B_ab95 (0.02211), wts = 0.333 * 0.02211 = 0.00736263
        #     ...
        # NGA East Resampled or "USGS" (2/3)
        #     ├── Model 1 (0.1009 for PGA), wts = 0.667 * 0.1009 = 0.0673003
        #     ├── Model 2 (0.1606 for PGA), wts = 0.667 * 0.1606 = 0.1071202
        #     ...
        #
        wts = [0] * len(self.gmpes)

        # Is IMT PGA or PGV?
        is_pga = imt == IMT.PGA()
        is_pgv = imt == IMT.PGV()

        # Is magnitude less than 4? If so, we will need to set it to 4.0 and
        # then extrapolate the tables at the end.
        if rup.mag < 4.0:
            is_small_mag = True
            delta_mag = rup.mag - 4.0
            rup.mag = 4.0
        else:
            is_small_mag = False

        for i, tp in enumerate(self.ALL_TABLE_PATHS):
            if 'usgs' in tp:
                # Get model number from i-th path using regex
                mod_num = int(re.search(r'\d+', tp).group())
                coefs = np.array(
                    self.NGA_EAST_USGS.iloc[mod_num - 1]
                )
                # Is the IMT PGA, PGV, or SA?
                if is_pga:
                    iweight = coefs[-2]
                elif is_pgv:
                    iweight = coefs[-1]
                else:
                    # For SA, need to interpolate; we'll use log-period and
                    # linear-weight interpolation.
                    iweight = np.interp(
                        np.log(imt.period),
                        np.log(self.per_array),
                        coefs[self.per_idx_start:self.per_idx_end]
                    )
                wts[i] = self.NGA_EAST_USGS_WEIGHT * iweight
            else:
                # Strip off the cruft to get the string we need to match
                str_match = tp.replace('nga_east_', '').replace('.hdf5', '')
                matched = self.NGA_EAST_SEEDS[
                    self.NGA_EAST_SEEDS['model'] == str_match]
                if len(matched):
                    iweight = matched.iloc[0, 1]
                    wts[i] = self.NGA_EAST_SEED_WEIGHT * iweight

        total_gmpe_weights = self.sigma_weights * wts

        if not np.allclose(np.sum(total_gmpe_weights), 1.0):
            raise ValueError('Weights must sum to 1.0.')

        mean = np.full_like(sites.vs30, 0)
        stddevs = []
        for i in range(len(stddev_types)):
            stddevs.append(np.full_like(sites.vs30, 0))

        # Apply the maximum distance to dists.rrup. Values beyond MAX_RRUP
        # are flagged below, so record the mask before clipping in place.
        beyond_max_rrup = dists.rrup > MAX_RRUP
        dists.rrup = np.clip(dists.rrup, 0, MAX_RRUP)

        # Since we will be dropping the models that don't have PGV,
        # we now also need to track the total sum of weights for when
        # the imt is PGV so that we can re-distribute the weights.
        if is_pgv:
            twts = []

        # Loop over gmpes
        for i, gm in enumerate(self.gmpes):
            if is_pgv:
                # PGV requested: check whether this GMPE has a PGV table
                try:
                    gm._return_tables(rup.mag, imt, "IMLs")
                except KeyError:
                    continue
                except Exception:
                    logging.error("Unexpected error: %s", sys.exc_info()[0])
            tmean, tstddevs = gm.get_mean_and_stddevs(
                sites, rup, dists, imt, stddev_types)
            mean += tmean * total_gmpe_weights[i]
            for j, sd in enumerate(tstddevs):
                stddevs[j] += sd * total_gmpe_weights[i]
            if is_pgv:
                twts.append(total_gmpe_weights[i])

        if is_pgv:
            # Rescale the PGV weights so that they sum to 1 after dropping
            # the models that are not defined for PGV.
            mean = mean / np.sum(twts)
            for j, sd in enumerate(stddevs):
                stddevs[j] = stddevs[j] / np.sum(twts)

        # Flag values at distances beyond the range for which NGA East
        # was defined
        mean[beyond_max_rrup] = -999.0

        # Do we need to extrapolate for small magnitude factor?
        if is_small_mag:
            if is_pga:
                slopes = np.interp(
                    np.log(dists.rrup),
                    np.log(self.SMALL_M_DIST),
                    self.SMALL_M_SLOPE_PGA)
            elif is_pgv:
                slopes = np.interp(
                    np.log(dists.rrup),
                    np.log(self.SMALL_M_DIST),
                    self.SMALL_M_SLOPE_PGV)
            else:
                interp_obj = RectBivariateSpline(
                    np.log(self.SMALL_M_DIST),
                    np.log(self.SMALL_M_PER),
                    self.SMALL_M_SLOPE, kx=1, ky=1)
                slopes = interp_obj.ev(
                    np.log(dists.rrup),
                    np.log(imt.period)
                )
            mean = mean + slopes * delta_mag

        return mean, stddevs
Example #10
# Print output
print(out)
print(err)
log = str(out) + str(err)

## Read in the output file:
vs30_data = np.genfromtxt('tmp2', usecols=2)

pd.DataFrame(vs30_data).to_csv(
    '/home/eking/Documents/internship/data/Kappa/vs30.csv')

kappa_data = fullfile[' tstar(s) ']

#############    GMPEs     ###################
imt_pga = imt.PGA()
imt_pgv = imt.PGV()
imt_arias = imt.IA()
uncertaintytype = const.StdDev.TOTAL

## Set GMPEs:
zhao2006 = ZhaoEtAl2006SInter()
travasarou = TravasarouEtAl2003()
bssa14 = BooreEtAl2014()

## Set the empty arrays:
median_zhao2006 = np.array([])
median_travasarou = np.array([])
median_bssa14 = np.array([])

sd_zhao2006 = np.array([])
sd_travasarou = np.array([])