Example #1
from numpy import array
from shapely.geometry import Point
from tools.nsha_tools import toYearFraction

def get_events_in_poly(cat, poly, depmin, depmax):
    '''
    Input:
        cat    = earthquake catalogue as a list of event dicts
        poly   = shapely polygon for the source zone
        depmin = minimum hypocentral depth to keep
        depmax = maximum hypocentral depth to keep

    Output:
        mvect  = preferred MW
        mxvect = preferred original magnitudes
        tvect  = datetime array
        dec_tvect = decimal datetime
        ev_dict = event dictionary
    '''

    # set arrays
    mvect = []
    mxvect = []
    tvect = []
    dec_tvect = []
    ev_dict = []

    # now loop through earthquakes in cat
    for ev in cat:

        # check if pt in poly and compile mag and years
        pt = Point(ev['lon'], ev['lat'])
        if pt.within(poly) and ev['dep'] >= depmin and ev['dep'] <= depmax:
            mvect.append(ev['prefmag'])
            mxvect.append(ev['mx_origML'])  # original catalogue mag for Mc model
            tvect.append(ev['datetime'])
            dec_tvect.append(toYearFraction(ev['datetime']))
            ev_dict.append(ev)

    return array(mvect), array(mxvect), array(tvect), array(dec_tvect), ev_dict
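
A minimal usage sketch, assuming a shapely polygon and a small hand-built
catalogue (in the full workflow the event dicts come from the project's
catalogue parsers such as parse_hmtk_cat; coordinates and magnitudes below are
made up for illustration):

from datetime import datetime
from shapely.geometry import Polygon

# hypothetical source zone and two test events; keys match those read above
zone = Polygon([(140., -30.), (145., -30.), (145., -25.), (140., -25.)])
test_cat = [{'lon': 142.0, 'lat': -27.5, 'dep': 10., 'prefmag': 4.2,
             'mx_origML': 4.0, 'datetime': datetime(2005, 6, 1)},
            {'lon': 150.0, 'lat': -27.5, 'dep': 10., 'prefmag': 3.1,
             'mx_origML': 3.0, 'datetime': datetime(2010, 1, 1)}]

mvect, mxvect, tvect, dec_tvect, ev_dict = \
    get_events_in_poly(test_cat, zone, 0., 20.)
# only the first event lies inside the polygon and the 0-20 km depth range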
Example #2
from numpy import array
from shapely.geometry import Point
from tools.nsha_tools import toYearFraction

def get_events_in_poly_simple(cat, poly):
    '''
    As get_events_in_poly above, but without the depth filter: returns the
    preferred MW, original magnitudes, datetimes, decimal datetimes and event
    dicts for all events whose epicentres fall within poly.
    '''

    # set arrays
    mvect = []
    mxvect = []
    tvect = []
    dec_tvect = []
    ev_dict = []

    # now loop through earthquakes in cat
    for ev in cat:

        # check if pt in poly and compile mag and years
        pt = Point(ev['lon'], ev['lat'])
        if pt.within(poly):

            mvect.append(ev['prefmag'])
            mxvect.append(ev['mx_origML'])  # original catalogue mag for Mc model
            tvect.append(ev['datetime'])
            dec_tvect.append(toYearFraction(ev['datetime']))
            ev_dict.append(ev)

    return array(mvect), array(mxvect), array(tvect), array(dec_tvect), ev_dict
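
toYearFraction comes from tools.nsha_tools and is not shown in these excerpts.
A minimal stand-in with the presumed behaviour (calendar datetime to decimal
year) could look like this; the project's own implementation may differ in
detail:

from datetime import datetime

def to_year_fraction_sketch(dt):
    # decimal year = year + elapsed fraction of that year (stand-in only)
    start = datetime(dt.year, 1, 1)
    end = datetime(dt.year + 1, 1, 1)
    return dt.year + (dt - start).total_seconds() / (end - start).total_seconds()

print(to_year_fraction_sketch(datetime(2005, 7, 2)))  # ~2005.5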
Example #3
from os import path
import matplotlib.pyplot as plt
from tools.nsha_tools import toYearFraction
# parse_alt_mag_catalogue and the src_* inputs are defined elsewhere in the full script

# set arrays for testing
bval_vect = []
bsig_vect = []

srcidx = range(len(src_code))

###############################################################################
# parse NSHA-Cat and ISC-GEM catalogues
###############################################################################

# parse NSHA-Cat catalogue
hmtk_csv = path.join('..', '..', 'catalogue', 'data',
                     'NSHA18CAT_V0.2_hmtk_declustered.csv')
nshaCat, neq = parse_alt_mag_catalogue(hmtk_csv)
nshaMaxYear = toYearFraction(nshaCat[-1]['datetime'])

###############################################################################
# loop through source zones
###############################################################################
mfdtxt = 'SRCZONE,SRCAREA,MAGTYPE,NEQ,A0,BVAL,BVALSIG\n'

src_area = []
fig = plt.figure(1, figsize=(16, 10))
k = 0
for i in srcidx:
    print('\nFitting MFD for', src_code[i])

    if i == 2 or i == 3:
        k += 1
        ax = fig.add_subplot(1, 2, k)
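
The example is cut off inside the loop, so the actual fit and bookkeeping are
not shown. Purely as an illustration of how the mfdtxt header above is meant
to be filled, one CSV row could be assembled like this (all values are
placeholders, not real fit results from get_mfds):

# hypothetical values standing in for one zone's fit results
src_zone, zone_area, mag_type = 'ZONE1', 1.0e5, 'MW'
neq, a0, bval, bval_sig = 150, 2.5, 1.0, 0.1

bval_vect.append(bval)
bsig_vect.append(bval_sig)
mfdtxt += ','.join((src_zone, str(zone_area), mag_type, str(neq),
                    str(a0), str(bval), str(bval_sig))) + '\n'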
Example #4
from os import getcwd
from numpy import array
from tools.nsha_tools import toYearFraction, get_shapely_centroid
from mfd_tools import parse_hmtk_cat, get_mfds
from tools.source_shapefile_builder import get_completeness_model_point
# dictlist2array is a project helper imported elsewhere in the full script

###############################################################################
# parse NSHA-Cat and ISC-GEM catalogues
###############################################################################

# parse NSHA-Cat catalogue
if getcwd().startswith('/Users'):
    hmtk_csv = '/Users/trev/Documents/Geoscience_Australia/NSHA2018/catalogue/data/NSHA18CAT_V0.1_hmtk_declustered.csv'
else:
    hmtk_csv = '/nas/active/ops/community_safety/ehp/georisk_earthquake/modelling/sandpits/tallen/NSHA2018/catalogue/data/NSHA18CAT_V0.1_hmtk_declustered.csv'
    
nshaCat, full_neq = parse_hmtk_cat(hmtk_csv)
nshaMaxYear = toYearFraction(nshaCat[-1]['datetime'])

eqla = dictlist2array(nshaCat, 'lat')
eqlo = dictlist2array(nshaCat, 'lon')
eqmg = dictlist2array(nshaCat, 'prefmag')
eqdt = dictlist2array(nshaCat, 'datetime')

dec_dt = array([toYearFraction(dt) for dt in eqdt])
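
dictlist2array is another project helper not shown in this excerpt. Judging
from how it is used here, a minimal equivalent simply collects one field from
each event dict into a numpy array:

from numpy import array

def dictlist2array_sketch(dictlist, key):
    # stand-in for the project's dictlist2array
    return array([d[key] for d in dictlist])

# e.g. eqmg = dictlist2array_sketch(nshaCat, 'prefmag')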

###############################################################################
# set params
###############################################################################
Example #5
from numpy import array, isnan
from shapely.geometry import Point
from tools.nsha_tools import toYearFraction

def get_events_in_poly(idx, cat, poly, polygons, src_usd, src_lsd,
                       src_overwrite_lsd):
    # (i, sourcecat, poly, polygons, src_usd, src_lsd, src_overwrite_lsd)
    '''
    Input:
        idx = index for current source
        cat = catalogue in array of dict format from "parse_hmtk_cat" above
        poly = current polygon of interest - this will be the original, unmodified area
        polygons = shapely polygons for all sources
        src_usd = list of upper seismogenic depths for each area source
        src_lsd = list of lower seismogenic depths for each area source
        src_overwrite_lsd = overwritten src_lsd to allow capture of deeper eqs
        
    Output:
        mvect  = preferred MW
        mxvect = preferred original magnitudes
        tvect  = datetime array
        dec_tvect = decimal datetime
        ev_dict = event dictionary
        
    appendEvent key:
        0 = possibly add, if not in any other zones - first pass
        1 = definitely add - in zone area and depth range
        2 = do not add - will be double-counted
        
    '''

    # set arrays
    mvect = []
    mxvect = []
    tvect = []
    dec_tvect = []
    ev_dict = []

    # now loop through earthquakes in cat
    for ev in cat:

        # first check that depths are real numbers - set to zero
        if isnan(ev['dep']):
            ev['dep'] = 0.

        # check if pt in poly and compile mag and years
        pt = Point(ev['lon'], ev['lat'])
        if pt.within(poly) and ev['dep'] >= src_usd[idx] \
                     and ev['dep'] < src_overwrite_lsd[idx]:

            appendEvent = 0

            # ignore if above sources - this is for testing variable b-values with depth
            if ev['dep'] < src_usd[idx]:
                appendEvent = 2

            # check if within actual depth range
            elif ev['dep'] >= src_usd[idx] and ev['dep'] < src_lsd[idx]:
                appendEvent = 1

            # now check that eqs won't be double counted in other zones
            else:
                maxdepths = []
                for j in range(len(polygons)):
                    # skip current polygon
                    if j != idx:
                        if pt.within(polygons[j]) and ev['dep'] >= src_usd[j] \
                                     and ev['dep'] < src_overwrite_lsd[j]:
                            maxdepths.append(src_usd[j])

                # deepest source gets priority
                if maxdepths:
                    if max(maxdepths) > src_lsd[idx]:
                        appendEvent = 2
                    else:
                        appendEvent = 1

            if appendEvent <= 1:
                mvect.append(ev['prefmag'])
                mxvect.append(ev['mx_origML'])  # original catalogue mag for Mc model
                tvect.append(ev['datetime'])
                dec_tvect.append(toYearFraction(ev['datetime']))
                ev_dict.append(ev)

    return array(mvect), array(mxvect), array(tvect), array(dec_tvect), ev_dict
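
A hedged usage sketch for the depth-aware version, with two hypothetical
overlapping zones (the shallow zone sits above a deeper one; sourcecat is the
catalogue parsed by parse_hmtk_cat, and all depths below are illustrative):

from shapely.geometry import Polygon

polygons = [Polygon([(140., -30.), (145., -30.), (145., -25.), (140., -25.)]),
            Polygon([(138., -32.), (147., -32.), (147., -23.), (138., -23.)])]
src_usd = [0., 0.]              # upper seismogenic depths (km)
src_lsd = [20., 50.]            # lower seismogenic depths (km)
src_overwrite_lsd = [50., 50.]  # extended depths used on the first pass

# events deeper than 20 km inside the first zone are only kept if no deeper
# neighbouring zone claims them (see the appendEvent key in the docstring)
mvect, mxvect, tvect, dec_tvect, ev_dict = get_events_in_poly(
    0, sourcecat, polygons[0], polygons, src_usd, src_lsd, src_overwrite_lsd)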
Example #6
        ggcat = crustcat
    """

    # approximate the decimal year of the last event in the catalogue
    lr = ggcat[-1]
    max_comp_yr = lr['year'] + lr['month'] / 12.

    # now loop through earthquakes in cat
    for s in ggcat:

        # check if pt in poly and compile mag and years
        pt = Point(s['lon'], s['lat'])
        if pt.within(poly):
            mvect.append(s['prefmag'])
            tvect.append(s['datetime'])
            dec_tvect.append(toYearFraction(s['datetime']))
            ev_dict.append(s)

    # get annual occurrence rates for each mag bin
    ycomps = array([int(x) for x in src_ycomp[i].split(';')])
    mcomps = array([float(x) for x in src_mcomp[i].split(';')])

    # set mag bins
    mrng = arange(min(mcomps) - bin_width / 2, src_mmax[i], bin_width)
    #mrng = arange(min(mcomps), src_mmax[i], bin_width)

    # convert lists to arrays
    mvect = array(mvect)
    tvect = array(tvect)
    dec_tvect = array(dec_tvect)
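
For a concrete sense of the binning above: with completeness magnitudes of
3.0, 4.0 and 5.0, a bin width of 0.1 and an illustrative mmax of 7.5, the bin
edges start half a bin below the smallest completeness magnitude:

from numpy import arange

bin_width = 0.1
mcomps = [3.0, 4.0, 5.0]
mmax = 7.5  # placeholder for src_mmax[i]

mrng = arange(min(mcomps) - bin_width / 2, mmax, bin_width)
print(mrng[:3])  # [2.95 3.05 3.15]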