Example #1
def get_ul_seismo_depths(target_codes, target_usd, target_lsd):
    from os import path
    import shapefile
    from numpy import array, median, std
    from tools.nsha_tools import get_field_data

    shmaxshp = path.join('..', 'Domains', 'Domains_NSHA18_single_Mc.shp')

    print('Reading SHmax shapefile...')
    sf = shapefile.Reader(shmaxshp)

    # get shmax attributes
    source_codes = get_field_data(sf, 'CODE', 'str')
    source_usd = get_field_data(sf, 'USD', 'float')
    source_lsd = get_field_data(sf, 'LSD', 'float')

    for j, tc in enumerate(target_codes):
        matchCodes = False
        for i, sc in enumerate(source_codes):
            if tc == sc:
                target_usd[j] = source_usd[i]
                target_lsd[j] = source_lsd[i]
                matchCodes = True

        # no match
        if not matchCodes:
            print('  Cannot match seis depths:', tc)

    return target_usd, target_lsd
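
A minimal usage sketch, assuming the target zone codes match the CODE attribute of the Domains shapefile; the zone codes and placeholder depth arrays below are hypothetical:

from numpy import array

target_codes = ['ZN01', 'ZN02']   # hypothetical zone codes
target_usd = array([0.0, 0.0])    # placeholder upper seismogenic depths (km)
target_lsd = array([20.0, 20.0])  # placeholder lower seismogenic depths (km)

target_usd, target_lsd = get_ul_seismo_depths(target_codes, target_usd, target_lsd)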
Example #2
def get_rate_adjust_factor(newshp, newField, origshp, origField):
    import shapefile
    from numpy import ones
    from tools.mapping_tools import get_WGS84_area
    from shapely.geometry import Polygon
    from tools.nsha_tools import get_field_data

    print('\nChecking shape geometries...')

    newsf = shapefile.Reader(newshp)
    new_shapes = newsf.shapes()
    new_codes = get_field_data(newsf, newField, 'str')

    origsf = shapefile.Reader(origshp)
    orig_shapes = origsf.shapes()
    orig_codes = get_field_data(origsf, origField, 'str')

    # set initial values
    rte_adj_fact = ones(len(new_codes))
    for i, (newCode, newPoly) in enumerate(zip(new_codes, new_shapes)):
        newPolyArea = get_WGS84_area(Polygon(newPoly.points))

        for origCode, origPoly in zip(orig_codes, orig_shapes):
            if origCode == newCode:
                origPolyArea = get_WGS84_area(Polygon(origPoly.points))

                if round(newPolyArea, 4) != round(origPolyArea, 4):
                    rte_adj_fact[i] = round(newPolyArea / origPolyArea, 4)

                    print('    ', newCode, 'rate adjustment factor:',
                          rte_adj_fact[i])

    return rte_adj_fact
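
The factor rescales a zone's earthquake rates by the ratio of new to original polygon area, so that geometry edits preserve the implied rate density. A sketch of the kind of geodesic-area helper this depends on, assuming get_WGS84_area returns polygon area on the WGS84 ellipsoid; the pyproj-based implementation below is an assumption, not the project's actual helper:

from pyproj import Geod
from shapely.geometry import Polygon

def wgs84_area_sqkm(polygon):
    # signed geodesic area on the WGS84 ellipsoid; ring orientation sets the sign
    geod = Geod(ellps='WGS84')
    area_m2, _perim = geod.geometry_area_perimeter(polygon)
    return abs(area_m2) / 1e6

# a 1 x 1 degree cell near 30S is roughly 1.1e4 km**2
print(wgs84_area_sqkm(Polygon([(140., -30.), (141., -30.), (141., -29.), (140., -29.)])))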
Example #3
        #        a_vals = np.log10((data[:,2] + b_val*5.0)*dxdy)
        #        a_vals = data[:,2]*np.power(10, b_val*5.0)/100.
        #       a_vals = np.log10(data[:,2]*np.power(10,5*b_val)*dxdy)
        a_vals = np.log10(data[:, 2] * np.power(10, 5 * b_val) * dxdy)
        b_vals = np.ones(data[:, 2].size) * b_val
print(a_vals)

###############################################################################
# get neotectonic domain number from centroid
###############################################################################
# load domains shp
dsf = shapefile.Reader(
    os.path.join('..', '..', 'zones', 'Domains', 'shapefiles',
                 'DOMAINS_NSHA18.shp'))
# get domains
neo_doms = get_field_data(dsf, 'DOMAIN', 'float')
dom_mmax = get_field_data(dsf, 'MMAX_BEST', 'float')
# get domain polygons
dom_shapes = dsf.shapes()

###############################################################################
# get TRT, depth, from Leonard08
###############################################################################
# load domains shp
lsf = shapefile.Reader(
    os.path.join('..', '..', 'zones', 'Leonard2008', 'shapefiles',
                 'LEONARD08_NSHA18.shp'))

# get domains
ltrt = get_field_data(lsf, 'TRT', 'str')
ldep = get_field_data(lsf, 'DEP_BEST', 'float')
Example #4
    python check_shape_overlap.py ARUP/shapefiles/ARUP_source_model.shp sub_zone
'''    

###############################################################################
# parse shapefile
###############################################################################

shpfile = argv[1]
code_field = argv[2]

print('Reading source shapefile...')
sf = shapefile.Reader(shpfile)
shapes = sf.shapes()
    
# get src code
codes = get_field_data(sf, code_field, 'str')

###############################################################################
# find poly points within other polygons
###############################################################################

# loop through zones 1st time
for code1, poly1 in zip(codes, shapes):
    
    # loop through zones 2nd time
    for code2, poly2 in zip(codes, shapes):
        
        # now loop through all points in poly2
        for vlon, vlat in poly2.points:
            point = Point(vlon, vlat)
            
Example #5
###############################################################################
# parse Leonard shp exported from OQ
###############################################################################

leoshp = 'source_model_leonard_2008.shp'

print('Reading source shapefile...')
sf = shapefile.Reader(leoshp)
shapes = sf.shapes()
polygons = []
for poly in shapes:
    polygons.append(Polygon(poly.points))

# get shp data
codes = get_field_data(sf, 'name', 'str')
mmax = get_field_data(sf, 'max_mag', 'float')
mmin = get_field_data(sf, 'min_mag', 'float')
trt = get_field_data(sf, 'trt', 'str')
dep_b = get_field_data(sf, 'hd1', 'float')

###############################################################################
# parse Leonard lookup csv to get completeness info
###############################################################################
#l08_lookup = 'leonard08_alt_mc_lookup.csv'
l08_lookup = 'leonard08_lookup.csv'

lu_code = []
src_name = []
mcomp = []
ycomp = []
Example #6
csvLines = ','.join(newFields) + '\n'

fieldType = [
    'C', 'C', 'F', 'F', 'F', 'F', 'F', 'F', 'F', 'F', 'F', 'F', 'F', 'C', 'C',
    'F', 'F', 'F', 'F', 'F', 'F', 'F', 'F', 'F', 'F', 'F', 'F', 'F', 'F', 'C',
    'F', 'C', 'C', 'C', 'C', 'F', 'C'
]

fieldSize = [
    100, 12, 2, 4, 10, 6, 6, 6, 6, 6, 5, 4, 4, 30, 30, 8, 8, 8, 6, 6, 6, 6, 6,
    6, 5, 5, 5, 5, 5, 20, 3, 30, 30, 70, 50, 8, 50
]

fieldDecimal = [
    0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 2, 2, 0, 0, 5, 5, 5, 4, 3, 3, 3, 3, 3, 1,
    1, 1, 1, 1, 0, 1, 0, 0, 0, 0, 3, 0
]

domains = get_field_data(sf, 'DOMAIN', 'float')
trts = get_field_data(sf, 'TRT', 'str')
old_mmaxs = get_field_data(sf, 'MMAX_BEST', 'float')
##############################################################################
# build shapefile
##############################################################################

# set shapefile fields
w = shapefile.Writer(shapefile.POLYGON)
for fn, ft, fs, fd in zip(newFields, fieldType, fieldSize, fieldDecimal):
    if ft == 'F':
        w.field(fn, ft, fs, fd)
    else:
        w.field(fn, ft, str(fs))

# now loop through records and get data
Example #7
    lats.append(float(location[2]))
rates = data['f1']/4.
a_vals = np.log10(data['f1']/4.) # Divide by 4 as cells are overlapping
b_vals = data['f2']

###############################################################################
# get neotectonic domain number from centroid
###############################################################################
# load domains shp
domains_shp =  '../../zones/2018_mw/Domains_single_mc/shapefiles/Domains_NSHA18_MFD.shp'
#os.path.join('..','..','zones','shapefiles','Domains','Domains_NSHA18_single_Mc.shp')
dsf = shapefile.Reader(domains_shp)
lt  = logic_tree.LogicTree('../../../shared/seismic_source_model_weights_rounded_p0.4.csv')
params = params_from_shp(domains_shp, trt_ignore=['Interface', 'Active', 'Oceanic', 'Intraslab'])
# get domains
neo_doms  = get_field_data(dsf, 'DOMAIN', 'float')
dom_codes = get_field_data(dsf, 'CODE', 'str')
dom_mmax = get_field_data(dsf, 'MMAX_BEST', 'float')
dom_trt  = get_field_data(dsf, 'GMM_TRT', 'str')
dom_dep  = get_field_data(dsf, 'DEP_BEST', 'float')

# Build some dictionaries of parameters for each domain
param_index = {}
k = 0
print(params)
for dom in params:
    print('Processing source %s' % dom['CODE'])
    if dom['TRT'] == 'NCratonic' or dom['TRT'] == 'Extended':
        dom['TRT'] = 'Non_cratonic'
    # For the moment, only consider regions within Australia
    if dom['TRT'] == 'Active' or dom['TRT'] == 'Interface' or \
Example #8
###############################################################################
'''
Source zones edited to simplify zone boundaries and to add background zones
'''

smshp = 'SIN_MCC_NSHA18_EDIT.shp'

print('Reading source shapefile...')
sf = shapefile.Reader(smshp)
shapes = sf.shapes()
polygons = []
for poly in shapes:
    polygons.append(Polygon(poly.points))

# get src name
names = get_field_data(sf, 'SRC_NAME', 'str')
codes = get_field_data(sf, 'CODE', 'str')

###############################################################################
# get neotectonic superdomains number and Mmax from zone centroid
###############################################################################
# get path to reference shapefile
shapepath = open('..//reference_shp.txt').read()

print('\nNOTE: Getting Domains info for original magnitudes\n')
shapepath = open('..//reference_shp_mx.txt').read()

# load domains shp
dsf = shapefile.Reader(shapepath)

# get domains
Example #9
###############################################################################
# parse Domains shp and prep data
###############################################################################

domshp = 'Domains_NSHA18_Merged.shp'

print('Reading source shapefile...')
sf = shapefile.Reader(domshp)
shapes = sf.shapes()
polygons = []
for poly in shapes:
    polygons.append(Polygon(poly.points))

# get field data
src_codes = get_field_data(sf, 'code', 'str')
src_names = get_field_data(sf, 'Name', 'str')
domains = get_field_data(sf, 'DOMAIN', 'float')
mmax = get_field_data(sf, 'max_mag', 'float')
trt = get_field_data(sf, 'trt', 'str')
usd = get_field_data(sf, 'usd', 'float')
lsd = get_field_data(sf, 'lsd', 'float')
hd = get_field_data(sf, 'hd1', 'float')
stk = get_field_data(sf, 'strike1', 'float')
dip = get_field_data(sf, 'dip1', 'float')
rke = get_field_data(sf, 'rake1', 'float')

# set domain for unset domains
trt_new = []
for i in range(0, len(trt)):
    if trt[i] == 'Active':
Example #10
def get_completeness_model(src_codes, src_shapes, domains, singleCorner):
    '''
    singleCorner
        1 = do singleCorner (True)
        0 = do not do singleCorner (False)
    '''

    from os import path
    import shapefile
    from shapely.geometry import Point, Polygon
    from tools.nsha_tools import get_field_data, get_shp_centroid
    from mapping_tools import distance

    # load completeness shp
    if singleCorner == 1:
        compshp = path.join('..', 'Other',
                            'Mcomp_NSHA18_single.shp')  # single corner
    else:
        #compshp = path.join('..','Other','Mcomp_NSHA18_multi.shp') # multi corner
        compshp = path.join('..', 'Other',
                            'gridded_polygons_3d_completeness.shp'
                            )  # gridded model for updated Mc - Jan 2020

    mcsf = shapefile.Reader(compshp)

    # get completeness data
    mc_ycomp = get_field_data(mcsf, 'YCOMP', 'str')
    mc_mcomp = get_field_data(mcsf, 'MCOMP', 'str')

    # get completeness polygons
    mc_shapes = mcsf.shapes()

    # set empty completeness values
    ycomp = []
    mcomp = []
    min_rmag = []

    # loop through Mcomp zones
    for code, poly, dom in zip(src_codes, src_shapes, domains):
        # get centroid of completeness sources
        clon, clat = get_shp_centroid(poly.points)
        point = Point(clon, clat)
        print(clon, clat)

        # loop through target and find point in poly
        mccompFound = False
        dist_to_comp_cent = 9999.
        for i, mc_shape in enumerate(mc_shapes):
            mc_poly = Polygon(mc_shape.points)
            mclon, mclat = get_shp_centroid(mc_shape.points)

            # check if target centroid in completeness poly
            if point.within(mc_poly) or point.touches(mc_poly):
                # get dist to centroids
                rngkm = distance(clat, clon, mclat, mclon)[0]
                if rngkm < dist_to_comp_cent:
                    dist_to_comp_cent = rngkm  # keep nearest completeness centroid
                    tmp_ycmp = mc_ycomp[i]
                    tmp_mcmp = mc_mcomp[i]
                    mccompFound = True

        # now fill completeness if True
        if mccompFound:
            ycomp.append(tmp_ycmp)
            mcomp.append(tmp_mcmp)

        # if no Mcomp model assigned, use conservative model
        else:
            if dom >= 1 and dom <= 8:
                # for single-corner
                if singleCorner == 1:
                    ycomp.append('1980;1980')
                    mcomp.append('3.5;3.5')

                # for multi-corner
                else:
                    ycomp.append('1980;1964;1900')
                    mcomp.append('3.5;5.0;6.0')

            # use approx ISC-GEM completeness
            else:
                ycomp.append('1975;1964;1904')
                mcomp.append('5.75;6.25;7.5')

        # set rmin range
        min_rmag.append(max([3.0, float(mcomp[-1].split(';')[0])]))

    return ycomp, mcomp, min_rmag
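
The returned ycomp and mcomp entries are semicolon-delimited strings of completeness years and magnitudes. A minimal sketch unpacking one of the default entries above into (year, Mc) pairs, where events of M >= Mc are treated as completely reported from that year onwards:

ycomp_str = '1980;1964;1900'
mcomp_str = '3.5;5.0;6.0'

# pair each completeness year with its magnitude threshold
pairs = [(float(y), float(m))
         for y, m in zip(ycomp_str.split(';'), mcomp_str.split(';'))]
print(pairs)  # [(1980.0, 3.5), (1964.0, 5.0), (1900.0, 6.0)]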
Example #11
def get_aus_shmax_vectors(src_codes, src_shapes):
    from os import path
    import shapefile
    from numpy import array, median, std
    from shapely.geometry import Point, Polygon
    from tools.nsha_tools import get_field_data

    print('Reading SHmax shapefile...')
    try:
        #shmaxshp = path.join('..','Other','SHMax_Rajabi_2016.shp')
        shmaxshp = '/nas/active/ops/community_safety/ehp/georisk_earthquake/modelling/sandpits/trev/NSHA2018/source_models/zones/shapefiles/Other/SHMax_Rajabi_2016.shp'
        sf = shapefile.Reader(shmaxshp)
    except:
        shmaxshp = '/Users/trev/Documents/Geoscience_Australia/NSHA2018/source_models/zones/shapefiles/Other/SHMax_Rajabi_2016.shp'
        sf = shapefile.Reader(shmaxshp)

    # get shmax attributes
    shmax_lat = get_field_data(sf, 'LAT', 'float')
    shmax_lon = get_field_data(sf, 'LON', 'float')
    shmax = get_field_data(sf, 'SHMAX', 'float')

    ###############################################################################
    # get preferred strike
    ###############################################################################
    shmax_pref = []
    shmax_sig = []

    for code, poly in zip(src_codes, src_shapes):
        # get shmax points in polygon
        shm_in = []

        # now loop through earthquakes in cat
        for shmlo, shmla, shm in zip(shmax_lon, shmax_lat, shmax):

            # check if pt in poly and compile mag and years
            pt = Point(shmlo, shmla)
            if pt.within(Polygon(poly.points)):
                shm_in.append(shm)

        if len(shm_in) > 0:
            shmax_pref.append(median(array(shm_in)))

            # check sigma and make sure it is at least +/- 15 degrees
            shmax_sig.append(max([std(array(shm_in)), 15.]))

            print('Getting SHmax for', code)

        # if no points in polygons, get nearest neighbour
        else:
            print('Getting nearest neighbour...')
            min_dist = 9999.
            for shmlo, shmla, shm in zip(shmax_lon, shmax_lat, shmax):
                pt = Point(shmlo, shmla)
                pt_dist = pt.distance(Polygon(poly.points))
                if pt_dist < min_dist:
                    min_dist = pt_dist
                    shm_near = shm

            shmax_pref.append(shm_near)  # set nearest neighbour
            shmax_sig.append(15.)  # set std manually

    return shmax_pref, shmax_sig
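
A minimal usage sketch, assuming the source zones are read with pyshp as in the other examples; the shapefile name here is hypothetical:

import shapefile
from tools.nsha_tools import get_field_data

sf = shapefile.Reader('source_zones.shp')  # hypothetical source-zone shapefile
src_shapes = sf.shapes()
src_codes = get_field_data(sf, 'CODE', 'str')

shmax_pref, shmax_sig = get_aus_shmax_vectors(src_codes, src_shapes)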
Example #12
def get_simple_neotectonic_domain_params(target_sf, refShpFile):
    import shapefile
    from shapely.geometry import Point, Polygon
    from tools.nsha_tools import get_field_data, get_shp_centroid

    # load target shapefile
    polygons = target_sf.shapes()

    # load domains shp
    domshp = open(refShpFile).read()
    dsf = shapefile.Reader(domshp)

    # get domains
    neo_doms = get_field_data(dsf, 'DOMAIN', 'float')
    neo_min_reg = get_field_data(dsf, 'MIN_RMAG', 'float')
    neo_mmax = get_field_data(dsf, 'MMAX_BEST', 'float')
    neo_bval = get_field_data(dsf, 'BVAL_BEST', 'float')
    neo_bval_l = get_field_data(dsf, 'BVAL_LOWER', 'float')
    neo_trt = get_field_data(dsf, 'TRT', 'str')
    neo_usd = get_field_data(dsf, 'USD', 'float')
    neo_lsd = get_field_data(dsf, 'LSD', 'float')
    neo_dep_b = get_field_data(dsf, 'DEP_BEST', 'float')
    neo_dep_u = get_field_data(dsf, 'DEP_UPPER', 'float')
    neo_dep_l = get_field_data(dsf, 'DEP_LOWER', 'float')

    # get bval sigma
    bval_sig = neo_bval_l - neo_bval

    # get domain polygons
    dom_shapes = dsf.shapes()
    domain = []
    min_rmag = []
    mmax = []
    trt = []
    bval_fix = []
    bval_sig_fix = []
    usd = []
    lsd = []
    dep_b = []
    dep_u = []
    dep_l = []

    # loop through target zones
    for poly in polygons:
        # get centroid of target sources
        clon, clat = get_shp_centroid(poly.points)
        point = Point(clon, clat)

        # loop through domains and find point in poly
        matchidx = -99
        for i, dom_shape in enumerate(dom_shapes):
            # make sure trts match
            dom_poly = Polygon(dom_shape.points)

            # check if target centroid in domains poly
            if point.within(dom_poly):
                matchidx = i

        # set dummy values
        if matchidx == -99:
            domain.append(-99)
            min_rmag.append(3.5)
            mmax.append(8.5)
            trt.append('')
            bval_fix.append(-99)
            bval_sig_fix.append(-99)
            usd.append(-99)
            lsd.append(-99)
            dep_b.append(-99)
            dep_u.append(-99)
            dep_l.append(-99)
        # fill real values
        else:
            domain.append(neo_doms[matchidx])
            min_rmag.append(neo_min_reg[matchidx])
            mmax.append(neo_mmax[matchidx])
            trt.append(neo_trt[matchidx])
            bval_fix.append(neo_bval[matchidx])
            bval_sig_fix.append(bval_sig[matchidx])
            usd.append(neo_usd[matchidx])
            lsd.append(neo_lsd[matchidx])
            dep_b.append(neo_dep_b[matchidx])
            dep_u.append(neo_dep_u[matchidx])
            dep_l.append(neo_dep_l[matchidx])

    return domain, min_rmag, mmax, trt, bval_fix, bval_sig_fix, usd, lsd, dep_b, dep_u, dep_l
Example #13
def get_completeness_model_point(clat, clon, singleCorner):
    '''
    singleCorner
        1 = do singleCorner (True)
        0 = do not do singleCorner (False)
        
        assume AU, dom = 0
    '''
    dom = 0
    from os import path, getcwd
    import shapefile
    from shapely.geometry import Point, Polygon
    from tools.nsha_tools import get_field_data, get_shp_centroid
    from mapping_tools import distance

    # load completeness shp
    if getcwd().startswith('/Users'):
        if singleCorner == 1:
            compshp = path.join(
                '/Users/trev/Documents/Geoscience_Australia/NSHA2018/source_models/zones/shapefiles/Other/Mcomp_NSHA18_single.shp'
            )  # single corner
        else:
            compshp = path.join(
                '/Users/trev/Documents/Geoscience_Australia/NSHA2018/source_models/zones/shapefiles/Other/gridded_polygons_3d_completeness.shp'
            )  # multi corner
    else:
        if singleCorner == 1:
            compshp = path.join(
                '/nas/active/ops/community_safety/ehp/georisk_earthquake/modelling/sandpits/trev/NSHA2018/source_models/zones/shapefiles/Other/Mcomp_NSHA18_single.shp'
            )  # single corner
        else:
            #compshp = path.join('/nas/active/ops/community_safety/ehp/georisk_earthquake/modelling/sandpits/trev/NSHA2018/source_models/zones/shapefiles/Other/Mcomp_NSHA18_multi.shp') # multi corner
            compshp = path.join(
                '/nas/active/ops/community_safety/ehp/georisk_earthquake/modelling/sandpits/trev/NSHA2018/source_models/zones/shapefiles/Other/Mcomp_NSHA18_multi_20191217.shp'
            )  # multi corner

    mcsf = shapefile.Reader(compshp)

    # get completeness data
    mc_ycomp = get_field_data(mcsf, 'YCOMP', 'str')
    mc_mcomp = get_field_data(mcsf, 'MCOMP', 'str')

    # get completeness polygons
    mc_shapes = mcsf.shapes()

    # set empty completeness values
    ycomp = ''
    mcomp = ''
    min_rmag = []
    point = Point(clon, clat)
    print(clon, clat)

    # loop through target and find point in poly
    mccompFound = False
    dist_to_comp_cent = 9999.
    for i, mc_shape in enumerate(mc_shapes):
        mc_poly = Polygon(mc_shape.points)
        mclon, mclat = get_shp_centroid(mc_shape.points)

        # check if target centroid in completeness poly
        if point.within(mc_poly) or point.touches(mc_poly):
            # get dist to centroids
            rngkm = distance(clat, clon, mclat, mclon)[0]
            print(rngkm)
            if rngkm < dist_to_comp_cent:
                dist_to_comp_cent = rngkm  # keep nearest completeness centroid
                ycomp = mc_ycomp[i]
                mcomp = mc_mcomp[i]
                mccompFound = True
                print(mc_poly)

    # if no Mcomp model assigned, use conservative model
    if not mccompFound:
        if dom <= 8:
            # for single-corner
            if singleCorner == 1:
                ycomp = '1980;1980'
                mcomp = '3.5;3.5'

            # for multi-corner
            else:
                ycomp = '1980;1964;1900'
                mcomp = '3.5;5.0;6.0'

        # use approx ISC-GEM completeness
        else:
            ycomp = '1975;1964;1904'
            mcomp = '5.75;6.25;7.5'

    # set rmin range
    min_rmag.append(max([3.0, float(mcomp.split(';')[0])]))

    return ycomp, mcomp, min_rmag
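
A minimal usage sketch for the point-based lookup; the coordinates are hypothetical, and singleCorner=0 selects the multi-corner completeness model:

ycomp, mcomp, min_rmag = get_completeness_model_point(-30.0, 140.0, 0)
print(ycomp, mcomp, min_rmag)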
Example #14
###############################################################################
# parse Leonard shp exported from OQ
###############################################################################

leoshp = 'NSHA13_background_source_model.shp'

print('Reading source shapefile...')
sf = shapefile.Reader(leoshp)
shapes = sf.shapes()
polygons = []
for poly in shapes:
    polygons.append(Polygon(poly.points))
    
# get shp data
code  = get_field_data(sf, 'name', 'str')
mmax  = get_field_data(sf, 'max_mag', 'float')
mmin  = get_field_data(sf, 'min_mag', 'float')
trt   = get_field_data(sf, 'trt', 'str')
dep_b = get_field_data(sf, 'hd1', 'float')

###############################################################################
# parse NSHA background lookup csv to get completeness info
###############################################################################
nsha_lookup = 'NSHA13_background_source_model.csv'

names = []
codes = []

# get codes and source names
lines = open(nsha_lookup).readlines()[1:]
Example #15
###############################################################################
# parse Leonard shp exported from OQ
###############################################################################

leoshp = path.join('shapefiles', 'source_model_leonard_2008.shp')

print('Reading source shapefile...')
sf = shapefile.Reader(leoshp)
shapes = sf.shapes()
polygons = []
for poly in shapes:
    polygons.append(Polygon(poly.points))
    
# get shp data
code  = get_field_data(sf, 'name', 'str')
mmax  = get_field_data(sf, 'max_mag', 'float')
mmin  = get_field_data(sf, 'min_mag', 'float')
trt   = get_field_data(sf, 'trt', 'str')
dep_b = get_field_data(sf, 'hd1', 'float')

###############################################################################
# parse Leonard lookup csv to get completeness info
###############################################################################
#l08_lookup = 'leonard08_alt_mc_lookup.csv'
l08_lookup = 'leonard08_lookup.csv'

lu_code = []
lu_name = []
mcomp = []
ycomp = []
Example #16
def get_completeness_model_vertex(src_codes, src_shapes, domains,
                                  singleCorner):
    '''
    singleCorner
        1 = do singleCorner (True)
        0 = do not do singleCorner (False)
    '''

    from os import path
    import shapefile
    from shapely.geometry import Point, Polygon
    from tools.nsha_tools import get_field_data, get_shp_centroid

    # load completeness shp
    if singleCorner == 1:
        compshp = path.join('..', 'Other',
                            'Mcomp_NSHA18_single.shp')  # single corner
    else:
        #compshp = path.join('..','Other','Mcomp_NSHA18_multi.shp') # multi corner
        compshp = path.join('..', 'Other',
                            'gridded_polygons_3d_completeness.shp'
                            )  # gridded model for updated Mc - Jan 2020

    mcsf = shapefile.Reader(compshp)

    # get completeness data
    mc_ycomp = get_field_data(mcsf, 'YCOMP', 'str')
    mc_mcomp = get_field_data(mcsf, 'MCOMP', 'str')

    # get completeness polygons
    mc_shapes = mcsf.shapes()

    # set empty completeness values
    ycomp = []
    mcomp = []
    min_rmag = []

    # loop through Mcomp zones
    for code, poly, dom in zip(src_codes, src_shapes, domains):
        tmp_mcomp = '-99;-99'  # dummy value
        mccompFound = False

        # get centroid of completeness sources
        for clon, clat in poly.points:
            point = Point(clon, clat)

            # loop through target and find point in poly
            for i in range(0, len(mc_shapes)):
                mc_poly = Polygon(mc_shapes[i].points)

                # check if target vertex in completeness poly
                if point.within(mc_poly):
                    # select most conservative option
                    if float(mc_mcomp[i].strip().split(';')[0]) > float(
                            tmp_mcomp.strip().split(';')[0]):
                        tmp_ycomp = mc_ycomp[i]
                        tmp_mcomp = mc_mcomp[i]
                        mccompFound = True

        # if no Mcomp model assigned, use conservative model
        if not mccompFound:
            if dom <= 8:
                # for single-corner
                if singleCorner == 1:
                    tmp_ycomp = '1980;1980'
                    tmp_mcomp = '3.5;3.5'

                # for multi-corner
                else:
                    tmp_ycomp = '1980;1964;1900'
                    tmp_mcomp = '3.5;5.0;6.0'

            # use approx ISC-GEM completeness
            else:
                tmp_ycomp = '1975;1964;1904'
                tmp_mcomp = '5.75;6.25;7.5'

        ycomp.append(tmp_ycomp)
        mcomp.append(tmp_mcomp)

        # set rmin range
        min_rmag.append(max([3.0, float(mcomp[-1].split(';')[0])]))

    return ycomp, mcomp, min_rmag
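
A minimal illustration of the "most conservative option" rule above, applying the same first-value comparison to hypothetical MCOMP strings:

tmp_mcomp = '-99;-99'  # dummy value, as in the function
for mc in ['3.0;5.0;6.0', '3.5;5.0;6.0', '2.8;5.0;6.0']:
    if float(mc.split(';')[0]) > float(tmp_mcomp.split(';')[0]):
        tmp_mcomp = mc
print(tmp_mcomp)  # '3.5;5.0;6.0' - the highest recent Mc wins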
Example #17
#arupshp = 'ARUP_source_model.shp'
mcshp = 'completeness_zones.shp'

# get preferred catalogues
prefCat = get_preferred_catalogue(mcshp)

print('Reading source shapefile...')
sf = shapefile.Reader(mcshp)
shapes = sf.shapes()
polygons = []
for poly in shapes:
    polygons.append(Polygon(poly.points))

# get src name
src_names = get_field_data(sf, 'NAME', 'str')
src_codes = src_names

###############################################################################
# get neotectonic domains class and Mmax from zone centroid
###############################################################################
# get path to reference shapefile
shapepath = open('..//reference_shp.txt').read()

# load domains shp
dsf = shapefile.Reader(shapepath)

# get domains
neo_doms = get_field_data(dsf, 'DOMAIN', 'float')
neo_mmax = get_field_data(dsf, 'MMAX_BEST', 'float')
neo_trt = get_field_data(dsf, 'TRT', 'str')
Example #18
###############################################################################
# parse Domains shp and prep data
###############################################################################

domshp = 'ARUP_NSHA18_Merged.shp'

print('Reading source shapefile...')
sf = shapefile.Reader(domshp)
shapes = sf.shapes()
polygons = []
for poly in shapes:
    polygons.append(Polygon(poly.points))
    
# get field data
src_codes = get_field_data(sf, 'CODE', 'str')
src_names1 = get_field_data(sf, 'Name', 'str')
src_names2 = get_field_data(sf, 'SRC_NAME', 'str')
domains = get_field_data(sf, 'DOMAIN', 'float')
mmax1 = get_field_data(sf, 'max_mag', 'float')
mmax2 = get_field_data(sf, 'MMAX_BEST', 'float')
trt = get_field_data(sf, 'TRT', 'str')
usd = get_field_data(sf, 'usd', 'float')
lsd = get_field_data(sf, 'lsd', 'float')
hd = get_field_data(sf, 'hd1', 'float')
stk = get_field_data(sf, 'strike1', 'float')
dip = get_field_data(sf, 'dip1', 'float')
rke = get_field_data(sf, 'rake1', 'float')

# merge source names
src_names = []
lines = open(auscsv).readlines()[1:]
for line in lines:
    dat = line.strip().split(',')
    name.append(dat[3])
    codes.append('ZN' + dat[2])  # use "sub_zone" instead
    neo_domains.append(dat[1])

###############################################################################
# get neotectonic domain number and Mmax from zone centroid
###############################################################################
# load domains shp
dsf = shapefile.Reader(
    path.join('..', 'Domains', 'shapefiles', 'DOMAINS_NSHA18.shp'))

# get domains
neo_doms = get_field_data(dsf, 'DOMAIN', 'float')
neo_mmax = get_field_data(dsf, 'MMAX_BEST', 'float')

# get domain polygons
dom_shapes = dsf.shapes()
dom = []
mmax = []

# loop through ARUP zones
for code, poly in zip(codes, shapes):
    # get centroid of leonard sources
    clon, clat = get_shp_centroid(poly.points)
    point = Point(clon, clat)
    print(clon, clat)
    tmp_dom = -99
    tmp_mmax = -99
Example #20
try:
    from tools.nsha_tools import get_field_data, get_shp_centroid
except ImportError:
    print('Add PYTHONPATH to NSHA18 root directory')
from source_models.smoothed_seismicity.combine_ss_models import gr2inc_mmax
from source_models.smoothed_seismicity.utilities import params_from_shp
from source_models.logic_trees import logic_tree

source_data_filename = 'AB_Values.shp'
nrml_version = '04'
msr = Leonard2014_SCR()
tom = PoissonTOM(50)

source_data = shapefile.Reader(source_data_filename)
shapes = source_data.shapes()
a_values = get_field_data(source_data, 'aVal', 'float')
b_values = get_field_data(source_data, 'bVal', 'float')
ids = get_field_data(source_data, 'ID', 'float')

# Default values - real values should be based on neotectonic domains
min_mag = 4.5
max_mag = 7.2
depth = 10.0

###############################################################################
# get neotectonic domain number from centroid
###############################################################################
# load domains shp
domains_shapefile = '../../zones/2018_mw/Domains_single_mc/shapefiles/Domains_NSHA18_MFD.shp'
#os.path.join('..','..','zones','shapefiles','Domains','Domains_NSHA18_single_Mc.shp')
dsf = shapefile.Reader(domains_shapefile)
Example #21
#ausshp = 'DIMAUS_Model_postNSHA.shp'
ausshp = 'DIMAUS_NSHA18_FIXEDSHAPES.shp'

print('Reading source shapefile...')
sf = shapefile.Reader(ausshp)
shapes = sf.shapes()
polygons = []
newpoints = []
for poly in shapes:
    # first, let's round points to 2 decimal places
    newpoints.append(array(around(poly.points, decimals=2)))

    polygons.append(Polygon(poly.points))

# get src name
src_name = get_field_data(sf, 'SRC_NAME', 'str')

###############################################################################
# parse AUS6 lookup csv
###############################################################################

auscsv = 'DIMAUS_lookup.csv'

mmin = []
mmax = []
name = []
codes = []
dim_class = []

lines = open(auscsv).readlines()[1:]
for line in lines:
Example #22
#w.save(outshp)
w.close()

###############################################################################
# read new shapefile
###############################################################################

print('Reading source shapefile...')
sf = shapefile.Reader(outshp)
shapes = sf.shapes()
polygons = []
for poly in shapes:
    polygons.append(Polygon(poly.points))

# get field data
src_codes = get_field_data(sf, 'CODE', 'str')
src_names = get_field_data(sf, 'SRC_NAME', 'str')

###############################################################################
# load neotectonic domains parameters
###############################################################################

# set domestic domain numbers based on neotectonic domains
refshpfile = '..//reference_shp.txt'
neo_domains, neo_min_rmag, neo_mmax, neo_trt, neo_bval_fix, neo_bval_sig_fix, neo_usd, neo_lsd, neo_dep_b, neo_dep_u, neo_dep_l \
    = get_simple_neotectonic_domain_params(sf, refshpfile)
zone_class = list(neo_domains)[:]
'''
# reset Gawler Craton to Flinders due to b-value similarities
zone_class[4] = 2.