if zgroup == 'BS':
    mmin = 5.25

if zgroup == 'BS':
    if pt.within(poly) and sd['mag'] >= mmin and sd['rhyp'] > 500 and sd['rhyp'] < 1750:
        idx.append(i)

        # get A19 GMM spectrum for a reference vs30 of 760 m/s
        A19imt = calc_nac_gmm_spectra(sd['mag'], sd['rhyp'], sd['dep'], 760., zgroup)

        # interpolate observed geometric-mean spectrum onto the GMM periods (in log space)
        lnAmp = interp(log(A19imt['per']), log(sd['per']), log(sd['geom']))

        # ln residual = ln(observed) - ln(predicted)
        stdict[i]['lnRes'] = lnAmp - A19imt['sa']
        stdict[i]['lnSA'] = A19imt['sa']
        stdict[i]['vs30'] = get_station_vs30(sd['sta'])[0]

        # stack residuals for later statistics
        if len(res_stack) == 0:
            res_stack = array([stdict[i]['lnRes']])
        else:
            res_stack = vstack((res_stack, [stdict[i]['lnRes']]))

        # build vs30 array
        vs30.append(stdict[i]['vs30'])

gmmT = A19imt['per']

# keep only the records that passed the selection criteria
stdict = stdict[array(idx)]
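# A minimal sketch (not part of the original script) of how the stacked
# residuals might be summarised per period; assumes res_stack has shape
# (n_records, n_periods) and gmmT holds the corresponding GMM periods.
# The names med_res/sig_res are illustrative only.
from numpy import nanmedian, nanstd

med_res = nanmedian(res_stack, axis=0)  # median ln residual at each period
sig_res = nanstd(res_stack, axis=0)     # residual scatter at each period
for T, m, s in zip(gmmT, med_res, sig_res):
    print('T = %0.3f s: median lnRes = %0.3f; sigma = %0.3f' % (T, m, s))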
###############################################################################
# build residual data

for poly, zcode, zgroup in zip(polygons, zone_code, zone_group):
    for i, sd in enumerate(stdict):
        pt = Point(sd['eqlo'], sd['eqla'])

        # set minimum magnitude by source-zone group
        if zgroup == 'BS':
            mmin = 5.25
        else:
            mmin = 5.75

        if zgroup == pr:
            if pt.within(poly) and sd['mag'] >= mmin and sd['rhyp'] > 500 and sd['rhyp'] < 1500 \
               and sd['network'] != 'OA' and sd['network'] != 'GE' \
               and sd['sta'] != 'COEN' and sd['sta'] != 'MTSU':
                idx.append(i)

                # get site vs30
                vs30 = get_station_vs30(sd['sta'])[2]  # use USGS
                if isnan(vs30):
                    vs30 = 450

                # get PGM residuals
                pgmTrue = True
                A19imt = calc_nac_gmm_spectra(sd['mag'], sd['rhyp'], sd['dep'], vs30, zgroup, pgmTrue)
                lnAmpPGM = array([log(sd['pgv']), log(sd['pga'])])
                stdict[i]['lnResPGM'] = lnAmpPGM - A19imt['sa']

                # get SA residuals
                pgmTrue = False
                A19imt = calc_nac_gmm_spectra(sd['mag'], sd['rhyp'], sd['dep'], vs30, zgroup,
    psafile = path.join(root, filename)
elif filename.find('HHH') >= 0:
    psafile = path.join(root, filename)

# get record details
print(stn, psafile)
sta, sps, rhyp, pga, pgv, mag, dep, stlo, stla = read_psa_details(psafile)
ztor = 100  # dummy
dip = 30    # dummy
rake = -90

# now plot
if stn != 'CDNM.HNH':
    i += 1
    print('rhyp', rhyp)

    # get site vs30 and fall back to 760 m/s if undefined
    vs30 = get_station_vs30(stn)[2]  # use USGS
    if isnan(vs30):
        vs30 = 760

    datestr = psafile.split('/')[-1].split('T')[0]
    makesubplt(i, fig, plt, stn, sps, mag, dep, ztor, dip, rake, rhyp, vs30, datestr)

    # set y-axis limits by hypocentral distance
    if ii == 1:
        if rhyp <= 900:
            plt.ylim([1e-4, .1])
        else:
            plt.ylim([2e-5, 0.01])
    elif rhyp > 1000.:
        plt.ylim([2e-5, 0.01])
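# The "USGS proxy vs30 with a fallback when the value is NaN" pattern appears
# in several of these scripts (fallbacks of 450 and 760 m/s above); a small
# helper like this hypothetical one could replace the copy-pasted logic.
# The name get_usgs_vs30 and its default argument are illustrative, not from
# the original source.
from numpy import isnan

def get_usgs_vs30(sta, default=760.):
    # index 2 of the get_station_vs30() return tuple is the USGS proxy value
    vs30 = get_station_vs30(sta)[2]
    if isnan(vs30):
        vs30 = default
    return vs30

# e.g. vs30 = get_usgs_vs30(stn, default=760.)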
lines = open(wfcsv).readlines()

# parse event IDs and station codes from the waveform csv
events = []
magnitudes = []
sites = []
for line in lines[1:]:
    dat = line.strip().split(',')
    events.append(dat[1])
    sites.append(dat[9])

unique_sites = unique(array(sites))

# build station vs30 table
txt = 'STA,STLO,STLA,M17,WA07,Kea15\n'
for sta in unique_sites:
    vs30, isproxy, usgsvs, asscmvs, kvs, stla, stlo = get_station_vs30(sta)
    txt += ','.join((sta, str('%0.3f' % stlo), str('%0.3f' % stla), \
                     str(round(asscmvs)), str(round(usgsvs)), str(round(kvs)))) + '\n'

# now write
f = open('sta_vs30_list.csv', 'w')
f.write(txt)
f.close()
'''

# get unique events
unique_events, unique_idx = unique(datetime_list, return_index=True)
umags = mags[unique_idx]
ueqla = eqla[unique_idx]
ueqlo = eqlo[unique_idx]
ueqdp = eqdp[unique_idx]
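# A self-contained toy example (the data below are placeholders, not real
# events) of the numpy.unique(..., return_index=True) pattern used above: the
# returned indices point at the first occurrence of each datetime string, so
# parallel arrays such as magnitudes can be subset consistently.
from numpy import array, unique

demo_dt = array(['2000-01-01T00:00', '2000-01-01T00:00', '2000-02-02T12:00'])
demo_mag = array([5.8, 5.8, 6.1])

udt, uidx = unique(demo_dt, return_index=True)
print(udt)             # ['2000-01-01T00:00' '2000-02-02T12:00']
print(demo_mag[uidx])  # [5.8 6.1]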