def test_rupture_from_dict_basic():
    """Round-trip Edge, Quad, and Point ruptures through their GeoJSON dicts.

    NOTE(review): this was originally named ``test_rupture_from_dict`` and
    was silently shadowed by the later, more complete definition of the same
    name (flake8 F811), so it never ran; renamed so both tests are collected.
    """
    # Grab an EdgeRupture
    origin = Origin({
        'id': 'test',
        'lat': 0,
        'lon': 0,
        'depth': 5.0,
        'mag': 7.0,
        'netid': 'us',
        'network': '',
        'locstring': '',
        'time': HistoricTime.utcfromtimestamp(time.time())
    })
    file = os.path.join(homedir, 'rupture_data/cascadia.json')
    rup_original = get_rupture(origin, file)
    d = rup_original._geojson
    rup_from_dict = rupture_from_dict(d)
    assert rup_from_dict._mesh_dx == 0.5

    # Specify mesh_dx
    rup_original = get_rupture(origin, file, mesh_dx=1.0)
    d = rup_original._geojson
    rup_from_dict = rupture_from_dict(d)
    assert rup_from_dict._mesh_dx == 1.0

    # Quad rupture
    file = os.path.join(homedir, 'rupture_data/izmit.json')
    rup_original = get_rupture(origin, file)
    d = rup_original._geojson
    rup_from_dict = rupture_from_dict(d)
    assert rup_from_dict.getArea() == rup_original.getArea()
    # Note, there's a bit of an inconsistency highlighted here because
    # magnitude has key 'magnitude' in the izmit file, but 'mag' in
    # the origin and both get retained.

    # Point rupture
    origin = Origin({
        'id': 'test',
        'lon': -122.5,
        'lat': 37.3,
        'depth': 5.0,
        'mag': 7.0,
        'netid': 'us',
        'network': '',
        'locstring': '',
        'time': HistoricTime.utcfromtimestamp(time.time())
    })
    rup_original = get_rupture(origin)
    d = rup_original._geojson
    rup_from_dict = rupture_from_dict(d)
    assert rup_from_dict.lats == 37.3
    assert rup_from_dict.lons == -122.5
def test_slip_basic():
    """Spot-check get_quad_slip and get_local_unit_slip_vector.

    NOTE(review): originally defined as ``test_slip`` and silently shadowed
    by the later, fuller ``test_slip`` definition (flake8 F811), so it never
    ran; renamed so both tests are collected.
    """
    # Rupture requires an origin even when not used:
    origin = Origin({
        'id': 'test',
        'lon': 0,
        'lat': 0,
        'depth': 5.0,
        'mag': 7.0,
        'netid': 'us',
        'network': '',
        'locstring': '',
        'time': HistoricTime.utcfromtimestamp(time.time())
    })

    # Make a rupture
    lat0 = np.array([34.1])
    lon0 = np.array([-118.2])
    lat1 = np.array([34.2])
    lon1 = np.array([-118.15])
    z = np.array([1.0])
    W = np.array([3.0])
    dip = np.array([30.])
    rup = QuadRupture.fromTrace(lon0, lat0, lon1, lat1, z, W, dip, origin)

    slp = get_quad_slip(rup.getQuadrilaterals()[0], 30).getArray()
    slpd = np.array([0.80816457, 0.25350787, 0.53160491])
    np.testing.assert_allclose(slp, slpd)

    slp = get_local_unit_slip_vector(22, 30, 86).getArray()
    slpd = np.array([0.82714003, 0.38830563, 0.49878203])
    np.testing.assert_allclose(slp, slpd)
def test_slip():
    """Verify slip, strike, and down-dip vectors plus rake-to-mechanism."""
    # Rupture requires an origin even when not used:
    origin = Origin({
        'id': 'test',
        'lon': 0,
        'lat': 0,
        'depth': 5.0,
        'mag': 7.0,
        'netid': 'us',
        'network': '',
        'locstring': '',
        'time': HistoricTime.utcfromtimestamp(time.time())
    })

    # Build a single-quad rupture from its surface trace.
    lon0, lat0 = np.array([-118.2]), np.array([34.1])
    lon1, lat1 = np.array([-118.15]), np.array([34.2])
    z = np.array([1.0])
    W = np.array([3.0])
    dip = np.array([30.])
    rup = QuadRupture.fromTrace(lon0, lat0, lon1, lat1, z, W, dip, origin)
    quad = rup.getQuadrilaterals()[0]

    # Quad-derived unit vectors.
    np.testing.assert_allclose(
        get_quad_slip(quad, 30).getArray(),
        np.array([0.80816457, 0.25350787, 0.53160491]))
    np.testing.assert_allclose(
        get_quad_strike_vector(quad).getArray(),
        np.array([0.58311969, 0.27569625, 0.76417472]))
    np.testing.assert_allclose(
        get_quad_down_dip_vector(quad).getArray(),
        np.array([0.81219873, -0.17763484, -0.55567895]))

    # Local unit slip vectors from strike/dip/rake.
    np.testing.assert_allclose(
        get_local_unit_slip_vector(22, 30, 86).getArray(),
        np.array([0.82714003, 0.38830563, 0.49878203]))
    np.testing.assert_allclose(
        get_local_unit_slip_vector_DS(22, 30, -86).getArray(),
        np.array([-0.80100879, -0.32362856, -0.49878203]))
    np.testing.assert_allclose(
        get_local_unit_slip_vector_SS(22, 80, 5).getArray(),
        np.array([0.3731811, 0.92365564, 0.]))

    # Rake angle to mechanism string.
    for rake, expected in [(-160, 'SS'), (0, 'SS'), (160, 'SS'),
                           (-80, 'NM'), (80, 'RS')]:
        assert rake_to_mech(rake) == expected
def test_fromTrace():
    """Exercise QuadRupture.fromTrace on single- and multi-segment traces."""
    # Rupture requires an origin even when not used:
    origin = Origin({
        'id': 'test',
        'lon': 0,
        'lat': 0,
        'depth': 5.0,
        'mag': 7.0,
        'netid': 'us',
        'network': '',
        'locstring': '',
        'time': HistoricTime.utcfromtimestamp(time.time())
    })

    # Single near-vertical segment; also exercise the text writer.
    rupture = QuadRupture.fromTrace(
        [0.0], [0.0], [0.0], [0.05], [0.0], [10.0], [45.0], origin,
        reference='From J Smith, (personal communication)')
    fstr = io.StringIO()
    rupture.writeTextFile(fstr)

    # Two segments with differing widths and dips.
    xp0 = [-121.81529, -121.82298]
    xp1 = [-121.82298, -121.83068]
    yp0 = [37.73707, 37.74233]
    yp1 = [37.74233, 37.74758]
    zp = [10, 15]
    widths = [15.0, 20.0]
    dips = [30.0, 45.0]
    rupture = QuadRupture.fromTrace(
        xp0, yp0, xp1, yp1, zp, widths, dips, origin,
        reference='From J Smith, (personal communication)')
def test_northridge():
    """Check geometry properties of a single-quad Northridge rupture."""
    rupture_text = """# Source: Wald, D. J., T. H. Heaton, and K. W. Hudnut (1996). The Slip History of the 1994 Northridge, California, Earthquake Determined from Strong-Motion, Teleseismic, GPS, and Leveling Data, Bull. Seism. Soc. Am. 86, S49-S70.
    -118.421 34.315 5.000
    -118.587 34.401 5.000
    -118.693 34.261 20.427
    -118.527 34.175 20.427
    -118.421 34.315 5.000
    """  # noqa

    # Rupture requires an origin even when not used:
    origin = Origin({
        'id': 'test',
        'lon': 0,
        'lat': 0,
        'depth': 5.0,
        'mag': 7.0,
        'netid': 'us',
        'network': '',
        'locstring': '',
        'time': HistoricTime.utcfromtimestamp(time.time())
    })
    cbuf = io.StringIO(rupture_text)
    rupture = get_rupture(origin, cbuf)

    # Scalar geometry properties.
    np.testing.assert_allclose(rupture.getStrike(), 122.06, atol=0.01)
    np.testing.assert_allclose(rupture.getDip(), 40.21, atol=0.01)
    np.testing.assert_allclose(rupture.getLength(), 17.99, atol=0.01)
    np.testing.assert_allclose(rupture.getWidth(), 23.94, atol=0.01)
    np.testing.assert_allclose(rupture.getNumQuads(), 1)
    np.testing.assert_allclose(rupture.getNumGroups(), 1)
    np.testing.assert_allclose(rupture._getGroupIndex(), [0])
    np.testing.assert_allclose(rupture.getDepthToTop(), 5, atol=0.01)
    np.testing.assert_allclose(
        rupture.getIndividualTopLengths(), 17.99, atol=0.01)
    np.testing.assert_allclose(
        rupture.getIndividualWidths(), 23.94, atol=0.01)

    # Vertex arrays (closed ring terminated by NaN).
    lats_d = np.array([34.401, 34.315, 34.175, 34.261, 34.401, np.nan])
    np.testing.assert_allclose(rupture.lats, lats_d, atol=0.01)
    lons_d = np.array(
        [-118.587, -118.421, -118.527, -118.693, -118.587, np.nan])
    np.testing.assert_allclose(rupture.lons, lons_d, atol=0.01)
def test_incorrect():
    """A malformed multi-segment rupture file should raise an exception."""
    rupture_text = """# Source: Ji, C., D. V. Helmberger, D. J. Wald, and K.-F. Ma (2003). Slip history and dynamic implications of the 1999 Chi-Chi, Taiwan, earthquake, J. Geophys. Res. 108, 2412, doi:10.1029/2002JB001764.
    120.72300 24.27980 0
    121.00000 24.05000 17
    121.09300 24.07190 17
    121.04300 24.33120 17
    121.04300 24.33120 17
    120.72300 24.27980 0
    >
    120.72300 24.27980 0
    120.68000 23.70000 0
    120.97200 23.60400 17
    121.00000 24.05000 17
    120.72300 24.27980 0
    >
    120.97200 23.60400 17
    120.68000 23.70000 0
    120.58600 23.58850 0
    120.78900 23.40240 17
    120.97200 23.60400 17"""  # noqa

    # Rupture requires an origin even when not used:
    origin = Origin({
        'id': 'test',
        'lon': 0,
        'lat': 0,
        'depth': 5.0,
        'mag': 7.0,
        'netid': 'us',
        'network': '',
        'locstring': '',
        'time': HistoricTime.utcfromtimestamp(time.time())
    })
    cbuf = io.StringIO(rupture_text)
    with pytest.raises(Exception):
        get_rupture(origin, cbuf)
def read_event_file(eventxml):
    """
    Read event.xml file from disk, returning a dictionary of attributes.
    Input XML format looks like this:

    .. code-block:: xml

         <earthquake id="2008ryan " lat="30.9858" lon="103.3639" mag="7.9"
         year="2008" month="05" day="12" hour="06" minute="28" second="01"
         timezone="GMT" depth="19.0" locstring="EASTERN SICHUAN, CHINA"
         created="1211173621" otime="1210573681" type="" />

    Args:
        eventxml (str): Path to event XML file OR file-like object.

    Returns:
        dict: Dictionary with keys:
            - eventsourcecode Origin network and origin code (i.e.,
              us2017abcd).
            - eventsource Origin network ("us").
            - time Origin time as an HistoricTime object.
            - lat Origin latitude
            - lon Origin longitude
            - depth Origin depth
            - mag Origin magnitude
            - created Process time as an HistoricTime object.
            - locstring Location string
            - mechanism Moment mechanism, one of:
              - 'RS' (Reverse)
              - 'SS' (Strike-Slip)
              - 'NM' (Normal)
              - 'ALL' (Undetermined)
    """
    # Default mechanism/rake/dip values may be filled in later (from
    # event.xml, source.txt, or values passed in after reading input data).
    if isinstance(eventxml, str):
        root = minidom.parse(eventxml)
    else:
        data = eventxml.read()
        root = minidom.parseString(data)

    # Turn XML content into dictionary
    eq = root.getElementsByTagName('earthquake')[0]
    xmldict = dict(eq.attributes.items())
    root.unlink()

    eqdict = {}
    eqdict['eventsourcecode'] = xmldict['id']
    if 'network' in xmldict:
        eqdict['eventsource'] = xmldict['network']
    else:
        eqdict['eventsource'] = 'us'  # ??

    # look for the productcode attribute
    if 'productcode' in xmldict:
        eqdict['productcode'] = xmldict['productcode']

    # fix eventsourcecode if not specified correctly
    if not eqdict['eventsourcecode'].startswith(eqdict['eventsource']):
        eqdict['eventsourcecode'] = (eqdict['eventsource'] +
                                     eqdict['eventsourcecode'])

    year = int(xmldict['year'])
    month = int(xmldict['month'])
    day = int(xmldict['day'])
    hour = int(xmldict['hour'])
    minute = int(xmldict['minute'])
    # BUG FIX: the original parsed seconds with int(), so the microseconds
    # expression (second - int(second)) was always zero and any fractional
    # second was lost. Parse as float and carry the fraction forward.
    second = float(xmldict['second'])
    microseconds = min(int(round((second - int(second)) * 1e6)), 999999)
    eqdict['time'] = HistoricTime(year, month, day, hour, minute,
                                  int(second), microseconds)

    eqdict['lat'] = float(xmldict['lat'])
    eqdict['lon'] = float(xmldict['lon'])
    eqdict['depth'] = float(xmldict['depth'])
    eqdict['mag'] = float(xmldict['mag'])

    # make created field in event.xml optional - set to current UTC time if
    # not supplied.
    if 'created' in xmldict:
        eqdict['created'] = HistoricTime.utcfromtimestamp(
            int(xmldict['created']))
    else:
        eqdict['created'] = HistoricTime.utcnow()

    eqdict['locstring'] = xmldict['locstring']
    if 'mech' in xmldict:
        eqdict['mech'] = xmldict['mech']
    return eqdict
def test_ss3_move_hypo1():
    """Pseudo-hypocenter placement for vertical SS ruptures (Bayless 2013)."""
    magnitude = 7.2
    dip = np.array([90])
    rake = 180.0
    width = np.array([15])
    rupx = np.array([0, 0])
    rupy = np.array([0, 80])
    zp = np.array([0.0])
    epix = np.array([1.0])
    epiy = np.array([-1.0])

    # Convert to lat/lon
    proj = geo.utils.get_orthographic_projection(-122, -120, 39, 37)
    tlon, tlat = proj(rupx, rupy, reverse=True)
    epilon, epilat = proj(epix, epiy, reverse=True)

    # Origin
    origin = Origin({'lat': epilat[0],
                     'lon': epilon[0],
                     'depth': -1,
                     'mag': magnitude,
                     'eventsourcecode': 'ss3',
                     'rake': rake})

    rup = QuadRupture.fromTrace(
        np.array([tlon[0]]), np.array([tlat[0]]),
        np.array([tlon[1]]), np.array([tlat[1]]),
        zp, width, dip, origin, reference='ss3')

    x = np.linspace(0, 20, 6)
    y = np.linspace(0, 90, 11)
    site_x, site_y = np.meshgrid(x, y)
    slon, slat = proj(site_x, site_y, reverse=True)
    deps = np.zeros_like(slon)

    test1 = Bayless2013(origin, rup, slat, slon, deps, T=1.0)
    phyp = copy.deepcopy(test1.phyp[0])
    plat, plon, pdep = ecef2latlon(phyp.x, phyp.y, phyp.z)
    np.testing.assert_allclose(plat, 38.004233219183604, rtol=1e-4)
    np.testing.assert_allclose(plon, -120.98636122402166, rtol=1e-4)
    np.testing.assert_allclose(pdep, 7.4999999989205968, rtol=1e-4)

    # ----------------------------------------------------------------------
    # Also for multiple segments
    # ----------------------------------------------------------------------
    dip = np.array([90., 90., 90.])
    rake = 180.0
    width = np.array([15., 15., 10.])
    rupx = np.array([0., 0., 10., 20.])
    rupy = np.array([0., 20., 60., 80.])
    zp = np.array([0., 0., 0.])
    epix = np.array([0.])
    epiy = np.array([0.])

    # Convert to lat/lon
    proj = geo.utils.get_orthographic_projection(-122, -120, 39, 37)
    tlon, tlat = proj(rupx, rupy, reverse=True)
    epilon, epilat = proj(epix, epiy, reverse=True)

    rup = QuadRupture.fromTrace(
        np.array(tlon[0:3]), np.array(tlat[0:3]),
        np.array(tlon[1:4]), np.array(tlat[1:4]),
        zp, width, dip, origin, reference='')

    event = {'lat': epilat[0],
             'lon': epilon[0],
             'depth': 1.0,
             'mag': magnitude,
             'eventsourcecode': '',
             'locstring': 'test',
             'type': 'SS',
             'timezone': 'UTC'}
    event['time'] = HistoricTime.utcfromtimestamp(int(time.time()))
    event['created'] = HistoricTime.utcfromtimestamp(int(time.time()))

    x = np.linspace(0, 20, 6)
    y = np.linspace(0, 90, 11)
    site_x, site_y = np.meshgrid(x, y)
    slon, slat = proj(site_x, site_y, reverse=True)
    deps = np.zeros_like(slon)

    origin = Origin(event)
    origin.rake = rake
    test1 = Bayless2013(origin, rup, slat, slon, deps, T=1.0)

    # Expected (lat, lon, dep) for each of the three pseudo-hypocenters.
    expected = [
        (38.004233219183604, -120.98636122402166, 7.4999999989205968),
        (38.184097835787796, -120.98636122402166, 7.4999999989103525),
        (38.543778594535752, -120.87137783362499, 4.9999999995063993),
    ]
    for k, (elat, elon, edep) in enumerate(expected):
        phyp = copy.deepcopy(test1.phyp[k])
        plat, plon, pdep = ecef2latlon(phyp.x, phyp.y, phyp.z)
        np.testing.assert_allclose(plat, elat, rtol=1e-4)
        np.testing.assert_allclose(plon, elon, rtol=1e-4)
        np.testing.assert_allclose(pdep, edep, rtol=1e-4)
def test_rupture_from_dict():
    """Round-trip ruptures through GeoJSON dicts; check point-source
    defaults and Rjb/Rrup for mech/tectonic-region combinations."""

    def _assert_distances(rup, expected):
        # Compute Rjb/Rrup at a fixed site and compare with expected values.
        lon = np.array([-122.0])
        lat = np.array([37.0])
        dep = np.array([0.0])
        rjb, _ = rup.computeRjb(lon, lat, dep)
        rrup, _ = rup.computeRrup(lon, lat, dep)
        np.testing.assert_allclose([rjb[0], rrup[0]], expected)

    # Grab an EdgeRupture
    origin = Origin({
        'id': 'test',
        'lat': 0,
        'lon': 0,
        'depth': 5.0,
        'mag': 7.0,
        'netid': 'us',
        'network': '',
        'locstring': '',
        'time': HistoricTime.utcfromtimestamp(time.time())
    })
    file = os.path.join(homedir, 'rupture_data/cascadia.json')
    rup_original = get_rupture(origin, file)
    rup_from_dict = rupture_from_dict(rup_original._geojson)
    assert rup_from_dict._mesh_dx == 0.5

    # Specify mesh_dx
    rup_original = get_rupture(origin, file, mesh_dx=1.0)
    rup_from_dict = rupture_from_dict(rup_original._geojson)
    assert rup_from_dict._mesh_dx == 1.0

    # Quad rupture
    file = os.path.join(homedir, 'rupture_data/izmit.json')
    rup_original = get_rupture(origin, file)
    rup_from_dict = rupture_from_dict(rup_original._geojson)
    assert rup_from_dict.getArea() == rup_original.getArea()
    # Note, there's a bit of an inconsistency highlighted here because
    # magnitude has key 'magnitude' in the izmit file, but 'mag' in
    # the origin and both get retained.

    # Point rupture from json
    file = os.path.join(homedir, 'rupture_data/point.json')
    rup = get_rupture(origin, file)
    assert rup.lats == 0
    assert rup.lons == 0

    # Point rupture
    origin = Origin({
        'id': 'test',
        'lon': -122.5,
        'lat': 37.3,
        'depth': 5.0,
        'mag': 7.0,
        'netid': 'us',
        'network': '',
        'locstring': '',
        'time': HistoricTime.utcfromtimestamp(time.time())
    })
    rup_original = get_rupture(origin)
    rup_from_dict = rupture_from_dict(rup_original._geojson)
    assert rup_from_dict.lats == 37.3
    assert rup_from_dict.lons == -122.5

    # Point-source default geometry values.
    assert rup_original.getLength() is None
    assert rup_original.getWidth() == constants.DEFAULT_WIDTH
    assert rup_original.getArea() is None
    assert rup_original.getStrike() == constants.DEFAULT_STRIKE
    assert rup_original.getDip() == constants.DEFAULT_DIP
    assert rup_original.getDepthToTop() == constants.DEFAULT_ZTOR
    assert rup_original.getQuadrilaterals() is None
    assert rup_original.depths == 5.0

    # No mech, no tectonic region
    _assert_distances(rup_original, [41.11182253, 42.73956168])

    # Various combinations of mech and tectonic region...
    cases = [
        ('Active Shallow Crust', 'ALL', [41.11182253, 42.73956168]),
        ('Active Shallow Crust', 'RS', [39.17479645, 41.20916362]),
        ('Active Shallow Crust', 'NM', [39.85641875, 41.89222387]),
        ('Active Shallow Crust', 'SS', [43.21392667, 44.04215406]),
        ('Stable Shallow Crust', 'ALL', [42.68382206, 43.71213495]),
        ('Stable Shallow Crust', 'RS', [42.29766584, 43.51422441]),
        ('Stable Shallow Crust', 'NM', [42.57075149, 43.7987126]),
        ('Stable Shallow Crust', 'SS', [44.19126409, 45.02525107]),
        ('Somewhere Else', 'ALL', [41.11182253, 42.73956168]),
    ]
    for region, mech, expected in cases:
        rup_original._origin._tectonic_region = region
        rup_original._origin.mech = mech
        _assert_distances(rup_original, expected)

    # This is just zeroes now, so there's not much to check
    gc2 = rup_original.computeGC2(np.array([-122.0]), np.array([37.0]),
                                  np.array([0.0]))
    assert gc2['rx'][0] == 0
def parse_json_shakemap(rupts, args):
    """
    This parses a json format that is basically a list of rupture.json
    formats, are multipolygon features, and very similar to the ShakeMap 3.5
    fault file format. In this format, each corner is specified.

    Todo:
        - Support specification without a rupture (PointRupture).

    Args:
        rupts (dict): Python translation of rupture json file using
            json.load method.
        args (ArgumentParser): argparse object.

    Returns:
        dict: Dictionary of rupture information.
    """
    rlist = []
    nrup = len(rupts['events'])

    # FIX: locals were named ``iter`` and ``id``, shadowing builtins;
    # renamed to ``indices`` and ``event_id``.
    if args.index is not None:
        indices = map(int, args.index)
    else:
        indices = range(nrup)

    for i in indices:
        metadata = rupts['events'][i]['metadata']
        event_name = metadata['locstring']
        short_name = metadata['locstring']
        event_id = metadata['id']
        magnitude = metadata['mag']
        rake = metadata.get('rake', None)

        # Does the event include a rupture model?
        geometry = rupts['events'][i]['features'][0]['geometry']
        if geometry['type'] == "MultiPolygon":
            # Dummy origin
            origin = Origin({
                'mag': 0,
                'id': '',
                'lat': 0,
                'lon': 0,
                'depth': 0
            })
            rupt = json_to_rupture(rupts['events'][i], origin)
            quads = rupt.getQuadrilaterals()
            edges = get_rupture_edges(quads)  # for hypo placement
            hlat, hlon, hdepth = get_hypo(edges, args)
        else:
            rupt = None
            edges = None
            quads = None
            hlon = float(geometry['coordinates'][0])
            hlat = float(geometry['coordinates'][1])
            hdepth = float(geometry['coordinates'][2])

        id_str, eventsourcecode, real_desc = get_event_id(
            event_name, magnitude, args.directivity, args.dirind,
            quads, id=event_id)

        event = {
            'lat': hlat,
            'lon': hlon,
            'depth': hdepth,
            'mag': magnitude,
            'rake': rake,
            'id': id_str,
            'locstring': event_name,
            'type': 'ALL',
            'timezone': 'UTC',
            'time': ShakeDateTime.utcfromtimestamp(int(time.time())),
            'created': ShakeDateTime.utcfromtimestamp(int(time.time()))
        }

        # Update rupture with new origin info
        if rupt is not None:
            origin = Origin(event)
            rupt = json_to_rupture(rupts['events'][i], origin)

        rdict = {
            'rupture': rupt,
            'event': event,
            'edges': edges,
            'id_str': id_str,
            'short_name': short_name,
            'real_desc': real_desc,
            'eventsourcecode': eventsourcecode
        }
        rlist.append(rdict)

    return rlist
def parse_json_nshmp_sub(rupts, args):
    """
    This is an alternative version of parse_json to use with the Cascadia
    subduction zone JSON rupture file.

    Args:
        rupts (dict): Python translation of rupture json file using
            json.load method.
        args (ArgumentParser): argparse object.

    Returns:
        dict: Dictionary of rupture information.
    """
    rlist = []
    nrup = len(rupts['events'])

    # FIX: locals were named ``iter`` and ``id``, shadowing builtins;
    # renamed to ``indices`` and ``event_id``.
    if args.index is not None:
        indices = map(int, args.index)
    else:
        indices = range(nrup)

    for i in indices:
        ev = rupts['events'][i]
        event_name = ev['desc']
        short_name = event_name.split('.xls')[0]
        event_id = ev['id']
        magnitude = ev['mag']
        rake = ev.get('rake', None)

        toplons = np.array(ev['toplons'])
        toplats = np.array(ev['toplats'])
        topdeps = np.array(ev['topdeps'])
        botlons = np.array(ev['botlons'])
        botlats = np.array(ev['botlats'])
        botdeps = np.array(ev['botdeps'])

        # Dummy origin
        origin = Origin({'mag': 0, 'id': '', 'lat': 0, 'lon': 0, 'depth': 0})

        rupt = EdgeRupture.fromArrays(
            toplons=toplons, toplats=toplats, topdeps=topdeps,
            botlons=botlons, botlats=botlats, botdeps=botdeps,
            origin=origin, reference=args.reference)
        rupt._segment_index = np.zeros_like(toplons)

        quads = rupt.getQuadrilaterals()
        edges = get_rupture_edges(quads)  # for hypo placement
        hlat, hlon, hdepth = get_hypo(edges, args)

        id_str, eventsourcecode, real_desc = get_event_id(
            event_name, magnitude, args.directivity, args.dirind,
            quads, id=event_id)

        event = {
            'lat': hlat,
            'lon': hlon,
            'depth': hdepth,
            'mag': magnitude,
            'rake': rake,
            'id': id_str,
            'locstring': event_name,
            'type': 'ALL',  # overwrite later
            'timezone': 'UTC',
            'time': ShakeDateTime.utcfromtimestamp(int(time.time())),
            'created': ShakeDateTime.utcfromtimestamp(int(time.time()))
        }

        # Update rupture with new origin info. ``rupt`` is always
        # constructed above, so no None guard is needed here.
        origin = Origin(event)
        rupt = EdgeRupture.fromArrays(
            toplons=toplons, toplats=toplats, topdeps=topdeps,
            botlons=botlons, botlats=botlats, botdeps=botdeps,
            origin=origin, reference=args.reference)

        rdict = {
            'rupture': rupt,
            'event': event,
            'edges': edges,
            'id_str': id_str,
            'short_name': short_name,
            'real_desc': real_desc,
            'eventsourcecode': eventsourcecode
        }
        rlist.append(rdict)

    return rlist
def parse_json_nshmp(rupts, args):
    """
    This will hopefully be the most general json format for ruptures.

    Assumes top of rupture is horizontal and continuous, and that there is
    only one segment per rupture (but multiple quads). Uses first and last
    point to get average strike, which is used for all quads.

    Args:
        rupts (dict): Python translation of rupture json file using
            json.load method.
        args (ArgumentParser): argparse object.

    Returns:
        dict: Dictionary of rupture information.
    """
    rlist = []
    nrup = len(rupts['events'])

    # FIX: locals were named ``iter`` and ``id``, shadowing builtins;
    # renamed to ``indices`` and ``event_id``.
    if args.index is not None:
        indices = map(int, args.index)
    else:
        indices = range(nrup)

    for i in indices:
        ev = rupts['events'][i]
        event_name = ev['desc']
        short_name = event_name.split('.xls')[0]
        event_id = ev['id']
        magnitude = ev['mag']
        rake = ev.get('rake', np.nan)

        # Does the file include a rupture model?
        if len(ev['lats']) > 1:
            dip = ev['dip']
            width = ev['width']
            ztor = ev['ztor']
            lons = ev['lons']
            lats = ev['lats']
            xp0 = np.array(lons[:-1])
            xp1 = np.array(lons[1:])
            yp0 = np.array(lats[:-1])
            yp1 = np.array(lats[1:])
            zp = np.ones_like(xp0) * ztor
            dips = np.ones_like(xp0) * dip
            widths = np.ones_like(xp0) * width
            P1 = geo.point.Point(lons[0], lats[0])
            P2 = geo.point.Point(lons[-1], lats[-1])
            strike = np.array([P1.azimuth(P2)])

            # Dummy origin
            origin = Origin({
                'mag': 0,
                'id': '',
                'lat': 0,
                'lon': 0,
                'depth': 0
            })

            rupt = QuadRupture.fromTrace(
                xp0, yp0, xp1, yp1, zp, widths, dips, origin,
                strike=strike, reference=args.reference)

            quads = rupt.getQuadrilaterals()
            edges = get_rupture_edges(quads)  # for hypo placement
            hlat, hlon, hdepth = get_hypo(edges, args)
        else:
            rupt = None
            edges = None
            # BUG FIX: ``quads`` and ``hdepth`` were left undefined on this
            # branch in the original code, raising NameError for
            # point-source events. Pass quads=None (as parse_json_shakemap
            # does) and default the hypocentral depth to 0.0 —
            # TODO(review): confirm whether the JSON schema carries an
            # explicit depth for point sources.
            quads = None
            hlat = float(ev['lats'][0])
            hlon = float(ev['lons'][0])
            hdepth = 0.0

        id_str, eventsourcecode, real_desc = get_event_id(
            event_name, magnitude, args.directivity, args.dirind,
            quads, id=event_id)

        event = {
            'lat': hlat,
            'lon': hlon,
            'depth': hdepth,
            'mag': magnitude,
            'rake': rake,
            'id': id_str,
            'locstring': event_name,
            'type': 'ALL',
            'timezone': 'UTC',
            'time': ShakeDateTime.utcfromtimestamp(int(time.time())),
            'created': ShakeDateTime.utcfromtimestamp(int(time.time()))
        }

        # Update rupture with new origin info
        if rupt is not None:
            origin = Origin(event)
            rupt = QuadRupture.fromTrace(
                xp0, yp0, xp1, yp1, zp, widths, dips, origin,
                strike=strike, reference=args.reference)

        rdict = {
            'rupture': rupt,
            'event': event,
            'edges': edges,
            'id_str': id_str,
            'short_name': short_name,
            'real_desc': real_desc,
            'eventsourcecode': eventsourcecode
        }
        rlist.append(rdict)

    return rlist
def parse_bssc2014_ucerf(rupts, args):
    """
    Parse the UCERF3 json file format. The ruptures in UCERF3 are very
    complex and so we don't expect to get other rupture lists in this
    format.

    Args:
        rupts (dict): Python translation of rupture json file using
            json.load method.
        args (ArgumentParser): argparse object.

    Returns:
        dict: Dictionary of rupture information.
    """
    rlist = []
    nrup = len(rupts['events'])

    if args.index is not None:
        indices = map(int, args.index)
    else:
        indices = range(nrup)

    for i in indices:
        event_name = rupts['events'][i]['name']
        short_name = event_name.split('EllB')[0].split('Shaw09')[0].split(
            '2011')[0].split('HB08')[0].rstrip()
        magnitude = rupts['events'][i]['magnitude']
        rake = rupts['events'][i]['rake']

        sections = np.array(rupts['events'][i]['sections'])
        nsections = len(sections)

        # Accumulators for the per-quad trace/geometry arrays; np.append
        # flattens these into 1-d arrays as sections are concatenated.
        secind = 0
        new_seg_ind = []
        rev = np.array([[]])
        xp0 = np.array([[]])
        xp1 = np.array([[]])
        yp0 = np.array([[]])
        yp1 = np.array([[]])
        zp = np.array([[]])
        dip_sec = np.array([[]])
        strike_sec = np.array([[]])
        width_sec = np.array([[]])

        for j in range(nsections):
            trace_sec = np.array(sections[j]['resampledTrace'])
            top_sec_lon = trace_sec[:, 0]
            top_sec_lat = trace_sec[:, 1]
            top_sec_z = trace_sec[:, 2]
            n_sec_trace = len(trace_sec) - 1

            dip_sec = np.append(
                dip_sec, np.repeat(sections[j]['dip'], n_sec_trace))
            dipDir_sec = np.repeat(sections[j]['dipDir'], n_sec_trace)
            strike_sec = np.append(strike_sec, dipDir_sec - 90)
            width_sec = np.append(
                width_sec, np.repeat(sections[j]['width'], n_sec_trace))

            rev_sec = sections[j]['reversed']
            rev = np.append(rev, np.repeat(rev_sec, n_sec_trace))

            # Consecutive endpoint pairs along the resampled top trace.
            xp0_sec = top_sec_lon[:-1]
            xp1_sec = top_sec_lon[1:]
            yp0_sec = top_sec_lat[:-1]
            yp1_sec = top_sec_lat[1:]
            zp_sec = top_sec_z[:-1]

            if rev_sec is False:
                xp0 = np.append(xp0, xp0_sec)
                xp1 = np.append(xp1, xp1_sec)
                yp0 = np.append(yp0, yp0_sec)
                yp1 = np.append(yp1, yp1_sec)
                zp = np.append(zp, zp_sec)
            else:
                # Reversed section: swap endpoints and traverse backwards.
                xp0 = np.append(xp0, xp1_sec[::-1])
                xp1 = np.append(xp1, xp0_sec[::-1])
                yp0 = np.append(yp0, yp1_sec[::-1])
                yp1 = np.append(yp1, yp0_sec[::-1])
                zp = np.append(zp, zp_sec[::-1])

            new_seg_ind.extend([secind] * n_sec_trace)
            secind = secind + 1

        # Origin
        origin = Origin({'mag': 0, 'id': '', 'lat': 0, 'lon': 0, 'depth': 0})

        rupt = QuadRupture.fromTrace(
            xp0, yp0, xp1, yp1, zp, width_sec, dip_sec, origin,
            strike=strike_sec, group_index=new_seg_ind,
            reference=args.reference)

        quads = rupt.getQuadrilaterals()
        edges = get_rupture_edges(quads, rev)
        hlat, hlon, hdepth = get_hypo(edges, args)

        id_str, eventsourcecode, real_desc = get_event_id(
            event_name, magnitude, args.directivity, args.dirind, quads)

        event = {
            'lat': hlat,
            'lon': hlon,
            'depth': hdepth,
            'mag': magnitude,
            'rake': rake,
            'id': id_str,
            'locstring': event_name,
            'type': 'ALL',
            'timezone': 'UTC',
            'time': ShakeDateTime.utcfromtimestamp(int(time.time())),
            'created': ShakeDateTime.utcfromtimestamp(int(time.time()))
        }

        # Update rupture with new origin info
        origin = Origin(event)
        rupt = QuadRupture.fromTrace(
            xp0, yp0, xp1, yp1, zp, width_sec, dip_sec, origin,
            strike=strike_sec, group_index=new_seg_ind,
            reference=args.reference)

        rdict = {
            'rupture': rupt,
            'event': event,
            'edges': edges,
            'id_str': id_str,
            'short_name': short_name,
            'real_desc': real_desc,
            'eventsourcecode': eventsourcecode
        }
        rlist.append(rdict)

    return rlist