def test_event(self):
    """
    Tests event Web service interface.

    Each check issues the same query twice -- once as a raw HTTP request
    and once through the client -- and verifies both replies are
    byte-identical.  Examples are inspired by http://www.iris.edu/ws/event/.
    """
    client = Client()
    # 1 - filter by depth, catalog, contributor, magnitude type and a
    # radial search region
    url = "http://www.iris.edu/ws/event/query?mindepth=34.9&" + \
        "maxdepth=35.1&catalog=NEIC%20PDE&contributor=NEIC%20PDE-Q&" + \
        "magtype=MB&lat=-56.1&lon=-26.7&maxradius=1"
    # direct call
    doc1 = urllib.urlopen(url).read()
    # using client
    doc2 = client.event(mindepth=34.9, maxdepth=35.1, catalog="NEIC PDE",
                        contributor="NEIC PDE-Q", magtype="MB", lat=-56.1,
                        lon=-26.7, maxradius=1)
    self.assertEqual(doc1, doc2)
    client = Client()
    # 2 - request a single event by its event id
    # (a verbatim duplicate of this check used to follow here; the
    # redundant second request has been removed)
    url = "http://www.iris.edu/ws/event/query?eventid=3316989"
    # direct call
    doc1 = urllib.urlopen(url).read()
    # using client
    doc2 = client.event(eventid=3316989)
    self.assertEqual(doc1, doc2)
    # 3 - filter by minimum magnitude only
    url = "http://www.iris.edu/ws/event/query?minmag=8.5"
    # direct call
    doc1 = urllib.urlopen(url).read()
    # using client
    doc2 = client.event(minmag=8.5)
    self.assertEqual(doc1, doc2)
    # 4 - combined time window, lat/lon box, preferred-origin and
    # magnitude options plus explicit ordering
    url = "http://www.iris.edu/ws/event/query?starttime=2011-01-07T" + \
        "14%3A00%3A00&endtime=2011-02-07&minlat=15&maxlat=40&" + \
        "minlon=-170&maxlon=170&preferredonly=yes&" + \
        "includeallmagnitudes=yes&orderby=magnitude"
    # direct call
    doc1 = urllib.urlopen(url).read()
    # using client
    doc2 = client.event(starttime=UTCDateTime(2011, 1, 7, 14),
                        endtime=UTCDateTime('2011-02-07'), minlat=15.0,
                        maxlat=40.0, minlon=-170, maxlon=170,
                        preferredonly=True, includeallmagnitudes=True,
                        orderby='magnitude')
    self.assertEqual(doc1, doc2)
def test_flinnengdahl(self):
    """
    Tests calculation of Flinn-Engdahl region code or name.

    Every request is issued twice: once with keyword arguments and once
    positionally, to cover both calling conventions.
    """
    client = Client()
    # code
    result = client.flinnengdahl(lat=-20.5, lon=-100.6, rtype="code")
    self.assertEqual(result, 683)
    # w/o kwargs
    result = client.flinnengdahl(-20.5, -100.6, "code")
    self.assertEqual(result, 683)
    # region
    result = client.flinnengdahl(lat=42, lon=-122.24, rtype="region")
    self.assertEqual(result, 'OREGON')
    # w/o kwargs
    result = client.flinnengdahl(42, -122.24, "region")
    self.assertEqual(result, 'OREGON')
    # both
    result = client.flinnengdahl(lat=-20.5, lon=-100.6, rtype="both")
    self.assertEqual(result, (683, 'SOUTHEAST CENTRAL PACIFIC OCEAN'))
    # w/o kwargs
    result = client.flinnengdahl(-20.5, -100.6, "both")
    self.assertEqual(result, (683, 'SOUTHEAST CENTRAL PACIFIC OCEAN'))
    # default rtype
    result = client.flinnengdahl(lat=42, lon=-122.24)
    self.assertEqual(result, (32, 'OREGON'))
    # w/o kwargs (this check was missing -- the comment had no code)
    result = client.flinnengdahl(42, -122.24)
    self.assertEqual(result, (32, 'OREGON'))
    # outside boundaries
    self.assertRaises(Exception, client.flinnengdahl, lat=-90.1, lon=0)
    self.assertRaises(Exception, client.flinnengdahl, lat=90.1, lon=0)
    self.assertRaises(Exception, client.flinnengdahl, lat=0, lon=-180.1)
    self.assertRaises(Exception, client.flinnengdahl, lat=0, lon=180.1)
def test_timeseries(self):
    """
    Tests timeseries Web service interface.

    Examples are inspired by http://www.iris.edu/ws/timeseries/.
    """
    client = Client()
    # 1 - fetch one minute of data twice: unfiltered and with the
    # instrument correction applied
    start = UTCDateTime("2005-001T00:00:00")
    end = UTCDateTime("2005-001T00:01:00")
    raw = client.timeseries("IU", "ANMO", "00", "BHZ", start, end)
    corrected = client.timeseries("IU", "ANMO", "00", "BHZ", start, end,
                                  filter=["correct"])
    # both traces must span the identical time window
    self.assertEquals(raw[0].stats.starttime, corrected[0].stats.starttime)
    self.assertEquals(raw[0].stats.endtime, corrected[0].stats.endtime)
    # the correction turns raw counts into small physical amplitudes
    self.assertEquals(raw[0].data[0], 24)
    self.assertAlmostEquals(corrected[0].data[0], -2.4910707e-06)
def test_saveResponse(self):
    """
    Fetches and stores response information as SEED RESP file.

    Covers all supported output formats: RESP (single and wildcarded
    channels), StationXML and SACPZ.  Network access is required; the
    RESP results are compared byte-wise against reference files shipped
    in the test data directory.
    """
    client = Client()
    start = UTCDateTime("2005-001T00:00:00")
    end = UTCDateTime("2008-001T00:00:00")
    # RESP, single channel
    origfile = os.path.join(self.path, 'data', 'RESP.ANMO.IU.00.BHZ')
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.saveResponse(tempfile, "IU", "ANMO", "00", "BHZ", start, end)
        # downloaded file must match the stored reference exactly
        self.assertTrue(filecmp.cmp(origfile, tempfile))
    # RESP, multiple channels (wildcards for location and channel)
    origfile = os.path.join(self.path, 'data', 'RESP.ANMO.IU._.BH_')
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.saveResponse(tempfile, "IU", "ANMO", "*", "BH?", start, end)
        self.assertTrue(filecmp.cmp(origfile, tempfile))
    # StationXML, single channel -- only spot-check a marker tag, the
    # full document contains request-time-dependent content
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.saveResponse(tempfile, "IU", "ANMO", "00", "BHZ", start, end,
                            format="StationXML")
        data = open(tempfile).read()
        self.assertTrue('<Station net_code="IU" sta_code="ANMO">' in data)
    # SACPZ, single channel -- likewise spot-check header lines only
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.saveResponse(tempfile, "IU", "ANMO", "00", "BHZ", start, end,
                            format="SACPZ")
        data = open(tempfile).read()
        self.assertTrue('NETWORK (KNETWK): IU' in data)
        self.assertTrue('STATION (KSTNM): ANMO' in data)
def test_issue419(self):
    """
    obspy.iris.Client.availability should work with output='bulkdataselect'

    Exercises the default output, the explicit 'bulkdataselect' value,
    the deprecated legacy value 'bulk' and the 'xml' output.
    """
    client = Client()
    # 1 - default output ('bulkdataselect')
    t1 = UTCDateTime("2010-02-27T06:30:00.000")
    t2 = UTCDateTime("2010-02-27T10:30:00.000")
    result = client.availability('IU', channel='B*', starttime=t1,
                                 endtime=t2)
    self.assertTrue(isinstance(result, basestring))
    self.assertTrue('IU YSS 00 BHZ' in result)
    # 2 - explicit set output 'bulkdataselect'
    t1 = UTCDateTime("2010-02-27T06:30:00.000")
    t2 = UTCDateTime("2010-02-27T10:30:00.000")
    result = client.availability('IU', channel='B*', starttime=t1,
                                 endtime=t2, output='bulkdataselect')
    self.assertTrue(isinstance(result, basestring))
    self.assertTrue('IU YSS 00 BHZ' in result)
    # 3 - output 'bulk' (backward compatibility)
    # the deprecated spelling must still work; suppress the warning it
    # is expected to emit
    t1 = UTCDateTime("2010-02-27T06:30:00.000")
    t2 = UTCDateTime("2010-02-27T10:30:00.000")
    with warnings.catch_warnings(record=True):
        warnings.simplefilter('ignore', DeprecationWarning)
        result = client.availability('IU', channel='B*', starttime=t1,
                                     endtime=t2, output='bulk')
    self.assertTrue(isinstance(result, basestring))
    self.assertTrue('IU YSS 00 BHZ' in result)
    # 4 - output 'xml'
    t1 = UTCDateTime("2010-02-27T06:30:00.000")
    t2 = UTCDateTime("2010-02-27T10:30:00.000")
    result = client.availability('IU', channel='B*', starttime=t1,
                                 endtime=t2, output='xml')
    self.assertTrue(isinstance(result, basestring))
    self.assertTrue('<?xml' in result)
def test_sacpz(self):
    """
    Fetches SAC poles and zeros information.

    Compares one response against a stored reference file and checks
    that both spellings of an empty location code ('' and '--') yield
    a blank KHOLE entry.
    """
    client = Client()
    # 1
    t1 = UTCDateTime("2005-01-01")
    t2 = UTCDateTime("2008-01-01")
    result = client.sacpz("IU", "ANMO", "00", "BHZ", t1, t2)
    # drop lines with creation date (current time during request)
    result = result.splitlines()
    sacpz_file = os.path.join(self.path, 'data', 'IU.ANMO.00.BHZ.sacpz')
    expected = open(sacpz_file, 'rt').read().splitlines()
    # line 5 holds the creation timestamp in both documents -- remove it
    # from each before comparing (positions must stay in sync)
    result.pop(5)
    expected.pop(5)
    self.assertEquals(result, expected)
    # 2 - empty location code
    dt = UTCDateTime("2002-11-01")
    result = client.sacpz('UW', 'LON', '', 'BHZ', dt)
    self.assertTrue("* STATION (KSTNM): LON" in result)
    self.assertTrue("* LOCATION (KHOLE): " in result)
    # 3 - empty location code via '--'
    result = client.sacpz('UW', 'LON', '--', 'BHZ', dt)
    self.assertTrue("* STATION (KSTNM): LON" in result)
    self.assertTrue("* LOCATION (KHOLE): " in result)
def test_traveltime(self):
    """
    Tests calculation of travel-times for seismic phases.
    """
    client = Client()
    # three receivers: Santiago, Seattle and Tokyo (lat, lon pairs)
    receivers = [(-33.45, -70.67), (47.61, -122.33), (35.69, 139.69)]
    result = client.traveltime(evloc=(-36.122, -72.898), evdepth=22.9,
                               staloc=receivers)
    # the service reports the velocity model in its header line
    self.assertTrue(result.startswith('Model: iasp91'))
def test_issue623(self):
    """
    obspy.iris bulkdataselect only returns last trace in result
    """
    client = Client()
    begin = UTCDateTime("2011-03-11T06:31:30Z")
    finish = UTCDateTime("2011-03-11T06:48:00Z")
    # regression check: the request yields five traces, not just one
    st = client.getWaveform("GE", "EIL", "", "BHZ", begin, finish)
    self.assertEqual(len(st), 5)
def test_saveResponseToStringIO(self):
    """
    Same as test_saveResponse but saves to a StringIO.

    Verifies that file-like objects are accepted wherever a filename is,
    for every supported output format.
    """
    client = Client()
    start = UTCDateTime("2005-001T00:00:00")
    end = UTCDateTime("2008-001T00:00:00")
    # RESP, single channel
    origfile = os.path.join(self.path, 'data', 'RESP.ANMO.IU.00.BHZ')
    with open(origfile, "rb") as fh:
        org_data = fh.read()
    memfile = StringIO.StringIO()
    client.saveResponse(memfile, "IU", "ANMO", "00", "BHZ", start, end)
    # rewind before reading back what the client wrote
    memfile.seek(0, 0)
    new_data = memfile.read()
    self.assertEqual(new_data, org_data)
    # RESP, multiple channels
    origfile = os.path.join(self.path, 'data', 'RESP.ANMO.IU._.BH_')
    with open(origfile, "rb") as fh:
        org_data = fh.read()
    memfile = StringIO.StringIO()
    client.saveResponse(memfile, "IU", "ANMO", "*", "BH?", start, end)
    memfile.seek(0, 0)
    new_data = memfile.read()
    self.assertEqual(new_data, org_data)
    # StationXML, single channel -- spot-check a marker tag only
    memfile = StringIO.StringIO()
    client.saveResponse(memfile, "IU", "ANMO", "00", "BHZ", start, end,
                        format="StationXML")
    memfile.seek(0, 0)
    data = memfile.read()
    self.assertTrue('<Station net_code="IU" sta_code="ANMO">' in data)
    # SACPZ, single channel -- spot-check header lines only
    memfile = StringIO.StringIO()
    client.saveResponse(memfile, "IU", "ANMO", "00", "BHZ", start, end,
                        format="SACPZ")
    memfile.seek(0, 0)
    data = memfile.read()
    self.assertTrue('NETWORK (KNETWK): IU' in data)
    self.assertTrue('STATION (KSTNM): ANMO' in data)
def test_availability(self):
    """
    Tests availability of waveform data at the DMC.

    Examples are inspired by http://www.iris.edu/ws/availability/.
    """
    client = Client()
    # 1
    t1 = UTCDateTime("2010-02-27T06:30:00.000")
    t2 = UTCDateTime("2010-02-27T10:30:00.000")
    result = client.availability('IU', channel='B*', starttime=t1,
                                 endtime=t2)
    self.assertTrue(isinstance(result, basestring))
    self.assertTrue('IU YSS 00 BHZ' in result)
    # 2
    dt = UTCDateTime("2011-11-13T07:00:00")
    result = client.availability(network='GE', starttime=dt,
                                 endtime=dt + 10)
    self.assertTrue(isinstance(result, basestring))
    self.assertTrue('GE DAG -- BHE' in result)
    # 3 - unknown network results in empty string
    dt = UTCDateTime(2011, 11, 16)
    result = client.availability(network='XX', starttime=dt,
                                 endtime=dt + 10)
    # BUG FIX: the result was fetched but never checked; assert the
    # behavior the comment above documents
    self.assertEqual(result, '')
    # 4 - location=None
    t1 = UTCDateTime("2010-02-27T06:30:00")
    t2 = UTCDateTime("2010-02-27T06:40:00")
    result = client.availability("IU", "K*", starttime=t1, endtime=t2)
    self.assertTrue(isinstance(result, basestring))
    self.assertTrue('IU KBL -- BHZ' in result)
    self.assertTrue('IU KBS 00 BHE' in result)
    # 5 - empty location
    result = client.availability("IU", "K*", "", starttime=t1, endtime=t2)
    self.assertTrue(isinstance(result, basestring))
    self.assertTrue('IU KBL -- BHZ' in result)
    self.assertFalse('IU KBS 00 BHE' in result)
    # 6 - empty location code via '--'
    result = client.availability("IU", "K*", "--", starttime=t1,
                                 endtime=t2)
    self.assertTrue(isinstance(result, basestring))
    self.assertTrue('IU KBL -- BHZ' in result)
    self.assertFalse('IU KBS 00 BHE' in result)
def test_saveWaveform(self):
    """
    Testing simple waveform file save method.
    """
    # file identical to file retrieved via web interface
    client = Client()
    start = UTCDateTime("2010-02-27T06:30:00")
    end = UTCDateTime("2010-02-27T06:31:00")
    origfile = os.path.join(self.path, 'data', 'IU.ANMO.00.BHZ.mseed')
    # use the context manager so the temporary file is cleaned up even
    # when the assertion fails (the old NamedTemporaryFile().name +
    # os.remove pattern leaked the file on failure)
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.saveWaveform(tempfile, "IU", "ANMO", "00", "BHZ", start, end)
        self.assertTrue(filecmp.cmp(origfile, tempfile))
    # no data raises an exception
    self.assertRaises(Exception, client.saveWaveform, "YY", "XXXX",
                      "00", "BHZ", start, end)
def test_saveWaveformToStringIO(self):
    """
    Same as test_saveWaveform but saves to a StringIO.
    """
    # file identical to file retrieved via web interface
    client = Client()
    start = UTCDateTime("2010-02-27T06:30:00")
    end = UTCDateTime("2010-02-27T06:31:00")
    buf = StringIO.StringIO()
    client.saveWaveform(buf, "IU", "ANMO", "00", "BHZ", start, end)
    # rewind the buffer and read back what the client wrote
    buf.seek(0, 0)
    fetched = buf.read()
    reference = os.path.join(self.path, 'data', 'IU.ANMO.00.BHZ.mseed')
    with open(reference, "rb") as fh:
        expected = fh.read()
    self.assertEqual(fetched, expected)
def test_getEvents(self):
    """
    Tests getEvents method.
    """
    client = Client()
    dt = UTCDateTime("2012-03-13T04:49:38")
    # 1 - narrow depth/magnitude/region filters select exactly one event
    cat = client.getEvents(mindepth=34.9, maxdepth=35.1, magtype="MB",
                           catalog="NEIC PDE", lat=-56.1, lon=-26.7,
                           maxradius=2, starttime=dt, endtime=dt + 10)
    self.assertEqual(len(cat), 1)
    event = cat[0]
    self.assertEqual(len(event.origins), 1)
    self.assertEqual(len(event.magnitudes), 1)
    origin = event.origins[0]
    self.assertEqual(origin.depth, 35.0)
    self.assertEqual(origin.latitude, -55.404)
    self.assertEqual(origin.longitude, -27.895)
    self.assertEqual(event.magnitudes[0].magnitude_type, 'MB')
def test_getWaveform(self):
    """
    Testing simple waveform request method.
    """
    # simple example
    client = Client()
    start = UTCDateTime("2010-02-27T06:30:00.019538Z")
    end = start + 20
    stream = client.getWaveform("IU", "ANMO", "00", "BHZ", start, end)
    self.assertEquals(len(stream), 1)
    stats = stream[0].stats
    self.assertEquals(stats.starttime, start)
    self.assertEquals(stats.endtime, end)
    # trace header must echo the requested SEED id components
    for attr, expected in [('network', 'IU'), ('station', 'ANMO'),
                           ('location', '00'), ('channel', 'BHZ')]:
        self.assertEquals(getattr(stats, attr), expected)
    # no data raises an exception
    self.assertRaises(Exception, client.getWaveform, "YY", "XXXX",
                      "00", "BHZ", start, end)
def test_distaz(self):
    """
    Tests distance and azimuth calculation between two points on a sphere.
    """
    client = Client()
    expected = {'distance': 2.09554, 'backazimuth': 5.46946,
                'azimuth': 185.47692}
    # normal request
    result = client.distaz(stalat=1.1, stalon=1.2, evtlat=3.2, evtlon=1.4)
    for key in ('distance', 'backazimuth', 'azimuth'):
        self.assertAlmostEquals(result[key], expected[key])
    # w/o kwargs
    result = client.distaz(1.1, 1.2, 3.2, 1.4)
    for key in ('distance', 'backazimuth', 'azimuth'):
        self.assertAlmostEquals(result[key], expected[key])
    # missing parameters
    self.assertRaises(Exception, client.distaz, stalat=1.1)
    self.assertRaises(Exception, client.distaz, 1.1)
    self.assertRaises(Exception, client.distaz, stalat=1.1, stalon=1.2)
    self.assertRaises(Exception, client.distaz, 1.1, 1.2)
def test_resp(self):
    """
    Tests resp Web service interface.

    Examples are inspired by http://www.iris.edu/ws/resp/.
    """
    client = Client()
    # 1 - explicit time span
    t1 = UTCDateTime("2005-001T00:00:00")
    t2 = UTCDateTime("2008-001T00:00:00")
    result = client.resp("IU", "ANMO", "00", "BHZ", t1, t2)
    self.assertTrue('B050F03     Station:     ANMO' in result)
    # 2 - empty location code
    result = client.resp("UW", "LON", "", "EHZ")
    self.assertTrue('B050F03     Station:     LON' in result)
    self.assertTrue('B052F03     Location:    ??' in result)
    # 3 - empty location code via '--' (alternate spelling accepted by
    # the service; must behave the same as '')
    result = client.resp("UW", "LON", "--", "EHZ")
    self.assertTrue('B050F03     Station:     LON' in result)
    self.assertTrue('B052F03     Location:    ??' in result)
    # 4 - single point in time with wildcarded location and channel
    dt = UTCDateTime("2010-02-27T06:30:00.000")
    result = client.resp("IU", "ANMO", "*", "*", dt)
    self.assertTrue('B050F03     Station:     ANMO' in result)
# NOTE(review): demo script fragment -- `st` is assumed to be a Stream
# created earlier in the original script (not visible here); confirm
# against the full source.
print st
print(st[0].stats)
st.write('REF.EHZ.2009:082.mseed', format='MSEED')
#st.write('REF.EHZ.2009:082.wave', format='WAV', framerate=6000)
st.write('REF.EHZ.2009:082.sac', format='SAC')

# This needs basemap and demonstrates the event and catalog classes
from obspy.core.event import *
cat = readEvents(
    "http://www.seismicportal.eu/services/event/search?magMin=8.0")
cat.plot()

# IRIS DMC example: fetch and plot 5 minutes of broadband data
from obspy.iris import Client
from obspy.core import UTCDateTime
client = Client()
t = UTCDateTime("2012-08-05T06:00:00.000")
st = client.getWaveform('IU', 'ANMO', '00', 'BHZ', t, t + 300)
st.plot()

# Earthworm wave server example - connection is refused though
# (rebinds `Client` to the earthworm client from here on)
from obspy.earthworm import Client
client = Client("pele.ess.washington.edu", 16017)
response = client.availability("UW", "TUCA", channel="BHZ")
print response
# response entries are tuples; index 4 holds the start time of the
# available data window
t = response[0][4]
st = client.getWaveform('UW', 'TUCA', '', 'BH*', t + 100, t + 130)
st.plot()
def test_evalresp(self):
    """
    Tests evaluating instrument response information.

    Exercises every output flavor of the evalresp web service: the three
    plot variants (PNG), the ASCII 'fap' and 'cs' listings, the four
    unit options for 'fap', and the NumPy-array return when no filename
    is given.
    """
    client = Client()
    dt = UTCDateTime("2005-01-01")
    # plot as PNG file
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.evalresp(network="IU", station="ANMO", location="00",
                        channel="BHZ", time=dt, output='plot',
                        filename=tempfile)
        # bytes 1..3 of a PNG file spell 'PNG'
        with open(tempfile, 'rb') as fp:
            self.assertEqual(fp.read(4)[1:4], b'PNG')
    # plot-amp as PNG file
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.evalresp(network="IU", station="ANMO", location="00",
                        channel="BHZ", time=dt, output='plot-amp',
                        filename=tempfile)
        with open(tempfile, 'rb') as fp:
            self.assertEqual(fp.read(4)[1:4], b'PNG')
    # plot-phase as PNG file
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.evalresp(network="IU", station="ANMO", location="00",
                        channel="BHZ", time=dt, output='plot-phase',
                        filename=tempfile)
        with open(tempfile, 'rb') as fp:
            self.assertEqual(fp.read(4)[1:4], b'PNG')
    # fap as ASCII file -- only the first line (lowest frequency) is
    # pinned; columns are frequency, amplitude, phase
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.evalresp(network="IU", station="ANMO", location="00",
                        channel="BHZ", time=dt, output='fap',
                        filename=tempfile)
        with open(tempfile, 'rt') as fp:
            self.assertEqual(fp.readline(),
                             '1.000000E-05 1.055999E+04 1.792007E+02\n')
    # cs as ASCII file -- columns are frequency, real, imaginary
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.evalresp(network="IU", station="ANMO", location="00",
                        channel="BHZ", time=dt, output='cs',
                        filename=tempfile)
        with open(tempfile, 'rt') as fp:
            self.assertEqual(fp.readline(),
                             '1.000000E-05 -1.055896E+04 1.473054E+02\n')
    # fap & def as ASCII file -- 'def' must equal the plain fap result
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.evalresp(network="IU", station="ANMO", location="00",
                        channel="BHZ", time=dt, output='fap', units='def',
                        filename=tempfile)
        with open(tempfile, 'rt') as fp:
            self.assertEqual(fp.readline(),
                             '1.000000E-05 1.055999E+04 1.792007E+02\n')
    # fap & dis as ASCII file
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.evalresp(network="IU", station="ANMO", location="00",
                        channel="BHZ", time=dt, output='fap', units='dis',
                        filename=tempfile)
        with open(tempfile, 'rt') as fp:
            self.assertEqual(fp.readline(),
                             '1.000000E-05 6.635035E-01 2.692007E+02\n')
    # fap & vel as ASCII file -- matches 'def' (velocity is the default)
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.evalresp(network="IU", station="ANMO", location="00",
                        channel="BHZ", time=dt, output='fap', units='vel',
                        filename=tempfile)
        with open(tempfile, 'rt') as fp:
            self.assertEqual(fp.readline(),
                             '1.000000E-05 1.055999E+04 1.792007E+02\n')
    # fap & acc as ASCII file
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.evalresp(network="IU", station="ANMO", location="00",
                        channel="BHZ", time=dt, output='fap', units='acc',
                        filename=tempfile)
        with open(tempfile, 'rt') as fp:
            self.assertEqual(fp.readline(),
                             '1.000000E-05 1.680674E+08 8.920073E+01\n')
    # fap as NumPy ndarray (no filename given)
    data = client.evalresp(network="IU", station="ANMO", location="00",
                           channel="BHZ", time=dt, output='fap')
    np.testing.assert_array_equal(
        data[0], [1.00000000e-05, 1.05599900e+04, 1.79200700e+02])
    # cs as NumPy ndarray
    data = client.evalresp(network="IU", station="ANMO", location="00",
                           channel="BHZ", time=dt, output='cs')
    np.testing.assert_array_equal(
        data[0], [1.00000000e-05, -1.05589600e+04, 1.47305400e+02])
def single_comparison():
    """
    one by one comparison of the waveforms in the first path with the
    second path.

    Reads matching traces from input['first_path'] and input['second_path'],
    optionally restricts to a requested seismic phase, optionally corrects/
    filters, then plots both traces (plus instrument response curves when
    input['resp_paz'] == 'Y') and reports their cross-correlation.
    Interactive: pauses for raw_input after each pair.
    NOTE(review): reconstructed from whitespace-collapsed source -- the
    exact nesting of the plotting statements should be confirmed against
    the original file.
    """
    client = Client()
    global input
    # identity of the waveforms (first and second paths) to be compared
    # with each other
    identity_all = input['net'] + '.' + input['sta'] + '.' + \
        input['loc'] + '.' + input['cha']
    ls_first = glob.glob(os.path.join(input['first_path'], identity_all))
    ls_second = glob.glob(os.path.join(input['second_path'], identity_all))
    for i in range(0, len(ls_first)):
        try:
            tr1 = read(ls_first[i])[0]
            if input['phase'] != 'N':
                # compute epicentral distance from the SAC headers and
                # check the requested phase actually arrives in the trace
                evsta_dist = util.locations2degrees(
                    lat1=tr1.stats.sac.evla, long1=tr1.stats.sac.evlo,
                    lat2=tr1.stats.sac.stla, long2=tr1.stats.sac.stlo)
                taup_tt = taup.getTravelTimes(delta=evsta_dist,
                                              depth=tr1.stats.sac.evdp)
                phase_exist = 'N'
                for tt_item in taup_tt:
                    if tt_item['phase_name'] == input['phase']:
                        print 'Requested phase:'
                        print input['phase']
                        print '------'
                        print tt_item['phase_name']
                        print 'exists in the waveform!'
                        print '-----------------------'
                        t_phase = tt_item['time']
                        phase_exist = 'Y'
                        break
                # skip this trace when the phase does not arrive
                if phase_exist != 'Y':
                    continue
            # identity of the current waveform
            identity = tr1.stats.network + '.' + tr1.stats.station + \
                '.' + tr1.stats.location + '.' + tr1.stats.channel
            # tr1: first path, tr2: second path, tr3: Raw data
            #tr3 = read(os.path.join(input['first_path'], '..', 'BH_RAW', identity))[0]
            if input['resp_paz'] == 'Y':
                response_file = os.path.join(
                    input['first_path'], '..', 'Resp/RESP.' + identity)
                # Extract the PAZ info from response file
                paz = readRESP(response_file, unit=input['corr_unit'])
                poles = paz['poles']
                zeros = paz['zeros']
                scale_fac = paz['gain']
                sensitivity = paz['sensitivity']
                print paz
                # Convert Poles and Zeros (PAZ) to frequency response.
                h, f = pazToFreqResp(poles, zeros, scale_fac,
                                     1. / tr1.stats.sampling_rate,
                                     tr1.stats.npts * 2, freq=True)
                # Use the evalresp library to extract
                # instrument response information from a SEED RESP-file.
                resp = invsim.evalresp(
                    t_samp=1. / tr1.stats.sampling_rate,
                    nfft=tr1.stats.npts * 2, filename=response_file,
                    date=tr1.stats.starttime,
                    units=input['corr_unit'].upper())
            # Keep the current identity in a new variable
            id_name = identity
            try:
                tr2 = read(os.path.join(input['second_path'], identity))[0]
            except Exception, error:
                # if it is not possible to read the identity in the second
                # path then change the network part of the identity based
                # on correction unit
                identity = input['corr_unit'] + '.' + tr1.stats.station + \
                    '.' + tr1.stats.location + '.' + tr1.stats.channel
                tr2 = read(os.path.join(input['second_path'], identity))[0]
            if input['resample'] != 'N':
                print 'WARNING: you are using resample!!!'
                tr1.resample(input['resample'])
                tr2.resample(input['resample'])
            if input['tw'] == 'Y':
                # trim both traces to a window around the phase arrival
                t_cut_1 = tr1.stats.starttime + t_phase - input['preset']
                t_cut_2 = tr1.stats.starttime + t_phase + input['offset']
                tr1.trim(starttime=t_cut_1, endtime=t_cut_2)
                t_cut_1 = tr2.stats.starttime + t_phase - input['preset']
                t_cut_2 = tr2.stats.starttime + t_phase + input['offset']
                tr2.trim(starttime=t_cut_1, endtime=t_cut_2)
            if input['hlfilter'] == 'Y':
                # band-limit both traces identically
                tr1.filter('lowpass', freq=input['hfreq'], corners=2)
                tr2.filter('lowpass', freq=input['hfreq'], corners=2)
                tr1.filter('highpass', freq=input['lfreq'], corners=2)
                tr2.filter('highpass', freq=input['lfreq'], corners=2)
            # normalization of all three waveforms to the
            # max(max(tr1), max(tr2), max(tr3)) to keep the scales
            #maxi = max(abs(tr1.data).max(), abs(tr2.data).max(), abs(tr3.data).max())
            #maxi = max(abs(tr1.data).max(), abs(tr2.data).max())
            #tr1_data = tr1.data/abs(maxi)
            #tr2_data = tr2.data/abs(maxi)
            #tr3_data = tr3.data/abs(maxi)
            # NOTE(review): each trace is normalized by its own maximum
            # here, so relative amplitudes between traces are lost
            tr1_data = tr1.data / abs(max(tr1.data))
            tr2_data = tr2.data / abs(max(tr2.data))
            #tr1_data = tr1.data
            #tr2_data = tr2.data*1e9
            print max(tr1.data)
            print max(tr2.data)
            # create time arrays for tr1, tr2 and tr3
            time_tr1 = np.arange(0,
                                 tr1.stats.npts / tr1.stats.sampling_rate,
                                 1. / tr1.stats.sampling_rate)
            time_tr2 = np.arange(0,
                                 tr2.stats.npts / tr2.stats.sampling_rate,
                                 1. / tr2.stats.sampling_rate)
            #time_tr3 = np.arange(0, tr3.stats.npts/tr3.stats.sampling_rate, \
            #                     1./tr3.stats.sampling_rate)
            # label for plotting
            label_tr1 = ls_first[i].split('/')[-2]
            label_tr2 = ls_second[i].split('/')[-2]
            label_tr3 = 'RAW'
            if input['resp_paz'] == 'Y':
                # start plotting: reserve the top two rows of a 3x4 grid
                # for the trace overlay, row 3 for the response plots
                plt.figure()
                plt.subplot2grid((3, 4), (0, 0), colspan=4, rowspan=2)
            #plt.subplot(211)
            plt.plot(time_tr1, tr1_data, color='blue', label=label_tr1,
                     lw=3)
            plt.plot(time_tr2, tr2_data, color='red', label=label_tr2,
                     lw=3)
            #plt.plot(time_tr3, tr3_data, color = 'black', ls = '--', label = label_tr3)
            plt.xlabel('Time (sec)', fontsize='xx-large', weight='bold')
            if input['corr_unit'] == 'dis':
                ylabel_str = 'Relative Displacement'
            elif input['corr_unit'] == 'vel':
                ylabel_str = 'Relative Vel'
            elif input['corr_unit'] == 'acc':
                ylabel_str = 'Relative Acc'
            plt.ylabel(ylabel_str, fontsize='xx-large', weight='bold')
            plt.xticks(fontsize='xx-large', weight='bold')
            plt.yticks(fontsize='xx-large', weight='bold')
            plt.legend(loc=1, prop={'size': 20})
            #-------------------Cross Correlation
            # 5 seconds as total length of samples to shift for cross
            # correlation.
            # NOTE(review): the comment above says 5 seconds but the
            # factor used is 3 -- confirm which is intended
            cc_np = tr1.stats.sampling_rate * 3
            np_shift, coeff = cross_correlation.xcorr(tr1, tr2, int(cc_np))
            t_shift = float(np_shift) / tr1.stats.sampling_rate
            print "Cross Correlation:"
            print "Shift: " + str(t_shift)
            print "Coefficient: " + str(coeff)
            plt.title('Single Comparison' + '\n' + str(t_shift) +
                      ' sec , coeff: ' + str(round(coeff, 5)) +
                      '\n' + id_name,
                      fontsize='xx-large', weight='bold')
            if input['resp_paz'] == 'Y':
                # -----------------------
                # amplitude response: RESP vs PAZ, log-log
                #plt.subplot(223)
                plt.subplot2grid((3, 4), (2, 0), colspan=2)
                '''
                plt.plot(np.log10(f), np.log10(abs(resp)/(sensitivity*sensitivity)), \
                    color = 'blue', label = 'RESP', lw=3)
                plt.plot(np.log10(f), np.log10(abs(h)/sensitivity), \
                    color = 'red', label = 'PAZ', lw=3)
                '''
                plt.loglog(f, abs(resp) / (sensitivity * sensitivity),
                           color='blue', label='RESP', lw=3)
                plt.loglog(f, abs(h) / sensitivity,
                           color='red', label='PAZ', lw=3)
                #for j in [0.008, 0.012, 0.025, 0.5, 1, 2, 3, 4]:
                for j in [0]:
                    # NOTE(review): np.log10(0) is -inf; this marker line
                    # is likely a leftover from the log10-axis variant
                    plt.axvline(np.log10(j), linestyle='--')
                #plt.xlabel('Frequency [Hz]\n(power of 10)', fontsize = 'xx-large', weight = 'bold')
                #plt.ylabel('Amplitude\n (power of 10)', fontsize = 'xx-large', weight = 'bold')
                plt.xlabel('Frequency [Hz]', fontsize='xx-large',
                           weight='bold')
                plt.ylabel('Amplitude', fontsize='xx-large', weight='bold')
                plt.xticks(fontsize='xx-large', weight='bold')
                #plt.yticks = MaxNLocator(nbins=4)
                plt.yticks(fontsize='xx-large', weight='bold')
                plt.legend(loc=2, prop={'size': 20})
                # -----------------------
                # phase response: RESP vs PAZ, semilog frequency axis
                #plt.subplot(224)
                plt.subplot2grid((3, 4), (2, 2), colspan=2)
                #take negative of imaginary part
                phase_paz = np.unwrap(np.arctan2(h.imag, h.real))
                phase_resp = np.unwrap(np.arctan2(resp.imag, resp.real))
                #plt.plot(np.log10(f), phase_resp, color = 'blue', label = 'RESP', lw=3)
                #plt.plot(np.log10(f), phase_paz, color = 'red', label = 'PAZ', lw=3)
                plt.semilogx(f, phase_resp, color='blue', label='RESP',
                             lw=3)
                plt.semilogx(f, phase_paz, color='red', label='PAZ', lw=3)
                #for j in [0.008, 0.012, 0.025, 0.5, 1, 2, 3, 4]:
                for j in [0.0]:
                    plt.axvline(np.log10(j), linestyle='--')
                #plt.xlabel('Frequency [Hz]\n(power of 10)', fontsize = 'xx-large', weight = 'bold')
                plt.xlabel('Frequency [Hz]', fontsize='xx-large',
                           weight='bold')
                plt.ylabel('Phase [radian]', fontsize='xx-large',
                           weight='bold')
                plt.xticks(fontsize='xx-large', weight='bold')
                plt.yticks(fontsize='xx-large', weight='bold')
                plt.legend(loc=3, prop={'size': 20})
                # title, centered above both subplots
                # make more room in between subplots for the ylabel of
                # right plot
                plt.subplots_adjust(wspace=0.4, hspace=0.3)
            # commented-out difference plots kept for reference
            """
            # -----------------------
            plt.subplot(325)
            plt.plot(np.log10(f), np.log10(abs(resp)/(sensitivity*sensitivity)) - \
                np.log10(abs(h)/sensitivity), \
                color = 'black', label = 'RESP - PAZ')
            for j in [0.008, 0.012, 0.025, 0.5, 1, 2, 3, 4]:
                plt.axvline(np.log10(j), linestyle = '--')
            plt.xlabel('Frequency [Hz] (power of 10)')
            plt.ylabel('Amplitude (power of 10)')
            plt.legend()
            # -----------------------
            plt.subplot(326)
            #take negative of imaginary part
            phase_paz = np.unwrap(np.arctan2(h.imag, h.real))
            phase_resp = np.unwrap(np.arctan2(resp.imag, resp.real))
            plt.plot(np.log10(f), np.log10(phase_resp) - np.log10(phase_paz), \
                color = 'black', label = 'RESP - PAZ')
            for j in [0.008, 0.012, 0.025, 0.5, 1, 2, 3, 4]:
                plt.axvline(np.log10(j), linestyle = '--')
            plt.xlabel('Frequency [Hz] (power of 10)')
            plt.ylabel('Phase [radian] (power of 10)')
            plt.legend()
            # title, centered above both subplots
            # make more room in between subplots for the ylabel of right plot
            plt.subplots_adjust(wspace=0.3)
            """
            plt.show()
            print str(i + 1) + '/' + str(len(ls_first))
            print ls_first[i]
            print '------------------'
            # pause until the user acknowledges this pair
            wait = raw_input(id_name)
            print '***************************'
        except Exception, error:
            print '##################'
            print error
            print '##################'
def test_evalresp(self):
    """
    Tests evaluating instrument response information.

    Exercises the plot outputs (PNG), the ASCII 'fap'/'cs' listings, the
    unit options for 'fap', and the NumPy-array return.  Temporary files
    are now managed with context managers so they are removed even when
    an assertion fails (the old NamedTemporaryFile().name + os.remove
    pattern leaked them on failure and left file handles open).
    """
    client = Client()
    dt = UTCDateTime("2005-01-01")
    # plot as PNG file -- bytes 1..3 of a PNG file spell 'PNG'
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.evalresp(network="IU", station="ANMO", location="00",
                        channel="BHZ", time=dt, output='plot',
                        filename=tempfile)
        with open(tempfile, 'rb') as fp:
            self.assertEqual(fp.read(4)[1:4], 'PNG')
    # plot-amp as PNG file
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.evalresp(network="IU", station="ANMO", location="00",
                        channel="BHZ", time=dt, output='plot-amp',
                        filename=tempfile)
        with open(tempfile, 'rb') as fp:
            self.assertEqual(fp.read(4)[1:4], 'PNG')
    # plot-phase as PNG file
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.evalresp(network="IU", station="ANMO", location="00",
                        channel="BHZ", time=dt, output='plot-phase',
                        filename=tempfile)
        with open(tempfile, 'rb') as fp:
            self.assertEqual(fp.read(4)[1:4], 'PNG')
    # fap as ASCII file -- pin the first (lowest-frequency) line only
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.evalresp(network="IU", station="ANMO", location="00",
                        channel="BHZ", time=dt, output='fap',
                        filename=tempfile)
        with open(tempfile, 'rt') as fp:
            self.assertEqual(fp.readline(),
                             '1.000000E-05 1.202802E+04 1.792007E+02\n')
    # cs as ASCII file
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.evalresp(network="IU", station="ANMO", location="00",
                        channel="BHZ", time=dt, output='cs',
                        filename=tempfile)
        with open(tempfile, 'rt') as fp:
            self.assertEqual(fp.readline(),
                             '1.000000E-05 -1.202685E+04 1.677835E+02\n')
    # fap & def as ASCII file -- 'def' matches the plain fap result
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.evalresp(network="IU", station="ANMO", location="00",
                        channel="BHZ", time=dt, output='fap', units='def',
                        filename=tempfile)
        with open(tempfile, 'rt') as fp:
            self.assertEqual(fp.readline(),
                             '1.000000E-05 1.202802E+04 1.792007E+02\n')
    # fap & dis as ASCII file
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.evalresp(network="IU", station="ANMO", location="00",
                        channel="BHZ", time=dt, output='fap', units='dis',
                        filename=tempfile)
        with open(tempfile, 'rt') as fp:
            self.assertEqual(fp.readline(),
                             '1.000000E-05 7.557425E-01 2.692007E+02\n')
    # fap & vel as ASCII file -- matches 'def' (velocity is the default)
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.evalresp(network="IU", station="ANMO", location="00",
                        channel="BHZ", time=dt, output='fap', units='vel',
                        filename=tempfile)
        with open(tempfile, 'rt') as fp:
            self.assertEqual(fp.readline(),
                             '1.000000E-05 1.202802E+04 1.792007E+02\n')
    # fap & acc as ASCII file
    with NamedTemporaryFile() as tf:
        tempfile = tf.name
        client.evalresp(network="IU", station="ANMO", location="00",
                        channel="BHZ", time=dt, output='fap', units='acc',
                        filename=tempfile)
        with open(tempfile, 'rt') as fp:
            self.assertEqual(fp.readline(),
                             '1.000000E-05 1.914318E+08 8.920073E+01\n')
    # fap as NumPy ndarray (no filename given)
    data = client.evalresp(network="IU", station="ANMO", location="00",
                           channel="BHZ", time=dt, output='fap')
    np.testing.assert_array_equal(
        data[0], [1.00000000e-05, 1.20280200e+04, 1.79200700e+02])
    # cs as NumPy ndarray
    data = client.evalresp(network="IU", station="ANMO", location="00",
                           channel="BHZ", time=dt, output='cs')
    np.testing.assert_array_equal(
        data[0], [1.00000000e-05, -1.20268500e+04, 1.67783500e+02])
from obspy.iris import Client from obspy.core import UTCDateTime from obspy.core.util import NamedTemporaryFile import matplotlib.pyplot as plt import numpy as np # MW 7.1 Darfield earthquake, New Zealand t1 = UTCDateTime("2010-09-3T16:30:00.000") t2 = UTCDateTime("2010-09-3T17:00:00.000") # Fetch waveform from IRIS web service into a ObsPy stream object client = Client() st = client.getWaveform('NZ', 'BFZ', '10', 'HHZ', t1, t2) # Download and save instrument response file into a temporary file with NamedTemporaryFile() as tf: respf = tf.name client.saveResponse(respf, 'NZ', 'BFZ', '10', 'HHZ', t1, t2, format="RESP") # make a copy to keep our original data st_orig = st.copy() # define a filter band to prevent amplifying noise during the deconvolution pre_filt = (0.005, 0.006, 30.0, 35.0) # this can be the date of your raw data or any date for which the # SEED RESP-file is valid date = t1 seedresp = {'filename': respf, # RESP filename # when using Trace/Stream.simulate() the "date" parameter can
def main(argv=sys.argv):
    """Finite-fault slip inversion driver (Python 2 script).

    Reads an event parameter file (``argv[1]``), builds a subfault grid for
    the 2010 Chilean earthquake, computes synthetic seismograms from
    precomputed Green's functions, solves a non-negative least-squares
    problem for the slip distribution, and plots the fit and the slip/rake
    maps.

    Optional ``argv[2]``: a single trace id — restricts the inversion to
    that one station.

    NOTE(review): relies on many project helpers visible only by name here
    (fault_grid, hypo2dist, NodalPlanetoMT, GFSelectZ, MTrotationZ, DU,
    bp.bandpassfilter, lstargsort, SmoothMatrix, nnls, ...) and on
    module-level globals; behavior of those is assumed, not shown.
    """
    # Subfault locations are published through these globals for use by the
    # helper routines called below.
    global eplat, eplon, epdepth, orig
    GFdir = "/home/roberto/data/GFS/"  # root dir of precomputed Green's functions
    beta = 4.e3  # m/s
    rho = 3.e3   # kg/m^3
    mu = rho*beta*beta
    # Second assignment overrides the computed value above — mu is fixed
    # at 40 GPa for the inversion.
    mu = 40e9
    # Regularization weights (moment minimization / smoothing); single
    # values here, the commented linspace versions scan a range.
    Lbdm0min = 1e-26*np.array([125.])
    Lbdsmooth = 1e-26*np.array([100.])
    #~ Lbdm0min = 1e-26*np.linspace(60.,500,40)
    #~ Lbdsmooth = 1e-26*np.linspace(60.,500,40)#*0.5
    # Band-pass filter settings used on every rotated synthetic trace.
    corners = 4.
    fmin = 0.001
    fmax = 0.005
    ### Data from Chilean 2010 EQ (Same as W phase inv.)
    strike = 18.
    dip = 18.
    rake = 104.  # 109.
    #rake = 45.
    # Two auxiliary rakes +/-45 deg around the nominal one; used to build
    # the two-component (rake-projected) design matrices AA/AB.
    rakeA = rake + 45.
    rakeB = rake - 45.
    ####################
    # Fault discretization: nsx x nsy subfaults.
    nsx = 21
    nsy = 11
    Min_h = 10.
    flen = 600.  # Fault's length [km] along strike
    fwid = 300.  # Fault's length [km] along dip
    sflen = flen/float(nsx)  # subfault length along strike [km]
    sfwid = fwid/float(nsy)  # subfault width along dip [km]
    swp = [1, 0, 2]
    nsf = nsx*nsy  # total number of subfaults
    ###################
    t_h = 10.  # half-duration [s] of the triangular source-time window
    MISFIT = np.array([])
    #RUPVEL = np.arange(1.0, 5.0, 0.05)
    RupVel = 2.1  # Best fit
    #RupVel = 2.25 #From Lay et al.
    #for RupVel in RUPVEL:
    print "****************************"
    print RupVel
    print "****************************"
    # Nodal planes for the nominal rake and the two projected rakes.
    NP = [strike, dip, rake]
    NPA = [strike, dip, rakeA]
    NPB = [strike, dip, rakeB]
    M = np.array(NodalPlanetoMT(NP))
    MA = np.array(NodalPlanetoMT(NPA))
    MB = np.array(NodalPlanetoMT(NPB))
    # Scalar moment proxy of the nominal MT (not used in the active path).
    Mp = np.sum(M**2)/np.sqrt(2)
    #############
    # Loading req file and EQ parameters (whitespace-separated key/value
    # lines; values kept as lists of strings).
    parameters = {}
    with open(argv[1], 'r') as file:
        for line in file:
            line = line.split()
            key = line[0]
            val = line[1:]
            parameters[key] = val
    #~ cmteplat = float(parameters['eplat'][0])
    #~ cmteplon = float(parameters['eplon'][0])
    #~ cmtepdepth=float(parameters['epdepth'][0])
    orig = UTCDateTime(parameters['origin_time'][0])
    #### Hypocentre from
    ### http://earthquake.usgs.gov/earthquakes/eqinthenews/2010/us2010tfan/
    # Hard-coded epicenter (overrides the commented file-based values).
    cmteplat = -35.91  #-35.85#-36.03#-35.83
    cmteplon = -72.73  #-72.72#-72.83# -72.67
    cmtepdepth = 35.
    eq_hyp = (cmteplat, cmteplon, cmtepdepth)
    ############
    # Build the subfault grid; grid[subf] rows are indexed below as
    # [0]=lon, [1]=lat, [2]=depth, [3]=strike, [4]=dip, [5]=rake, [6]=area.
    grid, sblt = fault_grid('CL-2010', cmteplat, cmteplon, cmtepdepth, 0, Min_h,\
                            strike, dip, rake, flen, fwid, nsx, nsy,
                            Verbose=False, ffi_io=True, gmt_io=True)

    print ('CL-2010', cmteplat, cmteplon, cmtepdepth, 0, Min_h,\
           strike, dip, rake, flen, fwid, nsx, nsy,\
           )
    print grid[0][1]
    #sys.exit()
    #############
    # Loading files and setting dirs:
    inputfile = os.path.abspath(argv[1])
    if not os.path.exists(inputfile): print inputfile, "does not exist."; exit()
    workdir = "/".join(inputfile.split("/")[:-1])
    basename = inputfile.split("/")[-1][:-4]
    if workdir[-1] != "/": workdir += "/"

    try:
        os.mkdir(workdir+"WPinv")
    except OSError:
        # Directory already exists — best-effort, keep going.
        pass  #print "Directory WPtraces already exists. Skipping"

    trfile = open(workdir+"goodtraces.dat")
    trlist = []
    # Loading good-traces file: one trace id per line, first column only.
    while 1:
        line = trfile.readline().rstrip('\r\n')
        if not line: break
        trlist.append(line.split()[0])
    trfile.close()
    #############

    # Reading traces:
    st = read(workdir+"WPtraces/" + basename + ".decov.trim.mseed")
    #############################################################################
    ###### Determining the subfault closest to the hypocentre:
    # Initialize the running minimum with a value guaranteed to be larger
    # than any hypocentre-subfault distance (flen*fwid, in km units).
    min_Dist_hyp_subf = flen*fwid
    for subf in range(nsf):
        sblat = grid[subf][1]
        sblon = grid[subf][0]
        sbdepth = grid[subf][2]
        sf_hyp = (sblat, sblon, sbdepth)
        Dist_hyp_subf = hypo2dist(eq_hyp, sf_hyp)
        if Dist_hyp_subf < min_Dist_hyp_subf:
            min_Dist_hyp_subf = Dist_hyp_subf
            min_sb_hyp = sf_hyp
            hyp_subf = subf
    print hyp_subf, min_sb_hyp, min_Dist_hyp_subf

    #### Determining trimming times:
    # A reference Green's-function trace supplies the common start time t0.
    test_tr = read(GFdir + "H003.5/PP/GF.0001.SY.LHZ.SAC")[0]
    t0 = test_tr.stats.starttime
    TrimmingTimes = {}  # per-trace (start, end) trim window keyed by trace id
    A = 0  # placeholder; reassigned to the design matrix in the subfault loop
    for trid in trlist:
        tr = st.select(id=trid)[0]
        metafile = workdir + "DATA/" + "META." + tr.id + ".xml"
        META = DU.getMetadataFromXML(metafile)[tr.id]
        stlat = META['latitude']
        stlon = META['longitude']
        dist = locations2degrees(min_sb_hyp[0], min_sb_hyp[1],\
                                 stlat, stlon)
        # First P arrival from the closest subfault; window runs from the
        # P arrival to P + 15 s per degree of distance.
        parrivaltime = getTravelTimes(dist, min_sb_hyp[2])[0]['time']
        ta = t0 + parrivaltime
        tb = ta + round(15.*dist)
        TrimmingTimes[trid] = (ta, tb)

    ##############################################################################

    #####

    DIST = []
    # Ordering the stations in terms of distance from the epicenter.
    for trid in trlist:
        metafile = workdir + "DATA/" + "META." + trid + ".xml"
        META = DU.getMetadataFromXML(metafile)[trid]
        lat = META['latitude']
        lon = META['longitude']
        trdist = locations2degrees(cmteplat, cmteplon, lat, lon)
        DIST.append(trdist)

    DistIndex = lstargsort(DIST)

    # Optional second CLI argument restricts the run to a single station.
    if len(argv) == 3:
        trlist = [argv[2]]
        OneStation = True
    else:
        trlist = [trlist[i] for i in DistIndex]
        OneStation = False

    #####

    client = Client()
    ObservedDisp = np.array([])  # concatenated observed data (filled at subf==0)
    gridlat = []
    gridlon = []
    griddepth = []
    sbarea = []
    mindist = flen*fwid  # min distance hyp-subfault

    ########## Loop for each subfault
    for subf in range(nsf):
        print "**********"
        print subf
        eplat = grid[subf][1]
        eplon = grid[subf][0]
        epdepth = grid[subf][2]

        ## Storing the subfault's location centered in the hypocenter
        gridlat.append(eplat-cmteplat)
        gridlon.append(eplon-cmteplon)
        griddepth.append(epdepth)

        strike = grid[subf][3]  #+ 360.
        dip = grid[subf][4]
        rake = grid[subf][5]  #
        NP = [strike, dip, rake]

        M = np.array(NodalPlanetoMT(NP))

        # Calculating the time delay (rupture travel time from the
        # hypocentre to this subfault at the assumed rupture velocity):
        sf_hyp = (eplat, eplon, epdepth)
        Dist_ep_subf = hypo2dist(eq_hyp, sf_hyp)
        t_d = round(Dist_ep_subf/RupVel)  #-59.
        print eplat, eplon, epdepth

        #t_d  = 0.

        # Determining depth dir: GF directories are named like "H003.5";
        # pick the one whose depth is closest to the subfault depth.
        depth = []
        depthdir = []
        for file in os.listdir(GFdir):
            if file[-2:] == ".5":
                depthdir.append(file)
                depth.append(float(file[1:-2]))
        BestDirIndex = np.argsort(abs(epdepth-np.array(depth)))[0]
        hdir = GFdir + depthdir[BestDirIndex] + "/"
        # hdir is the absolute path to the closest depth.

        # Per-subfault synthetic rows, one value set appended per trace.
        SYN = np.array([])
        SYNA = np.array([])
        SYNB = np.array([])

        # Main loop over traces:
        for trid in trlist:
            tr = st.select(id=trid)[0]
            metafile = workdir + "DATA/" + "META." + tr.id + ".xml"
            META = DU.getMetadataFromXML(metafile)[tr.id]
            lat = META['latitude']
            lon = META['longitude']
            # Green's-function components for this station/subfault pair.
            trPPsy, trRRsy, trRTsy, trTTsy = \
                                GFSelectZ(lat, lon, hdir)
            tr.stats.delta = trPPsy.stats.delta
            # Back-azimuth (negated, in radians) used to rotate the GF set.
            azi = -np.pi/180.*gps2DistAzimuth(lat, lon,\
                                              eplat, eplon)[2]
            trROT = MTrotationZ(azi, trPPsy, trRRsy, trRTsy, trTTsy)

            # Triangle source-time function of total length 2*t_h,
            # normalized to unit area, applied by convolution below.
            dt = trROT[0].stats.delta
            trianglen = 2.*t_h/dt-1.
            window = triang(trianglen)
            window /= np.sum(window)
            #window = np.array([1.])
            FirstValid = int(trianglen/2.) + 1
            dist = locations2degrees(eplat, eplon, lat, lon)
            parrivaltime = getTravelTimes(dist, epdepth)[0]['time']

            # Shift the trim window earlier by the rupture delay t_d.
            t1 = TrimmingTimes[trid][0] - t_d
            t2 = TrimmingTimes[trid][1] - t_d

            #~ t1 = trROT[0].stats.starttime + parrivaltime- t_d
            #~ t2 = t1+ round(MinDist[tr.id]*15. )

            N = len(trROT[0])
            for trR in trROT:
                trR.data *= 10.**-21  ## To get M in Nm
                trR.data -= trR.data[0]
                AUX1 = len(trR)
                trR.data = convolve(trR.data, window, mode='valid')
                AUX2 = len(trR)
                # De-mean using the pre-arrival part of the trace (first
                # ~60 s, padded at the front with the first sample).
                mean = np.mean(np.hstack((trR.data[0]*np.ones(FirstValid),\
                       trR.data[:60./trR.stats.delta*1.-FirstValid+1])))
                #mean = np.mean(trR.data[:60])
                trR.data -= mean
                trR.data = bp.bandpassfilter(trR.data, len(trR), trR.stats.delta, corners, 1, fmin, fmax)
                # Compensate the samples lost by the 'valid' convolution.
                t_l = dt*0.5*(AUX1 - AUX2)
                trR.trim(t1-t_l, t2-t_l, pad=True, fill_value=trR.data[0])  # We lost t_h due to the convolution

            #~ for trR in trROT:
                #~ trR.data *= 10.**-23 ## To get M in Nm
                #~ trR.data -= trR.data[0]
                #~ trR.data = convolve(trR.data,window,mode='same')
                #~ # mean = np.mean(np.hstack((trR.data[0]*np.ones(FirstValid),\
                #~ # trR.data[:60./trR.stats.delta*1.-FirstValid+1])))
                #~ mean = np.mean(trR.data[:60])
                #~ trR.data -= mean
                #~ trR.data = bp.bandpassfilter(trR.data,len(trR), trR.stats.delta,\
                #~ corners ,1 , fmin, fmax)
                #~ trR.trim(t1,t2,pad=True, fill_value=trR.data[0])

            # Truncate observed and synthetic traces to a common length.
            nmin = min(len(tr.data), len(trROT[0].data))
            tr.data = tr.data[:nmin]
            for trR in trROT:
                trR.data = trR.data[:nmin]
            #############
            # Project the rotated GF set onto the three moment tensors.
            trROT = np.array(trROT)
            syn = np.dot(trROT.T, M)
            synA = np.dot(trROT.T, MA)
            synB = np.dot(trROT.T, MB)

            SYN = np.append(SYN, syn)
            SYNA = np.append(SYNA, synA)
            SYNB = np.append(SYNB, synB)

            # Observed data are accumulated only once (first subfault).
            if subf == 0:
                ObservedDisp = np.append(ObservedDisp, tr.data, 0)

        sbarea.append(grid[subf][6])

        print np.shape(A), np.shape(np.array([SYN]))
        # Stack this subfault's synthetics as one row of the design matrices.
        if subf == 0:
            A = np.array([SYN])
            AA = np.array([SYNA])
            AB = np.array([SYNB])
        else:
            A = np.append(A, np.array([SYN]), 0)
            AA = np.append(AA, np.array([SYNA]), 0)
            AB = np.append(AB, np.array([SYNB]), 0)

    # Full matrix with the two rake's components
    AC = np.vstack((AA, AB))

    #MISFIT = np.array([])
    ########## Stabilizing the solution:
    # NOTE(review): all regularization variants below are disabled; the
    # active path solves the plain NNLS problem further down.

    #### Moment minimization:
    #~ constraintD  = np.zeros(nsf)
    #~ ObservedDispcons = np.append(ObservedDisp,constraintD)
    #~ for lbd in Lbd:
        #~ constraintF = lbd*np.eye(nsf,nsf)
        #~ Acons = np.append(A,constraintF,1)
        #~ print np.shape(Acons.T), np.shape(ObservedDispcons)
        #~ R = nnls(Acons.T,ObservedDispcons)
        #~ M = R[0]
        #~ #M = np.zeros(nsf)
        #~ #M[::2] = 1
        #~ fit = np.dot(A.T,M)
        #~ misfit = 100.*np.sum(np.abs(fit-ObservedDisp))\
                 #~ /np.sum(np.abs(ObservedDisp))
        #~ MISFIT = np.append(MISFIT,misfit)
    #~ plt.figure()
    #~ plt.plot(Lbd,MISFIT)
    #~ ###########################################
    #~ ### Smoothing:
    #~ constraintF_base = SmoothMatrix(nsx,nsy)
    #~ constraintD = np.zeros(np.shape(constraintF_base)[0])
    #~ ObservedDispcons = np.append(ObservedDisp,constraintD)
    #~ for lbd in Lbd:
        #~ constraintF = lbd*constraintF_base
        #~ Acons = np.append(A,constraintF.T,1)
        #~ #print np.shape(Acons.T), np.shape(ObservedDispcons)
        #~ R = nnls(Acons.T,ObservedDispcons)
        #~ M = R[0]
        #~ fit = np.dot(A.T,M)
        #~ misfit = 100.*np.sum(np.abs(fit-ObservedDisp))\
                 #~ /np.sum(np.abs(ObservedDisp))
        #~ print lbd, misfit
        #~ MISFIT = np.append(MISFIT,misfit)
    #~ ###########################################
    ###########################################
    #~ ##### Moment Minimization (including rake projections):
    #~ constraintD  = np.zeros(2*nsf)
    #~ ObservedDispcons = np.append(ObservedDisp,constraintD)
    #~ for lbd in Lbd:
        #~ constraintF = lbd*np.eye(2*nsf,2*nsf)
        #~ ACcons = np.append(AC,constraintF,1)
        #~ print np.shape(ACcons.T), np.shape(ObservedDispcons)
        #~ R = nnls(ACcons.T,ObservedDispcons)
        #~ M = R[0]
        #~ fit = np.dot(AC.T,M)
        #~ misfit = 100.*np.sum(np.abs(fit-ObservedDisp))\
                 #~ /np.sum(np.abs(ObservedDisp))
        #~ MISFIT = np.append(MISFIT,misfit)
    #~ M = np.sqrt(M[:nsf]**2+M[nsf:]**2)
    ##############################################
    ### Smoothing (including rake projections):
    #~ constraintF_base = SmoothMatrix(nsx,nsy)
    #~ Nbase = np.shape(constraintF_base)[0]
    #~ constraintD = np.zeros(2*Nbase)
    #~ constraintF_base_big = np.zeros((2*Nbase, 2*nsf))
    #~ constraintF_base_big[:Nbase,:nsf]= constraintF_base
    #~ constraintF_base_big[Nbase:,nsf:]= constraintF_base
    #~ ObservedDispcons = np.append(ObservedDisp,constraintD)
    #~ for lbd in Lbd:
        #~ constraintF = lbd*constraintF_base_big
        #~ ACcons = np.append(AC,constraintF.T,1)
        #~ #print np.shape(Acons.T), np.shape(ObservedDispcons)
        #~ R = nnls(ACcons.T,ObservedDispcons)
        #~ M = R[0]
        #~ fit = np.dot(AC.T,M)
        #~ misfit = 100.*np.sum(np.abs(fit-ObservedDisp))\
                 #~ /np.sum(np.abs(ObservedDisp))
        #~ print lbd, misfit
        #~ MISFIT = np.append(MISFIT,misfit)
    #~ M = np.sqrt(M[:nsf]**2+M[nsf:]**2)
    ###########################################
    #~ ##### Moment Minimization and Smoothing
    #~ #### (including rake projections):
    #~ mom0 = []
    #~ constraintF_base = SmoothMatrix(nsx,nsy)
    #~ Nbase = np.shape(constraintF_base)[0]
    #~ constraintDsmoo = np.zeros(2*Nbase)
    #~ constraintDmin  = np.zeros(2*nsf)
    #~ constraintF_base_big = np.zeros((2*Nbase, 2*nsf))
    #~ constraintF_base_big[:Nbase,:nsf]= constraintF_base
    #~ constraintF_base_big[Nbase:,nsf:]= constraintF_base
    #~ ObservedDispcons = np.concatenate((ObservedDisp,
                                       #~ constraintDmin,
                                       #~ constraintDsmoo ))
    #~ for lbdm0 in Lbdm0min:
        #~ constraintFmin = lbdm0*np.eye(2*nsf,2*nsf)
        #~ for lbdsm in Lbdsmooth:
            #~ constraintFsmoo = lbdsm*constraintF_base_big
            #~ ACcons = np.hstack((AC, constraintFmin, constraintFsmoo.T))
            #~ print lbdm0, lbdsm
            #~ R = nnls(ACcons.T,ObservedDispcons)
            #~ M = R[0]
            #~ fit = np.dot(AC.T,M)
            #~ misfit = 100.*np.sum(np.abs(fit-ObservedDisp))\
                     #~ /np.sum(np.abs(ObservedDisp))
            #~ MISFIT = np.append(MISFIT,misfit)
            #~ MA = M[:nsf]
            #~ MB = M[nsf:]
            #~ M = np.sqrt(MA**2+MB**2)
            #~ mom0.append(np.sum(M))
    ##############################################
    # Rotation to the rake's conventional angle:
    #MB, MA = Rot2D(MB,MA,-rakeB)
    print np.shape(M), np.shape(A.T)

    # Active solution path: plain non-negative least squares, no
    # regularization. R[0] is the solution vector, R[1] the residual norm.
    R = nnls(A.T, ObservedDisp)
    M = R[0]

    #~ M = np.zeros(nsf)
    #~ M[::2] = 1
    fit = np.dot(A.T, M)
    MA = M
    MB = M

    np.save("RealSol", M)

    nm0 = np.size(Lbdm0min)
    nsmth = np.size(Lbdsmooth)
    #~ plt.figure()
    #~ plt.pcolor(1./Lbdsmooth, 1./Lbdm0min,MISFIT.reshape(nm0,nsmth))
    #~ plt.xlabel(r'$1/ \lambda_{2}$', fontsize = 24)
    #~ plt.ylabel(r'$1/ \lambda_{1}$',fontsize = 24 )
    #~ plt.ylim((1./Lbdm0min).min(),(1./Lbdm0min).max() )
    #~ plt.ylim((1./Lbdsmooth).min(),(1./Lbdsmooth).max() )
    #~ cbar = plt.colorbar()
    #~ cbar.set_label("Misfit %")
    #~ print np.shape(Lbdm0min), np.shape(mom0)
    #~ plt.figure()
    #~ CS = plt.contour(1./Lbdsmooth, 1./Lbdm0min,MISFIT.reshape(nm0,nsmth) )
    #~ plt.xlabel(r'$1/ \lambda_{2}$', fontsize = 24)
    #~ plt.ylabel(r'$1/ \lambda_{1}$',fontsize = 24 )
    #~ plt.clabel(CS, inline=1, fontsize=10)
    #~ plt.title('Misfit')
    #~ plt.figure()
    #~ plt.plot(1./Lbdm0min,MISFIT)
    #~ plt.xlabel(r'$1/ \lambda_{2}$', fontsize = 24)
    #~ plt.ylabel("Misfit %")
    #~ plt.figure()
    #~ plt.plot(Lbdm0min,mom0)
    #~ plt.ylabel(r'$M_0\, [Nm]$', fontsize = 24)
    #~ plt.xlabel(r'$\lambda_{M0}$', fontsize = 24)

    # L1-style percentage misfit of the fit against the observations.
    misfit = 100.*np.sum(np.abs(fit-ObservedDisp))/np.sum(np.abs(ObservedDisp))
    print "Residual: ", 1000.*R[1]
    print misfit

    #SLIP = M*Mp/mu/(1.e6*np.array(sbarea))
    # Note: sbarea (the per-subfault list) is replaced here by the uniform
    # subfault area sflen*sfwid [km^2]; 1.e6 converts km^2 -> m^2.
    sbarea = sflen*sfwid
    SLIP = M/(mu*1.e6*sbarea)
    SLIP = SLIP.reshape(nsx, nsy).T[::-1]
    moment = M.reshape(nsx, nsy).T[::-1]

    # Fit vs. observed displacement traces.
    plt.figure(figsize=(13, 5))
    plt.plot(fit, 'b', label="Fit")
    plt.plot(ObservedDisp, 'r', label="Observed")
    plt.xlabel("Time [s]")
    plt.ylabel("Displacement [m]")
    plt.legend()

    np.set_printoptions(linewidth=1000, precision=3)
    print "***********"
    print sbarea
    print SLIP
    print np.mean(SLIP)
    print "Moment:"
    print np.sum(M)

    ### SLIPS Distribution (as the synthetics):
    SLIPS = M.reshape(nsx, nsy).T
    SLIPS /= mu*1.e6*sbarea

    #~ ######### Plotting slip distribution:
    #~ # we are going to reflect the y axis later, so:
    # Block (row/column) index of the hypocentral subfault on the grid.
    hypsbloc = [hyp_subf / nsy, -(hyp_subf % nsy) - 2]

    # Creating the strike and dip axes:
    StrikeAx = np.linspace(0, flen, nsx+1)
    DipAx = np.linspace(0, fwid, nsy+1)
    DepthAx = DipAx*np.sin(np.pi/180.*dip) + Min_h
    print DepthAx
    hlstrike = StrikeAx[hypsbloc[0]] + sflen*0.5
    # we are going to reflect the axis later, so:
    hldip = DipAx[hypsbloc[1]] + sfwid*0.5
    hldepth = DepthAx[hypsbloc[1]] + sfwid*0.5*np.sin(np.pi/180.*dip)

    # Center both axes on the hypocentre.
    StrikeAx = StrikeAx - hlstrike
    DipAx = DipAx - hldip

    XX, YY = np.meshgrid(StrikeAx, DepthAx)
    XX, ZZ = np.meshgrid(StrikeAx, DipAx)

    ###### Plot: (Old colormap: "gist_rainbow_r")
    plt.figure(figsize=(13, 6))
    ax = host_subplot(111)
    im = ax.pcolor(XX, YY, SLIPS, cmap="jet")
    ax.set_ylabel('Depth [km]')
    ax.set_ylim(DepthAx[-1], DepthAx[0])

    # Creating a twin plot (depth on the left, along-dip distance on the right)
    ax2 = ax.twinx()
    im2 = ax2.pcolor(XX, ZZ, SLIPS[::-1, :], cmap="jet")
    ax2.set_ylabel('Distance along the dip [km]')
    ax2.set_xlabel('Distance along the strike [km]')
    ax2.set_ylim(DipAx[0], DipAx[-1])
    ax2.set_xlim(StrikeAx[0], StrikeAx[-1])
    ax.axis["bottom"].major_ticklabels.set_visible(False)
    ax2.axis["bottom"].major_ticklabels.set_visible(False)
    ax2.axis["top"].set_visible(True)
    ax2.axis["top"].label.set_visible(True)

    # Horizontal colorbar below the slip map.
    divider = make_axes_locatable(ax)
    cax = divider.append_axes("bottom", size="5%", pad=0.1)
    cb = plt.colorbar(im, cax=cax, orientation="horizontal")
    cb.set_label("Slip [m]")
    # Star marks the hypocentre; size scales with the grid density.
    ax2.plot([0], [0], '*', ms=225./(nsy+4))
    ax2.set_xticks(ax2.get_xticks()[1:-1])

    #~ ### Rake plot:
    # Quiver of the two rake components (MA along rakeA, MB along rakeB),
    # colored by slip amplitude.
    plt.figure(figsize=(13, 6))
    fig = host_subplot(111)
    XXq, ZZq = np.meshgrid(StrikeAx[:-1]+sflen, DipAx[:-1]+sfwid)
    Q = plt.quiver(XXq, ZZq, MB.reshape(nsx, nsy).T[::-1, :]/(mu*1.e6*sbarea),
                   MA.reshape(nsx, nsy).T[::-1, :]/(mu*1.e6*sbarea),
                   SLIPS[::-1, :],
                   units='xy', scale=0.5, linewidths=(2,),
                   edgecolors=('k'), headaxislength=5)
    fig.set_ylim([ZZq.min()-80, ZZq.max()+80])
    fig.set_xlim([XXq.min()-20, XXq.max()+20])
    fig.set_ylabel('Distance along dip [km]')
    fig.set_xlabel('Distance along the strike [km]')

    fig2 = fig.twinx()
    fig2.set_xlabel('Distance along the strike [km]')

    fig.axis["bottom"].major_ticklabels.set_visible(False)
    fig.axis["bottom"].label.set_visible(False)
    fig2.axis["top"].set_visible(True)
    fig2.axis["top"].label.set_visible(True)
    fig2.axis["right"].major_ticklabels.set_visible(False)

    divider = make_axes_locatable(fig)
    cax = divider.append_axes("bottom", size="5%", pad=0.1)
    cb = plt.colorbar(im, cax=cax, orientation="horizontal")
    cb.set_label("Slip [m]")

    plt.show()

    #############
    #~ print np.shape(MISFIT), np.shape(RUPVEL)
    #~ plt.figure()
    #~ plt.plot(RUPVEL,MISFIT)
    #~ plt.xlabel("Rupture Velocity [km/s]")
    #~ plt.ylabel("Misfit %")
    #~ plt.show()

    print np.shape(MB.reshape(nsx, nsy).T)
    print np.shape(ZZ)