def gauge2sac(gauge_file, dictionary, xyfile, outdir, time_epi, dt):
    """
    Convert output from a GeoClaw fort.gauge file into individual SAC files.

    Parameters
    ----------
    gauge_file : str
        Path to the fort.gauge output (col 0 = gauge id, col 2 = time,
        col 6 = the value written to SAC).
    dictionary : str
        Two-column file mapping station names (col 0) to claw gauge
        numbers (col 1).
    xyfile : str
        Station coordinate file; latitude in col 2, longitude in col 3.
    outdir : str
        Directory where the ``<station>.tsun.sac`` files are written.
    time_epi : obspy.UTCDateTime
        Event origin time, used as the trace start time.
    dt : float
        Sampling interval onto which each gauge series is interpolated.

    Fixes vs. previous revision: py2 ``print k`` statement replaced by a
    py3 call; station names read with ``dtype=str`` instead of ``dtype='S'``
    so the filename concatenation below does not mix bytes and str on py3.
    """
    from numpy import genfromtxt, unique, where, arange, interp
    from obspy import Stream, Trace
    from obspy.core.util.attribdict import AttribDict

    # Read gauge file
    gauges = genfromtxt(gauge_file)
    # Read names (dtype=str keeps them as text on Python 3)
    plume_name = genfromtxt(dictionary, usecols=0, dtype=str)
    claw_name = genfromtxt(dictionary, usecols=1)
    lat = genfromtxt(xyfile, usecols=2)
    lon = genfromtxt(xyfile, usecols=3)
    # Find unique stations
    gauge_list = unique(gauges[:, 0])
    for k in range(len(gauge_list)):
        print(k)
        st = Stream(Trace())
        i = where(gauges[:, 0] == gauge_list[k])[0]
        data = gauges[i, 6]
        time = gauges[i, 2]
        # Resample the irregular gauge samples onto a regular grid from t=0
        ti = arange(0, time.max(), dt)
        tsunami = interp(ti, time, data)
        st[0].data = tsunami
        st[0].stats.starttime = time_epi
        st[0].stats.delta = dt
        iname = where(claw_name == gauge_list[k])[0][0]
        st[0].stats.station = plume_name[iname]
        sac = AttribDict()
        sac.stla = lat[iname]
        sac.stlo = lon[iname]
        # NOTE(review): hard-coded epicenter coordinates — confirm the event
        sac.evla = 46.607
        sac.evlo = 153.230
        # sac.iztype='IO'
        st[0].stats['sac'] = sac
        st.write(outdir + '/' + plume_name[iname] + '.tsun.sac', format='SAC')
def _unpack_trace(data): ah_stats = AttribDict({ 'version': '2.0', 'event': AttribDict(), 'station': AttribDict(), 'record': AttribDict(), 'extras': [] }) # station info data.unpack_int() # undocumented extra int? ah_stats.station.code = _unpack_string(data) data.unpack_int() # here too? ah_stats.station.channel = _unpack_string(data) data.unpack_int() # and again? ah_stats.station.type = _unpack_string(data) ah_stats.station.recorder = _unpack_string(data) ah_stats.station.sensor = _unpack_string(data) ah_stats.station.azimuth = data.unpack_float() # degrees E from N ah_stats.station.dip = data.unpack_float() # up = -90, down = +90 ah_stats.station.latitude = data.unpack_double() ah_stats.station.longitude = data.unpack_double() ah_stats.station.elevation = data.unpack_float() ah_stats.station.gain = data.unpack_float() ah_stats.station.normalization = data.unpack_float() # A0 npoles = data.unpack_int() ah_stats.station.poles = [] for _i in range(npoles): r = data.unpack_float() i = data.unpack_float() ah_stats.station.poles.append(complex(r, i)) nzeros = data.unpack_int() ah_stats.station.zeros = [] for _i in range(nzeros): r = data.unpack_float() i = data.unpack_float() ah_stats.station.zeros.append(complex(r, i)) ah_stats.station.comment = _unpack_string(data) # event info ah_stats.event.latitude = data.unpack_double() ah_stats.event.longitude = data.unpack_double() ah_stats.event.depth = data.unpack_float() ot_year = data.unpack_int() ot_mon = data.unpack_int() ot_day = data.unpack_int() ot_hour = data.unpack_int() ot_min = data.unpack_int() ot_sec = data.unpack_float() try: ot = UTCDateTime(ot_year, ot_mon, ot_day, ot_hour, ot_min, ot_sec) except Exception: ot = None ah_stats.event.origin_time = ot data.unpack_int() # and again? 
ah_stats.event.comment = _unpack_string(data) # record info ah_stats.record.type = dtype = data.unpack_int() # data type ah_stats.record.ndata = ndata = data.unpack_uint() # number of samples ah_stats.record.delta = data.unpack_float() # sampling interval ah_stats.record.max_amplitude = data.unpack_float() at_year = data.unpack_int() at_mon = data.unpack_int() at_day = data.unpack_int() at_hour = data.unpack_int() at_min = data.unpack_int() at_sec = data.unpack_float() at = UTCDateTime(at_year, at_mon, at_day, at_hour, at_min, at_sec) ah_stats.record.start_time = at ah_stats.record.units = _unpack_string(data) ah_stats.record.inunits = _unpack_string(data) ah_stats.record.outunits = _unpack_string(data) data.unpack_int() # and again? ah_stats.record.comment = _unpack_string(data) data.unpack_int() # and again? ah_stats.record.log = _unpack_string(data) # user attributes nusrattr = data.unpack_int() ah_stats.usrattr = {} for _i in range(nusrattr): key = _unpack_string(data) value = _unpack_string(data) ah_stats.usrattr[key] = value # unpack data using dtype from record info if dtype == 1: # float temp = data.unpack_farray(ndata, data.unpack_float) elif dtype == 6: # double temp = data.unpack_farray(ndata, data.unpack_double) else: # e.g. 3 (vector), 2 (complex), 4 (tensor) msg = 'Unsupported AH v2 record type %d' raise NotImplementedError(msg % (dtype)) tr = Trace(np.array(temp)) tr.stats.ah = ah_stats tr.stats.delta = ah_stats.record.delta tr.stats.starttime = ah_stats.record.start_time tr.stats.station = ah_stats.station.code tr.stats.channel = ah_stats.station.channel return tr
def RGF_from_SW4(path_to_green=".", t0=0, file_name=None,
                 origin_time=None, event_lat=None, event_lon=None,
                 depth=None, station_name=None, station_lat=None,
                 station_lon=None, output_directory="sw4out"):
    """
    Function to convert reciprocal Green's functions from SW4 to tensor format

    Reads the reciprocal Green's functions (displacement/unit force) from SW4
    and performs the summation to get the Green's function tensor.
    RGFs from SW4 are oriented north, east and positive down by setting az=0.

    Assumes the following file structure:
    f[x,y,z]/station_name/event_name.[x,y,z]

    :param path_to_green: root directory containing the fx/fy/fz folders.
    :param t0: pre-event time shift in seconds; traces start at origin_time - t0.
    :param file_name: event-name stem of the SAC files inside each station dir.
    :param origin_time: obspy.UTCDateTime of the event origin.
    :param event_lat, event_lon, depth: source location (depth used in filename).
    :param station_name, station_lat, station_lon: parallel lists of receivers.
    :param output_directory: subdirectory of path_to_green for the output GFs.
    """
    import os
    from obspy.core import read, Stream
    from obspy.geodetics.base import gps2dist_azimuth
    from obspy.core.util.attribdict import AttribDict

    # Defined variables (do not change)
    dirs = ["fz", "fx", "fy"]  # directory to displacement per unit force
    du = ["duxdx", "duydy", "duzdz", "duydx", "duxdy",
          "duzdx", "duxdz", "duzdy", "duydz"]  # displacement-gradient channels
    orientation = ["Z", "N", "E"]  # set az=0 in SW4 so x=north, y=east
    cmpaz = [0, 0, 90]
    cmpinc = [0, 90, 90]

    # Create a new output directory under path_to_green
    dirout = "%s/%s" % (path_to_green, output_directory)
    if os.path.exists(dirout):
        print("Warning: output directory '%s' already exists." % dirout)
    else:
        print("Creating output directory '%s'." % dirout)
        os.mkdir(dirout)

    # Loop over each directory fx, fy, fz
    nsta = len(station_name)
    for i in range(3):
        # Set headers according to the orientation
        if dirs[i][-1].upper() == "Z":
            scale = -1  # change to positive up
        else:
            scale = 1

        # Loop over each station
        for j in range(nsta):
            station = station_name[j]
            stlo = station_lon[j]
            stla = station_lat[j]
            dirin = "%s/%s/%s" % (path_to_green, dirs[i], station)
            print("Reading RGFs from %s:" % (dirin))
            # Collect all nine displacement-gradient traces for this station
            st = Stream()
            for gradient in du:
                fname = "%s/%s.%s" % (dirin, file_name, gradient)
                st += read(fname, format="SAC")

            # Set station headers
            starttime = origin_time - t0
            dist, az, baz = gps2dist_azimuth(event_lat, event_lon, stla, stlo)

            # SAC headers (shared by every tensor component of this station)
            sacd = AttribDict()
            sacd.stla = stla
            sacd.stlo = stlo
            sacd.evla = event_lat
            sacd.evlo = event_lon
            sacd.az = az
            sacd.baz = baz
            sacd.dist = dist / 1000  # convert to kilometers
            sacd.o = 0
            sacd.b = -1 * t0
            sacd.cmpaz = cmpaz[i]
            sacd.cmpinc = cmpinc[i]
            sacd.kstnm = station

            # Update start time
            for tr in st:
                tr.stats.starttime = starttime
                tr.stats.distance = dist
                tr.stats.back_azimuth = baz

            # Sum displacement gradients to get reciprocal Green's functions.
            # Diagonal terms map one-to-one; off-diagonal terms are the
            # symmetric sums (e.g. XY = duydx + duxdy).
            tensor = Stream()
            for gradient, element in zip(["duxdx", "duydy", "duzdz"],
                                         ["XX", "YY", "ZZ"]):
                trace = st.select(channel=gradient)[0].copy()
                trace.stats.channel = "%s%s" % (orientation[i], element)
                tensor += trace

            trace = st.select(channel="duydx")[0].copy()
            trace.data += st.select(channel="duxdy")[0].data
            trace.stats.channel = "%s%s" % (orientation[i], "XY")
            tensor += trace

            trace = st.select(channel="duzdx")[0].copy()
            trace.data += st.select(channel="duxdz")[0].data
            trace.stats.channel = "%s%s" % (orientation[i], "XZ")
            tensor += trace

            trace = st.select(channel="duzdy")[0].copy()
            trace.data += st.select(channel="duydz")[0].data
            trace.stats.channel = "%s%s" % (orientation[i], "YZ")
            tensor += trace

            # Set sac headers before saving
            print(" Saving GFs to %s" % dirout)
            for tr in tensor:
                tr.trim(origin_time, tr.stats.endtime)
                tr.data = scale * tr.data  # flip fz output to positive up
                tr.stats.sac = sacd
                sacout = "%s/%s.%.4f.%s" % (dirout, station, depth,
                                            tr.stats.channel)
                # print("Writing %s to file."%sacout)
                tr.write(sacout, format="SAC")
def _unpack_trace(data):
    """
    Read one AH (Ad Hoc) version 1 trace record from an XDR unpacker.

    :param data: XDR unpacker positioned at the start of a v1 trace record.
    :return: :class:`~obspy.core.trace.Trace` with the full AH header
        stored under ``tr.stats.ah``.
    :raises NotImplementedError: for record types other than float (1)
        or double (6).

    NOTE: the unpack calls below mirror the AH v1 on-disk field order
    exactly; do not reorder them.
    """
    ah_stats = AttribDict({
        'version': '1.0',
        'event': AttribDict(),
        'station': AttribDict(),
        'record': AttribDict(),
        'extras': []
    })

    # station info
    ah_stats.station.code = _unpack_string(data)
    ah_stats.station.channel = _unpack_string(data)
    ah_stats.station.type = _unpack_string(data)
    ah_stats.station.latitude = data.unpack_float()
    ah_stats.station.longitude = data.unpack_float()
    ah_stats.station.elevation = data.unpack_float()
    ah_stats.station.gain = data.unpack_float()
    ah_stats.station.normalization = data.unpack_float()  # A0
    # v1 stores a fixed block of 30 interleaved (pole, zero) complex pairs
    poles = []
    zeros = []
    for _i in range(0, 30):
        r = data.unpack_float()
        i = data.unpack_float()
        poles.append(complex(r, i))
        r = data.unpack_float()
        i = data.unpack_float()
        zeros.append(complex(r, i))
    # first value describes number of poles/zeros
    npoles = int(poles[0].real) + 1
    nzeros = int(zeros[0].real) + 1
    ah_stats.station.poles = poles[1:npoles]
    ah_stats.station.zeros = zeros[1:nzeros]

    # event info
    ah_stats.event.latitude = data.unpack_float()
    ah_stats.event.longitude = data.unpack_float()
    ah_stats.event.depth = data.unpack_float()
    ot_year = data.unpack_int()
    ot_mon = data.unpack_int()
    ot_day = data.unpack_int()
    ot_hour = data.unpack_int()
    ot_min = data.unpack_int()
    ot_sec = data.unpack_float()
    try:
        ot = UTCDateTime(ot_year, ot_mon, ot_day, ot_hour, ot_min, ot_sec)
    except Exception:
        # tolerate zeroed/invalid origin timestamps in the file
        ot = None
    ah_stats.event.origin_time = ot
    ah_stats.event.comment = _unpack_string(data)

    # record info
    ah_stats.record.type = dtype = data.unpack_int()  # data type
    ah_stats.record.ndata = ndata = data.unpack_uint()  # number of samples
    ah_stats.record.delta = data.unpack_float()  # sampling interval
    ah_stats.record.max_amplitude = data.unpack_float()
    at_year = data.unpack_int()
    at_mon = data.unpack_int()
    at_day = data.unpack_int()
    at_hour = data.unpack_int()
    at_min = data.unpack_int()
    at_sec = data.unpack_float()
    at = UTCDateTime(at_year, at_mon, at_day, at_hour, at_min, at_sec)
    ah_stats.record.start_time = at
    ah_stats.record.abscissa_min = data.unpack_float()
    ah_stats.record.comment = _unpack_string(data)
    ah_stats.record.log = _unpack_string(data)

    # extras
    ah_stats.extras = data.unpack_array(data.unpack_float)

    # unpack data using dtype from record info
    if dtype == 1:
        # float
        temp = data.unpack_farray(ndata, data.unpack_float)
    elif dtype == 6:
        # double
        temp = data.unpack_farray(ndata, data.unpack_double)
    else:
        # e.g. 3 (vector), 2 (complex), 4 (tensor)
        msg = 'Unsupported AH v1 record type %d'
        raise NotImplementedError(msg % (dtype))

    tr = Trace(np.array(temp))
    tr.stats.ah = ah_stats
    tr.stats.delta = ah_stats.record.delta
    tr.stats.starttime = ah_stats.record.start_time
    tr.stats.station = ah_stats.station.code
    tr.stats.channel = ah_stats.station.channel
    return tr
def _unpack_trace(data):
    """
    Read one AH (Ad Hoc) version 1 trace record from an XDR unpacker.

    :param data: XDR unpacker positioned at the start of a v1 trace record.
    :return: :class:`~obspy.core.trace.Trace` with the full AH header
        stored under ``tr.stats.ah``.
    :raises NotImplementedError: for record types other than float (1)
        or double (6).

    Fix vs. previous revision: the bare ``except:`` around the origin-time
    parse also swallowed SystemExit/KeyboardInterrupt; narrowed to
    ``except Exception:`` (matching the sibling implementation).

    NOTE: the unpack calls below mirror the AH v1 on-disk field order
    exactly; do not reorder them.
    """
    ah_stats = AttribDict({
        'version': '1.0',
        'event': AttribDict(),
        'station': AttribDict(),
        'record': AttribDict(),
        'extras': []
    })

    # station info
    ah_stats.station.code = _unpack_string(data)
    ah_stats.station.channel = _unpack_string(data)
    ah_stats.station.type = _unpack_string(data)
    ah_stats.station.latitude = data.unpack_float()
    ah_stats.station.longitude = data.unpack_float()
    ah_stats.station.elevation = data.unpack_float()
    ah_stats.station.gain = data.unpack_float()
    ah_stats.station.normalization = data.unpack_float()  # A0
    # v1 stores a fixed block of 30 interleaved (pole, zero) complex pairs
    poles = []
    zeros = []
    for _i in range(0, 30):
        r = data.unpack_float()
        i = data.unpack_float()
        poles.append(complex(r, i))
        r = data.unpack_float()
        i = data.unpack_float()
        zeros.append(complex(r, i))
    # first value describes number of poles/zeros
    npoles = int(poles[0].real) + 1
    nzeros = int(zeros[0].real) + 1
    ah_stats.station.poles = poles[1:npoles]
    ah_stats.station.zeros = zeros[1:nzeros]

    # event info
    ah_stats.event.latitude = data.unpack_float()
    ah_stats.event.longitude = data.unpack_float()
    ah_stats.event.depth = data.unpack_float()
    ot_year = data.unpack_int()
    ot_mon = data.unpack_int()
    ot_day = data.unpack_int()
    ot_hour = data.unpack_int()
    ot_min = data.unpack_int()
    ot_sec = data.unpack_float()
    try:
        ot = UTCDateTime(ot_year, ot_mon, ot_day, ot_hour, ot_min, ot_sec)
    except Exception:
        # tolerate zeroed/invalid origin timestamps in the file
        ot = None
    ah_stats.event.origin_time = ot
    ah_stats.event.comment = _unpack_string(data)

    # record info
    ah_stats.record.type = dtype = data.unpack_int()  # data type
    ah_stats.record.ndata = ndata = data.unpack_uint()  # number of samples
    ah_stats.record.delta = data.unpack_float()  # sampling interval
    ah_stats.record.max_amplitude = data.unpack_float()
    at_year = data.unpack_int()
    at_mon = data.unpack_int()
    at_day = data.unpack_int()
    at_hour = data.unpack_int()
    at_min = data.unpack_int()
    at_sec = data.unpack_float()
    at = UTCDateTime(at_year, at_mon, at_day, at_hour, at_min, at_sec)
    ah_stats.record.start_time = at
    ah_stats.record.abscissa_min = data.unpack_float()
    ah_stats.record.comment = _unpack_string(data)
    ah_stats.record.log = _unpack_string(data)

    # extras
    ah_stats.extras = data.unpack_array(data.unpack_float)

    # unpack data using dtype from record info
    if dtype == 1:
        # float
        temp = data.unpack_farray(ndata, data.unpack_float)
    elif dtype == 6:
        # double
        temp = data.unpack_farray(ndata, data.unpack_double)
    else:
        # e.g. 3 (vector), 2 (complex), 4 (tensor)
        msg = 'Unsupported AH v1 record type %d'
        raise NotImplementedError(msg % (dtype))

    tr = Trace(np.array(temp))
    tr.stats.ah = ah_stats
    tr.stats.delta = ah_stats.record.delta
    tr.stats.starttime = ah_stats.record.start_time
    tr.stats.station = ah_stats.station.code
    tr.stats.channel = ah_stats.station.channel
    return tr
def _unpack_trace(data):
    """
    Read one AH (Ad Hoc) version 2 trace record from an XDR unpacker.

    :param data: XDR unpacker positioned at the start of a v2 trace record.
    :return: :class:`~obspy.core.trace.Trace` with the full AH header
        stored under ``tr.stats.ah``.
    :raises NotImplementedError: for record types other than float (1)
        or double (6).

    Fix vs. previous revision: the bare ``except:`` around the origin-time
    parse also swallowed SystemExit/KeyboardInterrupt; narrowed to
    ``except Exception:`` (matching the sibling implementation).

    NOTE: the unpack calls below mirror the AH v2 on-disk field order
    exactly; do not reorder them.
    """
    ah_stats = AttribDict({
        'version': '2.0',
        'event': AttribDict(),
        'station': AttribDict(),
        'record': AttribDict(),
        'extras': []
    })

    # station info
    data.unpack_int()  # undocumented extra int, discarded
    ah_stats.station.code = _unpack_string(data)
    data.unpack_int()  # undocumented extra int, discarded
    ah_stats.station.channel = _unpack_string(data)
    data.unpack_int()  # undocumented extra int, discarded
    ah_stats.station.type = _unpack_string(data)
    ah_stats.station.recorder = _unpack_string(data)
    ah_stats.station.sensor = _unpack_string(data)
    ah_stats.station.azimuth = data.unpack_float()  # degrees E from N
    ah_stats.station.dip = data.unpack_float()  # up = -90, down = +90
    ah_stats.station.latitude = data.unpack_double()
    ah_stats.station.longitude = data.unpack_double()
    ah_stats.station.elevation = data.unpack_float()
    ah_stats.station.gain = data.unpack_float()
    ah_stats.station.normalization = data.unpack_float()  # A0
    # v2 stores explicit pole/zero counts followed by (re, im) pairs
    npoles = data.unpack_int()
    ah_stats.station.poles = []
    for _i in range(npoles):
        r = data.unpack_float()
        i = data.unpack_float()
        ah_stats.station.poles.append(complex(r, i))
    nzeros = data.unpack_int()
    ah_stats.station.zeros = []
    for _i in range(nzeros):
        r = data.unpack_float()
        i = data.unpack_float()
        ah_stats.station.zeros.append(complex(r, i))
    ah_stats.station.comment = _unpack_string(data)

    # event info
    ah_stats.event.latitude = data.unpack_double()
    ah_stats.event.longitude = data.unpack_double()
    ah_stats.event.depth = data.unpack_float()
    ot_year = data.unpack_int()
    ot_mon = data.unpack_int()
    ot_day = data.unpack_int()
    ot_hour = data.unpack_int()
    ot_min = data.unpack_int()
    ot_sec = data.unpack_float()
    try:
        ot = UTCDateTime(ot_year, ot_mon, ot_day, ot_hour, ot_min, ot_sec)
    except Exception:
        # tolerate zeroed/invalid origin timestamps in the file
        ot = None
    ah_stats.event.origin_time = ot
    data.unpack_int()  # undocumented extra int, discarded
    ah_stats.event.comment = _unpack_string(data)

    # record info
    ah_stats.record.type = dtype = data.unpack_int()  # data type
    ah_stats.record.ndata = ndata = data.unpack_uint()  # number of samples
    ah_stats.record.delta = data.unpack_float()  # sampling interval
    ah_stats.record.max_amplitude = data.unpack_float()
    at_year = data.unpack_int()
    at_mon = data.unpack_int()
    at_day = data.unpack_int()
    at_hour = data.unpack_int()
    at_min = data.unpack_int()
    at_sec = data.unpack_float()
    at = UTCDateTime(at_year, at_mon, at_day, at_hour, at_min, at_sec)
    ah_stats.record.start_time = at
    ah_stats.record.units = _unpack_string(data)
    ah_stats.record.inunits = _unpack_string(data)
    ah_stats.record.outunits = _unpack_string(data)
    data.unpack_int()  # undocumented extra int, discarded
    ah_stats.record.comment = _unpack_string(data)
    data.unpack_int()  # undocumented extra int, discarded
    ah_stats.record.log = _unpack_string(data)

    # user attributes: count followed by key/value string pairs
    nusrattr = data.unpack_int()
    ah_stats.usrattr = {}
    for _i in range(nusrattr):
        key = _unpack_string(data)
        value = _unpack_string(data)
        ah_stats.usrattr[key] = value

    # unpack data using dtype from record info
    if dtype == 1:
        # float
        temp = data.unpack_farray(ndata, data.unpack_float)
    elif dtype == 6:
        # double
        temp = data.unpack_farray(ndata, data.unpack_double)
    else:
        # e.g. 3 (vector), 2 (complex), 4 (tensor)
        msg = 'Unsupported AH v2 record type %d'
        raise NotImplementedError(msg % (dtype))

    tr = Trace(np.array(temp))
    tr.stats.ah = ah_stats
    tr.stats.delta = ah_stats.record.delta
    tr.stats.starttime = ah_stats.record.start_time
    tr.stats.station = ah_stats.station.code
    tr.stats.channel = ah_stats.station.channel
    return tr
# Collect per-event result files from the waveformCompare static database.
path = "/import/two-data/salvermoser/waveformCompare"
filenames_json = glob.glob(path + "/database/static/OUTPUT/*/*.json")
filenames_xml = glob.glob(path + "/database/static/OUTPUT/*/*.xml")
counter = 0  # NOTE(review): never incremented in this chunk — confirm use downstream
for i_ in range(0, len(filenames_json)):
    # Extract the processing results for one event from its JSON summary.
    d, pcc, tSNR, rSNR, tba, eba, peak_tra, freq, p_rot = parse_json(
        filenames_json[i_], 'epicentral_distance',
        'peak_correlation_coefficient', 'transverse_acceleration_SNR',
        'vertical_rotation_rate_SNR',
        'theoretical_backazimuth', 'estimated_backazimuth',
        'peak_transverse_acceleration',
        'frequency_at_peak_vertical_rotation_rate',
        'peak_vertical_rotation_rate')
    # Build a namespaced parameter AttribDict (QuakeML extra-tags layout:
    # each entry carries its own namespace/value/attrib fields).
    ns = 'http://www.rotational-seismology.org'
    params = AttribDict()
    params.namespace = ns
    params.value = AttribDict()
    params.value.epicentral_distance = AttribDict()
    params.value.epicentral_distance.namespace = ns
    params.value.epicentral_distance.value = d
    params.value.epicentral_distance.attrib = {'unit': "km"}
    params.value.transverse_acceleration_SNR = AttribDict()
    params.value.transverse_acceleration_SNR.namespace = ns
    params.value.transverse_acceleration_SNR.value = tSNR
    params.value.vertical_rotation_rate_SNR = AttribDict()
    params.value.vertical_rotation_rate_SNR.namespace = ns
    params.value.vertical_rotation_rate_SNR.value = rSNR
    # NOTE(review): loop body appears to continue beyond this chunk
    # (params is not yet consumed here) — verify against the full file.
def __init__(self):
    """Set up an empty project: configuration, event catalog, station list."""
    self.config = AttribDict()          # free-form configuration container
    self.events = events.ses3dCatalog() # event catalog
    self.stalst = stations.StaLst()     # station list
    # self.vmodel
def stream_add_stats(data_stream, inv, evt, write_sac=False, rotate_in_obspy=False):
    """
    Attach station coordinates and event distance/azimuth metadata to every
    trace in ``data_stream`` that matches a channel in inventory ``inv``,
    optionally writing each trace out as a SAC file.

    :param data_stream: obspy Stream to annotate (modified in place).
    :param inv: obspy Inventory providing network/station/channel metadata.
    :param evt: obspy Event; origin 0 and magnitude 0 are used.
    :param write_sac: if True, also build SAC headers and write one
        ``STA.NET.LOC.CHA.sac`` file per trace to the current directory.
    :param rotate_in_obspy: currently unused placeholder.

    Fixes vs. previous revision: ``if write_sac==True`` replaced with the
    idiomatic truthiness test; error paths now call ``sys.exit(1)`` (nonzero
    status) instead of the bare site builtin ``exit()``.
    """
    import sys

    for net in inv:
        for sta in net:
            str1 = data_stream.select(network=net.code, station=sta.code)
            print(str(net.code), str(sta.code), len(str1))
            if len(str1) == 0:
                continue
            # update in future to deal with multiple channel (total_number_of channels)
            if len(str1) % 3 != 0:
                print('Problem: missing components', str1)
                sys.exit(1)
            for tr in str1:
                # locate the inventory channel matching this trace
                for chan in sta:
                    if tr.stats.channel == chan.code and tr.stats.location == chan.location_code:
                        break
                else:
                    print('Problem finding channel in inventory', tr)
                    sys.exit(1)
                tr.stats.coordinates = {'latitude': chan.latitude,
                                        'longitude': chan.longitude}
                (tr.stats.distance, tr.stats.azimuth,
                 tr.stats.back_azimuth) = gps2dist_azimuth(
                    chan.latitude, chan.longitude,
                    evt.origins[0].latitude, evt.origins[0].longitude)
                if write_sac:
                    sac = AttribDict()
                    sac.kstnm = str(sta.code)
                    sac.knetwk = str(net.code)
                    sac.kcmpnm = str(chan.code)
                    sac.khole = str(chan.location_code)
                    sac.stla = chan.latitude
                    sac.stlo = chan.longitude
                    sac.stel = chan.elevation
                    sac.evla = evt.origins[0].latitude
                    sac.evlo = evt.origins[0].longitude
                    sac.evdp = evt.origins[0].depth / 1000.  # in km
                    sac.mag = evt.magnitudes[0].mag
                    time = evt.origins[0].time
                    # SAC uses milliseconds while obspy uses microseconds
                    sac.nzyear, sac.nzjday, sac.nzhour, sac.nzmin, sac.nzsec, sac.nzmsec = \
                        time.year, time.julday, time.hour, time.minute, \
                        time.second, time.microsecond / 1000
                    sac.o = 0.
                    sac.b = tr.stats.starttime - time  # this is very important!!
                    sac.kevnm = str(time)
                    sac.cmpaz = chan.azimuth
                    # dip is from horizontal downward; inc is from vertical downward
                    sac.cmpinc = chan.dip + 90
                    sac.gcarc = locations2degrees(
                        evt.origins[0].latitude, evt.origins[0].longitude,
                        chan.latitude, chan.longitude)
                    sac.dist, sac.az, sac.baz = (tr.stats.distance / 1000,
                                                 tr.stats.azimuth,
                                                 tr.stats.back_azimuth)
                    tr.stats.sac = sac
                    tr_name = (sta.code + '.' + net.code + '.' +
                               chan.location_code + '.' + chan.code + '.sac')
                    tr.write(tr_name, format='SAC')
def write_stream_to_sac(str1, write_dir='data', ext='', verbose=False):
    """
    Write every trace of ``str1`` to ``write_dir`` as a SAC file, building
    the SAC header from metadata previously attached to ``tr.stats``
    (station_coordinates, event_origin, event_mag, cmpaz/dip, gcarc,
    distance, azimuth, back_azimuth, Parr/Sarr travel times).

    :param str1: obspy Stream with the annotated traces.
    :param write_dir: existing output directory (exits if missing).
    :param ext: optional extra extension inserted before ``.sac``.
    :param verbose: if True, print each output filename.

    Fixes vs. previous revision: ``sys.exit`` was called with two arguments
    (exits with a tuple as status instead of a message) — now a single
    formatted message; a duplicated ``sac.evla, ...`` assignment removed.
    """
    if ext != '':
        ext = '.' + ext
    if not os.path.isdir(write_dir):
        # sys.exit accepts a single status/message argument
        sys.exit('No such dir to write sac: %s' % write_dir)
    for tr in str1:
        sac = AttribDict()
        (sac.kstnm, sac.knetwk, sac.kcmpnm, sac.khole) = (
            str(tr.stats.station), str(tr.stats.network),
            str(tr.stats.channel), str(tr.stats.location))
        (sac.stla, sac.stlo, sac.stel) = (
            tr.stats.station_coordinates.latitude,
            tr.stats.station_coordinates.longitude,
            tr.stats.station_coordinates.elevation)
        ev = tr.stats.event_origin
        time = ev.time
        # sac depth is in km
        sac.evla, sac.evlo, sac.evdp, sac.mag = (
            ev.latitude, ev.longitude, ev.depth / 1000., tr.stats.event_mag.mag)
        # sac uses millisec while obspy uses microsec.
        sac.nzyear, sac.nzjday, sac.nzhour, sac.nzmin, sac.nzsec, sac.nzmsec = \
            time.year, time.julday, time.hour, time.minute, time.second, \
            time.microsecond / 1000
        sac.o = 0.
        sac.b = tr.stats.starttime - time  # this is very important!!
        sac.kevnm = str(time)
        # dip is from horizontal downward; inc is from vertical downward
        # in SAC component "incidence angle" relative to the vertical
        sac.cmpaz, sac.cmpinc = tr.stats.cmpaz, tr.stats.dip + 90
        sac.gcarc, sac.dist, sac.az, sac.baz = (
            tr.stats.gcarc, tr.stats.distance / 1000,
            tr.stats.azimuth, tr.stats.back_azimuth)
        # traveltimes
        sac.a = tr.stats.Parr.arrival_time
        sac.ka = 'P'
        # cannot add S time because user1 is assigned to ray parameter
        # the ray parameter required by hk code is in sin(th)/v
        (sac.user0, sac.user1) = (tr.stats.Parr.rayp / radiusOfEarth,
                                  tr.stats.Sarr.rayp / radiusOfEarth)
        # add sac header to tr.stats
        tr.stats.sac = sac
        # set sac file name
        tr_name = (write_dir + '/' + tr.stats.station + '.' +
                   tr.stats.network + '.' + tr.stats.location + '.' +
                   tr.stats.channel + ext + '.sac')
        tr.write(tr_name, format='SAC')
        if verbose:
            print('Writing sac file ...' + tr_name)
def get_syngine_data(model, client=None, reclat=None, reclon=None, inv=None,
                     eventid=None, origins=None, m_tensor=None,
                     source_dc=None):
    """
    Fetch synthetic waveforms from the IRIS Syngine service, either for the
    stations of an inventory or for explicit receiver coordinates.

    param model: Syngine Earth model name.
    param client: FDSN client name; if given, also used to search the
        catalog for matching events around ``origins``.
    param reclat: receiver latitudes
    type reclat: list of floats
    param reclon: receiver longitudes
    type reclon: list of floats
    param inv: obspy Inventory; one Stream is fetched per network and
        returned keyed by network code.
    param eventid: event identifier passed straight to Syngine (takes
        precedence over the explicit source parameters).
    param origins: obspy Origin supplying time/location/depth for the
        synthetic source and the catalog search.
    param m_tensor: moment tensor for the synthetic source.
    param source_dc: double-couple source parameters.

    returns: ``(streams, cat)`` — streams is an AttribDict of Streams keyed
        by network code when ``inv`` is given (or a single Stream when
        reclat/reclon are given), cat is the event catalog or None.

    NOTE(review): if neither ``inv`` nor ``reclat``/``reclon`` is supplied,
    ``streams`` is never bound and the final return raises NameError —
    confirm callers always pass one of them.
    """
    if client:
        client = fdsnClient(client)
    synclient = synClient()
    if inv:
        # one synthetic Stream per network, keyed by network code
        streams = AttribDict()
        for network in inv:
            stream = obspy.Stream()
            for station in network:
                print(station)
                if eventid:
                    stream_tmp = synclient.get_waveforms(
                        model=model, network=network.code,
                        station=station.code, eventid=eventid)
                else:
                    stream_tmp = synclient.get_waveforms(
                        model=model, network=network.code,
                        station=station.code,
                        origintime=origins.time,
                        sourcelatitude=origins.latitude,
                        sourcelongitude=origins.longitude,
                        sourcedepthinmeters=origins.depth,
                        sourcemomenttensor=m_tensor,
                        sourcedoublecouple=source_dc)
                # keep only the first returned trace
                stream.append(stream_tmp[0])
            streams[network.code] = stream
    if reclat and reclon:
        # explicit receiver coordinates: a single flat Stream is returned
        stream = obspy.Stream()
        for rlat, rlon in zip(reclat, reclon):
            if eventid:
                stream_tmp = synclient.get_waveforms(
                    model=model, receiverlatitude=rlat,
                    receiverlongitude=rlon, eventid=eventid)
            else:
                stream_tmp = synclient.get_waveforms(
                    model=model, receiverlatitude=rlat,
                    receiverlongitude=rlon,
                    origintime=origins.time,
                    sourcelatitude=origins.latitude,
                    sourcelongitude=origins.longitude,
                    sourcedepthinmeters=origins.depth,
                    sourcemomenttensor=m_tensor,
                    sourcedoublecouple=source_dc)
            stream.append(stream_tmp[0])
        streams = stream
    if origins:
        # NOTE(review): window is [origin-120 s, origin]; if a window
        # centered on the origin was intended, endtime should presumably
        # be starttime + 240 — confirm.
        starttime = origins.time - 120
        endtime = starttime + 120
        if client:
            cat = client.get_events(starttime, endtime,
                                    minlatitude=origins.latitude - .5,
                                    maxlatitude=origins.latitude + .5)
        else:
            cat = None
    else:
        cat = None
    return streams, cat