def generate_events(spark_context, eventList):
    """Fetch event metadata from IRIS for each not-yet-processed event ID.

    Parses the textual representation of each catalog entry into a dict
    with keys 'date-time', 'EventID', 'Latitude', 'Longitude' and
    'Magnitude', collects those dicts and forwards them to processEvents.

    Parameters:
        spark_context: active SparkContext; stored into the module-global sc.
        eventList: iterable of event IDs (each coercible to int).

    Returns:
        Whatever processEvents returns for the collected quake data.
    """
    global sc
    sc = spark_context
    quake_data = list()
    client = Client("IRIS")

    # Resume support: event IDs handled by a previous run are skipped.
    processedEvent = set()
    if os.path.exists("processedEvent.p"):
        # with-statement guarantees the handle is closed even on error
        # (original open/close pair leaked the handle on a load failure).
        with open('processedEvent.p', 'rb') as pickleFile:
            processedEvent = pickle.load(pickleFile)

    # Hoisted out of the loop (was recompiled per event); raw string
    # keeps the escape sequences explicit.
    event_pattern = re.compile(r"Event:\t(.*)\n")

    for each_event in eventList:
        if each_event in processedEvent:
            continue
        cat = client.get_events(eventid=int(each_event))
        eventDict = dict()
        for each_entry in cat:
            # Entries stringify as "Event:\t<time> | <lat>, <lon> | <mag> <unit>".
            event_info = event_pattern.match(str(each_entry)).groups()[0]
            event_info_list = event_info.split('|')
            # .strip() replaces the redundant .rstrip().lstrip() chains.
            eventDict['date-time'] = event_info_list[0].strip()
            eventDict['EventID'] = each_event
            origin_info = event_info_list[1].split(',')
            eventDict['Latitude'] = origin_info[0].strip()
            eventDict['Longitude'] = origin_info[1].strip()
            eventDict['Magnitude'] = event_info_list[2].split()[0].strip()
            # NOTE(review): eventDict is shared across entries of the same
            # catalog; fine for the usual single-entry case — confirm.
            quake_data.append(eventDict)
    return processEvents(quake_data, client, processedEvent)
def generate_events(spark_context, eventList):
    """Build quake metadata dicts for unprocessed IRIS event IDs.

    For every ID in eventList not found in the pickled processed-event
    set, the IRIS catalog entry is fetched and parsed into a dict of
    'date-time', 'EventID', 'Latitude', 'Longitude' and 'Magnitude'
    strings; the list of dicts is passed on to processEvents.

    Parameters:
        spark_context: active SparkContext, published via the global sc.
        eventList: iterable of event identifiers (int-coercible).

    Returns:
        The result of processEvents(quake_data, client, processedEvent).
    """
    global sc
    sc = spark_context
    quake_data = list()
    client = Client("IRIS")

    # Previously handled events are remembered on disk so reruns skip them.
    processedEvent = set()
    if os.path.exists("processedEvent.p"):
        # Context manager closes the pickle file even if loading raises
        # (the original explicit close was skipped on error).
        with open('processedEvent.p', 'rb') as pickleFile:
            processedEvent = pickle.load(pickleFile)

    # Compile once, outside the loop, with a raw pattern string.
    event_pattern = re.compile(r"Event:\t(.*)\n")

    for each_event in eventList:
        if each_event in processedEvent:
            continue
        cat = client.get_events(eventid=int(each_event))
        eventDict = dict()
        for each_entry in cat:
            # The entry's str() form is "Event:\t<time> | <lat>, <lon> | <mag> ...".
            event_info = event_pattern.match(str(each_entry)).groups()[0]
            event_info_list = event_info.split('|')
            # .strip() trims both ends in one call.
            eventDict['date-time'] = event_info_list[0].strip()
            eventDict['EventID'] = each_event
            origin_info = event_info_list[1].split(',')
            eventDict['Latitude'] = origin_info[0].strip()
            eventDict['Longitude'] = origin_info[1].strip()
            eventDict['Magnitude'] = event_info_list[2].split()[0].strip()
            quake_data.append(eventDict)
    return processEvents(quake_data, client, processedEvent)
def main(): #==== INITIALIZE ==== #---- parameters ---- redownload = False basedir = 'semucb_dataset' datadir = os.path.join(basedir,'data') t1,t2 = -3600.,10000. #---- input files ---- fname_evcatalogue = os.path.join(basedir,'evcatalogue_semucb.ndk') fname_stcatalogue = os.path.join(basedir,'receivers.dat') #==== PREPARE DOWNLOAD LIST ==== print 'reading station list...' stcatalogue = RecFile(fname_stcatalogue) print 'retrieved {:d} stations'.format(stcatalogue.ndata) unique_stations = set(stcatalogue.stations) print 'unique stations: {}'.format(len(unique_stations)) print 'reading event catalogue...' evcatalogue = NDKFile(fname_evcatalogue) print 'found {:d} events'.format(evcatalogue.nevents) create_dir(datadir) #==== LOOP THROUGH EVENTS ==== client = Client('Iris') for name,event in zip(evcatalogue.names[0:1],evcatalogue.events[0:1]): print '---- {:s} ----'.format(name) runtime = time.time() fname_events = '{:s}_events.xml'.format(name) fname_stations = '{:s}_stations.xml'.format(name) fname_mseed = '{:s}_waveforms.mseed'.format(name) path_stations = os.path.join(datadir,fname_stations) path_events = os.path.join(datadir,fname_events) path_mseed = os.path.join(datadir,fname_mseed) #---- check if data exist ---- if os.path.exists(path_mseed)\ and os.path.exists(path_events)\ and os.path.exists(path_stations): print 'data already exists, continuing ...' 
continue #---- prepare redownload ---- tstart = event.origin+event.ctime+t1 tend = event.origin+event.ctime+t2 downloadlist = [('*',stat,'*','LH*',tstart,tend) for stat in unique_stations] print 'downloading ...'.format(tstart,tend) try: inventory = client.get_stations_bulk(downloadlist,level='response') events = client.get_events(minmag=5.0,starttime=tstart,endtime=tend) waveforms = client.get_waveforms_bulk(downloadlist, attach_response=False,longestonly=True) except Exception,err: print err print 'ERROR WHILE DOWNLOADING' continue #---- write to file ---- try: inventory.write(path_stations,format='STATIONXML') events.write(path_events,format='QUAKEML') waveforms.write(path_mseed,format='MSEED') except Exception,err: print err print 'SAVING ERROR' continue
def main(): eventID = 4417721 client = Client("IRIS") cat = client.get_events(eventid=eventID) #print cat timeSeriesClient = iris.Client() eventLatitude = 40.8287 eventLongitude = -125.1338 eventTime = '2012-04-12T07:15:49.1700' eventMagnitude = 7.1 magUnit = 'MW' dt = UTCDateTime(eventTime) print "EARTHQUAKE", str(eventLatitude), str(eventLongitude), str(eventMagnitude)+" "+ magUnit netStationList = set() #getting list of stations for US networks for each_net in USNETS: try: inventory = client.get_stations(network = each_net, latitude=eventLatitude,\ longitude=eventLongitude, maxradius=5) #print type(inventory) for each in inventory: each_content = each.get_contents() lat, lon = get_coordinates(each[0]) channelList = each_content['stations'] for each_channel in channelList: netStationList.add((each_channel.split()[0], lat, lon)) except: #print "Failed for", each_net continue #print inventory.get_contents() #print netStationList #getting time series data in a loop for each1 in netStationList: netStation = each1[0].split('.') network = netStation[0] station = netStation[1] magList = [] try: data = timeSeriesClient.resp(network, station, '*', '*', dt) #print "SEEDLIST SUCCESS ", each1[0] seedList = parseResponse(data.decode()) for each2 in seedList: arr = each2.split('.') try: st = client.get_waveforms(arr[0], arr[1], arr[2], arr[3], dt, dt+1800) #print "TIMESERIES SUCCESS", each2 ampl = get_amplitude(st, dt, each2) local_mag = calculate_local_magnitude(eventLatitude, eventLongitude, each1[1], each1[2], ampl) magList.append(local_mag) #print each2, each1[1], each1[2], local_mag #print "TIMESERIES SUCCESS", each2 except: #print "TIMESERIES FAIL", each2 continue if len(magList) > 0: print each1[0], each1[1], each1[2], sum(magList)/float(len(magList)) except: #print 'SEEDLIST FAIL ', each1[0] continue
def main(): eventID = 4417721 client = Client("IRIS") cat = client.get_events(eventid=eventID) #print cat timeSeriesClient = iris.Client() eventLatitude = 40.8287 eventLongitude = -125.1338 eventTime = '2012-04-12T07:15:49.1700' eventMagnitude = 7.1 magUnit = 'MW' dt = UTCDateTime(eventTime) print "EARTHQUAKE", str(eventLatitude), str( eventLongitude), str(eventMagnitude) + " " + magUnit netStationList = set() #getting list of stations for US networks for each_net in USNETS: try: inventory = client.get_stations(network = each_net, latitude=eventLatitude,\ longitude=eventLongitude, maxradius=5) #print type(inventory) for each in inventory: each_content = each.get_contents() lat, lon = get_coordinates(each[0]) channelList = each_content['stations'] for each_channel in channelList: netStationList.add((each_channel.split()[0], lat, lon)) except: #print "Failed for", each_net continue #print inventory.get_contents() #print netStationList #getting time series data in a loop for each1 in netStationList: netStation = each1[0].split('.') network = netStation[0] station = netStation[1] magList = [] try: data = timeSeriesClient.resp(network, station, '*', '*', dt) #print "SEEDLIST SUCCESS ", each1[0] seedList = parseResponse(data.decode()) for each2 in seedList: arr = each2.split('.') try: st = client.get_waveforms(arr[0], arr[1], arr[2], arr[3], dt, dt + 1800) #print "TIMESERIES SUCCESS", each2 ampl = get_amplitude(st, dt, each2) local_mag = calculate_local_magnitude( eventLatitude, eventLongitude, each1[1], each1[2], ampl) magList.append(local_mag) #print each2, each1[1], each1[2], local_mag #print "TIMESERIES SUCCESS", each2 except: #print "TIMESERIES FAIL", each2 continue if len(magList) > 0: print each1[0], each1[1], each1[2], sum(magList) / float( len(magList)) except: #print 'SEEDLIST FAIL ', each1[0] continue
def getEvents(args): client = Client("Iris") events = client.get_events(minmag=5.0, starttime=args["starttime"], endtime=args["endtime"]) print "found these events:" print events return events