def generate_events(spark_context, eventList):
    """Build one metadata dict per unseen FDSN event and hand the batch
    to processEvents.

    Parameters:
        spark_context: SparkContext; stored in the module-global ``sc`` so
            downstream helpers can parallelize work.
        eventList: iterable of FDSN event IDs (str or int).

    Returns whatever processEvents(quake_data, client, processedEvent) returns.
    """
    global sc
    sc = spark_context
    quake_data = list()
    client = Client("IRIS")

    # Restore the set of event IDs handled by a previous run, if any.
    processedEvent = set()
    if os.path.exists("processedEvent.p"):
        # FIX: use 'with' so the pickle file is closed even if load() raises.
        with open('processedEvent.p', 'rb') as pickleFile:
            processedEvent = pickle.load(pickleFile)

    # FIX: raw string for the regex (the pattern relies on literal \t and \n)
    # and compile once outside the loop instead of per event.
    event_re = re.compile(r"Event:\t(.*)\n")

    for each_event in eventList:
        if each_event in processedEvent:
            continue
        cat = client.get_events(eventid=int(each_event))
        eventDict = dict()
        for each_entry in cat:
            # str(entry) renders as "Event:\t<time> | <lat>, <lon> | <mag> <unit>..."
            event_info = event_re.match(str(each_entry)).groups()[0]
            event_info_list = event_info.split('|')
            eventDict['date-time'] = event_info_list[0].strip()
            eventDict['EventID'] = each_event
            origin_info = event_info_list[1].split(',')
            eventDict['Latitude'] = origin_info[0].strip()
            eventDict['Longitude'] = origin_info[1].strip()
            eventDict['Magnitude'] = event_info_list[2].split()[0].strip()
        # One dict per event ID (an eventid query returns a single event).
        quake_data.append(eventDict)
    return processEvents(quake_data, client, processedEvent)
def processList(data): #print data[0] print "SLEEP", data[6], data[7] time.sleep(float(data[7] / 1000)) dt = UTCDateTime(data[5]) timeSeriesClient = iris.Client() client = Client("IRIS") netStation = data[0].split('.') network = netStation[0] station = netStation[1] magList = [] try: respData = timeSeriesClient.resp(network, station, '*', '*', dt) #print "SEEDLIST SUCCESS ", respData[0] seedList = parseResponse(respData.decode()) for each2 in seedList: arr = each2.split('.') try: st = client.get_waveforms(arr[0], arr[1], arr[2], arr[3], dt, dt + 1800) print "TIMESERIES SUCCESS", each2 ampl = get_amplitude(st, dt, each2) local_mag = calculate_local_magnitude(data[3], data[4], data[1], data[2], ampl) #print local_mag magList.append(local_mag) #print "Appended to magnitude list" except: #print "TIMESERIES FAIL", each2 continue print magList if len(magList) > 0: #print "Magnitude list obtained" retVal = str(data[6])+ "," + data[0]+ ","+ data[1] +"," +\ data[2] +","+ str(sum(magList)/float(len(magList))) #print "Returning value:", retVal return retVal else: return 'FAIL' except: print 'SEEDLIST FAIL ', data[0] return 'FAIL'
# NOTE(review): truncated top-level script fragment — it begins mid-`if` (the
# guard that prints the Spanish "no input parameters" message and exits) and
# ends with a dangling `elif len(sys.argv) == 7:` whose body lies outside this
# view, so it is left byte-identical. What it does: parses a YYYYMMDDhhmmss
# timestamp from argv[1] into a UTCDateTime (seconds are parsed but not passed
# to UTCDateTime), reads a duration from argv[2], derives an output name from
# argv[3:] plus the global Format, then calls request() against an FDSN client
# at ip_fdsn:port_fdsn with optional AG/STA/LOC selectors per argv length.
print "No hay parametros de entrada" sys.exit() year, MM, dd, hh, mm, ss = int(sys.argv[1][0:4]), int(sys.argv[1][4:6]), int(sys.argv[1][6:8]), int(sys.argv[1][8:10]), int(sys.argv[1][10:12]), int(sys.argv[1][12:14]) t = UTCDateTime(year, MM, dd, hh, mm) t_ = float(sys.argv[2]) if len(sys.argv)>2: name = str(t)+archive_name(sys.argv[3:])+"."+Format else: name = str(t)+"."+Format #print t, t_ client_sc = Client(ip_fdsn+":"+port_fdsn) print client_sc if len(sys.argv) == 2: st = request(client = client_sc, t1 = t, t2= t + t_) print st if len(sys.argv) == 4: st = request(client = client_sc, t1 = t, t2= t + t_, AG=sys.argv[3]) print st elif len(sys.argv) == 5: st = request(client = client_sc, t1 = t, t2= t + t_, AG=sys.argv[3], STA=sys.argv[4]) print st elif len(sys.argv) == 6: st = request(client = client_sc, t1 = t, t2= t + t_, AG=sys.argv[3], STA=sys.argv[4], LOC=sys.argv[5]) print st elif len(sys.argv) == 7:
def main(): eventID = 4417721 client = Client("IRIS") #cat = client.get_events(eventid=eventID) #print cat #timeSeriesClient = iris.Client() eventLatitude = 40.8287 eventLongitude = -125.1338 #eventTime = '2014-03-10T05:18:13.4000' eventMagnitude = 7.1 magUnit = 'MW' #dt = UTCDateTime(eventTime) ##THIS LIST IS TO BE MADE BY PARSING EVENTS.CSV '''eventList = [('5158626', 25.1395, -109.433, '2015-09-13T08:14:12.2400', 6.7, 'MWW'),\ ('4768129', 38.2155, -122.3117, '2014-08-24T10:20:44.0600', 6.02, 'MW'),\ ('4311182', 26.0913, -110.3209, '2013-10-19T17:54:54.7000', 6.6, 'MWW'),\ ('3318739', 25.09, -109.5366, '2011-07-26T17:44:21.5100', 6.0, 'MW')]''' #reading data from json file with open('input.json') as data_file: quake_data = json.load(data_file) print quake_data processedEvent = set() if os.path.exists("processedEvent.p"): pickleFile = open('processedEvent.p', 'rb') processedEvent = pickle.load(pickleFile) pickleFile.close() netStationList = set() for each_quake in quake_data: eventID = each_quake['EventID'] if eventID in processedEvent: continue eventLatitude = each_quake['Latitude'] eventLongitude = each_quake['Longitude'] eventTime = each_quake['date'] + "T" + each_quake['time'] for each_net in USNETS: try: inventory = client.get_stations(network = each_net, latitude=eventLatitude, \ longitude=eventLongitude, maxradius=10) #print type(inventory) for each in inventory: each_content = each.get_contents() lat, lon = get_coordinates(each[0]) channelList = each_content['stations'] for each_channel in channelList: randTime = random.randint(1, 10) netStationList.add((each_channel.split()[0], lat, lon,eventLatitude,\ eventLongitude, eventTime, eventID, randTime)) except: #print "Failed for", each_net continue #print "EARTHQUAKE", str(eventLatitude), str(eventLongitude), str(eventMagnitude), magUnit #getting list of stations for US networks #print USNETS #print inventory.get_contents() print "LENGTH OF NET STATION", len(netStationList) #getting time series data in a loop 
netRDD = sc.parallelize(netStationList) outRDD = netRDD.map(processList).filter(lambda x: not x == 'FAIL') #outRDD = netRDD.map(processList) stationMag = outRDD.collect() print stationMag fStat = open('stationMagnitudes.txt', 'a') #fStat.write('EventID,NETSTATIONID,LATITUDE,LONGITUDE,MAGNITUDE'+"\n") for each_station in stationMag: event_id = each_station.split(',')[0] #print event_id processedEvent.add(event_id) fStat.write(each_station + "\n") #f.write('test') fStat.close() pickleWrite = open("processedEvent.p", "w") pickle.dump(processedEvent, pickleWrite) pickleWrite.close()
def main(): eventID = 4417721 client = Client("IRIS") cat = client.get_events(eventid=eventID) #print cat timeSeriesClient = iris.Client() eventLatitude = 40.8287 eventLongitude = -125.1338 eventTime = '2012-04-12T07:15:49.1700' eventMagnitude = 7.1 magUnit = 'MW' dt = UTCDateTime(eventTime) print "EARTHQUAKE", str(eventLatitude), str( eventLongitude), str(eventMagnitude) + " " + magUnit netStationList = set() #getting list of stations for US networks for each_net in USNETS: try: inventory = client.get_stations(network = each_net, latitude=eventLatitude,\ longitude=eventLongitude, maxradius=5) #print type(inventory) for each in inventory: each_content = each.get_contents() lat, lon = get_coordinates(each[0]) channelList = each_content['stations'] for each_channel in channelList: netStationList.add((each_channel.split()[0], lat, lon)) except: #print "Failed for", each_net continue #print inventory.get_contents() #print netStationList #getting time series data in a loop for each1 in netStationList: netStation = each1[0].split('.') network = netStation[0] station = netStation[1] magList = [] try: data = timeSeriesClient.resp(network, station, '*', '*', dt) #print "SEEDLIST SUCCESS ", each1[0] seedList = parseResponse(data.decode()) for each2 in seedList: arr = each2.split('.') try: st = client.get_waveforms(arr[0], arr[1], arr[2], arr[3], dt, dt + 1800) #print "TIMESERIES SUCCESS", each2 ampl = get_amplitude(st, dt, each2) local_mag = calculate_local_magnitude( eventLatitude, eventLongitude, each1[1], each1[2], ampl) magList.append(local_mag) #print each2, each1[1], each1[2], local_mag #print "TIMESERIES SUCCESS", each2 except: #print "TIMESERIES FAIL", each2 continue if len(magList) > 0: print each1[0], each1[1], each1[2], sum(magList) / float( len(magList)) except: #print 'SEEDLIST FAIL ', each1[0] continue