Example 1
from obspy.clients.fdsn import Client


def getStationXML(args, events):
    """Return an inventory (with response-level metadata) for the given search arguments."""
    print("\n---- checking data availability ----")
    print(args)
    client = Client("IRIS")
    if args.get("minradius") or args.get("maxradius"):
        # Radial search: centre the station query on the first event's origin.
        lat = events[0].origins[0].latitude
        lon = events[0].origins[0].longitude
        print("searching around event location:", lat, lon)
        inventory = client.get_stations(level="response", latitude=lat, longitude=lon, **args)
    else:
        inventory = client.get_stations(level="response", **args)
    print("\nData is available for: ")
    print(inventory)
    return inventory
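A minimal sketch of how getStationXML() might be called; the search dictionary and the event query below are illustrative assumptions, not part of the original example:

from obspy import UTCDateTime
from obspy.clients.fdsn import Client

# Hypothetical search parameters; any keyword accepted by Client.get_stations() can go here.
args = {"network": "IU", "channel": "BH?", "maxradius": 10.0,
        "starttime": UTCDateTime("2014-01-01"), "endtime": UTCDateTime("2014-12-31")}
events = Client("IRIS").get_events(minmagnitude=7.0, starttime=args["starttime"],
                                   endtime=args["endtime"])
inventory = getStationXML(args, events)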
Example 2
import json
import os
import pickle
import random

from obspy.clients.fdsn import Client

# USNETS (a list of US network codes), get_coordinates(), processList() and the
# SparkContext sc are assumed to be defined elsewhere in the original module.


def main():
    eventID = 4417721
    client = Client("IRIS")
    #cat = client.get_events(eventid=eventID)
    #print cat
    #timeSeriesClient = iris.Client()
    eventLatitude = 40.8287
    eventLongitude = -125.1338
    #eventTime = '2014-03-10T05:18:13.4000'
    eventMagnitude = 7.1
    magUnit = 'MW'
    #dt = UTCDateTime(eventTime)

    ##THIS LIST IS TO BE MADE BY PARSING EVENTS.CSV
    '''eventList = [('5158626', 25.1395, -109.433, '2015-09-13T08:14:12.2400', 6.7, 'MWW'),\
                 ('4768129', 38.2155, -122.3117, '2014-08-24T10:20:44.0600', 6.02, 'MW'),\
                 ('4311182', 26.0913, -110.3209, '2013-10-19T17:54:54.7000', 6.6, 'MWW'),\
                 ('3318739', 25.09, -109.5366, '2011-07-26T17:44:21.5100', 6.0, 'MW')]'''

    #reading data from json file
    with open('input.json') as data_file:
        quake_data = json.load(data_file)
    print(quake_data)
    processedEvent = set()
    if os.path.exists("processedEvent.p"):
        pickleFile = open('processedEvent.p', 'rb')
        processedEvent = pickle.load(pickleFile)
        pickleFile.close()
    netStationList = set()
    for each_quake in quake_data:
        eventID = each_quake['EventID']
        if eventID in processedEvent:
            continue
        eventLatitude = each_quake['Latitude']
        eventLongitude = each_quake['Longitude']
        eventTime = each_quake['date'] + "T" + each_quake['time']
        for each_net in USNETS:
            try:
                inventory = client.get_stations(network=each_net, latitude=eventLatitude,
                                                longitude=eventLongitude, maxradius=10)
                #print type(inventory)
                for each in inventory:
                    each_content = each.get_contents()
                    lat, lon = get_coordinates(each[0])
                    channelList = each_content['stations']
                    for each_channel in channelList:
                        randTime = random.randint(1, 10)
                        netStationList.add((each_channel.split()[0], lat, lon, eventLatitude,
                                            eventLongitude, eventTime, eventID, randTime))
            except Exception:
                # station query failed for this network; skip it
                continue

    #print "EARTHQUAKE", str(eventLatitude), str(eventLongitude), str(eventMagnitude), magUnit

    #getting list of stations for US networks
    #print USNETS

    #print inventory.get_contents()
    print "LENGTH OF NET STATION", len(netStationList)
    #getting time series data in a loop
    netRDD = sc.parallelize(netStationList)
    outRDD = netRDD.map(processList).filter(lambda x: not x == 'FAIL')
    #outRDD = netRDD.map(processList)
    stationMag = outRDD.collect()
    print(stationMag)
    fStat = open('stationMagnitudes.txt', 'a')
    #fStat.write('EventID,NETSTATIONID,LATITUDE,LONGITUDE,MAGNITUDE'+"\n")
    for each_station in stationMag:
        event_id = each_station.split(',')[0]
        #print event_id
        processedEvent.add(event_id)
        fStat.write(each_station + "\n")
    #f.write('test')
    fStat.close()
    # pickle requires a binary-mode file handle in Python 3
    pickleWrite = open("processedEvent.p", "wb")
    pickle.dump(processedEvent, pickleWrite)
    pickleWrite.close()
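The loop above expects input.json to contain a list of event records with EventID, Latitude, Longitude, date and time keys. A minimal sketch of how such a file could be written, reusing one entry from the commented-out eventList above:

import json

# one hypothetical event record with the keys read in main()
sample_events = [
    {"EventID": "5158626", "Latitude": 25.1395, "Longitude": -109.433,
     "date": "2015-09-13", "time": "08:14:12.2400"},
]
with open("input.json", "w") as data_file:
    json.dump(sample_events, data_file, indent=2)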
Example 5
from obspy import UTCDateTime
from obspy.clients import iris
from obspy.clients.fdsn import Client

# USNETS and the helpers get_coordinates(), parseResponse(), get_amplitude() and
# calculate_local_magnitude() are assumed to be defined elsewhere in the original module.


def main():
    eventID = 4417721
    client = Client("IRIS")
    cat = client.get_events(eventid=eventID)
    #print cat
    timeSeriesClient = iris.Client()
    eventLatitude = 40.8287
    eventLongitude = -125.1338
    eventTime = '2012-04-12T07:15:49.1700'
    eventMagnitude = 7.1
    magUnit = 'MW'
    dt = UTCDateTime(eventTime)
    print "EARTHQUAKE", str(eventLatitude), str(
        eventLongitude), str(eventMagnitude) + " " + magUnit
    netStationList = set()
    #getting list of stations for US networks
    for each_net in USNETS:
        try:
            inventory = client.get_stations(network=each_net, latitude=eventLatitude,
                                            longitude=eventLongitude, maxradius=5)
            #print type(inventory)
            for each in inventory:
                each_content = each.get_contents()
                lat, lon = get_coordinates(each[0])
                channelList = each_content['stations']
                for each_channel in channelList:
                    netStationList.add((each_channel.split()[0], lat, lon))
        except Exception:
            # station query failed for this network; skip it
            continue

    #print inventory.get_contents()
    #print netStationList
    #getting time series data in a loop

    for each1 in netStationList:
        netStation = each1[0].split('.')
        network = netStation[0]
        station = netStation[1]
        magList = []
        try:
            data = timeSeriesClient.resp(network, station, '*', '*', dt)
            #print "SEEDLIST SUCCESS ", each1[0]
            seedList = parseResponse(data.decode())
            for each2 in seedList:
                arr = each2.split('.')
                try:
                    st = client.get_waveforms(arr[0], arr[1], arr[2], arr[3],
                                              dt, dt + 1800)
                    #print "TIMESERIES SUCCESS", each2
                    ampl = get_amplitude(st, dt, each2)
                    local_mag = calculate_local_magnitude(
                        eventLatitude, eventLongitude, each1[1], each1[2],
                        ampl)
                    magList.append(local_mag)
                    #print each2, each1[1], each1[2], local_mag
                    #print "TIMESERIES SUCCESS", each2
                except Exception:
                    # waveform request or amplitude/magnitude step failed; skip this channel
                    continue
            if len(magList) > 0:
                # average local magnitude over all channels of this station
                print(each1[0], each1[1], each1[2],
                      sum(magList) / float(len(magList)))

        except Exception:
            # RESP lookup or channel parsing failed for this station; skip it
            continue
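calculate_local_magnitude() is one of the helpers assumed by the examples above. A minimal sketch of what it could look like, using a Hutton & Boore (1987) style ML attenuation relation; both the formula choice and the exact signature are assumptions, not taken from the original project:

import math

from obspy.geodetics import gps2dist_azimuth


def calculate_local_magnitude(event_lat, event_lon, sta_lat, sta_lon, ampl):
    # epicentral distance: gps2dist_azimuth returns metres, convert to kilometres
    epi_dist_m, _, _ = gps2dist_azimuth(event_lat, event_lon, sta_lat, sta_lon)
    epi_dist_km = epi_dist_m / 1000.0
    # peak amplitude is assumed to be in metres; * 1000 converts to millimetres
    return (math.log10(ampl * 1000) + 1.11 * math.log10(epi_dist_km)
            + 0.00189 * epi_dist_km - 2.09)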