def generate_events(spark_context, eventList):
    """Fetch IRIS metadata for each unprocessed event and hand it on.

    Publishes the Spark context via the module-level ``sc`` global (read by
    downstream helpers), skips events already recorded in the local
    ``processedEvent.p`` pickle, queries the IRIS FDSN service for each
    remaining event ID, and parses the catalogue's string form into dicts
    with keys 'date-time', 'EventID', 'Latitude', 'Longitude', 'Magnitude'.

    Parameters
    ----------
    spark_context : SparkContext
        Active Spark context; stored in the global ``sc``.
    eventList : iterable
        Event identifiers convertible to int.

    Returns
    -------
    Whatever ``processEvents`` returns for the collected event dicts.
    """
    global sc
    sc = spark_context
    quake_data = []
    client = Client("IRIS")

    # Event IDs handled in earlier runs are persisted between invocations.
    processedEvent = set()
    if os.path.exists("processedEvent.p"):
        # ``with`` guarantees the handle is closed even if load() raises.
        with open('processedEvent.p', 'rb') as pickleFile:
            processedEvent = pickle.load(pickleFile)

    # Compile once (raw string) instead of re-compiling per iteration.
    event_pattern = re.compile(r"Event:\t(.*)\n")

    for each_event in eventList:
        if each_event in processedEvent:
            continue
        cat = client.get_events(eventid=int(each_event))
        eventDict = dict()
        for each_entry in cat:
            # Entries stringify as "Event:\t<date> | <lat>, <lon> | <mag> <unit>...".
            match = event_pattern.match(str(each_entry))
            if match is None:
                # Unexpected catalogue format -- skip rather than crash
                # with AttributeError on None.
                continue
            event_info_list = match.group(1).split('|')
            eventDict['date-time'] = event_info_list[0].strip()
            eventDict['EventID'] = each_event
            origin_info = event_info_list[1].split(',')
            eventDict['Latitude'] = origin_info[0].strip()
            eventDict['Longitude'] = origin_info[1].strip()
            eventDict['Magnitude'] = event_info_list[2].split()[0].strip()
        quake_data.append(eventDict)
    return processEvents(quake_data, client, processedEvent)
Exemplo n.º 2
0
def generate_events(spark_context, eventList):
    """Fetch IRIS metadata for each unprocessed event and hand it on.

    Publishes the Spark context via the module-level ``sc`` global (read by
    downstream helpers), skips events already recorded in the local
    ``processedEvent.p`` pickle, queries the IRIS FDSN service for each
    remaining event ID, and parses the catalogue's string form into dicts
    with keys 'date-time', 'EventID', 'Latitude', 'Longitude', 'Magnitude'.

    Parameters
    ----------
    spark_context : SparkContext
        Active Spark context; stored in the global ``sc``.
    eventList : iterable
        Event identifiers convertible to int.

    Returns
    -------
    Whatever ``processEvents`` returns for the collected event dicts.
    """
    global sc
    sc = spark_context
    quake_data = []
    client = Client("IRIS")

    # Event IDs handled in earlier runs are persisted between invocations.
    processedEvent = set()
    if os.path.exists("processedEvent.p"):
        # ``with`` guarantees the handle is closed even if load() raises.
        with open('processedEvent.p', 'rb') as pickleFile:
            processedEvent = pickle.load(pickleFile)

    # Compile once (raw string) instead of re-compiling per iteration.
    event_pattern = re.compile(r"Event:\t(.*)\n")

    for each_event in eventList:
        if each_event in processedEvent:
            continue
        cat = client.get_events(eventid=int(each_event))
        eventDict = dict()
        for each_entry in cat:
            # Entries stringify as "Event:\t<date> | <lat>, <lon> | <mag> <unit>...".
            match = event_pattern.match(str(each_entry))
            if match is None:
                # Unexpected catalogue format -- skip rather than crash
                # with AttributeError on None.
                continue
            event_info_list = match.group(1).split('|')
            eventDict['date-time'] = event_info_list[0].strip()
            eventDict['EventID'] = each_event
            origin_info = event_info_list[1].split(',')
            eventDict['Latitude'] = origin_info[0].strip()
            eventDict['Longitude'] = origin_info[1].strip()
            eventDict['Magnitude'] = event_info_list[2].split()[0].strip()
        quake_data.append(eventDict)
    return processEvents(quake_data, client, processedEvent)
Exemplo n.º 3
0
def getStationXML(args, events):
    """Return an FDSN station inventory (level="response") matching ``args``.

    If ``args`` carries a minradius/maxradius constraint, the search is
    centred on the first origin of the first event in ``events``; otherwise
    ``args`` is passed through unchanged.
    NOTE: Python 2 code (print statements).
    """
    print "\n---- checking data availability ----"
    print args
    # NOTE(review): other blocks in this file use Client("IRIS"); "Iris"
    # presumably resolves to the same endpoint -- confirm.
    client = Client("Iris")
    if args["minradius"] or args["maxradius"]:
        # Radius search: centre on the first event's first origin.
        lat = events[0].origins[0].latitude
        lon = events[0].origins[0].longitude
        print "searching around event location:", lat, lon
        inventory = client.get_stations(level="response", latitude=lat, longitude=lon, **args)
    else:
        inventory = client.get_stations(level="response", **args)
    print "\nData is available for: "
    print inventory
    return inventory
Exemplo n.º 4
0
def processList(data):
    #print data[0]
    print "SLEEP", data[6], data[7]
    time.sleep(float(data[7] / 1000))
    dt = UTCDateTime(data[5])
    timeSeriesClient = iris.Client()
    client = Client("IRIS")
    netStation = data[0].split('.')
    network = netStation[0]
    station = netStation[1]
    magList = []
    try:
        respData = timeSeriesClient.resp(network, station, '*', '*', dt)
        #print "SEEDLIST SUCCESS ", respData[0]
        seedList = parseResponse(respData.decode())
        for each2 in seedList:
            arr = each2.split('.')
            try:
                st = client.get_waveforms(arr[0], arr[1], arr[2], arr[3], dt,
                                          dt + 1800)
                print "TIMESERIES SUCCESS", each2
                ampl = get_amplitude(st, dt, each2)
                local_mag = calculate_local_magnitude(data[3], data[4],
                                                      data[1], data[2], ampl)
                #print local_mag
                magList.append(local_mag)
                #print "Appended to magnitude list"
            except:
                #print "TIMESERIES FAIL", each2
                continue
        print magList
        if len(magList) > 0:
            #print "Magnitude list obtained"
            retVal = str(data[6])+ "," + data[0]+ ","+ data[1] +"," +\
                    data[2] +","+ str(sum(magList)/float(len(magList)))
            #print "Returning value:", retVal

            return retVal
        else:
            return 'FAIL'

    except:
        print 'SEEDLIST FAIL ', data[0]
        return 'FAIL'
Exemplo n.º 5
0
def processList(data):
    #print data[0]
    print "SLEEP", data[6], data[7]
    time.sleep(float(data[7]/1000))
    dt = UTCDateTime(data[5])
    timeSeriesClient = iris.Client()
    client = Client("IRIS")
    netStation = data[0].split('.')
    network = netStation[0]
    station = netStation[1]
    magList = []
    try:
        respData = timeSeriesClient.resp(network, station, '*', '*', dt)
        #print "SEEDLIST SUCCESS ", respData[0]
        seedList = parseResponse(respData.decode())
        for each2 in seedList:
            arr = each2.split('.')
            try:
                st = client.get_waveforms(arr[0], arr[1], arr[2], arr[3], dt, dt+1800)
                print "TIMESERIES SUCCESS", each2
                ampl = get_amplitude(st, dt, each2)
                local_mag = calculate_local_magnitude(data[3], data[4], data[1], data[2], ampl)
                #print local_mag
                magList.append(local_mag)
                #print "Appended to magnitude list"
            except:
                #print "TIMESERIES FAIL", each2
                continue
        print magList
        if len(magList) > 0:
            #print "Magnitude list obtained"
            retVal = str(data[6])+ "," + data[0]+ ","+ data[1] +"," +\
                    data[2] +","+ str(sum(magList)/float(len(magList)))
            #print "Returning value:", retVal

            return retVal
        else:
            return 'FAIL'

    except:
        print 'SEEDLIST FAIL ', data[0]
        return 'FAIL'
Exemplo n.º 6
0
def getWaveforms(args, inventory):
    """Bulk-download waveforms for every channel in ``inventory``.

    The request window is args["starttime"]..args["endtime"]; one bulk
    request entry is built per "NET.STA.LOC.CHA" channel id.
    NOTE: Python 2 code (print statements).
    """
    traces = inventory.get_contents()["channels"]
    ntraces = len(traces)
    print "Downloading {:d} waveforms".format(ntraces)
    print traces

    t1 = args["starttime"]
    t2 = args["endtime"]
    # Each bulk entry is (net, sta, loc, cha, starttime, endtime).
    downloadlist = [tuple(trace.split(".")) + (t1, t2) for trace in traces]

    parameters = {
        "attach_response": False,
        #'minimumlength':10.*3600.,
        "longestonly": True,
    }
    client = Client("Iris")
    waveforms = client.get_waveforms_bulk(downloadlist, **parameters)
    print "downloaded waveforms"
    print waveforms
    return waveforms
Exemplo n.º 7
0
	print "No hay parametros de entrada"
	sys.exit()

year, MM, dd, hh, mm, ss = int(sys.argv[1][0:4]), int(sys.argv[1][4:6]), int(sys.argv[1][6:8]), int(sys.argv[1][8:10]), int(sys.argv[1][10:12]), int(sys.argv[1][12:14])

t = UTCDateTime(year, MM, dd, hh, mm)
t_ = float(sys.argv[2])

if len(sys.argv)>2:
	name = str(t)+archive_name(sys.argv[3:])+"."+Format
else:
	name = str(t)+"."+Format 

#print t, t_

client_sc = Client(ip_fdsn+":"+port_fdsn)
print client_sc

if len(sys.argv) == 2:
	st = request(client = client_sc, t1 = t, t2= t + t_)
	print st
if len(sys.argv) == 4:
	st = request(client = client_sc, t1 = t, t2= t + t_, AG=sys.argv[3])
        print st
elif len(sys.argv) == 5:
	st = request(client = client_sc, t1 = t, t2= t + t_, AG=sys.argv[3], STA=sys.argv[4])
	print st
elif len(sys.argv) == 6:
	st = request(client = client_sc, t1 = t, t2= t + t_, AG=sys.argv[3], STA=sys.argv[4], LOC=sys.argv[5])
	print st
elif len(sys.argv) == 7:
Exemplo n.º 8
0
def main():
    """Download the SEMUCB dataset: for each catalogue event fetch station
    metadata, a local event catalogue and LH* waveforms, then write them to
    semucb_dataset/data as StationXML / QuakeML / MiniSEED.

    Already-downloaded events are skipped.
    NOTE: Python 2 code (print statements, ``except Exception, err``).
    """
    #==== INITIALIZE ====
    #---- parameters ----
    redownload = False
    basedir    = 'semucb_dataset'
    datadir    = os.path.join(basedir,'data')
    # Window relative to origin+centroid time, in seconds.
    t1,t2      = -3600.,10000.

    #---- input files ----
    fname_evcatalogue = os.path.join(basedir,'evcatalogue_semucb.ndk')
    fname_stcatalogue = os.path.join(basedir,'receivers.dat')

    #==== PREPARE DOWNLOAD LIST ====
    print 'reading station list...'
    stcatalogue = RecFile(fname_stcatalogue)
    print 'retrieved {:d} stations'.format(stcatalogue.ndata)
    unique_stations = set(stcatalogue.stations)
    print 'unique stations: {}'.format(len(unique_stations))

    print 'reading event catalogue...'
    evcatalogue = NDKFile(fname_evcatalogue)
    print 'found {:d} events'.format(evcatalogue.nevents)

    create_dir(datadir)

    #==== LOOP THROUGH EVENTS ====
    client = Client('Iris')
    # NOTE(review): the [0:1] slices limit this run to the first event only.
    for name,event in zip(evcatalogue.names[0:1],evcatalogue.events[0:1]):
        print '---- {:s} ----'.format(name)
        runtime = time.time()
        fname_events   = '{:s}_events.xml'.format(name)
        fname_stations = '{:s}_stations.xml'.format(name)
        fname_mseed    = '{:s}_waveforms.mseed'.format(name)

        path_stations = os.path.join(datadir,fname_stations)
        path_events   = os.path.join(datadir,fname_events)
        path_mseed    = os.path.join(datadir,fname_mseed)

        #---- check if data exist ----
        # Skip events whose three output files all exist already.
        if os.path.exists(path_mseed)\
        and os.path.exists(path_events)\
        and os.path.exists(path_stations):
            print 'data already exists, continuing ...'
            continue

        #---- prepare redownload ----
        tstart = event.origin+event.ctime+t1
        tend   = event.origin+event.ctime+t2
        # One bulk request line per station: any network/location, LH* channels.
        downloadlist = [('*',stat,'*','LH*',tstart,tend) for stat in unique_stations]

        # NOTE(review): this format string has no placeholders, so
        # tstart/tend are silently dropped from the message.
        print 'downloading ...'.format(tstart,tend)
        try:
            inventory = client.get_stations_bulk(downloadlist,level='response')
            events    = client.get_events(minmag=5.0,starttime=tstart,endtime=tend)
            waveforms = client.get_waveforms_bulk(downloadlist,
                                              attach_response=False,longestonly=True)
        except Exception,err:
            print err
            print 'ERROR WHILE DOWNLOADING'
            continue

        #---- write to file ----
        try:
            inventory.write(path_stations,format='STATIONXML')
            events.write(path_events,format='QUAKEML')
            waveforms.write(path_mseed,format='MSEED')
        except Exception,err:
            print err
            print 'SAVING ERROR'
            continue
Exemplo n.º 9
0
def main():
    """Driver: build (station, event) work items for unprocessed quakes in
    input.json, compute per-station magnitudes in parallel with Spark, and
    append the results to stationMagnitudes.txt.

    NOTE: Python 2 code (print statements).  Relies on a module-level
    SparkContext ``sc`` (set by ``generate_events``) and on the global
    ``USNETS`` network list.
    """
    # Defaults below are placeholders; real values come from input.json.
    eventID = 4417721
    client = Client("IRIS")
    #cat = client.get_events(eventid=eventID)
    #print cat
    #timeSeriesClient = iris.Client()
    eventLatitude = 40.8287
    eventLongitude = -125.1338
    #eventTime = '2014-03-10T05:18:13.4000'
    eventMagnitude = 7.1
    magUnit = 'MW'
    #dt = UTCDateTime(eventTime)

    ##THIS LIST IS TO BE MADE BY PARSING EVENTS.CSV
    '''eventList = [('5158626', 25.1395, -109.433, '2015-09-13T08:14:12.2400', 6.7, 'MWW'),\
                 ('4768129', 38.2155, -122.3117, '2014-08-24T10:20:44.0600', 6.02, 'MW'),\
                 ('4311182', 26.0913, -110.3209, '2013-10-19T17:54:54.7000', 6.6, 'MWW'),\
                 ('3318739', 25.09, -109.5366, '2011-07-26T17:44:21.5100', 6.0, 'MW')]'''

    #reading data from json file
    with open('input.json') as data_file:
        quake_data = json.load(data_file)
    print quake_data
    # Event IDs handled in earlier runs are persisted in processedEvent.p.
    processedEvent = set()
    if os.path.exists("processedEvent.p"):
        pickleFile = open('processedEvent.p', 'rb')
        processedEvent = pickle.load(pickleFile)
        pickleFile.close()
    netStationList = set()
    for each_quake in quake_data:
        eventID = each_quake['EventID']
        if eventID in processedEvent:
            continue
        eventLatitude = each_quake['Latitude']
        eventLongitude = each_quake['Longitude']
        eventTime = each_quake['date'] + "T" + each_quake['time']
        for each_net in USNETS:
            try:
                inventory = client.get_stations(network = each_net, latitude=eventLatitude, \
                                        longitude=eventLongitude, maxradius=10)
                #print type(inventory)
                for each in inventory:
                    each_content = each.get_contents()
                    lat, lon = get_coordinates(each[0])
                    channelList = each_content['stations']
                    for each_channel in channelList:
                        # Random delay staggers worker start-up (consumed
                        # by processList as data[7]).
                        randTime = random.randint(1, 10)
                        # Work item: NET.STA id, station lat/lon, event
                        # lat/lon, event time, event id, delay.
                        netStationList.add((each_channel.split()[0], lat, lon,eventLatitude,\
                                            eventLongitude, eventTime, eventID, randTime))
            except:
                # NOTE(review): bare except hides every failure for this
                # network (including typos) -- consider narrowing.
                #print "Failed for", each_net
                continue

    #print "EARTHQUAKE", str(eventLatitude), str(eventLongitude), str(eventMagnitude), magUnit

    #getting list of stations for US networks
    #print USNETS

    #print inventory.get_contents()
    print "LENGTH OF NET STATION", len(netStationList)
    #getting time series data in a loop
    # Fan the work items out over Spark; processList returns 'FAIL' for
    # items that produced no magnitude, which are filtered away here.
    netRDD = sc.parallelize(netStationList)
    outRDD = netRDD.map(processList).filter(lambda x: not x == 'FAIL')
    #outRDD = netRDD.map(processList)
    stationMag = outRDD.collect()
    print stationMag
    fStat = open('stationMagnitudes.txt', 'a')
    #fStat.write('EventID,NETSTATIONID,LATITUDE,LONGITUDE,MAGNITUDE'+"\n")
    for each_station in stationMag:
        # First CSV field of each result row is the event ID.
        event_id = each_station.split(',')[0]
        #print event_id
        processedEvent.add(event_id)
        fStat.write(each_station + "\n")
    #f.write('test')
    fStat.close()
    # Persist the updated processed-event set.  NOTE(review): opened in
    # text mode "w" -- acceptable for pickle protocol 0 on Python 2 only.
    pickleWrite = open("processedEvent.p", "w")
    pickle.dump(processedEvent, pickleWrite)
    pickleWrite.close()
Exemplo n.º 10
0
def main():
    """Driver: build (station, event) work items for unprocessed quakes in
    input.json, compute per-station magnitudes in parallel with Spark, and
    append the results to stationMagnitudes.txt.

    NOTE: Python 2 code (print statements).  Relies on a module-level
    SparkContext ``sc`` (set by ``generate_events``) and on the global
    ``USNETS`` network list.
    """
    # Defaults below are placeholders; real values come from input.json.
    eventID = 4417721
    client = Client("IRIS")
    #cat = client.get_events(eventid=eventID)
    #print cat
    #timeSeriesClient = iris.Client()
    eventLatitude = 40.8287
    eventLongitude = -125.1338
    #eventTime = '2014-03-10T05:18:13.4000'
    eventMagnitude = 7.1
    magUnit = 'MW'
    #dt = UTCDateTime(eventTime)

    ##THIS LIST IS TO BE MADE BY PARSING EVENTS.CSV
    '''eventList = [('5158626', 25.1395, -109.433, '2015-09-13T08:14:12.2400', 6.7, 'MWW'),\
                 ('4768129', 38.2155, -122.3117, '2014-08-24T10:20:44.0600', 6.02, 'MW'),\
                 ('4311182', 26.0913, -110.3209, '2013-10-19T17:54:54.7000', 6.6, 'MWW'),\
                 ('3318739', 25.09, -109.5366, '2011-07-26T17:44:21.5100', 6.0, 'MW')]'''

    #reading data from json file
    with open('input.json') as data_file:
        quake_data = json.load(data_file)
    print quake_data
    # Event IDs handled in earlier runs are persisted in processedEvent.p.
    processedEvent = set()
    if os.path.exists("processedEvent.p"):
        pickleFile = open('processedEvent.p', 'rb')
        processedEvent = pickle.load(pickleFile)
        pickleFile.close()
    netStationList = set()
    for each_quake in quake_data:
        eventID = each_quake['EventID']
        if eventID in processedEvent:
            continue
        eventLatitude = each_quake['Latitude']
        eventLongitude = each_quake['Longitude']
        eventTime = each_quake['date']+"T"+each_quake['time']
        for each_net in USNETS:
            try:
                inventory = client.get_stations(network = each_net, latitude=eventLatitude, \
                                        longitude=eventLongitude, maxradius=10)
                #print type(inventory)
                for each in inventory:
                    each_content = each.get_contents()
                    lat, lon = get_coordinates(each[0])
                    channelList = each_content['stations']
                    for each_channel in channelList:
                        # Random delay staggers worker start-up (consumed
                        # by processList as data[7]).
                        randTime = random.randint(1, 10)
                        # Work item: NET.STA id, station lat/lon, event
                        # lat/lon, event time, event id, delay.
                        netStationList.add((each_channel.split()[0], lat, lon,eventLatitude,\
                                            eventLongitude, eventTime, eventID, randTime))
            except:
                # NOTE(review): bare except hides every failure for this
                # network (including typos) -- consider narrowing.
                #print "Failed for", each_net
                continue


    #print "EARTHQUAKE", str(eventLatitude), str(eventLongitude), str(eventMagnitude), magUnit

    #getting list of stations for US networks
    #print USNETS

    #print inventory.get_contents()
    print "LENGTH OF NET STATION", len(netStationList)
    #getting time series data in a loop
    # Fan the work items out over Spark; processList returns 'FAIL' for
    # items that produced no magnitude, which are filtered away here.
    netRDD = sc.parallelize(netStationList)
    outRDD = netRDD.map(processList).filter(lambda x: not x =='FAIL' )
    #outRDD = netRDD.map(processList)
    stationMag = outRDD.collect()
    print stationMag
    fStat = open('stationMagnitudes.txt', 'a')
    #fStat.write('EventID,NETSTATIONID,LATITUDE,LONGITUDE,MAGNITUDE'+"\n")
    for each_station in stationMag:
        # First CSV field of each result row is the event ID.
        event_id = each_station.split(',')[0]
        #print event_id
        processedEvent.add(event_id)
        fStat.write(each_station+"\n")
    #f.write('test')
    fStat.close()
    # Persist the updated processed-event set.  NOTE(review): opened in
    # text mode "w" -- acceptable for pickle protocol 0 on Python 2 only.
    pickleWrite = open( "processedEvent.p", "w" )
    pickle.dump( processedEvent, pickleWrite)
    pickleWrite.close()
Exemplo n.º 11
0
from obspy.core.util.geodetics import gps2DistAzimuth


paz_wa = {'sensitivity': 2800, 'zeros': [0j], 'gain': 1,
          'poles': [-6.2832 - 4.7124j, -6.2832 + 4.7124j]}

client = Client(user="******")
t = UTCDateTime("2012-04-03T02:45:03")

stations = client.getStations(t, t + 300, "BC")'''

# Demo script: download 4 hours of BH? data for station IU.ANMO (loc "00"),
# demean each trace, and print its peak absolute amplitude.
# NOTE: Python 2 code (print statement below).
from obspy.fdsn.client import Client
from obspy import UTCDateTime
from obspy.core import read

client = Client("IRIS")
t1 = UTCDateTime("2012-04-12T07:15:49.1700")
#IU.TUC.20.LNZ
st = client.get_waveforms("IU", "ANMO", "00", "BH?", t1, t1 + 4 * 3600)
# Remove the mean so the amplitude is measured about zero.
st.detrend(type='demean')
for each in st:
    ampl = each.data
    print max(abs(ampl))
'''tr_n = st.select(component="N")[0]
ampl_n = max(abs(tr_n.data))

tr_e = st.select(component="E")[0]
ampl_e = max(abs(tr_e.data))

ampl = max(ampl_n, ampl_e)'''
Exemplo n.º 12
0
def main():
    """Compute per-station local magnitudes for one hard-coded event.

    Finds stations within 5 degrees of the event across the ``USNETS``
    networks, downloads 30 minutes of waveform per responsive channel, and
    prints each station's average local magnitude.
    NOTE: Python 2 code (print statements); failures are skipped silently.
    """
    eventID = 4417721
    client = Client("IRIS")
    cat = client.get_events(eventid=eventID)
    #print cat
    timeSeriesClient = iris.Client()
    # Hard-coded event parameters used for the station search and the
    # magnitude calculation below.
    eventLatitude = 40.8287
    eventLongitude = -125.1338
    eventTime = '2012-04-12T07:15:49.1700'
    eventMagnitude = 7.1
    magUnit = 'MW'
    dt = UTCDateTime(eventTime)
    print "EARTHQUAKE", str(eventLatitude), str(eventLongitude), str(eventMagnitude)+" "+ magUnit
    netStationList = set()
    #getting list of stations for US networks
    for each_net in USNETS:
        try:
            inventory = client.get_stations(network = each_net, latitude=eventLatitude,\
                                            longitude=eventLongitude, maxradius=5)
            #print type(inventory)
            for each in inventory:
                each_content = each.get_contents()
                lat, lon = get_coordinates(each[0])
                channelList = each_content['stations']
                for each_channel in channelList:
                    # Keep only the leading "NET.STA" token of the entry.
                    netStationList.add((each_channel.split()[0], lat, lon))
        except:
            # NOTE(review): bare except hides any failure for this network.
            #print "Failed for", each_net
            continue

    #print inventory.get_contents()
    #print netStationList
    #getting time series data in a loop



    for each1 in netStationList:
        # each1 = (NET.STA, station lat, station lon)
        netStation = each1[0].split('.')
        network = netStation[0]
        station = netStation[1]
        magList = []
        try:
            # RESP metadata lists the channels recording at time ``dt``.
            data = timeSeriesClient.resp(network, station, '*', '*', dt)
            #print "SEEDLIST SUCCESS ", each1[0]
            seedList = parseResponse(data.decode())
            for each2 in seedList:
                arr = each2.split('.')
                try:
                    # 30 minutes of waveform starting at the event time.
                    st = client.get_waveforms(arr[0], arr[1], arr[2], arr[3], dt, dt+1800)
                    #print "TIMESERIES SUCCESS", each2
                    ampl = get_amplitude(st, dt, each2)
                    local_mag = calculate_local_magnitude(eventLatitude, eventLongitude, each1[1], each1[2], ampl)
                    magList.append(local_mag)
                    #print each2, each1[1], each1[2], local_mag
                    #print "TIMESERIES SUCCESS", each2
                except:
                    #print "TIMESERIES FAIL", each2
                    continue
            # Print the station's average magnitude over all channels.
            if len(magList) > 0:
                print each1[0], each1[1], each1[2], sum(magList)/float(len(magList))

        except:
            #print 'SEEDLIST FAIL ', each1[0]
            continue
Exemplo n.º 13
0
def main():
    """Compute per-station local magnitudes for one hard-coded event.

    Finds stations within 5 degrees of the event across the ``USNETS``
    networks, downloads 30 minutes of waveform per responsive channel, and
    prints each station's average local magnitude.
    NOTE: Python 2 code (print statements); failures are skipped silently.
    """
    eventID = 4417721
    client = Client("IRIS")
    cat = client.get_events(eventid=eventID)
    #print cat
    timeSeriesClient = iris.Client()
    # Hard-coded event parameters used for the station search and the
    # magnitude calculation below.
    eventLatitude = 40.8287
    eventLongitude = -125.1338
    eventTime = '2012-04-12T07:15:49.1700'
    eventMagnitude = 7.1
    magUnit = 'MW'
    dt = UTCDateTime(eventTime)
    print "EARTHQUAKE", str(eventLatitude), str(
        eventLongitude), str(eventMagnitude) + " " + magUnit
    netStationList = set()
    #getting list of stations for US networks
    for each_net in USNETS:
        try:
            inventory = client.get_stations(network = each_net, latitude=eventLatitude,\
                                            longitude=eventLongitude, maxradius=5)
            #print type(inventory)
            for each in inventory:
                each_content = each.get_contents()
                lat, lon = get_coordinates(each[0])
                channelList = each_content['stations']
                for each_channel in channelList:
                    # Keep only the leading "NET.STA" token of the entry.
                    netStationList.add((each_channel.split()[0], lat, lon))
        except:
            # NOTE(review): bare except hides any failure for this network.
            #print "Failed for", each_net
            continue

    #print inventory.get_contents()
    #print netStationList
    #getting time series data in a loop

    for each1 in netStationList:
        # each1 = (NET.STA, station lat, station lon)
        netStation = each1[0].split('.')
        network = netStation[0]
        station = netStation[1]
        magList = []
        try:
            # RESP metadata lists the channels recording at time ``dt``.
            data = timeSeriesClient.resp(network, station, '*', '*', dt)
            #print "SEEDLIST SUCCESS ", each1[0]
            seedList = parseResponse(data.decode())
            for each2 in seedList:
                arr = each2.split('.')
                try:
                    # 30 minutes of waveform starting at the event time.
                    st = client.get_waveforms(arr[0], arr[1], arr[2], arr[3],
                                              dt, dt + 1800)
                    #print "TIMESERIES SUCCESS", each2
                    ampl = get_amplitude(st, dt, each2)
                    local_mag = calculate_local_magnitude(
                        eventLatitude, eventLongitude, each1[1], each1[2],
                        ampl)
                    magList.append(local_mag)
                    #print each2, each1[1], each1[2], local_mag
                    #print "TIMESERIES SUCCESS", each2
                except:
                    #print "TIMESERIES FAIL", each2
                    continue
            # Print the station's average magnitude over all channels.
            if len(magList) > 0:
                print each1[0], each1[1], each1[2], sum(magList) / float(
                    len(magList))

        except:
            #print 'SEEDLIST FAIL ', each1[0]
            continue
Exemplo n.º 14
0
def getEvents(args):
    """Return the FDSN event catalogue with magnitude >= 5.0 inside the
    args["starttime"]..args["endtime"] window.
    NOTE: Python 2 code (print statements).
    """
    client = Client("Iris")
    events = client.get_events(minmag=5.0, starttime=args["starttime"], endtime=args["endtime"])
    print "found these events:"
    print events
    return events