Example no. 1
def _km_to_deg_lat(kilometers):
    """
    Convenience tool for converting km to degrees latitude.

    """
    try:
        degrees = kilometers2degrees(kilometers)
    except Exception:
        degrees = None
    return degrees
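
A minimal usage sketch for the helper above, assuming kilometers2degrees is imported from obspy.geodetics (the import is not shown in the snippet):

from obspy.geodetics import kilometers2degrees

# one degree of latitude is roughly 111.19 km, so this prints ~1.0
print(_km_to_deg_lat(111.19))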
Example no. 2
def _km_to_deg_lon(kilometers, latitude):
    """
    Convenience tool for converting km to degrees longitude.

    latitude in degrees

    """
    try:
        degrees_lat = kilometers2degrees(kilometers)
    except Exception:
        return None
    degrees_lon = degrees_lat / cos(radians(latitude))

    return degrees_lon
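
A short check of the longitude helper, assuming cos and radians come from math and kilometers2degrees from obspy.geodetics. Meridians converge toward the poles, so the same distance spans more degrees of longitude at higher latitude:

from math import cos, radians
from obspy.geodetics import kilometers2degrees

print(_km_to_deg_lon(111.19, 0.0))   # ~1.0 at the equator
print(_km_to_deg_lon(111.19, 60.0))  # ~2.0, since cos(60 deg) = 0.5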
Example no. 3
def process(asdf_source, event_folder, output_path, min_magnitude, restart,
            save_quality_plots):
    """
    ASDF_SOURCE: Text file containing a list of paths to ASDF files
    EVENT_FOLDER: Path to folder containing event files\n
    OUTPUT_PATH: Output folder \n
    """

    comm = MPI.COMM_WORLD
    nproc = comm.Get_size()
    rank = comm.Get_rank()
    proc_workload = None

    if (rank == 0):

        def outputConfigParameters():
            # output config parameters
            fn = 'pick.%s.cfg' % (datetime.now().strftime('%Y-%m-%d-%H-%M-%S'))
            fn = os.path.join(output_path, fn)

            f = open(fn, 'w+')
            f.write('Parameter Values:\n\n')
            f.write('%25s\t\t: %s\n' % ('ASDF_SOURCE', asdf_source))
            f.write('%25s\t\t: %s\n' % ('EVENT_FOLDER', event_folder))
            f.write('%25s\t\t: %s\n' % ('OUTPUT_PATH', output_path))
            f.write('%25s\t\t: %s\n' % ('MIN_MAGNITUDE', min_magnitude))
            f.write('%25s\t\t: %s\n' %
                    ('RESTART_MODE', 'TRUE' if restart else 'FALSE'))
            f.write('%25s\t\t: %s\n' %
                    ('SAVE_PLOTS', 'TRUE' if save_quality_plots else 'FALSE'))
            f.close()

        # end func

        outputConfigParameters()
    # end if

    # ==================================================
    # Create output-folder for snr-plots
    # ==================================================
    plot_output_folder = None
    if (save_quality_plots):
        plot_output_folder = os.path.join(output_path, 'plots')
        if (rank == 0):
            if (not os.path.exists(plot_output_folder)):
                os.mkdir(plot_output_folder)
        # end if
        comm.Barrier()
    # end if

    # ==================================================
    # Read catalogue and retrieve origin times
    # ==================================================
    cat = CatalogCSV(event_folder)
    events = cat.get_events()
    originTimestamps = cat.get_preferred_origin_timestamps()

    # ==================================================
    # Create lists of pickers for both p- and s-arrivals
    # ==================================================
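    # nsigma trigger thresholds, descending from 8 to 4; one P-picker and one
    # S-picker instance are created per threshold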
    sigmalist = np.arange(8, 3, -1)
    pickerlist_p = []
    pickerlist_s = []
    for sigma in sigmalist:
        picker_p = aicdpicker.AICDPicker(t_ma=5,
                                         nsigma=sigma,
                                         t_up=1,
                                         nr_len=5,
                                         nr_coeff=2,
                                         pol_len=10,
                                         pol_coeff=10,
                                         uncert_coeff=3)
        picker_s = aicdpicker.AICDPicker(t_ma=15,
                                         nsigma=sigma,
                                         t_up=1,
                                         nr_len=5,
                                         nr_coeff=2,
                                         pol_len=10,
                                         pol_coeff=10,
                                         uncert_coeff=3)

        pickerlist_p.append(picker_p)
        pickerlist_s.append(picker_s)
    # end for

    # ==================================================
    # Define theoretical model
    # Instantiate data-access object
    # Retrieve estimated workload
    # ==================================================
    taupyModel = TauPyModel(model='iasp91')
    fds = FederatedASDFDataSet(asdf_source, use_json_db=False, logger=None)
    workload = getWorkloadEstimate(fds, originTimestamps)

    # ==================================================
    # Define output header and open output files
    # depending on the mode of operation (fresh/restart)
    # ==================================================
    header = '#eventID originTimestamp mag originLon originLat originDepthKm net sta cha pickTimestamp stationLon stationLat az baz distance ttResidual snr qualityMeasureCWT domFreq qualityMeasureSlope bandIndex nSigma\n'
    ofnp = os.path.join(output_path, 'p_arrivals.%d.txt' % (rank))
    ofns = os.path.join(output_path, 's_arrivals.%d.txt' % (rank))
    ofp = None
    ofs = None
    if (restart == False):
        ofp = open(ofnp, 'w+')
        ofs = open(ofns, 'w+')
        ofp.write(header)
        ofs.write(header)
    else:
        ofp = open(ofnp, 'a+')
        ofs = open(ofns, 'a+')
    # end if

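    # ProgressTracker records completed work items so that a run launched in
    # restart mode can skip items that were already processed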
    progTracker = ProgressTracker(output_folder=output_path,
                                  restart_mode=restart)
    totalTraceCount = 0
    for nc, sc, start_time, end_time in fds.local_net_sta_list():
        day = 24 * 3600
        dayCount = 0
        curr = start_time
        traceCountP = 0
        pickCountP = 0
        traceCountS = 0
        pickCountS = 0
        sw_start = datetime.now()
        step = day
        while (curr < end_time):
            if (curr + step > end_time):
                step = end_time - curr
            # end if

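            # select events whose preferred-origin timestamp falls within the
            # current one-day window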
            eventIndices = (np.where((originTimestamps >= curr.timestamp) & \
                                     (originTimestamps <= (curr + day).timestamp)))[0]

            if (eventIndices.shape[0] > 0):
                totalTraceCount += 1
                stations = fds.get_stations(curr,
                                            curr + day,
                                            network=nc,
                                            station=sc)
                stations_zch = [s for s in stations
                                if 'Z' in s[3]]  # only Z channels
                stations_nch = [
                    s for s in stations if 'N' in s[3] or '1' in s[3]
                ]  # only N channels
                stations_ech = [
                    s for s in stations if 'E' in s[3] or '2' in s[3]
                ]  # only E channels

                for codes in stations_zch:
                    if (progTracker.increment()): pass
                    else: continue

                    st = fds.get_waveforms(codes[0],
                                           codes[1],
                                           codes[2],
                                           codes[3],
                                           curr,
                                           curr + step,
                                           automerge=True,
                                           trace_count_threshold=200)

                    if (len(st) == 0): continue
                    dropBogusTraces(st)

                    slon, slat = codes[4], codes[5]
                    for ei in eventIndices:
                        event = events[ei]
                        po = event.preferred_origin
                        da = gps2dist_azimuth(po.lat, po.lon, slat, slon)
                        mag = None
                        if (event.preferred_magnitude):
                            mag = event.preferred_magnitude.magnitude_value
                        elif (len(po.magnitude_list)):
                            mag = po.magnitude_list[0].magnitude_value
                        if (mag == None): mag = np.NaN

                        if (np.isnan(mag) or mag < min_magnitude): continue

                        result = extract_p(
                            taupyModel,
                            pickerlist_p,
                            event,
                            slon,
                            slat,
                            st,
                            plot_output_folder=plot_output_folder)
                        if (result):
                            picklist, residuallist, snrlist, bandindex, pickerindex = result

                            arcdistance = kilometers2degrees(da[0] / 1e3)
                            for ip, pick in enumerate(picklist):
                                line = '%s %f %f %f %f %f ' \
                                       '%s %s %s %f %f %f ' \
                                       '%f %f %f ' \
                                       '%f %f %f %f %f '\
                                       '%d %d\n' % (event.public_id, po.utctime.timestamp, mag, po.lon, po.lat, po.depthkm,
                                                    codes[0], codes[1], codes[3], pick.timestamp, slon, slat,
                                                    da[1], da[2], arcdistance,
                                                    residuallist[ip], snrlist[ip, 0], snrlist[ip, 1], snrlist[ip, 2], snrlist[ip, 3],
                                                    bandindex, sigmalist[pickerindex])
                                ofp.write(line)
                            # end for
                            ofp.flush()
                            pickCountP += 1
                        # end if

                        if (len(stations_nch) == 0 and len(stations_ech) == 0):
                            result = extract_s(
                                taupyModel,
                                pickerlist_s,
                                event,
                                slon,
                                slat,
                                st,
                                None,
                                da[2],
                                plot_output_folder=plot_output_folder)
                            if (result):
                                picklist, residuallist, snrlist, bandindex, pickerindex = result

                                arcdistance = kilometers2degrees(da[0] / 1e3)
                                for ip, pick in enumerate(picklist):
                                    line = '%s %f %f %f %f %f ' \
                                           '%s %s %s %f %f %f ' \
                                           '%f %f %f ' \
                                           '%f %f %f %f %f ' \
                                           '%d %d\n' % (event.public_id, po.utctime.timestamp, mag, po.lon, po.lat, po.depthkm,
                                                        codes[0], codes[1], codes[3], pick.timestamp, slon, slat,
                                                        da[1], da[2], arcdistance,
                                                        residuallist[ip], snrlist[ip, 0], snrlist[ip, 1], snrlist[ip, 2], snrlist[ip, 3],
                                                        bandindex, sigmalist[pickerindex])
                                    ofs.write(line)
                                # end for
                                ofs.flush()
                                pickCountS += 1
                            # end if
                        # end if
                    # end for

                    traceCountP += len(st)
                # end for

                if (len(stations_nch) > 0
                        and len(stations_nch) == len(stations_ech)):
                    for codesn, codese in zip(stations_nch, stations_ech):
                        if (progTracker.increment()): pass
                        else: continue

                        stn = fds.get_waveforms(codesn[0],
                                                codesn[1],
                                                codesn[2],
                                                codesn[3],
                                                curr,
                                                curr + step,
                                                automerge=True,
                                                trace_count_threshold=200)
                        ste = fds.get_waveforms(codese[0],
                                                codese[1],
                                                codese[2],
                                                codese[3],
                                                curr,
                                                curr + step,
                                                automerge=True,
                                                trace_count_threshold=200)

                        dropBogusTraces(stn)
                        dropBogusTraces(ste)

                        if (len(stn) == 0): continue
                        if (len(ste) == 0): continue

                        slon, slat = codesn[4], codesn[5]

                        for ei in eventIndices:
                            event = events[ei]
                            po = event.preferred_origin
                            da = gps2dist_azimuth(po.lat, po.lon, slat, slon)

                            mag = None
                            if (event.preferred_magnitude):
                                mag = event.preferred_magnitude.magnitude_value
                            elif (len(po.magnitude_list)):
                                mag = po.magnitude_list[0].magnitude_value
                            if (mag == None): mag = np.NaN

                            if (np.isnan(mag) or mag < min_magnitude): continue

                            result = extract_s(
                                taupyModel,
                                pickerlist_s,
                                event,
                                slon,
                                slat,
                                stn,
                                ste,
                                da[2],
                                plot_output_folder=plot_output_folder)
                            if (result):
                                picklist, residuallist, snrlist, bandindex, pickerindex = result

                                arcdistance = kilometers2degrees(da[0] / 1e3)
                                for ip, pick in enumerate(picklist):
                                    line = '%s %f %f %f %f %f ' \
                                           '%s %s %s %f %f %f ' \
                                           '%f %f %f ' \
                                           '%f %f %f %f %f ' \
                                           '%d %d\n' % (event.public_id, po.utctime.timestamp, mag, po.lon, po.lat, po.depthkm,
                                                        codesn[0], codesn[1], '00T', pick.timestamp, slon, slat,
                                                        da[1], da[2], arcdistance,
                                                        residuallist[ip], snrlist[ip, 0], snrlist[ip, 1], snrlist[ip, 2], snrlist[ip, 3],
                                                        bandindex, sigmalist[pickerindex])
                                    ofs.write(line)
                                # end for
                                ofs.flush()
                                pickCountS += 1
                            # end if
                        # end for

                        traceCountS += (len(stn) + len(ste))
                    # end for
                # end if
            # end if
            curr += step
            dayCount += 1
        # wend
        sw_stop = datetime.now()
        totalTime = (sw_stop - sw_start).total_seconds()

        gc.collect()
        print('(Rank %d: %5.2f%%, %d/%d) Processed %d traces and found %d p-arrivals and %d s-arrivals for '
              'network %s station %s in %f s. Memory usage: %5.2f MB.' %
              (rank, (float(totalTraceCount) / float(workload) * 100) if workload > 0 else 100, totalTraceCount, workload,
               traceCountP + traceCountS, pickCountP, pickCountS, nc, sc, totalTime,
               round(psutil.Process().memory_info().rss / 1024. / 1024., 2)))
    # end for
    ofp.close()
    ofs.close()

    print('Processing complete on rank %d' % (rank))

    del fds
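
The script above leaves one p_arrivals.<rank>.txt and one s_arrivals.<rank>.txt per MPI rank, which are never merged. A small post-processing sketch along the lines of what Examples no. 4 and 7 do for their scratch files; merge_rank_files and the '/tmp/out' path are hypothetical, and the single-header assumption holds for fresh (non-restart) runs:

import glob
import os

def merge_rank_files(output_path, stem, merged_name):
    """Concatenate per-rank arrival files, keeping one header line."""
    parts = sorted(glob.glob(os.path.join(output_path, '%s.*.txt' % stem)))
    with open(os.path.join(output_path, merged_name), 'w') as out:
        for i, part in enumerate(parts):
            with open(part) as f:
                lines = f.readlines()
            # each per-rank file starts with the same '#eventID ...' header;
            # keep it only once
            out.writelines(lines if i == 0 else lines[1:])

merge_rank_files('/tmp/out', 'p_arrivals', 'p_arrivals.txt')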
Example no. 4
def process(data_path, scratch_path, output_file_stem):
    """
    DATA_PATH: input-folder which contains output from iloc_phase_ident.py \n
    SCRATCH_PATH: scratch-folder \n
    OUTPUT_FILE_STEM: output file stem \n
    """

    comm = MPI.COMM_WORLD
    nproc = comm.Get_size()
    rank = comm.Get_rank()

    # Fetch and distribute event-ids
    eventIds = []
    inventory = defaultdict(list)
    if (rank==0):
        files = recursive_glob(data_path, "*.txt")
        
        for f in files:
            d = np.genfromtxt(f, dtype=[('mstring','S100'),('mfloat','f8')], skip_header=1)
            for item in np.atleast_1d(d):
                if(item[1]>0): eventIds.append(item[0])
            # end for
        # end for
        
        s = set(eventIds)
        assert len(s) == len(eventIds), 'Duplicate event-ids found'
        print('Processing %d events..' % (len(eventIds)))
        
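        # shuffle, then split into nproc chunks so that each rank receives a
        # comparable share of events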
        shuffle(eventIds)
        eventIds = split_list(eventIds, nproc)

        # fetch inventory
        db = None
        try:
            db = MySQLdb.connect(host="localhost",
                                 user="******",
                                 passwd="sysop",
                                 db="seiscomp3")
        except Exception:
            raise Exception('Failed to connect to database')
        # end try
        
        c = db.cursor()
        c.execute('select n.code, s.code, s.longitude, s.latitude from Network n, Station s where s._parent_oid=n._oid group by n.code, s.code')
        rows = c.fetchall()
        for row in rows:
            inventory['%s.%s'%(row[0], row[1])] = [row[2], row[3]]
        # end for

        db.close()
    #end if
    
    eventIds = comm.scatter(eventIds, root = 0)
    inventory = comm.bcast(inventory, root = 0)
    
    pprocfile = open('%s/pproc.%d.txt'%(scratch_path, rank), 'w+')
    sprocfile = open('%s/sproc.%d.txt'%(scratch_path, rank), 'w+')
    allprocfile = open('%s/allproc.%d.txt'%(scratch_path, rank), 'w+')
    for eid in tqdm(eventIds):
        ofn = '%s/%s.xml'%(scratch_path, eid.split('/')[-1])
        cmd = ['scxmldump -d  mysql://sysop:sysop@localhost/seiscomp3 -E %s -PAMf -o %s'% 
                (eid, ofn)]
        rc, _ = runprocess(cmd, get_results=False)
        if(rc!=0):
            print('Error exporting event: %s' % (eid))
        else:
            notFound = defaultdict(int)
            cat = read_events(ofn, format='SC3ML')
            
            linesp = []
            liness = []
            linesall = []
            for e in cat.events:
                po = e.preferred_origin()

                # retrieve depth; some preferred origins don't have depth values
                poDepth = po.depth
                if(poDepth == None):
                    for o in e.origins:
                        if(o.depth): poDepth = o.depth
                    # end for
                # end if

                if(poDepth == None): continue
                
                for a in po.arrivals:
                    try:
                        wid = a.pick_id.get_referred_object().waveform_id
                        ncode = wid.network_code
                        scode = wid.station_code
                        ccode = wid.channel_code
                    except Exception:
                        continue

                    
                    slon = None
                    slat = None
                    try:
                        slon, slat = inventory['%s.%s'%(ncode, scode)]
                    except Exception:
                        notFound['%s.%s'%(ncode, scode)] += 1
                        continue
                    # end try
                    
                    # get band-index and snr from comments
                    pick_attribs = defaultdict(lambda:-999.)
                    pick = a.pick_id.get_referred_object()
                    for c in pick.comments:
                        if ('text' in list(c.keys())):
                            for item in c['text'].split(','):
                                k,v = item.split('=')
                                pick_attribs[k.strip()] = float(v)
                            # end for
                        # end if
                    # end for

                    # get az, baz and distance
                    da = gps2dist_azimuth(po.latitude, po.longitude, slat, slon)

                    # create row
                    #if(a.phase not in ['P', 'S']): continue
                    if(a.time_residual is None): continue

                    line = [eid, '{:<25s}',
                            po.time.timestamp, '{:f}',
                            e.magnitudes[0].mag if (len(e.magnitudes)) else -999, '{:f}',
                            po.longitude, '{:f}',
                            po.latitude, '{:f}',
                            poDepth/1e3, '{:f}',
                            ncode, '{:<5s}',
                            scode, '{:<5s}',
                            ccode, '{:<5s}',
                            pick.time.timestamp, '{:f}',
                            a.phase, '{:<5s}',
                            slon, '{:f}',
                            slat, '{:f}',
                            da[1], '{:f}',
                            da[2], '{:f}',
                            kilometers2degrees(da[0]/1e3), '{:f}',
                            a.time_residual, '{:f}',
                            pick_attribs['phasepapy_snr'], '{:f}',
                            pick_attribs['quality_measure_cwt'], '{:f}',
                            pick_attribs['dom_freq'], '{:f}',
                            pick_attribs['quality_measure_slope'], '{:f}',
                            int(pick_attribs['band_index']), '{:d}',
                            int(pick_attribs['nsigma']), '{:d}']
                    
                    linesall.append(line)

                    if(a.phase == 'P' or a.phase == 'Pg'): linesp.append(line)
                    elif(a.phase == 'S' or a.phase == 'Sg'): liness.append(line)
                # end for
            # end for

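            # each row alternates value, format-spec; joining the odd elements
            # yields the format string, which is applied to the even elements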
            for line in linesp:
                lineout = ' '.join(line[1::2]).format(*line[::2])
                pprocfile.write(lineout + '\n')
            # end for
            
            for line in liness:
                lineout = ' '.join(line[1::2]).format(*line[::2])
                sprocfile.write(lineout + '\n')
            # end for
            
            for line in linesall:
                lineout = ' '.join(line[1::2]).format(*line[::2])
                allprocfile.write(lineout + '\n')
            # end for

            if (len(notFound)): print('Rank: %d' % (rank), notFound)
        # end if
        if (os.path.exists(ofn)): os.remove(ofn)
        #break
    # end for
    pprocfile.close()
    sprocfile.close()
    allprocfile.close()

    header = '#eventID originTimestamp mag originLon originLat originDepthKm net sta cha pickTimestamp phase stationLon stationLat az baz distance ttResidual snr qualityMeasureCWT domFreq qualityMeasureSlope bandIndex nSigma\n'
    comm.barrier()
    if(rank == 0):
        ofp = open(output_file_stem + '.p.txt', 'w+')
        ofs = open(output_file_stem + '.s.txt', 'w+')
        ofall = open(output_file_stem + '.all.txt', 'w+')
        
        ofp.write(header)
        ofs.write(header)
        ofall.write(header)
        for i in range(nproc):
            pfn = '%s/pproc.%d.txt'%(scratch_path, i)
            sfn = '%s/sproc.%d.txt'%(scratch_path, i)
            allfn = '%s/allproc.%d.txt'%(scratch_path, i)
            
            lines = open(pfn, 'r').readlines()
            for line in lines:
                ofp.write(line)
            # end for
            
            lines = open(sfn, 'r').readlines()
            for line in lines:
                ofs.write(line)
            # end for
            
            lines = open(allfn, 'r').readlines()
            for line in lines:
                ofall.write(line)
            # end for

            if (os.path.exists(pfn)): os.remove(pfn)
            if (os.path.exists(sfn)): os.remove(sfn)
            if (os.path.exists(allfn)): os.remove(allfn)
        # end for
        ofp.close()
        ofs.close()
        ofall.close()
Example no. 5
    def _load_events(self):
        self._load_events_helper()
        cache = {}
        notFound = defaultdict(int)
        oEvents = []
        missingStations = defaultdict(int)
        for e in self.eventList:
            if (e.preferred_origin and len(e.preferred_origin.arrival_list)):
                cullList = []
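                # arrivals that already have a network code are kept as-is;
                # the rest are remapped to the closest inventory station, and
                # culled if no match is found or if the recomputed distance
                # disagrees with the catalogue value by more than 0.5 degrees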
                for a in e.preferred_origin.arrival_list:
                    if (len(a.net)): continue

                    seedid = '%s.%s.%s.%s' % (a.net, a.sta, a.loc, a.cha)
                    newCode = None
                    if (seedid not in cache):
                        sc = a.sta
                        lonlat = self.isc_coords_dict[sc]
                        if (len(lonlat) == 0):
                            cullList.append(a)
                            continue
                        # end if

                        r = self.fdsn_inventory.getClosestStations(lonlat[0],
                                                                   lonlat[1],
                                                                   maxdist=1e3)
                        #if(a.sta=='KUM'): print a.net, a.sta, a.loc, a.cha, r
                        if (not r):
                            notFound[sc] += 1
                        else:
                            for cr in r[0]:
                                c = cr.split('.')[0]
                                newCode = c
                            # end for
                        # end if

                        if (newCode):
                            cache[seedid] = newCode
                        # end if
                    else:
                        newCode = cache[seedid]
                    # end if

                    if (newCode):
                        #print a.net, newCode
                        a.net = newCode

                        sc = self.fdsn_inventory.t[a.net][a.sta]
                        if (type(sc) == defaultdict):
                            cullList.append(a)
                            continue
                        # end if
                        da = gps2dist_azimuth(e.preferred_origin.lat,
                                              e.preferred_origin.lon, sc[1],
                                              sc[0])
                        dist = kilometers2degrees(da[0] / 1e3)
                        if (np.fabs(a.distance - dist) > 0.5):
                            cullList.append(a)
                        # end if
                    # end if
                # end for
                for c in cullList:
                    e.preferred_origin.arrival_list.remove(c)
            # end if

            # Create obspy event object
            ci = OCreationInfo(author='GA',
                               creation_time=UTCDateTime(),
                               agency_id='GA-iteration-1')
            oid = self.get_id()
            origin = OOrigin(resource_id=OResourceIdentifier(id=oid),
                             time=UTCDateTime(e.preferred_origin.utctime),
                             longitude=e.preferred_origin.lon,
                             latitude=e.preferred_origin.lat,
                             depth=e.preferred_origin.depthkm * 1e3,
                             method_id=OResourceIdentifier(id='unknown'),
                             earth_model_id=OResourceIdentifier(id='iasp91'),
                             evaluation_mode='automatic',
                             creation_info=ci)
            magnitude = OMagnitude(
                resource_id=OResourceIdentifier(id=self.get_id()),
                mag=e.preferred_magnitude.magnitude_value,
                magnitude_type=e.preferred_magnitude.magnitude_type,
                origin_id=OResourceIdentifier(id=oid),
                creation_info=ci)
            event = OEvent(resource_id=OResourceIdentifier(id=self.get_id()),
                           creation_info=ci,
                           event_type='earthquake')
            event.origins = [origin]
            event.magnitudes = [magnitude]
            event.preferred_magnitude_id = magnitude.resource_id
            event.preferred_origin_id = origin.resource_id

            # Insert old picks
            for a in e.preferred_origin.arrival_list:
                if (type(self.fdsn_inventory.t[a.net][a.sta]) == defaultdict):
                    missingStations[a.net + '.' + a.sta] += 1
                    continue
                # end if
                oldPick = OPick(
                    resource_id=OResourceIdentifier(id=self.get_id()),
                    time=UTCDateTime(a.utctime),
                    waveform_id=OWaveformStreamID(network_code=a.net,
                                                  station_code=a.sta,
                                                  channel_code=a.cha),
                    methodID=OResourceIdentifier('unknown'),
                    phase_hint=a.phase,
                    evaluation_mode='automatic',
                    creation_info=ci)

                oldArr = OArrival(resource_id=OResourceIdentifier(
                    id=oldPick.resource_id.id + "#"),
                                  pick_id=oldPick.resource_id,
                                  phase=oldPick.phase_hint,
                                  distance=a.distance,
                                  earth_model_id=OResourceIdentifier(
                                      'quakeml:ga.gov.au/earthmodel/iasp91'),
                                  creation_info=ci)

                event.picks.append(oldPick)
                event.preferred_origin().arrivals.append(oldArr)
            # end for

            # Insert our picks
            opList = self.our_picks.picks[e.public_id]
            if (len(opList)):
                for op in opList:
                    if (type(self.fdsn_inventory.t[op[1]][op[2]]) ==
                            defaultdict):
                        missingStations[op[1] + '.' + op[2]] += 1
                        continue
                    # end if
                    newPick = OPick(
                        resource_id=OResourceIdentifier(id=self.get_id()),
                        time=UTCDateTime(op[0]),
                        waveform_id=OWaveformStreamID(network_code=op[1],
                                                      station_code=op[2],
                                                      channel_code=op[3]),
                        methodID=OResourceIdentifier('phasepapy/aicd'),
                        backazimuth=op[-1],
                        phase_hint=op[4],
                        evaluation_mode='automatic',
                        comments=op[6],
                        creation_info=ci)

                    newArr = OArrival(
                        resource_id=OResourceIdentifier(
                            id=newPick.resource_id.id + "#"),
                        pick_id=newPick.resource_id,
                        phase=newPick.phase_hint,
                        azimuth=op[-2],
                        distance=op[-3],
                        time_residual=op[5],
                        time_weight=1.,
                        earth_model_id=OResourceIdentifier(
                            'quakeml:ga.gov.au/earthmodel/iasp91'),
                        creation_info=ci)
                    event.picks.append(newPick)
                    event.preferred_origin().arrivals.append(newArr)
                # end for
            # end if

            quality = OOriginQuality(
                associated_phase_count=len(e.preferred_origin.arrival_list) +
                len(self.our_picks.picks[e.public_id]),
                used_phase_count=len(e.preferred_origin.arrival_list) +
                len(self.our_picks.picks[e.public_id]))
            event.preferred_origin().quality = quality
            oEvents.append(event)
        # end for // loop over e

        #print notFound
        print(self.rank, missingStations)

        cat = OCatalog(events=oEvents)
        ofn = self.output_path + '/%d.xml' % (self.rank)
        cat.write(ofn, format='SC3ML')
Example no. 6
def write_antelope_db(db_path, origin, arrival, site, algorithm, auth_name):
    '''
    Creates an Antelope database from csv data so that Antelope utilities
    can be applied to the data. The assoc file contains the required
    information to relate an event to its picks.

    Parameters
    ----------
    db_path : str
        The path to the directory where you would like to create the database
        
    origin : pd.DataFrame
        A DataFrame with the following columns:
            
        - lat: A column of floats describing latitude information.
        
        - lon: A column of floats describing longitude information.
        
        - depth: A column of floats describing depth information.
        
        - datetime: A str of the form %Y-%m-%d %H:%M:%S.%f
        
        - quality: A float describing the number of picks used in the
                   location calculation

        - nass: An int giving the number of associated arrivals, written
                to the origin table

        - ndef: An int giving the number of time-defining arrivals, written
                to the origin table

        - orid: A column of unique identifiers for origin. Has corresponding
                entries in the arrival table
        
    arrival : pd.DataFrame
        A DataFrame with the following columns:
            
        - sta: A str describing the station name
        
        - datetime: A str of the form %Y-%m-%d %H:%M:%S.%f
        
        - phase: A str describing the phase type of a given arrival, must be P
                 or S
        
        - orid: The origin that each arrival corresponds to. Has corresponding
                entries in the origin table
                
    site : pd.DataFrame
        A DataFrame with the following columns:
            
        - sta: A str describing the station name
        
        - lat : A float describing the latitude of the station
        
        - lon : A float describing the longitude of the station
            
    algorithm : str
        The name of the algorithm used to calculate earthquake location.
        
    auth_name : str
        The name of the author of the database. I would suggest using the agency you
        are affiliated with.
        
    Returns
    -------
    
    None : 
        Returns nothing as the purpose of this function is to create database files
        that can be used by Antelope.
    '''

    if not os.path.exists(db_path):
        os.mkdir(db_path)

    #Resets the indexes of the DataFrames so that the index can be used to iterate
    origin.reset_index(inplace=True, drop=True)
    arrival.reset_index(inplace=True, drop=True)

    origin["epoch_time"] = origin.datetime.apply(str_to_epoch)
    arrival["epoch_time"] = arrival.datetime.apply(str_to_epoch)

    origin_file = open(os.path.join(db_path, 'db.origin'), 'w')
    arrival_file = open(os.path.join(db_path, 'db.arrival'), 'w')
    assoc_file = open(os.path.join(db_path, 'db.assoc'), 'w')
    event_file = open(os.path.join(db_path, 'db.event'), 'w')
    origerr_file = open(os.path.join(db_path, 'db.origerr'), 'w')

    ts = datetime.now().timestamp()
    t = round(ts, 5)

    arid = 1

    #Loops over all the origins
    for i in range(len(origin.index)):

        #Prints a line to the origin file with the correct formatting
        origin_file.write(
            '%9.4f %9.4f %9.4f %17.5f %8d %8d %8d %4d %4d %4d %8d '
            '%8d %-2s %-4s %9.4f %-s %7.2f %8d %7.2f %8d %7.2f %8d '
            '%-15s %-15s %8d %17.5f\n' %
            (origin.lat[i], origin.lon[i], origin.depth[i],
             origin.epoch_time[i], i + 1, i + 1, -1, origin.nass[i],
             origin.ndef[i], -1, -1, -1, '-', '-', -999, '-', -999, -1, -999,
             -1, -999, -1, algorithm, algorithm + ':' + auth_name, -1, t))

        #Prints a line to the event file with the correct formatting
        event_file.write('%8d %-15s %8d %-15s %8d %17.5f\n' %
                         (i + 1, '-', -1, 'S-SNAP:PGC', -1, t))

        #Prints a line to the origerr file with the correct formatting

        #=============================================================
        #orid, sxx, syy, szz, stt, sxy, sxz, syz, stx, sty, stz, sdobs,
        #smajax, sminax, strike, sdepth, stime, conf, lddate
        #=============================================================
        origerr_file.write(
            '%8d %15.4f %15.4f %15.4f %15.4f %15.4f %15.4f '
            '%15.4f %15.4f %15.4f %15.4f %9.4f %9.4f %9.4f %6.2f %9.4f '
            '%8.2f %5.3f %8d %17.5f\n' %
            (i + 1, -999999999.9999, -999999999.9999, -999999999.9999,
             -999999999.9999, -999999999.9999, -999999999.9999,
             -999999999.9999, -999999999.9999, -999999999.9999,
             -999999999.9999, -1.0000, -1.0000, -1.0000, -1.00, -1.0000, -1.00,
             0.000, -1, t))

        tmp_orid = origin.orid[i]
        tmp_arrival = arrival[arrival.orid == tmp_orid]

        ev_lat = origin.lat[i]
        ev_lon = origin.lon[i]

        for j in tmp_arrival.index:  #range(len(tmp_arrival.index)):

            sta = arrival.sta[j]
            sta_lat = site.lat[site.sta == sta].iloc[0]
            sta_lon = site.lon[site.sta == sta].iloc[0]
            sta_coords = (sta_lat, sta_lon)
            ev_coords = (ev_lat, ev_lon)
            dist = geodesic(sta_coords, ev_coords).km
            delta = kilometers2degrees(dist)

            if arrival.phase[j] == 'S' and sta in [
                    'NBC8', 'NBC7', 'TD009', 'TD002'
            ]:
                chan = 'HH1'

            elif arrival.phase[j] == 'S' and sta not in [
                    'NBC8', 'NBC7', 'TD009', 'TD002'
            ]:
                chan = 'HHE'

            elif arrival.phase[j] == 'P':
                chan = 'HHZ'

            else:
                #Guards against chan being undefined; the docstring requires
                #phase to be P or S
                raise ValueError('arrival phase must be P or S')

            #Prints a line to the arrival file with correct formatting
            arrival_file.write(
                '%-6s %17.5f %8d %8d %8d %8d %-8s %-8s %s %6.3f %7.2f %7.2f %7.2f '
                '%7.2f %7.2f %7.3f %10.1f %7.2f %7.2f %s %-2s %10d %s %-16s %7d %17.5f\n'
                % (arrival.sta[j], arrival.epoch_time[j], arid, -1, -1, -1,
                   chan, arrival.phase[j], '-', -1.000, -1.0, -1.0, -1.0, -1.0,
                   -1.0, -1.0, -1.0, -1.0, -999.00, '-', '-', -1, '-',
                   'S-SNAP:PGC', -1, t))

            #Prints a line to the assoc file with correct formatting.
            assoc_file.write(
                '%8d %8d %-6s %-8s %4.2f %8.3f %7.2f %7.2f %8.3f '
                '%-s %7.1f %-s %-7.2f %-s %7.1f %6.3f %-15s %8d %17.5f\n' %
                (arid, i + 1, arrival.sta[j], arrival.phase[j], 9.99, delta,
                 -1, -1, -999.000, 'd', -999.0, '-', -999, '-', -999.0, 1, '-',
                 -1, t))
            arid += 1

    origin_file.close()
    arrival_file.close()
    assoc_file.close()
    event_file.close()

    return None
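
A hypothetical invocation of write_antelope_db with toy data; the values are illustrative only, and the call assumes the module-level helpers it relies on (str_to_epoch, geodesic) are available:

import pandas as pd

origin = pd.DataFrame({
    'lat': [56.1], 'lon': [-120.9], 'depth': [3.2],
    'datetime': ['2020-01-01 00:00:01.000000'],
    'nass': [2], 'ndef': [2], 'orid': [1]})

arrival = pd.DataFrame({
    'sta': ['NBC7', 'NBC7'],
    'datetime': ['2020-01-01 00:00:05.000000',
                 '2020-01-01 00:00:08.000000'],
    'phase': ['P', 'S'], 'orid': [1, 1]})

site = pd.DataFrame({'sta': ['NBC7'], 'lat': [56.0], 'lon': [-121.0]})

write_antelope_db('antelope_db', origin, arrival, site,
                  algorithm='S-SNAP', auth_name='PGC')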
Example no. 7
def process(data_path, inventory_file, scratch_path, output_file_stem):
    """
    DATA_PATH: input-folder
    Inventory: FDSN inventory
    SCRATCH_PATH: scratch-folder
    OUTPUT_FILE_STEM: output file stem
    """

    comm = MPI.COMM_WORLD
    nproc = comm.Get_size()
    rank = comm.Get_rank()

    # Fetch and distribute xml files
    inventory = defaultdict(list)
    files = []
    if (rank == 0):
        files = recursive_glob(data_path, "*.xml")
        files = split_list(files, nproc)

        inv = read_inventory(inventory_file)
        for n in inv.networks:
            for s in n.stations:
                inventory['%s.%s' % (n.code, s.code)] = [
                    s.longitude, s.latitude, s.elevation
                ]
            # end for
        # end for
    # end if

    inventory = comm.bcast(inventory, root=0)
    files = comm.scatter(files, root=0)

    pprocfile = open('%s/pproc.%d.txt' % (scratch_path, rank), 'w+')
    sprocfile = open('%s/sproc.%d.txt' % (scratch_path, rank), 'w+')
    for file in files:
        cat = read_events(file, format='SC3ML')
        notFound = defaultdict(int)

        linesp = []
        liness = []
        for e in tqdm(cat.events, desc='Rank: %d' % (rank)):
            po = e.preferred_origin()

            # retrieve depth; some preferred origins don't have depth values
            poDepth = po.depth
            if (poDepth == None):
                for o in e.origins:
                    if (o.depth): poDepth = o.depth
                # end for
            # end if

            if (poDepth == None): continue

            for a in po.arrivals:
                try:
                    wid = a.pick_id.get_referred_object().waveform_id
                    ncode = wid.network_code
                    scode = wid.station_code
                    ccode = wid.channel_code
                except Exception:
                    continue

                slon = None
                slat = None
                try:
                    # inventory is keyed by 'NET.STA' and holds
                    # [longitude, latitude, elevation]
                    slon, slat = inventory['%s.%s' % (ncode, scode)][:2]
                except Exception:
                    notFound['%s.%s' % (ncode, scode)] += 1
                    continue
                # end try

                # get band-index and snr from comments
                pick_attribs = defaultdict(lambda: -999.)
                pick = a.pick_id.get_referred_object()
                for c in pick.comments:
                    if ('text' in c.keys()):
                        for item in c['text'].split(','):
                            k, v = item.split('=')
                            pick_attribs[k.strip()] = float(v)
                        # end for
                    # end if
                # end for

                # get az, baz and distance
                da = gps2dist_azimuth(po.latitude, po.longitude, slat, slon)

                # create row
                if (a.phase not in ['P', 'S']): continue
                if (a.time_residual is None): continue

                line = [
                    e.resource_id, '{:<25s}', po.time.timestamp, '{:f}',
                    e.magnitudes[0].mag if (len(e.magnitudes)) else -999,
                    '{:f}', po.longitude, '{:f}', po.latitude, '{:f}',
                    poDepth / 1e3, '{:f}', ncode, '{:<5s}', scode, '{:<5s}',
                    ccode, '{:<5s}', pick.time.timestamp, '{:f}', a.phase,
                    '{:<5s}', slon, '{:f}', slat, '{:f}', da[1], '{:f}', da[2],
                    '{:f}',
                    kilometers2degrees(da[0] / 1e3), '{:f}', a.time_residual,
                    '{:f}', pick_attribs['phasepapy_snr'], '{:f}',
                    pick_attribs['quality_measure_cwt'], '{:f}',
                    pick_attribs['dom_freq'], '{:f}',
                    pick_attribs['quality_measure_slope'], '{:f}',
                    int(pick_attribs['band_index']), '{:d}',
                    int(pick_attribs['nsigma']), '{:d}'
                ]

                if (a.phase == 'P'): linesp.append(line)
                elif (a.phase == 'S'): liness.append(line)
            # end for
        # end for

        for line in linesp:
            lineout = ' '.join(line[1::2]).format(*line[::2])
            pprocfile.write(lineout + '\n')
        # end for

        for line in liness:
            lineout = ' '.join(line[1::2]).format(*line[::2])
            sprocfile.write(lineout + '\n')
        # end for
        if (len(notFound)): print('Rank: %d' % (rank), notFound)
    # end for

    pprocfile.close()
    sprocfile.close()

    header = '#eventID originTimestamp mag originLon originLat originDepthKm net sta cha pickTimestamp phase stationLon stationLat az baz distance ttResidual snr qualityMeasureCWT domFreq qualityMeasureSlope bandIndex nSigma\n'
    comm.barrier()
    if (rank == 0):
        ofp = open(output_file_stem + '.p.txt', 'w+')
        ofs = open(output_file_stem + '.s.txt', 'w+')

        ofp.write(header)
        ofs.write(header)
        for i in range(nproc):
            pfn = '%s/pproc.%d.txt' % (scratch_path, i)
            sfn = '%s/sproc.%d.txt' % (scratch_path, i)

            lines = open(pfn, 'r').readlines()
            for line in lines:
                ofp.write(line)
            # end for

            lines = open(sfn, 'r').readlines()
            for line in lines:
                ofs.write(line)
            # end for

            if (os.path.exists(pfn)): os.remove(pfn)
            if (os.path.exists(sfn)): os.remove(sfn)
        # end for
        ofp.close()
        ofs.close()
Example no. 8
def process(asdf_source, event_folder, output_path, min_magnitude):
    """
    ASDF_SOURCE: Text file containing a list of paths to ASDF files
    EVENT_FOLDER: Path to folder containing event files\n
    OUTPUT_PATH: Output folder \n
    """

    comm = MPI.COMM_WORLD
    nproc = comm.Get_size()
    rank = comm.Get_rank()
    proc_workload = None

    if (rank == 0):

        def outputConfigParameters():
            # output config parameters
            fn = 'pick.%s.cfg' % (
                UTCDateTime.now().strftime("%y-%m-%d.T%H.%M"))
            fn = os.path.join(output_path, fn)

            f = open(fn, 'w+')
            f.write('Parameter Values:\n\n')
            f.write('%25s\t\t: %s\n' % ('ASDF_SOURCE', asdf_source))
            f.write('%25s\t\t: %s\n' % ('EVENT_FOLDER', event_folder))
            f.write('%25s\t\t: %s\n' % ('OUTPUT_PATH', output_path))
            f.close()

        # end func

        outputConfigParameters()
    # end if

    cat = CatalogCSV(event_folder)
    events = cat.get_events()
    originTimestamps = cat.get_preferred_origin_timestamps()

    # ==========================================
    #picker = fbpicker.FBPicker(t_long=5, freqmin=0.1, mode='std', t_ma=20, nsigma=8, \
    #                           t_up=1, nr_len=5, nr_coeff=2, pol_len=10, pol_coeff=10, uncert_coeff=3)
    picker_p = aicdpicker.AICDPicker(t_ma=5,
                                     nsigma=8,
                                     t_up=1,
                                     nr_len=5,
                                     nr_coeff=2,
                                     pol_len=10,
                                     pol_coeff=10,
                                     uncert_coeff=3)
    picker_s = aicdpicker.AICDPicker(t_ma=15,
                                     nsigma=8,
                                     t_up=1,
                                     nr_len=5,
                                     nr_coeff=2,
                                     pol_len=10,
                                     pol_coeff=10,
                                     uncert_coeff=3)

    taupyModel = TauPyModel(model='iasp91')
    fds = FederatedASDFDataSet(asdf_source, use_json_db=False, logger=None)
    workload = getWorkloadEstimate(fds, originTimestamps)

    header = '#eventID originTimestamp mag originLon originLat originDepthKm net sta cha pickTimestamp stationLon stationLat az baz distance ttResidual snr bandIndex\n'
    ofnp = os.path.join(output_path, 'p_arrivals.%d.txt' % (rank))
    ofp = open(ofnp, 'w+')
    ofns = os.path.join(output_path, 's_arrivals.%d.txt' % (rank))
    ofs = open(ofns, 'w+')
    ofp.write(header)
    ofs.write(header)

    totalTraceCount = 0
    for nc, sc, start_time, end_time in fds.local_net_sta_list():
        day = 24 * 3600
        dayCount = 0
        curr = start_time
        traceCountP = 0
        pickCountP = 0
        traceCountS = 0
        pickCountS = 0
        sw_start = datetime.now()
        step = day
        while (curr < end_time):
            if (curr + step > end_time):
                step = end_time - curr
            # end if

            eventIndices = (np.where((originTimestamps >= curr.timestamp) & \
                                     (originTimestamps <= (curr + day).timestamp)))[0]

            if (eventIndices.shape[0] > 0):
                totalTraceCount += 1
                stations = fds.get_stations(curr,
                                            curr + day,
                                            network=nc,
                                            station=sc)
                stations_zch = [s for s in stations
                                if 'Z' in s[3]]  # only Z channels
                stations_nch = [
                    s for s in stations if 'N' in s[3] or '1' in s[3]
                ]  # only N channels
                stations_ech = [
                    s for s in stations if 'E' in s[3] or '2' in s[3]
                ]  # only E channels

                for codes in stations_zch:
                    st = fds.get_waveforms(codes[0],
                                           codes[1],
                                           codes[2],
                                           codes[3],
                                           curr,
                                           curr + step,
                                           automerge=True,
                                           trace_count_threshold=200)

                    if (len(st) == 0): continue
                    dropBogusTraces(st)

                    slon, slat = codes[4], codes[5]
                    for ei in eventIndices:
                        event = events[ei]
                        po = event.preferred_origin
                        da = gps2dist_azimuth(po.lat, po.lon, slat, slon)
                        mag = None
                        if (event.preferred_magnitude):
                            mag = event.preferred_magnitude.magnitude_value
                        elif (len(po.magnitude_list)):
                            mag = po.magnitude_list[0].magnitude_value
                        if (mag == None): mag = np.NaN

                        if (np.isnan(mag) or mag < min_magnitude): continue

                        result = extract_p(taupyModel, picker_p, event, slon,
                                           slat, st)
                        if (result):
                            pick, residual, snr, bi = result

                            line = '%s %f %f %f %f %f ' \
                                   '%s %s %s %f %f %f ' \
                                   '%f %f %f ' \
                                   '%f %f %d\n' % (event.public_id, po.utctime.timestamp, mag, po.lon, po.lat, po.depthkm,
                                                   codes[0], codes[1], codes[3], pick.timestamp, slon, slat,
                                                   da[1], da[2], kilometers2degrees(da[0]/1e3),
                                                   residual, snr, bi)
                            ofp.write(line)
                            pickCountP += 1
                        # end if

                        if (len(stations_nch) == 0 and len(stations_ech) == 0):
                            result = extract_s(taupyModel, picker_s, event,
                                               slon, slat, st, None, da[2])
                            if (result):
                                pick, residual, snr, bi = result

                                line = '%s %f %f %f %f %f ' \
                                       '%s %s %s %f %f %f ' \
                                       '%f %f %f ' \
                                       '%f %f %d\n' % (event.public_id, po.utctime.timestamp, mag, po.lon, po.lat, po.depthkm,
                                                       codes[0], codes[1], codes[3], pick.timestamp, slon, slat,
                                                       da[1], da[2], kilometers2degrees(da[0] / 1e3),
                                                       residual, snr, bi)
                                ofs.write(line)
                                pickCountS += 1
                            # end if
                        # end if
                    # end for

                    traceCountP += len(st)
                # end for

                if (len(stations_nch) > 0
                        and len(stations_nch) == len(stations_ech)):
                    for codesn, codese in zip(stations_nch, stations_ech):
                        stn = fds.get_waveforms(codesn[0],
                                                codesn[1],
                                                codesn[2],
                                                codesn[3],
                                                curr,
                                                curr + step,
                                                automerge=True,
                                                trace_count_threshold=200)
                        ste = fds.get_waveforms(codese[0],
                                                codese[1],
                                                codese[2],
                                                codese[3],
                                                curr,
                                                curr + step,
                                                automerge=True,
                                                trace_count_threshold=200)

                        dropBogusTraces(stn)
                        dropBogusTraces(ste)

                        if (len(stn) == 0): continue
                        if (len(ste) == 0): continue

                        slon, slat = codesn[4], codesn[5]

                        for ei in eventIndices:
                            event = events[ei]
                            po = event.preferred_origin
                            da = gps2dist_azimuth(po.lat, po.lon, slat, slon)

                            mag = None
                            if (event.preferred_magnitude):
                                mag = event.preferred_magnitude.magnitude_value
                            elif (len(po.magnitude_list)):
                                mag = po.magnitude_list[0].magnitude_value
                            if (mag == None): mag = np.NaN

                            if (np.isnan(mag) or mag < min_magnitude): continue

                            result = extract_s(taupyModel, picker_s, event,
                                               slon, slat, stn, ste, da[2])
                            if (result):
                                pick, residual, snr, bi = result

                                line = '%s %f %f %f %f %f ' \
                                       '%s %s %s %f %f %f ' \
                                       '%f %f %f ' \
                                       '%f %f %d\n' % (event.public_id, po.utctime.timestamp, mag, po.lon, po.lat, po.depthkm,
                                                       codesn[0], codesn[1], '00T', pick.timestamp, slon, slat,
                                                       da[1], da[2], kilometers2degrees(da[0] / 1e3),
                                                       residual, snr, bi)
                                ofs.write(line)
                                pickCountS += 1
                            # end if
                        # end for

                        traceCountS += (len(stn) + len(ste))
                    # end for
                # end if
            # end if
            curr += step
            dayCount += 1
        # wend
        sw_stop = datetime.now()
        totalTime = (sw_stop - sw_start).total_seconds()

        gc.collect()
        print('(Rank %d: %5.2f%%, %d/%d) Processed %d traces and found %d p-arrivals and %d s-arrivals for '
              'network %s station %s in %f s. Memory usage: %5.2f MB.' %
              (rank, (float(totalTraceCount) / float(workload) * 100) if workload > 0 else 100, totalTraceCount, workload,
               traceCountP + traceCountS, pickCountP, pickCountS, nc, sc, totalTime,
               round(psutil.Process().memory_info().rss / 1024. / 1024., 2)))
    # end for
    ofp.close()
    ofs.close()

    print('Processing complete on rank %d' % (rank))

    del fds
Example no. 9
def beach_map(m, event, stlo, stla, distance, show, format, zoom_level=8):
    """
    Plot beach ball on a map

    Function to plot stations and focal mechanisms on a Stamen terrain background.

    :param m: focal mechanism, refer to :func:`~tdmtpy.image.beach`
        for the supported formats.
    :type m: list
    :param event: event origin time, longitude and latitude; refer to
        :class:`~tdmtpy.configure.Configure` for details.
    :type event: dict
    :param stlo: station longitudes.
    :type stlo: list or :class:`~numpy.ndarray`
    :param stla: station latitudes.
    :type stla: list or :class:`~numpy.ndarray`
    :param distance: source-receiver distances in km.
    :type distance: list or :class:`~numpy.ndarray`
    :param show: turn on interactive display.
    :type show: bool
    :param format: figure file format.
    :type format: str
    :param zoom_level: background image tile zoom level. Default is ``8``.
    :type zoom_level: int
    """
    # Turn interactive plotting off
    # Only display plots when explicitly requested; save figures without
    # displaying them in IPython.
    plt.ioff()

    # Calculate image extent based on epicentral distance
    width = height = kilometers2degrees(0.5 * max(distance))

    lat1 = min(stla) - height
    lat2 = max(stla) + height
    lon1 = min(stlo) - width
    lon2 = max(stlo) + width

    data_crs = ccrs.PlateCarree()
    evlo = event["longitude"]
    evla = event["latitude"]
    point = (evlo, evla)

    stamen_terrain = cimgt.Stamen('terrain-background')
    projection = stamen_terrain.crs

    fig = plt.figure(dpi=dpi)
    fig.set_size_inches(8.5, 11)
    if format != "png":
        ax = fig.add_subplot(1, 1, 1, projection=projection,
                             rasterized=True)  # axes coordinates
    else:
        ax = fig.add_subplot(1, 1, 1,
                             projection=projection)  # axes coordinates
    ax.set_extent([lon1, lon2, lat1, lat2])
    ax.add_image(stamen_terrain, zoom_level)

    # Add tick labels
    g1 = ax.gridlines(crs=data_crs, draw_labels=True)
    g1.top_labels = False

    # Plot stations
    ax.plot(stlo,
            stla,
            marker="^",
            color="black",
            markersize=8,
            linestyle="",
            transform=data_crs)

    # Plot beach ball on map
    x, y = projection.transform_point(*point, src_crs=data_crs)
    bb = beach(m,
               xy=(x, y),
               facecolor="red",
               width=135,
               show_iso=True,
               axes=ax)
    ax.add_collection(bb)

    # Add title
    ax.set_title(event["datetime"])

    outfile = "map.%s" % format
    fig.savefig(outfile, format=format, bbox_inches="tight")
    if show:
        plt.show()
    else:
        plt.close(fig)
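
A minimal, hypothetical usage sketch (all values invented; the focal
mechanism is given in strike/dip/rake form, one of the formats
:func:`~tdmtpy.image.beach` accepts):

import numpy as np

mech = [30, 60, -90]                       # strike/dip/rake (degrees)
event = {"datetime": "2020-01-01T00:00:00",
         "longitude": -122.0, "latitude": 38.0}
stlo = np.array([-122.5, -121.6, -122.1])  # station longitudes
stla = np.array([38.4, 37.8, 37.5])        # station latitudes
distance = np.array([55.0, 48.0, 60.0])    # source-receiver distances, km

beach_map(mech, event, stlo, stla, distance,
          show=False, format="png", zoom_level=8)  # writes map.png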
Example No. 10
    def _load_events(self):
        self._load_events_helper()
        cache = {}
        notFound = defaultdict(int)
        oEvents = []
        missingStations = defaultdict(int)
        lines = []
        for e in tqdm(self.eventList, desc='Rank %d' % (self.rank)):
            if (e.preferred_origin and len(e.preferred_origin.arrival_list)):
                cullList = []
                for a in e.preferred_origin.arrival_list:
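                    # Only arrivals with an empty network code are re-mapped
                    # below; arrivals that already carry one are skipped.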
                    if (len(a.net)): continue

                    seedid = '%s.%s.%s.%s' % (a.net, a.sta, a.loc, a.cha)
                    newCode = None
                    if (seedid not in cache):
                        sc = a.sta
                        lonlat = self.isc_coords_dict[sc]
                        if (len(lonlat) == 0):
                            cullList.append(a)
                            continue
                        # end if

                        r = self.fdsn_inventory.getClosestStation(
                            lonlat[0], lonlat[1], maxdist=1e3)  # 1km
                        #if(a.sta=='KUM'): print a.net, a.sta, a.loc, a.cha, r
                        if (not r):
                            notFound[sc] += 1
                        else:
                            c = r[0].split('.')[0]
                            newCode = c
                        # end if

                        if (newCode):
                            cache[seedid] = newCode
                        # end if
                    else:
                        newCode = cache[seedid]
                    # end if

                    if (newCode):
                        #print a.net, newCode
                        a.net = newCode

                        sc = self.fdsn_inventory.t[a.net][a.sta]
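                        # The nested defaultdict lookup returns an (empty)
                        # defaultdict when the station is not in the inventory.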
                        if (type(sc) == defaultdict):
                            cullList.append(a)
                            continue
                        # end if
                        da = gps2dist_azimuth(e.preferred_origin.lat,
                                              e.preferred_origin.lon, sc[1],
                                              sc[0])
                        dist = kilometers2degrees(da[0] / 1e3)

                        # Cull the arrival if the catalogued epicentral distance
                        # disagrees with the recomputed one by more than 0.5 deg.
                        if (np.fabs(a.distance - dist) > 0.5):
                            #print ([e.preferred_origin.lon, e.preferred_origin.lat, sc[0], sc[1]])
                            cullList.append(a)
                        # end if
                    # end if
                # end for
                for c in cullList:
                    e.preferred_origin.arrival_list.remove(c)
            # end if

            # Create obspy event object
            ci = OCreationInfo(author='GA',
                               creation_time=UTCDateTime(),
                               agency_id='GA-iteration-1')
            oid = self.get_id()
            origin = OOrigin(resource_id=OResourceIdentifier(id=oid),
                             time=UTCDateTime(e.preferred_origin.utctime),
                             longitude=e.preferred_origin.lon,
                             latitude=e.preferred_origin.lat,
                             depth=e.preferred_origin.depthkm * 1e3,
                             method_id=OResourceIdentifier(id='unknown'),
                             earth_model_id=OResourceIdentifier(id='iasp91'),
                             evaluation_mode='automatic',
                             creation_info=ci)
            magnitude = OMagnitude(
                resource_id=OResourceIdentifier(id=self.get_id()),
                mag=e.preferred_magnitude.magnitude_value,
                magnitude_type=e.preferred_magnitude.magnitude_type,
                origin_id=OResourceIdentifier(id=oid),
                creation_info=ci)
            event = OEvent(
                resource_id=OResourceIdentifier(id=str(e.public_id)),
                creation_info=ci,
                event_type='earthquake')
            event.origins = [origin]
            event.magnitudes = [magnitude]
            event.preferred_magnitude_id = magnitude.resource_id
            event.preferred_origin_id = origin.resource_id

            # Insert old picks
            if (not self.discard_old_picks):
                for a in e.preferred_origin.arrival_list:
                    if (type(self.fdsn_inventory.t[a.net][a.sta]) ==
                            defaultdict):
                        missingStations[a.net + '.' + a.sta] += 1
                        continue
                    # end if
                    oldPick = OPick(
                        resource_id=OResourceIdentifier(id=self.get_id()),
                        time=UTCDateTime(a.utctime),
                        waveform_id=OWaveformStreamID(network_code=a.net,
                                                      station_code=a.sta,
                                                      channel_code=a.cha),
                        method_id=OResourceIdentifier('unknown'),
                        phase_hint=a.phase,
                        evaluation_mode='automatic',
                        creation_info=ci)

                    oldArr = OArrival(
                        resource_id=OResourceIdentifier(
                            id=oldPick.resource_id.id + "#"),
                        pick_id=oldPick.resource_id,
                        phase=oldPick.phase_hint,
                        distance=a.distance,
                        earth_model_id=OResourceIdentifier(
                            'quakeml:ga.gov.au/earthmodel/iasp91'),
                        creation_info=ci)

                    event.picks.append(oldPick)
                    event.preferred_origin().arrivals.append(oldArr)

                    # Populate list for text output: entries alternate
                    # (value, format-spec) pairs, consumed at the end via
                    # ' '.join(line[1::2]).format(*line[::2]).
                    line = [
                        str(e.public_id), '{:<25s}',
                        e.preferred_origin.utctime.timestamp, '{:f}',
                        e.preferred_magnitude.magnitude_value, '{:f}',
                        e.preferred_origin.lon, '{:f}', e.preferred_origin.lat,
                        '{:f}', e.preferred_origin.depthkm, '{:f}', a.net,
                        '{:<5s}', a.sta, '{:<5s}', a.cha, '{:<5s}',
                        a.utctime.timestamp, '{:f}', a.phase, '{:<5s}',
                        self.fdsn_inventory.t[a.net][a.sta][0], '{:f}',
                        self.fdsn_inventory.t[a.net][a.sta][1], '{:f}', -999,
                        '{:f}', -999, '{:f}', a.distance, '{:f}', -999, '{:f}',
                        -999, '{:f}', -999, '{:f}', -999, '{:f}', -999, '{:f}',
                        -999, '{:d}', -999, '{:d}'
                    ]
                    lines.append(line)
                # end for
            # end if

            # Insert our picks
            opList = self.our_picks.picks[e.public_id]
            if (len(opList)):
                for op in opList:
                    if (type(self.fdsn_inventory.t[op[1]][op[2]]) ==
                            defaultdict):
                        missingStations[op[1] + '.' + op[2]] += 1
                        continue
                    # end if
                    newPick = OPick(
                        resource_id=OResourceIdentifier(id=self.get_id()),
                        time=UTCDateTime(op[0]),
                        waveform_id=OWaveformStreamID(network_code=op[1],
                                                      station_code=op[2],
                                                      channel_code=op[3]),
                        method_id=OResourceIdentifier('phasepapy/aicd'),
                        backazimuth=op[-1],
                        phase_hint=op[4],
                        evaluation_mode='automatic',
                        comments=[
                            OComment(
                                text='phasepapy_snr = ' + str(op[6][0]) +
                                ', quality_measure_cwt = ' + str(op[6][1]) +
                                ', dom_freq = ' + str(op[6][2]) +
                                ', quality_measure_slope = ' + str(op[6][3]) +
                                ', band_index = ' + str(op[6][4]) +
                                ', nsigma = ' + str(op[6][5]),
                                force_resource_id=False)
                        ],
                        creation_info=ci)

                    newArr = OArrival(
                        resource_id=OResourceIdentifier(
                            id=newPick.resource_id.id + "#"),
                        pick_id=newPick.resource_id,
                        phase=newPick.phase_hint,
                        azimuth=op[-2],
                        distance=op[-3],
                        time_residual=op[5],
                        time_weight=1.,
                        earth_model_id=OResourceIdentifier(
                            'quakeml:ga.gov.au/earthmodel/iasp91'),
                        creation_info=ci)
                    event.picks.append(newPick)
                    event.preferred_origin().arrivals.append(newArr)

                    # Populate list for text output (value/format-spec pairs,
                    # as above).
                    line = [
                        str(e.public_id), '{:<25s}',
                        e.preferred_origin.utctime.timestamp, '{:f}',
                        e.preferred_magnitude.magnitude_value, '{:f}',
                        e.preferred_origin.lon, '{:f}', e.preferred_origin.lat,
                        '{:f}', e.preferred_origin.depthkm, '{:f}', op[1],
                        '{:<5s}', op[2], '{:<5s}', op[3], '{:<5s}',
                        UTCDateTime(op[0]).timestamp, '{:f}', op[4], '{:<5s}',
                        op[10], '{:f}', op[9], '{:f}', op[12], '{:f}', op[13],
                        '{:f}', op[11], '{:f}', op[5], '{:f}', op[6][0],
                        '{:f}', op[6][1], '{:f}', op[6][2], '{:f}', op[6][3],
                        '{:f}',
                        int(op[6][4]), '{:d}',
                        int(op[6][5]), '{:d}'
                    ]
                    lines.append(line)
                # end for
            # end if

            # Old arrivals contribute to the phase counts only when they are
            # retained; newly picked phases always count.
            phase_count = (len(e.preferred_origin.arrival_list) *
                           int(not self.discard_old_picks) +
                           len(self.our_picks.picks[e.public_id]))
            quality = OOriginQuality(associated_phase_count=phase_count,
                                     used_phase_count=phase_count)
            event.preferred_origin().quality = quality

            if (len(self.our_picks.picks[e.public_id]) == 0
                    and self.discard_old_picks):
                continue
            # end if

            oEvents.append(event)
        # end for (loop over events)

        if (len(missingStations)):
            for k, v in missingStations.items():
                self.logger.warning('Missing station %s: %d picks' % (k, v))
            # end for
        # end if

        # write xml output
        if (len(oEvents)):
            cat = OCatalog(events=oEvents)
            ofn = self.output_path + '/%d.xml' % (self.rank)
            cat.write(ofn, format='SC3ML')
        # end if

        # write text output
        procfile = open('%s/proc.%d.txt' % (self.output_path, self.rank), 'w+')
        for line in lines:
            lineout = ' '.join(line[1::2]).format(*line[::2])
            procfile.write(lineout + '\n')
        # end for
        procfile.close()

        # combine text output
        header = '#eventID originTimestamp mag originLon originLat originDepthKm net sta cha pickTimestamp phase stationLon stationLat az baz distance ttResidual snr qualityMeasureCWT domFreq qualityMeasureSlope bandIndex nSigma\n'
        self.comm.barrier()
        if (self.rank == 0):
            of = open('%s/ensemble.txt' % (self.output_path), 'w+')
            of.write(header)

            for i in range(self.nproc):
                fn = '%s/proc.%d.txt' % (self.output_path, i)

                with open(fn, 'r') as pf:
                    for line in pf:
                        of.write(line)
                # end for

                if (os.path.exists(fn)): os.remove(fn)
            # end for
            of.close()
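
Downstream, the combined ensemble.txt can be loaded directly. A minimal
sketch using numpy (pandas would work equally well), assuming the file
fits in memory:

import numpy as np

# names=True picks up the column names from the commented header line.
data = np.genfromtxt('ensemble.txt', names=True, dtype=None, encoding=None)
# Drop -999 sentinel residuals carried over from re-inserted catalogue picks.
s_picks = data[(data['phase'] == 'S') & (data['ttResidual'] > -999.)]
print('%d S-picks, median residual %.2f s'
      % (len(s_picks), np.median(s_picks['ttResidual'])))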