def main(args):
    """Query ComCat for events and print one formatted row per event.

    When --getCount is given, print '<nevents> <maxevents>' and exit 0.
    Otherwise fetch the data, splitting the request into time segments when
    the count exceeds the server's per-request limit, and print the results
    in the format selected by args.format.
    """
    if args.getCount:
        nevents, maxevents = comcat.getEventCount(bounds=args.bounds, radius=args.radius,
                                                  starttime=args.startTime, endtime=args.endTime,
                                                  magrange=args.magRange, catalog=args.catalog,
                                                  contributor=args.contributor)
        # print(...) with a single argument behaves identically on py2 and py3
        print('%i %i' % (nevents, maxevents))
        sys.exit(0)
    # actually get the data - do a count first to make sure our request isn't too large.
    nevents, maxevents = comcat.getEventCount(bounds=args.bounds, radius=args.radius,
                                              starttime=args.startTime, endtime=args.endTime,
                                              magrange=args.magRange, catalog=args.catalog,
                                              contributor=args.contributor)
    stime = datetime(1900, 1, 1)
    etime = datetime.utcnow()
    if nevents > maxevents:  # oops, too many events for one query
        if args.startTime:
            stime = args.startTime
        if args.endTime:
            etime = args.endTime
        segments = comcat.getTimeSegments([], args.bounds, args.radius, stime, etime,
                                          args.magRange, args.catalog, args.contributor)
        eventlist = []
        for stime, etime in segments:
            sys.stderr.write('%s - Getting data for %s => %s\n' % (datetime.now(), stime, etime))
            eventlist += comcat.getEventData(bounds=args.bounds, radius=args.radius,
                                             starttime=stime, endtime=etime,
                                             magrange=args.magRange, catalog=args.catalog,
                                             contributor=args.contributor,
                                             getComponents=args.getComponents,
                                             getAngles=args.getAngles,
                                             limitType=args.limitType)
    else:
        eventlist = comcat.getEventData(bounds=args.bounds, radius=args.radius,
                                        starttime=args.startTime, endtime=args.endTime,
                                        magrange=args.magRange, catalog=args.catalog,
                                        contributor=args.contributor,
                                        getComponents=args.getComponents,
                                        getAngles=args.getAngles,
                                        limitType=args.limitType)
    if not len(eventlist):
        sys.stderr.write('No events found. Exiting.\n')
        sys.exit(0)
    fmt = getFormatString(args.format, eventlist[0].keys())
    print(getHeader(args.format, eventlist[0].keys()))
    for event in eventlist:
        if args.limitType is not None and event['type'].lower() != args.limitType:
            continue
        # None magnitudes/depths would break the %-formatting below
        if event['mag'] is None:
            event['mag'] = float('nan')
        if event['depth'] is None:
            event['depth'] = float('nan')
        tpl = getFormatTuple(event)
        try:
            print(fmt % tpl)
        except Exception:
            # best effort per row, but don't silently drop events (was bare except: pass)
            sys.stderr.write('Could not format event %s\n' % str(event.get('id', '?')))
def main(args):
    """Query ComCat for events and print one formatted row per event.

    When --getCount is given, print '<nevents> <maxevents>' and exit 0.
    Large requests are split into time segments before download.
    """
    if args.getCount:
        nevents, maxevents = comcat.getEventCount(bounds=args.bounds, radius=args.radius,
                                                  starttime=args.startTime, endtime=args.endTime,
                                                  magrange=args.magRange, catalog=args.catalog,
                                                  contributor=args.contributor)
        print('%i %i' % (nevents, maxevents))
        sys.exit(0)
    # actually get the data - do a count first to make sure our request isn't too large.
    nevents, maxevents = comcat.getEventCount(bounds=args.bounds, radius=args.radius,
                                              starttime=args.startTime, endtime=args.endTime,
                                              magrange=args.magRange, catalog=args.catalog,
                                              contributor=args.contributor)
    if nevents > maxevents:  # oops, too many events for one query
        # Default the segment bounds when the user supplied no times, instead
        # of handing None values straight to getTimeSegments().
        stime = datetime(1900, 1, 1)
        etime = datetime.utcnow()
        if args.startTime:
            stime = args.startTime
        if args.endTime:
            etime = args.endTime
        segments = comcat.getTimeSegments([], args.bounds, args.radius, stime, etime,
                                          args.magRange, args.catalog, args.contributor)
        eventlist = []
        for stime, etime in segments:
            sys.stderr.write('%s - Getting data for %s => %s\n' % (datetime.now(), stime, etime))
            eventlist += comcat.getEventData(bounds=args.bounds, radius=args.radius,
                                             starttime=stime, endtime=etime,
                                             magrange=args.magRange, catalog=args.catalog,
                                             contributor=args.contributor,
                                             getComponents=args.getComponents,
                                             getAngles=args.getAngles,
                                             limitType=args.limitType)
    else:
        eventlist = comcat.getEventData(bounds=args.bounds, radius=args.radius,
                                        starttime=args.startTime, endtime=args.endTime,
                                        magrange=args.magRange, catalog=args.catalog,
                                        contributor=args.contributor,
                                        getComponents=args.getComponents,
                                        getAngles=args.getAngles,
                                        limitType=args.limitType)
    if not len(eventlist):
        sys.stderr.write('No events found. Exiting.\n')
        sys.exit(0)
    fmt = getFormatString(args.format, eventlist[0].keys())
    print(getHeader(args.format, eventlist[0].keys()))
    for event in eventlist:
        if args.limitType is not None and event['type'].lower() != args.limitType:
            continue
        # None magnitudes/depths would crash the %-format and the row would
        # have been silently discarded by the old bare except
        if event['mag'] is None:
            event['mag'] = float('nan')
        if event['depth'] is None:
            event['depth'] = float('nan')
        tpl = getFormatTuple(event)
        try:
            print(fmt % tpl)
        except Exception:
            sys.stderr.write('Could not format event %s\n' % str(event.get('id', '?')))
def __init__(self, cnn):
    """Pull M6+ earthquakes from ComCat and insert them into the database.

    cnn: open database connection exposing query()/insert()
         (presumably a pygresql-style connection - dictresult() is used;
         confirm against the caller).
    """
    # check the last earthquake in the db
    quakes = cnn.query('SELECT max(date) as mdate FROM earthquakes')
    stime = quakes.dictresult()[0].get('mdate')
    if stime is None:
        # no events in the table, add all - start at the earliest rinex observation
        rinex = cnn.query(
            'SELECT min("ObservationSTime") as mdate FROM rinex')
        stime = rinex.dictresult()[0].get('mdate')
    etime = comcat.ShakeDateTime.utcnow()
    # we used to do a count of how many events would be returned,
    # but it turns out that doing the count takes almost as much time
    # as a query that actually returns the data. So, here we're just
    # going to split the time segment up into one-week chunks and assume
    # that no individual segment will return more than the 20,000 event limit.
    segments = comcat.getTimeSegments2(stime, etime)
    eventlist = []
    maxmags = 0
    print('Breaking request into %i segments.\n' % len(segments))
    for stime, etime in segments:
        teventlist, tmaxmags = comcat.getEventData(starttime=stime,
                                                   endtime=etime,
                                                   magrange=(6, 10))
        eventlist += teventlist
        if tmaxmags > maxmags:
            maxmags = tmaxmags
    if not len(eventlist):
        print('No events found. Exiting.\n')
    # eventlist is a list of ordereddict objects
    for event in eventlist:
        # round-trip through TIMEFMT2 to normalize the timestamp precision
        event_date = datetime.strptime(
            event.get('time')[0].strftime(TIMEFMT2), TIMEFMT2)
        try:
            cnn.insert('earthquakes',
                       date=event_date,
                       lat=event.get('lat')[0],
                       lon=event.get('lon')[0],
                       depth=event.get('depth')[0],
                       mag=event.get('mag')[0])
        except Exception as e:
            # most likely a duplicate row - report it instead of failing silently
            print('Could not insert event at %s (%s); skipping.' % (event_date, str(e)))
            continue
def main(args):
    """Search ComCat and print events as CSV, one row per event.

    --getCount prints '<nevents> <maxevents>' and exits; -l (limitType)
    requires -o (getComponents). Exits 1 on bad arguments.
    """
    if args.limitType and not args.getComponents:
        print('To limit your search to specific moment tensor types, specify both -o and -l options.')
        sys.exit(1)
    if args.getCount:
        nevents, maxevents = comcat.getEventCount(bounds=args.bounds, radius=args.radius,
                                                  starttime=args.startTime, endtime=args.endTime,
                                                  magrange=args.magRange, catalog=args.catalog,
                                                  contributor=args.contributor,
                                                  devServer=args.debug)
        print('%i %i' % (nevents, maxevents))
        sys.exit(0)
    stime = comcat.ShakeDateTime(1900, 1, 1)
    etime = comcat.ShakeDateTime.utcnow()
    if args.startTime:
        stime = args.startTime
    if args.endTime:
        etime = args.endTime
    if stime >= etime:
        print('End time must be greater than start time. Your inputs: Start %s End %s' % (stime, etime))
        sys.exit(1)
    # we used to do a count of how many events would be returned,
    # but it turns out that doing the count takes almost as much time
    # as a query that actually returns the data. So, here we're just
    # going to split the time segment up into one-week chunks and assume
    # that no individual segment will return more than the 20,000 event limit.
    segments = comcat.getTimeSegments2(stime, etime)
    eventlist = []
    maxmags = 0
    sys.stderr.write('Breaking request into %i segments.\n' % len(segments))
    for stime, etime in segments:
        teventlist, tmaxmags = comcat.getEventData(bounds=args.bounds, radius=args.radius,
                                                   starttime=stime, endtime=etime,
                                                   magrange=args.magRange, catalog=args.catalog,
                                                   contributor=args.contributor,
                                                   getComponents=args.getComponents,
                                                   getAngles=args.getAngles,
                                                   limitType=args.limitType,
                                                   getAllMags=args.getAllMags,
                                                   devServer=args.debug)
        eventlist += teventlist
        if tmaxmags > maxmags:
            maxmags = tmaxmags
    if not len(eventlist):
        sys.stderr.write('No events found. Exiting.\n')
        sys.exit(0)
    # eventlist is a list of ordereddict objects; the dict keys collectively
    # provide the header, and the dict values contain (value,fmt) where value
    # is magnitude, latitude, etc. and fmt is the formatting string
    # print the header
    tmpevent = getNewEvent(eventlist[0], maxmags)
    hdrlist = list(tmpevent.keys())
    print(','.join(hdrlist))
    # get the formatting string for each column
    fnuggets = [v[1] for v in tmpevent.values()]
    fmt = ','.join(fnuggets)
    for event in eventlist:
        if args.limitType is not None and event['type'][0].lower() != args.limitType:
            continue
        event['time'][0] = event['time'][0].strftime(TIMEFMT)[0:-3]
        newevent = getNewEvent(event, maxmags)
        tpl = tuple(v[0] for v in newevent.values())
        try:
            print(fmt % tpl)
        except Exception:  # was a bare except: don't trap SystemExit/KeyboardInterrupt
            sys.stderr.write('Could not write event %s\n' % event['id'])
            # dump (format, value) pairs to help diagnose the bad column
            for i in range(len(fnuggets)):
                print((fnuggets[i], tpl[i]))
            break
def main(args):
    """Search ComCat and print events as CSV, one row per event.

    --getCount prints '<nevents> <maxevents>' and exits; -l (limitType)
    requires -o (getComponents). Exits 1 on bad arguments.
    """
    if args.limitType and not args.getComponents:
        # print(...) with one argument is valid on py2 and py3 alike
        print('To limit your search to specific moment tensor types, specify both -o and -l options.')
        sys.exit(1)
    if args.getCount:
        nevents, maxevents = comcat.getEventCount(bounds=args.bounds,
                                                  radius=args.radius,
                                                  starttime=args.startTime,
                                                  endtime=args.endTime,
                                                  magrange=args.magRange,
                                                  catalog=args.catalog,
                                                  contributor=args.contributor,
                                                  devServer=args.debug)
        print('%i %i' % (nevents, maxevents))
        sys.exit(0)
    stime = comcat.ShakeDateTime(1900, 1, 1)
    etime = comcat.ShakeDateTime.utcnow()
    if args.startTime:
        stime = args.startTime
    if args.endTime:
        etime = args.endTime
    if stime >= etime:
        print('End time must be greater than start time. Your inputs: Start %s End %s' % (
            stime, etime))
        sys.exit(1)
    # we used to do a count of how many events would be returned,
    # but it turns out that doing the count takes almost as much time
    # as a query that actually returns the data. So, here we're just
    # going to split the time segment up into one-week chunks and assume
    # that no individual segment will return more than the 20,000 event limit.
    segments = comcat.getTimeSegments2(stime, etime)
    eventlist = []
    maxmags = 0
    sys.stderr.write('Breaking request into %i segments.\n' % len(segments))
    for stime, etime in segments:
        teventlist, tmaxmags = comcat.getEventData(
            bounds=args.bounds, radius=args.radius,
            starttime=stime, endtime=etime,
            magrange=args.magRange, catalog=args.catalog,
            contributor=args.contributor,
            getComponents=args.getComponents,
            getAngles=args.getAngles, limitType=args.limitType,
            getAllMags=args.getAllMags, devServer=args.debug)
        eventlist += teventlist
        if tmaxmags > maxmags:
            maxmags = tmaxmags
    if not len(eventlist):
        sys.stderr.write('No events found. Exiting.\n')
        sys.exit(0)
    # eventlist is a list of ordereddict objects; the dict keys collectively
    # provide the header, and the dict values contain (value,fmt) where value
    # is magnitude, latitude, etc. and fmt is the formatting string
    # print the header
    tmpevent = getNewEvent(eventlist[0], maxmags)
    print(','.join(tmpevent.keys()))
    # get the formatting string for each column
    fnuggets = [v[1] for v in tmpevent.values()]
    fmt = ','.join(fnuggets)
    for event in eventlist:
        if args.limitType is not None and event['type'][0].lower() != args.limitType:
            continue
        event['time'][0] = event['time'][0].strftime(TIMEFMT)[0:-3]
        newevent = getNewEvent(event, maxmags)
        tpl = tuple(v[0] for v in newevent.values())
        try:
            print(fmt % tpl)
        except Exception:  # was a bare except: don't trap SystemExit/KeyboardInterrupt
            sys.stderr.write('Could not write event %s\n' % event['id'])
            # dump (format, value) pairs to help diagnose the bad column
            for i in range(len(fnuggets)):
                print('%s %s' % (fnuggets[i], tpl[i]))
            break
# Row counters used to decide when to emit the CSV header.
# NOTE(review): neither counter is incremented in this chunk - presumably
# they are updated further down in the script; confirm against the full file.
line_no_sh = 0
line_no_dp = 0
with open('boundaries.csv') as boundsfile:
    # Each line of boundaries.csv is a tuple literal describing a bounding box.
    for line in boundsfile:
        bounds = make_tuple(line)
        #to follow along with the progress of data querying
        print bounds
        #use getEventData from comcat.py to search for data of interest
        #define mags for shallow search to get moment tensor info
        #getComponents=True for MT (i.e. mrt) where available
        # NOTE(review): start, finish, depthrange_sh and magrange are defined
        # elsewhere in this script - not visible in this chunk.
        shallowlist, magmax1 = getEventData(bounds=bounds, starttime=start,
                                            endtime=finish,
                                            depthrange=depthrange_sh,
                                            magrange=magrange,
                                            getComponents=True)
        #add today's date to filename
        filename1part1 = 'cc_shallowquakes_'
        filename1part2 = datetime.now().strftime("%Y-%m-%d")
        filename1part3 = '_.csv'
        myfilename1 = filename1part1 + filename1part2 + filename1part3
        #'a' writes and appends to file if it already exists
        myfile1 = open(myfilename1, 'a')
        writer = csv.writer(myfile1)
        #labels rows with useful headers but only do this once
        if line_no_sh == 0:
            writer.writerow(('id_no', 'time', 'lat', 'lon', 'depth', 'mag',
                             'event_type', 'mrr', 'mtt', 'mpp', 'mrt', 'mrp',
                             'mtp', 'type', 'moment_lat', 'moment_lon',
                             'moment_depth', 'moment_duration'))
help='Source contributor (who loaded product) (us, nc, etc.)')
# NOTE(review): the line above is the tail of a parser.add_argument(...) call
# whose opening is not visible in this chunk.
parser.add_argument('-o', '--get-moment-components',
                    dest='getComponents', action='store_true',
                    help='Also extract moment-tensor components where available.')
parser.add_argument('-a', '--get-focal-angles',
                    dest='getAngles', action='store_true',
                    help='Also extract focal-mechanism angles (strike,dip,rake) where available.')
parser.add_argument('-t', '--get-moment-type',
                    dest='getType', action='store_true',
                    help='Also extract moment type (Mww,Mwc, etc.) where available')
parser.add_argument('-f', '--format',
                    dest='format', choices=['csv', 'tab'], default='csv',
                    help='Output format')
parser.add_argument('-v', '--verbose',
                    dest='verbose', action='store_true',
                    help='Print progress')
args = parser.parse_args()
# Fetch every matching event in one request (no segmenting in this version).
eventlist = getEventData(bounds=args.bounds, radius=args.radius,
                         starttime=args.startTime, endtime=args.endTime,
                         magrange=args.magRange, catalog=args.catalog,
                         contributor=args.contributor,
                         getComponents=args.getComponents,
                         getAngles=args.getAngles,
                         getType=args.getType, verbose=args.verbose)
if not len(eventlist):
    sys.stderr.write('No events found. Exiting.\n')
    sys.exit(0)
# Header and per-row format are derived from the keys of the first event.
fmt = getFormatString(args.format, eventlist[0].keys())
print getHeader(args.format, eventlist[0].keys())
for event in eventlist:
    tpl = getFormatTuple(event)
    try:
        print fmt % tpl
    except:
        # NOTE(review): bare except silently drops rows that fail to format.
        pass
def main(args):
    """Relocate an event with BayesLoc using nearby events from ComCat.

    Builds station/arrival/prior input files in a per-event folder, runs the
    bayesloc binary, stores the relocated origins back in the sqlite database
    and prints a summary of the relocation. Exits non-zero on any setup or
    relocation failure.
    """
    eventid = args.id
    radius = args.radius
    # does the bayesloc folder exist?
    if not os.path.isdir(BAYESDIR):
        print(FOLDER_ERROR)
        sys.exit(1)
    bayesbin = os.path.join(BAYESDIR, 'bin', BAYESBIN)
    ttimes = glob.glob(os.path.join(BAYESDIR, 'ttimes', 'ak135.*'))
    if not os.path.isfile(bayesbin):
        print(FOLDER_ERROR)
        sys.exit(1)
    if not len(ttimes):
        print(FOLDER_ERROR)
        sys.exit(1)
    # does the database exist - if not, create it
    bayesdb = os.path.join(BAYESDIR, BAYESDB)
    createdb = not os.path.isfile(bayesdb)
    db = sqlite3.connect(bayesdb)
    cursor = db.cursor()
    if createdb:
        createTables(db, cursor)
    # delete selected list of events
    if args.delete:
        nevents = deleteEvents(db, cursor, args.delete)
        print('%i events deleted from the database.' % nevents)
        sys.exit(0)
    # return some stats about the current database
    if args.stats:
        nevents, nstations, narrivals = getStats(cursor)
        print('Your database contains information about:')
        print('\t%i events' % nevents)
        print('\t%i stations' % nstations)
        print('\t%i picks' % narrivals)
        sys.exit(0)
    eventinfo = getPhaseData(eventid=eventid)
    if not len(eventinfo):
        # BUG FIX: eventid was never interpolated into this message
        print('Could not find event %s in ComCat. Returning.' % eventid)
        sys.exit(1)
    # get the information about the input event
    eventinfo = eventinfo[0]
    eventlat = eventinfo.origins[0]['lat']
    eventlon = eventinfo.origins[0]['lon']
    eventtime = eventinfo.origins[0]['time']
    if eventtime < args.begindate or eventtime > args.enddate:
        fmt = 'Event %s (%s) is outside the time bounds you specified. %s to %s. Exiting.'
        print(fmt % (eventinfo.eventcode, eventtime, args.begindate, args.enddate))
        sys.exit(1)
    eventfolder = os.path.join(BAYESDIR, 'events', eventid)
    if not os.path.isdir(eventfolder):
        os.makedirs(eventfolder)
    eventlist = getEventData(radius=(eventlat, eventlon, 0, radius),
                             starttime=args.begindate,
                             endtime=args.enddate, catalog='us')
    if args.count:
        fmt = 'There are %i events inside %.1f km radius around event %s (%.4f,%.4f)'
        print(fmt % (len(eventlist), radius, eventid, eventlat, eventlon))
        sys.exit(0)
    # check to see if event has already been located - if so, stop, unless we're being forced
    if not args.force:
        # parameterized query instead of string-built SQL
        cursor.execute('SELECT id,code,rlat,rlon,rdepth,rtime FROM event WHERE code=?',
                       (eventid,))
        row = cursor.fetchone()
        if row is not None and row[2] is not None:
            print('Event %s is already in the database. Stopping.' % eventid)
            sys.exit(0)
    priors = getEventPriors(eventlist, cursor)
    stations, arrivals, newevents = getProcessedData(eventlist, db, cursor, ndays=NWEEKS * 7)
    missing_stations = []
    for event in newevents:
        phasedata = getPhaseData(eventid=event)
        if phasedata is None:
            continue
        if not len(phasedata[0].magnitudes):
            continue
        newstations, newarrivals, ms = insertPhaseData(phasedata[0], db, cursor)
        # py2/py3-safe merge (was dict(stations.items() + newstations.items()))
        stations.update(newstations)
        arrivals += newarrivals
        missing_stations += ms
    print('After searching online:')
    print('In database: %i stations, %i arrivals. %i missing stations.' %
          (len(stations), len(arrivals), len(missing_stations)))
    # write the bayesloc input files (with-blocks guarantee the files close)
    stationfile = os.path.join(eventfolder, 'station.dat')
    with open(stationfile, 'wt') as f:
        f.write('sta_id lat lon elev\n')
        for stationcode, stationvals in stations.items():
            slat, slon, elev = stationvals
            f.write('%s %.4f %.4f %.3f\n' % (stationcode, slat, slon, elev))
    arrivalfile = os.path.join(eventfolder, 'arrival.dat')
    with open(arrivalfile, 'wt') as f:
        f.write('ev_id sta_id phase time\n')
        for arrival in arrivals:
            eid, scode, phase, time = arrival
            f.write('%i %s %s %.3f\n' % (eid, scode, phase, time))
    priorfile = os.path.join(eventfolder, 'prior.dat')
    with open(priorfile, 'wt') as f:
        f.write('ev_id lat_mean lon_mean dist_sd depth_mean depth_sd time_mean time_sd\n')
        for prior in priors:
            evid, plat, plon, pdepth, ptime = prior
            f.write('%i %.4f %.4f 0.0 %.1f 0.0 %.3f 0.0\n' % (evid, plat, plon, pdepth, ptime))
    # write the config file
    configfile = os.path.join(eventfolder, 'bayesloc.cfg')
    config = CONFIG.replace('BAYESLOC', BAYESLOC).replace('EVENTFOLDER', eventfolder)
    with open(configfile, 'wt') as fcfg:
        fcfg.write(config)
    # run the BayesLoc program from inside the event folder
    cwd = os.getcwd()
    os.chdir(eventfolder)
    bayesbin = os.path.join(BAYESLOC, 'bin', 'bayesloc')
    cmd = '%s %s' % (bayesbin, configfile)
    print('Running command %s...' % cmd)
    t1 = datetime.now()
    res, stdout, stderr = getCommandOutput(cmd)
    t2 = datetime.now()
    if not res:
        print('BayesLoc command "%s" failed. \n%s\n%s.' % (cmd, stdout, stderr))
        sys.exit(1)
    # total_seconds() is correct even for runs longer than a day (.seconds is not)
    dt = (t2 - t1).total_seconds() / 60.0
    print('BayesLoc command was successful - took %.1f minutes.' % dt)
    os.chdir(cwd)
    # harvest the relocated origins and store them back in the database
    resultfile = os.path.join(eventfolder, 'output', 'origins_ned_stats.out')
    eventlist = []
    fieldlist = ['lat', 'lon', 'depth', 'time', 'rlat', 'rlon', 'rdepth', 'rtime',
                 'mag', 'nevents']
    nevents = len(priors) + len(newevents)
    with open(resultfile, 'rt') as f:
        f.readline()  # skip the header line
        for line in f.readlines():
            parts = line.split()
            eid = int(parts[0])
            lat = float(parts[1])
            lon = float(parts[2])
            depth = float(parts[3])
            time = UTCDateTime(float(parts[4])).datetime
            efmt = ('UPDATE event set rlat=%.4f,rlon=%.4f,rdepth=%.1f,'
                    'rtime="%s",nevents=%i WHERE id=%i')
            cursor.execute(efmt % (lat, lon, depth, time, nevents, eid))
            db.commit()
            cursor.execute('SELECT %s FROM event WHERE id=%i' % (','.join(fieldlist), eid))
            row = cursor.fetchone()
            eventlist.append(dict(zip(fieldlist, row)))
    # make a map of all the relocated events
    fname = makeMap(eventlist, eventlat, eventlon, eventfolder)
    print('Relocated events: %s' % fname)
    # tell the user what happened with the relocation
    cursor.execute('SELECT lat,lon,depth,time,rlat,rlon,rdepth,rtime,nevents '
                   'FROM event WHERE code=?', (eventid,))
    row = cursor.fetchone()
    lat, lon, depth, time, rlat, rlon, rdepth, rtime, nevents = row
    time = UTCDateTime(time).datetime
    rtime = UTCDateTime(rtime).datetime
    if rtime >= time:
        dt = (rtime - time).seconds + ((rtime - time).microseconds) / float(1e6)
    else:
        dt = (time - rtime).seconds + ((time - rtime).microseconds) / float(1e6)
    dd, az1, az2 = gps2DistAzimuth(lat, lon, rlat, rlon)
    dd /= 1000.0  # meters -> km
    print('Event %s was relocated using %i events.' % (eventid, nevents))
    print('Starting: %s (%.4f,%.4f) %.1f km' %
          (time.strftime('%Y-%m-%d %H:%M:%S'), lat, lon, depth))
    print('Relocated: %s (%.4f,%.4f) %.1f km' %
          (rtime.strftime('%Y-%m-%d %H:%M:%S'), rlat, rlon, rdepth))
    print('%.1f km (%.1f degrees), %.1f seconds' % (dd, az1, dt))
    cursor.close()
    db.close()