def get_start_end_timestamps(year, month):
    """Generate start and end time unix timestamps for dbsubsets.

    Args:
        year (int): ending year in integer format
        month (int): ending month in integer format

    Returns:
        tuple (int, int): tuple containing the unix start time and end time
    """
    logger = logging.getLogger(__name__)
    logger.debug("month:%s", month)
    logger.debug("year:%s", year)

    month = int(month)
    year = int(year)

    next_year = year
    next_month = month + 1
    if next_month > 12:
        next_month = 1
        next_year = next_year + 1

    logger.debug("next_month: %s", next_month)
    logger.debug("next_year: %s", next_year)

    start_time = stock.str2epoch("%02d/01/%4d 00:00:00" % (month, year))
    end_time = stock.str2epoch("%02d/01/%4d 00:00:00" % (next_month, next_year))

    logger.info("START:%s => %s", start_time, stock.strdate(start_time))
    logger.info("END:%s => %s", end_time, stock.strdate(end_time))

    return start_time, end_time
def month_resolver(self, start_mth, start_yr, end_mth, end_yr, dbname_template):
    """Month list."""
    if self.include_times:
        months = {}
    else:
        months = []

    vol_month = start_mth
    vol_year = start_yr

    while vol_year < end_yr or (vol_year == end_yr and vol_month <= end_mth):
        voltime = antstock.str2epoch("%d/1/%d" % (vol_month, vol_year))

        if vol_month < 12:
            vol_month += 1
        else:
            vol_month = 1
            vol_year += 1

        volendtime = antstock.str2epoch("%d/1/%d" % (vol_month, vol_year)) - 1
        dbname = antstock.epoch2str(int(voltime), dbname_template)

        if self.include_times:
            months[dbname] = (voltime, volendtime)
        else:
            months.append(dbname)

        #if os.path.exists(dbname) and os.path.isfile(dbname):
        #    if self.include_times:
        #        months[dbname] = (voltime, volendtime)
        #    else:
        #        months.append(dbname)
        #else:
        #    antelog.notify("Dbpath '%s' does not exist." % dbname)

    return months
def generate_times(year, month):
    """Generate start and end time unix timestamps for dbsubsets."""
    month = int(month)
    year = int(year)

    # Roll over to January of the following year after December.
    next_month = month % 12 + 1
    next_year = year + 1 if next_month == 1 else year

    start_time = str2epoch('%d-%02d-01 00:00' % (year, month))
    end_time = str2epoch('%d-%02d-01 00:00' % (next_year, next_month))

    return start_time, end_time
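# A minimal usage sketch for generate_times() above, assuming the Antelope
# str2epoch binding used by it is importable; the example dates are
# illustrative only and not part of the original code.
def _demo_generate_times():
    # December rolls over to January of the following year.
    dec_start, dec_end = generate_times(2023, 12)   # 2023-12-01 .. 2024-01-01
    nov_start, nov_end = generate_times(2023, 11)   # 2023-11-01 .. 2023-12-01
    # Consecutive windows share a boundary, so each covers one calendar month.
    assert nov_end == dec_start
    return dec_start, dec_end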
def _get_chanperf(self):
    self.logging.debug("_get_chanperf()")

    today = stock.str2epoch(str(stock.yearday(stock.now())))
    lastmonth = today - (86400 * int(self.perf_days_back))

    month = {}
    week = {}

    fields = ["snet", "sta", "chan", "time", "perf"]

    steps = ["dbopen chanperf", "dbjoin -o snetsta", "dbsubset time >= %s" % lastmonth]

    if self.perf_subset:
        steps.append("dbsubset %s" % self.perf_subset)

    for v in extract_from_db(self.perf_db, steps, fields, self.db_subset):
        snet = v.pop("snet")
        sta = v.pop("sta")
        chan = v.pop("chan")
        fullname = "%s.%s.%s" % (snet, sta, chan)

        self.logging.debug("_get_chanperf(%s_%s)" % (snet, sta))

        if self._verify_cache(snet, sta, "chanperf"):
            try:
                if len(self.cache[snet][sta]["chanperf"][chan]) < 1:
                    raise
            except:
                self.cache[snet][sta]["chanperf"][chan] = {}

            # v['time'] = readable_time( v['time'], '%Y-%m-%d' )
            v["time"] = int(v["time"])

            self.cache[snet][sta]["chanperf"][chan][v["time"]] = v["perf"]
def _get_chanperf(self):
    self.logger.debug("_get_chanperf()")

    today = stock.str2epoch(str(stock.yearday(stock.now())))
    lastmonth = today - (86400 * int(self.perf_days_back))

    fields = ["snet", "sta", "chan", "time", "perf"]

    steps = [
        "dbopen chanperf",
        "dbjoin -o snetsta",
        "dbsubset time >= %s" % lastmonth,
    ]

    if self.perf_subset:
        steps.append("dbsubset %s" % self.perf_subset)

    for v in extract_from_db(self.perf_db, steps, fields, self.db_subset):
        snet = v.pop("snet")
        sta = v.pop("sta")
        chan = v.pop("chan")

        self.logger.debug("_get_chanperf(%s_%s)" % (snet, sta))

        if self._verify_cache(snet, sta, "chanperf"):
            try:
                if len(self.cache[snet][sta]["chanperf"][chan]) < 1:
                    raise
            except Exception:
                self.cache[snet][sta]["chanperf"][chan] = {}

            # v['time'] = readable_time( v['time'], '%Y-%m-%d' )
            v["time"] = int(v["time"])

            self.cache[snet][sta]["chanperf"][chan][v["time"]] = v["perf"]
def _get_chanperf(self):
    self.logging.debug("_get_chanperf()")

    today = stock.str2epoch(str(stock.yearday(stock.now())))
    lastmonth = today - (86400 * int(self.perf_days_back))

    month = {}
    week = {}

    fields = ['snet', 'sta', 'chan', 'time', 'perf']

    steps = ['dbopen chanperf', 'dbjoin -o snetsta', 'dbsubset time >= %s' % lastmonth]

    if self.perf_subset:
        steps.append('dbsubset %s' % self.perf_subset)

    for v in extract_from_db(self.perf_db, steps, fields, self.db_subset):
        snet = v.pop('snet')
        sta = v.pop('sta')
        chan = v.pop('chan')
        fullname = "%s.%s.%s" % (snet, sta, chan)

        self.logging.debug("_get_chanperf(%s_%s)" % (snet, sta))

        if self._verify_cache(snet, sta, 'chanperf'):
            try:
                if len(self.cache[snet][sta]['chanperf'][chan]) < 1:
                    raise
            except:
                self.cache[snet][sta]['chanperf'][chan] = {}

            #v['time'] = readable_time( v['time'], '%Y-%m-%d' )
            v['time'] = int(v['time'])

            self.cache[snet][sta]['chanperf'][chan][v['time']] = v['perf']
def __init__(self, evid, yr, mon, day, hr, mins, sec, lon, lat, dep, mag):
    self.id = evid      #event id
    self.year = yr
    self.month = mon
    self.day = day      #event time
    self.hour = hr
    self.min = mins
    self.sec = sec      #more event time
    self.lon = lon
    self.lat = lat
    self.depth = dep    #event location
    self.mag = mag      #event magnitude

    #Calculate epoch time
    str_date = str(mon) + '/' + str(day) + '/' + str(yr) + ' ' + \
        str(hr) + ':' + str(mins) + ':' + str(sec)
    self.epoch = str2epoch(str_date)    #time in seconds after 1/1/1970

    self.arrivals = list()    #This will be a list of phase arrivals
def year_resolver(self, start_yr, end_yr, dbname_template):
    """Year list."""
    if self.include_times:
        years = {}
    else:
        years = []

    for y in range(start_yr, end_yr + 1):
        voltime = antstock.str2epoch("1/1/%s 00:00:00" % y)
        volendtime = antstock.str2epoch("12/31/%s 23:59:59" % y)
        dbname = antstock.epoch2str(voltime, dbname_template)

        if self.include_times:
            years[dbname] = (voltime, volendtime)
        else:
            years.append(dbname)

        #if os.path.exists(dbname) and os.path.isfile(dbname):
        #    if self.include_times:
        #        years[dbname] = (voltime, volendtime)
        #    else:
        #        years.append(dbname)
        #else:
        #    antelog.notify("Dbpath '%s' does not exist." % dbname)

    return years
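# Illustrative sketch only (not part of the original module): one way a caller
# might consume the dict form returned by year_resolver()/month_resolver()
# when include_times is set. The resolver instance and the dbname template
# below are assumptions for the example.
def _print_volume_windows(resolver, dbname_template="archive_%Y_%m"):
    # Map each monthly volume name to its (start, end) epoch window.
    resolver.include_times = True
    volumes = resolver.month_resolver(1, 2022, 3, 2022, dbname_template)
    for dbname, (t0, t1) in volumes.items():
        print(dbname, t0, t1)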
def __init__(self, evid, yr, mon, day, hr, mins, sec, lon, lat, dep, mag):
    self.id = evid      #event id
    self.year = yr
    self.month = mon
    self.day = day      #event time
    self.hour = hr
    self.min = mins
    self.sec = sec      #more event time
    self.lon = lon
    self.lat = lat
    self.depth = dep    #event location
    self.mag = mag      #event magnitude

    #Calculate epoch time
    str_date = str(mon) + '/' + str(day) + '/' + str(yr) + ' ' + str(
        hr) + ':' + str(mins) + ':' + str(sec)
    self.epoch = str2epoch(str_date)    #time in seconds after 1/1/1970

    self.arrivals = list()    #This will be a list of phase arrivals
def main():
    ########################
    # Get information from execution flags
    ########################
    (input, pfname) = configure()

    ########################
    # Get information from parameter file
    ########################
    (tstep, tpad, filter, sta, chan, animation_params, gmt_params) = get_pf(pfname)

    ########################
    # Set start and end time for animation
    ########################
    tstart = float(stock.str2epoch(input.ts))
    if input.ts == "now":
        tstart = tstart - (tstart % (input.twin)) - input.twin
    tend = tstart + float(input.twin)

    ########################
    # Open waveform database
    ########################
    if not input.dbname:
        f = '%Y_%m_%d'
        date = stock.epoch2str(tstart, f)
        db = ds.dbopen("/aerun/op/run/db/archive_%s" % date, "r")
    else:
        db = ds.dbopen(input.dbname, "r")

    ########################
    # Setup format for creating timestamp on GMT plots
    ########################
    f = '%D %H:%M:%S %Z'
    timestamp = stock.epoch2str(tstart, f, "")
    ## For state-wide
    pstextstring = "-153 53 10 0 1 0 %s" % timestamp
    ## For Anchorage
    #pstextstring = "-150 53 10 0 1 0 %s" % timestamp

    current_time = tstart
    while current_time < tend:
        ########################
        # Get waveform data, compute PGA, and make GMT plot
        ########################
        run_gmt(sta, chan, db, current_time, filter, tstep, tpad, gmt_params, input.verbose)

        ########################
        # Make a timestamp every 10 seconds on the GMT plot
        ########################
        if (current_time % 10 == 0):
            timestamp = stock.epoch2str(current_time, f, "")
            ## For state-wide
            pstextstring = "-160 51 10 0 1 0 %s" % timestamp
            # For Anchorage
            #pstextstring = "-150.1 61.04 10 0 1 0 %s" % timestamp
            print("Processing time %s" % timestamp)
            #os.system("echo %s | pstext -R -J -Gblack -P -O -K >> ~/Development/scratch/realtime_gmv/regions_%s.ps" % (pstextstring, current_time))
            # 3-d
            os.system("echo %s | pstext -R -J -E200/40 -Gblack -P -O -K >> ~/Development/scratch/realtime_gmv/regions_%s.ps" % (pstextstring, current_time))
        else:
            #os.system("echo %s | pstext -R -J -Gblack -P -O -K >> ~/Development/scratch/realtime_gmv/regions_%s.ps" % (pstextstring, current_time))
            # 3-d
            os.system("echo %s | pstext -R -J -E200/40 -Gblack -P -O -K >> ~/Development/scratch/realtime_gmv/regions_%s.ps" % (pstextstring, current_time))

        ########################
        # Plot scalebar
        ########################
        ## For state-wide
        #os.system("psscale -D4.1i/0.9i/1.0i/.20i -C%s -B:\"PGA (ug)\": -L0.0 -P -O >> ~/Development/scratch/realtime_gmv/regions_%s.ps" % (gmt_params['cptfile'], current_time))
        ## For Anchorage
        #os.system("psxy /Users/mgardine/Development/GMT/alaska_roads.xy -J -R -m -W2,darkgrey -K -O >> ~/Development/scratch/realtime_gmv/regions_%s.ps" % current_time)
        #os.system("psscale -D3.8i/0.7i/1.0i/.20i -C%s -B:\"PGA (mg)\": -L0.0 -P -O >> ~/Development/scratch/realtime_gmv/regions_%s.ps" % (gmt_params['cptfile'], current_time))

        current_time += tstep

    ########################
    # Make animated .GIF, remove temporary GMT plots
    ########################
    print("Making animation")
    ## For state-wide
    os.system("/opt/local/bin/gm convert -density 200% -delay 10 -loop 0 -resize 1280x1280 +map ~/Development/scratch/realtime_gmv/regions*.ps ~/Development/scratch/realtime_gmv/animation.gif")
    os.system("convert ~/Development/scratch/realtime_gmv/animation.gif -layers OptimizeTransparency -crop 720x720+163+300 +repage ~/Development/scratch/realtime_gmv/animation1.gif")
    ## For Anchorage
    #os.system("/opt/local/bin/gm convert -density 200% -delay 10 -loop 0 -resize 1280x1280 +map ~/Development/scratch/realtime_gmv/regions*.ps ~/Development/scratch/realtime_gmv/animation.gif")
    #os.system("convert ~/Development/scratch/realtime_gmv/animation.gif -layers OptimizeTransparency -crop 720x580+163+245 +repage ~/Development/scratch/realtime_gmv/animation1.gif")
    #os.system("rm -f ~/Development/scratch/realtime_gmv/regions_*.ps ~/Development/scratch/realtime_gmv/animation.gif")
    #os.system("mv ~/Development/scratch/realtime_gmv/animation1.gif %s/animation_rt.gif" % animation_params['webdir'])
    #os.system("ffmpeg -i ~/Development/scratch/realtime_gmv/animation1.gif -c:v libvpx -crf 12 ~/Development/scratch/realtime_gmv/animation.webm")

    db.close()
def comp(self, arg):
    """Run cross-correlation and select rotation azimuth."""
    # Load main database
    try:
        self.db = datascope.dbopen(self.databasename, "r+")
    except Exception as e:
        self.logger.error("Problems opening database: %s %s" % (self.db, e))

    # If origin mode, get origin data
    if self.origin:
        event_data = Origin(self.db, arg)
        time = event_data.time
        orid = event_data.orid
    else:
        time = arg
        if isinstance(time, str):
            time = str2epoch(time)
        event_data = None
        orid = None

    # Grab station info from select list that are active during this time
    site_table = Site(self.db)
    site_table.get_stations(self.ref_regex, time, event_data=event_data)
    try:
        reference = list(site_table.stations.keys())[0]
    except IndexError:
        self.logger.error("No stations found in site table.")
        return None

    stations = site_table.get_stations(self.comp_regex, time, reference, event_data)
    ref_chan = stations[reference].chans[0]

    options = types.SimpleNamespace()

    # Get station parameters based on station-event distance
    if stations[reference].delta:
        delta = stations[reference].delta
        if delta >= 0 and delta < 5:
            distance = 5
        elif delta >= 5 and delta < 20:
            distance = 20
        elif delta >= 20 and delta < 50:
            distance = 50
        elif delta >= 50 and delta < 100:
            distance = 100
        elif delta >= 100 and delta < 180:
            distance = 180
        self._parse_sta_params(distance, options)

    # Initiate waveform data class
    data = Waveforms(self.db)
    results = {}

    if stations[reference].ptime:
        start = stations[reference].ptime - 2
    else:
        start = time

    # Get reference sensor data
    ref_tr = data.get_waveforms(
        sta=reference,
        chan=ref_chan,
        start=start,
        tw=self.tw,
        bw_filter=self.filter,
        debug_plot=self.debug_plot,
    )
    if ref_tr:
        data.set_ref_data(reference, ref_tr)
    else:
        self.logger.notify("No data for reference station %s available" % reference)

    for sta in stations:
        results[sta] = {}

        # for each comparison, get trace
        for chan in stations[sta].chans:
            if chan == ref_chan and sta == reference:
                pass
            else:
                tr = data.get_waveforms(
                    sta=sta,
                    chan=chan,
                    start=start,
                    tw=self.tw,
                    bw_filter=self.filter,
                    debug_plot=self.debug_plot,
                )
                if tr:
                    if event_data:
                        diff_esaz = stations[sta].esaz - stations[reference].esaz

                        # Do not run if station-station azimuth is too far
                        if diff_esaz > 45 and diff_esaz < 315:
                            self.logger.info(
                                "Event-station azimuth difference %s > 45 degrees."
                                " Station %s thrown out." % (diff_esaz, sta))
                            if not self.nosave:
                                save_results(
                                    ref_sta=self.reference,
                                    ref_chan=stations[reference].chans[0],
                                    sta=sta,
                                    chan=chan,
                                    result_dir=self.result_dir,
                                    ref_esaz=stations[reference].esaz,
                                    ssaz=stations[sta].ssaz,
                                    distance=stations[sta].ssdistance,
                                    esaz=stations[sta].esaz,
                                    azimuth1="NULL",
                                    azimuth2="NULL",
                                )
                        # Otherwise, calculate rotation azimuth
                        else:
                            results[sta][chan] = data.get_azimuth(sta, tr)
                            if not self.nosave:
                                save_results(
                                    ref_sta=reference,
                                    ref_chan=stations[reference].chans[0],
                                    sta=sta,
                                    chan=chan,
                                    result_dir=self.result_dir,
                                    ref_esaz=stations[reference].esaz,
                                    ssaz=stations[sta].ssaz,
                                    distance=stations[sta].ssdistance,
                                    esaz=stations[sta].esaz,
                                    azimuth1=results[sta][chan]["T"].azimuth,
                                    azimuth2=results[sta][chan]["R"].azimuth,
                                )
                    else:
                        results[sta][chan] = data.get_azimuth(sta, tr)

        if len(results[sta]) > 0 and not self.noplot:
            Plot(
                width=16,
                height=6,
                result=results[sta],
                reference=data.ref_data,
                ref_sta=reference,
                ref_chan=ref_chan,
                sta=sta,
                start=start,
                end=start + self.tw,
                result_dir=self.result_dir,
                debug_plot=self.debug_plot,
                orid=orid,
            )

    return results
"Stations(): No records after sitechan sort.") sys.exit(reactor.stop()) prog=ProgressLogger("Stations: processing stachan record ", records, logger=self.logger) for j in range(records): prog.tick() ssc.record = j try: sta, chan, ondate, offdate = ssc.getv( 'sta', 'chan', 'ondate', 'offdate') except Exception, e: self.logger.exception('Station(): (%s=>%s)' % (Exception,e)) ondate = stock.str2epoch(str(ondate)) if chan in self.wfdisc_stachan[sta]: if offdate != -1: offdate = stock.str2epoch(str(offdate)) self.stachan_cache[sta][chan]['dates'].extend([[ondate,offdate]]) self.logger.debug("Station(): %s.%s dates: %s" % ( sta,chan,self.stachan_cache[sta][chan]['dates'])) else: self.logger.debug( 'Station(): %s.%s was not in the wfdisc. Skipping' % ( sta, chan) ) try: ssc.free()
    endtime = stock.now()

self.logger.debug("endtime=%s" % endtime)

if self.volumes == 'single':
    self._test_db(voltime, volendtime, dbname)

elif self.volumes == 'year':
    start_year = int(stock.epoch2str(time, "%Y"))
    end_year = int(stock.epoch2str(endtime, "%Y"))

    for y in range(start_year, end_year + 1):
        voltime = stock.str2epoch("1/1/%s 00:00:00" % y)
        volendtime = stock.str2epoch("12/31/%s 23:59:59" % y)
        dbname = stock.epoch2str(voltime, dbname_template)
        self._test_db(voltime, volendtime, dbname)

elif self.volumes == 'month':
    start_month = int(stock.epoch2str(time, "%L"))
    start_year = int(stock.epoch2str(time, "%Y"))
    end_month = int(stock.epoch2str(endtime, "%L"))
    end_year = int(stock.epoch2str(endtime, "%Y"))

    vol_month = start_month
    vol_year = start_year
prog = ProgressLogger("Stations: processing stachan record ",
                      records, logger=self.logger)
for j in range(records):
    prog.tick()
    ssc.record = j

    try:
        sta, chan, ondate, offdate = ssc.getv(
            'sta', 'chan', 'ondate', 'offdate')
    except Exception as e:
        self.logger.exception('Station(): (%s=>%s)' % (Exception, e))

    ondate = stock.str2epoch(str(ondate))

    if chan in self.wfdisc_stachan[sta]:
        if offdate != -1:
            offdate = stock.str2epoch(str(offdate))

        self.stachan_cache[sta][chan]['dates'].extend(
            [[ondate, offdate]])
        self.logger.debug(
            "Station(): %s.%s dates: %s"
            % (sta, chan, self.stachan_cache[sta][chan]['dates']))
    else:
        self.logger.debug(
            'Station(): %s.%s was not in the wfdisc. Skipping' % (sta, chan))

try:
def main():
    ##########
    # Extract station list from master_stations
    ##########
    stations = []
    with ds.closing(ds.dbopen("/aerun/sum/db/dbsum/dbsum", "r")) as db:
        steps = ["dbopen affiliation",
                 "dbjoin site",
                 "dbsubset net=~/AK/ && offdate==NULL",
                 "dbsort sta"]
        with ds.freeing(db.process(steps)) as dbview:
            for record in dbview.iter_record():
                stations.append(record.getv('sta')[0])

    ##########
    # Extract waveform data into trace objects
    ##########
    twin = 600
    tcurrent = float(stock.str2epoch('now'))
    tend = tcurrent - 60
    tstart = tend - twin

    f = '%Y_%m_%d'
    date = stock.epoch2str(tstart, f)

    with ds.closing(ds.dbopen("/aerun/op/run/db/archive_%s" % date, "r")) as db:
        for sta in stations:
            data = {}
            samplerate = {}
            dbsta = db.lookup(table='wfdisc')
            dbsta = dbsta.subset('sta=~/%s/' % sta)

            bband = dbsta.subset('chan=~/BH./')

            #######
            # Extract Broadband seismic data
            #######
            if bband.query('dbRECORD_COUNT') > 0:
                # print(tstart, tcurrent, "%s" % sta, "BHE")
                tr_bhe = dbsta.trloadchan(tstart, tend, "%s" % sta, "BHE")
                tr_bhe.trapply_calib()
                with ds.trfreeing(tr_bhe):
                    if tr_bhe.query("dbRECORD_COUNT") > 0:
                        tr_bhe.record = 0
                        data['BHE'] = tr_bhe.trdata()
                        samplerate['BHE'] = tr_bhe.getv("samprate")

                tr_bhn = dbsta.trloadchan(tstart, tend, "%s" % sta, "BHN")
                tr_bhn.trapply_calib()
                with ds.trfreeing(tr_bhn):
                    if tr_bhn.query("dbRECORD_COUNT") > 0:
                        tr_bhn.record = 0
                        data['BHN'] = tr_bhn.trdata()
                        samplerate['BHN'] = tr_bhn.getv("samprate")

                tr_bhz = dbsta.trloadchan(tstart, tend, "%s" % sta, "BHZ")
                tr_bhz.trapply_calib()
                with ds.trfreeing(tr_bhz):
                    if tr_bhz.query("dbRECORD_COUNT") > 0:
                        tr_bhz.record = 0
                        data['BHZ'] = tr_bhz.trdata()
                        samplerate['BHZ'] = tr_bhz.getv("samprate")

            #######
            # Extract moderate sample rate strong motion data
            #######
            smot_b = dbsta.subset('chan=~/BN./')
            if smot_b.query('dbRECORD_COUNT') > 0:
                tr_bne = dbsta.trloadchan(tstart, tend, "%s" % sta, "BNE")
                tr_bne.trapply_calib()
                with ds.trfreeing(tr_bne):
                    if tr_bne.query("dbRECORD_COUNT") > 0:
                        tr_bne.record = 0
                        data['BNE'] = tr_bne.trdata()
                        samplerate['BNE'] = tr_bne.getv("samprate")

                tr_bnn = dbsta.trloadchan(tstart, tend, "%s" % sta, "BNN")
                tr_bnn.trapply_calib()
                with ds.trfreeing(tr_bnn):
                    if tr_bnn.query("dbRECORD_COUNT") > 0:
                        tr_bnn.record = 0
                        data['BNN'] = tr_bnn.trdata()
                        samplerate['BNN'] = tr_bnn.getv("samprate")

                tr_bnz = dbsta.trloadchan(tstart, tend, "%s" % sta, "BNZ")
                tr_bnz.trapply_calib()
                with ds.trfreeing(tr_bnz):
                    if tr_bnz.query("dbRECORD_COUNT") > 0:
                        tr_bnz.record = 0
                        data['BNZ'] = tr_bnz.trdata()
                        samplerate['BNZ'] = tr_bnz.getv("samprate")

            #######
            # Extract high sample rate strong motion data
            #######
            smot_h = dbsta.subset('chan=~/HN./')
            if smot_h.query('dbRECORD_COUNT') > 0:
                tr_hne = dbsta.trloadchan(tstart, tend, "%s" % sta, "HNE")
                tr_hne.trapply_calib()
                with ds.trfreeing(tr_hne):
                    if tr_hne.query("dbRECORD_COUNT") > 0:
                        tr_hne.record = 0
                        data['HNE'] = tr_hne.trdata()
                        samplerate['HNE'] = tr_hne.getv("samprate")

                tr_hnn = dbsta.trloadchan(tstart, tend, "%s" % sta, "HNN")
                tr_hnn.trapply_calib()
                with ds.trfreeing(tr_hnn):
                    if tr_hnn.query("dbRECORD_COUNT") > 0:
                        tr_hnn.record = 0
                        data['HNN'] = tr_hnn.trdata()
                        samplerate['HNN'] = tr_hnn.getv("samprate")

                tr_hnz = dbsta.trloadchan(tstart, tend, "%s" % sta, "HNZ")
                tr_hnz.trapply_calib()
                with ds.trfreeing(tr_hnz):
                    if tr_hnz.query("dbRECORD_COUNT") > 0:
                        tr_hnz.record = 0
                        data['HNZ'] = tr_hnz.trdata()
                        samplerate['HNZ'] = tr_hnz.getv("samprate")

            # if sta == "MLY":
            #     plot_traces(sta, data, tstart, tend, samplerate)

            # shortperz = sbsta.subset('chan=~/EHZ/')
            # if smot.query('dbRECORD_COUNT') > 0:
            #     tr_ehz = dbsta.trloadchan(tstart, tcurrent, "%s" % sta, "EHZ")

            print(sta)
            plot_traces(sta, data, tstart, tend, samplerate)
def _get_list(self):
    try:
        db = datascope.dbopen(self.path, "r")
    except Exception as e:
        raise DbcentralException("Cannot open database %s (%s)" % (self.path, e))

    try:
        db = db.lookup("", "clusters", "", "")
    except datascope.DblookupFieldError:
        self.type = "masquerade"
        self.nickname = None
        self.dbs[self.path] = {"times": [-10000000000.0, 10000000000.0]}
        self.logger.info("Not a dbcentral database. Set single database.")
        return
    else:
        self.type = "dbcentral"
        if self.nickname is None:
            raise ValueError("Need nickname for Dbcentral clustername regex.")

    try:
        db = db.lookup("", "clusters", "", "dbNULL")
        null_time, null_endtime = db.getv("time", "endtime")
    except Exception as e:
        raise DbcentralException(
            "Cannot look up null values in clusters table. (%s)" % e
        )

    expr = "clustername =='%s'" % self.nickname

    try:
        db = db.subset(expr)
    except Exception as e:
        raise DbcentralException("Cannot subset on clustername. %s" % e)

    try:
        db = db.sort("time")
        nclusters = db.record_count
    except Exception as e:
        raise DbcentralException("Cannot sort on 'time' . %s" % e)

    if nclusters < 1:
        raise DbcentralException('No matches for nickname "%s".' % self.nickname)

    self.logger.debug("Records=%s" % nclusters)

    for i in range(nclusters):
        self.logger.debug("db.record=%s" % i)
        db.record = i

        try:
            dbname_template = db.extfile()[-1]
        except Exception as e:
            raise DbcentralException("Cannot run db.extfile(). %s" % e)

        self.logger.debug("dbname_template=%s" % dbname_template)

        try:
            volumes, net, time, endtime = db.getv(
                "volumes", "net", "time", "endtime"
            )
        except Exception as e:
            raise DbcentralException(
                "Problems with db.getv('volumes','net',"
                + "'time','endtime'). (%s)\n" % e
            )

        self.logger.debug("volumes=%s" % volumes)
        self.logger.debug("net=%s" % net)
        self.logger.debug("time=%s" % time)
        self.logger.debug("endtime=%s" % endtime)

        if endtime == null_endtime:
            # This will be problematic with realtime systems
            endtime = stock.now()

        self.logger.debug("endtime=%s" % endtime)

        start_year = int(stock.epoch2str(time, "%Y"))
        end_year = int(stock.epoch2str(endtime, "%Y"))
        start_month = int(stock.epoch2str(time, "%L"))
        end_month = int(stock.epoch2str(endtime, "%L"))

        if volumes == "single":
            dbname = stock.epoch2str(time, dbname_template)
            self._test_db(time, endtime, dbname)

        elif volumes == "year":
            for y in range(start_year, end_year + 1):
                voltime = stock.str2epoch("1/1/%s 00:00:00" % y)
                volendtime = stock.str2epoch("12/31/%s 23:59:59" % y)
                dbname = stock.epoch2str(voltime, dbname_template)
                self._test_db(voltime, volendtime, dbname)

        elif volumes == "month":
            vol_month = start_month
            vol_year = start_year

            while vol_year < end_year or (
                vol_year == end_year and vol_month <= end_month
            ):
                voltime = stock.str2epoch("%d/1/%d" % (vol_month, vol_year))

                if vol_month < 12:
                    vol_month = vol_month + 1
                else:
                    vol_year = vol_year + 1
                    vol_month = 1

                # Each volume ends one second before the start of the next month.
                volendtime = stock.str2epoch("%d/1/%d" % (vol_month, vol_year)) - 1
                dbname = stock.epoch2str(int(voltime), dbname_template)

                self._test_db(voltime, volendtime, dbname)

        elif volumes == "day":
            start_day = int(stock.yearday(time))
            end_day = int(stock.yearday(endtime))

            vol_day = start_day

            while vol_day <= end_day:
                voltime = stock.epoch(vol_day)
                volendtime = voltime + 86399  # full day - 1 sec
                dbname = stock.epoch2str(voltime, dbname_template)

                if self._test_db(voltime, volendtime, dbname):
                    self.dbs[dbname] = {"times": [time, endtime]}

                vol_day = stock.yearday((stock.epoch(vol_day) + 86400))

        else:
            raise UnknownVolumeTypeException(volumes)

    self.logger.debug("DBS=%s" % self.dbs.keys())
def _get_stachan_cache(self):
    """Load data into cache."""
    records = 0

    self.logger.info("Stations(): update cache")

    for dbname in self.dbcentral.list():
        self.logger.debug("Station(): dbname: %s" % dbname)

        dates = {}

        query_start_time = time.time()
        try:
            self.logger.debug("Dbopen " + dbname)
            db = datascope.dbopen(dbname, "r")
            table = "wfdisc"
            field = "time"
            self.logger.debug("Dblookup table=%s field=%s" % (table, field))
            dbwfdisc = db.lookup(table=table, field=field)
            self.logger.debug("Getting record count of " + table)
            records = dbwfdisc.query(datascope.dbRECORD_COUNT)
            self.mintime = dbwfdisc.ex_eval("min(time)")
            self.maxtime = dbwfdisc.ex_eval("max(endtime)")
        except Exception as e:
            self.logger.exception(
                "Problem with wfdisc table. %s: %s" % (Exception, e)
            )
            sys.exit(reactor.stop())

        elapsed_time = time.time() - query_start_time
        self.logger.debug(
            "Initial dbquery and wfdisc record count took %d seconds" % elapsed_time
        )

        if self.maxtime > stock.now() or self.maxtime > (stock.now() - 3600):
            self.maxtime = -1

        self.logger.debug("Starting wfdisc processing of %d records" % records)

        prog = ProgressLogger(
            "Stations: processing wfdisc record ", records, logger=self.logger
        )
        for j in range(records):
            prog.tick()
            dbwfdisc.record = j

            try:
                sta, chan, dbtime = dbwfdisc.getv("sta", "chan", "time")
                self.wfdisc_stachan[sta].add(chan)
                self.wfdates[stock.yearday(dbtime)] = 1
            except datascope.DatascopeException as e:
                self.logger.exception("(%s=>%s)" % (Exception, e))

        prog.finish()

        self.logger.debug("Stations(): maxtime: %s" % self.maxtime)
        self.logger.debug("Stations(): mintime: %s" % self.mintime)
        self.logger.debug("Stations(): dates: %s" % dates.keys())

        try:
            dbsitechan = db.lookup(table="sitechan")
            ssc = dbsitechan.sort(["sta", "chan"])
            records = ssc.query(datascope.dbRECORD_COUNT)
        except Exception as e:
            self.logger.exception(
                "Stations(): Problems with sitechan table %s: %s" % (Exception, e)
            )
            sys.exit(reactor.stop())

        if not records:
            self.logger.critical("Stations(): No records after sitechan sort.")
            sys.exit(reactor.stop())

        prog = ProgressLogger(
            "Stations: processing stachan record ", records, logger=self.logger
        )
        for j in range(records):
            prog.tick()
            ssc.record = j

            try:
                sta, chan, ondate, offdate = ssc.getv(
                    "sta", "chan", "ondate", "offdate"
                )
            except Exception as e:
                self.logger.exception("Station(): (%s=>%s)" % (Exception, e))

            ondate = stock.str2epoch(str(ondate))

            if chan in self.wfdisc_stachan[sta]:
                if offdate != -1:
                    offdate = stock.str2epoch(str(offdate))

                self.stachan_cache[sta][chan]["dates"].extend([[ondate, offdate]])
                self.logger.debug(
                    "Station(): %s.%s dates: %s"
                    % (sta, chan, self.stachan_cache[sta][chan]["dates"])
                )
            else:
                self.logger.debug(
                    "Station(): %s.%s was not in the wfdisc. Skipping" % (sta, chan)
                )

        try:
            ssc.free()
            db.close()
        except Exception:
            pass

        prog.finish(level=logging.INFO)

    self.logger.info(
        "Stations(): Done updating cache (%s) sta-chan pairs."
        % len(self.stachan_cache)
    )
self.logger.debug("endtime=%s" % endtime)

start_year = int(stock.epoch2str(time, "%Y"))
end_year = int(stock.epoch2str(endtime, "%Y"))
start_month = int(stock.epoch2str(time, "%L"))
end_month = int(stock.epoch2str(endtime, "%L"))

if volumes == 'single':
    self._test_db(voltime, volendtime, dbname)

elif volumes == 'year':
    for y in range(start_year, end_year + 1):
        voltime = stock.str2epoch("1/1/%s 00:00:00" % y)
        volendtime = stock.str2epoch("12/31/%s 23:59:59" % y)
        dbname = stock.epoch2str(voltime, dbname_template)
        self._test_db(voltime, volendtime, dbname)

elif volumes == 'month':
    vol_month = start_month
    vol_year = start_year

    while vol_year < end_year or (vol_year == end_year and vol_month <= end_month):
        voltime = stock.str2epoch("%d/1/%d" % (vol_month, vol_year))

        if vol_month < 12:
def main():
    ########################
    # Get information from execution flags
    ########################
    (input, pfname) = configure()

    ########################
    # Get information from parameter file
    ########################
    (tstep, tpad, filter, sta, chan) = get_pf(pfname)

    ########################
    # Set start and end time for animation
    ########################
    tstart = float(stock.str2epoch(input.ts))
    if input.ts == "now":
        tstart = tstart - (tstart % (input.twin)) - input.twin
    tend = tstart + input.twin

    #tstart = float(stock.str2epoch('8/27/2013 21:40:00'))
    #tend = float(stock.str2epoch('8/27/2013 21:50:00'))
    #tstart = float(stock.str2epoch('8/25/2013 17:47:00'))
    #tend = float(stock.str2epoch('8/25/2013 17:57:00'))
    #tstart = float(stock.str2epoch('8/21/2013 18:37:00'))
    #tend = float(stock.str2epoch('8/21/2013 18:44:00'))

    ########################
    # Open waveform database
    ########################
    if not input.dbname:
        f = '%Y_%m_%d'
        date = stock.epoch2str(tstart, f)
        db = ds.dbopen("/aerun/op/run/db/archive_%s" % date, "r")
        #db = ds.dbopen("/aerun/op/run/db/archive_2013_08_21", "r")
        #db = ds.dbopen("/aerun/op/run/db/archive_2013_08_25", "r")
        #db = ds.dbopen("/aerun/op/run/db/archive_2013_08_27", "r")
    else:
        db = ds.dbopen(input.dbname, "r")

    ########################
    # Setup format for creating timestamp on GMT plots
    ########################
    f = '%D %H:%M:%S'
    timestamp = stock.epoch2str(tstart, f)
    pstextstring = "-153 53 10 0 1 0 %s" % timestamp

    current_time = tstart
    while current_time < tend:
        ########################
        # Get waveform data, compute PGA, and make GMT plot
        ########################
        run_gmt(sta, chan, db, current_time, filter, tstep, tpad, input.verbose)

        ########################
        # Make a timestamp every 10 seconds on the GMT plot
        ########################
        if (current_time % 10 == 0):
            timestamp = stock.epoch2str(current_time, f)
            pstextstring = "-153 53 10 0 1 0 %s" % timestamp
            print("Processing time %s" % timestamp)
            os.system("echo %s | pstext -R -Jb -Gblack -P -O -K >> ~/Development/scratch/realtime_gmv/regions_%s.ps" % (pstextstring, current_time))
        else:
            os.system("echo %s | pstext -R -Jb -Gblack -P -O -K >> ~/Development/scratch/realtime_gmv/regions_%s.ps" % (pstextstring, current_time))

        ########################
        # Plot scalebar
        ########################
        os.system("psscale -D4.1i/0.9i/1.0i/.20i -Cwhitered1.cpt -B:\"PGA (ug)\": -L0.0 -P -O >> ~/Development/scratch/realtime_gmv/regions_%s.ps" % current_time)

        current_time += tstep

    ########################
    # Make animated .GIF, remove temporary GMT plots
    ########################
    print("Making animation")
    os.system("convert -delay 10 -loop 2 ~/Development/scratch/realtime_gmv/regions*.ps ~/Development/scratch/realtime_gmv/animation.gif")
    os.system("rm -f ~/Development/scratch/realtime_gmv/regions_*.ps")
    os.system("mv ~/Development/scratch/realtime_gmv/animation.gif /usr/local/mosaic/Input/mgardine/animation_rt.gif")

    db.close()