def _collect_windows(stnchn, sellocs):
    """Collect matching location codes and channel time windows.

    Walks the channels of *stnchn* and, for each channel whose (normalized)
    location code matches one of *sellocs* ('*' matches anything), records
    the location code and the channel start/end dates.  Open-ended channels
    (end_date is None) get a far-future sentinel end date.

    Returns (locs, stdts, eddts) as three parallel-built lists.
    """
    locs = []
    stdts = []
    eddts = []
    for chnl in stnchn:
        chnlloc = chnl.location_code
        if len(chnlloc) == 0:
            chnlloc = "--"
        for selloc in sellocs:
            if selloc == '*' or chnlloc in selloc:
                locs.append(chnlloc)
                stdts.append(chnl.start_date)
                if chnl.end_date is None:
                    # Open-ended channel: use a far-future sentinel
                    eddts.append(UTCDateTime("2599-12-31"))
                else:
                    eddts.append(chnl.end_date)
    return locs, stdts, eddts


def main(args=None):
    """Query an FDSN web service for stations and build an StDb database.

    Options returned by ``get_options`` drive the station query; ``outp`` is
    the output file prefix.  The selected stations are written both as a
    pickled station dictionary (``<outp>.pkl``, via ``write_db``) and as a
    csv summary (``<outp>.csv``).  In debug mode the raw inventory is
    pickled and a csv2kml input file is produced instead.
    """

    # Get Input Options
    (opts, outp) = get_options()

    # Initialize the client
    stdout.writelines("Initializing Client ({0:s})...".format(opts.Server))
    if len(opts.UserAuth) == 0:
        client = Client(opts.Server)
    else:
        client = Client(opts.Server, user=opts.UserAuth[0],
                        password=opts.UserAuth[1])
    stdout.writelines("Done\n\n")

    # Search the Client for stations
    stdout.writelines("Querying client...")
    try:
        inv = client.get_stations(
            network=opts.nets, station=opts.stns, channel=opts.chns,
            location=opts.locs, starttime=opts.stdate, endtime=opts.enddate,
            startbefore=opts.stbefore, startafter=opts.stafter,
            endbefore=opts.endbefore, endafter=opts.endafter,
            latitude=opts.lat, longitude=opts.lon,
            minradius=opts.minr, maxradius=opts.maxr,
            minlatitude=opts.minlat, maxlatitude=opts.maxlat,
            minlongitude=opts.minlon, maxlongitude=opts.maxlon,
            includeavailability=None, includerestricted=True,
            level='channel')
        stdout.writelines("Done\n\n")
    except Exception:
        # Narrowed from a bare ``except:`` — still best-effort, but no
        # longer swallows SystemExit/KeyboardInterrupt.
        print('Exception: Cannot complete query or no data in query...')
        exit()

    # Summarize Search
    nstn = sum(len(net.stations) for net in inv.networks)
    print("Search Complete: ")
    print(" {0:d} stations in {1:d} networks".format(nstn, len(inv.networks)))
    print(" ")

    # If Debug mode, pickle inventory and exit
    if opts.debug:
        stdout.writelines(
            "Pickling Inventory into {0:s}_query_debug.pkl...".format(outp))
        # Fix: close the pickle file deterministically (was left to GC)
        with open('{0:s}_query_debug.pkl'.format(outp), 'wb') as fpkl:
            pickle.dump(inv, fpkl)
        stdout.writelines("Done\n\n")
        stdout.writelines(
            "Writing csv2kml format file to {0:s}_query_debug.kcsv\n".format(
                outp))
        # Fix: close the csv file deterministically (was left to GC)
        with open("{0:s}_query_debug.kcsv".format(outp), 'w') as fcsv:
            for net in inv.networks:
                for stn in net.stations:
                    # NOTE(review): stn.end_date may be None here for
                    # open-ended stations and strftime would then raise —
                    # preserved from the original; confirm inventories in
                    # debug mode always carry an end date.
                    fcsv.writelines(
                        "{0:11.6f},{1:10.6f},{2:2s},{3:5s},{4:s},{5:s}\n".format(
                            stn.longitude, stn.latitude, net.code, stn.code,
                            stn.start_date.strftime("%Y-%m-%d"),
                            stn.end_date.strftime("%Y-%m-%d")))
        aa = system(
            "csv2kml --field-names='lon,lat,net,station,start,end' "
            "{0:s}_query_debug.kcsv".format(outp))
        if aa == 0:
            print("Generated a KML file {0:s}_query_debug.kcsv.kml".format(outp))
        else:
            print("Generate a kml file using: ")
            print(" csv2kml --no-random-colours --field-names='lon,lat,net,station,start,end' "
                  "{0:s}_query_debug.kcsv".format(outp))
        exit()

    # Split locations for later parsing
    opts.locs = opts.locs.split(',')

    # Normalize empty location codes to "--"
    for i, l in enumerate(opts.locs):
        if len(l) == 0:
            # BUG FIX: was ``opts.locs[i] == "--"`` (a comparison, i.e. a
            # no-op), so empty location codes were never normalized.
            opts.locs[i] = "--"

    # Initialize station dictionary
    stations = {}

    # Loop through results
    for net in inv.networks:
        network = net.code.upper()
        print("Network: {0:s}".format(network))
        for stn in net.stations:
            station = stn.code.upper()
            print(" Station: {0:s}".format(station))

            # get standard values
            lat = stn.latitude
            lon = stn.longitude
            elev = stn.elevation / 1000.
            stdt = stn.start_date
            # Open-ended station: use a far-future sentinel end date
            if stn.end_date is None:
                eddt = UTCDateTime("2599-12-31")
            else:
                eddt = stn.end_date
            stat = stn.restricted_status

            print(" Lon, Lat, Elev: {0:9.4f}, {1:8.4f}, {2:7.3f}".format(
                lon, lat, elev))
            print(" Start Date: {0:s}".format(
                stdt.strftime("%Y-%m-%d %H:%M:%S")))
            print(" End Date: {0:s}".format(
                eddt.strftime("%Y-%m-%d %H:%M:%S")))
            print(" Status: {0:s}".format(stat))

            # Parse Channels
            if opts.lkey:
                # Long keys: keep every ranked channel type with a Z
                # component; one database entry per channel type.
                chn = [pchn for pchn in opts.chnrank
                       if len(stn.select(channel=pchn + "Z").channels) > 0]

                # BUG FIX: ``chn`` is always a list, so the original
                # ``if chn is None:`` test could never fire; test for
                # emptiness so stations without a ranked Z channel are
                # reported and skipped.
                if len(chn) == 0:
                    if len(stn.select(channel='*Z')) == 0:
                        print(" Error: No Z component. Skipping")
                        continue

                # loop through channels and select time windows
                for pchn in chn:
                    stnchn = stn.select(channel=pchn + "Z")
                    locs, stdts, eddts = _collect_windows(stnchn, opts.locs)

                    # ROBUSTNESS FIX: the original indexed ``stdts[0]`` even
                    # when no location matched, raising IndexError; report
                    # and skip instead (mirrors the short-key branch).
                    if len(locs) == 0:
                        print(" Error: Location {} not available. Skipping".
                              format(",".join(opts.locs)))
                        continue

                    # Unique set of locids, get minmax time for channel
                    # across all locids
                    locs = list(set(locs))
                    stdts.sort()
                    eddts.sort()
                    stnchnstdt = stdts[0]
                    stnchneddt = eddts[-1]

                    print(" Selected Channel: {0:s}".format(pchn))
                    print(" Locations: {0:s}".format(",".join(locs)))
                    print(" Start Date: {0:s}".format(
                        stnchnstdt.strftime("%Y-%m-%d %H:%M:%S")))
                    print(" End Date: {0:s}".format(
                        stnchneddt.strftime("%Y-%m-%d %H:%M:%S")))

                    # Add single key to station database
                    key = "{0:s}.{1:s}.{2:2s}".format(network, station, pchn)
                    if key not in stations:
                        stations[key] = StDbElement(
                            network=network, station=station, channel=pchn,
                            location=locs, latitude=lat, longitude=lon,
                            elevation=elev, polarity=1., azcorr=0.,
                            startdate=stnchnstdt, enddate=stnchneddt,
                            restricted_status=stat)
                        print(" Added as: " + key)
                    else:
                        print(" Warning: " + key + " already exists...Skip")
            else:
                # Short keys: select the first ranked channel type that has
                # both a Z component and a matching location.
                chn = None
                locs = []
                stdts = []
                eddts = []
                for pchn in opts.chnrank:
                    stnchn = stn.select(channel=pchn + "Z")
                    if len(stnchn.channels) > 0:
                        chn = pchn
                        locs, stdts, eddts = _collect_windows(
                            stnchn, opts.locs)
                        if len(locs) > 0:
                            break

                if chn is None:
                    if len(stn.select(channel='*Z')) == 0:
                        print(" Error: No Z component. Skipping")
                        continue
                if len(locs) == 0:
                    print(" Error: Location {} not available. Skipping".
                          format(",".join(opts.locs)))
                    continue

                # Unique set of locids, get minmax time for channel across
                # all locids
                locs = list(set(locs))
                stdts.sort()
                eddts.sort()
                stnchnstdt = stdts[0]
                stnchneddt = eddts[-1]

                # BUG FIX: report the selected channel ``chn`` rather than
                # the loop variable ``pchn`` (identical here only because
                # the loop breaks on selection).
                print(" Selected Channel: {0:s}".format(chn))
                print(" Locations: {0:s}".format(",".join(locs)))
                print(" Start Date: {0:s}".format(
                    stnchnstdt.strftime("%Y-%m-%d %H:%M:%S")))
                print(" End Date: {0:s}".format(
                    stnchneddt.strftime("%Y-%m-%d %H:%M:%S")))

                key = "{0:s}.{1:s}".format(network, station)
                # Add single key to station database; note the entry keeps
                # the *station* epoch (stdt/eddt), not the channel window —
                # preserved from the original.
                if key not in stations:
                    stations[key] = StDbElement(
                        network=network, station=station, channel=chn,
                        location=locs, latitude=lat, longitude=lon,
                        elevation=elev, polarity=1., azcorr=0.,
                        startdate=stdt, enddate=eddt, restricted_status=stat)
                    print(" Added as: " + key)
                else:
                    print(" Warning: " + key + " already exists...Skip")
            print()

    # Save and Pickle
    print(" ")
    print(" Pickling to {0:s}.pkl".format(outp))
    write_db(fname=outp + '.pkl', stdb=stations, binp=opts.use_binary)

    # Save csv
    print(" Saving csv to: {0:s}.csv".format(outp))
    # BUG FIX: ``sorted(stkeys)`` discarded its result, so rows were written
    # in arbitrary dict order; iterate the sorted keys.  Also close the
    # file deterministically (the original never closed it).
    with open(outp + ".csv", 'w') as fcsv:
        for stkey in sorted(stations.keys()):
            el = stations[stkey]
            # net stn locs chn std stt edd edt lat lon elev pol azc res
            fcsv.writelines(
                "{0:s},{1:s},{2:s},{3:s}*,{4:s},{5:s}.{6:1.0f},{7:s},"
                "{8:s}.{9:1.0f},{10:8.4f},{11:9.4f},{12:6.2f},{13:3.1f},"
                "{14:8.4f},{15:s}\n".format(
                    el.network, el.station, ":".join(el.location),
                    el.channel[0:2],
                    el.startdate.strftime("%Y-%m-%d"),
                    el.startdate.strftime("%H:%M:%S"),
                    el.startdate.microsecond / 100000.,
                    el.enddate.strftime("%Y-%m-%d"),
                    el.enddate.strftime("%H:%M:%S"),
                    el.enddate.microsecond / 100000.,
                    el.latitude, el.longitude, el.elevation,
                    el.polarity, el.azcorr, el.status))
def main(args=None):
    """Interactively edit entries of a pickled StDb station database.

    Loads the database named on the command line, presents each selected
    entry in an edit box (csv form), parses the edited text back, and —
    if anything changed — writes the database out (to ``opts.ofile`` when
    given, otherwise back over the input pickle).
    """

    # get options
    (opts, inpickle) = get_options()

    # Check extension
    ext = osp.splitext(inpickle)[1]
    if ext == ".pkl":
        # Pickle Already Created...
        print("Listing Station Pickle: {0:s}".format(inpickle))
        db, stkeys = load_db(inpickle, binp=opts.use_binary, keys=opts.keys)

        # Do we make any changes
        tfEdit = False

        ikey = 0
        # BUG FIX: iterate over a snapshot — the loop body deletes and adds
        # keys (rename case), and mutating a dict while iterating
        # ``db.items()`` directly is unsafe in Python 3.
        for key, val in list(db.items()):
            if key not in stkeys:
                continue
            ikey += 1
            print(
                "--------------------------------------------------------------------------"
            )
            print(" Original ")
            print("{0:.0f}) {1:s}".format(ikey, key))
            print(db[key](5))
            print(
                "**************************************************************************"
            )
            # Present the entry as csv text for interactive editing
            newline = EditMsgBox(ststr=stdb.convert.tocsv(db[key]), title=key)
            if len(newline) > 0:
                nkey, nel = stdb.convert.fromcsv(newline, lkey=opts.lkey)
                # Edited text parsed back to an identical element: no-op
                if nel == val:
                    print(" No Changes Made...")
                    continue
                if nkey is not None and nel is not None:
                    if key == nkey:
                        # Same key, changed content: replace in place
                        db[key] = nel
                        print(" Replaced " + key + ": ")
                        print(db[key](5))
                        tfEdit = True
                    else:
                        # Key was renamed: move the entry unless the new
                        # key already exists
                        if nkey not in db:
                            del db[key]
                            db[nkey] = nel
                            print(" Added " + nkey + ":")
                            print(db[nkey](5))
                            tfEdit = True
                        else:
                            print(" Database already has key " + nkey +
                                  ". No changes made")
                            print(db[nkey](5))
                else:
                    print(" Error parsing: ")
                    print(" " + newline)
            else:
                print(" No Changes Made...")

        # Did we make any changes?
        if tfEdit:
            # Changes Made... Save Database
            if len(opts.ofile) > 0:
                if opts.ofile.find('.pkl') > 0:
                    fname = opts.ofile
                else:
                    fname = opts.ofile + ".pkl"
                write_db(fname=fname, stdb=db, binp=opts.use_binary)
            else:
                # No output name given: overwrite the input pickle
                write_db(fname=inpickle, stdb=db, binp=opts.use_binary)
    else:
        print("Error: Must Enter a .pkl station database pickle file")
        sys.exit()
def main(args=None):
    """Generate a pickled StDb station database from a station list file.

    Accepts either a chS csv file (comma-separated, >6 fields) or an
    IPO SPC file (whitespace/tab-separated, >6 fields) and writes the
    parsed entries to ``<name>.pkl`` via ``write_db``.
    """

    # Get options
    parser = MyParser(
        usage="Usage: %prog [options] <station list>",
        description="Script to generate a pickled station database file.",
        epilog="""Input File Type 1 (chS csv):
NET[:NET2:...],STA,LOC[:LOC2:...],CHN,YYYY-MM-DD,HH:MM:SS.SSS,YYYY-MM-DD,HH:MM:SS.SSS,lat,lon,elev,pol,azcor,status

Input File Type 2 (IPO SPC):
NET STA CHAN lat lon elev YYYY-MM-DD YYYY-MM-DD

Output File Types:
Each element corresponding to each dictionary key is saved as StDb.StbBElement class.
""")
    parser.add_option(
        "-L", "--long-keys", action="store_true", dest="lkey", default=False,
        help="Specify Key format. Default is Net.Stn. Long keys are Net.Stn.Chn")
    parser.add_option(
        "-a", "--ascii", action="store_false", dest="use_binary", default=True,
        help="Specify to write ascii Pickle files instead of binary. "
             "Ascii are larger file size, but more likely to be system independent.")
    (opts, args) = parser.parse_args()

    # ROBUSTNESS FIX: the original indexed args[0] without checking,
    # raising IndexError when no file was given.
    if len(args) < 1:
        parser.error("Must supply a station list file")

    if not osp.exists(args[0]):
        parser.error("Input File " + args[0] + " does not exist")

    # Check Extension
    ext = osp.splitext(args[0])[1]
    if ext != ".pkl":
        # Station List...Pickle it.
        print("Parse Station List " + args[0])
        ofn = args[0]
        # BUG FIX: ``if ofn.find(".csv"):`` was truthy even when ".csv" was
        # absent (find returns -1) and only worked because of the nested
        # equality check; test the suffix directly.
        if ofn.endswith(".csv"):
            ofn = osp.splitext(ofn)[0]
        pklfile = ofn + ".pkl"

        stations = {}
        # Fix: close the input file deterministically
        with open(args[0], 'r') as fin:
            for line in fin:
                line = line.strip()
                # Skip blank lines and comments
                if len(line) == 0 or line[0] == "#":
                    continue
                if len(line.split(',')) > 6:
                    # --- Input Type 1: chS csv ---
                    fields = line.split(',')
                    # Networks: first is primary, rest are alternates
                    nets = fields[0].split(':')
                    net = nets[0]
                    altnet = nets[1:]
                    # Required Station Parameters
                    stn = fields[1]
                    # Required Location Parameters
                    loc = fields[2].split(':')
                    # Required Channel Parameters (first two characters)
                    chn = fields[3][0:2]
                    # Required Timing Parameters
                    # NOTE(review): the time-of-day fields (5 and 7) were
                    # read but never used in the original; the dates alone
                    # set the epoch — confirm this is intended.
                    stdt = UTCDateTime(fields[4])
                    eddt = UTCDateTime(fields[6])
                    # Required Position Parameters
                    lat = float(fields[8])
                    lon = float(fields[9])
                    # Set Default values for Optional elements
                    elev = 0.
                    pol = 1.
                    azcor = 0.
                    status = ""
                    if len(fields) >= 11:
                        elev = float(fields[10])
                    if len(fields) >= 12:
                        pol = float(fields[11])
                    if len(fields) >= 13:
                        azcor = float(fields[12])
                    if len(fields) == 14:
                        status = fields[13]
                elif len(line.split()) > 6 or len(line.split('\t')) > 6:
                    # --- Input Type 2: IPO SPC (space- or tab-separated) ---
                    if len(line.split()) > 6:
                        fields = line.split()
                    else:
                        fields = line.split('\t')
                    net = fields[0]
                    stn = fields[1]
                    chn = fields[2][0:2]
                    stdt = UTCDateTime(fields[6])
                    eddt = UTCDateTime(fields[7])
                    lat = float(fields[3])
                    lon = float(fields[4])
                    elev = float(fields[5])
                    altnet = []
                    status = ""
                    azcor = 0.
                    pol = 0.
                    loc = ""
                else:
                    # ROBUSTNESS FIX: the original fell through here and
                    # built a key from the *previous* line's variables
                    # (or raised NameError on the first line).
                    print("Warning: Unable to parse line: " + line)
                    continue

                # Now Add lines to station Dictionary
                if opts.lkey:
                    key = "{0:s}.{1:s}.{2:2s}".format(
                        net.strip(), stn.strip(), chn.strip())
                else:
                    key = "{0:s}.{1:s}".format(net.strip(), stn.strip())
                if key not in stations:
                    stations[key] = StDbElement(
                        network=net, station=stn, channel=chn, location=loc,
                        latitude=lat, longitude=lon, elevation=elev,
                        polarity=pol, azcorr=azcor, startdate=stdt,
                        enddate=eddt, restricted_status=status)
                    print("Adding key: " + key)
                else:
                    print("Warning: Key " + key + " already exists...Skip")

        # Save and Pickle the station database
        print(" Pickling {0:s}".format(pklfile))
        write_db(fname=pklfile, stdb=stations, binp=opts.use_binary)
    else:
        print("Error: Must supply a station list, not a Pickle File")
        exit()
if nkey not in db: del db[key] db[nkey] = nel print (" Added " + nkey + ":") print (db[nkey](5)) tfEdit = True else: print (" Database already has key " + nkey + ". No changes made") print (db[nkey](5)) else: print (" Error parsing: ") print ( " " + newline ) else: print (" No Changes Made...") # Did we make any changes? if tfEdit: # Changes Made... Save Database if len(opts.ofile) > 0: if opts.ofile.find('.pkl') > 0: fname = opts.ofile else: fname = opts.ofile + ".pkl" write_db(fname=fname, stdb=db, binp=opts.use_binary) else: write_db(fname=inpickle, stdb=db, binp=opts.use_binary) else: print ("Error: Must Enter a .pkl station database pickle file") sys.exit()
def main(args=None):
    """Merge several pickled StDb station databases into one.

    Loads the first database named on the command line, then folds in each
    subsequent database: new keys are added, duplicate keys keep the
    first-seen entry (the duplicate is reported in verbose mode).  The
    merged database is written to ``opts.oname`` only if anything was added.
    """

    # get options
    (opts, args) = get_options()

    # Check Output File
    if osp.exists(opts.oname) and not opts.ovr:
        print("Error: Output File exists " + opts.oname)
        print(" Run using --overwrite to replace existing file")
        sys.exit()

    # Load First Database
    if opts.verb:
        print("Loading " + args[0])
    tdb = load_db(args[0], binp=opts.use_binary)

    # Any added?
    stadd = False

    # Loop adding additional databases
    for ndb in args[1:]:
        # load database
        if opts.verb:
            print(" Adding " + ndb)
        db = load_db(ndb, binp=opts.use_binary)

        # BUG FIX: the original called ``sorted(nkeys)`` and discarded the
        # result (and likewise a dead ``sorted(allkeys)`` earlier), so keys
        # were processed in arbitrary order; iterate the sorted keys.
        for nkey in sorted(db.keys()):
            if nkey not in tdb:
                tdb[nkey] = db[nkey]
                stadd = True
            else:
                # Duplicate: keep the first-seen entry, report in verbose mode
                if opts.verb:
                    print("")
                    print(
                        "*********************************************************"
                    )
                    print("! Duplicate Entry: " + nkey)
                    print(" Retaining: ")
                    print(tdb[nkey](5))
                    print(" Discarding: ")
                    print(db[nkey](5))
                    print(
                        "*********************************************************"
                    )

    # Were any new stations added?
    if stadd:
        if opts.verb:
            print("")
            print("Saving merged database: " + opts.oname)
        write_db(fname=opts.oname, stdb=tdb, binp=opts.use_binary)
    else:
        if opts.verb:
            print("")
            print("No actual merges performed...")
key = "{0:s}.{1:s}".format(network, station) #-- Add single key to station database if key not in stations: stations[key] = StDbElement(network=network, station=station, channel=chn, \ location=locs, latitude=lat, longitude=lon, elevation=elev, polarity=1., \ azcorr=0., startdate=stdt, enddate=eddt, restricted_status=stat) print (" Added as: " + key) else: print (" Warning: " + key + " already exists...Skip") print () # Save and Pickle print (" ") print (" Pickling to {0:s}.pkl".format(outp)) write_db(fname=outp + '.pkl', stdb=stations, binp=opts.use_binary) # Save csv print (" Saving csv to: {0:s}.csv".format(outp)) fcsv = open(outp + ".csv",'w') stkeys = stations.keys() sorted(stkeys) # python3! for stkey in stkeys: # net stn locs chn std stt edd edt lat lon elev pol azc res fcsv.writelines("{0:s},{1:s},{2:s},{3:s}*,{4:s},{5:s}.{6:1.0f},{7:s},{8:s}.{9:1.0f},{10:8.4f},{11:9.4f},{12:6.2f},{13:3.1f},{14:8.4f},{15:s}\n".format( stations[stkey].network, stations[stkey].station, ":".join(stations[stkey].location), stations[stkey].channel[0:2], stations[stkey].startdate.strftime("%Y-%m-%d"), stations[stkey].startdate.strftime("%H:%M:%S"), stations[stkey].startdate.microsecond/100000., stations[stkey].enddate.strftime("%Y-%m-%d"), stations[stkey].enddate.strftime("%H:%M:%S"), stations[stkey].enddate.microsecond/100000., stations[stkey].latitude, stations[stkey].longitude, stations[stkey].elevation, stations[stkey].polarity, stations[stkey].azcorr, stations[stkey].status))
altnet = [] status = "" azcor = 0. pol = 0. loc = "" # Now Add lines to station Dictionary if opts.lkey: key = "{0:s}.{1:s}.{2:2s}".format(net.strip(), stn.strip(), chn.strip()) else: key = "{0:s}.{1:s}".format(net.strip(), stn.strip()) if key not in stations: stations[key] = StDbElement(network=net, station=stn, channel=chn, \ location=loc, latitude=lat, longitude=lon, elevation=elev, polarity=pol, \ azcorr=azcor, startdate=stdt, enddate=eddt, restricted_status=status) print("Adding key: " + key) else: print("Warning: Key " + key + " already exists...Skip") # import pprint # print(stations.keys()) # pprint.pprint(stations['TA.M31M']) # Save and Pickle the station database print(" Pickling {0:s}".format(pklfile)) write_db(fname=pklfile, stdb=stations, binp=opts.use_binary) else: print("Error: Must supply a station list, not a Pickle File") exit()
if nkey not in db: db[nkey] = NewDbEntry addnew = True print("* Added to DB") else: print("*") print("* Key Exists: " + nkey) print("* Existing: ") print(db[nkey](10)) print("* New:") print(NewDbEntry(10)) print("--------------") ovr = input("Overwrite Existing? [Y]/N: ") if ovr.lower() == "n": print("* Retaining Original") else: db[nkey] = NewDBEntry addnew = True print("* Added to DB") print("") newstn = input("* Another Station? [Y]/N: ") # Were any new stations added? if addnew: print("") print("Saving new database: " + opts.oname) write_db(fname=opts.oname, stdb=db, binp=opts.use_binary) else: print("") print("No changes made...")
def main(args=None):
    """Interactively append new stations to a pickled StDb database.

    Loads the database named on the command line, prompts for new station
    entries (basic fields, plus advanced fields with ``--complex``), and
    writes the result to ``opts.oname`` if anything was added.
    """

    # get options
    (opts, inpickle) = get_options()

    # Check Output File
    if osp.exists(opts.oname):
        print("Error: Output File exists ", opts.oname)
        sys.exit()

    # Load Database
    db = load_db(inpickle, binp=opts.use_binary)

    # Loop adding new stations
    newstn = ""
    addnew = False
    while len(newstn) == 0 or newstn.lower()[0] == "y":
        print("********************************")
        print("* New Station")
        # Get Basic Info
        net = input("* Network: ")
        stn = input("* Station: ")
        chn = input("* Channel: ")[0:2]
        loc = input("* LocId: ")
        lon = float(input("* Longitude: "))
        lat = float(input("* Latitude: "))
        std = UTCDateTime(input("* Start: "))
        edd = UTCDateTime(input("* End: "))

        # Advanced Info
        if opts.complex:
            altnet = input("* Alternate Networks: ")
            addloc = input("* Additional LocIDs: ")
            pol = float(input("* Polarity: "))
            azcor = float(input("* Azimuth Correction: "))
            elev = float(input("* Elevation: "))
            res_stat = input("* Restricted Status: ")
            loc = [loc]
            loc.extend(addloc.split(','))
            altnet = altnet.split(',')
        else:
            altnet = []
            loc = [loc]
            pol = 1.
            azcor = 0.
            elev = 0.
            res_stat = "?"

        # Normalize empty location codes to "--"
        loc = [al if len(al) > 0 else '--' for al in loc]

        # Construct Key
        if opts.lkey:
            nkey = net.upper() + "." + stn.upper() + "." + chn.upper()
        else:
            nkey = net.upper() + "." + stn.upper()

        # New DBElement
        # BUG FIX: the prompted restricted status was collected but never
        # passed to StDbElement; forward it.
        NewDbEntry = StDbElement(
            network=net, altnet=altnet, station=stn, channel=chn,
            location=loc, latitude=lat, longitude=lon, elevation=elev,
            polarity=pol, azcorr=azcor, startdate=std, enddate=edd,
            restricted_status=res_stat)

        # Add key if not present
        if nkey not in db:
            db[nkey] = NewDbEntry
            addnew = True
            print("* Added to DB")
        else:
            print("*")
            print("* Key Exists: " + nkey)
            print("* Existing: ")
            print(db[nkey](10))
            print("* New:")
            print(NewDbEntry(10))
            print("--------------")
            ovr = input("Overwrite Existing? [Y]/N: ")
            if ovr.lower() == "n":
                print("* Retaining Original")
            else:
                # BUG FIX: was ``db[nkey] = NewDBEntry`` — a NameError typo
                # for the element constructed above.
                db[nkey] = NewDbEntry
                addnew = True
                print("* Added to DB")
        print("")
        newstn = input("* Another Station? [Y]/N: ")

    # Were any new stations added?
    if addnew:
        print("")
        print("Saving new database: " + opts.oname)
        write_db(fname=opts.oname, stdb=db, binp=opts.use_binary)
    else:
        print("")
        print("No changes made...")