def run(self):
    """Load the instrument DB, mapping/attribute files and all tab files,
    build an SC3 inventory and write it to ``self.out_file`` as XML.

    Returns:
        True on success, False on any error or on a user abort request.
    """
    try:
        if self.dcid is None:
            print("Please specify datacenter/archive ID", file=sys.stderr)
            return False

        nettab = Nettab(self.dcid)
        instdb = Instruments(self.dcid)
        try:
            # The instrument database and the station/access mappings must
            # all be loaded before any tab file is processed.
            self.__load_file(instdb.load_db, self.inst_db_file)
            self.__load_file(nettab.load_statmap, self.stat_map_file)
            self.__load_file(nettab.load_access_net, self.access_net_file)
            self.__load_file(nettab.load_access_stat, self.access_stat_file)
            self.__load_file(instdb.load_sensor_attr, self.sensor_attr_file)
            self.__load_file(instdb.load_datalogger_attr,
                             self.datalogger_attr_file)
            self.__load_file(nettab.load_network_attr, self.network_attr_file)
            self.__load_file(nettab.load_station_attr, self.station_attr_file)

            inv = SC3Inventory(seiscomp.datamodel.Inventory())

            # Process tab files in deterministic (sorted) order; enumerate
            # replaces the original hand-maintained idx counter.
            for idx, tab in enumerate(sorted(self.tab_files), start=1):
                print("Loading %s (%d/%d)" % (tab, idx, len(self.tab_files)),
                      file=sys.stderr)
                self.__load_file(nettab.load_tab, tab)
                print("Generating data structures", file=sys.stderr)
                nettab.update_inventory(instdb, inv)
                # Stay responsive to an external shutdown request between
                # (potentially slow) tab files.
                if self.isExitRequested():
                    print("Exit requested, abort", file=sys.stderr)
                    return False

            print("Generating output", file=sys.stderr)
            ar = seiscomp.io.XMLArchive()
            ar.setFormattedOutput(self.commandline().hasOption("formatted"))
            # BUGFIX: the original ignored ar.create()'s return value, so an
            # unwritable output path was reported as success.  The sibling
            # run() in this file checks create() the same way.
            if not ar.create(self.out_file):
                print("Cannot create %s" % self.out_file, file=sys.stderr)
                return False
            ar.writeObject(inv.obj)
            ar.close()
            print("Finished", file=sys.stderr)
        except (IOError, NettabError) as e:
            logs.error("fatal error: " + str(e))
            return False
    except Exception:
        logs.print_exc()
        return False
    return True
# NOTE(review): this block appears to be a truncated, pre-Python-3 draft of
# the run() method that follows it: it uses the removed "except X, e:"
# syntax, and its outer "try:" has no matching "except" in the visible text
# (the station key-file loop and the XML output stage are missing).  It
# cannot compile as-is under Python 3 — confirm against version control
# whether this duplicate should simply be deleted.
def run(self): try: seiscompRoot = self.commandline().unrecognizedOptions()[0] sys.stderr.write("root directory: %s\n" % seiscompRoot) try: DCID = self.configGetString("datacenterID") except: logs.error("datacenterID not found in global.cfg") return False networkRestricted = {} incompleteResponse = {} global instdb instdb = Instruments(DCID) self.__load_file(loadGains, os.path.join(seiscompRoot, "config", "gain.dlsv")) # for backwards compatibility self.__load_file(loadGains, os.path.join(seiscompRoot, "config", "gain.tab.out")) self.__load_file(loadGains, os.path.join(seiscompRoot, "config", "gain.tab")) try: self.__load_file(instdb.load_db, os.path.join(seiscompRoot, "resp", "inst.db")) self.__load_file(instdb.load_sensor_attr, os.path.join(seiscompRoot, "resp", "sensor_attr.csv")) self.__load_file(instdb.load_datalogger_attr, os.path.join(seiscompRoot, "resp", "datalogger_attr.csv")) except (IOError, NettabError), e: logs.error("fatal error: " + str(e)) return False sc3Inv = seiscomp3.DataModel.Inventory() inventory = InventoryWrapper(sc3Inv, DCID) existingNetworks = set() existingStations = set() for f in glob.glob(os.path.join(seiscompRoot, "key", "network_*")): try: logs.debug("processing " + f) netCode = f.split("/network_")[-1] try: kf = Keyfile(f) except IOError, e: logs.error(str(e)) continue existingNetworks.add(netCode) networkRestricted[netCode] = False inventory.updateNetwork(netCode, kf) except ValueError, e: logs.error("%s: %s" % (f, str(e)))
def run(self):
    """Rebuild the SC3 inventory from key files and write it as XML.

    Reads network_*/station_* key files under <root>/key, gain files under
    <root>/config and instrument data under <root>/resp, synchronizes the
    inventory (adding, updating and deleting networks/stations) and writes
    the result to ``self.output`` (stdout when empty).

    Returns:
        True on success, False on error.
    """
    try:
        seiscompRoot = self.commandline().unrecognizedOptions()[0]
        sys.stderr.write("root directory: %s\n" % seiscompRoot)

        try:
            DCID = self.configGetString("datacenterID")
        except Exception:  # narrowed from a bare except:
            logs.error("datacenterID not found in global.cfg")
            return False

        networkRestricted = {}
        incompleteResponse = {}

        global instdb
        instdb = Instruments(DCID)

        self.__load_file(loadGains,
                         os.path.join(seiscompRoot, "config", "gain.dlsv"))
        # for backwards compatibility
        self.__load_file(loadGains,
                         os.path.join(seiscompRoot, "config", "gain.tab.out"))
        self.__load_file(loadGains,
                         os.path.join(seiscompRoot, "config", "gain.tab"))

        try:
            self.__load_file(instdb.load_db,
                             os.path.join(seiscompRoot, "resp", "inst.db"))
            self.__load_file(
                instdb.load_sensor_attr,
                os.path.join(seiscompRoot, "resp", "sensor_attr.csv"))
            self.__load_file(
                instdb.load_datalogger_attr,
                os.path.join(seiscompRoot, "resp", "datalogger_attr.csv"))
        except (IOError, NettabError) as e:
            logs.error("fatal error: " + str(e))
            return False

        sc3Inv = seiscomp3.DataModel.Inventory()
        inventory = InventoryWrapper(sc3Inv, DCID)
        existingNetworks = set()
        existingStations = set()

        for f in glob.glob(os.path.join(seiscompRoot, "key", "network_*")):
            try:
                logs.debug("processing " + f)
                netCode = f.split("/network_")[-1]
                try:
                    kf = Keyfile(f)
                except IOError as e:
                    logs.error(str(e))
                    continue
                existingNetworks.add(netCode)
                # The restricted flag is not yet part of the key file (see
                # TODO in the station loop), so default to unrestricted.
                networkRestricted[netCode] = False
                inventory.updateNetwork(netCode, kf)
            except ValueError as e:
                logs.error("%s: %s" % (f, str(e)))

        for f in glob.glob(os.path.join(seiscompRoot, "key", "station_*")):
            try:
                logs.debug("processing " + f)
                (netCode, staCode) = f.split("/station_")[-1].split('_', 1)
                try:
                    kf = Keyfile(f)
                except IOError as e:
                    logs.error(str(e))
                    continue
                existingStations.add((netCode, staCode))

                if netCode not in existingNetworks:
                    logs.warning(
                        "network %s does not exist, ignoring station %s"
                        % (netCode, staCode))
                    continue
                # A station without usable coordinates/geometry is skipped
                # entirely rather than written half-complete.
                if not hasattr(kf, "latitude") or not kf.latitude:
                    logs.warning("missing latitude for %s %s"
                                 % (netCode, staCode))
                    continue
                if not hasattr(kf, "longitude") or not kf.longitude:
                    logs.warning("missing longitude for %s %s"
                                 % (netCode, staCode))
                    continue
                if not hasattr(kf, "elevation") or not kf.elevation:
                    logs.warning("missing elevation for %s %s"
                                 % (netCode, staCode))
                    continue
                if not hasattr(kf, "depth1") or not kf.depth1:
                    logs.warning(
                        "missing depth of primary sensor for %s %s"
                        % (netCode, staCode))
                    continue
                # Exact (0, 0) is treated as "coordinates never filled in".
                if decimal.Decimal(kf.latitude) == decimal.Decimal("0.0") and \
                        decimal.Decimal(kf.longitude) == decimal.Decimal("0.0"):
                    logs.warning("missing coordinates for %s %s"
                                 % (netCode, staCode))
                    continue

                # Optional attributes fall back to documented defaults.
                if not hasattr(kf, "orientation1") or not kf.orientation1:
                    logs.warning(
                        "missing orientation of primary sensor for %s %s, using default"
                        % (netCode, staCode))
                    kf.orientation1 = "Z 0.0 -90.0; N 0.0 0.0; E 90.0 0.0"
                if not hasattr(kf, "orientation2"):
                    kf.orientation2 = ""
                if not hasattr(kf, "unit1") or not kf.unit1:
                    logs.warning(
                        "missing unit of primary sensor for %s %s, using M/S"
                        % (netCode, staCode))
                    kf.unit1 = "M/S"
                if not hasattr(kf, "unit2"):
                    logs.warning(
                        "missing unit of secondary sensor for %s %s, using M/S**2"
                        % (netCode, staCode))
                    kf.unit2 = "M/S**2"
                if not hasattr(kf, "type"):
                    kf.type = ""

                restricted = False
                # TODO: Make restricted part of the key file

                # updateStation() returning False means response data is
                # incomplete; remember the station for the summary below.
                if not inventory.updateStation(netCode, staCode,
                                               restricted, kf):
                    incompleteResponse.setdefault(netCode, set()).add(staCode)
            except ValueError as e:
                logs.error("%s: %s" % (f, str(e)))

        for (netCode, restricted) in networkRestricted.items():
            inventory.setNetworkRestricted(netCode, restricted)

        # Drop inventory entries whose key files no longer exist.
        for (netCode, network) in inventory.networks.items():
            if netCode not in existingNetworks:
                logs.notice("deleting network %s from inventory"
                            % (netCode, ))
                inventory.obj.remove(network.obj)
        for ((netCode, staCode), station) in inventory.stations.items():
            if netCode in existingNetworks and \
                    (netCode, staCode) not in existingStations:
                logs.notice("deleting station %s_%s from inventory"
                            % (netCode, staCode))
                inventory.networks[netCode].obj.remove(station.obj)

        if incompleteResponse:
            logs.info("The following stations are missing full response data")
            logs.info("Use dlsv2inv if needed")
            # Sorted-output form the original had sketched in a comment;
            # replaces the sortDictionary()/manual-sort detour.
            for netCode in sorted(incompleteResponse.keys()):
                logs.info("%s: %s" % (
                    netCode, " ".join(sorted(incompleteResponse[netCode]))))

        ar = seiscomp3.IO.XMLArchive()
        if not self.output:
            sys.stderr.write("Writing output to stdout\n")
            if not ar.create("-"):
                # BUGFIX: message read "Cannot open open stdout".
                sys.stderr.write("Cannot open stdout\n")
                return False
        else:
            sys.stderr.write("Writing output to %s\n" % self.output)
            if not ar.create(self.output):
                # BUGFIX: message read "Cannot open open %s".
                sys.stderr.write("Cannot open %s\n" % self.output)
                return False
        ar.setFormattedOutput(self.commandline().hasOption("formatted"))
        ar.writeObject(sc3Inv)
    except Exception:
        logs.print_exc()
        # BUGFIX: report failure instead of falling through to True, in
        # line with the other run() implementation in this file.
        return False
    return True