def __init__(self, db_list=None, subset=False):
    """Load class and retrieve data from netperf table.

    Usage:
        netperf = NetPerf(db,subset=False)
        netperf.validate()
        while True:
            if netperf.need_update():
                netperf.update()
            data,error = netperf.data()
            sleep(time)

    Args:
        db_list (list): names of the Datascope databases to track.
            Defaults to an empty list.
        subset (bool or string): optional subset expression for queries.
    """
    self.logging = getLogger(fullname(self))
    self.logging.debug("init()")

    self.db = {}
    # Bug fix: use None instead of a mutable [] default. A list literal
    # in the signature is created once and shared by every call, so one
    # instance's mutations would leak into all later instances.
    self.database_list = db_list if db_list is not None else []
    self.db_subset = subset
    self.cache = []
    self.error_cache = []
    self.tables = ["netperf"]
def __init__(self, dbmaster, extra_sensor_mapping=constant.DEFAULT_INFRASOUND_MAPPING):
    """Create a DbMaster view over a Datascope dbmaster database.

    Args:
        dbmaster (string): name of the Datascope database containing the
            dbmaster tables.
        extra_sensor_mapping (dict): maps a sensor type to the channels
            that represent it. Several functions use this as a shorthand
            for deciding what sensor types exist at a station instead of
            consulting the `dlsensor` table directly.

    Example infrasound mapping format:
        {
            "MEMS": ("LDM_EP"),
            "SETRA": ("BDO_EP, LDO_EP"),
            "NCPA": ("BDF_EP", "LDF_EP"),
        }
    """
    # Register the built-in deployment types with their metadata getters.
    self.register_deploy_type('seismic', self.get_seismic_station_metadata)
    self.register_deploy_type('inframet', self.get_extra_sensor_metadata)

    self.logger = getLogger(fullname(self))

    self.dbmaster = dbmaster
    self.extra_sensor_mapping = extra_sensor_mapping
    # Datascope pointer is created lazily, not here.
    self._dbmaster_pointer = None
def __init__(self, db=False, subset=False):
    """Query a Datascope dlevent database and cache values in memory.

    Usage:
        dlevent = Dlevent(db,subset=False)
        dlevent.validate()
        while True:
            if dlevent.need_update():
                dlevent.update()
            data,error = dlevent.data()
            sleep(time)
    """
    self.logging = getLogger(fullname(self))
    self.logging.debug("init()")

    # Remember the requested database name and subset; the Datascope
    # pointer itself is opened later.
    self.db = False
    self.database = db
    self.db_subset = subset

    # In-memory caches, filled by update().
    self.cache = []
    self.error_cache = []

    # Tables this view needs, and their resolved paths.
    self.tables = ["dlevent"]
    self.dbs_tables = {}
def __init__(self, options, databasename):
    """Initialize the Comparison object.

    Args:
        options (dict): dictionary containing required options
        databasename(string): path to Datascope database.

    Options dict format:
        origin (int): origin id of an event
        noplot (bool): do not make a plot at the end
        nosave (bool): do not save results
        debug_plot (bool): run plotting routines with debug option
    """
    self.databasename = databasename
    self.logger = getLogger(fullname(self))

    # verify adequate parameter file
    self.pf = open_verify_pf(options.pf)

    self.origin = options.origin
    self.noplot = options.noplot
    self.nosave = options.nosave
    self.debug_plot = options.debug_plot

    # parse parameter file
    try:
        self._parse_pf(options)
    except Exception as e:
        # Bug fix: include the underlying exception in the message; the
        # original discarded it, making parse failures undebuggable.
        self.logger.error(
            "ERROR: problem during parsing of pf file (%s): %s" % (options.pf, e)
        )
def __init__(self, filename=None, start="oldest"):
    """Set up state tracking, optionally backed by a file on disk."""
    self.logger = getLogger(fullname(self))
    self.logger.debug("stateFile.init()")

    self.filename = filename
    self.packet = start
    self.time = 0
    self.strtime = "n/a"
    self.latency = "n/a"
    self.pid = "PID %s" % os.getpid()

    # Without a filename we only track state in memory.
    if filename is None:
        return

    self.directory, self.filename = os.path.split(filename)
    if self.directory and not os.path.isdir(self.directory):
        os.makedirs(self.directory)

    self.file = os.path.join(self.directory, self.filename)
    self.logger.debug("Open file for STATE tracking [%s]" % self.file)

    if os.path.isfile(self.file):
        # Resume from the state saved by a previous run.
        self.open_file("r+")
        self.read_file()
    else:
        self.open_file("w+")

    if not os.path.isfile(self.file):
        raise stateFileException("Cannot create STATE file %s" % self.file)
def __init__(self):
    """Build an empty dlsensor cache."""
    self.logger = getLogger(fullname(self))
    self.logger.debug("init()")

    # Epoch range used when a record carries no explicit time window.
    self.defaultTime = 0.0
    self.defaultEndtime = 9999999999.9

    # Lookup tables, populated later.
    self.sensors = {}
    self.digitizers = {}
def __init__(self, q330_dlnames=None, channel_mapping=None):
    """Initialize the packet.

    Args:
        q330_dlnames (list): Q330 serial-to-dlname sources. Defaults to
            an empty list.
        channel_mapping (dict): channel-name mapping. Defaults to an
            empty dict.
    """
    self._clean()
    self.imei_buffer = IMEIbuffer()
    # Bug fix: None defaults replace the mutable []/{} literals, which
    # are created once and shared by every call of this constructor.
    self.channel_mapping = channel_mapping if channel_mapping is not None else {}
    self.q330_serial_dlname = q330_dlnames if q330_dlnames is not None else []
    self.logging = getLogger(fullname(self))
def __init__(self, pf_files=None):
    """Initialize the Q330Serials object.

    Args:
        pf_files (list): list of q3302orb parameter file names to parse.
            Defaults to an empty list.
    """
    self.logging = getLogger(fullname(self))
    self.serials = {}
    # Bug fix: default to a fresh list here rather than a mutable []
    # literal in the signature, which is shared across all calls.
    self.add(pf_files if pf_files is not None else [])
def _init_logger(self):
    """Configure the instance logger level from the parsed options."""
    if self.options.debug:
        self.loglevel = "DEBUG"
    elif self.options.verbose:
        self.loglevel = "INFO"
    else:
        self.loglevel = "WARNING"

    # A dedicated application logger object is required for logging work.
    self.logger = getAppLogger(fullname(self), self.loglevel)

    return True
def __init__(self, db):
    """Initialize the Site object and load the station tables."""
    self.db = db
    self.logger = getLogger(fullname(self))
    self.stations = {}

    # Open the site table and join in sitechan.
    steps = ["dbopen site", "dbjoin sitechan"]

    self.logger.info("Database query for stations:")
    self.logger.info(", ".join(steps))

    self.table = self.db.process(steps)
def _init_logging(self):
    """Initialize the logging instance."""
    # Default to WARNING; command-line flags lower the threshold.
    loglevel = "WARNING"
    if self.options.debug:
        loglevel = "DEBUG"
    elif self.options.verbose:
        loglevel = "INFO"
    self.loglevel = loglevel

    # Configure the application-level logger for this module, then take
    # a logger named after this instance.
    getAppLogger(__name__, loglevel)
    self.logger = getLogger(fullname(self))
    self.logger.debug("Hi my name is " + __name__)
def __init__(self, opt=False):
    """Create a new Dlmon object.

    Sets up logging and loads the data-classification rules from the
    dlmon_rules parameter file.
    """
    self.logger = getLogger(fullname(self))
    self.logger.info("New Dlmon object")

    self._clean()

    self.parse_opt = opt
    self.rules = stock.pfread("dlmon_rules.pf")
def __init__(self, orblist=None, orbselect=DEFAULT_ORB_SELECT, orbreject=DEFAULT_ORB_REJECT):
    """Initialize the ORBSerials class.

    Args:
        orblist (list): orb names to track. Defaults to an empty list.
        orbselect (string): source-name select expression.
        orbreject (string): source-name reject expression.
    """
    self.logger = getLogger(fullname(self))
    self.update_frequency = 3600
    self.last_update = 0
    self.orb_select = orbselect
    self.orb_reject = orbreject
    self.serials = {}
    self.orblist = []
    # Bug fix: None replaces the mutable [] default, which is a single
    # list shared by every call of this constructor.
    self.add(orblist if orblist is not None else [])
    self.update()
def __init__(
    self,
    collection,
    orb,
    orb_select=None,
    orb_reject=None,
    default_orb_read=0,
    statefile=False,
    reap_wait=3,
    timeout_exit=True,
    reap_timeout=5,
):
    """Set up the poc2mongo orb reader."""
    self.logging = getLogger(fullname(self))
    self.logging.debug("init()")

    self.poc = Poc()

    # Runtime state.
    self.cache = {}
    self.orb = False
    self.errors = 0
    self.lastread = 0
    self.position = False
    self.error_cache = {}

    # Configuration.
    self.orbname = orb
    self.timezone = "UTC"
    self.timeformat = "%D (%j) %H:%M:%S %z"
    self.timeout_exit = timeout_exit
    self.reap_wait = int(reap_wait)
    self.reap_timeout = int(reap_timeout)
    self.statefile = statefile
    self.collection = collection
    self.orb_select = orb_select
    self.orb_reject = orb_reject
    self.default_orb_read = default_orb_read

    # StateFile: resume from the last packet read in a previous run.
    self.state = stateFile(self.statefile, self.default_orb_read)
    self.position = self.state.last_packet()
    # self.last_time = self.state.last_time()

    # Empty select/reject patterns mean "no filter".
    if not self.orb_select:
        self.orb_select = None
    if not self.orb_reject:
        self.orb_reject = None
def __init__(self, db):
    """Initialize Waveforms object.

    Args:
        db(antelope.datascope.Database): database pointer
    """
    self.logger = getLogger(fullname(self))
    self.db = db
    self.trdata = {}

    # Get a pointer to the wfdisc table up front.
    try:
        self.wftable = self.db.lookup(table="wfdisc")
    except Exception as e:
        # NOTE(review): if logger.error() returns instead of raising,
        # self.wftable is left unset here — confirm the logger behavior.
        self.logger.error("Problems opening wfdisc: %s %s" % (self.db, e))

    if not self.wftable.record_count:
        self.logger.error("No data in wfdisc %s" % self.db)
def __init__(self, db, orid):
    """Initialize the Origin object.

    Args:
        db (antelope.datascope.Database): Antelope datascope db pointer
        orid (int): origin id for the event
    """
    self.logger = getLogger(fullname(self))
    self.db = db

    # Origin attributes; filled in below by get_origin().
    self.orid = None
    self.depth = None
    self.strtime = None
    self.strdate = None
    self.time = None
    self.lat = None
    self.lon = None

    self.get_origin(orid)
def __init__(self, filename=False, name="default", start=0):
    """Set up a named stateFile, optionally backed by a file on disk.

    Args:
        filename (boolean or string): no-op if false. Otherwise, name of
            subfile in the main statefile directory.
        name (string): name of the stateFile object.
        start (int): orb packet id to start at.
    """
    self.logger = getLogger(fullname(self))
    self.logger.debug("init()")

    self.filename = filename
    self.name = name
    self.id = start
    self.time = 0
    self.strtime = "n/a"
    self.latency = "n/a"
    self.pid = "PID %s" % os.getpid()

    # With no filename we only keep state in memory.
    if not filename:
        return

    self.directory, self.filename = os.path.split(filename)
    if self.directory and not os.path.isdir(self.directory):
        os.makedirs(self.directory)

    # The on-disk file is namespaced by the instance name.
    self.file = os.path.join(self.directory, "%s_%s" % (self.name, self.filename))
    self.logger.debug("Open file for STATE tracking [%s]" % self.file)

    if os.path.isfile(self.file):
        # Resume from a previous run.
        self.open_file("r+")
        self.read_file()
    else:
        self.open_file("w+")

    if not os.path.isfile(self.file):
        raise stateFileException("Cannot create STATE file %s" % self.file)
def __init__(self, db=False, subset=False):
    """Initialize the db2mongo event module.

    Args:
        db (string or bool): Datascope database name, or False.
        subset (string or bool): optional subset expression, or False.
    """
    self.logger = getLogger(fullname(self))
    self.logger.debug("Events.init()")

    self.db = False
    # Bug fix: the db and subset constructor arguments were previously
    # discarded (both attributes hard-coded to False). Honor them, as
    # the sibling db2mongo modules (e.g. Dlevent) do.
    self.database = db
    self.db_subset = subset

    # Caches populated on update.
    self.cache = []
    self.cache_error = []
    self.mags = {}

    # event table is not tested here.
    self.tables = ["origin", "netmag"]
    self.dbs_tables = {}

    self.timeformat = False
    self.timezone = False
def __init__(
    self,
    map_type,
    deployment_type,
    start_time,
    end_time,
    station_metadata_objects,
    config,
    file_prefix: str,
    file_suffix: str,
):
    """Set up a deployment map plotter for one time period.

    Args:
        map_type (basestring): type of map - either cumulative or rolling
        deployment_type: instrument deployment type to plot. Built-ins
            include seismic and inframet. Others can be added with
            register_deployment_type.
        start_time (float): epoch start time of active stations
        end_time (float): epoch end time of active stations
        station_metadata_objects (list): list of StationMetadata
        config(GmtConfig): global options for the session
        file_prefix: prefix for generated file names.
        file_suffix: suffix for generated file names.
    """
    self.logger = logutil.getLogger(logutil.fullname(self))

    self.map_type = map_type
    self.deployment_type = deployment_type
    self.start_time = start_time
    self.end_time = end_time
    self.station_metadata_objects = station_metadata_objects
    self.config = config
    self.file_prefix = file_prefix
    self.file_suffix = file_suffix

    # Register the two default deployment types, with their XY file
    # generator functions.
    self.register_deployment_type("seismic", self.generate_station_xy_files)
    self.register_deployment_type(
        "inframet",
        self.generate_extra_sensor_xy_files,
        classifer=util.InframetClassifier,
    )
def __init__(self, filename=False, start="oldest"):
    """Set up the stateFile.

    Args:
        filename (string or False): name of the statefile. If False,
            don't track state.
        start (string or int): Antelope orb position to start at.
    """
    self.logging = getLogger(fullname(self))
    self.logging.debug("stateFile.init()")

    self.filename = filename
    self.packet = start
    self.time = 0
    self.strtime = "n/a"
    self.latency = "n/a"
    self.pid = "PID %s" % os.getpid()

    # Without a filename, state is kept in memory only.
    if not filename:
        return

    self.directory, self.filename = os.path.split(filename)
    if self.directory and not os.path.isdir(self.directory):
        os.makedirs(self.directory)

    self.file = os.path.join(self.directory, self.filename)
    self.logging.debug("Open file for STATE tracking [%s]" % self.file)

    if os.path.isfile(self.file):
        # Resume from a previous run's state.
        self.open_file("r+")
        self.read_file()
    else:
        self.open_file("w+")

    if not os.path.isfile(self.file):
        raise PocException("Cannot create STATE file %s" % self.file)
def _init_logging(self, debug, verbose):
    """Initialize the logging instance.

    As this is called from __init__, and this class isn't intended to
    be run as is as the main method, we don't call getAppLogger here.

    Args:
        debug (bool): enable DEBUG level logging.
        verbose (bool): enable INFO level logging (debug wins if both).
    """
    if debug:
        self.loglevel = "DEBUG"
    elif verbose:
        self.loglevel = "INFO"
    # else use class default

    # Set the log level for the module itself, as this class "runs the
    # show" for the whole deploy_map module.
    module_logger = getModuleLogger(__name__)
    module_logger.setLevel(self.loglevel)

    # Set the log level for this particular class instance. Note that the
    # result of fullname(self) isn't under the same log hierarchy as
    # __name__
    self.logger = getLogger(fullname(self))
    self.logger.setLevel(self.loglevel)
    # Bug fix: corrected "intialized" typo in the emitted log message.
    self.logger.notify("Logging initialized for %s", __name__)
def __init__(
    self,
    collection,
    orbname,
    orb_select=None,
    orb_reject=None,
    default_orb_read=0,
    statefile=None,
    reap_wait=3,
    timeout_exit=True,
    reap_timeout=5,
    parse_opt=False,
    indexing=None,
):
    """Initialize the SOH_mongo object.

    Args:
        collection: a collection handle from an active mongodb
            connection.
        orbname (string): orbserver name (hostname:port or :port)
        orb_select (string): pattern for source names to select, or None
        orb_reject (string): pattern for source names to reject, or None
        default_orb_read (string or int): starting packet position.
            Defaults to 0.
        statefile (string or None): path to statefile. If none specified
            (default), no state is tracked.
        reap_wait (int): how long in seconds to wait between orb reap
            calls. Default is 3 seconds.
        timeout_exit (bool): Exit if a request times out? Defaults to
            True.
        reap_timeout (int): how long to wait for a request before it
            times out. Defaults to 5 seconds.
        parse_opt (bool): Parse the extra OPT channels from a Q330?
            Defaults to False.
        indexing (list): keys to index on. Defaults to an empty list.
    """
    self.logger = logging.getLogger(fullname(self))
    self.logger.debug("Initializing SOHMongo object.")

    self.dlmon = Dlmon(parse_opt)
    self.packet = Packet()
    self.cache = {}
    self.orb = None
    self.errors = 0
    self.orbname = orbname
    self.lastread = 0
    self.timezone = "UTC"
    self.position = False
    self.error_cache = {}
    # Bug fix: None replaces the mutable [] default, which would be one
    # shared list across every call of this constructor.
    self.indexing = indexing if indexing is not None else []
    self.statefile = statefile
    self.collection = collection
    self.orb_select = orb_select
    self.orb_reject = orb_reject
    self.reap_wait = int(reap_wait)
    self.timeout_exit = timeout_exit
    self.reap_timeout = int(reap_timeout)
    self.timeformat = "%D (%j) %H:%M:%S %z"
    self.default_orb_read = default_orb_read

    # StateFile: resume from the last packet read in a previous run.
    self.state = stateFile(self.statefile, self.default_orb_read)
    self.position = self.state.last_packet()
    # self.last_time = self.state.last_time()

    self.logger.debug("Need ORB position: %s" % self.position)

    # Empty select/reject patterns mean "no filter".
    if not self.orb_select:
        self.orb_select = None
    if not self.orb_reject:
        self.orb_reject = None
def __init__(self):
    """Construct the object with an empty cache."""
    self.logger = getLogger(fullname(self))
    self.cache = {}
def __init__(
    self,
    collection,
    orb,
    name="test",
    channel_mapping=None,
    orbunits=None,
    q330units=None,
    mongo_select=None,
    mongo_reject=None,
    default_mongo_read=0,
    statefile=False,
    mongo_pull_wait=3,
    pckt_name_type="pf/xi",
    silent_pkt_fail=False,
):
    """Initialize the import class.

    Args:
        collection: MongoDB collection handle to read documents from.
        orb (string): name of the orb to write packets to.
        name (string): instance name; also namespaces the statefile.
        channel_mapping (dict): channel-name mapping. Defaults to {}.
        orbunits, q330units: unit name sources handed to Packet.
        mongo_select, mongo_reject: document filter patterns, or None.
        default_mongo_read (string or int): starting read position;
            "start"/"oldest" -> 0, "newest"/"end" -> -1, otherwise an
            integer position.
        statefile: statefile path, or False to disable state tracking.
        mongo_pull_wait (int): seconds to wait between MongoDB pulls.
        pckt_name_type (string): packet name suffix type.
        silent_pkt_fail (bool): suppress per-packet failure errors.
    """
    self.name = name
    self.logger = getLogger(fullname(self))
    self.logger.debug("init()")

    # Runtime state.
    self.cache = {}
    self.orb = False
    self.errors = 0
    self.lastread = 0
    self.timezone = "UTC"
    self.error_cache = {}
    self.timeformat = "%D (%j) %H:%M:%S %z"

    # from object options
    self.orbunits = orbunits
    self.q330units = q330units
    # Bug fix: None replaces the mutable {} default, which is created
    # once and shared by every call of this constructor.
    self.channel_mapping = channel_mapping if channel_mapping is not None else {}
    self.packet = Packet(
        q330_dlnames=[self.q330units, self.orbunits],
        channel_mapping=self.channel_mapping,
    )

    self.collection = collection
    self.orbname = orb
    self.mongo_select = mongo_select
    self.mongo_reject = mongo_reject
    self.statefile = statefile
    self.state = None
    self.mongo_pull_wait = int(mongo_pull_wait)
    self.pckt_name_type = pckt_name_type
    self.silent_pkt_fail = silent_pkt_fail

    # Translate the symbolic start positions into concrete indexes
    # (collapses the duplicated start/oldest and newest/end branches).
    if default_mongo_read in ("start", "oldest"):
        self.read_position = 0
    elif default_mongo_read in ("newest", "end"):
        self.read_position = -1
    else:
        try:
            self.read_position = int(default_mongo_read)
        except (TypeError, ValueError):
            self.logger.error("Cannot convert default_mongo_read [%s]" % default_mongo_read)

    # verify mongodb collection
    if self.collection.count() == 0:
        self.logger.warning("MongoDB collection [%s] is empty" % self.name)
        self.valid = False
    else:
        self.valid = True

    # StateFile: resume from the last document read in a previous run.
    self.state = stateFile(self.statefile, self.name, self.read_position)
    self.read_position = self.state.last_id()
    self.logger.debug("Last document read: %s" % self.read_position)

    self.logger.debug("Prep internal object")
    self._prep_orb()
def __init__(self):
    """Create a Packet in a clean, empty state."""
    self.logger = getLogger(fullname(self))
    self._clean()
def __init__(self, argv=None):
    """Initialize object, read config.

    Args:
        argv (list or None): command-line argument list to parse; None
            means use sys.argv[1:] (optparse default).
    """
    # Read configuration from command-line
    usage = "Usage: %prog [options]"
    parser = OptionParser(usage=usage)
    parser.add_option(
        "-s",
        action="store",
        dest="state",
        help="track orb id on this state file",
        default=False,
    )
    parser.add_option(
        "-c",
        action="store_true",
        dest="clean",
        help="clean 'drop' collection on start",
        default=False,
    )
    parser.add_option(
        "-v",
        action="store_true",
        dest="verbose",
        help="verbose output",
        default=False,
    )
    parser.add_option(
        "-d", action="store_true", dest="debug", help="debug output", default=False
    )
    parser.add_option(
        "-p",
        "--pf",
        action="store",
        dest="pf",
        type="string",
        help="parameter file path",
        default="poc2mongo",
    )

    # Bug fix: pass argv through to the parser. The parameter was
    # previously accepted but ignored, so the constructor always parsed
    # sys.argv; passing None preserves the old behavior.
    (self.options, self.args) = parser.parse_args(argv)

    self.options.loglevel = "WARNING"
    if self.options.debug:
        self.options.loglevel = "DEBUG"
    elif self.options.verbose:
        self.options.loglevel = "INFO"

    # NOTE(review): sibling classes call fullname(self) here;
    # fullname(__name__) may be intentional for a module-level logger —
    # confirm against the logging helper's contract.
    self.logger = getLogger(fullname(__name__))

    # Get PF file values
    self.logger.info("Read parameters from pf file %s" % self.options.pf)
    self.pf = stock.pfread(self.options.pf)

    # Get MongoDb parameters from PF file
    self.options.mongo = MongoDbConfig(
        user=self.pf.get("mongo_user"),
        host_and_port=self.pf.get("mongo_host"),
        password=self.pf.get("mongo_password"),
        namespace=self.pf.get("mongo_namespace"),
        collection=self.pf.get("mongo_collection"),
    )

    self.options.orbserver = self.pf.get("orbserver")
    self.options.orb_select = self.pf.get("orb_select")
    self.options.orb_reject = self.pf.get("orb_reject")
    self.options.default_orb_read = self.pf.get("default_orb_read")
    self.options.include_pocc2 = self.pf.get("include_pocc2")
    self.options.reap_wait = self.pf.get("reap_wait")
    self.options.reap_timeout = self.pf.get("reap_timeout")
    self.options.timeout_exit = self.pf.get("timeout_exit")