def __init__(self, collection, orb, orb_select=None, orb_reject=None,
             default_orb_read=0, statefile=False, reap_wait=3,
             timeout_exit=True, reap_timeout=5, parse_opt=False,
             indexing=None):
    """
    Class to read an ORB for pf/st and pf/im packets and update a MongoDB
    database with the values. We can run with the clean option and clean
    the archive before we start putting data in it. There is a position
    flag to force the reader to jump to a particular part of the ORB and
    the usual statefile to look for a previous value for the last packet
    id read.
    """

    self.logging = getLogger('soh_mongo')

    self.logging.debug("Packet.init()")

    self.dlmon = Dlmon(stock.yesno(parse_opt))
    self.packet = Packet()
    self.cache = {}
    self.orb = False
    self.errors = 0
    self.orbname = orb
    self.lastread = 0
    self.timezone = 'UTC'
    self.position = False
    self.error_cache = {}
    # Avoid a shared mutable default argument: build a fresh list per instance.
    self.indexing = indexing if indexing is not None else []
    self.statefile = statefile
    self.collection = collection
    self.orb_select = orb_select
    self.orb_reject = orb_reject
    self.reap_wait = int(reap_wait)
    self.timeout_exit = timeout_exit
    self.reap_timeout = int(reap_timeout)
    self.timeformat = '%D (%j) %H:%M:%S %z'
    self.default_orb_read = default_orb_read

    # StateFile
    self.state = stateFile(self.statefile, self.default_orb_read)
    self.position = self.state.last_packet()
    #self.last_time = self.state.last_time()

    self.logging.debug('Need ORB position: %s' % self.position)

    if not self.orb_select:
        self.orb_select = None
    if not self.orb_reject:
        self.orb_reject = None
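# For orientation, a minimal usage sketch of the constructor above. The class
# name SohPackets, the pymongo connection details, and the ORB name are
# hypothetical, introduced only for illustration; the keyword arguments and
# their meanings come from the signature above.
from pymongo import MongoClient

soh_collection = MongoClient("localhost", 27017).soh.packets   # assumed db/collection names

reader = SohPackets(                    # hypothetical class name
    soh_collection,
    orb="example_host:status",          # example ORB name, not from the source
    orb_select=".*/pf/(st|im)",         # only accept pf/st and pf/im packets
    statefile="state/soh_mongo",        # resume from the last packet id on restart
    reap_wait=3,                        # assumed: seconds to wait on each reap attempt
    reap_timeout=5,                     # declare the connection stale after this many seconds
    timeout_exit=True,                  # exit instead of reconnecting on a stale connection
    parse_opt=False,
)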
def start_daemon(self): """Run in a daemon mode.""" self.logger.debug("Update ORB cache") self.logger.debug(self.orbname) if not self.orbname or not isinstance(self.orbname, str): raise LookupError("Problems with orbname [%s]" % (self.orbname)) # Expand the object if needed if not self.orb: self.logger.debug("orb.Orb(%s)" % (self.orbname)) self.orb = {} self.orb["orb"] = None self.orb["status"] = "offline" self.orb["last_success"] = 0 self.orb["last_check"] = 0 self._connect_to_orb() while True: # Reset the connection if no packets in reap_timeout window self.logger.debug("starting next reap cycle") if (self.orb["last_success"] and self.reap_timeout and ( (stock.now() - self.orb["last_success"]) > self.reap_timeout)): self.logger.warning("Possible stale ORB connection %s" % self.orbname) if stock.yesno(self.timeout_exit): break else: self._connect_to_orb() self.logger.debug("calling extract_data") if self._extract_data(): self.logger.debug("Success on extract_data(%s)" % (self.orbname)) else: self.logger.warning("Problem on extract_data(%s)" % (self.orbname)) self.orb["orb"].close() return 0
def start_daemon(self): """ Look into every ORB listed on the parameter file and track some information from them. """ self.logging.debug( "Update ORB cache" ) self.logging.debug( self.orbname ) if not self.orbname or not isinstance(self.orbname, str): raise LookupError( "Problems with orbname [%s]" % (self.orbname) ) # Expand the object if needed if not self.orb: self.logging.debug( "orb.Orb(%s)" % (self.orbname) ) self.orb = {} self.orb['orb'] = None self.orb['status'] = 'offline' self.orb['last_success'] = 0 self.orb['last_check'] = 0 self._connect_to_orb() while True: # Reset the connection if no packets in reap_timeout window if self.orb['last_success'] and self.reap_timeout and \ ( (stock.now() - self.orb['last_success']) > self.reap_timeout): self.logging.warning('Possible stale ORB connection %s' % self.orbname) if stock.yesno(self.timeout_exit): break else: self._connect_to_orb() if self._extract_data(): self.logging.debug( "Success on extract_data(%s)" % (self.orbname) ) pass else: self.logging.warning( "Problem on extract_data(%s)" % (self.orbname) ) self.orb['orb'].close() return 0
def run_forever(self): """Track POC packets from orbservers.""" self.logging.debug("Update ORB cache") self.logging.debug(self.orbname) if not self.orbname: raise ConfigurationError("orbname is missing [%s]" % (self.orbname)) # Create the orbserver state tracking dict if needed if not self.orb: self.logging.debug("orb.Orb(%s)" % (self.orbname)) self.orb = {} self.orb["orb"] = None self.orb["status"] = "offline" self.orb["last_success"] = 0 self.orb["last_check"] = 0 self._connect_to_orb() while True: # Reset the connection if no packets in reap_timeout window if (self.orb["last_success"] and self.reap_timeout and ( (stock.now() - self.orb["last_success"]) > self.reap_timeout)): self.logging.warning("Possible stale ORB connection %s" % self.orbname) if stock.yesno(self.timeout_exit): break else: self._connect_to_orb() if self._extract_data(): # self.logging.debug( "Success on extract_data(%s)" % (self.orbname) ) pass else: self.logging.warning("Problem on extract_data(%s)" % (self.orbname)) self._connect_to_orb() self.orb["orb"].close()
def setup_event2qml(options, database):
    """
    Parameters
    ----------
    options : OptionParser object
        Parsed command-line options; options.pf names the parameter file.
    database : str
        Antelope database to read events from.

    Returns
    -------
    ev : instance of the Event class
    qml : instance of the css2qml class
    """

    log.info("database [%s]" % database)

    # Pull values from the parameter file
    options.pf = stock.pffiles(options.pf)[-1]
    log.info("Parameter file to use [%s]" % options.pf)
    pf_object = open_verify_pf(options.pf, 1472083200)

    uri_prefix = safe_pf_get(pf_object, 'uri_prefix', 'quakeml')
    agency_uri = safe_pf_get(pf_object, 'agency_uri', 'local')
    agency_id = safe_pf_get(pf_object, 'agency_id', 'xx')
    author = safe_pf_get(pf_object, 'author', 'antelope.event2qml')
    etype_map = safe_pf_get(pf_object, 'etype_map', {})
    preferred_magtypes = safe_pf_get(pf_object, 'preferred_magtypes', [])

    Q_NAMESPACE = safe_pf_get(pf_object, 'Q_NAMESPACE',
                              'http://quakeml.org/xmlns/quakeml/1.2')
    CATALOG_NAMESPACE = safe_pf_get(pf_object, 'CATALOG_NAMESPACE',
                                    'http://anss.org/xmlns/catalog/0.1')
    BED_NAMESPACE = safe_pf_get(pf_object, 'BED_NAMESPACE',
                                'http://quakeml.org/xmlns/bed/1.2')
    BEDRT_NAMESPACE = safe_pf_get(pf_object, 'BEDRT_NAMESPACE',
                                  'http://quakeml.org/xmlns/bed-rt/1.2')

    review_flags = safe_pf_get(pf_object, 'review_flags', ['r', 'y'])
    magnitude_type_subset = safe_pf_get(pf_object, 'magnitude_type_subset', ['.*'])

    info_description = safe_pf_get(pf_object, 'event_info_description', '')
    info_comment = safe_pf_get(pf_object, 'event_info_comment', '')

    append_to_output_file = stock.yesno(
        safe_pf_get(pf_object, 'append_to_output_file', 'true'))

    add_mt = stock.yesno(safe_pf_get(pf_object, 'add_mt', 'true'))
    add_origin = stock.yesno(safe_pf_get(pf_object, 'add_origin', 'true'))
    add_fplane = stock.yesno(safe_pf_get(pf_object, 'add_fplane', 'true'))
    add_stamag = stock.yesno(safe_pf_get(pf_object, 'add_stamag', 'true'))
    add_arrival = stock.yesno(safe_pf_get(pf_object, 'add_arrival', 'true'))
    add_detection = stock.yesno(safe_pf_get(pf_object, 'add_detection', 'true'))
    add_magnitude = stock.yesno(safe_pf_get(pf_object, 'add_magnitude', 'true'))

    # Wrap filter() in list() so the results are reusable sequences under Python 3.
    mt_auth_select = list(filter(None, safe_pf_get(pf_object, 'mt_auth_select', [])))
    mt_auth_reject = list(filter(None, safe_pf_get(pf_object, 'mt_auth_reject', [])))
    event_auth_select = list(filter(None, safe_pf_get(pf_object, 'event_auth_select', [])))
    event_auth_reject = list(filter(None, safe_pf_get(pf_object, 'event_auth_reject', [])))
    netmag_auth_select = list(filter(None, safe_pf_get(pf_object, 'netmag_auth_select', [])))
    netmag_auth_reject = list(filter(None, safe_pf_get(pf_object, 'netmag_auth_reject', [])))
    fplane_auth_select = list(filter(None, safe_pf_get(pf_object, 'fplane_auth_select', [])))
    fplane_auth_reject = list(filter(None, safe_pf_get(pf_object, 'fplane_auth_reject', [])))
    origin_auth_select = list(filter(None, safe_pf_get(pf_object, 'origin_auth_select', [])))
    origin_auth_reject = list(filter(None, safe_pf_get(pf_object, 'origin_auth_reject', [])))
    arrival_auth_select = list(filter(
        None, safe_pf_get(pf_object, 'arrival_auth_select', [])))
    arrival_auth_reject = list(filter(
        None, safe_pf_get(pf_object, 'arrival_auth_reject', [])))
    detection_state_select = list(filter(
        None, safe_pf_get(pf_object, 'detection_state_select', [])))
    detection_state_reject = list(filter(
        None, safe_pf_get(pf_object, 'detection_state_reject', [])))

    # New event object
    log.info('Init Event()')
    ev = Event(database=database,
               magnitude_type_subset=magnitude_type_subset,
               event_auth_select=event_auth_select,
               event_auth_reject=event_auth_reject,
               origin_auth_select=origin_auth_select,
               origin_auth_reject=origin_auth_reject,
               arrival_auth_select=arrival_auth_select,
               arrival_auth_reject=arrival_auth_reject,
               netmag_auth_select=netmag_auth_select,
               netmag_auth_reject=netmag_auth_reject,
               detection_state_select=detection_state_select,
               detection_state_reject=detection_state_reject,
               mt_auth_select=mt_auth_select,
               mt_auth_reject=mt_auth_reject,
               fplane_auth_select=fplane_auth_select,
               fplane_auth_reject=fplane_auth_reject)

    # This is the primary object for the conversion. Initialize and
    # configure it for all events that we want to process.
    log.info('Init QuakeML object')
    qml = css2qml(review_flags=review_flags, etype_map=etype_map,
                  uri_prefix=uri_prefix, agency_uri=agency_uri,
                  agency_id=agency_id, author=author,
                  q=Q_NAMESPACE, catalog=CATALOG_NAMESPACE,
                  bed=BED_NAMESPACE, bedrt=BEDRT_NAMESPACE,
                  info_description=info_description,
                  info_comment=info_comment,
                  add_origin=add_origin,
                  add_magnitude=add_magnitude,
                  add_fplane=add_fplane,
                  add_mt=add_mt,
                  add_stamag=add_stamag,
                  add_arrival=add_arrival)

    return ev, qml
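# A hedged sketch of how setup_event2qml() might be driven from a command
# line. The option flag, its default parameter-file name, and the positional
# database argument are assumptions for illustration only; what is grounded in
# the function above is that options.pf must exist and that it returns (ev, qml).
from optparse import OptionParser

parser = OptionParser(usage="usage: %prog [options] database")
parser.add_option("-p", "--pf", dest="pf", default="event2qml.pf",
                  help="parameter file to read")        # assumed flag and default
(options, args) = parser.parse_args()

if not args:
    parser.error("missing database argument")           # assumed positional argument

ev, qml = setup_event2qml(options, args[0])
# ev  -> Event instance configured with the auth select/reject lists
# qml -> css2qml instance configured with the namespaces and add_* switches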