def get_one(orb):
    """Reap one packet from *orb* and expand it into parsed packets.

    Returns whatever create_packets() yields for the reaped packet, or an
    empty list when the orb reports an incomplete reap.
    """
    try:
        packet_id, source, when, raw = orb.reap(1)
        decoded = Pkt.Packet(source, when, raw)
        return create_packets(decoded, packet_id)
    except OrbIncompleteException:
        # Nothing usable came off the orb this time around.
        return []
def new(self, rawpkt):
    """Re-populate this Poc object from a raw orb packet tuple.

    This deliberately reuses the same object instead of allocating a fresh
    orb.Packet each time: repeatedly allocating Pkt structures leaks memory
    in the underlying Antelope code, so we reset and refill in place.
    """
    self._clean()
    self.rawpkt = rawpkt

    # A missing or non-positive packet id means there is nothing to parse.
    raw_id = rawpkt[0]
    if not raw_id or int(float(raw_id)) < 1:
        return

    self.id = raw_id
    self.time = rawpkt[2]

    # Try to extract information from packet.
    decoded = Pkt.Packet(rawpkt[1], rawpkt[2], rawpkt[3])
    self.srcname = decoded.srcname if decoded.srcname else rawpkt[1]

    # Step around an Antelope TypeError by explicitly calling .keys()
    # on the ParameterFile object instead of using "in" directly.
    present = decoded.pf.keys()

    if "sn" not in present:
        return
    self.sn = decoded.pf["sn"]

    if "srcip" not in present:
        return
    self.srcip = decoded.pf["srcip"]

    if "time" not in present:
        return
    self.poctime = float(decoded.pf["time"])
    self.strtime = stock.epoch2str(self.poctime, "%D %H:%M:%S %Z").strip()

    self.valid = True

    # Maybe we have some extra data...
    self.pocc2 = decoded.pf["pocc2"] if "pocc2" in present else {}
def orb_start(orbname, select=None, data_dir_prefix=None, reject=None,
              after=-1, timeout=-1, queuesize=100):
    """Start and read an orb reap thread until signaled.

    Reaps packets from *orbname* via a threaded reader and writes each
    parsed packet as JSON to ``<data_dir_prefix>/<srcname>/pkt_<id>.json``.

    :param orbname: orb server to connect to
    :param select: source-name select expression for the reap thread
    :param data_dir_prefix: output root directory (default /tmp/scion-data)
    :param reject: source-name reject expression
    :param after: reap-after position passed to OrbreapThr
    :param timeout: reap timeout passed to OrbreapThr
    :param queuesize: internal queue size of the reap thread

    Fix: the original used Python 2 ``print`` statements, which are syntax
    errors under Python 3 (the rest of this file uses ``print()``).
    """
    data_dir_prefix = data_dir_prefix or "/tmp/scion-data"

    # Handle break signal: flip the module-level "done" flag so the reap
    # loop below exits cleanly on SIGTERM.
    def sig_handler(signum, frame):
        print('Orb reap got signal', signum)
        global done
        done = True

    signal.signal(signal.SIGTERM, sig_handler)

    print('Entering orb reap...')
    with brttpkt.OrbreapThr(orbname, select=select, reject=reject, after=after,
                            timeout=timeout, queuesize=queuesize) as orbth:
        while not done:
            try:
                pktid, srcname, pkttime, packet = orbth.get()
            except brttpkt.Timeout:
                # No packet within the timeout window; just poll again.
                pass
            except brttpkt.NoData:
                print('No source data in orb...')
            else:
                orbpkt = Pkt.Packet(srcname, pkttime, packet)
                pkt_data = parse_packet(orbpkt)
                # One directory per source; '/' is not legal inside a
                # single path component, so replace it.
                data_dir = data_dir_prefix + '/%s' % srcname.replace('/', '-')
                if not os.path.isdir(data_dir):
                    os.makedirs(data_dir)
                fpath = data_dir + '/pkt_%i.json' % pktid
                with open(fpath, 'w') as f:
                    json.dump(pkt_data, f)
def _clean(self):
    """Restore every attribute to its empty default so the packet object
    can be reused without reallocating."""
    # Scalar bookkeeping fields all reset to False.
    for attr in ("id", "orbid", "seqNo", "logSeqNo", "time", "datetime",
                 "strtime", "valid", "sn", "q330", "dlname", "imei", "src"):
        setattr(self, attr, False)

    # Name fields reset to empty strings.
    self.srcname = ""
    self.name = ""

    # Container fields reset to fresh, empty dicts.
    self.valueMap = dict()
    self.rawpkt = dict()
    self.payload = dict()
    self.pcktbuf = dict()

    # Rebuild the reusable Antelope packet buffer.
    self.pkt = Pkt.Packet()
    self.pkt.type_suffix = "pf"
def _get_orb_sta_inp(self, name):
    """Pull the newest pf/st packet per source from orb *name* and cache
    each station's ``inp`` value under cache[snet][sta]['orbcomms']."""
    self.logger.debug("Check ORB(%s) sources" % name)
    pkt = Pkt.Packet()
    self.logger.debug("get pf/st packets from orb(%s)" % name)
    # Clear any reject filter, then restrict to pf/st status packets only.
    self.orbs[name]["orb"].reject("")
    self.orbs[name]["orb"].select(".*/pf/st")
    # get pf/st packet sources
    sources = self.orbs[name]["orb"].sources()
    self.logger.debug(sources)
    # Make list of all valid packet names
    valid_packets = []
    for r in sources:
        # sources() mixes a float timestamp in with the source lists;
        # skip the timestamp entries.
        if isinstance(r, float):
            continue
        for stash in r:
            srcname = stash["srcname"]
            pkt.srcname = Pkt.SrcName(srcname)
            self.logger.debug("sources => %s" % srcname)
            valid_packets.append(srcname)
    # loop over each source
    for pckname in valid_packets:
        # get pf/st packets
        self.logger.debug("get %s packets from orb(%s)" % (pckname, name))
        self.orbs[name]["orb"].select(pckname)
        # Retry the ORBNEWEST fetch until we see a positive packet id,
        # giving up after ~10 attempts.
        attempts = 0
        while True:
            attempts += 1
            self.logger.debug(
                "get ORBNEWEST packet from orb(%s) for %s" % (name, pckname)
            )
            pktid, srcname, pkttime, pktbuf = self.orbs[name]["orb"].get(
                orb.ORBNEWEST
            )
            self.logger.debug("pktid(%s)" % pktid)
            # Verify pckt id
            if int(float(pktid)) > 0:
                break
            if attempts > 10:
                break
        # Don't have anything useful here
        if attempts > 10:
            continue
        # Try to extract name of packet. Default to the orb provided name.
        pkt = Pkt.Packet(srcname, pkttime, pktbuf)
        srcname = pkt.srcname if pkt.srcname else srcname
        self.logger.debug("srcname: %s" % srcname)
        # .keys() call sidesteps Antelope ParameterFile "in" quirks.
        if "dls" in pkt.pf.keys():
            for netsta in pkt.pf["dls"]:
                self.logger.debug("Packet: extract: %s" % netsta)
                # dls keys look like "NET_STA"; split into net and station.
                try:
                    temp = netsta.split("_")
                    snet = temp[0]
                    sta = temp[1]
                except Exception:
                    self.logger.debug("ERROR ON PF/ST parse: netsta=[%s] " % netsta)
                    continue
                self._verify_cache(snet, sta, "orbcomms", primary=True)
                if "inp" not in pkt.pf["dls"][netsta]:
                    self.logger.debug("NO inp value in pkt: %s" % pckname)
                    continue
                self.cache[snet][sta]["orbcomms"] = {
                    "id": pktid,
                    "name": pckname,
                    "time": pkttime,
                    "inp": pkt.pf["dls"][netsta]["inp"],
                }
def _get_orb_sta_latency(self, name):
    """Get client and source orb latencies."""
    self.logger.debug("Check ORB(%s) sources" % name)
    pkt = Pkt.Packet()
    # Apply the configured select and reject filters before querying.
    self.orbs[name]["orb"].select(self.orb_select)
    self.orbs[name]["orb"].reject(".*/pf.*|.*/log|/db/.*|.*/MSTC")
    self.orbs[name]["info"]["status"] = "online"
    self.orbs[name]["info"]["last_check"] = stock.now()
    # get clients
    self.logger.debug("get clients orb(%s)" % name)
    result = self.orbs[name]["orb"].clients()
    for r in result:
        # clients() yields a float query time alongside the client list.
        if isinstance(r, float):
            self.orbs[name]["info"]["clients_time"] = r
            self.logger.debug("orb(%s) client time %s" % (name, r))
        else:
            self.orbs[name]["clients"] = r
    # get sources
    self.logger.debug("get sources orb(%s)" % name)
    result = self.orbs[name]["orb"].sources()
    for r in result:
        # Verify if this is a valid field or just the reported time
        if isinstance(r, float):
            self.orbs[name]["info"]["sources_time"] = r
            self.logger.debug("orb(%s) sources time %s" % (name, r))
        else:
            for stash in r:
                srcname = stash["srcname"]
                # Parse net/sta out of the source name via Pkt.SrcName.
                pkt.srcname = Pkt.SrcName(srcname)
                snet = pkt.srcname.net
                sta = pkt.srcname.sta
                # Not sure if this will ever occur
                if not snet or not sta:
                    continue
                self.logger.debug("orb(%s) update %s %s" % (name, snet, sta))
                self._verify_cache(snet, sta, "orb", primary=True)
                # Record latest packet time for this source on this station.
                self.cache[snet][sta]["orb"][srcname] = parse_sta_time(
                    stash["slatest_time"]
                )
                if "lastpacket" not in self.cache[snet][sta]:
                    self.cache[snet][sta]["lastpacket"] = 0
                # Track the newest packet time across all of the
                # station's sources.
                if (
                    self.cache[snet][sta]["lastpacket"]
                    < self.cache[snet][sta]["orb"][srcname]
                ):
                    self.cache[snet][sta]["lastpacket"] = self.cache[snet][sta][
                        "orb"
                    ][srcname]
def new(self, rawpkt):
    """Create a new Packet object.

    This works a little differently than most python objects because the
    underlying Antelope C code is leaky. The recommended way to manipulate
    packets is to create a packet buffer in memory and continually
    write/rewrite to that, rather than reallocate new memory from the heap.

    Caveats:
        This whole class is not at all Pythonic, and really a gigantic hack.

    rawpkt is expected to be an orb reap tuple:
    (id, srcname, time, payload) -- TODO confirm against callers.
    """
    # Reject packets with a missing or non-positive id up front.
    if not rawpkt[0] or int(float(rawpkt[0])) < 1:
        self.logger.info("Bad Packet: %s %s %s" % (rawpkt[0], rawpkt[1], rawpkt[2]))
        return
    # Reset the reusable buffer before filling it with this packet.
    self._clean()
    self.rawpkt = rawpkt
    self.logger.debug(rawpkt)
    self.id = rawpkt[0]
    self.time = float(rawpkt[2])
    self.strtime = stock.epoch2str(self.time, "%D %H:%M:%S %Z").strip()
    # Try to extract information from packet
    pkt = Pkt.Packet(rawpkt[1], rawpkt[2], rawpkt[3])
    self.srcname = pkt.srcname if pkt.srcname else rawpkt[1]
    self.logger.info("%s %s %s" % (self.id, self.time, self.strtime))
    # self.logger.debug( pkt.pf )
    # Antelope 5.7 stock.ParameterFile.__getitem__ doesn't like the "foo in
    # bar" format.
    # Just try retrieving the value and catch whatever exception we get.
    # Antelope throws warnings if the key isn't found. We don't care.
    # https://stackoverflow.com/questions/14463277/how-to-disable-python-warnings
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        try:
            # A "dls" block marks the packet as carrying useful data.
            self.dls = pkt.pf["dls"]
            self.valid = True
        except (KeyError, TypeError):
            self.dls = {}
            self.valid = False
    if self.valid:
        # Optional extras: imei and q330 serial, when present.
        try:
            self.imei = pkt.pf["imei"]
            self.logger.info("Found imei: %s" % (pkt.pf["imei"]))
        except KeyError:
            pass
        try:
            self.q330 = pkt.pf["q330"]
            self.logger.info("Found q330: %s" % (pkt.pf["q330"]))
        except KeyError:
            pass
def main():
    """Reap packets from an orb and publish each channel's data to AMQP.

    Relies on module-level configuration defined elsewhere in this file:
    ``orbname``, ``verbose`` and ``debug``.

    Fixes: the per-channel debug prints concatenated str with int/float
    values (e.g. ``"i: " + i``), which raises TypeError; they now use
    %-formatting. The local ``type`` (shadowed builtin) and ``time``
    (shadowed stdlib module) were renamed.
    """
    orbfd = orb.orbopen(orbname, "r")
    version = orbfd.ping()

    print("Connecting to ORB: %s" % orbname)
    print("ORB version: %s" % version)

    # Previous broker endpoints kept for reference:
    # conn = amqp.Connection(host="amoeba.ucsd.edu", userid="sqlguest", password="******", virtual_host="/sqlstream", insist=True)
    # conn = amqp.Connection(host="vista.ucsd.edu", userid="guest", password="******", virtual_host="/", insist=True)
    # conn = amqp.Connection(host="ec2-50-18-18-199.us-west-1.compute.amazonaws.com", userid="guest", password="******", virtual_host="/", insist=True)
    conn = amqp.Connection(
        host="ec2-50-18-66-48.us-west-1.compute.amazonaws.com",
        userid="guest",
        password="******",
        virtual_host="/",
        insist=True,
    )

    # amqp_chan_1 = conn.channel()
    # amqp_chan_2 = conn.channel()
    amqp_chan_3 = conn.channel()

    print("Start export of pkts... ")
    try:
        while True:
            (pktid, srcname, pkttime, packet, nbytes) = orb.orbreap(orbfd)
            if verbose:
                print("%s %s" % (pktid, srcname))
            if debug:
                print("pktid:%s srcname:%s time:%s packet:<...> nbytes:%s" % (
                    pktid,
                    srcname,
                    pkttime,
                    nbytes,
                ))

            (net, sta, chan, loc, suffix, subcode) = orb.split_srcname(srcname)
            if debug:
                print("(%s, %s, %s, %s, %s, %s) = split_srcname(%s)" % (
                    net,
                    sta,
                    chan,
                    loc,
                    suffix,
                    subcode,
                    srcname,
                ))

            (pkttype, pkt) = Pkt.unstuffPkt(srcname, pkttime, packet, nbytes)
            if debug:
                print("(%s,%s) = unstuffPkt(%s, %s, <...>, %s)" % (
                    pkttype,
                    pkt,
                    srcname,
                    pkttime,
                    nbytes,
                ))

            for i in range(pkt.nchannels()):
                pktchannel = pkt.channels(i)
                if debug:
                    print("\n")
                    # %-formatting: several of these values are numeric,
                    # so "str + value" concatenation would raise TypeError.
                    print("i: %s" % i)
                    print("time: %s" % pktchannel.time())
                    print("net: %s" % pktchannel.net())
                    print("sta: %s" % pktchannel.sta())
                    print("chan: %s" % pktchannel.chan())
                    print("loc: %s" % pktchannel.loc())
                    print("nsamp: %s" % pktchannel.nsamp())
                    print("samprate: %s" % pktchannel.samprate())
                    print("calib: %s" % pktchannel.calib())
                    print("calper: %s" % pktchannel.calper())
                    print("segtype: %s" % pktchannel.segtype())

                sps = str(pktchannel.samprate())
                s_type = str(pktchannel.segtype())
                s_time = str(pkttime)
                s_data = str(pktchannel.data())

                # Routing key is net.sta.chan, plus .loc when present.
                r_key = (pktchannel.net() + "." + pktchannel.sta() + "." +
                         pktchannel.chan())
                if pktchannel.loc():
                    r_key = r_key + "." + pktchannel.loc()

                text = {
                    "pktid": pktid,
                    "time": s_time,
                    "name": r_key,
                    "samprate": sps,
                    "segtype": s_type,
                    "nbytes": nbytes,
                }

                if debug:
                    print("type: " + s_type)
                    print("sps: " + sps)
                    print("time: " + s_time)
                    print("data: " + s_data)
                    print("Routing Key => [%s]" % r_key)
                    print("Header => [%s]" % str(text))

                # Alternate message encodings kept for reference:
                # new_bin_data = binascii.a2b_qp(s_data)
                # packet_string = orbpkt_string( srcname, pkttime, packet, nbytes )
                # msg1 = amqp.Message(packet_string, application_headers=text, content_encoding="text", content_type='text/plain')
                # msg2 = amqp.Message(packet, application_headers=text, content_encoding="binary", content_type='application/octet-stream')
                msg3 = amqp.Message(
                    s_data,
                    application_headers=text,
                    content_encoding="text",
                    content_type="text/plain",
                )

                # Make msgs persist after server restart
                # msg1.properties["delivery_mode"]=2
                # msg2.properties["delivery_mode"]=2

                # amqp_chan_1.basic_publish(msg1, exchange='HEX_DATA')
                # amqp_chan_2.basic_publish(msg2, exchange='BIN_DATA')
                amqp_chan_3.basic_publish(msg3, routing_key=r_key,
                                          mandatory=True,
                                          exchange="magnet.topic")
    except KeyboardInterrupt:
        # amqp_chan_1.close()
        # amqp_chan_2.close()
        amqp_chan_3.close()
        conn.close()
        orb.orbclose(orbfd)
def new(self, rawpkt, name_type="pf/xi", select=False, reject=False, silent=False):
    """Generate a new packet.

    rawpkt appears to be a MongoDB-style document (keys like "_id",
    "messageLogSeqNo", "timestamp", "valueMap") -- TODO confirm against
    the caller.
    """
    self.logging.debug("new packet")
    if not rawpkt["_id"]:
        if not silent:
            self.logging.warning("Bad Packet: %s" % rawpkt)
        return
    # Reset the reusable buffer before filling it with this packet.
    self._clean()
    self.name_type = name_type
    # Disable _convert_unicod - it seems to have the opposite affect under
    # python 3 or the newer pymongo library.
    # self.rawpkt = self._convert_unicode(rawpkt)
    self.rawpkt = rawpkt
    # Apply configured source-type filters (regex) before parsing.
    if reject and re.search(reject, self.rawpkt["srcType"]):
        self.logging.debug("type [%s] rejected by configuration" % self.rawpkt["srcType"])
        return
    if select and not re.search(select, self.rawpkt["srcType"]):
        self.logging.debug("type [%s] missed selection by configuration" % self.rawpkt["srcType"])
        return
    self.logging.debug(self.rawpkt)
    # Track IDs
    self.logSeqNo = self.rawpkt["messageLogSeqNo"]
    self.seqNo = self.rawpkt["seqNo"]
    self.id = "%s.%s" % (self.logSeqNo, self.seqNo)
    # Date object
    self.datetime = self.rawpkt["timestamp"]
    # Epoch string
    self.time = (self.datetime - datetime.datetime(1970, 1, 1)).total_seconds()
    # Time string
    self.strtime = stock.epoch2str(self.time, "%D %H:%M:%S %Z", tz="UTC").strip()
    self.q330 = self.rawpkt["q330Sn"]
    self.imei = self.rawpkt["deviceIMEI"]
    self.src = self.rawpkt["srcType"]
    self.srcname = self.src.lower()
    # If the IMEI/serial pair is rejected by the buffer, fall back to the
    # cached serial for this IMEI; skip the packet if none is known.
    if not self.imei_buffer.add(imei=self.imei, serial=self.q330):
        self.logging.warning("Invalid Q330 serial [%s] for IMEI [%s]" % (self.q330, self.imei))
        self.q330 = self.imei_buffer(self.imei)
        if not self.q330:
            if not silent:
                self.logging.warning(
                    "UNKNOWN IMEI [%s]: SKIP DATA PACKET!!!"
                    % self.imei)
            return
        else:
            if not silent:
                self.logging.warning(
                    "USING CACHED Q330 SERIAL [%s] FOR IMEI [%s]"
                    % (self.q330, self.imei))
    # Resolve the datalogger name from the serial via the configured
    # serial->dlname mapping functions.
    for test in self.q330_serial_dlname:
        if test(self.q330):
            self.dlname = test(self.q330)
            self.logging.debug("%s => %s" % (self.q330, self.dlname))
    if not self.dlname:
        if not silent:
            self.logging.warning("NO DLNAME FOR Q330 SERIAL: %s " % self.q330)
        return
    self.logging.debug(self.src)
    self.logging.debug(self.valueMap)
    # Verify if we have data pairs
    if "valueMap" in self.rawpkt:
        self.valueMap = self.rawpkt["valueMap"]
        # Extract each value to a new key:value on the dict
        for chan in self.valueMap:
            if chan in self.channel_mapping:
                # A falsy mapping entry means "drop this channel".
                if not self.channel_mapping[chan]:
                    continue
                self.payload[
                    self.channel_mapping[chan]] = self.valueMap[chan]
                self.logging.debug("%s -> %s:%s" % (chan,
                                                    self.channel_mapping[chan],
                                                    self.valueMap[chan]))
            else:
                self.logging.warning("[%s] NOT DEFINED IN PF FILE" % chan)
    # Normalize the xil1q/xil2q values by the sample count when present.
    for test in ["xil1q", "xil2q"]:
        if test in self.payload and "xisamp" in self.payload:
            try:
                self.payload[test] = (float(self.payload[test]) /
                                      self.payload["xisamp"])
            except Exception:
                pass
    self.pcktbuf = {
        "dls": {
            self.dlname: self.payload
        },
        "q330": self.q330,
        "imei": self.imei,
        "src": self.src,
        "srcname": self.srcname,
    }
    self.logging.debug(self.payload)
    # Try to build packet from info
    if self.name_type and self.time and self.payload:
        self.pkt.srcname = Pkt.SrcName("%s/%s" % (self.dlname, self.name_type))
        self.pkt.time = self.time
        # self.logging.debug( self.pkt.type )
        # self.logging.debug( self.pkt.srcname )
        # Extract pf structure, update it and return it.
        temp = self.pkt.pf
        temp.update(self.pcktbuf)
        self.pkt.pf = temp
        self.logging.debug(self.pkt.type)
        self.logging.debug(self.pkt.srcname)
        self.logging.debug("Pkt( %s, %s) => {%s}" % (
            self.pkt.srcname,
            self.pkt.time,
            self.pkt.pf.pf2string().replace("\n", ", ").replace("\t", ":"),
        ))
        self.valid = True
    else:
        self.logging.warning(
            "NO VALUABLE INFORMATION IN PACKET. dlname:%s time:%s"
            % (self.dlname, self.time))
        return
    self.logging.info(str(self))