def process(self, data, addr, port):
    self.metrics_q.put({'type': 'packets', 'count': 1})
    header = decoding.header._make(struct.unpack("!cBHI", data[:8]))  # XrdXrootdMonHeader
    if len(data) != header.plen:
        self.logger.error(
            "Packet Length incorrect: expected={}, got={}".format(
                header.plen, len(data)))

    self.logger.debug(header)
    data = data[8:]

    sid = str(header.server_start) + "#" + str(addr) + "#" + str(port)
    str_header_code = header.code.decode('utf-8')

    # Track packet sequence numbers per SID and per stream code.
    if sid not in self.seq_data:
        self.seq_data[sid] = {}
        self.logger.debug("New SID found. sid=%s, addr=%s", str(sid), addr)
    if str_header_code not in self.seq_data[sid]:
        self.seq_data[sid][str_header_code] = header.pseq
        expected_seq = header.pseq
    else:
        # What is the last seq number we got?
        last_seq = self.seq_data[sid][str_header_code]
        expected_seq = last_seq + 1
        if expected_seq == 256:
            expected_seq = 0
    if expected_seq != header.pseq:
        if header.pseq < expected_seq:
            # Handle the roll-over of the one-byte sequence counter.
            missed_packets = (header.pseq + 255) - expected_seq
        else:
            missed_packets = abs(header.pseq - expected_seq)
        hostname = addr
        try:
            hostname = socket.gethostbyaddr(addr)[0]
        except OSError:
            # Reverse lookup failed; fall back to the raw address.
            pass
        # Distinguish genuine losses from re-ordering: a gap close to the
        # full 256-packet window means an old packet arrived late.
        if missed_packets < 253:
            self.metrics_q.put({
                'type': 'missing packets',
                'count': missed_packets,
                'addr': hostname
            })
        else:
            self.metrics_q.put({
                'type': 'reordered packets',
                'count': 1,
                'addr': hostname
            })
    self.seq_data[sid][str_header_code] = header.pseq

    if header.code == b'f':
        time_record = decoding.MonFile(data)  # first one is always TOD
        self.logger.debug(time_record)
        data = data[time_record.recSize:]
        self.logger.debug("Size of seq_data: %i, Number of sub-records: %i",
                          len(self.seq_data), time_record.total_recs)
        now = time.time()
        for idx in range(time_record.total_recs):
            hd = decoding.MonFile(data)
            data = data[hd.recSize:]
            self.logger.debug(str(hd))
            if isinstance(hd, decoding.fileDisc):
                try:
                    userInfo = self._dictid_map[sid][hd.userID]
                    # _users is keyed by SID first, then by user info.
                    del self._users[sid][userInfo]
                    del self._dictid_map[sid][hd.userID]
                except KeyError:
                    self.logger.error(
                        'Disconnect event for unknown UserID=%i with SID=%s',
                        hd.userID, sid)
            elif isinstance(hd, decoding.fileOpen):
                transfer_key = str(sid) + "." + str(hd.fileID)
                self.logger.debug('%i %s', idx, hd)
                self._transfers[transfer_key] = ((time_record.tBeg, addr), hd)
            elif isinstance(hd, decoding.fileClose):
                transfer_key = str(sid) + "." + str(hd.fileID)
                if transfer_key in self._transfers:
                    userId = self._transfers[transfer_key][1].userID
                    openTime = self._transfers[transfer_key][0][0]
                    rec = self.addRecord(sid, userId, hd, time_record.tBeg,
                                         addr, openTime)
                    self.logger.debug("Record to send: %s", str(rec))
                    del self._transfers[transfer_key]
                    self.logger.debug('%i %s', idx, hd)
                else:
                    rec = self.addRecord(sid, 0, hd, time_record.tBeg, addr,
                                         time_record.tBeg)
                    self.logger.error(
                        "file to close not found. fileID: %i, serverID: %s. close=%s",
                        hd.fileID, sid, str(hd))
            elif isinstance(hd, decoding.fileXfr):
                transfer_key = str(sid) + "." + str(hd.fileID)
                if transfer_key in self._transfers:
                    cur_value = self._transfers[transfer_key]
                    self._transfers[transfer_key] = ((now, cur_value[0][1]),
                                                     cur_value[1], hd)
                    self.logger.debug("f-stream index=%i Known xfrInfo: %s. sid=%s",
                                      idx, str(hd), sid)
                else:
                    self.logger.debug("f-stream index=%i Unknown xfrInfo: %s. sid=%s",
                                      idx, str(hd), sid)
        if data:
            self.logger.error("Bytes leftover! %i bytes left!", len(data))
    elif header.code == b'r':
        self.logger.debug("r - redirect stream message.")
    elif header.code == b't':
        # t-stream messages are ignored here.
        # self.logger.warning("t - stream message. Server at %s should remove 'files', "
        #                     "'io', and 'iov' directives from the monitoring "
        #                     "configuration.", addr)
        pass
    elif header.code == b'g':
        # The rest of the message is the gstream event.
        self.logger.debug("Received gstream message")
        decoded_gstream = decoding.gStream(data)
        self.metrics_q.put({'type': 'gstream_message', 'count': 1})
        # We only care about the top 8 bits of the ident, which are a character.
        stream_type = chr(decoded_gstream.ident >> 56)
        if stream_type == "T":
            self.process_tcp(decoded_gstream, addr)
        elif stream_type == "C":
            # Process the gstream.
            self.process_gstream(decoded_gstream, addr, sid)
    else:
        infolen = len(data) - 4
        mm = decoding.mapheader._make(
            struct.unpack("!I" + str(infolen) + "s", data))
        try:
            userRec, rest = mm.info.split(b'\n', 1)
        except ValueError:
            if header.code != b'u':
                self.logger.exception(
                    "Strange >>%s<< mapping message from %s mm: %s",
                    header.code, addr, mm)
            userRec = mm.info
            rest = b''
        userInfo = decoding.userInfo(userRec)
        self.logger.debug('%i %s', mm.dictID, userInfo)
        if header.code == b'=':
            serverInfo = decoding.serverInfo(rest, addr)
            if sid not in self._servers:
                self._servers[sid] = serverInfo
                self.logger.info('Adding new server info: %s started at %i',
                                 serverInfo, header.server_start)
        elif header.code == b'd':
            path = rest
            if sid not in self._dictid_map:
                self._dictid_map[sid] = ttldict.TTLOrderedDict(
                    default_ttl=DEFAULT_TTL)
            pathInfo = decoding.pathinfo(userInfo, path)
            self.logger.debug("Adding new pathinfo: %s", pathInfo)
            self._dictid_map[sid][mm.dictID] = pathInfo
        elif header.code == b'i':
            appinfo = rest
            if sid not in self._users:
                self._users[sid] = ttldict.TTLOrderedDict(
                    default_ttl=DEFAULT_TTL)
            if sid not in self._dictid_map:
                self._dictid_map[sid] = ttldict.TTLOrderedDict(
                    default_ttl=DEFAULT_TTL)
            self._dictid_map[sid][mm.dictID] = userInfo
            # Check if userInfo is already present in _users.
            if userInfo not in self._users[sid]:
                self._users[sid][userInfo] = {}
            self._users[sid][userInfo]['appinfo'] = rest
            self.logger.info('appinfo:%s', appinfo)
        elif header.code == b'p':
            purgeInfo = decoding.purgeInfo(rest)
            self.logger.info('purgeInfo:%s', purgeInfo)
        elif header.code == b'u':
            authorizationInfo = decoding.authorizationInfo(rest)
            if authorizationInfo.inetv != b'':
                self.logger.debug("Inet version detected to be %s",
                                  authorizationInfo.inetv)
            # New server seen.
            if sid not in self._users:
                self._users[sid] = ttldict.TTLOrderedDict(
                    default_ttl=DEFAULT_TTL)
            # Add the dictid to the userinfo map.
            if sid not in self._dictid_map:
                self._dictid_map[sid] = ttldict.TTLOrderedDict(
                    default_ttl=DEFAULT_TTL)
            self._dictid_map[sid][mm.dictID] = userInfo
            if userInfo not in self._users[sid]:
                # New user signed in.
                self._users[sid][userInfo] = {
                    'userinfo': userInfo,
                    'authinfo': authorizationInfo
                }
                self.logger.debug("Adding new user: %s", authorizationInfo)
            elif self._users[sid][userInfo] is None:
                # Seen the user, but not the auth info yet.
                self.logger.warning(
                    "Received a user ID (%i) from sid %s after corresponding "
                    "f-stream usage information.", mm.dictID, sid)
                self._users[sid][userInfo] = {
                    'userinfo': userInfo,
                    'authinfo': authorizationInfo
                }
            elif self._users[sid][userInfo]:
                # Seen the user and already recorded the auth info.
                self._users[sid][userInfo].update({
                    'userinfo': userInfo,
                    'authinfo': authorizationInfo
                })
                self.logger.error(
                    "Received a repeated userID; SID: %s and UserID: %s (%s).",
                    sid, mm.dictID, userInfo)
        elif header.code == b'x':
            # xfrInfo is decoded but not (yet) used to update the transfer table.
            decoding.xfrInfo(rest)

    # Check whether we have to flush the in-memory data structures.
    now_time = time.time()
    if (now_time - self.last_flush) > (60 * 5):
        self.logger.debug("Flushing data structures")

        # Flush the dictid mapping.
        dictid_count = 0
        dictid_removed = 0
        for sid in self._dictid_map:
            removed = self._dictid_map[sid].purge()
            dictid_removed += removed
            dictid_count += len(self._dictid_map[sid])
        self.logger.debug("Removed {} items from DictID Mapping".format(dictid_removed))
        self.metrics_q.put({
            'type': 'hash size',
            'count': dictid_count,
            'hash name': 'dictid'
        })
        self.logger.debug("Size of dictid map: %i", dictid_count)

        # Flush the users data structure.
        users_count = 0
        users_removed = 0
        for sid in self._users:
            removed = self._users[sid].purge()
            users_removed += removed
            users_count += len(self._users[sid])
        self.logger.debug("Removed {} items from Users".format(users_removed))
        self.metrics_q.put({
            'type': 'hash size',
            'count': users_count,
            'hash name': 'users'
        })
        self.logger.debug("Size of users map: %i", users_count)

        # Have to make a copy of .keys() because we 'del' as we go.
        for key in list(self._transfers.keys()):
            cur_value = self._transfers[key]
            # TODO: since we don't update based on the xfr info, we don't
            # track progress or bump the timestamps... that needs to be done.
            if (now_time - cur_value[0][0]) > DEFAULT_TTL:
                if len(cur_value) == 3:
                    sid = key.rsplit(".", 1)[0]
                    userId = cur_value[1].userID
                    addr = cur_value[0][1]
                    openTime = cur_value[0][0]
                    rec = self.addRecord(sid, userId, cur_value[2], now_time,
                                         addr, openTime)
                del self._transfers[key]
        self.logger.debug("Size of transfers map: %i", len(self._transfers))
        self.metrics_q.put({
            'type': 'hash size',
            'count': len(self._transfers),
            'hash name': 'transfers'
        })

        self.last_flush = now_time
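# --- Illustrative sketch (not part of the collector) ------------------------
# A minimal, self-contained demonstration of the 8-byte XrdXrootdMonHeader
# layout that process() unpacks above. The namedtuple below is a stand-in for
# decoding.header; its field names (code, pseq, plen, server_start) are
# inferred from how they are used in process() and may differ from the real
# decoding module.
import struct
from collections import namedtuple

_MonHeader = namedtuple("_MonHeader", ["code", "pseq", "plen", "server_start"])


def _demo_header_roundtrip():
    # Network byte order ("!"): 1-byte record code, 1-byte packet sequence
    # number, 2-byte total packet length, 4-byte server start timestamp.
    raw = struct.pack("!cBHI", b'f', 42, 8, 1457990510)
    hdr = _MonHeader._make(struct.unpack("!cBHI", raw[:8]))
    assert hdr == _MonHeader(b'f', 42, 8, 1457990510)
    return hdr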
def process(self, data, addr, port):
    # Note: this dict is local, so it is reset on every call; the newer
    # implementation above keeps it on the instance as self.seq_data.
    seq_data = {}
    header = decoding.header._make(struct.unpack("!cBHI", data[:8]))  # XrdXrootdMonHeader
    if len(data) != header.plen:
        self.logger.error("Packet Length incorrect: expected={}, got={}".format(
            header.plen, len(data)))

    self.logger.debug(header)
    data = data[8:]

    sid = str(header.server_start) + "#" + str(addr) + "#" + str(port)
    if header.code == b'f':
        time_record = decoding.MonFile(data)  # first one is always TOD
        self.logger.debug(time_record)
        data = data[time_record.recSize:]
        if sid not in seq_data:
            seq_data[sid] = header.pseq
            self.logger.debug("New SID found. sid=%s, addr=%s", str(sid), addr)
        else:
            # What is the last seq number we got?
            last_seq = seq_data[sid]
            expected_seq = last_seq + 1
            if expected_seq == 256:
                expected_seq = 0
            if expected_seq != header.pseq:
                missed_packets = abs(header.pseq - expected_seq)
                self.logger.error("Missed packet(s)! Expected seq=%s, got=%s. "
                                  "Missed %s packets! from %s", expected_seq,
                                  header.pseq, missed_packets, addr)
            seq_data[sid] = header.pseq
        self.logger.debug("Size of seq_data: %i, Number of sub-records: %i",
                          len(seq_data), time_record.total_recs)
        now = time.time()
        for idx in range(time_record.total_recs):
            hd = decoding.MonFile(data)
            data = data[hd.recSize:]
            if isinstance(hd, decoding.fileDisc):
                try:
                    user_info = self._users.setdefault(sid, {})
                    del user_info[hd.userID]
                except KeyError:
                    self.logger.error('Disconnect event for unknown UserID=%i with SID=%s',
                                      hd.userID, sid)
            elif isinstance(hd, decoding.fileOpen):
                transfer_key = str(sid) + "." + str(hd.fileID)
                self.logger.debug('%i %s', idx, hd)
                self._transfers[transfer_key] = ((now, addr), hd)
            elif isinstance(hd, decoding.fileClose):
                transfer_key = str(sid) + "." + str(hd.fileID)
                if transfer_key in self._transfers:
                    userId = self._transfers[transfer_key][1].userID
                    rec = self.addRecord(sid, userId, hd, time_record.tEnd, addr)
                    self.logger.debug("Record to send: %s", str(rec))
                    del self._transfers[transfer_key]
                    self.logger.debug('%i %s', idx, hd)
                else:
                    rec = self.addRecord(sid, 0, hd, time_record.tEnd, addr)
                    self.logger.error("file to close not found. fileID: %i, serverID: %s. close=%s",
                                      hd.fileID, sid, str(hd))
            elif isinstance(hd, decoding.fileXfr):
                transfer_key = str(sid) + "." + str(hd.fileID)
                if transfer_key in self._transfers:
                    cur_value = self._transfers[transfer_key]
                    self._transfers[transfer_key] = ((now, cur_value[0][1]),
                                                     cur_value[1], hd)
                    self.logger.debug("f-stream index=%i Known xfrInfo: %s. sid=%s",
                                      idx, str(hd), sid)
                else:
                    self.logger.debug("f-stream index=%i Unknown xfrInfo: %s. sid=%s",
                                      idx, str(hd), sid)
        if data:
            self.logger.error("Bytes leftover! %i bytes left!", len(data))
    elif header.code == b'r':
        self.logger.debug("r - redirect stream message.")
    elif header.code == b't':
        self.logger.warning("t - stream message. Server at %s should remove 'files', 'io', and "
                            "'iov' directives from the monitoring configuration.", addr)
    else:
        infolen = len(data) - 4
        mm = decoding.mapheader._make(struct.unpack("!I" + str(infolen) + "s", data))
        try:
            userRec, rest = mm.info.split(b'\n', 1)
        except ValueError:
            if header.code != b'u':
                self.logger.exception("Strange >>%s<< mapping message from %s mm: %s",
                                      header.code, addr, mm)
            userRec = mm.info
            rest = b''
        userInfo = decoding.userInfo(userRec)
        self.logger.debug('%i %s', mm.dictID, userInfo)
        if header.code == b'=':
            serverInfo = decoding.serverInfo(rest, addr)
            if sid not in self._servers:
                self._servers[sid] = serverInfo
                self.logger.info('Adding new server info: %s started at %i',
                                 serverInfo, header.server_start)
        elif header.code == b'd':
            path = rest
            self.logger.warning('Path information sent (%s). Server at %s should remove "files" '
                                'directive from the monitoring configuration.', path, addr)
        elif header.code == b'i':
            appinfo = rest
            self.logger.info('appinfo:%s', appinfo)
        elif header.code == b'p':
            purgeInfo = decoding.purgeInfo(rest)
            self.logger.info('purgeInfo:%s', purgeInfo)
        elif header.code == b'u':
            authorizationInfo = decoding.authorizationInfo(rest)
            if authorizationInfo.inetv != b'':
                self.logger.debug("Inet version detected to be %s",
                                  authorizationInfo.inetv)
            if sid not in self._users:
                self._users[sid] = {}
            if mm.dictID not in self._users[sid]:
                self._users[sid][mm.dictID] = (userInfo, authorizationInfo)
                self.logger.debug("Adding new user: %s", authorizationInfo)
            elif self._users[sid][mm.dictID] is None:
                self.logger.warning("Received a user ID (%i) from sid %s after corresponding "
                                    "f-stream usage information.", mm.dictID, sid)
                self._users[sid][mm.dictID] = (userInfo, authorizationInfo)
            elif self._users[sid][mm.dictID]:
                self.logger.error("Received a repeated userID; SID: %s and UserID: %s (%s).",
                                  sid, mm.dictID, userInfo)
        elif header.code == b'x':
            # xfrInfo is decoded but not (yet) used to update the transfer table.
            decoding.xfrInfo(rest)

    # Check whether we have to flush the transfers table.
    now_time = time.time()
    if (now_time - self.last_flush) > (60 * 5):
        # Have to make a copy of .keys() because we 'del' as we go.
        for key in list(self._transfers.keys()):
            cur_value = self._transfers[key]
            # TODO: since we don't update based on the xfr info, we don't
            # track progress or bump the timestamps... that needs to be done.
            if (now_time - cur_value[0][0]) > (3600 * 5):
                if len(cur_value) == 3:
                    sid = key.rsplit(".", 1)[0]
                    userId = cur_value[1].userID
                    addr = cur_value[0][1]
                    rec = self.addRecord(sid, userId, cur_value[2], now_time, addr)
                del self._transfers[key]
        self.last_flush = now_time
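# --- Illustrative sketch (not part of the collector) ------------------------
# pseq is a single unsigned byte, so the expected sequence number wraps from
# 255 back to 0. This standalone helper mirrors the gap arithmetic used by the
# first process() implementation above, including its roll-over branch; it is
# a sketch for reasoning about the logic, not a drop-in replacement.
def _sequence_gap(expected_seq, got_seq):
    if got_seq < expected_seq:
        # Roll-over case, e.g. expected 255 but received 2.
        return (got_seq + 255) - expected_seq
    return got_seq - expected_seq


# Small gaps are counted as missed packets; a gap close to the full window
# (the code above uses >= 253) is treated as a late, re-ordered packet rather
# than ~255 genuinely lost packets.
assert _sequence_gap(5, 8) == 3    # packets 5, 6 and 7 never arrived
assert _sequence_gap(255, 2) == 2  # counter wrapped past 0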
def eventCreator():
    aLotOfData = []
    while True:
        [d, addr] = q.get()
        h = decoding.header._make(struct.unpack("!cBHI", d[:8]))  # XrdXrootdMonHeader
        logger.debug(h)
        d = d[8:]
        if h.code == 'f':
            TimeRecord = decoding.MonFile(d)  # first one is always TOD
            logger.debug(TimeRecord)
            d = d[TimeRecord.recSize:]
            sid = (h.server_start << 32) + TimeRecord.sid
            for i in range(TimeRecord.total_recs):
                hd = decoding.MonFile(d)
                d = d[hd.recSize:]
                logger.debug('%i %s', i, hd)
                if isinstance(hd, decoding.fileDisc):
                    try:
                        del AllUsers[sid][hd.userID]
                    except KeyError:
                        logger.error('%sUser that disconnected was unknown.%s',
                                     decoding.bcolors.WARNING, decoding.bcolors.ENDC)
                elif isinstance(hd, decoding.fileOpen):
                    if sid not in AllTransfers:
                        AllTransfers[sid] = {}
                    if hd.userID not in AllTransfers[sid]:
                        AllTransfers[sid][hd.userID] = {}
                    AllTransfers[sid][hd.userID][hd.fileID] = hd
                elif isinstance(hd, decoding.fileClose):
                    logger.debug('%i %s', i, hd)
                    if sid in AllTransfers:
                        found = 0
                        for u in AllTransfers[sid]:
                            if hd.fileID in AllTransfers[sid][u]:
                                found = 1
                                rec = addRecord(sid, u, hd, TimeRecord.tEnd)
                                aLotOfData.append(rec)
                                del AllTransfers[sid][u][hd.fileID]
                                if len(AllTransfers[sid][u]) == 0:
                                    del AllTransfers[sid][u]
                                break
                        if not found:
                            logger.error("%sfile to close not found.%s",
                                         decoding.bcolors.WARNING, decoding.bcolors.ENDC)
                    else:
                        logger.error("%sfile closed on server that's not found%s",
                                     decoding.bcolors.WARNING, decoding.bcolors.ENDC)
                        AllTransfers[sid] = {}
        elif h.code == 'r':
            logger.debug("r - stream message.")
        elif h.code == 't':
            logger.warning("t - stream message. Server at %s should remove files, io, iov "
                           "from the monitoring configuration.", addr)
        else:
            infolen = len(d) - 4
            mm = decoding.mapheader._make(struct.unpack("!I" + str(infolen) + "s", d))
            try:
                (u, rest) = mm.info.split('\n', 1)
            except ValueError as e:
                if not h.code == 'u':
                    logger.error('%s', e.args[0])
                    logger.warning("Strange >>%s<< mapping message from %s mm: %s",
                                   h.code, addr, mm)
                u = mm.info
                rest = ''
            userInfo = decoding.userInfo(u)
            logger.debug('%i %s', mm.dictID, userInfo)
            # userInfo.sid has to go in place of the 0 once the new server
            # version is deployed.
            sid = (h.server_start << 32) + 0
            if h.code == '=':
                serverInfo = decoding.serverInfo(rest, addr)
                if sid not in AllServers:
                    AllServers[sid] = serverInfo
                    logger.info('Adding new server info: %s started at %i',
                                serverInfo, h.server_start)
            elif h.code == 'd':
                path = rest
                logger.warning('path information. Server at %s should remove files '
                               'from the monitoring configuration.', addr)
            elif h.code == 'i':
                appinfo = rest
                logger.info('appinfo:%s', appinfo)
            elif h.code == 'p':
                purgeInfo = decoding.purgeInfo(rest)
                logger.info('purgeInfo:%s', purgeInfo)
            elif h.code == 'u':
                authorizationInfo = decoding.authorizationInfo(rest)
                if sid not in AllUsers:
                    AllUsers[sid] = {}
                if mm.dictID not in AllUsers[sid]:
                    AllUsers[sid][mm.dictID] = userInfo  # authorizationInfo
                    logger.debug("Adding new user:%s", authorizationInfo)
                else:
                    logger.warning("%sThere is a problem. We already have this sid: %i "
                                   "and userID:%s (%s).%s", decoding.bcolors.FAIL,
                                   sid, mm.dictID, userInfo, decoding.bcolors.ENDC)
            elif h.code == 'x':
                xfrInfo = decoding.xfrInfo(rest)

        q.task_done()
        if q.qsize() > 200:
            logger.error('Some problem in sending data to ES. Trying to reconnect.')
            RefreshConnection()
        if len(aLotOfData) > 50:
            try:
                res = helpers.bulk(es, aLotOfData, raise_on_exception=True)
                logger.info('%s inserted: %i errors: %s',
                            threading.current_thread().name, res[0], str(res[1]))
                aLotOfData = []
            except es_exceptions.ConnectionError as e:
                logger.error('ConnectionError %s', e)
            except es_exceptions.TransportError as e:
                logger.error('TransportError %s ', e)
            except helpers.BulkIndexError as e:
                logger.error('%s', e.args[0])
                errcount = 0
                for i in e.args[1]:
                    errcount += 1
                    if errcount > 5:
                        break
                    logger.error('%s', i)
            except Exception:
                logger.error('Something serious happened.')
                e = sys.exc_info()[0]
                logger.error(e)
    logger.error('SOMEHOW WENT OUT OF THE LOOP!')
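# --- Illustrative sketch (not part of the collector) ------------------------
# Mapping messages ('=', 'd', 'i', 'p', 'u', 'x') carry a 4-byte dictID
# followed by a variable-length payload of the form b"userRec\nrest". This
# standalone example shows the "!I<n>s" unpack and the newline split performed
# in the map-message branches above; _MapHeader stands in for
# decoding.mapheader, and the payload contents are made up for illustration.
import struct
from collections import namedtuple

_MapHeader = namedtuple("_MapHeader", ["dictID", "info"])


def _demo_map_message():
    payload = b"user.1:23@host.example\nsome/mapped/info"
    body = struct.pack("!I", 17) + payload  # dictID 17 + info blob
    infolen = len(body) - 4
    mm = _MapHeader._make(struct.unpack("!I" + str(infolen) + "s", body))
    user_rec, rest = mm.info.split(b'\n', 1)  # raises ValueError if no '\n'
    assert (mm.dictID, user_rec) == (17, b"user.1:23@host.example")
    return user_rec, rest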
def eventCreator(message_q):
    """
    Helper process target for parsing incoming packets and sending them to
    the message queue.
    """
    last_flush = time.time()
    seq_data = {}
    sys.stdout = LoggerWriter(logger.debug)
    sys.stderr = LoggerWriter(logger.error)
    while True:
        [d, addr, port] = message_q.get()
        h = decoding.header._make(struct.unpack("!cBHI", d[:8]))  # XrdXrootdMonHeader
        if len(d) != h.plen:
            logger.error("Packet Length incorrect: expected=%s, got=%s", h.plen, len(d))

        logger.debug(h)
        d = d[8:]

        sid = str(h.server_start) + "#" + str(addr) + "#" + str(port)
        if h.code == 'f':
            time_record = decoding.MonFile(d)  # first one is always TOD
            logger.debug(time_record)
            d = d[time_record.recSize:]
            if sid not in seq_data:
                seq_data[sid] = h.pseq
                logger.debug("New SID found. sid=%s, addr=%s", str(sid), addr)
            else:
                # What is the last seq number we got?
                last_seq = seq_data[sid]
                expected_seq = last_seq + 1
                if expected_seq == 256:
                    expected_seq = 0
                if expected_seq != h.pseq:
                    missed_packets = abs(h.pseq - expected_seq)
                    logger.error(
                        "Missed packet(s)! Expected seq=%s, got=%s. "
                        "Missed %s packets! from %s",
                        expected_seq, h.pseq, missed_packets, addr)
                seq_data[sid] = h.pseq
            logger.debug("Size of seq_data: %i, Number of sub-records: %i",
                         len(seq_data), time_record.total_recs)
            now = time.time()
            for i in range(time_record.total_recs):
                hd = decoding.MonFile(d)
                d = d[hd.recSize:]
                if isinstance(hd, decoding.fileDisc):
                    try:
                        user_info = AllUsers.setdefault(sid, {})
                        del user_info[hd.userID]
                    except KeyError:
                        logger.error(
                            'Disconnect event for unknown UserID=%i with SID=%s',
                            hd.userID, sid)
                elif isinstance(hd, decoding.fileOpen):
                    transfer_key = str(sid) + "." + str(hd.fileID)
                    logger.debug('%i %s', i, hd)
                    AllTransfers[transfer_key] = ((now, addr), hd)
                elif isinstance(hd, decoding.fileClose):
                    transfer_key = str(sid) + "." + str(hd.fileID)
                    if transfer_key in AllTransfers:
                        u = AllTransfers[transfer_key][1].userID
                        rec = addRecord(sid, u, hd, time_record.tEnd, addr)
                        logger.debug("Record to send: %s", str(rec))
                        del AllTransfers[transfer_key]
                        logger.debug('%i %s', i, hd)
                    else:
                        rec = addRecord(sid, 0, hd, time_record.tEnd, addr)
                        logger.error(
                            "file to close not found. fileID: %i, serverID: %s. close=%s",
                            hd.fileID, sid, str(hd))
                elif isinstance(hd, decoding.fileXfr):
                    transfer_key = str(sid) + "." + str(hd.fileID)
                    if transfer_key in AllTransfers:
                        cur_value = AllTransfers[transfer_key]
                        AllTransfers[transfer_key] = ((now, cur_value[0][1]),
                                                      cur_value[1], hd)
                        logger.debug("f-stream index=%i Known xfrInfo: %s. sid=%s",
                                     i, str(hd), sid)
                    else:
                        logger.debug("f-stream index=%i Unknown xfrInfo: %s. sid=%s",
                                     i, str(hd), sid)
            if d:
                logger.error("Bytes leftover! %i bytes left!", len(d))
        elif h.code == 'r':
            logger.debug("r - redirect stream message.")
        elif h.code == 't':
            logger.warning("t - stream message. Server at %s should remove 'files', 'io', and "
                           "'iov' directives from the monitoring configuration.", addr)
        else:
            infolen = len(d) - 4
            mm = decoding.mapheader._make(
                struct.unpack("!I" + str(infolen) + "s", d))
            try:
                (u, rest) = mm.info.split('\n', 1)
            except ValueError as e:
                if not h.code == 'u':
                    logger.error('%s', e.args[0])
                    logger.warning(
                        "Strange >>%s<< mapping message from %s mm: %s",
                        h.code, addr, mm)
                u = mm.info
                rest = ''
            userInfo = decoding.userInfo(u)
            logger.debug('%i %s', mm.dictID, userInfo)
            if h.code == '=':
                serverInfo = decoding.serverInfo(rest, addr)
                if sid not in AllServers:
                    AllServers[sid] = serverInfo
                    logger.info('Adding new server info: %s started at %i',
                                serverInfo, h.server_start)
            elif h.code == 'd':
                path = rest
                logger.warning(
                    'Path information sent (%s). Server at %s should remove "files" '
                    'directive from the monitoring configuration.', path, addr)
            elif h.code == 'i':
                appinfo = rest
                logger.info('appinfo:%s', appinfo)
            elif h.code == 'p':
                purgeInfo = decoding.purgeInfo(rest)
                logger.info('purgeInfo:%s', purgeInfo)
            elif h.code == 'u':
                authorizationInfo = decoding.authorizationInfo(rest)
                if authorizationInfo.inetv != '':
                    logger.debug("Inet version detected to be %s",
                                 authorizationInfo.inetv)
                if sid not in AllUsers:
                    AllUsers[sid] = {}
                if mm.dictID not in AllUsers[sid]:
                    AllUsers[sid][mm.dictID] = (userInfo, authorizationInfo)
                    logger.debug("Adding new user: %s", authorizationInfo)
                elif AllUsers[sid][mm.dictID] is None:
                    logger.warning(
                        "Received a user ID (%i) from sid %s after corresponding "
                        "f-stream usage information.", mm.dictID, sid)
                    AllUsers[sid][mm.dictID] = (userInfo, authorizationInfo)
                elif AllUsers[sid][mm.dictID]:
                    logger.error(
                        "Received a repeated userID; SID: %s and UserID: %s (%s).",
                        sid, mm.dictID, userInfo)
            elif h.code == 'x':
                # xfrInfo is decoded but not (yet) used to update the transfer table.
                decoding.xfrInfo(rest)

        # Check whether we have to flush the AllTransfers structure.
        now_time = time.time()
        if (now_time - last_flush) > (60 * 5):
            # Have to make a copy of .keys() because we 'del' keys as we go.
            for key in list(AllTransfers.keys()):
                cur_value = AllTransfers[key]
                # TODO: since we don't update based on the xfr info, we don't
                # track progress or bump the timestamps... that needs to be done.
                if (now_time - cur_value[0][0]) > (3600 * 5):
                    if len(cur_value) == 3:
                        (sid, _) = key.rsplit(".", 1)
                        u = cur_value[1].userID
                        addr = cur_value[0][1]
                        rec = addRecord(sid, u, cur_value[2], now_time, addr)
                    del AllTransfers[key]
            # Reset the flush timer so the sweep runs at most every 5 minutes.
            last_flush = now_time

        if message_q.qsize() > 200:
            logger.error('QSize is large: %d', message_q.qsize())
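# --- Illustrative sketch (not part of the collector) ------------------------
# Both process() variants and eventCreator() above expire stale transfers by
# walking a copy of the dict's keys and deleting entries older than a TTL.
# This miniature version shows why the copy matters: deleting from a dict
# while iterating over it directly raises a RuntimeError in Python 3.
import time


def _flush_stale(transfers, ttl_seconds):
    now = time.time()
    # list(...) snapshots the keys so 'del' during the walk is safe.
    for key in list(transfers.keys()):
        opened_at = transfers[key][0][0]
        if (now - opened_at) > ttl_seconds:
            del transfers[key]


# Example: entries store ((open_time, addr), record), as in _transfers above.
_demo_transfers = {"sid#1.7": ((time.time() - 7200, "10.0.0.1"), object())}
_flush_stale(_demo_transfers, 3600)
assert _demo_transfers == {}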