def msg(self, event):
    """Record one locally-generated log event.

    Events whose severity is below the configured level are dropped;
    everything else is serialized to the local log file, tagged as
    originating from "local".
    """
    threshold = self._level
    if event['level'] >= threshold:
        flogfile.serialize_wrapper(self._logFile, event,
                                   from_="local",
                                   rx_time=time.time())
def remote_msg(self, d):
    """Append an event received from a remote node to the gatherer's file.

    Serialization failures are reported to stdout instead of raised, so a
    single unserializable event cannot break the gathering loop.
    """
    try:
        flogfile.serialize_wrapper(self.f, d,
                                   from_=self.nodeid_s,
                                   rx_time=time.time())
    # Python-3 exception syntax (was `except Exception, ex` / print
    # statement, which is a SyntaxError on py3); matches the style used
    # by the other remote_msg in this file.
    except Exception as ex:
        print("GATHERER: unable to serialize %s: %s" % (d, ex))
def msg(self, nodeid_s, d):
    """Append an event from node *nodeid_s* to the save file.

    Best-effort: if the event cannot be serialized, a warning is printed
    and gathering continues.
    """
    try:
        flogfile.serialize_wrapper(self._savefile, d,
                                   from_=nodeid_s,
                                   rx_time=time.time())
    # Python-3 exception syntax (was `except Exception, ex` / print
    # statement, which is a SyntaxError on py3); matches the style used
    # elsewhere in this file.
    except Exception as ex:
        print("GATHERER: unable to serialize %s: %s" % (d, ex))
def remote_msg(self, d):
    """Append an event received from a remote node to the gatherer's file.

    A failure to serialize is reported but never raised: one bad event
    must not stop the gatherer.
    """
    received_at = time.time()
    try:
        flogfile.serialize_wrapper(self.f, d,
                                   from_=self.nodeid_s,
                                   rx_time=received_at)
    except Exception as ex:
        print("GATHERER: unable to serialize %s: %s" % (d, ex))
def incident_declared(self, triggering_event): self.trigger = triggering_event # choose a name for the logfile now = time.time() unique = os.urandom(4) unique_s = base32.encode(unique) self.name = "incident-%s-%s" % (self.format_time(now), unique_s) filename = self.name + ".flog" self.abs_filename = os.path.join(self.basedir, filename) self.abs_filename_bz2 = self.abs_filename + ".bz2" self.abs_filename_bz2_tmp = self.abs_filename + ".bz2.tmp" # open logfile. We use both an uncompressed one and a compressed one. self.f1 = open(self.abs_filename, "wb") self.f2 = bz2.BZ2File(self.abs_filename_bz2_tmp, "wb") # write header with triggering_event self.f1.write(flogfile.MAGIC) self.f2.write(flogfile.MAGIC) flogfile.serialize_header(self.f1, "incident", trigger=triggering_event, versions=app_versions.versions, pid=os.getpid()) flogfile.serialize_header(self.f2, "incident", trigger=triggering_event, versions=app_versions.versions, pid=os.getpid()) if self.TRAILING_DELAY is not None: # subscribe to events that occur after this one self.still_recording = True self.remaining_events = self.TRAILING_EVENT_LIMIT self.logger.addObserver(self.trailing_event) # use self.logger.buffers, copy events into logfile events = list(self.logger.get_buffered_events()) events.sort(key=O.itemgetter('num')) for e in events: flogfile.serialize_wrapper(self.f1, e, from_=self.tubid_s, rx_time=now) flogfile.serialize_wrapper(self.f2, e, from_=self.tubid_s, rx_time=now) self.f1.flush() # the BZ2File has no flush method if self.TRAILING_DELAY is None: self.active = False eventually(self.finished_recording) else: # now we wait for the trailing events to arrive self.timer = reactor.callLater(self.TRAILING_DELAY, self.stop_recording)
def save_incident(self, filename, incident):
    """Persist one incident (a ``(header, events)`` pair) to *filename*
    as a bz2-compressed flog file.

    Every event is re-stamped with the current time and attributed to
    this gatherer's tubid.
    """
    header, events = incident
    stamp = time.time()
    out = bz2.BZ2File(filename, "w")
    flogfile.serialize_raw_header(out, header)
    for event in events:
        flogfile.serialize_wrapper(out, event,
                                   from_=self.tubid_s,
                                   rx_time=stamp)
    out.close()
def incident_declared(self, triggering_event):
    """Start recording an incident caused by *triggering_event*.

    Opens a pair of log files (one raw .flog, one bz2-compressed .tmp),
    writes headers plus all buffered events, then either finishes
    immediately or keeps recording trailing events for TRAILING_DELAY
    seconds.
    """
    self.trigger = triggering_event
    # choose a name for the logfile: timestamp plus 4 random bytes
    # (base32-encoded) to avoid collisions between simultaneous incidents
    now = time.time()
    unique = os.urandom(4)
    unique_s = base32.encode(unique)
    self.name = "incident-%s-%s" % (self.format_time(now), unique_s)
    filename = self.name + ".flog"
    self.abs_filename = os.path.join(self.basedir, filename)
    self.abs_filename_bz2 = self.abs_filename + ".bz2"
    self.abs_filename_bz2_tmp = self.abs_filename + ".bz2.tmp"
    # open logfile. We use both an uncompressed one and a compressed one.
    self.f1 = open(self.abs_filename, "wb")
    self.f2 = bz2.BZ2File(self.abs_filename_bz2_tmp, "wb")
    # write header with triggering_event; the same header goes to both files
    self.f1.write(flogfile.MAGIC)
    self.f2.write(flogfile.MAGIC)
    flogfile.serialize_header(self.f1, "incident",
                              trigger=triggering_event,
                              versions=app_versions.versions,
                              pid=os.getpid())
    flogfile.serialize_header(self.f2, "incident",
                              trigger=triggering_event,
                              versions=app_versions.versions,
                              pid=os.getpid())
    if self.TRAILING_DELAY is not None:
        # subscribe to events that occur after this one
        self.still_recording = True
        self.remaining_events = self.TRAILING_EVENT_LIMIT
        self.logger.addObserver(self.trailing_event)
    # use self.logger.buffers, copy events into logfile in event-number
    # order. key= replaces the old cmp-style sort(lambda a,b: cmp(...)),
    # which is a TypeError on Python 3 (cmp and the positional sort arg
    # were both removed); matches the sort used by the other
    # incident_declared in this file.
    events = list(self.logger.get_buffered_events())
    events.sort(key=O.itemgetter('num'))
    for e in events:
        flogfile.serialize_wrapper(self.f1, e,
                                   from_=self.tubid_s, rx_time=now)
        flogfile.serialize_wrapper(self.f2, e,
                                   from_=self.tubid_s, rx_time=now)
    self.f1.flush()
    # the BZ2File has no flush method
    if self.TRAILING_DELAY is None:
        # no trailing window: finish on the next reactor turn
        self.active = False
        eventually(self.finished_recording)
    else:
        # now we wait for the trailing events to arrive
        self.timer = reactor.callLater(self.TRAILING_DELAY,
                                       self.stop_recording)
def trailing_event(self, ev):
    """Record one event observed after the incident trigger.

    Counts down the trailing-event budget; while budget remains, the
    event is written to both incident files. Once the budget is
    exhausted, recording is stopped.
    """
    if not self.still_recording:
        return
    self.remaining_events -= 1
    if self.remaining_events < 0:
        # trailing-event limit exhausted: shut the recording down
        return self.stop_recording()
    stamp = time.time()
    for out in (self.f1, self.f2):
        flogfile.serialize_wrapper(out, ev,
                                   from_=self.tubid_s,
                                   rx_time=stamp)