def digest32(colondigest):
    """Convert a colon-separated hex digest to a base32 string.

    Takes bytes like b'D9:C8:C9:...:EF:13' and returns a native string
    (the base32 encoding of the raw digest octets).
    """
    assert isinstance(colondigest, bytes), (type(colondigest), colondigest)
    if sys.version_info.major == 2:
        # py2: bytes is str, so we split on a str colon and build a str
        digest = "".join([chr(int(c, 16)) for c in colondigest.split(":")])
    else:
        # this is py3-only
        digest = bytes([int(c, 16) for c in colondigest.split(b":")])
        # N hex pairs joined by colons have length 3*N-1, so N == (len+1)//3.
        # Floor division keeps this an exact integer comparison (the old
        # "/" produced a float). Under py2 the list comprehension above
        # would yield ints, not octets, so this assert guards py3-only use.
        assert len(digest) == (len(colondigest) + 1) // 3, "py3 only, sorry"
    # encode once, on whichever representation the branch produced
    return base32.encode(digest)
def nodeid_b2a(nodeid):
    """
    We display nodeids using the same base32 alphabet that Foolscap uses.

    Returns a Unicode string.
    """
    encoded = base32.encode(nodeid)
    return ensure_text(encoded)
def incident_declared(self, triggering_event): self.trigger = triggering_event # choose a name for the logfile now = time.time() unique = os.urandom(4) unique_s = base32.encode(unique) self.name = "incident-%s-%s" % (self.format_time(now), unique_s) filename = self.name + ".flog" self.abs_filename = os.path.join(self.basedir, filename) self.abs_filename_bz2 = self.abs_filename + ".bz2" self.abs_filename_bz2_tmp = self.abs_filename + ".bz2.tmp" # open logfile. We use both an uncompressed one and a compressed one. self.f1 = open(self.abs_filename, "wb") self.f2 = bz2.BZ2File(self.abs_filename_bz2_tmp, "wb") # write header with triggering_event self.f1.write(flogfile.MAGIC) self.f2.write(flogfile.MAGIC) flogfile.serialize_header(self.f1, "incident", trigger=triggering_event, versions=app_versions.versions, pid=os.getpid()) flogfile.serialize_header(self.f2, "incident", trigger=triggering_event, versions=app_versions.versions, pid=os.getpid()) if self.TRAILING_DELAY is not None: # subscribe to events that occur after this one self.still_recording = True self.remaining_events = self.TRAILING_EVENT_LIMIT self.logger.addObserver(self.trailing_event) # use self.logger.buffers, copy events into logfile events = list(self.logger.get_buffered_events()) events.sort(key=O.itemgetter('num')) for e in events: flogfile.serialize_wrapper(self.f1, e, from_=self.tubid_s, rx_time=now) flogfile.serialize_wrapper(self.f2, e, from_=self.tubid_s, rx_time=now) self.f1.flush() # the BZ2File has no flush method if self.TRAILING_DELAY is None: self.active = False eventually(self.finished_recording) else: # now we wait for the trailing events to arrive self.timer = reactor.callLater(self.TRAILING_DELAY, self.stop_recording)
def incident_declared(self, triggering_event):
    """Start recording an incident triggered by `triggering_event`.

    Opens a pair of logfiles (uncompressed .flog and bz2-compressed),
    writes the flogfile header with the trigger, copies all currently
    buffered events into both files, and either finishes immediately or
    keeps recording trailing events for TRAILING_DELAY seconds.
    """
    self.trigger = triggering_event
    # choose a name for the logfile
    now = time.time()
    # 4 random bytes, base32-encoded, to keep simultaneous incidents
    # from colliding on the same timestamped filename
    unique = os.urandom(4)
    unique_s = base32.encode(unique)
    self.name = "incident-%s-%s" % (self.format_time(now), unique_s)
    filename = self.name + ".flog"
    self.abs_filename = os.path.join(self.basedir, filename)
    self.abs_filename_bz2 = self.abs_filename + ".bz2"
    self.abs_filename_bz2_tmp = self.abs_filename + ".bz2.tmp"
    # open logfile. We use both an uncompressed one and a compressed one.
    self.f1 = open(self.abs_filename, "wb")
    self.f2 = bz2.BZ2File(self.abs_filename_bz2_tmp, "wb")
    # write header with triggering_event
    self.f1.write(flogfile.MAGIC)
    self.f2.write(flogfile.MAGIC)
    flogfile.serialize_header(self.f1, "incident",
                              trigger=triggering_event,
                              versions=app_versions.versions,
                              pid=os.getpid())
    flogfile.serialize_header(self.f2, "incident",
                              trigger=triggering_event,
                              versions=app_versions.versions,
                              pid=os.getpid())
    if self.TRAILING_DELAY is not None:
        # subscribe to events that occur after this one
        self.still_recording = True
        self.remaining_events = self.TRAILING_EVENT_LIMIT
        self.logger.addObserver(self.trailing_event)
    # use self.logger.buffers, copy events into logfile
    events = list(self.logger.get_buffered_events())
    # sort events into event-number order. The old cmp-style
    # sort(lambda a,b: cmp(...)) is py2-only: py3 removed cmp() and the
    # positional comparator argument; key= works on both and is faster.
    events.sort(key=lambda ev: ev['num'])
    for e in events:
        flogfile.serialize_wrapper(self.f1, e,
                                   from_=self.tubid_s, rx_time=now)
        flogfile.serialize_wrapper(self.f2, e,
                                   from_=self.tubid_s, rx_time=now)
    self.f1.flush()
    # the BZ2File has no flush method
    if self.TRAILING_DELAY is None:
        # no trailing window: finish on the next reactor turn
        self.active = False
        eventually(self.finished_recording)
    else:
        # now we wait for the trailing events to arrive
        self.timer = reactor.callLater(self.TRAILING_DELAY,
                                       self.stop_recording)
def incident_declared(self, triggering_event):
    """Start recording an incident triggered by `triggering_event`.

    Opens a pair of logfiles (uncompressed .flog and bz2-compressed),
    pickles a header dict containing the trigger, copies all currently
    buffered events into both files, and either finishes immediately or
    keeps recording trailing events for TRAILING_DELAY seconds.
    """
    self.trigger = triggering_event
    # choose a name for the logfile
    now = time.time()
    # 4 random bytes, base32-encoded, to keep simultaneous incidents
    # from colliding on the same timestamped filename
    unique = os.urandom(4)
    unique_s = base32.encode(unique)
    self.name = "incident-%s-%s" % (self.format_time(now), unique_s)
    filename = self.name + ".flog"
    self.abs_filename = os.path.join(self.basedir, filename)
    self.abs_filename_bz2 = self.abs_filename + ".bz2"
    self.abs_filename_bz2_tmp = self.abs_filename + ".bz2.tmp"
    # open logfile. We use both an uncompressed one and a compressed one.
    self.f1 = open(self.abs_filename, "wb")
    self.f2 = bz2.BZ2File(self.abs_filename_bz2_tmp, "wb")
    # write header with triggering_event
    header = {"header": {"type": "incident",
                         "trigger": triggering_event,
                         "versions": app_versions.versions,
                         "pid": os.getpid(),
                         }}
    pickle.dump(header, self.f1)
    pickle.dump(header, self.f2)
    if self.TRAILING_DELAY is not None:
        # subscribe to events that occur after this one
        self.still_recording = True
        self.remaining_events = self.TRAILING_EVENT_LIMIT
        self.logger.addObserver(self.trailing_event)
    # use self.logger.buffers, copy events into logfile
    events = list(self.logger.get_buffered_events())
    # sort events into event-number order. The old cmp-style
    # sort(lambda a,b: cmp(...)) is py2-only: py3 removed cmp() and the
    # positional comparator argument; key= works on both and is faster.
    events.sort(key=lambda ev: ev['num'])
    for e in events:
        wrapper = {"from": self.tubid_s,
                   "rx_time": now,
                   "d": e}
        pickle.dump(wrapper, self.f1)
        pickle.dump(wrapper, self.f2)
    self.f1.flush()
    # the BZ2File has no flush method
    if self.TRAILING_DELAY is None:
        # no trailing window: finish on the next reactor turn
        self.active = False
        eventually(self.finished_recording)
    else:
        # now we wait for the trailing events to arrive
        self.timer = reactor.callLater(self.TRAILING_DELAY,
                                       self.stop_recording)
def __init__(self, e):
    """Wrap one wrapped log event `e` as a node in the event tree."""
    self.e = e
    self.parent = None
    self.children = []
    d = e['d']
    src = e['from']
    self.incarnation = base32.encode(d['incarnation'][0])
    # events without a 'num' get no index and a placeholder anchor
    self.index = None
    self.anchor_index = "no-number"
    if 'num' in d:
        self.index = (src, d['num'])
        self.anchor_index = "%s_%s_%d" % (urllib.quote(src),
                                          self.incarnation,
                                          d['num'])
    self.parent_index = None
    if 'parent' in d:
        self.parent_index = (src, d['parent'])
    self.is_trigger = False
def __init__(self, e):
    """Build a tree node for the wrapped log event `e`."""
    self.e = e
    self.parent = None
    self.children = []
    event_data = e['d']
    origin = e['from']
    self.incarnation = base32.encode(event_data['incarnation'][0])
    if 'num' in event_data:
        self.index = (origin, event_data['num'])
        self.anchor_index = "%s_%s_%d" % (urllib.quote(origin),
                                          self.incarnation,
                                          event_data['num'])
    else:
        # un-numbered events get a placeholder anchor
        self.index = None
        self.anchor_index = "no-number"
    if 'parent' in event_data:
        self.parent_index = (origin, event_data['parent'])
    else:
        self.parent_index = None
    self.is_trigger = False
def digest32(colondigest):
    """Convert a colon-separated hex digest string to its base32 form."""
    raw = "".join(chr(int(pair, 16)) for pair in colondigest.split(":"))
    return base32.encode(raw)
def generateSwissnumber(bits):
    """Return a random, unguessable base32 string with `bits` bits of
    entropy. `bits` is expected to be a multiple of 8."""
    # floor division: under py3, bits / 8 is a float and os.urandom
    # requires an int
    return base32.encode(os.urandom(bits // 8))
def nodeid_b2a(nodeid):
    """Display a nodeid using the same base32 alphabet that Foolscap uses."""
    encoded = base32.encode(nodeid)
    return encoded
def generateSwissnumber(bits):
    """Return a random base32-encoded swissnumber carrying `bits` bits
    of entropy (expects `bits` to be a multiple of 8)."""
    # bits // 8 instead of bits / 8: true division yields a float on
    # py3, which os.urandom rejects with a TypeError
    nbytes = os.urandom(bits // 8)
    return base32.encode(nbytes)
def digest32(colondigest):
    """Turn a 'AA:BB:...' hex digest string into its base32 encoding."""
    hex_pairs = colondigest.split(":")
    octets = [chr(int(h, 16)) for h in hex_pairs]
    return base32.encode("".join(octets))
def short_tubid_b2a(tubid):
    """Return an abbreviated (8-character) base32 form of the tubid."""
    full = base32.encode(tubid)
    return full[:8]
def generateSwissnumber(bits):
    """Return a random, unguessable swissnumber with `bits` bits of
    entropy, as a base32 text string. `bits` is expected to be a
    multiple of 8."""
    # bits // 8 stays in integer arithmetic; int(bits / 8) round-trips
    # through a float, which is only exact up to 2**53
    nbytes = os.urandom(bits // 8)
    return base32.encode(nbytes).decode('ascii')
def digest32(colondigest):
    """Convert colon-separated hex digest bytes (b'AA:BB:...') to base32."""
    octets = bytes(int(pair, 16) for pair in colondigest.split(b":"))
    return base32.encode(octets)