class IRCD(Factory):
    protocol = IRCProtocol
    channel_prefixes = "#"
    types = {"user": DBUser, "server": IRCServer, "service": IRCService}
    prefix_order = "qaohv" # Hardcoded into modes :(
    prefix_symbols = {"q": "~", "a": "&", "o": "@", "h": "%", "v": "+"}

    def __init__(self, config, options=None):
        reactor.addSystemEventTrigger("before", "shutdown", self.cleanup)
        self.dead = False

        self.config = config
        self.version = "txircd.{}".format(__version__)
        self.created = now()
        self.token = uuid.uuid1()
        self.servers = CaseInsensitiveDictionary()
        self.users = CaseInsensitiveDictionary()
        self.whowas = CaseInsensitiveDictionary()
        self.channels = DefaultCaseInsensitiveDictionary(self.ChannelFactory)
        self.peerConnections = {}
        self.db = None
        #self.stats = None
        #self.stats_timer = LoopingCall(self.flush_stats)
        #self.stats_data = {
        #    "bytes_in": 0,
        #    "bytes_out": 0,
        #    "lines_in": 0,
        #    "lines_out": 0,
        #    "total_bytes_in": 0,
        #    "total_bytes_out": 0,
        #    "total_lines_in": 0,
        #    "total_lines_out": 0,
        #    "connections": 0,
        #    "total_connections": 0
        #}
        self.xlines = {
            "G": CaseInsensitiveDictionary(),
            "K": CaseInsensitiveDictionary(),
            "Z": CaseInsensitiveDictionary(),
            "E": CaseInsensitiveDictionary(),
            "Q": CaseInsensitiveDictionary(),
            "SHUN": CaseInsensitiveDictionary(),
        }
        self.xline_match = {
            "G": ["{ident}@{host}", "{ident}@{ip}"],
            "K": ["{ident}@{host}", "{ident}@{ip}"],
            "Z": ["{ip}"],
            "E": ["{ident}@{host}", "{ident}@{ip}"],
            "Q": ["{nick}"],
            "SHUN": ["{ident}@{host}", "{ident}@{ip}"],
        }

        if not options:
            options = {}
        self.load_options(options)

        #if self.app_ip_log:
        #    try:
        #        with open(self.app_ip_log) as f:
        #            self.unique_ips = set(json.loads(f.read()))
        #            self.stats_data["total_connections"] = len(self.unique_ips)
        #    except:
        #        self.unique_ips = set()
        #else:
        #    self.unique_ips = set()

        #logfile = "{}/{}".format(self.app_log_dir,"stats")
        #if not os.path.exists(logfile):
        #    os.makedirs(logfile)
        #self.stats_log = DailyLogFile("log",logfile)
        #self.stats_timer.start(1)

    def rehash(self):
        try:
            with open(self.config) as f:
                self.load_options(yaml.safe_load(f))
        except:
            return False
        return True

    def load_options(self, options):
        # Populate attributes with options
        for var in default_options.iterkeys():
            setattr(self, var, options[var] if var in options else default_options[var])
        # Unserialize xlines
        for key in self.xlines.iterkeys():
            self.xlines[key] = CaseInsensitiveDictionary()
            xlines = getattr(self, "server_xlines_{}".format(key.lower()), None)
            if not xlines:
                continue
            for user, data in xlines.iteritems():
                self.xlines[key][user] = {
                    "created": datetime.datetime.strptime(data["created"], "%Y-%m-%d %H:%M:%S"),
                    "duration": parse_duration(data["duration"]),
                    "setter": data["setter"],
                    "reason": data["reason"],
                }
        # Create database connection
        if self.db:
            self.db.close()
        if self.db_library:
            self.db = adbapi.ConnectionPool(
                self.db_library,
                host=self.db_host,
                port=self.db_port,
                db=self.db_database,
                user=self.db_username,
                passwd=self.db_password,
                cp_reconnect=True,
            )
        # Turn on stats factory if needed, or shut it down if needed
        #if self.stats_enabled and not self.stats:
        #    self.stats = StatFactory()
        #    if self.stats_port_tcp:
        #        try:
        #            reactor.listenTCP(int(self.stats_port_tcp), self.stats)
        #        except:
        #            pass # Wasn't a number
        #    if self.stats_port_web:
        #        try:
        #            reactor.listenTCP(int(self.stats_port_web), SockJSFactory(self.stats))
        #        except:
        #            pass # Wasn't a number
        #elif not self.stats_enabled and self.stats:
        #    self.stats.shutdown()
        #    self.stats = None
        # Load geoip data
        #self.geo_db = pygeoip.GeoIP(self.app_geoip_database, pygeoip.MEMORY_CACHE) if self.app_geoip_database else None

    def save_options(self):
        # Serialize xlines
        for key, lines in self.xlines.iteritems():
            xlines = {}
            for user, data in lines.iteritems():
                xlines[user] = {
                    "created": str(data["created"]),
                    "duration": build_duration(data["duration"]),
                    "setter": data["setter"],
                    "reason": data["reason"],
                }
            setattr(self, "server_xlines_{}".format(key.lower()), xlines)
        # Load old options
        options = {}
        try:
            with open(self.config) as f:
                options = yaml.safe_load(f)
        except:
            return False
        # Overwrite with the new stuff
        for var in default_options.iterkeys():
            options[var] = getattr(self, var, None)
        # Save em
        try:
            with open(self.config, "w") as f:
                yaml.dump(options, f, default_flow_style=False)
        except:
            return False
        return True

    def cleanup(self):
        # Track the disconnections so we know they get done
        deferreds = []
        # Cleanly disconnect all clients
        log.msg("Disconnecting clients...")
        for u in self.users.values():
            u.irc_QUIT(None, ["Server shutting down"])
            deferreds.append(u.disconnected)
        # Without any clients, all channels should be gone
        # But make sure the logs are closed, just in case
        log.msg("Closing logs...")
        for c in self.channels.itervalues():
            c.log.close()
        #self.stats_log.close()
        # Finally, save the config. Just in case.
        log.msg("Saving options...")
        self.save_options()
        # Return deferreds
        log.msg("Waiting on deferreds...")
        self.dead = True
        return DeferredList(deferreds)

    def buildProtocol(self, addr):
        if self.dead:
            return None
        ip = addr.host
        #self.unique_ips.add(ip)
        #self.stats_data["total_connections"] = len(self.unique_ips)
        #if self.app_ip_log:
        #    with open(self.app_ip_log,"w") as f:
        #        f.write(json.dumps(list(self.unique_ips), separators=(',',':')))
        conn = self.peerConnections.get(ip, 0)
        max = self.client_peer_exempt[ip] if ip in self.client_peer_exempt else self.client_peer_connections
        if max and conn >= max:
            return None
        #self.stats_data["connections"] += 1
        self.peerConnections[ip] = conn + 1
        return Factory.buildProtocol(self, addr)

    def unregisterProtocol(self, p):
        #self.stats_data["connections"] -= 1
        peerHost = p.transport.getPeer().host
        self.peerConnections[peerHost] -= 1
        if self.peerConnections[peerHost] == 0:
            del self.peerConnections[peerHost]

    def ChannelFactory(self, name):
        logfile = "{}/{}".format(self.app_log_dir, irc_lower(name))
        if not os.path.exists(logfile):
            os.makedirs(logfile)
        c = Channel(
            name,
            now(),
            {"message": None, "author": "", "created": now()},
            CaseInsensitiveDictionary(),
            ChannelModes(self, None),
            DailyLogFile("log", logfile),
        )
        c.mode.parent = c
        c.mode.combine("nt", [], name)
        return c

    def flush_stats(self):
        return
        users = {}
        countries = {}
        uptime = now() - self.created
        for u in self.users.itervalues():
            users[u.nickname] = [u.latitude, u.longitude]
            if u.country not in countries:
                countries[u.country] = 0
            countries[u.country] += 1
        line = json.dumps(
            {
                "io": self.stats_data,
                "users": users,
                "countries": countries,
                "uptime": "{}".format(uptime if uptime.days > 0 else "0 days, {}".format(uptime)),
            },
            separators=(",", ":"),
        )
        self.stats_data["bytes_in"] = 0
        self.stats_data["bytes_out"] = 0
        self.stats_data["lines_in"] = 0
        self.stats_data["lines_out"] = 0
        #if not self.stats_log.closed:
        #    self.stats_log.write(line+"\n")
        if self.stats:
            self.stats.broadcast(line + "\r\n")
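

# A minimal usage sketch (not part of the original module), assuming the usual
# txircd setup: the factory is built from a YAML config file and handed to the
# Twisted reactor. The file name "txircd.yaml" and port 6667 are illustrative
# assumptions; the real entry point and listener wiring live elsewhere in txircd.
#
# import yaml
# from twisted.internet import reactor
#
# with open("txircd.yaml") as f:
#     options = yaml.safe_load(f)
# ircd = IRCD("txircd.yaml", options)
# reactor.listenTCP(6667, ircd)  # serve client connections via IRCProtocol
# reactor.run()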