def load_page():
    '''return the logging/time settings used to render the log settings page.

    reads the logging_client configuration and reports both system (UTC) time
    and the locally adjusted time alongside the configured level/length/offset.
    '''
    cfg = load_configuration('logging_client')['logging']

    log_cfg = cfg['logging']
    offset_cfg = cfg['time_offset']

    # system time is raw UTC; local time is adjusted by the configured offset.
    system_time = System.format_date_time(fast_time())
    local_time = System.format_date_time(System.calculate_time_offset(fast_time()))

    return {
        'system': system_time,
        'local': local_time,
        'logging': {'level': log_cfg['level'], 'length': log_cfg['length']},
        'offset': {'direction': offset_cfg['direction'], 'amount': offset_cfg['amount']}
    }
def load_page():
    '''return logging/time settings for the page, including the selectable log levels.'''
    settings = load_configuration('logging_client')['logging']

    log = settings['logging']

    # correcting time for configured offset.
    time_offset = settings['time_offset']

    system_time = System.format_date_time(fast_time())
    local_time = System.format_date_time(System.calculate_time_offset(fast_time()))

    return {
        'system': system_time,
        'local': local_time,
        'offset': {
            'direction': time_offset['direction'],
            'amount': time_offset['amount']
        },
        'logging': {
            # title-cased for display in the level selector.
            'log_levels': [lvl.title() for lvl in LOG_LEVELS],
            'level': log['level'],
            'length': log['length']
        }
    }
def load_page():
    '''gather current firewall/nat rule sets for the firewall rules page.'''
    # netmasks offered in the ui, widest bit-length first (32 down to 24).
    return {
        'firewall_rules': System.firewall_rules(),
        'dmz_dnat_rules': System.nat_rules(),
        'local_snat_rules': System.nat_rules(nat_type='SRCNAT'),
        'netmasks': [cidr for cidr in range(32, 23, -1)]
    }
def __init__(self):
    # shared System utility instance for the service methods.
    self.System = System()

    # every module whose daily log files get merged into the combined log.
    self.log_modules = [
        'dhcp_server', 'dns_proxy', 'ip_proxy', 'ips',
        'syslog', 'system', 'update', 'logins'
    ]
def load_page():
    '''return {backup_name: (date, time)} for every stored backup, converted to local time.'''
    backups_info = {}

    for name, created in System.backups().items():
        # convert the stored utc timestamp to local time, then split the formatted
        # string into its date and time-of-day components for display.
        adjusted = System.calculate_time_offset(created)
        date_part, time_part = System.format_date_time(adjusted).split(maxsplit=1)

        backups_info[name] = (date_part, time_part)

    return backups_info
def message(cls, mod_name, mtype, level, message):
    '''serialize a log event into the pipe-delimited wire format, utf-8 encoded.

    format (system/UTC time):
    20140624|19:08:15|EVENT|DNSProxy:Informational|192.168.83.1|*MESSAGE*
    '''
    date = System.date(string=True)
    timestamp = System.format_time(fast_time())

    # system ip is not tracked by this handler; the literal "None" placeholder
    # field is preserved in the output.
    log_entry = '|'.join([
        date, timestamp, mtype.name, f'{mod_name}:{cls.convert_level(level)}', 'None', f'{message}'
    ])

    return log_entry.encode('utf-8')
def __init__(self, icmp_check):
    self.path = os.environ['HOME_DIR']
    self.Sys = System()

    # local subnet is loaded once from the on-disk configuration.
    with open(f'{self.path}/data/config.json', 'r') as settings:
        config = json.load(settings)

    self.local_net = config['Settings']['LocalNet']['Subnet']

    # first three octets of the local subnet, e.g. '192.168.83'.
    self.ip_range = '.'.join(self.local_net.split('.')[:3])

    # ip -> [timestamp, mac] (None for available, -1 timestamp for reservations).
    self.lease_table = {}
    self.icmp_check = icmp_check
def load_page():
    '''gather firewall and nat rules plus the selectable netmask range for the page.'''
    firewall_settings = {
        'firewall_rules': System.firewall_rules(),
        'nat_rules': System.nat_rules(),
        # widest bit-length first (32 down to 24).
        'netmasks': [mask for mask in range(32, 23, -1)]
    }

    return firewall_settings
def format_row(row, users):
    '''formats database data to be better displayed and managed by front end.

    replaces all '_' with ' ' in every entry. when a users mapping is passed in,
    the matching user name (or 'n/a') is appended before last_seen.
    '''
    Sys = System()

    *entries, last_seen = row

    # last column is a utc timestamp; display it in local time.
    last_seen = Sys.format_date_time(Sys.calculate_time_offset(last_seen))

    if users is not None:
        entries.append(users.get(entries[0], {}).get('name', 'n/a'))

    entries.append(last_seen)

    return [str(entry).lower().replace('_', ' ') for entry in entries]
def load_page():
    '''collect database counts, interface/system stats, and service status for the dashboard.'''
    with DBConnector() as ProxyDB:
        domain_count = ProxyDB.unique_domain_count(table='dnsproxy', action='blocked')
        top_domains = ProxyDB.dashboard_query_top(5, table='dnsproxy', action='blocked')
        request_count = ProxyDB.total_request_count(table='dnsproxy', action='blocked')
        inf_hosts = ProxyDB.query_last(5, table='infectedclients', action='all')

    # TODO: see if this is a candidate for a class method
    intstat = Interface().bandwidth()

    uptime = System.uptime()
    cpu = System.cpu_usage()
    ram = System.ram_usage()
    dns_servers = System.dns_status()

    # TODO: make this iterable
    dns_proxy = Services.status('dnx-dns-proxy')
    ip_proxy = Services.status('dnx-ip-proxy')
    dhcp_server = Services.status('dnx-dhcp-server')
    dnx_ips = Services.status('dnx-ips')

    return {
        'domain_count': domain_count,
        'infected_hosts': inf_hosts,
        'top_domains': top_domains,
        'request_count': request_count,
        'interfaces': intstat,
        'uptime': uptime,
        'cpu': cpu,
        'ram': ram,
        'dns_servers': dns_servers,
        'module_status': {
            'dns_proxy': dns_proxy,
            'ip_proxy': ip_proxy,
            'dnx_ips': dnx_ips,
            'dhcp_server': dhcp_server
        }
    }
def set_dns_servers(dns_server_info):
    '''write the provided public dns resolvers into the dns_server configuration,
    then re-apply wan interface settings when the wan interface is statically set.

    dns_server_info: mapping of {server_name: ip_address}; position 1 maps to the
    primary resolver and position 2 to the secondary.
    '''
    field = {1: 'primary', 2: 'secondary'}

    with ConfigurationManager('dns_server') as dnx:
        dns_server_settings = dnx.load_configuration()

        public_resolvers = dns_server_settings['dns_server']['resolvers']

        for i, (server_name, ip_address) in enumerate(dns_server_info.items(), 1):
            # a resolver entry is only valid when BOTH a name and an ip were supplied.
            # original condition (not server_name and ip_address) only skipped
            # name-less entries and let name-only entries through.
            if not (server_name and ip_address): continue

            public_resolvers[field[i]].update({
                'name': server_name,
                'ip_address': ip_address
            })

        dnx.write_configuration(dns_server_settings)

    wan_information = load_configuration('config')['settings']

    interface = wan_information['interfaces']
    wan_dhcp = interface['wan']['dhcp']
    wan_int = interface['wan']['ident']

    # statically configured wan settings must be re-applied after the change.
    if (not wan_dhcp):
        # NOTE(review): originally called interface.get_ip_address (a dict), which
        # would raise AttributeError. Interface matches the netmask/default_gateway
        # helpers used below — confirm against the Interface module api.
        wan_ip = Interface.get_ip_address(wan_int)
        wan_dfg = Interface.default_gateway(wan_int)

        # netmask must be gathered before it can be converted (it was previously
        # referenced one line before it was assigned). # TODO: convert this to new module
        wan_netmask = Interface.netmask(wan_int)
        cidr = System.standard_to_cidr(wan_netmask)

        set_wan_interface({
            'ip_address': wan_ip, 'cidr': cidr, 'default_gateway': wan_dfg
        })
def set_dns_servers(dns_server_info):
    '''persist public resolver settings, then re-apply static wan interface settings.'''
    with ConfigurationManager('dns_server') as dnx:
        dns_server_settings = dnx.load_configuration()

        dns = dns_server_settings['dns_server']['resolvers']

        for idx, (name, ip_addr) in enumerate(dns_server_info.items(), 1):
            # only entries with both a name and an ip are applied.
            if (name and ip_addr):
                dns[f'server{idx}'].update({'name': name, 'ip_address': ip_addr})

        dnx.write_configuration(dns_server_settings)

    interface = load_configuration('config')['settings']['interface']

    wan_int = interface['outside']

    # statically assigned wan settings are re-applied after the resolver change.
    if (not interface['wan']['dhcp']):
        # NOTE(review): interface is a dict here, so get_ip_address looks like it
        # belongs on the Interface module — confirm before relying on this path.
        wan_ip = interface.get_ip_address(wan_int)
        wan_netmask = Interface.netmask(wan_int)
        wan_dfg = Interface.default_gateway(wan_int)

        set_wan_interface({
            'ip_address': wan_ip,
            'cidr': System.standard_to_cidr(wan_netmask),
            'default_gateway': wan_dfg
        })
def load_page():
    '''return the domain blacklist (timestamps localized) and blacklist exceptions.'''
    blacklist = load_configuration('blacklist')['blacklist']

    domain_blacklist = blacklist['domain']

    # stored utc timestamps are converted to local time for display. values are
    # mutated in place; the key set is not modified during iteration.
    for domain, info in domain_blacklist.items():
        adjusted = System.calculate_time_offset(info['time'])
        info['time'] = System.format_date_time(adjusted)

    return {
        'domain_blacklist': domain_blacklist,
        'exceptions': blacklist['exception']
    }
def __init__(self, process, module=None):
    self.process = process
    self.System = Sys()

    # pending (timestamp, message) entries consumed by the queue handler.
    self.log_queue = deque()

    # module is optional at construction; when omitted, it is expected to be
    # assigned later before any file i/o happens.
    if (module):
        self.module = module
class SyslogFormat:
    '''formats firewall log events into the pipe-delimited syslog message format.'''

    def __init__(self):
        self.System = Sys()

    def Message(self, system_ip, module, msg_type, msg_level, message):
        '''return a formatted message string:
        date|timestamp|TYPE|module:Level|system_ip|message
        '''
        epoch = time.time()

        #add time offset??
        #20140624|19:08:15|EVENT|DNSProxy:Informational|192.168.83.1|src.mac={}; src.ip={}; domain={}; category={}; filter={}; action={}
        return '|'.join([
            self.Date(),
            self.System.FormatTime(epoch),
            self.ReturnType(msg_type),
            f'{module}:{self.ReturnLevel(msg_level)}',
            f'{system_ip}',
            f'{message}'
        ])

    def ReturnType(self, msg_type):
        '''translate a numeric message type to its display name; unknown types pass through.'''
        # T3 = System Daemons | T14 = Event/Log Alert
        type_names = {3: 'SYSTEM', 14: 'EVENT'}

        return type_names.get(msg_type, msg_type)

    def Date(self):
        '''return the current date as a compact YYYYMMDD-style string.'''
        return '{}{}{}'.format(*self.System.Date()[:3])

    def ReturnLevel(self, level):
        '''map a numeric syslog severity (0-7) to its standard name.'''
        severity_names = {
            0: 'Emergency',      # system is unusable
            1: 'Alert',          # action must be taken immediately
            2: 'Critical',       # critical conditions
            3: 'Error',          # error conditions
            4: 'Warning',        # warning conditions
            5: 'Notice',         # normal but significant condition
            6: 'Informational',  # informational messages
            7: 'Debug',          # debug-level messages
        }

        return severity_names[level]
def load_page():
    '''return the domain/ip whitelists (timestamps localized) and whitelist exceptions.'''
    # config name normalized from 'whitelist.json': every other call site in this
    # file passes the bare config name (e.g. 'blacklist', 'logging_client').
    whitelist = load_configuration('whitelist')['whitelist']

    exceptions = whitelist['exception']
    domain_whitelist = whitelist['domain']
    ip_whitelist = whitelist['ip_whitelist']

    # stored utc timestamps are converted to local time for display.
    for domain, info in domain_whitelist.items():
        st_offset = System.calculate_time_offset(info['time'])

        domain_whitelist[domain]['time'] = System.format_date_time(st_offset)

    whitelist_settings = {
        'domain_whitelist': domain_whitelist,
        'exceptions': exceptions,
        'ip_whitelist': ip_whitelist
    }

    return whitelist_settings
def get_log_entries(log_files):
    '''tail each log file and return up to ~100 (local_time, message) tuples.

    files ending in 'temp' are in-progress writes and are skipped.
    '''
    combined_log = []
    total_lines, line_limit = 0, 100
    for file in log_files:
        if (file.endswith('temp')): continue

        log_entries = tail_file(file, line_count=100)
        for line in log_entries:
            # skipping over empty lines so the split below cannot fail on them.
            if not line.strip('\n'): continue

            total_lines += 1
            if total_lines >= line_limit: break

            # limit the split to the first field: the message body itself contains
            # '|' separators, so an unbounded split raised a ValueError on unpack.
            epoch, log_message = line.split('|', 1)

            date_time = System.calculate_time_offset(int(epoch))
            date_time = System.format_log_time(date_time)

            combined_log.append((date_time, log_message))

    return combined_log
def _write_to_disk(cls, job):
    '''append one (timestamp, message) job to today's log file for this module.

    file ownership is adjusted when the process runs as root so the web ui
    retains access to the file.
    '''
    timestamp, message = job

    path = f'{cls._path}{System.date(string=True)}-{cls.name}.log'

    with open(path, 'a+') as log:
        log.write(f'{timestamp}|{message}\n')

    if (cls.root):
        change_file_owner(path)
def organize(self):
    '''merge today's per-module log files into a single, sorted combined log.'''
    date = ''.join(System.date())

    log_entries = []
    for module in self.log_modules:
        module_entries = self.combine_logs(module, date)
        if (module_entries):
            log_entries.extend(module_entries)

    # entries sort lexically; nothing is written when no module produced entries.
    sorted_log_entries = sorted(log_entries)
    if (sorted_log_entries):
        self.write_combined_logs(sorted_log_entries, date)

    log_entries = None # overwriting var to regain system memory
def get_log_entries(log_files):
    '''tail each log file and return up to ~100 (local_time, *fields) tuples.

    files ending in 'temp' are in-progress writes and are skipped.
    '''
    combined_log = []

    line_limit = 100
    total_lines = 0
    for file in log_files:
        if file.endswith('temp'): continue

        for line in tail_file(file, line_count=100):
            # skipping over empty lines.
            if not line.strip('\n'): continue

            total_lines += 1
            if total_lines >= line_limit: break

            # first field is the epoch; remaining (up to 3) fields pass through.
            epoch, *log_entry = line.split('|', 3)

            local_time = System.format_log_time(System.calculate_time_offset(int(epoch)))

            combined_log.append((local_time, *log_entry))

    return combined_log
def message(self, system_ip, module, msg_type, msg_level, message):
    '''serialize a log event into the pipe-delimited wire format, utf-8 encoded.'''
    epoch = time.time()

    # using system/UTC time
    # 20140624|19:08:15|EVENT|DNSProxy:Informational|192.168.83.1|*MESSAGE*
    header = (
        f'{"".join(self.System.date())}|{System.format_time(epoch)}'
        f'|{self._convert_type(msg_type)}|{module}:{self._convert_level(msg_level)}'
    )

    return f'{header}|{system_ip}|{message}'.encode('utf-8')
def update_page(form):
    '''validate the submitted firewall/nat form, apply the rule change, and return
    (error, zone, page_data). page_data is only populated on success so the page
    can be re-served with the newly added configuration item.
    '''
    # initial input validation for presence of zone field
    zone = form.get('zone', None)
    if (zone not in valid_zones):
        return INVALID_FORM, 'GLOBAL_INTERFACE', None

    # if firewall rule, None will be used for evaluation.
    action = form.get('action', None)
    nat_type = form.get('nat_type', None)
    if (nat_type is None):
        error, zone = _firewall_rules(zone, action, form)

    elif (nat_type in ['DSTNAT', 'SRCNAT']):
        if (nat_type == 'DSTNAT'):
            error, zone = _dnat_rules(zone, action, form)

        elif (nat_type == 'SRCNAT'):
            error, zone = _snat_rules(zone, action, form)

    else:
        return INVALID_FORM, zone, None

    # updating page data then returning. this is because we need to serve the content with the newly added
    # configuration item.
    page_data = None
    if not error:
        page_data = {
            'firewall_rules': System.firewall_rules(chain=zone),
            'dmz_dnat_rules': System.nat_rules(),
            'local_snat_rules': System.nat_rules(nat_type='SRCNAT'),
            'netmasks': list(reversed(range(24, 33)))
        }

    # leftover debug print of page_data removed.
    return error, zone, page_data
class DHCPLeases:
    '''in-memory dhcp lease table with release/handout operations and async
    maintenance timers (lease expiry, file backup, reservation refresh).

    lease_table values: None (available), or [timestamp, mac]; a timestamp of -1
    marks a static reservation.
    '''

    def __init__(self, icmp_check):
        self.path = os.environ['HOME_DIR']
        self.Sys = System()

        # local subnet is read once from the on-disk configuration.
        with open(f'{self.path}/data/config.json', 'r') as settings:
            setting = json.load(settings)

        self.local_net = setting['Settings']['LocalNet']['Subnet']
        # first three octets of the local subnet, e.g. '192.168.83'.
        self.ip_range = '.'.join(self.local_net.split('.')[:3])

        self.lease_table = {}
        # when true, an ip is pinged before being handed out.
        self.icmp_check = icmp_check

    ## -- DHCP Server Operations -- ##
    def Release(self, ip, mac):
        '''free a lease, but only if it is held by the requesting mac and is not a reservation.'''
        lease_ip = self.lease_table.get(ip, None)
        if (lease_ip):
            if (lease_ip[1] == mac and lease_ip[0] != -1):
                self.lease_table[ip] = None

    def Handout(self, mac):
        '''return an ip for the mac: its reservation if one exists, a renewed
        existing lease otherwise, or the first available ip. returns None when
        the range is exhausted.
        '''
        if (mac in self.dhcp_reservations):
            return self.dhcp_reservations[mac]['IP Address']

        while True:
            # renew: if this mac already holds a lease, refresh its timestamp.
            for ip, value in self.lease_table.items():
                if (value is not None and value[1] == mac):
                    timestamp = round(time.time())

                    self.lease_table[ip] = [timestamp, mac]
                    return ip

            # allocate: first available slot, optionally verified idle via icmp.
            for ip, value in self.lease_table.items():
                if (value is None):
                    if (self.icmp_check):
                        result = self.ICMPCheck(ip)
                        # a ping response means the ip is in use by someone else.
                        if result: continue

                    timestamp = round(time.time())

                    self.lease_table[ip] = [timestamp, mac]
                    return ip

            # for/else: loop completed without returning -> no ips available.
            else:
                self.Sys.Log('DHCP Server: IP handout error | No Available IPs in range.')
                return None

    ## ----------------------------------- ##
    ## -- Reading Lease table from Json -- ##
    def LoadLeases(self):
        '''seed the in-memory lease table from the stored reservations on disk.'''
        print('[+] DHCP: Loading leases from file.')
        with open(f'{self.path}/data/dhcp_server.json', 'r') as stored_leases:
            leases = json.load(stored_leases)

        self.lease_table.update(leases['Reservations'])

    #### -- Initializing lease database operations -- ####
    def BuildRange(self):
        '''probe .16-.220 of the local range in parallel and mark in-use ips.'''
        print('[+] DHCP: Building handout range.')
        timestamp = round(time.time())
        threads = []
        for i in range(16, 221):
            hostip = f'{self.ip_range}.{i}'
            thread = threading.Thread(target=self.ICMPThread, args=(hostip, timestamp))
            threads.append(thread)

        for t in threads:
            t.start()

        for t in threads:
            t.join()

    def ICMPThread(self, hostip, timestamp):
        '''mark hostip as taken (unknown mac) if it answers a ping, available otherwise.'''
        response = self.ICMPCheck(hostip)
        if (response):
            self.lease_table[hostip] = [timestamp, None]
        else:
            self.lease_table[hostip] = None

    def ICMPCheck(self, hostip):
        '''return True when hostip responds to a single ping.'''
        # NOTE(review): DEVNULL is opened but never closed — leaks a file handle per call.
        DEVNULL = open(os.devnull, 'wb')
        response = subprocess.call(['ping', '-c', '1', hostip], stdout=DEVNULL)
        if (response == 0):
            result = True
        else:
            result = False

        return (result)

    #### -- Async auto timers section -- ####
    ## -- Purging Lease table / Checked every 5 minutes -- ##
    async def LeaseTimer(self):
        '''expire leases older than ~24h. runs every 5 minutes.'''
        while True:
            await asyncio.sleep(5 * 60)
            for ip, value in self.lease_table.items():
                # NOTE(review): reservation entries are [-1, mac] lists, so the
                # (value == -1) guard never matches them — confirm reservations
                # cannot be expired here. 86800 also looks like a typo for 86400.
                if (value is None or value == -1):
                    pass
                else:
                    timestamp = round(time.time())
                    time_elapsed = timestamp - value[0]
                    if (time_elapsed >= 86800):
                        self.lease_table[ip] = None

    ## -- Lease Table Backup / RUNs EVERY HOUR -- ##
    async def WritetoFile(self):
        '''persist all active (non-reservation) leases back to disk hourly.'''
        while True:
            await asyncio.sleep(60 * 60)
            with open(f'{self.path}/data/dhcp_server.json', 'r') as dhcp_server:
                server_leases = json.load(dhcp_server)

            new_leases = {}
            for ip, value in self.lease_table.items():
                if (value is not None and value != -1):
                    new_leases.update({ip: value})

            server_leases['Leases'] = new_leases
            with open(f'{self.path}/data/dhcp_server.json', 'w') as dhcp_server:
                json.dump(server_leases, dhcp_server, indent=4)

            self.Sys.Log('DHCP Server: Backed Up DNX DHCP Leases')

    ## -- Updating DHCP Reservations / Checked every 5 minutes -- #
    async def ReservationTimer(self):
        '''reload reservations from disk and pin them into the lease table every 5 minutes.'''
        while True:
            with open(f'{self.path}/data/dhcp_server.json', 'r') as dhcp_reservations:
                dhcp_reservation = json.load(dhcp_reservations)
                self.dhcp_reservations = dhcp_reservation['Reservations']

            ## -- Configuring DHCP Reservations -- ##
            for reservation in self.dhcp_reservations:
                res_ip = self.dhcp_reservations[reservation]['IP Address']
                # only reservations inside the handout range (.16-.220) are pinned.
                res = int(res_ip.split('.')[3])
                if (res in range(16, 221)):
                    self.lease_table[res_ip] = [-1, reservation]

            await asyncio.sleep(5 * 60)
class LogService:
    '''async log maintenance service: periodically combines per-module log files
    into one sorted file, trims database tables, and refreshes log settings.
    '''

    def __init__(self):
        self.System = Sys()

    def Start(self):
        '''create/own an event loop and run the gathered maintenance tasks forever.'''
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        asyncio.run(self.Main())

    async def Main(self):
        # Settings() must be CALLED: gather() awaits coroutine objects, not
        # coroutine functions. previously the bare method was passed, so settings
        # were never loaded and self.log_length was never set.
        await asyncio.gather(self.Settings(), self.Organize(), self.CleanDBTables())

    # Recurring logic to gather all log files and add them to a single file (combined logs) every 5 minutes
    async def Organize(self):
        log_modules = [
            'dhcp_server', 'dns_proxy', 'ip_proxy', 'ips', 'syslog', 'system', 'update'
        ]
        while True:
            # both the accumulator and the date are reset each cycle: previously
            # they were hoisted above the loop, so entries duplicated every pass
            # and the date went stale after a day rollover.
            log_entries = []
            date = self.System.Date()

            for module in log_modules:
                module_entries = await self.CombineLogs(module, date)
                if (module_entries):
                    log_entries.extend(module_entries)

            sorted_log_entries = sorted(log_entries)
            if (sorted_log_entries):
                await self.WriteCombinedLogs(sorted_log_entries, date)

            await asyncio.sleep(SETTINGS_TIMER)

    # grabbing the log from the sent in module, splitting the lines, and returning a list
    async def CombineLogs(self, module, date):
        '''return today's log lines for module as a list, or None when the file is missing.'''
        try:
            with open(
                    f'{HOME_DIR}/dnx_system/log/{module}/{date[0]}{date[1]}{date[2]}-{module}.log',
                    'r') as log_file:
                return log_file.read().strip().split('\n')
        except FileNotFoundError:
            pass

    # writing the log entries to the combined log
    async def WriteCombinedLogs(self, sorted_log_entries, date):
        with open(
                f'{HOME_DIR}/dnx_system/log/combined_log/{date[0]}{date[1]}{date[2]}-combined.log',
                'w+') as system_log:
            for log in sorted_log_entries:
                system_log.write(f'{log}\n')

    async def CleanDBTables(self):
        '''trim each database table to the configured log length, daily.

        NOTE(review): relies on Settings() having set self.log_length at least
        once before the first pass — confirm startup ordering.
        '''
        while True:
            for table in {'dnsproxy', 'ipproxy', 'ips', 'infectedclients'}:
                Database = DBConnector(table)
                Database.Connect()
                Database.Cleaner(self.log_length)
                Database.Disconnect()

            #running on system startup and every 24 hours thereafter
            await asyncio.sleep(EXTRA_LONG_TIMER)

    async def Settings(self):
        '''poll the on-disk configuration for the retention length.'''
        while True:
            with open(f'{HOME_DIR}/data/config.json', 'r') as logging:
                log = json.load(logging)

            self.log_length = log['settings']['logging']['length']

            await asyncio.sleep(SETTINGS_TIMER)
def __init__(self):
    # System utility instance (Sys) used by this object's methods.
    self.System = Sys()
class LogHandler:
    '''writes per-module log files for a process; supports both direct writes
    (threadsafe/sequential callers) and a queued path for concurrent callers.
    '''

    def __init__(self, process, module=None):
        self.process = process
        self.System = Sys()

        # pending (timestamp, message) entries consumed by QueueHandler.
        self.log_queue = deque()

        # module is optional here; Settings() assigns it when used as a poller.
        if (module):
            self.module = module

    async def Settings(self, module):
        '''poll the on-disk configuration and push the logging level onto the owning process.'''
        print('[+] Starting: Log Settings Update Thread.')
        self.module = module
        while True:
            with open(f'{HOME_DIR}/data/config.json', 'r') as settings:
                setting = json.load(settings)

            self.process.logging_level = setting['settings']['logging']['level']

            await asyncio.sleep(SETTINGS_TIMER)

    ## this is the message input for threadsafe/sequential modules
    def Message(self, message):
        '''append one timestamped message to today's log file for this module.'''
        timestamp = time.time()
        timestamp = self.System.FormatTime(timestamp)
        d = self.System.Date()
        with open(
                f'{HOME_DIR}/dnx_system/log/{self.module}/{d[0]}{d[1]}{d[2]}-{self.module}.log',
                'a+') as Log:
            Log.write(f'{timestamp}: {message}\n')

        ## make sure this works. should be fine, but front end might do something weird to chmod???
        # when running as root, hand ownership to the service user/group so the
        # front end keeps access to the file.
        user_id = os.geteuid()
        if (user_id == 0):
            file_path = f'{HOME_DIR}/dnx_system/log/{self.module}/{d[0]}{d[1]}{d[2]}-{self.module}.log'
            shutil.chown(file_path, user=USER, group=GROUP)
            os.chmod(file_path, 0o660)

    ## REPLACED THESE WITH CODE ABOVE. REMOVE AFTER TESTING AND VALIDATING CHANGES WORK
    # run(f'chown dnx:dnx {file_path}', shell=True)
    # run(f'chmod 660 {file_path}', shell=True)

    def AddtoQueue(self, message, log_queue_lock):
        '''enqueue a (timestamp, message) pair under the caller-provided lock.'''
        timestamp = time.time()
        with log_queue_lock:
            self.log_queue.append((timestamp, message))

    ## This is the message handler for ensure thread safety in multi threaded or asynchronous tasks
    async def QueueHandler(self, log_queue_lock):
        '''drain the queue to today's log file; idles when the queue is empty.'''
        while True:
            d = self.System.Date()
            if (not self.log_queue):
                # waiting 1 second before checking queue again for idle perf
                await asyncio.sleep(SHORT_POLL)
                continue

            with open(
                    f'{HOME_DIR}/dnx_system/log/{self.module}/{d[0]}{d[1]}{d[2]}-{self.module}.log',
                    'a+') as Log:
                # the lock is held for the whole drain so writers cannot interleave.
                with log_queue_lock:
                    while self.log_queue:
                        full_message = self.log_queue.popleft()
                        timestamp = full_message[0]
                        message = full_message[1]
                        Log.write(f'{timestamp}: {message}\n')

            # same root ownership adjustment as Message().
            user_id = os.geteuid()
            if (user_id == 0):
                file_path = f'{HOME_DIR}/dnx_system/log/{self.module}/{d[0]}{d[1]}{d[2]}-{self.module}.log'
                shutil.chown(file_path, user=USER, group=GROUP)
                os.chmod(file_path, 0o660)
class LogService:
    '''threaded log maintenance service: merges per-module log files into a
    combined log and trims database tables based on configured retention.
    '''

    def __init__(self):
        self.System = System()

        # log modules whose daily files are merged into the combined log.
        self.log_modules = [
            'dhcp_server', 'dns_proxy', 'ip_proxy', 'ips',
            'syslog', 'system', 'update', 'logins'
        ]

    def start(self):
        # each maintenance task runs on its own thread.
        threading.Thread(target=self.get_settings).start()
        threading.Thread(target=self.organize).start()
        threading.Thread(target=self.clean_db_tables).start()
        threading.Thread(target=self.clean_blocked_table).start()

    # Recurring logic to gather all log files and add them to a single file (combined logs) every 5 minutes
    @looper(THREE_MIN)
    def organize(self):
        '''combine each module's entries for today into one sorted file.'''
        # print('[+] Starting organize operation.')
        log_entries = []

        date = ''.join(self.System.date())
        for module in self.log_modules:
            module_entries = self.combine_logs(module, date)
            if (module_entries):
                log_entries.extend(module_entries)

        sorted_log_entries = sorted(log_entries)
        if (sorted_log_entries):
            self.write_combined_logs(sorted_log_entries, date)

        log_entries = None # overwriting var to regain system memory

    # grabbing the log from the sent in module, splitting the lines, and returning a list
    # TODO: see if we can load file as generator
    def combine_logs(self, module, date):
        '''return up to the first 20 lines of today's file for module, or None when missing.'''
        file_entries = []
        try:
            # print(f'opening {HOME_DIR}/dnx_system/log/{module}/{date[0]}{date[1]}{date[2]}-{module}.log to view entries')
            with open(
                    f'{HOME_DIR}/dnx_system/log/{module}/{date}-{module}.log',
                    'r') as log_file:
                # NOTE(review): capped at 20 lines per module — confirm intended.
                for _ in range(20):
                    line = log_file.readline().strip()
                    if not line: break

                    file_entries.append(line)
        except FileNotFoundError:
            return None
        else:
            return file_entries

    # writing the log entries to the combined log
    def write_combined_logs(self, sorted_log_entries, date):
        with open(f'{HOME_DIR}/dnx_system/log/combined/{date}-combined.log',
                  'w+') as system_log:
            # print(f'writing {HOME_DIR}/dnx_system/log/combined/{date[0]}{date[1]}{date[2]}-combined.log')
            for log in sorted_log_entries:
                system_log.write(f'{log}\n')

    @looper(ONE_DAY)
    def clean_db_tables(self):
        '''trim each proxy/ips table to the configured log length, daily.'''
        # print('[+] Starting general DB table cleaner.')
        with DBConnector() as FirewallDB:
            for table in ['dnsproxy', 'ipproxy', 'ips', 'infectedclients']:
                FirewallDB.table_cleaner(self.log_length, table=table)

    @looper(THREE_MIN)
    def clean_blocked_table(self):
        '''purge stale entries from the blocked table every few minutes.'''
        # print('[+] Starting DB blocked table cleaner.')
        with DBConnector() as FirewallDB:
            FirewallDB.blocked_cleaner(table='blocked')

    @cfg_read_poller('logging_client')
    def get_settings(self, cfg_file):
        '''refresh retention length and logging level when the config file changes.'''
        # print('[+] Starting settings update poller.')
        log_settings = load_configuration(cfg_file)

        self.log_length = log_settings['logging']['logging']['length']
        self.logging_level = log_settings['logging']['logging']['level']
def load_page():
    '''collect database counts, interface/system stats, service status, and any
    pending update/restart notification for the dashboard page.
    '''
    with DBConnector() as ProxyDB:
        domain_count = ProxyDB.unique_domain_count(table='dnsproxy', action='blocked')
        top_domains = ProxyDB.dashboard_query_top(5, table='dnsproxy', action='blocked')
        request_count = ProxyDB.total_request_count(table='dnsproxy', action='blocked')
        inf_hosts = ProxyDB.query_last(5, table='infectedclients', action='all')

    Int = Interface()
    intstat = Int.bandwidth()

    uptime = System.uptime()
    cpu = System.cpu_usage()
    ram = System.ram_usage()
    dns_servers = System.dns_status()

    #----- Services Status ------#
    dns_proxy = Services.status('dnx-dns-proxy')
    ip_proxy = Services.status('dnx-ip-proxy')
    dhcp_server = Services.status('dnx-dhcp-server')
    dnx_ips = Services.status('dnx-ips')

    mod_status = {
        'dns_proxy': dns_proxy,
        'ip_proxy': ip_proxy,
        'dnx_ips': dnx_ips,
        'dhcp_server': dhcp_server
    }

    dnx_license = load_configuration('license')['license']
    updates = load_configuration('updates')['updates']

    # notify is False when there is nothing to surface, otherwise a display string.
    notify = False
    # update/restart notifications only apply on a validated license.
    if (dnx_license['validated']):
        system_uptodate = updates['system']['current']
        domains_uptodate = updates['signature']['domain']['current']
        ip_uptodate = updates['signature']['ip']['current']

        if not all([system_uptodate, domains_uptodate, ip_uptodate]):
            notify = 'DNX firewall has updates available. Check updates tab for more info.'

        # System/Service Restart pending check. later checks override earlier
        # messages, so a pending system restart takes display priority.
        sys_restart = updates['system']['restart']
        domain_restart = updates['signature']['domain']['restart']
        ip_restart = updates['signature']['ip']['restart']

        if (domain_restart or ip_restart):
            notify = 'One or more DNX Services require a restart after signature updates. Please check the updates page for more information.'

        if (sys_restart):
            notify = 'DNX firewall is pending a system restart after updates.'

    dashboard = {
        'domain_count': domain_count,
        'infected_hosts': inf_hosts,
        'top_domains': top_domains,
        'request_count': request_count,
        'interfaces': intstat,
        'uptime': uptime,
        'cpu': cpu,
        'ram': ram,
        'dns_servers': dns_servers,
        'module_status': mod_status,
        'notify': notify
    }

    return dashboard