def _write_to_disk(cls, job):
    """Append a single (timestamp, message) pair to today's log file.

    The target path is built from the class's log directory, the current
    date string, and the class's module name.
    """
    log_date = System.date(string=True)
    log_path = cls._path + f'{log_date}-{cls.name}.log'

    entry_time, entry_text = job

    with open(log_path, 'a+') as log_file:
        log_file.write(f'{entry_time}|{entry_text}\n')

    # when running as root, hand file ownership back to the service user
    if cls.root:
        change_file_owner(log_path)
def message(cls, mod_name, mtype, level, message):
    """Build a pipe-delimited log line and return it utf-8 encoded.

    Field layout (system/UTC time):
    20140624|19:08:15|EVENT|DNSProxy:Informational|192.168.83.1|*MESSAGE*
    """
    cur_date = System.date(string=True)
    cur_time = System.format_time(fast_time())
    level_name = cls.convert_level(level)

    system_ip = None  # placeholder field; rendered literally as 'None'

    log_line = '|'.join([
        f'{cur_date}', f'{cur_time}', f'{mtype.name}',
        f'{mod_name}:{level_name}', f'{system_ip}', f'{message}'
    ])

    return log_line.encode('utf-8')
def organize(self):
    """Merge each module's per-day log entries into one sorted combined log."""
    # print('[+] Starting organize operation.')
    date = ''.join(System.date())

    collected = []
    for mod in self.log_modules:
        entries = self.combine_logs(mod, date)
        if entries:
            collected.extend(entries)

    ordered = sorted(collected)
    if ordered:
        self.write_combined_logs(ordered, date)

    collected = None  # overwriting var to regain system memory
class LogService:
    """Background service that merges per-module logs and prunes DB tables."""

    def __init__(self):
        self.System = System()

        # modules whose per-day log files get merged into the combined log
        self.log_modules = [
            'dhcp_server', 'dns_proxy', 'ip_proxy', 'ips',
            'syslog', 'system', 'update', 'logins'
        ]

        # populated asynchronously by the get_settings poller thread; None
        # until the first settings read completes. clean_db_tables checks for
        # this so it does not raise AttributeError if its first pass wins the
        # race against the settings thread.
        self.log_length = None
        self.logging_level = None

    def start(self):
        """Spawn the recurring worker threads (settings poll, log merge, DB cleaners)."""
        threading.Thread(target=self.get_settings).start()
        threading.Thread(target=self.organize).start()
        threading.Thread(target=self.clean_db_tables).start()
        threading.Thread(target=self.clean_blocked_table).start()

    # recurring logic to gather all log files and add them to a single file
    # (combined logs); interval is defined by THREE_MIN
    @looper(THREE_MIN)
    def organize(self):
        """Merge each module's per-day log entries into one sorted combined log."""
        # print('[+] Starting organize operation.')
        log_entries = []
        date = ''.join(self.System.date())

        for module in self.log_modules:
            module_entries = self.combine_logs(module, date)
            if module_entries:
                log_entries.extend(module_entries)

        sorted_log_entries = sorted(log_entries)
        if sorted_log_entries:
            self.write_combined_logs(sorted_log_entries, date)

    # grabbing the log from the sent in module, splitting the lines, and
    # returning a list. returns None if the module has no log for the day.
    # TODO: see if we can load file as generator
    def combine_logs(self, module, date):
        # NOTE(fix): previously this read at most 20 lines and stopped at the
        # first blank line, silently dropping the rest of the day's entries
        # from the combined log. now the whole file is read; blank lines are
        # skipped rather than treated as end-of-file.
        try:
            with open(
                    f'{HOME_DIR}/dnx_system/log/{module}/{date}-{module}.log', 'r') as log_file:
                file_entries = []
                for line in log_file:
                    line = line.strip()
                    if line:
                        file_entries.append(line)
        except FileNotFoundError:
            return None
        else:
            return file_entries

    # writing the log entries to the combined log
    def write_combined_logs(self, sorted_log_entries, date):
        """Overwrite the day's combined log with the sorted entries, one per line."""
        with open(f'{HOME_DIR}/dnx_system/log/combined/{date}-combined.log', 'w+') as system_log:
            for log in sorted_log_entries:
                system_log.write(f'{log}\n')

    @looper(ONE_DAY)
    def clean_db_tables(self):
        """Prune the general DB tables down to the configured log length."""
        # print('[+] Starting general DB table cleaner.')
        # settings load asynchronously in the get_settings thread; skip this
        # pass (the looper will retry) instead of raising on a missing value.
        if self.log_length is None:
            return

        with DBConnector() as FirewallDB:
            for table in ['dnsproxy', 'ipproxy', 'ips', 'infectedclients']:
                FirewallDB.table_cleaner(self.log_length, table=table)

    @looper(THREE_MIN)
    def clean_blocked_table(self):
        """Prune expired entries from the blocked table."""
        # print('[+] Starting DB blocked table cleaner.')
        with DBConnector() as FirewallDB:
            FirewallDB.blocked_cleaner(table='blocked')

    @cfg_read_poller('logging_client')
    def get_settings(self, cfg_file):
        """Refresh log length and logging level from the config file on change."""
        # print('[+] Starting settings update poller.')
        log_settings = load_configuration(cfg_file)

        self.log_length = log_settings['logging']['logging']['length']
        self.logging_level = log_settings['logging']['logging']['level']