class PortManager:
    """Builds the parameters and runs the queries behind per-port reports.

    The set of valid ports and the database datetime column name are taken
    from the global configuration at construction time.
    """

    def __init__(self):
        self.g_config = GlobalConfig()
        self.validPortNumbers = self.g_config.get_ports()
        self.date_time_field = self.g_config.get_db_datetime_name()
        self.log = Logger().get('reportserver.manager.PortManager.PortManager')

    def isPortValid(self, port_number):
        """Return True if *port_number* is one of the configured ports."""
        return port_number in self.validPortNumbers

    def getPort(self, port_number, uom, unit):
        """Return a JSON-ready dict of events for one port over a timespan.

        :param port_number: port to report on
        :param uom: unit of measure for the timespan (e.g. 'days')
        :param unit: number of *uom* units to look back
        :return: dict with 'port', 'timespan' and 'items' keys; 'items' is
                 empty when the port is not configured
        """
        self.log.info("Retrieving port:" + str(port_number) + "uom:" + uom + " size: " + str(unit))
        items = []
        if self.isPortValid(port_number):
            results = DatabaseHandler().get_json_by_time(port_number, uom, unit)
            items = utilities.process_data(results)
        port_json = {
            'port': str(port_number),
            'timespan': uom + "=" + str(unit),
            'items': items
        }
        return port_json

    def _count_distinct(self, column, alias, tablename, unit, uom):
        """Count distinct *column* values in *tablename* since the start of
        the requested timespan.

        Shared implementation for get_port_attack_count / get_unique_ips,
        which previously duplicated this query verbatim.
        """
        # NOTE(review): argument order (unit, uom) is the reverse of the
        # (uom, units) order used by IpsManager.get_json_by_ip -- confirm
        # which one dateTimeUtility.get_begin_date_iso actually expects.
        fromDate = dateTimeUtility.get_begin_date_iso(unit, uom)
        # Table and field names come from trusted configuration; the date is
        # generated locally. Still prefer parameter binding if query_db
        # ever grows support for it.
        sql = "select count(distinct %s) as %s from %s where %s >= '%s' " \
              % (column, alias, tablename, self.date_time_field, fromDate)
        self.log.debug("sql is:" + sql)
        result = DatabaseHandler().query_db(sql)[0]
        return int(result[alias])

    def get_port_attack_count(self, tablename, unit, uom):
        """Number of distinct attack sessions recorded since the timespan start."""
        return self._count_distinct('session', 'total_attacks', tablename, unit, uom)

    def get_unique_ips(self, tablename, unit, uom):
        """Number of distinct peer addresses recorded since the timespan start."""
        return self._count_distinct('peerAddress', 'unique_ips', tablename, unit, uom)
class IpsManager:
    """Builds the JSON report of honeypot activity for a single peer
    address, one entry per configured port."""

    def __init__(self):
        self.g_config = GlobalConfig()
        self.valid_port_numbers = self.g_config.get_ports()
        self.date_time_field = self.g_config.get_db_datetime_name()
        self.log = Logger().get('reportserver.manager.IpsManager.py')

    def get_data(self, ipaddress, uom, unit):
        """Return a JSON-ready dict of all activity from *ipaddress*.

        :param ipaddress: peer address to report on
        :param uom: unit of measure for the timespan (e.g. 'days')
        :param unit: number of *uom* units to look back
        :return: dict with 'ipaddress', 'timespan' and 'ports' keys, where
                 'ports' is a list of one {port: items} dict per configured port
        """
        self.log.info("Retrieving ipaddress data: " + str(ipaddress) + " uom: " + uom + " size: " + str(unit))
        port_data = []
        for port in self.valid_port_numbers:
            results = self.get_json_by_ip(port, ipaddress, uom, unit)
            items = utilities.process_data(results)
            port_data.append({port: items})
        port_json = {
            'ipaddress': str(ipaddress),
            'timespan': uom + "=" + str(unit),
            'ports': port_data
        }
        return port_json

    def get_json_by_ip(self, portnumber, ipaddress, uom, units):
        """Query the port's table for rows from *ipaddress* newer than the
        timespan start, ordered by id then event time.

        :param portnumber: configured port whose table is queried
        :param ipaddress: peer address filter
        :param uom: timespan unit of measure
        :param units: number of *uom* units to look back
        :return: raw rows as returned by DatabaseHandler.query_db
        """
        begin_date_iso = dateTimeUtility.get_begin_date_iso(uom, units)
        table_name = self.g_config.get_plugin_config(portnumber)['table']
        date_time_field = self.g_config.get_db_datetime_name()
        # SECURITY(review): *ipaddress* originates from the report request
        # and is interpolated straight into the SQL text. If query_db can
        # take bound parameters, switch to placeholders; until then callers
        # must validate the address before it reaches this method.
        queryString = "SELECT * FROM %s where %s >= '%s' and peerAddress = '%s' order by id, %s" % (
            table_name, date_time_field, begin_date_iso, ipaddress, date_time_field)
        self.log.info("queryString is: " + str(queryString))
        results = DatabaseHandler().query_db(queryString)
        self.log.debug("results: " + str(results))
        return results
class GlobalConfig_Test(unittest.TestCase):
    """Unit tests for GlobalConfig: singleton sharing, refresh semantics,
    and lookups of configured values from the test config files."""

    def setUp(self):
        # refresh=True forces a fresh instance so every test starts from
        # the known test configuration files.
        self.gconfig = GlobalConfig(test_cfg_path, test_global_cfg_path, refresh=True)
        self.gconfig.read_plugin_config()
        self.gconfig.read_global_config()

    def test_getInstance(self):
        # Subsequent no-arg constructions must hand back the same instance.
        gconfig2 = GlobalConfig()
        self.assertEqual(str(self.gconfig),str(gconfig2),"these 2 objects should equal")
        gconfig3 = GlobalConfig()
        self.assertEqual(str(self.gconfig), str(gconfig3), "these 2 objects should equal")
        self.assertEqual(str(gconfig2), str(gconfig3), "these 2 objects should equal")

    def test_getPorts(self):
        ports = self.gconfig.get_ports()
        self.assertEqual(len(ports), 2, "expected 2 ports in test.cfg found: " + str(len(ports)))

    def test_getReportServerConfig(self):
        host = self.gconfig.get_report_server_host()
        port = self.gconfig.get_report_server_port()
        self.assertEqual(host, "", "expected host to be ''")
        self.assertEqual(port, 8080, "expected port to be '8080' ")

    def test_getReportServerHost(self):
        self.assertEqual("", self.gconfig.get_report_server_host())

    def test_getReportServerPort(self):
        self.assertEqual(8080, self.gconfig.get_report_server_port())

    def test_refresh_instance(self):
        gconfig2 = GlobalConfig(test_cfg_path, test_global_cfg_path, refresh=True)
        self.assertNotEqual(str(self.gconfig), str(gconfig2),
                            "these 2 objects should NOT equal when refresh set to True")

    def test_refresh_instance_same(self):
        gconfig2 = GlobalConfig()
        self.assertEqual(str(self.gconfig), str(gconfig2),
                         "these 2 objects should equal when False is set for Refresh")
        gconfig2 = GlobalConfig()
        self.assertEqual(str(self.gconfig), str(gconfig2),
                         "these 2 objects should equal with default of False")

    def test_get_date_time_name(self):
        self.assertEqual("eventDateTime", self.gconfig['Database']['datetime.name'])

    # Fixed method name: was test_get_db_peerAddress_nameself (typo).
    def test_get_db_peerAddress_name(self):
        self.assertEqual("peerAddress", self.gconfig['Database']['peerAddress.name'])

    def test_get_db_localAddress_name(self):
        self.assertEqual("localAddress", self.gconfig['Database']['localAddress.name'])
class Database:
    """Creates the honeypot SQLite database and keeps its table/column
    schema in sync with the plugin configuration."""

    def __init__(self):
        self.global_config = GlobalConfig()
        self.log = Logger().get('database.database.Database')

    def create_default_database(self):
        """
        Calls methods needed to create the database.
        """
        self.create_db_dir()
        self.create_db()
        # Execute scripts BEFORE updating schema
        run_db_scripts(self.global_config)
        self.update_schema()

    def create_db_dir(self):
        """
        Creates the database directory if it doesn't already exist.
        """
        db_path = self.global_config['Database']['path']
        (db_dir, db_name) = ntpath.split(db_path)
        if not os.path.isdir(db_dir):
            self.log.info("Database directory not found, "
                          "creating database directory...")
            os.mkdir(db_dir)

    def create_db(self):
        """
        Creates the database if it doesn't already exist.
        """
        (db_dir, db_name) = ntpath.split(self.global_config['Database']['path'])
        if not os.path.exists(self.global_config['Database']['path']):
            self.log.info("Database file not found, creating database file...")
            # Connecting to a nonexistent path makes sqlite create the file.
            connection = sqlite3.connect(self.global_config['Database']['path'])
            connection.close()

    def update_schema(self):
        """
        Updates the database when columns have been added to, or removed
        from, the schema.
        """
        # Create any new tables that have been added to the plugin
        # config schema.
        db_tables = DataValidator().get_tables()
        cfg_tables = get_config_table_list(
            self.global_config.get_ports(),
            self.global_config.get_plugin_dictionary())
        table_diff = list(set(cfg_tables) - set(db_tables))
        self.create_non_exist_tables(table_diff)
        # Populate the newly created tables with their column
        # definitions.
        DataValidator().update_tables_and_schema()
        self.update_table_structure()

    def create_non_exist_tables(self, table_diff):
        """
        create tables that do not exist from the table difference between
        the current database and the configuration
        """
        if table_diff:
            for table in table_diff:
                Table_Init.create_table(table, self.global_config)
            self.log.info('Updated database schema, table names now match configuration.')
        else:
            self.log.info('Database Schema and Configuration table names already match.')

    def create_dict_config_column_list(self):
        """
        get a dictionary of tables and corresponding columns from the config
        """
        plugin_dict = self.global_config.get_plugin_dictionary()
        return {
            plugin_dict.get(port).get('table'): plugin_dict.get(port).get('tableColumns')
            for port in self.global_config.get_ports()
        }

    def create_dict_transformed_column_list(self, database_column_lists):
        """
        returns only custom plugin defined columns from database schema
        i.e. ignores default columns
        """
        # Default column ids never change while we iterate; previously this
        # list was rebuilt once per table. Build it once, as a set, for
        # O(1) membership tests.
        default_ids = {default[0] for default in default_columns}
        transformed_db_column_list = {}
        for table in database_column_lists:
            col_list = database_column_lists[table]
            transformed_db_column_list[table] = []
            for column in col_list:
                # ignores the default columns
                if column[1] in default_ids:
                    continue
                transformed_db_column_list[table].append(
                    [column[0], column[1], column[2]])
        return transformed_db_column_list

    def update_table_structure(self):
        """Rebuild any table whose custom column layout no longer matches
        the configuration."""
        cfg_schema = self.create_dict_config_column_list()
        db_schema = DataValidator().get_schema()
        db_schema_sans_defaults = self.create_dict_transformed_column_list(db_schema)
        for table in cfg_schema:
            # Compare (name, type) pairs only -- column ids may differ.
            if not [(x[1], x[2]) for x in cfg_schema[table]] == \
                    [(x[1], x[2]) for x in db_schema_sans_defaults[table]]:
                Table_Init.change_table_structure(
                    table, cfg_schema[table], db_schema[table],
                    self.global_config)
class _Framework:
    """Top-level orchestrator: loads configuration, drops privileges,
    starts logging, the data manager and one network listener per
    configured port, and shuts everything down on SIGINT."""

    def __init__(self, plugin_cfg_path, global_cfg_path):
        self._global_config = GlobalConfig(plugin_cfg_path, global_cfg_path)
        self._plugin_imports = {}       # port -> plugin class (set by create_import_entry)
        self._listener_list = {}        # port -> NetworkListener
        self._running_plugins_list = []
        self._data_manager = None
        self._shutting_down = False
        self._log = None                # created in start_logging(), after config is read
        self._pid = os.getpid()

    def start(self):
        """Bring the framework up. Privileges are dropped before any
        config is read or sockets are opened; aborts if that fails."""
        self.set_shutdown_hook()
        print('Press Ctrl+C to exit.')
        if not self.drop_permissions():
            return
        self._global_config.read_global_config()
        self.start_logging()
        self._global_config.read_plugin_config()
        self._data_manager = DataManager()
        self._data_manager.start()
        self.start_listeners()

    def start_logging(self):
        """Create the framework logger from the [Framework] config section."""
        log_path = self._global_config['Framework']['logName']
        log_level = self._global_config['Framework']['logLevel']
        self._log = Logger(log_path, log_level).get('framework.frmwork.Framework')
        self._log.info('RECCE7 started (PID %d)' % self._pid)

    @staticmethod
    def drop_permissions():
        """If running as root, drop to an unprivileged user/group chosen by
        the RECCE7_OS_DIST environment variable.

        Returns True when already unprivileged or after a successful drop;
        False (caller must abort) when the distribution is unknown.
        """
        if os.getuid() != 0:
            return True
        dist_name = os.getenv('RECCE7_OS_DIST')
        # Unprivileged (user, group) per supported distribution.
        users_dict = {
            'centos': ('nobody', 'nobody'),
            'debian': ('nobody', 'nogroup')
        }
        if dist_name not in users_dict:
            print(
                'Unable to lower permission level - not continuing as\n'
                'superuser. Please set the environment variable\n'
                'RECCE7_OS_DIST to one of:\n\tcentos\n\tdebian\n'
                'or rerun as a non-superuser.')
            return False
        lowperm_user = users_dict[dist_name]
        nobody_uid = pwd.getpwnam(lowperm_user[0]).pw_uid
        nogroup_gid = grp.getgrnam(lowperm_user[1]).gr_gid
        # Order matters: supplementary groups and gid must be changed while
        # still root; setuid last, as it forfeits the right to do the others.
        os.setgroups([])
        os.setgid(nogroup_gid)
        os.setuid(nobody_uid)
        os.umask(0o077)
        return True

    def create_import_entry(self, port, name, clsname):
        """Import plugins.<name> and cache its class *clsname* under *port*."""
        imp = import_module('plugins.' + name)
        self._plugin_imports[port] = getattr(imp, clsname)

    def start_listeners(self):
        """Start one NetworkListener per configured port, importing each
        port's plugin class first."""
        ports = self._global_config.get_ports()
        for port in ports:
            plugin_config = self._global_config.get_plugin_config(port)
            module = plugin_config['module']
            clsname = plugin_config['moduleClass']
            self.create_import_entry(port, module, clsname)
            address = self._global_config['Framework']['listeningAddress']
            listener = NetworkListener(address, plugin_config, self)
            listener.start()
            self._listener_list[port] = listener

    def set_shutdown_hook(self):
        """Route Ctrl+C (SIGINT) to shutdown()."""
        signal.signal(signal.SIGINT, self.shutdown)

    def shutdown(self, *args):
        """Stop listeners, then plugins, then the data manager.

        Setting _shutting_down first makes plugin_stopped() a no-op so
        plugins exiting during this loop don't mutate the list we iterate.
        """
        self._shutting_down = True
        self._log.debug('Shutting down network listeners')
        for listener in self._listener_list.values():
            listener.shutdown()
        self._log.debug('Shutting down plugins')
        for plugin in self._running_plugins_list:
            plugin.shutdown()
        self._log.debug('Shutting down data manager')
        self._data_manager.shutdown()
        print('Goodbye!')

    #
    # Framework API
    #

    def get_config(self, port):
        """
        Returns the configuration dictionary for the plugin running on
        the specified port.

        :param port: a port number associated with a loaded plugin
        :return: a plugin configuration dictionary
        """
        return self._global_config.get_plugin_config(port)

    def spawn(self, socket, config):
        """
        Spawns the plugin configured by 'config' with the provided
        (accepted) socket.

        :param socket: an open, accepted socket returned by
                       socket.accept()
        :param config: the plugin configuration dictionary describing
                       the plugin to spawn
        :return: a reference to the plugin that was spawned
        """
        # ToDo Throw exception if plugin class not found
        plugin_class = self._plugin_imports[config['port']]
        plugin = plugin_class(socket, config, self)
        plugin.start()
        self._running_plugins_list.append(plugin)
        return plugin

    def insert_data(self, data):
        """
        Inserts the provided data into the data queue so that it can
        be pushed to the database.

        :param data: data object to add to the database
        """
        self._data_manager.insert_data(data)

    def plugin_stopped(self, plugin):
        """
        Tells the framework that the specified plugin has stopped
        running and doesn't need to be shutdown explicitly on program
        exit.

        :param plugin: a reference to a plugin
        """
        if self._shutting_down:
            return
        self._running_plugins_list.remove(plugin)
class database_test(unittest.TestCase):
    """Integration-style tests for database.Database: directory/file
    creation and schema migration against throwaway test config files.

    NOTE(review): tests build paths as os.getcwd() + self.test_db_dir, so
    they assume the suite runs from the repository root -- confirm the
    runner's working directory.
    """

    def setUp(self):
        # Paths are relative to the current working directory (see class note).
        self.test_db_dir = '/tests/database/test_database'
        self.test_db_file = '/tests/database/test_database/honeyDB.sqlite'
        # test configuration files
        self.plugins_config_file = 'tests/database/test_config/plugins.cfg'
        self.plugins_config_diff_file = 'tests/database/test_config/plugins_diff.cfg'
        self.plugins_config_diff_table_file = 'tests/database/test_config/plugins_diff_table.cfg'
        self.global_config_file = 'tests/database/test_config/global.cfg'
        # create global config instance
        self.gci = GlobalConfig(self.plugins_config_file,self.global_config_file)
        self.gci.read_global_config()
        self.gci.read_plugin_config()

    # patch the Logger new method so that it doesn't create the log file, we do not need to test that the logging
    # works just that the log statements are called.
    @patch.object(Logger,'__new__')
    def test_database_init(self,log):
        db = database.Database()
        self.assertIsInstance(db.global_config,GlobalConfig._GlobalConfig)
        self.assertTrue(log.called)

    @patch.object(Logger,'__new__')
    def test_database_create_default_database(self,log):
        # End-to-end: directory, file, scripts and schema in one call.
        db = database.Database()
        db.create_default_database()
        validator = datavalidator.DataValidator()
        # check that the directory exists
        self.assertTrue(os.path.isdir(os.getcwd() + self.test_db_dir))
        # check that the database file exists
        self.assertTrue(os.path.isfile(os.getcwd() + self.test_db_file))
        # get the table names from the database
        schema_table_list = validator.get_tables()
        # get the user defined tables from the configuration file
        config_table_list = util.get_config_table_list(self.gci.get_ports(),
                                                       self.gci.get_plugin_dictionary())
        # check that the non user defined table p0f exists
        self.assertTrue('p0f' in schema_table_list)
        # check that the non user defined table ipInfo exists
        self.assertTrue('ipInfo' in schema_table_list)
        # check that the non user defined table sessions exists
        self.assertTrue('sessions' in schema_table_list)
        # check that the user defined tables are a subset of the tables in
        # the database schema
        self.assertTrue(set(config_table_list) < set(schema_table_list))
        shutil.rmtree(os.getcwd() + self.test_db_dir)

    @patch.object(Logger,'__new__')
    def test_database_create_db_dir(self,log):
        db = database.Database()
        db.create_db_dir()
        self.assertTrue(os.path.isdir(os.getcwd() + self.test_db_dir))
        self.assertTrue(log.called)
        shutil.rmtree(os.getcwd() + self.test_db_dir)

    @patch.object(Logger,'__new__')
    def test_database_create_db_dir_already_exists(self,log):
        # Pre-create the directory; create_db_dir must not log (no work done).
        os.mkdir(os.getcwd() + self.test_db_dir)
        self.assertTrue(os.path.isdir(os.getcwd() + self.test_db_dir))
        db = database.Database()
        # reset_mock clears the call made during Database() construction.
        log.reset_mock()
        db.create_db_dir()
        self.assertFalse(log.called)
        shutil.rmtree(os.getcwd() + self.test_db_dir)

    @patch.object(Logger,'__new__')
    def test_database_create_db(self,log):
        db = database.Database()
        db.create_db_dir()
        db.create_db()
        self.assertTrue(os.path.isfile(os.getcwd() + self.test_db_file))
        self.assertTrue(log.called)
        shutil.rmtree(os.getcwd() + self.test_db_dir)

    @patch.object(Logger,'__new__')
    def test_database_update_schema(self,log):
        # First pass: schema matches the baseline plugins.cfg.
        db = database.Database()
        db.create_db_dir()
        db.create_db()
        util.run_db_scripts(self.gci)
        db.update_schema()
        validator = datavalidator.DataValidator()
        schema = validator.get_schema()
        self.assertTrue(schema['test_http'][5][1] == 'command')
        self.assertTrue(schema['test_http2'][6][1] == 'path')
        self.assertTrue(len(schema['test_telnet']) == 7)
        # set global config instance to the differing column config file
        self.gci = GlobalConfig(self.plugins_config_diff_file,self.global_config_file,True)
        self.gci.read_global_config()
        self.gci.read_plugin_config()
        # Second pass: update_schema must migrate the tables to the new columns.
        db2 = database.Database()
        db2.update_schema()
        validator2 = datavalidator.DataValidator()
        schema2 = validator2.get_schema()
        self.assertTrue(schema2['test_http'][5][1] == 'unit_test_data_1')
        self.assertTrue(schema2['test_http2'][6][1] == 'unit_test_data_2')
        self.assertTrue(len(schema2['test_telnet']) == 8)
        self.assertTrue(schema2['test_telnet'][7][1] == 'unit_test_data_3')
        # set global config instance back to normal
        self.gci = GlobalConfig(self.plugins_config_file,self.global_config_file,True)
        self.gci.read_global_config()
        self.gci.read_plugin_config()
        shutil.rmtree(os.getcwd() + self.test_db_dir)