def test_get_account_block_list(self):
    """The block list stored under account.block_list is returned verbatim."""
    Config.set_config({'account': {'block_list': ['thing1', 'blue fish']}})
    self.assertEqual(Config.get_account_block_list(),
                     ['thing1', 'blue fish'])
def main():
    """
    Kick off DuoLogSync by setting important variables, creating and running
    a Producer-Consumer pair for each log-type defined in a config file
    passed to the program.
    """
    arg_parser = argparse.ArgumentParser(prog='duologsync',
                                         description="Path to config file")
    arg_parser.add_argument('ConfigPath', metavar='config-path', type=str,
                            help='Config to start application')
    args = arg_parser.parse_args()

    # Handle shutting down the program via Ctrl-C
    signal.signal(signal.SIGINT, sigint_handler)

    # Create a config Dictionary from a YAML file located at args.ConfigPath
    config = Config.create_config(args.ConfigPath)
    Config.set_config(config)
    Program.setup_logging(Config.get_log_filepath())

    # Dict of writers (server id: writer) to be used for consumer tasks
    server_to_writer = Writer.create_writers(Config.get_servers())

    # List of Producer/Consumer objects as asyncio tasks to be run
    tasks = create_tasks(server_to_writer)

    # Fetch the loop once instead of calling get_event_loop() repeatedly;
    # repeated calls are deprecated in modern Python and silently rely on
    # every call returning the same loop object.
    loop = asyncio.get_event_loop()

    # Run the Producers and Consumers
    loop.run_until_complete(asyncio.gather(*tasks))
    loop.close()

    if Program.is_logging_set():
        print(f"DuoLogSync: shutdown successfully. Check "
              f"{Config.get_log_filepath()} for program logs")
def test_set_config_normal(self):
    """set_config stores the given dictionary on the class for later lookups."""
    Config.set_config({'field_one': {'nested_field': True},
                       'field_two': 100})
    # Inspect the private backing dict directly to verify storage
    self.assertEqual(Config._config['field_one']['nested_field'], True)
    self.assertEqual(Config._config['field_two'], 100)
def create_tasks(server_to_writer):
    """
    Build a Producer-Consumer pair for every endpoint enabled within the
    account defined in config, or retrieve child accounts and do the same
    if the account is MSP. Return the asyncio tasks that run those pairs.

    @param server_to_writer Dictionary mapping server ids to writer objects

    @return list of asyncio tasks for running the Producer and Consumer
            objects
    """
    # Object with functions needed to utilize log API calls
    admin = create_admin(
        Config.get_account_ikey(), Config.get_account_skey(),
        Config.get_account_hostname())

    # This is where functionality would be added to check if an account is
    # MSP (Config.account_is_msp), and then retrieve child accounts
    # (ignoring those in a blocklist) if the account is indeed MSP
    tasks = []
    for mapping in Config.get_account_endpoint_server_mappings():
        # All endpoints in a mapping share that mapping's server writer
        writer = server_to_writer[mapping.get('server')]
        for endpoint in mapping.get('endpoints'):
            tasks.extend(
                create_consumer_producer_pair(endpoint, writer, admin))

    return tasks
def test_get_account_skey(self):
    """The account secret key is read from account.skey."""
    Config.set_config({'account': {'skey': 'PASSWORD'}})
    self.assertEqual(Config.get_account_skey(), 'PASSWORD')
def update_log_checkpoint(log_type, log_offset, child_account_id):
    """
    Save log_offset to the checkpoint file for log_type.

    @param log_type         Used to determine which checkpoint file to open
    @param log_offset       Information to save in the checkpoint file
    @param child_account_id If set, the checkpoint filename is namespaced
                            per child account (MSP usage)
    """
    Program.log(f"{log_type} consumer: saving latest log offset to a "
                "checkpointing file",
                logging.INFO)

    if child_account_id:
        filename = f"{log_type}_checkpoint_data_{child_account_id}.txt"
    else:
        filename = f"{log_type}_checkpoint_data.txt"
    checkpoint_filename = os.path.join(Config.get_checkpoint_dir(), filename)

    # The context manager guarantees the file is flushed and closed even if
    # the write raises, unlike the previous bare open()/close() pair.
    with open(checkpoint_filename, 'w') as checkpoint_file:
        checkpoint_file.write(json.dumps(log_offset) + '\n')
def __init__(self, api_call, log_queue, log_type):
    """
    Store the API call and queue used by this producer, then restore the
    saved offset for log_type from the checkpoint directory if
    checkpointing is enabled.
    """
    self.api_call = api_call
    self.log_queue = log_queue
    self.log_type = log_type
    self.log_offset = get_log_offset(
        log_type,
        Config.get_checkpointing_enabled(),
        Config.get_checkpoint_dir())
def format_log(self, log):
    """
    Format the given log in a certain way depending on self.message_type

    @param log  The log to be formatted

    @return the formatted version of log, encoded as bytes with a trailing
            newline
    """
    formatted_log = None

    if self.log_format == Config.CEF:
        formatted_log = log_to_cef(log, self.keys_to_labels)
    elif self.log_format == Config.JSON:
        formatted_log = json.dumps(log)
        # NOTE(review): the syslog header is only prepended for JSON-format
        # logs, not CEF — confirm this asymmetry is intentional
        if Config.get_syslog_enabled():
            # log_timestamp = datetime.fromisoformat(log["isotimestamp"]) #new in 3.7, provides microsecond resolution
            # Epoch seconds converted to an aware UTC datetime
            log_timestamp = datetime.fromtimestamp(log["timestamp"],
                                                   tz=timezone.utc)
            syslog_header = get_syslog_header(
                format=Config.get_syslog_format(),
                timestamp=log_timestamp)
            formatted_log = ' '.join([syslog_header, formatted_log])
    else:
        # Any log_format other than CEF/JSON is rejected outright
        raise ValueError(
            f"{self.log_format} is not a supported log format")

    return formatted_log.encode() + b'\n'
def test_create_tasks_one_server_multiple_endpoints(self, mock, _):
    """
    Every endpoint mapped to a single server creates one pair using that
    server's writer.
    """
    server_to_writer = {'Main': 'writer_1'}
    config = {
        'dls_settings': {
            'proxy': {
                'proxy_server': 'test.com',
                'proxy_port': 1234
            }
        },
        'account': {
            'ikey': 'a',
            'skey': 'a',
            'hostname': 'a',
            'endpoint_server_mappings': [
                {
                    'endpoints': ['adminaction', 'auth', 'telephony',
                                  'trustmonitor'],
                    'server': 'Main'
                }
            ],
            'is_msp': False
        }
    }
    Config.set_config(config)
    create_tasks(server_to_writer)
    calls = [
        call('adminaction', 'writer_1', 'duo_admin'),
        call('auth', 'writer_1', 'duo_admin'),
        call('telephony', 'writer_1', 'duo_admin'),
        call('trustmonitor', 'writer_1', 'duo_admin')
    ]
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual
    self.assertEqual(mock.call_count, 4)
    mock.assert_has_calls(calls, any_order=True)
def test_create_tasks_multiple_servers_multiple_endpoints(self, mock, _):
    """
    Endpoints mapped to different servers each use their own server's
    writer when pairs are created.
    """
    server_to_writer = {'Main': 'writer_1', 'Backup': 'writer_2'}
    config = {
        'account': {
            'ikey': 'a',
            'skey': 'a',
            'hostname': 'a',
            'endpoint_server_mappings': [{
                'endpoints': ['auth', 'telephony'],
                'server': 'Main'
            }, {
                'endpoints': ['adminaction'],
                'server': 'Backup'
            }]
        }
    }
    Config.set_config(config)
    create_tasks(server_to_writer)
    calls = [
        call('adminaction', 'writer_2', 'duo_admin'),
        call('auth', 'writer_1', 'duo_admin'),
        call('telephony', 'writer_1', 'duo_admin')
    ]
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual
    self.assertEqual(mock.call_count, 3)
    mock.assert_has_calls(calls, any_order=True)
def test_get_account_hostname(self):
    """The API hostname is read from account.hostname."""
    Config.set_config({'account': {'hostname': 'internet.com'}})
    self.assertEqual(Config.get_account_hostname(), 'internet.com')
def test_account_is_msp(self):
    """account.is_msp is surfaced unchanged by account_is_msp()."""
    Config.set_config({'account': {'is_msp': False}})
    self.assertEqual(Config.account_is_msp(), False)
def test_get_log_format(self):
    """dls_settings.log_format is returned by get_log_format()."""
    Config.set_config({'dls_settings': {'log_format': 'JSON'}})
    self.assertEqual(Config.get_log_format(), 'JSON')
def test_get_log_filepath(self):
    """dls_settings.log_filepath is returned by get_log_filepath()."""
    Config.set_config({'dls_settings': {'log_filepath': '/dev/null'}})
    self.assertEqual(Config.get_log_filepath(), '/dev/null')
def test_get_checkpoint_dir(self):
    """The checkpoint dir comes from dls_settings.checkpointing.directory."""
    Config.set_config(
        {'dls_settings': {'checkpointing': {'directory': '/tmp'}}})
    self.assertEqual(Config.get_checkpoint_dir(), '/tmp')
def test_create_tasks_one_server_multiple_endpoints_msp(self, mock, mock_childaccount, mock_createadmin):
    """
    For an MSP account, one producer/consumer pair is created per endpoint
    per child account, and child accounts are fetched exactly once.
    """
    server_to_writer = {'Main': 'writer_1'}
    config = {
        'account': {
            'ikey': 'a',
            'skey': 'a',
            'hostname': 'a',
            'endpoint_server_mappings': [
                {
                    'endpoints': ['adminaction', 'auth', 'telephony'],
                    'server': 'Main'
                }
            ],
            'is_msp': True
        }
    }
    Config.set_config(config)

    create_tasks(server_to_writer)

    # One expected call per (child account, endpoint) combination
    expected = [
        call(endpoint, 'writer_1', duo_client.Accounts, account_id)
        for account_id in ('12345', '56789')
        for endpoint in ('adminaction', 'auth', 'telephony')
    ]
    self.assertEqual(mock_childaccount.call_count, 1)
    self.assertEqual(mock.call_count, 6)
    mock.assert_has_calls(expected, any_order=True)
def test_get_value_with_invalid_keys(self):
    """get_value raises ValueError for keys absent from the config."""
    Config.set_config({'field_one': {'nested_field': True},
                       'field_two': 100})
    with self.assertRaises(ValueError):
        Config.get_value(['house_key', 'car_key'])
def test_set_config_twice(self):
    """A second set_config call is rejected with RuntimeError."""
    config = {'field_one': {'nested_field': True}, 'field_two': 100}
    Config.set_config(config)
    with self.assertRaises(RuntimeError):
        Config.set_config(config)
def test_get_account_ikey(self):
    """The integration key is read from account.ikey."""
    Config.set_config({'account': {'ikey': 'ASDFF'}})
    self.assertEqual(Config.get_account_ikey(), 'ASDFF')
def test_get_servers(self):
    """The servers list is returned unchanged by get_servers()."""
    server_list = ['item1', 'item2']
    Config.set_config({'servers': server_list})
    self.assertEqual(Config.get_servers(), ['item1', 'item2'])
def test_get_checkpointing_enabled(self):
    """dls_settings.checkpointing.enabled is surfaced unchanged."""
    Config.set_config(
        {'dls_settings': {'checkpointing': {'enabled': False}}})
    self.assertEqual(Config.get_checkpointing_enabled(), False)
def test_get_account_endpoint_server_mappings(self):
    """account.endpoint_server_mappings is returned unchanged."""
    mappings = {'auth': 'ha.com'}
    Config.set_config({'account': {'endpoint_server_mappings': mappings}})
    self.assertEqual(Config.get_account_endpoint_server_mappings(),
                     {'auth': 'ha.com'})
def test_get_value_normal(self):
    """get_value walks nested keys and returns the stored values."""
    Config.set_config({'field_one': {'nested_field': True},
                       'field_two': 100})
    self.assertEqual(Config.get_value(['field_one', 'nested_field']), True)
    self.assertEqual(Config.get_value(['field_two']), 100)
def test_get_value_from_keys_bad_keys(self):
    """
    get_value_from_keys returns None (rather than raising) when a key is
    missing at any nesting level.
    """
    dictionary = {'house': {'bedrooms': 2}}

    # ('hoose',) — the original passed ('hoose'), which is just the string
    # 'hoose' rather than a one-element tuple; the trailing comma restores
    # the intended "single unknown key" case
    value_one = Config.get_value_from_keys(dictionary, ('hoose',))
    value_two = Config.get_value_from_keys(dictionary,
                                           ('house', 'badrooms'))

    self.assertEqual(value_one, None)
    self.assertEqual(value_two, None)
def __init__(self, api_call, log_queue, log_type, account_id=None,
             url_path=None):
    """
    Store the API call, queue and (for MSP usage) the child account id and
    URL path, then restore the saved offset for log_type from the
    checkpoint directory if checkpointing is enabled.
    """
    self.api_call = api_call
    self.log_queue = log_queue
    self.log_type = log_type
    self.account_id = account_id
    self.url_path = url_path
    self.log_offset = get_log_offset(
        log_type,
        Config.get_checkpointing_enabled(),
        Config.get_checkpoint_dir(),
        account_id)
def create_consumer_producer_pair(endpoint, writer, admin, child_account=None):
    """
    Create a pair of Producer-Consumer objects for each endpoint and return a
    list containing the asyncio tasks for running those objects.

    @param endpoint         Log type to create producer/consumer pair for
    @param writer           Object for writing logs to a server
    @param admin            Object from which to get the correct API endpoints
    @param child_account    If present, this is being used by MSP and pass
                            appropriate account id

    @return list of asyncio tasks for running the Producer and Consumer
            objects (empty if endpoint is unrecognized)
    """
    # The format a log should have before being consumed and sent
    log_format = Config.get_log_format()
    log_queue = asyncio.Queue()
    producer = consumer = None

    # Create the right pair of Producer-Consumer objects based on endpoint.
    # MSP accounts go through the generic json_api_call with an explicit
    # URL path; non-MSP accounts use the typed admin-client helper.
    if endpoint == Config.AUTH:
        if Config.account_is_msp():
            producer = AuthlogProducer(admin.json_api_call, log_queue,
                                       child_account_id=child_account,
                                       url_path="/admin/v2/logs/authentication")
        else:
            producer = AuthlogProducer(admin.get_authentication_log,
                                       log_queue)
        consumer = AuthlogConsumer(log_format, log_queue, writer,
                                   child_account)
    elif endpoint == Config.TELEPHONY:
        if Config.account_is_msp():
            producer = TelephonyProducer(admin.json_api_call, log_queue,
                                         child_account_id=child_account,
                                         url_path='/admin/v1/logs/telephony')
        else:
            producer = TelephonyProducer(admin.get_telephony_log, log_queue)
        consumer = TelephonyConsumer(log_format, log_queue, writer,
                                     child_account)
    elif endpoint == Config.ADMIN:
        if Config.account_is_msp():
            producer = AdminactionProducer(admin.json_api_call, log_queue,
                                           child_account_id=child_account,
                                           url_path='/admin/v1/logs/administrator')
        else:
            producer = AdminactionProducer(admin.get_administrator_log,
                                           log_queue)
        consumer = AdminactionConsumer(log_format, log_queue, writer,
                                       child_account)
    else:
        # Unknown endpoint: warn, drop the queue that was created, and
        # return no tasks
        Program.log(f"{endpoint} is not a recognized endpoint",
                    logging.WARNING)
        del log_queue
        return []

    # Wrap the produce/consume coroutines as scheduled asyncio tasks
    tasks = [asyncio.ensure_future(producer.produce()),
             asyncio.ensure_future(consumer.consume())]

    return tasks
def test_get_value_from_keys_normal(self):
    """get_value_from_keys resolves both single and nested key tuples."""
    record = {
        'level_one': '2FA',
        'access_device': {'ip': '192.168.0.1'}
    }
    self.assertEqual(
        Config.get_value_from_keys(record, ('level_one', )), '2FA')
    self.assertEqual(
        Config.get_value_from_keys(record, ('access_device', 'ip')),
        '192.168.0.1')
def _construct_extension(log, keys_to_labels):
    """
    Create the extension for a CEF message using the given log and dictionary.

    @param log             The log to convert into a CEF message
    @param keys_to_labels  Dictionary of keys used for retrieving values and
                           the associated labels those values should be given

    @return the extension field for a CEF message
    """
    # Additional key=value fields beyond what CEF requires
    fields = []
    # Counter for generated csN custom-string labels
    next_custom = 1

    for keys, label in keys_to_labels.items():
        value = Config.get_value_from_keys(log, keys)
        name = label['name']

        if label['is_custom']:
            # Custom values use a csN key plus a csNLabel entry naming it
            cs_key = f"cs{next_custom}"
            fields.append(f"{cs_key}Label={name}")
            next_custom += 1
            name = cs_key

        fields.append(f"{name}={value}")

    return ' '.join(fields)
async def call_log_api(self):
    """
    Make a call to a log-specific API and return the API result. The default
    implementation given here will not suffice for every type of log API and
    so should be overridden by a child class when necessary.

    @return the result of the API call
    """
    if Config.account_is_msp():
        # Make an API call to retrieve authlog logs for MSP accounts.
        # six.ensure_str keeps param values as str on both Py2/Py3
        # client stacks
        parameters = {"mintime": six.ensure_str(str(self.log_offset)),
                      "account_id": six.ensure_str(self.account_id)}

        # Run the blocking client call off the event loop
        api_result = await run_in_executor(
            functools.partial(
                self.api_call,
                method="GET",
                path=self.url_path,
                params=parameters
            )
        )
    else:
        # Non-MSP: the bound api_call accepts mintime directly
        api_result = await run_in_executor(
            functools.partial(
                self.api_call,
                mintime=self.log_offset
            )
        )

    return api_result
def test_create_config_normal(self):
    """
    create_config parses a well-formed YAML file into the expected dict,
    including defaults for settings the file may omit.
    """
    config_filepath = 'tests/resources/config_files/standard.yml'
    correct_config = {
        'version': '1.0.0',
        'dls_settings': {
            'log_filepath': '/tmp/duologsync.log',
            'log_format': 'JSON',
            'api': {
                'offset': 180,
                'timeout': 120
            },
            'checkpointing': {
                'enabled': False,
                'directory': '/tmp/dls_checkpoints'
            },
            'proxy': {
                'proxy_server': 'test.com',
                'proxy_port': 1234
            }
        },
        'servers': [{
            'id': 'main server',
            'hostname': 'mysiem.com',
            'port': 8888,
            'protocol': 'TCPSSL',
            'cert_filepath': 'cert.crt'
        }, {
            'id': 'backup',
            'hostname': 'safesiem.org',
            'port': 13031,
            'protocol': 'UDP'
        }],
        'account': {
            'ikey': 'AAA101020K12K1K23',
            'skey': 'jyJKYAGJKAYGDKJgyJygFUg9F9gyFuo9',
            'hostname': 'api-test.first.duosecurity.com',
            'endpoint_server_mappings': [{
                'endpoints': ['adminaction', 'auth'],
                'server': 'main server'
            }, {
                'endpoints': ['telephony'],
                'server': 'backup'
            }],
            'is_msp': True,
            'block_list': []
        }
    }

    config = Config.create_config(config_filepath)
    # The api offset is pinned back to the raw value before comparing
    # because create_config rewrites it (presumably into a derived
    # timestamp — confirm against Config.create_config), which would make
    # the structural comparison below nondeterministic
    config['dls_settings']['api']['offset'] = 180
    self.assertEqual(correct_config, config)