def test_encrypted_post(self):
    """Test that the API can receive and decrypt encrypted data from agent.

    Args:
        None

    Returns:
        None

    """
    # Local import: only this test needs shutil (keyring cleanup)
    import shutil

    # Initialize key variables
    config = ServerConfig()
    agent_name = 'test_encrypted_agent'

    # Create a directory for the Agent keyring as by default the
    # API and agent use the same keyring directory
    keyring_directory = tempfile.mkdtemp()

    # Fix: the temporary keyring directory was never removed, leaking a
    # directory on every run. Remove it when the test ends, pass or fail.
    self.addCleanup(shutil.rmtree, keyring_directory, ignore_errors=True)

    # Make agent data
    agent_data = _make_agent_data()

    # Turn agent data into a dict to be compared to
    # the data received by the API
    expected = converter.posting_data_points(
        converter.agentdata_to_post(agent_data))

    # Make encrypted post
    encrypted_agent = EncryptedAgent(agent_name, directory=keyring_directory)
    post_encrypted = EncryptedPostAgent(
        agent_data, encrypted_agent.encryption)
    post_encrypted.post()

    # Read data from directory
    cache_directory = config.agent_cache_directory(PATTOO_API_AGENT_NAME)
    cache_data = files.read_json_files(cache_directory)

    # Test: exactly one cached entry of the form (filepath, data)
    self.assertEqual(len(cache_data), 1)
    self.assertEqual(len(cache_data[0]), 2)
    result = cache_data[0][1]

    # Result and expected are not quite the same. 'expected' will have
    # lists of tuples where 'result' will have lists of lists
    for key, value in result.items():
        if key not in ['pattoo_agent_timestamp', 'pattoo_datapoints']:
            self.assertEqual(value, expected[key])
    self.assertEqual(
        result['pattoo_datapoints']['datapoint_pairs'],
        expected['pattoo_datapoints']['datapoint_pairs'])

    # Test list of tuples (JSON also turns int dict keys into strings)
    for key, value in result[
            'pattoo_datapoints']['key_value_pairs'].items():
        self.assertEqual(
            tuple(value),
            expected['pattoo_datapoints']['key_value_pairs'][int(key)])

    # Revert cache_directory
    for filename in os.listdir(cache_directory):
        # Remove all the '.json' files created by the posting
        if filename.endswith('.json'):
            filepath = '{}{}{}'.format(cache_directory, os.sep, filename)
            os.remove(filepath)
def test_encrypted_post(self):
    """Verify the API accepts and decrypts an encrypted agent posting."""
    # Server configuration used to locate the cache directory
    config = ServerConfig()

    # Build a Pgpier object for the agent
    gconfig = Config()
    agent_gpg = files.set_gnupg(
        "test_encrypted_agent", gconfig, "*****@*****.**")

    # Generate agent data and the dict the API is expected to cache
    agent_data = _make_agent_data()
    expected = converter.posting_data_points(
        converter.agentdata_to_post(agent_data))

    # Post the data over the encrypted channel
    post_encrypted = EncryptedPostAgent(agent_data, agent_gpg)
    post_encrypted.post()

    # Pull back what the API wrote to its cache directory
    cache_directory = config.agent_cache_directory(PATTOO_API_AGENT_NAME)
    cache_data = files.read_json_files(cache_directory)

    # Exactly one cached entry of the form (filepath, data)
    self.assertEqual(len(cache_data), 1)
    self.assertEqual(len(cache_data[0]), 2)
    result = cache_data[0][1]

    # JSON round-tripping turns tuples into lists, so 'result' and
    # 'expected' differ in container types and are compared piecewise
    for key, value in result.items():
        if key != 'pattoo_datapoints':
            self.assertEqual(value, expected[key])
    self.assertEqual(
        result['pattoo_datapoints']['datapoint_pairs'],
        expected['pattoo_datapoints']['datapoint_pairs'])

    # JSON also converts the int dict keys into strings
    expected_pairs = expected['pattoo_datapoints']['key_value_pairs']
    for key, value in result[
            'pattoo_datapoints']['key_value_pairs'].items():
        self.assertEqual(tuple(value), expected_pairs[int(key)])

    # Revert cache_directory to its empty state
    for fname in os.listdir(cache_directory):
        if fname.endswith('.json'):
            os.remove(os.path.join(cache_directory, fname))
def __init__(self): """Initialize the class. Args: None Returns: None """ # Instantiate the Config parent ServerConfig.__init__(self)
def test__lock(self):
    """Testing method / function _lock."""
    # Locate the ingester lockfile
    config = ServerConfig()
    lockfile = files.lock_file(PATTOO_INGESTER_NAME, config)

    # No lockfile should exist before the first lock attempt
    self.assertFalse(os.path.isfile(lockfile))

    # First lock succeeds and creates the lockfile
    outcome = files_test._lock()
    self.assertTrue(os.path.isfile(lockfile))
    self.assertTrue(outcome)

    # A second lock while already locked must fail
    self.assertFalse(files_test._lock())

    # Deleting the lock succeeds and removes the file
    outcome = files_test._lock(delete=True)
    self.assertTrue(outcome)
    self.assertFalse(os.path.isfile(lockfile))

    # Locking works again once the lock has been removed
    outcome = files_test._lock()
    self.assertTrue(outcome)
    self.assertTrue(os.path.isfile(lockfile))

    # Delete again to revert to known working state
    self.assertTrue(files_test._lock(delete=True))
def create_cache():
    """Create a test cache file and return the values used to build it.

    Args:
        None

    Returns:
        result: Dict of agent/key/value data written to the cache file

    """
    # Initialize key variables
    config = ServerConfig()
    polling_interval = 20
    cache_directory = config.agent_cache_directory(PATTOO_API_AGENT_NAME)
    result = {
        'pattoo_agent_program': data.hashstring(str(random())),
        'pattoo_agent_polled_target': socket.getfqdn(),
        'pattoo_key': data.hashstring(str(random())),
        'pattoo_value': round(uniform(1, 100), 5),
        'pattoo_agent_hostname': socket.getfqdn()
    }

    # We want to make sure we get a different AgentID each time
    filename = files.agent_id_file(
        result['pattoo_agent_program'], config)
    # Idiom fix: test the boolean directly instead of comparing 'is True'
    if os.path.isfile(filename):
        os.remove(filename)
    result['pattoo_agent_id'] = files.get_agent_id(
        result['pattoo_agent_program'], config)

    # Setup AgentPolledData
    apd = AgentPolledData(result['pattoo_agent_program'], polling_interval)

    # Initialize TargetDataPoints
    ddv = TargetDataPoints(result['pattoo_agent_hostname'])

    # Setup DataPoint
    data_type = DATA_INT
    variable = DataPoint(
        result['pattoo_key'], result['pattoo_value'], data_type=data_type)

    # Add data to TargetDataPoints
    ddv.add(variable)

    # Write data to cache
    apd.add(ddv)
    cache_dict = converter.posting_data_points(
        converter.agentdata_to_post(apd))
    cache_file = '{}{}cache_test.json'.format(cache_directory, os.sep)
    with open(cache_file, 'w') as _fp:
        json.dump(cache_dict, _fp)

    return result
def test_receive(self):
    """Testing method / function receive."""
    # Build polled data and the dict the API is expected to cache
    config = ServerConfig()
    apd = _create_apd()
    expected = converter.posting_data_points(
        converter.agentdata_to_post(apd))

    # Post data to the API
    post = PostAgent(apd)
    post.post()

    # Read back what the API wrote to its cache directory
    cache_directory = config.agent_cache_directory(PATTOO_API_AGENT_NAME)
    cache_data = files.read_json_files(cache_directory)

    # Exactly one cached entry of the form (filepath, data)
    self.assertEqual(len(cache_data), 1)
    self.assertEqual(len(cache_data[0]), 2)
    result = cache_data[0][1]

    # JSON round-tripping turns tuples into lists, so 'result' and
    # 'expected' differ in container types and are compared piecewise
    for key, value in result.items():
        if key != 'pattoo_datapoints':
            self.assertEqual(value, expected[key])
    self.assertEqual(
        result['pattoo_datapoints']['datapoint_pairs'],
        expected['pattoo_datapoints']['datapoint_pairs'])

    # JSON also converts the int dict keys into strings
    expected_pairs = expected['pattoo_datapoints']['key_value_pairs']
    for key, value in result[
            'pattoo_datapoints']['key_value_pairs'].items():
        self.assertEqual(tuple(value), expected_pairs[int(key)])

    # Revert cache_directory to its empty state
    for fname in os.listdir(cache_directory):
        if fname.endswith('.json'):
            os.remove(os.path.join(cache_directory, fname))
def test_purge(self):
    """Testing method / function purge."""
    # Locate the API cache directory
    config = ServerConfig()
    cache_directory = config.agent_cache_directory(PATTOO_API_AGENT_NAME)

    # Populate the cache with test data
    _ = create_cache()

    # The cache must contain data before the purge
    result = files.read_json_files(cache_directory)
    self.assertTrue(bool(result))

    # Purge the cache
    cache = Cache()
    cache.purge()

    # The cache must be empty afterwards; die=False stops
    # read_json_files from exiting on an empty directory
    result = files.read_json_files(cache_directory, die=False)
    self.assertFalse(bool(result))
def arguments():
    """Get the CLI arguments.

    Args:
        None

    Returns:
        args: NamedTuple of argument values

    """
    # Determine the cache directory referenced in the parser description
    config = Config()
    directory = config.agent_cache_directory(PATTOO_API_AGENT_NAME)

    # Build the argument parser
    parser = argparse.ArgumentParser(description='''\
Program to ingest cached agent data from the {} directory into the database.\
'''.format(directory))
    parser.add_argument(
        '-b', '--batch_size', default=500, type=int, help='''\
The number of files to process at a time. Smaller batch sizes may help when \
you are memory or database connection constrained. Default=500''')
    parser.add_argument(
        '-m', '--max_duration', default=3600, type=int, help='''\
The maximum time in seconds that the script should run. This reduces the risk \
of not keeping up with the cache data updates. Default=3600''')

    # Parse and return
    args = parser.parse_args()
    return args
def receive(source):
    """Handle the agent posting route.

    Args:
        source: Unique Identifier of an pattoo agent

    Returns:
        Text response of Received

    """
    # Initialize key variables
    prefix = 'Invalid posted data.'

    # Read configuration
    config = Config()
    cache_dir = config.agent_cache_directory(PATTOO_API_AGENT_NAME)

    # Get JSON from incoming agent POST
    try:
        posted_data = request.json
    except Exception:
        # Don't crash if we cannot convert JSON. Narrowed from a bare
        # 'except:' which also trapped SystemExit/KeyboardInterrupt.
        abort(404)

    # Abort if posted_data isn't a dict
    # (comment fixed: the check has always been for a dict, not a list)
    if isinstance(posted_data, dict) is False:
        log_message = '{} Not a dictionary'.format(prefix)
        log.log2warning(20024, log_message)
        abort(404)
    if len(posted_data) != len(CACHE_KEYS):
        log_message = ('''\
{} Incorrect length. Expected length of {}'''.format(prefix, len(CACHE_KEYS)))
        log.log2warning(20019, log_message)
        abort(404)
    for key in posted_data.keys():
        if key not in CACHE_KEYS:
            log_message = '{} Invalid key'.format(prefix)
            log.log2warning(20018, log_message)
            abort(404)

    # Extract key values from posting
    try:
        timestamp = posted_data['pattoo_agent_timestamp']
    except Exception:
        _exception = sys.exc_info()
        log_message = ('API Failure')
        log.log2exception(20025, _exception, message=log_message)
        abort(404)

    # Create filename. Add a suffix in the event the source is posting
    # frequently.
    suffix = str(randrange(100000)).zfill(6)
    json_path = (
        '{}{}{}_{}_{}.json'.format(
            cache_dir, os.sep, timestamp, source, suffix))

    # Create cache file
    try:
        with open(json_path, 'w+') as temp_file:
            json.dump(posted_data, temp_file)
    except Exception as err:
        # Single handler suffices: the previous trailing bare 'except:'
        # was unreachable for any ordinary exception.
        log_message = '{}'.format(err)
        log.log2warning(20016, log_message)
        abort(404)

    # Return
    return 'OK'
def crypt_receive():
    """Receive encrypted data from agent.

    Args:
        None

    Returns:
        message (str): Reception result
        response (int): HTTP response code

    """
    # Read configuration
    config = Config()
    cache_dir = config.agent_cache_directory(PATTOO_API_AGENT_NAME)

    try:
        # Retrieves Pgpier class
        gpg = get_gnupg(PATTOO_API_AGENT_NAME, config)

        # Checks if a Pgpier object exists.
        # Fix: this check must run BEFORE set_keyid(); previously a None
        # gpg raised AttributeError first and the intended error message
        # was never produced.
        if gpg is None:
            raise Exception('Could not retrieve Pgpier for {}'.format(
                PATTOO_API_AGENT_NAME))

        # Sets key ID
        gpg.set_keyid()
    except Exception as e:
        response = 500
        message = 'Server error'
        log_msg = 'Could not retrieve Pgpier: >>>{}<<<'.format(e)
        log.log2warning(20175, log_msg)
        return message, response

    # Predefined error message and response
    response = 400
    message = 'Proceed to key exchange first'

    # Block connection if a symmetric key was not stored
    if 'symm_key' not in session:
        message = 'No symmetric key'
        response = 403
        return message, response

    if request.method == 'POST':
        # Get data from agent
        data_json = request.get_json(silent=False)
        data_dict = json.loads(data_json)

        # Retrieved symmetrically encrypted data
        encrypted_data = data_dict['encrypted_data']

        # Symmetrically decrypt data
        data = gpg.symmetric_decrypt(encrypted_data, session['symm_key'])

        # Initialize key variables
        prefix = 'Invalid posted data.'
        posted_data = None
        source = None

        # Extract posted data and source
        try:
            data_extract = json.loads(data)
            posted_data = data_extract['data']
            source = data_extract['source']
        except Exception as e:
            log_message = 'Decrypted data extraction failed: {}'\
                .format(e)
            log.log2warning(20176, log_message)
        else:
            # Fix: only log success when extraction actually succeeded.
            # Previously this message was logged unconditionally, even
            # right after logging a failure.
            log_message = 'Decrypted data extraction successful'
            log.log2info(20177, log_message)

        # Abort if posted_data isn't a dict (a failed extraction above
        # leaves posted_data as None and is rejected here)
        if isinstance(posted_data, dict) is False:
            log_message = '{} Not a dictionary'.format(prefix)
            log.log2warning(20178, log_message)
            abort(404)
        if len(posted_data) != len(CACHE_KEYS):
            log_message = ('''{} Incorrect length. 
Expected length of {} '''.format(prefix, len(CACHE_KEYS)))
            log.log2warning(20179, log_message)
            abort(404)
        for key in posted_data.keys():
            if key not in CACHE_KEYS:
                log_message = '{} Invalid key'.format(prefix)
                log.log2warning(20180, log_message)
                abort(404)

        # Extract key values from posting
        try:
            timestamp = posted_data['pattoo_agent_timestamp']
        except Exception:
            _exception = sys.exc_info()
            log_message = ('API Failure')
            log.log2exception(20181, _exception, message=log_message)
            abort(404)

        # Create filename. Add a suffix in the event the source is posting
        # frequently.
        suffix = str(randrange(100000)).zfill(6)
        json_path = (
            '{}{}{}_{}_{}.json'.format(
                cache_dir, os.sep, timestamp, source, suffix))

        # Create cache file
        try:
            with open(json_path, 'w+') as temp_file:
                json.dump(posted_data, temp_file)
        except Exception as err:
            # Single handler suffices: the previous trailing bare
            # 'except:' was unreachable for any ordinary exception.
            log_message = '{}'.format(err)
            log.log2warning(20182, log_message)
            abort(404)

        # Success
        message = 'Decrypted and received'
        response = 202
        log.log2info(20184, message)

    # Return
    return message, response