def test_init(self):
    """Testing method init.

    Verifies Config() exits when the configuration directory is missing,
    empty, or contains only an empty configuration file.
    """
    # Testing with non-existant directory
    directory = 'bogus'
    os.environ['INFOSET_CONFIGDIR'] = directory
    with self.assertRaises(SystemExit):
        configuration.Config()

    # Testing with an empty directory
    empty_directory = tempfile.mkdtemp()
    os.environ['INFOSET_CONFIGDIR'] = empty_directory
    with self.assertRaises(SystemExit):
        configuration.Config()

    # Write bad_config to file
    empty_config_file = ('%s/test_config.yaml') % (empty_directory)
    with open(empty_config_file, 'w') as f_handle:
        f_handle.write('')

    # Create configuration object; lookups must fail on an empty config
    config = configuration.Config()
    with self.assertRaises(SystemExit):
        config.log_file()

    # Cleanup files in temp directories
    # Bug fix: clean up the temporary directory actually created above;
    # the previous call passed the bogus (non-existent) directory name,
    # leaking the temp directory.
    _delete_files(empty_directory)
def test_sqlalchemy_pool_size(self):
    """Testing method sqlalchemy_pool_size."""
    # Testing sqlalchemy_pool_size with a good dictionary
    # good key and key_value
    result = self.config.sqlalchemy_pool_size()
    self.assertEqual(result, 10)
    self.assertEqual(
        result, self.good_dict['main']['sqlalchemy_pool_size'])

    # Set the environmental variable for the configuration directory
    directory = tempfile.mkdtemp()
    os.environ['INFOSET_CONFIGDIR'] = directory
    config_file = ('%s/test_config.yaml') % (directory)

    # Testing sqlalchemy_pool_size with blank key and blank key_value
    key = ''
    key_value = ''
    bad_config = ("""\
main:
    %s %s
""") % (key, key_value)
    # Bug fix: yaml.load without an explicit Loader is deprecated and
    # unsafe on untrusted input; safe_load matches the sibling
    # test_memcached_hostname method.
    bad_dict = yaml.safe_load(bytes(bad_config, 'utf-8'))

    # Write bad_config to file
    with open(config_file, 'w') as f_handle:
        yaml.dump(bad_dict, f_handle, default_flow_style=True)

    # Create configuration object
    config = configuration.Config()
    with self.assertRaises(SystemExit):
        config.sqlalchemy_pool_size()

    # Testing sqlalchemy_pool_size with good key and blank key_value
    key = 'sqlalchemy_pool_size:'
    key_value = ''
    bad_config = ("""\
main:
    %s %s
""") % (key, key_value)
    bad_dict = yaml.safe_load(bytes(bad_config, 'utf-8'))

    # Write bad_config to file
    with open(config_file, 'w') as f_handle:
        yaml.dump(bad_dict, f_handle, default_flow_style=True)

    # Create configuration object; a blank value falls back to the default
    config = configuration.Config()
    result = config.sqlalchemy_pool_size()
    self.assertEqual(result, 10)

    # Cleanup files in temp directories
    _delete_files(directory)
def test_memcached_hostname(self):
    """Testing method memcached_hostname."""
    # The good configuration must report the configured hostname
    expected = self.good_dict['main']['memcached_hostname']
    result = self.config.memcached_hostname()
    self.assertEqual(result, 'localhost')
    self.assertEqual(result, expected)

    # Point the configuration directory at a fresh temporary directory
    temp_dir = tempfile.mkdtemp()
    os.environ['INFOSET_CONFIGDIR'] = temp_dir
    filepath = ('%s/test_config.yaml') % (temp_dir)

    # Case 1: blank key and blank key_value must trigger SystemExit
    broken_yaml = ("""\
main:
    %s %s
""") % ('', '')
    broken_dict = yaml.safe_load(bytes(broken_yaml, 'utf-8'))
    with open(filepath, 'w') as f_handle:
        yaml.dump(broken_dict, f_handle, default_flow_style=True)
    config = configuration.Config()
    with self.assertRaises(SystemExit):
        config.memcached_hostname()

    # Case 2: good key with a blank value defaults to 'localhost'
    broken_yaml = ("""\
main:
    %s %s
""") % ('memcached_hostname:', '')
    broken_dict = yaml.safe_load(bytes(broken_yaml, 'utf-8'))
    with open(filepath, 'w') as f_handle:
        yaml.dump(broken_dict, f_handle, default_flow_style=True)
    config = configuration.Config()
    self.assertEqual(config.memcached_hostname(), 'localhost')

    # Cleanup files in temp directories
    _delete_files(temp_dir)
def test_db_password(self):
    """Testing method db_password."""
    result = self.config.db_password()
    self.assertEqual(result, 'test_B3bFHgxQfsEy86TN')
    self.assertEqual(result, self.good_dict['main']['db_password'])

    # Set the environmental variable for the configuration directory
    directory = tempfile.mkdtemp()
    os.environ['INFOSET_CONFIGDIR'] = directory
    config_file = ('%s/test_config.yaml') % (directory)

    # Testing db_password with blank key and blank key_value
    key = ''
    key_value = ''
    bad_config = ("""\
main:
    %s %s
""") % (key, key_value)
    # Bug fix: yaml.load without an explicit Loader is deprecated and
    # unsafe; use safe_load as in test_memcached_hostname.
    bad_dict = yaml.safe_load(bytes(bad_config, 'utf-8'))

    # Write bad_config to file
    with open(config_file, 'w') as f_handle:
        yaml.dump(bad_dict, f_handle, default_flow_style=True)

    # Create configuration object
    config = configuration.Config()
    with self.assertRaises(SystemExit):
        config.db_password()

    # Testing db_password with good key and blank key_value
    # Bug fix: the two assignments below were corrupted into the
    # unparseable token sequence "key = 'db_password:'******''";
    # restored as separate key / key_value assignments.
    key = 'db_password:'
    key_value = ''
    bad_config = ("""\
main:
    %s %s
""") % (key, key_value)
    bad_dict = yaml.safe_load(bytes(bad_config, 'utf-8'))

    # Write bad_config to file
    with open(config_file, 'w') as f_handle:
        yaml.dump(bad_dict, f_handle, default_flow_style=True)

    # Create configuration object
    config = configuration.Config()
    with self.assertRaises(SystemExit):
        config.db_password()

    # Cleanup files in temp directories
    _delete_files(directory)
def process():
    """Make sure the correct agents are running.

    Args:
        None

    Returns:
        None

    """
    # Retrieve the list of agents from the main configuration
    main_config = configuration.Config()

    # Start or stop each agent according to its own configuration
    for entry in main_config.agents():
        per_agent_config = configuration.ConfigAgent(entry['agent_name'])
        if per_agent_config.agent_enabled() is True:
            _check_when_enabled(per_agent_config)
        else:
            # Shutdown agent if running
            _check_when_disabled(per_agent_config)
def ingester(args):
    """Process 'show ingester' commands.

    Args:
        args: Argparse arguments

    Returns:
        None

    """
    subqualifier = args.subqualifier

    if subqualifier == 'logs':
        # Tail the ingester's log file
        log_filename = configuration.Config().log_file()
        input_output.File(log_filename).tail()

        # Done
        sys.exit(0)

    if subqualifier == 'status':
        # Report the status of the ingester daemon
        ingester_agent = Agent(INGESTER_EXECUTABLE)
        AgentDaemon(ingester_agent).status()

        # Done
        sys.exit(0)

    # Show help if there are no matches
    general.cli_help()
def check_user():
    """Check to make sure the user environment is correct.

    Args:
        None

    Returns:
        None

    """
    # Gather the current and the configured usernames
    current_user = getpass.getuser()
    expected_user = configuration.Config().username()

    # Prevent running as sudo user
    check_sudo()

    # Prevent others from running the script
    if current_user != expected_user:
        log_message = (
            'You can only run this script as user \'{}\' '
            'in the configuration file. Try running the command like this:\n'
            '').format(expected_user)
        print(log_message)
        fixed_command = (
            '$ su -c \'{}\' {}\n'.format(
                ' '.join(sys.argv[:]), expected_user))
        print(fixed_command)
        sys.exit(2)
def _get_config():
    """Retrieve the current configuration and place it into a dict.

    The dict's keys match the form field names in config.html

    Args:
        None

    Returns:
        Configuration Dictionary

    """
    config = configuration.Config()

    # Map each form field name to its configuration accessor's value
    return {
        'infoset-username': config.username(),
        'infoset-port': config.bind_port(),
        'db-name': config.db_name(),
        'db-host': config.db_hostname(),
        'db-username': config.db_username(),
        'db-password': config.db_password(),
        'ingester-cache': config.ingest_cache_directory(),
        'ingester-interval': config.interval(),
        'ingester-pool-size': config.ingest_pool_size(),
        'ingester-listen-address': config.listen_address(),
        'log-directory': config.log_directory(),
        'log-level': config.log_level(),
        'memcached-host': config.memcached_hostname(),
        'memcached-port': config.memcached_port(),
        'sqlalchemy-overflow': config.sqlalchemy_max_overflow(),
        'sqlalchemy-pool-size': config.sqlalchemy_pool_size(),
    }
def test_ingest_cache_directory(self):
    """Testing method ingest_cache_directory."""
    # Testing ingest_cache_directory with temp directory
    # Set the environmental variable for the configuration directory
    directory = tempfile.mkdtemp()
    os.environ['INFOSET_CONFIGDIR'] = directory
    config_file = ('%s/test_config.yaml') % (directory)

    # Testing ingest_cache_directory with blank key_value(filepath)
    key = ''
    key_value = ''
    bad_config = ("""\
main:
    %s %s
""") % (key, key_value)
    # Bug fix: yaml.load without an explicit Loader is deprecated and
    # unsafe; use safe_load as in test_memcached_hostname.
    bad_dict = yaml.safe_load(bytes(bad_config, 'utf-8'))
    with open(config_file, 'w') as f_handle:
        yaml.dump(bad_dict, f_handle, default_flow_style=True)

    # Create configuration object
    config = configuration.Config()
    with self.assertRaises(SystemExit):
        config.ingest_cache_directory()

    # Cleanup files in temp directories
    _delete_files(directory)
def run(self):
    """Setup database.

    Runs the parent build step, creates the database engine, applies the
    schemas and seeds the initial rows.

    Args:
        None

    Returns:
        None

    """
    # Needed to run install subcommand
    # https://stackoverflow.com/questions/1321270/how-to-extend-distutils-with-a-simple-post-install-script/1321345#1321345
    BuildCommand.run(self)
    self.reserved = '_SYSTEM_RESERVED_'
    self.config = configuration.Config()

    # Initialize key variables
    # (removed unused pool_size / max_overflow locals that were never
    # passed to create_engine)
    use_mysql = True
    config = self.config

    # Create DB connection pool
    if use_mysql is True:
        # NOTE(review): despite the MySQL comments, this URL is SQLite —
        # confirm which backend is intended here.
        URL = ('sqlite:///%s') % (config.db_file())
        engine = create_engine(URL, echo=True, encoding='utf8')

        # Try to create the database
        shared.print_ok('Attempting to create database tables')
        try:
            sql_string = ('ALTER DATABASE %s CHARACTER SET utf8mb4 '
                          'COLLATE utf8mb4_general_ci') % (
                              config.db_name())
            # engine.execute(sql_string)
        except Exception:
            # Bug fix: narrowed from a bare "except:" so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            log_message = (
                'Cannot connect to database %s. '
                'Verify database server is started. '
                'Verify database is created. '
                'Verify that the configured database authentication '
                'is correct.') % (config.db_name())
            log.log2die(1046, log_message)

        # Apply schemas
        shared.print_ok('Applying Schemas.')
        BASE.metadata.create_all(engine)

        # Insert database entries
        self._insert_agent_device()
        self._insert_billcode()
        self._insert_department()
        self._insert_datapoint()
        self._insert_config()
def main():
    """Ingest data if this file is run from the CLI.

    Args:
        None

    Returns:
        None

    """
    # Load the configuration and hand off to the ingest daemon routine
    process(configuration.Config(), 'ingestd')
def __init__(self):
    """Initialize the class.

    Args:
        None

    Returns:
        None

    """
    # Marker name for rows reserved by the system
    self.reserved = '_SYSTEM_RESERVED_'

    # Cache the system configuration for later use
    self.config = configuration.Config()
def main(): """Process agent data. Args: None Returns: None """ # Initialize key variables use_mysql = True global POOL global URL global TEST_ENGINE # Get configuration config = configuration.Config() # Define SQLAlchemy parameters from configuration pool_size = config.sqlalchemy_pool_size() max_overflow = config.sqlalchemy_max_overflow() # Create DB connection pool if use_mysql is True: URL = ('mysql+pymysql://%s:%s@%s/%s?charset=utf8mb4') % ( config.db_username(), config.db_password(), config.db_hostname(), config.db_name()) # Add MySQL to the pool db_engine = create_engine( URL, echo=False, encoding='utf8', max_overflow=max_overflow, pool_size=pool_size, pool_recycle=600) # Fix for multiprocessing _add_engine_pidguard(db_engine) POOL = sessionmaker( autoflush=True, autocommit=False, bind=db_engine ) else: POOL = None # Populate the test engine if this is a test database if config.db_name().startswith('test_') is True: TEST_ENGINE = db_engine
def __init__(self, parent, child):
    """Initialize the class.

    Args:
        parent: Name of parent daemon
        child: Name of child daemon

    Returns:
        None

    """
    # Delegate common agent setup to the base class
    Agent.__init__(self, parent, child)

    # Cache the system configuration for later use
    self.config = configuration.Config()
def __init__(self):
    """Method initializing the class."""
    # Database connectivity details come from module-level globals
    self.url = URL
    self.engine = TEST_ENGINE

    # Load configuration
    self.config = configuration.Config()

    # Confirm the unittest environment is ready
    unittest_setup.ready()

    # Confirm the database itself is usable
    self.validate()
def receive(id_agent):
    """Handle the /infoset/api/v1.0/receive/<id_agent> route.

    Args:
        id_agent: Unique Identifier of an Infoset Agent

    Returns:
        Text response of Received

    """
    # Initialize key variables
    found_count = 0

    # Read configuration
    config = configuration.Config()
    cache_dir = config.ingest_cache_directory()

    # Get JSON from incoming agent POST
    data = request.json

    # Make sure all the important keys are available
    keys = ['timestamp', 'id_agent', 'devicename']
    for key in keys:
        if key in data:
            found_count += 1

    # Do processing
    if found_count == 3:
        # Extract key values from posting
        try:
            timestamp = int(data['timestamp'])
        except (TypeError, ValueError):
            # Bug fix: narrowed from a bare "except:" so only conversion
            # failures of the posted timestamp trigger the 404; a bare
            # except also swallowed SystemExit/KeyboardInterrupt.
            abort(404)

        # NOTE(review): this overwrites the id_agent URL parameter with
        # the value posted in the body — confirm that is intentional.
        id_agent = data['id_agent']
        devicename = data['devicename']

        # Create a hash of the devicename
        device_hash = general.hashstring(devicename, sha=1)
        json_path = ('%s/%s_%s_%s.json') % (
            cache_dir, timestamp, id_agent, device_hash)

        # Persist the posting for the ingester to pick up later
        with open(json_path, "w+") as temp_file:
            json.dump(data, temp_file)

        # Return
        return 'OK'
    else:
        abort(404)
def main():
    """Get Flask server running.

    Args:
        None

    Returns:
        None

    """
    # Read the listening address and port from configuration
    config = configuration.Config()

    # Launch the API (debug mode, threaded)
    API.run(
        debug=True,
        host=config.listen_address(),
        threaded=True,
        port=config.bind_port())
def __init__(self):
    """Method initializing the class."""
    # Define key variables
    app_name = 'infoset'
    levels = {
        'debug': logging.DEBUG,
        'info': logging.INFO,
        'warning': logging.WARNING,
        'error': logging.ERROR,
        'critical': logging.CRITICAL
    }

    # Pull logging settings from the configuration
    config = configuration.Config()
    log_file = config.log_file()

    # Fall back to debug when the configured level is unrecognized
    log_level = levels.get(config.log_level(), levels['debug'])

    # One logger writes to file, the other to the console
    self.logger_file = logging.getLogger(('%s_file') % (app_name))
    self.logger_stdout = logging.getLogger(('%s_console') % (app_name))
    self.logger_file.setLevel(log_level)
    self.logger_stdout.setLevel(log_level)

    # Both handlers share the same timestamped record format
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')

    # File handler (logs even debug messages at the configured level)
    file_handler = logging.FileHandler(log_file)
    file_handler.setLevel(log_level)
    file_handler.setFormatter(formatter)
    self.logger_file.addHandler(file_handler)

    # Console handler
    stdout_handler = logging.StreamHandler()
    stdout_handler.setLevel(log_level)
    stdout_handler.setFormatter(formatter)
    self.logger_stdout.addHandler(stdout_handler)
def main(): """Process agent data. Args: None Returns: None """ # Initialize key variables use_mysql = True global POOL global URL global TEST_ENGINE # Get configuration config = configuration.Config() # Define SQLAlchemy parameters from configuration pool_size = config.sqlalchemy_pool_size() max_overflow = config.sqlalchemy_max_overflow() # Create DB connection pool if use_mysql is True: create_sqlite_if_not_exist(config) URL = ('sqlite:///%s') % (config.db_file()) # Add MySQL to the pool db_engine = create_engine(URL, echo=False) # Fix for multiprocessing _add_engine_pidguard(db_engine) POOL = sessionmaker(autoflush=True, autocommit=False, bind=db_engine) else: POOL = None # Populate the test engine if this is a test database if config.db_name().startswith('test_') is True: TEST_ENGINE = db_engine
def _configuration():
    """Process 'show hostnames' commands.

    Args:
        None

    Returns:
        None

    """
    # Load and pretty-print the active configuration
    config = configuration.Config()
    print('')
    pprint(config.configuration(), indent=2)
    print(
        '\n# Configuration read from directory: {}\n'
        ''.format(config.configuration_directory()))

    # Done
    sys.exit(0)
def normalized_timestamp(timestamp=None):
    """Normalize timestamp to a multiple of 'interval' seconds.

    Args:
        timestamp: epoch timestamp in seconds

    Returns:
        value: Normalized value

    """
    # Round down to the nearest whole interval; use "now" when no
    # timestamp was supplied
    interval = configuration.Config().interval()
    seconds = int(time.time()) if timestamp is None else int(timestamp)
    value = (seconds // interval) * interval

    # Return
    return value
def __init__(self):
    """Initialize the class.

    Args:
        None

    Returns:
        None

    """
    # Initialize key variables
    running_username = getpass.getuser()
    daemon_username = configuration.Config().username()
    self.root_directory = general.root_directory()
    self.infoset_user = None
    self.running_as_root = False

    # Set the username we need to be running as
    self.infoset_user = daemon_username

    if running_username == 'root':
        # When running as root, the configured daemon user must exist
        # because we will drop privileges to it
        try:
            # Get GID and UID for user
            self.gid = getpwnam(self.infoset_user).pw_gid
            self.uid = getpwnam(self.infoset_user).pw_uid
        except KeyError:
            # Die if user doesn't exist
            log_message = ('User {} not found. Please try again.'
                           ''.format(self.infoset_user))
            log.log2die_safe(1049, log_message)

        self.running_as_root = True

    return
def validate_timestamp(timestamp):
    """Validate timestamp to be a multiple of 'interval' seconds.

    Args:
        timestamp: epoch timestamp in seconds

    Returns:
        valid: True if valid

    """
    # A valid timestamp is unchanged by interval-rounding
    interval = configuration.Config().interval()
    rounded = (int(timestamp) // interval) * interval
    valid = bool(rounded == timestamp)

    # Return
    return valid
def _timestamps():
    """Create a list of timestamps starting 30 minutes ago.

    Args:
        None

    Returns:
        timestamps: List of timestamps

    """
    # Determine the reporting interval
    interval = configuration.Config().interval()

    # Normalize the current UTC time, then step back 30 minutes
    utc_timestamp = int(datetime.utcnow().timestamp())
    start = general.normalized_timestamp(utc_timestamp) - 1800

    # Walk backwards one interval at a time across the 30 minute window
    timestamps = list(range(start, start - 1800, -interval))
    return timestamps
def api(args):
    """Process 'show api' commands.

    Args:
        args: Argparse arguments

    Returns:
        None

    """
    if args.subqualifier == 'logs':
        # Tail the API's web log file
        filename = configuration.Config().web_log_file()
        input_output.File(filename).tail()

        # Done
        sys.exit(0)

    if args.subqualifier == 'status':
        # Report status for both the gunicorn and API daemons
        agent_gunicorn = Agent(API_GUNICORN_AGENT)
        agent_api = AgentAPI(API_EXECUTABLE, API_GUNICORN_AGENT)
        AgentDaemon(agent_gunicorn).status()
        AgentDaemon(agent_api).status()

        # Done
        sys.exit(0)

    # Show help if there are no matches
    general.cli_help()
def process(ingester_agent_name): """Process cache data by adding it to the database using subprocesses. Args: ingester_agent_name: Ingester agent name Returns: None """ # Initialize key variables argument_list = [] id_agent_metadata = defaultdict(lambda: defaultdict(dict)) # Configuration setup config = configuration.Config() configured_pool_size = config.ingest_pool_size() # Make sure we have database connectivity if db.connectivity() is False: log_message = ('No connectivity to database. Check if running. ' 'Check database authentication parameters.' '') log.log2warning(1053, log_message) return # Get meta data on files id_agent_metadata = validate_cache_files() # Spawn processes only if we have files to process if bool(id_agent_metadata.keys()) is True: # Process lock file lockfile = daemon.lock_file(ingester_agent_name) if os.path.exists(lockfile) is True: # Return if lock file is present log_message = ( 'Ingest lock file %s exists. Multiple ingest daemons running ' 'or lots of cache files to ingest. Ingester may have died ' 'catastrophically in the past, in which case the lockfile ' 'should be deleted. Exiting ingest process. ' 'Will try again later.' '') % (lockfile) log.log2warning(1069, log_message) return else: # Create lockfile open(lockfile, 'a').close() # Read each cache file for devicehash in id_agent_metadata.keys(): for id_agent in id_agent_metadata[devicehash].keys(): # Create a list of arguments to process argument_list.append( (config, id_agent_metadata[devicehash][id_agent], ingester_agent_name)) # Create a pool of sub process resources pool_size = int(min(configured_pool_size, len(id_agent_metadata))) with Pool(processes=pool_size) as pool: # Create sub processes from the pool pool.map(_wrapper_process, argument_list) # Wait for all the processes to end # pool.join() # Return if lock file is present if os.path.exists(lockfile) is True: os.remove(lockfile)
def validate_cache_files():
    """Create metadata for cache files with valid names.

    Args:
        None

    Returns:
        id_agent_metadata: Dict keyed by
            devicehash: A hash of the devicename
            id_agent: The agent's ID
            The contents of each key pair is a list of dicts with these keys
                timestamp: Timestamp of the data received
                filepath: The path to the file to be read

    """
    # Initialize key variables
    id_agent_metadata = defaultdict(lambda: defaultdict(dict))

    # Configuration setup
    config = configuration.Config()
    cache_dir = config.ingest_cache_directory()

    # Filenames must be exactly "<timestamp>_<hex>_<hex>.json".
    # Bug fix: the dot is now escaped and the pattern anchored with "$".
    # The previous unanchored ".json" (any-char dot) also matched names
    # like "1_a_b.json.tmp", which would crash the filename.split('.')
    # two-value unpacking below with a ValueError.
    regex = re.compile(r'^\d+_[0-9a-f]+_[0-9a-f]+\.json$')

    # Add files in cache directory to list
    all_filenames = [
        filename for filename in os.listdir(cache_dir)
        if os.path.isfile(os.path.join(cache_dir, filename))
    ]

    # Process only valid agent filenames
    for filename in all_filenames:
        # Add valid data to lists
        if bool(regex.match(filename)) is True:
            # Create a complete filepath
            filepath = os.path.join(cache_dir, filename)

            # Only read files that are 15 seconds or older
            # to prevent corruption caused by reading a file that could be
            # updating simultaneously
            if time.time() - os.path.getmtime(filepath) < 15:
                continue

            # Create a dict of Identifiers, timestamps and filepaths
            (name, _) = filename.split('.')
            (tstamp, id_agent, devicehash) = name.split('_')
            timestamp = int(tstamp)

            # Create data dictionary
            data_dict = {'timestamp': timestamp, 'filepath': filepath}

            # Keep track of devices and the Identifiers that track them
            # Create a list of timestamp, device filepath
            # tuples for each id_agent
            if bool(id_agent_metadata[devicehash][id_agent]) is True:
                id_agent_metadata[devicehash][id_agent].append(data_dict)
            else:
                id_agent_metadata[devicehash][id_agent] = [data_dict]

    # Return
    return id_agent_metadata
def _counter(self): """Convert counter data to gauge. Args: None Returns: values: Converted dict of data keyed by timestamp """ # Initialize key variables count = 0 interval = configuration.Config().interval() # Populate values dictionary with zeros. This ensures that # all timestamp values are covered if we have lost contact # with the agent at some point along the time series. if self.base_type == 1: values = dict.fromkeys( range(self.ts_start, self.ts_stop + interval, interval), 0) else: values = dict.fromkeys( range(self.ts_start + interval, self.ts_stop + interval, interval), 0) # Start conversion for timestamp, value in sorted(self.data.items()): # Process counter values if self.base_type != 1: # Skip first value if count == 0: old_timestamp = timestamp count += 1 continue ############################################################# # Treat missing data with caution ############################################################# # These are usually due to outages and can cause spikes # in the data. This ignores the first value after a zero. ############################################################# if timestamp - old_timestamp > interval: old_timestamp = timestamp continue ############################################################# ############################################################# ############################################################# # Get new value new_value = value - self.data[old_timestamp] # Do conversion to values / second if new_value >= 0: values[timestamp] = new_value / interval else: if self.base_type == 32: fixed_value = 4294967296 + abs(value) - 1 else: fixed_value = (4294967296 * 4294967296) + abs(value) - 1 values[timestamp] = fixed_value / interval else: # Process gauge values values[timestamp] = self.data[timestamp] # Save old timestamp old_timestamp = timestamp # Return return values
class TestConfiguration(unittest.TestCase):
    """Checks all functions and methods of configuration.Config."""

    #########################################################################
    # General object setup
    #########################################################################

    # Temporary directories referenced by the known-good configuration
    log_directory = tempfile.mkdtemp()
    cache_directory = tempfile.mkdtemp()

    # A known-good configuration shared by every test
    good_config = ("""\
main:
    log_directory: %s
    log_level: debug
    ingest_cache_directory: %s
    ingest_pool_size: 20
    bind_port: 3000
    interval: 300
    sqlalchemy_pool_size: 10
    sqlalchemy_max_overflow: 10
    memcached_hostname: localhost
    memcached_port: 22122
    db_hostname: localhost
    db_username: test_infoset
    db_password: test_B3bFHgxQfsEy86TN
    db_name: test_infoset
""") % (log_directory, cache_directory)

    # Convert good_config to dictionary. safe_load is used because
    # yaml.load without an explicit Loader is deprecated and unsafe.
    good_dict = yaml.safe_load(good_config)

    # Set the environmental variable for the configuration directory
    directory = tempfile.mkdtemp()
    os.environ['INFOSET_CONFIGDIR'] = directory
    config_file = ('%s/test_config.yaml') % (directory)

    # Write good_config to file
    with open(config_file, 'w') as f_handle:
        yaml.dump(good_dict, f_handle, default_flow_style=True)

    # Create configuration object
    config = configuration.Config()

    @classmethod
    def tearDownClass(cls):
        """Post test cleanup."""
        os.rmdir(cls.log_directory)
        os.rmdir(cls.cache_directory)
        os.remove(cls.config_file)
        os.rmdir(cls.directory)

    def _bad_config_object(self, directory, key='', key_value=''):
        """Write a minimal configuration file and return a Config object.

        Args:
            directory: Directory holding the file. The caller must have
                already pointed os.environ['INFOSET_CONFIGDIR'] here.
            key: Configuration key under 'main:' (e.g. 'log_level:'),
                or '' for no key at all
            key_value: Value for the key, or '' for a blank value

        Returns:
            Config object built from the file just written

        """
        config_file = ('%s/test_config.yaml') % (directory)
        bad_config = ("""\
main:
    %s %s
""") % (key, key_value)
        bad_dict = yaml.safe_load(bad_config)

        # Write bad_config to file
        with open(config_file, 'w') as f_handle:
            yaml.dump(bad_dict, f_handle, default_flow_style=True)

        # Create configuration object
        return configuration.Config()

    def _check_bad_config(self, method_name, key=None, default=None):
        """Verify a Config method's behavior against broken config files.

        1) With a blank key and a blank value the method must sys.exit().
        2) If `key` is given, write it with a blank value: the method must
           return `default`, or sys.exit() when no default is given.

        Args:
            method_name: Name of the Config method under test
            key: Good key with blank value (e.g. 'bind_port:'), or None
                to skip this second check
            default: Expected fallback value, or None if the method exits

        Returns:
            None

        """
        # Set the environmental variable for the configuration directory
        directory = tempfile.mkdtemp()
        os.environ['INFOSET_CONFIGDIR'] = directory

        # Blank key and blank key_value: method must exit
        config = self._bad_config_object(directory)
        with self.assertRaises(SystemExit):
            getattr(config, method_name)()

        # Good key with blank key_value
        if key is not None:
            config = self._bad_config_object(directory, key=key)
            if default is None:
                with self.assertRaises(SystemExit):
                    getattr(config, method_name)()
            else:
                self.assertEqual(getattr(config, method_name)(), default)

        # Cleanup files in temp directories
        _delete_files(directory)

    def test_init(self):
        """Testing method init."""
        # Testing with non-existant directory
        os.environ['INFOSET_CONFIGDIR'] = 'bogus'
        with self.assertRaises(SystemExit):
            configuration.Config()

        # Testing with an empty directory
        empty_directory = tempfile.mkdtemp()
        os.environ['INFOSET_CONFIGDIR'] = empty_directory
        with self.assertRaises(SystemExit):
            configuration.Config()

        # An empty config file lets the object initialize, but reading
        # any required key must exit
        empty_config_file = ('%s/test_config.yaml') % (empty_directory)
        with open(empty_config_file, 'w') as f_handle:
            f_handle.write('')

        # Create configuration object
        config = configuration.Config()
        with self.assertRaises(SystemExit):
            config.log_file()

        # Cleanup files in temp directories. (Bug fix: the original
        # cleaned the nonexistent 'bogus' path and leaked the files
        # written to empty_directory.)
        _delete_files(empty_directory)

    def test_log_file(self):
        """Testing method log_file."""
        # Test log_file with the good dictionary
        result = self.config.log_file()
        self.assertEqual(result, ('%s/infoset-ng.log') % (self.log_directory))

    def test_web_log_file(self):
        """Testing method web_log_file."""
        # Testing web_log_file with a good dictionary
        result = self.config.web_log_file()
        self.assertEqual(result, ('%s/api-web.log') % (self.log_directory))

    def test_log_level(self):
        """Testing method log_level."""
        # Testing with the good dictionary: good key and good key_value
        result = self.config.log_level()
        self.assertEqual(result, 'debug')
        self.assertEqual(result, self.good_dict['main']['log_level'])

        # Missing key exits; blank value also exits (no default)
        self._check_bad_config('log_level', key='log_level:')

    def test_log_directory(self):
        """Testing method log_directory."""
        # A configuration without a log_directory filepath must exit
        self._check_bad_config('log_directory')

    def test_ingest_cache_directory(self):
        """Testing method ingest_cache_directory."""
        # A configuration without a cache directory filepath must exit
        self._check_bad_config('ingest_cache_directory')

    def test_ingest_pool_size(self):
        """Testing method ingest_pool_size."""
        # Testing ingest_pool_size with good key and key_value
        result = self.config.ingest_pool_size()
        self.assertEqual(result, 20)
        self.assertEqual(result, self.good_dict['main']['ingest_pool_size'])

    def test_bind_port(self):
        """Testing method bind_port."""
        # Testing bind_port with good key and key_value
        result = self.config.bind_port()
        self.assertEqual(result, 3000)
        self.assertEqual(result, self.good_dict['main']['bind_port'])

        # Missing key exits; blank value falls back to the default 6000
        self._check_bad_config('bind_port', key='bind_port:', default=6000)

    def test_interval(self):
        """Testing method interval."""
        # Testing interval with good key and key_value
        result = self.config.interval()
        self.assertEqual(result, 300)
        self.assertEqual(result, self.good_dict['main']['interval'])

        # Missing key exits; blank value falls back to the default 300
        self._check_bad_config('interval', key='interval:', default=300)

    def test_sqlalchemy_pool_size(self):
        """Testing method sqlalchemy_pool_size."""
        # Testing sqlalchemy_pool_size with good key and key_value
        result = self.config.sqlalchemy_pool_size()
        self.assertEqual(result, 10)
        self.assertEqual(
            result, self.good_dict['main']['sqlalchemy_pool_size'])

        # Missing key exits; blank value falls back to the default 10
        self._check_bad_config(
            'sqlalchemy_pool_size', key='sqlalchemy_pool_size:', default=10)

    def test_sqlalchemy_max_overflow(self):
        """Testing method sqlalchemy_max_overflow."""
        # Testing sqlalchemy_max_overflow with good key and key_value
        result = self.config.sqlalchemy_max_overflow()
        self.assertEqual(result, 10)
        self.assertEqual(
            result, self.good_dict['main']['sqlalchemy_max_overflow'])

        # Missing key exits; blank value falls back to the default 10
        self._check_bad_config(
            'sqlalchemy_max_overflow',
            key='sqlalchemy_max_overflow:', default=10)

    def test_memcached_port(self):
        """Testing method memcached_port."""
        # Testing memcached_port with good key and key_value
        result = self.config.memcached_port()
        self.assertEqual(result, 22122)
        self.assertEqual(result, self.good_dict['main']['memcached_port'])

        # Missing key exits; blank value falls back to the default 11211
        self._check_bad_config(
            'memcached_port', key='memcached_port:', default=11211)

    def test_memcached_hostname(self):
        """Testing method memcached_hostname."""
        # Testing memcached_hostname with good key and key_value
        result = self.config.memcached_hostname()
        self.assertEqual(result, 'localhost')
        self.assertEqual(result, self.good_dict['main']['memcached_hostname'])

        # Missing key exits; blank value defaults to 'localhost'
        self._check_bad_config(
            'memcached_hostname',
            key='memcached_hostname:', default='localhost')

    def test_db_hostname(self):
        """Testing method db_hostname."""
        # Testing db_hostname with good key and key_value
        result = self.config.db_hostname()
        self.assertEqual(result, 'localhost')
        self.assertEqual(result, self.good_dict['main']['db_hostname'])

        # Missing key exits; blank value also exits (no default)
        self._check_bad_config('db_hostname', key='db_hostname:')

    def test_db_username(self):
        """Testing method db_username."""
        # Testing db_username with good key and key_value
        result = self.config.db_username()
        self.assertEqual(result, 'test_infoset')
        self.assertEqual(result, self.good_dict['main']['db_username'])

        # Missing key exits; blank value also exits (no default)
        self._check_bad_config('db_username', key='db_username:')

    def test_db_password(self):
        """Testing method db_password."""
        # Testing db_password with good key and key_value
        result = self.config.db_password()
        self.assertEqual(result, 'test_B3bFHgxQfsEy86TN')
        self.assertEqual(result, self.good_dict['main']['db_password'])

        # Missing key exits; blank value also exits (no default)
        self._check_bad_config('db_password', key='db_password:')

    def test_db_name(self):
        """Testing method db_name."""
        # Testing db_name with good key and key_value
        result = self.config.db_name()
        self.assertEqual(result, 'test_infoset')
        self.assertEqual(result, self.good_dict['main']['db_name'])

        # Missing key exits; blank value also exits (no default)
        self._check_bad_config('db_name', key='db_name:')
"""Initialize the API module.""" # Import PIP3 libraries from flask import Flask ############################################################################# # Import configuration. # This has to be done before all other infoset imports. ############################################################################# from infoset.utils import configuration CONFIG = configuration.Config() ############################################################################# ############################################################################# from infoset.utils import redis REDIS = redis.Redis() # Configure the cache # Define the global URL prefix from infoset.constants import API_PREFIX # Import API Blueprints from infoset.api.post import POST from infoset.api.status import STATUS from infoset.api.config_page import CONFIG_PAGE from infoset.api.resources.agents import AGENTS from infoset.api.resources.datapoints import DATAPOINTS from infoset.api.resources.lastcontacts import LASTCONTACTS from infoset.api.resources.devices import DEVICES