def __init__(self, exchange_name, connection_url, config_file=None):
    """
    Creates a new ClusterExecService.

    If config_file is omitted, it will try the default location
    (/etc/commissaire/clusterexec.conf).

    :param exchange_name: Name of the topic exchange
    :type exchange_name: str
    :param connection_url: Kombu connection URL
    :type connection_url: str
    :param config_file: Optional configuration file path
    :type config_file: str or None
    """
    # Subscribe to all cluster-exec job topics.
    service_queues = [{'routing_key': 'jobs.clusterexec.*'}]
    super().__init__(exchange_name, connection_url, service_queues)
    self.storage = StorageClient(self)
    # Re-read the configuration file so any logging directives in it
    # take effect for this service.
    read_config_file(config_file, '/etc/commissaire/clusterexec.conf')
def __init__(self, exchange_name, connection_url, qkwargs, config_file=None):
    """
    Initializes a new Service instance.

    :param exchange_name: Name of the topic exchange.
    :type exchange_name: str
    :param connection_url: Kombu connection url.
    :type connection_url: str
    :param qkwargs: One or more dicts keyword arguments for queue creation
    :type qkwargs: list
    :param config_file: Path to the configuration file location.
    :type config_file: str or None
    """
    name = self.__class__.__name__
    self.logger = logging.getLogger(name)
    self.logger.debug('Initializing {}'.format(name))

    # Read the configuration file, falling back to the subclass's
    # default location when no explicit path is given.
    # NOTE(review): assumes subclasses define _default_config_file —
    # confirm against the subclass definitions.
    self._config_data = read_config_file(
        config_file, self._default_config_file)

    if connection_url is None and 'bus_uri' in self._config_data:
        connection_url = self._config_data.get('bus_uri')
        self.logger.debug(
            'Using connection_url=%s from config file', connection_url)
    if exchange_name is None and 'bus_exchange' in self._config_data:
        # BUGFIX: the original tested the 'exchange_name' key but read
        # 'bus_exchange', so a configured exchange was never applied
        # (the key is 'bus_exchange', matching the --bus-exchange
        # option, just as 'bus_uri' matches --bus-uri above).  It also
        # logged the value before assigning it, always logging None.
        exchange_name = self._config_data.get('bus_exchange')
        self.logger.debug(
            'Using exchange_name=%s from config file', exchange_name)

    self.connection = Connection(connection_url)
    self._channel = self.connection.default_channel
    self._exchange = Exchange(
        exchange_name, type='topic').bind(self._channel)
    self._exchange.declare()

    # Set up queues
    self._queues = []
    for kwargs in qkwargs:
        queue = Queue(**kwargs)
        queue.exchange = self._exchange
        queue = queue.bind(self._channel)
        self._queues.append(queue)
        self.logger.debug(queue.as_dict())

    # Create producer for publishing on topics
    self.producer = Producer(self._channel, self._exchange)

    self.logger.debug('Initializing of {} finished'.format(name))
def __init__(self, exchange_name, connection_url, config_file=None):
    """
    Creates a new ContainerManagerService.

    If config_file is omitted, it will try the default location
    (/etc/commissaire/containermgr.conf).

    :param exchange_name: Name of the topic exchange
    :type exchange_name: str
    :param connection_url: Kombu connection URL
    :type connection_url: str
    :param config_file: Optional configuration file path
    :type config_file: str or None
    """
    # A single non-exclusive queue receiving all container topics.
    service_queues = [{
        'name': 'containermgr',
        'routing_key': 'container.*',
        'exclusive': False,
    }]
    super().__init__(exchange_name, connection_url, service_queues)
    self.storage = StorageClient(self)
    self.managers = {}
    # Re-read the configuration file so any logging directives in it
    # take effect for this service.
    read_config_file(config_file, '/etc/commissaire/containermgr.conf')
def test_read_config_file_with_storge_handler_as_dict(self):
    """
    Verify the read_config_file function turns storage_handlers into a list.
    """
    data = {
        'storage_handlers': {
            'name': 'commissaire.storage.etcd',
        }
    }
    # Patch open() so read_config_file() sees our JSON payload.
    # (The "as _open" binding was removed — it was never used.)
    with mock.patch('builtins.open',
                    mock.mock_open(read_data=json.dumps(data))):
        conf = config.read_config_file()
        self.assertIsInstance(conf, dict)
        # A bare dict should be wrapped into a single-item list.
        data['storage_handlers'] = [data['storage_handlers']]
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(data, conf)
def __init__(self, exchange_name, connection_url, config_file=None):
    """
    Creates a new WatcherService.

    If config_file is omitted, it will try the default location
    (/etc/commissaire/watcher.conf).

    :param exchange_name: Name of the topic exchange
    :type exchange_name: str
    :param connection_url: Kombu connection URL
    :type connection_url: str
    :param config_file: Optional configuration file path
    :type config_file: str or None
    """
    # Remember the last address seen, used for backoff.  Set before
    # the parent initializer runs, matching the original ordering.
    self.last_address = None
    service_queues = [{
        'name': 'watcher',
        'exclusive': False,
        'routing_key': 'jobs.watcher',
    }]
    super().__init__(exchange_name, connection_url, service_queues)
    self.storage = StorageClient(self)
    # Re-read the configuration file so any logging directives in it
    # take effect for this service.
    read_config_file(config_file, '/etc/commissaire/watcher.conf')
def test_read_config_file_with_storge_handler_as_dict(self):
    """
    Verify the read_config_file function turns storage-handlers into a list.
    """
    data = {
        'storage-handlers': {
            'name': 'commissaire.storage.etcd',
        }
    }
    # Patch open() so read_config_file() sees our JSON payload.
    # (The "as _open" binding was removed — it was never used.)
    with mock.patch('builtins.open',
                    mock.mock_open(read_data=json.dumps(data))):
        conf = config.read_config_file()
        self.assertIsInstance(conf, dict)
        # A bare dict should be wrapped into a single-item list.
        data['storage-handlers'] = [data['storage-handlers']]
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(data, conf)
def test_read_config_file_with_valid_data(self):
    """
    Test the read_config_file function with valid data.
    """
    # Check handling of storage_handler.
    data = {
        'storage_handlers': [
            {'name': 'commissaire.storage.etcd'},
        ],
    }
    # Patch open() so read_config_file() sees our JSON payload.
    # (The "as _open" binding was removed — it was never used.)
    with mock.patch('builtins.open',
                    mock.mock_open(read_data=json.dumps(data))):
        conf = config.read_config_file()
        self.assertIsInstance(conf, dict)
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(data, conf)
def test_read_config_file_with_valid_authentication_plugin(self):
    """
    Verify the read_config_file function parses valid
    authentication-plugin directives.
    """
    data = {
        'authentication-plugin': {
            'name': 'commissaire_htp.authentication.httpbasicauth',
            'users': {},
        }
    }
    # Patch open() so read_config_file() sees our JSON payload.
    # (The "as _open" binding was removed — it was never used.)
    with mock.patch('builtins.open',
                    mock.mock_open(read_data=json.dumps(data))):
        conf = config.read_config_file()
        self.assertIsInstance(conf, dict)
        # The plugin name and its kwargs are split into separate keys.
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(
            data['authentication-plugin']['name'],
            conf['authentication-plugin'])
        self.assertEqual(
            data['authentication-plugin']['users'],
            conf['authentication-plugin-kwargs']['users'])
def test_read_config_file_with_valid_data(self):
    """
    Test the read_config_file function with valid data.
    """
    # Check handling of storage-handler.
    data = {
        'storage-handlers': [
            {
                'name': 'commissaire.storage.etcd'
            },
        ],
    }
    # Patch open() so read_config_file() sees our JSON payload.
    # (The "as _open" binding was removed — it was never used.)
    with mock.patch('builtins.open',
                    mock.mock_open(read_data=json.dumps(data))):
        conf = config.read_config_file()
        self.assertIsInstance(conf, dict)
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(data, conf)
def test_read_config_file_with_valid_authentication_plugin(self):
    """
    Verify the read_config_file function parses valid
    authentication-plugin directives.
    """
    data = {
        'authentication-plugin': {
            'name': 'commissaire_htp.authentication.httpbasicauth',
            'users': {},
        }
    }
    # Patch open() so read_config_file() sees our JSON payload.
    # (The "as _open" binding was removed — it was never used.)
    with mock.patch('builtins.open',
                    mock.mock_open(read_data=json.dumps(data))):
        conf = config.read_config_file()
        self.assertIsInstance(conf, dict)
        # The plugin name and its kwargs are split into separate keys.
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(
            data['authentication-plugin']['name'],
            conf['authentication-plugin'])
        self.assertEqual(
            data['authentication-plugin']['users'],
            conf['authentication-plugin-kwargs']['users'])
def test_read_config_file_with_valid_authentication_plugin(self):
    """
    Verify the read_config_file function parses valid
    authentication_plugin directives.
    """
    plugin_name = 'commissaire_htp.authentication.httpbasicauth'
    data = {
        'authentication_plugins': [{
            'name': plugin_name,
            'users': {},
        }]
    }
    # Patch open() so read_config_file() sees our JSON payload.
    # (The "as _open" binding was removed — it was never used.)
    with mock.patch('builtins.open',
                    mock.mock_open(read_data=json.dumps(data))):
        conf = config.read_config_file()
        self.assertIsInstance(conf, dict)
        # The plugin list is keyed by plugin name after parsing.
        self.assertIn(plugin_name, conf['authentication_plugins'])
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(
            data['authentication_plugins'][0]['users'],
            conf['authentication_plugins'][plugin_name]['users'])
def test_read_config_file_with_valid_authentication_plugin(self):
    """
    Verify the read_config_file function parses valid
    authentication_plugin directives.
    """
    plugin_name = 'commissaire_htp.authentication.httpbasicauth'
    data = {
        'authentication_plugins': [{
            'name': plugin_name,
            'users': {},
        }]
    }
    # Patch open() so read_config_file() sees our JSON payload.
    # (The "as _open" binding was removed — it was never used.)
    with mock.patch('builtins.open',
                    mock.mock_open(read_data=json.dumps(data))):
        conf = config.read_config_file()
        self.assertIsInstance(conf, dict)
        # The plugin list is keyed by plugin name after parsing.
        self.assertIn(plugin_name, conf['authentication_plugins'])
        # assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(
            data['authentication_plugins'][0]['users'],
            conf['authentication_plugins'][plugin_name]['users'])
def __init__(self, exchange_name, connection_url, config_file=None):
    """
    Creates a new StorageService and sets up StoreHandler
    instances according to the config_file.  If config_file is
    omitted, it will try the default location
    (/etc/commissaire/commissaire.conf).

    :param exchange_name: Name of the topic exchange
    :type exchange_name: str
    :param connection_url: Kombu connection URL
    :type connection_url: str
    :param config_file: Optional configuration file path
    :type config_file: str or None
    """
    queue_kwargs = [
        {'routing_key': 'storage.*'},
    ]
    # Use super() for consistency with the sibling service subclasses.
    super().__init__(exchange_name, connection_url, queue_kwargs)

    self._manager = StoreHandlerManager()

    # Collect all model types in commissaire.models.
    self._model_types = {
        k: v for k, v in models.__dict__.items()
        if isinstance(v, type) and issubclass(v, models.Model)
    }

    config_data = read_config_file(config_file)
    store_handlers = config_data.get('storage-handlers', [])

    # Configure store handlers from user data, falling back to the
    # default etcd handler when none are configured.
    if not store_handlers:
        store_handlers = [C.DEFAULT_ETCD_STORE_HANDLER]
    # Renamed the loop variable: the original 'config' shadowed the
    # commonly-imported 'config' module name.
    for handler_config in store_handlers:
        self.register_store_handler(handler_config)
def parse_args(parser):
    """
    Parses and combines arguments from the server configuration file
    and the command-line invocation.  Command-line arguments override
    the configuration file.

    The 'parser' argument should be a fresh argparse.ArgumentParser
    instance with a suitable description, epilog, etc.  This method
    will add arguments to it.

    :param parser: An argument parser instance
    :type parser: argparse.ArgumentParser
    :returns: The parsed arguments in the form of a Namespace
    :rtype: argparse.Namespace
    """
    # Do not use required=True because it would preclude such
    # arguments from being specified in a configuration file.
    parser.add_argument(
        '--config-file', '-c', type=str,
        help='Full path to a JSON configuration file '
             '(command-line arguments override)')
    parser.add_argument(
        '--no-config-file', action='store_true',
        help='Disregard default configuration file, if it exists')
    parser.add_argument(
        '--listen-interface', '-i', type=str, default='0.0.0.0',
        help='Interface to listen on')
    parser.add_argument(
        '--listen-port', '-p', type=int, default=8000,
        help='Port to listen on')
    parser.add_argument(
        '--tls-pemfile', type=str,
        help='Full path to the TLS PEM for the commissaire server')
    parser.add_argument(
        '--tls-clientverifyfile', type=str,
        help='Full path to the TLS file containing the certificate '
             'authorities that client certificates should be verified against')
    parser.add_argument(
        '--authentication-plugin', action='append',
        dest='authentication_plugins',
        metavar='MODULE_NAME:key=value,..',
        type=parse_to_struct,
        help=('Authentication Plugin module and configuration.'))
    parser.add_argument(
        '--bus-exchange', type=str, default='commissaire',
        help='Message bus exchange name.')
    parser.add_argument(
        '--bus-uri', type=str, metavar='BUS_URI',
        help=(
            'Message bus connection URI. See:'
            'http://kombu.readthedocs.io/en/latest/userguide/connections.html')
    )

    # We have to parse the command-line arguments twice.  Once to extract
    # the --config-file option, and again with the config file content as
    # a baseline.
    args = parser.parse_args()
    if not args.no_config_file:
        # Change dashes to underscores
        json_object = {
            k.replace('-', '_'): v
            for k, v in read_config_file(args.config_file).items()}
        args = parser.parse_args(namespace=Namespace(**json_object))
    else:
        # BUGFIX: with action='append', authentication_plugins defaults
        # to None when no --authentication-plugin options were given, so
        # iterating it raised TypeError.  Guard with "or []".
        # NOTE(review): this merge only happens on the --no-config-file
        # path; confirm whether it is also intended when a config file
        # is read.
        configured_plugins = {}
        for auth_plugin in (args.authentication_plugins or []):
            configured_plugins.update(auth_plugin)
        args.authentication_plugins = configured_plugins
    return args