Example #1
    def prop_file(self, node):
        cfg = KafkaConfig(**node.config)
        cfg[config_property.ADVERTISED_HOSTNAME] = node.account.hostname
        cfg[config_property.ZOOKEEPER_CONNECT] = self.zk.connect_setting()

        self.set_protocol_and_port(node)

        # TODO - clean up duplicate configuration logic
        prop_file = cfg.render()
        prop_file += self.render('kafka.properties', node=node, broker_id=self.idx(node),
                                 security_config=self.security_config)
        return prop_file
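
The property file above is just the per-node KafkaConfig plus a rendered template. Below is a minimal sketch of that render step, assuming KafkaConfig behaves like a dict whose render() emits one "key=value" line per entry; MinimalKafkaConfig and the property values are illustrative, not the actual ducktape implementation.

    # A minimal sketch, assuming KafkaConfig behaves like a dict whose
    # render() emits one Java-style "key=value" property line per entry.
    # MinimalKafkaConfig and the property values below are illustrative.
    class MinimalKafkaConfig(dict):
        def render(self):
            return "\n".join("%s=%s" % (k, v) for k, v in self.items()) + "\n"

    cfg = MinimalKafkaConfig({"broker.id": 1})
    cfg["advertised.host.name"] = "worker-1"   # stands in for node.account.hostname
    cfg["zookeeper.connect"] = "zk-1:2181"     # stands in for zk.connect_setting()
    print(cfg.render())
    # broker.id=1
    # advertised.host.name=worker-1
    # zookeeper.connect=zk-1:2181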
Example #2
    def prop_file(self, node):
        self.set_protocol_and_port(node)

        # Load the template configs as a dictionary
        config_template = self.render(
            'kafka.properties',
            node=node,
            broker_id=self.idx(node),
            security_config=self.security_config,
            num_nodes=self.num_nodes,
            listener_security_config=self.listener_security_config)

        configs = dict(l.rstrip().split('=', 1)
                       for l in config_template.split('\n')
                       if not l.startswith("#") and "=" in l)

        # Load test-specific override configs
        override_configs = KafkaConfig(**node.config)
        override_configs[config_property.ADVERTISED_HOSTNAME] = node.account.hostname
        override_configs[config_property.ZOOKEEPER_CONNECT] = self.zk_connect_setting()

        for prop in self.server_prop_overides:
            override_configs[prop[0]] = prop[1]

        for prop in self.per_node_server_prop_overrides.get(self.idx(node), []):
            override_configs[prop[0]] = prop[1]

        # Update the template configs with the test override configs
        configs.update(override_configs)

        prop_file = self.render_configs(configs)
        return prop_file
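
Example #2 inverts the precedence of Example #1: the rendered template is parsed into a dict first, and the test-level and per-node overrides are applied on top before re-rendering. Below is a minimal, self-contained sketch of that parse-and-merge step, assuming the template is plain "key=value" text with "#" comment lines; the template string and override values are made up.

    # A minimal sketch of the parse-and-merge step above, assuming the
    # rendered template is plain "key=value" text with "#" comment lines.
    # The template string and the override values are made up.
    config_template = """
    # comment lines and blank lines are skipped
    broker.id=1
    log.dirs=/mnt/kafka-logs
    zookeeper.connect=localhost:2181
    """

    configs = dict(line.strip().split('=', 1)
                   for line in config_template.split('\n')
                   if not line.strip().startswith('#') and '=' in line)

    # Test-level overrides take precedence over template values.
    override_configs = {'zookeeper.connect': 'zk-1:2181/kafka',
                        'message.max.bytes': '2097152'}
    configs.update(override_configs)

    prop_file = "\n".join("%s=%s" % (k, v) for k, v in configs.items())
    print(prop_file)
    # broker.id=1
    # log.dirs=/mnt/kafka-logs
    # zookeeper.connect=zk-1:2181/kafka
    # message.max.bytes=2097152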
Example #3
    def __init__(self,
                 context,
                 num_nodes,
                 zk,
                 security_protocol=SecurityConfig.PLAINTEXT,
                 interbroker_security_protocol=SecurityConfig.PLAINTEXT,
                 sasl_mechanism=SecurityConfig.SASL_MECHANISM_GSSAPI,
                 topics=None,
                 version=TRUNK,
                 quota_config=None,
                 jmx_object_names=None,
                 jmx_attributes=[]):
        """
        :type context
        :type zk: ZookeeperService
        :type topics: dict
        """
        Service.__init__(self, context, num_nodes)
        JmxMixin.__init__(self, num_nodes, jmx_object_names, jmx_attributes)
        self.log_level = "DEBUG"

        self.zk = zk
        self.quota_config = quota_config

        self.security_protocol = security_protocol
        self.interbroker_security_protocol = interbroker_security_protocol
        self.sasl_mechanism = sasl_mechanism
        self.topics = topics

        for node in self.nodes:
            node.version = version
            node.config = KafkaConfig(
                **{config_property.BROKER_ID: self.idx(node)})
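
Examples #3 and #5 declare jmx_attributes=[] as a default argument, while Examples #4 and #6 switch to jmx_attributes=None and expand it with (jmx_attributes or []). The list is not mutated in these constructors, but the later form avoids Python's shared-mutable-default pitfall. A minimal sketch of the difference; the function names are illustrative.

    # A minimal sketch of why the later examples prefer jmx_attributes=None
    # plus (jmx_attributes or []) over jmx_attributes=[]: a mutable default
    # is created once and shared across calls. Function names are made up.
    def with_mutable_default(attr, jmx_attributes=[]):
        jmx_attributes.append(attr)
        return jmx_attributes

    def with_none_default(attr, jmx_attributes=None):
        jmx_attributes = (jmx_attributes or []) + [attr]
        return jmx_attributes

    print(with_mutable_default('a'))   # ['a']
    print(with_mutable_default('b'))   # ['a', 'b'] -- state leaks across calls
    print(with_none_default('a'))      # ['a']
    print(with_none_default('b'))      # ['b']      -- each call starts fresh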
Example #4
    def __init__(self, context, num_nodes, zk,
                 security_protocol=SecurityConfig.PLAINTEXT,
                 interbroker_security_protocol=SecurityConfig.PLAINTEXT,
                 client_sasl_mechanism=SecurityConfig.SASL_MECHANISM_GSSAPI,
                 interbroker_sasl_mechanism=SecurityConfig.SASL_MECHANISM_GSSAPI,
                 authorizer_class_name=None, topics=None, version=DEV_BRANCH,
                 jmx_object_names=None, jmx_attributes=None,
                 zk_connect_timeout=5000, zk_session_timeout=6000,
                 server_prop_overides=None, zk_chroot=None):
        """
        :type context
        :type zk: ZookeeperService
        :type topics: dict
        """
        Service.__init__(self, context, num_nodes)
        JmxMixin.__init__(self, num_nodes=num_nodes, jmx_object_names=jmx_object_names, jmx_attributes=(jmx_attributes or []),
                          root=KafkaService.PERSISTENT_ROOT)

        self.zk = zk

        self.security_protocol = security_protocol
        self.interbroker_security_protocol = interbroker_security_protocol
        self.client_sasl_mechanism = client_sasl_mechanism
        self.interbroker_sasl_mechanism = interbroker_sasl_mechanism
        self.topics = topics
        self.minikdc = None
        self.authorizer_class_name = authorizer_class_name
        self.zk_set_acl = False
        if server_prop_overides is None:
            self.server_prop_overides = []
        else:
            self.server_prop_overides = server_prop_overides
        self.log_level = "DEBUG"
        self.zk_chroot = zk_chroot

        #
        # In a heavily loaded and not very fast machine, it is
        # sometimes necessary to give more time for the zk client
        # to have its session established, especially if the client
        # is authenticating and waiting for the SaslAuthenticated
        # in addition to the SyncConnected event.
        #
        # The default value for zookeeper.connect.timeout.ms is
        # 2 seconds and here we increase it to 5 seconds, but
        # it can be overridden by setting the corresponding parameter
        # for this constructor.
        self.zk_connect_timeout = zk_connect_timeout

        # Also allow the session timeout to be provided explicitly,
        # primarily so that test cases can depend on it when waiting
        # e.g. brokers to deregister after a hard kill.
        self.zk_session_timeout = zk_session_timeout

        self.port_mappings = {
            'PLAINTEXT': Port('PLAINTEXT', 9092, False),
            'SSL': Port('SSL', 9093, False),
            'SASL_PLAINTEXT': Port('SASL_PLAINTEXT', 9094, False),
            'SASL_SSL': Port('SASL_SSL', 9095, False)
        }

        for node in self.nodes:
            node.version = version
            node.config = KafkaConfig(**{config_property.BROKER_ID: self.idx(node)})
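
Example #4 keeps a port_mappings table with one entry per security protocol; set_protocol_and_port (not shown here) presumably marks which entries a node should open. Below is a minimal sketch of such a table, assuming Port is a namedtuple with (name, number, open) fields; the field names and the select_open_ports helper are assumptions, not the real service code.

    # A minimal sketch of a port_mappings table, assuming Port is a
    # namedtuple with (name, number, open) fields. The field names and the
    # select_open_ports helper are assumptions, not the real service code.
    import collections

    Port = collections.namedtuple('Port', ['name', 'number', 'open'])

    port_mappings = {
        'PLAINTEXT': Port('PLAINTEXT', 9092, False),
        'SSL': Port('SSL', 9093, False),
        'SASL_PLAINTEXT': Port('SASL_PLAINTEXT', 9094, False),
        'SASL_SSL': Port('SASL_SSL', 9095, False),
    }

    def select_open_ports(mappings, protocols):
        # Return a copy of the table with the requested protocols opened.
        return {name: port._replace(open=(name in protocols))
                for name, port in mappings.items()}

    opened = select_open_ports(port_mappings, {'PLAINTEXT', 'SASL_SSL'})
    listeners = ",".join("%s://:%d" % (p.name, p.number)
                         for p in opened.values() if p.open)
    print(listeners)   # PLAINTEXT://:9092,SASL_SSL://:9095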
Example #5
    def __init__(self,
                 context,
                 num_nodes,
                 zk,
                 security_protocol=SecurityConfig.PLAINTEXT,
                 interbroker_security_protocol=SecurityConfig.PLAINTEXT,
                 sasl_mechanism=SecurityConfig.SASL_MECHANISM_GSSAPI,
                 authorizer_class_name=None,
                 topics=None,
                 version=TRUNK,
                 quota_config=None,
                 jmx_object_names=None,
                 jmx_attributes=[],
                 zk_connect_timeout=5000):
        """
        :type context
        :type zk: ZookeeperService
        :type topics: dict
        """
        Service.__init__(self, context, num_nodes)
        JmxMixin.__init__(self, num_nodes, jmx_object_names, jmx_attributes)

        self.zk = zk
        self.quota_config = quota_config

        self.security_protocol = security_protocol
        self.interbroker_security_protocol = interbroker_security_protocol
        self.sasl_mechanism = sasl_mechanism
        self.topics = topics
        self.minikdc = None
        self.authorizer_class_name = authorizer_class_name
        #
        # In a heavily loaded and not very fast machine, it is
        # sometimes necessary to give more time for the zk client
        # to have its session established, especially if the client
        # is authenticating and waiting for the SaslAuthenticated
        # in addition to the SyncConnected event.
        #
        # The default value for zookeeper.connect.timeout.ms is
        # 2 seconds and here we increase it to 5 seconds, but
        # it can be overridden by setting the corresponding parameter
        # for this constructor.
        self.zk_connect_timeout = zk_connect_timeout

        self.port_mappings = {
            'PLAINTEXT': Port('PLAINTEXT', 9092, False),
            'SSL': Port('SSL', 9093, False),
            'SASL_PLAINTEXT': Port('SASL_PLAINTEXT', 9094, False),
            'SASL_SSL': Port('SASL_SSL', 9095, False)
        }

        for node in self.nodes:
            node.version = version
            node.config = KafkaConfig(
                **{config_property.BROKER_ID: self.idx(node)})
Example #6
    def __init__(
            self,
            context,
            num_nodes,
            zk,
            security_protocol=SecurityConfig.PLAINTEXT,
            interbroker_security_protocol=SecurityConfig.PLAINTEXT,
            client_sasl_mechanism=SecurityConfig.SASL_MECHANISM_GSSAPI,
            interbroker_sasl_mechanism=SecurityConfig.SASL_MECHANISM_GSSAPI,
            authorizer_class_name=None,
            topics=None,
            version=DEV_BRANCH,
            jmx_object_names=None,
            jmx_attributes=None,
            zk_connect_timeout=5000,
            zk_session_timeout=6000,
            server_prop_overides=None,
            zk_chroot=None,
            listener_security_config=ListenerSecurityConfig(),
            per_node_server_prop_overrides={}):
        """
        :param context: test context
        :param ZookeeperService zk:
        :param dict topics: which topics to create automatically
        :param str security_protocol: security protocol for clients to use
        :param str interbroker_security_protocol: security protocol to use for broker-to-broker communication
        :param str client_sasl_mechanism: sasl mechanism for clients to use
        :param str interbroker_sasl_mechanism: sasl mechanism to use for broker-to-broker communication
        :param str authorizer_class_name: which authorizer class to use
        :param str version: which kafka version to use. Defaults to "dev" branch
        :param jmx_object_names:
        :param jmx_attributes:
        :param int zk_connect_timeout:
        :param int zk_session_timeout:
        :param dict server_prop_overides: overrides for kafka.properties file
        :param zk_chroot:
        :param ListenerSecurityConfig listener_security_config: listener config to use
        """
        Service.__init__(self, context, num_nodes)
        JmxMixin.__init__(self,
                          num_nodes=num_nodes,
                          jmx_object_names=jmx_object_names,
                          jmx_attributes=(jmx_attributes or []),
                          root=KafkaService.PERSISTENT_ROOT)

        self.zk = zk

        self.security_protocol = security_protocol
        self.client_sasl_mechanism = client_sasl_mechanism
        self.topics = topics
        self.minikdc = None
        self.authorizer_class_name = authorizer_class_name
        self.zk_set_acl = False
        if server_prop_overides is None:
            self.server_prop_overides = []
        else:
            self.server_prop_overides = server_prop_overides
        if per_node_server_prop_overrides is None:
            self.per_node_server_prop_overrides = {}
        else:
            self.per_node_server_prop_overrides = per_node_server_prop_overrides
        self.log_level = "DEBUG"
        self.zk_chroot = zk_chroot
        self.listener_security_config = listener_security_config

        #
        # In a heavily loaded and not very fast machine, it is
        # sometimes necessary to give more time for the zk client
        # to have its session established, especially if the client
        # is authenticating and waiting for the SaslAuthenticated
        # in addition to the SyncConnected event.
        #
        # The default value for zookeeper.connect.timeout.ms is
        # 2 seconds and here we increase it to 5 seconds, but
        # it can be overridden by setting the corresponding parameter
        # for this constructor.
        self.zk_connect_timeout = zk_connect_timeout

        # Also allow the session timeout to be provided explicitly,
        # primarily so that test cases can depend on it when waiting
        # e.g. brokers to deregister after a hard kill.
        self.zk_session_timeout = zk_session_timeout

        self.port_mappings = {
            'PLAINTEXT': KafkaListener('PLAINTEXT', 9092, 'PLAINTEXT', False),
            'SSL': KafkaListener('SSL', 9093, 'SSL', False),
            'SASL_PLAINTEXT': KafkaListener('SASL_PLAINTEXT', 9094, 'SASL_PLAINTEXT', False),
            'SASL_SSL': KafkaListener('SASL_SSL', 9095, 'SASL_SSL', False),
            KafkaService.INTERBROKER_LISTENER_NAME:
                KafkaListener(KafkaService.INTERBROKER_LISTENER_NAME, 9099, None, False)
        }

        self.interbroker_listener = None
        self.setup_interbroker_listener(
            interbroker_security_protocol,
            self.listener_security_config.use_separate_interbroker_listener)
        self.interbroker_sasl_mechanism = interbroker_sasl_mechanism

        for node in self.nodes:
            node.version = version
            node.config = KafkaConfig(
                **{config_property.BROKER_ID: self.idx(node)})
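
Example #6 replaces Port with KafkaListener and adds a dedicated interbroker entry; setup_interbroker_listener (not shown here) presumably either reuses the listener for the chosen security protocol or opens the separate interbroker listener, depending on listener_security_config. Below is a minimal sketch of that choice, assuming KafkaListener is a namedtuple with (name, port_number, security_protocol, open) fields; the field names, the 'INTERNAL' constant value, and the choose_interbroker_listener helper are assumptions.

    # A minimal sketch of the interbroker-listener choice, assuming
    # KafkaListener is a namedtuple with (name, port_number,
    # security_protocol, open) fields. The field names, the 'INTERNAL'
    # constant and the choose_interbroker_listener helper are assumptions.
    import collections

    KafkaListener = collections.namedtuple(
        'KafkaListener', ['name', 'port_number', 'security_protocol', 'open'])

    INTERBROKER_LISTENER_NAME = 'INTERNAL'

    port_mappings = {
        'PLAINTEXT': KafkaListener('PLAINTEXT', 9092, 'PLAINTEXT', False),
        'SASL_SSL': KafkaListener('SASL_SSL', 9095, 'SASL_SSL', False),
        INTERBROKER_LISTENER_NAME:
            KafkaListener(INTERBROKER_LISTENER_NAME, 9099, None, False),
    }

    def choose_interbroker_listener(security_protocol, use_separate_listener):
        # With a separate listener, broker-to-broker traffic gets its own
        # port and carries the interbroker security protocol; otherwise the
        # regular listener for that protocol is reused.
        if use_separate_listener:
            listener = port_mappings[INTERBROKER_LISTENER_NAME]
            return listener._replace(security_protocol=security_protocol)
        return port_mappings[security_protocol]

    print(choose_interbroker_listener('SASL_SSL', use_separate_listener=True))
    print(choose_interbroker_listener('SASL_SSL', use_separate_listener=False))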