Example #1
def test_version(version,
                 cmd=None,
                 deploy=True,
                 conf={},
                 debug=False,
                 exec_cnt=1,
                 root_path='tmp',
                 broker_cnt=3):
    """
    @brief Create, deploy and start a Kafka cluster using Kafka \p version
    Then run librdkafka's regression tests.
    """

    print('## Test version %s' % version)

    cluster = Cluster('LibrdkafkaTestCluster', root_path, debug=debug)

    # Enable SSL if desired
    if 'SSL' in conf.get('security.protocol', ''):
        cluster.ssl = SslApp(cluster, conf)

    # One ZK (from Kafka repo)
    zk1 = ZookeeperApp(cluster)
    zk_address = zk1.get('address')

    # Start Kerberos KDC if GSSAPI is configured
    if 'GSSAPI' in args.conf.get('sasl_mechanisms', []):
        KerberosKdcApp(cluster, 'MYREALM').start()

    defconf = {
        'replication_factor': min(broker_cnt, 3),
        'num_partitions': 4,
        'version': version
    }
    defconf.update(conf)

    print('conf: ', defconf)

    brokers = []
    for n in range(0, broker_cnt):
        brokers.append(KafkaBrokerApp(cluster, defconf))

    # Generate test config file
    security_protocol = 'PLAINTEXT'
    fd, test_conf_file = tempfile.mkstemp(prefix='test_conf', text=True)
    os.write(fd, ('test.sql.command=sqlite3 rdktests\n').encode('ascii'))
    os.write(fd, 'broker.address.family=v4\n'.encode('ascii'))
    if version != 'trunk':
        os.write(fd,
                 ('broker.version.fallback=%s\n' % version).encode('ascii'))
    else:
        os.write(fd, 'api.version.request=true\n'.encode('ascii'))
    # SASL (only one mechanism supported)
    mech = defconf.get('sasl_mechanisms', '').split(',')[0]
    if mech != '':
        os.write(fd, ('sasl.mechanisms=%s\n' % mech).encode('ascii'))
        if mech == 'PLAIN' or mech.find('SCRAM') != -1:
            print('# Writing SASL %s client config to %s' %
                  (mech, test_conf_file))
            security_protocol = 'SASL_PLAINTEXT'
            # Use first user as SASL user/pass
            for up in defconf.get('sasl_users', '').split(','):
                u, p = up.split('=')
                os.write(fd, ('sasl.username=%s\n' % u).encode('ascii'))
                os.write(fd, ('sasl.password=%s\n' % p).encode('ascii'))
                break
        else:
            print('# FIXME: SASL %s client config not written to %s' %
                  (mech, test_conf_file))

    # SSL support
    ssl = getattr(cluster, 'ssl', None)
    if ssl is not None:
        if 'SASL' in security_protocol:
            security_protocol = 'SASL_SSL'
        else:
            security_protocol = 'SSL'

        key, req, pem = ssl.create_key('librdkafka')

        os.write(fd, ('ssl.ca.location=%s\n' % ssl.ca_cert).encode('ascii'))
        os.write(fd, ('ssl.certificate.location=%s\n' % pem).encode('ascii'))
        os.write(fd, ('ssl.key.location=%s\n' % key).encode('ascii'))
        os.write(fd, ('ssl.key.password=%s\n' %
                      ssl.conf.get('ssl_key_pass')).encode('ascii'))

    # Define bootstrap brokers based on selected security protocol
    print('# Using client security.protocol=%s' % security_protocol)
    all_listeners = (','.join(cluster.get_all('listeners', '',
                                              KafkaBrokerApp))).split(',')
    bootstrap_servers = ','.join(
        [x for x in all_listeners if x.startswith(security_protocol)])
    os.write(fd,
             ('bootstrap.servers=%s\n' % bootstrap_servers).encode('ascii'))
    os.write(fd,
             ('security.protocol=%s\n' % security_protocol).encode('ascii'))
    os.close(fd)

    if deploy:
        print('# Deploying cluster')
        cluster.deploy()
    else:
        print('# Not deploying')

    print('# Starting cluster, instance path %s' % cluster.instance_path())
    cluster.start()

    print('# Waiting for brokers to come up')

    if not cluster.wait_operational(30):
        cluster.stop(force=True)
        raise Exception('Cluster %s did not go operational, see logs in %s/%s' % \
                        (cluster.name, cluster.root_path, cluster.instance))

    print('# Connect to cluster with bootstrap.servers %s' % bootstrap_servers)

    cmd_env = 'export KAFKA_PATH="%s" RDKAFKA_TEST_CONF="%s" ZK_ADDRESS="%s" BROKERS="%s" TEST_KAFKA_VERSION="%s" TRIVUP_ROOT="%s"; ' % \
              (brokers[0].conf.get('destdir'), test_conf_file, zk_address, bootstrap_servers, version, cluster.instance_path())
    if not cmd:
        cmd = 'bash --rcfile <(cat ~/.bashrc; echo \'PS1="[TRIVUP:%s@%s] \\u@\\h:\w$ "\')' % (
            cluster.name, version)
    for i in range(0, exec_cnt):
        subprocess.call('%s %s' % (cmd_env, cmd),
                        shell=True,
                        executable='/bin/bash')

    try:
        os.remove(test_conf_file)
    except:
        pass

    cluster.stop(force=True)

    cluster.cleanup(keeptypes=['log'])
    return True
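
The function above reads a module-level `args` object (normally built by `argparse` in the surrounding script) in addition to its own `conf` argument. A minimal, hypothetical driver sketch under that assumption; the version string, SASL user/password and conf values are illustrative only:

import argparse

# Hypothetical stand-in for the argparse result the real script provides;
# test_version() consults args.conf when deciding whether to start a Kerberos KDC.
args = argparse.Namespace(conf={'sasl_mechanisms': 'PLAIN'})

test_version('2.8.0',
             conf={'security.protocol': 'SASL_PLAINTEXT',
                   'sasl_mechanisms': 'PLAIN',
                   'sasl_users': 'testuser=testpass'},
             broker_cnt=3)
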
Example #2
def test_version (version, cmd=None, deploy=True, conf={}, debug=False, exec_cnt=1):
    """
    @brief Create, deploy and start a Kafka cluster using Kafka \p version
    Then run librdkafka's regression tests.
    """

    print('## Test version %s' % version)
    
    cluster = Cluster('librdkafkaInteractiveBrokerVersionTests', 'tmp', debug=debug)

    # Enable SSL if desired
    if 'SSL' in conf.get('security.protocol', ''):
        cluster.ssl = SslApp(cluster, conf)

    # One ZK (from Kafka repo)
    zk1 = ZookeeperApp(cluster, bin_path=kafka_path + '/bin/zookeeper-server-start.sh')
    zk_address = zk1.get('address')

    # Start Kerberos KDC if GSSAPI is configured
    if 'GSSAPI' in args.conf.get('sasl_mechanisms', []):
        KerberosKdcApp(cluster, 'MYREALM').start()

    # Three brokers
    defconf = {'replication_factor': 3, 'num_partitions': 4, 'version': version}
    defconf.update(conf)

    print('conf: ', defconf)

    broker1 = KafkaBrokerApp(cluster, defconf, kafka_path=kafka_path)
    broker2 = KafkaBrokerApp(cluster, defconf, kafka_path=kafka_path)
    broker3 = KafkaBrokerApp(cluster, defconf, kafka_path=kafka_path)

    # Generate test config file
    security_protocol='PLAINTEXT'
    fd, test_conf_file = tempfile.mkstemp(prefix='test_conf', text=True)
    if version != 'trunk':
        os.write(fd, ('broker.version.fallback=%s\n' % version).encode('ascii'))
    else:
        os.write(fd, 'api.version.request=true\n'.encode('ascii'))
    # SASL (only one mechanism supported)
    mech = defconf.get('sasl_mechanisms', '').split(',')[0]
    if mech != '':
        os.write(fd, ('sasl.mechanisms=%s\n' % mech).encode('ascii'))
        if mech == 'PLAIN':
            print('# Writing SASL PLAIN client config to %s' % test_conf_file)
            security_protocol='SASL_PLAINTEXT'
            # Use first user as SASL user/pass
            for up in defconf.get('sasl_users', '').split(','):
                u, p = up.split('=')
                os.write(fd, ('sasl.username=%s\n' % u).encode('ascii'))
                os.write(fd, ('sasl.password=%s\n' % p).encode('ascii'))
                break
        else:
            print('# FIXME: SASL %s client config not written to %s' % (mech, test_conf_file))

    # SSL support
    ssl = getattr(cluster, 'ssl', None)
    if ssl is not None:
        if 'SASL' in security_protocol:
            security_protocol = 'SASL_SSL'
        else:
            security_protocol = 'SSL'

        key, req, pem = ssl.create_key('librdkafka')

        os.write(fd, ('ssl.ca.location=%s\n' % ssl.ca_cert).encode('ascii'))
        os.write(fd, ('ssl.certificate.location=%s\n' % pem).encode('ascii'))
        os.write(fd, ('ssl.key.location=%s\n' % key).encode('ascii'))
        os.write(fd, ('ssl.key.password=%s\n' % ssl.conf.get('ssl_key_pass')).encode('ascii'))


    # Define bootstrap brokers based on selected security protocol
    print('# Using client security.protocol=%s' % security_protocol)
    all_listeners = (','.join(cluster.get_all('listeners', '', KafkaBrokerApp))).split(',')
    bootstrap_servers = ','.join([x for x in all_listeners if x.startswith(security_protocol)])
    os.write(fd, ('bootstrap.servers=%s\n' % bootstrap_servers).encode('ascii'))
    os.write(fd, ('security.protocol=%s\n' % security_protocol).encode('ascii'))
    os.close(fd)

    if deploy:
        print('# Deploying cluster')
        cluster.deploy()
    else:
        print('# Not deploying')

    print('# Starting cluster')
    cluster.start()

    print('# Waiting for brokers to come up')

    if not cluster.wait_operational(30):
        cluster.stop(force=True)
        raise Exception('Cluster did not go operational, see logs in %s' % \
                        (cluster.root_path))

    print('# Connect to cluster with bootstrap.servers %s' % bootstrap_servers)

    cmd_env = 'export KAFKA_PATH=%s RDKAFKA_TEST_CONF=%s ZK_ADDRESS=%s BROKERS=%s TEST_KAFKA_VERSION=%s;' % \
              (broker1.conf.get('destdir'), test_conf_file, zk_address, bootstrap_servers, version)
    if not cmd:
        cmd = 'bash --rcfile <(cat ~/.bashrc; echo \'PS1="[TRIVUP:%s@%s] \\u@\\h:\w$ "\')' % (cluster.name, version)
    for i in range(0, exec_cnt):
        subprocess.call('%s %s' % (cmd_env, cmd), shell=True, executable='/bin/bash')

    os.remove(test_conf_file)

    cluster.stop(force=True)

    cluster.cleanup(keeptypes=['log'])
    return True
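
The temporary file generated above is a plain `key=value` librdkafka configuration whose path is handed to the test run through the `RDKAFKA_TEST_CONF` environment variable. As a hedged illustration (this loader is not part of librdkafka or trivup), a consuming harness could read it back like this:

import os

def load_test_conf(path=None):
    """Parse the key=value lines written by test_version() above."""
    path = path or os.environ['RDKAFKA_TEST_CONF']
    conf = {}
    with open(path) as f:
        for line in f:
            line = line.strip()
            if line and not line.startswith('#'):
                key, _, value = line.partition('=')
                conf[key] = value
    return conf

# e.g. conf = load_test_conf(); conf['bootstrap.servers'], conf['security.protocol']
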
Example #3
def test_version (version, cmd=None, deploy=True, conf={}, debug=False, exec_cnt=1,
                  root_path='tmp', broker_cnt=3, scenario='default'):
    """
    @brief Create, deploy and start a Kafka cluster using Kafka \p version
    Then run librdkafka's regression tests.
    """

    print('## Test version %s' % version)

    cluster = Cluster('LibrdkafkaTestCluster', root_path, debug=debug)

    # Enable SSL if desired
    if 'SSL' in conf.get('security.protocol', ''):
        cluster.ssl = SslApp(cluster, conf)

    # One ZK (from Kafka repo)
    zk1 = ZookeeperApp(cluster)
    zk_address = zk1.get('address')

    # Start Kerberos KDC if GSSAPI is configured
    if 'GSSAPI' in args.conf.get('sasl_mechanisms', []):
        KerberosKdcApp(cluster, 'MYREALM').start()

    defconf = {'version': version}
    defconf.update(conf)

    print('conf: ', defconf)

    brokers = []
    for n in range(0, broker_cnt):
        # Configure rack & replica selector if broker supports fetch-from-follower
        if version_as_number(version) >= 2.4:
            defconf.update({'conf': ['broker.rack=RACK${appid}', 'replica.selector.class=org.apache.kafka.common.replica.RackAwareReplicaSelector']})
        brokers.append(KafkaBrokerApp(cluster, defconf))

    cmd_env = os.environ.copy()

    # Generate test config file
    security_protocol='PLAINTEXT'
    fd, test_conf_file = tempfile.mkstemp(prefix='test_conf', text=True)
    os.write(fd, ('test.sql.command=sqlite3 rdktests\n').encode('ascii'))
    os.write(fd, 'broker.address.family=v4\n'.encode('ascii'))
    if version.startswith('0.9') or version.startswith('0.8'):
        os.write(fd, 'api.version.request=false\n'.encode('ascii'))
        os.write(fd, ('broker.version.fallback=%s\n' % version).encode('ascii'))
    # SASL (only one mechanism supported)
    mech = defconf.get('sasl_mechanisms', '').split(',')[0]
    if mech != '':
        os.write(fd, ('sasl.mechanisms=%s\n' % mech).encode('ascii'))
        if mech == 'PLAIN' or mech.find('SCRAM') != -1:
            print('# Writing SASL %s client config to %s' % (mech, test_conf_file))
            security_protocol='SASL_PLAINTEXT'
            # Use first user as SASL user/pass
            for up in defconf.get('sasl_users', '').split(','):
                u,p = up.split('=')
                os.write(fd, ('sasl.username=%s\n' % u).encode('ascii'))
                os.write(fd, ('sasl.password=%s\n' % p).encode('ascii'))
                break
        elif mech == 'OAUTHBEARER':
            security_protocol='SASL_PLAINTEXT'
            os.write(fd, 'enable.sasl.oauthbearer.unsecure.jwt=true\n'.encode('ascii'))
            os.write(fd, ('sasl.oauthbearer.config=%s\n' % \
                          'scope=requiredScope principal=admin').encode('ascii'))
        else:
            print('# FIXME: SASL %s client config not written to %s' % (mech, test_conf_file))

    # SSL support
    ssl = getattr(cluster, 'ssl', None)
    if ssl is not None:
        if 'SASL' in security_protocol:
            security_protocol = 'SASL_SSL'
        else:
            security_protocol = 'SSL'

        key = ssl.create_cert('librdkafka')

        os.write(fd, ('ssl.ca.location=%s\n' % ssl.ca['pem']).encode('ascii'))
        os.write(fd, ('ssl.certificate.location=%s\n' % key['pub']['pem']).encode('ascii'))
        os.write(fd, ('ssl.key.location=%s\n' % key['priv']['pem']).encode('ascii'))
        os.write(fd, ('ssl.key.password=%s\n' % key['password']).encode('ascii'))

        for k, v in ssl.ca.items():
            cmd_env['RDK_SSL_ca_{}'.format(k)] = v

        # Set envs for all generated keys so tests can find them.
        for k, v in key.items():
            if isinstance(v, dict):
                for k2, v2 in v.items():
                    # E.g. "RDK_SSL_priv_der=path/to/librdkafka-priv.der"
                    cmd_env['RDK_SSL_{}_{}'.format(k, k2)] = v2
            else:
                cmd_env['RDK_SSL_{}'.format(k)] = v


    # Define bootstrap brokers based on selected security protocol
    print('# Using client security.protocol=%s' % security_protocol)
    all_listeners = (','.join(cluster.get_all('listeners', '', KafkaBrokerApp))).split(',')
    bootstrap_servers = ','.join([x for x in all_listeners if x.startswith(security_protocol)])
    os.write(fd, ('bootstrap.servers=%s\n' % bootstrap_servers).encode('ascii'))
    os.write(fd, ('security.protocol=%s\n' % security_protocol).encode('ascii'))
    os.close(fd)

    if deploy:
        print('# Deploying cluster')
        cluster.deploy()
    else:
        print('# Not deploying')

    print('# Starting cluster, instance path %s' % cluster.instance_path())
    cluster.start()

    print('# Waiting for brokers to come up')

    if not cluster.wait_operational(30):
        cluster.stop(force=True)
        raise Exception('Cluster %s did not go operational, see logs in %s/%s' % \
                        (cluster.name, cluster.root_path, cluster.instance))

    print('# Connect to cluster with bootstrap.servers %s' % bootstrap_servers)

    cmd_env['KAFKA_PATH'] = brokers[0].conf.get('destdir')
    cmd_env['RDKAFKA_TEST_CONF'] = test_conf_file
    cmd_env['ZK_ADDRESS'] = zk_address
    cmd_env['BROKERS'] = bootstrap_servers
    cmd_env['TEST_KAFKA_VERSION'] = version
    cmd_env['TRIVUP_ROOT'] = cluster.instance_path()
    cmd_env['TEST_SCENARIO'] = scenario

    # Per broker env vars
    for b in [x for x in cluster.apps if isinstance(x, KafkaBrokerApp)]:
        cmd_env['BROKER_ADDRESS_%d' % b.appid] = b.conf['address']
        # Add each broker pid as an env so they can be killed individually.
        cmd_env['BROKER_PID_%d' % b.appid] = str(b.proc.pid)
        # JMX port, if available
        jmx_port = b.conf.get('jmx_port', None)
        if jmx_port is not None:
            cmd_env['BROKER_JMX_PORT_%d' % b.appid] = str(jmx_port)

    if not cmd:
        cmd_env['PS1'] = '[TRIVUP:%s@%s] \\u@\\h:\w$ ' % (cluster.name, version)
        cmd = 'bash --rcfile <(cat ~/.bashrc)'

    ret = True

    for i in range(0, exec_cnt):
        retcode = subprocess.call(cmd, env=cmd_env, shell=True, executable='/bin/bash')
        if retcode != 0:
            print('# Command failed with returncode %d: %s' % (retcode, cmd))
            ret = False

    try:
        os.remove(test_conf_file)
    except:
        pass

    cluster.stop(force=True)

    cluster.cleanup(keeptypes=['log'])
    return ret
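
This variant exports one `BROKER_PID_<appid>` (plus `BROKER_ADDRESS_<appid>` and, when available, `BROKER_JMX_PORT_<appid>`) variable per broker so tests can target brokers individually. A small sketch of how a test could use those variables, assuming it runs inside the spawned command environment; the broker id and signal choice are illustrative:

import os
import signal

def kill_broker(appid, sig=signal.SIGTERM):
    """Terminate one broker via the BROKER_PID_<appid> variable exported above."""
    pid = int(os.environ['BROKER_PID_%d' % appid])
    os.kill(pid, sig)

# e.g. kill_broker(1) while the remaining brokers keep serving BROKERS
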
Example #4
def test_version (version, cmd=None, deploy=True, conf={}, debug=False, exec_cnt=1,
                  root_path='tmp', broker_cnt=3):
    """
    @brief Create, deploy and start a Kafka cluster using Kafka \p version
    Then run librdkafka's regression tests.
    """

    print('## Test version %s' % version)
    
    cluster = Cluster('LibrdkafkaTestCluster', root_path, debug=debug)

    # Enable SSL if desired
    if 'SSL' in conf.get('security.protocol', ''):
        cluster.ssl = SslApp(cluster, conf)

    # One ZK (from Kafka repo)
    zk1 = ZookeeperApp(cluster)
    zk_address = zk1.get('address')

    # Start Kerberos KDC if GSSAPI is configured
    if 'GSSAPI' in args.conf.get('sasl_mechanisms', []):
        KerberosKdcApp(cluster, 'MYREALM').start()

    defconf = {'replication_factor': min(int(conf.get('replication_factor', broker_cnt)), 3), 'num_partitions': 4, 'version': version}
    defconf.update(conf)

    print('conf: ', defconf)

    brokers = []
    for n in range(0, broker_cnt):
        brokers.append(KafkaBrokerApp(cluster, defconf))

    # Generate test config file
    security_protocol='PLAINTEXT'
    fd, test_conf_file = tempfile.mkstemp(prefix='test_conf', text=True)
    os.write(fd, ('test.sql.command=sqlite3 rdktests\n').encode('ascii'))
    os.write(fd, 'broker.address.family=v4\n'.encode('ascii'))
    if version != 'trunk':
        os.write(fd, ('broker.version.fallback=%s\n' % version).encode('ascii'))
    else:
        os.write(fd, 'api.version.request=true\n'.encode('ascii'))
    # SASL (only one mechanism supported)
    mech = defconf.get('sasl_mechanisms', '').split(',')[0]
    if mech != '':
        os.write(fd, ('sasl.mechanisms=%s\n' % mech).encode('ascii'))
        if mech == 'PLAIN' or mech.find('SCRAM') != -1:
            print('# Writing SASL %s client config to %s' % (mech, test_conf_file))
            security_protocol='SASL_PLAINTEXT'
            # Use first user as SASL user/pass
            for up in defconf.get('sasl_users', '').split(','):
                u,p = up.split('=')
                os.write(fd, ('sasl.username=%s\n' % u).encode('ascii'))
                os.write(fd, ('sasl.password=%s\n' % p).encode('ascii'))
                break
        elif mech == 'OAUTHBEARER':
            security_protocol='SASL_PLAINTEXT'
            os.write(fd, ('sasl.oauthbearer.config=%s\n' % \
                          'scope=requiredScope principal=admin').encode('ascii'))
        else:
            print('# FIXME: SASL %s client config not written to %s' % (mech, test_conf_file))

    # SSL support
    ssl = getattr(cluster, 'ssl', None)
    if ssl is not None:
        if 'SASL' in security_protocol:
            security_protocol = 'SASL_SSL'
        else:
            security_protocol = 'SSL'

        key, req, pem = ssl.create_key('librdkafka')

        os.write(fd, ('ssl.ca.location=%s\n' % ssl.ca_cert).encode('ascii'))
        os.write(fd, ('ssl.certificate.location=%s\n' % pem).encode('ascii'))
        os.write(fd, ('ssl.key.location=%s\n' % key).encode('ascii'))
        os.write(fd, ('ssl.key.password=%s\n' % ssl.conf.get('ssl_key_pass')).encode('ascii'))


    # Define bootstrap brokers based on selected security protocol
    print('# Using client security.protocol=%s' % security_protocol)
    all_listeners = (','.join(cluster.get_all('listeners', '', KafkaBrokerApp))).split(',')
    bootstrap_servers = ','.join([x for x in all_listeners if x.startswith(security_protocol)])
    os.write(fd, ('bootstrap.servers=%s\n' % bootstrap_servers).encode('ascii'))
    os.write(fd, ('security.protocol=%s\n' % security_protocol).encode('ascii'))
    os.close(fd)

    if deploy:
        print('# Deploying cluster')
        cluster.deploy()
    else:
        print('# Not deploying')

    print('# Starting cluster, instance path %s' % cluster.instance_path())
    cluster.start()

    print('# Waiting for brokers to come up')

    if not cluster.wait_operational(30):
        cluster.stop(force=True)
        raise Exception('Cluster %s did not go operational, see logs in %s/%s' % \
                        (cluster.name, cluster.root_path, cluster.instance))

    print('# Connect to cluster with bootstrap.servers %s' % bootstrap_servers)

    cmd_env = os.environ.copy()
    cmd_env['KAFKA_PATH'] = brokers[0].conf.get('destdir')
    cmd_env['RDKAFKA_TEST_CONF'] = test_conf_file
    cmd_env['ZK_ADDRESS'] = zk_address
    cmd_env['BROKERS'] = bootstrap_servers
    cmd_env['TEST_KAFKA_VERSION'] = version
    cmd_env['TRIVUP_ROOT'] = cluster.instance_path()
    # Add each broker pid as an env so they can be killed individually.
    for b in [x for x in cluster.apps if isinstance(x, KafkaBrokerApp)]:
        cmd_env['BROKER_PID_%d' % b.appid] = str(b.proc.pid)

    if not cmd:
        cmd = 'bash --rcfile <(cat ~/.bashrc; echo \'PS1="[TRIVUP:%s@%s] \\u@\\h:\w$ "\')' % (cluster.name, version)

    ret = True

    for i in range(0, exec_cnt):
        retcode = subprocess.call(cmd, env=cmd_env, shell=True, executable='/bin/bash')
        if retcode != 0:
            print('# Command failed with returncode %d: %s' % (retcode, cmd))
            ret = False

    try:
        os.remove(test_conf_file)
    except:
        pass

    cluster.stop(force=True)

    cluster.cleanup(keeptypes=['log'])
    return ret
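
Since this version returns `False` when any of the `exec_cnt` invocations exits non-zero, a caller can run the regression suite non-interactively and propagate the result; the test-runner command below is an assumption, not something this script provides:

import sys

# Hypothetical non-interactive run: execute an assumed test runner twice and
# turn the aggregated result into the process exit code.
# (Requires the module-level `args` object shown in the sketch after Example #1.)
ok = test_version('2.3.0',
                  cmd='./run-test.sh',
                  exec_cnt=2,
                  broker_cnt=3)
sys.exit(0 if ok else 1)
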