Example #1
def test_spark_kafka_interservice():
    if utils.kafka_enabled():
        rc, stdout, stderr = sdk_cmd.run_raw_cli("package install {} --yes --cli".format(KAFKA_PACKAGE_NAME))
        if rc != 0:
            LOGGER.warning("Got return code {rc} when trying to install {package} cli\nstdout:{out}\n{err}"
                           .format(rc=rc, package=KAFKA_PACKAGE_NAME, out=stdout, err=stderr))
        stop_count = os.getenv("STOP_COUNT", "1000")
        test_pipeline(
            kerberos_flag="true",
            stop_count=stop_count,
            jar_uri=JAR_URI,
            keytab_secret=KAFKA_KEYTAB_SECRET,
            spark_app_name=SOAK_SPARK_APP_NAME,
            jaas_uri=KAFKA_JAAS_URI)
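Example #1 deliberately tolerates a failed CLI install instead of aborting the soak run. A standalone sketch of that pattern, under the assumption that sdk_cmd.run_raw_cli shells out to the dcos command-line tool (install_package_cli is a hypothetical helper, not part of the test utilities):

import logging
import subprocess

LOGGER = logging.getLogger(__name__)

def install_package_cli(package_name):
    # Run `dcos package install <name> --yes --cli` and capture its output.
    result = subprocess.run(
        ["dcos", "package", "install", package_name, "--yes", "--cli"],
        capture_output=True, text=True)
    if result.returncode != 0:
        # As in the test above: warn and continue, since a nonzero return
        # code may just mean the CLI subcommand is already installed.
        LOGGER.warning("Got return code %s when installing %s cli\nstdout:%s\n%s",
                       result.returncode, package_name,
                       result.stdout, result.stderr)
    return result.returncode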
Example #2
@pytest.fixture(scope='module', autouse=True)
def setup_spark(kerberized_kafka, configure_security_spark,
                configure_universe):
    try:
        # upload here as well, in case this test runs first
        # and the jar hasn't been updated
        utils.upload_file(os.environ["SCALA_TEST_JAR_PATH"])
        utils.require_spark()
        yield
    finally:
        utils.teardown_spark()
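The setup_spark fixture uses the standard pytest generator-fixture pattern: everything before the yield is module-level setup, and the finally clause guarantees utils.teardown_spark() runs even if utils.require_spark() raises partway through. A minimal self-contained sketch of the pattern (names here are illustrative):

import pytest

_events = []

@pytest.fixture(scope='module', autouse=True)
def module_setup():
    try:
        # Setup runs once, before the first test in the module.
        _events.append("setup")
        yield
    finally:
        # Teardown runs once after the last test finishes, and also if
        # the setup step above raises partway through.
        _events.append("teardown")

def test_fixture_was_applied():
    assert _events == ["setup"]

Putting the setup calls inside the try is the key design choice: a provisioning failure still triggers cleanup, at the cost of teardown having to tolerate a half-initialized cluster.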


@pytest.mark.sanity
@pytest.mark.smoke
@pytest.mark.skipif(not utils.kafka_enabled(), reason='KAFKA_ENABLED is false')
def test_spark_and_kafka():
    kerberos_flag = "true" if KERBERIZED_KAFKA else "false"  # tells the app whether Kafka is Kerberized
    stop_count = "48"  # some reasonable number
    test_pipeline(kerberos_flag=kerberos_flag,
                  jar_uri=utils._scala_test_jar_url(),
                  keytab_secret="__dcos_base64___keytab",
                  stop_count=stop_count,
                  spark_app_name=utils.SPARK_APP_NAME)
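The skipif condition is evaluated when pytest imports the module, so KAFKA_ENABLED must already be set when collection starts; setting it from a fixture would be too late. A sketch of the gate the reason string implies (the exact semantics of utils.kafka_enabled() are an assumption here):

import os
import pytest

def kafka_enabled():
    # Illustrative stand-in for utils.kafka_enabled(), keyed off the
    # KAFKA_ENABLED environment variable named in the reason string.
    return os.environ.get("KAFKA_ENABLED", "true") == "true"

@pytest.mark.skipif(not kafka_enabled(), reason="KAFKA_ENABLED is false")
def test_requires_kafka():
    assert kafka_enabled()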


def test_pipeline(kerberos_flag,
                  stop_count,
                  jar_uri,
                  keytab_secret,
                  spark_app_name,
                  jaas_uri=None):  # optional: Example #2 omits it
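The excerpt ends inside the test_pipeline signature; the jaas_uri=None default above is inferred from the two call sites, since Example #1 passes KAFKA_JAAS_URI and Example #2 omits the argument. The body is not shown, so the following is only a sketch of how such parameters are commonly turned into a Spark-on-Mesos submission; the conf keys are standard Spark options, but whether test_pipeline uses them is an assumption, and build_submit_args is hypothetical:

def build_submit_args(kerberos_flag, stop_count, keytab_secret, jaas_uri=None):
    # Hypothetical assembly of spark-submit options from the pipeline
    # parameters; this helper does not exist in the test utilities.
    args = [
        "--conf", "spark.mesos.driver.secret.names={}".format(keytab_secret),
        "--conf", "spark.mesos.driver.secret.filenames=kafka-client.keytab",
    ]
    if jaas_uri is not None:
        # Fetch the JAAS config into the driver sandbox when provided.
        args += ["--conf", "spark.mesos.uris={}".format(jaas_uri)]
    # Application arguments travel as plain strings, which is why the
    # callers pass kerberos_flag and stop_count as "true"/"48".
    app_args = "{} {}".format(kerberos_flag, stop_count)
    return args, app_args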