Example #1
0
def _test():
    """Run the doctests in ``pyspark.sql.avro.functions`` with the Avro JAR loaded.

    Exits with status 0 (skipping) when the optional spark-avro JAR has not
    been built, and with status -1 when any doctest fails.
    """
    import os
    import sys
    from pyspark.testing.utils import search_jar

    # Locate the spark-avro JAR produced by the sbt/maven "-Pavro" profile.
    avro_jar = search_jar("external/avro", "spark-avro", "spark-avro")
    if avro_jar is None:
        print(
            "Skipping all Avro Python tests as the optional Avro project was "
            "not compiled into a JAR. To run these tests, "
            "you need to build Spark with 'build/sbt -Pavro package' or "
            "'build/mvn -Pavro package' before running this test.")
        sys.exit(0)
    else:
        # Prepend the JAR via --jars so the gateway JVM can load the Avro classes.
        existing_args = os.environ.get("PYSPARK_SUBMIT_ARGS", "pyspark-shell")
        jars_args = "--jars %s" % avro_jar
        os.environ["PYSPARK_SUBMIT_ARGS"] = " ".join(
            [jars_args, existing_args])

    import doctest
    from pyspark.sql import SparkSession  # Row was imported but never used
    import pyspark.sql.avro.functions

    # Doctests run against a copy of the module namespace plus a live session.
    globs = pyspark.sql.avro.functions.__dict__.copy()
    spark = SparkSession.builder\
        .master("local[4]")\
        .appName("sql.avro.functions tests")\
        .getOrCreate()
    globs['spark'] = spark
    # testmod returns TestResults(failed, attempted); only failures matter here.
    (failure_count, _) = doctest.testmod(
        pyspark.sql.avro.functions,
        globs=globs,
        optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
    spark.stop()
    if failure_count:
        sys.exit(-1)
Example #2
0
File: functions.py  Project: Brett-A/spark
def _test():
    """Execute the module doctests for ``pyspark.sql.avro.functions``.

    Requires the optional spark-avro JAR; exits 0 (skipping) when it is
    missing and -1 when any doctest fails.
    """
    import os
    import sys
    from pyspark.testing.utils import search_jar

    avro_jar = search_jar("external/avro", "spark-avro")
    if avro_jar is None:
        # The optional Avro module was not built; skip rather than fail.
        print(
            "Skipping all Avro Python tests as the optional Avro project was "
            "not compiled into a JAR. To run these tests, "
            "you need to build Spark with 'build/sbt -Pavro package' or "
            "'build/mvn -Pavro package' before running this test.")
        sys.exit(0)

    # Make the JAR visible to the JVM that pyspark spawns.
    submit_args = os.environ.get("PYSPARK_SUBMIT_ARGS", "pyspark-shell")
    os.environ["PYSPARK_SUBMIT_ARGS"] = " ".join(
        ["--jars %s" % avro_jar, submit_args])

    import doctest
    from pyspark.sql import Row, SparkSession
    import pyspark.sql.avro.functions

    test_globals = pyspark.sql.avro.functions.__dict__.copy()
    session = (
        SparkSession.builder
        .master("local[4]")
        .appName("sql.avro.functions tests")
        .getOrCreate()
    )
    test_globals['spark'] = session
    failures, attempted = doctest.testmod(
        pyspark.sql.avro.functions, globs=test_globals,
        optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
    session.stop()
    if failures:
        sys.exit(-1)
Example #3
0
from pyspark import SparkConf, SparkContext, RDD
from pyspark.streaming import StreamingContext
from pyspark.testing.utils import search_jar

# Must be same as the variable and condition defined in KinesisTestUtils.scala and modules.py
kinesis_test_environ_var = "ENABLE_KINESIS_TESTS"
# Kinesis tests run only when explicitly enabled; "!=" replaces the
# harder-to-read "not ... == '1'" construct with identical semantics.
should_skip_kinesis_tests = os.environ.get(kinesis_test_environ_var) != '1'

if should_skip_kinesis_tests:
    kinesis_requirement_message = (
        "Skipping all Kinesis Python tests as environmental variable 'ENABLE_KINESIS_TESTS' "
        "was not set.")
else:
    # Locate the assembly JAR built by the -Pkinesis-asl profile.
    kinesis_asl_assembly_jar = search_jar(
        "external/kinesis-asl-assembly",
        "spark-streaming-kinesis-asl-assembly-",
        "spark-streaming-kinesis-asl-assembly_")
    if kinesis_asl_assembly_jar is None:
        kinesis_requirement_message = (  # type: ignore
            "Skipping all Kinesis Python tests as the optional Kinesis project was "
            "not compiled into a JAR. To run these tests, "
            "you need to build Spark with 'build/sbt -Pkinesis-asl assembly/package "
            "streaming-kinesis-asl-assembly/assembly' or "
            "'build/mvn -Pkinesis-asl package' before running this test.")
    else:
        # Prepend the JAR via --jars so the gateway JVM can load the Kinesis classes.
        existing_args = os.environ.get("PYSPARK_SUBMIT_ARGS", "pyspark-shell")
        jars_args = "--jars %s" % kinesis_asl_assembly_jar
        os.environ["PYSPARK_SUBMIT_ARGS"] = " ".join(
            [jars_args, existing_args])
        # None signals that the requirement is satisfied and tests may run.
        kinesis_requirement_message = None  # type: ignore
Example #4
0
from pyspark.testing.utils import search_jar


# Must be same as the variable and condition defined in KinesisTestUtils.scala and modules.py
kinesis_test_environ_var = "ENABLE_KINESIS_TESTS"
# Kinesis tests run only when explicitly enabled; "!=" replaces the
# harder-to-read "not ... == '1'" construct with identical semantics.
should_skip_kinesis_tests = os.environ.get(kinesis_test_environ_var) != "1"

if should_skip_kinesis_tests:
    kinesis_requirement_message = (
        "Skipping all Kinesis Python tests as environmental variable 'ENABLE_KINESIS_TESTS' "
        "was not set."
    )
else:
    # Locate the assembly JAR built by the -Pkinesis-asl profile.
    kinesis_asl_assembly_jar = search_jar(
        "connector/kinesis-asl-assembly",
        "spark-streaming-kinesis-asl-assembly-",
        "spark-streaming-kinesis-asl-assembly_",
    )
    if kinesis_asl_assembly_jar is None:
        kinesis_requirement_message = (
            "Skipping all Kinesis Python tests as the optional Kinesis project was "
            "not compiled into a JAR. To run these tests, "
            "you need to build Spark with 'build/sbt -Pkinesis-asl assembly/package "
            "streaming-kinesis-asl-assembly/assembly' or "
            "'build/mvn -Pkinesis-asl package' before running this test."
        )
    else:
        # Prepend the JAR via --jars so the gateway JVM can load the Kinesis classes.
        existing_args = os.environ.get("PYSPARK_SUBMIT_ARGS", "pyspark-shell")
        jars_args = "--jars %s" % kinesis_asl_assembly_jar
        os.environ["PYSPARK_SUBMIT_ARGS"] = " ".join([jars_args, existing_args])
        # None signals that the requirement is satisfied and tests may run.
        kinesis_requirement_message = None  # type: ignore
Example #5
0
from pyspark import SparkConf, SparkContext, RDD
from pyspark.streaming import StreamingContext
from pyspark.testing.utils import search_jar


# Must be same as the variable and condition defined in KinesisTestUtils.scala and modules.py
kinesis_test_environ_var = "ENABLE_KINESIS_TESTS"
# Kinesis tests run only when explicitly enabled; "!=" replaces the
# harder-to-read "not ... == '1'" construct with identical semantics.
should_skip_kinesis_tests = os.environ.get(kinesis_test_environ_var) != '1'

if should_skip_kinesis_tests:
    kinesis_requirement_message = (
        "Skipping all Kinesis Python tests as environmental variable 'ENABLE_KINESIS_TESTS' "
        "was not set.")
else:
    # Locate the assembly JAR built by the -Pkinesis-asl profile.
    kinesis_asl_assembly_jar = search_jar("external/kinesis-asl-assembly",
                                          "spark-streaming-kinesis-asl-assembly")
    if kinesis_asl_assembly_jar is None:
        kinesis_requirement_message = (
            "Skipping all Kinesis Python tests as the optional Kinesis project was "
            "not compiled into a JAR. To run these tests, "
            "you need to build Spark with 'build/sbt -Pkinesis-asl assembly/package "
            "streaming-kinesis-asl-assembly/assembly' or "
            "'build/mvn -Pkinesis-asl package' before running this test.")
    else:
        # Prepend the JAR via --jars so the gateway JVM can load the Kinesis classes.
        existing_args = os.environ.get("PYSPARK_SUBMIT_ARGS", "pyspark-shell")
        jars_args = "--jars %s" % kinesis_asl_assembly_jar
        os.environ["PYSPARK_SUBMIT_ARGS"] = " ".join([jars_args, existing_args])
        # None signals that the requirement is satisfied and tests may run.
        kinesis_requirement_message = None

# Convenience flag used by test modules to gate Kinesis test collection.
should_test_kinesis = kinesis_requirement_message is None