Example #1: a pytest fixture that lazily creates a shared TkContext with the daaltk plugin library loaded, resolving the sparktk jars from the SPARKTK_HOME environment variable.
def tc(request):
    global global_tc
    with lock:
        # Create the SparkContext and TkContext once; reuse them for all tests
        if global_tc is None:
            import os
            from sparktk import TkContext
            from sparktk import create_sc
            from sparktk.tests import utils
            import daaltk
            #from sparktk.loggers import loggers
            #loggers.set("d", "sparktk.sparkconf")

            # Get path to sparktk jars from SPARKTK_HOME
            if 'SPARKTK_HOME' in os.environ:
                sparktk_dir = os.environ['SPARKTK_HOME']
            else:
                raise RuntimeError("SPARKTK_HOME must be defined.")

            sc = create_sc(
                other_libs=[daaltk],
                master='local[2]',
                sparktk_home=sparktk_dir,
                app_name="pytest-pyspark-local-testing",
                extra_conf_dict={"spark.hadoop.fs.default.name": "file:///"})
            request.addfinalizer(lambda: sc.stop())  # stop Spark at fixture teardown

            global_tc = TkContext(sc,
                                  other_libs=[daaltk],
                                  sparktk_home=sparktk_dir)
            global_tc.testing = utils
    return global_tc
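This fixture, like the two variants below, relies on module-level names (lock, global_tc) and on pytest injecting the request object, none of which appear in the snippets. A minimal sketch of that scaffolding, assuming a session-scoped pytest fixture (the decorator and its scope are assumptions; only lock and global_tc come from the code above):

import threading

import pytest

lock = threading.Lock()  # serializes the one-time SparkContext creation
global_tc = None         # cached TkContext shared by every test

@pytest.fixture(scope="session")  # scope is an assumption; session scope keeps
def tc(request):                  # sc.stop() from running until the run ends
    ...  # body as in the examples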
Example #2: the same fixture without daaltk; create_sc is called with defaults and the TkContext is built from the SparkContext alone.
def tc(request):
    global global_tc
    with lock:
        if global_tc is None:
            from sparktk import TkContext
            from sparktk import create_sc
            from sparktk.tests import utils
            #from sparktk.loggers import loggers
            #loggers.set("d", "sparktk.sparkconf")
            sc = create_sc(master='local[2]',
                           app_name="pytest-pyspark-local-testing",
                           extra_conf_dict={"spark.hadoop.fs.default.name": "file:///"})
            request.addfinalizer(lambda: sc.stop())
            global_tc = TkContext(sc)
            global_tc.testing = utils
    return global_tc
Example #3: identical to Example #2 except that the extra Spark configuration is passed through the extra_conf keyword, matching a different version of the create_sc signature.
def tc(request):
    global global_tc
    with lock:
        if global_tc is None:
            from sparktk import TkContext
            from sparktk import create_sc
            from sparktk.tests import utils
            #from sparktk.loggers import loggers
            #loggers.set("d", "sparktk.sparkconf")
            sc = create_sc(master='local[2]',
                           app_name="pytest-pyspark-local-testing",
                           extra_conf={"spark.hadoop.fs.default.name": "file:///"})
            request.addfinalizer(lambda: sc.stop())
            global_tc = TkContext(sc)
            global_tc.testing = utils
    return global_tc
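A test consumes any of these fixtures simply by naming tc as a parameter; pytest injects the cached TkContext. A minimal usage sketch (the test name and assertions are illustrative; tc.testing is bound by the fixtures themselves):

def test_tc_fixture(tc):
    # The fixture returns the shared TkContext with the testing helpers attached
    assert tc is not None
    assert tc.testing is not None  # set to sparktk.tests.utils by the fixture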