Example #1
0
def init(quiet=False, spark_conf=None, **kwargs):
    """Initialise the Hail context with variant-spark support.

        Ensures the variant-spark jar is listed in ``spark.jars`` and
        delegates to ``hail.init()``.

        :param quiet: if True, suppress the informational message on stderr.
        :param spark_conf: optional dict of extra Spark configuration;
            existing ``spark.jars`` entries are preserved and the other
            keys are forwarded unchanged.
        :param kwargs: same as for hail.init()
    """

    # Copy so the caller's dict is not mutated; drop empty entries that
    # "".split(",") would otherwise produce for a missing/empty spark.jars.
    conf = dict(spark_conf) if spark_conf else {}
    jars = [p.strip() for p in conf.get("spark.jars", "").split(",")
            if p.strip()]
    vs_jar_path = vs.find_jar()
    assert os.path.exists(vs_jar_path), "%s does not exist" % vs_jar_path
    if not quiet:
        sys.stderr.write("using variant-spark jar at '%s'\n" % vs_jar_path)
    if vs_jar_path not in jars:
        jars.append(vs_jar_path)
    # Merge the jar list back into the full conf so that other
    # spark_conf settings are no longer silently dropped.
    conf['spark.jars'] = ",".join(jars)
    hl.init(quiet=quiet, spark_conf=conf, **kwargs)
Example #2
0
def init(**kwargs):
    """Initialise a SparkContext with the variant-spark jar (and the
    bundled Hail jar, when present) on the classpath, then call
    ``hail.init()``.

    :param kwargs: same as for hail.init()
    """

    jars = []
    vs_jar_path = vs.find_jar()
    assert os.path.exists(vs_jar_path), "%s does not exist" % vs_jar_path
    sys.stderr.write("using variant-spark jar at '%s'\n" % vs_jar_path)
    jars.append(vs_jar_path)

    conf = SparkConf()
    if pkg_resources.resource_exists(hl.__name__, "hail-all-spark.jar"):
        hail_jar_path = pkg_resources.resource_filename(hl.__name__, "hail-all-spark.jar")
        assert os.path.exists(hail_jar_path), "%s does not exist" % hail_jar_path
        sys.stderr.write("using hail jar at '%s'\n" % hail_jar_path)
        jars.append(hail_jar_path)
        # Only reference hail_jar_path when the bundled jar exists: the
        # previous code set these unconditionally and raised NameError
        # whenever hail-all-spark.jar was not packaged with hail.
        conf.set('spark.driver.extraClassPath', hail_jar_path)
        conf.set('spark.executor.extraClassPath', './hail-all-spark.jar')

    conf.set('spark.jars', ",".join(jars))
    SparkContext._ensure_initialized(conf=conf)

    hl.init(**kwargs)
Example #3
0
def varspark_jar():
    """Print the path of the variant-spark jar to stdout."""
    jar_path = find_jar()
    print(jar_path)
Example #4
0
def varspark_submit():
    """Console entry point wrapping ``spark-submit``.

    Prepends ``--jars <variant-spark jar>`` to the user's command line,
    runs ``spark-submit``, and exits with its return code.
    """
    args = ['spark-submit', '--jars', find_jar()] + sys.argv[1:]
    # Pass the argv list directly with shell=False: the old
    # " ".join(["'%s'" % arg ...]) + shell=True approach broke on
    # arguments containing single quotes and was shell-injection prone.
    sys.exit(subprocess.call(args))