def __init__(self, sparkContext):
        """Create a new HbaseContext wrapping the given SparkContext.

        @param sparkContext: The SparkContext to wrap.
        """
        # Run the base SQLContext setup first, then grab a handle to the
        # JVM-side HBase context via the helper.
        SQLContext.__init__(self, sparkContext)
        scala_ctx = self._get_hbase_ctx()
        self._scala_HBaseSQLContext = scala_ctx
    def __init__(self, sparkContext):
        """Construct an HbaseContext on top of ``sparkContext``.

        @param sparkContext: The SparkContext to wrap.
        """
        # Base-class initialisation, followed by fetching the
        # Scala/JVM HBase SQL context and caching it on the instance.
        SQLContext.__init__(self, sparkContext)
        self._scala_HBaseSQLContext = self._get_hbase_ctx()
    def __init__(self, sparkContext, sapSQLContext=None):
        """Create a new SapSQLContext.

        @param sparkContext: The SparkContext to wrap.
        @param sapSQLContext: Optional JVM-side SapSQLContext to reuse;
            forwarded to ``SQLContext.__init__`` and kept on the instance.

        >>> from datetime import datetime
        >>> from pyspark_vora import *
        >>> allTypes = sc.parallelize([Row(i=1, s="string", d=1.0, l=1,
        ...     b=True, list=[1, 2, 3], dict={"s": 0}, row=Row(a=1),
        ...     time=datetime(2014, 8, 1, 14, 1, 5))])
        >>> df = allTypes.toDF()
        >>> df.registerTempTable("allTypes")
        >>> q = 'select i+1, d+1, not b, list[1], dict["s"], time, row.a from allTypes where b and i > 0'
        >>> result = sqlContext.sql(q).collect()
        >>> list(map(lambda x: tuple(x), result)) # Row.__repr__ is not compatible Spark 1.4/1.5
        [(2, 2.0, False, 2, 0, datetime.datetime(2014, 8, 1, 14, 1, 5), 1)]
        >>> df.map(lambda x: (x.i, str(x.s), x.d, x.l, x.b, x.time, x.row.a, x.list)).collect()
        [(1, 'string', 1.0, 1, True, datetime.datetime(2014, 8, 1, 14, 1, 5), 1, [1, 2, 3])]
        """
        # Hand the (possibly None) JVM context to the base class, then
        # remember it so other methods can reach the Scala side directly.
        jvm_ctx = sapSQLContext
        SQLContext.__init__(self, sparkContext, jvm_ctx)
        self._scala_SapSQLContext = jvm_ctx
    def __init__(self, sparkContext, sapSQLContext=None):
        """Initialise a SapSQLContext over ``sparkContext``.

        @param sparkContext: The SparkContext to wrap.
        @param sapSQLContext: Optional existing JVM SapSQLContext,
            passed through to ``SQLContext.__init__`` and stored.

        >>> from datetime import datetime
        >>> from pyspark_vora import *
        >>> allTypes = sc.parallelize([Row(i=1, s="string", d=1.0, l=1,
        ...     b=True, list=[1, 2, 3], dict={"s": 0}, row=Row(a=1),
        ...     time=datetime(2014, 8, 1, 14, 1, 5))])
        >>> df = allTypes.toDF()
        >>> df.registerTempTable("allTypes")
        >>> q = 'select i+1, d+1, not b, list[1], dict["s"], time, row.a from allTypes where b and i > 0'
        >>> result = sqlContext.sql(q).collect()
        >>> list(map(lambda x: tuple(x), result)) # Row.__repr__ is not compatible Spark 1.4/1.5
        [(2, 2.0, False, 2, 0, datetime.datetime(2014, 8, 1, 14, 1, 5), 1)]
        >>> df.map(lambda x: (x.i, str(x.s), x.d, x.l, x.b, x.time, x.row.a, x.list)).collect()
        [(1, 'string', 1.0, 1, True, datetime.datetime(2014, 8, 1, 14, 1, 5), 1, [1, 2, 3])]
        """
        # Delegate to the base SQLContext with the provided JVM context
        # (may be None), and keep a reference for later Scala-side calls.
        SQLContext.__init__(self, sparkContext, sapSQLContext)
        self._scala_SapSQLContext = sapSQLContext
 def __init__(self, sparkContext, magellanContext=None):
     """Create a Magellan-enabled context wrapping ``sparkContext``.

     @param sparkContext: The SparkContext to wrap.
     @param magellanContext: Optional pre-built JVM Magellan context;
         stored on the instance only when truthy.
     """
     SQLContext.__init__(self, sparkContext)
     # Guard clause: without a supplied JVM context there is nothing to keep.
     if not magellanContext:
         return
     self._scala_MagellanContext = magellanContext
Exemple #6
0
 def __init__(self, sparkContext, magellanContext=None):
     """Set up the context; optionally attach a JVM Magellan context.

     @param sparkContext: The SparkContext to wrap.
     @param magellanContext: If truthy, cached as the Scala-side
         Magellan context; otherwise the attribute is left unset.
     """
     # Base initialisation always happens; the Magellan handle is optional.
     SQLContext.__init__(self, sparkContext)
     if magellanContext:
         self._scala_MagellanContext = magellanContext