Example #1
0
 def create_table_using_datasource(self, provider, schemaddl=False):
     """Create the shared test table via *provider* and append the test rows.

     Parameters
     ----------
     provider : str
         Datasource provider name passed to ``SnappySession.createTable``.
     schemaddl : bool, optional
         When ``False`` (default), supply the schema as a ``StructType``
         taken from the DataFrame; when ``True``, supply it as a DDL string.
     """
     sparkSession = SnappySession(self.sc)
     # Use the public createDataFrame API rather than reaching into the
     # private SparkContext handle (_sc) and round-tripping through an RDD.
     df = sparkSession.createDataFrame(SnappyContextTests.testdata,
                                       ["COL1", "COL2", "COL3"])
     if schemaddl is False:
         sparkSession.createTable(SnappyContextTests.tablename, provider, df.schema)
     else:
         sparkSession.createTable(SnappyContextTests.tablename, provider, "(COL1 INT , COL2 INT , COL3 INT)")
     df.write.format("row").mode("append").saveAsTable(SnappyContextTests.tablename)
Example #2
0
 def test_create_table_without_schema(self):
     """Create a table from a parquet file without supplying a schema.

     The default provider (parquet) is used, and the schema is picked up
     from the parquet file itself.
     """
     self.drop_table(True)
     session = SnappySession(self.sc)
     test_dir = os.path.dirname(os.path.abspath(__file__))
     parquet_path = os.path.join(test_dir, "../../test_support/kv.parquet")
     session.createTable(SnappyContextTests.tablename, path=parquet_path)
     self.verify_table_rows(3)
     self.drop_table()
Example #3
0
 def create_table_using_datasource(self, provider, schemaddl=False):
     """Create the shared test table via *provider* and append the test rows.

     Parameters
     ----------
     provider : str
         Datasource provider name passed to ``SnappySession.createTable``.
     schemaddl : bool, optional
         When ``False`` (default), supply the schema as a ``StructType``;
         when ``True``, supply it as a DDL string.
     """
     sparkSession = SnappySession(self.sc)
     schema = StructType().add("col1", IntegerType()).add("col2", IntegerType()).add("col3", IntegerType())
     # Renamed from `input`, which shadowed the builtin of the same name.
     rows = SnappyContextTests.testdata
     df = sparkSession.createDataFrame(rows, schema)
     if schemaddl is False:
         sparkSession.createTable(SnappyContextTests.tablename, provider, schema)
     else:
         sparkSession.createTable(SnappyContextTests.tablename, provider, "(COL1 INT , COL2 INT , COL3 INT)")
     df.write.format("row").mode("append").saveAsTable(SnappyContextTests.tablename)
Example #4
0
 def create_table_using_datasource(self, provider, schemaddl=False):
     """Create the shared test table via *provider* and append the test rows.

     Parameters
     ----------
     provider : str
         Datasource provider name passed to ``SnappySession.createTable``.
     schemaddl : bool, optional
         When ``False`` (default), supply the schema as a ``StructType``
         taken from the DataFrame; when ``True``, supply it as a DDL string.
     """
     sparkSession = SnappySession(self.sc)
     # Use the public createDataFrame API rather than reaching into the
     # private SparkContext handle (_sc) and round-tripping through an RDD.
     df = sparkSession.createDataFrame(SnappyContextTests.testdata,
                                       ["COL1", "COL2", "COL3"])
     if schemaddl is False:
         sparkSession.createTable(SnappyContextTests.tablename, provider,
                                  df.schema)
     else:
         sparkSession.createTable(SnappyContextTests.tablename, provider,
                                  "(COL1 INT , COL2 INT , COL3 INT)")
     df.write.format("row").mode("append").saveAsTable(
         SnappyContextTests.tablename)