Exemplo n.º 1
0
 def test_should_connect(self, mock_df):
     """connect() on a registered table must trigger a parquet read."""
     parquet_dal = ParquetDAL(self.dirname, self.sc)
     parquet_table = ParquetTable(self.table_name,
                                  schema_index_file=self.filename)
     parquet_dal.set_table(parquet_table)
     # set_table() may already have read the schema; clear the mock so the
     # assertion below observes only the connect() call.
     mock_df.reset_mock()
     parquet_dal.connect(parquet_table.name)
     self.assertTrue(mock_df.parquet.called)
Exemplo n.º 2
0
    def test_should_not_connect_twice_on_next_get_schema_from_parquet(self,
                                                                      mock_df):
        """A second schema() call must reuse the schema fetched by set_table()."""
        parquet_dal = ParquetDAL(self.dirname, self.sc)
        parquet_table = ParquetTable(self.table_name,
                                     schema_index_file=self.filename)
        # Registering the table performs the first (and only) schema fetch.
        parquet_dal.set_table(parquet_table)

        self.assertTrue(mock_df.parquet.called)
        mock_df.reset_mock()
        parquet_table.schema()
        # No new parquet read should have happened for the cached schema.
        self.assertFalse(mock_df.parquet.called)
Exemplo n.º 3
0
    def setUp(self):
        """Build a ParquetDAL fixture and write a one-row parquet file for it.

        Creates a Spark DataFrame from a small pandas frame and persists it
        under <test_dir>/<table_name>/<filename> so the tests can read it back.
        """
        self.sc = SparkContext._active_spark_context
        self.dirname = os.path.dirname(os.path.abspath(__file__))
        self.dal = ParquetDAL(self.dirname, self.sc)
        self.table_name = "test_table"
        self.filename = "example.parquet"
        self.full_path_file = os.path.join(self.dirname, self.table_name,
                                           self.filename)
        # OrderedDict keeps a deterministic column order for the schema below.
        self.dataframe = OrderedDict([('A', [1]), ('B', [2]), ('C', [3])])
        self.df = pd.DataFrame(self.dataframe)
        # BUG FIX: on Python 3, dict.keys() is a view, not a list, and
        # pyspark's createDataFrame rejects it as a schema argument
        # (it expects a list/tuple of column names). Materialize it.
        self.spark_df = self.dal.context.createDataFrame(
            self.df, list(self.dataframe.keys()))

        self.spark_df.write.parquet(self.full_path_file)
Exemplo n.º 4
0
 def test_should_raise_error_connecting_to_not_found_table(self, mock_df):
     """connect() with an unregistered table name must raise ValueError."""
     parquet_dal = ParquetDAL(self.dirname, self.sc)
     parquet_table = ParquetTable(self.table_name,
                                  schema_index_file=self.filename)
     parquet_dal.set_table(parquet_table)
     # 'not_found_table' was never registered, so the lookup must fail.
     self.assertRaises(ValueError, parquet_dal.connect, 'not_found_table')