import pytest

# NOTE: the module path for ProceeSalesData is assumed here; adjust the
# import to match the actual project layout.
from proceesalesdata import ProceeSalesData


def test_exception_readData():
    print("test_exception_readData Called")
    with pytest.raises(Exception) as excinfo:
        csvfile = "hdfs://localhost:8020/sales/data/test_data.csv"
        processData = ProceeSalesData()
        csvdata = processData.readData(csvfile)
    assert str(excinfo.value) == 'testRaghu'
def test_readData():
    print("test_readData Called")
    csvfile = "hdfs://localhost:8020/sales/data/test_data.csv"
    processData = ProceeSalesData()
    csvdata = processData.readData(csvfile)
    count = csvdata.count()
    assert count == 18
def test_readdata_with_fixture(setup):
    print("test_readdata_with_fixture Called")
    csvfile = "hdfs://localhost:8020/sales/data/test_data.csv"
    expected_count = setup.count()
    processData = ProceeSalesData()
    csvdata = processData.readData(csvfile)
    count = csvdata.count()
    assert count == expected_count
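The setup and setup_sparksession fixtures used by these tests live in a separate conftest.py that is not shown here. A minimal sketch, assuming the same test CSV path and a Hive-enabled Spark session (the fixture bodies are illustrative, not the project's actual conftest.py):

# conftest.py -- a minimal sketch; the real fixtures may differ
import pytest
from pyspark.sql import SparkSession


@pytest.fixture(scope="session")
def setup_sparksession():
    # Hive support is assumed because the tests query raw_sales_db tables
    spark = (SparkSession.builder
             .appName("sales-data-tests")
             .enableHiveSupport()
             .getOrCreate())
    yield spark
    spark.stop()


@pytest.fixture
def setup(setup_sparksession):
    # Expected test data, read directly rather than through ProceeSalesData,
    # so the tests can compare against an independent DataFrame
    csvfile = "hdfs://localhost:8020/sales/data/test_data.csv"
    return setup_sparksession.read.csv(csvfile, header=True, inferSchema=True)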
def test_exception_storedata(setup, setup_sparksession):
    print("test_exception_storedata Called")
    testdata = setup
    processData = ProceeSalesData()
    with pytest.raises(Exception) as excinfo:
        print("entered")
        tablename = "raw_sales_db.raw_sales_transcation_test1123"
        processData.storeData(testdata, tablename)
    assert str(excinfo.value) == 'testRaghu'
def test_storedata(setup, setup_sparksession):
    print("test_storedata Called")
    testdata = setup
    processData = ProceeSalesData()
    tablename = "raw_sales_db.raw_sales_transcation_test"
    # Capture the row count before the write so the assertion only checks
    # the newly appended rows
    originaldata = setup_sparksession.sql(f"select * from {tablename}")
    originalCount = originaldata.count()
    processData.storeData(testdata, tablename)
    data = setup_sparksession.sql(f"select * from {tablename}")
    count = data.count()
    finalcount = count - originalCount
    assert finalcount == testdata.count()
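For reference, the class under test only needs a readData method that returns a DataFrame and a storeData method that appends to a Hive table. The following is a hypothetical sketch consistent with the calls above (the class name is kept as it appears in the tests; this is not the original implementation):

# Hypothetical sketch of the class under test, not the project's actual code
from pyspark.sql import SparkSession


class ProceeSalesData:
    def __init__(self):
        self.spark = SparkSession.builder.enableHiveSupport().getOrCreate()

    def readData(self, csvfile):
        # Read a CSV file from HDFS into a Spark DataFrame
        return self.spark.read.csv(csvfile, header=True, inferSchema=True)

    def storeData(self, dataframe, tablename):
        # Append the DataFrame to the given Hive table
        dataframe.write.mode("append").saveAsTable(tablename)

With the fixtures and the class in place, the suite can be run with pytest -s; the -s flag keeps the print output from the tests visible.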