def test_dataset_upload_validation_fails_with_query_and_append(self):
    query = 'SELECT 1'
    dataset = Dataset.from_query(query=query, context=self.cc)
    err_msg = 'Error using append with a query Dataset. It is not possible to append data to a query'
    with self.assertRaises(CartoException, msg=err_msg):
        dataset.upload(table_name=self.test_write_table, if_exists=Dataset.APPEND)

def test_dataset_upload_validation_query_fails_without_table_name(self):
    query = 'SELECT 1'
    dataset = Dataset.from_query(query=query, context=self.cc)
    with self.assertRaises(ValueError, msg='You should provide a table_name and context to upload data.'):
        dataset.upload()

def test_dataset_from_query(self):
    query = 'SELECT * FROM fake_table'
    dataset = Dataset.from_query(query=query, context=self.cc)
    self.assertIsInstance(dataset, Dataset)
    self.assertEqual(dataset.query, query)
    self.assertIsNone(dataset.table_name)
    self.assertIsNone(dataset.df)
    self.assertIsNone(dataset.gdf)
    self.assertEqual(dataset.cc, self.cc)
    self.assertEqual(dataset.state, Dataset.STATE_REMOTE)

def test_dataset_download_and_upload(self):
    self.assertNotExistsTable(self.test_write_table)

    query = 'SELECT 1 as fakec'
    dataset = Dataset.from_query(query=query, context=self.cc)
    dataset.upload(table_name=self.test_write_table)

    dataset = Dataset.from_table(table_name=self.test_write_table, context=self.cc)
    dataset.download()
    dataset.upload(table_name=self.test_write_table, if_exists=Dataset.REPLACE)

def test_dataset_upload_into_existing_table_fails_without_replace_property(self):
    query = 'SELECT 1'
    dataset = Dataset.from_query(query=query, context=self.cc)
    dataset.upload(table_name=self.test_write_table)

    err_msg = ('Table with name {t} and schema {s} already exists in CARTO. Please choose a different '
               '`table_name` or use if_exists="replace" to overwrite it').format(
                   t=self.test_write_table, s='public')
    with self.assertRaises(CartoException, msg=err_msg):
        dataset.upload(table_name=self.test_write_table)

    dataset.upload(table_name=self.test_write_table, if_exists=Dataset.REPLACE)

def test_dataset_download_bool_null(self):
    self.assertNotExistsTable(self.test_write_table)

    query = 'SELECT * FROM (values (true, true), (false, false), (false, null)) as x(fakec_bool, fakec_bool_null)'
    dataset = Dataset.from_query(query=query, context=self.cc)
    dataset.upload(table_name=self.test_write_table)

    dataset = Dataset.from_table(table_name=self.test_write_table, context=self.cc)
    df = dataset.download()

    self.assertEqual(df['fakec_bool'].dtype, 'bool')
    self.assertEqual(df['fakec_bool_null'].dtype, 'object')
    self.assertEqual(list(df['fakec_bool']), [True, False, False])
    self.assertEqual(list(df['fakec_bool_null']), [True, False, None])

def test_dataset_download_validations(self):
    self.assertNotExistsTable(self.test_write_table)

    df = load_geojson(self.test_geojson)
    dataset = Dataset.from_dataframe(df=df)
    error_msg = 'You should provide a context and a table_name or query to download data.'
    with self.assertRaises(ValueError, msg=error_msg):
        dataset.download()

    query = 'SELECT 1 as fakec'
    dataset = Dataset.from_query(query=query, context=self.cc)
    dataset.upload(table_name=self.test_write_table)

    dataset.table_name = 'non_used_table'
    df = dataset.download()
    self.assertEqual('fakec' in df.columns, True)

    dataset = Dataset.from_table(table_name=self.test_write_table, context=self.cc)
    df = dataset.download()
    self.assertEqual('fakec' in df.columns, True)