def test_staging_new_table(self):
    """Create a staging table for a MetaTable entry with no backing table.

    Rows are read from the radio fixture CSV; all five fixture rows
    should be present in the staging table.
    """
    with Staging(self.unloaded_meta, source_path=self.radio_path) as s_table:
        # NOTE(review): this test reads via `session` while the
        # existing-table test uses `postgres_engine` — confirm intentional.
        staged_rows = session.execute(s_table.table.select()).fetchall()
        self.assertEqual(len(staged_rows), 5)
def test_staging_existing_table(self):
    """Create a staging table from a fixture CSV whose columns match
    an already-loaded dataset, and verify all five rows were staged.
    """
    with Staging(self.existing_meta, source_path=self.dog_path) as s_table, \
            postgres_engine.begin() as conn:
        staged_rows = conn.execute(s_table.table.select()).fetchall()
        self.assertEqual(len(staged_rows), 5)
def test_col_info_provided(self):
    """Honor column type info submitted by the frontend.

    The frontend sends back (name, type) pairs whose type strings must
    be compatible with COL_VALUES in etl.point; they are stored as JSON
    on the MetaTable record before staging.
    """
    submitted_pairs = [
        ('event_name', 'string'),
        ('date', 'date'),
        ('lat', 'float'),
        ('lon', 'float'),
    ]
    self.unloaded_meta.contributed_data_types = json.dumps(
        [{'field_name': field, 'data_type': dtype}
         for field, dtype in submitted_pairs]
    )
    with Staging(self.unloaded_meta, source_path=self.radio_path) as s_table:
        found_names = self.extract_names(s_table.cols)
        self.assertEqual(set(found_names), set(self.expected_radio_col_names))
def test_col_info_existing(self):
    """Column names for an already-loaded dataset come from the existing
    table and match the expected dog-fixture columns.
    """
    with Staging(self.existing_meta, source_path=self.dog_path) as s_table:
        found_names = self.extract_names(s_table.cols)
        self.assertEqual(set(found_names), set(self.expected_dog_col_names))
def test_col_info_infer(self):
    """With no contributed type info and no existing table, column names
    are inferred from the radio fixture CSV itself.
    """
    with Staging(self.unloaded_meta, source_path=self.radio_path) as s_table:
        inferred_names = self.extract_names(s_table.cols)
        self.assertEqual(set(inferred_names), set(self.expected_radio_col_names))