def test_format_partition_strings(self):
    """Partition key=value strings get their values single-quoted for SQL."""
    raw = ['banana=33', 'orange=65', 'apple=abcd']
    quoted = parq._format_partition_strings_for_sql(raw)
    # Every value — numeric-looking or not — is wrapped in single quotes.
    assert quoted == ["banana='33'", "orange='65'", "apple='abcd'"]
def test_create_partitions(self, mock_session_helper, mock_execute):
    """create_partitions issues one ALTER TABLE ... ADD PARTITION statement.

    Builds the expected SQL from the same helper outputs the implementation
    uses, then verifies the session scope executed exactly that statement.
    """
    bucket, schema, table = 'MyBucket', 'MySchema', 'MyTable'
    filepath = 'path/to/data/apple=abcd/banana=1234/abcd1234.parquet'
    mock_execute.return_value = MockScopeObj()
    mock_session_helper.db_session_scope.return_value.__enter__ = scope_execute_mock
    partitions = parq._get_partitions_for_spectrum(filepath)
    formatted_partitions = parq._format_partition_strings_for_sql(
        partitions)
    path_to_data = parq._get_partition_location(filepath)
    with mock_session_helper.db_session_scope() as mock_scope:
        generated_sql = parq.create_partitions(bucket, schema, table,
                                               filepath, mock_session_helper)
        expected_sql = f"ALTER TABLE {schema}.{table} \
ADD PARTITION ({' ,'.join(formatted_partitions)}) \
LOCATION 's3://{bucket}/{path_to_data}';"
        # BUG FIX: the original used `assert mock_scope.execute.called_once_with(...)`.
        # Mock auto-creates a `called_once_with` attribute and returns a truthy
        # Mock object, so that assertion could never fail. The real Mock API is
        # `assert_called_once_with`, which raises AssertionError on mismatch.
        mock_scope.execute.assert_called_once_with(expected_sql)
def test_format_partition_strings_no_partitions(self):
    """An empty partition list formats to an empty list (no SQL fragments)."""
    result = parq._format_partition_strings_for_sql([])
    assert result == []
def test_format_partition_strings_no_partitions(self):
    """An empty partition list formats to an empty list (publish_redshift)."""
    # NOTE(review): this method has the same name as the parq-based test
    # above — if both live in the same class, this one silently shadows it
    # and only one runs. Confirm they belong to different test classes, or
    # rename one (e.g. suffix with the module under test).
    result = publish_redshift._format_partition_strings_for_sql([])
    assert result == []