Example #1
    # The mock_reader_load argument implies a mock.patch decorator that this
    # snippet lost on extraction, presumably something along the lines of
    # @mock.patch.object(DataFrameReader, "load").
    def test_spark_read_from_jdbc(self, mock_reader_load):
        # Given: parse the JDBC arguments, create a Spark session, and make
        # sure the target metastore table exists.
        arguments = _parse_arguments(self.jdbc_arguments)
        spark_session = _create_spark_session(arguments)
        spark_session.sql(f"CREATE TABLE IF NOT EXISTS {arguments.metastore_table} (key INT)")

        # When
        spark_read_from_jdbc(
            spark_session,
            arguments.url,
            arguments.user,
            arguments.password,
            arguments.metastore_table,
            arguments.jdbc_table,
            arguments.jdbc_driver,
            arguments.save_mode,
            arguments.save_format,
            arguments.fetch_size,
            arguments.num_partitions,
            arguments.partition_column,
            arguments.lower_bound,
            arguments.upper_bound,
        )

        # Then: the frame returned by the patched load must be saved to the
        # metastore exactly once, with the requested format and mode.
        mock_reader_load().write.saveAsTable.assert_called_once_with(
            arguments.metastore_table, format=arguments.save_format, mode=arguments.save_mode
        )
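
For context, the chain the assertion inspects, load().write.saveAsTable(...), matches the stock PySpark JDBC reader API. Below is a minimal sketch of an equivalent read-then-save, assuming a live SparkSession and a reachable JDBC endpoint; the helper name and parameter list are illustrative, not the tested function's actual implementation.

from pyspark.sql import SparkSession

def read_jdbc_into_metastore(spark: SparkSession, url, user, password, driver,
                             jdbc_table, metastore_table, save_format, save_mode):
    # Chain the standard JDBC data source options, then materialize the
    # resulting frame into the metastore, mirroring the asserted call above.
    df = (
        spark.read.format("jdbc")
        .option("url", url)
        .option("dbtable", jdbc_table)
        .option("user", user)
        .option("password", password)
        .option("driver", driver)
        .load()
    )
    df.write.saveAsTable(metastore_table, format=save_format, mode=save_mode)

The test's fetch_size and partitioning arguments would map onto the reader's standard fetchsize, numPartitions, partitionColumn, lowerBound, and upperBound options.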
Example #2
    # As in the first example, the mock_writer_save argument implies a lost
    # mock.patch decorator, presumably something along the lines of
    # @mock.patch.object(DataFrameWriter, "save").
    def test_spark_write_to_jdbc(self, mock_writer_save):
        # Given: parse the JDBC arguments, create a Spark session, and make
        # sure the source metastore table exists.
        arguments = _parse_arguments(self.jdbc_arguments)
        spark_session = _create_spark_session(arguments)
        spark_session.sql(f"CREATE TABLE IF NOT EXISTS {arguments.metastore_table} (key INT)")

        # When: push the metastore table out to the JDBC target.
        spark_write_to_jdbc(
            spark_session=spark_session,
            url=arguments.url,
            user=arguments.user,
            password=arguments.password,
            metastore_table=arguments.metastore_table,
            jdbc_table=arguments.jdbc_table,
            driver=arguments.jdbc_driver,
            truncate=arguments.truncate,
            save_mode=arguments.save_mode,
            batch_size=arguments.batch_size,
            num_partitions=arguments.num_partitions,
            create_table_column_types=arguments.create_table_column_types,
        )

        # Then: the patched save must be called exactly once, with only the
        # save mode passed through to the writer.
        mock_writer_save.assert_called_once_with(mode=arguments.save_mode)
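
This is the mirror image of the read test: save(mode=...) is the stock DataFrameWriter entry point that the mock intercepts. A minimal sketch of an equivalent metastore-to-JDBC write under the same assumptions (the helper name and signature are again illustrative, not the tested function's body):

from pyspark.sql import SparkSession

def write_metastore_to_jdbc(spark: SparkSession, url, user, password, driver,
                            metastore_table, jdbc_table, truncate, save_mode,
                            batch_size, num_partitions):
    # Read the table back out of the metastore and push it through the
    # standard JDBC writer options; save(mode=...) is the call the test
    # asserts on.
    writer = (
        spark.table(metastore_table)
        .write.format("jdbc")
        .option("url", url)
        .option("dbtable", jdbc_table)
        .option("user", user)
        .option("password", password)
        .option("driver", driver)
        .option("truncate", truncate)
        .option("batchsize", batch_size)
        .option("numPartitions", num_partitions)
    )
    writer.save(mode=save_mode)

The test's create_table_column_types argument would map onto the writer's standard createTableColumnTypes option.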