Code example #1
    def test_run_spark_read_from_jdbc(self, mock_spark_read_from_jdbc, mock_spark_session):
        """Verify that a SPARK_READ_FROM_JDBC command forwards every parsed
        argument, in order, to the mocked spark_read_from_jdbc helper."""
        # Given: CLI-style arguments for a JDBC read command.
        parsed = _parse_arguments(['-cmdType', SPARK_READ_FROM_JDBC] + self.jdbc_arguments[2:])
        session = mock_spark_session.builder.appName(parsed.name).enableHiveSupport().getOrCreate()

        # When
        _run_spark(arguments=parsed)

        # Then: the reader helper receives the session followed by all JDBC options.
        expected_call_args = (
            session,
            parsed.url,
            parsed.user,
            parsed.password,
            parsed.metastore_table,
            parsed.jdbc_table,
            parsed.jdbc_driver,
            parsed.save_mode,
            parsed.save_format,
            parsed.fetch_size,
            parsed.num_partitions,
            parsed.partition_column,
            parsed.lower_bound,
            parsed.upper_bound,
        )
        mock_spark_read_from_jdbc.assert_called_once_with(*expected_call_args)
Code example #2
    def test_run_spark_write_to_jdbc(self, mock_spark_write_to_jdbc,
                                     mock_spark_session):
        """Verify that a SPARK_WRITE_TO_JDBC command forwards every parsed
        argument, in order, to the mocked spark_write_to_jdbc helper."""
        # Given: CLI-style arguments for a JDBC write command.
        parsed = _parse_arguments(['-cmdType', SPARK_WRITE_TO_JDBC] +
                                  self.jdbc_arguments[2:])
        session = mock_spark_session.builder.appName(
            parsed.name).enableHiveSupport().getOrCreate()

        # When
        _run_spark(arguments=parsed)

        # Then: the writer helper receives the session followed by all JDBC options.
        expected_call_args = (
            session,
            parsed.url,
            parsed.user,
            parsed.password,
            parsed.metastore_table,
            parsed.jdbc_table,
            parsed.jdbc_driver,
            parsed.truncate,
            parsed.save_mode,
            parsed.batch_size,
            parsed.num_partitions,
            parsed.create_table_column_types,
        )
        mock_spark_write_to_jdbc.assert_called_once_with(*expected_call_args)