Example #1
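    # Note: `self.jdbc_arguments` (a CLI-style argument list) and
    # `self.default_arguments` (expected name/value pairs) are fixtures defined
    # in the enclosing test class's setup, which this snippet omits.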
    def test_parse_arguments(self):
        # When
        parsed_arguments = _parse_arguments(args=self.jdbc_arguments)

        # Then
        for argument_name, argument_value in self.default_arguments.items():
            assert getattr(parsed_arguments, argument_name) == argument_value
Example #2
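    # Note: the `mock_reader_load` parameter implies a patch decorator cropped
    # from this snippet, presumably something like
    # @mock.patch.object(pyspark.sql.DataFrameReader, "load")
    # (an assumption inferred from the parameter name, not shown in the source).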
    def test_spark_read_from_jdbc(self, mock_reader_load):
        # Given
        arguments = _parse_arguments(self.jdbc_arguments)
        spark_session = _create_spark_session(arguments)
        spark_session.sql("CREATE TABLE IF NOT EXISTS " + arguments.metastore_table + " (key INT)")

        # When
        spark_read_from_jdbc(
            spark_session,
            arguments.url,
            arguments.user,
            arguments.password,
            arguments.metastore_table,
            arguments.jdbc_table,
            arguments.jdbc_driver,
            arguments.save_mode,
            arguments.save_format,
            arguments.fetch_size,
            arguments.num_partitions,
            arguments.partition_column,
            arguments.lower_bound,
            arguments.upper_bound,
        )

        # Then
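        # `mock_reader_load()` is the mock DataFrame returned by the patched
        # load() call; the read path is verified via its write.saveAsTable call.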
        mock_reader_load().write.saveAsTable.assert_called_once_with(
            arguments.metastore_table, format=arguments.save_format, mode=arguments.save_mode
        )
Example #3
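    # Note: `mock_writer_save` implies a cropped patch decorator, presumably
    # something like @mock.patch.object(pyspark.sql.DataFrameWriter, "save")
    # (an assumption inferred from the parameter name, not shown in the source).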
    def test_spark_write_to_jdbc(self, mock_writer_save):
        # Given
        arguments = _parse_arguments(self.jdbc_arguments)
        spark_session = _create_spark_session(arguments)
        spark_session.sql("CREATE TABLE IF NOT EXISTS " +
                          arguments.metastore_table + " (key INT)")
        # When

        spark_write_to_jdbc(
            spark_session=spark_session,
            url=arguments.url,
            user=arguments.user,
            password=arguments.password,
            metastore_table=arguments.metastore_table,
            jdbc_table=arguments.jdbc_table,
            driver=arguments.jdbc_driver,
            truncate=arguments.truncate,
            save_mode=arguments.save_mode,
            batch_size=arguments.batch_size,
            num_partitions=arguments.num_partitions,
            create_table_column_types=arguments.create_table_column_types,
        )

        # Then
        mock_writer_save.assert_called_once_with(mode=arguments.save_mode)
Example #4
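    # Note: the two mock parameters imply two stacked @mock.patch decorators
    # cropped from this snippet, presumably patching SparkSession and
    # spark_read_from_jdbc in the module under test (an assumption; mock.patch
    # passes mocks bottom-up, so the innermost decorator supplies
    # `mock_spark_read_from_jdbc`).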
    def test_run_spark_read_from_jdbc(self, mock_spark_read_from_jdbc, mock_spark_session):
        # Given
        arguments = _parse_arguments(['-cmdType', SPARK_READ_FROM_JDBC] + self.jdbc_arguments[2:])
        spark_session = mock_spark_session.builder.appName(arguments.name).enableHiveSupport().getOrCreate()

        # When
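        # _run_spark dispatches on the parsed -cmdType value (here SPARK_READ_FROM_JDBC)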
        _run_spark(arguments=arguments)

        # Then
        mock_spark_read_from_jdbc.assert_called_once_with(
            spark_session,
            arguments.url,
            arguments.user,
            arguments.password,
            arguments.metastore_table,
            arguments.jdbc_table,
            arguments.jdbc_driver,
            arguments.save_mode,
            arguments.save_format,
            arguments.fetch_size,
            arguments.num_partitions,
            arguments.partition_column,
            arguments.lower_bound,
            arguments.upper_bound,
        )
Example #5
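    # Note: as in the previous example, two cropped @mock.patch decorators are
    # implied, presumably patching SparkSession and spark_write_to_jdbc in the
    # module under test (an assumption inferred from the parameter names).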
    def test_run_spark_write_to_jdbc(self, mock_spark_write_to_jdbc, mock_spark_session):
        # Given
        arguments = _parse_arguments(['-cmdType', SPARK_WRITE_TO_JDBC] + self.jdbc_arguments[2:])
        spark_session = mock_spark_session.builder.appName(arguments.name).enableHiveSupport().getOrCreate()

        # When
        _run_spark(arguments=arguments)

        # Then
        mock_spark_write_to_jdbc.assert_called_once_with(
            spark_session,
            arguments.url,
            arguments.user,
            arguments.password,
            arguments.metastore_table,
            arguments.jdbc_table,
            arguments.jdbc_driver,
            arguments.truncate,
            arguments.save_mode,
            arguments.batch_size,
            arguments.num_partitions,
            arguments.create_table_column_types,
        )