Example #1
    def test_build_jdbc_arguments(self):
        # Given
        hook = SparkJDBCHook(**self._config)

        # When
        cmd = hook._build_jdbc_application_arguments(hook._resolve_jdbc_connection())

        # Then
        expected_jdbc_arguments = [
            '-cmdType', 'spark_to_jdbc',
            '-url', 'jdbc:postgresql://localhost:5432/default',
            '-user', 'user',
            '-password', 'supersecret',
            '-metastoreTable', 'hiveMcHiveFace',
            '-jdbcTable', 'tableMcTableFace',
            '-jdbcDriver', 'org.postgresql.Driver',
            '-batchsize', '100',
            '-fetchsize', '200',
            '-numPartitions', '10',
            '-partitionColumn', 'columnMcColumnFace',
            '-lowerBound', '10',
            '-upperBound', '20',
            '-saveMode', 'append',
            '-saveFormat', 'parquet',
            '-createTableColumnTypes', 'columnMcColumnFace INTEGER(100), name CHAR(64), '
                                       'comments VARCHAR(1024)'
        ]
        self.assertEqual(expected_jdbc_arguments, cmd)
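The test above builds the hook from a self._config fixture that is not shown in this snippet. The sketch below is a hypothetical reconstruction of that fixture, inferred purely from the expected arguments above; every key name and value is an assumption, and the JDBC url, user, and password would come from the Airflow connection referenced by jdbc_conn_id rather than from the hook's keyword arguments.

    # Hypothetical fixture, reconstructed from the expected arguments above;
    # the real test defines its own _config, and the key names here are assumptions.
    _config = {
        'cmd_type': 'spark_to_jdbc',
        'jdbc_conn_id': 'jdbc-default',  # assumed connection id; supplies url, user, password
        'jdbc_table': 'tableMcTableFace',
        'jdbc_driver': 'org.postgresql.Driver',
        'metastore_table': 'hiveMcHiveFace',
        'batch_size': 100,
        'fetch_size': 200,
        'num_partitions': 10,
        'partition_column': 'columnMcColumnFace',
        'lower_bound': '10',
        'upper_bound': '20',
        'save_mode': 'append',
        'save_format': 'parquet',
        'create_table_column_types': 'columnMcColumnFace INTEGER(100), name CHAR(64), '
                                     'comments VARCHAR(1024)',
    }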
Example #2
    def test_build_jdbc_arguments_invalid(self):
        # Given
        hook = SparkJDBCHook(**self._invalid_config)

        # Expect Exception: building the arguments from the invalid config should raise.
        # Exception is deliberately broad here, since the concrete type raised by the
        # hook's validation is not shown in this snippet.
        with self.assertRaises(Exception):
            hook._build_jdbc_application_arguments(hook._resolve_jdbc_connection())