def test_build_jdbc_arguments(self):
    # Given
    hook = SparkJDBCHook(**self._config)

    # When
    cmd = hook._build_jdbc_application_arguments(hook._resolve_jdbc_connection())

    # Then
    expected_jdbc_arguments = [
        '-cmdType', 'spark_to_jdbc',
        '-url', 'jdbc:postgresql://localhost:5432/default',
        '-user', 'user',
        '-password', 'supersecret',
        '-metastoreTable', 'hiveMcHiveFace',
        '-jdbcTable', 'tableMcTableFace',
        '-jdbcDriver', 'org.postgresql.Driver',
        '-batchsize', '100',
        '-fetchsize', '200',
        '-numPartitions', '10',
        '-partitionColumn', 'columnMcColumnFace',
        '-lowerBound', '10',
        '-upperBound', '20',
        '-saveMode', 'append',
        '-saveFormat', 'parquet',
        '-createTableColumnTypes', 'columnMcColumnFace INTEGER(100), name CHAR(64),'
                                   'comments VARCHAR(1024)'
    ]
    self.assertEqual(expected_jdbc_arguments, cmd)
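
# The self._config fixture is defined outside this excerpt. Below is a
# plausible reconstruction from expected_jdbc_arguments above; the keyword
# names are assumptions mirroring the hook's flag names and are not
# confirmed by this excerpt (hence the hypothetical _config_sketch name).
_config_sketch = {
    'cmd_type': 'spark_to_jdbc',
    'jdbc_conn_id': 'jdbc-default',
    'metastore_table': 'hiveMcHiveFace',
    'jdbc_table': 'tableMcTableFace',
    'jdbc_driver': 'org.postgresql.Driver',
    'batch_size': 100,
    'fetch_size': 200,
    'num_partitions': 10,
    'partition_column': 'columnMcColumnFace',
    'lower_bound': '10',
    'upper_bound': '20',
    'save_mode': 'append',
    'save_format': 'parquet',
    'create_table_column_types': 'columnMcColumnFace INTEGER(100), name CHAR(64),'
                                 'comments VARCHAR(1024)',
}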

def test_resolve_jdbc_connection(self):
    # Given
    hook = SparkJDBCHook(jdbc_conn_id='jdbc-default')
    expected_connection = {
        'url': 'localhost:5432',
        'schema': 'default',
        'conn_prefix': 'jdbc:postgresql://',
        'user': 'user',
        'password': 'supersecret'
    }

    # When
    connection = hook._resolve_jdbc_connection()

    # Then
    self.assertEqual(connection, expected_connection)
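
# The 'jdbc-default' connection is registered outside this excerpt. A minimal
# sketch of a setup helper that would satisfy both tests, assuming the
# credentials asserted in test_build_jdbc_arguments and assuming the hook
# reads conn_prefix from the connection's extra JSON. This is a hypothetical
# helper, not part of the original suite.
def _example_set_up(self):
    from airflow.models import Connection
    from airflow.utils import db

    # Register the connection the hook resolves via jdbc_conn_id.
    db.merge_conn(
        Connection(
            conn_id='jdbc-default',
            conn_type='postgres',
            host='localhost',
            port=5432,
            schema='default',
            login='user',
            password='supersecret',
            extra='{"conn_prefix": "jdbc:postgresql://"}',
        )
    )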

def test_build_jdbc_arguments_invalid(self):
    # Given
    hook = SparkJDBCHook(**self._invalid_config)

    # When / Then: building arguments from an invalid config should raise
    with self.assertRaises(Exception):
        hook._build_jdbc_application_arguments(hook._resolve_jdbc_connection())