"""Read the ``lease_interval_types`` table from Postgres via Spark JDBC.

The Postgres JDBC driver jar must be on the Spark driver/executor classpath
(e.g. via SPARK_CLASSPATH or ``--jars`` to spark-submit); the commented-out
configuration below shows how to wire it in explicitly if needed.
"""
import os

from pyspark import SparkConf, SparkContext
from pyspark.sql import DataFrameReader, SQLContext

# Build the Spark configuration.  Classpath wiring is left commented out
# because the driver jar is assumed to already be on the classpath.
# sparkClassPath = os.getenv('SPARK_CLASSPATH', r'C:\tools\postgresql-42.2.5.jar')
# print( sparkClassPath )
conf = SparkConf()
# conf.setAppName('application')
# conf.set('spark.jars', 'file:%s' % sparkClassPath)
# conf.set('spark.executor.extraClassPath', sparkClassPath)
# conf.set('spark.driver.extraClassPath', sparkClassPath)
# Uncomment and adjust the address below to use a cluster on a different IP:
# conf.set('spark.master', 'spark://127.0.0.1:7077')

sc = SparkContext(conf=conf)
sqlContext = SQLContext(sc)

# Connection details.  NOTE(review): the credentials are masked placeholders;
# prefer sourcing real values from the environment rather than hard-coding.
url = 'postgresql://192.168.175.3:5432/entrata44_dev'
properties = {'user': '******', 'password': '******'}

# DataFrameReader.jdbc expects a full 'jdbc:postgresql://...' URL.
df = DataFrameReader(sqlContext).jdbc(
    url='jdbc:%s' % url,
    table='lease_interval_types',
    properties=properties,
)
df.show()
"""Read the TWSE daily closing-price table from Postgres via Spark JDBC and
demonstrate querying a global temporary view.

Bug fixes versus the original snippet:
  * ``spark`` was referenced before any session existed (NameError on the
    very first statement) — a SparkSession is now created from the
    SparkContext before it is used, and the global-temp-view queries are
    moved after session creation.
  * ``PG_PORT`` / ``PG_USER`` / ``PG_PWD`` were never defined (NameError) —
    they are now read from the environment with sensible defaults.
"""
import os

from pyspark import SparkConf, SparkContext
from pyspark.sql import DataFrameReader, Row, SparkSession, SQLContext

# Make the Postgres JDBC driver jar visible to both driver and executors.
os.environ['SPARK_CLASSPATH'] = "/home/david/Downloads/postgresql-42.2.1.jar"
sparkClassPath = os.getenv('SPARK_CLASSPATH')

conf = SparkConf()
conf.setAppName('application')
conf.set('spark.jars', 'file:%s' % sparkClassPath)
conf.set('spark.executor.extraClassPath', sparkClassPath)
conf.set('spark.driver.extraClassPath', sparkClassPath)

sc = SparkContext(conf=conf)
sqlContext = SQLContext(sc)
# Fix: `spark` was previously used without ever being defined.
spark = SparkSession(sc)

# Connection settings come from the environment instead of undefined names.
PG_PORT = os.getenv('PG_PORT', '5432')
PG_USER = os.getenv('PG_USER', '')
PG_PWD = os.getenv('PG_PWD', '')

url = f'postgresql://localhost:{PG_PORT}/tse'
properties = {'user': PG_USER, 'password': PG_PWD}

# Table name is a quoted (Chinese) identifier: TWSE "daily closing quotes
# (all, excluding warrants and callable bull/bear contracts)".
df = DataFrameReader(sqlContext).jdbc(
    url='jdbc:%s' % url,
    table='"每日收盤行情(全部(不含權證、牛熊證))"',
    properties=properties,
)
df.printSchema()
df.show(truncate=False)

# Global temporary views are tied to the system-preserved database
# `global_temp` and are visible across sessions.  NOTE(review): this assumes
# a view named `people` was registered earlier in the application (e.g. via
# some_df.createGlobalTempView("people")) — confirm before running.
spark.sql("SELECT * FROM global_temp.people").show()
# Global temporary view is cross-session:
spark.newSession().sql("SELECT * FROM global_temp.people").show()