def test_set_if_missing(self):
    """setIfMissing sets a key only when it is not already present.

    Verifies both directions of the contract:
      * an existing key keeps its original value, and
      * a missing key is set to the supplied value.
    """
    conf = SparkConf()
    conf.set(self.RANDOM_KEY, self.RANDOM_VALUE)
    # Key already present: setIfMissing must NOT overwrite the value.
    conf.setIfMissing(self.RANDOM_KEY, self.RANDOM_VALUE2)
    # assertEqual, not the deprecated assertEquals alias (removed in Python 3.12).
    self.assertEqual(conf.get(self.RANDOM_KEY), self.RANDOM_VALUE)
    # Key absent: setIfMissing must set it.
    conf.setIfMissing(self.RANDOM_KEY2, self.RANDOM_VALUE2)
    self.assertEqual(conf.get(self.RANDOM_KEY2), self.RANDOM_VALUE2)
# -*- coding: utf-8 -*-
"""Smoke-test script exercising the dummy_spark public surface end to end."""
import os
import random

from dummy_spark import SparkContext, SparkConf
from dummy_spark.sql import SQLContext

__author__ = 'willmcginnis'

# Build a conf and set a property (setting it has no real effect here).
sconf = SparkConf()
sconf.set('spark.executor.extraClassPath', 'foo')

# Construct the context from that conf.
sc = SparkContext(master='', conf=sconf)

# The log level is accepted but ignored by the dummy implementation.
sc.setLogLevel('INFO')

# A SQLContext can be created, though nothing is implemented on it yet.
sqlctx = SQLContext(sc)

# addPyFile just appends the given path to sys.path.
sc.addPyFile(os.path.dirname(__file__))

# Hadoop configuration writes disappear into the ether.
sc._jsc.hadoopConfiguration().set('foo', 'bar')