def create_kernel_json(self, location, interpreter):
    python_lib_contents = listdir("{0}/python/lib".format(self.spark_home))
    py4j_zip = ''  # fall back to an empty name so PYTHONPATH can still be built if py4j is missing
    try:
        py4j_zip = list(filter(
            lambda filename: "py4j" in filename, python_lib_contents))[0]
    except IndexError:
        self.log.warn('Unable to find py4j, installing without PySpark support.')

    kernel_spec = KernelSpec()
    interpreter_lang = INTERPRETER_LANGUAGES[interpreter]
    kernel_spec.display_name = '{} - {}'.format(self.kernel_name, interpreter)
    kernel_spec.language = interpreter_lang
    kernel_spec.argv = [os.path.join(location, 'bin', 'run.sh'), '--profile', '{connection_file}']
    kernel_spec.env = {
        DEFAULT_INTERPRETER : interpreter,
        # The SPARK_OPTS values are stored in TOREE_SPARK_OPTS to allow the two values to be merged
        # when kernels are run. This allows values to be specified during install, but also at runtime.
        TOREE_SPARK_OPTS : self.spark_opts,
        TOREE_OPTS : self.toree_opts,
        SPARK_HOME : self.spark_home,
        PYTHON_PATH : '{0}/python:{0}/python/lib/{1}'.format(self.spark_home, py4j_zip),
        PYTHON_EXEC : self.python_exec
    }

    kernel_json_file = os.path.join(location, 'kernel.json')
    self.log.debug('Creating kernel json file for {}'.format(interpreter))
    with open(kernel_json_file, 'w+') as f:
        json.dump(kernel_spec.to_dict(), f, indent=2)
def create_kernel_json(self, location, interpreter):
    python_lib_contents = listdir("{0}/python/lib".format(self.spark_home))
    py4j_zip = ''  # fall back to an empty name so PYTHONPATH can still be built if py4j is missing
    try:
        py4j_zip = list(filter(
            lambda filename: "py4j" in filename, python_lib_contents))[0]
    except IndexError:
        self.log.warn('Unable to find py4j, installing without PySpark support.')

    kernel_spec = KernelSpec()
    interpreter_lang = INTERPRETER_LANGUAGES[interpreter]
    kernel_spec.display_name = '{} - {}'.format(self.kernel_name, interpreter)
    kernel_spec.language = interpreter_lang
    kernel_spec.argv = [os.path.join(location, 'bin', 'run.sh'), '--profile', '{connection_file}']
    kernel_spec.env = {
        DEFAULT_INTERPRETER : interpreter,
        # The SPARK_OPTS values are stored in TOREE_SPARK_OPTS to allow the two values to be merged
        # when kernels are run. This allows values to be specified during install, but also at runtime.
        TOREE_SPARK_OPTS : self.spark_opts,
        TOREE_OPTS : self.toree_opts,
        SPARK_HOME : self.spark_home,
        HADOOP_CONF_DIR : self.hadoop_conf_dir,
        SPARK_CONF_DIR : self.spark_conf_dir,
        PYTHON_PATH : '{0}/python:{0}/python/lib/{1}'.format(self.spark_home, py4j_zip),
        PYTHON_EXEC : self.python_exec
    }

    kernel_json_file = os.path.join(location, 'kernel.json')
    self.log.debug('Creating kernel json file for {}'.format(interpreter))
    with open(kernel_json_file, 'w+') as f:
        json.dump(kernel_spec.to_dict(), f, indent=2)
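# For reference, a sketch of roughly what the variant above would write to kernel.json,
# i.e. the dict returned by kernel_spec.to_dict() and passed to json.dump. Every name and
# value here is an illustrative assumption: the env keys depend on the module-level
# constants, the paths depend on the local Spark/Toree install, and newer jupyter_client
# versions may add keys such as "interrupt_mode" or "metadata".
EXAMPLE_KERNEL_JSON = {
    'display_name': 'Apache Toree - Scala',        # '{kernel_name} - {interpreter}'
    'language': 'scala',                           # INTERPRETER_LANGUAGES[interpreter]
    'argv': ['/usr/local/share/jupyter/kernels/apache_toree_scala/bin/run.sh',
             '--profile', '{connection_file}'],
    'env': {
        'DEFAULT_INTERPRETER': 'Scala',
        '__TOREE_SPARK_OPTS__': '',                # self.spark_opts
        '__TOREE_OPTS__': '',                      # self.toree_opts
        'SPARK_HOME': '/opt/spark',
        'HADOOP_CONF_DIR': '/etc/hadoop/conf',
        'SPARK_CONF_DIR': '/opt/spark/conf',
        'PYTHONPATH': '/opt/spark/python:/opt/spark/python/lib/py4j-0.10.9-src.zip',
        'PYTHON_EXEC': 'python',
    },
}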
def create_kernel_json(self, location, interpreter):
    kernel_spec = KernelSpec()
    interpreter_lang = INTERPRETER_LANGUAGES[interpreter]
    kernel_spec.display_name = '{} - {}'.format(self.kernel_name, interpreter)
    kernel_spec.language = interpreter_lang
    kernel_spec.argv = [os.path.join(location, 'bin', 'run.sh'), '--profile', '{connection_file}']
    kernel_spec.env = {
        DEFAULT_INTERPRETER : interpreter,
        # The SPARK_OPTS values are stored in TOREE_SPARK_OPTS to allow the two values to be merged
        # when kernels are run. This allows values to be specified during install, but also at runtime.
        TOREE_SPARK_OPTS : self.spark_opts,
        SPARK_HOME : self.spark_home,
        PYTHON_PATH : '{0}/python:{0}/python/lib/py4j-0.8.2.1-src.zip'.format(self.spark_home)
    }

    kernel_json_file = os.path.join(location, 'kernel.json')
    self.log.debug('Creating kernel json file for {}'.format(interpreter))
    with open(kernel_json_file, 'w+') as f:
        json.dump(kernel_spec.to_dict(), f, indent=2)
def create_kernel_json(self, location, interpreter):
    kernel_spec = KernelSpec()
    interpreter_lang = INTERPRETER_LANGUAGES[interpreter]
    kernel_spec.display_name = '{} - {}'.format(self.kernel_name, interpreter)
    kernel_spec.language = interpreter_lang
    kernel_spec.argv = [os.path.join(location, 'bin', 'run.sh'), '--profile', '{connection_file}']
    kernel_spec.env = {
        DEFAULT_INTERPRETER : interpreter,
        # The SPARK_OPTS values are stored in TOREE_SPARK_OPTS to allow the two values to be merged
        # when kernels are run. This allows values to be specified during install, but also at runtime.
        TOREE_SPARK_OPTS : self.spark_opts,
        TOREE_OPTS : self.toree_opts,
        SPARK_HOME : self.spark_home,
    }

    kernel_json_file = os.path.join(location, 'kernel.json')
    self.log.debug('Creating kernel json file for {}'.format(interpreter))
    with open(kernel_json_file, 'w+') as f:
        json.dump(kernel_spec.to_dict(), f, indent=2)
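# A minimal sketch of the module-level scaffolding all four variants assume. KernelSpec
# comes from jupyter_client; the constant values and the INTERPRETER_LANGUAGES map below
# are assumptions for illustration, not the installer's definitive definitions.
import json
import os
from os import listdir

from jupyter_client.kernelspec import KernelSpec  # carries display_name, language, argv, env; serialized via to_dict()

# Environment-variable names written into kernel.json (assumed values).
DEFAULT_INTERPRETER = 'DEFAULT_INTERPRETER'
TOREE_SPARK_OPTS = '__TOREE_SPARK_OPTS__'
TOREE_OPTS = '__TOREE_OPTS__'
SPARK_HOME = 'SPARK_HOME'
HADOOP_CONF_DIR = 'HADOOP_CONF_DIR'
SPARK_CONF_DIR = 'SPARK_CONF_DIR'
PYTHON_PATH = 'PYTHONPATH'
PYTHON_EXEC = 'PYTHON_EXEC'

# Maps the interpreter name selected at install time to a Jupyter language id (assumed).
INTERPRETER_LANGUAGES = {
    'Scala': 'scala',
    'PySpark': 'python',
    'SparkR': 'r',
    'SQL': 'sql',
}

# The installer would typically call create_kernel_json(location, interpreter) once per
# selected interpreter, writing one kernel.json into each kernel directory.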