def start(self, create_sql_context=True):
    """Start the session against actual livy server."""
    self._spark_events.emit_session_creation_start_event(self.guid, self.kind)
    try:
        response = self._http_client.post_session(self.properties)
        self.id = response[u"id"]
        self.status = str(response[u"state"])

        self.ipython_display.writeln(u"Creating SparkContext as 'sc'")

        # Startup is bounded by livy_session_startup_timeout_seconds();
        # re-raise the timeout with a message identifying this session.
        try:
            self.wait_for_idle(conf.livy_session_startup_timeout_seconds())
        except LivyClientTimeoutException:
            raise LivyClientTimeoutException(
                u"Session {} did not start up in {} seconds.".format(
                    self.id, conf.livy_session_startup_timeout_seconds()))

        if create_sql_context:
            self.create_sql_context()
    except Exception as e:
        # Report the failed creation (with the exception class/message), then
        # let the caller see the original error.
        self._spark_events.emit_session_creation_end_event(
            self.guid, self.kind, self.id, self.status,
            False, e.__class__.__name__, str(e))
        raise
    else:
        self._spark_events.emit_session_creation_end_event(
            self.guid, self.kind, self.id, self.status, True, "", "")
def start(self, create_sql_context=True):
    """Start the session against actual livy server."""
    self._spark_events.emit_session_creation_start_event(self.guid, self.kind)
    try:
        session = self._http_client.post_session(self.properties)
        self.id = session[u"id"]
        self.status = str(session[u"state"])

        self.ipython_display.writeln(u"Creating SparkContext as 'sc'")

        # Wait for the session to become idle, bounded by the configured
        # startup timeout; annotate the timeout error with the session id.
        try:
            self.wait_for_idle(conf.livy_session_startup_timeout_seconds())
        except LivyClientTimeoutException:
            raise LivyClientTimeoutException(
                u"Session {} did not start up in {} seconds.".format(
                    self.id, conf.livy_session_startup_timeout_seconds()))

        if create_sql_context:
            self.create_sql_context()
    except Exception as e:
        # Emit a failure end-event before propagating the original exception.
        self._spark_events.emit_session_creation_end_event(
            self.guid, self.kind, self.id, self.status,
            False, e.__class__.__name__, str(e))
        raise
    else:
        self._spark_events.emit_session_creation_end_event(
            self.guid, self.kind, self.id, self.status, True, "", "")
def start(self):
    """Start the session against actual livy server."""
    self._spark_events.emit_session_creation_start_event(self.guid, self.kind)
    self._printed_resource_warning = False
    try:
        created = self._http_client.post_session(self.properties)
        self.id = created[u"id"]
        self.status = str(created[u"state"])

        self.ipython_display.writeln(u"Starting Spark application")

        # Start heartbeat thread to keep Livy interactive session alive.
        self._start_heartbeat_thread()

        # Bounded wait for the session to reach idle.
        try:
            self.wait_for_idle(conf.livy_session_startup_timeout_seconds())
        except LivyClientTimeoutException:
            raise LivyClientTimeoutException(
                u"Session {} did not start up in {} seconds.".format(
                    self.id, conf.livy_session_startup_timeout_seconds()))

        self.ipython_display.html(get_sessions_info_html([self], self.id))

        # Probe for the Spark 2.x entry point first, then fall back to the
        # older sqlContext; fail if neither exists in the remote session.
        success, out, mimetype = Command("spark").execute(self)
        if success:
            self.ipython_display.writeln(u"SparkSession available as 'spark'.")
            self.sql_context_variable_name = "spark"
        else:
            success, out, mimetype = Command("sqlContext").execute(self)
            if not success:
                raise SqlContextNotFoundException(
                    u"Neither SparkSession nor HiveContext/SqlContext is available."
                )
            self.ipython_display.writeln(u"SparkContext available as 'sc'.")
            if "hive" in out.lower():
                self.ipython_display.writeln(
                    u"HiveContext available as 'sqlContext'.")
            else:
                self.ipython_display.writeln(
                    u"SqlContext available as 'sqlContext'.")
            self.sql_context_variable_name = "sqlContext"
    except Exception as e:
        self._spark_events.emit_session_creation_end_event(
            self.guid, self.kind, self.id, self.status,
            False, e.__class__.__name__, str(e))
        raise
    else:
        self._spark_events.emit_session_creation_end_event(
            self.guid, self.kind, self.id, self.status, True, "", "")
def start(self):
    """Start the session against actual livy server."""
    self._spark_events.emit_session_creation_start_event(self.guid, self.kind)
    self._printed_resource_warning = False
    try:
        info = self._http_client.post_session(self.properties)
        self.id = info[u"id"]
        self.status = str(info[u"state"])

        self.ipython_display.writeln(u"Starting Spark application")

        # Start heartbeat thread to keep Livy interactive session alive.
        self._start_heartbeat_thread()

        # Wait (bounded by the configured timeout) for the session to idle.
        try:
            self.wait_for_idle(conf.livy_session_startup_timeout_seconds())
        except LivyClientTimeoutException:
            raise LivyClientTimeoutException(
                u"Session {} did not start up in {} seconds.".format(
                    self.id, conf.livy_session_startup_timeout_seconds()))

        self.ipython_display.html(get_sessions_info_html([self], self.id))

        # Try the SparkSession entry point first; otherwise fall back to
        # sqlContext, and raise if neither is available remotely.
        success, out = Command("spark").execute(self)
        if success:
            self.ipython_display.writeln(u"SparkSession available as 'spark'.")
            self.sql_context_variable_name = "spark"
        else:
            success, out = Command("sqlContext").execute(self)
            if not success:
                raise SqlContextNotFoundException(
                    u"Neither SparkSession nor HiveContext/SqlContext is available.")
            self.ipython_display.writeln(u"SparkContext available as 'sc'.")
            if "hive" in out.lower():
                self.ipython_display.writeln(
                    u"HiveContext available as 'sqlContext'.")
            else:
                self.ipython_display.writeln(
                    u"SqlContext available as 'sqlContext'.")
            self.sql_context_variable_name = "sqlContext"
    except Exception as e:
        self._spark_events.emit_session_creation_end_event(
            self.guid, self.kind, self.id, self.status,
            False, e.__class__.__name__, str(e))
        raise
    else:
        self._spark_events.emit_session_creation_end_event(
            self.guid, self.kind, self.id, self.status, True, "", "")
def test_configuration_override_fallback_to_password():
    """Credentials with only 'password' set should pass through unchanged."""
    kpc = {'username': '******', 'password': '******', 'url': 'L', 'auth': NO_AUTH}
    creds_key = conf.kernel_python_credentials.__name__
    timeout_key = conf.livy_session_startup_timeout_seconds.__name__

    conf.override_all({creds_key: kpc})
    conf.override(timeout_key, 1)

    # Both overrides should be visible in the backing dict and via accessors.
    assert_equals(conf.d, {creds_key: kpc, timeout_key: 1})
    assert_equals(conf.livy_session_startup_timeout_seconds(), 1)
    assert_equals(conf.base64_kernel_python_credentials(), kpc)
def test_configuration_override_work_with_empty_password():
    """An empty base64_password must resolve to an empty plain password."""
    kpc = {
        'username': '******',
        'base64_password': '',
        'password': '',
        'url': '',
        'auth': AUTH_BASIC
    }
    creds_key = conf.kernel_python_credentials.__name__
    timeout_key = conf.livy_session_startup_timeout_seconds.__name__

    conf.override_all({creds_key: kpc})
    conf.override(timeout_key, 1)

    # The raw dict holds both overrides...
    assert_equals(conf.d, {creds_key: kpc, timeout_key: 1})
    assert_equals(conf.livy_session_startup_timeout_seconds(), 1)
    # ...and the resolved credentials drop base64_password, keeping '' as password.
    assert_equals(conf.base64_kernel_python_credentials(), {
        'username': '******',
        'password': '',
        'url': '',
        'auth': AUTH_BASIC
    })
def start(self):
    """Start the session against actual livy server."""
    self._spark_events.emit_session_creation_start_event(
        self.guid, self.kind)
    self._printed_resource_warning = False
    try:
        # Derive the Jupyter kernel id from the kernel connection file name
        # (presumably "kernel-<id>.json" — TODO confirm) so the Spark driver
        # and executors can be tagged with this notebook's kernel id.
        connection_file = os.path.basename(ipykernel.get_connection_file())
        if 'kernel' in connection_file:
            kernel_id = connection_file.split('-', 1)[1].split('.')[0]
            # Expose the kernel id to both the YARN application master and
            # the executors through the Spark conf environment settings.
            self.properties['conf'][
                'spark.yarn.appMasterEnv.HOPSWORKS_KERNEL_ID'] = kernel_id
            self.properties['conf'][
                'spark.executorEnv.HOPSWORKS_KERNEL_ID'] = kernel_id
            # Only attach the Jupyter configuration when hops.util has
            # already been imported (i.e. running inside Hopsworks).
            if 'hops.util' in sys.modules:
                util.attach_jupyter_configuration_to_notebook(kernel_id)
        r = self._http_client.post_session(self.properties)
        self.id = r[u"id"]
        self.status = str(r[u"state"])
        self.ipython_display.writeln(u"Starting Spark application")
        # Start heartbeat thread to keep Livy interactive session alive.
        self._start_heartbeat_thread()
        # We wait for livy_session_startup_timeout_seconds() for the session to start up.
        try:
            self.wait_for_idle(conf.livy_session_startup_timeout_seconds())
        except LivyClientTimeoutException:
            raise LivyClientTimeoutException(
                u"Session {} did not start up in {} seconds.".format(
                    self.id, conf.livy_session_startup_timeout_seconds()))
        html = get_sessions_info_html([self], self.id)
        self.ipython_display.html(html)
        # Probe the remote session: prefer SparkSession ('spark'), then fall
        # back to the legacy 'sqlContext'; raise if neither is defined.
        command = Command("spark")
        (success, out, mimetype) = command.execute(self)
        if success:
            self.ipython_display.writeln(
                u"SparkSession available as 'spark'.")
            self.sql_context_variable_name = "spark"
        else:
            command = Command("sqlContext")
            (success, out, mimetype) = command.execute(self)
            if success:
                self.ipython_display.writeln(
                    u"SparkContext available as 'sc'.")
                # The probe output distinguishes HiveContext from SqlContext.
                if ("hive" in out.lower()):
                    self.ipython_display.writeln(
                        u"HiveContext available as 'sqlContext'.")
                else:
                    self.ipython_display.writeln(
                        u"SqlContext available as 'sqlContext'.")
                self.sql_context_variable_name = "sqlContext"
            else:
                raise SqlContextNotFoundException(
                    u"Neither SparkSession nor HiveContext/SqlContext is available."
                )
    except Exception as e:
        # Record the failed session creation, then propagate the error.
        self._spark_events.emit_session_creation_end_event(
            self.guid, self.kind, self.id, self.status, False,
            e.__class__.__name__, str(e))
        raise
    else:
        self._spark_events.emit_session_creation_end_event(
            self.guid, self.kind, self.id, self.status, True, "", "")