def __init__(self, implementation, implementation_version, language, language_version,
             language_info, session_language, user_code_parser=None, **kwargs):
    """Initialize the Spark kernel: Jupyter identity fields, logging,
    display, code parser, and (outside tests) the magics extension.

    session_language selects the language used when naming the logger
    and configuring the kernel's language behavior.
    """
    # Identity attributes every Jupyter kernel is required to expose.
    self.implementation = implementation
    self.implementation_version = implementation_version
    self.language = language
    self.language_version = language_version
    self.language_info = language_info

    self.session_language = session_language

    super(SparkKernelBase, self).__init__(**kwargs)

    self.logger = SparkLog(u"{}_jupyter_kernel".format(self.session_language))
    self._fatal_error = None
    self.ipython_display = IpythonDisplay()

    # Use the injected parser when provided, otherwise the default one.
    self.user_code_parser = UserCodeParser() if user_code_parser is None else user_code_parser

    # Disable warnings for test env in HDI
    requests.packages.urllib3.disable_warnings()

    # Side-effecting setup (loading magics, switching language, auto-viz
    # registration) is skipped when constructed with testing=True.
    if not kwargs.get("testing", False):
        self._load_magics_extension()
        self._change_language()
        if conf.use_auto_viz():
            self._register_auto_viz()
def __init__(self, shell, data=None, widget=None):
    """Initialize the Linkis magic.

    Parameters
    ----------
    shell : the IPython shell passed through to the Magics base class.
    data : optional payload stored on the instance for later use.
    widget : accepted for backward compatibility; currently unused
        (the previous widget wiring was disabled).
    """
    super(LinkisMagic, self).__init__(shell)
    self.ipython_display = IpythonDisplay()
    self.data = data
    self.linkis_client = LinkisClient()
def test_stdout_flush():
    """IpythonDisplay.write must flush stdout exactly once per call."""
    ipython_shell = MagicMock()
    ipython_display = IpythonDisplay()
    ipython_display._ipython_shell = ipython_shell
    # Save and restore the real stdout: the original test replaced the
    # global sys.stdout with a MagicMock and never put it back, leaking
    # the mock into every test that runs afterwards.
    saved_stdout = sys.stdout
    sys.stdout = MagicMock()
    try:
        ipython_display.write(u'Testing Stdout Flush è')
        assert sys.stdout.flush.call_count == 1
    finally:
        sys.stdout = saved_stdout
def test_stderr_flush():
    """IpythonDisplay.send_error must flush stderr exactly once per call."""
    ipython_shell = MagicMock()
    ipython_display = IpythonDisplay()
    ipython_display._ipython_shell = ipython_shell
    # Save and restore the real stderr: the original test replaced the
    # global sys.stderr with a MagicMock and never put it back, leaking
    # the mock into every test that runs afterwards.
    saved_stderr = sys.stderr
    sys.stderr = MagicMock()
    try:
        ipython_display.send_error(u'Testing Stderr Flush è')
        assert sys.stderr.flush.call_count == 1
    finally:
        sys.stderr = saved_stderr
def __init__(self, df, encoding, renderer=None, ipywidget_factory=None, encoding_widget=None,
             ipython_display=None, nested_widget_mode=False, spark_events=None,
             testing=False, **kwargs):
    """Build the auto-visualization widget around a pandas DataFrame.

    Collaborators (renderer, widget factory, encoding widget, display,
    events) default to fresh instances when not injected, which lets
    tests pass mocks. When nested_widget_mode is True the controls and
    output become children of this widget; otherwise they are displayed
    immediately and a first render is triggered.
    """
    # NOTE(review): asserts are stripped under `python -O`; explicit
    # exceptions would be more robust, but behavior is left unchanged.
    assert encoding is not None
    assert df is not None
    assert type(df) is pd.DataFrame
    kwargs['orientation'] = 'vertical'
    # Skip the ipywidgets base initializer under test so no real widget
    # machinery (comms, frontends) is needed.
    if not testing:
        super(AutoVizWidget, self).__init__((), **kwargs)
    self.df = self._convert_to_displayable_dataframe(df)
    if renderer is None:
        renderer = GraphRenderer()
    self.renderer = renderer
    if ipywidget_factory is None:
        ipywidget_factory = IpyWidgetFactory()
    self.ipywidget_factory = ipywidget_factory
    # Must come after self.df is set: the encoding widget reads the
    # converted dataframe.
    if encoding_widget is None:
        encoding_widget = EncodingWidget(self.df, encoding, self.on_render_viz)
    self.encoding_widget = encoding_widget
    if ipython_display is None:
        ipython_display = IpythonDisplay()
    self.ipython_display = ipython_display
    self.encoding = encoding
    # Widget that will become the only child of AutoVizWidget
    self.widget = self.ipywidget_factory.get_vbox()
    # Create output area
    self.to_display = self.ipywidget_factory.get_output()
    self.to_display.width = "800px"
    self.output = self.ipywidget_factory.get_hbox()
    self.output.children = [self.to_display]
    self.controls = self._create_controls_widget()
    if spark_events is None:
        spark_events = AutoVizEvents()
    self._spark_events = spark_events
    if nested_widget_mode:
        # Embed controls/output inside this widget; the caller displays us.
        self.widget.children = [self.controls, self.output]
        self.children = [self.widget]
    else:
        # Standalone mode: show everything now and render the first chart.
        self.ipython_display.display(self.controls)
        self.ipython_display.display(self.to_display)
        self.on_render_viz()
def __init__(self, shell, data=None, spark_events=None):
    """Initialize the shared Spark magics base: logger, display,
    controller, and a library-loaded telemetry event."""
    # The Magics base class constructor must run first.
    super(SparkMagicBase, self).__init__(shell)
    self.logger = SparkLog(u"SparkMagics")
    self.ipython_display = IpythonDisplay()
    self.spark_controller = SparkController(self.ipython_display)
    self.logger.debug("Initialized spark magics.")

    # Allow tests to inject an events recorder; default to the real one.
    events = SparkEvents() if spark_events is None else spark_events
    events.emit_library_loaded_event()
def __init__(self, implementation, implementation_version, language, language_version,
             language_info, **kwargs):
    """Initialize the HDFS kernel: Jupyter identity fields, error state,
    display helper, and the HDFS session manager."""
    # Identity attributes every Jupyter kernel is required to expose.
    (self.implementation,
     self.implementation_version,
     self.language,
     self.language_version,
     self.language_info) = (implementation, implementation_version,
                            language, language_version, language_info)

    super(HdfsKernelBase, self).__init__(**kwargs)

    self._fatal_error = None
    self.ipython_display = IpythonDisplay()
    self.session_manager = HdfsSessionManager()
def __init__(self, spark_controller, ipywidget_factory=None, ipython_display=None,
             nested_widget_mode=False, testing=False, **kwargs):
    """Initialize the base menu widget: collaborators default to fresh
    instances, and in standalone mode the widget renders itself."""
    kwargs['orientation'] = 'vertical'
    # Skip the ipywidgets base initializer under test so no real widget
    # machinery is required.
    if not testing:
        super(AbstractMenuWidget, self).__init__((), **kwargs)

    self.spark_controller = spark_controller
    self.ipywidget_factory = IpyWidgetFactory() if ipywidget_factory is None else ipywidget_factory
    self.ipython_display = IpythonDisplay() if ipython_display is None else ipython_display
    self.children = []

    # Standalone mode displays immediately; nested mode waits for a parent.
    if not nested_widget_mode:
        self._repr_html_()
import subprocess import re import random import urllib3.util from hdijupyterutils.ipythondisplay import IpythonDisplay import ipyvuetify as v from google.cloud import dataproc_v1beta2 import google.auth.transport.requests from google.auth import _cloud_sdk from google.auth.exceptions import UserAccessTokenError from google.oauth2.credentials import Credentials from sparkmagic.auth.customauth import Authenticator from sparkmagic.livyclientlib.exceptions import BadUserConfigurationException import googledataprocauthenticator.utils.constants as constants ipython_display = IpythonDisplay() def list_credentialed_user_accounts(): """Load all of user's credentialed accounts with ``gcloud auth list`` command. Returns: Sequence[str]: each value is a str of one of the users credentialed accounts Raises: sparkmagic.livyclientlib.BadUserConfigurationException: if gcloud cannot be invoked """ accounts_json = "" if os.name == "nt": command = constants.CLOUD_SDK_WINDOWS_COMMAND else:
def __init__(self, display=None):
    """Store the display helper, creating a default IpythonDisplay
    when the caller does not inject one."""
    self.display = IpythonDisplay() if display is None else display