コード例 #1
0
    def run(self):
        try:
            properties_json = self.properties.value
            if properties_json.strip() != "":
                conf.override(conf.session_configs.__name__, json.loads(self.properties.value))
        except ValueError as e:
            self.ipython_display.send_error("Session properties must be a valid JSON string. Error:\n{}".format(e))
            return

        endpoint = self.endpoints_dropdown_widget.value
        language = self.lang_widget.value
        alias = self.session_widget.value
        skip = False
        properties = conf.get_session_properties(language)

        try:
            self.spark_controller.add_session(alias, endpoint, skip, properties)
        except ValueError as e:
            self.ipython_display.send_error(
                """Could not add session with
name:
    {}
properties:
    {}

due to error: '{}'""".format(
                    alias, properties, e
                )
            )
            return

        self.refresh_method()
コード例 #2
0
    def run(self):
        try:
            properties_json = self.properties.value
            if properties_json.strip() != "":
                conf.override(conf.session_configs.__name__,
                              json.loads(self.properties.value))
        except ValueError as e:
            self.ipython_display.send_error(
                "Session properties must be a valid JSON string. Error:\n{}".
                format(e))
            return

        endpoint = self.endpoints_dropdown_widget.value
        language = self.lang_widget.value
        alias = self.session_widget.value
        skip = False
        properties = conf.get_session_properties(language)

        try:
            self.spark_controller.add_session(alias, endpoint, skip,
                                              properties)
        except ValueError as e:
            self.ipython_display.send_error("""Could not add session with
name:
    {}
properties:
    {}

due to error: '{}'""".format(alias, properties, e))
            return

        self.refresh_method()
コード例 #3
0
def test_configuration_override_work_with_empty_password():
    """Empty base64_password/password values are accepted; the decoded
    credentials keep the empty password and drop the base64_password key."""
    creds = {'username': '******', 'base64_password': '', 'password': '', 'url': ''}
    conf.override_all({conf.kernel_python_credentials.__name__: creds})
    conf.override(conf.status_sleep_seconds.__name__, 1)

    expected_conf = {
        conf.kernel_python_credentials.__name__: creds,
        conf.status_sleep_seconds.__name__: 1,
    }
    assert_equals(conf.d, expected_conf)
    assert_equals(conf.status_sleep_seconds(), 1)
    assert_equals(conf.base64_kernel_python_credentials(),
                  {'username': '******', 'password': '', 'url': ''})
コード例 #4
0
def test_configuration_override_fallback_to_password():
    """Without a base64_password, the decoded credentials fall back to the
    plain password and are returned unchanged."""
    creds = {'username': '******', 'password': '******', 'url': 'L', 'auth': NO_AUTH}
    conf.override_all({conf.kernel_python_credentials.__name__: creds})
    conf.override(conf.livy_session_startup_timeout_seconds.__name__, 1)

    assert_equals(conf.d, {
        conf.kernel_python_credentials.__name__: creds,
        conf.livy_session_startup_timeout_seconds.__name__: 1,
    })
    assert_equals(conf.livy_session_startup_timeout_seconds(), 1)
    # No base64_password present, so the dict comes back as-is.
    assert_equals(conf.base64_kernel_python_credentials(), creds)
コード例 #5
0
def test_configuration_override_fallback_to_password():
    """Plain-password credentials (no base64_password key) pass through
    base64_kernel_python_credentials untouched."""
    creds = {'username': '******', 'password': '******', 'url': 'L'}
    conf.override_all({conf.kernel_python_credentials.__name__: creds})
    conf.override(conf.status_sleep_seconds.__name__, 1)

    expected = {
        conf.kernel_python_credentials.__name__: creds,
        conf.status_sleep_seconds.__name__: 1,
    }
    assert_equals(conf.d, expected)
    assert_equals(conf.status_sleep_seconds(), 1)
    assert_equals(conf.base64_kernel_python_credentials(), creds)
def test_cleanup_all_sessions_on_exit():
    """With cleanup_all_sessions_on_exit enabled, the atexit hook deletes
    every registered session and announces that cleanup is enabled."""
    conf.override(conf.cleanup_all_sessions_on_exit.__name__, True)
    clients = [MagicMock(), MagicMock()]
    manager = get_session_manager()
    for index, client in enumerate(clients):
        manager.add_session("name{}".format(index), client)

    atexit._run_exitfuncs()

    for client in clients:
        client.delete.assert_called_once_with()
    manager.ipython_display.writeln.assert_called_once_with(
        u"Cleaning up livy sessions on exit is enabled")
def test_cleanup_all_sessions_on_exit_fails():
    """
    Cleanup on exit is best effort only. When cleanup fails, exception is caught and error is logged.
    """
    conf.override(conf.cleanup_all_sessions_on_exit.__name__, True)
    client0 = MagicMock()
    client1 = MagicMock()
    # The side effect is attached to client0, so name it accordingly
    # (the message previously said "client1.delete", which did not match).
    client0.delete.side_effect = Exception(
        'Mocked exception for client0.delete')
    manager = get_session_manager()
    manager.add_session("name0", client0)
    manager.add_session("name1", client1)

    atexit._run_exitfuncs()

    client0.delete.assert_called_once_with()
    # Per this test's expectation, the failure on client0 prevents client1's
    # delete from being attempted.
    client1.delete.assert_not_called()
コード例 #8
0
    def run(self):
        try:
            properties_json = self.properties.value
            if properties_json.strip() != "":
                conf.override(conf.session_configs.__name__,
                              json.loads(self.properties.value))
        except ValueError as e:
            self.ipython_display.send_error(
                "Session properties must be a valid JSON string. Error:\n{}".
                format(e))
            return

        language = self.lang_widget.value
        alias = self.session_widget.value
        skip = False
        properties = conf.get_session_properties(language)
        properties["name"] = alias
        properties["conf"][
            "spark.kubernetes.file.upload.path"] = conf.s3_bucket()
        properties["conf"][
            "spark.hadoop.fs.s3a.access.key"] = conf.s3_access_key()
        properties["conf"][
            "spark.hadoop.fs.s3a.secret.key"] = conf.s3_secret_key()

        try:
            self.spark_controller.add_session(alias, self.endpoint, skip,
                                              properties)
        except ValueError as e:
            self.ipython_display.send_error("""Could not add session with
name:
    {}
properties:
    {}

due to error: '{}'""".format(alias, properties, e))
            return

        self.refresh_method()
コード例 #9
0
def test_configuration_override_work_with_empty_password():
    """Empty passwords survive decoding: the returned credentials keep the
    empty password and auth, and drop the base64_password key."""
    creds = {
        'username': '******',
        'base64_password': '',
        'password': '',
        'url': '',
        'auth': AUTH_BASIC,
    }
    conf.override_all({conf.kernel_python_credentials.__name__: creds})
    conf.override(conf.livy_session_startup_timeout_seconds.__name__, 1)

    expected_conf = {
        conf.kernel_python_credentials.__name__: creds,
        conf.livy_session_startup_timeout_seconds.__name__: 1,
    }
    assert_equals(conf.d, expected_conf)
    assert_equals(conf.livy_session_startup_timeout_seconds(), 1)

    expected_creds = {
        'username': '******',
        'password': '',
        'url': '',
        'auth': AUTH_BASIC,
    }
    assert_equals(conf.base64_kernel_python_credentials(), expected_creds)
コード例 #10
0
def test_configuration_override_base64_password():
    """When a valid base64_password is supplied, decoding produces plain
    credentials without the base64_password key."""
    creds = {
        'username': '******',
        'password': '******',
        'base64_password': '******',
        'url': 'L',
        "auth": AUTH_BASIC,
    }
    conf.override_all({conf.kernel_python_credentials.__name__: creds})
    conf.override(conf.status_sleep_seconds.__name__, 1)

    assert_equals(conf.d, {
        conf.kernel_python_credentials.__name__: creds,
        conf.status_sleep_seconds.__name__: 1,
    })
    assert_equals(conf.status_sleep_seconds(), 1)

    decoded = {
        'username': '******',
        'password': '******',
        'url': 'L',
        'auth': AUTH_BASIC,
    }
    assert_equals(conf.base64_kernel_python_credentials(), decoded)
コード例 #11
0
def test_configuration_raise_error_for_bad_base64_password():
    """A base64_password that is not valid base64 should make credential
    decoding fail (the expected error is declared on the test elsewhere)."""
    bad_creds = {'username': '******', 'base64_password': '******', 'url': 'L'}
    conf.override_all({conf.kernel_python_credentials.__name__: bad_creds})
    conf.override(conf.status_sleep_seconds.__name__, 1)
    conf.base64_kernel_python_credentials()
コード例 #12
0
    def spark(self, line, cell="", local_ns=None):
        """Magic to execute spark remotely.

           This magic allows you to create a Livy Scala or Python session against a Livy endpoint. Every session can
           be used to execute either Spark code or SparkSQL code by executing against the SQL context in the session.
           When the SQL context is used, the result will be a Pandas dataframe of a sample of the results.

           If invoked with no subcommand, the cell will be executed against the specified session.

           Subcommands
           -----------
           info
               Display the available Livy sessions and other configurations for sessions.
           add
               Add a Livy session given a session name (-s), language (-l), and endpoint credentials.
               The -k argument, if present, will skip adding this session if it already exists.
               e.g. `%spark add -s test -l python -u https://sparkcluster.net/livy -t Kerberos -a u -p -k`
           config
               Override the livy session properties sent to Livy on session creation. All session creations will
               contain these config settings from then on.
               Expected value is a JSON key-value string to be sent as part of the Request Body for the POST /sessions
               endpoint in Livy.
               e.g. `%%spark config`
                    `{"driverMemory":"1000M", "executorCores":4}`
           run
               Run Spark code against a session.
               e.g. `%%spark -s testsession` will execute the cell code against the testsession previously created
               e.g. `%%spark -s testsession -c sql` will execute the SQL code against the testsession previously created
               e.g. `%%spark -s testsession -c sql -o my_var` will execute the SQL code against the testsession
                        previously created and store the pandas dataframe created in the my_var variable in the
                        Python environment.
           logs
               Returns the logs for a given session.
               e.g. `%spark logs -s testsession` will return the logs for the testsession previously created
           delete
               Delete a Livy session.
               e.g. `%spark delete -s defaultlivy`
           cleanup
               Delete all Livy sessions created by the notebook. No arguments required.
               e.g. `%spark cleanup`
        """
        usage = "Please look at usage of %spark by executing `%spark?`."
        user_input = line
        args = parse_argstring_or_throw(self.spark, user_input)

        subcommand = args.command[0].lower()

        # Fill in the auth type from configuration only when the user did not
        # pass one explicitly.  (Previously there was a redundant
        # `else: args.auth = args.auth` self-assignment — removed.)
        if args.auth is None:
            args.auth = conf.get_auth_value(args.user, args.password)

        # info
        if subcommand == "info":
            if args.url is not None:
                endpoint = Endpoint(args.url, args.auth, args.user, args.password)
                info_sessions = self.spark_controller.get_all_sessions_endpoint_info(endpoint)
                self._print_endpoint_info(info_sessions)
            else:
                self._print_local_info()
        # config
        elif subcommand == "config":
            conf.override(conf.session_configs.__name__, json.loads(cell))

        # conf file: credentials and session settings arrive base64-encoded
        elif subcommand == "encoded":
            language = args.language
            session = args.session
            conf_json = json.loads(base64.b64decode(args.encodedconf).decode('utf-8'))
            lang_args = conf_json['kernel_{}_credentials'.format(language)]
            url = lang_args['url']
            auth = lang_args['auth']
            username = lang_args['username']
            password = lang_args['base64_password']
            conf.override_all(conf_json)

            properties = conf.get_session_properties(language)
            if url is not None:
                endpoint = Endpoint(url, auth, username, password)
                info_sessions = self.spark_controller.get_all_sessions_endpoint_info(endpoint)

                # Reuse an existing session if one with this name is live.
                if session in info_sessions:
                    print("found session")

                else:

                    self.spark_controller.add_session(session, endpoint, True, properties)
                    coerce = get_coerce_value(args.coerce)
                    if args.context == CONTEXT_NAME_SPARK:
                        return self.execute_spark(cell, args.output, args.samplemethod,
                                                  args.maxrows, args.samplefraction, session, coerce)
                    elif args.context == CONTEXT_NAME_SQL:
                        return self.execute_sqlquery(cell, args.samplemethod, args.maxrows, args.samplefraction,
                                                     session, args.output, args.quiet, coerce)
                    else:
                        self.ipython_display.send_error("Context '{}' not found".format(args.context))

        # add
        elif subcommand == "add":
            if args.url is None:
                self.ipython_display.send_error("Need to supply URL argument (e.g. -u https://example.com/livyendpoint)")
                return

            name = args.session
            language = args.language
            endpoint = Endpoint(args.url, args.auth, args.user, args.password)
            skip = args.skip

            properties = conf.get_session_properties(language)

            self.spark_controller.add_session(name, endpoint, skip, properties)
        # delete
        elif subcommand == "delete":
            if args.session is not None:
                self.spark_controller.delete_session_by_name(args.session)
            elif args.url is not None:
                if args.id is None:
                    self.ipython_display.send_error("Must provide --id or -i option to delete session at endpoint from URL")
                    return
                endpoint = Endpoint(args.url, args.auth, args.user, args.password)
                session_id = args.id
                self.spark_controller.delete_session_by_id(endpoint, session_id)
            else:
                self.ipython_display.send_error("Subcommand 'delete' requires a session name or a URL and session ID")
        # cleanup
        elif subcommand == "cleanup":
            if args.url is not None:
                endpoint = Endpoint(args.url, args.auth, args.user, args.password)
                self.spark_controller.cleanup_endpoint(endpoint)
            else:
                self.spark_controller.cleanup()
        # logs
        elif subcommand == "logs":
            self.ipython_display.write(self.spark_controller.get_logs(args.session))
        # run: an empty subcommand means "execute the cell against a session"
        elif len(subcommand) == 0:
            coerce = get_coerce_value(args.coerce)
            if args.context == CONTEXT_NAME_SPARK:
                return self.execute_spark(cell, args.output, args.samplemethod,
                                          args.maxrows, args.samplefraction, args.session, coerce)
            elif args.context == CONTEXT_NAME_SQL:
                return self.execute_sqlquery(cell, args.samplemethod, args.maxrows, args.samplefraction,
                                             args.session, args.output, args.quiet, coerce)
            else:
                self.ipython_display.send_error("Context '{}' not found".format(args.context))
        # error
        else:
            self.ipython_display.send_error("Subcommand '{}' not found. {}".format(subcommand, usage))
コード例 #13
0
    def spark(self, line, cell="", local_ns=None):
        """Magic to execute spark remotely.

           This magic allows you to create a Livy Scala or Python session against a Livy endpoint. Every session can
           be used to execute either Spark code or SparkSQL code by executing against the SQL context in the session.
           When the SQL context is used, the result will be a Pandas dataframe of a sample of the results.

           If invoked with no subcommand, the cell will be executed against the specified session.

           Subcommands
           -----------
           info
               Display the available Livy sessions and other configurations for sessions.
           add
               Add a Livy session given a session name (-s), language (-l), and endpoint credentials.
               The -k argument, if present, will skip adding this session if it already exists.
               e.g. `%spark add -s test -l python -u https://sparkcluster.net/livy -a u -p -k`
           config
               Override the livy session properties sent to Livy on session creation. All session creations will
               contain these config settings from then on.
               Expected value is a JSON key-value string to be sent as part of the Request Body for the POST /sessions
               endpoint in Livy.
               e.g. `%%spark config`
                    `{"driverMemory":"1000M", "executorCores":4}`
           run
               Run Spark code against a session.
               e.g. `%%spark -s testsession` will execute the cell code against the testsession previously created
               e.g. `%%spark -s testsession -c sql` will execute the SQL code against the testsession previously created
               e.g. `%%spark -s testsession -c sql -o my_var` will execute the SQL code against the testsession
                        previously created and store the pandas dataframe created in the my_var variable in the
                        Python environment.
           logs
               Returns the logs for a given session.
               e.g. `%spark logs -s testsession` will return the logs for the testsession previously created
           delete
               Delete a Livy session.
               e.g. `%spark delete -s defaultlivy`
           cleanup
               Delete all Livy sessions created by the notebook. No arguments required.
               e.g. `%spark cleanup`
        """
        usage = "Please look at usage of %spark by executing `%spark?`."
        user_input = line
        args = parse_argstring_or_throw(self.spark, user_input)

        # First positional token picks the subcommand; an empty token falls
        # through to the "run" branch at the bottom.
        subcommand = args.command[0].lower()

        # info
        if subcommand == "info":
            if args.url is not None:
                # NOTE(review): this variant's Endpoint takes (url, user,
                # password) with no auth argument — confirm against the
                # Endpoint constructor actually in use.
                endpoint = Endpoint(args.url, args.user, args.password)
                info_sessions = self.spark_controller.get_all_sessions_endpoint_info(endpoint)
                self._print_endpoint_info(info_sessions)
            else:
                self._print_local_info()
        # config
        elif subcommand == "config":
            # Cell body is the JSON payload merged into session_configs.
            conf.override(conf.session_configs.__name__, json.loads(cell))
        # add
        elif subcommand == "add":
            if args.url is None:
                self.ipython_display.send_error("Need to supply URL argument (e.g. -u https://example.com/livyendpoint)")
                return

            name = args.session
            language = args.language
            endpoint = Endpoint(args.url, args.user, args.password)
            skip = args.skip

            properties = conf.get_session_properties(language)

            self.spark_controller.add_session(name, endpoint, skip, properties)
        # delete
        elif subcommand == "delete":
            # Delete either by known session name, or by endpoint URL + id.
            if args.session is not None:
                self.spark_controller.delete_session_by_name(args.session)
            elif args.url is not None:
                if args.id is None:
                    self.ipython_display.send_error("Must provide --id or -i option to delete session at endpoint from URL")
                    return
                endpoint = Endpoint(args.url, args.user, args.password)
                session_id = args.id
                self.spark_controller.delete_session_by_id(endpoint, session_id)
            else:
                self.ipython_display.send_error("Subcommand 'delete' requires a session name or a URL and session ID")
        # cleanup
        elif subcommand == "cleanup":
            if args.url is not None:
                endpoint = Endpoint(args.url, args.user, args.password)
                self.spark_controller.cleanup_endpoint(endpoint)
            else:
                self.spark_controller.cleanup()
        # logs
        elif subcommand == "logs":
            self.ipython_display.write(self.spark_controller.get_logs(args.session))
        # run (empty subcommand): execute the cell against a session
        elif len(subcommand) == 0:
            if args.context == CONTEXT_NAME_SPARK:
                # Spark output is written (or error-sent) immediately; only
                # the SQL branch returns a value to the caller.
                (success, out) = self.spark_controller.run_command(Command(cell), args.session)
                if success:
                    self.ipython_display.write(out)
                else:
                    self.ipython_display.send_error(out)
            elif args.context == CONTEXT_NAME_SQL:
                return self.execute_sqlquery(cell, args.samplemethod, args.maxrows, args.samplefraction,
                                             args.session, args.output, args.quiet)
            else:
                self.ipython_display.send_error("Context '{}' not found".format(args.context))
        # error
        else:
            self.ipython_display.send_error("Subcommand '{}' not found. {}".format(subcommand, usage))
コード例 #14
0
 def _override_session_settings(settings):
     """Store *settings* as the session_configs override in the global conf."""
     conf.override(conf.session_configs.__name__, settings)
コード例 #15
0
File: kernelmagics.py  Project: ganeshraju/sparkmagic
 def _override_session_settings(settings):
     """Record *settings* under the session_configs key of the global conf."""
     conf.override(conf.session_configs.__name__, settings)
コード例 #16
0
def test_configuration_raise_error_for_bad_base64_password():
    """Decoding credentials whose base64_password is not valid base64 should
    fail (the expected error is declared on the test elsewhere)."""
    bad_creds = {'username': '******', 'base64_password': '******', 'url': 'L'}
    conf.override_all({conf.kernel_python_credentials.__name__: bad_creds})
    conf.override(conf.livy_session_startup_timeout_seconds.__name__, 1)
    conf.base64_kernel_python_credentials()