Example #1
    def spark(self, line, cell="", local_ns=None):
        """Magic to execute spark remotely.

           This magic allows you to create a Livy Scala or Python session against a Livy endpoint. Every session can
           be used to execute either Spark code or SparkSQL code by executing against the SQL context in the session.
           When the SQL context is used, the result will be a Pandas dataframe of a sample of the results.

           If invoked with no subcommand, the cell will be executed against the specified session.

           Subcommands
           -----------
           info
               Display the available Livy sessions and other configurations for sessions.
           add
               Add a Livy session given a session name (-s), language (-l), and endpoint credentials.
               The -k argument, if present, will skip adding this session if it already exists.
               e.g. `%spark add -s test -l python -u https://sparkcluster.net/livy -t Kerberos -a u -p -k`
           config
               Override the livy session properties sent to Livy on session creation. All session creations will
               contain these config settings from then on.
               Expected value is a JSON key-value string to be sent as part of the Request Body for the POST /sessions
               endpoint in Livy.
               e.g. `%%spark config`
                    `{"driverMemory":"1000M", "executorCores":4}`
           run
               Run Spark code against a session.
               e.g. `%%spark -s testsession` will execute the cell code against the testsession previously created
               e.g. `%%spark -s testsession -c sql` will execute the SQL code against the testsession previously created
               e.g. `%%spark -s testsession -c sql -o my_var` will execute the SQL code against the testsession
                        previously created and store the pandas dataframe created in the my_var variable in the
                        Python environment.
           use
               Run Spark code against an existing session.
               e.g. `%%spark use -u https://sparkcluster.net/livy -i 1 -l python` will execute the cell code against existing session 1
               e.g. `%%spark use -u https://sparkcluster.net/livy -i 1 -l python -c sql` will execute the SQL code against existing session 1
               e.g. `%%spark use -u https://sparkcluster.net/livy -i 1 -l python -c sql -o my_var` will execute the SQL code against existing
                         session 1 and store the pandas dataframe created in the my_var variable in the
                         Python environment.
           logs
               Returns the logs for a given session.
               e.g. `%spark logs -s testsession` will return the logs for the testsession previously created
           delete
               Delete a Livy session.
               e.g. `%spark delete -s defaultlivy`
           cleanup
               Delete all Livy sessions created by the notebook. No arguments required.
               e.g. `%spark cleanup`
        """
        usage = "Please look at usage of %spark by executing `%spark?`."
        user_input = line
        args = parse_argstring_or_throw(self.spark, user_input)

        subcommand = args.command[0].lower()

        # Fall back to the configured auth mode when none is given explicitly.
        if args.auth is None:
            args.auth = conf.get_auth_value(args.user, args.password)

        # info
        if subcommand == "info":
            if args.url is not None and args.id is not None:
                endpoint = Endpoint(args.url, args.auth, args.user,
                                    args.password)
                info_sessions = self.spark_controller.get_all_sessions_endpoint_info(
                    endpoint)
                self._print_endpoint_info(info_sessions, args.id)
            else:
                self._print_local_info()
        # config
        elif subcommand == "config":
            conf.override(conf.session_configs.__name__, json.loads(cell))
        # add
        elif subcommand == "add":
            if args.url is None:
                self.ipython_display.send_error(
                    "Need to supply URL argument (e.g. -u https://example.com/livyendpoint)"
                )
                return

            name = args.session
            language = args.language
            endpoint = Endpoint(args.url, args.auth, args.user, args.password)
            skip = args.skip

            properties = conf.get_session_properties(language)

            self.spark_controller.add_session(name, endpoint, skip, properties)
        # delete
        elif subcommand == "delete":
            if args.session is not None:
                self.spark_controller.delete_session_by_name(args.session)
            elif args.url is not None:
                if args.id is None:
                    self.ipython_display.send_error(
                        "Must provide --id or -i option to delete session at endpoint from URL"
                    )
                    return
                endpoint = Endpoint(args.url, args.auth, args.user,
                                    args.password)
                session_id = args.id
                self.spark_controller.delete_session_by_id(
                    endpoint, session_id)
            else:
                self.ipython_display.send_error(
                    "Subcommand 'delete' requires a session name or a URL and session ID"
                )
        # cleanup
        elif subcommand == "cleanup":
            if args.url is not None:
                endpoint = Endpoint(args.url, args.auth, args.user,
                                    args.password)
                self.spark_controller.cleanup_endpoint(endpoint)
            else:
                self.spark_controller.cleanup()
        # logs
        elif subcommand == "logs":
            self.ipython_display.write(
                self.spark_controller.get_logs(args.session))
        # use
        elif subcommand == "use":
            if args.url is None:
                self.ipython_display.send_error(
                    "Need to supply URL argument (e.g. -u https://example.com/livyendpoint)"
                )
                return

            endpoint = Endpoint(args.url, args.auth, args.user, args.password)
            properties = conf.get_session_properties(args.language)
            session = self.spark_controller.tmp_session(
                endpoint, properties, args.id)
            coerce = get_coerce_value(args.coerce)
            if args.context == CONTEXT_NAME_SPARK:
                return self.execute_spark2(cell, args.output,
                                           args.samplemethod, args.maxrows,
                                           args.samplefraction, session,
                                           coerce)
            elif args.context == CONTEXT_NAME_SQL:
                return self.execute_sqlquery2(cell, args.samplemethod,
                                              args.maxrows,
                                              args.samplefraction, session,
                                              args.output, args.quiet, coerce)
            else:
                self.ipython_display.send_error(
                    "Context '{}' not found".format(args.context))

        # run (no explicit subcommand)
        elif len(subcommand) == 0:
            coerce = get_coerce_value(args.coerce)
            if args.context == CONTEXT_NAME_SPARK:
                return self.execute_spark(cell, args.output, args.samplemethod,
                                          args.maxrows, args.samplefraction,
                                          args.session, coerce)
            elif args.context == CONTEXT_NAME_SQL:
                return self.execute_sqlquery(cell, args.samplemethod,
                                             args.maxrows, args.samplefraction,
                                             args.session, args.output,
                                             args.quiet, coerce)
            else:
                self.ipython_display.send_error(
                    "Context '{}' not found".format(args.context))
        # error
        else:
            self.ipython_display.send_error(
                "Subcommand '{}' not found. {}".format(subcommand, usage))
Example #2
from unittest.mock import MagicMock, PropertyMock, patch


def refresh_configuration(self):
    credentials = getattr(conf, 'base64_kernel_' + self.language + '_credentials')()
    (username, password, auth, url) = (credentials['username'], credentials['password'],
                                       credentials['auth'], credentials['url'])
    self.endpoint = Endpoint(url, auth, username, password)


def _setup():
    # Patch the magics' persisted-state db with the in-memory mock defined below.
    with patch('googledataprocauthenticator.magics.dataprocmagics.DataprocMagics.self.db',
               new_callable=PropertyMock,
               return_value=mocked_db):
        global magic, spark_controller, shell, ipython_display
        magic = DataprocMagics(shell=None, widget=MagicMock())
        magic.shell = shell = MagicMock()
        magic.ipython_display = ipython_display = MagicMock()
        magic.spark_controller = spark_controller = MagicMock()


def _teardown():
    pass


stored_endpoints = ("http://url.com",
                    Endpoint("http://url.com", "default-credentials"))
get_session_id_to_name = {1234: 'my_session'}
sessions_mock = {'my_session': LivySession(http_client=MagicMock(),
                                           properties={"kind": SESSION_KIND_SPARK,
                                                       "heartbeatTimeoutInSecond": 60},
                                           ipython_display=ipython_display,
                                           session_id=1234)}
sessions_list_mock = [LivySession(http_client=MagicMock(),
                                  properties={"kind": SESSION_KIND_SPARK,
                                              "heartbeatTimeoutInSecond": 60},
                                  ipython_display=ipython_display,
                                  session_id=1234)]
mocked_db = {
    'autorestore/stored_endpoints': stored_endpoints,
    'autorestore/get_session_id_to_name': get_session_id_to_name,
}
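
# A minimal sketch of the restore step the mocked_db above emulates; the
# restore_state helper is hypothetical, only the "autorestore/" key names
# come from the source.
def restore_state(db):
    endpoints = db.get('autorestore/stored_endpoints', ())
    id_to_name = db.get('autorestore/get_session_id_to_name', {})
    return endpoints, id_to_name


assert restore_state(mocked_db)[1][1234] == 'my_session'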


def make_credentials():
    return credentials.Credentials(
        token=None,
        refresh_token='refresh',
Example #4
def refresh_configuration(self):
    self.endpoint = Endpoint("new_url", None)
Example #5
    def __init__(self, shell, data=None, spark_events=None):
        super(TestKernelMagics, self).__init__(shell,
                                               spark_events=spark_events)

        self.language = constants.LANG_PYTHON
        self.endpoint = Endpoint("url", None)
Example #6
def test_equality():
    assert_equals(Endpoint("http://url.com", AUTH_BASIC, "sdf", "w"),
                  Endpoint("http://url.com", AUTH_BASIC, "sdf", "w"))
    assert_equals(Endpoint("http://url.com", NO_AUTH, "sdf", "w"),
                  Endpoint("http://url.com", NO_AUTH, "sdf", "w"))