Example #1
    def test_create_sql_hive_context_pyspark(self):
        kind = "python"
        http_client = MagicMock()
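        # Canned POST responses: one for session creation, two for the context-creation statements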
        self.post_responses = [DummyResponse(201, self.session_create_json),
                               DummyResponse(201, self.post_statement_json),
                               DummyResponse(201, self.post_statement_json)]
        http_client.post.side_effect = self._next_response_post
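        # Canned GET responses drive the status polling until the session and each statement report ready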
        self.get_responses = [DummyResponse(200, self.ready_sessions_json),
                              DummyResponse(200, self.running_statement_json),
                              DummyResponse(200, self.ready_statement_json),
                              DummyResponse(200, self.ready_sessions_json),
                              DummyResponse(200, self.ready_statement_json)]
        http_client.get.side_effect = self._next_response_get
        _t_config_hook({
            "status_sleep_seconds": 0.01,
            "statement_sleep_seconds": 0.01
        })
        session = LivySession(http_client, kind, "-1", False)
        _t_config_hook({})
        session.start()

        session.create_sql_context()

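        # For a Python session, both a SQLContext and a HiveContext statement should be posted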
        assert call("/sessions/0/statements", [201], {"code": "from pyspark.sql import SQLContext\n"
                                                              "from pyspark.sql.types import *\n"
                                                              "sqlContext = SQLContext(sc)"}) \
               in http_client.post.call_args_list
        assert call("/sessions/0/statements", [201], {"code": "from pyspark.sql import HiveContext\n"
                                                              "hiveContext = HiveContext(sc)"}) \
               in http_client.post.call_args_list
Example #2
    def test_create_sql_context_spark(self):
        kind = "scala"
        http_client = MagicMock()
        self.post_responses = [DummyResponse(201, self.session_create_json),
                               DummyResponse(201, self.post_statement_json),
                               DummyResponse(201, self.post_statement_json)]
        http_client.post.side_effect = self._next_response_post
        self.get_responses = [DummyResponse(200, self.ready_sessions_json),
                              DummyResponse(200, self.running_statement_json),
                              DummyResponse(200, self.ready_statement_json),
                              DummyResponse(200, self.ready_sessions_json),
                              DummyResponse(200, self.ready_statement_json)]
        http_client.get.side_effect = self._next_response_get
        _t_config_hook({
            "status_sleep_seconds": 0.01,
            "statement_sleep_seconds": 0.01
        })
        session = LivySession(http_client, kind, "-1", False)
        _t_config_hook({})
        session.start()

        session.create_sql_context()

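        # For a Scala session, the posted code should build a SQLContext and a HiveContext via their Scala constructors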
        assert call("/sessions/0/statements", [201], {"code": "val sqlContext = new org.apache.spark.sql.SQLContext"
                                                              "(sc)\nimport sqlContext.implicits._"}) \
               in http_client.post.call_args_list
        assert call("/sessions/0/statements", [201], {"code": "val hiveContext = new org.apache.spark.sql.hive.Hive"
                                                              "Context(sc)"}) \
               in http_client.post.call_args_list
Example #3
    # The test name implies an exception is expected for an unknown kind;
    # presumably the full suite guards this with nose's @raises decorator.
    @raises(ValueError)
    def test_create_sql_context_unknown_throws(self):
        kind = "unknown"
        http_client = MagicMock()
        self.post_responses = [DummyResponse(201, self.session_create_json),
                               DummyResponse(201, self.post_statement_json)]
        http_client.post.side_effect = self._next_response_post
        self.get_responses = [DummyResponse(200, self.ready_sessions_json),
                              DummyResponse(200, self.running_statement_json),
                              DummyResponse(200, self.ready_statement_json)]
        http_client.get.side_effect = self._next_response_get
        session = LivySession(http_client, kind, state_sleep_seconds=0.01, statement_sleep_seconds=0.01)
        session.start()

        session.create_sql_context()
Example #4
    def test_create_sql_context_spark(self):
        kind = "scala"
        http_client = MagicMock()
        self.post_responses = [DummyResponse(201, self.session_create_json),
                               DummyResponse(201, self.post_statement_json)]
        http_client.post.side_effect = self._next_response_post
        self.get_responses = [DummyResponse(200, self.ready_sessions_json),
                              DummyResponse(200, self.running_statement_json),
                              DummyResponse(200, self.ready_statement_json)]
        http_client.get.side_effect = self._next_response_get
        session = LivySession(http_client, kind, state_sleep_seconds=0.01, statement_sleep_seconds=0.01)
        session.start()

        session.create_sql_context()

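        # assert_called_with checks only the most recent post: the Scala SQLContext creation statement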
        http_client.post.assert_called_with("/sessions/0/statements", [201],
                                            {"code": "val sqlContext = new org.apache.spark.sql.SQLContext(sc)\n"
                                                     "import sqlContext.implicits._"})
Example #5
    def test_create_sql_context_pyspark(self):
        kind = "python"
        http_client = MagicMock()
        self.post_responses = [DummyResponse(201, self.session_create_json),
                               DummyResponse(201, self.post_statement_json)]
        http_client.post.side_effect = self._next_response_post
        self.get_responses = [DummyResponse(200, self.ready_sessions_json),
                              DummyResponse(200, self.running_statement_json),
                              DummyResponse(200, self.ready_statement_json)]
        http_client.get.side_effect = self._next_response_get
        session = LivySession(http_client, kind, state_sleep_seconds=0.01, statement_sleep_seconds=0.01)
        session.start()

        session.create_sql_context()

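        # The most recent post should be the PySpark SQLContext creation statement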
        http_client.post.assert_called_with("/sessions/0/statements", [201],
                                            {"code": "from pyspark.sql import SQLContext\n"
                                                     "from pyspark.sql.types import *\n"
                                                     "sqlContext = SQLContext(sc)"})
Example #6
    def test_create_sql_context_happens_once(self):
        kind = "scala"
        http_client = MagicMock()
        self.post_responses = [DummyResponse(201, self.session_create_json),
                               DummyResponse(201, self.post_statement_json)]
        http_client.post.side_effect = self._next_response_post
        self.get_responses = [DummyResponse(200, self.ready_sessions_json),
                              DummyResponse(200, self.running_statement_json),
                              DummyResponse(200, self.ready_statement_json)]
        http_client.get.side_effect = self._next_response_get
        session = LivySession(http_client, kind, state_sleep_seconds=0.01, statement_sleep_seconds=0.01)
        session.start()

        # Reset the mock so that post called count is accurate
        http_client.reset_mock()

        session.create_sql_context()

        # Second call should not issue a post request
        session.create_sql_context()

        http_client.post.assert_called_once_with("/sessions/0/statements", [201],
                                                 {"code": "val sqlContext = new org.apache.spark.sql.SQLContext(sc)\n"
                                                          "import sqlContext.implicits._"})