Example #1
    def test_start_kernel(self, name, cluster_id, spark):
        self.log.info("Paramters: %s %s %s", name, cluster_id, spark)
        kernel_name, _ = get_kernel_path(cluster_id, spark)

        km, kc = TestRunKernel.get_kernel_client(cluster_id, spark, self.log)
        self.log.info("Kernel: %s %s", km, kc)
        ready = False
        while not ready:
            try:
                kc_msg = kc.get_iopub_msg(timeout=2, block=True)
                self.log.info(
                    "traceback: %s, execution_state: %s",
                    kc_msg["content"].get("traceback"),
                    kc_msg["content"].get("execution_state"),
                )
                assert kc_msg["content"].get("traceback") is None
                ready = kc_msg["content"].get("execution_state") == "idle"
            except queue.Empty:
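                # no message arrived within the timeout: treat the kernel as settled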
                ready = True

        with open("{}{}_std_err.log".format(TEMP, kernel_name), "r") as fd:
            stderr = fd.read()
        self.log.info("stderr:\n%s", stderr)
        with open("{}{}_std_out.log".format(TEMP, kernel_name), "r") as fd:
            stdout = fd.read()
        self.log.info("stdout:\n%s", stdout)
Example #2
    @staticmethod
    def stop_kernel_client(cluster_id, spark, log):
        kernel_name, _ = get_kernel_path(cluster_id, spark)
        km, kc = TestRunKernel.get_kernel_client(cluster_id, spark, log)
        # close the ZMQ channels before killing the kernel process
        kc.stop_channels()
        km.shutdown_kernel(now=True)
        kc = None
        km = None
        del TestRunKernel.CLIENTS[kernel_name]
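The same teardown order (channels first, then the kernel process) maps naturally onto a pytest fixture. This is a hedged sketch, not taken from the source; the fixture name and the "python3" kernel are assumptions.

import pytest

from jupyter_client.manager import start_new_kernel

@pytest.fixture
def kernel():
    km, kc = start_new_kernel(kernel_name="python3")  # assumed kernel name
    yield km, kc
    # mirror stop_kernel_client: close the channels, then force shutdown
    kc.stop_channels()
    km.shutdown_kernel(now=True)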
Example #3
    @staticmethod
    def get_kernel_client(cluster_id, spark, log):
        kernel_name, _ = get_kernel_path(cluster_id, spark)
        if TestRunKernel.CLIENTS.get(kernel_name) is None:
            # first request for this kernel: start it and cache the pair,
            # redirecting the kernel's output to log files under TEMP
            km, kc = start_new_kernel(
                kernel_name=kernel_name,
                stderr=open("{}{}_std_err.log".format(TEMP, kernel_name), "w"),
                stdout=open("{}{}_std_out.log".format(TEMP, kernel_name), "w"),
            )
            TestRunKernel.CLIENTS[kernel_name] = (km, kc)
        return TestRunKernel.CLIENTS[kernel_name]
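Examples #2 and #3 form a get/stop pair around the class-level CLIENTS cache. A hypothetical call sequence, where the cluster id and logger are placeholders rather than values from the source:

km, kc = TestRunKernel.get_kernel_client("cluster-123", True, logger)
# ... exchange messages over kc ...
TestRunKernel.stop_kernel_client("cluster-123", True, logger)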
Example #4
    def test_kernelspec_spark(self, name, cluster_id):
        python_path = get_python_path(cluster_id)

        create_kernelspec(
            self.profile,
            self.org,
            self.host,
            cluster_id,
            name,
            os.environ["CONDA_DEFAULT_ENV"],
            python_path,
            False,
        )

        kernel_path = get_kernel_path(cluster_id, True)[1]
        with open(os.path.join(kernel_path, "kernel.json")) as fd:
            k = json.load(fd)
            assert k["display_name"] == "SSH {} {}:{} ({}/Spark)".format(
                cluster_id, self.profile, name,
                os.environ["CONDA_DEFAULT_ENV"])