# Assumes module-level "import subprocess" plus EXE and the is_aws()/is_azure()
# helpers (see the sketch after this method).
def test_create(self, name, cluster_id):
    result = None
    if is_aws():
        result = subprocess.check_output(
            [EXE, self.profile, "-k", "-i", cluster_id])
    if is_azure():
        if self.host.startswith("https://"):
            result = subprocess.check_output(
                [EXE, self.profile, "-k", "-i", cluster_id])
        else:
            # Without a full https:// host, pass the Azure org id explicitly.
            result = subprocess.check_output([
                EXE, self.profile, "-k", "-o", str(self.org), "-i", cluster_id
            ])
    assert result is not None
    self.log.info("result %s", result)
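# is_aws() / is_azure() are not defined in this section. A minimal sketch,
# assuming the target cloud is selected via a CLOUD environment variable
# (the variable name and its default are assumptions, not from the source):
import os

def is_aws():
    # Hypothetical helper: treat AWS as the default target cloud.
    return os.environ.get("CLOUD", "aws").lower() == "aws"

def is_azure():
    # Hypothetical helper: set CLOUD=azure to exercise the Azure code paths.
    return os.environ.get("CLOUD", "").lower() == "azure"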
import random
import sys

try:
    client = ClusterApi(apiclient)
except Exception as ex:  # pylint: disable=broad-except
    print(ex)
    sys.exit(1)

random.seed(42)  # deterministic choice between cluster shapes
cluster_ids = {}
for spark_version in spark_versions:
    # Pick an autoscaling or fixed-size template at random, then specialize it.
    if random.random() < 0.5:
        cluster_conf = AUTOSCALE_CLUSTER.copy()
    else:
        cluster_conf = FIXED_CLUSTER.copy()
    cluster_conf["spark_version"] = spark_version
    cluster_conf["cluster_name"] = "TEST-" + spark_version.split("-scala")[0]
    cluster_conf["ssh_public_keys"] = [ssh_key]
    cluster_conf["driver_node_type_id"] = instances
    cluster_conf["node_type_id"] = instances
    if is_azure() and (spark_version == "5.5.x-scala2.11"):
        # This runtime is not exercised on Azure.
        print(cluster_conf["cluster_name"], "(skipped)")
    else:
        print(cluster_conf["cluster_name"])
        result = create_cluster(client, cluster_conf)
        cluster_ids[cluster_conf["cluster_name"]] = result["cluster_id"]
save_running_clusters(cluster_ids)
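# create_cluster() and save_running_clusters() are likewise defined elsewhere.
# A minimal sketch, assuming databricks-cli's ClusterApi (whose create_cluster
# method takes the cluster spec as a dict and returns a response containing
# the new cluster_id) and JSON persistence; the file name is an assumption:
import json

def create_cluster(client, cluster_conf):
    # Thin wrapper: the response dict includes the new cluster_id.
    return client.create_cluster(cluster_conf)

def save_running_clusters(cluster_ids, path="running_clusters.json"):
    # Persist name -> cluster_id so a later step can poll or tear down.
    with open(path, "w") as fp:
        json.dump(cluster_ids, fp, indent=2)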