Example #1
0
        def my_pipeline():
            """Check that a partial spec dict is merged with explicit kwargs.

            The op should combine the version/conf from ``spec`` with the
            cluster name, node type and worker count given as keyword args.
            """
            # Partial spec: only the Spark version and conf are provided here.
            partial_spec = {
                "spark_version": "5.3.x-scala2.11",
                "spark_conf": {"spark.speculation": "true"},
            }

            op = CreateClusterOp(
                name="createcluster",
                cluster_name="test-cluster",
                spec=partial_spec,
                node_type_id="Standard_D3_v2",
                num_workers=2,
            )

            # The fully merged spec the op is expected to produce.
            self.assert_res(op, {
                "cluster_name": "test-cluster",
                "spark_version": "5.3.x-scala2.11",
                "node_type_id": "Standard_D3_v2",
                "spark_conf": {"spark.speculation": "true"},
                "num_workers": 2,
            })
Example #2
0
        def my_pipeline():
            """Check that a raw JSON string spec plus a cluster name yields
            the expected merged cluster spec via ``from_json_spec``.
            """
            name = "test-cluster"
            # JSON covers everything except the cluster name, which is
            # supplied separately as a keyword argument.
            raw_spec = """
            {
                "spark_version": "5.3.x-scala2.11",
                "node_type_id": "Standard_D3_v2",
                "spark_conf": {
                    "spark.speculation": "true"
                },
                "num_workers": 2
            }
            """

            op = CreateClusterOp.from_json_spec(
                name="createcluster",
                cluster_name=name,
                json_spec=raw_spec,
            )

            # Expected result: the parsed JSON with the name folded in.
            self.assert_res(op, {
                "cluster_name": name,
                "spark_version": "5.3.x-scala2.11",
                "node_type_id": "Standard_D3_v2",
                "spark_conf": {"spark.speculation": "true"},
                "num_workers": 2,
            })
Example #3
0
        def my_pipeline():
            """Check the autoscale form: min/max workers instead of a
            fixed ``num_workers`` count.
            """
            name = "test-cluster"
            version = "5.3.x-scala2.11"
            node_type = "Standard_D3_v2"
            scale_range = {"min_workers": 2, "max_workers": 50}

            op = CreateClusterOp(
                name="createcluster",
                cluster_name=name,
                spark_version=version,
                node_type_id=node_type,
                autoscale=scale_range,
            )

            # Every kwarg should appear verbatim in the resulting spec.
            self.assert_res(op, {
                "cluster_name": name,
                "spark_version": version,
                "node_type_id": node_type,
                "autoscale": scale_range,
            })
Example #4
0
        def my_pipeline():
            """Check that a spec built entirely from keyword arguments is
            passed through to the resulting cluster spec unchanged.
            """
            # All inputs as locals so the expectation can reuse them.
            name = "test-cluster"
            version = "5.3.x-scala2.11"
            node_type = "Standard_D3_v2"
            conf = {"spark.speculation": "true"}
            workers = 2

            op = CreateClusterOp(
                name="createcluster",
                cluster_name=name,
                spark_version=version,
                node_type_id=node_type,
                spark_conf=conf,
                num_workers=workers,
            )

            self.assert_res(op, {
                "cluster_name": name,
                "spark_version": version,
                "node_type_id": node_type,
                "spark_conf": conf,
                "num_workers": workers,
            })
Example #5
0
 def my_pipeline():
     """Construct a cluster op inline with literal arguments.

     No assertion here — the example only demonstrates op creation
     with an anonymous (unnamed) cluster.
     """
     CreateClusterOp(
         name="createcluster",
         spark_version="5.3.x-scala2.11",
         node_type_id="Standard_D3_v2",
         spark_conf={"spark.speculation": "true"},
         num_workers=2,
     )
Example #6
0
        def my_pipeline():
            """Check ``from_file_name``: the spec is loaded from a JSON
            file next to this test module and merged with the cluster name.
            """
            name = "test-cluster"
            # Path of the spec file, resolved relative to this source file.
            spec_path = Path(__file__).parent / "cluster_spec.json"

            op = CreateClusterOp.from_file_name(
                name="createcluster",
                cluster_name=name,
                file_name=spec_path,
            )

            # Expected content of cluster_spec.json, with the name added.
            self.assert_res(op, {
                "cluster_name": name,
                "spark_version": "5.3.x-scala2.11",
                "node_type_id": "Standard_D3_v2",
                "spark_conf": {"spark.speculation": "true"},
                "num_workers": 2,
            })