def test_create_execute(self, body, mock_hook):
    operator = GKECreateClusterOperator(project_id=TEST_GCP_PROJECT_ID,
                                        location=PROJECT_LOCATION,
                                        body=body,
                                        task_id=PROJECT_TASK_ID)

    operator.execute(None)
    mock_hook.return_value.create_cluster.assert_called_once_with(
        cluster=body, project_id=TEST_GCP_PROJECT_ID)
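This fragment takes a parametrized body and an injected mock_hook without showing the harness around them. Below is a minimal, self-contained sketch of how such a test can be wired up; the patch target, the constants, and the sample body are assumptions for illustration, not taken from the original suite.

from unittest import mock

import pytest
from airflow.providers.google.cloud.operators.kubernetes_engine import (
    GKECreateClusterOperator,
)

# Hypothetical constants; the real test suite defines its own values.
TEST_GCP_PROJECT_ID = "test-project"
PROJECT_LOCATION = "us-central1-a"
PROJECT_TASK_ID = "create-cluster-task"
PROJECT_BODY_CREATE = {"name": "test-cluster", "initial_node_count": 1}


@pytest.mark.parametrize("body", [PROJECT_BODY_CREATE])
def test_create_execute_sketch(body):
    # Assumed patch target: the name under which the operator module looks
    # up its GKE hook. Adjust to match the provider version in use.
    with mock.patch(
        "airflow.providers.google.cloud.operators.kubernetes_engine.GKEHook"
    ) as mock_hook:
        operator = GKECreateClusterOperator(project_id=TEST_GCP_PROJECT_ID,
                                            location=PROJECT_LOCATION,
                                            body=body,
                                            task_id=PROJECT_TASK_ID)
        operator.execute(None)
        mock_hook.return_value.create_cluster.assert_called_once_with(
            cluster=body, project_id=TEST_GCP_PROJECT_ID)

Using mock.patch as a context manager rather than a decorator keeps the argument order explicit and sidesteps the decorator/fixture ordering that the class-based fragments above rely on.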
Example 2
    # [START composer_gkeoperator_minconfig_airflow_1]
    # [START composer_gkeoperator_templateconfig_airflow_1]
    # [START composer_gkeoperator_affinity_airflow_1]
    # [START composer_gkeoperator_fullconfig_airflow_1]
    # TODO(developer): update with your values
    PROJECT_ID = "my-project-id"
    CLUSTER_ZONE = "us-west1-a"
    CLUSTER_NAME = "example-cluster"
    # [END composer_gkeoperator_minconfig_airflow_1]
    # [END composer_gkeoperator_templateconfig_airflow_1]
    # [END composer_gkeoperator_affinity_airflow_1]
    # [END composer_gkeoperator_fullconfig_airflow_1]
    # [START composer_gke_create_cluster_airflow_1]
    CLUSTER = {"name": CLUSTER_NAME, "initial_node_count": 1}
    create_cluster = GKECreateClusterOperator(
        task_id="create_cluster",
        project_id=PROJECT_ID,
        location=CLUSTER_ZONE,
        body=CLUSTER,
    )
    # Using the BashOperator to create node pools is a workaround
    # in Airflow 2 (see https://github.com/apache/airflow/pull/17820);
    # otherwise, node pool creation can be done using the GKECreateClusterOperator.

    create_node_pools = BashOperator(
        task_id="create_node_pools",
        bash_command=f"gcloud container node-pools create pool-0 \
                        --cluster {CLUSTER_NAME} \
                        --num-nodes 1 \
                        --zone {CLUSTER_ZONE} \
                        && gcloud container node-pools create pool-1 \
                        --cluster {CLUSTER_NAME} \
                        --num-nodes 1 \
                        --zone {CLUSTER_ZONE}",
    )
    # [END composer_gke_create_cluster_airflow_1]
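The create and node-pool tasks only make sense in that order, so the full DAG would normally chain them. A minimal sketch, assuming both operators are defined inside the same DAG as above:

    # Create the cluster before attempting to add node pools to it.
    create_cluster >> create_node_pools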
Example 3

def test_create_execute_error_body(self, body, mock_hook):
    with self.assertRaises(AirflowException):
        GKECreateClusterOperator(project_id=TEST_GCP_PROJECT_ID,
                                 location=PROJECT_LOCATION,
                                 body=body,
                                 task_id=PROJECT_TASK_ID)
Example 4

def test_create_execute_error_project_id(self, mock_hook):
    with self.assertRaises(AirflowException):
        GKECreateClusterOperator(location=PROJECT_LOCATION,
                                 body=PROJECT_BODY,
                                 task_id=PROJECT_TASK_ID)
Example 5
def test_create_execute_error_location(self, mock_hook):
    with pytest.raises(AirflowException):
        GKECreateClusterOperator(project_id=TEST_GCP_PROJECT_ID,
                                 body=PROJECT_BODY,
                                 task_id=PROJECT_TASK_ID)
Example 6
import os

from airflow import models
from airflow.providers.google.cloud.operators.kubernetes_engine import (
    GKECreateClusterOperator,
    GKEStartPodOperator,
)
from airflow.utils.dates import days_ago

GCP_PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-project")
GCP_LOCATION = os.environ.get("GCP_GKE_LOCATION", "europe-north1-a")
CLUSTER_NAME = os.environ.get("GCP_GKE_CLUSTER_NAME", "cluster-name")

CLUSTER = {"name": CLUSTER_NAME, "initial_node_count": 1}

default_args = {"start_date": days_ago(1)}

with models.DAG(
        "example_gcp_gke",
        default_args=default_args,
        schedule_interval=None,  # Override to match your needs
        tags=['example'],
) as dag:
    create_cluster = GKECreateClusterOperator(
        task_id="create_cluster",
        project_id=GCP_PROJECT_ID,
        location=GCP_LOCATION,
        body=CLUSTER,
    )

    pod_task = GKEStartPodOperator(
        task_id="pod_task",
        project_id=GCP_PROJECT_ID,
        location=GCP_LOCATION,
        cluster_name=CLUSTER_NAME,
        namespace="default",
        image="perl",
        name="test-pod",
    )

    pod_task_xcom = GKEStartPodOperator(
        task_id="pod_task_xcom",