    def test_different_error_reraised(self, mock_hook):
        mock_hook.return_value.get_instance.return_value = None
        op = BigtableInstanceCreateOperator(
            project_id=PROJECT_ID,
            instance_id=INSTANCE_ID,
            main_cluster_id=CLUSTER_ID,
            main_cluster_zone=CLUSTER_ZONE,
            task_id="id",
            gcp_conn_id=GCP_CONN_ID,
        )
        # The nested mock raises GoogleAPICallError whenever create_instance is
        # called, so the operator is expected to re-raise it rather than swallow it.
        mock_hook.return_value.create_instance.side_effect = mock.Mock(
            side_effect=google.api_core.exceptions.GoogleAPICallError('error'))

        with self.assertRaises(google.api_core.exceptions.GoogleAPICallError):
            op.execute(None)

        mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
        mock_hook.return_value.create_instance.assert_called_once_with(
            cluster_nodes=None,
            cluster_storage_type=None,
            instance_display_name=None,
            instance_id=INSTANCE_ID,
            instance_labels=None,
            instance_type=None,
            main_cluster_id=CLUSTER_ID,
            main_cluster_zone=CLUSTER_ZONE,
            project_id=PROJECT_ID,
            replica_cluster_id=None,
            replica_cluster_zone=None,
            timeout=None,
        )

    def test_create_instance_that_exists_empty_project_id(self, mock_hook):
        mock_hook.return_value.get_instance.return_value = mock.Mock(Instance)
        op = BigtableInstanceCreateOperator(
            instance_id=INSTANCE_ID,
            main_cluster_id=CLUSTER_ID,
            main_cluster_zone=CLUSTER_ZONE,
            task_id="id",
            gcp_conn_id=GCP_CONN_ID,
        )
        op.execute(None)

        # The instance already exists, so no create call should be issued.
        mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID)
        mock_hook.return_value.create_instance.assert_not_called()

    def test_empty_attribute(self, missing_attribute, project_id, instance_id,
                             main_cluster_id, main_cluster_zone, mock_hook):
        with self.assertRaises(AirflowException) as e:
            BigtableInstanceCreateOperator(
                project_id=project_id,
                instance_id=instance_id,
                main_cluster_id=main_cluster_id,
                main_cluster_zone=main_cluster_zone,
                task_id="id",
                gcp_conn_id=GCP_CONN_ID,
            )
        err = e.exception
        self.assertEqual(str(err), 'Empty parameter: {}'.format(missing_attribute))
        mock_hook.assert_not_called()

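    # A minimal sketch (not from the original suite) of how the ``mock_hook``
    # argument used by the tests above is typically injected: ``mock.patch``
    # replaces the Bigtable hook class so ``execute()`` never reaches GCP.
    # The patch target path and the test name are assumptions for illustration.
    @mock.patch('airflow.contrib.operators.gcp_bigtable_operator.BigtableHook')
    def test_create_instance_sketch(self, mock_hook):
        mock_hook.return_value.get_instance.return_value = None  # no existing instance
        op = BigtableInstanceCreateOperator(
            project_id=PROJECT_ID,
            instance_id=INSTANCE_ID,
            main_cluster_id=CLUSTER_ID,
            main_cluster_zone=CLUSTER_ZONE,
            task_id="id",
            gcp_conn_id=GCP_CONN_ID,
        )
        op.execute(None)
        # The operator should delegate instance creation to the mocked hook.
        self.assertTrue(mock_hook.return_value.create_instance.called)
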
# [END howto_operator_gcp_bigtable_args]

default_args = {'start_date': airflow.utils.dates.days_ago(1)}

with models.DAG(
        'example_gcp_bigtable_operators',
        default_args=default_args,
        schedule_interval=None  # Override to match your needs
) as dag:
    # [START howto_operator_gcp_bigtable_instance_create]
    create_instance_task = BigtableInstanceCreateOperator(
        project_id=GCP_PROJECT_ID,
        instance_id=CBT_INSTANCE_ID,
        main_cluster_id=CBT_CLUSTER_ID,
        main_cluster_zone=CBT_CLUSTER_ZONE,
        instance_display_name=CBT_INSTANCE_DISPLAY_NAME,
        instance_type=int(CBT_INSTANCE_TYPE),
        instance_labels=json.loads(CBT_INSTANCE_LABELS),
        cluster_nodes=int(CBT_CLUSTER_NODES),
        cluster_storage_type=int(CBT_CLUSTER_STORAGE_TYPE),
        task_id='create_instance_task',
    )
    create_instance_task2 = BigtableInstanceCreateOperator(
        instance_id=CBT_INSTANCE_ID,
        main_cluster_id=CBT_CLUSTER_ID,
        main_cluster_zone=CBT_CLUSTER_ZONE,
        instance_display_name=CBT_INSTANCE_DISPLAY_NAME,
        instance_type=int(CBT_INSTANCE_TYPE),
        instance_labels=json.loads(CBT_INSTANCE_LABELS),
        cluster_nodes=int(CBT_CLUSTER_NODES),
        cluster_storage_type=int(CBT_CLUSTER_STORAGE_TYPE),
        task_id='create_instance_task2',