def test_execute_bad_type(self, mock_hook):
    """A non-string ``sql`` (here an int) must make execute() raise AirflowException."""
    op = BigQueryOperator(
        task_id=TASK_ID,
        sql=1,  # deliberately invalid: sql is expected to be a string (or list of strings)
        destination_dataset_table=None,
        write_disposition='WRITE_EMPTY',
        allow_large_results=False,
        flatten_results=None,
        bigquery_conn_id='google_cloud_default',
        udf_config=None,
        use_legacy_sql=True,
        maximum_billing_tier=None,
        maximum_bytes_billed=None,
        create_disposition='CREATE_IF_NEEDED',
        schema_update_options=(),
        query_params=None,
        labels=None,
        priority='INTERACTIVE',
        time_partitioning=None,
        api_resource_configs=None,
        cluster_fields=None,
    )
    with self.assertRaises(AirflowException):
        op.execute(MagicMock())
def test_bigquery_operator_defaults(self, mock_hook):
    """Operator created with only ``sql`` (plus dag/default_args) forwards the
    documented defaults to the hook's run_query, and ``sql`` remains a string
    both before and after template rendering.

    NOTE(review): this test name appears more than once in this module;
    under unittest the later definition shadows the earlier — consider
    renaming so all variants actually run.
    """
    op = BigQueryOperator(
        task_id=TASK_ID,
        sql='Select * from test_table',
        dag=self.dag,
        default_args=self.args,
    )
    op.execute(None)

    # Collect the expected defaults once so the assertion reads as data.
    expected = dict(
        sql='Select * from test_table',
        destination_dataset_table=None,
        write_disposition='WRITE_EMPTY',
        allow_large_results=False,
        flatten_results=None,
        udf_config=None,
        maximum_billing_tier=None,
        maximum_bytes_billed=None,
        create_disposition='CREATE_IF_NEEDED',
        schema_update_options=(),
        query_params=None,
        labels=None,
        priority='INTERACTIVE',
        time_partitioning=None,
        api_resource_configs=None,
        cluster_fields=None,
    )
    cursor = mock_hook.return_value.get_conn().cursor()
    cursor.run_query.assert_called_once_with(**expected)

    self.assertTrue(isinstance(op.sql, six.string_types))
    ti = TaskInstance(task=op, execution_date=DEFAULT_DATE)
    ti.render_templates()
    self.assertTrue(isinstance(ti.task.sql, six.string_types))
def test_bigquery_operator_defaults(self, mock_hook):
    """Operator created with only ``task_id`` and ``sql`` passes the documented
    default values through to the mocked hook's run_query.

    NOTE(review): this test name is duplicated in this module (an identical
    copy exists nearby); later definitions shadow earlier ones under
    unittest — consider renaming or removing the duplicate.
    """
    op = BigQueryOperator(
        task_id=TASK_ID,
        sql='Select * from test_table',
    )
    op.execute(None)

    run_query = mock_hook.return_value.get_conn().cursor().run_query
    run_query.assert_called_once_with(
        sql='Select * from test_table',
        destination_dataset_table=None,
        write_disposition='WRITE_EMPTY',
        allow_large_results=False,
        flatten_results=None,
        udf_config=None,
        maximum_billing_tier=None,
        maximum_bytes_billed=None,
        create_disposition='CREATE_IF_NEEDED',
        schema_update_options=(),
        query_params=None,
        labels=None,
        priority='INTERACTIVE',
        time_partitioning=None,
        api_resource_configs=None,
        cluster_fields=None,
    )
def test_bigquery_operator_defaults(self, mock_hook):
    """Verify run_query receives the operator's default kwargs when only
    ``task_id`` and ``sql`` are supplied.

    NOTE(review): an identical test with this exact name exists elsewhere in
    this module; under unittest only the last definition runs — consider
    deduplicating.
    """
    op = BigQueryOperator(task_id=TASK_ID, sql='Select * from test_table')
    op.execute(None)

    # Expected call spelled out as a dict so the single assertion stays readable.
    expected = {
        'sql': 'Select * from test_table',
        'destination_dataset_table': None,
        'write_disposition': 'WRITE_EMPTY',
        'allow_large_results': False,
        'flatten_results': None,
        'udf_config': None,
        'maximum_billing_tier': None,
        'maximum_bytes_billed': None,
        'create_disposition': 'CREATE_IF_NEEDED',
        'schema_update_options': (),
        'query_params': None,
        'labels': None,
        'priority': 'INTERACTIVE',
        'time_partitioning': None,
        'api_resource_configs': None,
        'cluster_fields': None,
    }
    mock_hook.return_value.get_conn().cursor().run_query.assert_called_once_with(**expected)
def execute(self, context):
    """Render the operator's SQL, then delegate to BigQueryOperator.execute.

    If the subclass defines ``SQL_TEMPLATE`` (non-None), it is formatted with
    ``self.sql_template_params``; otherwise ``self.sql`` itself is formatted
    with the same parameters.

    :param context: Airflow task context, forwarded unchanged to
        ``BigQueryOperator.execute``.
    """
    # Resolves the old TODO ("checar se 'hasattr' contempla" — check whether
    # hasattr covers this): use an explicit getattr lookup instead of
    # try/except AttributeError, so an AttributeError raised *inside*
    # str.format (e.g. by a template parameter's attribute access) is no
    # longer silently re-routed to the fallback branch. A missing or None
    # SQL_TEMPLATE still falls back to formatting self.sql, matching the
    # original behavior (None.format raised AttributeError and fell through).
    template = getattr(self, 'SQL_TEMPLATE', None)
    if template is not None:
        self.sql = template.format(**self.sql_template_params)
    else:
        self.sql = self.sql.format(**self.sql_template_params)
    BigQueryOperator.execute(self, context)
def test_execute(self, mock_hook):
    """All constructor kwargs — including encryption_configuration — are
    forwarded verbatim to the hook cursor's run_query."""
    enc_config = {'key': 'kk'}

    # The same kwargs are both given to the operator and expected on the
    # hook call, so build them once. task_id/conn settings are operator-only.
    common = dict(
        sql='Select * from test_table',
        destination_dataset_table=None,
        write_disposition='WRITE_EMPTY',
        allow_large_results=False,
        flatten_results=None,
        udf_config=None,
        maximum_billing_tier=None,
        maximum_bytes_billed=None,
        create_disposition='CREATE_IF_NEEDED',
        schema_update_options=(),
        query_params=None,
        labels=None,
        priority='INTERACTIVE',
        time_partitioning=None,
        api_resource_configs=None,
        cluster_fields=None,
        encryption_configuration=enc_config,
    )
    op = BigQueryOperator(
        task_id=TASK_ID,
        gcp_conn_id='google_cloud_default',
        use_legacy_sql=True,
        **common
    )
    op.execute(MagicMock())

    cursor = mock_hook.return_value.get_conn.return_value.cursor.return_value
    cursor.run_query.assert_called_once_with(**common)
def test_bigquery_operator_defaults(self, mock_hook):
    """With only ``sql`` set (plus dag/default_args), run_query gets the
    documented defaults and ``sql`` stays a string through template rendering.

    NOTE(review): this test name is repeated in this module; under unittest
    the later definition shadows earlier ones — consider renaming.
    """
    op = BigQueryOperator(
        task_id=TASK_ID,
        sql='Select * from test_table',
        dag=self.dag,
        default_args=self.args,
    )
    op.execute(None)

    run_query = mock_hook.return_value.get_conn().cursor().run_query
    run_query.assert_called_once_with(
        sql='Select * from test_table',
        destination_dataset_table=None,
        write_disposition='WRITE_EMPTY',
        allow_large_results=False,
        flatten_results=None,
        udf_config=None,
        maximum_billing_tier=None,
        maximum_bytes_billed=None,
        create_disposition='CREATE_IF_NEEDED',
        schema_update_options=(),
        query_params=None,
        labels=None,
        priority='INTERACTIVE',
        time_partitioning=None,
        api_resource_configs=None,
        cluster_fields=None,
    )

    # The rendered sql must remain a plain string on both the operator and
    # the task instance after template expansion.
    self.assertTrue(isinstance(op.sql, six.string_types))
    ti = TaskInstance(task=op, execution_date=DEFAULT_DATE)
    ti.render_templates()
    self.assertTrue(isinstance(ti.task.sql, six.string_types))