class DatabricksHookTest(unittest.TestCase):
    """
    Tests for DatabricksHook.
    """
    @db.provide_session
    def setUp(self, session=None):
        # Point the default Databricks connection at the test credentials and
        # reset ``extra`` so state from a previous test run does not leak in.
        conn = session.query(Connection) \
            .filter(Connection.conn_id == DEFAULT_CONN_ID) \
            .first()
        conn.host = HOST
        conn.login = LOGIN
        conn.password = PASSWORD
        conn.extra = None
        session.commit()

        # retry_delay=0 keeps the retry tests from actually sleeping.
        self.hook = DatabricksHook(retry_delay=0)

    def test_parse_host_with_proper_host(self):
        host = self.hook._parse_host(HOST)
        self.assertEqual(host, HOST)

    def test_parse_host_with_scheme(self):
        # A host given with an explicit scheme is normalized to the bare host.
        host = self.hook._parse_host(HOST_WITH_SCHEME)
        self.assertEqual(host, HOST)

    def test_init_bad_retry_limit(self):
        # A retry limit below one is rejected at construction time.
        with self.assertRaises(ValueError):
            DatabricksHook(retry_limit=0)

    def test_do_api_call_retries_with_retryable_error(self):
        for exception in [
                requests_exceptions.ConnectionError,
                requests_exceptions.SSLError,
                requests_exceptions.Timeout,
                requests_exceptions.ConnectTimeout,
                requests_exceptions.HTTPError]:
            with mock.patch(
                    'airflow.contrib.hooks.databricks_hook.requests') as mock_requests:
                with mock.patch.object(self.hook.log, 'error') as mock_errors:
                    setup_mock_requests(mock_requests, exception)

                    with self.assertRaises(AirflowException):
                        self.hook._do_api_call(SUBMIT_RUN_ENDPOINT, {})

                    # One error is logged per failed attempt, up to the limit.
                    self.assertEqual(mock_errors.call_count,
                                     self.hook.retry_limit)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_do_api_call_does_not_retry_with_non_retryable_error(
            self, mock_requests):
        # A 4xx HTTPError is a client error and must fail immediately.
        setup_mock_requests(
            mock_requests, requests_exceptions.HTTPError, status_code=400)

        with mock.patch.object(self.hook.log, 'error') as mock_errors:
            with self.assertRaises(AirflowException):
                self.hook._do_api_call(SUBMIT_RUN_ENDPOINT, {})

            mock_errors.assert_not_called()

    def test_do_api_call_succeeds_after_retrying(self):
        for exception in [
                requests_exceptions.ConnectionError,
                requests_exceptions.SSLError,
                requests_exceptions.Timeout,
                requests_exceptions.ConnectTimeout,
                requests_exceptions.HTTPError]:
            with mock.patch(
                    'airflow.contrib.hooks.databricks_hook.requests') as mock_requests:
                with mock.patch.object(self.hook.log, 'error') as mock_errors:
                    # Fail twice, then return a valid response.
                    setup_mock_requests(
                        mock_requests,
                        exception,
                        error_count=2,
                        response_content={'run_id': '1'})

                    response = self.hook._do_api_call(SUBMIT_RUN_ENDPOINT, {})

                    self.assertEqual(mock_errors.call_count, 2)
                    self.assertEqual(response, {'run_id': '1'})

    @mock.patch('airflow.contrib.hooks.databricks_hook.sleep')
    def test_do_api_call_waits_between_retries(self, mock_sleep):
        retry_delay = 5
        self.hook = DatabricksHook(retry_delay=retry_delay)

        for exception in [
                requests_exceptions.ConnectionError,
                requests_exceptions.SSLError,
                requests_exceptions.Timeout,
                requests_exceptions.ConnectTimeout,
                requests_exceptions.HTTPError]:
            with mock.patch(
                    'airflow.contrib.hooks.databricks_hook.requests') as mock_requests:
                with mock.patch.object(self.hook.log, 'error'):
                    mock_sleep.reset_mock()
                    setup_mock_requests(mock_requests, exception)

                    with self.assertRaises(AirflowException):
                        self.hook._do_api_call(SUBMIT_RUN_ENDPOINT, {})

                    # Sleeps only between attempts: retry_limit - 1 sleeps.
                    self.assertEqual(
                        len(mock_sleep.mock_calls), self.hook.retry_limit - 1)
                    mock_sleep.assert_called_with(retry_delay)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_submit_run(self, mock_requests):
        mock_requests.post.return_value.json.return_value = {'run_id': '1'}
        json = {
            'notebook_task': NOTEBOOK_TASK,
            'new_cluster': NEW_CLUSTER
        }
        run_id = self.hook.submit_run(json)

        self.assertEqual(run_id, '1')
        mock_requests.post.assert_called_once_with(
            submit_run_endpoint(HOST),
            json={
                'notebook_task': NOTEBOOK_TASK,
                'new_cluster': NEW_CLUSTER,
            },
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_run_now(self, mock_requests):
        mock_requests.codes.ok = 200
        mock_requests.post.return_value.json.return_value = {'run_id': '1'}
        status_code_mock = mock.PropertyMock(return_value=200)
        type(mock_requests.post.return_value).status_code = status_code_mock
        json = {
            'notebook_params': NOTEBOOK_PARAMS,
            'jar_params': JAR_PARAMS,
            'job_id': JOB_ID
        }
        run_id = self.hook.run_now(json)

        self.assertEqual(run_id, '1')
        mock_requests.post.assert_called_once_with(
            run_now_endpoint(HOST),
            json={
                'notebook_params': NOTEBOOK_PARAMS,
                'jar_params': JAR_PARAMS,
                'job_id': JOB_ID
            },
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_get_run_page_url(self, mock_requests):
        mock_requests.get.return_value.json.return_value = GET_RUN_RESPONSE

        run_page_url = self.hook.get_run_page_url(RUN_ID)

        self.assertEqual(run_page_url, RUN_PAGE_URL)
        mock_requests.get.assert_called_once_with(
            get_run_endpoint(HOST),
            json={'run_id': RUN_ID},
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_get_run_state(self, mock_requests):
        mock_requests.get.return_value.json.return_value = GET_RUN_RESPONSE

        run_state = self.hook.get_run_state(RUN_ID)

        self.assertEqual(
            run_state,
            RunState(LIFE_CYCLE_STATE, RESULT_STATE, STATE_MESSAGE))
        mock_requests.get.assert_called_once_with(
            get_run_endpoint(HOST),
            json={'run_id': RUN_ID},
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_cancel_run(self, mock_requests):
        mock_requests.post.return_value.json.return_value = GET_RUN_RESPONSE

        self.hook.cancel_run(RUN_ID)

        mock_requests.post.assert_called_once_with(
            cancel_run_endpoint(HOST),
            json={'run_id': RUN_ID},
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_start_cluster(self, mock_requests):
        mock_requests.codes.ok = 200
        mock_requests.post.return_value.json.return_value = {}
        status_code_mock = mock.PropertyMock(return_value=200)
        type(mock_requests.post.return_value).status_code = status_code_mock

        self.hook.start_cluster({"cluster_id": CLUSTER_ID})

        mock_requests.post.assert_called_once_with(
            start_cluster_endpoint(HOST),
            json={'cluster_id': CLUSTER_ID},
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_restart_cluster(self, mock_requests):
        mock_requests.codes.ok = 200
        mock_requests.post.return_value.json.return_value = {}
        status_code_mock = mock.PropertyMock(return_value=200)
        type(mock_requests.post.return_value).status_code = status_code_mock

        self.hook.restart_cluster({"cluster_id": CLUSTER_ID})

        mock_requests.post.assert_called_once_with(
            restart_cluster_endpoint(HOST),
            json={'cluster_id': CLUSTER_ID},
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_terminate_cluster(self, mock_requests):
        mock_requests.codes.ok = 200
        mock_requests.post.return_value.json.return_value = {}
        status_code_mock = mock.PropertyMock(return_value=200)
        type(mock_requests.post.return_value).status_code = status_code_mock

        self.hook.terminate_cluster({"cluster_id": CLUSTER_ID})

        mock_requests.post.assert_called_once_with(
            terminate_cluster_endpoint(HOST),
            json={'cluster_id': CLUSTER_ID},
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)
class DatabricksHookTest(unittest.TestCase):
    """
    Tests for DatabricksHook.
    """
    @db.provide_session
    def setUp(self, session=None):
        # Configure the default Databricks connection for these tests.
        conn = session.query(Connection) \
            .filter(Connection.conn_id == DEFAULT_CONN_ID) \
            .first()
        conn.host = HOST
        conn.login = LOGIN
        conn.password = PASSWORD
        conn.extra = None
        session.commit()

        # Zero retry delay so retry tests run instantly.
        self.hook = DatabricksHook(retry_delay=0)

    def test_parse_host_with_proper_host(self):
        parsed = self.hook._parse_host(HOST)
        self.assertEqual(parsed, HOST)

    def test_parse_host_with_scheme(self):
        # Scheme-prefixed hosts are stripped down to the bare hostname.
        parsed = self.hook._parse_host(HOST_WITH_SCHEME)
        self.assertEqual(parsed, HOST)

    def test_init_bad_retry_limit(self):
        with self.assertRaises(ValueError):
            DatabricksHook(retry_limit=0)

    def test_do_api_call_retries_with_retryable_error(self):
        retryable = [
            requests_exceptions.ConnectionError,
            requests_exceptions.SSLError,
            requests_exceptions.Timeout,
            requests_exceptions.ConnectTimeout,
            requests_exceptions.HTTPError,
        ]
        for exc in retryable:
            patch_requests = mock.patch(
                'airflow.contrib.hooks.databricks_hook.requests')
            with patch_requests as mock_requests:
                with mock.patch.object(self.hook.log, 'error') as mock_errors:
                    setup_mock_requests(mock_requests, exc)

                    with self.assertRaises(AirflowException):
                        self.hook._do_api_call(SUBMIT_RUN_ENDPOINT, {})

                    # Every attempt logs an error, once per allowed retry.
                    self.assertEqual(mock_errors.call_count,
                                     self.hook.retry_limit)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_do_api_call_does_not_retry_with_non_retryable_error(
            self, mock_requests):
        # A 400-level error is not retryable and must raise right away.
        setup_mock_requests(
            mock_requests, requests_exceptions.HTTPError, status_code=400)

        with mock.patch.object(self.hook.log, 'error') as mock_errors:
            with self.assertRaises(AirflowException):
                self.hook._do_api_call(SUBMIT_RUN_ENDPOINT, {})

            mock_errors.assert_not_called()

    def test_do_api_call_succeeds_after_retrying(self):
        retryable = [
            requests_exceptions.ConnectionError,
            requests_exceptions.SSLError,
            requests_exceptions.Timeout,
            requests_exceptions.ConnectTimeout,
            requests_exceptions.HTTPError,
        ]
        for exc in retryable:
            patch_requests = mock.patch(
                'airflow.contrib.hooks.databricks_hook.requests')
            with patch_requests as mock_requests:
                with mock.patch.object(self.hook.log, 'error') as mock_errors:
                    # Two failures followed by a successful response.
                    setup_mock_requests(
                        mock_requests,
                        exc,
                        error_count=2,
                        response_content={'run_id': '1'})

                    response = self.hook._do_api_call(SUBMIT_RUN_ENDPOINT, {})

                    self.assertEqual(mock_errors.call_count, 2)
                    self.assertEqual(response, {'run_id': '1'})

    @mock.patch('airflow.contrib.hooks.databricks_hook.sleep')
    def test_do_api_call_waits_between_retries(self, mock_sleep):
        retry_delay = 5
        self.hook = DatabricksHook(retry_delay=retry_delay)

        retryable = [
            requests_exceptions.ConnectionError,
            requests_exceptions.SSLError,
            requests_exceptions.Timeout,
            requests_exceptions.ConnectTimeout,
            requests_exceptions.HTTPError,
        ]
        for exc in retryable:
            patch_requests = mock.patch(
                'airflow.contrib.hooks.databricks_hook.requests')
            with patch_requests as mock_requests:
                with mock.patch.object(self.hook.log, 'error'):
                    mock_sleep.reset_mock()
                    setup_mock_requests(mock_requests, exc)

                    with self.assertRaises(AirflowException):
                        self.hook._do_api_call(SUBMIT_RUN_ENDPOINT, {})

                    # No sleep after the final attempt: retry_limit - 1 calls.
                    self.assertEqual(
                        len(mock_sleep.mock_calls), self.hook.retry_limit - 1)
                    mock_sleep.assert_called_with(retry_delay)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_submit_run(self, mock_requests):
        mock_requests.post.return_value.json.return_value = {'run_id': '1'}
        payload = {
            'notebook_task': NOTEBOOK_TASK,
            'new_cluster': NEW_CLUSTER
        }

        run_id = self.hook.submit_run(payload)

        self.assertEqual(run_id, '1')
        mock_requests.post.assert_called_once_with(
            submit_run_endpoint(HOST),
            json={
                'notebook_task': NOTEBOOK_TASK,
                'new_cluster': NEW_CLUSTER,
            },
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_run_now(self, mock_requests):
        mock_requests.codes.ok = 200
        mock_requests.post.return_value.json.return_value = {'run_id': '1'}
        status_code_mock = mock.PropertyMock(return_value=200)
        type(mock_requests.post.return_value).status_code = status_code_mock
        payload = {
            'notebook_params': NOTEBOOK_PARAMS,
            'jar_params': JAR_PARAMS,
            'job_id': JOB_ID
        }

        run_id = self.hook.run_now(payload)

        self.assertEqual(run_id, '1')
        mock_requests.post.assert_called_once_with(
            run_now_endpoint(HOST),
            json={
                'notebook_params': NOTEBOOK_PARAMS,
                'jar_params': JAR_PARAMS,
                'job_id': JOB_ID
            },
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_get_run_page_url(self, mock_requests):
        mock_requests.get.return_value.json.return_value = GET_RUN_RESPONSE

        page_url = self.hook.get_run_page_url(RUN_ID)

        self.assertEqual(page_url, RUN_PAGE_URL)
        mock_requests.get.assert_called_once_with(
            get_run_endpoint(HOST),
            json={'run_id': RUN_ID},
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_get_run_state(self, mock_requests):
        mock_requests.get.return_value.json.return_value = GET_RUN_RESPONSE

        state = self.hook.get_run_state(RUN_ID)

        expected = RunState(LIFE_CYCLE_STATE, RESULT_STATE, STATE_MESSAGE)
        self.assertEqual(state, expected)
        mock_requests.get.assert_called_once_with(
            get_run_endpoint(HOST),
            json={'run_id': RUN_ID},
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_cancel_run(self, mock_requests):
        mock_requests.post.return_value.json.return_value = GET_RUN_RESPONSE

        self.hook.cancel_run(RUN_ID)

        mock_requests.post.assert_called_once_with(
            cancel_run_endpoint(HOST),
            json={'run_id': RUN_ID},
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_start_cluster(self, mock_requests):
        mock_requests.codes.ok = 200
        mock_requests.post.return_value.json.return_value = {}
        status_code_mock = mock.PropertyMock(return_value=200)
        type(mock_requests.post.return_value).status_code = status_code_mock

        self.hook.start_cluster({"cluster_id": CLUSTER_ID})

        mock_requests.post.assert_called_once_with(
            start_cluster_endpoint(HOST),
            json={'cluster_id': CLUSTER_ID},
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_restart_cluster(self, mock_requests):
        mock_requests.codes.ok = 200
        mock_requests.post.return_value.json.return_value = {}
        status_code_mock = mock.PropertyMock(return_value=200)
        type(mock_requests.post.return_value).status_code = status_code_mock

        self.hook.restart_cluster({"cluster_id": CLUSTER_ID})

        mock_requests.post.assert_called_once_with(
            restart_cluster_endpoint(HOST),
            json={'cluster_id': CLUSTER_ID},
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_terminate_cluster(self, mock_requests):
        mock_requests.codes.ok = 200
        mock_requests.post.return_value.json.return_value = {}
        status_code_mock = mock.PropertyMock(return_value=200)
        type(mock_requests.post.return_value).status_code = status_code_mock

        self.hook.terminate_cluster({"cluster_id": CLUSTER_ID})

        mock_requests.post.assert_called_once_with(
            terminate_cluster_endpoint(HOST),
            json={'cluster_id': CLUSTER_ID},
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)
class DatabricksHookTest(unittest.TestCase):
    """
    Tests for DatabricksHook.
    """
    @db.provide_session
    def setUp(self, session=None):
        # Configure the default Databricks connection with test credentials.
        conn = session.query(Connection) \
            .filter(Connection.conn_id == DEFAULT_CONN_ID) \
            .first()
        conn.host = HOST
        conn.login = LOGIN
        conn.password = PASSWORD
        session.commit()

        self.hook = DatabricksHook()

    def test_parse_host_with_proper_host(self):
        host = self.hook._parse_host(HOST)
        self.assertEqual(host, HOST)

    def test_parse_host_with_scheme(self):
        # A host given with an explicit scheme is normalized to the bare host.
        host = self.hook._parse_host(HOST_WITH_SCHEME)
        self.assertEqual(host, HOST)

    def test_init_bad_retry_limit(self):
        # This hook version asserts on the retry limit at construction time.
        with self.assertRaises(AssertionError):
            DatabricksHook(retry_limit=0)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_do_api_call_with_error_retry(self, mock_requests):
        for exception in [
                requests_exceptions.ConnectionError,
                requests_exceptions.Timeout]:
            # NOTE(review): patched `self.hook.log` (not the deprecated
            # `.logger`) for consistency with the sibling test class below.
            with mock.patch.object(self.hook.log, 'error') as mock_errors:
                mock_requests.reset_mock()
                mock_requests.post.side_effect = exception()

                with self.assertRaises(AirflowException):
                    self.hook._do_api_call(SUBMIT_RUN_ENDPOINT, {})

                # One error is logged per failed attempt, up to the limit.
                self.assertEqual(len(mock_errors.mock_calls),
                                 self.hook.retry_limit)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_do_api_call_with_bad_status_code(self, mock_requests):
        # A non-OK status code surfaces as an AirflowException.
        mock_requests.codes.ok = 200
        status_code_mock = mock.PropertyMock(return_value=500)
        type(mock_requests.post.return_value).status_code = status_code_mock
        with self.assertRaises(AirflowException):
            self.hook._do_api_call(SUBMIT_RUN_ENDPOINT, {})

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_submit_run(self, mock_requests):
        mock_requests.codes.ok = 200
        mock_requests.post.return_value.json.return_value = {'run_id': '1'}
        status_code_mock = mock.PropertyMock(return_value=200)
        type(mock_requests.post.return_value).status_code = status_code_mock
        json = {
            'notebook_task': NOTEBOOK_TASK,
            'new_cluster': NEW_CLUSTER
        }
        run_id = self.hook.submit_run(json)

        self.assertEqual(run_id, '1')
        mock_requests.post.assert_called_once_with(
            submit_run_endpoint(HOST),
            json={
                'notebook_task': NOTEBOOK_TASK,
                'new_cluster': NEW_CLUSTER,
            },
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_get_run_page_url(self, mock_requests):
        mock_requests.codes.ok = 200
        mock_requests.get.return_value.json.return_value = GET_RUN_RESPONSE
        status_code_mock = mock.PropertyMock(return_value=200)
        type(mock_requests.get.return_value).status_code = status_code_mock

        run_page_url = self.hook.get_run_page_url(RUN_ID)

        self.assertEqual(run_page_url, RUN_PAGE_URL)
        mock_requests.get.assert_called_once_with(
            get_run_endpoint(HOST),
            json={'run_id': RUN_ID},
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_get_run_state(self, mock_requests):
        mock_requests.codes.ok = 200
        mock_requests.get.return_value.json.return_value = GET_RUN_RESPONSE
        status_code_mock = mock.PropertyMock(return_value=200)
        type(mock_requests.get.return_value).status_code = status_code_mock

        run_state = self.hook.get_run_state(RUN_ID)

        self.assertEqual(
            run_state,
            RunState(LIFE_CYCLE_STATE, RESULT_STATE, STATE_MESSAGE))
        mock_requests.get.assert_called_once_with(
            get_run_endpoint(HOST),
            json={'run_id': RUN_ID},
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_cancel_run(self, mock_requests):
        mock_requests.codes.ok = 200
        mock_requests.post.return_value.json.return_value = GET_RUN_RESPONSE
        status_code_mock = mock.PropertyMock(return_value=200)
        type(mock_requests.post.return_value).status_code = status_code_mock

        self.hook.cancel_run(RUN_ID)

        mock_requests.post.assert_called_once_with(
            cancel_run_endpoint(HOST),
            json={'run_id': RUN_ID},
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)
class DatabricksHookTest(unittest.TestCase):
    """
    Tests for DatabricksHook.
    """
    @db.provide_session
    def setUp(self, session=None):
        # Configure the default Databricks connection with test credentials.
        conn = session.query(Connection) \
            .filter(Connection.conn_id == DEFAULT_CONN_ID) \
            .first()
        conn.host = HOST
        conn.login = LOGIN
        conn.password = PASSWORD
        session.commit()

        self.hook = DatabricksHook()

    def test_parse_host_with_proper_host(self):
        host = self.hook._parse_host(HOST)
        self.assertEqual(host, HOST)

    def test_parse_host_with_scheme(self):
        # A host given with an explicit scheme is normalized to the bare host.
        host = self.hook._parse_host(HOST_WITH_SCHEME)
        self.assertEqual(host, HOST)

    def test_init_bad_retry_limit(self):
        # This hook version asserts on the retry limit at construction time.
        with self.assertRaises(AssertionError):
            DatabricksHook(retry_limit=0)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_do_api_call_with_error_retry(self, mock_requests):
        for exception in [
                requests_exceptions.ConnectionError,
                requests_exceptions.Timeout]:
            with mock.patch.object(self.hook.log, 'error') as mock_errors:
                mock_requests.reset_mock()
                mock_requests.post.side_effect = exception()

                with self.assertRaises(AirflowException):
                    self.hook._do_api_call(SUBMIT_RUN_ENDPOINT, {})

                # One error is logged per failed attempt, up to the limit.
                self.assertEqual(len(mock_errors.mock_calls),
                                 self.hook.retry_limit)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_do_api_call_with_bad_status_code(self, mock_requests):
        # A non-OK status code surfaces as an AirflowException.
        mock_requests.codes.ok = 200
        status_code_mock = mock.PropertyMock(return_value=500)
        type(mock_requests.post.return_value).status_code = status_code_mock
        with self.assertRaises(AirflowException):
            self.hook._do_api_call(SUBMIT_RUN_ENDPOINT, {})

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_submit_run(self, mock_requests):
        mock_requests.codes.ok = 200
        mock_requests.post.return_value.json.return_value = {'run_id': '1'}
        status_code_mock = mock.PropertyMock(return_value=200)
        type(mock_requests.post.return_value).status_code = status_code_mock
        json = {
            'notebook_task': NOTEBOOK_TASK,
            'new_cluster': NEW_CLUSTER
        }
        run_id = self.hook.submit_run(json)

        self.assertEqual(run_id, '1')
        mock_requests.post.assert_called_once_with(
            submit_run_endpoint(HOST),
            json={
                'notebook_task': NOTEBOOK_TASK,
                'new_cluster': NEW_CLUSTER,
            },
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_get_run_page_url(self, mock_requests):
        mock_requests.codes.ok = 200
        mock_requests.get.return_value.json.return_value = GET_RUN_RESPONSE
        status_code_mock = mock.PropertyMock(return_value=200)
        type(mock_requests.get.return_value).status_code = status_code_mock

        run_page_url = self.hook.get_run_page_url(RUN_ID)

        self.assertEqual(run_page_url, RUN_PAGE_URL)
        mock_requests.get.assert_called_once_with(
            get_run_endpoint(HOST),
            json={'run_id': RUN_ID},
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_get_run_state(self, mock_requests):
        mock_requests.codes.ok = 200
        mock_requests.get.return_value.json.return_value = GET_RUN_RESPONSE
        status_code_mock = mock.PropertyMock(return_value=200)
        type(mock_requests.get.return_value).status_code = status_code_mock

        run_state = self.hook.get_run_state(RUN_ID)

        self.assertEqual(
            run_state,
            RunState(LIFE_CYCLE_STATE, RESULT_STATE, STATE_MESSAGE))
        mock_requests.get.assert_called_once_with(
            get_run_endpoint(HOST),
            json={'run_id': RUN_ID},
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)

    @mock.patch('airflow.contrib.hooks.databricks_hook.requests')
    def test_cancel_run(self, mock_requests):
        mock_requests.codes.ok = 200
        mock_requests.post.return_value.json.return_value = GET_RUN_RESPONSE
        status_code_mock = mock.PropertyMock(return_value=200)
        type(mock_requests.post.return_value).status_code = status_code_mock

        self.hook.cancel_run(RUN_ID)

        mock_requests.post.assert_called_once_with(
            cancel_run_endpoint(HOST),
            json={'run_id': RUN_ID},
            auth=(LOGIN, PASSWORD),
            headers=USER_AGENT_HEADER,
            timeout=self.hook.timeout_seconds)