def test_exec_failure(self, db_mock_class):
    """
    Test ``execute`` when the Databricks run terminates in a FAILED state.

    The operator must raise ``AirflowException`` and still record the
    run id returned by ``submit_run``.
    """
    run = {
        'new_cluster': NEW_CLUSTER,
        'notebook_task': NOTEBOOK_TASK,
    }
    op = DatabricksSubmitRunOperator(task_id=TASK_ID, json=run)
    db_mock = db_mock_class.return_value
    db_mock.submit_run.return_value = 1
    db_mock.get_run_state.return_value = RunState('TERMINATED', 'FAILED', '')

    with self.assertRaises(AirflowException):
        op.execute(None)

    # The operator injects ``run_name`` (the task id) on top of the
    # caller-supplied json payload.
    expected = op._deep_string_coerce({
        'new_cluster': NEW_CLUSTER,
        'notebook_task': NOTEBOOK_TASK,
        'run_name': TASK_ID,
    })
    db_mock_class.assert_called_once_with(
        DEFAULT_CONN_ID,
        retry_limit=op.databricks_retry_limit,
        retry_delay=op.databricks_retry_delay)
    db_mock.submit_run.assert_called_once_with(expected)
    db_mock.get_run_page_url.assert_called_once_with(RUN_ID)
    db_mock.get_run_state.assert_called_once_with(RUN_ID)
    # assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual.
    self.assertEqual(RUN_ID, op.run_id)
    def test_exec_failure(self, db_mock_class):
        """
        Test ``execute`` when the Databricks run terminates in a FAILED state.

        The operator must raise ``AirflowException`` and still record the
        run id returned by ``submit_run``.
        """
        run = {
            'new_cluster': NEW_CLUSTER,
            'notebook_task': NOTEBOOK_TASK,
        }
        op = DatabricksSubmitRunOperator(task_id=TASK_ID, json=run)
        db_mock = db_mock_class.return_value
        db_mock.submit_run.return_value = 1
        db_mock.get_run_state.return_value = RunState('TERMINATED', 'FAILED',
                                                      '')

        with self.assertRaises(AirflowException):
            op.execute(None)

        # The operator injects ``run_name`` (the task id) on top of the
        # caller-supplied json payload.
        expected = op._deep_string_coerce({
            'new_cluster': NEW_CLUSTER,
            'notebook_task': NOTEBOOK_TASK,
            'run_name': TASK_ID,
        })
        db_mock_class.assert_called_once_with(
            DEFAULT_CONN_ID, retry_limit=op.databricks_retry_limit)
        db_mock.submit_run.assert_called_once_with(expected)
        db_mock.get_run_page_url.assert_called_once_with(RUN_ID)
        db_mock.get_run_state.assert_called_once_with(RUN_ID)
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual.
        self.assertEqual(RUN_ID, op.run_id)
 def test_init_with_named_parameters(self):
     """
     Test the initializer with the named parameters.
     """
     operator = DatabricksSubmitRunOperator(
         task_id=TASK_ID,
         new_cluster=NEW_CLUSTER,
         notebook_task=NOTEBOOK_TASK)
     # ``run_name`` (the task id) is added on top of the keyword arguments.
     payload = {
         'new_cluster': NEW_CLUSTER,
         'notebook_task': NOTEBOOK_TASK,
         'run_name': TASK_ID,
     }
     self.assertDictEqual(operator._deep_string_coerce(payload), operator.json)
 def test_init_with_named_parameters(self):
     """
     Test the initializer with the named parameters.
     """
     op = DatabricksSubmitRunOperator(
         task_id=TASK_ID, new_cluster=NEW_CLUSTER, notebook_task=NOTEBOOK_TASK)
     # ``run_name`` defaults to the task id when not supplied explicitly.
     want = op._deep_string_coerce(
         dict(new_cluster=NEW_CLUSTER,
              notebook_task=NOTEBOOK_TASK,
              run_name=TASK_ID))
     self.assertDictEqual(want, op.json)
 def test_init_with_json(self):
     """
     Test the initializer with json data.
     """
     run_json = {
         'new_cluster': NEW_CLUSTER,
         'notebook_task': NOTEBOOK_TASK,
     }
     op = DatabricksSubmitRunOperator(task_id=TASK_ID, json=run_json)
     # The task id is injected as ``run_name`` on top of the json payload.
     want = op._deep_string_coerce({
         'new_cluster': NEW_CLUSTER,
         'notebook_task': NOTEBOOK_TASK,
         'run_name': TASK_ID,
     })
     self.assertDictEqual(want, op.json)
 def test_init_with_templating(self):
     """
     Test that the templated ``json`` field is rendered against the context.
     """
     payload = {
         'new_cluster': NEW_CLUSTER,
         'notebook_task': TEMPLATED_NOTEBOOK_TASK,
     }
     dag = DAG('test', start_date=datetime.now())
     op = DatabricksSubmitRunOperator(dag=dag, task_id=TASK_ID, json=payload)
     op.json = op.render_template('json', op.json, {'ds': DATE})
     rendered = op._deep_string_coerce({
         'new_cluster': NEW_CLUSTER,
         'notebook_task': RENDERED_TEMPLATED_NOTEBOOK_TASK,
         'run_name': TASK_ID,
     })
     self.assertDictEqual(rendered, op.json)
 def test_init_with_templating(self):
     """
     Test that templated fields inside ``json`` are resolved by
     ``render_template``.
     """
     dag = DAG('test', start_date=datetime.now())
     op = DatabricksSubmitRunOperator(
         dag=dag,
         task_id=TASK_ID,
         json={
             'new_cluster': NEW_CLUSTER,
             'notebook_task': TEMPLATED_NOTEBOOK_TASK,
         })
     op.json = op.render_template('json', op.json, {'ds': DATE})
     want = op._deep_string_coerce({
         'new_cluster': NEW_CLUSTER,
         'notebook_task': RENDERED_TEMPLATED_NOTEBOOK_TASK,
         'run_name': TASK_ID,
     })
     self.assertDictEqual(want, op.json)
 def test_init_with_json(self):
     """
     Test the initializer with json data.
     """
     op = DatabricksSubmitRunOperator(
         task_id=TASK_ID,
         json={
             'new_cluster': NEW_CLUSTER,
             'notebook_task': NOTEBOOK_TASK,
         })
     # Besides the json keys, the operator records the task id as
     # ``run_name``.
     expected_payload = op._deep_string_coerce(
         dict(new_cluster=NEW_CLUSTER,
              notebook_task=NOTEBOOK_TASK,
              run_name=TASK_ID))
     self.assertDictEqual(expected_payload, op.json)
 def test_deep_string_coerce(self):
     """
     Numbers are coerced to strings recursively; tuples come back as lists.
     """
     op = DatabricksSubmitRunOperator(task_id='test')
     raw = {
         'test_int': 1,
         'test_float': 1.0,
         'test_dict': {'key': 'value'},
         'test_list': [1, 1.0, 'a', 'b'],
         'test_tuple': (1, 1.0, 'a', 'b'),
     }
     coerced = {
         'test_int': '1',
         'test_float': '1.0',
         'test_dict': {'key': 'value'},
         'test_list': ['1', '1.0', 'a', 'b'],
         'test_tuple': ['1', '1.0', 'a', 'b'],
     }
     self.assertDictEqual(op._deep_string_coerce(raw), coerced)
 def test_deep_string_coerce(self):
     """
     ``_deep_string_coerce`` turns scalars into strings at every nesting
     level while leaving existing strings untouched.
     """
     operator = DatabricksSubmitRunOperator(task_id='test')
     source = {
         'test_int': 1,
         'test_float': 1.0,
         'test_dict': {'key': 'value'},
         'test_list': [1, 1.0, 'a', 'b'],
         'test_tuple': (1, 1.0, 'a', 'b'),
     }
     result = operator._deep_string_coerce(source)
     self.assertDictEqual(result, {
         'test_int': '1',
         'test_float': '1.0',
         'test_dict': {'key': 'value'},
         'test_list': ['1', '1.0', 'a', 'b'],
         'test_tuple': ['1', '1.0', 'a', 'b'],
     })
 def test_init_with_merging(self):
     """
     Test the initializer when json and other named parameters are both
     provided. The named parameters should override top level keys in the
     json dict.
     """
     cluster_override = {'workers': 999}
     base_json = {
         'new_cluster': NEW_CLUSTER,
         'notebook_task': NOTEBOOK_TASK,
     }
     op = DatabricksSubmitRunOperator(
         task_id=TASK_ID, json=base_json, new_cluster=cluster_override)
     # The named ``new_cluster`` wins over the one inside ``json``.
     want = op._deep_string_coerce({
         'new_cluster': cluster_override,
         'notebook_task': NOTEBOOK_TASK,
         'run_name': TASK_ID,
     })
     self.assertDictEqual(want, op.json)
 def test_init_with_merging(self):
     """
     Test the initializer when json and other named parameters are both
     provided. The named parameters should override top level keys in the
     json dict.
     """
     replacement_cluster = {'workers': 999}
     op = DatabricksSubmitRunOperator(
         task_id=TASK_ID,
         json={
             'new_cluster': NEW_CLUSTER,
             'notebook_task': NOTEBOOK_TASK,
         },
         new_cluster=replacement_cluster)
     # The keyword argument takes precedence over the ``json`` entry.
     expected_payload = op._deep_string_coerce(
         dict(new_cluster=replacement_cluster,
              notebook_task=NOTEBOOK_TASK,
              run_name=TASK_ID))
     self.assertDictEqual(expected_payload, op.json)