def test_exec_failure(self, db_mock_class):
        """
        Test the execute function in case where the run failed.
        """
        run = {
            'notebook_params': NOTEBOOK_PARAMS,
            'notebook_task': NOTEBOOK_TASK,
            'jar_params': JAR_PARAMS
        }
        op = DatabricksRunNowOperator(task_id=TASK_ID, job_id=JOB_ID, json=run)
        db_mock = db_mock_class.return_value
        db_mock.run_now.return_value = 1
        db_mock.get_run_state.return_value = RunState('TERMINATED', 'FAILED', '')

        with self.assertRaises(AirflowException):
            op.execute(None)

        expected = databricks_operator._deep_string_coerce({
            'notebook_params': NOTEBOOK_PARAMS,
            'notebook_task': NOTEBOOK_TASK,
            'jar_params': JAR_PARAMS,
            'job_id': JOB_ID
        })
        db_mock_class.assert_called_once_with(
            DEFAULT_CONN_ID,
            retry_limit=op.databricks_retry_limit,
            retry_delay=op.databricks_retry_delay)
        db_mock.run_now.assert_called_once_with(expected)
        db_mock.get_run_page_url.assert_called_once_with(RUN_ID)
        db_mock.get_run_state.assert_called_once_with(RUN_ID)
        self.assertEquals(RUN_ID, op.run_id)
    def test_exec_failure(self, db_mock_class):
        """
        Test the execute function in case where the run failed.

        Verifies the hook construction arguments, the ``run_now`` payload,
        and that a FAILED terminal run state raises AirflowException.
        """
        run = {
            'notebook_params': NOTEBOOK_PARAMS,
            'notebook_task': NOTEBOOK_TASK,
            'jar_params': JAR_PARAMS
        }
        op = DatabricksRunNowOperator(task_id=TASK_ID, job_id=JOB_ID, json=run)
        db_mock = db_mock_class.return_value
        db_mock.run_now.return_value = 1
        db_mock.get_run_state.return_value = RunState('TERMINATED', 'FAILED',
                                                      '')

        with self.assertRaises(AirflowException):
            op.execute(None)

        expected = databricks_operator._deep_string_coerce({
            'notebook_params': NOTEBOOK_PARAMS,
            'notebook_task': NOTEBOOK_TASK,
            'jar_params': JAR_PARAMS,
            'job_id': JOB_ID
        })
        db_mock_class.assert_called_once_with(
            DEFAULT_CONN_ID,
            retry_limit=op.databricks_retry_limit,
            retry_delay=op.databricks_retry_delay)
        db_mock.run_now.assert_called_once_with(expected)
        db_mock.get_run_page_url.assert_called_once_with(RUN_ID)
        db_mock.get_run_state.assert_called_once_with(RUN_ID)
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use the canonical assertEqual.
        self.assertEqual(RUN_ID, op.run_id)
    def test_on_kill(self, db_mock_class):
        """on_kill should cancel the tracked run through the hook."""
        json_payload = {
            'notebook_params': NOTEBOOK_PARAMS,
            'notebook_task': NOTEBOOK_TASK,
            'jar_params': JAR_PARAMS
        }
        operator = DatabricksRunNowOperator(
            task_id=TASK_ID, job_id=JOB_ID, json=json_payload)
        hook_mock = db_mock_class.return_value
        operator.run_id = RUN_ID

        operator.on_kill()
        hook_mock.cancel_run.assert_called_once_with(RUN_ID)
    def test_on_kill(self, db_mock_class):
        """Killing the task must cancel the in-flight run by its run id."""
        payload = {
            'notebook_params': NOTEBOOK_PARAMS,
            'notebook_task': NOTEBOOK_TASK,
            'jar_params': JAR_PARAMS
        }
        op_under_test = DatabricksRunNowOperator(
            task_id=TASK_ID, job_id=JOB_ID, json=payload)
        mocked_hook = db_mock_class.return_value
        op_under_test.run_id = RUN_ID

        op_under_test.on_kill()
        mocked_hook.cancel_run.assert_called_once_with(RUN_ID)
    def test_init_with_templating(self):
        """Templated fields in the json payload should render against the
        provided context and leave the remaining keys untouched."""
        templated_json = {
            'notebook_params': NOTEBOOK_PARAMS,
            'jar_params': TEMPLATED_JAR_PARAMS
        }

        dag = DAG('test', start_date=datetime.now())
        op = DatabricksRunNowOperator(
            dag=dag, task_id=TASK_ID, job_id=JOB_ID, json=templated_json)
        op.json = op.render_template('json', op.json, {'ds': DATE})

        rendered = databricks_operator._deep_string_coerce({
            'notebook_params': NOTEBOOK_PARAMS,
            'jar_params': RENDERED_TEMPLATED_JAR_PARAMS,
            'job_id': JOB_ID
        })
        self.assertDictEqual(rendered, op.json)
 def test_init_with_bad_type(self):
     json = {'test': datetime.now()}
     # Looks a bit weird since we have to escape regex reserved symbols.
     exception_message = 'Type \<(type|class) \'datetime.datetime\'\> used ' + \
                         'for parameter json\[test\] is not a number or a string'
     with self.assertRaisesRegexp(AirflowException, exception_message):
         DatabricksRunNowOperator(task_id=TASK_ID, job_id=JOB_ID, json=json)
    def test_init_with_merging(self):
        """
        Test the initializer when json and other named parameters are both
        provided. The named parameters should override top level keys in the
        json dict.
        """
        override_notebook_params = {'workers': 999}
        base_json = {'notebook_params': NOTEBOOK_PARAMS, 'jar_params': JAR_PARAMS}

        op = DatabricksRunNowOperator(task_id=TASK_ID,
                                      json=base_json,
                                      job_id=JOB_ID,
                                      notebook_params=override_notebook_params,
                                      python_params=PYTHON_PARAMS,
                                      spark_submit_params=SPARK_SUBMIT_PARAMS)

        # notebook_params from the keyword argument wins over the json dict;
        # the named param keys and the job id are merged in alongside.
        merged = databricks_operator._deep_string_coerce({
            'notebook_params': override_notebook_params,
            'jar_params': JAR_PARAMS,
            'python_params': PYTHON_PARAMS,
            'spark_submit_params': SPARK_SUBMIT_PARAMS,
            'job_id': JOB_ID
        })

        self.assertDictEqual(merged, op.json)
    def test_init_with_json(self):
        """
        Test the initializer with json data.
        """
        payload = {
            'notebook_params': NOTEBOOK_PARAMS,
            'jar_params': JAR_PARAMS,
            'python_params': PYTHON_PARAMS,
            'spark_submit_params': SPARK_SUBMIT_PARAMS
        }
        op = DatabricksRunNowOperator(task_id=TASK_ID,
                                      job_id=JOB_ID,
                                      json=payload)

        # The stored json should be the payload plus the job_id, coerced to
        # strings by _deep_string_coerce.
        coerced = databricks_operator._deep_string_coerce({
            'notebook_params': NOTEBOOK_PARAMS,
            'jar_params': JAR_PARAMS,
            'python_params': PYTHON_PARAMS,
            'spark_submit_params': SPARK_SUBMIT_PARAMS,
            'job_id': JOB_ID
        })

        self.assertDictEqual(coerced, op.json)
    def test_init_with_named_parameters(self):
        """
        Test the initializer with the named parameters.
        """
        op = DatabricksRunNowOperator(job_id=JOB_ID, task_id=TASK_ID)
        # NOTE(review): the literal 42 is presumably the value of JOB_ID,
        # written out so the assertion is independent of the constant —
        # confirm against the module-level definition.
        expected = databricks_operator._deep_string_coerce({'job_id': 42})

        self.assertDictEqual(expected, op.json)
    def test_init_with_templating(self):
        """Rendering the templated json field should substitute the context
        values while other keys pass through unchanged."""
        raw_json = {
            'notebook_params': NOTEBOOK_PARAMS,
            'jar_params': TEMPLATED_JAR_PARAMS
        }

        dag = DAG('test', start_date=datetime.now())
        operator = DatabricksRunNowOperator(dag=dag,
                                            task_id=TASK_ID,
                                            job_id=JOB_ID,
                                            json=raw_json)
        operator.json = operator.render_template(
            'json', operator.json, {'ds': DATE})

        rendered_expectation = databricks_operator._deep_string_coerce({
            'notebook_params': NOTEBOOK_PARAMS,
            'jar_params': RENDERED_TEMPLATED_JAR_PARAMS,
            'job_id': JOB_ID
        })
        self.assertDictEqual(rendered_expectation, operator.json)