def test_notify(self):
    """notify=True must emit the ``--notify`` flag as the first command arg."""
    dag = DAG(DAG_ID, start_date=DEFAULT_DATE)

    with dag:
        task = QuboleOperator(
            task_id=TASK_ID, command_type='sparkcmd', notify=True, dag=dag)

        # assertEqual (not a bare assert) for a descriptive failure message,
        # consistent with the other tests in this suite.
        self.assertEqual(
            task.get_hook().create_cmd_args({'run_id': 'dummy'})[0],
            "--notify")
def test_get_hook(self):
    """get_hook() on a hivecmd operator should return a QuboleHook."""
    dag = DAG(DAG_ID, start_date=DEFAULT_DATE)

    with dag:
        task = QuboleOperator(task_id=TASK_ID, command_type='hivecmd', dag=dag)
        hook = task.get_hook()
        # assertIsInstance is the idiomatic type check; the original strict
        # ``__class__ ==`` comparison would needlessly reject subclasses.
        self.assertIsInstance(hook, QuboleHook)
def test_position_args_parameters(self):
    """Positional settings (``parameters`` / ``sub_command``) must be split
    into individual, ordered command arguments.
    """
    dag = DAG(DAG_ID, start_date=DEFAULT_DATE)

    with dag:
        task = QuboleOperator(
            task_id=TASK_ID, command_type='pigcmd',
            parameters="key1=value1 key2=value2", dag=dag)

        # Build the args once instead of re-invoking the hook per assertion.
        args = task.get_hook().create_cmd_args({'run_id': 'dummy'})
        self.assertEqual(args[1], "key1=value1")
        self.assertEqual(args[2], "key2=value2")

        cmd = ("s3distcp --src s3n://airflow/source_hadoopcmd "
               "--dest s3n://airflow/destination_hadoopcmd")
        task = QuboleOperator(
            task_id=TASK_ID + "_1", command_type='hadoopcmd',
            dag=dag, sub_command=cmd)

        # Each whitespace-separated token of the sub_command becomes its
        # own positional argument, in order.
        args = task.get_hook().create_cmd_args({'run_id': 'dummy'})
        self.assertEqual(args[1], "s3distcp")
        self.assertEqual(args[2], "--src")
        self.assertEqual(args[3], "s3n://airflow/source_hadoopcmd")
        self.assertEqual(args[4], "--dest")
        self.assertEqual(args[5], "s3n://airflow/destination_hadoopcmd")
def test_hyphen_args_note_id(self):
    """A ``note_id`` setting is rendered as one ``--note-id=<value>`` arg."""
    dag = DAG(DAG_ID, start_date=DEFAULT_DATE)

    with dag:
        task = QuboleOperator(
            task_id=TASK_ID, command_type='sparkcmd', note_id="123", dag=dag)
        cmd_args = task.get_hook().create_cmd_args({'run_id': 'dummy'})
        self.assertEqual(cmd_args[0], "--note-id=123")