def test_position_args_parameters(self):
    """Pig command `parameters` should expand into positional cmd args."""
    dag = DAG(DAG_ID, start_date=DEFAULT_DATE)
    with dag:
        task = QuboleOperator(task_id=TASK_ID, command_type='pigcmd',
                              parameters="key1=value1 key2=value2", dag=dag)
    # Build the args once: each get_hook().create_cmd_args(...) call
    # constructs a fresh hook and recomputes the whole list — redundant
    # work that can also mask ordering differences between assertions.
    args = task.get_hook().create_cmd_args({'run_id': 'dummy'})
    self.assertEqual(args[1], "key1=value1")
    self.assertEqual(args[2], "key2=value2")
def test_position_args_parameters(self):
    """Pig command `parameters` should expand into positional cmd args."""
    dag = DAG(DAG_ID, start_date=DEFAULT_DATE)
    with dag:
        task = QuboleOperator(task_id=TASK_ID, command_type='pigcmd',
                              parameters="key1=value1 key2=value2", dag=dag)
    # Compute the arg list a single time rather than rebuilding the hook
    # and re-running create_cmd_args for every assertion.
    args = task.get_hook().create_cmd_args({'run_id': 'dummy'})
    self.assertEqual(args[1], "key1=value1")
    self.assertEqual(args[2], "key2=value2")
def test_hyphen_args_note_id(self):
    """A sparkcmd `note_id` is rendered as a hyphenated --note-id argument."""
    dag = DAG(DAG_ID, start_date=DEFAULT_DATE)
    with dag:
        spark_task = QuboleOperator(task_id=TASK_ID, command_type='sparkcmd',
                                    note_id="123", dag=dag)
    cmd_args = spark_task.get_hook().create_cmd_args({'run_id': 'dummy'})
    self.assertEqual(cmd_args[0], "--note-id=123")
def test_get_hook(self):
    """get_hook() on a hivecmd operator hands back a QuboleHook."""
    dag = DAG(DAG_ID, start_date=DEFAULT_DATE)
    with dag:
        hive_task = QuboleOperator(task_id=TASK_ID, command_type='hivecmd',
                                   dag=dag)
    returned_hook = hive_task.get_hook()
    # Exact-class comparison, matching the original assertion semantics
    # (assertIsInstance would also accept subclasses).
    self.assertEqual(returned_hook.__class__, QuboleHook)
def test_position_args_parameters(self):
    """pigcmd `parameters` and hadoopcmd `sub_command` become positional args."""
    dag = DAG(DAG_ID, start_date=DEFAULT_DATE)
    with dag:
        task = QuboleOperator(task_id=TASK_ID, command_type='pigcmd',
                              parameters="key1=value1 key2=value2", dag=dag)

        # Hoist the invariant call: the original rebuilt the hook and the
        # whole arg list once per assertion (7 times in total).
        args = task.get_hook().create_cmd_args({'run_id': 'dummy'})
        self.assertEqual(args[1], "key1=value1")
        self.assertEqual(args[2], "key2=value2")

        cmd = "s3distcp --src s3n://airflow/source_hadoopcmd --dest s3n://airflow/destination_hadoopcmd"
        task = QuboleOperator(task_id=TASK_ID, command_type='hadoopcmd',
                              dag=dag, sub_command=cmd)

        args = task.get_hook().create_cmd_args({'run_id': 'dummy'})
        self.assertEqual(args[1], "s3distcp")
        self.assertEqual(args[2], "--src")
        self.assertEqual(args[3], "s3n://airflow/source_hadoopcmd")
        self.assertEqual(args[4], "--dest")
        self.assertEqual(args[5], "s3n://airflow/destination_hadoopcmd")
def test_position_args_parameters(self):
    """pigcmd `parameters` and hadoopcmd `sub_command` become positional args."""
    dag = DAG(DAG_ID, start_date=DEFAULT_DATE)
    with dag:
        task = QuboleOperator(task_id=TASK_ID, command_type='pigcmd',
                              parameters="key1=value1 key2=value2", dag=dag)
    # Build the arg list once per task instead of reconstructing the hook
    # and recomputing the args for every single assertion.
    args = task.get_hook().create_cmd_args({'run_id': 'dummy'})
    self.assertEqual(args[1], "key1=value1")
    self.assertEqual(args[2], "key2=value2")

    task = QuboleOperator(task_id=TASK_ID, command_type='hadoopcmd',
                          sub_command="s3distcp --src s3n://airflow/source_hadoopcmd " +
                                      "--dest s3n://airflow/destination_hadoopcmd",
                          dag=dag)
    args = task.get_hook().create_cmd_args({'run_id': 'dummy'})
    self.assertEqual(args[1], "s3distcp")
    self.assertEqual(args[2], "--src")
    self.assertEqual(args[3], "s3n://airflow/source_hadoopcmd")
    self.assertEqual(args[4], "--dest")
    self.assertEqual(args[5], "s3n://airflow/destination_hadoopcmd")