Code example #1
    def test_hook_correct_region():
        with patch(HOOK) as mock_hook:
            dataproc_task = DataProcPySparkOperator(task_id=TASK_ID,
                                                    main=MAIN_URI,
                                                    region=GCP_REGION)

            dataproc_task.execute(None)
            # The hook's submit() must receive the region passed to the operator;
            # the remaining positional arguments are ignored via mock.ANY.
            mock_hook.return_value.submit.assert_called_once_with(
                mock.ANY, mock.ANY, GCP_REGION, mock.ANY)
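All five snippets assume the same module-level scaffolding, which isn't shown. Below is a minimal sketch of the assumed imports and constants; every value is a placeholder except the HOOK path, which is spelled out verbatim in examples #2 and #3. Examples #1 and #4 are written without self, so they run as plain pytest-style functions; add self if you place them in a unittest.TestCase.

    from unittest import mock
    from unittest.mock import patch

    from airflow.contrib.operators.dataproc_operator import DataProcPySparkOperator

    # Placeholder values; adjust to your project.
    HOOK = 'airflow.contrib.operators.dataproc_operator.DataProcHook'
    TASK_ID = 'test_task'
    MAIN_URI = 'gs://some-bucket/jobs/main.py'
    GCP_REGION = 'europe-west1'
    REGION = GCP_REGION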
Code example #2
    def test_hook_correct_region(self):
        with patch('airflow.contrib.operators.dataproc_operator.DataProcHook') as mock_hook:
            dataproc_task = DataProcPySparkOperator(task_id=TASK_ID,
                                                    main=MAIN_URI,
                                                    region=REGION)

            dataproc_task.execute(None)
            mock_hook.return_value.submit.assert_called_once_with(
                mock.ANY, mock.ANY, REGION)
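The mock.ANY placeholders are what make these assertions robust: mock.ANY implements __eq__ to compare equal to any object, so the assertion pins down only the region argument and lets everything else pass. A quick standalone illustration:

    from unittest import mock

    assert mock.ANY == object()        # ANY compares equal to anything
    assert 'europe-west1' == mock.ANY  # in either direction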
Code example #3
    def test_hook_correct_region(self):
        with patch('airflow.contrib.operators.dataproc_operator.DataProcHook') as mock_hook:
            dataproc_task = DataProcPySparkOperator(
                task_id=TASK_ID,
                main=MAIN_URI,
                region=REGION
            )

            dataproc_task.execute(None)
            mock_hook.return_value.submit.assert_called_once_with(mock.ANY, mock.ANY, REGION)
Code example #4
    def test_hook_correct_region():
        with patch(HOOK) as mock_hook:
            dataproc_task = DataProcPySparkOperator(
                task_id=TASK_ID,
                main=MAIN_URI,
                region=GCP_REGION
            )

            dataproc_task.execute(None)
            mock_hook.return_value.submit.assert_called_once_with(mock.ANY, mock.ANY,
                                                                  GCP_REGION, mock.ANY)
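Note that examples #1 and #4 assert four positional arguments to submit() while examples #2 and #3 assert three. The difference most likely reflects different DataProcHook.submit signatures across Airflow releases; check the signature of the hook in your installed version and match the number of mock.ANY placeholders to it.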
Code example #5
    def test_correct_job_definition(self, mock_hook, mock_uuid, mock_project_id):
        # Expected job
        job_definition = deepcopy(DATAPROC_JOB_TO_SUBMIT)
        job_definition['job']['pysparkJob'] = {'mainPythonFileUri': 'main_class'}
        job_definition['job']['reference']['jobId'] = DATAPROC_JOB_ID + "_test"

        # Prepare job using operator
        task = DataProcPySparkOperator(
            task_id=TASK_ID,
            region=GCP_REGION,
            cluster_name=CLUSTER_NAME,
            job_name=DATAPROC_JOB_ID,
            labels=LABELS,
            main="main_class"
        )

        task.execute(context=None)
        self.assertDictEqual(job_definition, task.job_template.job)
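For example #5 to run, the three mock_* parameters must be injected by a stack of patch decorators that the snippet omits. The sketch below is an assumption, not the original code: only the DataProcHook target appears in the snippets above; the uuid and project-id targets are hypothetical stand-ins. Stacked decorators hand mocks to the test bottom-up, which is why mock_hook is the first parameter after self.

    # Hypothetical decorator stack implied by the test signature. Only the
    # DataProcHook path is taken from the original; the other two targets
    # are placeholders.
    @mock.patch('path.to.project_id_lookup')                                 # -> mock_project_id (placeholder)
    @mock.patch('airflow.contrib.operators.dataproc_operator.uuid')          # -> mock_uuid (assumed module path)
    @mock.patch('airflow.contrib.operators.dataproc_operator.DataProcHook')  # -> mock_hook
    def test_correct_job_definition(self, mock_hook, mock_uuid, mock_project_id):
        ...  # body as in example #5 above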