def test_read_spec_from_file(self):
    """The operator should read the index spec file on construction and
    store it, pretty-printed, as ``index_spec_str``."""
    mocked_open = mock.mock_open(read_data='{"some": "json"}')
    # Patch the builtin ``open`` as seen from the operator's module so no
    # real file is touched; ``create=True`` because the name is a builtin.
    with mock.patch('airflow.contrib.operators.druid_operator.open',
                    mocked_open, create=True):
        druid = DruidOperator(
            task_id='druid_indexing_job',
            json_index_file='index_spec.json',
            dag=self.dag,
        )
        mocked_open.assert_called_once_with('index_spec.json')
        self.assertEqual(druid.index_spec_str, '{\n    "some": "json"\n}')
def test_render_template(self):
    """Jinja templating inside the index spec file (params and macros such
    as ``ds``) should be rendered into ``index_spec_str``."""
    json_str = '''
        {
            "type": "{{ params.index_type }}",
            "datasource": "{{ params.datasource }}",
            "spec": {
                "dataSchema": {
                    "granularitySpec": {
                        "intervals": ["{{ ds }}/{{ macros.ds_add(ds, 1) }}"]
                    }
                }
            }
        }
    '''
    mocked_open = mock.mock_open(read_data=json_str)
    # Intercept the operator's file read so the templated spec above is
    # returned instead of an actual file on disk.
    with mock.patch('airflow.contrib.operators.druid_operator.open',
                    mocked_open, create=True):
        operator = DruidOperator(
            task_id='spark_submit_job',
            json_index_file='index_spec.json',
            params={
                'index_type': 'index_hadoop',
                'datasource': 'datasource_prd',
            },
            dag=self.dag,
        )
        ti = TaskInstance(operator, DEFAULT_DATE)
        ti.render_templates()

        mocked_open.assert_called_once_with('index_spec.json')

        # The operator re-serializes the spec with sorted keys and a
        # 4-space indent, so the rendered output is fully normalized.
        expected = '''{
    "datasource": "datasource_prd",
    "spec": {
        "dataSchema": {
            "granularitySpec": {
                "intervals": [
                    "2017-01-01/2017-01-02"
                ]
            }
        }
    },
    "type": "index_hadoop"
}'''
        self.assertEqual(expected, getattr(operator, 'index_spec_str'))