def get_export(export_type, domain, export_id=None, username=None):
    """Return the export instance for ``export_type``.

    Form and case exports are looked up by ``export_id`` (404 on a missing
    doc); SMS exports are built fresh from the latest schema and require
    ``username`` so the message-log-metadata flag can be checked.
    """
    from corehq.apps.export.models import (
        FormExportInstance,
        CaseExportInstance,
        SMSExportInstance,
        SMSExportDataSchema,
    )
    if export_type in ('form', 'case'):
        instance_cls = FormExportInstance if export_type == 'form' else CaseExportInstance
        try:
            return instance_cls.get(export_id)
        except ResourceNotFound:
            raise Http404()
    if export_type == 'sms':
        if not username:
            raise Exception("Username needed to ensure permissions")
        include_metadata = MESSAGE_LOG_METADATA.enabled(username)
        latest_schema = SMSExportDataSchema.get_latest_export_schema(domain, include_metadata)
        return SMSExportInstance._new_from_schema(latest_schema)
    raise Exception("Unexpected export type received %s" % export_type)
def test_edit_daily_saved_export_filters(self, _):
    """Create a daily-saved case export, commit new filters through the
    ``commit_filters`` view, and confirm the saved filters were updated."""
    export_post_data = json.dumps({
        "doc_type": "CaseExportInstance",
        "domain": self.domain.name,
        "xmlns": "http://openrosa.org/formdesigner/237B85C0-78B1-4034-8277-5D37E3EA7FD1",
        "last_updated": None,
        "legacy_saved_export_schema_id": None,
        "is_daily_saved_export": True,
        "tables": [],
        "transform_dates": True,
        "last_accessed": None,
        "app_id": "6a48b8838d06febeeabb28c8c9516ab6",
        "is_deidentified": False,
        "split_multiselects": False,
        "external_blobs": {},
        "export_format": "csv",
        "include_errors": False,
        # Fixed: was "form", which contradicted doc_type CaseExportInstance
        # and the case-export view/model used throughout this test.
        "type": "case",
        "name": "A Villager's Health > Registrationaa > Reg form: 2016-06-27"
    })
    resp = self.client.post(
        reverse(CreateNewDailySavedCaseExport.urlname, args=[self.domain.name]),
        export_post_data,
        content_type="application/json",
        follow=True
    )
    self.assertEqual(resp.status_code, 200)
    exports = get_case_export_instances(self.domain.name)
    self.assertEqual(len(exports), 1)
    export = exports[0]

    filter_form_data = {
        "emwf_case_filter": [],
        "date_range": "range",
        "start_date": "1992-01-30",
        "end_date": "2016-10-01",
    }
    resp = self.client.post(
        reverse('commit_filters', args=[self.domain.name]),
        {
            "export_id": export._id,
            "model_type": "case",
            "form_data": json.dumps(filter_form_data),
        },
    )
    self.assertEqual(resp.status_code, 200)
    response_content = json.loads(resp.content)
    self.assertFalse("error" in response_content, response_content.get("error"))
    # Re-fetch to confirm the committed filters were persisted.
    export = CaseExportInstance.get(export._id)
    self.assertEqual(export.filters.date_period.period_type, 'range')
def test_selected_false(self, export_save):
    """A deselected table produces an empty export payload, but the
    instance is still saved."""
    unselected_table = TableConfiguration(label="My table", selected=False, path=[], columns=[])
    instance = CaseExportInstance(
        export_format=Format.JSON,
        domain=DOMAIN,
        case_type=DEFAULT_CASE_TYPE,
        tables=[unselected_table],
    )
    self.assertEqual(get_export_json(instance), {})
    self.assertTrue(export_save.called)
def test_request_succeeded(self):
    """An authenticated OData feed request returns an empty value list with
    the expected headers."""
    config = CaseExportInstance(
        _id='config_id',
        tables=[TableConfiguration(columns=[])],
        case_type='my_case_type',
        domain=self.domain.name,
    )
    config.save()
    self.addCleanup(config.delete)

    correct_credentials = self._get_correct_credentials()
    response = self._execute_query(correct_credentials)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/json; charset=utf-8')
    self.assertEqual(response['OData-Version'], '4.0')
    expected_body = {
        '@odata.context': (
            'http://localhost:8000/a/test_domain/api/v0.5/odata/cases/config_id/$metadata#feed'
        ),
        'value': [],
    }
    self.assertEqual(json.loads(response.content.decode('utf-8')), expected_body)
def setUpClass(cls):
    """Persist a mix of export instances used across the tests:
    deidentified, wrong-domain, and daily-saved variants of both form
    and case exports."""
    # Fixed: the base class setup was never invoked; the sibling
    # setUpClass in this file calls it, and unittest/Django require it.
    super().setUpClass()
    cls.form_instance_deid = FormExportInstance(
        domain=cls.domain,
        name='Forms',
        is_deidentified=True
    )
    cls.form_instance_wrong = FormExportInstance(
        domain='wrong-domain',
        name='Forms',
    )
    cls.form_instance_daily_saved = FormExportInstance(
        domain='wrong-domain',
        is_daily_saved_export=True,
    )
    cls.case_instance_deid = CaseExportInstance(
        domain=cls.domain,
        name='Cases',
        is_deidentified=True
    )
    cls.case_instance = CaseExportInstance(
        domain=cls.domain,
        name='Cases',
        is_deidentified=False
    )
    cls.case_instance_daily_saved = CaseExportInstance(
        domain='wrong-domain',
        is_daily_saved_export=True,
    )
    cls.instances = [
        cls.form_instance_deid,
        cls.form_instance_wrong,
        cls.form_instance_daily_saved,
        cls.case_instance,
        cls.case_instance_deid,
        cls.case_instance_daily_saved,
    ]
    for instance in cls.instances:
        instance.save()
def test_caseid_column_label(self):
    """Saving an OData config normalizes any column pointing at ``_id``
    to the label 'caseid'."""
    caseid_column = ExportColumn(
        label='modified_case_id_column',
        item=ExportItem(path=[PathNode(name='_id')]),
        selected=True,
    )
    config = CaseExportInstance(
        is_odata_config=True,
        tables=[TableConfiguration(selected=True, columns=[caseid_column])],
    )
    config.save()
    self.addCleanup(config.delete)

    cleaned_export = CaseExportInstance.get(config.get_id)
    self.assertEqual(cleaned_export.tables[0].columns[0].label, 'caseid')
def setUp(self):
    """Create a two-column case export and an IncrementalExport that
    points at it via basic-auth connection settings."""
    super().setUp()
    columns = [
        ExportColumn(
            label="Foo column",
            item=ExportItem(path=[PathNode(name="foo")]),
            selected=True,
        ),
        ExportColumn(
            label="Bar column",
            item=ExportItem(path=[PathNode(name="bar")]),
            selected=True,
        ),
    ]
    self.export_instance = CaseExportInstance(
        export_format=Format.UNZIPPED_CSV,
        domain=self.domain,
        case_type=DEFAULT_CASE_TYPE,
        tables=[TableConfiguration(label="My table", selected=True, path=[], columns=columns)],
    )
    self.export_instance.save()
    connection = ConnectionSettings.objects.create(
        domain=self.domain,
        name='test conn',
        url='http://somewhere',
        auth_type=BASIC_AUTH,
    )
    self.incremental_export = IncrementalExport.objects.create(
        domain=self.domain,
        name='test_export',
        export_instance_id=self.export_instance.get_id,
        connection_settings=connection,
    )
def test_get_export_file(self):
    """A single instance with two selected columns exports one table with
    both headers and one row per matching case."""
    columns = [
        ExportColumn(
            label="Foo column",
            item=ExportItem(path=[PathNode(name="foo")]),
            selected=True,
        ),
        ExportColumn(
            label="Bar column",
            item=ExportItem(path=[PathNode(name="bar")]),
            selected=True,
        ),
    ]
    instance = CaseExportInstance(
        export_format=Format.JSON,
        domain=DOMAIN,
        case_type=DEFAULT_CASE_TYPE,
        tables=[TableConfiguration(label="My table", selected=True, path=[], columns=columns)],
    )
    export_file = get_export_file([instance], [])  # no filters
    expected = {
        'My table': {
            'headers': ['Foo column', 'Bar column'],
            'rows': [
                ['apple', 'banana'],
                ['apple', 'banana'],
                ['apple', 'banana'],
            ],
        }
    }
    with export_file as export:
        self.assertEqual(json.loads(export.read()), expected)
def test_missing_value_is_null(self):
    """A selected property absent from the case doc serializes as '---'."""
    config = CaseExportInstance(tables=[
        TableConfiguration(columns=[
            ExportColumn(
                label='owner-name-label',
                item=ExportItem(path=[PathNode(name='owner_name')]),
                selected=True,
            ),
        ]),
    ])
    rows = ODataCaseSerializer.serialize_cases_using_config([{}], config)
    self.assertEqual(rows, [{'owner-name-label': '---'}])
def test_populated_metadata_document(self):
    """The metadata XML reflects only OData configs belonging to the
    request's domain; non-OData and other-domain configs are ignored."""
    non_odata_config = CaseExportInstance(domain=self.domain.name)
    non_odata_config.save()
    self.addCleanup(non_odata_config.delete)

    config_in_other_domain = CaseExportInstance(domain='other_domain', is_odata_config=True)
    config_in_other_domain.save()
    self.addCleanup(config_in_other_domain.delete)

    correct_credentials = self._get_correct_credentials()
    response = self._execute_query(correct_credentials)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/xml')
    self.assertEqual(response['OData-Version'], '4.0')
    self.assertXmlEqual(
        self.get_xml('populated_case_odata_metadata_document_from_config',
                     override_path=PATH_TO_TEST_DATA),
        response.content,
    )
def test_case_name(self):
    """The built-in 'name' property serializes under its configured label."""
    config = CaseExportInstance(tables=[
        TableConfiguration(columns=[
            ExportColumn(
                label='case-name-label',
                item=ExportItem(path=[PathNode(name='name')]),
                selected=True,
            ),
        ]),
    ])
    rows = ODataCaseSerializer.serialize_cases_using_config(
        [{'name': 'case-name-value'}], config)
    self.assertEqual(rows, [{'case-name-label': 'case-name-value'}])
def test_unselected_column_excluded(self):
    """Columns with selected=False are dropped from serialized rows."""
    config = CaseExportInstance(tables=[
        TableConfiguration(columns=[
            ExportColumn(
                label='owner-name-label',
                item=ExportItem(path=[PathNode(name='owner_name')]),
                selected=False,
            ),
        ]),
    ])
    rows = ODataCaseSerializer.serialize_cases_using_config(
        [{'owner_name': 'owner-name-value', 'properties': {}}], config)
    self.assertEqual(rows, [{}])
def test_selected_false(self):
    """A deselected table is omitted entirely from the export file."""
    instance = CaseExportInstance(
        export_format=Format.JSON,
        domain=DOMAIN,
        case_type=DEFAULT_CASE_TYPE,
        tables=[TableConfiguration(label="My table", selected=False, path=[], columns=[])],
    )
    with get_export_file([instance], []) as export:  # no filters
        self.assertEqual(json.loads(export.read()), {})
def test_get_export_file(self):
    """get_export_json returns headers plus one row per case for a single
    selected table with two selected columns."""
    columns = [
        ExportColumn(
            label="Foo column",
            item=ExportItem(path=[PathNode(name="foo")]),
            selected=True,
        ),
        ExportColumn(
            label="Bar column",
            item=ExportItem(path=[PathNode(name="bar")]),
            selected=True,
        ),
    ]
    export_json = get_export_json(
        CaseExportInstance(
            export_format=Format.JSON,
            domain=DOMAIN,
            case_type=DEFAULT_CASE_TYPE,
            tables=[TableConfiguration(label="My table", selected=True, path=[], columns=columns)],
        ),
    )
    expected = {
        'My table': {
            'headers': ['Foo column', 'Bar column'],
            'rows': [
                ['apple', 'banana'],
                ['apple', 'banana'],
                ['apple', 'banana'],
            ],
        }
    }
    self.assertEqual(export_json, expected)
def test_non_standard_case_property(self):
    """Dynamic case properties serialize under their configured labels."""
    config = CaseExportInstance(tables=[
        TableConfiguration(columns=[
            ExportColumn(
                label='property-1-label',
                item=ExportItem(path=[PathNode(name='property_1')]),
                selected=True,
            ),
        ]),
    ])
    rows = ODataCaseSerializer.serialize_cases_using_config(
        [{'property_1': 'property-1-value'}], config)
    self.assertEqual(rows, [{'property-1-label': 'property-1-value'}])
def test_export_transforms(self, _):
    """A DEID date transform masks the header and offsets exported dates;
    missing dates become MISSING_VALUE."""
    deid_column = ExportColumn(
        label="DEID Date Transform column",
        item=ExportItem(path=[PathNode(name="date")]),
        selected=True,
        deid_transform=DEID_DATE_TRANSFORM,
    )
    instance = CaseExportInstance(
        export_format=Format.JSON,
        domain=DOMAIN,
        case_type=DEFAULT_CASE_TYPE,
        tables=[TableConfiguration(label="My table", selected=True, path=[],
                                   columns=[deid_column])],
    )
    with get_export_file([instance], []) as export:  # no filters
        export_dict = json.loads(export.read())
        export_dict['My table']['rows'].sort()
        expected = {
            'My table': {
                'headers': ['DEID Date Transform column [sensitive]'],
                # offset by 3 since that's the mocked random offset
                'rows': [
                    [MISSING_VALUE],
                    ['2016-04-07'],
                    ['2016-04-27'],
                ],
            }
        }
        self.assertEqual(export_dict, expected)
def test_export_transforms(self, export_save, _):
    """DEID date transform columns are masked in the header and their
    dates shifted by the mocked random offset; the instance is saved."""
    deid_column = ExportColumn(
        label="DEID Date Transform column",
        item=ExportItem(path=[PathNode(name="date")]),
        selected=True,
        deid_transform=DEID_DATE_TRANSFORM,
    )
    export_json = get_export_json(
        CaseExportInstance(
            export_format=Format.JSON,
            domain=DOMAIN,
            case_type=DEFAULT_CASE_TYPE,
            tables=[TableConfiguration(label="My table", selected=True, path=[],
                                       columns=[deid_column])],
        ),
    )
    export_json['My table']['rows'].sort()
    expected = {
        'My table': {
            'headers': ['DEID Date Transform column [sensitive]'],
            # offset by 3 since that's the mocked random offset
            'rows': [
                [MISSING_VALUE],
                ['2016-04-07'],
                ['2016-04-27'],
            ],
        }
    }
    self.assertEqual(export_json, expected)
    self.assertTrue(export_save.called)
def get_export(export_type, domain, export_id=None, username=None):
    """Look up or build the export instance for ``export_type``.

    'form' and 'case' exports are fetched by ``export_id`` and raise
    Http404 when missing; 'sms' exports are constructed from the latest
    schema and require ``username`` for the metadata permission check.
    """
    from corehq.apps.export.models import (
        FormExportInstance,
        CaseExportInstance,
        SMSExportInstance,
        SMSExportDataSchema,
    )
    # Dispatch table for the id-based export types.
    fetchable = {'form': FormExportInstance, 'case': CaseExportInstance}
    if export_type in fetchable:
        try:
            return fetchable[export_type].get(export_id)
        except ResourceNotFound:
            raise Http404()
    if export_type == 'sms':
        if not username:
            raise Exception("Username needed to ensure permissions")
        include_metadata = MESSAGE_LOG_METADATA.enabled(username)
        schema = SMSExportDataSchema.get_latest_export_schema(domain, include_metadata)
        return SMSExportInstance._new_from_schema(schema)
    raise Exception("Unexpected export type received %s" % export_type)
def to_json(self, data, options=None):
    """Serialize one page of the OData case feed to JSON.

    Adds ``@odata.context``, an optional ``@odata.nextLink`` for paging,
    and replaces the raw objects with config-driven 'value' rows.
    """
    # Convert bundled objects to JSON
    data['objects'] = [bundle.obj for bundle in data['objects']]
    domain = data.pop('domain', None)
    config_id = data.pop('config_id', None)
    api_path = data.pop('api_path', None)
    assert all([domain, config_id, api_path]), [domain, config_id, api_path]

    metadata_url = absolute_reverse(ODataCaseMetadataView.urlname, args=[domain, config_id])
    data['@odata.context'] = '{}#{}'.format(metadata_url, 'feed')
    next_link = self.get_next_url(data.pop('meta'), api_path)
    if next_link:
        data['@odata.nextLink'] = next_link

    config = CaseExportInstance.get(config_id)
    data['value'] = self.serialize_cases_using_config(data.pop('objects'), config)
    return json.dumps(data, cls=DjangoJSONEncoder, sort_keys=True)
def test_default_case_values(self):
    """Both fresh and wrapped CaseExportInstances default show_all_data on."""
    for export in [CaseExportInstance(), CaseExportInstance.wrap({})]:
        self.assertTrue(export.filters.show_all_data)
def _generate_instance(self, build_ids_and_versions, schema, saved_export=None):
    """Build an export instance from ``schema`` with app build versions
    mocked to ``build_ids_and_versions``."""
    patcher = mock.patch(
        'corehq.apps.export.models.new.get_latest_app_ids_and_versions',
        return_value=build_ids_and_versions,
    )
    with patcher:
        return CaseExportInstance.generate_instance_from_schema(schema, saved_export=saved_export)
def test_simple_bulk_export(self):
    """Bulk-exporting two case export instances yields one sheet per
    instance (prefixed Export1-/Export2-), each with its own column."""
    foo_instance = CaseExportInstance(
        export_format=Format.JSON,
        domain=DOMAIN,
        case_type=DEFAULT_CASE_TYPE,
        tables=[TableConfiguration(
            selected=True,
            label="My table",
            path=MAIN_TABLE,
            columns=[
                ExportColumn(
                    label="Foo column",
                    item=ExportItem(path=[PathNode(name="foo")]),
                    selected=True,
                ),
            ]
        )]
    )
    bar_instance = CaseExportInstance(
        export_format=Format.JSON,
        domain=DOMAIN,
        case_type=DEFAULT_CASE_TYPE,
        tables=[TableConfiguration(
            label="My table",
            selected=True,
            path=MAIN_TABLE,
            columns=[
                ExportColumn(
                    label="Bar column",
                    item=ExportItem(path=[PathNode(name="bar")]),
                    selected=True,
                ),
            ]
        )]
    )
    export_file = get_export_file([foo_instance, bar_instance], [])  # no filters

    expected = {
        'Export1-My table': {
            "A1": "Foo column",
            "A2": "apple",
            "A3": "apple",
            "A4": "apple",
        },
        "Export2-My table": {
            "A1": "Bar column",
            "A2": "banana",
            "A3": "banana",
            "A4": "banana",
        },
    }
    with export_file as export:
        wb = load_workbook(export)
        # `Workbook.get_sheet_names()` is deprecated in openpyxl; the
        # `sheetnames` property returns the same list.
        self.assertEqual(wb.sheetnames, ["Export1-My table", "Export2-My table"])

        for sheet, cells in expected.items():
            for cell, expected_value in cells.items():
                self.assertEqual(
                    wb[sheet][cell].value,
                    expected_value,
                    'AssertionError: Sheet "{}", cell "{}" expected: "{}", got "{}"'.format(
                        sheet, cell, expected_value, wb[sheet][cell].value
                    )
                )
def test_edit_daily_saved_export_filters(self, _):
    """End-to-end filter edit: create a daily-saved case export, commit a
    date-range filter via the view, and verify the persisted filters."""
    export_definition = {
        "doc_type": "CaseExportInstance",
        "domain": self.domain.name,
        "xmlns": "http://openrosa.org/formdesigner/237B85C0-78B1-4034-8277-5D37E3EA7FD1",
        "last_updated": None,
        "legacy_saved_export_schema_id": None,
        "is_daily_saved_export": True,
        "tables": [],
        "transform_dates": True,
        "last_accessed": None,
        "app_id": "6a48b8838d06febeeabb28c8c9516ab6",
        "is_deidentified": False,
        "split_multiselects": False,
        "external_blobs": {},
        "export_format": "csv",
        "include_errors": False,
        "type": "case",
        "name": "A Villager's Health > Registrationaa > Reg form: 2016-06-27"
    }
    create_url = reverse(CreateNewDailySavedCaseExport.urlname, args=[self.domain.name])
    resp = self.client.post(create_url, json.dumps(export_definition),
                            content_type="application/json", follow=True)
    self.assertEqual(resp.status_code, 200)

    exports = get_case_exports_by_domain(self.domain.name)
    self.assertEqual(len(exports), 1)
    export = exports[0]

    filter_form_data = json.dumps({
        "emwf_case_filter": [],
        "date_range": "range",
        "start_date": "1992-01-30",
        "end_date": "2016-10-01",
    })
    resp = self.client.post(
        reverse('commit_filters', args=[self.domain.name]),
        {
            "export_id": export._id,
            "model_type": "case",
            "form_data": filter_form_data,
        },
    )
    self.assertEqual(resp.status_code, 200)
    response_content = json.loads(resp.content)
    self.assertFalse("error" in response_content, response_content.get("error"))

    # Re-fetch to confirm the committed filters were persisted.
    export = CaseExportInstance.get(export._id)
    self.assertEqual(export.filters.date_period.period_type, 'range')
def test_populated_service_document(self):
    """The service document lists one EntitySet per OData config in the
    domain, excluding non-OData and other-domain configs."""
    odata_config_1 = CaseExportInstance(domain=self.domain.name, is_odata_config=True)
    odata_config_1.save()
    self.addCleanup(odata_config_1.delete)

    odata_config_2 = CaseExportInstance(domain=self.domain.name, is_odata_config=True)
    odata_config_2.save()
    self.addCleanup(odata_config_2.delete)

    non_odata_config = CaseExportInstance(domain=self.domain.name)
    non_odata_config.save()
    self.addCleanup(non_odata_config.delete)

    config_in_other_domain = CaseExportInstance(domain='other_domain', is_odata_config=True)
    config_in_other_domain.save()
    self.addCleanup(config_in_other_domain.delete)

    correct_credentials = self._get_correct_credentials()
    with flag_enabled('ODATA'):
        response = self._execute_query(correct_credentials)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['OData-Version'], '4.0')

    response_content = json.loads(response.content.decode('utf-8'))
    self.assertCountEqual(response_content, ['@odata.context', 'value'])
    self.assertEqual(
        response_content['@odata.context'],
        'http://localhost:8000/a/test_domain/api/v0.5/odata/cases/$metadata'
    )
    expected_entity_sets = [
        {'url': config.get_id, 'kind': 'EntitySet', 'name': config.get_id}
        for config in (odata_config_1, odata_config_2)
    ]
    self.assertCountEqual(response_content['value'], expected_entity_sets)
def get_export(self, id=None):
    """Fetch the ``CaseExportInstance`` with doc id ``id``.

    Thin wrapper so subclasses/tests can override the lookup.
    """
    return CaseExportInstance.get(id)
def get_config(self, config_id):
    """Fetch the ``CaseExportInstance`` identified by ``config_id``."""
    return CaseExportInstance.get(config_id)
def test_default_case_values(self):
    """Both fresh and wrapped CaseExportInstances default show_project_data on."""
    for export in [CaseExportInstance(), CaseExportInstance.wrap({})]:
        self.assertTrue(export.filters.show_project_data)
class TestIncrementalExport(TestCase):
    """Tests for incremental case exports: generating checkpoint blobs from
    Elasticsearch case data and posting them to a remote connection.

    NOTE(review): block extents inside ``with`` statements were
    reconstructed from collapsed source — confirm against the original
    formatting.
    """

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        with trap_extra_setup(ConnectionError, msg="cannot connect to elasicsearch"):
            cls.es = get_es_new()
            initialize_index_and_mapping(cls.es, CASE_INDEX_INFO)
            initialize_index_and_mapping(cls.es, USER_INDEX_INFO)
        cls.domain = uuid.uuid4().hex
        create_domain(cls.domain)
        cls.now = datetime.utcnow()
        # Two baseline cases, modified 3h and 2h ago, form the initial export data.
        cases = [
            new_case(domain=cls.domain, foo="apple", bar="banana",
                     server_modified_on=cls.now - timedelta(hours=3)),
            new_case(domain=cls.domain, foo="orange", bar="pear",
                     server_modified_on=cls.now - timedelta(hours=2)),
        ]
        for case in cases:
            send_to_elasticsearch('cases', case.to_json())
        cls.es.indices.refresh(CASE_INDEX_INFO.index)

    @classmethod
    def tearDownClass(cls):
        ensure_index_deleted(CASE_INDEX_INFO.index)
        super().tearDownClass()

    def setUp(self):
        super().setUp()
        # A case export with two selected columns (foo, bar) backing the
        # IncrementalExport created below.
        self.export_instance = CaseExportInstance(
            export_format=Format.UNZIPPED_CSV,
            domain=self.domain,
            case_type=DEFAULT_CASE_TYPE,
            tables=[
                TableConfiguration(
                    label="My table",
                    selected=True,
                    path=[],
                    columns=[
                        ExportColumn(
                            label="Foo column",
                            item=ExportItem(path=[PathNode(name="foo")]),
                            selected=True,
                        ),
                        ExportColumn(
                            label="Bar column",
                            item=ExportItem(path=[PathNode(name="bar")]),
                            selected=True,
                        )
                    ])
            ])
        self.export_instance.save()
        self.incremental_export = IncrementalExport.objects.create(
            domain=self.domain,
            name='test_export',
            export_instance_id=self.export_instance.get_id,
            connection_settings=ConnectionSettings.objects.create(
                domain=self.domain,
                name='test conn',
                url='http://somewhere',
                auth_type=BASIC_AUTH,
            ))

    def tearDown(self):
        self.incremental_export.delete()
        self.export_instance.delete()
        super().tearDown()

    def _cleanup_case(self, case_id):
        # Returns a callable suitable for addCleanup that deletes the case
        # from the ES index and refreshes it.
        def _clean():
            self.es.delete(CASE_INDEX_INFO.index, CASE_INDEX_INFO.type, case_id)
            self.es.indices.refresh(CASE_INDEX_INFO.index)
        return _clean

    def test_initial(self):
        # The first run exports both baseline cases.
        checkpoint = _generate_incremental_export(self.incremental_export)
        data = checkpoint.get_blob().read().decode('utf-8-sig')
        expected = "Foo column,Bar column\r\napple,banana\r\norange,pear\r\n"
        self.assertEqual(data, expected)
        self.assertEqual(checkpoint.doc_count, 2)
        return checkpoint

    def test_initial_failure(self):
        # calling it twice should result in the same output since the checkpoints were not
        # marked as success
        self.test_initial()
        self.test_initial()

    def test_incremental_success(self):
        checkpoint = self.test_initial()
        checkpoint.status = IncrementalExportStatus.SUCCESS
        checkpoint.save()

        case = new_case(domain=self.domain, foo="peach", bar="plumb",
                        server_modified_on=datetime.utcnow())
        send_to_elasticsearch('cases', case.to_json())
        self.es.indices.refresh(CASE_INDEX_INFO.index)
        self.addCleanup(self._cleanup_case(case.case_id))

        # Only the newly added case falls after the last checkpoint date.
        checkpoint = _generate_incremental_export(
            self.incremental_export, last_doc_date=checkpoint.last_doc_date)
        data = checkpoint.get_blob().read().decode('utf-8-sig')
        expected = "Foo column,Bar column\r\npeach,plumb\r\n"
        self.assertEqual(data, expected)
        self.assertEqual(checkpoint.doc_count, 1)

        # Rewinding the date picks up the 2h-old baseline case as well.
        checkpoint = _generate_incremental_export(
            self.incremental_export, last_doc_date=self.now - timedelta(hours=2, minutes=1))
        data = checkpoint.get_blob().read().decode("utf-8-sig")
        expected = "Foo column,Bar column\r\norange,pear\r\npeach,plumb\r\n"
        self.assertEqual(data, expected)
        self.assertEqual(checkpoint.doc_count, 2)

        self.assertEqual(self.incremental_export.checkpoints.count(), 3)

    def test_sending_success(self):
        self._test_sending(200, IncrementalExportStatus.SUCCESS)

    def test_sending_fail(self):
        self._test_sending(401, IncrementalExportStatus.FAILURE)

    def _test_sending(self, status_code, expected_status):
        # Post a checkpoint blob to the mocked endpoint and verify the
        # checkpoint status and request log reflect the response code.
        checkpoint = self.test_initial()
        with requests_mock.Mocker() as m:
            m.post('http://somewhere/', status_code=status_code)
            _send_incremental_export(self.incremental_export, checkpoint)
            checkpoint.refresh_from_db()
            self.assertEqual(checkpoint.status, expected_status)
            self.assertEqual(checkpoint.request_log.response_status, status_code)

    def test_owner_filter(self):
        # Build a location tree and a user assigned to Team1; the export
        # filter selects HealthDepartment1 locations plus owner id '123'.
        setup_locations_and_types(
            self.domain,
            ['state', 'health-department', 'team', 'sub-team'],
            [],
            [('State1', [
                ('HealthDepartment1', [
                    ('Team1', [
                        ('SubTeam1', []),
                        ('SubTeam2', []),
                    ]),
                    ('Team2', []),
                ]),
            ])])
        team1 = SQLLocation.objects.filter(domain=self.domain, name='Team1').first()
        health_department = SQLLocation.objects.filter(
            domain=self.domain, name='HealthDepartment1').first()
        self.addCleanup(delete_all_locations)

        user = CommCareUser.create(self.domain, 'm2', 'abc', None, None, location=team1)
        send_to_elasticsearch('users', user.to_json())
        self.es.indices.refresh(USER_INDEX_INFO.index)
        self.addCleanup(delete_all_users)

        cases = [
            new_case(
                domain=self.domain,
                foo="peach",
                bar="plumb",
                server_modified_on=datetime.utcnow() + timedelta(hours=-1),
                owner_id='123',
            ),
            new_case(
                domain=self.domain,
                foo="orange",
                bar="melon",
                server_modified_on=datetime.utcnow(),
                owner_id=user.user_id,  # this user is part of the team1 location.
            ),
            new_case(
                domain=self.domain,
                foo="grape",
                bar="pineapple",
                server_modified_on=datetime.utcnow(),
            ),
        ]
        for case in cases:
            send_to_elasticsearch("cases", case.to_json())
            self.addCleanup(self._cleanup_case(case.case_id))
        self.es.indices.refresh(CASE_INDEX_INFO.index)

        self.export_instance.filters.show_project_data = False
        self.export_instance.filters.locations = [health_department.location_id]
        self.export_instance.filters.users = ['123']
        self.export_instance.save()
        checkpoint = _generate_incremental_export(self.incremental_export)
        data = checkpoint.get_blob().read().decode("utf-8-sig")
        # Only the '123'-owned and Team1-user-owned cases pass the filter.
        expected = "Foo column,Bar column\r\npeach,plumb\r\norange,melon\r\n"
        self.assertEqual(data, expected)
def setUpClass(cls):
    """Load the shared case export instance fixture once for the class."""
    super().setUpClass()
    fixture = cls.get_json('case_export_instance')
    cls.export_instance = CaseExportInstance.wrap(fixture)
def test_populated_metadata_document(self):
    """Metadata reflects selected columns of this domain's OData configs
    only; config ordering is pinned so the XML comparison is stable."""
    odata_config_1 = CaseExportInstance(
        _id='odata_config_1',
        domain=self.domain.name,
        is_odata_config=True,
        tables=[TableConfiguration(columns=[])],
    )
    odata_config_1.save()
    self.addCleanup(odata_config_1.delete)

    columns = [
        ExportColumn(label='selected_property_1', selected=True),
        ExportColumn(label='selected_property_2', selected=True),
        ExportColumn(label='unselected_property'),
    ]
    odata_config_2 = CaseExportInstance(
        _id='odata_config_2',
        domain=self.domain.name,
        is_odata_config=True,
        tables=[TableConfiguration(columns=columns)],
    )
    odata_config_2.save()
    self.addCleanup(odata_config_2.delete)

    non_odata_config = CaseExportInstance(domain=self.domain.name)
    non_odata_config.save()
    self.addCleanup(non_odata_config.delete)

    config_in_other_domain = CaseExportInstance(domain='other_domain', is_odata_config=True)
    config_in_other_domain.save()
    self.addCleanup(config_in_other_domain.delete)

    correct_credentials = self._get_correct_credentials()
    with flag_enabled('ODATA'):
        sorted_configs = sorted(
            get_odata_case_configs_by_domain(self.domain.name),
            key=lambda _config: _config.get_id,
        )
        with patch('corehq.apps.api.odata.views.get_odata_case_configs_by_domain',
                   return_value=sorted_configs):
            response = self._execute_query(correct_credentials)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/xml')
    self.assertEqual(response['OData-Version'], '4.0')
    self.assertXmlEqual(
        self.get_xml('populated_case_odata_metadata_document_from_config',
                     override_path=PATH_TO_TEST_DATA),
        response.content,
    )
def test_populated_metadata_document(self):
    """Metadata XML includes base properties plus selected columns for the
    domain's OData config, ignoring non-OData and other-domain configs."""
    columns = [
        # this is what exports generate for a base level property
        ExportColumn(label='closed', selected=True,
                     item=ExportItem(path=[PathNode(name='closed')])),
        ExportColumn(label='date_modified', selected=True,
                     item=ExportItem(path=[PathNode(name='date_modified')])),
        ExportColumn(label='selected_property_1', selected=True),
        ExportColumn(label='selected_property_2', selected=True),
        ExportColumn(label='unselected_property'),
    ]
    odata_config = CaseExportInstance(
        _id='my_config_id',
        domain=self.domain.name,
        is_odata_config=True,
        tables=[TableConfiguration(selected=True, columns=columns)],
    )
    odata_config.save()
    self.addCleanup(odata_config.delete)

    non_odata_config = CaseExportInstance(domain=self.domain.name)
    non_odata_config.save()
    self.addCleanup(non_odata_config.delete)

    config_in_other_domain = CaseExportInstance(domain='other_domain', is_odata_config=True)
    config_in_other_domain.save()
    self.addCleanup(config_in_other_domain.delete)

    correct_credentials = self._get_correct_credentials()
    with flag_enabled('BI_INTEGRATION_PREVIEW', is_preview=True):
        response = self._execute_query(correct_credentials)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response['Content-Type'], 'application/xml')
    self.assertEqual(response['OData-Version'], '4.0')
    self.assertXmlEqual(
        self.get_xml('populated_case_odata_metadata_document_from_config',
                     override_path=PATH_TO_TEST_DATA),
        response.content,
    )