def test_build_from_saved_schema(self):
    """Regenerating a schema after a new build updates the saved doc in place.

    The second generation must reuse the same schema document (``_id``
    unchanged) rather than creating a fresh one.
    """
    app = self.current_app
    schema = CaseExportDataSchema.generate_schema_from_builds(
        app.domain,
        app._id,
        self.case_type,
    )

    self.assertEqual(schema.last_app_versions[app._id], app.version)
    # One for case, one for case history
    self.assertEqual(len(schema.group_schemas), 2)

    # After the first schema has been saved let's add a second app to process
    second_build = Application.wrap(
        self.get_json('basic_case_application'))
    second_build._id = '456'
    second_build.copy_of = app.get_id
    second_build.version = 6
    # Suppress the post-save signal so the schema is not rebuilt implicitly
    with drop_connected_signals(app_post_save):
        second_build.save()
    self.addCleanup(second_build.delete)

    new_schema = CaseExportDataSchema.generate_schema_from_builds(
        app.domain,
        app._id,
        self.case_type,
    )

    # Same saved document, updated rather than replaced
    self.assertEqual(new_schema._id, schema._id)
    self.assertEqual(new_schema.last_app_versions[app._id], app.version)
    # One for case, one for case history
    self.assertEqual(len(new_schema.group_schemas), 2)
def test_basic_case_prop_merge(self):
    """Merging schemas unions case properties and tracks last_occurrence.

    Builds two main-table schemas (versions 1 and 2) plus a case-history
    schema, merges them, and checks the merged item counts and versions.
    """
    case_property_mapping = {
        'candy': ['my_case_property', 'my_second_case_property']
    }
    schema1 = CaseExportDataSchema._generate_schema_from_case_property_mapping(
        case_property_mapping,
        1,
    )
    case_property_mapping = {
        'candy': ['my_case_property', 'my_third_case_property']
    }
    schema2 = CaseExportDataSchema._generate_schema_from_case_property_mapping(
        case_property_mapping,
        2,
    )
    schema3 = CaseExportDataSchema._generate_schema_for_case_history(case_property_mapping, 2)

    merged = CaseExportDataSchema._merge_schemas(schema1, schema2, schema3)

    self.assertEqual(len(merged.group_schemas), 2)
    group_schema1 = merged.group_schemas[0]
    group_schema2 = merged.group_schemas[1]

    self.assertEqual(group_schema1.last_occurrence, 2)
    self.assertEqual(len(group_schema1.items), 3)

    # BUG FIX: filter() returns a lazy iterator on Python 3, which has no
    # len(); materialize the matches as a list before counting.
    items = [i for i in group_schema1.items if i.last_occurrence == 1]
    self.assertEqual(len(items), 1)

    self.assertEqual(group_schema2.last_occurrence, 2)
    self.assertEqual(
        len(group_schema2.items),
        len(CASE_HISTORY_PROPERTIES) + len(case_property_mapping['candy'])
    )
def test_build_with_inferred_schema(self):
    """Regenerating the schema after adding inferred properties merges them in."""
    app = self.current_app
    domain = app.domain

    initial_schema = CaseExportDataSchema.generate_schema_from_builds(
        domain,
        app._id,
        self.case_type,
    )

    # The main table starts out with two items
    main_group = initial_schema.group_schemas[0]
    self.assertEqual(len(main_group.items), 2)

    add_inferred_export_properties(
        'TestSend',
        domain,
        self.case_type,
        ['question2', 'new-property'],
    )

    rebuilt_schema = CaseExportDataSchema.generate_schema_from_builds(
        domain,
        app._id,
        self.case_type,
    )

    # Main table: only the genuinely new property is added; the repeated
    # one is merged into the existing item.
    main_group = rebuilt_schema.group_schemas[0]
    self.assertEqual(len(main_group.items), 3)
def test_build_from_saved_schema(self):
    """Regenerating after a new build reuses the previously saved schema doc."""
    app = self.current_app
    schema = CaseExportDataSchema.generate_schema_from_builds(
        app.domain,
        app._id,
        self.case_type,
    )

    self.assertEqual(schema.last_app_versions[app._id], app.version)
    # One for case, one for case history
    self.assertEqual(len(schema.group_schemas), 2)

    # After the first schema has been saved let's add a second app to process
    second_build = Application.wrap(self.get_json('basic_case_application'))
    second_build._id = '456'
    second_build.copy_of = app.get_id
    second_build.version = 6
    # Keep the post-save signal from triggering an implicit rebuild
    with drop_connected_signals(app_post_save):
        second_build.save()
    self.addCleanup(second_build.delete)

    new_schema = CaseExportDataSchema.generate_schema_from_builds(
        app.domain,
        app._id,
        self.case_type,
    )

    # The saved document is updated in place, not replaced
    self.assertEqual(new_schema._id, schema._id)
    self.assertEqual(new_schema.last_app_versions[app._id], app.version)
    # One for case, one for case history
    self.assertEqual(len(new_schema.group_schemas), 2)
def setUpClass(cls):
    """Create a saved schema and a newer schema (one extra item) for the tests."""
    super(TestCaseExportInstanceGeneration, cls).setUpClass()
    cls.app_id = '1234'
    # Baseline schema: main table with a single scalar item 'p1'
    cls.schema = CaseExportDataSchema(group_schemas=[
        ExportGroupSchema(
            path=MAIN_TABLE,
            items=[
                ScalarItem(
                    path=[PathNode(name='p1')],
                    label='p1',
                    last_occurrences={},
                ),
            ],
            last_occurrences={cls.app_id: 3},
        ),
    ],
    )
    # Newer schema: same group plus a 'name' item last seen in app version 3
    cls.new_schema = CaseExportDataSchema(group_schemas=[
        ExportGroupSchema(
            path=MAIN_TABLE,
            items=[
                ScalarItem(
                    path=[PathNode(name='p1')],
                    label='p1',
                    last_occurrences={},
                ),
                ScalarItem(
                    path=[PathNode(name='name')],
                    label='name',
                    last_occurrences={cls.app_id: 3},
                ),
            ],
            last_occurrences={cls.app_id: 3},
        ),
    ],
    )
def test_inferred_schema_merge(self):
    """Merging inferred schemas keeps the inferred flag and unions inferred_from."""
    # Plain (non-inferred) schema with a single case property
    schema = CaseExportDataSchema(
        domain='my-domain',
        group_schemas=[
            ExportGroupSchema(
                path=MAIN_TABLE,
                items=[ExportItem(path=[PathNode(name='case_property')])],
            )
        ])
    # The same property, inferred from source 'One'
    inferred_schema = CaseExportDataSchema(
        domain='my-domain',
        group_schemas=[
            ExportGroupSchema(
                path=MAIN_TABLE,
                items=[
                    ExportItem(
                        path=[PathNode(name='case_property')],
                        inferred=True,
                        inferred_from=set(['One']),
                    )
                ],
                inferred=True,
            )
        ])
    # The same property, inferred from source 'Two'
    inferred_schema_two = CaseExportDataSchema(
        domain='my-domain',
        group_schemas=[
            ExportGroupSchema(
                path=MAIN_TABLE,
                items=[
                    ExportItem(
                        path=[PathNode(name='case_property')],
                        inferred=True,
                        inferred_from=set(['Two']),
                    )
                ],
                inferred=True,
            )
        ])

    merged = ExportDataSchema._merge_schemas(schema, inferred_schema, inferred_schema_two)

    self.assertEqual(len(merged.group_schemas), 1)
    self.assertTrue(merged.group_schemas[0].inferred)
    group_schema = merged.group_schemas[0]
    self.assertEqual(len(group_schema.items), 1)
    self.assertTrue(group_schema.items[0].inferred)
    # The two inferred sources are unioned on the merged item
    self.assertEqual(group_schema.items[0].inferred_from, set(['One', 'Two']))
def test_get_case_properties_for_case_type(self):
    """The helper should return the labels from the schema's main table."""
    stub_items = [
        ExportItem(
            path=[PathNode(name='name')],
            label='name',
            last_occurrences={},
        ),
        ExportItem(
            path=[PathNode(name='color')],
            label='color',
            last_occurrences={},
        ),
    ]
    stub_schema = CaseExportDataSchema(
        group_schemas=[
            ExportGroupSchema(
                path=MAIN_TABLE,
                items=stub_items,
                last_occurrences={},
            ),
        ],
    )
    # Short-circuit schema generation so no app builds are consulted
    with mock.patch(
            'corehq.apps.export.models.new.CaseExportDataSchema.generate_schema_from_builds',
            return_value=stub_schema):
        case_types = get_case_properties_for_case_type('test-domain', 'case-type')

    self.assertEqual(sorted(case_types), ['color', 'name'])
def setUpClass(cls):
    """Set up a commtrack-enabled domain and a three-table case schema."""
    super(TestConvertSavedExportSchemaToCaseExportInstance, cls).setUpClass()
    cls.project = create_domain(cls.domain)
    cls.project.commtrack_enabled = True
    cls.project.save()
    # Schema with main, case-history, and parent-case tables, all last seen
    # in app version 3
    cls.schema = CaseExportDataSchema(
        domain=cls.domain,
        case_type='wonderwoman',
        group_schemas=[
            ExportGroupSchema(
                path=MAIN_TABLE,
                items=[
                    ExportItem(
                        path=[PathNode(name='DOB')],
                        label='Case Propery DOB',
                        last_occurrences={cls.app_id: 3},
                    ),
                ],
                last_occurrences={cls.app_id: 3},
            ),
            ExportGroupSchema(
                path=CASE_HISTORY_TABLE,
                last_occurrences={cls.app_id: 3},
            ),
            ExportGroupSchema(
                path=PARENT_CASE_TABLE,
                last_occurrences={cls.app_id: 3},
            ),
        ],
    )
def test_different_doc_types_ordering(self):
    """Items with the same path but different doc types order independently."""
    unordered = self._create_schema([
        GeopointItem(path=[PathNode(name='one')]),
        ScalarItem(path=[PathNode(name='two')]),
        ScalarItem(path=[PathNode(name='three')]),
        ScalarItem(path=[PathNode(name='one')]),
    ])
    reference = self._create_schema([
        ScalarItem(path=[PathNode(name='two')]),
        ScalarItem(path=[PathNode(name='one')]),
        ScalarItem(path=[PathNode(name='three')]),
        GeopointItem(path=[PathNode(name='one')]),
    ])

    reordered = CaseExportDataSchema._reorder_schema_from_schema(
        unordered,
        reference,
    )

    # Expect the reference ordering, with Geopoint 'one' distinct from
    # Scalar 'one'.
    expected_items = [
        ScalarItem(path=[PathNode(name='two')]),
        ScalarItem(path=[PathNode(name='one')]),
        ScalarItem(path=[PathNode(name='three')]),
        GeopointItem(path=[PathNode(name='one')]),
    ]
    self._assert_item_order(reordered, [], expected_items)
def test_basic_ordering(self):
    """Items are rearranged to match the reference schema's order."""
    unordered = self._create_schema([
        ScalarItem(path=[PathNode(name='three')]),
        ScalarItem(path=[PathNode(name='one')]),
        ScalarItem(path=[PathNode(name='two')]),
        ScalarItem(path=[PathNode(name='four')]),
    ])
    reference = self._create_schema([
        ScalarItem(path=[PathNode(name='one')]),
        ScalarItem(path=[PathNode(name='two')]),
        ScalarItem(path=[PathNode(name='three')]),
        ScalarItem(path=[PathNode(name='four')]),
    ])

    reordered = CaseExportDataSchema._reorder_schema_from_schema(
        unordered,
        reference,
    )

    expected_items = [
        ScalarItem(path=[PathNode(name='one')]),
        ScalarItem(path=[PathNode(name='two')]),
        ScalarItem(path=[PathNode(name='three')]),
        ScalarItem(path=[PathNode(name='four')]),
    ]
    self._assert_item_order(reordered, [], expected_items)
def test_partial_match_ordering(self):
    """Items missing from the reference keep their relative order at the end."""
    unordered = self._create_schema([
        ExportItem(path=[PathNode(name='two')]),
        ExportItem(path=[PathNode(name='one')]),
        ExportItem(path=[PathNode(name='three')]),
    ])
    reference = self._create_schema([
        ExportItem(path=[PathNode(name='one')]),
        ExportItem(path=[PathNode(name='four')]),
        ExportItem(path=[PathNode(name='five')]),
        ExportItem(path=[PathNode(name='six')]),
    ])

    reordered = CaseExportDataSchema._reorder_schema_from_schema(
        unordered,
        reference,
    )

    # 'one' is matched by the reference; 'two' and 'three' trail in their
    # original relative order.
    expected_items = [
        ExportItem(path=[PathNode(name='one')]),
        ExportItem(path=[PathNode(name='two')]),
        ExportItem(path=[PathNode(name='three')]),
    ]
    self._assert_item_order(reordered, [], expected_items)
def test_parent_case_table_generation(self):
    """
    Ensures that the child case generates a parent case table and indices
    columns in main table
    """
    schema = CaseExportDataSchema.generate_schema_from_builds(
        self.domain,
        self.current_app._id,
        'child-case',
    )

    # One for case, one for case history, one for parent case
    self.assertEqual(len(schema.group_schemas), 3)

    main_table = next(
        gs for gs in schema.group_schemas if gs.path == MAIN_TABLE
    )
    # The main table carries exactly one case-index column
    index_items = [
        item for item in main_table.items
        if item.doc_type == 'CaseIndexItem'
    ]
    self.assertEqual(len(index_items), 1)

    # And exactly one parent-case group schema exists
    parent_tables = [
        gs for gs in schema.group_schemas if gs.path == PARENT_CASE_TABLE
    ]
    self.assertEqual(len(parent_tables), 1)
def get(self, request, *args, **kwargs):
    """Build a fresh export schema for the requested case type, then render."""
    # The export tag arrives JSON-quoted; strip the surrounding quotes.
    export_tag = request.GET.get("export_tag")
    case_type = export_tag.strip('"')
    schema = CaseExportDataSchema.generate_schema_from_builds(
        self.domain,
        case_type,
        force_rebuild=True,
    )
    self.export_instance = self.export_instance_cls.generate_instance_from_schema(schema)
    return super(CreateNewCustomCaseExportView, self).get(request, *args, **kwargs)
def test_different_doc_types_ordering(self):
    """Reordering matches items by both path and doc type."""
    schema = self._create_schema([
        GeopointItem(path=[PathNode(name='one')]),
        ScalarItem(path=[PathNode(name='two')]),
        ScalarItem(path=[PathNode(name='three')]),
        ScalarItem(path=[PathNode(name='one')]),
    ])
    ordered_schema = self._create_schema([
        ScalarItem(path=[PathNode(name='two')]),
        ScalarItem(path=[PathNode(name='one')]),
        ScalarItem(path=[PathNode(name='three')]),
        GeopointItem(path=[PathNode(name='one')]),
    ])
    schema = CaseExportDataSchema._reorder_schema_from_schema(
        schema,
        ordered_schema,
    )
    # Expect the reference ordering, distinguishing Geopoint vs Scalar 'one'
    self._assert_item_order(
        schema,
        [],
        [
            ScalarItem(path=[PathNode(name='two')]),
            ScalarItem(path=[PathNode(name='one')]),
            ScalarItem(path=[PathNode(name='three')]),
            GeopointItem(path=[PathNode(name='one')]),
        ],
    )
def get_case_data_source(app, case_type):
    """Build a UCR DataSourceConfiguration for ``case_type`` from the app's export schema."""
    schema = CaseExportDataSchema.generate_schema_from_builds(
        app.domain,
        app._id,
        case_type,
        only_process_current_builds=True,
    )
    # the first two (row number and case id) are redundant/export specific
    meta_properties_to_use = MAIN_CASE_TABLE_PROPERTIES[2:]
    # anything with a transform should also be removed
    meta_properties_to_use = [
        prop for prop in meta_properties_to_use
        if prop.item.transform is None
    ]
    meta_indicators = [_export_column_to_ucr_indicator(prop) for prop in meta_properties_to_use]
    dynamic_indicators = _get_dynamic_indicators_from_export_schema(schema)
    # filter out any duplicately defined columns from dynamic indicators
    meta_column_names = {indicator['column_id'] for indicator in meta_indicators}
    dynamic_indicators = [
        indicator for indicator in dynamic_indicators
        if indicator['column_id'] not in meta_column_names
    ]
    return DataSourceConfiguration(
        domain=app.domain,
        referenced_doc_type='CommCareCase',
        table_id=clean_table_name(app.domain, case_type),
        display_name=case_type,
        configured_filter=make_case_data_source_filter(case_type),
        configured_indicators=meta_indicators + dynamic_indicators + _get_shared_indicators(),
    )
def test_partial_match_ordering(self):
    """Unmatched items trail the reordered result in their original order."""
    schema = self._create_schema([
        ExportItem(path=[PathNode(name='two')]),
        ExportItem(path=[PathNode(name='one')]),
        ExportItem(path=[PathNode(name='three')]),
    ])
    ordered_schema = self._create_schema([
        ExportItem(path=[PathNode(name='one')]),
        ExportItem(path=[PathNode(name='four')]),
        ExportItem(path=[PathNode(name='five')]),
        ExportItem(path=[PathNode(name='six')]),
    ])
    schema = CaseExportDataSchema._reorder_schema_from_schema(
        schema,
        ordered_schema,
    )
    # 'one' matches the reference; 'two' and 'three' follow unchanged
    self._assert_item_order(
        schema,
        [],
        [
            ExportItem(path=[PathNode(name='one')]),
            ExportItem(path=[PathNode(name='two')]),
            ExportItem(path=[PathNode(name='three')]),
        ],
    )
def test_basic_ordering(self):
    """Items are rearranged to match the reference schema's order."""
    schema = self._create_schema([
        ScalarItem(path=[PathNode(name='three')]),
        ScalarItem(path=[PathNode(name='one')]),
        ScalarItem(path=[PathNode(name='two')]),
        ScalarItem(path=[PathNode(name='four')]),
    ])
    ordered_schema = self._create_schema([
        ScalarItem(path=[PathNode(name='one')]),
        ScalarItem(path=[PathNode(name='two')]),
        ScalarItem(path=[PathNode(name='three')]),
        ScalarItem(path=[PathNode(name='four')]),
    ])
    schema = CaseExportDataSchema._reorder_schema_from_schema(
        schema,
        ordered_schema,
    )
    self._assert_item_order(
        schema,
        [],
        [
            ScalarItem(path=[PathNode(name='one')]),
            ScalarItem(path=[PathNode(name='two')]),
            ScalarItem(path=[PathNode(name='three')]),
            ScalarItem(path=[PathNode(name='four')]),
        ],
    )
def _create_schema(self, items):
    """Wrap ``items`` in a single-group CaseExportDataSchema for this test."""
    group = ExportGroupSchema(
        path=[],
        items=items,
    )
    return CaseExportDataSchema(
        domain=self.domain,
        case_type=self.case_type,
        group_schemas=[group],
    )
def test_basic_application_schema(self):
    """A single app build yields a case group and a case-history group."""
    schema = CaseExportDataSchema.generate_schema_from_builds(self.domain, 'candy')

    # One group for the case table, one for case history
    self.assertEqual(len(schema.group_schemas), 2)

    main_group = schema.group_schemas[0]
    self.assertEqual(main_group.last_occurrence, 3)
    self.assertEqual(len(main_group.items), 2)
def test_build_from_saved_schema(self):
    """A second build with new case updates refreshes the saved schema doc."""
    app = self.current_app
    schema = CaseExportDataSchema.generate_schema_from_builds(
        app.domain,
        app._id,
        self.case_type,
    )

    self.assertEqual(schema.last_app_versions[app._id], self.first_build.version)
    # One for case, one for case history
    self.assertEqual(len(schema.group_schemas), 2)
    self.assertEqual(len(schema.group_schemas[0].items), 2)
    self.assertEqual(len(schema.group_schemas[1].items), len(KNOWN_CASE_PROPERTIES) + 2)

    # After the first schema has been saved let's add a second app to process
    second_build = Application.wrap(
        self.get_json('basic_case_application'))
    second_build._id = '456'
    second_build.copy_of = app.get_id
    second_build.version = 6
    second_build.has_submissions = True
    # Point the 'name' case update at a different question in the new build
    second_build.get_module(0).get_form(
        0).actions.update_case.update['name'] = ConditionalCaseUpdate(
        question_path='/data/question2')
    with drop_connected_signals(app_post_save):
        second_build.save()
    self.addCleanup(second_build.delete)

    new_schema = CaseExportDataSchema.generate_schema_from_builds(
        app.domain,
        app._id,
        self.case_type,
    )

    # Same saved document, updated to the second build's version
    self.assertEqual(new_schema._id, schema._id)
    self.assertEqual(new_schema.last_app_versions[app._id], second_build.version)
    # One for case, one for case history
    self.assertEqual(len(new_schema.group_schemas), 2)
    # NOTE(review): these re-assert the original `schema` object — presumably
    # `new_schema` was intended; confirm.
    self.assertEqual(len(schema.group_schemas[0].items), 2)
    self.assertEqual(len(schema.group_schemas[1].items), len(KNOWN_CASE_PROPERTIES) + 2)
def test_basic_case_prop_merge(self):
    """Merged schemas union properties per app and record last_occurrences."""
    app_id = '1234'
    case_property_mapping = {
        'candy': ['my_case_property', 'my_second_case_property']
    }
    schema1 = CaseExportDataSchema._generate_schema_from_case_property_mapping(
        case_property_mapping,
        [],
        app_id,
        1,
    )
    case_property_mapping = {
        'candy': ['my_case_property', 'my_third_case_property']
    }
    schema2 = CaseExportDataSchema._generate_schema_from_case_property_mapping(
        case_property_mapping,
        [],
        app_id,
        2,
    )
    schema3 = CaseExportDataSchema._generate_schema_for_case_history(
        case_property_mapping,
        app_id,
        2,
    )

    merged = CaseExportDataSchema._merge_schemas(schema1, schema2, schema3)

    self.assertEqual(len(merged.group_schemas), 2)
    group_schema1 = merged.group_schemas[0]
    group_schema2 = merged.group_schemas[1]

    self.assertEqual(group_schema1.last_occurrences[app_id], 2)
    self.assertEqual(len(group_schema1.items), 3)

    # Only one property was last seen in version 1 (the one dropped in v2)
    items = [
        i for i in group_schema1.items
        if i.last_occurrences[app_id] == 1
    ]
    self.assertEqual(len(items), 1)

    self.assertEqual(group_schema2.last_occurrences[app_id], 2)
    self.assertEqual(
        len(group_schema2.items),
        len(case_property_mapping['candy']) + len(KNOWN_CASE_PROPERTIES),
    )
def setUpClass(cls):
    """Save a spread of form and case schemas to exercise the DB accessors."""
    super(TestExportDBAccessors, cls).setUpClass()
    # Form schemas: current, other-domain, and one created a day earlier
    cls.form_schema = FormExportDataSchema(
        domain=cls.domain,
        app_id=cls.app_id,
        xmlns=cls.xmlns,
    )
    cls.form_schema_other = FormExportDataSchema(
        domain='other',
        app_id=cls.app_id,
        xmlns=cls.xmlns,
    )
    cls.form_schema_before = FormExportDataSchema(
        domain=cls.domain,
        app_id=cls.app_id,
        xmlns=cls.xmlns,
        created_on=datetime.utcnow() - timedelta(1)
    )
    # Case schemas: current, other case type, and one created a day earlier
    cls.case_schema = CaseExportDataSchema(
        domain=cls.domain,
        case_type=cls.case_type,
    )
    cls.case_schema_other = CaseExportDataSchema(
        domain=cls.domain,
        case_type='other',
    )
    cls.case_schema_before = CaseExportDataSchema(
        domain=cls.domain,
        case_type=cls.case_type,
        created_on=datetime.utcnow() - timedelta(1)
    )
    cls.schemas = [
        cls.form_schema,
        cls.form_schema_before,
        cls.form_schema_other,
        cls.case_schema_before,
        cls.case_schema,
        cls.case_schema_other,
    ]
    for schema in cls.schemas:
        schema.save()
def test_case_history_parsing(self):
    """Case-history schema includes updates for mapped and known properties."""
    schema = CaseExportDataSchema._generate_schema_for_case_history({
        'candy': ['my_case_property', 'my_second_case_property']
    }, self.app_id, 1)

    self.assertEqual(len(schema.group_schemas), 1)
    group_schema = schema.group_schemas[0]

    # BUG FIX: filter() is lazy on Python 3 and does not support len();
    # use a list comprehension so the count works on both Python 2 and 3.
    update_items = [item for item in group_schema.items if item.tag == PROPERTY_TAG_UPDATE]
    self.assertEqual(len(update_items), 2 + len(KNOWN_CASE_PROPERTIES))
def test_basic_case_prop_merge(self):
    """Merging schemas unions properties and records per-app last occurrences."""
    app_id = '1234'
    case_property_mapping = {
        'candy': ['my_case_property', 'my_second_case_property']
    }
    schema1 = CaseExportDataSchema._generate_schema_from_case_property_mapping(
        case_property_mapping,
        [],
        app_id,
        1,
    )
    case_property_mapping = {
        'candy': ['my_case_property', 'my_third_case_property']
    }
    schema2 = CaseExportDataSchema._generate_schema_from_case_property_mapping(
        case_property_mapping,
        [],
        app_id,
        2,
    )
    schema3 = CaseExportDataSchema._generate_schema_for_case_history(
        case_property_mapping,
        app_id,
        2,
    )

    merged = CaseExportDataSchema._merge_schemas(schema1, schema2, schema3)

    self.assertEqual(len(merged.group_schemas), 2)
    group_schema1 = merged.group_schemas[0]
    group_schema2 = merged.group_schemas[1]

    self.assertEqual(group_schema1.last_occurrences[app_id], 2)
    self.assertEqual(len(group_schema1.items), 3)

    # Only the property dropped after version 1 still has last occurrence 1
    items = [i for i in group_schema1.items if i.last_occurrences[app_id] == 1]
    self.assertEqual(len(items), 1)

    self.assertEqual(group_schema2.last_occurrences[app_id], 2)
    self.assertEqual(
        len(group_schema2.items),
        len(case_property_mapping['candy']) + len(KNOWN_CASE_PROPERTIES),
    )
def test_parent_case_table_generation_for_parent_case(self):
    """Ensures that the parent case doesn't have a parent case table"""
    generated = CaseExportDataSchema.generate_schema_from_builds(
        self.domain,
        self.current_app._id,
        self.case_type,
    )
    # Exactly two groups — case table and case history — with no parent table
    self.assertEqual(len(generated.group_schemas), 2)
def test_case_history_parsing(self):
    """Case-history schema tags mapped and known properties as updates."""
    mapping = {
        'candy': ['my_case_property', 'my_second_case_property']
    }
    schema = CaseExportDataSchema._generate_schema_for_case_history(mapping, self.app_id, 1)

    self.assertEqual(len(schema.group_schemas), 1)
    group_schema = schema.group_schemas[0]

    update_items = [
        item for item in group_schema.items
        if item.tag == PROPERTY_TAG_UPDATE
    ]
    self.assertEqual(len(update_items), 2 + len(KNOWN_CASE_PROPERTIES))
def test_parent_case_table_generation_for_parent_case(self):
    """Ensures that the parent case doesn't have a parent case table"""
    schema = CaseExportDataSchema.generate_schema_from_builds(
        self.domain,
        self.current_app._id,
        self.case_type,
    )
    # One for case, one for case history
    self.assertEqual(len(schema.group_schemas), 2)
def testCaseReferencesMakeItToCaseSchema(self):
    """save-to-case properties must appear as items in the main case table."""
    generated = CaseExportDataSchema.generate_schema_from_builds(
        self.domain,
        self.current_app._id,
        self.case_type,
        only_process_current_builds=False)
    main_item_names = {item.path[0].name for item in generated.group_schemas[0].items}
    self.assertEqual({'save_to_case_p1', 'save_to_case_p2'}, main_item_names)
def get_case_type_to_properties(domain):
    """Map each case type in ``domain`` to its cleaned main-table property labels."""
    case_type_to_properties = {}
    for case_type in get_case_types_for_domain_es(domain):
        schema = CaseExportDataSchema.generate_schema_from_builds(domain, None, case_type)
        # Labels have underscores stripped before being exposed
        case_type_to_properties[case_type] = [
            item.label.replace('_', '')
            for item in schema.group_schemas[0].items
        ]
    return case_type_to_properties
def testCaseReferencesMakeItToCaseSchema(self):
    """save-to-case properties should appear as items in the main case table."""
    schema = CaseExportDataSchema.generate_schema_from_builds(
        self.domain,
        self.current_app._id,
        self.case_type,
        only_process_current_builds=False
    )
    self.assertEqual(
        {'save_to_case_p1', 'save_to_case_p2'},
        {item.path[0].name for item in schema.group_schemas[0].items}
    )
def test_basic_delayed_schema(self):
    """Current-build-only processing skips the saved build; full processing includes it."""
    partial = CaseExportDataSchema.generate_schema_from_builds(
        self.domain,
        self.current_app._id,
        self.case_type,
        only_process_current_builds=True)
    # The saved build was not processed, so no version is recorded for it
    self.assertIsNone(partial.last_app_versions.get(self.current_app._id))
    self.assertEqual(len(partial.group_schemas[0].items), 2)

    full = CaseExportDataSchema.generate_schema_from_builds(
        self.domain,
        self.current_app._id,
        self.case_type,
        only_process_current_builds=False)
    # Now the build is processed and contributes its extra item
    self.assertEqual(full.last_app_versions[self.current_app._id], self.build.version)
    self.assertEqual(len(full.group_schemas[0].items), 3)
def test_basic_delayed_schema(self):
    """Skipped builds contribute nothing until full processing is requested."""
    schema = CaseExportDataSchema.generate_schema_from_builds(
        self.domain,
        self.current_app._id,
        self.case_type,
        only_process_current_builds=True
    )
    # The saved build is skipped, so no version is recorded for it
    self.assertIsNone(schema.last_app_versions.get(self.current_app._id))
    group_schema = schema.group_schemas[0]
    self.assertEqual(len(group_schema.items), 2)

    schema = CaseExportDataSchema.generate_schema_from_builds(
        self.domain,
        self.current_app._id,
        self.case_type,
        only_process_current_builds=False
    )
    # Full processing records the build version and adds its extra item
    self.assertEqual(schema.last_app_versions[self.current_app._id], self.build.version)
    group_schema = schema.group_schemas[0]
    self.assertEqual(len(group_schema.items), 3)
def test_basic_application_schema(self):
    """A single application produces a case group plus a case-history group."""
    app_id = self.current_app._id
    schema = CaseExportDataSchema.generate_schema_from_builds(
        self.domain,
        app_id,
        self.case_type,
    )

    # One for case, one for case history
    self.assertEqual(len(schema.group_schemas), 2)

    main_group = schema.group_schemas[0]
    self.assertEqual(main_group.last_occurrences[app_id], self.current_app.version)
    self.assertEqual(len(main_group.items), 2)
def test_basic_application_schema(self): schema = CaseExportDataSchema.generate_schema_from_builds( self.domain, self.current_app._id, self.case_type, ) # One for case, one for case history self.assertEqual(len(schema.group_schemas), 2) group_schema = schema.group_schemas[0] self.assertEqual(group_schema.last_occurrences[self.current_app._id], self.current_app.version) self.assertEqual(len(group_schema.items), 2)
def test_build_from_saved_schema(self):
    """A second build with a changed case update refreshes the saved schema."""
    app = self.current_app
    schema = CaseExportDataSchema.generate_schema_from_builds(
        app.domain,
        app._id,
        self.case_type,
    )

    self.assertEqual(schema.last_app_versions[app._id], self.first_build.version)
    # One for case, one for case history
    self.assertEqual(len(schema.group_schemas), 2)
    self.assertEqual(len(schema.group_schemas[0].items), 2)
    self.assertEqual(len(schema.group_schemas[1].items), 8)

    # After the first schema has been saved let's add a second app to process
    second_build = Application.wrap(self.get_json('basic_case_application'))
    second_build._id = '456'
    second_build.copy_of = app.get_id
    second_build.version = 6
    second_build.has_submissions = True
    # Repoint the 'name' case update at question2 in the new build
    second_build.get_module(0).get_form(0).actions.update_case.update['name'] = '/data/question2'
    with drop_connected_signals(app_post_save):
        second_build.save()
    self.addCleanup(second_build.delete)

    new_schema = CaseExportDataSchema.generate_schema_from_builds(
        app.domain,
        app._id,
        self.case_type,
    )

    # The saved schema document is reused and updated in place
    self.assertEqual(new_schema._id, schema._id)
    self.assertEqual(new_schema.last_app_versions[app._id], second_build.version)
    # One for case, one for case history
    self.assertEqual(len(new_schema.group_schemas), 2)
    # NOTE(review): these re-check the original `schema` object — presumably
    # `new_schema` was intended; confirm.
    self.assertEqual(len(schema.group_schemas[0].items), 2)
    self.assertEqual(len(schema.group_schemas[1].items), 8)
def get_case_properties_for_case_type(domain, case_type):
    """Return sorted case property names, using export schemas on the SQL backend."""
    if should_use_sql_backend(domain):
        from corehq.apps.export.models import CaseExportDataSchema
        from corehq.apps.export.const import MAIN_TABLE
        schema = CaseExportDataSchema.generate_schema_from_builds(
            domain,
            case_type,
        )
        main_groups = [gs for gs in schema.group_schemas if gs.path == MAIN_TABLE]
        if main_groups:
            return sorted({item.path[0] for item in main_groups[0].items})
    else:
        from corehq.apps.hqcase.dbaccessors import get_case_properties
        return get_case_properties(domain, case_type)
def test_get_app_build_ids_to_process(self):
    """Only builds newer than the recorded version per app are returned."""
    from corehq.apps.app_manager.dbaccessors import AppBuildVersion
    all_builds = [
        AppBuildVersion(app_id='1', build_id='2', version=3),
        AppBuildVersion(app_id='1', build_id='4', version=5),
        AppBuildVersion(app_id='2', build_id='2', version=3),
    ]
    last_app_versions = {'1': 3}
    # Stub out the DB lookup so the filtering logic runs against fixed data
    with patch(
            'corehq.apps.export.models.new.get_all_built_app_ids_and_versions',
            return_value=all_builds):
        build_ids = CaseExportDataSchema._get_app_build_ids_to_process(
            'dummy',
            'dummy-app-id',
            last_app_versions)

    self.assertEqual(sorted(build_ids), ['2', '4'])
def test_case_history_parsing(self):
    """Case-history items mirror CASE_HISTORY_PROPERTIES plus the mapped updates."""
    schema = CaseExportDataSchema._generate_schema_for_case_history({
        'candy': ['my_case_property', 'my_second_case_property']
    }, 1)

    self.assertEqual(len(schema.group_schemas), 1)
    group_schema = schema.group_schemas[0]

    # The leading items follow CASE_HISTORY_PROPERTIES in order
    for idx, prop in enumerate(CASE_HISTORY_PROPERTIES):
        self.assertEqual(group_schema.items[idx].path, [prop.name])
        self.assertEqual(group_schema.items[idx].tag, prop.tag)

    # BUG FIX: filter() returns an iterator on Python 3, which len() rejects;
    # materialize the matching items in a list first.
    update_items = [item for item in group_schema.items if item.tag == PROPERTY_TAG_UPDATE]
    self.assertEqual(len(update_items), 2)
def test_build_with_inferred_schema(self):
    """Inferred group and item flags survive schema generation."""
    app = self.current_app
    generated = CaseExportDataSchema.generate_schema_from_builds(
        app.domain,
        app._id,
        self.case_type,
    )

    main_group = generated.group_schemas[0]
    self.assertEqual(main_group.path, MAIN_TABLE)
    self.assertTrue(main_group.inferred)

    flagged = [item for item in main_group.items if item.inferred]
    self.assertEqual(len(flagged), 2)
def test_build_with_inferred_schema(self):
    """Inferred flags on the main-table group and its items are preserved."""
    app = self.current_app
    schema = CaseExportDataSchema.generate_schema_from_builds(
        app.domain,
        app._id,
        self.case_type,
    )
    group_schema = schema.group_schemas[0]
    self.assertEqual(group_schema.path, MAIN_TABLE)
    self.assertTrue(group_schema.inferred)

    # Exactly the two inferred properties carry the flag
    inferred_items = [item for item in group_schema.items if item.inferred]
    self.assertEqual(len(inferred_items), 2)
def get_case_properties_for_case_type(domain, case_type):
    """Return sorted case property names for ``case_type`` in ``domain``."""
    if should_use_sql_backend(domain):
        from corehq.apps.export.models import CaseExportDataSchema
        from corehq.apps.export.models.new import MAIN_TABLE
        schema = CaseExportDataSchema.generate_schema_from_builds(
            domain,
            None,
            case_type,
        )
        main_groups = [gs for gs in schema.group_schemas if gs.path == MAIN_TABLE]
        if main_groups:
            return sorted({item.path[0].name for item in main_groups[0].items})
    else:
        from corehq.apps.hqcase.dbaccessors import get_case_properties
        return get_case_properties(domain, case_type)
def get_case_type_to_properties(domain):
    """Map each non-blank case type in ``domain`` to its cleaned property labels."""
    case_type_to_properties = defaultdict(list)
    for case_type in get_case_types_for_domain_es(domain):
        if not case_type:
            # TODO - understand why a case can have a blank case type and handle appropriately
            continue
        # Prefer the latest saved schema; rebuild from app builds otherwise
        schema = (
            get_latest_case_export_schema(domain, case_type)
            or CaseExportDataSchema.generate_schema_from_builds(domain, None, case_type)
        )
        for item in schema.group_schemas[0].items:
            case_type_to_properties[case_type].append(item.label.replace('_', ''))
    return dict(case_type_to_properties)
def get_case_type_to_properties(domain):
    """Map each non-blank case type in ``domain`` to its cleaned property labels.

    Prefers the latest saved export schema, rebuilding from app builds when
    none exists. Labels have underscores stripped.
    """
    case_type_to_properties = defaultdict(list)
    case_types = get_case_types_for_domain_es(domain)
    for case_type in case_types:
        if not case_type:
            # TODO - understand why a case can have a blank case type and handle appropriately
            continue
        case_export_schema = (get_latest_case_export_schema(domain, case_type)
                              or CaseExportDataSchema.generate_schema_from_builds(
                                  domain, None, case_type))
        for export_group_schema in case_export_schema.group_schemas[0].items:
            cleaned_case_property = export_group_schema.label.replace('_', '')
            case_type_to_properties[case_type].append(cleaned_case_property)
    return dict(case_type_to_properties)
def test_case_type_metadata_parsing(self):
    """Generated items keep the property path and record the version seen."""
    mapping = {
        'candy': ['my_case_property', 'my_second_case_property']
    }
    schema = CaseExportDataSchema._generate_schema_from_case_property_mapping(
        mapping,
        1,
    )

    self.assertEqual(len(schema.group_schemas), 1)
    group_schema = schema.group_schemas[0]

    first_item, second_item = group_schema.items[0], group_schema.items[1]
    self.assertEqual(first_item.path, ['my_case_property'])
    self.assertEqual(first_item.last_occurrence, 1)
    self.assertEqual(second_item.path, ['my_second_case_property'])
    self.assertEqual(second_item.last_occurrence, 1)
def test_multiple_app_schema_generation(self):
    """Schemas built from multiple apps record versions for each build."""
    generated = CaseExportDataSchema.generate_schema_from_builds(
        self.domain,
        self.current_app._id,
        self.case_type,
    )

    # The other app's build version is recorded against its source app id
    self.assertEqual(
        generated.last_app_versions[self.other_build.copy_of],
        self.other_build.version,
    )
    # One for case, one for case history
    self.assertEqual(len(generated.group_schemas), 2)

    main_group = generated.group_schemas[0]
    self.assertEqual(
        main_group.last_occurrences[self.current_app._id],
        self.current_app.version,
    )
    self.assertEqual(len(main_group.items), 2)
def test_multiple_app_schema_generation(self):
    """Builds from multiple apps each record their version in the schema."""
    schema = CaseExportDataSchema.generate_schema_from_builds(
        self.domain,
        self.current_app._id,
        self.case_type,
    )
    # The other app's build version is keyed by its source app id
    self.assertEqual(
        schema.last_app_versions[self.other_build.copy_of],
        self.other_build.version,
    )
    # One for case, one for case history
    self.assertEqual(len(schema.group_schemas), 2)
    group_schema = schema.group_schemas[0]
    self.assertEqual(group_schema.last_occurrences[self.current_app._id], self.current_app.version)
    self.assertEqual(len(group_schema.items), 2)
def get_case_properties_for_case_type(domain, case_type):
    """Return sorted case property names for ``case_type`` in ``domain``.

    On the SQL backend, properties come from the generated export schema's
    main table; otherwise the hqcase dbaccessor is used.
    """
    # todo: seems like poor boundaries for this function care about the backend
    # todo: get_case_properties just always return the right answer,
    # todo: possibly by moving this there.
    if should_use_sql_backend(domain):
        from corehq.apps.export.models import CaseExportDataSchema
        from corehq.apps.export.models.new import MAIN_TABLE
        schema = CaseExportDataSchema.generate_schema_from_builds(
            domain,
            None,
            case_type,
        )
        group_schemas = [gs for gs in schema.group_schemas if gs.path == MAIN_TABLE]
        if group_schemas:
            return sorted(set([item.path[0].name for item in group_schemas[0].items]))
    else:
        from corehq.apps.hqcase.dbaccessors import get_case_properties
        return get_case_properties(domain, case_type)
def get_case_properties_for_case_type(domain, case_type):
    """Return sorted case property names, backend-dependent.

    SQL-backed domains derive properties from the export schema's main
    table; others fall back to the hqcase dbaccessor.
    """
    # todo: seems like poor boundaries for this function care about the backend
    # todo: get_case_properties just always return the right answer,
    # todo: possibly by moving this there.
    if should_use_sql_backend(domain):
        from corehq.apps.export.models import CaseExportDataSchema
        from corehq.apps.export.models.new import MAIN_TABLE
        schema = CaseExportDataSchema.generate_schema_from_builds(
            domain,
            None,
            case_type,
        )
        group_schemas = [gs for gs in schema.group_schemas if gs.path == MAIN_TABLE]
        if group_schemas:
            return sorted(set([item.path[0].name for item in group_schemas[0].items]))
    else:
        from corehq.apps.hqcase.dbaccessors import get_case_properties
        return get_case_properties(domain, case_type)
def test_get_app_build_ids_to_process(self):
    """Builds newer than the recorded app version (or for new apps) are returned."""
    from corehq.apps.app_manager.dbaccessors import AppBuildVersion
    results = [
        AppBuildVersion(app_id='1', build_id='2', version=3),
        AppBuildVersion(app_id='1', build_id='4', version=5),
        AppBuildVersion(app_id='2', build_id='2', version=3),
    ]
    last_app_versions = {
        '1': 3
    }
    # Stub the DB lookup so filtering runs against the fixed build list
    with patch(
            'corehq.apps.export.models.new.get_all_built_app_ids_and_versions',
            return_value=results):
        build_ids = CaseExportDataSchema._get_app_build_ids_to_process(
            'dummy',
            last_app_versions
        )
    self.assertEqual(sorted(build_ids), ['2', '4'])
def setUpClass(cls):
    """Build one app-derived schema and one inferred schema as fixtures."""
    super(TestExportInstanceGenerationWithInferredSchema, cls).setUpClass()
    cls.schema = CaseExportDataSchema(
        app_id=cls.app_id,
        case_type=cls.case_type,
        group_schemas=[
            ExportGroupSchema(
                path=MAIN_TABLE,
                items=[
                    ExportItem(
                        path=[
                            PathNode(name='data'),
                            PathNode(name='case_property'),
                        ],
                        label='Question 1',
                        last_occurrences={cls.app_id: 3},
                    ),
                ],
                last_occurrences={cls.app_id: 3},
            ),
        ],
    )
    # The inferred schema repeats 'case_property' and adds a second property
    # so the tests can exercise merging of app-derived and inferred items.
    cls.inferred_schema = InferredSchema(
        case_type=cls.case_type,
        group_schemas=[
            InferredExportGroupSchema(
                path=MAIN_TABLE,
                items=[
                    ExportItem(
                        path=[
                            PathNode(name='data'),
                            PathNode(name='case_property'),
                        ],
                        label='Inferred 1',
                        inferred=True,
                    ),
                    ExportItem(
                        path=[
                            PathNode(name='data'),
                            PathNode(name='case_property_2'),
                        ],
                        label='Inferred 1',
                        inferred=True,
                    ),
                ],
                inferred=True,
            ),
        ],
    )
def test_case_type_metadata_parsing(self):
    """A case-property mapping should yield one group schema whose items
    record their path and the app version they were last seen in."""
    mapping = {
        'candy': ['my_case_property', 'my_second_case_property'],
    }
    schema = CaseExportDataSchema._generate_schema_from_case_property_mapping(
        mapping,
        [],
        self.app_id,
        1,
    )
    self.assertEqual(len(schema.group_schemas), 1)
    group_schema = schema.group_schemas[0]

    first_item, second_item = group_schema.items[0], group_schema.items[1]
    self.assertEqual(first_item.path, [PathNode(name='my_case_property')])
    self.assertEqual(first_item.last_occurrences[self.app_id], 1)
    self.assertEqual(second_item.path, [PathNode(name='my_second_case_property')])
    self.assertEqual(second_item.last_occurrences[self.app_id], 1)
def test_case_type_metadata_parsing(self):
    """Each mapped case property becomes an export item carrying its path
    and the app version it last occurred in."""
    case_property_mapping = {
        'candy': ['my_case_property', 'my_second_case_property'],
    }
    schema = CaseExportDataSchema._generate_schema_from_case_property_mapping(
        case_property_mapping,
        [],
        self.app_id,
        1,
    )
    self.assertEqual(len(schema.group_schemas), 1)

    items = schema.group_schemas[0].items
    self.assertEqual(items[0].path, [PathNode(name='my_case_property')])
    self.assertEqual(items[0].last_occurrences[self.app_id], 1)
    self.assertEqual(items[1].path, [PathNode(name='my_second_case_property')])
    self.assertEqual(items[1].last_occurrences[self.app_id], 1)
def setUpClass(cls):
    """Create a commtrack-enabled domain plus a minimal one-property schema."""
    super(TestForceConvertExport, cls).setUpClass()
    cls.project = create_domain(cls.domain)
    cls.project.commtrack_enabled = True
    cls.project.save()

    dob_item = ExportItem(
        path=[PathNode(name='DOB')],
        label='Case Property DOB',
        last_occurrences={cls.app_id: 3},
    )
    cls.schema = CaseExportDataSchema(
        domain=cls.domain,
        group_schemas=[
            ExportGroupSchema(
                path=MAIN_TABLE,
                items=[dob_item],
                last_occurrences={cls.app_id: 3},
            ),
        ],
    )
def test_parent_case_table_generation(self):
    """
    Ensures that the child case generates a parent case table and indices
    columns in main table
    """
    schema = CaseExportDataSchema.generate_schema_from_builds(
        self.domain,
        self.current_app._id,
        'child-case',
    )
    # Expect three groups: case, case history, and parent case.
    self.assertEqual(len(schema.group_schemas), 3)

    main_table = next(gs for gs in schema.group_schemas if gs.path == MAIN_TABLE)
    index_items = [
        item for item in main_table.items if item.doc_type == 'CaseIndexItem'
    ]
    self.assertEqual(len(index_items), 1)

    parent_tables = [
        gs for gs in schema.group_schemas if gs.path == PARENT_CASE_TABLE
    ]
    self.assertEqual(len(parent_tables), 1)
def tearDownClass(cls):
    """Remove the test app and the export schema generated from it."""
    cls.current_app.delete()
    latest_schema = CaseExportDataSchema.get_latest_export_schema(
        cls.domain,
        cls.current_app._id,
        cls.case_type,
    )
    latest_schema.delete()
    super(TestAppCasePropertyReferences, cls).tearDownClass()
def get_export_schema(self, export_instance):
    # Generate the case export data schema for this export instance's case
    # type, scanning the domain's builds.
    # NOTE(review): other call sites in this file pass three positional args
    # (domain, app_id, case_type) to generate_schema_from_builds; only two
    # are passed here — confirm this matches the current signature.
    return CaseExportDataSchema.generate_schema_from_builds(self.domain, export_instance.case_type)
def tearDownClass(cls):
    """Delete the fixture app and the export schema it produced."""
    cls.current_app.delete()
    CaseExportDataSchema.get_latest_export_schema(
        cls.domain, cls.current_app._id, cls.case_type
    ).delete()
    super(TestAppCasePropertyReferences, cls).tearDownClass()