def test_properties_merge_without_losing_pairings(self):
    """Merging two property -States must carry both taxlot pairings over to the surviving -View."""
    lot_factory = FakeTaxLotFactory(organization=self.org)
    lot_state_factory = FakeTaxLotStateFactory(organization=self.org)

    # Build one taxlot/-State/-View trio per property -View and pair them up,
    # so each of the two property -Views starts with exactly one pairing.
    lot_views = []
    for prop_view in (self.view_1, self.view_2):
        lot = lot_factory.get_taxlot()
        lot_state = lot_state_factory.get_taxlot_state()
        lot_view = TaxLotView.objects.create(
            taxlot=lot, cycle=self.cycle, state=lot_state
        )
        TaxLotProperty(
            primary=True,
            cycle_id=self.cycle.id,
            property_view_id=prop_view.id,
            taxlot_view_id=lot_view.id
        ).save()
        lot_views.append(lot_view)

    # Merge the properties (state_1 is listed last, giving it priority).
    url = reverse('api:v2:properties-merge') + f'?organization_id={self.org.pk}'
    post_params = json.dumps({
        'state_ids': [self.state_2.pk, self.state_1.pk]
    })
    self.client.post(url, post_params, content_type='application/json')

    # Both pairings must survive the merge...
    self.assertEqual(TaxLotProperty.objects.count(), 2)

    # ...and both must now be attached to the single remaining PropertyView.
    surviving_view = PropertyView.objects.first()
    paired_lot_view_ids = list(
        TaxLotProperty.objects.filter(property_view_id=surviving_view.id).values_list('taxlot_view_id', flat=True)
    )
    self.assertCountEqual(paired_lot_view_ids, [lv.id for lv in lot_views])
def test_error_occurs_when_trying_to_apply_a_label_to_taxlotview_from_a_different_org(self):
    """Applying an Org 2 label to an Org 1 TaxLotView must fail at the DB level.

    Exercises three separate code paths that attach labels, asserting each one
    raises IntegrityError, and finally verifies no label was actually attached.
    Each attempt is wrapped in its own transaction.atomic() so the broken
    transaction is rolled back and later queries still work.
    """
    # Org 1 taxlot/-State/-View to (incorrectly) receive the Org 2 label.
    org_1_taxlot = TaxLot.objects.create(organization=self.org)
    taxlot_state_factory = FakeTaxLotStateFactory(organization=self.org)
    org_1_taxlotstate = taxlot_state_factory.get_taxlot_state()
    org_1_taxlotview = TaxLotView.objects.create(
        taxlot=org_1_taxlot, state=org_1_taxlotstate, cycle=self.cycle
    )

    # Path 1: the API view's add_labels helper.
    # NOTE(review): self.api_view / self.org_2_status_label appear to be built
    # in setUp outside this view — confirm against the fixture.
    with transaction.atomic():
        with self.assertRaises(IntegrityError):
            self.api_view.add_labels(
                self.api_view.models['taxlot'].objects.none(),
                'taxlot',
                [org_1_taxlot.id],
                [self.org_2_status_label.id]
            )

    # Path 2: the M2M manager directly.
    with transaction.atomic():
        with self.assertRaises(IntegrityError):
            org_1_taxlotview.labels.add(self.org_2_status_label)

    # Path 3: Via TaxLotState Rule with Label
    org_1_dq = DataQualityCheck.objects.get(organization=self.org)
    org_1_tls_rule = org_1_dq.rules.filter(table_name='TaxLotState').first()
    # Purposely give an Org 1 Rule an Org 2 Label
    org_1_tls_rule.status_label = self.org_2_status_label
    org_1_tls_rule.save()

    with transaction.atomic():
        with self.assertRaises(IntegrityError):
            org_1_dq.update_status_label(
                self.TaxlotViewLabels,
                Rule.objects.get(pk=org_1_tls_rule.id),
                org_1_taxlot.id,
                org_1_taxlotstate.id
            )

    # None of the failed attempts should have left a label behind.
    self.assertFalse(TaxLotView.objects.get(pk=org_1_taxlotview.id).labels.all().exists())
class TestMatchingOutsideImportFile(DataMappingBaseTestCase):
    """Matching behavior when a second ImportFile is matched against already-promoted records.

    Each test imports into import_file_1, runs match_buildings, then imports
    into import_file_2 and runs match_buildings again to exercise the
    cross-ImportFile duplicate/match/roll-up logic for both Properties and TaxLots.
    """

    def setUp(self):
        # set_up returns the standard fixture tuple; a second ImportFile in the
        # same cycle is added so records can arrive from two separate imports.
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file_1, self.import_record_1, self.cycle = selfvars
        self.import_record_2, self.import_file_2 = self.create_import_file(
            self.user, self.org, self.cycle)
        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)

    def test_duplicate_properties_identified(self):
        """An exact duplicate from a later file is flagged DATA_STATE_DELETE, not promoted."""
        base_details = {
            'address_line_1': '123 Match Street',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create property in first ImportFile
        ps_1 = self.property_state_factory.get_property_state(**base_details)
        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)
        # Create duplicate property coming from second ImportFile
        base_details['import_file_id'] = self.import_file_2.id
        ps_2 = self.property_state_factory.get_property_state(**base_details)
        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        match_buildings(self.import_file_2.id)
        # 1 Property, 1 PropertyViews, 2 PropertyStates
        self.assertEqual(Property.objects.count(), 1)
        self.assertEqual(PropertyView.objects.count(), 1)
        self.assertEqual(PropertyState.objects.count(), 2)
        # Be sure the first property is used in the -View and the second is marked for "deletion"
        self.assertEqual(PropertyView.objects.first().state_id, ps_1.id)
        self.assertEqual(
            PropertyState.objects.get(data_state=DATA_STATE_DELETE).id, ps_2.id)

    def test_match_properties_if_all_default_fields_match(self):
        """A later -State matching on default criteria merges with the existing one; a non-match promotes separately."""
        base_details = {
            'address_line_1': '123 Match Street',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create property in first ImportFile
        ps_1 = self.property_state_factory.get_property_state(**base_details)
        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)
        # Create properties from second ImportFile, one matching existing PropertyState
        base_details['import_file_id'] = self.import_file_2.id
        base_details['city'] = 'Denver'
        ps_2 = self.property_state_factory.get_property_state(**base_details)
        base_details['pm_property_id'] = '11111'
        base_details['city'] = 'Philadelphia'
        ps_3 = self.property_state_factory.get_property_state(**base_details)
        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        match_buildings(self.import_file_2.id)
        # 2 Property, 2 PropertyViews, 4 PropertyStates (3 imported, 1 merge result)
        self.assertEqual(Property.objects.count(), 2)
        self.assertEqual(PropertyView.objects.count(), 2)
        self.assertEqual(PropertyState.objects.count(), 4)
        cities_from_views = []
        ps_ids_from_views = []
        for pv in PropertyView.objects.all():
            cities_from_views.append(pv.state.city)
            ps_ids_from_views.append(pv.state_id)
        # The merged -State carries ps_2's city; ps_1/ps_2 themselves are no longer promoted.
        self.assertIn('Denver', cities_from_views)
        self.assertIn('Philadelphia', cities_from_views)
        self.assertIn(ps_3.id, ps_ids_from_views)
        self.assertNotIn(ps_1.id, ps_ids_from_views)
        self.assertNotIn(ps_2.id, ps_ids_from_views)
        # Refresh -States and check data_state and merge_state values
        rps_1 = PropertyState.objects.get(pk=ps_1.id)
        self.assertEqual(rps_1.data_state, DATA_STATE_MATCHING)
        self.assertEqual(rps_1.merge_state, MERGE_STATE_NEW)
        rps_2 = PropertyState.objects.get(pk=ps_2.id)
        self.assertEqual(rps_2.data_state, DATA_STATE_MATCHING)
        self.assertEqual(rps_2.merge_state, MERGE_STATE_UNKNOWN)
        # Locate the merge result of ps_1 + ps_2 by its merged field values,
        # excluding the states that kept MERGE_STATE_UNKNOWN.
        ps_1_plus_2 = PropertyState.objects.filter(
            pm_property_id__isnull=True,
            city='Denver',
            address_line_1='123 Match Street').exclude(
            data_state=DATA_STATE_MATCHING,
            merge_state=MERGE_STATE_UNKNOWN).get()
        self.assertEqual(ps_1_plus_2.data_state, DATA_STATE_MATCHING)
        self.assertEqual(ps_1_plus_2.merge_state, MERGE_STATE_MERGED)
        rps_3 = PropertyState.objects.get(pk=ps_3.id)
        self.assertEqual(rps_3.data_state, DATA_STATE_MATCHING)
        self.assertEqual(rps_3.merge_state, MERGE_STATE_NEW)

    def test_match_properties_rolls_up_multiple_existing_matches_in_id_order_if_they_exist(
            self):
        """When one incoming -State matches several promoted ones, they roll up into a single merged -View in id order."""
        base_details = {
            'pm_property_id': '123MatchID',
            'city': 'Golden',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create 3 non-matching properties in first ImportFile
        ps_1 = self.property_state_factory.get_property_state(**base_details)
        base_details['pm_property_id'] = '789DifferentID'
        base_details['city'] = 'Denver'
        ps_2 = self.property_state_factory.get_property_state(**base_details)
        base_details['pm_property_id'] = '1337AnotherDifferentID'
        base_details['city'] = 'Philadelphia'
        ps_3 = self.property_state_factory.get_property_state(**base_details)
        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)
        # Make all those states match
        # (direct .update() bypasses matching, so 3 separate records now share an ID)
        PropertyState.objects.filter(pk__in=[ps_2.id, ps_3.id]).update(
            pm_property_id='123MatchID')
        # Verify that none of the 3 have been merged
        self.assertEqual(Property.objects.count(), 3)
        self.assertEqual(PropertyState.objects.count(), 3)
        self.assertEqual(PropertyView.objects.count(), 3)
        # Import a property that will identify the first 3 as matches.
        base_details['import_file_id'] = self.import_file_2.id
        base_details['pm_property_id'] = '123MatchID'
        del base_details['city']
        ps_4 = self.property_state_factory.get_property_state(**base_details)
        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        match_buildings(self.import_file_2.id)
        # There should only be one PropertyView which is associated to new, merged -State
        self.assertEqual(PropertyView.objects.count(), 1)
        view = PropertyView.objects.first()
        self.assertNotIn(view.state_id, [ps_1.id, ps_2.id, ps_3.id, ps_4.id])
        # It will have a -State having city as Philadelphia
        # (ps_3 has the highest id, so its values win the id-order roll-up)
        self.assertEqual(view.state.city, 'Philadelphia')
        # The corresponding log should be a System Match
        audit_log = PropertyAuditLog.objects.get(state_id=view.state_id)
        self.assertEqual(audit_log.name, 'System Match')

    def test_match_taxlots_if_all_default_fields_match(self):
        """TaxLot mirror of test_match_properties_if_all_default_fields_match."""
        base_details = {
            'address_line_1': '123 Match Street',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create property in first ImportFile
        tls_1 = self.taxlot_state_factory.get_taxlot_state(**base_details)
        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)
        # Create properties from second ImportFile, one matching existing PropertyState
        base_details['import_file_id'] = self.import_file_2.id
        base_details['city'] = 'Denver'
        tls_2 = self.taxlot_state_factory.get_taxlot_state(**base_details)
        base_details['jurisdiction_tax_lot_id'] = '11111'
        base_details['city'] = 'Philadelphia'
        tls_3 = self.taxlot_state_factory.get_taxlot_state(**base_details)
        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        match_buildings(self.import_file_2.id)
        # 2 TaxLot, 2 TaxLotViews, 4 TaxLotStates (3 imported, 1 merge result)
        self.assertEqual(TaxLot.objects.count(), 2)
        self.assertEqual(TaxLotView.objects.count(), 2)
        self.assertEqual(TaxLotState.objects.count(), 4)
        cities_from_views = []
        tls_ids_from_views = []
        for tlv in TaxLotView.objects.all():
            cities_from_views.append(tlv.state.city)
            tls_ids_from_views.append(tlv.state_id)
        self.assertIn('Denver', cities_from_views)
        self.assertIn('Philadelphia', cities_from_views)
        self.assertIn(tls_3.id, tls_ids_from_views)
        self.assertNotIn(tls_1.id, tls_ids_from_views)
        self.assertNotIn(tls_2.id, tls_ids_from_views)
        # Refresh -States and check data_state and merge_state values
        rtls_1 = TaxLotState.objects.get(pk=tls_1.id)
        self.assertEqual(rtls_1.data_state, DATA_STATE_MATCHING)
        self.assertEqual(rtls_1.merge_state, MERGE_STATE_NEW)
        rtls_2 = TaxLotState.objects.get(pk=tls_2.id)
        self.assertEqual(rtls_2.data_state, DATA_STATE_MATCHING)
        self.assertEqual(rtls_2.merge_state, MERGE_STATE_UNKNOWN)
        # Locate the merge result of tls_1 + tls_2 by its merged field values.
        tls_1_plus_2 = TaxLotState.objects.filter(
            jurisdiction_tax_lot_id__isnull=True,
            city='Denver',
            address_line_1='123 Match Street').exclude(
            data_state=DATA_STATE_MATCHING,
            merge_state=MERGE_STATE_UNKNOWN).get()
        self.assertEqual(tls_1_plus_2.data_state, DATA_STATE_MATCHING)
        self.assertEqual(tls_1_plus_2.merge_state, MERGE_STATE_MERGED)
        rtls_3 = TaxLotState.objects.get(pk=tls_3.id)
        self.assertEqual(rtls_3.data_state, DATA_STATE_MATCHING)
        self.assertEqual(rtls_3.merge_state, MERGE_STATE_NEW)

    def test_match_taxlots_rolls_up_multiple_existing_matches_in_id_order_if_they_exist(
            self):
        """TaxLot mirror of the property roll-up test above."""
        base_details = {
            'jurisdiction_tax_lot_id': '123MatchID',
            'city': 'Golden',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create 3 non-matching taxlots in first ImportFile
        tls_1 = self.taxlot_state_factory.get_taxlot_state(**base_details)
        base_details['jurisdiction_tax_lot_id'] = '789DifferentID'
        base_details['city'] = 'Denver'
        tls_2 = self.taxlot_state_factory.get_taxlot_state(**base_details)
        base_details['jurisdiction_tax_lot_id'] = '1337AnotherDifferentID'
        base_details['city'] = 'Philadelphia'
        tls_3 = self.taxlot_state_factory.get_taxlot_state(**base_details)
        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)
        # Make all those states match
        TaxLotState.objects.filter(pk__in=[tls_2.id, tls_3.id]).update(
            jurisdiction_tax_lot_id='123MatchID')
        # Verify that none of the 3 have been merged
        self.assertEqual(TaxLot.objects.count(), 3)
        self.assertEqual(TaxLotState.objects.count(), 3)
        self.assertEqual(TaxLotView.objects.count(), 3)
        # Import a property that will identify the first 3 as matches.
        base_details['import_file_id'] = self.import_file_2.id
        base_details['jurisdiction_tax_lot_id'] = '123MatchID'
        del base_details['city']
        tls_4 = self.taxlot_state_factory.get_taxlot_state(**base_details)
        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        match_buildings(self.import_file_2.id)
        # There should only be one TaxLotView which is associated to new, merged -State
        self.assertEqual(TaxLotView.objects.count(), 1)
        view = TaxLotView.objects.first()
        self.assertNotIn(view.state_id, [tls_1.id, tls_2.id, tls_3.id, tls_4.id])
        # It will have a -State having city as Philadelphia
        self.assertEqual(view.state.city, 'Philadelphia')
        # The corresponding log should be a System Match
        audit_log = TaxLotAuditLog.objects.get(state_id=view.state_id)
        self.assertEqual(audit_log.name, 'System Match')
class TestMatchingImportIntegration(DataMappingBaseTestCase):
    """End-to-end matching over two ImportFiles, checking record counts, merge/delete flags,
    and the matching_results_data summary reported on the ImportFile."""

    def setUp(self):
        # Standard fixture plus a second ImportFile in the same cycle.
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file_1, self.import_record_1, self.cycle = selfvars
        self.import_record_2, self.import_file_2 = self.create_import_file(
            self.user, self.org, self.cycle)
        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)

    def test_properties(self):
        """Property import: duplicates in/across files are deleted, in-file matches merge, and the summary counts agree."""
        # Define matching values
        matching_pm_property_id = '11111'
        matching_address_line_1 = '123 Match Street'
        matching_ubid = '86HJPCWQ+2VV-1-3-2-3'
        matching_custom_id_1 = 'MatchingID12345'

        # For first file, create properties with no duplicates or matches
        base_details_file_1 = {
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # No matching_criteria values
        self.property_state_factory.get_property_state(**base_details_file_1)
        # Build out properties with increasingly more matching_criteria values
        base_details_file_1['pm_property_id'] = matching_pm_property_id
        self.property_state_factory.get_property_state(**base_details_file_1)
        base_details_file_1['address_line_1'] = matching_address_line_1
        self.property_state_factory.get_property_state(**base_details_file_1)
        base_details_file_1['ubid'] = matching_ubid
        self.property_state_factory.get_property_state(**base_details_file_1)
        base_details_file_1['custom_id_1'] = matching_custom_id_1
        self.property_state_factory.get_property_state(**base_details_file_1)
        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)
        # Verify no duplicates/matched-merges yet
        counts = [
            Property.objects.count(),
            PropertyState.objects.count(),
            PropertyView.objects.count(),
        ]
        self.assertEqual([5, 5, 5], counts)
        """
        For second file, create several properties that are one or many of the following:
            - duplicates amongst file_1
            - duplicates amongst file_2
            - matching amongst file_1
            - matching amongst file_2
            - completely new
        """
        base_details_file_2 = {
            'import_file_id': self.import_file_2.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create 1 duplicate of the 'No matching_criteria values' properties
        # (outcome: 1 additional -States, NO new Property/-View)
        ps_1 = self.property_state_factory.get_property_state(
            **base_details_file_2)
        # Create a non-duplicate property also having no matching criteria values
        # (outcome: 1 additional -States, 1 new Property/-View)
        base_details_file_2['postal_code'] = '01234'
        ps_2 = self.property_state_factory.get_property_state(
            **base_details_file_2)
        # Create 2 completely new properties with misaligned combinations of matching values
        # (outcome: 2 additional -States, 2 new Property/-View)
        base_details_file_2['custom_id_1'] = matching_custom_id_1
        ps_3 = self.property_state_factory.get_property_state(
            **base_details_file_2)
        base_details_file_2['ubid'] = matching_ubid
        ps_4 = self.property_state_factory.get_property_state(
            **base_details_file_2)
        # Create 3 properties - with 1 duplicate and 1 match within it's own file that will
        # eventually become 1 completely new property
        # (outcome: 4 additional -States, 1 new Property/-View)
        base_details_file_2['address_line_1'] = matching_address_line_1
        base_details_file_2['city'] = 'Denver'
        ps_5 = self.property_state_factory.get_property_state(
            **base_details_file_2)
        ps_6 = self.property_state_factory.get_property_state(
            **base_details_file_2)
        base_details_file_2['city'] = 'Golden'
        ps_7 = self.property_state_factory.get_property_state(
            **base_details_file_2)
        # Create 3 properties - with 1 duplicate and 1 match within it's own file that will
        # eventually match the last property in file_1
        # (outcome: 5 additional -States, NO new Property/-View)
        base_details_file_2['pm_property_id'] = matching_pm_property_id
        base_details_file_2['state'] = 'Colorado'
        ps_8 = self.property_state_factory.get_property_state(
            **base_details_file_2)
        ps_9 = self.property_state_factory.get_property_state(
            **base_details_file_2)
        base_details_file_2['state'] = 'California'
        ps_10 = self.property_state_factory.get_property_state(
            **base_details_file_2)
        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        match_buildings(self.import_file_2.id)

        # 5 (file_1) + 4 new from file_2 = 9 promoted records; 18 total -States.
        self.assertEqual(9, Property.objects.count())
        self.assertEqual(9, PropertyView.objects.count())
        self.assertEqual(18, PropertyState.objects.count())

        # Exact duplicates were flagged for deletion.
        ps_ids_of_deleted = PropertyState.objects.filter(
            data_state=DATA_STATE_DELETE).values_list('id', flat=True).order_by('id')
        self.assertEqual([ps_1.id, ps_6.id, ps_9.id], list(ps_ids_of_deleted))

        # In-file merge participants keep MERGE_STATE_UNKNOWN at DATA_STATE_MAPPING.
        ps_ids_of_merged_in_file = PropertyState.objects.filter(
            data_state=DATA_STATE_MAPPING, merge_state=MERGE_STATE_UNKNOWN).values_list(
            'id', flat=True).order_by('id')
        self.assertEqual([ps_5.id, ps_7.id, ps_8.id, ps_10.id],
                         list(ps_ids_of_merged_in_file))

        # Non-matching newcomers were promoted as-is.
        ps_ids_of_all_promoted = PropertyView.objects.values_list('state_id', flat=True)
        self.assertIn(ps_2.id, ps_ids_of_all_promoted)
        self.assertIn(ps_3.id, ps_ids_of_all_promoted)
        self.assertIn(ps_4.id, ps_ids_of_all_promoted)

        # Check the per-ImportFile summary counters.
        rimport_file_2 = ImportFile.objects.get(pk=self.import_file_2.id)
        results = rimport_file_2.matching_results_data
        del results['progress_key']
        expected = {
            'import_file_records': None,  # This is calculated in a separate process
            'property_all_unmatched': 10,
            'property_duplicates': 2,
            'property_duplicates_of_existing': 1,
            'property_unmatched': 4,
            'tax_lot_all_unmatched': 0,
            'tax_lot_duplicates': 0,
            'tax_lot_duplicates_of_existing': 0,
            'tax_lot_unmatched': 0,
        }
        self.assertEqual(results, expected)

    def test_taxlots(self):
        """TaxLot mirror of test_properties (note: 2 duplicates of the no-criteria record instead of 1)."""
        # Define matching values
        matching_jurisdiction_tax_lot_id = '11111'
        matching_address_line_1 = '123 Match Street'
        matching_ulid = '86HJPCWQ+2VV-1-3-2-3'
        matching_custom_id_1 = 'MatchingID12345'

        # For first file, create taxlots with no duplicates or matches
        base_details_file_1 = {
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # No matching_criteria values
        self.taxlot_state_factory.get_taxlot_state(**base_details_file_1)
        # Build out taxlots with increasingly more matching_criteria values
        base_details_file_1[
            'jurisdiction_tax_lot_id'] = matching_jurisdiction_tax_lot_id
        self.taxlot_state_factory.get_taxlot_state(**base_details_file_1)
        base_details_file_1['address_line_1'] = matching_address_line_1
        self.taxlot_state_factory.get_taxlot_state(**base_details_file_1)
        base_details_file_1['ulid'] = matching_ulid
        self.taxlot_state_factory.get_taxlot_state(**base_details_file_1)
        base_details_file_1['custom_id_1'] = matching_custom_id_1
        self.taxlot_state_factory.get_taxlot_state(**base_details_file_1)
        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)
        # Verify no duplicates/matched-merges yet
        counts = [
            TaxLot.objects.count(),
            TaxLotState.objects.count(),
            TaxLotView.objects.count(),
        ]
        self.assertEqual([5, 5, 5], counts)
        """
        For second file, create several taxlots that are one or many of the following:
            - duplicates amongst file_1
            - duplicates amongst file_2
            - matching amongst file_1
            - matching amongst file_2
            - completely new
        """
        base_details_file_2 = {
            'import_file_id': self.import_file_2.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create 2 duplicates of the 'No matching_criteria values' taxlots
        # (outcome: 2 additional -States, NO new TaxLot/-View)
        tls_1 = self.taxlot_state_factory.get_taxlot_state(
            **base_details_file_2)
        tls_2 = self.taxlot_state_factory.get_taxlot_state(
            **base_details_file_2)
        # Create 2 completely new taxlots with misaligned combinations of matching values
        # (outcome: 2 additional -States, 2 new TaxLot/-View)
        base_details_file_2['custom_id_1'] = matching_custom_id_1
        tls_3 = self.taxlot_state_factory.get_taxlot_state(
            **base_details_file_2)
        base_details_file_2['ulid'] = matching_ulid
        tls_4 = self.taxlot_state_factory.get_taxlot_state(
            **base_details_file_2)
        # Create 3 taxlots - with 1 duplicate and 1 match within it's own file that will
        # eventually become 1 completely new property
        # (outcome: 4 additional -States, 1 new TaxLot/-View)
        base_details_file_2['address_line_1'] = matching_address_line_1
        base_details_file_2['city'] = 'Denver'
        tls_5 = self.taxlot_state_factory.get_taxlot_state(
            **base_details_file_2)
        tls_6 = self.taxlot_state_factory.get_taxlot_state(
            **base_details_file_2)
        base_details_file_2['city'] = 'Golden'
        tls_7 = self.taxlot_state_factory.get_taxlot_state(
            **base_details_file_2)
        # Create 3 properties - with 1 duplicate and 1 match within it's own file that will
        # eventually match the last property in file_1
        # (outcome: 5 additional -States, NO new TaxLot/-View)
        base_details_file_2[
            'jurisdiction_tax_lot_id'] = matching_jurisdiction_tax_lot_id
        base_details_file_2['state'] = 'Colorado'
        tls_8 = self.taxlot_state_factory.get_taxlot_state(
            **base_details_file_2)
        tls_9 = self.taxlot_state_factory.get_taxlot_state(
            **base_details_file_2)
        base_details_file_2['state'] = 'California'
        tls_10 = self.taxlot_state_factory.get_taxlot_state(
            **base_details_file_2)
        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        match_buildings(self.import_file_2.id)

        # 5 (file_1) + 3 new from file_2 = 8 promoted records; 18 total -States.
        self.assertEqual(8, TaxLot.objects.count())
        self.assertEqual(8, TaxLotView.objects.count())
        self.assertEqual(18, TaxLotState.objects.count())

        # Exact duplicates were flagged for deletion.
        tls_ids_of_deleted = TaxLotState.objects.filter(
            data_state=DATA_STATE_DELETE).values_list('id', flat=True).order_by('id')
        self.assertEqual([tls_1.id, tls_2.id, tls_6.id, tls_9.id],
                         list(tls_ids_of_deleted))

        # In-file merge participants keep MERGE_STATE_UNKNOWN at DATA_STATE_MAPPING.
        tls_ids_of_merged_in_file = TaxLotState.objects.filter(
            data_state=DATA_STATE_MAPPING, merge_state=MERGE_STATE_UNKNOWN).values_list(
            'id', flat=True).order_by('id')
        self.assertEqual([tls_5.id, tls_7.id, tls_8.id, tls_10.id],
                         list(tls_ids_of_merged_in_file))

        # Non-matching newcomers were promoted as-is.
        tls_ids_of_all_promoted = TaxLotView.objects.values_list('state_id', flat=True)
        self.assertIn(tls_3.id, tls_ids_of_all_promoted)
        self.assertIn(tls_4.id, tls_ids_of_all_promoted)

        # Check the per-ImportFile summary counters.
        rimport_file_2 = ImportFile.objects.get(pk=self.import_file_2.id)
        results = rimport_file_2.matching_results_data
        del results['progress_key']
        expected = {
            'import_file_records': None,  # This is calculated in a separate process
            'property_all_unmatched': 0,
            'property_duplicates': 0,
            'property_duplicates_of_existing': 0,
            'property_unmatched': 0,
            'tax_lot_all_unmatched': 10,
            'tax_lot_duplicates': 3,
            'tax_lot_duplicates_of_existing': 1,
            'tax_lot_unmatched': 3,
        }
        self.assertEqual(results, expected)
class TestOrganizationPreviewViews(DataMappingBaseTestCase):
    """Tests for the org-level match-merge-link *preview* endpoints.

    The preview must report which records WOULD link across cycles without
    actually creating the links; results are fetched both from the raw cache
    and via the companion -result endpoint.
    """

    def setUp(self):
        # Two cycles, each with its own ImportFile, so records can land in
        # different cycles and be candidates for cross-cycle linking.
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file_1, self.import_record_1, self.cycle_1 = selfvars
        cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        self.cycle_2 = cycle_factory.get_cycle(name="Cycle 2")
        self.import_record_2, self.import_file_2 = self.create_import_file(
            self.user, self.org, self.cycle_2
        )
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.client.login(**user_details)
        self.property_state_factory = FakePropertyStateFactory(organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(organization=self.org)

    def test_whole_org_match_merge_link_preview_endpoint_invalid_columns(self):
        """Unknown column names in add/remove yield a 404."""
        url = reverse('api:v3:organizations-match-merge-link-preview', args=[self.org.id])
        post_params = json.dumps({
            "inventory_type": "properties",
            "add": ['DNE col 1'],
            "remove": ['DNE col 2']
        })
        raw_result = self.client.post(url, post_params, content_type='application/json')
        self.assertEqual(404, raw_result.status_code)

    def test_whole_org_match_merge_link_preview_endpoint_properties(self):
        """Swapping pm_property_id for property_name as criteria previews a cross-cycle property link without creating it."""
        # Cycle 1 / ImportFile 1 - Create 1 property
        base_property_details = {
            'pm_property_id': '1st Non-Match Set',
            'city': 'City 1',
            'property_name': 'Match Set',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        ps_1 = self.property_state_factory.get_property_state(**base_property_details)
        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)
        # Cycle 2 / ImportFile 2 - Create 1 unlinked property
        # (same property_name, different pm_property_id - so no link under default criteria)
        base_property_details['pm_property_id'] = '2nd Non-Match Set'
        base_property_details['property_name'] = 'Match Set'
        base_property_details['import_file_id'] = self.import_file_2.id
        ps_2 = self.property_state_factory.get_property_state(**base_property_details)
        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        match_buildings(self.import_file_2.id)

        # Check there doesn't exist links
        self.assertNotEqual(ps_1.propertyview_set.first().property_id,
                            ps_2.propertyview_set.first().property_id)

        url = reverse('api:v3:organizations-match-merge-link-preview', args=[self.org.id])
        post_params = json.dumps({
            "inventory_type": "properties",
            "add": ['property_name'],
            "remove": ['pm_property_id']
        })
        raw_result = self.client.post(url, post_params, content_type='application/json')

        # Check there *still* doesn't exist links
        # (preview must not mutate the actual Property linkage)
        self.assertNotEqual(ps_1.propertyview_set.first().property_id,
                            ps_2.propertyview_set.first().property_id)
        self.assertEqual(200, raw_result.status_code)

        # Pull the preview summary straight from the cache via the progress key.
        raw_content = json.loads(raw_result.content)
        identifier = ProgressData.from_key(raw_content['progress_key']).data['unique_id']
        result_key = "org_match_merge_link_result__%s" % identifier
        raw_summary = get_cache_raw(result_key)

        summary = {str(k): v for k, v in raw_summary.items() if v}  # ignore empty cycles

        # Check format of summary
        self.assertCountEqual([str(self.cycle_1.id), str(self.cycle_2.id)], summary.keys())

        # Check that preview shows links would be created
        self.assertEqual(summary[str(self.cycle_1.id)][0]['id'],
                         summary[str(self.cycle_2.id)][0]['id'])

        # try to get result using results endpoint
        get_result_url = reverse('api:v3:organizations-match-merge-link-result',
                                 args=[self.org.id]) + '?match_merge_link_id=' + str(identifier)
        get_result_raw_response = self.client.get(get_result_url)
        raw_summary = json.loads(get_result_raw_response.content)

        summary = {str(k): v for k, v in raw_summary.items() if v}  # ignore empty cycles

        # Check format of summary
        self.assertCountEqual([str(self.cycle_1.id), str(self.cycle_2.id)], summary.keys())

        # Check that preview shows links would be created
        self.assertEqual(summary[str(self.cycle_1.id)][0]['id'],
                         summary[str(self.cycle_2.id)][0]['id'])

    def test_whole_org_match_merge_link_preview_endpoint_taxlots(self):
        """TaxLot mirror of the property preview test, swapping jurisdiction_tax_lot_id for district."""
        # Cycle 1 / ImportFile 1 - Create 1 taxlot
        base_taxlot_details = {
            'jurisdiction_tax_lot_id': '1st Non-Match Set',
            'city': 'City 1',
            'district': 'Match Set',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        tls_1 = self.taxlot_state_factory.get_taxlot_state(**base_taxlot_details)
        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)
        # Cycle 2 / ImportFile 2 - Create 1 unlinked taxlot
        base_taxlot_details['jurisdiction_tax_lot_id'] = '2nd Non-Match Set'
        base_taxlot_details['district'] = 'Match Set'
        base_taxlot_details['import_file_id'] = self.import_file_2.id
        tls_2 = self.taxlot_state_factory.get_taxlot_state(**base_taxlot_details)
        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        match_buildings(self.import_file_2.id)

        # Check there doesn't exist links
        self.assertNotEqual(tls_1.taxlotview_set.first().taxlot_id,
                            tls_2.taxlotview_set.first().taxlot_id)

        url = reverse('api:v3:organizations-match-merge-link-preview', args=[self.org.id])
        post_params = json.dumps({
            "inventory_type": "taxlots",
            "add": ['district'],
            "remove": ['jurisdiction_tax_lot_id']
        })
        raw_result = self.client.post(url, post_params, content_type='application/json')

        # Check there *still* doesn't exist links
        self.assertNotEqual(tls_1.taxlotview_set.first().taxlot_id,
                            tls_2.taxlotview_set.first().taxlot_id)
        self.assertEqual(200, raw_result.status_code)

        # Pull the preview summary straight from the cache via the progress key.
        raw_content = json.loads(raw_result.content)
        identifier = ProgressData.from_key(raw_content['progress_key']).data['unique_id']
        result_key = "org_match_merge_link_result__%s" % identifier
        raw_summary = get_cache_raw(result_key)

        summary = {str(k): v for k, v in raw_summary.items() if v}  # ignore empty cycles

        # Check format of summary
        self.assertCountEqual([str(self.cycle_1.id), str(self.cycle_2.id)], summary.keys())

        # Check that preview shows links would be created
        self.assertEqual(summary[str(self.cycle_1.id)][0]['id'],
                         summary[str(self.cycle_2.id)][0]['id'])

        # try to get result using results endpoint
        get_result_url = reverse('api:v3:organizations-match-merge-link-result',
                                 args=[self.org.id]) + '?match_merge_link_id=' + str(identifier)
        get_result_raw_response = self.client.get(get_result_url)
        raw_summary = json.loads(get_result_raw_response.content)

        summary = {str(k): v for k, v in raw_summary.items() if v}  # ignore empty cycles

        # Check format of summary
        self.assertCountEqual([str(self.cycle_1.id), str(self.cycle_2.id)], summary.keys())

        # Check that preview shows links would be created
        self.assertEqual(summary[str(self.cycle_1.id)][0]['id'],
                         summary[str(self.cycle_2.id)][0]['id'])
class TestMatchingInImportFile(DataMappingBaseTestCase):
    """Tests for matching -States imported together in a single ImportFile.

    Covers duplicate detection (duplicates are flagged DATA_STATE_DELETE, not
    merged) and in-file merging of -States that agree on the default matching
    criteria (address_line_1 via normalized address, ubid, pm_property_id /
    jurisdiction_tax_lot_id, custom_id_1).
    """

    def setUp(self):
        # set_up provides one user/org/cycle and an ImportFile to import into.
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars
        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)

    def test_duplicate_properties_identified(self):
        """Exact-duplicate PropertyStates in one file: one is flagged ignored."""
        base_details = {
            'address_line_1': '123 Match Street',
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create pair of properties that are exact duplicates
        self.property_state_factory.get_property_state(**base_details)
        self.property_state_factory.get_property_state(**base_details)
        # Create a non-matching, non-duplicate property
        base_details['address_line_1'] = '123 Different Ave'
        base_details['city'] = 'Denver'
        self.property_state_factory.get_property_state(**base_details)
        # set import_file mapping done so that matching can occur.
        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)
        # 2 Property, 2 PropertyViews, 3 PropertyState (1 flagged to be ignored)
        self.assertEqual(Property.objects.count(), 2)
        self.assertEqual(PropertyView.objects.count(), 2)
        self.assertEqual(PropertyState.objects.count(), 3)
        self.assertEqual(
            PropertyState.objects.filter(data_state=DATA_STATE_DELETE).count(), 1)
        # Make sure "deleted" -States are not found in the -Views
        deleted = PropertyState.objects.get(data_state=DATA_STATE_DELETE)
        self.assertNotIn(
            deleted.id, PropertyView.objects.values_list('state_id', flat=True))

    def test_duplicate_taxlots_identified(self):
        """Exact-duplicate TaxLotStates in one file: one is flagged ignored."""
        base_details = {
            'address_line_1': '123 Match Street',
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create pair of taxlots that are exact duplicates
        self.taxlot_state_factory.get_taxlot_state(**base_details)
        self.taxlot_state_factory.get_taxlot_state(**base_details)
        # Create a non-matching, non-duplicate taxlot
        base_details['address_line_1'] = '123 Different Ave'
        base_details['city'] = 'Denver'
        self.taxlot_state_factory.get_taxlot_state(**base_details)
        # set import_file mapping done so that matching can occur.
        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)
        # 2 TaxLot, 2 TaxLotViews, 3 TaxLotState (1 flagged to be ignored)
        self.assertEqual(TaxLot.objects.count(), 2)
        self.assertEqual(TaxLotView.objects.count(), 2)
        self.assertEqual(TaxLotState.objects.count(), 3)
        self.assertEqual(
            TaxLotState.objects.filter(data_state=DATA_STATE_DELETE).count(), 1)
        # Make sure "deleted" -States are not found in the -Views
        deleted = TaxLotState.objects.get(data_state=DATA_STATE_DELETE)
        self.assertNotIn(deleted.id,
                         TaxLotView.objects.values_list('state_id', flat=True))

    def test_match_properties_if_all_default_fields_match(self):
        """Two match groups merge within the file; the odd one out stays new."""
        base_details = {
            'address_line_1': '123 Match Street',
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create first set of properties that match each other
        ps_1 = self.property_state_factory.get_property_state(**base_details)
        base_details['city'] = 'Denver'
        ps_2 = self.property_state_factory.get_property_state(**base_details)
        # Create second set of properties that match each other
        base_details['pm_property_id'] = '11111'
        ps_3 = self.property_state_factory.get_property_state(**base_details)
        base_details['city'] = 'Philadelphia'
        ps_4 = self.property_state_factory.get_property_state(**base_details)
        # Create unmatched property
        base_details['pm_property_id'] = '000'
        ps_5 = self.property_state_factory.get_property_state(**base_details)
        # set import_file mapping done so that matching can occur.
        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)
        # 3 Property, 3 PropertyViews, 7 PropertyStates (5 imported, 2 merge results)
        self.assertEqual(Property.objects.count(), 3)
        self.assertEqual(PropertyView.objects.count(), 3)
        self.assertEqual(PropertyState.objects.count(), 7)
        # Refresh -States and check data_state and merge_state values.
        # The raw imports keep MAPPING/UNKNOWN; the merge results get
        # MATCHING/MERGED; the unmatched import becomes MATCHING/NEW.
        rps_1 = PropertyState.objects.get(pk=ps_1.id)
        self.assertEqual(rps_1.data_state, DATA_STATE_MAPPING)
        self.assertEqual(rps_1.merge_state, MERGE_STATE_UNKNOWN)
        rps_2 = PropertyState.objects.get(pk=ps_2.id)
        self.assertEqual(rps_2.data_state, DATA_STATE_MAPPING)
        self.assertEqual(rps_2.merge_state, MERGE_STATE_UNKNOWN)
        ps_1_plus_2 = PropertyState.objects.filter(
            pm_property_id__isnull=True,
            city='Denver',
            address_line_1='123 Match Street').exclude(
                data_state=DATA_STATE_MAPPING,
                merge_state=MERGE_STATE_UNKNOWN).get()
        self.assertEqual(ps_1_plus_2.data_state, DATA_STATE_MATCHING)
        self.assertEqual(ps_1_plus_2.merge_state, MERGE_STATE_MERGED)
        rps_3 = PropertyState.objects.get(pk=ps_3.id)
        self.assertEqual(rps_3.data_state, DATA_STATE_MAPPING)
        self.assertEqual(rps_3.merge_state, MERGE_STATE_UNKNOWN)
        rps_4 = PropertyState.objects.get(pk=ps_4.id)
        self.assertEqual(rps_4.data_state, DATA_STATE_MAPPING)
        self.assertEqual(rps_4.merge_state, MERGE_STATE_UNKNOWN)
        ps_3_plus_4 = PropertyState.objects.filter(
            pm_property_id='11111',
            city='Philadelphia',
            address_line_1='123 Match Street').exclude(
                data_state=DATA_STATE_MAPPING,
                merge_state=MERGE_STATE_UNKNOWN).get()
        self.assertEqual(ps_3_plus_4.data_state, DATA_STATE_MATCHING)
        self.assertEqual(ps_3_plus_4.merge_state, MERGE_STATE_MERGED)
        rps_5 = PropertyState.objects.get(pk=ps_5.id)
        self.assertEqual(rps_5.data_state, DATA_STATE_MATCHING)
        self.assertEqual(rps_5.merge_state, MERGE_STATE_NEW)

    def test_match_taxlots_if_all_default_fields_match(self):
        """TaxLot analog of the property in-file merge test above."""
        base_details = {
            'address_line_1': '123 Match Street',
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create first set of taxlots that match each other
        tls_1 = self.taxlot_state_factory.get_taxlot_state(**base_details)
        base_details['city'] = 'Denver'
        tls_2 = self.taxlot_state_factory.get_taxlot_state(**base_details)
        # Create second set of taxlots that match each other
        base_details['jurisdiction_tax_lot_id'] = '11111'
        tls_3 = self.taxlot_state_factory.get_taxlot_state(**base_details)
        base_details['city'] = 'Philadelphia'
        tls_4 = self.taxlot_state_factory.get_taxlot_state(**base_details)
        # Create unmatched taxlot
        base_details['jurisdiction_tax_lot_id'] = '000'
        tls_5 = self.taxlot_state_factory.get_taxlot_state(**base_details)
        # set import_file mapping done so that matching can occur.
        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)
        # 3 TaxLot, 3 TaxLotViews, 7 TaxLotStates (5 imported, 2 merge results)
        self.assertEqual(TaxLot.objects.count(), 3)
        self.assertEqual(TaxLotView.objects.count(), 3)
        self.assertEqual(TaxLotState.objects.count(), 7)
        # Refresh -States and check data_state and merge_state values
        rtls_1 = TaxLotState.objects.get(pk=tls_1.id)
        self.assertEqual(rtls_1.data_state, DATA_STATE_MAPPING)
        self.assertEqual(rtls_1.merge_state, MERGE_STATE_UNKNOWN)
        rtls_2 = TaxLotState.objects.get(pk=tls_2.id)
        self.assertEqual(rtls_2.data_state, DATA_STATE_MAPPING)
        self.assertEqual(rtls_2.merge_state, MERGE_STATE_UNKNOWN)
        tls_1_plus_2 = TaxLotState.objects.filter(
            jurisdiction_tax_lot_id__isnull=True,
            city='Denver',
            address_line_1='123 Match Street').exclude(
                data_state=DATA_STATE_MAPPING,
                merge_state=MERGE_STATE_UNKNOWN).get()
        self.assertEqual(tls_1_plus_2.data_state, DATA_STATE_MATCHING)
        self.assertEqual(tls_1_plus_2.merge_state, MERGE_STATE_MERGED)
        rtls_3 = TaxLotState.objects.get(pk=tls_3.id)
        self.assertEqual(rtls_3.data_state, DATA_STATE_MAPPING)
        self.assertEqual(rtls_3.merge_state, MERGE_STATE_UNKNOWN)
        rtls_4 = TaxLotState.objects.get(pk=tls_4.id)
        self.assertEqual(rtls_4.data_state, DATA_STATE_MAPPING)
        self.assertEqual(rtls_4.merge_state, MERGE_STATE_UNKNOWN)
        tls_3_plus_4 = TaxLotState.objects.filter(
            jurisdiction_tax_lot_id='11111',
            city='Philadelphia',
            address_line_1='123 Match Street').exclude(
                data_state=DATA_STATE_MAPPING,
                merge_state=MERGE_STATE_UNKNOWN).get()
        self.assertEqual(tls_3_plus_4.data_state, DATA_STATE_MATCHING)
        self.assertEqual(tls_3_plus_4.merge_state, MERGE_STATE_MERGED)
        rtls_5 = TaxLotState.objects.get(pk=tls_5.id)
        self.assertEqual(rtls_5.data_state, DATA_STATE_MATCHING)
        self.assertEqual(rtls_5.merge_state, MERGE_STATE_NEW)

    def test_match_properties_on_ubid(self):
        """Matching ubid alone is sufficient to merge two PropertyStates."""
        base_details = {
            'ubid': '86HJPCWQ+2VV-1-3-2-3',
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create set of properties that match each other
        self.property_state_factory.get_property_state(**base_details)
        base_details['city'] = 'Denver'
        self.property_state_factory.get_property_state(**base_details)
        # set import_file mapping done so that matching can occur.
        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)
        # 1 Property, 1 PropertyView, 3 PropertyStates (2 imported, 1 merge result)
        self.assertEqual(Property.objects.count(), 1)
        self.assertEqual(PropertyView.objects.count(), 1)
        self.assertEqual(PropertyState.objects.count(), 3)

    def test_match_properties_normalized_address_used_instead_of_address_line_1(
            self):
        """Address matching normalizes case/abbreviations before comparing."""
        base_details = {
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create set of properties that have the same address_line_1 in slightly different format
        base_details['address_line_1'] = '123 Match Street'
        self.property_state_factory.get_property_state(**base_details)
        base_details['address_line_1'] = '123 match St.'
        base_details['city'] = 'Denver'
        self.property_state_factory.get_property_state(**base_details)
        # set import_file mapping done so that matching can occur.
        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)
        # 1 Property, 1 PropertyView, 3 PropertyStates (2 imported, 1 merge result)
        self.assertEqual(Property.objects.count(), 1)
        self.assertEqual(PropertyView.objects.count(), 1)
        self.assertEqual(PropertyState.objects.count(), 3)

    def test_match_taxlots_normalized_address_used_instead_of_address_line_1(
            self):
        """TaxLot analog of the normalized-address matching test above."""
        base_details = {
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create set of taxlots that have the same address_line_1 in slightly different format
        base_details['address_line_1'] = '123 Match Street'
        self.taxlot_state_factory.get_taxlot_state(**base_details)
        base_details['address_line_1'] = '123 match St.'
        base_details['city'] = 'Denver'
        self.taxlot_state_factory.get_taxlot_state(**base_details)
        # set import_file mapping done so that matching can occur.
        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)
        # 1 TaxLot, 1 TaxLotView, 3 TaxLotStates (2 imported, 1 merge result)
        self.assertEqual(TaxLot.objects.count(), 1)
        self.assertEqual(TaxLotView.objects.count(), 1)
        self.assertEqual(TaxLotState.objects.count(), 3)

    def test_no_matches_if_all_matching_criteria_is_None(self):
        """
        Default matching criteria for PropertyStates are:
        - address_line_1 (substituted by normalized_address)
        - ubid
        - pm_property_id
        - custom_id_1
        and all are set to None.
        """
        base_details = {
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create set of properties that won't match
        self.property_state_factory.get_property_state(**base_details)
        base_details['city'] = 'Denver'
        self.property_state_factory.get_property_state(**base_details)
        # set import_file mapping done so that matching can occur.
        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)
        # 2 Property, 2 PropertyView, 2 PropertyStates - No merges
        self.assertEqual(Property.objects.count(), 2)
        self.assertEqual(PropertyView.objects.count(), 2)
        self.assertEqual(PropertyState.objects.count(), 2)

    def test_match_properties_get_rolled_up_into_one_in_the_order_their_uploaded(
            self):
        """
        The most recently uploaded should take precedence when merging states.
        If more than 2 states match each other, they are merged two at a time
        until one is remaining.

        Reminder, this is only for -States within an ImportFile.
        """
        base_details = {
            'address_line_1': '123 Match Street',
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create first set of properties that match each other
        base_details['city'] = 'Philadelphia'
        self.property_state_factory.get_property_state(**base_details)
        base_details['city'] = 'Arvada'
        self.property_state_factory.get_property_state(**base_details)
        base_details['city'] = 'Golden'
        self.property_state_factory.get_property_state(**base_details)
        base_details['city'] = 'Denver'
        self.property_state_factory.get_property_state(**base_details)
        # set import_file mapping done so that matching can occur.
        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)
        # 1 Property, 1 PropertyViews, 7 PropertyStates (4 imported, 3 merge results)
        self.assertEqual(Property.objects.count(), 1)
        self.assertEqual(PropertyView.objects.count(), 1)
        self.assertEqual(PropertyState.objects.count(), 7)
        # Last-uploaded state wins on conflicting fields
        self.assertEqual(PropertyView.objects.first().state.city, 'Denver')
class TaxLotViewTests(DataMappingBaseTestCase):
    """API tests for TaxLot view endpoints: links across cycles, lat/long
    edits, merged indicators on the filter endpoint, match-merge-link, and
    the multi-cycle taxlots list."""

    def setUp(self):
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.user = User.objects.create_superuser(**user_details)
        self.org, self.org_user, _ = create_organization(self.user)
        # Authenticated client is reused by every test below.
        self.client.login(**user_details)
        self.cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        self.cycle = self.cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=get_current_timezone()))
        self.taxlot_factory = FakeTaxLotFactory(organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)
        self.column_list_factory = FakeColumnListSettingsFactory(
            organization=self.org)

    def test_get_links_for_a_single_property(self):
        """Linked -Views (same canonical TaxLot, different cycles) are all
        returned by the links endpoint, newest cycle first."""
        # Create 2 linked taxlot view sets sharing one canonical TaxLot
        state = self.taxlot_state_factory.get_taxlot_state(
            extra_data={"field_1": "value_1"})
        taxlot = self.taxlot_factory.get_taxlot()
        view_1 = TaxLotView.objects.create(taxlot=taxlot, cycle=self.cycle, state=state)
        later_cycle = self.cycle_factory.get_cycle(
            start=datetime(2100, 10, 10, tzinfo=get_current_timezone()))
        state_2 = self.taxlot_state_factory.get_taxlot_state(
            extra_data={"field_1": "value_2"})
        view_2 = TaxLotView.objects.create(taxlot=taxlot, cycle=later_cycle, state=state_2)
        # save all the columns in the state to the database
        Column.save_column_names(state)
        url = reverse('api:v2:taxlots-links', args=[view_1.id])
        post_params = json.dumps({'organization_id': self.org.pk})
        response = self.client.post(url, post_params,
                                    content_type='application/json')
        data = response.json()['data']
        self.assertEqual(len(data), 2)
        # results should be ordered by descending cycle start date
        result_1 = data[1]
        self.assertEqual(result_1['address_line_1'], state.address_line_1)
        self.assertEqual(result_1['extra_data']['field_1'], 'value_1')
        self.assertEqual(result_1['cycle_id'], self.cycle.id)
        self.assertEqual(result_1['view_id'], view_1.id)
        result_2 = data[0]
        self.assertEqual(result_2['address_line_1'], state_2.address_line_1)
        self.assertEqual(result_2['extra_data']['field_1'], 'value_2')
        self.assertEqual(result_2['cycle_id'], later_cycle.id)
        self.assertEqual(result_2['view_id'], view_2.id)

    def test_first_lat_long_edit(self):
        """PUTting lat/long on a view populates long_lat geometry and
        geocoding_confidence on the state."""
        state = self.taxlot_state_factory.get_taxlot_state()
        taxlot = self.taxlot_factory.get_taxlot()
        view = TaxLotView.objects.create(taxlot=taxlot, cycle=self.cycle, state=state)
        # update the address
        new_data = {
            "state": {
                "latitude": 39.765251,
                "longitude": -104.986138,
            }
        }
        url = reverse('api:v2:taxlots-detail', args=[
            view.id
        ]) + '?organization_id={}'.format(self.org.pk)
        response = self.client.put(url, json.dumps(new_data),
                                   content_type='application/json')
        data = json.loads(response.content)
        self.assertEqual(data['status'], 'success')
        response = self.client.get(url, content_type='application/json')
        data = json.loads(response.content)
        self.assertEqual(data['status'], 'success')
        self.assertIsNotNone(data['state']['long_lat'])
        self.assertIsNotNone(data['state']['geocoding_confidence'])

    def test_merged_indicators_provided_on_filter_endpoint(self):
        """merged_indicator reflects whether a record (or its paired related
        record) is the result of a merge."""
        _import_record, import_file_1 = self.create_import_file(
            self.user, self.org, self.cycle)
        base_details = {
            'address_line_1': '123 Match Street',
            'import_file_id': import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        self.taxlot_state_factory.get_taxlot_state(**base_details)
        # set import_file_1 mapping done so that record is "created for users to view".
        import_file_1.mapping_done = True
        import_file_1.save()
        match_buildings(import_file_1.id)
        _import_record_2, import_file_2 = self.create_import_file(
            self.user, self.org, self.cycle)
        url = reverse(
            'api:v2:taxlots-filter'
        ) + '?cycle_id={}&organization_id={}&page=1&per_page=999999999'.format(
            self.cycle.pk, self.org.pk)
        response = self.client.post(url)
        data = json.loads(response.content)
        # Single un-merged record: indicator is False
        self.assertFalse(data['results'][0]['merged_indicator'])
        # make sure merged_indicator is True when merge occurs
        base_details['city'] = 'Denver'
        base_details['import_file_id'] = import_file_2.id
        self.taxlot_state_factory.get_taxlot_state(**base_details)
        # set import_file_2 mapping done so that match merging can occur.
        import_file_2.mapping_done = True
        import_file_2.save()
        match_buildings(import_file_2.id)
        url = reverse(
            'api:v2:taxlots-filter'
        ) + '?cycle_id={}&organization_id={}&page=1&per_page=999999999'.format(
            self.cycle.pk, self.org.pk)
        response = self.client.post(url)
        data = json.loads(response.content)
        self.assertTrue(data['results'][0]['merged_indicator'])
        # Create pairings and check if paired object has indicator as well
        property_factory = FakePropertyFactory(organization=self.org)
        property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        # NOTE(review): local name `property` shadows the builtin; left as-is
        # (token-preserving pass), but a rename would be cleaner.
        property = property_factory.get_property()
        property_state = property_state_factory.get_property_state()
        property_view = PropertyView.objects.create(property=property,
                                                    cycle=self.cycle,
                                                    state=property_state)
        # attach pairing to one and only taxlot_view
        TaxLotProperty(primary=True,
                       cycle_id=self.cycle.id,
                       property_view_id=property_view.id,
                       taxlot_view_id=TaxLotView.objects.get().id).save()
        url = reverse(
            'api:v2:taxlots-filter'
        ) + '?cycle_id={}&organization_id={}&page=1&per_page=999999999'.format(
            self.cycle.pk, self.org.pk)
        response = self.client.post(url)
        data = json.loads(response.content)
        related = data['results'][0]['related'][0]
        self.assertTrue('merged_indicator' in related)
        self.assertFalse(related['merged_indicator'])

    def test_taxlot_match_merge_link(self):
        """Two matching -States in different cycles link (not merge) when the
        match-merge-link endpoint is hit."""
        base_details = {
            'jurisdiction_tax_lot_id': '123MatchID',
            'no_default_data': False,
        }
        tls_1 = self.taxlot_state_factory.get_taxlot_state(**base_details)
        taxlot = self.taxlot_factory.get_taxlot()
        view_1 = TaxLotView.objects.create(taxlot=taxlot, cycle=self.cycle, state=tls_1)
        cycle_2 = self.cycle_factory.get_cycle(
            start=datetime(2018, 10, 10, tzinfo=get_current_timezone()))
        tls_2 = self.taxlot_state_factory.get_taxlot_state(**base_details)
        taxlot_2 = self.taxlot_factory.get_taxlot()
        TaxLotView.objects.create(taxlot=taxlot_2, cycle=cycle_2, state=tls_2)
        url = reverse('api:v2:taxlots-match-merge-link', args=[view_1.id])
        response = self.client.post(url, content_type='application/json')
        summary = response.json()
        expected_summary = {
            'view_id': None,
            'match_merged_count': 0,
            'match_link_count': 1,
        }
        self.assertEqual(expected_summary, summary)
        # Both views now share one canonical TaxLot
        refreshed_view_1 = TaxLotView.objects.get(state_id=tls_1.id)
        view_2 = TaxLotView.objects.get(state_id=tls_2.id)
        self.assertEqual(refreshed_view_1.taxlot_id, view_2.taxlot_id)

    def test_taxlots_cycles_list(self):
        """Cycles endpoint returns per-cycle records keyed by the column-list
        profile's column ids."""
        # Create TaxLot set in cycle 1
        state = self.taxlot_state_factory.get_taxlot_state(
            extra_data={"field_1": "value_1"})
        taxlot = self.taxlot_factory.get_taxlot()
        TaxLotView.objects.create(taxlot=taxlot, cycle=self.cycle, state=state)
        cycle_2 = self.cycle_factory.get_cycle(
            start=datetime(2018, 10, 10, tzinfo=get_current_timezone()))
        state_2 = self.taxlot_state_factory.get_taxlot_state(
            extra_data={"field_1": "value_2"})
        taxlot_2 = self.taxlot_factory.get_taxlot()
        TaxLotView.objects.create(taxlot=taxlot_2, cycle=cycle_2, state=state_2)
        # save all the columns in the state to the database so we can setup column list settings
        Column.save_column_names(state)
        # get the columnlistsetting (default) for all columns
        columnlistsetting = self.column_list_factory.get_columnlistsettings(
            inventory_type=VIEW_LIST_TAXLOT,
            columns=['address_line_1', 'field_1'],
            table_name='TaxLotState')
        post_params = json.dumps({
            'organization_id': self.org.pk,
            'profile_id': columnlistsetting.id,
            'cycle_ids': [self.cycle.id, cycle_2.id]
        })
        url = reverse('api:v2:taxlots-cycles')
        response = self.client.post(url, post_params,
                                    content_type='application/json')
        data = response.json()
        # Response field names are suffixed with the column's database id
        address_line_1_key = 'address_line_1_' + str(
            columnlistsetting.columns.get(column_name='address_line_1').id)
        field_1_key = 'field_1_' + str(
            columnlistsetting.columns.get(column_name='field_1').id)
        self.assertEqual(len(data), 2)
        result_1 = data[str(self.cycle.id)]
        self.assertEqual(result_1[0][address_line_1_key], state.address_line_1)
        self.assertEqual(result_1[0][field_1_key], 'value_1')
        self.assertEqual(result_1[0]['id'], taxlot.id)
        result_2 = data[str(cycle_2.id)]
        self.assertEqual(result_2[0][address_line_1_key], state_2.address_line_1)
        self.assertEqual(result_2[0][field_1_key], 'value_2')
        self.assertEqual(result_2[0]['id'], taxlot_2.id)
class TestPropertyViewAsStateSerializers(DeleteModelsTestCase):
    """Unit tests for PropertyViewAsStateSerializer: its SerializerMethodField
    getters (certifications, history, taxlots, audit-log-derived fields) and
    its create/update paths with the state serializer mocked out."""

    def setUp(self):
        self.maxDiff = None
        user_details = {
            'username': '******',
            'password': '******',
        }
        self.user = User.objects.create_superuser(email='*****@*****.**',
                                                  **user_details)
        self.org, _, _ = create_organization(self.user)
        self.audit_log_factory = FakePropertyAuditLogFactory(
            organization=self.org, user=self.user)
        self.cycle_factory = FakeCycleFactory(organization=self.org,
                                              user=self.user)
        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.property_view_factory = FakePropertyViewFactory(
            organization=self.org, user=self.user)
        self.ga_factory = FakeGreenAssessmentFactory(organization=self.org)
        self.gap_factory = FakeGreenAssessmentPropertyFactory(
            organization=self.org, user=self.user)
        self.taxlot_property_factory = FakeTaxLotPropertyFactory(
            organization=self.org, user=self.user)
        self.taxlot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)
        self.taxlot_view_factory = FakeTaxLotViewFactory(organization=self.org,
                                                         user=self.user)
        self.assessment = self.ga_factory.get_green_assessment()
        self.cycle = self.cycle_factory.get_cycle()
        self.property_state = self.property_state_factory.get_property_state()
        self.property_view = self.property_view_factory.get_property_view(
            state=self.property_state, cycle=self.cycle)
        self.taxlot_state = self.taxlot_state_factory.get_taxlot_state()
        self.taxlot_view = self.taxlot_view_factory.get_taxlot_view(
            state=self.taxlot_state, cycle=self.cycle)
        # audit_log is the user-edit record whose description feeds
        # get_changed_fields / get_date_edited / get_source below.
        self.audit_log = self.audit_log_factory.get_property_audit_log(
            state=self.property_state,
            view=self.property_view,
            record_type=AUDIT_USER_EDIT,
            description=json.dumps(['a', 'b']))
        self.audit_log2 = self.audit_log_factory.get_property_audit_log(
            view=self.property_view)
        self.gap_data = {
            'source': 'test',
            'status': 'complete',
            # Plain decimal day/month (originals were octal literals 0o1,
            # which have the same value but are needlessly confusing).
            'status_date': datetime.date(2017, 1, 1),
            'metric': 5,
            'version': '0.1',
            'date': datetime.date(2016, 1, 1),
            'eligibility': True,
            'assessment': self.assessment,
            'view': self.property_view,
        }
        self.urls = ['http://example.com', 'http://example.org']
        self.gap = self.gap_factory.get_green_assessment_property(
            **self.gap_data)
        self.serializer = PropertyViewAsStateSerializer(
            instance=self.property_view)

    def test_init(self):
        """Test __init__: `current` reflects the latest audit log entry."""
        expected = PropertyAuditLogReadOnlySerializer(self.audit_log).data
        # for now convert the site_eui to a magnitude to get the test to pass
        # this really needs to be at another level
        data = self.serializer.current
        # data['state']['site_eui'] = data['state']['site_eui'].magnitude
        self.assertEqual(data, expected)

    def test_get_certifications(self):
        """Test get_certifications returns the view's assessment properties."""
        expected = [GreenAssessmentPropertyReadOnlySerializer(self.gap).data]
        self.assertEqual(
            self.serializer.get_certifications(self.property_view), expected)

    def test_get_changed_fields(self):
        """Test get_changed_fields decodes the audit log's JSON description."""
        expected = ['a', 'b']
        self.assertEqual(self.serializer.get_changed_fields(None), expected)

    def test_get_date_edited(self):
        """Test get_date_edited uses the audit log's creation time."""
        expected = self.audit_log.created.ctime()
        self.assertEqual(self.serializer.get_date_edited(None), expected)

    def test_get_filename(self):
        """Test get_filename mirrors the audit log's import filename."""
        expected = self.audit_log.import_filename
        self.assertEqual(self.serializer.get_filename(None), expected)

    def test_get_history(self):
        """Test get_history returns prior audit log entries for the state."""
        obj = mock.MagicMock()
        obj.state = self.property_state
        data = self.serializer.get_history(obj)
        # Really need to figure out how to get the serializer to save the magnitude correctly.
        # data[0]['state']['site_eui'] = data[0]['state']['site_eui'].magnitude
        expected = [PropertyAuditLogReadOnlySerializer(self.audit_log2).data]
        self.assertEqual(data, expected)

    def test_get_state(self):
        """Test get_state serializes the object's state."""
        obj = mock.MagicMock()
        obj.state = self.property_state
        # Bug fix: this test previously built `obj` and asserted nothing,
        # so it could never fail. Assert the serialized state refers to
        # obj.state. NOTE(review): assumes the serialized dict exposes the
        # state's primary key under 'id' — confirm against the serializer.
        result = self.serializer.get_state(obj)
        self.assertEqual(result['id'], self.property_state.id)

    def test_get_source(self):
        """Test get_source reports the audit log's record type display."""
        expected = self.audit_log.get_record_type_display()
        self.assertEqual(self.serializer.get_source(None), expected)

    def test_get_taxlots(self):
        """Test get_taxlots returns states of taxlots paired to the view."""
        self.taxlot_property_factory.get_taxlot_property(
            cycle=self.cycle,
            property_view=self.property_view,
            taxlot_view=self.taxlot_view)
        result = self.serializer.get_taxlots(self.property_view)
        self.assertEqual(result[0]['state']['id'], self.taxlot_state.id)

    @mock.patch('seed.serializers.properties.PropertyView')
    @mock.patch('seed.serializers.properties.PropertyStateWritableSerializer')
    def test_create(self, mock_serializer, mock_pview):
        """Test create: state is saved then a view is created around it."""
        mock_serializer.return_value.is_valid.return_value = True
        mock_serializer.return_value.save.return_value = self.property_state
        mock_pview.objects.create.return_value = self.property_view
        data = {'org_id': 1, 'cycle': 2, 'state': {'test': 3}, 'property': 4}
        serializer = PropertyViewAsStateSerializer()
        serializer.create(data)
        mock_serializer.assert_called_with(data={'test': 3})
        self.assertTrue(mock_serializer.return_value.save.called)
        mock_pview.objects.create.assert_called_with(state=self.property_state,
                                                     cycle_id=2,
                                                     property_id=4,
                                                     org_id=1)

    @mock.patch('seed.serializers.properties.PropertyStateWritableSerializer')
    def test_update_put(self, mock_serializer):
        """Test update with PUT: state serializer is called with data only
        (full replace, no instance)."""
        mock_serializer.return_value.is_valid.return_value = True
        mock_serializer.return_value.save.return_value = self.property_state
        mock_request = mock.MagicMock()
        # Bug fix: the HTTP verb lives on `request.method`, not `request.METHOD`.
        # The original set METHOD, leaving request.method as a fresh MagicMock,
        # so the PUT branch in update() was never actually exercised (compare
        # test_update_patch below, which correctly sets `method`).
        mock_request.method = 'PUT'
        data = {'org_id': 1, 'cycle': 2, 'state': {'test': 3}, 'property': 4}
        serializer = PropertyViewAsStateSerializer(
            context={'request': mock_request})
        serializer.update(self.property_view, data)
        mock_serializer.assert_called_with(data={'test': 3})
        self.assertTrue(mock_serializer.return_value.save.called)

    @mock.patch('seed.serializers.properties.PropertyStateWritableSerializer')
    def test_update_patch(self, mock_serializer):
        """Test update with PATCH: state serializer gets the existing state
        instance plus the partial data."""
        mock_serializer.return_value.is_valid.return_value = True
        mock_serializer.return_value.save.return_value = self.property_state
        mock_request = mock.MagicMock()
        mock_request.method = 'PATCH'
        data = {'org_id': 1, 'cycle': 2, 'state': {'test': 3}, 'property': 4}
        serializer = PropertyViewAsStateSerializer(
            context={'request': mock_request})
        serializer.update(self.property_view, data)
        mock_serializer.assert_called_with(self.property_state,
                                           data={'test': 3})
        self.assertTrue(mock_serializer.return_value.save.called)
class TestMatchMergeLink(DataMappingBaseTestCase):
    """Tests for cross-cycle match/merge/link behavior: -States that merge
    within a cycle's import and then link to matching sets in other cycles
    by sharing one canonical record."""

    def setUp(self):
        # Three cycles, each with its own ImportFile to import into.
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file_1, self.import_record_1, self.cycle_1 = selfvars
        cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        self.cycle_2 = cycle_factory.get_cycle(name="Cycle 2")
        self.import_record_2, self.import_file_2 = self.create_import_file(
            self.user, self.org, self.cycle_2
        )
        self.cycle_3 = cycle_factory.get_cycle(name="Cycle 3")
        self.import_record_3, self.import_file_3 = self.create_import_file(
            self.user, self.org, self.cycle_3
        )
        self.property_state_factory = FakePropertyStateFactory(organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(organization=self.org)

    def test_match_merge_link_for_properties(self):
        """
        In this context, a "set" includes a -State, -View, and canonical record.

        Set up consists of 3 imports across 3 cycles respectively:
        Cycle 1 - 3 sets will be imported.
        - 2 sets match each other and are merged
        - 1 set doesn't match any others
        Cycle 2 - 4 sets will be imported.
        - 3 sets match. All will merge then link to match set in Cycle 1
        - 1 set doesn't match any others
        Cycle 3 - 2 sets will be imported.
        - 1 set will match sets from Cycles 1 and 2 and link to them
        - 1 set doesn't match any others
        """
        # Cycle 1 / ImportFile 1
        base_state_details = {
            'pm_property_id': '1st Match Set',
            'city': '1st Match - Cycle 1 - City 1',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        self.property_state_factory.get_property_state(**base_state_details)
        base_state_details['pm_property_id'] = '1st Match Set'
        base_state_details['city'] = '1st Match - Cycle 1 - City 2'
        self.property_state_factory.get_property_state(**base_state_details)
        base_state_details['pm_property_id'] = 'Single Unmatched - 1'
        base_state_details['city'] = 'Unmatched City - Cycle 1'
        self.property_state_factory.get_property_state(**base_state_details)
        # Import file and create -Views and canonical records.
        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        geocode_and_match_buildings_task(self.import_file_1.id)
        # Cycle 2 / ImportFile 2
        base_state_details['import_file_id'] = self.import_file_2.id
        base_state_details['pm_property_id'] = '1st Match Set'
        base_state_details['city'] = '1st Match - Cycle 2 - City 1'
        self.property_state_factory.get_property_state(**base_state_details)
        base_state_details['pm_property_id'] = '1st Match Set'
        base_state_details['city'] = '1st Match - Cycle 2 - City 2'
        self.property_state_factory.get_property_state(**base_state_details)
        base_state_details['pm_property_id'] = '1st Match Set'
        base_state_details['city'] = '1st Match - Cycle 2 - City 3'
        self.property_state_factory.get_property_state(**base_state_details)
        base_state_details['pm_property_id'] = 'Single Unmatched - 2'
        base_state_details['city'] = 'Unmatched City - Cycle 2'
        self.property_state_factory.get_property_state(**base_state_details)
        # Import file and create -Views and canonical records.
        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        geocode_and_match_buildings_task(self.import_file_2.id)
        # Cycle 3 / ImportFile 3
        base_state_details['import_file_id'] = self.import_file_3.id
        base_state_details['pm_property_id'] = '1st Match Set'
        base_state_details['city'] = '1st Match - Cycle 3 - City 1'
        self.property_state_factory.get_property_state(**base_state_details)
        base_state_details['pm_property_id'] = 'Single Unmatched - 3'
        base_state_details['city'] = 'Unmatched City - Cycle 3'
        self.property_state_factory.get_property_state(**base_state_details)
        # Import file and create -Views and canonical records.
        self.import_file_3.mapping_done = True
        self.import_file_3.save()
        geocode_and_match_buildings_task(self.import_file_3.id)
        # Verify merges and links happened:
        # 6 -Views (one per surviving set per cycle);
        # 12 -States = 9 imported + 3 merge results (1 from Cycle 1, 2 from Cycle 2)
        self.assertEqual(6, PropertyView.objects.count())
        self.assertEqual(4 + 6 + 2, PropertyState.objects.count())
        # 4 unique canonical records used in -Views
        # For now, Properties are not deleted when they aren't used in -Views so a count test wouldn't be appropriate
        self.assertEqual(
            4,
            len(set(PropertyView.objects.values_list('property_id', flat=True)))
        )
        # At the moment, there should be 3 -Views with the same canonical record across 3 cycles
        views_with_same_canonical_record = PropertyView.objects.\
            values('property_id').\
            annotate(times_used=Count('id'), cycle_ids=ArrayAgg('cycle_id')).\
            filter(times_used__gt=1).\
            get()
        self.assertEqual(3, views_with_same_canonical_record['times_used'])
        self.assertCountEqual(
            [self.cycle_1.id, self.cycle_2.id, self.cycle_3.id],
            views_with_same_canonical_record['cycle_ids']
        )

    def test_match_merge_link_for_taxlots(self):
        """
        In this context, a "set" includes a -State, -View, and canonical record.

        Set up consists of 3 imports across 3 cycles respectively:
        Cycle 1 - 3 sets will be imported.
        - 2 sets match each other and are merged
        - 1 set doesn't match any others
        Cycle 2 - 4 sets will be imported.
        - 3 sets match. All will merge then link to match set in Cycle 1
        - 1 set doesn't match any others
        Cycle 3 - 2 sets will be imported.
        - 1 set will match sets from Cycles 1 and 2 and link to them
        - 1 set doesn't match any others
        """
        # Cycle 1 / ImportFile 1
        base_state_details = {
            'jurisdiction_tax_lot_id': '1st Match Set',
            'city': '1st Match - Cycle 1 - City 1',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        self.taxlot_state_factory.get_taxlot_state(**base_state_details)
        base_state_details['jurisdiction_tax_lot_id'] = '1st Match Set'
        base_state_details['city'] = '1st Match - Cycle 1 - City 2'
        self.taxlot_state_factory.get_taxlot_state(**base_state_details)
        base_state_details['jurisdiction_tax_lot_id'] = 'Single Unmatched - 1'
        base_state_details['city'] = 'Unmatched City - Cycle 1'
        self.taxlot_state_factory.get_taxlot_state(**base_state_details)
        # Import file and create -Views and canonical records.
        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        geocode_and_match_buildings_task(self.import_file_1.id)
        # Cycle 2 / ImportFile 2
        base_state_details['import_file_id'] = self.import_file_2.id
        base_state_details['jurisdiction_tax_lot_id'] = '1st Match Set'
        base_state_details['city'] = '1st Match - Cycle 2 - City 1'
        self.taxlot_state_factory.get_taxlot_state(**base_state_details)
        base_state_details['jurisdiction_tax_lot_id'] = '1st Match Set'
        base_state_details['city'] = '1st Match - Cycle 2 - City 2'
        self.taxlot_state_factory.get_taxlot_state(**base_state_details)
        base_state_details['jurisdiction_tax_lot_id'] = '1st Match Set'
        base_state_details['city'] = '1st Match - Cycle 2 - City 3'
        self.taxlot_state_factory.get_taxlot_state(**base_state_details)
        base_state_details['jurisdiction_tax_lot_id'] = 'Single Unmatched - 2'
        base_state_details['city'] = 'Unmatched City - Cycle 2'
        self.taxlot_state_factory.get_taxlot_state(**base_state_details)
        # Import file and create -Views and canonical records.
        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        geocode_and_match_buildings_task(self.import_file_2.id)
        # Cycle 3 / ImportFile 3
        base_state_details['import_file_id'] = self.import_file_3.id
        base_state_details['jurisdiction_tax_lot_id'] = '1st Match Set'
        base_state_details['city'] = '1st Match - Cycle 3 - City 1'
        self.taxlot_state_factory.get_taxlot_state(**base_state_details)
        base_state_details['jurisdiction_tax_lot_id'] = 'Single Unmatched - 3'
        base_state_details['city'] = 'Unmatched City - Cycle 3'
        self.taxlot_state_factory.get_taxlot_state(**base_state_details)
        # Import file and create -Views and canonical records.
        self.import_file_3.mapping_done = True
        self.import_file_3.save()
        geocode_and_match_buildings_task(self.import_file_3.id)
        # Verify merges and links happened:
        # 6 -Views; 12 -States = 9 imported + 3 merge results
        self.assertEqual(6, TaxLotView.objects.count())
        self.assertEqual(4 + 6 + 2, TaxLotState.objects.count())
        # 4 unique canonical records used in -Views
        # For now, Properties are not deleted when they aren't used in -Views so a count test wouldn't be appropriate
        self.assertEqual(
            4,
            len(set(TaxLotView.objects.values_list('taxlot_id', flat=True)))
        )
        # At the moment, there should be 3 -Views with the same canonical record across 3 cycles
        views_with_same_canonical_record = TaxLotView.objects.\
            values('taxlot_id').\
            annotate(times_used=Count('id'), cycle_ids=ArrayAgg('cycle_id')).\
            filter(times_used__gt=1).\
            get()
        self.assertEqual(3, views_with_same_canonical_record['times_used'])
        self.assertCountEqual(
            [self.cycle_1.id, self.cycle_2.id, self.cycle_3.id],
            views_with_same_canonical_record['cycle_ids']
        )
class TaxLotMergeUnmergeViewTests(DataMappingBaseTestCase):
    """Exercises the taxlots-merge / taxlots-unmerge API endpoints, verifying
    that labels, notes, pairings, and canonical records survive correctly."""

    def setUp(self):
        # Superuser + org so API permission checks pass in every test.
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.user = User.objects.create_superuser(**user_details)
        self.org, self.org_user, _ = create_organization(self.user)

        self.cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        self.taxlot_factory = FakeTaxLotFactory(organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(organization=self.org)

        self.cycle = self.cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=get_current_timezone()))

        self.client.login(**user_details)

        # Two mergeable taxlot sets (-State, canonical TaxLot, -View) in one cycle.
        self.state_1 = self.taxlot_state_factory.get_taxlot_state()
        self.taxlot_1 = self.taxlot_factory.get_taxlot()
        self.view_1 = TaxLotView.objects.create(taxlot=self.taxlot_1,
                                                cycle=self.cycle,
                                                state=self.state_1)

        self.state_2 = self.taxlot_state_factory.get_taxlot_state()
        self.taxlot_2 = self.taxlot_factory.get_taxlot()
        self.view_2 = TaxLotView.objects.create(taxlot=self.taxlot_2,
                                                cycle=self.cycle,
                                                state=self.state_2)

    def test_taxlots_merge_without_losing_labels(self):
        # Create 3 Labels and distribute them across the two -Views (label_2 shared).
        label_factory = FakeStatusLabelFactory(organization=self.org)

        label_1 = label_factory.get_statuslabel()
        label_2 = label_factory.get_statuslabel()
        label_3 = label_factory.get_statuslabel()

        self.view_1.labels.add(label_1, label_2)
        self.view_2.labels.add(label_2, label_3)

        # Merge the taxlots
        url = reverse('api:v2:taxlots-merge') + '?organization_id={}'.format(
            self.org.pk)
        post_params = json.dumps(
            {'state_ids': [self.state_2.pk, self.state_1.pk]})
        self.client.post(url, post_params, content_type='application/json')

        # The resulting -View should have 3 labels (the union; label_2 not duplicated)
        view = TaxLotView.objects.first()

        self.assertEqual(view.labels.count(), 3)
        label_names = list(view.labels.values_list('name', flat=True))
        self.assertCountEqual(label_names,
                              [label_1.name, label_2.name, label_3.name])

    def test_taxlots_merge_without_losing_notes(self):
        note_factory = FakeNoteFactory(organization=self.org, user=self.user)

        # Create 3 Notes and distribute them to the two -Views (note2 shared).
        note1 = note_factory.get_note(name='non_default_name_1')
        note2 = note_factory.get_note(name='non_default_name_2')
        self.view_1.notes.add(note1)
        self.view_1.notes.add(note2)

        note3 = note_factory.get_note(name='non_default_name_3')
        self.view_2.notes.add(note2)
        self.view_2.notes.add(note3)

        # Merge the taxlots
        url = reverse('api:v2:taxlots-merge') + '?organization_id={}'.format(
            self.org.pk)
        post_params = json.dumps(
            {'state_ids': [self.state_2.pk, self.state_1.pk]})
        self.client.post(url, post_params, content_type='application/json')

        # The resulting -View should have 3 notes
        view = TaxLotView.objects.first()

        self.assertEqual(view.notes.count(), 3)
        note_names = list(view.notes.values_list('name', flat=True))
        self.assertCountEqual(note_names, [note1.name, note2.name, note3.name])

    def test_taxlots_merge_without_losing_pairings(self):
        # Create 2 pairings and distribute them to the two -Views.
        property_factory = FakePropertyFactory(organization=self.org)
        property_state_factory = FakePropertyStateFactory(organization=self.org)

        property_1 = property_factory.get_property()
        state_1 = property_state_factory.get_property_state()
        property_view_1 = PropertyView.objects.create(property=property_1,
                                                      cycle=self.cycle,
                                                      state=state_1)

        property_2 = property_factory.get_property()
        state_2 = property_state_factory.get_property_state()
        property_view_2 = PropertyView.objects.create(property=property_2,
                                                      cycle=self.cycle,
                                                      state=state_2)

        # Pair one PropertyView to each TaxLotView before the merge.
        TaxLotProperty(primary=True,
                       cycle_id=self.cycle.id,
                       property_view_id=property_view_1.id,
                       taxlot_view_id=self.view_1.id).save()
        TaxLotProperty(primary=True,
                       cycle_id=self.cycle.id,
                       property_view_id=property_view_2.id,
                       taxlot_view_id=self.view_2.id).save()

        # Merge the taxlots
        url = reverse('api:v2:taxlots-merge') + '?organization_id={}'.format(
            self.org.pk)
        post_params = json.dumps({
            'state_ids': [self.state_2.pk, self.state_1.pk]  # priority given to state_1
        })
        self.client.post(url, post_params, content_type='application/json')

        # There should still be 2 TaxLotProperties
        self.assertEqual(TaxLotProperty.objects.count(), 2)

        # Both pairings should now point at the single surviving TaxLotView.
        taxlot_view = TaxLotView.objects.first()
        paired_propertyview_ids = list(
            TaxLotProperty.objects.filter(
                taxlot_view_id=taxlot_view.id).values_list('property_view_id',
                                                           flat=True))
        self.assertCountEqual(paired_propertyview_ids,
                              [property_view_1.id, property_view_2.id])

    def test_merge_assigns_new_canonical_records_to_each_resulting_record_and_old_canonical_records_are_deleted_when_if_associated_to_views(
            self):
        # Capture old taxlot_ids
        persisting_taxlot_id = self.taxlot_1.id
        deleted_taxlot_id = self.taxlot_2.id

        # Give taxlot_1 a -View in a second Cycle so its canonical record stays in use.
        new_cycle = self.cycle_factory.get_cycle(
            start=datetime(2011, 10, 10, tzinfo=get_current_timezone()))
        new_taxlot_state = self.taxlot_state_factory.get_taxlot_state()
        TaxLotView.objects.create(taxlot=self.taxlot_1,
                                  cycle=new_cycle,
                                  state=new_taxlot_state)

        # Merge the taxlots
        url = reverse('api:v2:taxlots-merge') + '?organization_id={}'.format(
            self.org.pk)
        post_params = json.dumps({
            'state_ids': [self.state_2.pk, self.state_1.pk]  # priority given to state_1
        })
        self.client.post(url, post_params, content_type='application/json')

        # taxlot_2's canonical record had no other -Views, so it is deleted.
        self.assertFalse(
            TaxLotView.objects.filter(taxlot_id=deleted_taxlot_id).exists())
        self.assertFalse(TaxLot.objects.filter(pk=deleted_taxlot_id).exists())

        self.assertEqual(
            TaxLotView.objects.filter(taxlot_id=persisting_taxlot_id).count(),
            1)

    def test_taxlots_unmerge_without_losing_labels(self):
        # Merge the taxlots
        url = reverse('api:v2:taxlots-merge') + '?organization_id={}'.format(
            self.org.pk)
        post_params = json.dumps({
            'state_ids': [self.state_2.pk, self.state_1.pk]  # priority given to state_1
        })
        self.client.post(url, post_params, content_type='application/json')

        # Create 2 Labels and add them to the merged view
        label_factory = FakeStatusLabelFactory(organization=self.org)

        label_1 = label_factory.get_statuslabel()
        label_2 = label_factory.get_statuslabel()

        view = TaxLotView.objects.first()  # There's only one TaxLotView
        view.labels.add(label_1, label_2)

        # Unmerge the taxlots
        url = reverse('api:v2:taxlots-unmerge', args=[
            view.id
        ]) + '?organization_id={}'.format(self.org.pk)
        self.client.post(url, content_type='application/json')

        # Both resulting -Views should carry both labels.
        for new_view in TaxLotView.objects.all():
            self.assertEqual(new_view.labels.count(), 2)
            label_names = list(new_view.labels.values_list('name', flat=True))
            self.assertCountEqual(label_names, [label_1.name, label_2.name])

    def test_unmerge_results_in_the_use_of_new_canonical_taxlots_and_deletion_of_old_canonical_state_if_unrelated_to_any_views(
            self):
        # Merge the taxlots
        url = reverse('api:v2:taxlots-merge') + '?organization_id={}'.format(
            self.org.pk)
        post_params = json.dumps({
            'state_ids': [self.state_2.pk, self.state_1.pk]  # priority given to state_1
        })
        self.client.post(url, post_params, content_type='application/json')

        # Capture "old" taxlot_id - there's only one TaxLotView
        view = TaxLotView.objects.first()
        taxlot_id = view.taxlot_id

        # Unmerge the taxlots
        url = reverse('api:v2:taxlots-unmerge', args=[
            view.id
        ]) + '?organization_id={}'.format(self.org.pk)
        self.client.post(url, content_type='application/json')

        # The merged canonical record is gone; two fresh ones back the unmerged views.
        self.assertFalse(TaxLot.objects.filter(pk=taxlot_id).exists())
        self.assertEqual(TaxLot.objects.count(), 2)

    def test_unmerge_results_in_the_persistence_of_old_canonical_state_if_related_to_any_views(
            self):
        # Merge the taxlots
        url = reverse('api:v2:taxlots-merge') + '?organization_id={}'.format(
            self.org.pk)
        post_params = json.dumps({
            'state_ids': [self.state_2.pk, self.state_1.pk]  # priority given to state_1
        })
        self.client.post(url, post_params, content_type='application/json')

        # Associate only canonical taxlot with records across Cycle
        view = TaxLotView.objects.first()
        taxlot_id = view.taxlot_id
        new_cycle = self.cycle_factory.get_cycle(
            start=datetime(2011, 10, 10, tzinfo=get_current_timezone()))
        new_taxlot_state = self.taxlot_state_factory.get_taxlot_state()
        TaxLotView.objects.create(taxlot_id=taxlot_id,
                                  cycle=new_cycle,
                                  state=new_taxlot_state)

        # Unmerge the taxlots
        url = reverse('api:v2:taxlots-unmerge', args=[
            view.id
        ]) + '?organization_id={}'.format(self.org.pk)
        self.client.post(url, content_type='application/json')

        # Old canonical record persists (still used by the other-cycle -View),
        # plus two new canonical records for the unmerged views: 3 total.
        self.assertTrue(TaxLot.objects.filter(pk=view.taxlot_id).exists())
        self.assertEqual(TaxLot.objects.count(), 3)
class DefaultColumnsViewTests(DeleteModelsTestCase):
    """
    Tests of the SEED default custom saved columns
    """

    def setUp(self):
        # Two users / two orgs so cross-org column access can be tested.
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        user_details_2 = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.user = User.objects.create_superuser(**user_details)
        self.user_2 = User.objects.create_superuser(**user_details_2)
        self.org, _, _ = create_organization(self.user, "test-organization-a")
        self.org_2, _, _ = create_organization(self.user_2, "test-organization-b")

        self.property_state_factory = FakePropertyStateFactory(organization=self.org)
        self.tax_lot_state_factory = FakeTaxLotStateFactory(organization=self.org)

        Column.objects.create(column_name='test', organization=self.org)
        Column.objects.create(column_name='extra_data_test',
                              table_name='PropertyState',
                              organization=self.org,
                              is_extra_data=True)

        # Same column name but owned by org_2 — used by the wrong-org test.
        self.cross_org_column = Column.objects.create(column_name='extra_data_test',
                                                      table_name='PropertyState',
                                                      organization=self.org_2,
                                                      is_extra_data=True)

        self.client.login(**user_details)

    def test_set_default_columns(self):
        url = reverse_lazy('api:v1:set_default_columns')
        columns = ['s', 'c1', 'c2']
        post_data = {
            'columns': columns,
            'show_shared_buildings': True
        }

        # set the columns
        response = self.client.post(
            url,
            content_type='application/json',
            data=json.dumps(post_data)
        )
        json_string = response.content
        data = json.loads(json_string)
        self.assertEqual(200, response.status_code)

        # get the columns
        # url = reverse_lazy('api:v1:columns-get-default-columns')
        # response = self.client.get(url)
        # json_string = response.content
        # data = json.loads(json_string)
        # self.assertEqual(data['columns'], columns)

        # get show_shared_buildings
        url = reverse_lazy('api:v2:users-shared-buildings', args=[self.user.pk])
        response = self.client.get(url)
        data = response.json()
        self.assertEqual(data['show_shared_buildings'], True)

        # set show_shared_buildings to False
        # post_data['show_shared_buildings'] = False
        # url = reverse_lazy('api:v1:set_default_columns')
        # response = self.client.post(
        #     url,
        #     content_type='application/json',
        #     data=json.dumps(post_data)
        # )
        # json_string = response.content
        # data = json.loads(json_string)
        # self.assertEqual(200, response.status_code)

        # get show_shared_buildings
        # url = reverse_lazy('api:v2:users-shared-buildings', args=[self.user.pk])
        # response = self.client.get(url)
        # json_string = response.content
        # data = json.loads(json_string)
        # self.assertEqual(data['show_shared_buildings'], False)

    def test_get_all_columns(self):
        # test building list columns
        response = self.client.get(reverse('api:v2:columns-list'), {
            'organization_id': self.org.id
        })
        data = json.loads(response.content)['columns']

        # remove the id columns to make checking existence easier
        for result in data:
            del result['id']
            del result['name']  # name is hard to compare because it is name_{ID}
            del result['organization_id']  # org changes based on test

        expected = {
            'table_name': 'PropertyState',
            'column_name': 'pm_property_id',
            'display_name': 'PM Property ID',
            'is_extra_data': False,
            'merge_protection': 'Favor New',
            'data_type': 'string',
            'related': False,
            'sharedFieldType': 'None',
            'pinnedLeft': True,
            'unit_name': None,
            'unit_type': None,
            'is_matching_criteria': True,
        }

        # randomly check a column
        self.assertIn(expected, data)

    def test_rename_column_property(self):
        # Rename a canonical field to an extra_data key and verify the data moved.
        column = Column.objects.filter(
            organization=self.org, table_name='PropertyState', column_name='address_line_1'
        ).first()

        for i in range(1, 10):
            self.property_state_factory.get_property_state(data_state=DATA_STATE_MATCHING)
            self.tax_lot_state_factory.get_taxlot_state(data_state=DATA_STATE_MATCHING)

        # NOTE(review): expected_data/new_data keep only the last iteration's values;
        # the final assert effectively compares the last -State before and after.
        for ps in PropertyState.objects.filter(organization=self.org).order_by("pk"):
            # orig_data = [{"al1": ps.address_line_1,
            #               "ed": ps.extra_data,
            #               "na": ps.normalized_address}]
            expected_data = [{"al1": None,
                              "ed": {"address_line_1_extra_data": ps.address_line_1},
                              "na": None}]

        # test building list columns
        response = self.client.post(
            reverse('api:v2:columns-rename', args=[column.pk]),
            content_type='application/json',
            data=json.dumps({
                'new_column_name': 'address_line_1_extra_data',
                'overwrite': False
            })
        )
        result = response.json()
        self.assertEqual(response.status_code, 200)
        self.assertTrue(result['success'])

        for ps in PropertyState.objects.filter(organization=self.org).order_by("pk"):
            new_data = [{"al1": ps.address_line_1,
                         "ed": ps.extra_data,
                         "na": ps.normalized_address}]

        self.assertListEqual(expected_data, new_data)

    def test_rename_column_property_existing(self):
        # Renaming onto an existing column requires overwrite=True.
        column = Column.objects.filter(
            organization=self.org, table_name='PropertyState', column_name='address_line_1'
        ).first()

        for i in range(1, 10):
            self.property_state_factory.get_property_state(data_state=DATA_STATE_MATCHING)

        for ps in PropertyState.objects.filter(organization=self.org).order_by("pk"):
            expected_data = [{"al1": None,
                              "pn": ps.address_line_1,
                              "na": None}]

        # Without overwrite the rename is rejected.
        response = self.client.post(
            reverse('api:v2:columns-rename', args=[column.pk]),
            content_type='application/json',
            data=json.dumps({
                'new_column_name': 'property_name',
                'overwrite': False
            })
        )
        result = response.json()
        self.assertEqual(response.status_code, 400)
        self.assertFalse(result['success'])

        # With overwrite the rename succeeds.
        response = self.client.post(
            reverse('api:v2:columns-rename', args=[column.pk]),
            content_type='application/json',
            data=json.dumps({
                'new_column_name': 'property_name',
                'overwrite': True
            })
        )
        result = response.json()
        self.assertEqual(response.status_code, 200)
        self.assertTrue(result['success'])

        for ps in PropertyState.objects.filter(organization=self.org).order_by("pk"):
            new_data = [{"al1": ps.address_line_1,
                         "pn": ps.property_name,
                         "na": ps.normalized_address}]

        self.assertListEqual(expected_data, new_data)

    def test_rename_column_taxlot(self):
        # TaxLot mirror of test_rename_column_property.
        column = Column.objects.filter(
            organization=self.org, table_name='TaxLotState', column_name='address_line_1'
        ).first()

        for i in range(1, 10):
            self.property_state_factory.get_property_state(data_state=DATA_STATE_MATCHING)
            self.tax_lot_state_factory.get_taxlot_state(data_state=DATA_STATE_MATCHING)

        for ps in TaxLotState.objects.filter(organization=self.org).order_by("pk"):
            # orig_data = [{"al1": ps.address_line_1,
            #               "ed": ps.extra_data,
            #               "na": ps.normalized_address}]
            expected_data = [{"al1": None,
                              "ed": {"address_line_1_extra_data": ps.address_line_1},
                              "na": None}]

        # test building list columns
        response = self.client.post(
            reverse('api:v2:columns-rename', args=[column.pk]),
            content_type='application/json',
            data=json.dumps({
                'new_column_name': 'address_line_1_extra_data',
                'overwrite': False
            })
        )
        result = response.json()
        self.assertEqual(response.status_code, 200)
        self.assertTrue(result['success'])

        for ps in TaxLotState.objects.filter(organization=self.org).order_by("pk"):
            new_data = [{"al1": ps.address_line_1,
                         "ed": ps.extra_data,
                         "na": ps.normalized_address}]

        self.assertListEqual(expected_data, new_data)

    def test_rename_column_wrong_org(self):
        # A column owned by another org must not be found/renamed.
        response = self.client.post(
            reverse('api:v2:columns-rename', args=[self.cross_org_column.pk]),
            content_type='application/json',
        )
        result = response.json()
        # self.assertFalse(result['success'])
        self.assertEqual(
            'Cannot find column in org=%s with pk=%s' % (self.org.id, self.cross_org_column.pk),
            result['message'],
        )

    def test_rename_column_dne(self):
        # test building list columns
        response = self.client.post(
            reverse('api:v2:columns-rename', args=[-999]),
            content_type='application/json',
        )
        self.assertEqual(response.status_code, 404)

        result = response.json()
        self.assertFalse(result['success'])
        self.assertEqual(result['message'],
                         'Cannot find column in org=%s with pk=-999' % self.org.id)
class TestMatchingPostMerge(DataMappingBaseTestCase):
    """Verify that a manual merge of two -States triggers a follow-up
    "System Match" merge against any other -State the result now matches."""

    def setUp(self):
        # set_up() provisions user/org/import scaffolding for this test case.
        self.user, self.org, self.import_file, self.import_record, self.cycle = \
            self.set_up(ASSESSED_RAW)
        credentials = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.client.login(**credentials)
        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)

    def test_match_merge_happens_after_property_merge(self):
        """Merging -States 1 and 2 should yield a result that matches -State 4,
        causing an automatic post-merge; -State 3 must be left untouched."""
        details = {
            'pm_property_id': '123MatchID',
            'city': 'Golden',
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }

        # Create 4 non-matching properties where merging 1 and 2, will match 4
        ps_1 = self.property_state_factory.get_property_state(**details)

        details.pop('pm_property_id')
        details.update(address_line_1='123 Match Street', city='Denver')
        ps_2 = self.property_state_factory.get_property_state(**details)

        # Property 3 is here to be sure it remains unchanged
        details.pop('address_line_1')
        details.update(pm_property_id='1337AnotherDifferentID',
                       city='Philadelphia')
        ps_3 = self.property_state_factory.get_property_state(**details)

        # -State 4 carries the merged 1+2 identifiers, so it matches post-merge.
        details.update(address_line_1='123 Match Street',
                       pm_property_id='123MatchID',
                       city='Colorado Springs')
        self.property_state_factory.get_property_state(**details)

        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)

        # Make sure all 4 are separate
        self.assertEqual(Property.objects.count(), 4)
        self.assertEqual(PropertyState.objects.count(), 4)
        self.assertEqual(PropertyView.objects.count(), 4)

        # Merge -State 1 and 2 - which should then match merge with -State 4
        # with precedence to the initial merged -State
        url = reverse('api:v2:properties-merge') + \
            '?organization_id=%s' % self.org.pk
        payload = json.dumps({'state_ids': [ps_2.pk, ps_1.pk]})
        raw_response = self.client.post(url,
                                        payload,
                                        content_type='application/json')
        body = json.loads(raw_response.content)
        self.assertEqual(body['match_merged_count'], 2)

        # Verify that 3 -States have been merged and 2 remain
        self.assertEqual(Property.objects.count(), 2)
        # Original 4 + 1 initial merge + 1 post merge
        self.assertEqual(PropertyState.objects.count(), 6)
        self.assertEqual(PropertyView.objects.count(), 2)

        # Note, the success of the .get() implies the other View had state_id=ps_3
        changed_view = PropertyView.objects.exclude(state_id=ps_3).get()

        # It will have a -State having city as Golden
        self.assertEqual(changed_view.state.city, 'Golden')

        # The corresponding log should be a System Match
        audit_log = PropertyAuditLog.objects.get(
            state_id=changed_view.state_id)
        self.assertEqual(audit_log.name, 'System Match')

    def test_match_merge_happens_after_taxlot_merge(self):
        """TaxLot mirror of the property test above."""
        details = {
            'jurisdiction_tax_lot_id': '123MatchID',
            'city': 'Golden',
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }

        # Create 4 non-matching taxlots where merging 1 and 2, will match 4
        tls_1 = self.taxlot_state_factory.get_taxlot_state(**details)

        details.pop('jurisdiction_tax_lot_id')
        details.update(address_line_1='123 Match Street', city='Denver')
        tls_2 = self.taxlot_state_factory.get_taxlot_state(**details)

        # TaxLot 3 is here to be sure it remains unchanged
        details.pop('address_line_1')
        details.update(jurisdiction_tax_lot_id='1337AnotherDifferentID',
                       city='Philadelphia')
        tls_3 = self.taxlot_state_factory.get_taxlot_state(**details)

        # -State 4 carries the merged 1+2 identifiers, so it matches post-merge.
        details.update(address_line_1='123 Match Street',
                       jurisdiction_tax_lot_id='123MatchID',
                       city='Colorado Springs')
        self.taxlot_state_factory.get_taxlot_state(**details)

        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)

        # Make sure all 4 are separate
        self.assertEqual(TaxLot.objects.count(), 4)
        self.assertEqual(TaxLotState.objects.count(), 4)
        self.assertEqual(TaxLotView.objects.count(), 4)

        # Merge -State 1 and 2 - which should then match merge with -State 4
        # with precedence to the initial merged -State
        url = reverse('api:v2:taxlots-merge') + \
            '?organization_id=%s' % self.org.pk
        payload = json.dumps({'state_ids': [tls_2.pk, tls_1.pk]})
        raw_response = self.client.post(url,
                                        payload,
                                        content_type='application/json')
        body = json.loads(raw_response.content)
        self.assertEqual(body['match_merged_count'], 2)

        # Verify that 3 -States have been merged and 2 remain
        self.assertEqual(TaxLot.objects.count(), 2)
        # Original 4 + 1 initial merge + 1 post merge
        self.assertEqual(TaxLotState.objects.count(), 6)
        self.assertEqual(TaxLotView.objects.count(), 2)

        # Note, the success of the .get() implies the other View had state_id=tls_3
        changed_view = TaxLotView.objects.exclude(state_id=tls_3).get()

        # It will have a -State having city as Golden
        self.assertEqual(changed_view.state.city, 'Golden')

        # The corresponding log should be a System Match
        audit_log = TaxLotAuditLog.objects.get(state_id=changed_view.state_id)
        self.assertEqual(audit_log.name, 'System Match')
class TestMatching(DataMappingBaseTestCase): def setUp(self): selfvars = self.set_up(ASSESSED_RAW) self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars self.property_factory = FakePropertyFactory(organization=self.org) self.property_state_factory = FakePropertyStateFactory( organization=self.org) self.property_view_factory = FakePropertyViewFactory( organization=self.org, cycle=self.cycle) self.taxlot_state_factory = FakeTaxLotStateFactory( organization=self.org) self.taxlot_view_factory = FakeTaxLotViewFactory(organization=self.org, cycle=self.cycle) def test_match_properties_and_taxlots_with_address(self): # create an ImportFile for testing purposes. Seems like we would want to run this matching just on a # list of properties and taxlots. # # This emulates importing the following # Address, Jurisdiction Tax Lot # 742 Evergreen Terrace, 100;101;110;111 lot_numbers = '100;101;110;111' for i in range(10): self.property_state_factory.get_property_state( address_line_1='742 Evergreen Terrace', lot_number=lot_numbers, import_file_id=self.import_file.id, data_state=DATA_STATE_MAPPING, ) for lot_number in lot_numbers.split(';'): self.taxlot_state_factory.get_taxlot_state( address_line_1=None, jurisdiction_tax_lot_id=lot_number, import_file_id=self.import_file.id, data_state=DATA_STATE_MAPPING, ) # for ps in PropertyState.objects.filter(organization=self.org): # print("%s -- %s -- %s" % (ps.lot_number, ps.import_file_id, ps.address_line_1)) # for tl in TaxLotState.objects.filter(organization=self.org): # print("%s -- %s" % (tl.import_file_id, tl.jurisdiction_tax_lot_id)) # set import_file mapping done so that matching can occur. 
self.import_file.mapping_done = True self.import_file.save() match_buildings(self.import_file.id) # for pv in PropertyView.objects.filter(state__organization=self.org): # print("%s -- %s" % (pv.state, pv.cycle)) # should only have 1 PropertyView and 4 taxlot views self.assertEqual( PropertyView.objects.filter(state__organization=self.org).count(), 1) self.assertEqual( TaxLotView.objects.filter(state__organization=self.org).count(), 4) pv = PropertyView.objects.filter(state__organization=self.org).first() # there should be 4 relationships in the TaxLotProperty associated with view, one each for the taxlots defined self.assertEqual( TaxLotProperty.objects.filter(property_view_id=pv).count(), 4) def test_match_properties_and_taxlots_with_address_no_lot_number(self): # create an ImportFile for testing purposes. Seems like we would want to run this matching just on a # list of properties and taxlots. # # This emulates importing the following # Address, Jurisdiction Tax Lot # 742 Evergreen Terrace, 100 # 742 Evergreen Terrace, 101 # 742 Evergreen Terrace, 110 # 742 Evergreen Terrace, 111 lot_numbers = '100;101;110;111' for lot_number in lot_numbers.split(';'): self.property_state_factory.get_property_state( address_line_1='742 Evergreen Terrace', lot_number=lot_number, import_file_id=self.import_file.id, data_state=DATA_STATE_MAPPING, ) self.taxlot_state_factory.get_taxlot_state( address_line_1=None, jurisdiction_tax_lot_id=lot_number, import_file_id=self.import_file.id, data_state=DATA_STATE_MAPPING, ) # for ps in PropertyState.objects.filter(organization=self.org): # print("%s -- %s -- %s" % (ps.lot_number, ps.import_file_id, ps.address_line_1)) # for tl in TaxLotState.objects.filter(organization=self.org): # print("%s -- %s" % (tl.import_file_id, tl.jurisdiction_tax_lot_id)) # set import_file mapping done so that matching can occur. 
self.import_file.mapping_done = True self.import_file.save() match_buildings(self.import_file.id) # for pv in PropertyView.objects.filter(state__organization=self.org): # print("%s -- %s" % (pv.state, pv.cycle)) # should only have 1 PropertyView and 4 taxlot views self.assertEqual( PropertyView.objects.filter(state__organization=self.org).count(), 1) self.assertEqual( TaxLotView.objects.filter(state__organization=self.org).count(), 4) pv = PropertyView.objects.filter(state__organization=self.org).first() # there should be 4 relationships in the TaxLotProperty associated with view, one each for the taxlots defined self.assertEqual( TaxLotProperty.objects.filter(property_view_id=pv).count(), 4) def test_match_properties_and_taxlots_with_ubid(self): # create an ImportFile for testing purposes. Seems like we would want to run this matching just on a # list of properties and taxlots. # # This emulates importing the following # UBID, Jurisdiction Tax Lot # ubid_100, lot_1 # ubid_101, lot_1 # ubid_110, lot_1 # ubid_111, lot_1 ids = [('ubid_100', 'lot_1'), ('ubid_101', 'lot_1'), ('ubid_110', 'lot_1'), ('ubid_111', 'lot_1')] for id in ids: self.property_state_factory.get_property_state( no_default_data=True, ubid=id[0], lot_number=id[1], import_file_id=self.import_file.id, data_state=DATA_STATE_MAPPING, ) self.taxlot_state_factory.get_taxlot_state( no_default_data=True, jurisdiction_tax_lot_id=ids[0][1], import_file_id=self.import_file.id, data_state=DATA_STATE_MAPPING, ) # for ps in PropertyState.objects.filter(organization=self.org): # print("%s -- %s -- %s" % (ps.lot_number, ps.import_file_id, ps.ubid)) # pv = PropertyView.objects.get(state=ps, cycle=self.cycle) # TaxLotProperty.objects.filter() # for tl in TaxLotState.objects.filter(organization=self.org): # print("%s -- %s" % (tl.import_file_id, tl.jurisdiction_tax_lot_id)) # set import_file mapping done so that matching can occur. 
self.import_file.mapping_done = True self.import_file.save() match_buildings(self.import_file.id) # for pv in PropertyView.objects.filter(state__organization=self.org): # print("%s -- %s" % (pv.state.ubid, pv.cycle)) # should only have 1 PropertyView and 4 taxlot views self.assertEqual( PropertyView.objects.filter(state__organization=self.org).count(), 4) self.assertEqual( TaxLotView.objects.filter(state__organization=self.org).count(), 1) tlv = TaxLotView.objects.filter(state__organization=self.org).first() # there should be 4 relationships in the TaxLotProperty associated with view, one each for the taxlots defined self.assertEqual( TaxLotProperty.objects.filter(taxlot_view_id=tlv).count(), 4) def test_match_properties_and_taxlots_with_custom_id(self): # create an ImportFile for testing purposes. Seems like we would want to run this matching just on a # list of properties and taxlots. # # This emulates importing the following # Custom ID 1, Jurisdiction Tax Lot # custom_100, lot_1 # custom_101, lot_1 # custom_110, lot_1 # custom_111, lot_1 ids = [('custom_100', 'lot_1'), ('custom_101', 'lot_1'), ('custom_110', 'lot_1'), ('custom_111', 'lot_1')] for id in ids: self.property_state_factory.get_property_state( no_default_data=True, custom_id_1=id[0], lot_number=id[1], import_file_id=self.import_file.id, data_state=DATA_STATE_MAPPING, ) self.taxlot_state_factory.get_taxlot_state( no_default_data=True, jurisdiction_tax_lot_id=ids[0][1], import_file_id=self.import_file.id, data_state=DATA_STATE_MAPPING, ) # for ps in PropertyState.objects.filter(organization=self.org): # print("%s -- %s -- %s" % (ps.lot_number, ps.import_file_id, ps.custom_id_1)) # pv = PropertyView.objects.get(state=ps, cycle=self.cycle) # TaxLotProperty.objects.filter() # for tl in TaxLotState.objects.filter(organization=self.org): # print("%s -- %s" % (tl.import_file_id, tl.jurisdiction_tax_lot_id)) # set import_file mapping done so that matching can occur. 
self.import_file.mapping_done = True self.import_file.save() match_buildings(self.import_file.id) # for pv in PropertyView.objects.filter(state__organization=self.org): # print("%s -- %s" % (pv.state, pv.cycle)) # should only have 1 PropertyView and 4 taxlot views self.assertEqual( PropertyView.objects.filter(state__organization=self.org).count(), 4) self.assertEqual( TaxLotView.objects.filter(state__organization=self.org).count(), 1) tlv = TaxLotView.objects.filter(state__organization=self.org).first() # there should be 4 relationships in the TaxLotProperty associated with view, one each for the taxlots defined self.assertEqual( TaxLotProperty.objects.filter(taxlot_view_id=tlv).count(), 4) def test_save_state_match(self): # create a couple states to merge together ps_1 = self.property_state_factory.get_property_state( property_name="this should persist") ps_2 = self.property_state_factory.get_property_state( extra_data={"extra_1": "this should exist too"}) priorities = Column.retrieve_priorities(self.org.pk) merged_state = save_state_match(ps_1, ps_2, priorities) self.assertEqual(merged_state.merge_state, MERGE_STATE_MERGED) self.assertEqual(merged_state.property_name, ps_1.property_name) self.assertEqual(merged_state.extra_data['extra_1'], "this should exist too") # verify that the audit log is correct. 
pal = PropertyAuditLog.objects.get(organization=self.org, state=merged_state) self.assertEqual(pal.name, 'System Match') self.assertEqual(pal.parent_state1, ps_1) self.assertEqual(pal.parent_state2, ps_2) self.assertEqual(pal.description, 'Automatic Merge') def test_filter_duplicated_states(self): for i in range(10): self.property_state_factory.get_property_state( no_default_data=True, address_line_1='123 The Same Address', # extra_data={"extra_1": "value_%s" % i}, import_file_id=self.import_file.id, data_state=DATA_STATE_MAPPING, ) for i in range(5): self.property_state_factory.get_property_state( import_file_id=self.import_file.id, data_state=DATA_STATE_MAPPING, ) props = self.import_file.find_unmatched_property_states() uniq_states, dup_states = filter_duplicated_states(props) # There should be 6 uniq states. 5 from the second call, and one of 'The Same Address' self.assertEqual(len(uniq_states), 6) self.assertEqual(len(dup_states), 9) def test_match_and_merge_unmatched_objects_all_unique(self): # create some objects to match and merge partitioner = EquivalencePartitioner.make_default_state_equivalence( PropertyState) for i in range(10): self.property_state_factory.get_property_state( import_file_id=self.import_file.id, data_state=DATA_STATE_MAPPING, ) props = self.import_file.find_unmatched_property_states() uniq_states, dup_states = filter_duplicated_states(props) merged, keys = match_and_merge_unmatched_objects( uniq_states, partitioner) self.assertEqual(len(merged), 10) def test_match_and_merge_unmatched_objects_with_duplicates(self): # create some objects to match and merge partitioner = EquivalencePartitioner.make_default_state_equivalence( PropertyState) for i in range(8): self.property_state_factory.get_property_state( import_file_id=self.import_file.id, data_state=DATA_STATE_MAPPING, ) self.property_state_factory.get_property_state( no_default_data=True, extra_data={'moniker': '12345'}, address_line_1='123 same address', site_eui=25, 
import_file_id=self.import_file.id, data_state=DATA_STATE_MAPPING, ) self.property_state_factory.get_property_state( no_default_data=True, extra_data={'moniker': '12345'}, address_line_1='123 same address', site_eui=150, import_file_id=self.import_file.id, data_state=DATA_STATE_MAPPING, ) props = self.import_file.find_unmatched_property_states() uniq_states, dup_states = filter_duplicated_states(props) merged, keys = match_and_merge_unmatched_objects( uniq_states, partitioner) self.assertEqual(len(merged), 9) self.assertEqual(len(keys), 9) # find the ps_cp_1 in the list of merged found = False for ps in merged: if ps.extra_data.get('moniker', None) == '12345': found = True self.assertEqual(ps.site_eui.magnitude, 150) # from the second record self.assertEqual(found, True) def test_match_and_merge_unmatched_objects_with_dates(self): # Make sure that the dates sort correctly! (only testing release_date, but also sorts # on generation_date, then pk partitioner = EquivalencePartitioner.make_default_state_equivalence( PropertyState) self.property_state_factory.get_property_state( no_default_data=True, address_line_1='123 same address', release_date=datetime.datetime(2010, 1, 1, 1, 1, tzinfo=tz.get_current_timezone()), site_eui=25, import_file_id=self.import_file.id, data_state=DATA_STATE_MAPPING, ) self.property_state_factory.get_property_state( no_default_data=True, address_line_1='123 same address', release_date=datetime.datetime(2015, 1, 1, 1, 1, tzinfo=tz.get_current_timezone()), site_eui=150, import_file_id=self.import_file.id, data_state=DATA_STATE_MAPPING, ) self.property_state_factory.get_property_state( no_default_data=True, address_line_1='123 same address', release_date=datetime.datetime(2005, 1, 1, 1, 1, tzinfo=tz.get_current_timezone()), site_eui=300, import_file_id=self.import_file.id, data_state=DATA_STATE_MAPPING, ) props = self.import_file.find_unmatched_property_states() uniq_states, dup_states = filter_duplicated_states(props) merged, keys = 
match_and_merge_unmatched_objects( uniq_states, partitioner) found = False for ps in merged: found = True self.assertEqual(ps.site_eui.magnitude, 150) # from the second record self.assertEqual(found, True) def test_merge_unmatched_into_views_no_matches(self): """It is very unlikely that any of these states will match since it is using faker.""" for i in range(10): self.property_state_factory.get_property_state( import_file_id=self.import_file.id, data_state=DATA_STATE_MAPPING, )
class PropertyViewTests(DeleteModelsTestCase):
    """Exercises _match_properties_and_taxlots across the supported matching fields.

    Fix: the debug output in these tests used Python 2 ``print`` statements
    (``print "%s" % ...``), which are a SyntaxError under Python 3 — the rest
    of the file is Python 3 (e.g. ``assertCountEqual``). They are converted to
    ``print(...)`` calls, preserving the same output.
    """

    def setUp(self):
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.user = User.objects.create_superuser(**user_details)
        self.org, self.org_user, _ = create_organization(self.user)
        self.cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        self.cycle = self.cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=timezone.get_current_timezone())
        )
        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(organization=self.org)
        self.property_view_factory = FakePropertyViewFactory(organization=self.org, cycle=self.cycle)
        self.taxlot_state_factory = FakeTaxLotStateFactory(organization=self.org)
        self.taxlot_view_factory = FakeTaxLotViewFactory(organization=self.org, cycle=self.cycle)

        # ImportFile the tests import states through.
        import_record = ImportRecord.objects.create(super_organization=self.org)
        self.import_file = ImportFile.objects.create(
            import_record=import_record,
            cycle=self.cycle,
        )

    def test_match_properties_and_taxlots_with_address(self):
        # This emulates importing the following
        # Address, Jurisdiction Tax Lot
        # 742 Evergreen Terrace, 100;101;110;111
        lot_numbers = '100;101;110;111'
        for i in range(10):
            self.property_state_factory.get_property_state(
                address_line_1='742 Evergreen Terrace',
                lot_number=lot_numbers,
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )

        for lot_number in lot_numbers.split(';'):
            self.taxlot_state_factory.get_taxlot_state(
                address_line_1=None,
                jurisdiction_tax_lot_id=lot_number,
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )

        # Debug output (was Python 2 print statements).
        for ps in PropertyState.objects.filter(organization=self.org):
            print("%s -- %s -- %s" % (ps.lot_number, ps.import_file_id, ps.address_line_1))
        for tl in TaxLotState.objects.filter(organization=self.org):
            print("%s -- %s" % (tl.import_file_id, tl.jurisdiction_tax_lot_id))

        _match_properties_and_taxlots(self.import_file.id)

        for pv in PropertyView.objects.filter(state__organization=self.org):
            print("%s -- %s" % (pv.state, pv.cycle))

        # should only have 1 PropertyView and 4 taxlot views
        self.assertEqual(PropertyView.objects.filter(state__organization=self.org).count(), 1)
        self.assertEqual(TaxLotView.objects.filter(state__organization=self.org).count(), 4)

        pv = PropertyView.objects.filter(state__organization=self.org).first()
        # there should be 4 relationships in the TaxLotProperty associated with view,
        # one each for the taxlots defined
        self.assertEqual(TaxLotProperty.objects.filter(property_view_id=pv).count(), 4)

    def test_match_properties_and_taxlots_with_address_no_lot_number(self):
        # This emulates importing the following
        # Address, Jurisdiction Tax Lot
        # 742 Evergreen Terrace, 100
        # 742 Evergreen Terrace, 101
        # 742 Evergreen Terrace, 110
        # 742 Evergreen Terrace, 111
        lot_numbers = '100;101;110;111'
        for lot_number in lot_numbers.split(';'):
            self.property_state_factory.get_property_state(
                address_line_1='742 Evergreen Terrace',
                lot_number=lot_number,
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )
            self.taxlot_state_factory.get_taxlot_state(
                address_line_1=None,
                jurisdiction_tax_lot_id=lot_number,
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )

        # Debug output (was Python 2 print statements).
        for ps in PropertyState.objects.filter(organization=self.org):
            print("%s -- %s -- %s" % (ps.lot_number, ps.import_file_id, ps.address_line_1))
        for tl in TaxLotState.objects.filter(organization=self.org):
            print("%s -- %s" % (tl.import_file_id, tl.jurisdiction_tax_lot_id))

        _match_properties_and_taxlots(self.import_file.id)

        for pv in PropertyView.objects.filter(state__organization=self.org):
            print("%s -- %s" % (pv.state, pv.cycle))

        # should only have 1 PropertyView and 4 taxlot views
        self.assertEqual(PropertyView.objects.filter(state__organization=self.org).count(), 1)
        self.assertEqual(TaxLotView.objects.filter(state__organization=self.org).count(), 4)

        pv = PropertyView.objects.filter(state__organization=self.org).first()
        # there should be 4 relationships in the TaxLotProperty associated with view,
        # one each for the taxlots defined
        self.assertEqual(TaxLotProperty.objects.filter(property_view_id=pv).count(), 4)

    def test_match_properties_and_taxlots_with_ubid(self):
        # This emulates importing the following
        # UBID, Jurisdiction Tax Lot
        # ubid_100, lot_1
        # ubid_101, lot_1
        # ubid_110, lot_1
        # ubid_111, lot_1
        ids = [('ubid_100', 'lot_1'), ('ubid_101', 'lot_1'),
               ('ubid_110', 'lot_1'), ('ubid_111', 'lot_1')]
        for id in ids:
            self.property_state_factory.get_property_state(
                no_default_data=True,
                ubid=id[0],
                lot_number=id[1],
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )

        self.taxlot_state_factory.get_taxlot_state(
            no_default_data=True,
            jurisdiction_tax_lot_id=ids[0][1],
            import_file_id=self.import_file.id,
            data_state=DATA_STATE_MAPPING,
        )

        # Debug output (was Python 2 print statements).
        for ps in PropertyState.objects.filter(organization=self.org):
            print("%s -- %s -- %s" % (ps.lot_number, ps.import_file_id, ps.ubid))
        for tl in TaxLotState.objects.filter(organization=self.org):
            print("%s -- %s" % (tl.import_file_id, tl.jurisdiction_tax_lot_id))

        _match_properties_and_taxlots(self.import_file.id)

        for pv in PropertyView.objects.filter(state__organization=self.org):
            print("%s -- %s" % (pv.state.ubid, pv.cycle))

        # should have 4 PropertyViews and 1 taxlot view
        self.assertEqual(PropertyView.objects.filter(state__organization=self.org).count(), 4)
        self.assertEqual(TaxLotView.objects.filter(state__organization=self.org).count(), 1)

        tlv = TaxLotView.objects.filter(state__organization=self.org).first()
        # there should be 4 relationships in the TaxLotProperty associated with view,
        # one each for the properties defined
        self.assertEqual(TaxLotProperty.objects.filter(taxlot_view_id=tlv).count(), 4)

    def test_match_properties_and_taxlots_with_custom_id(self):
        # This emulates importing the following
        # Custom ID 1, Jurisdiction Tax Lot
        # custom_100, lot_1
        # custom_101, lot_1
        # custom_110, lot_1
        # custom_111, lot_1
        ids = [('custom_100', 'lot_1'), ('custom_101', 'lot_1'),
               ('custom_110', 'lot_1'), ('custom_111', 'lot_1')]
        for id in ids:
            self.property_state_factory.get_property_state(
                no_default_data=True,
                custom_id_1=id[0],
                lot_number=id[1],
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )

        self.taxlot_state_factory.get_taxlot_state(
            no_default_data=True,
            jurisdiction_tax_lot_id=ids[0][1],
            import_file_id=self.import_file.id,
            data_state=DATA_STATE_MAPPING,
        )

        # Debug output (was Python 2 print statements).
        for ps in PropertyState.objects.filter(organization=self.org):
            print("%s -- %s -- %s" % (ps.lot_number, ps.import_file_id, ps.custom_id_1))
        for tl in TaxLotState.objects.filter(organization=self.org):
            print("%s -- %s" % (tl.import_file_id, tl.jurisdiction_tax_lot_id))

        _match_properties_and_taxlots(self.import_file.id)

        for pv in PropertyView.objects.filter(state__organization=self.org):
            print("%s -- %s" % (pv.state, pv.cycle))

        # should have 4 PropertyViews and 1 taxlot view
        self.assertEqual(PropertyView.objects.filter(state__organization=self.org).count(), 4)
        self.assertEqual(TaxLotView.objects.filter(state__organization=self.org).count(), 1)

        tlv = TaxLotView.objects.filter(state__organization=self.org).first()
        # there should be 4 relationships in the TaxLotProperty associated with view,
        # one each for the properties defined
        self.assertEqual(TaxLotProperty.objects.filter(taxlot_view_id=tlv).count(), 4)

    def test_save_state_match(self):
        # create a couple states to merge together
        ps_1 = self.property_state_factory.get_property_state(property_name="this should persist")
        ps_2 = self.property_state_factory.get_property_state(extra_data={"extra_1": "this should exist too"})

        # NOTE(review): the sibling test class calls
        # save_state_match(ps_1, ps_2, priorities) with
        # Column.retrieve_priorities(self.org.pk) — confirm which signature is
        # current and make these consistent.
        merged_state = save_state_match(ps_1, ps_2)

        self.assertEqual(merged_state.merge_state, MERGE_STATE_MERGED)
        self.assertEqual(merged_state.property_name, ps_1.property_name)
        self.assertEqual(merged_state.extra_data['extra_1'], "this should exist too")

        # verify that the audit log is correct.
        pal = PropertyAuditLog.objects.get(organization=self.org, state=merged_state)
        self.assertEqual(pal.name, 'System Match')
        self.assertEqual(pal.parent_state1, ps_1)
        self.assertEqual(pal.parent_state2, ps_2)
        self.assertEqual(pal.description, 'Automatic Merge')
class TestMatchingExistingViewFullOrgMatching(DataMappingBaseTestCase):
    """Tests whole_org_match_merge across two Cycles of Property and TaxLot sets."""

    def setUp(self):
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file_1, self.import_record_1, self.cycle_1 = selfvars

        # Second Cycle with its own ImportRecord/ImportFile.
        cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        self.cycle_2 = cycle_factory.get_cycle(name="Cycle 2")
        self.import_record_2, self.import_file_2 = self.create_import_file(
            self.user, self.org, self.cycle_2)

        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)

    def test_whole_org_match_merge(self):
        """
        The set up for this test is lengthy and includes multiple Property
        sets and TaxLot sets across multiple Cycles. In this context, a "set"
        includes a -State, -View, and canonical record.

        Cycle 1 - 5 property & 5 taxlot sets - 2 & 2 sets match, 1 set doesn't match
        Cycle 2 - 5 property & 5 taxlot sets - 3 sets match, 2 sets w/ null fields
        """
        # --- Cycle 1 / ImportFile 1 ---
        base_property_details = {
            'pm_property_id': '1st Match Set',
            'city': 'Golden',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }

        # Create 5 initially non-matching properties in first Cycle.
        # NOTE: base_property_details is mutated between factory calls; order matters.
        ps_11 = self.property_state_factory.get_property_state(
            **base_property_details)

        base_property_details[
            'pm_property_id'] = 'To be updated - 1st Match Set'
        base_property_details['city'] = 'Denver'
        ps_12 = self.property_state_factory.get_property_state(
            **base_property_details)

        base_property_details['pm_property_id'] = '2nd Match Set'
        base_property_details['city'] = 'Philadelphia'
        ps_13 = self.property_state_factory.get_property_state(
            **base_property_details)

        base_property_details[
            'pm_property_id'] = 'To be updated - 2nd Match Set'
        base_property_details['city'] = 'Colorado Springs'
        ps_14 = self.property_state_factory.get_property_state(
            **base_property_details)

        base_property_details['pm_property_id'] = 'Single Unmatched'
        base_property_details['city'] = 'Grand Junction'
        ps_15 = self.property_state_factory.get_property_state(
            **base_property_details)

        base_taxlot_details = {
            'jurisdiction_tax_lot_id': '1st Match Set',
            'city': 'Golden',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }

        # Create 5 initially non-matching taxlots in first Cycle.
        tls_11 = self.taxlot_state_factory.get_taxlot_state(
            **base_taxlot_details)

        base_taxlot_details[
            'jurisdiction_tax_lot_id'] = 'To be updated - 1st Match Set'
        base_taxlot_details['city'] = 'Denver'
        tls_12 = self.taxlot_state_factory.get_taxlot_state(
            **base_taxlot_details)

        base_taxlot_details['jurisdiction_tax_lot_id'] = '2nd Match Set'
        base_taxlot_details['city'] = 'Philadelphia'
        tls_13 = self.taxlot_state_factory.get_taxlot_state(
            **base_taxlot_details)

        base_taxlot_details[
            'jurisdiction_tax_lot_id'] = 'To be updated - 2nd Match Set'
        base_taxlot_details['city'] = 'Colorado Springs'
        tls_14 = self.taxlot_state_factory.get_taxlot_state(
            **base_taxlot_details)

        base_taxlot_details['jurisdiction_tax_lot_id'] = 'Single Unmatched'
        base_taxlot_details['city'] = 'Grand Junction'
        tls_15 = self.taxlot_state_factory.get_taxlot_state(
            **base_taxlot_details)

        # Import file and create -Views and canonical records.
        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)

        # Make some match but don't trigger matching round
        # (queryset update() bypasses save signals/post-save matching).
        PropertyState.objects.filter(pk=ps_12.id).update(
            pm_property_id='1st Match Set')
        PropertyState.objects.filter(pk=ps_14.id).update(
            pm_property_id='2nd Match Set')
        TaxLotState.objects.filter(pk=tls_12.id).update(
            jurisdiction_tax_lot_id='1st Match Set')
        TaxLotState.objects.filter(pk=tls_14.id).update(
            jurisdiction_tax_lot_id='2nd Match Set')

        # Check all property and taxlot sets were created without match merges
        self.assertEqual(5, Property.objects.count())
        self.assertEqual(5, PropertyState.objects.count())
        self.assertEqual(5, PropertyView.objects.count())
        self.assertEqual(5, TaxLot.objects.count())
        self.assertEqual(5, TaxLotState.objects.count())
        self.assertEqual(5, TaxLotView.objects.count())

        # --- Cycle 2 / ImportFile 2 ---
        base_property_details = {
            'pm_property_id': '1st Match Set',
            'city': 'Golden',
            'import_file_id': self.import_file_2.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }

        # Create 5 initially non-matching properties in second Cycle.
        ps_21 = self.property_state_factory.get_property_state(
            **base_property_details)

        base_property_details[
            'pm_property_id'] = 'To be updated 1 - 1st Match Set'
        base_property_details['city'] = 'Denver'
        ps_22 = self.property_state_factory.get_property_state(
            **base_property_details)

        base_property_details[
            'pm_property_id'] = 'To be updated 2 - 1st Match Set'
        base_property_details['city'] = 'Philadelphia'
        ps_23 = self.property_state_factory.get_property_state(
            **base_property_details)

        # Last two states have no pm_property_id at all (null matching field).
        del base_property_details['pm_property_id']
        base_property_details['city'] = 'Null Fields 1'
        ps_24 = self.property_state_factory.get_property_state(
            **base_property_details)

        base_property_details['city'] = 'Null Fields 2'
        ps_25 = self.property_state_factory.get_property_state(
            **base_property_details)

        base_taxlot_details = {
            'jurisdiction_tax_lot_id': '1st Match Set',
            'city': 'Golden',
            'import_file_id': self.import_file_2.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }

        # Create 5 initially non-matching taxlots in second Cycle.
        tls_21 = self.taxlot_state_factory.get_taxlot_state(
            **base_taxlot_details)

        base_taxlot_details[
            'jurisdiction_tax_lot_id'] = 'To be updated 1 - 1st Match Set'
        base_taxlot_details['city'] = 'Denver'
        tls_22 = self.taxlot_state_factory.get_taxlot_state(
            **base_taxlot_details)

        base_taxlot_details[
            'jurisdiction_tax_lot_id'] = 'To be updated 2 - 1st Match Set'
        base_taxlot_details['city'] = 'Philadelphia'
        tls_23 = self.taxlot_state_factory.get_taxlot_state(
            **base_taxlot_details)

        del base_taxlot_details['jurisdiction_tax_lot_id']
        base_taxlot_details['city'] = 'Null Fields 1'
        tls_24 = self.taxlot_state_factory.get_taxlot_state(
            **base_taxlot_details)

        base_taxlot_details['city'] = 'Null Fields 2'
        tls_25 = self.taxlot_state_factory.get_taxlot_state(
            **base_taxlot_details)

        # Import file and create -Views and canonical records.
        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        match_buildings(self.import_file_2.id)

        # Make some match but don't trigger matching round
        PropertyState.objects.filter(pk__in=[ps_22.id, ps_23.id]).update(
            pm_property_id='1st Match Set')
        TaxLotState.objects.filter(pk__in=[tls_22.id, tls_23.id]).update(
            jurisdiction_tax_lot_id='1st Match Set')

        # Check all property and taxlot sets were created without match merges
        self.assertEqual(10, Property.objects.count())
        self.assertEqual(10, PropertyState.objects.count())
        self.assertEqual(10, PropertyView.objects.count())
        self.assertEqual(10, TaxLot.objects.count())
        self.assertEqual(10, TaxLotState.objects.count())
        self.assertEqual(10, TaxLotView.objects.count())

        # Set up complete - run method
        summary = whole_org_match_merge(self.org.id)

        # Check -View and canonical counts
        self.assertEqual(6, PropertyView.objects.count())
        self.assertEqual(6, TaxLotView.objects.count())
        self.assertEqual(6, Property.objects.count())
        self.assertEqual(6, TaxLot.objects.count())

        # For each -State model, there should be 14
        # 14 = 10 + 2 from Cycle-1 merges + 2 from Cycle-2 merges
        self.assertEqual(14, TaxLotState.objects.count())
        self.assertEqual(14, PropertyState.objects.count())

        # Check -States part of merges are no longer associated to -Views
        merged_ps_ids = [
            ps_11.id, ps_12.id,  # Cycle 1
            ps_13.id, ps_14.id,  # Cycle 1
            ps_21.id, ps_22.id, ps_23.id  # Cycle 2
        ]
        self.assertFalse(
            PropertyView.objects.filter(state_id__in=merged_ps_ids).exists())

        merged_tls_ids = [
            tls_11.id, tls_12.id,  # Cycle 1
            tls_13.id, tls_14.id,  # Cycle 1
            tls_21.id, tls_22.id, tls_23.id  # Cycle 2
        ]
        self.assertFalse(
            TaxLotView.objects.filter(state_id__in=merged_tls_ids).exists())

        # Check -States NOT part of merges are still associated to -Views
        self.assertTrue(PropertyView.objects.filter(state_id=ps_15).exists())
        self.assertTrue(PropertyView.objects.filter(state_id=ps_24).exists())
        self.assertTrue(PropertyView.objects.filter(state_id=ps_25).exists())
        self.assertTrue(TaxLotView.objects.filter(state_id=tls_15).exists())
        self.assertTrue(TaxLotView.objects.filter(state_id=tls_24).exists())
        self.assertTrue(TaxLotView.objects.filter(state_id=tls_25).exists())

        # Check merges occurred correctly, with priority given to newer
        # -States as evidenced by 'city' values.
        cycle_1_pviews = PropertyView.objects.filter(cycle_id=self.cycle_1.id)
        cycle_1_pstates = PropertyState.objects.filter(
            pk__in=Subquery(cycle_1_pviews.values('state_id')))
        self.assertEqual(3, cycle_1_pstates.count())
        self.assertEqual(1, cycle_1_pstates.filter(city='Denver').count())
        self.assertEqual(
            1,
            cycle_1_pstates.filter(city='Colorado Springs').count())
        self.assertEqual(1,
                         cycle_1_pstates.filter(city='Grand Junction').count())

        cycle_2_pviews = PropertyView.objects.filter(cycle_id=self.cycle_2.id)
        cycle_2_pstates = PropertyState.objects.filter(
            pk__in=Subquery(cycle_2_pviews.values('state_id')))
        self.assertEqual(3, cycle_2_pstates.count())
        self.assertEqual(1,
                         cycle_2_pstates.filter(city='Philadelphia').count())
        self.assertEqual(1,
                         cycle_2_pstates.filter(city='Null Fields 1').count())
        self.assertEqual(1,
                         cycle_2_pstates.filter(city='Null Fields 2').count())

        cycle_1_tlviews = TaxLotView.objects.filter(cycle_id=self.cycle_1.id)
        cycle_1_tlstates = TaxLotState.objects.filter(
            pk__in=Subquery(cycle_1_tlviews.values('state_id')))
        self.assertEqual(3, cycle_1_tlstates.count())
        self.assertEqual(1, cycle_1_tlstates.filter(city='Denver').count())
        self.assertEqual(
            1,
            cycle_1_tlstates.filter(city='Colorado Springs').count())
        self.assertEqual(
            1,
            cycle_1_tlstates.filter(city='Grand Junction').count())

        cycle_2_tlviews = TaxLotView.objects.filter(cycle_id=self.cycle_2.id)
        cycle_2_tlstates = TaxLotState.objects.filter(
            pk__in=Subquery(cycle_2_tlviews.values('state_id')))
        self.assertEqual(3, cycle_2_tlstates.count())
        self.assertEqual(1,
                         cycle_2_tlstates.filter(city='Philadelphia').count())
        self.assertEqual(1,
                         cycle_2_tlstates.filter(city='Null Fields 1').count())
        self.assertEqual(1,
                         cycle_2_tlstates.filter(city='Null Fields 2').count())

        # Finally, check method returned expected summary
        expected_summary = {
            'PropertyState': {
                'merged_count': 7,
                'new_merged_state_ids': [
                    cycle_1_pstates.filter(city='Denver').get().id,
                    cycle_1_pstates.filter(city='Colorado Springs').get().id,
                    cycle_2_pstates.filter(city='Philadelphia').get().id,
                ]
            },
            'TaxLotState': {
                'merged_count': 7,
                'new_merged_state_ids': [
                    cycle_1_tlstates.filter(city='Denver').get().id,
                    cycle_1_tlstates.filter(city='Colorado Springs').get().id,
                    cycle_2_tlstates.filter(city='Philadelphia').get().id,
                ]
            },
        }
        self.assertEqual(summary['PropertyState']['merged_count'],
                         expected_summary['PropertyState']['merged_count'])
        self.assertEqual(summary['TaxLotState']['merged_count'],
                         expected_summary['TaxLotState']['merged_count'])
        self.assertCountEqual(
            summary['PropertyState']['new_merged_state_ids'],
            expected_summary['PropertyState']['new_merged_state_ids'])
        self.assertCountEqual(
            summary['TaxLotState']['new_merged_state_ids'],
            expected_summary['TaxLotState']['new_merged_state_ids'])
class TestMatchingPostEdit(DataMappingBaseTestCase):
    """Tests that editing a -State via the API triggers an automatic match-merge."""

    def setUp(self):
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        # presumably set_up() created this user; verify against DataMappingBaseTestCase
        self.client.login(**user_details)

        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)

    def test_match_merge_happens_after_property_edit(self):
        base_details = {
            'pm_property_id': '789DifferentID',
            'city': 'Golden',
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create 3 non-matching properties
        ps_1 = self.property_state_factory.get_property_state(**base_details)

        base_details['pm_property_id'] = '123MatchID'
        base_details['city'] = 'Denver'
        self.property_state_factory.get_property_state(**base_details)

        base_details['pm_property_id'] = '1337AnotherDifferentID'
        base_details['city'] = 'Philadelphia'
        ps_3 = self.property_state_factory.get_property_state(**base_details)

        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)

        # Edit the first property to match the second
        new_data = {"state": {"pm_property_id": "123MatchID"}}
        target_view_id = ps_1.propertyview_set.first().id
        url = reverse('api:v2:properties-detail', args=[
            target_view_id
        ]) + '?organization_id={}'.format(self.org.pk)
        raw_response = self.client.put(url,
                                       json.dumps(new_data),
                                       content_type='application/json')
        response = json.loads(raw_response.content)

        # The edit match-merged 2 records.
        self.assertEqual(response['match_merged_count'], 2)

        changed_view = PropertyView.objects.exclude(state_id=ps_3).get()
        self.assertEqual(response['view_id'], changed_view.id)

        # Verify that properties 1 and 2 have been merged
        self.assertEqual(Property.objects.count(), 2)
        self.assertEqual(PropertyState.objects.count(),
                         5)  # Original 3 + 1 edit + 1 merge result
        self.assertEqual(PropertyView.objects.count(), 2)

        # It will have a -State having city as Golden
        self.assertEqual(changed_view.state.city, 'Golden')

        # The corresponding log should be a System Match
        audit_log = PropertyAuditLog.objects.get(
            state_id=changed_view.state_id)
        self.assertEqual(audit_log.name, 'System Match')

        # Update the edit and match-merge result -State
        new_data = {"state": {"pm_property_id": "1337AnotherDifferentID"}}
        url = reverse('api:v2:properties-detail', args=[
            changed_view.id
        ]) + '?organization_id={}'.format(self.org.pk)
        raw_response = self.client.put(url,
                                       json.dumps(new_data),
                                       content_type='application/json')
        response = json.loads(raw_response.content)

        # Verify that there's only 1 canonical Property and View left
        self.assertEqual(Property.objects.count(), 1)
        # 6 -States since, 5 from 1st round + 1 from merge
        # None created during edit since the audit log isn't named 'Import Creation'
        self.assertEqual(PropertyState.objects.count(), 6)
        self.assertEqual(PropertyView.objects.count(), 1)

        view = PropertyView.objects.first()
        self.assertEqual(response['view_id'], view.id)

        # Check that city is still Golden, since the edited -State takes precedence
        self.assertEqual(view.state.city, 'Golden')

    def test_match_merge_happens_after_taxlot_edit(self):
        base_details = {
            'jurisdiction_tax_lot_id': '789DifferentID',
            'city': 'Golden',
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create 3 non-matching taxlots
        tls_1 = self.taxlot_state_factory.get_taxlot_state(**base_details)

        base_details['jurisdiction_tax_lot_id'] = '123MatchID'
        base_details['city'] = 'Denver'
        self.taxlot_state_factory.get_taxlot_state(**base_details)

        base_details['jurisdiction_tax_lot_id'] = '1337AnotherDifferentID'
        base_details['city'] = 'Philadelphia'
        tls_3 = self.taxlot_state_factory.get_taxlot_state(**base_details)

        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)

        # Edit the first taxlot to match the second
        new_data = {"state": {"jurisdiction_tax_lot_id": "123MatchID"}}
        target_view_id = tls_1.taxlotview_set.first().id
        url = reverse('api:v2:taxlots-detail', args=[
            target_view_id
        ]) + '?organization_id={}'.format(self.org.pk)
        raw_response = self.client.put(url,
                                       json.dumps(new_data),
                                       content_type='application/json')
        response = json.loads(raw_response.content)

        changed_view = TaxLotView.objects.exclude(state_id=tls_3).get()
        self.assertEqual(response['view_id'], changed_view.id)

        # Verify that taxlots 1 and 2 have been merged
        self.assertEqual(TaxLot.objects.count(), 2)
        self.assertEqual(TaxLotState.objects.count(),
                         5)  # Original 3 + 1 edit + 1 merge result
        self.assertEqual(TaxLotView.objects.count(), 2)

        # It will have a -State having city as Golden
        self.assertEqual(changed_view.state.city, 'Golden')

        # The corresponding log should be a System Match
        audit_log = TaxLotAuditLog.objects.get(state_id=changed_view.state_id)
        self.assertEqual(audit_log.name, 'System Match')

        # Update the edit and match-merge result -State
        new_data = {
            "state": {
                "jurisdiction_tax_lot_id": "1337AnotherDifferentID"
            }
        }
        url = reverse('api:v2:taxlots-detail', args=[
            changed_view.id
        ]) + '?organization_id={}'.format(self.org.pk)
        raw_response = self.client.put(url,
                                       json.dumps(new_data),
                                       content_type='application/json')
        response = json.loads(raw_response.content)

        # The second edit match-merged 2 records.
        self.assertEqual(response['match_merged_count'], 2)

        # Verify that there's only 1 canonical TaxLot and View left
        self.assertEqual(TaxLot.objects.count(), 1)
        # 6 -States since, 5 from 1st round + 1 from merge
        # None created during edit since the audit log isn't named 'Import Creation'
        self.assertEqual(TaxLotState.objects.count(), 6)
        self.assertEqual(TaxLotView.objects.count(), 1)

        view = TaxLotView.objects.first()
        self.assertEqual(response['view_id'], view.id)

        # Check that city is still Golden, since the edited -State takes precedence
        self.assertEqual(view.state.city, 'Golden')
class TestMatchingExistingViewMatching(DataMappingBaseTestCase):
    """Tests for match_merge_in_cycle against already-imported -Views.

    Covers rollup merging of existing matches (for both Properties and
    TaxLots) and the no-op behavior when matching criteria are unpopulated.
    """

    def setUp(self):
        # set_up returns (user, org, import_file, import_record, cycle)
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file_1, self.import_record_1, self.cycle = selfvars
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.client.login(**user_details)
        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)

    def test_match_merge_in_cycle_rolls_up_existing_property_matches_in_updated_state_order_with_final_priority_given_to_selected_property(
            self):
        """
        Import 4 non-matching records each with different cities and
        state_orders (extra data field).

        Create a Column record for state_orders, and update merge protection
        setting for the city column.

        Re-save the -States in a deliberate order (4, 2, 3) while making their
        pm_property_ids match; saving presumably bumps each -State's 'updated'
        timestamp, which is what establishes the expected rollup order —
        confirm against the PropertyState model.

        Run merging and unmerge records to unravel and reveal the merge order.
        """
        base_details = {
            'pm_property_id': '123MatchID',
            'city': '1st Oldest City',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
            'extra_data': {
                'state_order': 'first',
            },
        }
        ps_1 = self.property_state_factory.get_property_state(**base_details)

        base_details['pm_property_id'] = '123DifferentID'
        base_details['city'] = '2nd Oldest City'
        base_details['extra_data']['state_order'] = 'second'
        ps_2 = self.property_state_factory.get_property_state(**base_details)

        base_details['pm_property_id'] = '456DifferentID'
        base_details['city'] = '3rd Oldest City'
        base_details['extra_data']['state_order'] = 'third'
        ps_3 = self.property_state_factory.get_property_state(**base_details)

        base_details['pm_property_id'] = '789DifferentID'
        base_details['city'] = '4th Oldest City'
        base_details['extra_data']['state_order'] = 'fourth'
        ps_4 = self.property_state_factory.get_property_state(**base_details)

        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)

        # Create (ED) 'state_order' column and update merge protection column for 'city'
        self.org.column_set.create(
            column_name='state_order',
            is_extra_data=True,
            table_name='PropertyState',
            merge_protection=Column.COLUMN_MERGE_FAVOR_EXISTING)
        self.org.column_set.filter(
            column_name='city',
            table_name='PropertyState').update(
                merge_protection=Column.COLUMN_MERGE_FAVOR_EXISTING)

        # Update -States to make the roll up order be 4, 2, 3
        # (save order, not id order, dictates rollup precedence here)
        refreshed_ps_4 = PropertyState.objects.get(id=ps_4.id)
        refreshed_ps_4.pm_property_id = '123MatchID'
        refreshed_ps_4.save()

        refreshed_ps_2 = PropertyState.objects.get(id=ps_2.id)
        refreshed_ps_2.pm_property_id = '123MatchID'
        refreshed_ps_2.save()

        refreshed_ps_3 = PropertyState.objects.get(id=ps_3.id)
        refreshed_ps_3.pm_property_id = '123MatchID'
        refreshed_ps_3.save()

        # run match_merge_in_cycle giving
        # (view_id_result unused — only the merge count is asserted)
        manual_merge_view = PropertyView.objects.get(state_id=ps_1.id)
        count_result, view_id_result = match_merge_in_cycle(
            manual_merge_view.id, 'PropertyState')
        self.assertEqual(count_result, 4)

        """
        Verify everything's rolled up to one -View with precedence given to
        manual merge -View with '1st Oldest City'. '1st Oldest City' is
        expected to be final City value since this rollup should ignore Merge
        Protection.
        """
        self.assertEqual(PropertyView.objects.count(), 1)
        only_view = PropertyView.objects.get()
        self.assertEqual(only_view.state.city, '1st Oldest City')
        self.assertEqual(only_view.state.extra_data['state_order'], 'first')

        """
        Undoing 1 rollup merge should expose a set -State having
        '3rd Oldest City' and state_order of 'third'.
        """
        rollback_unmerge_url_1 = reverse(
            'api:v2:properties-unmerge',
            args=[only_view.id]) + '?organization_id={}'.format(self.org.pk)
        self.client.post(rollback_unmerge_url_1, content_type='application/json')
        rollback_view_1 = PropertyView.objects.prefetch_related(
            'state').exclude(state__city='1st Oldest City').get()
        self.assertEqual(rollback_view_1.state.city, '3rd Oldest City')
        self.assertEqual(rollback_view_1.state.extra_data['state_order'], 'third')

        """
        Undoing another rollup merge should expose a set -State having
        '2nd Oldest City' and state_order of 'second'.
        """
        rollback_unmerge_url_2 = reverse(
            'api:v2:properties-unmerge', args=[
                rollback_view_1.id
            ]) + '?organization_id={}'.format(self.org.pk)
        self.client.post(rollback_unmerge_url_2, content_type='application/json')
        rollback_view_2 = PropertyView.objects.prefetch_related(
            'state').exclude(
                state__city__in=['1st Oldest City', '3rd Oldest City']).get()
        self.assertEqual(rollback_view_2.state.city, '2nd Oldest City')
        self.assertEqual(rollback_view_2.state.extra_data['state_order'], 'second')

    def test_match_merge_in_cycle_ignores_properties_with_unpopulated_matching_criteria(
            self):
        """Properties whose matching-criteria fields are all empty must never
        be merged by match_merge_in_cycle (count 0, None indicator)."""
        base_details = {
            'city': 'Golden',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }

        # Create 3 non-duplicate properties with unpopulated matching criteria
        ps_1 = self.property_state_factory.get_property_state(**base_details)

        base_details['city'] = 'Denver'
        ps_2 = self.property_state_factory.get_property_state(**base_details)

        base_details['city'] = 'Philadelphia'
        ps_3 = self.property_state_factory.get_property_state(**base_details)

        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)

        # Verify no match merges happen
        ps_1_view = PropertyView.objects.get(state_id=ps_1.id)
        count_result, no_match_indicator = match_merge_in_cycle(
            ps_1_view.id, 'PropertyState')
        self.assertEqual(count_result, 0)
        self.assertIsNone(no_match_indicator)

        # All three records remain distinct and unmerged
        self.assertEqual(Property.objects.count(), 3)
        self.assertEqual(PropertyState.objects.count(), 3)
        self.assertEqual(PropertyView.objects.count(), 3)

        state_ids = list(PropertyView.objects.all().values_list('state_id', flat=True))
        self.assertCountEqual([ps_1.id, ps_2.id, ps_3.id], state_ids)

    def test_match_merge_in_cycle_rolls_up_existing_taxlot_matches_in_updated_state_order_with_final_priority_given_to_selected_taxlot(
            self):
        """
        Import 4 non-matching records each with different cities and
        state_orders (extra data field).

        Create a Column record for state_orders, and update merge protection
        setting for the city column.

        Re-save the -States in a deliberate order (4, 2, 3) while making their
        jurisdiction_tax_lot_ids match; saving presumably bumps each -State's
        'updated' timestamp, which is what establishes the expected rollup
        order — confirm against the TaxLotState model.

        Run merging and unmerge records to unravel and reveal the merge order.

        Mirrors the Property rollup test above, using TaxLot models.
        """
        base_details = {
            'jurisdiction_tax_lot_id': '123MatchID',
            'city': '1st Oldest City',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
            'extra_data': {
                'state_order': 'first',
            },
        }
        tls_1 = self.taxlot_state_factory.get_taxlot_state(**base_details)

        base_details['jurisdiction_tax_lot_id'] = '123DifferentID'
        base_details['city'] = '2nd Oldest City'
        base_details['extra_data']['state_order'] = 'second'
        tls_2 = self.taxlot_state_factory.get_taxlot_state(**base_details)

        base_details['jurisdiction_tax_lot_id'] = '456DifferentID'
        base_details['city'] = '3rd Oldest City'
        base_details['extra_data']['state_order'] = 'third'
        tls_3 = self.taxlot_state_factory.get_taxlot_state(**base_details)

        base_details['jurisdiction_tax_lot_id'] = '789DifferentID'
        base_details['city'] = '4th Oldest City'
        base_details['extra_data']['state_order'] = 'fourth'
        tls_4 = self.taxlot_state_factory.get_taxlot_state(**base_details)

        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)

        # Create (ED) 'state_order' column and update merge protection column for 'city'
        self.org.column_set.create(
            column_name='state_order',
            is_extra_data=True,
            table_name='TaxLotState',
            merge_protection=Column.COLUMN_MERGE_FAVOR_EXISTING)
        self.org.column_set.filter(
            column_name='city',
            table_name='TaxLotState').update(
                merge_protection=Column.COLUMN_MERGE_FAVOR_EXISTING)

        # Update -States to make the roll up order be 4, 2, 3
        # (save order, not id order, dictates rollup precedence here)
        refreshed_tls_4 = TaxLotState.objects.get(id=tls_4.id)
        refreshed_tls_4.jurisdiction_tax_lot_id = '123MatchID'
        refreshed_tls_4.save()

        refreshed_tls_2 = TaxLotState.objects.get(id=tls_2.id)
        refreshed_tls_2.jurisdiction_tax_lot_id = '123MatchID'
        refreshed_tls_2.save()

        refreshed_tls_3 = TaxLotState.objects.get(id=tls_3.id)
        refreshed_tls_3.jurisdiction_tax_lot_id = '123MatchID'
        refreshed_tls_3.save()

        # run match_merge_in_cycle giving
        # (view_id_result unused — only the merge count is asserted)
        manual_merge_view = TaxLotView.objects.get(state_id=tls_1.id)
        count_result, view_id_result = match_merge_in_cycle(
            manual_merge_view.id, 'TaxLotState')
        self.assertEqual(count_result, 4)

        """
        Verify everything's rolled up to one -View with precedence given to
        manual merge -View with '1st Oldest City'. '1st Oldest City' is
        expected to be final City value since this rollup should ignore Merge
        Protection.
        """
        self.assertEqual(TaxLotView.objects.count(), 1)
        only_view = TaxLotView.objects.get()
        self.assertEqual(only_view.state.city, '1st Oldest City')
        self.assertEqual(only_view.state.extra_data['state_order'], 'first')

        """
        Undoing 1 rollup merge should expose a set -State having
        '3rd Oldest City' and state_order of 'third'.
        """
        rollback_unmerge_url_1 = reverse(
            'api:v2:taxlots-unmerge',
            args=[only_view.id]) + '?organization_id={}'.format(self.org.pk)
        self.client.post(rollback_unmerge_url_1, content_type='application/json')
        rollback_view_1 = TaxLotView.objects.prefetch_related('state').exclude(
            state__city='1st Oldest City').get()
        self.assertEqual(rollback_view_1.state.city, '3rd Oldest City')
        self.assertEqual(rollback_view_1.state.extra_data['state_order'], 'third')

        """
        Undoing another rollup merge should expose a set -State having
        '2nd Oldest City' and state_order of 'second'.
        """
        rollback_unmerge_url_2 = reverse(
            'api:v2:taxlots-unmerge', args=[
                rollback_view_1.id
            ]) + '?organization_id={}'.format(self.org.pk)
        self.client.post(rollback_unmerge_url_2, content_type='application/json')
        rollback_view_2 = TaxLotView.objects.prefetch_related('state').exclude(
            state__city__in=['1st Oldest City', '3rd Oldest City']).get()
        self.assertEqual(rollback_view_2.state.city, '2nd Oldest City')
        self.assertEqual(rollback_view_2.state.extra_data['state_order'], 'second')

    def test_match_merge_in_cycle_ignores_taxlots_with_unpopulated_matching_criteria(
            self):
        """TaxLots whose matching-criteria fields are all empty must never be
        merged by match_merge_in_cycle (count 0, None indicator)."""
        base_details = {
            'city': 'Golden',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }

        # Create 3 non-duplicate taxlots with unpopulated matching criteria
        tls_1 = self.taxlot_state_factory.get_taxlot_state(**base_details)

        base_details['city'] = 'Denver'
        tls_2 = self.taxlot_state_factory.get_taxlot_state(**base_details)

        base_details['city'] = 'Philadelphia'
        tls_3 = self.taxlot_state_factory.get_taxlot_state(**base_details)

        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)

        # Verify no match merges happen
        tls_1_view = TaxLotView.objects.get(state_id=tls_1.id)
        count_result, no_match_indicator = match_merge_in_cycle(
            tls_1_view.id, 'TaxLotState')
        self.assertEqual(count_result, 0)
        self.assertIsNone(no_match_indicator)

        # All three records remain distinct and unmerged
        self.assertEqual(TaxLot.objects.count(), 3)
        self.assertEqual(TaxLotState.objects.count(), 3)
        self.assertEqual(TaxLotView.objects.count(), 3)

        state_ids = list(TaxLotView.objects.all().values_list('state_id', flat=True))
        self.assertCountEqual([tls_1.id, tls_2.id, tls_3.id], state_ids)