def test_no_unmatched_buildings(self):
    """Make sure we shortcut out if there isn't unmatched data."""
    bs1_data = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    self.import_file.mapping_done = True
    self.import_file.save()
    util.make_fake_snapshot(self.import_file, bs1_data, ASSESSED_BS, is_canon=True)

    self.assertEqual(BuildingSnapshot.objects.all().count(), 1)

    tasks.match_buildings(self.import_file.pk, self.fake_user.pk)

    self.assertEqual(BuildingSnapshot.objects.all().count(), 1)
def test_match_no_canonical_buildings(self):
    """If no canonicals exist, create, but no new BSes."""
    bs1_data = {
        "pm_property_id": 1243,
        "tax_lot_id": "435/422",
        "property_name": "Greenfield Complex",
        "custom_id_1": 1243,
        "address_line_1": "555 Database LN.",
        "address_line_2": "",
        "city": "Gotham City",
        "postal_code": 8999,
    }
    # Note: no Canonical Building is created for this snapshot.
    snapshot = util.make_fake_snapshot(
        self.import_file, bs1_data, ASSESSED_BS, is_canon=False, org=self.fake_org
    )
    self.import_file.mapping_done = True
    self.import_file.save()

    self.assertEqual(snapshot.canonical_building, None)
    self.assertEqual(BuildingSnapshot.objects.all().count(), 1)

    tasks.match_buildings(self.import_file.pk, self.fake_user.pk)

    refreshed_snapshot = BuildingSnapshot.objects.get(pk=snapshot.pk)
    self.assertNotEqual(refreshed_snapshot.canonical_building, None)
    self.assertEqual(BuildingSnapshot.objects.all().count(), 1)
def test_match_buildings(self):
    """Good case for testing our matching system."""
    bs_data = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 12,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # Since the change to not match duplicates, a second record that isn't
    # exactly the same is needed to run this test. In this case
    # address_line_2 has a value of 'A' rather than ''.
    bs_data_2 = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 12,
        'address_line_1': '555 Database LN.',
        'address_line_2': 'A',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # Setup mapped AS snapshot.
    snapshot = util.make_fake_snapshot(
        self.import_file, bs_data, ASSESSED_BS, is_canon=True, org=self.fake_org
    )
    # Different file, but same ImportRecord.
    # Setup mapped PM snapshot.
    # Should be an identical match.
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    new_snapshot = util.make_fake_snapshot(
        new_import_file, bs_data_2, PORTFOLIO_BS, org=self.fake_org
    )

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    result = BuildingSnapshot.objects.all()[0]

    self.assertEqual(result.property_name, snapshot.property_name)
    self.assertEqual(result.property_name, new_snapshot.property_name)
    # Since these two buildings share a common ID, we match that way.
    self.assertEqual(result.confidence, 0.9)
    self.assertEqual(
        sorted([r.pk for r in result.parents.all()]),
        sorted([new_snapshot.pk, snapshot.pk])
    )
    self.assertGreater(AuditLog.objects.count(), 0)
    self.assertEqual(
        AuditLog.objects.first().action_note,
        'System matched building ID.'
    )
def test_match_no_canonical_buildings(self):
    """If no canonicals exist, create, but no new BSes."""
    bs1_data = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # Note: no Canonical Building is created for this snapshot.
    snapshot = util.make_fake_snapshot(
        self.import_file, bs1_data, ASSESSED_BS, is_canon=False, org=self.fake_org
    )
    self.import_file.mapping_done = True
    self.import_file.save()

    self.assertEqual(snapshot.canonical_building, None)
    self.assertEqual(BuildingSnapshot.objects.all().count(), 1)

    tasks.match_buildings(self.import_file.pk, self.fake_user.pk)

    refreshed_snapshot = BuildingSnapshot.objects.get(pk=snapshot.pk)
    self.assertNotEqual(refreshed_snapshot.canonical_building, None)
    self.assertEqual(BuildingSnapshot.objects.all().count(), 1)
def test_get_ancestors(self):
    """Tests get_ancestors(building), returns all non-composite, non-raw
    BuildingSnapshot instances.
    """
    bs_data = {
        "pm_property_id": 1243,
        "tax_lot_id": "435/422",
        "property_name": "Greenfield Complex",
        "custom_id_1": 1243,
        "address_line_1": "555 Database LN.",
        "address_line_2": "",
        "city": "Gotham City",
        "postal_code": 8999,
    }
    # Setup mapped AS snapshot.
    snapshot = util.make_fake_snapshot(
        self.import_file, bs_data, ASSESSED_BS, is_canon=True, org=self.fake_org
    )
    # Different file, but same ImportRecord.
    # Setup mapped PM snapshot.
    # Should be an identical match.
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, raw_save_done=True, mapping_done=True
    )
    new_snapshot = util.make_fake_snapshot(
        new_import_file, bs_data, PORTFOLIO_BS, org=self.fake_org
    )

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    result = BuildingSnapshot.objects.filter(source_type=4)[0]
    ancestor_pks = set([b.pk for b in get_ancestors(result)])
    buildings = BuildingSnapshot.objects.filter(
        source_type__in=[2, 3]
    ).exclude(pk=result.pk)
    building_pks = set([b.pk for b in buildings])

    self.assertEqual(ancestor_pks, building_pks)
def test_separates_system_and_possible_match_types(self):
    """We save possible matches separately."""
    bs1_data = {
        "pm_property_id": 123,
        "tax_lot_id": "435/422",
        "property_name": "Greenfield Complex",
        "custom_id_1": 1243,
        "address_line_1": "555 NorthWest Databaseer Lane.",
        "address_line_2": "",
        "city": "Gotham City",
        "postal_code": 8999,
    }
    # This building will have a lot less data to identify it.
    bs2_data = {
        "pm_property_id": 1243,
        "custom_id_1": 1243,
        "address_line_1": "555 Database LN.",
        "city": "Gotham City",
        "postal_code": 8999,
    }
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )

    util.make_fake_snapshot(
        self.import_file, bs1_data, ASSESSED_BS, is_canon=True, org=self.fake_org
    )
    util.make_fake_snapshot(new_import_file, bs2_data, PORTFOLIO_BS, org=self.fake_org)

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    self.assertEqual(
        BuildingSnapshot.objects.filter(match_type=POSSIBLE_MATCH).count(), 0
    )
    self.assertEqual(
        BuildingSnapshot.objects.filter(match_type=SYSTEM_MATCH).count(), 1
    )
def test_match_no_canonical_buildings(self):
    """If no canonicals exist, create, but no new BSes."""
    bs1_data = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # Note: no Canonical Building is created for this snapshot.
    snapshot = util.make_fake_snapshot(
        self.import_file, bs1_data, ASSESSED_BS, is_canon=False
    )
    self.import_file.mapping_done = True
    self.import_file.save()

    self.assertEqual(snapshot.canonical_building, None)
    self.assertEqual(BuildingSnapshot.objects.all().count(), 1)

    tasks.match_buildings(self.import_file.pk)

    refreshed_snapshot = BuildingSnapshot.objects.get(pk=snapshot.pk)
    self.assertNotEqual(refreshed_snapshot.canonical_building, None)
    self.assertEqual(BuildingSnapshot.objects.all().count(), 1)
def test_match_buildings(self):
    """Good case for testing our matching system."""
    bs_data = {
        "pm_property_id": 1243,
        "tax_lot_id": "435/422",
        "property_name": "Greenfield Complex",
        "custom_id_1": 12,
        "address_line_1": "555 Database LN.",
        "address_line_2": "",
        "city": "Gotham City",
        "postal_code": 8999,
    }
    # Setup mapped AS snapshot.
    snapshot = util.make_fake_snapshot(
        self.import_file, bs_data, ASSESSED_BS, is_canon=True, org=self.fake_org
    )
    # Different file, but same ImportRecord.
    # Setup mapped PM snapshot.
    # Should be an identical match.
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    new_snapshot = util.make_fake_snapshot(
        new_import_file, bs_data, PORTFOLIO_BS, org=self.fake_org
    )

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    result = BuildingSnapshot.objects.all()[0]

    self.assertEqual(result.property_name, snapshot.property_name)
    self.assertEqual(result.property_name, new_snapshot.property_name)
    # Since these two buildings share a common ID, we match that way.
    self.assertEqual(result.confidence, 0.9)
    self.assertEqual(
        sorted([r.pk for r in result.parents.all()]),
        sorted([new_snapshot.pk, snapshot.pk])
    )
    self.assertGreater(AuditLog.objects.count(), 0)
    self.assertEqual(
        AuditLog.objects.first().action_note, "System matched building ID."
    )
def test_get_ancestors(self):
    """Tests get_ancestors(building), returns all non-composite, non-raw
    BuildingSnapshot instances.
    """
    bs_data = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # Since we changed to not match duplicate data, make a second record
    # that matches with something slightly changed. In this case an 'A' is
    # appended to the end of address_line_1.
    bs_data_2 = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN. A',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # Setup mapped AS snapshot.
    snapshot = util.make_fake_snapshot(
        self.import_file, bs_data, ASSESSED_BS, is_canon=True, org=self.fake_org
    )
    # Different file, but same ImportRecord.
    # Setup mapped PM snapshot.
    # Should be an identical match.
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, raw_save_done=True, mapping_done=True
    )
    new_snapshot = util.make_fake_snapshot(
        new_import_file, bs_data_2, PORTFOLIO_BS, org=self.fake_org
    )

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    result = BuildingSnapshot.objects.filter(source_type=4)[0]
    ancestor_pks = set([b.pk for b in get_ancestors(result)])
    buildings = BuildingSnapshot.objects.filter(
        source_type__in=[2, 3]
    ).exclude(pk=result.pk)
    building_pks = set([b.pk for b in buildings])

    self.assertEqual(ancestor_pks, building_pks)
def test_match_buildings(self):
    """Good case for testing our matching system."""
    bs_data = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 12,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # Since the change to not match duplicates, a second record that isn't
    # exactly the same is needed to run this test. In this case
    # address_line_2 has a value of 'A' rather than ''.
    bs_data_2 = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 12,
        'address_line_1': '555 Database LN.',
        'address_line_2': 'A',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # Setup mapped AS snapshot.
    snapshot = util.make_fake_snapshot(
        self.import_file, bs_data, ASSESSED_BS, is_canon=True, org=self.fake_org
    )
    # Different file, but same ImportRecord.
    # Setup mapped PM snapshot.
    # Should be an identical match.
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    new_snapshot = util.make_fake_snapshot(
        new_import_file, bs_data_2, PORTFOLIO_BS, org=self.fake_org
    )

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    result = BuildingSnapshot.objects.all()[0]

    self.assertEqual(result.property_name, snapshot.property_name)
    self.assertEqual(result.property_name, new_snapshot.property_name)
    # Since these two buildings share a common ID, we match that way.
    self.assertEqual(result.confidence, 0.9)
    self.assertEqual(
        sorted([r.pk for r in result.parents.all()]),
        sorted([new_snapshot.pk, snapshot.pk])
    )
    self.assertGreater(AuditLog.objects.count(), 0)
    self.assertEqual(
        AuditLog.objects.first().action_note, 'System matched building ID.'
    )
def test_delete_organization_buildings(self):
    """Tests deleting the buildings for an org."""
    # Start with the normal use case.
    bs1_data = {
        "pm_property_id": 123,
        "tax_lot_id": "435/422",
        "property_name": "Greenfield Complex",
        "custom_id_1": 1243,
        "address_line_1": "555 NorthWest Databaseer Lane.",
        "address_line_2": "",
        "city": "Gotham City",
        "postal_code": 8999,
    }
    # This building will have a lot less data to identify it.
    bs2_data = {
        "pm_property_id": 1243,
        "custom_id_1": 1243,
        "address_line_1": "555 Database LN.",
        "city": "Gotham City",
        "postal_code": 8999,
    }
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )

    snapshot = util.make_fake_snapshot(self.import_file, bs1_data, ASSESSED_BS, is_canon=True)
    snapshot.super_organization = self.fake_org
    snapshot.save()

    snapshot = util.make_fake_snapshot(new_import_file, bs2_data, PORTFOLIO_BS)
    snapshot.super_organization = self.fake_org
    snapshot.save()

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    # Make one more building snapshot in a different org.
    fake_org_2 = Organization.objects.create()
    snapshot = util.make_fake_snapshot(self.import_file, bs1_data, ASSESSED_BS, is_canon=True)
    snapshot.super_organization = fake_org_2
    snapshot.save()

    self.assertGreater(
        BuildingSnapshot.objects.filter(super_organization=self.fake_org).count(), 0
    )

    tasks.delete_organization_buildings(self.fake_org.pk)

    self.assertEqual(
        BuildingSnapshot.objects.filter(super_organization=self.fake_org).count(), 0
    )
    self.assertGreater(
        BuildingSnapshot.objects.filter(super_organization=fake_org_2).count(), 0
    )

    # Test that the CanonicalBuildings are deleted.
    self.assertEqual(
        CanonicalBuilding.objects.filter(
            canonical_snapshot__super_organization=self.fake_org
        ).count(),
        0
    )
    # Test that other orgs' CanonicalBuildings are not deleted.
    self.assertGreater(
        CanonicalBuilding.objects.filter(
            canonical_snapshot__super_organization=fake_org_2
        ).count(),
        0
    )
def test_get_ancestors(self):
    """Tests get_ancestors(building), returns all non-composite, non-raw
    BuildingSnapshot instances.
    """
    bs_data = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # Since we changed to not match duplicate data, make a second record
    # that matches with something slightly changed. In this case an 'A' is
    # appended to the end of address_line_1.
    bs_data_2 = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN. A',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # Setup mapped AS snapshot.
    snapshot = util.make_fake_snapshot(
        self.import_file, bs_data, ASSESSED_BS, is_canon=True, org=self.fake_org
    )
    # Different file, but same ImportRecord.
    # Setup mapped PM snapshot.
    # Should be an identical match.
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, raw_save_done=True, mapping_done=True
    )
    new_snapshot = util.make_fake_snapshot(
        new_import_file, bs_data_2, PORTFOLIO_BS, org=self.fake_org
    )

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    result = BuildingSnapshot.objects.filter(source_type=4)[0]
    ancestor_pks = set([b.pk for b in get_ancestors(result)])
    buildings = BuildingSnapshot.objects.filter(
        source_type__in=[2, 3]
    ).exclude(pk=result.pk)
    building_pks = set([b.pk for b in buildings])

    self.assertEqual(ancestor_pks, building_pks)
def test_match_no_matches(self):
    """When a canonical exists, but doesn't match, we create a new one."""
    bs1_data = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    bs2_data = {
        'pm_property_id': 9999,
        'tax_lot_id': '1231',
        'property_name': 'A Place',
        'custom_id_1': 0o000111000,
        'address_line_1': '44444 Hmmm Ave.',
        'address_line_2': 'Apt 4',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    snapshot = util.make_fake_snapshot(
        self.import_file, bs1_data, ASSESSED_BS, is_canon=True
    )
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    new_snapshot = util.make_fake_snapshot(
        new_import_file, bs2_data, PORTFOLIO_BS, org=self.fake_org
    )

    self.assertEqual(BuildingSnapshot.objects.all().count(), 2)

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    # I.e. we didn't create a match.
    self.assertEqual(BuildingSnapshot.objects.all().count(), 2)

    latest_snapshot = BuildingSnapshot.objects.get(pk=new_snapshot.pk)

    # But we did create another canonical building for the unmatched bs.
    self.assertNotEqual(latest_snapshot.canonical_building, None)
    self.assertNotEqual(
        latest_snapshot.canonical_building.pk, snapshot.canonical_building.pk
    )
    self.assertEqual(latest_snapshot.confidence, None)
def test_match_no_matches(self):
    """When a canonical exists, but doesn't match, we create a new one."""
    bs1_data = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    bs2_data = {
        'pm_property_id': 9999,
        'tax_lot_id': '1231',
        'property_name': 'A Place',
        'custom_id_1': 0o000111000,
        'address_line_1': '44444 Hmmm Ave.',
        'address_line_2': 'Apt 4',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    snapshot = util.make_fake_snapshot(self.import_file, bs1_data, ASSESSED_BS, is_canon=True)
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    new_snapshot = util.make_fake_snapshot(
        new_import_file, bs2_data, PORTFOLIO_BS, org=self.fake_org
    )

    self.assertEqual(BuildingSnapshot.objects.all().count(), 2)

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    # I.e. we didn't create a match.
    self.assertEqual(BuildingSnapshot.objects.all().count(), 2)

    latest_snapshot = BuildingSnapshot.objects.get(pk=new_snapshot.pk)

    # But we did create another canonical building for the unmatched bs.
    self.assertNotEqual(latest_snapshot.canonical_building, None)
    self.assertNotEqual(
        latest_snapshot.canonical_building.pk, snapshot.canonical_building.pk
    )
    self.assertEqual(latest_snapshot.confidence, None)
def start_system_matching(request):
    """Match data in this import file to existing canonical buildings."""
    body = json.loads(request.body)
    import_file_id = body.get('file_id')
    if not import_file_id:
        return {'status': 'error'}

    return match_buildings(import_file_id)
def test_separates_system_and_possible_match_types(self):
    """We save possible matches separately."""
    bs1_data = {
        'pm_property_id': 123,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 NorthWest Databaseer Lane.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # This building will have a lot less data to identify it.
    bs2_data = {
        'pm_property_id': 1243,
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )

    util.make_fake_snapshot(
        self.import_file, bs1_data, ASSESSED_BS, is_canon=True, org=self.fake_org
    )
    util.make_fake_snapshot(new_import_file, bs2_data, PORTFOLIO_BS, org=self.fake_org)

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    self.assertEqual(
        BuildingSnapshot.objects.filter(match_type=POSSIBLE_MATCH).count(), 0
    )
    self.assertEqual(
        BuildingSnapshot.objects.filter(match_type=SYSTEM_MATCH).count(), 1
    )
def test_handle_id_matches_duplicate_data(self):
    """
    Test for handle_id_matches behavior when matching duplicate data
    """
    bs_data = {
        'pm_property_id': "2360",
        'tax_lot_id': '476/460',
        'property_name': 'Garfield Complex',
        'custom_id_1': "89",
        'address_line_1': '12975 Database LN.',
        'address_line_2': '',
        'city': 'Cartoon City',
        'postal_code': "54321",
    }
    # Setup mapped AS snapshot.
    util.make_fake_snapshot(
        self.import_file, bs_data, ASSESSED_BS, is_canon=True, org=self.fake_org
    )
    # Different file, but same ImportRecord.
    # Setup mapped PM snapshot.
    # Should be an identical match.
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    duplicate_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    new_snapshot = util.make_fake_snapshot(
        duplicate_import_file, bs_data, PORTFOLIO_BS, org=self.fake_org
    )

    self.assertRaises(
        tasks.DuplicateDataError,
        tasks.handle_id_matches,
        new_snapshot,
        duplicate_import_file,
        self.fake_user.pk
    )
def test_separates_system_and_possible_match_types(self):
    """We save possible matches separately."""
    bs1_data = {
        'pm_property_id': 123,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 NorthWest Databaseer Lane.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # This building will have a lot less data to identify it.
    bs2_data = {
        'pm_property_id': 1243,
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )

    util.make_fake_snapshot(
        self.import_file, bs1_data, ASSESSED_BS, is_canon=True
    )
    util.make_fake_snapshot(new_import_file, bs2_data, PORTFOLIO_BS)

    tasks.match_buildings(new_import_file.pk)

    self.assertEqual(
        BuildingSnapshot.objects.filter(match_type=POSSIBLE_MATCH).count(), 1
    )
    self.assertEqual(
        BuildingSnapshot.objects.filter(match_type=SYSTEM_MATCH).count(), 0
    )
def test_handle_id_matches_duplicate_data(self):
    """
    Test for handle_id_matches behavior when matching duplicate data
    """
    bs_data = {
        'pm_property_id': "2360",
        'tax_lot_id': '476/460',
        'property_name': 'Garfield Complex',
        'custom_id_1': "89",
        'address_line_1': '12975 Database LN.',
        'address_line_2': '',
        'city': 'Cartoon City',
        'postal_code': "54321",
    }
    # Setup mapped AS snapshot.
    util.make_fake_snapshot(
        self.import_file, bs_data, ASSESSED_BS, is_canon=True, org=self.fake_org
    )
    # Different file, but same ImportRecord.
    # Setup mapped PM snapshot.
    # Should be an identical match.
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    duplicate_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    new_snapshot = util.make_fake_snapshot(
        duplicate_import_file, bs_data, PORTFOLIO_BS, org=self.fake_org
    )

    self.assertRaises(
        tasks.DuplicateDataError,
        tasks.handle_id_matches,
        new_snapshot,
        duplicate_import_file,
        self.fake_user.pk
    )
def test_match_duplicate_buildings(self):
    """
    Test for behavior when trying to match duplicate building data
    """
    bs_data = {
        'pm_property_id': "8450",
        'tax_lot_id': '143/292',
        'property_name': 'Greenfield Complex',
        'custom_id_1': "99",
        'address_line_1': '93754 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': "8999",
    }
    import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )

    # Setup mapped PM snapshot.
    snapshot = util.make_fake_snapshot(
        import_file, bs_data, PORTFOLIO_BS, is_canon=True, org=self.fake_org
    )
    # Different file, but same ImportRecord.
    # Setup mapped PM snapshot.
    # Should be a duplicate.
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    new_snapshot = util.make_fake_snapshot(
        new_import_file, bs_data, PORTFOLIO_BS, org=self.fake_org
    )

    tasks.match_buildings(import_file.pk, self.fake_user.pk)
    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    self.assertEqual(len(BuildingSnapshot.objects.all()), 2)
def test_match_buildings(self):
    """Good case for testing our matching system."""
    bs_data = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # Setup mapped AS snapshot.
    snapshot = util.make_fake_snapshot(
        self.import_file, bs_data, ASSESSED_BS, is_canon=True
    )
    # Different file, but same ImportRecord.
    # Setup mapped PM snapshot.
    # Should be an identical match.
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    new_snapshot = util.make_fake_snapshot(
        new_import_file, bs_data, PORTFOLIO_BS
    )

    tasks.match_buildings(new_import_file.pk)

    result = BuildingSnapshot.objects.all()[0]

    self.assertEqual(result.property_name, snapshot.property_name)
    self.assertEqual(result.property_name, new_snapshot.property_name)
    self.assertEqual(result.confidence, 1.0)
    self.assertEqual(
        sorted([r.pk for r in result.parents.all()]),
        sorted([new_snapshot.pk, snapshot.pk])
    )
def test_no_unmatched_buildings(self):
    """Make sure we shortcut out if there isn't unmatched data."""
    bs1_data = {
        "pm_property_id": 1243,
        "tax_lot_id": "435/422",
        "property_name": "Greenfield Complex",
        "custom_id_1": 1243,
        "address_line_1": "555 Database LN.",
        "address_line_2": "",
        "city": "Gotham City",
        "postal_code": 8999,
    }
    self.import_file.mapping_done = True
    self.import_file.save()
    util.make_fake_snapshot(self.import_file, bs1_data, ASSESSED_BS, is_canon=True)

    self.assertEqual(BuildingSnapshot.objects.all().count(), 1)

    tasks.match_buildings(self.import_file.pk, self.fake_user.pk)

    self.assertEqual(BuildingSnapshot.objects.all().count(), 1)
def test_match_duplicate_buildings(self):
    """
    Test for behavior when trying to match duplicate building data
    """
    bs_data = {
        'pm_property_id': "8450",
        'tax_lot_id': '143/292',
        'property_name': 'Greenfield Complex',
        'custom_id_1': "99",
        'address_line_1': '93754 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': "8999",
    }
    import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )

    # Setup mapped PM snapshot.
    snapshot = util.make_fake_snapshot(
        import_file, bs_data, PORTFOLIO_BS, is_canon=True, org=self.fake_org
    )
    # Different file, but same ImportRecord.
    # Setup mapped PM snapshot.
    # Should be a duplicate.
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    new_snapshot = util.make_fake_snapshot(
        new_import_file, bs_data, PORTFOLIO_BS, org=self.fake_org
    )

    tasks.match_buildings(import_file.pk, self.fake_user.pk)
    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    self.assertEqual(len(BuildingSnapshot.objects.all()), 2)
def test_no_unmatched_buildings(self):
    """Make sure we shortcut out if there isn't unmatched data."""
    bs1_data = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    self.import_file.mapping_done = True
    self.import_file.save()
    util.make_fake_snapshot(
        self.import_file, bs1_data, ASSESSED_BS, is_canon=True
    )

    self.assertEqual(BuildingSnapshot.objects.all().count(), 1)

    tasks.match_buildings(self.import_file.pk)

    self.assertEqual(BuildingSnapshot.objects.all().count(), 1)
    }
    snapshot = util.make_fake_snapshot(
        self.import_file, bs1_data, ASSESSED_BS, is_canon=True
    )
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    new_snapshot = util.make_fake_snapshot(
        new_import_file, bs2_data, PORTFOLIO_BS
    )

    self.assertEqual(BuildingSnapshot.objects.all().count(), 2)

    tasks.match_buildings(new_import_file.pk)

    # I.e. we didn't create a match.
    self.assertEqual(BuildingSnapshot.objects.all().count(), 2)

    latest_snapshot = BuildingSnapshot.objects.get(pk=new_snapshot.pk)

    # But we did create another canonical building for the unmatched bs.
    self.assertNotEqual(latest_snapshot.canonical_building, None)
    self.assertNotEqual(
        latest_snapshot.canonical_building.pk, snapshot.canonical_building.pk
    )
    self.assertEqual(latest_snapshot.confidence, None)

def test_match_no_canonical_buildings(self):
    }
    snapshot = util.make_fake_snapshot(self.import_file, bs1_data, ASSESSED_BS, is_canon=True)
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    new_snapshot = util.make_fake_snapshot(
        new_import_file, bs2_data, PORTFOLIO_BS, org=self.fake_org
    )

    self.assertEqual(BuildingSnapshot.objects.all().count(), 2)

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    # I.e. we didn't create a match.
    self.assertEqual(BuildingSnapshot.objects.all().count(), 2)

    latest_snapshot = BuildingSnapshot.objects.get(pk=new_snapshot.pk)

    # But we did create another canonical building for the unmatched bs.
    self.assertNotEqual(latest_snapshot.canonical_building, None)
    self.assertNotEqual(
        latest_snapshot.canonical_building.pk, snapshot.canonical_building.pk
    )
    self.assertEqual(latest_snapshot.confidence, None)

def test_match_no_canonical_buildings(self):
    """If no canonicals exist, create, but no new BSes."""
    bs1_data = {
    }
    snapshot = util.make_fake_snapshot(
        self.import_file, bs1_data, ASSESSED_BS, is_canon=True
    )
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    new_snapshot = util.make_fake_snapshot(
        new_import_file, bs2_data, PORTFOLIO_BS, org=self.fake_org
    )

    self.assertEqual(BuildingSnapshot.objects.all().count(), 2)

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    # I.e. we didn't create a match.
    self.assertEqual(BuildingSnapshot.objects.all().count(), 2)

    latest_snapshot = BuildingSnapshot.objects.get(pk=new_snapshot.pk)

    # But we did create another canonical building for the unmatched bs.
    self.assertNotEqual(latest_snapshot.canonical_building, None)
    self.assertNotEqual(
        latest_snapshot.canonical_building.pk, snapshot.canonical_building.pk
    )
    self.assertEqual(latest_snapshot.confidence, None)

def test_match_no_canonical_buildings(self):
def test_delete_organization_buildings(self):
    """Tests deleting the buildings for an org."""
    # Start with the normal use case.
    bs1_data = {
        'pm_property_id': 123,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 NorthWest Databaseer Lane.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # This building will have a lot less data to identify it.
    bs2_data = {
        'pm_property_id': 1243,
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )

    snapshot = util.make_fake_snapshot(self.import_file, bs1_data, ASSESSED_BS, is_canon=True)
    snapshot.super_organization = self.fake_org
    snapshot.save()

    snapshot = util.make_fake_snapshot(new_import_file, bs2_data, PORTFOLIO_BS)
    snapshot.super_organization = self.fake_org
    snapshot.save()

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    # Make one more building snapshot in a different org.
    fake_org_2 = Organization.objects.create()
    snapshot = util.make_fake_snapshot(self.import_file, bs1_data, ASSESSED_BS, is_canon=True)
    snapshot.super_organization = fake_org_2
    snapshot.save()

    self.assertGreater(
        BuildingSnapshot.objects.filter(super_organization=self.fake_org).count(), 0
    )

    tasks.delete_organization_buildings(self.fake_org.pk)

    self.assertEqual(
        BuildingSnapshot.objects.filter(super_organization=self.fake_org).count(), 0
    )
    self.assertGreater(
        BuildingSnapshot.objects.filter(super_organization=fake_org_2).count(), 0
    )

    # Test that the CanonicalBuildings are deleted.
    self.assertEqual(
        CanonicalBuilding.objects.filter(
            canonical_snapshot__super_organization=self.fake_org
        ).count(),
        0
    )
    # Test that other orgs' CanonicalBuildings are not deleted.
    self.assertGreater(
        CanonicalBuilding.objects.filter(
            canonical_snapshot__super_organization=fake_org_2
        ).count(),
        0
    )