def test_reset_mapped_w_matching_done(self):
    """Make sure we don't delete buildings that have been merged."""
    # Mark the import file as having finished matching.
    self.import_file.matching_done = True
    self.import_file.matching_progress = 100
    self.import_file.save()

    for _ in range(10):
        test_util.make_fake_snapshot(self.import_file, {}, ASSESSED_BS)

    response = self.client.post(
        reverse_lazy("seed:remap_buildings"),
        data=json.dumps({'file_id': self.import_file.pk}),
        content_type='application/json',
    )

    # The endpoint should refuse to remap and respond with a warning.
    self.assertDictEqual(
        json.loads(response.content),
        {
            'status': 'warning',
            'message': 'Mapped buildings already merged',
        },
    )

    # Verify that we haven't deleted those mapped buildings.
    remaining = BuildingSnapshot.objects.filter(
        import_file=self.import_file
    ).count()
    self.assertEqual(remaining, 10)
def test_no_unmatched_buildings(self):
    """Make sure we shortcut out if there isn't unmatched data."""
    snapshot_fields = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    self.import_file.mapping_done = True
    self.import_file.save()
    # A single canonical snapshot: there is nothing unmatched to process.
    util.make_fake_snapshot(
        self.import_file, snapshot_fields, ASSESSED_BS, is_canon=True
    )
    self.assertEqual(BuildingSnapshot.objects.count(), 1)

    tasks.match_buildings(self.import_file.pk, self.fake_user.pk)

    # Matching should not have created any new snapshots.
    self.assertEqual(BuildingSnapshot.objects.count(), 1)
def test_match_buildings(self):
    """Good case for testing our matching system."""
    assessed_fields = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 12,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # Exact duplicates are no longer matched, so the second record must
    # differ somewhere: address_line_2 is 'A' instead of ''.
    portfolio_fields = dict(assessed_fields, address_line_2='A')

    # Mapped AS snapshot attached to a canonical building.
    assessed = util.make_fake_snapshot(
        self.import_file, assessed_fields, ASSESSED_BS, is_canon=True,
        org=self.fake_org
    )
    # Different file, but same ImportRecord; should be an identical match.
    pm_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    portfolio = util.make_fake_snapshot(
        pm_file, portfolio_fields, PORTFOLIO_BS, org=self.fake_org
    )

    tasks.match_buildings(pm_file.pk, self.fake_user.pk)

    merged = BuildingSnapshot.objects.all()[0]
    self.assertEqual(merged.property_name, assessed.property_name)
    self.assertEqual(merged.property_name, portfolio.property_name)
    # Since these two buildings share a common ID, we match that way.
    self.assertEqual(merged.confidence, 0.9)
    self.assertEqual(
        sorted(parent.pk for parent in merged.parents.all()),
        sorted([portfolio.pk, assessed.pk])
    )
    self.assertGreater(AuditLog.objects.count(), 0)
    self.assertEqual(
        AuditLog.objects.first().action_note,
        'System matched building ID.'
    )
def test_get_ancestors(self):
    """Tests get_ancestors(building), returns all non-composite, non-raw
    BuildingSnapshot instances.
    """
    record = {
        "pm_property_id": 1243,
        "tax_lot_id": "435/422",
        "property_name": "Greenfield Complex",
        "custom_id_1": 1243,
        "address_line_1": "555 Database LN.",
        "address_line_2": "",
        "city": "Gotham City",
        "postal_code": 8999,
    }
    # Mapped AS snapshot attached to a canonical building.
    util.make_fake_snapshot(self.import_file, record, ASSESSED_BS,
                            is_canon=True, org=self.fake_org)
    # Different file, but same ImportRecord; should be an identical match.
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record,
        raw_save_done=True,
        mapping_done=True,
    )
    util.make_fake_snapshot(new_import_file, record, PORTFOLIO_BS,
                            org=self.fake_org)

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    # source_type=4: presumably the merged result snapshot — its ancestors
    # should be exactly the source_type 2/3 snapshots other than itself.
    merged = BuildingSnapshot.objects.filter(source_type=4)[0]
    ancestor_pks = {b.pk for b in get_ancestors(merged)}
    expected = BuildingSnapshot.objects.filter(
        source_type__in=[2, 3]
    ).exclude(pk=merged.pk)
    self.assertEqual(ancestor_pks, {b.pk for b in expected})
def test_separates_system_and_possible_match_types(self):
    """We save possible matches separately."""
    detailed_record = {
        "pm_property_id": 123,
        "tax_lot_id": "435/422",
        "property_name": "Greenfield Complex",
        "custom_id_1": 1243,
        "address_line_1": "555 NorthWest Databaseer Lane.",
        "address_line_2": "",
        "city": "Gotham City",
        "postal_code": 8999,
    }
    # This building will have a lot less data to identify it.
    sparse_record = {
        "pm_property_id": 1243,
        "custom_id_1": 1243,
        "address_line_1": "555 Database LN.",
        "city": "Gotham City",
        "postal_code": 8999,
    }
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    util.make_fake_snapshot(self.import_file, detailed_record, ASSESSED_BS,
                            is_canon=True, org=self.fake_org)
    util.make_fake_snapshot(new_import_file, sparse_record, PORTFOLIO_BS,
                            org=self.fake_org)

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    possible = BuildingSnapshot.objects.filter(match_type=POSSIBLE_MATCH)
    system = BuildingSnapshot.objects.filter(match_type=SYSTEM_MATCH)
    self.assertEqual(possible.count(), 0)
    self.assertEqual(system.count(), 1)
def setUp(self):
    # A user inside an organization is required by the matching tasks.
    self.fake_user = User.objects.create(username='******')
    self.fake_org = Organization.objects.create()
    OrganizationUser.objects.create(
        user=self.fake_user,
        organization=self.fake_org,
    )

    # A meter to attach to one of the snapshots below.
    self.meter = seed_models.Meter.objects.create(
        name='test meter',
        energy_type=seed_models.ELECTRICITY,
        energy_units=seed_models.KILOWATT_HOURS,
    )

    # One ImportRecord with two ImportFiles, each holding one canonical
    # snapshot (one assessed, one Portfolio Manager).
    self.import_record = ImportRecord.objects.create(owner=self.fake_user)
    self.import_file1 = ImportFile.objects.create(
        import_record=self.import_record
    )
    self.import_file2 = ImportFile.objects.create(
        import_record=self.import_record
    )
    self.bs1 = util.make_fake_snapshot(
        self.import_file1,
        self.bs1_data,
        bs_type=seed_models.ASSESSED_BS,
        is_canon=True,
    )
    self.bs2 = util.make_fake_snapshot(
        self.import_file2,
        self.bs2_data,
        bs_type=seed_models.PORTFOLIO_BS,
        is_canon=True,
    )
    self.meter.building_snapshot.add(self.bs2)
def test_match_buildings(self):
    """Good case for testing our matching system."""
    building_fields = {
        "pm_property_id": 1243,
        "tax_lot_id": "435/422",
        "property_name": "Greenfield Complex",
        "custom_id_1": 12,
        "address_line_1": "555 Database LN.",
        "address_line_2": "",
        "city": "Gotham City",
        "postal_code": 8999,
    }
    # Mapped AS snapshot attached to a canonical building.
    assessed = util.make_fake_snapshot(
        self.import_file, building_fields, ASSESSED_BS, is_canon=True,
        org=self.fake_org
    )
    # Different file, but same ImportRecord; should be an identical match.
    pm_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    portfolio = util.make_fake_snapshot(
        pm_file, building_fields, PORTFOLIO_BS, org=self.fake_org
    )

    tasks.match_buildings(pm_file.pk, self.fake_user.pk)

    merged = BuildingSnapshot.objects.all()[0]
    self.assertEqual(merged.property_name, assessed.property_name)
    self.assertEqual(merged.property_name, portfolio.property_name)
    # Since these two buildings share a common ID, we match that way.
    self.assertEqual(merged.confidence, 0.9)
    self.assertEqual(
        sorted(parent.pk for parent in merged.parents.all()),
        sorted([portfolio.pk, assessed.pk])
    )
    self.assertGreater(AuditLog.objects.count(), 0)
    self.assertEqual(
        AuditLog.objects.first().action_note,
        "System matched building ID."
    )
def test_get_ancestors(self):
    """Tests get_ancestors(building), returns all non-composite, non-raw
    BuildingSnapshot instances.
    """
    base_record = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # Exact duplicates are no longer matched, so tweak the second record:
    # ' A' is appended to address_line_1.
    altered_record = dict(base_record, address_line_1='555 Database LN. A')

    # Mapped AS snapshot attached to a canonical building.
    util.make_fake_snapshot(
        self.import_file, base_record, ASSESSED_BS, is_canon=True,
        org=self.fake_org
    )
    # Different file, but same ImportRecord; should be an identical match.
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record,
        raw_save_done=True,
        mapping_done=True,
    )
    util.make_fake_snapshot(
        new_import_file, altered_record, PORTFOLIO_BS, org=self.fake_org
    )

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    # source_type=4: presumably the merged result snapshot — its ancestors
    # should be exactly the source_type 2/3 snapshots other than itself.
    merged = BuildingSnapshot.objects.filter(source_type=4)[0]
    ancestor_pks = {b.pk for b in get_ancestors(merged)}
    expected = BuildingSnapshot.objects.filter(
        source_type__in=[2, 3]
    ).exclude(pk=merged.pk)
    self.assertEqual(ancestor_pks, {b.pk for b in expected})
def test_match_buildings(self):
    """Good case for testing our matching system."""
    first_fields = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 12,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # Duplicates are no longer matched, so the second record must differ
    # somewhere; here address_line_2 is 'A' rather than ''.
    second_fields = dict(first_fields, address_line_2='A')

    # Mapped AS snapshot attached to a canonical building.
    assessed = util.make_fake_snapshot(
        self.import_file, first_fields, ASSESSED_BS, is_canon=True,
        org=self.fake_org
    )
    # Different file, but same ImportRecord; should be an identical match.
    pm_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    portfolio = util.make_fake_snapshot(
        pm_file, second_fields, PORTFOLIO_BS, org=self.fake_org
    )

    tasks.match_buildings(pm_file.pk, self.fake_user.pk)

    merged = BuildingSnapshot.objects.all()[0]
    self.assertEqual(merged.property_name, assessed.property_name)
    self.assertEqual(merged.property_name, portfolio.property_name)
    # Since these two buildings share a common ID, we match that way.
    self.assertEqual(merged.confidence, 0.9)
    self.assertEqual(
        sorted(parent.pk for parent in merged.parents.all()),
        sorted([portfolio.pk, assessed.pk])
    )
    self.assertGreater(AuditLog.objects.count(), 0)
    self.assertEqual(
        AuditLog.objects.first().action_note,
        'System matched building ID.'
    )
def test_get_ancestors(self):
    """Tests get_ancestors(building), returns all non-composite, non-raw
    BuildingSnapshot instances.
    """
    original_fields = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # Exact duplicates are no longer matched, so the second record gets
    # ' A' appended to address_line_1.
    tweaked_fields = dict(
        original_fields, address_line_1='555 Database LN. A'
    )

    # Mapped AS snapshot attached to a canonical building.
    util.make_fake_snapshot(self.import_file, original_fields, ASSESSED_BS,
                            is_canon=True, org=self.fake_org)
    # Different file, but same ImportRecord; should be an identical match.
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record,
        raw_save_done=True,
        mapping_done=True,
    )
    util.make_fake_snapshot(new_import_file, tweaked_fields, PORTFOLIO_BS,
                            org=self.fake_org)

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    # source_type=4: presumably the merged result snapshot — its ancestors
    # should be exactly the source_type 2/3 snapshots other than itself.
    merged = BuildingSnapshot.objects.filter(source_type=4)[0]
    ancestor_pks = {b.pk for b in get_ancestors(merged)}
    expected = BuildingSnapshot.objects.filter(
        source_type__in=[2, 3]
    ).exclude(pk=merged.pk)
    self.assertEqual(ancestor_pks, {b.pk for b in expected})
def test_delete_organization_buildings(self): """tests the delete builings for an org""" # start with the normal use case bs1_data = { "pm_property_id": 123, "tax_lot_id": "435/422", "property_name": "Greenfield Complex", "custom_id_1": 1243, "address_line_1": "555 NorthWest Databaseer Lane.", "address_line_2": "", "city": "Gotham City", "postal_code": 8999, } # This building will have a lot less data to identify it. bs2_data = { "pm_property_id": 1243, "custom_id_1": 1243, "address_line_1": "555 Database LN.", "city": "Gotham City", "postal_code": 8999, } new_import_file = ImportFile.objects.create(import_record=self.import_record, mapping_done=True) snapshot = util.make_fake_snapshot(self.import_file, bs1_data, ASSESSED_BS, is_canon=True) snapshot.super_organization = self.fake_org snapshot.save() snapshot = util.make_fake_snapshot(new_import_file, bs2_data, PORTFOLIO_BS) snapshot.super_organization = self.fake_org snapshot.save() tasks.match_buildings(new_import_file.pk, self.fake_user.pk) # make one more building snapshot in a different org fake_org_2 = Organization.objects.create() snapshot = util.make_fake_snapshot(self.import_file, bs1_data, ASSESSED_BS, is_canon=True) snapshot.super_organization = fake_org_2 snapshot.save() self.assertGreater(BuildingSnapshot.objects.filter(super_organization=self.fake_org).count(), 0) tasks.delete_organization_buildings(self.fake_org.pk) self.assertEqual(BuildingSnapshot.objects.filter(super_organization=self.fake_org).count(), 0) self.assertGreater(BuildingSnapshot.objects.filter(super_organization=fake_org_2).count(), 0) # test that the CanonicalBuildings are deleted self.assertEqual( CanonicalBuilding.objects.filter(canonical_snapshot__super_organization=self.fake_org).count(), 0 ) # test that other orgs CanonicalBuildings are not deleted self.assertGreater( CanonicalBuilding.objects.filter(canonical_snapshot__super_organization=fake_org_2).count(), 0 )
def _add_additional_fake_buildings(self):
    """DRY up some test code below where many BSes are needed."""
    # Three more composite snapshots, all on import_file1.
    make = util.make_fake_snapshot
    self.bs3 = make(self.import_file1, self.bs1_data,
                    bs_type=seed_models.COMPOSITE_BS)
    self.bs4 = make(self.import_file1, self.bs2_data,
                    bs_type=seed_models.COMPOSITE_BS)
    self.bs5 = make(self.import_file1, self.bs2_data,
                    bs_type=seed_models.COMPOSITE_BS)
def test_reset_mapped_w_previous_matches(self): """Ensure we ignore mapped buildings with children BSes.""" # Make the raw BSes for us to make new mappings from for x in range(10): test_util.make_fake_snapshot(self.import_file, {}, ASSESSED_RAW) # Simulate existing mapped BSes, which should be deleted. for x in range(10): test_util.make_fake_snapshot(self.import_file, {}, ASSESSED_BS) # Setup our exceptional case: here the first BS has a child, COMPOSITE. child = test_util.make_fake_snapshot(None, {}, COMPOSITE_BS) first = BuildingSnapshot.objects.filter( import_file=self.import_file )[:1].get() # We add a child to our first BuildingSnapshot, which should exclude it # from deletion and thus it should remain after a remapping is issued. first.children.add(child) # Here we mark all of the mapped building snapshots. These should all # get deleted when we remap from the raw snapshots after the call to # to this function. for item in BuildingSnapshot.objects.filter(source_type=ASSESSED_BS): item.property_name = 'Touched' item.save() # Ensure we have all 10 mapped BuildingSnapshots saved. self.assertEqual( BuildingSnapshot.objects.filter(property_name='Touched').count(), 10 ) self.client.post( reverse_lazy("seed:remap_buildings"), data=json.dumps({ 'file_id': self.import_file.pk, }), content_type='application/json' ) # Assert that only one remains that was touched, and that it has the # child. self.assertEqual( BuildingSnapshot.objects.filter(property_name='Touched').count(), 1 ) self.assertEqual( BuildingSnapshot.objects.get( property_name='Touched' ).children.all()[0], child )
def test_match_no_matches(self):
    """When a canonical exists, but doesn't match, we create a new one."""
    first_record = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    second_record = {
        'pm_property_id': 9999,
        'tax_lot_id': '1231',
        'property_name': 'A Place',
        # NOTE(review): octal literal (decimal 37376) — probably meant as an
        # opaque ID with leading zeros; preserved as-is.
        'custom_id_1': 0o000111000,
        'address_line_1': '44444 Hmmm Ave.',
        'address_line_2': 'Apt 4',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    canonical = util.make_fake_snapshot(
        self.import_file, first_record, ASSESSED_BS, is_canon=True
    )
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    unmatched = util.make_fake_snapshot(
        new_import_file, second_record, PORTFOLIO_BS, org=self.fake_org
    )
    self.assertEqual(BuildingSnapshot.objects.count(), 2)

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    # No merged snapshot was created.
    self.assertEqual(BuildingSnapshot.objects.count(), 2)

    latest = BuildingSnapshot.objects.get(pk=unmatched.pk)
    # But the unmatched snapshot did get its own, distinct canonical
    # building — and no match confidence.
    self.assertNotEqual(latest.canonical_building, None)
    self.assertNotEqual(
        latest.canonical_building.pk,
        canonical.canonical_building.pk
    )
    self.assertEqual(latest.confidence, None)
def test_match_no_matches(self):
    """When a canonical exists, but doesn't match, we create a new one."""
    bs1_data = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # Deliberately shares nothing useful with bs1_data.
    bs2_data = {
        'pm_property_id': 9999,
        'tax_lot_id': '1231',
        'property_name': 'A Place',
        # NOTE(review): octal literal (decimal 37376) — presumably an opaque
        # ID with leading zeros; confirm intent.
        'custom_id_1': 0o000111000,
        'address_line_1': '44444 Hmmm Ave.',
        'address_line_2': 'Apt 4',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    snapshot = util.make_fake_snapshot(
        self.import_file, bs1_data, ASSESSED_BS, is_canon=True
    )
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    new_snapshot = util.make_fake_snapshot(
        new_import_file, bs2_data, PORTFOLIO_BS, org=self.fake_org
    )

    self.assertEqual(BuildingSnapshot.objects.all().count(), 2)

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    # E.g. we didn't create a match
    self.assertEqual(BuildingSnapshot.objects.all().count(), 2)

    latest_snapshot = BuildingSnapshot.objects.get(pk=new_snapshot.pk)

    # But we did create another canonical building for the unmatched bs.
    self.assertNotEqual(latest_snapshot.canonical_building, None)
    self.assertNotEqual(
        latest_snapshot.canonical_building.pk,
        snapshot.canonical_building.pk
    )
    self.assertEqual(latest_snapshot.confidence, None)
def test_update_building_with_dates(self):
    """update_building with a date field should produce a new snapshot."""
    building_kwargs = {u'extra_data': {}}
    building = util.make_fake_snapshot(
        self.import_file2,
        building_kwargs,
        seed_models.COMPOSITE_BS,
        is_canon=True,
    )
    building.super_organization = self.fake_org
    building.save()

    # Re-fetch so we operate on a fresh DB-backed instance.
    building = seed_models.BuildingSnapshot.objects.filter(
        pk=building.pk
    ).first()

    building_kwargs['year_ending'] = '12/30/2015'
    new_snap = seed_models.update_building(
        building, building_kwargs, self.fake_user
    )

    # The update must create a new snapshot rather than mutate in place.
    self.assertNotEqual(new_snap.pk, building.pk)
def test_match_no_canonical_buildings(self):
    """If no canonicals exist, create, but no new BSes."""
    record = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # Note: no Canonical Building is created for this snapshot.
    snapshot = util.make_fake_snapshot(
        self.import_file, record, ASSESSED_BS, is_canon=False
    )
    self.import_file.mapping_done = True
    self.import_file.save()

    self.assertEqual(snapshot.canonical_building, None)
    self.assertEqual(BuildingSnapshot.objects.count(), 1)

    tasks.match_buildings(self.import_file.pk)

    # Matching attaches a canonical building without adding snapshots.
    refreshed = BuildingSnapshot.objects.get(pk=snapshot.pk)
    self.assertNotEqual(refreshed.canonical_building, None)
    self.assertEqual(BuildingSnapshot.objects.count(), 1)
def test_match_no_canonical_buildings(self):
    """If no canonicals exist, create, but no new BSes."""
    bs1_data = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # Note: no Canonical Building is created for this snapshot.
    snapshot = util.make_fake_snapshot(
        self.import_file, bs1_data, ASSESSED_BS, is_canon=False,
        org=self.fake_org
    )
    self.import_file.mapping_done = True
    self.import_file.save()

    # Preconditions: no canonical, exactly one snapshot.
    self.assertEqual(snapshot.canonical_building, None)
    self.assertEqual(BuildingSnapshot.objects.all().count(), 1)

    tasks.match_buildings(self.import_file.pk, self.fake_user.pk)

    # Matching attached a canonical building without creating snapshots.
    refreshed_snapshot = BuildingSnapshot.objects.get(pk=snapshot.pk)
    self.assertNotEqual(refreshed_snapshot.canonical_building, None)
    self.assertEqual(BuildingSnapshot.objects.all().count(), 1)
def test_match_no_canonical_buildings(self):
    """If no canonicals exist, create, but no new BSes."""
    fields = {
        "pm_property_id": 1243,
        "tax_lot_id": "435/422",
        "property_name": "Greenfield Complex",
        "custom_id_1": 1243,
        "address_line_1": "555 Database LN.",
        "address_line_2": "",
        "city": "Gotham City",
        "postal_code": 8999,
    }
    # Note: no Canonical Building is created for this snapshot.
    orphan = util.make_fake_snapshot(
        self.import_file, fields, ASSESSED_BS, is_canon=False,
        org=self.fake_org
    )
    self.import_file.mapping_done = True
    self.import_file.save()

    self.assertEqual(orphan.canonical_building, None)
    self.assertEqual(BuildingSnapshot.objects.count(), 1)

    tasks.match_buildings(self.import_file.pk, self.fake_user.pk)

    # Matching attaches a canonical building without adding snapshots.
    refreshed = BuildingSnapshot.objects.get(pk=orphan.pk)
    self.assertNotEqual(refreshed.canonical_building, None)
    self.assertEqual(BuildingSnapshot.objects.count(), 1)
def test_unmatch_snapshot_tree_retains_canonical_snapshot(self):
    """Unmatching a snapshot from a merge tree keeps its canonical.

    After matching bs2, bs3 and bs4 into bs1's tree, unmatching bs4
    deletes the merged tip snapshot but leaves bs4's CanonicalBuilding
    active and still pointing at bs4.
    """
    self.bs3 = util.make_fake_snapshot(
        self.import_file1,
        self.bs1_data,
        bs_type=seed_models.COMPOSITE_BS,
        is_canon=True,
    )
    self.bs4 = util.make_fake_snapshot(
        self.import_file1,
        self.bs2_data,
        bs_type=seed_models.COMPOSITE_BS,
        is_canon=True,
    )

    # simulate matching bs1 and bs2 to have a child of bs3
    seed_models.save_snapshot_match(self.bs2.pk, self.bs1.tip.pk)
    seed_models.save_snapshot_match(self.bs3.pk, self.bs1.tip.pk)
    seed_models.save_snapshot_match(self.bs4.pk, self.bs1.tip.pk)
    tip_pk = self.bs1.tip.pk

    # simulating the following tree:
    # b1 b2
    #  \ /
    #   b3 b4
    #    \ /
    #     b5

    # unmatch bs3 from bs4
    seed_models.unmatch_snapshot_tree(self.bs4.pk)

    # tip should be deleted
    self.assertFalse(
        seed_models.BuildingSnapshot.objects.filter(pk=tip_pk).exists())

    canon_bs4 = seed_models.CanonicalBuilding.objects.get(
        pk=self.bs4.canonical_building_id)

    # bs4's canon should still be active
    self.assertTrue(canon_bs4.active)

    # and it should still have its canonical_snapshot
    self.assertEqual(canon_bs4.canonical_snapshot, self.bs4)
def test_remap_buildings(self):
    """Test good case for resetting mapping."""
    # Raw BSes should survive the remap.
    for _ in range(10):
        test_util.make_fake_snapshot(self.import_file, {}, ASSESSED_RAW)
    # "Mapped" BSes should be removed by it.
    for _ in range(10):
        test_util.make_fake_snapshot(self.import_file, {}, ASSESSED_BS)

    # Pretend mapping already finished, both on the model...
    self.import_file.mapping_done = True
    self.import_file.mapping_progress = 100
    self.import_file.save()
    # ...and in the progress cache.
    cache_key = decorators.get_prog_key('map_data', self.import_file.pk)
    cache.set(cache_key, 100)

    response = self.client.post(
        reverse_lazy("seed:remap_buildings"),
        data=json.dumps({'file_id': self.import_file.pk}),
        content_type='application/json',
    )
    self.assertEqual(response.status_code, 200)

    # The mapped snapshots are gone; the 10 raw ones remain.
    mapped = BuildingSnapshot.objects.filter(
        import_file=self.import_file,
        source_type__in=(ASSESSED_BS, PORTFOLIO_BS),
    )
    self.assertEqual(mapped.count(), 0)
    self.assertEqual(
        BuildingSnapshot.objects.filter(
            import_file=self.import_file
        ).count(),
        10
    )
    # Progress cache is reset too.
    self.assertEqual(cache.get(cache_key), 0)
def test_is_same_snapshot(self):
    """Test to check if two snapshots are duplicates"""
    base_fields = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 12,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    first = util.make_fake_snapshot(
        self.import_file, base_fields, ASSESSED_BS, is_canon=True,
        org=self.fake_org
    )
    self.assertTrue(
        tasks.is_same_snapshot(first, first),
        "Matching a snapshot to itself should return True"
    )

    # A second snapshot: 'Garfield Complex' rather than 'Greenfield Complex'.
    changed_fields = dict(base_fields, property_name='Garfield Complex')
    second = util.make_fake_snapshot(
        self.import_file, changed_fields, ASSESSED_BS, is_canon=True,
        org=self.fake_org
    )
    self.assertFalse(
        tasks.is_same_snapshot(first, second),
        "Matching a snapshot to a different snapshot should return False"
    )
def test_separates_system_and_possible_match_types(self):
    """We save possible matches separately."""
    bs1_data = {
        'pm_property_id': 123,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 NorthWest Databaseer Lane.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # This building will have a lot less data to identify it.
    bs2_data = {
        'pm_property_id': 1243,
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True)

    util.make_fake_snapshot(
        self.import_file, bs1_data, ASSESSED_BS, is_canon=True,
        org=self.fake_org
    )
    util.make_fake_snapshot(
        new_import_file, bs2_data, PORTFOLIO_BS, org=self.fake_org
    )

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    # The match is recorded as a system match, not a possible match.
    self.assertEqual(
        BuildingSnapshot.objects.filter(match_type=POSSIBLE_MATCH).count(),
        0)
    self.assertEqual(
        BuildingSnapshot.objects.filter(match_type=SYSTEM_MATCH).count(),
        1)
def test_handle_id_matches_duplicate_data(self):
    """
    Test for handle_id_matches behavior when matching duplicate data
    """
    record = {
        'pm_property_id': "2360",
        'tax_lot_id': '476/460',
        'property_name': 'Garfield Complex',
        'custom_id_1': "89",
        'address_line_1': '12975 Database LN.',
        'address_line_2': '',
        'city': 'Cartoon City',
        'postal_code': "54321",
    }
    # Mapped AS snapshot attached to a canonical building.
    util.make_fake_snapshot(
        self.import_file, record, ASSESSED_BS, is_canon=True,
        org=self.fake_org
    )

    # Different file, but same ImportRecord; should be an identical match.
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    # A third file carrying the exact same data: handle_id_matches must
    # refuse it as a duplicate.
    duplicate_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    duplicate_snapshot = util.make_fake_snapshot(
        duplicate_import_file, record, PORTFOLIO_BS, org=self.fake_org
    )
    with self.assertRaises(tasks.DuplicateDataError):
        tasks.handle_id_matches(
            duplicate_snapshot, duplicate_import_file, self.fake_user.pk
        )
def test_handle_id_matches_duplicate_data(self):
    """
    Test for handle_id_matches behavior when matching duplicate data
    """
    bs_data = {
        'pm_property_id': "2360",
        'tax_lot_id': '476/460',
        'property_name': 'Garfield Complex',
        'custom_id_1': "89",
        'address_line_1': '12975 Database LN.',
        'address_line_2': '',
        'city': 'Cartoon City',
        'postal_code': "54321",
    }
    # Setup mapped AS snapshot.
    util.make_fake_snapshot(
        self.import_file, bs_data, ASSESSED_BS, is_canon=True,
        org=self.fake_org
    )
    # Different file, but same ImportRecord.
    # Setup mapped PM snapshot.
    # Should be an identical match.
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True)

    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    # A third file with the exact same data should be rejected as a
    # duplicate by handle_id_matches.
    duplicate_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True)
    new_snapshot = util.make_fake_snapshot(
        duplicate_import_file, bs_data, PORTFOLIO_BS, org=self.fake_org
    )

    self.assertRaises(tasks.DuplicateDataError, tasks.handle_id_matches,
                      new_snapshot, duplicate_import_file,
                      self.fake_user.pk)
def test_separates_system_and_possible_match_types(self):
    """We save possible matches separately."""
    detailed_record = {
        'pm_property_id': 123,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 NorthWest Databaseer Lane.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    # This building will have a lot less data to identify it.
    sparse_record = {
        'pm_property_id': 1243,
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    util.make_fake_snapshot(
        self.import_file, detailed_record, ASSESSED_BS, is_canon=True
    )
    util.make_fake_snapshot(new_import_file, sparse_record, PORTFOLIO_BS)

    tasks.match_buildings(new_import_file.pk)

    # Only a possible match is recorded, not a system match.
    possible = BuildingSnapshot.objects.filter(match_type=POSSIBLE_MATCH)
    system = BuildingSnapshot.objects.filter(match_type=SYSTEM_MATCH)
    self.assertEqual(possible.count(), 1)
    self.assertEqual(system.count(), 0)
def test_match_duplicate_buildings(self):
    """
    Test for behavior when trying to match duplicate building data
    """
    record = {
        'pm_property_id': "8450",
        'tax_lot_id': '143/292',
        'property_name': 'Greenfield Complex',
        'custom_id_1': "99",
        'address_line_1': '93754 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': "8999",
    }
    import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    # Mapped PM snapshot attached to a canonical building.
    util.make_fake_snapshot(
        import_file, record, PORTFOLIO_BS, is_canon=True, org=self.fake_org
    )
    # Same data in a second file of the same ImportRecord: a duplicate.
    new_import_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    util.make_fake_snapshot(
        new_import_file, record, PORTFOLIO_BS, org=self.fake_org
    )

    tasks.match_buildings(import_file.pk, self.fake_user.pk)
    tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

    # The duplicate is not merged, so both snapshots remain.
    self.assertEqual(BuildingSnapshot.objects.count(), 2)
def test_unmatch_snapshot_tree_retains_canonical_snapshot(self):
    """Unmatching a snapshot from a merge tree keeps its canonical.

    After matching bs2, bs3 and bs4 into bs1's tree, unmatching bs4
    deletes the merged tip snapshot but leaves bs4's CanonicalBuilding
    active and still pointing at bs4.
    """
    self.bs3 = util.make_fake_snapshot(
        self.import_file1,
        self.bs1_data,
        bs_type=seed_models.COMPOSITE_BS,
        is_canon=True,
    )
    self.bs4 = util.make_fake_snapshot(
        self.import_file1,
        self.bs2_data,
        bs_type=seed_models.COMPOSITE_BS,
        is_canon=True,
    )

    # simulate matching bs1 and bs2 to have a child of bs3
    seed_models.save_snapshot_match(self.bs2.pk, self.bs1.tip.pk)
    seed_models.save_snapshot_match(self.bs3.pk, self.bs1.tip.pk)
    seed_models.save_snapshot_match(self.bs4.pk, self.bs1.tip.pk)
    tip_pk = self.bs1.tip.pk

    # simulating the following tree:
    # b1 b2
    #  \ /
    #   b3 b4
    #    \ /
    #     b5

    # unmatch bs3 from bs4
    seed_models.unmatch_snapshot_tree(self.bs4.pk)

    # tip should be deleted
    self.assertFalse(
        seed_models.BuildingSnapshot.objects.filter(pk=tip_pk).exists())

    canon_bs4 = seed_models.CanonicalBuilding.objects.get(
        pk=self.bs4.canonical_building_id)

    # bs4's canon should still be active
    self.assertTrue(canon_bs4.active)

    # and it should still have its canonical_snapshot
    self.assertEqual(canon_bs4.canonical_snapshot, self.bs4)
def test_match_buildings(self):
    """Good case for testing our matching system."""
    shared_fields = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }

    # Canonical assessed snapshot from the primary import file.
    assessed = util.make_fake_snapshot(
        self.import_file, shared_fields, ASSESSED_BS, is_canon=True
    )

    # Different file, but same ImportRecord; the PM data is an
    # identical match for the assessed snapshot above.
    pm_file = ImportFile.objects.create(
        import_record=self.import_record, mapping_done=True
    )
    portfolio = util.make_fake_snapshot(pm_file, shared_fields, PORTFOLIO_BS)

    tasks.match_buildings(pm_file.pk)

    merged = BuildingSnapshot.objects.all()[0]
    self.assertEqual(merged.property_name, assessed.property_name)
    self.assertEqual(merged.property_name, portfolio.property_name)
    self.assertEqual(merged.confidence, 1.0)
    self.assertEqual(
        sorted([parent.pk for parent in merged.parents.all()]),
        sorted([portfolio.pk, assessed.pk])
    )
def test_match_duplicate_buildings(self):
    """ Test for behavior when trying to match duplicate building data """
    row = {
        'pm_property_id': "8450",
        'tax_lot_id': '143/292',
        'property_name': 'Greenfield Complex',
        'custom_id_1': "99",
        'address_line_1': '93754 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': "8999",
    }

    # First import: canonical PM snapshot.
    original_file = ImportFile.objects.create(
        import_record=self.import_record,
        mapping_done=True)
    original = util.make_fake_snapshot(
        original_file, row, PORTFOLIO_BS, is_canon=True, org=self.fake_org)

    # Second import on the same ImportRecord with the exact same data —
    # a duplicate, not a match.
    duplicate_file = ImportFile.objects.create(
        import_record=self.import_record,
        mapping_done=True)
    duplicate = util.make_fake_snapshot(
        duplicate_file, row, PORTFOLIO_BS, org=self.fake_org)

    tasks.match_buildings(original_file.pk, self.fake_user.pk)
    tasks.match_buildings(duplicate_file.pk, self.fake_user.pk)

    # Duplicates are skipped: only the two source snapshots exist.
    self.assertEqual(len(BuildingSnapshot.objects.all()), 2)
def test_no_unmatched_buildings(self):
    """Make sure we shortcut out if there isn't unmatched data."""
    canon_fields = {
        "pm_property_id": 1243,
        "tax_lot_id": "435/422",
        "property_name": "Greenfield Complex",
        "custom_id_1": 1243,
        "address_line_1": "555 Database LN.",
        "address_line_2": "",
        "city": "Gotham City",
        "postal_code": 8999,
    }

    self.import_file.mapping_done = True
    self.import_file.save()

    # The only snapshot is already canonical, so there is nothing to match.
    util.make_fake_snapshot(self.import_file, canon_fields, ASSESSED_BS, is_canon=True)

    self.assertEqual(BuildingSnapshot.objects.all().count(), 1)
    tasks.match_buildings(self.import_file.pk, self.fake_user.pk)
    # No new snapshot was created by the matcher.
    self.assertEqual(BuildingSnapshot.objects.all().count(), 1)
def test_is_same_snapshot(self):
    """Test to check if two snapshots are duplicates"""
    base_fields = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 12,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }
    greenfield = util.make_fake_snapshot(
        self.import_file, base_fields, ASSESSED_BS, is_canon=True,
        org=self.fake_org
    )

    # A snapshot is always its own duplicate.
    self.assertTrue(
        tasks.is_same_snapshot(greenfield, greenfield),
        "Matching a snapshot to itself should return True"
    )

    # Same data except the name: Garfield Complex rather than
    # Greenfield Complex, so no longer the same snapshot.
    garfield_fields = dict(base_fields, property_name='Garfield Complex')
    garfield = util.make_fake_snapshot(
        self.import_file, garfield_fields, ASSESSED_BS, is_canon=True,
        org=self.fake_org
    )

    self.assertFalse(
        tasks.is_same_snapshot(greenfield, garfield),
        "Matching a snapshot to a different snapshot should return False"
    )
def test_no_unmatched_buildings(self):
    """Make sure we shortcut out if there isn't unmatched data."""
    only_canonical = {
        'pm_property_id': 1243,
        'tax_lot_id': '435/422',
        'property_name': 'Greenfield Complex',
        'custom_id_1': 1243,
        'address_line_1': '555 Database LN.',
        'address_line_2': '',
        'city': 'Gotham City',
        'postal_code': 8999,
    }

    self.import_file.mapping_done = True
    self.import_file.save()

    # Every snapshot in the file is canonical -> nothing left to match.
    util.make_fake_snapshot(
        self.import_file, only_canonical, ASSESSED_BS, is_canon=True
    )

    self.assertEqual(BuildingSnapshot.objects.all().count(), 1)
    tasks.match_buildings(self.import_file.pk)
    # The matcher short-circuited without creating anything.
    self.assertEqual(BuildingSnapshot.objects.all().count(), 1)
def test_update_building(self):
    """Good case for updating a building.

    Creates a canonical COMPOSITE snapshot attached to a project, simulates
    the hydrated payload the JS client sends, and verifies that
    ``update_building`` produces a new child snapshot with updated values,
    correct source pointers, project membership, and extra-data columns.
    """
    fake_building_extra = {
        u'Assessor Data 1': u'2342342',
        u'Assessor Data 2': u'245646',
    }
    fake_building_kwargs = {
        u'property_name': u'Place pl.',
        u'address_line_1': u'332 Place pl.',
        u'owner': u'Duke of Earl',
        u'postal_code': u'68674',
    }
    fake_building = util.make_fake_snapshot(
        self.import_file2,
        fake_building_kwargs,
        seed_models.COMPOSITE_BS,
        is_canon=True
    )
    fake_building.super_organization = self.fake_org
    fake_building.extra_data = fake_building_extra
    fake_building.save()

    # add building to a project
    project = seed_models.Project.objects.create(
        name='test project',
        owner=self.fake_user,
        super_organization=self.fake_org,
    )
    seed_models.ProjectBuilding.objects.create(
        building_snapshot=fake_building, project=project
    )

    # Re-fetch the snapshot so we work with a fresh DB-backed instance.
    fake_building_pk = fake_building.pk
    fake_building = seed_models.BuildingSnapshot.objects.filter(pk=fake_building_pk).first()

    fake_building_kwargs[u'property_name_source'] = fake_building.pk
    fake_building_kwargs[u'address_line_1_source'] = fake_building.pk
    fake_building_kwargs[u'owner_source'] = fake_building.pk
    seed_models.set_initial_sources(fake_building)
    # Hydrated JS version will have this, we'll query off it.
    fake_building_kwargs[u'pk'] = fake_building.pk
    # "update" one of the field values.
    fake_building_kwargs[u'import_file'] = self.import_file1
    fake_building_kwargs[u'postal_code'] = u'99999'
    fake_building_extra[u'Assessor Data 1'] = u'NUP.'
    # Need to simulate JS hydrated payload here.
    fake_building_kwargs[u'extra_data'] = fake_building_extra
    new_snap = seed_models.update_building(
        fake_building, fake_building_kwargs, self.fake_user
    )

    # Make sure new building is also in project.
    pbs = seed_models.ProjectBuilding.objects.filter(
        building_snapshot=new_snap,
    )
    self.assertEqual(pbs.count(), 1)

    # Make sure our value was updated.
    self.assertEqual(
        new_snap.postal_code, fake_building_kwargs[u'postal_code']
    )
    # update_building creates a NEW snapshot rather than mutating in place.
    self.assertNotEqual(new_snap.pk, fake_building.pk)

    # Make sure that the extra data were saved, with orig sources.
    self.assertDictEqual(
        new_snap.extra_data, fake_building_extra
    )

    # Make sure we have the same orgs.
    self.assertEqual(
        new_snap.super_organization, fake_building.super_organization
    )
    self.assertEqual(new_snap.match_type, fake_building.match_type)
    # Make sure we're set as the source for updated info!!!
    self.assertEqual(new_snap, new_snap.postal_code_source)

    # Make sure our sources from parent get set properly.
    for attr in ['property_name', 'address_line_1', 'owner']:
        self.assertEqual(
            getattr(new_snap, '{0}_source'.format(attr)).pk,
            fake_building.pk
        )

    # Make sure our parent is set.
    self.assertEqual(new_snap.parents.all()[0].pk, fake_building.pk)

    # Make sure we captured all of the extra_data column names after update
    data_columns = seed_models.Column.objects.filter(
        organization=fake_building.super_organization,
        is_extra_data=True
    )
    self.assertEqual(data_columns.count(), len(fake_building_extra))
    self.assertListEqual(
        sorted([d.column_name for d in data_columns]),
        sorted(fake_building_extra.keys())
    )
def test_delete_organization_buildings(self): """tests the delete builings for an org""" # start with the normal use case bs1_data = { 'pm_property_id': 123, 'tax_lot_id': '435/422', 'property_name': 'Greenfield Complex', 'custom_id_1': 1243, 'address_line_1': '555 NorthWest Databaseer Lane.', 'address_line_2': '', 'city': 'Gotham City', 'postal_code': 8999, } # This building will have a lot less data to identify it. bs2_data = { 'pm_property_id': 1243, 'custom_id_1': 1243, 'address_line_1': '555 Database LN.', 'city': 'Gotham City', 'postal_code': 8999, } new_import_file = ImportFile.objects.create( import_record=self.import_record, mapping_done=True) snapshot = util.make_fake_snapshot(self.import_file, bs1_data, ASSESSED_BS, is_canon=True) snapshot.super_organization = self.fake_org snapshot.save() snapshot = util.make_fake_snapshot(new_import_file, bs2_data, PORTFOLIO_BS) snapshot.super_organization = self.fake_org snapshot.save() tasks.match_buildings(new_import_file.pk, self.fake_user.pk) # make one more building snapshot in a different org fake_org_2 = Organization.objects.create() snapshot = util.make_fake_snapshot(self.import_file, bs1_data, ASSESSED_BS, is_canon=True) snapshot.super_organization = fake_org_2 snapshot.save() self.assertGreater( BuildingSnapshot.objects.filter( super_organization=self.fake_org).count(), 0) tasks.delete_organization_buildings(self.fake_org.pk) self.assertEqual( BuildingSnapshot.objects.filter( super_organization=self.fake_org).count(), 0) self.assertGreater( BuildingSnapshot.objects.filter( super_organization=fake_org_2).count(), 0) # test that the CanonicalBuildings are deleted self.assertEqual( CanonicalBuilding.objects.filter( canonical_snapshot__super_organization=self.fake_org).count(), 0) # test that other orgs CanonicalBuildings are not deleted self.assertGreater( CanonicalBuilding.objects.filter( canonical_snapshot__super_organization=fake_org_2).count(), 0)
# NOTE(review): fragment — the enclosing test's `def` line and the start of
# the bs1_data dict are outside this chunk; only the tail is visible here.
'postal_code': 8999,
}
bs2_data = {
    'pm_property_id': 9999,
    'tax_lot_id': '1231',
    'property_name': 'A Place',
    'custom_id_1': 0000111000,
    'address_line_1': '44444 Hmmm Ave.',
    'address_line_2': 'Apt 4',
    'city': 'Gotham City',
    'postal_code': 8999,
}
# Canonical assessed snapshot that should NOT match bs2_data.
snapshot = util.make_fake_snapshot(
    self.import_file, bs1_data, ASSESSED_BS, is_canon=True
)
new_import_file = ImportFile.objects.create(
    import_record=self.import_record,
    mapping_done=True
)
new_snapshot = util.make_fake_snapshot(
    new_import_file, bs2_data, PORTFOLIO_BS
)

self.assertEqual(BuildingSnapshot.objects.all().count(), 2)

tasks.match_buildings(new_import_file.pk)

# E.g. we didn't create a match
self.assertEqual(BuildingSnapshot.objects.all().count(), 2)
def test_update_building(self):
    """Good case for updating a building.

    Builds a canonical COMPOSITE snapshot, simulates the hydrated payload
    the JS client sends, and checks that ``update_building`` creates a new
    child snapshot with the updated values and correct source pointers.
    """
    fake_building_extra = {
        u'Assessor Data 1': u'2342342',
        u'Assessor Data 2': u'245646',
    }
    fake_building_kwargs = {
        u'property_name': u'Place pl.',
        u'address_line_1': u'332 Place pl.',
        u'owner': u'Duke of Earl',
        u'postal_code': u'68674',
    }
    fake_building = util.make_fake_snapshot(
        self.import_file2,
        fake_building_kwargs,
        seed_models.COMPOSITE_BS,
        is_canon=True
    )
    # FIX: was `fake_building.super_org`, which only set a stray attribute;
    # the model field (asserted on below, and used by the other copies of
    # this test in the file) is `super_organization`.
    fake_building.super_organization = self.fake_org
    fake_building.extra_data = fake_building_extra
    fake_building.save()

    fake_building_kwargs[u'property_name_source'] = fake_building.pk
    fake_building_kwargs[u'address_line_1_source'] = fake_building.pk
    fake_building_kwargs[u'owner_source'] = fake_building.pk
    seed_models.set_initial_sources(fake_building)
    # Hydrated JS version will have this, we'll query off it.
    fake_building_kwargs[u'pk'] = fake_building.pk
    # "update" one of the field values.
    fake_building_kwargs[u'import_file'] = self.import_file1
    fake_building_kwargs[u'postal_code'] = u'99999'
    fake_building_extra[u'Assessor Data 1'] = u'NUP.'
    # Need to simulate JS hydrated payload here.
    fake_building_kwargs[u'extra_data'] = fake_building_extra
    new_snap = seed_models.update_building(
        fake_building, fake_building_kwargs, self.fake_user
    )

    # Make sure our value was updated.
    self.assertEqual(
        new_snap.postal_code, fake_building_kwargs[u'postal_code']
    )
    # A new child snapshot is created rather than mutating in place.
    self.assertNotEqual(new_snap.pk, fake_building.pk)

    # Make sure that the extra data were saved, with orig sources.
    self.assertDictEqual(
        new_snap.extra_data, fake_building_extra
    )

    # Make sure we have the same orgs.
    self.assertEqual(
        new_snap.super_organization, fake_building.super_organization
    )
    self.assertEqual(new_snap.match_type, fake_building.match_type)
    # Make sure we're set as the source for updated info!!!
    self.assertEqual(new_snap, new_snap.postal_code_source)

    # Make sure our sources from parent get set properly.
    for attr in ['property_name', 'address_line_1', 'owner']:
        self.assertEqual(
            getattr(new_snap, '{0}_source'.format(attr)).pk,
            fake_building.pk
        )

    # Make sure our parent is set.
    self.assertEqual(new_snap.parents.all()[0].pk, fake_building.pk)
# NOTE(review): fragment — the enclosing test's `def` line and the start of
# the bs1_data dict are outside this chunk, and the method appears to
# continue past it (`latest_snapshot` is bound but unused here).
'postal_code': 8999,
}
bs2_data = {
    'pm_property_id': 9999,
    'tax_lot_id': '1231',
    'property_name': 'A Place',
    'custom_id_1': 0000111000,
    'address_line_1': '44444 Hmmm Ave.',
    'address_line_2': 'Apt 4',
    'city': 'Gotham City',
    'postal_code': 8999,
}
# Canonical assessed snapshot that should NOT match bs2_data.
snapshot = util.make_fake_snapshot(self.import_file, bs1_data,
                                   ASSESSED_BS, is_canon=True)
new_import_file = ImportFile.objects.create(
    import_record=self.import_record,
    mapping_done=True)
new_snapshot = util.make_fake_snapshot(new_import_file, bs2_data,
                                       PORTFOLIO_BS,
                                       org=self.fake_org)

self.assertEqual(BuildingSnapshot.objects.all().count(), 2)

tasks.match_buildings(new_import_file.pk, self.fake_user.pk)

# E.g. we didn't create a match
self.assertEqual(BuildingSnapshot.objects.all().count(), 2)

latest_snapshot = BuildingSnapshot.objects.get(pk=new_snapshot.pk)
def test_update_building(self):
    """Good case for updating a building.

    Creates a canonical COMPOSITE snapshot attached to a project, simulates
    the hydrated payload the JS client sends, and verifies that
    ``update_building`` produces a new child snapshot with updated values,
    correct source pointers, project membership, and extra-data columns.
    """
    fake_building_extra = {
        u'Assessor Data 1': u'2342342',
        u'Assessor Data 2': u'245646',
    }
    fake_building_kwargs = {
        u'property_name': u'Place pl.',
        u'address_line_1': u'332 Place pl.',
        u'owner': u'Duke of Earl',
        u'postal_code': u'68674',
    }
    fake_building = util.make_fake_snapshot(self.import_file2,
                                            fake_building_kwargs,
                                            seed_models.COMPOSITE_BS,
                                            is_canon=True)
    fake_building.super_organization = self.fake_org
    fake_building.extra_data = fake_building_extra
    fake_building.save()

    # add building to a project
    project = seed_models.Project.objects.create(
        name='test project',
        owner=self.fake_user,
        super_organization=self.fake_org,
    )
    seed_models.ProjectBuilding.objects.create(
        building_snapshot=fake_building,
        project=project)

    # Re-fetch the snapshot so we work with a fresh DB-backed instance.
    fake_building_pk = fake_building.pk
    fake_building = seed_models.BuildingSnapshot.objects.filter(
        pk=fake_building_pk).first()

    fake_building_kwargs[u'property_name_source'] = fake_building.pk
    fake_building_kwargs[u'address_line_1_source'] = fake_building.pk
    fake_building_kwargs[u'owner_source'] = fake_building.pk
    seed_models.set_initial_sources(fake_building)
    # Hydrated JS version will have this, we'll query off it.
    fake_building_kwargs[u'pk'] = fake_building.pk
    # "update" one of the field values.
    fake_building_kwargs[u'import_file'] = self.import_file1
    fake_building_kwargs[u'postal_code'] = u'99999'
    fake_building_extra[u'Assessor Data 1'] = u'NUP.'
    # Need to simulate JS hydrated payload here.
    fake_building_kwargs[u'extra_data'] = fake_building_extra
    new_snap = seed_models.update_building(fake_building,
                                           fake_building_kwargs,
                                           self.fake_user)

    # Make sure new building is also in project.
    pbs = seed_models.ProjectBuilding.objects.filter(
        building_snapshot=new_snap,
    )
    self.assertEqual(pbs.count(), 1)

    # Make sure our value was updated.
    self.assertEqual(new_snap.postal_code,
                     fake_building_kwargs[u'postal_code'])
    # update_building creates a NEW snapshot rather than mutating in place.
    self.assertNotEqual(new_snap.pk, fake_building.pk)

    # Make sure that the extra data were saved, with orig sources.
    self.assertDictEqual(new_snap.extra_data, fake_building_extra)

    # Make sure we have the same orgs.
    self.assertEqual(new_snap.super_organization,
                     fake_building.super_organization)
    self.assertEqual(new_snap.match_type, fake_building.match_type)
    # Make sure we're set as the source for updated info!!!
    self.assertEqual(new_snap, new_snap.postal_code_source)

    # Make sure our sources from parent get set properly.
    for attr in ['property_name', 'address_line_1', 'owner']:
        self.assertEqual(
            getattr(new_snap, '{0}_source'.format(attr)).pk,
            fake_building.pk)

    # Make sure our parent is set.
    self.assertEqual(new_snap.parents.all()[0].pk, fake_building.pk)

    # Make sure we captured all of the extra_data column names after update
    data_columns = seed_models.Column.objects.filter(
        organization=fake_building.super_organization,
        is_extra_data=True)
    self.assertEqual(data_columns.count(), len(fake_building_extra))
    self.assertListEqual(sorted([d.column_name for d in data_columns]),
                         sorted(fake_building_extra.keys()))