def test_no_matches_if_all_matching_criteria_is_None(self):
    """
    Default matching criteria for PropertyStates are:
    - address_line_1 (substituted by normalized_address)
    - ubid
    - pm_property_id
    - custom_id_1
    and all are set to None, so no two states should ever match.
    """
    state_details = {
        'import_file_id': self.import_file.id,
        'data_state': DATA_STATE_MAPPING,
        'no_default_data': False,
    }

    # Two -States with no matching criteria populated at all.
    self.property_state_factory.get_property_state(**state_details)
    state_details['city'] = 'Denver'
    self.property_state_factory.get_property_state(**state_details)

    # Matching only runs once the import file reports mapping complete.
    self.import_file.mapping_done = True
    self.import_file.save()
    geocode_and_match_buildings_task(self.import_file.id)

    # No merges occurred: 2 Property, 2 PropertyView, 2 PropertyState
    self.assertEqual(Property.objects.count(), 2)
    self.assertEqual(PropertyView.objects.count(), 2)
    self.assertEqual(PropertyState.objects.count(), 2)
def setUp(self):
    """Import two example files and run save/map/geocode-and-match on each.

    The first upload uses ``self.filename`` (or the default example file);
    the second uses ``self.filename_2`` (or the "small changes" variant) so
    downstream tests can exercise merging across the two imports.
    """
    super().setUp()
    # for now just import some test data. I'd rather create fake data... next time.
    filename = getattr(self, 'filename', 'example-data-properties.xlsx')
    self.fake_mappings = copy.copy(FAKE_MAPPINGS['portfolio'])
    selfvars = self.set_up(ASSESSED_RAW)
    self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars

    filepath = osp.join(osp.dirname(__file__), '..', 'data', filename)
    # Use a context manager so the file handle is closed (previously leaked).
    with open(filepath, 'rb') as f:
        self.import_file.file = SimpleUploadedFile(name=filename, content=f.read())
    self.import_file.save()

    tasks.save_raw_data(self.import_file.pk)
    Column.create_mappings(self.fake_mappings, self.org, self.user, self.import_file.id)
    tasks.map_data(self.import_file.pk)
    tasks.geocode_and_match_buildings_task(self.import_file.id)

    # import second file that is currently the same, but should be slightly different
    # BUG FIX: this previously looked up the 'filename' attribute again, so any
    # subclass overriding `filename` imported the identical file twice.
    filename_2 = getattr(self, 'filename_2', 'example-data-properties-small-changes.xlsx')
    _, self.import_file_2 = self.create_import_file(self.user, self.org, self.cycle)

    filepath = osp.join(osp.dirname(__file__), '..', 'data', filename_2)
    with open(filepath, 'rb') as f:
        self.import_file_2.file = SimpleUploadedFile(name=filename_2, content=f.read())
    self.import_file_2.save()

    tasks.save_raw_data(self.import_file_2.pk)
    tasks.map_data(self.import_file_2.pk)
    tasks.geocode_and_match_buildings_task(self.import_file_2.id)
def test_import_duplicates(self):
    """Duplicate rows in one file collapse to unique states after matching."""
    # Every property row should have landed in the mapped state.
    mapped_states = PropertyState.objects.filter(
        data_state=DATA_STATE_MAPPING,
        organization=self.org,
        import_file=self.import_file,
    )
    self.assertEqual(len(mapped_states), 9)
    self.assertEqual(PropertyState.objects.filter(pm_property_id='2264').count(), 7)

    # Hashing the 9 states should reveal only 4 distinct payloads.
    state_hashes = [tasks.hash_state_object(state) for state in mapped_states]
    self.assertEqual(len(state_hashes), 9)
    self.assertEqual(len(set(state_hashes)), 4)

    unique_states, _ = match.filter_duplicate_states(mapped_states)
    self.assertEqual(len(unique_states), 4)

    tasks.geocode_and_match_buildings_task(self.import_file.id)

    self.assertEqual(Property.objects.count(), 3)
    self.assertEqual(PropertyView.objects.count(), 3)

    # The 7 copies of pm_property_id 2264 collapse into a single view.
    views_2264 = PropertyView.objects.filter(state__pm_property_id='2264')
    self.assertEqual(views_2264.count(), 1)
    view = views_2264.first()
    self.assertEqual(view.state.pm_property_id, '2264')
    self.assertEqual(view.state.gross_floor_area, 12555 * ureg.feet ** 2)
    self.assertEqual(view.state.energy_score, 75)

    self.assertEqual(TaxLot.objects.count(), 0)
    self.assertEqual(self.import_file.find_unmatched_property_states().count(), 2)
    self.assertEqual(self.import_file.find_unmatched_tax_lot_states().count(), 0)
def test_duplicate_taxlots_identified(self):
    """Exact duplicate tax lot states are flagged and excluded from -Views."""
    state_details = {
        'address_line_1': '123 Match Street',
        'import_file_id': self.import_file.id,
        'data_state': DATA_STATE_MAPPING,
        'no_default_data': False,
    }

    # Two identical tax lot states -> duplicates of each other.
    self.taxlot_state_factory.get_taxlot_state(**state_details)
    self.taxlot_state_factory.get_taxlot_state(**state_details)

    # One state that neither matches nor duplicates the pair.
    state_details['address_line_1'] = '123 Different Ave'
    state_details['city'] = 'Denver'
    self.taxlot_state_factory.get_taxlot_state(**state_details)

    # Matching only runs after mapping is flagged complete.
    self.import_file.mapping_done = True
    self.import_file.save()
    geocode_and_match_buildings_task(self.import_file.id)

    # 2 TaxLot, 2 TaxLotViews, 3 TaxLotState (1 flagged to be ignored)
    self.assertEqual(TaxLot.objects.count(), 2)
    self.assertEqual(TaxLotView.objects.count(), 2)
    self.assertEqual(TaxLotState.objects.count(), 3)
    self.assertEqual(TaxLotState.objects.filter(data_state=DATA_STATE_DELETE).count(), 1)

    # The "deleted" duplicate must not back any -View.
    flagged_state = TaxLotState.objects.get(data_state=DATA_STATE_DELETE)
    self.assertNotIn(flagged_state.id, TaxLotView.objects.values_list('state_id', flat=True))
def test_duplicate_properties_identified(self):
    """A duplicate -State from a later import is flagged, not merged."""
    state_details = {
        'address_line_1': '123 Match Street',
        'import_file_id': self.import_file_1.id,
        'data_state': DATA_STATE_MAPPING,
        'no_default_data': False,
    }

    # First import establishes the canonical property.
    ps_1 = self.property_state_factory.get_property_state(**state_details)
    self.import_file_1.mapping_done = True
    self.import_file_1.save()
    geocode_and_match_buildings_task(self.import_file_1.id)

    # Second import supplies an exact duplicate of that state.
    state_details['import_file_id'] = self.import_file_2.id
    ps_2 = self.property_state_factory.get_property_state(**state_details)
    self.import_file_2.mapping_done = True
    self.import_file_2.save()
    geocode_and_match_buildings_task(self.import_file_2.id)

    # 1 Property, 1 PropertyViews, 2 PropertyStates
    self.assertEqual(Property.objects.count(), 1)
    self.assertEqual(PropertyView.objects.count(), 1)
    self.assertEqual(PropertyState.objects.count(), 2)

    # The original state backs the -View; the duplicate is marked for "deletion".
    self.assertEqual(PropertyView.objects.first().state_id, ps_1.id)
    self.assertEqual(PropertyState.objects.get(data_state=DATA_STATE_DELETE).id, ps_2.id)
def test_match_properties_rolls_up_multiple_existing_matches_in_id_order_if_they_exist(self):
    """An incoming match rolls up all pre-existing matches in id order."""
    state_details = {
        'pm_property_id': '123MatchID',
        'city': 'Golden',
        'import_file_id': self.import_file_1.id,
        'data_state': DATA_STATE_MAPPING,
        'no_default_data': False,
    }

    # Three properties in the first ImportFile, none matching each other yet.
    ps_1 = self.property_state_factory.get_property_state(**state_details)
    state_details['pm_property_id'] = '789DifferentID'
    state_details['city'] = 'Denver'
    ps_2 = self.property_state_factory.get_property_state(**state_details)
    state_details['pm_property_id'] = '1337AnotherDifferentID'
    state_details['city'] = 'Philadelphia'
    ps_3 = self.property_state_factory.get_property_state(**state_details)

    self.import_file_1.mapping_done = True
    self.import_file_1.save()
    geocode_and_match_buildings_task(self.import_file_1.id)

    # Retroactively give 3 then 2 the matching ID so roll up order is 1, 3, 2.
    for state_id in (ps_3.id, ps_2.id):
        record = PropertyState.objects.get(id=state_id)
        record.pm_property_id = '123MatchID'
        record.save()

    # None of the 3 have merged yet.
    self.assertEqual(Property.objects.count(), 3)
    self.assertEqual(PropertyState.objects.count(), 3)
    self.assertEqual(PropertyView.objects.count(), 3)

    # Import a property that identifies the first 3 as matches.
    state_details['import_file_id'] = self.import_file_2.id
    state_details['pm_property_id'] = '123MatchID'
    del state_details['city']
    ps_4 = self.property_state_factory.get_property_state(**state_details)

    self.import_file_2.mapping_done = True
    self.import_file_2.save()
    geocode_and_match_buildings_task(self.import_file_2.id)

    # A single PropertyView remains, backed by a brand-new merged -State.
    self.assertEqual(PropertyView.objects.count(), 1)
    view = PropertyView.objects.first()
    self.assertNotIn(view.state_id, [ps_1.id, ps_2.id, ps_3.id, ps_4.id])

    # The merged result carries Denver as its city (ps_2 merged last).
    self.assertEqual(view.state.city, 'Denver')

    # The corresponding audit log is a System Match.
    audit_log = PropertyAuditLog.objects.get(state_id=view.state_id)
    self.assertEqual(audit_log.name, 'System Match')
def test_match_taxlots_rolls_up_multiple_existing_matches_in_id_order_if_they_exist(self):
    """An incoming tax lot match rolls up all pre-existing matches in id order."""
    state_details = {
        'jurisdiction_tax_lot_id': '123MatchID',
        'city': 'Golden',
        'import_file_id': self.import_file_1.id,
        'data_state': DATA_STATE_MAPPING,
        'no_default_data': False,
    }

    # Three tax lots in the first ImportFile, none matching each other yet.
    tls_1 = self.taxlot_state_factory.get_taxlot_state(**state_details)
    state_details['jurisdiction_tax_lot_id'] = '789DifferentID'
    state_details['city'] = 'Denver'
    tls_2 = self.taxlot_state_factory.get_taxlot_state(**state_details)
    state_details['jurisdiction_tax_lot_id'] = '1337AnotherDifferentID'
    state_details['city'] = 'Philadelphia'
    tls_3 = self.taxlot_state_factory.get_taxlot_state(**state_details)

    self.import_file_1.mapping_done = True
    self.import_file_1.save()
    geocode_and_match_buildings_task(self.import_file_1.id)

    # Retroactively give the second and third states the matching ID.
    TaxLotState.objects.filter(pk__in=[tls_2.id, tls_3.id]).update(
        jurisdiction_tax_lot_id='123MatchID')

    # None of the 3 have merged yet.
    self.assertEqual(TaxLot.objects.count(), 3)
    self.assertEqual(TaxLotState.objects.count(), 3)
    self.assertEqual(TaxLotView.objects.count(), 3)

    # Import a tax lot that identifies the first 3 as matches.
    state_details['import_file_id'] = self.import_file_2.id
    state_details['jurisdiction_tax_lot_id'] = '123MatchID'
    del state_details['city']
    tls_4 = self.taxlot_state_factory.get_taxlot_state(**state_details)

    self.import_file_2.mapping_done = True
    self.import_file_2.save()
    geocode_and_match_buildings_task(self.import_file_2.id)

    # A single TaxLotView remains, backed by a brand-new merged -State.
    self.assertEqual(TaxLotView.objects.count(), 1)
    view = TaxLotView.objects.first()
    self.assertNotIn(view.state_id, [tls_1.id, tls_2.id, tls_3.id, tls_4.id])

    # The merged result carries Philadelphia as its city.
    self.assertEqual(view.state.city, 'Philadelphia')

    # The corresponding audit log is a System Match.
    audit_log = TaxLotAuditLog.objects.get(state_id=view.state_id)
    self.assertEqual(audit_log.name, 'System Match')
def test_match_buildings(self):
    """ case A (one property <-> one tax lot) """
    tasks.save_raw_data(self.import_file.pk)
    Column.create_mappings(self.fake_mappings, self.org, self.user, self.import_file.pk)
    tasks.map_data(self.import_file.pk)

    # All property rows should have been mapped.
    property_states = PropertyState.objects.filter(
        data_state=DATA_STATE_MAPPING,
        organization=self.org,
        import_file=self.import_file,
    )
    self.assertEqual(len(property_states), 14)

    # All tax lot rows should have been mapped.
    taxlot_states = TaxLotState.objects.filter(
        data_state=DATA_STATE_MAPPING,
        organization=self.org,
        import_file=self.import_file,
    )
    self.assertEqual(len(taxlot_states), 18)

    # Spot-check one tax lot state.
    sample_taxlot = TaxLotState.objects.filter(jurisdiction_tax_lot_id='1552813').first()
    self.assertEqual(sample_taxlot.jurisdiction_tax_lot_id, '1552813')
    self.assertEqual(sample_taxlot.address_line_1, None)
    self.assertEqual(sample_taxlot.extra_data["data_008"], 1)

    # Spot-check one property state.
    sample_properties = PropertyState.objects.filter(pm_property_id='2264')
    self.assertEqual(len(sample_properties), 1)
    sample_property = sample_properties.first()
    self.assertEqual(sample_property.pm_property_id, '2264')
    self.assertEqual(sample_property.address_line_1, '50 Willow Ave SE')
    self.assertEqual('data_007' in sample_property.extra_data, True)
    self.assertEqual('data_008' in sample_property.extra_data, False)
    self.assertEqual(sample_property.extra_data["data_007"], 'a')

    # lot_number should carry the tax lot id; for this case it is one-to-one.
    self.assertEqual(sample_property.lot_number, sample_taxlot.jurisdiction_tax_lot_id)

    tasks.geocode_and_match_buildings_task(self.import_file.id)

    self.assertEqual(TaxLot.objects.count(), 10)

    matched_views = PropertyView.objects.filter(state__custom_id_1='7')
    self.assertEqual(matched_views.count(), 1)
    matched_state = matched_views.first().state
    self.assertEqual(matched_state.address_line_1, "12 Ninth Street")
    self.assertEqual(matched_state.property_name, "Grange Hall")
def test_map_all_models_xml(self):
    """Single-XML BuildingSync import creates the view, scenarios, and meters."""
    # -- Setup
    with patch.object(ImportFile, 'cache_first_rows', return_value=None):
        progress_info = tasks.save_raw_data(self.import_file.pk)
    self.assertEqual('success', progress_info['status'], json.dumps(progress_info))
    self.assertEqual(PropertyState.objects.filter(import_file=self.import_file).count(), 1)

    # make the column mappings
    self.fake_mappings = default_buildingsync_profile_mappings()
    Column.create_mappings(self.fake_mappings, self.org, self.user, self.import_file.pk)

    # map the data
    progress_info = tasks.map_data(self.import_file.pk)
    self.assertEqual('success', progress_info['status'])

    # verify there were no errors with the files
    self.assertEqual({}, progress_info.get('file_info', {}))

    mapped_states = PropertyState.objects.filter(address_line_1='123 MAIN BLVD',
                                                 import_file=self.import_file)
    self.assertEqual(len(mapped_states), 1)

    # -- Act
    tasks.geocode_and_match_buildings_task(self.import_file.pk)

    # -- Assert
    mapped_states = PropertyState.objects.filter(address_line_1='123 MAIN BLVD',
                                                 import_file=self.import_file)
    self.assertEqual(mapped_states.count(), 1)

    # verify the property view, scenario and meter data were created
    views = PropertyView.objects.filter(state=mapped_states[0])
    self.assertEqual(views.count(), 1)
    scenario_qs = Scenario.objects.filter(property_state=mapped_states[0])
    self.assertEqual(scenario_qs.count(), 3)

    # for bsync, meters are linked to scenarios only (not properties)
    meters = Meter.objects.filter(scenario__in=scenario_qs)
    self.assertEqual(meters.count(), 6)
def setUp(self):
    """Import two example files, run the full pipeline on each, and log in
    the API test client.

    The first upload uses ``self.filename`` (or the default example file);
    the second uses ``self.filename_2`` (or the "small changes" variant) so
    tests can compare merge/link behavior across the two imports.
    """
    data_importer_data_dir = os.path.join(os.path.dirname(__file__), '..',
                                          'data_importer', 'tests', 'data')
    filename = getattr(self, 'filename', 'example-data-properties.xlsx')
    self.fake_mappings = copy.copy(FAKE_MAPPINGS['portfolio'])
    selfvars = self.set_up(ASSESSED_RAW)
    self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars

    filepath = os.path.join(data_importer_data_dir, filename)
    # Use a context manager so the file handle is closed (previously leaked).
    with open(filepath, 'rb') as f:
        self.import_file.file = SimpleUploadedFile(name=filename, content=f.read())
    self.import_file.save()

    tasks.save_raw_data(self.import_file.pk)
    Column.create_mappings(self.fake_mappings, self.org, self.user, self.import_file.id)
    tasks.map_data(self.import_file.pk)
    tasks.geocode_and_match_buildings_task(self.import_file.id)

    # import second file that is currently the same, but should be slightly different
    # BUG FIX: this previously looked up the 'filename' attribute again, so any
    # subclass overriding `filename` imported the identical file twice.
    filename_2 = getattr(self, 'filename_2', 'example-data-properties-small-changes.xlsx')
    _, self.import_file_2 = self.create_import_file(self.user, self.org, self.cycle)

    filepath = os.path.join(data_importer_data_dir, filename_2)
    with open(filepath, 'rb') as f:
        self.import_file_2.file = SimpleUploadedFile(name=filename_2, content=f.read())
    self.import_file_2.save()

    tasks.save_raw_data(self.import_file_2.pk)
    Column.create_mappings(self.fake_mappings, self.org, self.user, self.import_file_2.id)
    tasks.map_data(self.import_file_2.pk)
    tasks.geocode_and_match_buildings_task(self.import_file_2.id)

    # for api tests
    user_details = {
        'username': '******',
        'password': '******',
    }
    self.client.login(**user_details)
def test_match_buildings(self):
    """ case B (many property <-> one tax lot) """
    tasks.save_raw_data(self.import_file.pk)
    Column.create_mappings(self.fake_mappings, self.org, self.user, self.import_file.pk)
    # Set remap to True because for some reason this file id has been imported before.
    tasks.map_data(self.import_file.pk, True)

    # All property rows should have been mapped.
    property_states = PropertyState.objects.filter(
        data_state=DATA_STATE_MAPPING,
        organization=self.org,
        import_file=self.import_file,
    )
    self.assertEqual(len(property_states), 14)

    # All tax lot rows should have been mapped.
    taxlot_states = TaxLotState.objects.filter(
        data_state=DATA_STATE_MAPPING,
        organization=self.org,
        import_file=self.import_file,
    )
    self.assertEqual(len(taxlot_states), 18)

    # lot_number carries the tax lot info; for this case it is one-to-many.
    sample_property = PropertyState.objects.filter(
        pm_property_id='5233255',
        organization=self.org,
        data_state=DATA_STATE_MAPPING,
        import_file=self.import_file,
    ).first()
    self.assertEqual(sample_property.lot_number, "333/66555;333/66125;333/66148")

    tasks.geocode_and_match_buildings_task(self.import_file.id)

    # The tax lot should be linked to exactly three properties.
    taxlot_views = TaxLotView.objects.filter(
        state__jurisdiction_tax_lot_id='11160509', cycle=self.cycle)
    self.assertEqual(len(taxlot_views), 1)
    linked_properties = taxlot_views[0].property_states()
    self.assertEqual(len(linked_properties), 3)
def test_match_properties_on_ubid(self):
    """Two -States sharing a UBID merge into one canonical property."""
    state_details = {
        'ubid': '86HJPCWQ+2VV-1-3-2-3',
        'import_file_id': self.import_file.id,
        'data_state': DATA_STATE_MAPPING,
        'no_default_data': False,
    }

    # Two states with the same UBID should match each other.
    self.property_state_factory.get_property_state(**state_details)
    state_details['city'] = 'Denver'
    self.property_state_factory.get_property_state(**state_details)

    # Matching only runs after mapping is flagged complete.
    self.import_file.mapping_done = True
    self.import_file.save()
    geocode_and_match_buildings_task(self.import_file.id)

    # 1 Property, 1 PropertyView, 3 PropertyStates (2 imported, 1 merge result)
    self.assertEqual(Property.objects.count(), 1)
    self.assertEqual(PropertyView.objects.count(), 1)
    self.assertEqual(PropertyState.objects.count(), 3)
def test_match_taxlots_normalized_address_used_instead_of_address_line_1(self):
    """Address matching compares normalized addresses, not raw strings."""
    state_details = {
        'import_file_id': self.import_file.id,
        'data_state': DATA_STATE_MAPPING,
        'no_default_data': False,
    }

    # Same address_line_1 in two slightly different formats.
    state_details['address_line_1'] = '123 Match Street'
    self.taxlot_state_factory.get_taxlot_state(**state_details)
    state_details['address_line_1'] = '123 match St.'
    state_details['city'] = 'Denver'
    self.taxlot_state_factory.get_taxlot_state(**state_details)

    # Matching only runs after mapping is flagged complete.
    self.import_file.mapping_done = True
    self.import_file.save()
    geocode_and_match_buildings_task(self.import_file.id)

    # 1 TaxLot, 1 TaxLotView, 3 TaxLotStates (2 imported, 1 merge result)
    self.assertEqual(TaxLot.objects.count(), 1)
    self.assertEqual(TaxLotView.objects.count(), 1)
    self.assertEqual(TaxLotState.objects.count(), 3)
def test_map_all_models_zip(self):
    """Zipped BuildingSync import creates building files and scenarios."""
    # -- Setup
    with patch.object(ImportFile, 'cache_first_rows', return_value=None):
        progress_info = tasks.save_raw_data(self.import_file.pk)
    self.assertEqual('success', progress_info['status'], json.dumps(progress_info))
    self.assertEqual(PropertyState.objects.filter(import_file=self.import_file).count(), 2)

    # make the column mappings
    self.fake_mappings = default_buildingsync_profile_mappings()
    Column.create_mappings(self.fake_mappings, self.org, self.user, self.import_file.pk)

    # map the data
    progress_info = tasks.map_data(self.import_file.pk)
    self.assertEqual('success', progress_info['status'])

    mapped_states = PropertyState.objects.filter(address_line_1='123 Main St',
                                                 import_file=self.import_file)
    self.assertEqual(mapped_states.count(), 2)

    # -- Act
    tasks.geocode_and_match_buildings_task(self.import_file.pk)

    # -- Assert
    mapped_states = PropertyState.objects.filter(address_line_1='123 Main St',
                                                 import_file=self.import_file)
    self.assertEqual(mapped_states.count(), 2)

    # verify there are 2 building files
    building_files = BuildingFile.objects.all()
    self.assertEqual(building_files.count(), 2)

    # check that scenarios were created
    scenario_qs = Scenario.objects.all()
    self.assertEqual(scenario_qs.count(), 31)
def test_handle_id_matches_duplicate_data(self):
    """
    Test for handle_id_matches behavior when matching duplicate data
    """
    # TODO: Fix the PM, tax lot id, and custom ID fields in PropertyState
    snapshot_details = {
        'pm_property_id': "2360",
        # 'tax_lot_id': '476/460',
        'property_name': 'Garfield Complex',
        'custom_id_1': "89",
        'address_line_1': '12975 Database LN.',
        'address_line_2': '',
        'city': 'Cartoon City',
        'postal_code': "54321",
        'data_state': DATA_STATE_MAPPING,
        'source_type': ASSESSED_BS,
    }

    # Setup mapped AS snapshot.
    PropertyState.objects.create(organization=self.org,
                                 import_file=self.import_file,
                                 **snapshot_details)

    # Different file, but same ImportRecord.
    # Setup mapped PM snapshot.
    # Should be an identical match.
    new_import_file = ImportFile.objects.create(import_record=self.import_record,
                                                mapping_done=True)
    tasks.geocode_and_match_buildings_task(new_import_file.pk)

    # Second file in the same record carrying an identical snapshot.
    duplicate_import_file = ImportFile.objects.create(import_record=self.import_record,
                                                      mapping_done=True)
    PropertyState.objects.create(organization=self.org,
                                 import_file=duplicate_import_file,
                                 **snapshot_details)
def test_match_properties_get_rolled_up_into_one_in_the_order_their_uploaded(self):
    """
    The most recently uploaded should take precedence when merging states.

    If more than 2 states match each other, they are merged two at a time
    until one is remaining. Reminder, this is only for -States within an
    ImportFile.
    """
    state_details = {
        'address_line_1': '123 Match Street',
        'import_file_id': self.import_file.id,
        'data_state': DATA_STATE_MAPPING,
        'no_default_data': False,
    }

    # Create matching states in upload order; the last city should win.
    for city in ('Philadelphia', 'Arvada', 'Golden', 'Denver'):
        state_details['city'] = city
        self.property_state_factory.get_property_state(**state_details)

    # Matching only runs after mapping is flagged complete.
    self.import_file.mapping_done = True
    self.import_file.save()
    geocode_and_match_buildings_task(self.import_file.id)

    # 1 Property, 1 PropertyViews, 7 PropertyStates (4 imported, 3 merge results)
    self.assertEqual(Property.objects.count(), 1)
    self.assertEqual(PropertyView.objects.count(), 1)
    self.assertEqual(PropertyState.objects.count(), 7)
    self.assertEqual(PropertyView.objects.first().state.city, 'Denver')
def start_system_matching_and_geocoding(self, request, pk=None):
    """
    Starts a background task to attempt automatic matching between buildings
    in an ImportFile with other existing buildings within the same org.
    """
    org_id = request.query_params.get('organization_id', None)

    # pk is the primary key, so an existence check is equivalent to the
    # original get()/DoesNotExist handling.
    file_in_org = ImportFile.objects.filter(
        pk=pk, import_record__super_organization_id=org_id).exists()
    if not file_in_org:
        return JsonResponse(
            {
                'status': 'error',
                'message': 'Could not find import file with pk=' + str(pk)
            },
            status=status.HTTP_400_BAD_REQUEST)

    return geocode_and_match_buildings_task(pk)
def test_map_all_models_xml(self):
    """XML import with bad measure names surfaces per-file warnings."""
    # -- Setup
    with patch.object(ImportFile, 'cache_first_rows', return_value=None):
        progress_info = tasks.save_raw_data(self.import_file.pk)
    self.assertEqual('success', progress_info['status'], json.dumps(progress_info))
    self.assertEqual(PropertyState.objects.filter(import_file=self.import_file).count(), 1)

    # make the column mappings
    self.fake_mappings = default_buildingsync_profile_mappings()
    Column.create_mappings(self.fake_mappings, self.org, self.user, self.import_file.pk)

    # map the data
    progress_info = tasks.map_data(self.import_file.pk)
    self.assertEqual('success', progress_info['status'])

    # verify there were no errors with the files
    self.assertEqual({}, progress_info.get('file_info', {}))

    mapped_states = PropertyState.objects.filter(address_line_1='123 Main St',
                                                 import_file=self.import_file)
    self.assertEqual(mapped_states.count(), 1)

    # -- Act
    progress_info = tasks.geocode_and_match_buildings_task(self.import_file.pk)

    # -- Assert
    mapped_states = PropertyState.objects.filter(address_line_1='123 Main St',
                                                 import_file=self.import_file)
    self.assertEqual(mapped_states.count(), 1)

    # !! we should have warnings for our file because of the bad measure names !!
    self.assertNotEqual({}, progress_info.get('file_info', {}))
    self.assertIn(self.import_file.uploaded_filename,
                  list(progress_info['file_info'].keys()))
    warnings = progress_info['file_info'][self.import_file.uploaded_filename].get('warnings', [])
    self.assertNotEqual([], warnings)
def test_match_merge_link_for_properties(self):
    """
    In this context, a "set" includes a -State, -View, and canonical record.

    Set up consists of 3 imports across 3 cycles respectively:
    Cycle 1 - 3 sets will be imported.
    - 2 sets match each other and are merged
    - 1 set doesn't match any others
    Cycle 2 - 4 sets will be imported.
    - 3 sets match. All will merge then link to match set in Cycle 1
    - 1 set doesn't match any others
    Cycle 3 - 2 sets will be imported.
    - 1 set will match sets from Cycles 1 and 2 and link to them
    - 1 set doesn't match any others
    """
    # Cycle 1 / ImportFile 1
    base_state_details = {
        'pm_property_id': '1st Match Set',
        'city': '1st Match - Cycle 1 - City 1',
        'import_file_id': self.import_file_1.id,
        'data_state': DATA_STATE_MAPPING,
        'no_default_data': False,
    }
    self.property_state_factory.get_property_state(**base_state_details)

    # Second state with the same pm_property_id -> merges with the first.
    base_state_details['pm_property_id'] = '1st Match Set'
    base_state_details['city'] = '1st Match - Cycle 1 - City 2'
    self.property_state_factory.get_property_state(**base_state_details)

    # Unmatched singleton for Cycle 1.
    base_state_details['pm_property_id'] = 'Single Unmatched - 1'
    base_state_details['city'] = 'Unmatched City - Cycle 1'
    self.property_state_factory.get_property_state(**base_state_details)

    # Import file and create -Views and canonical records.
    self.import_file_1.mapping_done = True
    self.import_file_1.save()
    geocode_and_match_buildings_task(self.import_file_1.id)

    # Cycle 2 / ImportFile 2: three matching states plus one singleton.
    base_state_details['import_file_id'] = self.import_file_2.id
    base_state_details['pm_property_id'] = '1st Match Set'
    base_state_details['city'] = '1st Match - Cycle 2 - City 1'
    self.property_state_factory.get_property_state(**base_state_details)

    base_state_details['pm_property_id'] = '1st Match Set'
    base_state_details['city'] = '1st Match - Cycle 2 - City 2'
    self.property_state_factory.get_property_state(**base_state_details)

    base_state_details['pm_property_id'] = '1st Match Set'
    base_state_details['city'] = '1st Match - Cycle 2 - City 3'
    self.property_state_factory.get_property_state(**base_state_details)

    base_state_details['pm_property_id'] = 'Single Unmatched - 2'
    base_state_details['city'] = 'Unmatched City - Cycle 2'
    self.property_state_factory.get_property_state(**base_state_details)

    # Import file and create -Views and canonical records.
    self.import_file_2.mapping_done = True
    self.import_file_2.save()
    geocode_and_match_buildings_task(self.import_file_2.id)

    # Cycle 3 / ImportFile 3: one linking state plus one singleton.
    base_state_details['import_file_id'] = self.import_file_3.id
    base_state_details['pm_property_id'] = '1st Match Set'
    base_state_details['city'] = '1st Match - Cycle 3 - City 1'
    self.property_state_factory.get_property_state(**base_state_details)

    base_state_details['pm_property_id'] = 'Single Unmatched - 3'
    base_state_details['city'] = 'Unmatched City - Cycle 3'
    self.property_state_factory.get_property_state(**base_state_details)

    # Import file and create -Views and canonical records.
    self.import_file_3.mapping_done = True
    self.import_file_3.save()
    geocode_and_match_buildings_task(self.import_file_3.id)

    # Verify merges and links happened
    self.assertEqual(6, PropertyView.objects.count())
    # 4 + 6 + 2 = states imported per cycle (merge results add to the total).
    self.assertEqual(4 + 6 + 2, PropertyState.objects.count())

    # 4 unique canonical records used in -Views
    # For now, Properties are not deleted when they aren't used in -Views so a count test wouldn't be appropriate
    self.assertEqual(
        4,
        len(set(PropertyView.objects.values_list('property_id', flat=True)))
    )

    # At the moment, there should be 3 -Views with the same canonical record across 3 cycles
    views_with_same_canonical_record = PropertyView.objects.\
        values('property_id').\
        annotate(times_used=Count('id'), cycle_ids=ArrayAgg('cycle_id')).\
        filter(times_used__gt=1).\
        get()
    self.assertEqual(3, views_with_same_canonical_record['times_used'])
    self.assertCountEqual(
        [self.cycle_1.id, self.cycle_2.id, self.cycle_3.id],
        views_with_same_canonical_record['cycle_ids']
    )
def test_whole_org_match_merge_link_preview_endpoint_taxlots(self):
    """Preview endpoint reports the links a criteria change WOULD create
    without actually creating them, and the result is retrievable both from
    the cache and from the results endpoint."""
    # Cycle 1 / ImportFile 1 - Create 1 taxlot
    base_taxlot_details = {
        'jurisdiction_tax_lot_id': '1st Non-Match Set',
        'city': 'City 1',
        'district': 'Match Set',
        'import_file_id': self.import_file_1.id,
        'data_state': DATA_STATE_MAPPING,
        'no_default_data': False,
    }
    tls_1 = self.taxlot_state_factory.get_taxlot_state(**base_taxlot_details)

    self.import_file_1.mapping_done = True
    self.import_file_1.save()
    geocode_and_match_buildings_task(self.import_file_1.id)

    # Cycle 2 / ImportFile 2 - Create 1 unlinked taxlot
    base_taxlot_details['jurisdiction_tax_lot_id'] = '2nd Non-Match Set'
    base_taxlot_details['district'] = 'Match Set'
    base_taxlot_details['import_file_id'] = self.import_file_2.id
    tls_2 = self.taxlot_state_factory.get_taxlot_state(**base_taxlot_details)

    self.import_file_2.mapping_done = True
    self.import_file_2.save()
    geocode_and_match_buildings_task(self.import_file_2.id)

    # Check there doesn't exist links
    self.assertNotEqual(tls_1.taxlotview_set.first().taxlot_id,
                        tls_2.taxlotview_set.first().taxlot_id)

    url = reverse('api:v3:organizations-match-merge-link-preview',
                  args=[self.org.id])
    # Preview matching on 'district' instead of 'jurisdiction_tax_lot_id' —
    # under those criteria the two tax lots WOULD link.
    post_params = json.dumps({
        "inventory_type": "taxlots",
        "add": ['district'],
        "remove": ['jurisdiction_tax_lot_id']
    })
    raw_result = self.client.post(url, post_params,
                                  content_type='application/json')

    # Check there *still* doesn't exist links
    self.assertNotEqual(tls_1.taxlotview_set.first().taxlot_id,
                        tls_2.taxlotview_set.first().taxlot_id)

    self.assertEqual(200, raw_result.status_code)

    raw_content = json.loads(raw_result.content)

    # The preview result is stored in the cache under a key derived from the
    # progress data's unique id.
    identifier = ProgressData.from_key(
        raw_content['progress_key']).data['unique_id']
    result_key = "org_match_merge_link_result__%s" % identifier
    raw_summary = get_cache_raw(result_key)

    summary = {str(k): v for k, v in raw_summary.items() if v}  # ignore empty cycles

    # Check format of summary
    self.assertCountEqual(
        [str(self.cycle_1.id), str(self.cycle_2.id)], summary.keys())

    # Check that preview shows links would be created
    self.assertEqual(summary[str(self.cycle_1.id)][0]['id'],
                     summary[str(self.cycle_2.id)][0]['id'])

    # try to get result using results endpoint
    get_result_url = reverse(
        'api:v3:organizations-match-merge-link-result',
        args=[self.org.id]) + '?match_merge_link_id=' + str(identifier)
    get_result_raw_response = self.client.get(get_result_url)
    raw_summary = json.loads(get_result_raw_response.content)
    summary = {str(k): v for k, v in raw_summary.items() if v}  # ignore empty cycles

    # Check format of summary
    self.assertCountEqual(
        [str(self.cycle_1.id), str(self.cycle_2.id)], summary.keys())

    # Check that preview shows links would be created
    self.assertEqual(summary[str(self.cycle_1.id)][0]['id'],
                     summary[str(self.cycle_2.id)][0]['id'])
def test_demo_v2(self):
    """End-to-end import of the demo tax lot and property files.

    Maps and matches the tax lot file first (9 tax lots expected from the
    fixture), then the property file (14 properties expected), and verifies
    that after matching no PropertyStates remain in the mapping data_state
    and that a known record ('University Inn', pm_property_id 2264) was
    promoted to a view.
    """
    tasks.save_raw_data(self.import_file_tax_lot.pk)
    Column.create_mappings(self.fake_taxlot_mappings, self.org, self.user)
    Column.create_mappings(self.fake_portfolio_mappings, self.org, self.user)
    tasks.map_data(self.import_file_tax_lot.pk)

    # Check to make sure the taxlots were imported
    ts = TaxLotState.objects.filter(
        data_state=DATA_STATE_MAPPING,
        organization=self.org,
        import_file=self.import_file_tax_lot,
    )

    ps = PropertyState.objects.filter(
        data_state=DATA_STATE_MAPPING,
        organization=self.org,
        import_file=self.import_file_property,
    )

    # Property file has not been saved/mapped yet, so no PropertyStates.
    self.assertEqual(len(ps), 0)
    self.assertEqual(len(ts), 9)

    tasks.geocode_and_match_buildings_task(self.import_file_tax_lot.id)

    # Check a single case of the taxlotstate
    self.assertEqual(
        TaxLotState.objects.filter(
            address_line_1='2655 Welstone Ave NE').count(), 1)
    self.assertEqual(
        TaxLotView.objects.filter(
            state__address_line_1='2655 Welstone Ave NE').count(), 1)

    self.assertEqual(TaxLotView.objects.count(), 9)

    # Import the property data
    tasks.save_raw_data(self.import_file_property.pk)
    tasks.map_data(self.import_file_property.pk)

    ts = TaxLotState.objects.filter(
        # data_state=DATA_STATE_MAPPING,  # Look at all taxlotstates
        organization=self.org,
        import_file=self.import_file_tax_lot,
    )

    ps = PropertyState.objects.filter(
        data_state=DATA_STATE_MAPPING,
        organization=self.org,
        import_file=self.import_file_property,
    )

    self.assertEqual(len(ts), 9)
    self.assertEqual(len(ps), 14)

    tasks.geocode_and_match_buildings_task(self.import_file_property.id)

    ps = PropertyState.objects.filter(
        data_state=DATA_STATE_MAPPING,
        organization=self.org,
        import_file=self.import_file_property,
    )

    # there should not be any properties left in the mapping state
    self.assertEqual(len(ps), 0)

    # psv = PropertyView.objects.filter(state__organization=self.org)
    # self.assertEqual(len(psv), 12)

    # tlv = TaxLotView.objects.filter(state__organization=self.org)
    # self.assertEqual(len(tlv), 9)

    self.assertEqual(
        PropertyView.objects.filter(state__organization=self.org,
                                    state__pm_property_id='2264').count(), 1)
    pv = PropertyView.objects.filter(state__organization=self.org,
                                     state__pm_property_id='2264').first()
    self.assertEqual(pv.state.property_name, 'University Inn')
    self.assertEqual(pv.state.address_line_1, '50 Willow Ave SE')
def test_property_states_not_associated_to_properties_are_not_targetted_on_meter_import(
        self):
    """Meter import must skip duplicate-flagged PropertyStates.

    Three exact-duplicate states for pm_property_id 5766973 are imported
    across two ImportFiles; matching flags the duplicates DATA_STATE_DELETE.
    After importing PM meter readings, every created Meter must attach to
    the surviving PropertyView only, never to a delete-flagged state.
    """
    # Create three pm_property_id = 5766973 properties that are exact duplicates
    base_details = {
        'address_line_1': '123 Match Street',
        'pm_property_id': '5766973',
        'import_file_id': self.import_file_1.id,
        'data_state': DATA_STATE_MAPPING,
        'no_default_data': False,
    }
    # Create 1 property with a duplicate in the first ImportFile
    self.property_state_factory.get_property_state(**base_details)
    self.property_state_factory.get_property_state(**base_details)

    # set import_file mapping done so that matching can occur.
    self.import_file_1.mapping_done = True
    self.import_file_1.save()
    geocode_and_match_buildings_task(self.import_file_1.id)

    # The record is unused; underscore-prefix it for consistency with
    # the other tests in this file.
    _import_record_2, import_file_2 = self.create_import_file(
        self.user, self.org, self.cycle)

    # Create another duplicate property coming from second ImportFile
    base_details['import_file_id'] = import_file_2.id
    self.property_state_factory.get_property_state(**base_details)

    # set import_file mapping done so that matching can occur.
    import_file_2.mapping_done = True
    import_file_2.save()
    geocode_and_match_buildings_task(import_file_2.id)

    # Import the PM Meters. Build the path portably and close the file
    # handle promptly (the original open(...).read() leaked the handle).
    filename = "example-pm-monthly-meter-usage.xlsx"
    filepath = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "data", filename)
    with open(filepath, 'rb') as meter_fp:
        meter_content = meter_fp.read()
    pm_meter_file = ImportFile.objects.create(
        import_record=self.import_record,
        source_type="PM Meter Usage",
        uploaded_filename=filename,
        file=SimpleUploadedFile(name=filename, content=meter_content),
        cycle=self.cycle)

    # Check that meters pre-upload confirmation runs without problems
    confirmation_url = reverse('api:v3:import_files-pm-meters-preview',
                               kwargs={'pk': pm_meter_file.id})
    confirmation_url += f'?organization_id={self.org.pk}'
    self.client.get(confirmation_url)

    url = reverse("api:v3:import_files-start-save-data",
                  args=[pm_meter_file.id])
    url += f'?organization_id={self.org.pk}'
    post_params = {
        'cycle_id': self.cycle.pk,
    }
    self.client.post(url, post_params)

    # Check that Meters have been uploaded successfully (there's only 2 since only pm_property_id 5766973 exists)
    self.assertEqual(Meter.objects.count(), 2)

    # Ensure that no meters were associated to the duplicate PropertyStates via PropertyViews
    delete_flagged_ids = PropertyState.objects.filter(
        data_state=DATA_STATE_DELETE).values_list('id', flat=True)
    for meter in Meter.objects.all():
        self.assertEqual(
            meter.property.views.filter(
                state_id__in=delete_flagged_ids).count(), 0)
def test_taxlots(self):
    """Exercise the full duplicate/merge accounting for taxlot imports.

    File 1 imports 5 non-matching taxlots; file 2 imports 10 taxlots that
    exercise every matching outcome (duplicate against existing, duplicate
    within file, merge against existing, merge within file, brand new).
    The final object counts and the ImportFile's matching_results_data
    summary are pinned exactly.
    """
    # Define matching values
    matching_jurisdiction_tax_lot_id = '11111'
    matching_address_line_1 = '123 Match Street'
    matching_ulid = '86HJPCWQ+2VV-1-3-2-3'
    matching_custom_id_1 = 'MatchingID12345'

    # For first file, create taxlots with no duplicates or matches
    base_details_file_1 = {
        'import_file_id': self.import_file_1.id,
        'data_state': DATA_STATE_MAPPING,
        'no_default_data': False,
    }
    # No matching_criteria values
    self.taxlot_state_factory.get_taxlot_state(**base_details_file_1)

    # Build out taxlots with increasingly more matching_criteria values
    base_details_file_1[
        'jurisdiction_tax_lot_id'] = matching_jurisdiction_tax_lot_id
    self.taxlot_state_factory.get_taxlot_state(**base_details_file_1)

    base_details_file_1['address_line_1'] = matching_address_line_1
    self.taxlot_state_factory.get_taxlot_state(**base_details_file_1)

    base_details_file_1['ulid'] = matching_ulid
    self.taxlot_state_factory.get_taxlot_state(**base_details_file_1)

    base_details_file_1['custom_id_1'] = matching_custom_id_1
    self.taxlot_state_factory.get_taxlot_state(**base_details_file_1)

    self.import_file_1.mapping_done = True
    self.import_file_1.save()
    geocode_and_match_buildings_task(self.import_file_1.id)

    # Verify no duplicates/matched-merges yet
    counts = [
        TaxLot.objects.count(),
        TaxLotState.objects.count(),
        TaxLotView.objects.count(),
    ]
    self.assertEqual([5, 5, 5], counts)

    """
    For second file, create several taxlots that are one or many of the following:
        - 1 duplicates amongst file_1
        - 3 duplicates amongst file_2
        - 1 matching amongst file_1
        - 2 matching amongst file_2
        - 3 completely new
    """
    base_details_file_2 = {
        'import_file_id': self.import_file_2.id,
        'data_state': DATA_STATE_MAPPING,
        'no_default_data': False,
    }
    # Create 2 duplicates of the 'No matching_criteria values' taxlots
    # (outcome: 2 additional -States, NO new TaxLot/-View)
    tls_1 = self.taxlot_state_factory.get_taxlot_state(
        **base_details_file_2)
    tls_2 = self.taxlot_state_factory.get_taxlot_state(
        **base_details_file_2)

    # Create 2 completely new taxlots with misaligned combinations of matching values
    # (outcome: 2 additional -States, 2 new TaxLot/-View)
    base_details_file_2['custom_id_1'] = matching_custom_id_1
    tls_3 = self.taxlot_state_factory.get_taxlot_state(
        **base_details_file_2)

    base_details_file_2['ulid'] = matching_ulid
    tls_4 = self.taxlot_state_factory.get_taxlot_state(
        **base_details_file_2)

    # Create 3 taxlots - with 1 duplicate and 1 match within it's own file that will
    # eventually become 1 completely new property
    # (outcome: 4 additional -States, 1 new TaxLot/-View)
    base_details_file_2['address_line_1'] = matching_address_line_1
    base_details_file_2['city'] = 'Denver'
    tls_5 = self.taxlot_state_factory.get_taxlot_state(
        **base_details_file_2)
    tls_6 = self.taxlot_state_factory.get_taxlot_state(
        **base_details_file_2)

    base_details_file_2['city'] = 'Golden'
    tls_7 = self.taxlot_state_factory.get_taxlot_state(
        **base_details_file_2)

    # Create 3 properties - with 1 duplicate and 1 match within it's own file that will
    # eventually match the last property in file_1
    # (outcome: 5 additional -States, NO new TaxLot/-View)
    base_details_file_2[
        'jurisdiction_tax_lot_id'] = matching_jurisdiction_tax_lot_id
    base_details_file_2['state'] = 'Colorado'
    tls_8 = self.taxlot_state_factory.get_taxlot_state(
        **base_details_file_2)
    tls_9 = self.taxlot_state_factory.get_taxlot_state(
        **base_details_file_2)

    base_details_file_2['state'] = 'California'
    tls_10 = self.taxlot_state_factory.get_taxlot_state(
        **base_details_file_2)

    self.import_file_2.mapping_done = True
    self.import_file_2.save()
    geocode_and_match_buildings_task(self.import_file_2.id)

    self.assertEqual(8, TaxLot.objects.count())
    self.assertEqual(8, TaxLotView.objects.count())
    self.assertEqual(18, TaxLotState.objects.count())

    # Duplicates (within-file and against file_1) get flagged for deletion.
    tls_ids_of_deleted = TaxLotState.objects.filter(
        data_state=DATA_STATE_DELETE).values_list('id',
                                                  flat=True).order_by('id')
    self.assertEqual([tls_1.id, tls_2.id, tls_6.id, tls_9.id],
                     list(tls_ids_of_deleted))

    # States merged within file_2 keep their pre-match flags.
    tls_ids_of_merged_in_file = TaxLotState.objects.filter(
        data_state=DATA_STATE_MAPPING,
        merge_state=MERGE_STATE_UNKNOWN).values_list(
            'id', flat=True).order_by('id')
    self.assertEqual([tls_5.id, tls_7.id, tls_8.id, tls_10.id],
                     list(tls_ids_of_merged_in_file))

    # tls_3 and tls_4 matched nothing, so they were promoted directly.
    tls_ids_of_all_promoted = TaxLotView.objects.values_list('state_id',
                                                             flat=True)
    self.assertIn(tls_3.id, tls_ids_of_all_promoted)
    self.assertIn(tls_4.id, tls_ids_of_all_promoted)

    # Re-fetch the ImportFile to read the summary the matching task cached.
    rimport_file_2 = ImportFile.objects.get(pk=self.import_file_2.id)
    results = rimport_file_2.matching_results_data
    del results['progress_key']

    expected = {
        'import_file_records': None,  # This is calculated in a separate process
        'property_duplicates_against_existing': 0,
        'property_duplicates_within_file': 0,
        'property_initial_incoming': 0,
        'property_merges_against_existing': 0,
        'property_merges_between_existing': 0,
        'property_merges_within_file': 0,
        'property_new': 0,
        'tax_lot_duplicates_against_existing': 1,
        'tax_lot_duplicates_within_file': 3,
        'tax_lot_initial_incoming': 10,
        'tax_lot_merges_against_existing': 1,
        'tax_lot_merges_between_existing': 0,
        'tax_lot_merges_within_file': 2,
        'tax_lot_new': 3,
    }
    self.assertEqual(results, expected)
def test_merged_indicators_provided_on_filter_endpoint(self):
    """The taxlots-filter endpoint reports merged_indicator correctly.

    A single taxlot starts with merged_indicator False; after a second
    import merges into it the indicator flips to True. A paired property
    (never merged) must expose merged_indicator False in 'related'.
    """
    _import_record, import_file_1 = self.create_import_file(
        self.user, self.org, self.cycle)

    base_details = {
        'address_line_1': '123 Match Street',
        'import_file_id': import_file_1.id,
        'data_state': DATA_STATE_MAPPING,
        'no_default_data': False,
    }
    self.taxlot_state_factory.get_taxlot_state(**base_details)

    # set import_file_1 mapping done so that record is "created for users to view".
    import_file_1.mapping_done = True
    import_file_1.save()
    geocode_and_match_buildings_task(import_file_1.id)

    _import_record_2, import_file_2 = self.create_import_file(
        self.user, self.org, self.cycle)

    # The same filter URL is queried after each mutation; build it once.
    url = reverse(
        'api:v3:taxlots-filter'
    ) + '?cycle_id={}&organization_id={}&page=1&per_page=999999999'.format(
        self.cycle.pk, self.org.pk)

    response = self.client.post(url, content_type='application/json')
    data = json.loads(response.content)

    self.assertFalse(data['results'][0]['merged_indicator'])

    # make sure merged_indicator is True when merge occurs
    base_details['city'] = 'Denver'
    base_details['import_file_id'] = import_file_2.id
    self.taxlot_state_factory.get_taxlot_state(**base_details)

    # set import_file_2 mapping done so that match merging can occur.
    import_file_2.mapping_done = True
    import_file_2.save()
    geocode_and_match_buildings_task(import_file_2.id)

    response = self.client.post(url, content_type='application/json')
    data = json.loads(response.content)

    self.assertTrue(data['results'][0]['merged_indicator'])

    # Create pairings and check if paired object has indicator as well
    property_factory = FakePropertyFactory(organization=self.org)
    property_state_factory = FakePropertyStateFactory(
        organization=self.org)

    # 'prop' rather than 'property' to avoid shadowing the builtin.
    prop = property_factory.get_property()
    prop_state = property_state_factory.get_property_state()
    property_view = PropertyView.objects.create(property=prop,
                                                cycle=self.cycle,
                                                state=prop_state)

    # attach pairing to one and only taxlot_view
    TaxLotProperty(primary=True,
                   cycle_id=self.cycle.id,
                   property_view_id=property_view.id,
                   taxlot_view_id=TaxLotView.objects.get().id).save()

    response = self.client.post(url, content_type='application/json')
    data = json.loads(response.content)

    related = data['results'][0]['related'][0]
    self.assertIn('merged_indicator', related)
    self.assertFalse(related['merged_indicator'])
def test_match_properties_if_all_default_fields_match(self):
    """Import five PropertyStates where two pairs share matching criteria.

    Expected outcome: each matching pair collapses into one merged state
    (so 3 Properties/Views and 7 States total), the merged-away originals
    keep their mapping-stage flags, and the unmatched fifth state is
    promoted as a brand-new record.
    """
    base_details = {
        'address_line_1': '123 Match Street',
        'import_file_id': self.import_file.id,
        'data_state': DATA_STATE_MAPPING,
        'no_default_data': False,
    }

    # First matching pair: same address, no pm_property_id.
    ps_1 = self.property_state_factory.get_property_state(**base_details)
    base_details['city'] = 'Denver'
    ps_2 = self.property_state_factory.get_property_state(**base_details)

    # Second matching pair: same address plus pm_property_id '11111'.
    base_details['pm_property_id'] = '11111'
    ps_3 = self.property_state_factory.get_property_state(**base_details)
    base_details['city'] = 'Philadelphia'
    ps_4 = self.property_state_factory.get_property_state(**base_details)

    # Fifth state matches nothing else.
    base_details['pm_property_id'] = '000'
    ps_5 = self.property_state_factory.get_property_state(**base_details)

    # Flag mapping as finished so the matching pipeline will run.
    self.import_file.mapping_done = True
    self.import_file.save()
    geocode_and_match_buildings_task(self.import_file.id)

    # 3 Property, 3 PropertyViews, 7 PropertyStates (5 imported, 2 merge results)
    self.assertEqual(Property.objects.count(), 3)
    self.assertEqual(PropertyView.objects.count(), 3)
    self.assertEqual(PropertyState.objects.count(), 7)

    def check_flags(state_id, expected_data_state, expected_merge_state):
        # Re-fetch the state and verify its post-match flags.
        refreshed = PropertyState.objects.get(pk=state_id)
        self.assertEqual(refreshed.data_state, expected_data_state)
        self.assertEqual(refreshed.merge_state, expected_merge_state)

    # The four merged-away originals keep their pre-match flags.
    for original in (ps_1, ps_2, ps_3, ps_4):
        check_flags(original.id, DATA_STATE_MAPPING, MERGE_STATE_UNKNOWN)

    def merge_product(**criteria):
        # The merge result is the one state matching `criteria` that is NOT
        # still flagged as an un-merged mapping-stage record.
        return PropertyState.objects.filter(**criteria).exclude(
            data_state=DATA_STATE_MAPPING,
            merge_state=MERGE_STATE_UNKNOWN).get()

    ps_1_plus_2 = merge_product(pm_property_id__isnull=True,
                                city='Denver',
                                address_line_1='123 Match Street')
    self.assertEqual(ps_1_plus_2.data_state, DATA_STATE_MATCHING)
    self.assertEqual(ps_1_plus_2.merge_state, MERGE_STATE_MERGED)

    ps_3_plus_4 = merge_product(pm_property_id='11111',
                                city='Philadelphia',
                                address_line_1='123 Match Street')
    self.assertEqual(ps_3_plus_4.data_state, DATA_STATE_MATCHING)
    self.assertEqual(ps_3_plus_4.merge_state, MERGE_STATE_MERGED)

    # The unmatched state was promoted as-is.
    check_flags(ps_5.id, DATA_STATE_MATCHING, MERGE_STATE_NEW)
def test_match_taxlots_if_all_default_fields_match(self):
    """A taxlot from a second ImportFile merges into one from the first.

    tls_2 matches tls_1 (same address, no jurisdiction id) so the two merge;
    tls_3 carries a new jurisdiction id and is promoted as new. End state:
    2 TaxLots/Views backed by the merge product and tls_3, with 4 states
    total (3 imported + 1 merge result).
    """
    base_details = {
        'address_line_1': '123 Match Street',
        'import_file_id': self.import_file_1.id,
        'data_state': DATA_STATE_MAPPING,
        'no_default_data': False,
    }

    # One taxlot in the first ImportFile.
    tls_1 = self.taxlot_state_factory.get_taxlot_state(**base_details)

    self.import_file_1.mapping_done = True
    self.import_file_1.save()
    geocode_and_match_buildings_task(self.import_file_1.id)

    # Second ImportFile: one state matching tls_1, one brand-new state.
    base_details['import_file_id'] = self.import_file_2.id
    base_details['city'] = 'Denver'
    tls_2 = self.taxlot_state_factory.get_taxlot_state(**base_details)

    base_details['jurisdiction_tax_lot_id'] = '11111'
    base_details['city'] = 'Philadelphia'
    tls_3 = self.taxlot_state_factory.get_taxlot_state(**base_details)

    self.import_file_2.mapping_done = True
    self.import_file_2.save()
    geocode_and_match_buildings_task(self.import_file_2.id)

    # 2 TaxLot, 2 TaxLotViews, 4 TaxLotStates (3 imported, 1 merge result)
    self.assertEqual(TaxLot.objects.count(), 2)
    self.assertEqual(TaxLotView.objects.count(), 2)
    self.assertEqual(TaxLotState.objects.count(), 4)

    views = list(TaxLotView.objects.all())
    cities_from_views = [view.state.city for view in views]
    tls_ids_from_views = [view.state_id for view in views]

    self.assertIn('Denver', cities_from_views)
    self.assertIn('Philadelphia', cities_from_views)

    # Only tls_3 and the merge product back views; tls_1/tls_2 were merged away.
    self.assertIn(tls_3.id, tls_ids_from_views)
    self.assertNotIn(tls_1.id, tls_ids_from_views)
    self.assertNotIn(tls_2.id, tls_ids_from_views)

    # Refresh the states and verify their post-match flags.
    refreshed_1 = TaxLotState.objects.get(pk=tls_1.id)
    self.assertEqual(refreshed_1.data_state, DATA_STATE_MATCHING)
    self.assertEqual(refreshed_1.merge_state, MERGE_STATE_NEW)

    refreshed_2 = TaxLotState.objects.get(pk=tls_2.id)
    self.assertEqual(refreshed_2.data_state, DATA_STATE_MATCHING)
    self.assertEqual(refreshed_2.merge_state, MERGE_STATE_UNKNOWN)

    # The merge product is the one Denver state that is not tls_2 itself.
    merge_product = TaxLotState.objects.filter(
        jurisdiction_tax_lot_id__isnull=True,
        city='Denver',
        address_line_1='123 Match Street',
    ).exclude(
        data_state=DATA_STATE_MATCHING,
        merge_state=MERGE_STATE_UNKNOWN,
    ).get()
    self.assertEqual(merge_product.data_state, DATA_STATE_MATCHING)
    self.assertEqual(merge_product.merge_state, MERGE_STATE_MERGED)

    refreshed_3 = TaxLotState.objects.get(pk=tls_3.id)
    self.assertEqual(refreshed_3.data_state, DATA_STATE_MATCHING)
    self.assertEqual(refreshed_3.merge_state, MERGE_STATE_NEW)
def test_match_taxlots_if_all_default_fields_match(self):
    """Import five TaxLotStates where two pairs share matching criteria.

    Each matching pair collapses into one merged state (3 TaxLots/Views,
    7 states total: 5 imported + 2 merge results); the merged-away originals
    keep mapping-stage flags and the unmatched fifth is promoted as new.
    """
    # NOTE(review): a method with this exact name also appears earlier in
    # this file (the import_file_1/import_file_2 variant). If both live in
    # the same TestCase class, this definition silently shadows the earlier
    # one — confirm they belong to different classes.
    base_details = {
        'address_line_1': '123 Match Street',
        'import_file_id': self.import_file.id,
        'data_state': DATA_STATE_MAPPING,
        'no_default_data': False,
    }

    # Create first set of taxlots that match each other
    tls_1 = self.taxlot_state_factory.get_taxlot_state(**base_details)
    base_details['city'] = 'Denver'
    tls_2 = self.taxlot_state_factory.get_taxlot_state(**base_details)

    # Create second set of taxlots that match each other
    base_details['jurisdiction_tax_lot_id'] = '11111'
    tls_3 = self.taxlot_state_factory.get_taxlot_state(**base_details)
    base_details['city'] = 'Philadelphia'
    tls_4 = self.taxlot_state_factory.get_taxlot_state(**base_details)

    # Create unmatched taxlot
    base_details['jurisdiction_tax_lot_id'] = '000'
    tls_5 = self.taxlot_state_factory.get_taxlot_state(**base_details)

    # set import_file mapping done so that matching can occur.
    self.import_file.mapping_done = True
    self.import_file.save()
    geocode_and_match_buildings_task(self.import_file.id)

    # 3 TaxLot, 3 TaxLotViews, 7 TaxLotStates (5 imported, 2 merge results)
    self.assertEqual(TaxLot.objects.count(), 3)
    self.assertEqual(TaxLotView.objects.count(), 3)
    self.assertEqual(TaxLotState.objects.count(), 7)

    # Refresh -States and check data_state and merge_state values
    rtls_1 = TaxLotState.objects.get(pk=tls_1.id)
    self.assertEqual(rtls_1.data_state, DATA_STATE_MAPPING)
    self.assertEqual(rtls_1.merge_state, MERGE_STATE_UNKNOWN)

    rtls_2 = TaxLotState.objects.get(pk=tls_2.id)
    self.assertEqual(rtls_2.data_state, DATA_STATE_MAPPING)
    self.assertEqual(rtls_2.merge_state, MERGE_STATE_UNKNOWN)

    # The merge product of tls_1 + tls_2 is the Denver state that is NOT
    # still flagged as an un-merged mapping-stage record.
    tls_1_plus_2 = TaxLotState.objects.filter(
        jurisdiction_tax_lot_id__isnull=True,
        city='Denver',
        address_line_1='123 Match Street').exclude(
            data_state=DATA_STATE_MAPPING,
            merge_state=MERGE_STATE_UNKNOWN).get()
    self.assertEqual(tls_1_plus_2.data_state, DATA_STATE_MATCHING)
    self.assertEqual(tls_1_plus_2.merge_state, MERGE_STATE_MERGED)

    rtls_3 = TaxLotState.objects.get(pk=tls_3.id)
    self.assertEqual(rtls_3.data_state, DATA_STATE_MAPPING)
    self.assertEqual(rtls_3.merge_state, MERGE_STATE_UNKNOWN)

    rtls_4 = TaxLotState.objects.get(pk=tls_4.id)
    self.assertEqual(rtls_4.data_state, DATA_STATE_MAPPING)
    self.assertEqual(rtls_4.merge_state, MERGE_STATE_UNKNOWN)

    # Merge product of tls_3 + tls_4, found the same way.
    tls_3_plus_4 = TaxLotState.objects.filter(
        jurisdiction_tax_lot_id='11111',
        city='Philadelphia',
        address_line_1='123 Match Street').exclude(
            data_state=DATA_STATE_MAPPING,
            merge_state=MERGE_STATE_UNKNOWN).get()
    self.assertEqual(tls_3_plus_4.data_state, DATA_STATE_MATCHING)
    self.assertEqual(tls_3_plus_4.merge_state, MERGE_STATE_MERGED)

    # The unmatched state was promoted as a brand-new record.
    rtls_5 = TaxLotState.objects.get(pk=tls_5.id)
    self.assertEqual(rtls_5.data_state, DATA_STATE_MATCHING)
    self.assertEqual(rtls_5.merge_state, MERGE_STATE_NEW)