def test_match_merge_in_cycle_ignores_taxlots_with_unpopulated_matching_criteria(self):
    base_details = {
        'city': 'Golden',
        'import_file_id': self.import_file_1.id,
        'data_state': DATA_STATE_MAPPING,
        'no_default_data': False,
    }

    # Create 3 non-duplicate taxlots with unpopulated matching criteria
    tls_1 = self.taxlot_state_factory.get_taxlot_state(**base_details)

    base_details['city'] = 'Denver'
    tls_2 = self.taxlot_state_factory.get_taxlot_state(**base_details)

    base_details['city'] = 'Philadelphia'
    tls_3 = self.taxlot_state_factory.get_taxlot_state(**base_details)

    self.import_file_1.mapping_done = True
    self.import_file_1.save()
    match_buildings(self.import_file_1.id)

    # Verify no match merges happen
    tls_1_view = TaxLotView.objects.get(state_id=tls_1.id)
    count_result, no_match_indicator = match_merge_in_cycle(tls_1_view.id, 'TaxLotState')
    self.assertEqual(count_result, 0)
    self.assertIsNone(no_match_indicator)

    self.assertEqual(TaxLot.objects.count(), 3)
    self.assertEqual(TaxLotState.objects.count(), 3)
    self.assertEqual(TaxLotView.objects.count(), 3)

    state_ids = list(TaxLotView.objects.all().values_list('state_id', flat=True))
    self.assertCountEqual([tls_1.id, tls_2.id, tls_3.id], state_ids)
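# Hedged sketch (not part of the original test suite): the tests above and below treat
# match_merge_in_cycle() as returning a (merge_count, target_view_id) tuple, where the
# second element is None when no merge occurred. A hypothetical helper built only on that
# observed contract might look like this; the helper name and message strings are
# illustrative assumptions, and match_merge_in_cycle is assumed importable as in the tests.
def _describe_cycle_merge_result(view_id, state_class_name):
    count, merged_view_id = match_merge_in_cycle(view_id, state_class_name)
    if merged_view_id is None:
        # Mirrors the (0, None) case asserted in the test above.
        return 'No merges occurred for view {}'.format(view_id)
    return '{} -States rolled up into view {}'.format(count, merged_view_id)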
def merge(self, request):
    """
    Merge multiple tax lot records into a single new record, and run this
    new record through a match and merge round within its current Cycle.
    ---
    parameters:
        - name: organization_id
          description: The organization_id for this user's organization
          required: true
          paramType: query
        - name: state_ids
          description: Array containing tax lot state ids to merge
          paramType: body
    """
    body = request.data
    state_ids = body.get('state_ids', [])
    organization_id = int(request.query_params.get('organization_id', None))

    # Check the number of state_ids to merge
    if len(state_ids) < 2:
        return JsonResponse({
            'status': 'error',
            'message': 'At least two ids are necessary to merge'
        }, status=status.HTTP_400_BAD_REQUEST)

    # Make sure none of the states are already matched
    for state_id in state_ids:
        if ImportFileViewSet.has_coparent(state_id, 'taxlots'):
            return JsonResponse({
                'status': 'error',
                'message': 'Source state [{}] is already matched'.format(state_id)
            }, status=status.HTTP_400_BAD_REQUEST)

    merged_state = merge_taxlots(state_ids, organization_id, 'Manual Match')

    count, view_id = match_merge_in_cycle(merged_state.taxlotview_set.first().id, 'TaxLotState')

    result = {'status': 'success'}
    if view_id is not None:
        result.update({'match_merged_count': count})

    return JsonResponse(result)
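# Hedged usage sketch (assumption, not part of the original view code): exercising the
# merge endpoint above through Django's test client. The URL name 'api:v2:taxlots-merge'
# is inferred from the 'api:v2:taxlots-unmerge' reverse() calls used in the tests, and
# self.tls_1 / self.tls_2 are assumed fixtures for two already-imported, unmatched
# TaxLotStates; treat all of these names as assumptions. The response keys mirror the
# 'result' dict built in the view above.
def test_merge_endpoint_sketch(self):
    # Assumes `json` and `reverse` are imported as elsewhere in the test module.
    url = reverse('api:v2:taxlots-merge') + '?organization_id={}'.format(self.org.pk)
    response = self.client.post(
        url,
        data=json.dumps({'state_ids': [self.tls_1.id, self.tls_2.id]}),
        content_type='application/json',
    )
    self.assertEqual(response.json()['status'], 'success')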
def update(self, request, pk):
    """
    Update a taxlot and run the updated record through a match and merge
    round within its current Cycle.
    ---
    parameters:
        - name: organization_id
          description: The organization_id for this user's organization
          required: true
          paramType: query
    """
    data = request.data

    result = self._get_taxlot_view(pk)
    if result.get('status', 'error') != 'error':
        taxlot_view = result.pop('taxlot_view')
        taxlot_state_data = TaxLotStateSerializer(taxlot_view.state).data

        # get the taxlot state information from the request
        new_taxlot_state_data = data['state']

        # set empty strings to None
        for key, val in new_taxlot_state_data.items():
            if val == '':
                new_taxlot_state_data[key] = None

        changed_fields = get_changed_fields(taxlot_state_data, new_taxlot_state_data)
        if not changed_fields:
            result.update({
                'status': 'success',
                'message': 'Records are identical'
            })
            return JsonResponse(result, status=status.HTTP_204_NO_CONTENT)
        else:
            # Not sure why we are going through the pain of logging this all right now... need to
            # reevaluate this.
            log = TaxLotAuditLog.objects.select_related().filter(
                state=taxlot_view.state).order_by('-id').first()

            if 'extra_data' in new_taxlot_state_data:
                taxlot_state_data['extra_data'].update(new_taxlot_state_data.pop('extra_data'))
            taxlot_state_data.update(new_taxlot_state_data)

            if log.name == 'Import Creation':
                # Add new state by removing the existing ID.
                taxlot_state_data.pop('id')
                new_taxlot_state_serializer = TaxLotStateSerializer(data=taxlot_state_data)
                if new_taxlot_state_serializer.is_valid():
                    # create the new taxlot state, and perform an initial save / moving relationships
                    new_state = new_taxlot_state_serializer.save()

                    # then assign this state to the taxlot view and save the whole view
                    taxlot_view.state = new_state
                    taxlot_view.save()

                    TaxLotAuditLog.objects.create(organization=log.organization,
                                                  parent1=log,
                                                  parent2=None,
                                                  parent_state1=log.state,
                                                  parent_state2=None,
                                                  state=new_state,
                                                  name='Manual Edit',
                                                  description=None,
                                                  import_filename=log.import_filename,
                                                  record_type=AUDIT_USER_EDIT)

                    result.update({'state': new_taxlot_state_serializer.data})

                    # save the taxlot view so that the datetime gets updated on the taxlot.
                    taxlot_view.save()

                    count, view_id = match_merge_in_cycle(taxlot_view.id, 'TaxLotState')
                    if view_id is not None:
                        result.update({
                            'view_id': view_id,
                            'match_merged_count': count,
                        })

                    return JsonResponse(result, status=status.HTTP_200_OK)
                else:
                    result.update({
                        'status': 'error',
                        'message': 'Invalid update data with errors: {}'.format(
                            new_taxlot_state_serializer.errors)
                    })
                    return JsonResponse(result, status=status.HTTP_422_UNPROCESSABLE_ENTITY)
            elif log.name in ['Manual Edit', 'Manual Match', 'System Match',
                              'Merge current state in migration']:
                # Convert this to using the serializer to save the data. This will override the
                # previous values in the state object.

                # Note: We should be able to use partial update here and pass in the changed
                # fields instead of the entire state_data.
                updated_taxlot_state_serializer = TaxLotStateSerializer(
                    taxlot_view.state, data=taxlot_state_data)
                if updated_taxlot_state_serializer.is_valid():
                    # update the existing taxlot state in place
                    updated_taxlot_state_serializer.save()

                    result.update({'state': updated_taxlot_state_serializer.data})

                    # save the taxlot view so that the datetime gets updated on the taxlot.
                    taxlot_view.save()

                    count, view_id = match_merge_in_cycle(taxlot_view.id, 'TaxLotState')
                    if view_id is not None:
                        result.update({
                            'view_id': view_id,
                            'match_merged_count': count,
                        })

                    return JsonResponse(result, status=status.HTTP_200_OK)
                else:
                    result.update({
                        'status': 'error',
                        'message': 'Invalid update data with errors: {}'.format(
                            updated_taxlot_state_serializer.errors)
                    })
                    return JsonResponse(result, status=status.HTTP_422_UNPROCESSABLE_ENTITY)
            else:
                result = {
                    'status': 'error',
                    'message': 'Unrecognized audit log name: ' + log.name
                }
                return JsonResponse(result, status=status.HTTP_422_UNPROCESSABLE_ENTITY)
    else:
        return JsonResponse(result, status=status.HTTP_404_NOT_FOUND)
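# Hedged usage sketch (assumption): driving the taxlot update() flow above through Django's
# test client. The URL name 'api:v2:taxlots-detail' and the self.taxlot_view fixture are
# illustrative assumptions; the request/response shape ('state' payload, optional
# 'view_id' / 'match_merged_count') comes from the view code above.
def test_update_endpoint_sketch(self):
    url = reverse('api:v2:taxlots-detail', args=[self.taxlot_view.id]) + \
        '?organization_id={}'.format(self.org.pk)
    response = self.client.put(
        url,
        data=json.dumps({'state': {'city': 'New City Value'}}),
        content_type='application/json',
    )
    self.assertEqual(response.status_code, 200)
    body = response.json()
    # 'view_id' / 'match_merged_count' only appear when the edit triggered an in-Cycle merge.
    if 'match_merged_count' in body:
        self.assertGreaterEqual(body['match_merged_count'], 1)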
def update(self, request, pk=None):
    """
    Update a property and run the updated record through a match and merge
    round within its current Cycle.

    - look up the property view
    - serialize its current PropertyState
    - build a hash with all the same keys as the original property state
    - check if any fields have changed
    - if nothing has changed, return a 204 (there is nothing to update)
    - get the most recent property audit log for this property state
    - if the new property state has extra_data, the original extra_data is update()'d
    - the audit log name decides how the edit is applied: 'Import Creation'
      creates a new state, while manual/system edit names update the state in place
    - create a serializer for the new property state
    - if it's valid, save this new serialized data to the db
    - assign it to the original property view and save the property view
    - create a new property audit log for this change
    - return a 200 if created
    ---
    parameters:
        - name: organization_id
          description: The organization_id for this user's organization
          required: true
          paramType: query
    """
    data = request.data

    result = self._get_property_view(pk)
    if result.get('status', None) != 'error':
        property_view = result.pop('property_view')
        property_state_data = PropertyStateSerializer(property_view.state).data

        # get the property state information from the request
        new_property_state_data = data['state']

        # set empty strings to None
        for key, val in new_property_state_data.items():
            if val == '':
                new_property_state_data[key] = None

        changed_fields = get_changed_fields(property_state_data, new_property_state_data)
        if not changed_fields:
            result.update({
                'status': 'success',
                'message': 'Records are identical'
            })
            return JsonResponse(result, status=status.HTTP_204_NO_CONTENT)
        else:
            # Not sure why we are going through the pain of logging this all right now... need to
            # reevaluate this.
            log = PropertyAuditLog.objects.select_related().filter(
                state=property_view.state).order_by('-id').first()

            if 'extra_data' in new_property_state_data:
                property_state_data['extra_data'].update(new_property_state_data.pop('extra_data'))
            property_state_data.update(new_property_state_data)

            if log.name == 'Import Creation':
                # Add new state by removing the existing ID.
                property_state_data.pop('id')
                new_property_state_serializer = PropertyStateSerializer(data=property_state_data)
                if new_property_state_serializer.is_valid():
                    # create the new property state, and perform an initial save / moving relationships
                    new_state = new_property_state_serializer.save()

                    # Since we are creating a new relationship when we are manually editing the
                    # Properties, we need to move the relationships over to the new manually
                    # edited record.
                    new_state = self._move_relationships(property_view.state, new_state)
                    new_state.save()

                    # then assign this state to the property view and save the whole view
                    property_view.state = new_state
                    property_view.save()

                    PropertyAuditLog.objects.create(organization=log.organization,
                                                    parent1=log,
                                                    parent2=None,
                                                    parent_state1=log.state,
                                                    parent_state2=None,
                                                    state=new_state,
                                                    name='Manual Edit',
                                                    description=None,
                                                    import_filename=log.import_filename,
                                                    record_type=AUDIT_USER_EDIT)

                    result.update({'state': new_property_state_serializer.data})

                    # save the property view so that the datetime gets updated on the property.
                    property_view.save()

                    count, view_id = match_merge_in_cycle(property_view.id, 'PropertyState')
                    if view_id is not None:
                        result.update({
                            'view_id': view_id,
                            'match_merged_count': count,
                        })

                    return JsonResponse(result, encoder=PintJSONEncoder,
                                        status=status.HTTP_200_OK)
                else:
                    result.update({
                        'status': 'error',
                        'message': 'Invalid update data with errors: {}'.format(
                            new_property_state_serializer.errors)
                    })
                    return JsonResponse(result, encoder=PintJSONEncoder,
                                        status=status.HTTP_422_UNPROCESSABLE_ENTITY)
            elif log.name in ['Manual Edit', 'Manual Match', 'System Match',
                              'Merge current state in migration']:
                # Convert this to using the serializer to save the data. This will override the
                # previous values in the state object.

                # Note: We should be able to use partial update here and pass in the changed
                # fields instead of the entire state_data.
                updated_property_state_serializer = PropertyStateSerializer(
                    property_view.state, data=property_state_data)
                if updated_property_state_serializer.is_valid():
                    # update the existing property state in place
                    updated_property_state_serializer.save()

                    result.update({'state': updated_property_state_serializer.data})

                    # save the property view so that the datetime gets updated on the property.
                    property_view.save()

                    count, view_id = match_merge_in_cycle(property_view.id, 'PropertyState')
                    if view_id is not None:
                        result.update({
                            'view_id': view_id,
                            'match_merged_count': count,
                        })

                    return JsonResponse(result, encoder=PintJSONEncoder,
                                        status=status.HTTP_200_OK)
                else:
                    result.update({
                        'status': 'error',
                        'message': 'Invalid update data with errors: {}'.format(
                            updated_property_state_serializer.errors)
                    })
                    return JsonResponse(result, encoder=PintJSONEncoder,
                                        status=status.HTTP_422_UNPROCESSABLE_ENTITY)
            else:
                result = {
                    'status': 'error',
                    'message': 'Unrecognized audit log name: ' + log.name
                }
                return JsonResponse(result, status=status.HTTP_422_UNPROCESSABLE_ENTITY)
    else:
        return JsonResponse(result, status=status.HTTP_404_NOT_FOUND)
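# Hedged sketch (assumption, NOT SEED's actual helper): both update() methods above rely on
# get_changed_fields(old, new) returning something falsy when the submitted 'state' payload
# would not change the stored record, which is what sends the request down the
# 'Records are identical' / HTTP 204 branch. A simplified stand-in that captures only the
# behavior the views depend on might look like this; the real SEED implementation may differ
# (e.g. in how it treats extra_data or ignored bookkeeping fields).
def _changed_fields_stand_in(old_state_data, new_state_data):
    changed = []
    for key, new_value in new_state_data.items():
        # Only keys present in the submitted payload are compared; empty strings have
        # already been normalized to None by the views above.
        if old_state_data.get(key) != new_value:
            changed.append(key)
    return changed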
def test_match_merge_in_cycle_rolls_up_existing_taxlot_matches_in_updated_state_order_with_final_priority_given_to_selected_taxlot(self):
    """
    Import 4 non-matching records, each with a different city and a different
    'state_order' extra data value.

    Create a Column record for 'state_order', and enable merge protection on
    the 'city' column.

    Make the records match by refreshing and saving each -State in a chosen
    order, so that the 'updated' timestamps establish the expected rollup order.

    Run match_merge_in_cycle, then unmerge records to unravel and reveal the
    merge order.
    """
    base_details = {
        'jurisdiction_tax_lot_id': '123MatchID',
        'city': '1st Oldest City',
        'import_file_id': self.import_file_1.id,
        'data_state': DATA_STATE_MAPPING,
        'no_default_data': False,
        'extra_data': {
            'state_order': 'first',
        },
    }
    tls_1 = self.taxlot_state_factory.get_taxlot_state(**base_details)

    base_details['jurisdiction_tax_lot_id'] = '123DifferentID'
    base_details['city'] = '2nd Oldest City'
    base_details['extra_data']['state_order'] = 'second'
    tls_2 = self.taxlot_state_factory.get_taxlot_state(**base_details)

    base_details['jurisdiction_tax_lot_id'] = '456DifferentID'
    base_details['city'] = '3rd Oldest City'
    base_details['extra_data']['state_order'] = 'third'
    tls_3 = self.taxlot_state_factory.get_taxlot_state(**base_details)

    base_details['jurisdiction_tax_lot_id'] = '789DifferentID'
    base_details['city'] = '4th Oldest City'
    base_details['extra_data']['state_order'] = 'fourth'
    tls_4 = self.taxlot_state_factory.get_taxlot_state(**base_details)

    self.import_file_1.mapping_done = True
    self.import_file_1.save()
    match_buildings(self.import_file_1.id)

    # Create (ED) 'state_order' column and enable merge protection on the 'city' column
    self.org.column_set.create(
        column_name='state_order',
        is_extra_data=True,
        table_name='TaxLotState',
        merge_protection=Column.COLUMN_MERGE_FAVOR_EXISTING
    )
    self.org.column_set.filter(column_name='city', table_name='TaxLotState').update(
        merge_protection=Column.COLUMN_MERGE_FAVOR_EXISTING
    )

    # Update -States to make the roll up order be 4, 2, 3
    refreshed_tls_4 = TaxLotState.objects.get(id=tls_4.id)
    refreshed_tls_4.jurisdiction_tax_lot_id = '123MatchID'
    refreshed_tls_4.save()

    refreshed_tls_2 = TaxLotState.objects.get(id=tls_2.id)
    refreshed_tls_2.jurisdiction_tax_lot_id = '123MatchID'
    refreshed_tls_2.save()

    refreshed_tls_3 = TaxLotState.objects.get(id=tls_3.id)
    refreshed_tls_3.jurisdiction_tax_lot_id = '123MatchID'
    refreshed_tls_3.save()

    # Run match_merge_in_cycle, giving final priority to the selected -View's -State
    manual_merge_view = TaxLotView.objects.get(state_id=tls_1.id)
    count_result, view_id_result = match_merge_in_cycle(manual_merge_view.id, 'TaxLotState')
    self.assertEqual(count_result, 4)

    """
    Verify everything's rolled up to one -View with precedence given to the
    manual merge -View with '1st Oldest City'.
    '1st Oldest City' is expected to be the final City value since this rollup
    should ignore Merge Protection.
    """
    self.assertEqual(TaxLotView.objects.count(), 1)
    only_view = TaxLotView.objects.get()
    self.assertEqual(only_view.state.city, '1st Oldest City')
    self.assertEqual(only_view.state.extra_data['state_order'], 'first')

    """
    Undoing 1 rollup merge should expose a set -State having '3rd Oldest City'
    and state_order of 'third'.
    """
    rollback_unmerge_url_1 = reverse(
        'api:v2:taxlots-unmerge',
        args=[only_view.id]) + '?organization_id={}'.format(self.org.pk)
    self.client.post(rollback_unmerge_url_1, content_type='application/json')

    rollback_view_1 = TaxLotView.objects.prefetch_related('state').exclude(
        state__city='1st Oldest City').get()
    self.assertEqual(rollback_view_1.state.city, '3rd Oldest City')
    self.assertEqual(rollback_view_1.state.extra_data['state_order'], 'third')

    """
    Undoing another rollup merge should expose a set -State having
    '2nd Oldest City' and state_order of 'second'.
    """
    rollback_unmerge_url_2 = reverse(
        'api:v2:taxlots-unmerge',
        args=[rollback_view_1.id]) + '?organization_id={}'.format(self.org.pk)
    self.client.post(rollback_unmerge_url_2, content_type='application/json')

    rollback_view_2 = TaxLotView.objects.prefetch_related('state').exclude(
        state__city__in=['1st Oldest City', '3rd Oldest City']).get()
    self.assertEqual(rollback_view_2.state.city, '2nd Oldest City')
    self.assertEqual(rollback_view_2.state.extra_data['state_order'], 'second')