def test_unmatch(self):
    """Unmatching a merged pair via the API restores the coparent's view."""
    # Pick the 'Club' record from the second import file
    target = PropertyState.objects.filter(
        use_description='Club',
        import_file_id=self.import_file_2,
        data_state__in=[DATA_STATE_MAPPING, DATA_STATE_MATCHING],
        merge_state__in=[MERGE_STATE_UNKNOWN, MERGE_STATE_NEW]).first()

    viewset = ImportFileViewSet()
    wanted_fields = ['id', 'extra_data', 'lot_number', 'use_description']

    # Look up the coparent of the 'Club' record to get its id
    coparent = viewset.has_coparent(target.id, 'properties', wanted_fields)

    # Before unmatching, the merged-away coparent state has no view of its own
    coparent_view = PropertyView.objects.filter(
        cycle=self.cycle, state__id=coparent['id'])
    self.assertFalse(coparent_view.exists())

    payload = {
        "inventory_type": "properties",
        "state_id": target.id,
        "coparent_id": coparent['id'],
    }
    url = reverse("api:v2:import_files-unmatch", args=[self.import_file_2.pk])
    resp = self.client.post(url, data=json.dumps(payload),
                            content_type='application/json')
    body = json.loads(resp.content)
    self.assertEqual(body['status'], 'success')

    # After unmatching, the coparent state is attached to a view again
    self.assertTrue(coparent_view.exists())
def test_unmatch(self):
    """The coparent of a merged 'Club' record is not attached to any view."""
    # NOTE(review): another method with this exact name appears elsewhere in
    # this file; if both end up in the same class only the last definition
    # runs — confirm and rename one of them.
    target = PropertyState.objects.filter(
        use_description='Club',
        import_file_id=self.import_file_2,
        data_state__in=[DATA_STATE_MAPPING, DATA_STATE_MATCHING],
        merge_state__in=[MERGE_STATE_UNKNOWN, MERGE_STATE_NEW]).first()

    viewset = ImportFileViewSet()
    requested = ['id', 'extra_data', 'lot_number', 'use_description']

    # Look up the coparent of the 'Club' record to get its id
    coparent = viewset.has_coparent(target.id, 'properties', requested)

    # The merged-away coparent state should have no view of its own
    orphaned = PropertyView.objects.filter(
        cycle=self.cycle, state__id=coparent['id'])
    self.assertFalse(orphaned.exists())
def test_get_coparents(self):
    """has_coparent returns the matched state's fields for 'Pizza House'."""
    # Locate a record known to have a coparent
    target = PropertyState.objects.filter(
        use_description='Pizza House',
        import_file_id=self.import_file_2,
        data_state__in=[DATA_STATE_MAPPING, DATA_STATE_MATCHING],
        merge_state__in=[MERGE_STATE_UNKNOWN, MERGE_STATE_NEW]).first()

    viewset = ImportFileViewSet()
    requested = ['id', 'extra_data', 'lot_number', 'use_description']
    coparent = viewset.has_coparent(target.id, 'properties', requested)

    # Spot-check a handful of fields from the matched coparent state
    expected = {
        'lot_number': u'11160509',
        'gross_floor_area': 23543.0,
        'owner_telephone': u'213-546-9755',
        'energy_score': 63,
        'use_description': 'Retail',
    }
    self.assertDictContainsSubset(expected, coparent)
def merge(self, request):
    """
    Merge multiple tax lot records into a single new record, and run this
    new record through a match and merge round within it's current Cycle.
    ---
    parameters:
        - name: organization_id
          description: The organization_id for this user's organization
          required: true
          paramType: query
        - name: state_ids
          description: Array containing tax lot state ids to merge
          paramType: body
    """
    body = request.data
    state_ids = body.get('state_ids', [])
    # NOTE(review): int(None) raises TypeError when organization_id is
    # missing from the query string — confirm whether a 400 response was
    # intended instead.
    organization_id = int(request.query_params.get('organization_id', None))

    # Check the number of state_ids to merge
    if len(state_ids) < 2:
        return JsonResponse({
            'status': 'error',
            'message': 'At least two ids are necessary to merge'
        }, status=status.HTTP_400_BAD_REQUEST)

    # Make sure the state isn't already matched
    # NOTE(review): this endpoint merges tax lots but queries has_coparent
    # with 'properties' — confirm whether 'taxlots' was intended.
    for state_id in state_ids:
        if ImportFileViewSet.has_coparent(state_id, 'properties'):
            # str() is required: state_ids come from a JSON body and are
            # ints, so bare concatenation would raise TypeError here.
            return JsonResponse({
                'status': 'error',
                'message': 'Source state [' + str(state_id) + '] is already matched'
            }, status=status.HTTP_400_BAD_REQUEST)

    # Merge all provided states into one new state record
    merged_state = merge_taxlots(state_ids, organization_id, 'Manual Match')

    # Run the merged record through match/merge within its current cycle
    count, view_id = match_merge_in_cycle(
        merged_state.taxlotview_set.first().id, 'TaxLotState')

    result = {'status': 'success'}

    if view_id is not None:
        result.update({'match_merged_count': count})

    return result
def merge(self, request):
    """
    Merge multiple tax lot records into a single new record.

    For each successive pair of states the fields of the later state win
    (``default=state2``); the old views, TaxLot records and pairings are
    deleted and recreated against one new TaxLot/TaxLotView, carrying over
    labels, notes (with original timestamps) and property pairings.
    ---
    parameters:
        - name: organization_id
          description: The organization_id for this user's organization
          required: true
          paramType: query
        - name: state_ids
          description: Array containing tax lot state ids to merge
          paramType: body
    """
    body = request.data
    state_ids = body.get('state_ids', [])
    # NOTE(review): int(None) raises TypeError when organization_id is
    # missing from the query string — confirm whether a 400 response was
    # intended instead.
    organization_id = int(request.query_params.get('organization_id', None))

    # Check the number of state_ids to merge
    if len(state_ids) < 2:
        return JsonResponse({
            'status': 'error',
            'message': 'At least two ids are necessary to merge'
        }, status=status.HTTP_400_BAD_REQUEST)

    # Make sure the state isn't already matched
    # NOTE(review): this endpoint merges tax lots but queries has_coparent
    # with 'properties' — confirm whether 'taxlots' was intended.
    for state_id in state_ids:
        if ImportFileViewSet.has_coparent(state_id, 'properties'):
            # str() is required: state_ids come from a JSON body and are
            # ints, so bare concatenation would raise TypeError here.
            return JsonResponse({
                'status': 'error',
                'message': 'Source state [' + str(state_id) + '] is already matched'
            }, status=status.HTTP_400_BAD_REQUEST)

    audit_log = TaxLotAuditLog
    inventory = TaxLot
    label = apps.get_model('seed', 'TaxLot_labels')
    state = TaxLotState
    view = TaxLotView

    index = 1
    merged_state = None
    while index < len(state_ids):
        # state 1 is the base, state 2 is merged on top of state 1
        # Use index 0 the first time through, merged_state from then on
        if index == 1:
            state1 = state.objects.get(id=state_ids[index - 1])
        else:
            state1 = merged_state
        state2 = state.objects.get(id=state_ids[index])

        merged_state = state.objects.create(organization_id=organization_id)
        merged_state = merging.merge_state(merged_state, state1, state2,
                                           merging.get_state_attrs([state1, state2]),
                                           default=state2)

        state_1_audit_log = audit_log.objects.filter(state=state1).first()
        state_2_audit_log = audit_log.objects.filter(state=state2).first()

        # Record the merge in the audit trail with both parents
        audit_log.objects.create(organization=state1.organization,
                                 parent1=state_1_audit_log,
                                 parent2=state_2_audit_log,
                                 parent_state1=state1,
                                 parent_state2=state2,
                                 state=merged_state,
                                 name='Manual Match',
                                 description='Automatic Merge',
                                 import_filename=None,
                                 record_type=AUDIT_IMPORT)

        # Set the merged_state to merged
        merged_state.data_state = DATA_STATE_MATCHING
        merged_state.merge_state = MERGE_STATE_MERGED
        merged_state.save()

        state1.merge_state = MERGE_STATE_UNKNOWN
        state1.save()
        state2.merge_state = MERGE_STATE_UNKNOWN
        state2.save()

        # Delete existing views and inventory records
        views = view.objects.filter(state_id__in=[state1.id, state2.id])
        view_ids = list(views.values_list('id', flat=True))

        # Find unique notes
        notes = list(Note.objects.values(
            'name', 'note_type', 'text', 'log_data', 'created', 'updated',
            'organization_id', 'user_id'
        ).filter(taxlot_view_id__in=view_ids).distinct())

        cycle_id = views.first().cycle_id
        label_ids = []

        # Get paired view ids
        paired_view_ids = list(TaxLotProperty.objects.filter(taxlot_view_id__in=view_ids)
                               .order_by('property_view_id')
                               .distinct('property_view_id')
                               .values_list('property_view_id', flat=True))

        # Collect labels from the old TaxLot records, then drop them;
        # the views are removed by the cascading delete
        for v in views:
            label_ids.extend(list(v.taxlot.labels.all().values_list('id', flat=True)))
            v.taxlot.delete()
        label_ids = list(set(label_ids))

        # Create new inventory record
        inventory_record = inventory(organization_id=organization_id)
        inventory_record.save()

        # Create new labels and view
        for label_id in label_ids:
            label(taxlot_id=inventory_record.id, statuslabel_id=label_id).save()
        new_view = view(cycle_id=cycle_id, state_id=merged_state.id,
                        taxlot_id=inventory_record.id)
        new_view.save()

        # Assign notes to the new view
        for note in notes:
            note['taxlot_view'] = new_view
            n = Note(**note)
            n.save()
            # Correct the created and updated times to match the original note
            Note.objects.filter(id=n.id).update(created=note['created'],
                                                updated=note['updated'])

        # Delete existing pairs and re-pair all to new view
        # Probably already deleted by cascade
        TaxLotProperty.objects.filter(taxlot_view_id__in=view_ids).delete()
        for paired_view_id in paired_view_ids:
            TaxLotProperty(primary=True,
                           cycle_id=cycle_id,
                           property_view_id=paired_view_id,
                           taxlot_view_id=new_view.id).save()

        index += 1

    return {'status': 'success'}