def test_merge_geocoding_ignore_merge_protection(self):
    """
    When merging records with geocoding columns including the
    ignore_merge_protection flag as True always takes the "new" state's
    geocoding results regardless of geocoding columns merge protection
    setting.
    """
    existing_view = self.property_view_factory.get_property_view(
        address_line_1='original_address',
        geocoding_confidence='Low - check address (Z1XAA)',
    )
    new_view = self.property_view_factory.get_property_view(
        address_line_1='new_address',
        latitude=39.765251,
        longitude=-104.986138,
        geocoding_confidence='High (P1AAA)',
        long_lat='POINT (-104.986138 39.765251)',
    )

    # Column priorities while purposely leaving out long_lat (as it's not available to users)
    priorities = {
        'address_line_1': 'Favor New',
        'geocoding_confidence': 'Favor Existing',
        'latitude': 'Favor New',
        'longitude': 'Favor New',
        'extra_data': {},
    }

    merged = merging.merge_state(
        existing_view.state, existing_view.state, new_view.state, priorities, True
    )

    # Merge protection is ignored, so the "new" geocoding results win outright.
    self.assertEqual(merged.geocoding_confidence, 'High (P1AAA)')
    self.assertEqual(merged.latitude, 39.765251)
    self.assertEqual(merged.longitude, -104.986138)
    self.assertEqual(long_lat_wkt(merged), 'POINT (-104.986138 39.765251)')
def test_merge_geocoding_results_no_merge_protection_unpopulated_existing_state(self):
    """
    When merging records with geocoding columns that have merge protection
    active, if only one record has geocoding results, always take the
    geocoding results from the one record, regardless of merge protection
    settings.
    """
    geocoded_view = self.property_view_factory.get_property_view(
        address_line_1='original_address',
        geocoding_confidence='Low - check address (Z1XAA)',
    )
    ungeocoded_view = self.property_view_factory.get_property_view(
        address_line_1='new_address',
    )

    # Column priorities while purposely leaving out long_lat (as it's not available to users)
    priorities = {
        'address_line_1': 'Favor New',
        'geocoding_confidence': 'Favor New',
        'latitude': 'Favor New',
        'longitude': 'Favor New',
        'extra_data': {},
    }

    merged = merging.merge_state(
        geocoded_view.state, geocoded_view.state, ungeocoded_view.state, priorities
    )

    # Only the "existing" record had geocoding results, so those survive the merge.
    self.assertEqual(merged.geocoding_confidence, 'Low - check address (Z1XAA)')
    self.assertIsNone(merged.latitude)
    self.assertIsNone(merged.longitude)
    self.assertIsNone(merged.long_lat)
def test_merge_geocoding_results_with_merge_protection(self):
    """
    When merging records that have geocoding results, if any of the geocoding
    results columns have merge protection, and both records have some form of
    geocoding results, completely "take" the results from the "existing"
    state.
    """
    existing_view = self.property_view_factory.get_property_view(
        address_line_1='original_address',
        latitude=39.765251,
        longitude=-104.986138,
        geocoding_confidence='High (P1AAA)',
        long_lat='POINT (-104.986138 39.765251)',
    )
    new_view = self.property_view_factory.get_property_view(
        address_line_1='new_address',
        geocoding_confidence='Low - check address (Z1XAA)',
    )

    # Column priorities while purposely leaving out long_lat (as it's not available to users)
    priorities = {
        'address_line_1': 'Favor New',
        'geocoding_confidence': 'Favor Existing',
        'latitude': 'Favor New',
        'longitude': 'Favor New',
        'extra_data': {},
    }

    merged = merging.merge_state(
        existing_view.state, existing_view.state, new_view.state, priorities
    )

    # The protected "existing" geocoding results are carried over wholesale.
    self.assertEqual(merged.geocoding_confidence, 'High (P1AAA)')
    self.assertEqual(merged.latitude, 39.765251)
    self.assertEqual(merged.longitude, -104.986138)
    self.assertEqual(long_lat_wkt(merged), 'POINT (-104.986138 39.765251)')
def test_recognize_empty_and_favor_new_column_settings_together(self):
    """recognize_empty combined with "Favor Existing" keeps the newer state's
    values, including its explicit empties."""
    # create 2 records
    older_view = self.property_view_factory.get_property_view(
        address_line_1='original_address',
        energy_score=None,
        extra_data={
            'ed_field_1': 'ed_original_value',
            'ed_field_2': None
        }
    )
    newer_view = self.property_view_factory.get_property_view(
        address_line_1=None,
        energy_score=86,
        extra_data={
            'ed_field_1': None,
            'ed_field_2': 'ED eighty-six'
        }
    )

    # Update and create columns with recognize_empty = True
    self.org.column_set.filter(
        table_name='PropertyState',
        column_name__in=['address_line_1', 'energy_score']
    ).update(recognize_empty=True)
    for ed_column_name in ('ed_field_1', 'ed_field_2'):
        Column.objects.create(
            column_name=ed_column_name,
            table_name='PropertyState',
            organization=self.org,
            is_extra_data=True,
            recognize_empty=True
        )

    # Treat newer_view.state as "newer" and favor existing for all priorities
    priorities = {
        'address_line_1': 'Favor Existing',
        'energy_score': 'Favor Existing',
        'extra_data': {
            'ed_field_1': 'Favor Existing',
            'ed_field_2': 'Favor Existing'
        }
    }

    merged = merging.merge_state(
        newer_view.state, newer_view.state, older_view.state, priorities
    )

    # should be all the values from the newer state
    self.assertIsNone(merged.address_line_1)
    self.assertEqual(merged.energy_score, 86)
    self.assertIsNone(merged.extra_data['ed_field_1'])
    self.assertEqual(merged.extra_data['ed_field_2'], 'ED eighty-six')
def test_recognize_empty_column_setting_allows_empty_values_to_overwrite_nonempty_values(self):
    """With recognize_empty set, a newer record's explicit empties replace
    populated values from the existing record under default priorities."""
    # create 2 records
    newer_view = self.property_view_factory.get_property_view(
        address_line_1='original_address',
        energy_score=None,
        extra_data={
            'ed_field_1': 'ed_original_value',
            'ed_field_2': None
        }
    )
    existing_view = self.property_view_factory.get_property_view(
        address_line_1=None,
        energy_score=86,
        extra_data={
            'ed_field_1': None,
            'ed_field_2': 'ED eighty-six'
        }
    )

    # Update and create columns with recognize_empty = True
    self.org.column_set.filter(
        table_name='PropertyState',
        column_name__in=['address_line_1', 'energy_score']
    ).update(recognize_empty=True)
    for ed_column_name in ('ed_field_1', 'ed_field_2'):
        Column.objects.create(
            column_name=ed_column_name,
            table_name='PropertyState',
            organization=self.org,
            is_extra_data=True,
            recognize_empty=True
        )

    # Treat newer_view.state as "newer"; no explicit priorities given
    merged = merging.merge_state(
        existing_view.state, existing_view.state, newer_view.state, {'extra_data': {}}
    )

    # should be all the values from the newer state
    self.assertEqual(merged.address_line_1, 'original_address')
    self.assertIsNone(merged.energy_score)
    self.assertEqual(merged.extra_data['ed_field_1'], 'ed_original_value')
    self.assertIsNone(merged.extra_data['ed_field_2'])
def test_merge_state_favor_existing(self):
    """Columns marked "Favor Existing" keep the existing record's values; a
    column with no priority falls through to the new record's value."""
    existing_view = self.property_view_factory.get_property_view(
        address_line_1='original_address',
        address_line_2='orig',
        extra_data={'field_1': 'orig_value'}
    )
    new_view = self.property_view_factory.get_property_view(
        address_line_1='new_address',
        address_line_2='new',
        extra_data={'field_1': 'new_value'}
    )

    # Do not set priority for address_line_2 to make sure that it chooses the
    # new value by default (verified by the address_line_2 assertion below).
    priorities = {
        'address_line_1': 'Favor Existing',
        'extra_data': {'field_1': 'Favor Existing'}
    }

    merged = merging.merge_state(
        existing_view.state, existing_view.state, new_view.state, priorities
    )

    self.assertEqual(merged.address_line_1, 'original_address')
    self.assertEqual(merged.address_line_2, 'new')
    self.assertEqual(merged.extra_data['field_1'], 'orig_value')
def _merge_log_states(org_id, state_1, state_2, log_name, ignore_merge_protection):
    """Merge state_2 onto state_1 into a brand-new state and audit-log it.

    :param org_id: int, organization id the merged state belongs to
    :param state_1: PropertyState or TaxLotState, the "existing" state
    :param state_2: same class as state_1, the "new" state merged on top
    :param log_name: str, name recorded on the audit log entry
    :param ignore_merge_protection: bool, passed through to merging.merge_state
    :return: the newly created, merged state
    """
    # Property vs TaxLot is decided once from the type of the first state.
    is_property = isinstance(state_1, PropertyState)
    state_class = PropertyState if is_property else TaxLotState
    audit_log_class = PropertyAuditLog if is_property else TaxLotAuditLog

    priorities = Column.retrieve_priorities(org_id)
    merged_state = state_class.objects.create(organization_id=org_id)
    merged_state = merging.merge_state(
        merged_state, state_1, state_2,
        priorities[state_class.__name__], ignore_merge_protection
    )

    # Record the merge, linking the new state to both parents' latest logs.
    audit_log_class.objects.create(
        organization_id=org_id,
        parent1=audit_log_class.objects.filter(state=state_1).first(),
        parent2=audit_log_class.objects.filter(state=state_2).first(),
        parent_state1=state_1,
        parent_state2=state_2,
        state=merged_state,
        name=log_name,
        description='Automatic Merge',
        import_filename=None,
        record_type=AUDIT_IMPORT
    )

    # Flag the result as matched/merged; both parents drop back to unknown.
    merged_state.data_state = DATA_STATE_MATCHING
    merged_state.merge_state = MERGE_STATE_MERGED
    merged_state.save()
    for parent_state in (state_1, state_2):
        parent_state.merge_state = MERGE_STATE_UNKNOWN
        parent_state.save()

    return merged_state
def merge(self, request):
    """
    Merge multiple tax lot records into a single new record
    ---
    parameters:
        - name: organization_id
          description: The organization_id for this user's organization
          required: true
          paramType: query
        - name: state_ids
          description: Array containing tax lot state ids to merge
          paramType: body
    """
    body = request.data
    state_ids = body.get('state_ids', [])
    # NOTE(review): int(None) raises TypeError when organization_id is absent,
    # despite the param being documented as required -- confirm upstream validation.
    organization_id = int(request.query_params.get('organization_id', None))

    # Check the number of state_ids to merge
    if len(state_ids) < 2:
        return JsonResponse(
            {
                'status': 'error',
                'message': 'At least two ids are necessary to merge'
            },
            status=status.HTTP_400_BAD_REQUEST)

    # Make sure the state isn't already matched
    for state_id in state_ids:
        # NOTE(review): 'properties' appears copy-pasted from the property merge
        # endpoint even though this merges tax lots -- confirm the inventory type.
        if ImportFileViewSet.has_coparent(state_id, 'properties'):
            return JsonResponse(
                {
                    'status': 'error',
                    # BUG FIX: state_id comes from the JSON body as an int, so the
                    # original str + int concatenation raised TypeError.
                    'message': 'Source state [{}] is already matched'.format(state_id)
                },
                status=status.HTTP_400_BAD_REQUEST)

    audit_log = TaxLotAuditLog
    inventory = TaxLot
    label = apps.get_model('seed', 'TaxLot_labels')
    state = TaxLotState
    view = TaxLotView

    merged_state = None
    for index in range(1, len(state_ids)):
        # state 1 is the base, state 2 is merged on top of state 1
        # Use index 0 the first time through, merged_state from then on
        if index == 1:
            state1 = state.objects.get(id=state_ids[index - 1])
        else:
            state1 = merged_state
        state2 = state.objects.get(id=state_ids[index])

        merged_state = state.objects.create(organization_id=organization_id)
        merged_state = merging.merge_state(merged_state, state1, state2,
                                           merging.get_state_attrs([state1, state2]),
                                           default=state2)

        state_1_audit_log = audit_log.objects.filter(state=state1).first()
        state_2_audit_log = audit_log.objects.filter(state=state2).first()

        audit_log.objects.create(organization=state1.organization,
                                 parent1=state_1_audit_log,
                                 parent2=state_2_audit_log,
                                 parent_state1=state1,
                                 parent_state2=state2,
                                 state=merged_state,
                                 name='Manual Match',
                                 description='Automatic Merge',
                                 import_filename=None,
                                 record_type=AUDIT_IMPORT)

        # Set the merged_state to merged; parents drop back to unknown
        merged_state.data_state = DATA_STATE_MATCHING
        merged_state.merge_state = MERGE_STATE_MERGED
        merged_state.save()
        state1.merge_state = MERGE_STATE_UNKNOWN
        state1.save()
        state2.merge_state = MERGE_STATE_UNKNOWN
        state2.save()

        # Delete existing views and inventory records
        views = view.objects.filter(state_id__in=[state1.id, state2.id])
        view_ids = list(views.values_list('id', flat=True))

        # Find unique notes so they can be re-attached to the merged view
        notes = list(Note.objects.values(
            'name', 'note_type', 'text', 'log_data', 'created', 'updated',
            'organization_id', 'user_id'
        ).filter(taxlot_view_id__in=view_ids).distinct())

        cycle_id = views.first().cycle_id
        label_ids = []
        # Get paired view ids (pairings are re-created against the new view below)
        paired_view_ids = list(TaxLotProperty.objects.filter(taxlot_view_id__in=view_ids)
                               .order_by('property_view_id')
                               .distinct('property_view_id')
                               .values_list('property_view_id', flat=True))

        # Collect labels from both old tax lots, then delete those tax lots
        # (cascades to their views).
        for v in views:
            label_ids.extend(list(v.taxlot.labels.all().values_list('id', flat=True)))
            v.taxlot.delete()
        label_ids = list(set(label_ids))

        # Create new inventory record
        inventory_record = inventory(organization_id=organization_id)
        inventory_record.save()

        # Create new labels and view
        for label_id in label_ids:
            label(taxlot_id=inventory_record.id, statuslabel_id=label_id).save()
        new_view = view(cycle_id=cycle_id, state_id=merged_state.id,
                        taxlot_id=inventory_record.id)
        new_view.save()

        # Assign notes to the new view
        for note in notes:
            note['taxlot_view'] = new_view
            n = Note(**note)
            n.save()
            # Correct the created and updated times to match the original note
            # (auto_now fields would otherwise overwrite them on save).
            Note.objects.filter(id=n.id).update(created=note['created'],
                                                updated=note['updated'])

        # Delete existing pairs and re-pair all to new view
        # Probably already deleted by cascade
        TaxLotProperty.objects.filter(taxlot_view_id__in=view_ids).delete()
        for paired_view_id in paired_view_ids:
            TaxLotProperty(primary=True,
                           cycle_id=cycle_id,
                           property_view_id=paired_view_id,
                           taxlot_view_id=new_view.id).save()

    return {'status': 'success'}
def process(self, organization_id, cycle, property_view=None):
    """
    Process the building file that was uploaded and create the correct models for the object

    :param organization_id: integer, ID of organization
    :param cycle: object, instance of cycle object
    :param property_view: Existing property view of the building file that will be updated from merging the property_view.state
    :return: list, [status, (PropertyState|None), (PropertyView|None), messages]
    """
    # Look up the parser class for this file type; bail early with the list
    # of accepted formats if the type is unsupported.
    Parser = self.BUILDING_FILE_PARSERS.get(self.file_type, None)
    if not Parser:
        acceptable_file_types = ', '.join(
            map(dict(self.BUILDING_FILE_TYPES).get, list(self.BUILDING_FILE_PARSERS.keys())))
        return False, None, None, "File format was not one of: {}".format(acceptable_file_types)

    parser = Parser()
    try:
        parser.import_file(self.file.path)
        parser_args = []
        parser_kwargs = {}
        # TODO: use table_mappings for BuildingSync process method
        data, messages = parser.process(*parser_args, **parser_kwargs)
    except ParsingError as e:
        # Parse failures are reported as a one-element message list.
        return False, None, None, [str(e)]

    if len(messages['errors']) > 0 or not data:
        return False, None, None, messages

    # Create the property state if none already exists for this file
    if self.property_state is None:
        property_state = self._create_property_state(organization_id, data)
    else:
        property_state = self.property_state

    # save the property state
    self.property_state_id = property_state.id
    self.save()

    # add in the measures
    for m in data.get('measures', []):
        # Find the measure in the database
        try:
            measure = Measure.objects.get(
                category=m['category'],
                name=m['name'],
                organization_id=organization_id,
            )
        except Measure.DoesNotExist:
            # Unknown measures are skipped with a warning rather than failing
            # the whole import.
            messages['warnings'].append(
                'Measure category and name is not valid %s:%s' % (m['category'], m['name']))
            continue

        # Add the measure to the join table.
        # Need to determine what constitutes the unique measure for a property
        # Fall back to defaults when the parsed value is missing or falsy.
        implementation_status = m['implementation_status'] if m.get(
            'implementation_status') else 'Proposed'
        application_scale = m['application_scale_of_application'] if m.get(
            'application_scale_of_application') else PropertyMeasure.SCALE_ENTIRE_FACILITY
        category_affected = m['system_category_affected'] if m.get(
            'system_category_affected') else PropertyMeasure.CATEGORY_OTHER
        join, _ = PropertyMeasure.objects.get_or_create(
            property_state_id=self.property_state_id,
            measure_id=measure.pk,
            property_measure_name=m.get('property_measure_name'),
            implementation_status=PropertyMeasure.str_to_impl_status(implementation_status),
            application_scale=PropertyMeasure.str_to_application_scale(application_scale),
            category_affected=PropertyMeasure.str_to_category_affected(category_affected),
            recommended=m.get('recommended', 'false') == 'true',
        )
        # Cost fields are set outside get_or_create so re-imports refresh them.
        join.description = m.get('description')
        join.cost_mv = m.get('mv_cost')
        join.cost_total_first = m.get('measure_total_first_cost')
        join.cost_installation = m.get('measure_installation_cost')
        join.cost_material = m.get('measure_material_cost')
        join.cost_capital_replacement = m.get('measure_capital_replacement_cost')
        join.cost_residual_value = m.get('measure_residual_value')
        join.save()

    # add in scenarios
    for s in data.get('scenarios', []):
        # measures = models.ManyToManyField(PropertyMeasure)
        # {'reference_case': 'Baseline', 'annual_savings_site_energy': None,
        #  'measures': [], 'id': 'Baseline', 'name': 'Baseline'}

        # If the scenario does not have a name then log a warning and continue
        if not s.get('name'):
            messages['warnings'].append(
                'Skipping scenario because it does not have a name. ID = %s' % s.get('id'))
            continue

        scenario, _ = Scenario.objects.get_or_create(
            name=s.get('name'),
            property_state_id=self.property_state_id,
        )
        # Copy the parsed scenario attributes; missing keys become None.
        scenario.description = s.get('description')
        scenario.annual_site_energy_savings = s.get('annual_site_energy_savings')
        scenario.annual_source_energy_savings = s.get('annual_source_energy_savings')
        scenario.annual_cost_savings = s.get('annual_cost_savings')
        scenario.summer_peak_load_reduction = s.get('summer_peak_load_reduction')
        scenario.winter_peak_load_reduction = s.get('winter_peak_load_reduction')
        scenario.hdd = s.get('hdd')
        scenario.hdd_base_temperature = s.get('hdd_base_temperature')
        scenario.cdd = s.get('cdd')
        scenario.cdd_base_temperature = s.get('cdd_base_temperature')
        scenario.annual_electricity_savings = s.get('annual_electricity_savings')
        scenario.annual_natural_gas_savings = s.get('annual_natural_gas_savings')
        scenario.annual_site_energy = s.get('annual_site_energy')
        scenario.annual_source_energy = s.get('annual_source_energy')
        scenario.annual_site_energy_use_intensity = s.get('annual_site_energy_use_intensity')
        scenario.annual_source_energy_use_intensity = s.get('annual_source_energy_use_intensity')
        scenario.annual_natural_gas_energy = s.get('annual_natural_gas_energy')
        scenario.annual_electricity_energy = s.get('annual_electricity_energy')
        scenario.annual_peak_demand = s.get('annual_peak_demand')

        # temporal_status = models.IntegerField(choices=TEMPORAL_STATUS_TYPES,
        #                                       default=TEMPORAL_STATUS_CURRENT)

        # Link the reference case only when exactly one scenario matches by name.
        if s.get('reference_case'):
            ref_case = Scenario.objects.filter(
                name=s.get('reference_case'),
                property_state_id=self.property_state_id,
            )
            if len(ref_case) == 1:
                scenario.reference_case = ref_case.first()

        # set the list of measures. Note that this can be empty (e.g. baseline has no measures)
        for measure_name in s.get('measures', []):
            # find the join measure in the database
            measure = None
            try:
                measure = PropertyMeasure.objects.get(
                    property_state_id=self.property_state_id,
                    property_measure_name=measure_name,
                )
            except PropertyMeasure.DoesNotExist:
                # PropertyMeasure is not in database, skipping silently
                messages['warnings'].append(
                    'Measure associated with scenario not found. Scenario: %s, Measure name: %s' % (s.get('name'), measure_name))
                continue

            scenario.measures.add(measure)
        scenario.save()

        # meters
        for m in s.get('meters', []):
            # print("BUILDING FILE METER: {}".format(m))
            # check by scenario_id and source_id
            meter, _ = Meter.objects.get_or_create(
                scenario_id=scenario.id,
                source_id=m.get('source_id'),
            )
            meter.source = m.get('source')
            meter.type = m.get('type')
            meter.is_virtual = m.get('is_virtual')
            meter.save()

            # meterreadings
            # TODO: need to check that these are in kBtu already?
            readings = {
                MeterReading(
                    start_time=mr.get('start_time'),
                    end_time=mr.get('end_time'),
                    reading=mr.get('reading'),
                    source_unit=mr.get('source_unit'),
                    meter_id=meter.id,
                    conversion_factor=1.00,  # assuming kBtu
                )
                for mr in m.get('readings', [])
            }
            MeterReading.objects.bulk_create(readings)

    # merge or create the property state's view
    if property_view:
        # create a new blank state to merge the two together
        merged_state = PropertyState.objects.create(organization_id=organization_id)

        # assume the same cycle id as the former state.
        # should merge_state also copy/move over the relationships?
        priorities = Column.retrieve_priorities(organization_id)
        merged_state = merge_state(merged_state,
                                   property_view.state,
                                   property_state,
                                   priorities['PropertyState'])

        # log the merge
        # Not a fan of the parent1/parent2 logic here, seems error prone, what this
        # is also in here: https://github.com/SEED-platform/seed/blob/63536e99cf5be3a9a86391c5cead6dd4ff74462b/seed/data_importer/tasks.py#L1549
        PropertyAuditLog.objects.create(
            organization_id=organization_id,
            parent1=PropertyAuditLog.objects.filter(state=property_view.state).first(),
            parent2=PropertyAuditLog.objects.filter(state=property_state).first(),
            parent_state1=property_view.state,
            parent_state2=property_state,
            state=merged_state,
            name='System Match',
            description='Automatic Merge',
            import_filename=None,
            record_type=AUDIT_IMPORT)

        property_view.state = merged_state
        property_view.save()

        merged_state.merge_state = MERGE_STATE_MERGED
        merged_state.save()

        # set the property_state to the new one
        property_state = merged_state
    elif not property_view:
        # No existing view: promote the new state into its own view for the cycle.
        property_view = property_state.promote(cycle)
    else:
        # invalid arguments, must pass both or neither
        return False, None, None, "Invalid arguments passed to BuildingFile.process()"

    return True, property_state, property_view, messages
def process(self, organization_id, cycle, property_view=None):
    """
    Process the building file that was uploaded and create the correct models for the object

    :param organization_id: integer, ID of organization
    :param cycle: object, instance of cycle object
    :param property_view: Existing property view of the building file that will be updated from merging the property_view.state
    :return: list, [status, (PropertyState|None), (PropertyView|None), messages]
    """
    # Look up the parser class for this file type; bail early with the list
    # of accepted formats if the type is unsupported.
    Parser = self.BUILDING_FILE_PARSERS.get(self.file_type, None)
    if not Parser:
        acceptable_file_types = ', '.join(
            map(dict(self.BUILDING_FILE_TYPES).get, list(self.BUILDING_FILE_PARSERS.keys())))
        return False, None, None, "File format was not one of: {}".format(acceptable_file_types)

    parser = Parser()
    parser.import_file(self.file.path)
    parser_args = []
    parser_kwargs = {}
    if self.file_type == self.BUILDINGSYNC:
        # BuildingSync parsing is driven by the BRICR mapping structure.
        parser_args.append(BuildingSync.BRICR_STRUCT)
    data, messages = parser.process(*parser_args, **parser_kwargs)

    if len(messages['errors']) > 0 or not data:
        return False, None, None, messages

    # sub-select the data that are needed to create the PropertyState object
    db_columns = Column.retrieve_db_field_table_and_names_from_db_tables()
    create_data = {"organization_id": organization_id}
    extra_data = {}
    for k, v in data.items():
        # Skip the keys that are for measures and reports and process later
        if k in ['measures', 'reports', 'scenarios']:
            continue

        # Check if the column exists, if not, then create one.
        if ('PropertyState', k) in db_columns:
            create_data[k] = v
        else:
            extra_data[k] = v

    # always create the new object, then decide if we need to merge it.
    # create a new property_state for the object and promote to a new property_view
    property_state = PropertyState.objects.create(**create_data)
    property_state.extra_data = extra_data
    property_state.save()

    # Register any new extra_data keys as columns for the organization.
    Column.save_column_names(property_state)

    PropertyAuditLog.objects.create(
        organization_id=organization_id,
        state_id=property_state.id,
        name='Import Creation',
        description='Creation from Import file.',
        import_filename=self.file.path,
        record_type=AUDIT_IMPORT)

    # set the property_state_id so that we can list the building files by properties
    self.property_state_id = property_state.id
    self.save()

    # add in the measures
    for m in data.get('measures', []):
        # Find the measure in the database
        try:
            measure = Measure.objects.get(
                category=m['category'],
                name=m['name'],
                organization_id=organization_id,
            )
        except Measure.DoesNotExist:
            # Unknown measures are skipped with a warning rather than failing
            # the whole import.
            messages['warnings'].append(
                'Measure category and name is not valid %s:%s' % (m['category'], m['name']))
            continue

        # Add the measure to the join table.
        # Need to determine what constitutes the unique measure for a property
        join, _ = PropertyMeasure.objects.get_or_create(
            property_state_id=self.property_state_id,
            measure_id=measure.pk,
            property_measure_name=m.get('property_measure_name'),
            implementation_status=PropertyMeasure.str_to_impl_status(
                m.get('implementation_status', 'Proposed')),
            application_scale=PropertyMeasure.str_to_application_scale(
                m.get('application_scale_of_application', PropertyMeasure.SCALE_ENTIRE_FACILITY)),
            category_affected=PropertyMeasure.str_to_category_affected(
                m.get('system_category_affected', PropertyMeasure.CATEGORY_OTHER)),
            recommended=m.get('recommended', 'false') == 'true',
        )
        # Cost fields are set outside get_or_create so re-imports refresh them.
        join.description = m.get('description')
        join.cost_mv = m.get('mv_cost')
        join.cost_total_first = m.get('measure_total_first_cost')
        join.cost_installation = m.get('measure_installation_cost')
        join.cost_material = m.get('measure_material_cost')
        join.cost_capital_replacement = m.get('measure_capital_replacement_cost')
        join.cost_residual_value = m.get('measure_residual_value')
        join.save()

    # add in scenarios
    for s in data.get('scenarios', []):
        # measures = models.ManyToManyField(PropertyMeasure)
        # {'reference_case': 'Baseline', 'annual_savings_site_energy': None,
        #  'measures': [], 'id': 'Baseline', 'name': 'Baseline'}

        # If the scenario does not have a name then log a warning and continue
        if not s.get('name'):
            messages['warnings'].append('Scenario does not have a name. ID = %s' % s.get('id'))
            continue

        scenario, _ = Scenario.objects.get_or_create(
            name=s.get('name'),
            property_state_id=self.property_state_id,
        )
        # Copy the parsed scenario attributes; missing keys become None.
        scenario.description = s.get('description')
        scenario.annual_site_energy_savings = s.get('annual_site_energy_savings')
        scenario.annual_source_energy_savings = s.get('annual_source_energy_savings')
        scenario.annual_cost_savings = s.get('annual_cost_savings')
        scenario.summer_peak_load_reduction = s.get('summer_peak_load_reduction')
        scenario.winter_peak_load_reduction = s.get('winter_peak_load_reduction')
        scenario.hdd = s.get('hdd')
        scenario.hdd_base_temperature = s.get('hdd_base_temperature')
        scenario.cdd = s.get('cdd')
        scenario.cdd_base_temperature = s.get('cdd_base_temperature')

        # temporal_status = models.IntegerField(choices=TEMPORAL_STATUS_TYPES,
        #                                       default=TEMPORAL_STATUS_CURRENT)

        # Link the reference case only when exactly one scenario matches by name.
        if s.get('reference_case'):
            ref_case = Scenario.objects.filter(
                name=s.get('reference_case'),
                property_state_id=self.property_state_id,
            )
            if len(ref_case) == 1:
                scenario.reference_case = ref_case.first()

        # set the list of measures. Note that this can be empty (e.g. baseline has no measures)
        for measure_name in s.get('measures', []):
            # find the join measure in the database
            measure = None
            try:
                measure = PropertyMeasure.objects.get(
                    property_state_id=self.property_state_id,
                    property_measure_name=measure_name,
                )
            except PropertyMeasure.DoesNotExist:
                # PropertyMeasure is not in database, skipping silently
                messages['warnings'].append(
                    'Measure associated with scenario not found. Scenario: %s, Measure name: %s' % (s.get('name'), measure_name))
                continue

            scenario.measures.add(measure)
        scenario.save()

    if property_view:
        # create a new blank state to merge the two together
        merged_state = PropertyState.objects.create(organization_id=organization_id)

        # assume the same cycle id as the former state.
        # should merge_state also copy/move over the relationships?
        priorities = Column.retrieve_priorities(organization_id)
        merged_state = merge_state(merged_state,
                                   property_view.state,
                                   property_state,
                                   priorities['PropertyState'])

        # log the merge
        # Not a fan of the parent1/parent2 logic here, seems error prone, what this
        # is also in here: https://github.com/SEED-platform/seed/blob/63536e99cf5be3a9a86391c5cead6dd4ff74462b/seed/data_importer/tasks.py#L1549
        PropertyAuditLog.objects.create(
            organization_id=organization_id,
            parent1=PropertyAuditLog.objects.filter(state=property_view.state).first(),
            parent2=PropertyAuditLog.objects.filter(state=property_state).first(),
            parent_state1=property_view.state,
            parent_state2=property_state,
            state=merged_state,
            name='System Match',
            description='Automatic Merge',
            import_filename=None,
            record_type=AUDIT_IMPORT)

        property_view.state = merged_state
        property_view.save()

        merged_state.merge_state = MERGE_STATE_MERGED
        merged_state.save()

        # set the property_state to the new one
        property_state = merged_state
    elif not property_view:
        # No existing view: promote the new state into its own view for the cycle.
        property_view = property_state.promote(cycle)
    else:
        # invalid arguments, must pass both or neither
        return False, None, None, "Invalid arguments passed to BuildingFile.process()"

    return True, property_state, property_view, messages
def save_state_match(state1, state2, priorities):
    """
    Merge the contents of state2 into state1, producing a new merged state.

    :param state1: PropertyState or TaxLotState
    :param state2: PropertyState or TaxLotState (same class as state1)
    :param priorities: dict, column names and the priorities of the merging of data. This includes
        all of the priorites for the columns, not just the priorities for the selected taxlotstate.
    :return: the newly created merged state (not state1 itself)
    """
    # New blank state of the same class as state1; merge_state fills it in.
    merged_state = type(state1).objects.create(organization=state1.organization)
    merged_state = merging.merge_state(merged_state,
                                       state1,
                                       state2,
                                       priorities[merged_state.__class__.__name__])

    AuditLogClass = PropertyAuditLog if isinstance(merged_state, PropertyState) else TaxLotAuditLog

    # Both parents must already have at least one audit log entry to link to.
    assert AuditLogClass.objects.filter(state=state1).count() >= 1
    assert AuditLogClass.objects.filter(state=state2).count() >= 1

    # NJACHECK - is this logic correct?
    state_1_audit_log = AuditLogClass.objects.filter(state=state1).first()
    state_2_audit_log = AuditLogClass.objects.filter(state=state2).first()

    AuditLogClass.objects.create(organization=state1.organization,
                                 parent1=state_1_audit_log,
                                 parent2=state_2_audit_log,
                                 parent_state1=state1,
                                 parent_state2=state2,
                                 state=merged_state,
                                 name='System Match',
                                 description='Automatic Merge',
                                 import_filename=None,
                                 record_type=AUDIT_IMPORT)

    # If the two states being merged were just imported from the same import file, carry the
    # import_file_id into the new state. Also merge the lot_number fields so that pairing can
    # work correctly on the resulting merged record
    # Possible conditions:
    # state1.data_state = 2, state1.merge_state = 0 and state2.data_state = 2, state2.merge_state = 0
    # state1.data_state = 0, state1.merge_state = 2 and state2.data_state = 2, state2.merge_state = 0
    if state1.import_file_id == state2.import_file_id:
        if ((state1.data_state == DATA_STATE_MAPPING and state1.merge_state == MERGE_STATE_UNKNOWN and
             state2.data_state == DATA_STATE_MAPPING and state2.merge_state == MERGE_STATE_UNKNOWN) or
            (state1.data_state == DATA_STATE_UNKNOWN and state1.merge_state == MERGE_STATE_MERGED and
             state2.data_state == DATA_STATE_MAPPING and state2.merge_state == MERGE_STATE_UNKNOWN)):
            merged_state.import_file_id = state1.import_file_id

            # lot_number only exists on PropertyState; union the semicolon-separated
            # lot lists from both parents so no pairing information is lost.
            if isinstance(merged_state, PropertyState):
                joined_lots = set()
                if state1.lot_number:
                    joined_lots = joined_lots.union(state1.lot_number.split(';'))
                if state2.lot_number:
                    joined_lots = joined_lots.union(state2.lot_number.split(';'))
                if joined_lots:
                    # NOTE(review): joining a set yields nondeterministic lot order.
                    merged_state.lot_number = ';'.join(joined_lots)

    # Set the merged_state to merged
    merged_state.merge_state = MERGE_STATE_MERGED
    merged_state.save()

    return merged_state