def test_cleanse(self):
    """Import a Portfolio Manager file, map it, and verify the two cleansing findings."""
    # Columns present in the CSV but not mapped below include: Year Ending,
    # ENERGY STAR Score, Total GHG Emissions (MtCO2e), Weather Normalized Site
    # EUI (kBtu/ft2), National Median Site EUI (kBtu/ft2), Source EUI (kBtu/ft2),
    # Weather Normalized Source EUI (kBtu/ft2), National Median Source EUI
    # (kBtu/ft2), Parking - Gross Floor Area (ft2), Organization, Release Date.
    fake_mappings = {
        'pm_property_id': u'Property Id',
        'property_name': u'Property Name',
        'address_line_1': u'Address 1',
        'address_line_2': u'Address 2',
        'city': u'City',
        'state_province': u'State/Province',
        'postal_code': u'Postal Code',
        'year_built': u'Year Built',
        'gross_floor_area': u'Property Floor Area (Buildings and Parking) (ft2)',
        'site_eui': u'Site EUI (kBtu/ft2)',
        'generation_date': u'Generation Date'
    }

    # Import the file and run mapping.
    tasks.save_raw_data(self.import_file.id)
    util.make_fake_mappings(fake_mappings, self.org)
    tasks.map_data(self.import_file.id)

    qs = BuildingSnapshot.objects.filter(
        import_file=self.import_file,
        source_type=PORTFOLIO_BS,
    ).iterator()

    c = Cleansing()
    c.cleanse(qs)
    self.assertEqual(len(c.results), 2)

    def _result_for(address):
        # Return the single cleansing record for `address`, or fail loudly.
        matches = [v for v in c.results.values() if v['address_line_1'] == address]
        if len(matches) != 1:
            raise RuntimeError('Non unity results')
        return matches[0]

    result = _result_for('120243 E True Lane')
    res = [{'field': u'pm_property_id', 'message': 'Value is missing', 'severity': 'error'}]
    self.assertEqual(res, result['cleansing_results'])

    result = _result_for('95373 E Peach Avenue')
    res = [{'field': u'site_eui', 'message': 'Value [0.1] < 10', 'severity': u'warning'}]
    self.assertEqual(res, result['cleansing_results'])
def get_csv(request):
    """Download a csv of the results."""
    import_file_id = request.GET.get('import_file_id')
    cleansing_results = get_cache_raw(Cleansing.cache_key(import_file_id))

    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="Data Cleansing Results.csv"'

    out = csv.writer(response)
    out.writerow([
        'Address Line 1', 'PM Property ID', 'Tax Lot ID', 'Custom ID', 'Field',
        'Error Message', 'Severity'
    ])

    for record in cleansing_results:
        for issue in record['cleansing_results']:
            # Swap the raw column name for its display title when one exists.
            label = issue['field']
            meta = Cleansing.ASSESSOR_FIELDS_BY_COLUMN.get(label)
            if meta is not None:
                label = meta['title']
            out.writerow([
                record['address_line_1'],
                record['pm_property_id'],
                record['tax_lot_id'],
                record['custom_id_1'],
                label,
                issue['message'],
                issue['severity'],
            ])

    return response
def get_cleansing_results(self, request, pk=None):
    """
    Retrieve the details of the cleansing script.
    ---
    type:
        status:
            required: true
            type: string
            description: either success or error
        message:
            type: string
            description: additional information, if any
        progress:
            type: integer
            description: integer percent of completion
        data:
            type: JSON
            description: object describing the results of the cleansing
    parameter_strategy: replace
    parameters:
        - name: pk
          description: Import file ID
          required: true
          paramType: path
    """
    # NOTE(review): the YAML block in the docstring above appears to be consumed
    # by a swagger doc generator — confirm before restructuring it.
    import_file_id = pk
    # The cleansing task stores its results in the cache keyed by import file id.
    cleansing_results = get_cache_raw(Cleansing.cache_key(import_file_id))
    return JsonResponse({
        'status': 'success',
        'message': 'Cleansing complete',
        'progress': 100,
        'data': cleansing_results
    })
def get_csv(request):
    """Download a csv of the results."""
    import_file_id = request.GET.get("import_file_id")
    cleansing_results = get_cache_raw(Cleansing.cache_key(import_file_id))

    response = HttpResponse(content_type="text/csv")
    response["Content-Disposition"] = 'attachment; filename="Data Cleansing Results.csv"'

    writer = csv.writer(response)
    header = ["Address Line 1", "PM Property ID", "Tax Lot ID", "Custom ID", "Field", "Error Message", "Severity"]
    writer.writerow(header)

    # One output row per individual finding within each building record.
    rows = (
        [
            entry["address_line_1"],
            entry["pm_property_id"],
            entry["tax_lot_id"],
            entry["custom_id_1"],
            finding["formatted_field"],
            finding["detailed_message"],
            finding["severity"],
        ]
        for entry in cleansing_results
        for finding in entry["cleansing_results"]
    )
    writer.writerows(rows)

    return response
def get_csv(request):
    """Download a csv of the results."""
    import_file_id = request.GET.get('import_file_id')
    cleansing_results = get_cache_raw(Cleansing.cache_key(import_file_id))

    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="Data Cleansing Results.csv"'

    writer = csv.writer(response)
    writer.writerow(['Address Line 1', 'PM Property ID', 'Tax Lot ID', 'Custom ID', 'Field',
                     'Error Message', 'Severity'])

    for building in cleansing_results:
        for finding in building['cleansing_results']:
            # Prefer the human-readable title over the raw column name.
            column = finding['field']
            if column in Cleansing.ASSESSOR_FIELDS_BY_COLUMN:
                column = Cleansing.ASSESSOR_FIELDS_BY_COLUMN[column]['title']
            writer.writerow([building['address_line_1'], building['pm_property_id'],
                             building['tax_lot_id'], building['custom_id_1'], column,
                             finding['message'], finding['severity']])

    return response
def get_cleansing_results(request):
    """Retrieve the details of the cleansing script."""
    file_id = request.GET.get("import_file_id")
    # Results were placed in the cache by the cleansing background task.
    results = get_cache_raw(Cleansing.cache_key(file_id))
    payload = {
        "status": "success",
        "message": "Cleansing complete",
        "progress": 100,
        "data": results,
    }
    return payload
def get_cleansing_results(request):
    """Retrieve the details of the cleansing script.

    Returns the cached cleansing results (empty list when nothing is cached),
    with raw column names replaced by their display titles.
    """
    import_file_id = request.GET.get('import_file_id')
    cleansing_results = cache.get(Cleansing.cache_key(import_file_id), [])

    # Iterate directly — the previous enumerate() indices were never used.
    for row in cleansing_results:
        for result in row['cleansing_results']:
            if result['field'] in Cleansing.ASSESSOR_FIELDS_BY_COLUMN:
                result['field'] = Cleansing.ASSESSOR_FIELDS_BY_COLUMN[result['field']]['title']

    return cleansing_results
def get_cleansing_results(request):
    """Retrieve the details of the cleansing script."""
    # Look up the cached results for the requested import file.
    cache_key = Cleansing.cache_key(request.GET.get('import_file_id'))
    return {
        'status': 'success',
        'message': 'Cleansing complete',
        'progress': 100,
        'data': get_cache_raw(cache_key),
    }
def get_cleansing_results(request):
    """Retrieve the details of the cleansing script.

    Returns a status dict whose ``data`` key holds the cached cleansing
    results with raw column names replaced by their display titles.
    """
    import_file_id = request.GET.get('import_file_id')
    cleansing_results = get_cache_raw(Cleansing.cache_key(import_file_id))

    # Add in additional fields for the view. Iterate directly — the previous
    # enumerate() indices were never used.
    for row in cleansing_results:
        for result in row['cleansing_results']:
            if result['field'] in Cleansing.ASSESSOR_FIELDS_BY_COLUMN:
                result['field'] = Cleansing.ASSESSOR_FIELDS_BY_COLUMN[result['field']]['title']

    return {
        'status': 'success',
        'message': 'Cleansing complete',
        'progress': 100,
        'data': cleansing_results
    }
def get_csv(self, request, pk=None):
    """
    Download a csv of the results.
    ---
    type:
        status:
            required: true
            type: string
            description: either success or error
        progress_key:
            type: integer
            description: ID of background job, for retrieving job progress
    parameter_strategy: replace
    parameters:
        - name: pk
          description: Import file ID
          required: true
          paramType: path
    """
    # NOTE(review): the YAML block in the docstring above appears to be consumed
    # by a swagger doc generator — confirm before restructuring it.
    import_file_id = pk
    # Results were placed in the cache by the cleansing background task.
    cleansing_results = get_cache_raw(Cleansing.cache_key(import_file_id))

    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="Data Cleansing Results.csv"'

    writer = csv.writer(response)
    writer.writerow(['Address Line 1', 'PM Property ID', 'Tax Lot ID', 'Custom ID', 'Field',
                     'Error Message', 'Severity'])
    # One output row per individual finding within each building record.
    for row in cleansing_results:
        for result in row['cleansing_results']:
            writer.writerow([
                row['address_line_1'],
                row['pm_property_id'],
                row['tax_lot_id'],
                row['custom_id_1'],
                result['formatted_field'],
                result['detailed_message'],
                result['severity']
            ])

    return response
def get_cleansing_results(request):
    """Retrieve the details of the cleansing script.

    Returns a status dict whose ``data`` key holds the cached cleansing
    results with raw column names replaced by their display titles.
    """
    import_file_id = request.GET.get('import_file_id')
    cleansing_results = get_cache_raw(Cleansing.cache_key(import_file_id))

    # Add in additional fields for the view. Iterate directly — the previous
    # enumerate() indices were never used.
    for row in cleansing_results:
        for result in row['cleansing_results']:
            if result['field'] in Cleansing.ASSESSOR_FIELDS_BY_COLUMN:
                result['field'] = Cleansing.ASSESSOR_FIELDS_BY_COLUMN[result['field']]['title']

    return {
        'status': 'success',
        'message': 'Cleansing complete',
        'progress': 100,
        'data': cleansing_results
    }
def test_cleanse(self):
    """Import an assessed-building file, map it, and verify the cleansing findings."""
    # This is silly, the mappings are backwards from what you would expect.
    # The key is the BS field, and the value is the column name in the CSV.
    fake_mappings = {
        'city': 'city',
        'postal_code': 'Zip',
        'gross_floor_area': 'GBA',
        'building_count': 'BLDGS',
        'year_built': 'AYB_YearBuilt',
        'state_province': 'State',
        'address_line_1': 'Address',
        'owner': 'Owner',
        'property_notes': 'Property Type',
        'tax_lot_id': 'UBI',
        'custom_id_1': 'Custom ID',
        'pm_property_id': 'PM Property ID'
    }

    # Import the file and run mapping.
    tasks.save_raw_data(self.import_file.id)
    util.make_fake_mappings(fake_mappings, self.org)
    tasks.map_data(self.import_file.id)

    qs = BuildingSnapshot.objects.filter(
        import_file=self.import_file,
        source_type=ASSESSED_BS,
    ).iterator()

    c = Cleansing(self.org)
    c.cleanse(qs)
    self.assertEqual(len(c.results), 2)

    result = [v for v in c.results.values() if v['address_line_1'] == '95373 E Peach Avenue']
    if len(result) == 1:
        result = result[0]
    else:
        raise RuntimeError('Non unity results')

    # Was assertTrue(x, y): the second argument is a failure *message*, so the
    # assertion passed for any truthy value. assertEqual is what was intended.
    self.assertEqual(result['address_line_1'], '95373 E Peach Avenue')
    self.assertEqual(result['tax_lot_id'], '10107/c6596')

    res = [{
        'field': u'pm_property_id',
        'formatted_field': u'PM Property ID',
        'value': u'',
        'message': u'PM Property ID is missing',
        'detailed_message': u'PM Property ID is missing',
        'severity': u'error'
    }]
    self.assertEqual(res, result['cleansing_results'])

    result = [v for v in c.results.values() if v['address_line_1'] == '120243 E True Lane']
    if len(result) == 1:
        result = result[0]
    else:
        raise RuntimeError('Non unity results')

    res = [{
        'field': u'year_built',
        'formatted_field': u'Year Built',
        'value': 0,
        'message': u'Year Built out of range',
        'detailed_message': u'Year Built [0] < 1700',
        'severity': u'error'
    }, {
        'field': u'gross_floor_area',
        'formatted_field': u'Gross Floor Area',
        'value': 10000000000.0,
        'message': u'Gross Floor Area out of range',
        'detailed_message': u'Gross Floor Area [10000000000.0] > 7000000.0',
        'severity': u'error'
    }, {
        'field': u'custom_id_1',
        'formatted_field': u'Custom ID 1',
        'value': u'',
        'message': u'Custom ID 1 is missing',
        'detailed_message': u'Custom ID 1 is missing',
        'severity': u'error'
    }, {
        'field': u'pm_property_id',
        'formatted_field': u'PM Property ID',
        'value': u'',
        'message': u'PM Property ID is missing',
        'detailed_message': u'PM Property ID is missing',
        'severity': u'error'
    }]
    # Order of findings is not guaranteed, so compare as unordered collections.
    self.assertItemsEqual(res, result['cleansing_results'])

    result = [v for v in c.results.values() if v['address_line_1'] == '1234 Peach Tree Avenue']
    self.assertEqual(len(result), 0)
    self.assertEqual(result, [])
def test_cleanse(self):
    """Map every column of the error-seeded file and verify the error count."""
    # This is silly, the mappings are backwards from what you would expect.
    # The key is the BS field, and the value is the column name in the CSV.
    fake_mappings = {
        'block_number': 'block_number',
        'error_type': 'error type',
        'building_count': 'building_count',
        'conditioned_floor_area': 'conditioned_floor_area',
        'energy_score': 'energy_score',
        'gross_floor_area': 'gross_floor_area',
        'lot_number': 'lot_number',
        'occupied_floor_area': 'occupied_floor_area',
        'postal_code': 'postal_code',
        'site_eui': 'site_eui',
        'site_eui_weather_normalized': 'site_eui_weather_normalized',
        'source_eui': 'source_eui',
        'source_eui_weather_normalized': 'source_eui_weather_normalized',
        'address_line_1': 'address_line_1',
        'address_line_2': 'address_line_2',
        'building_certification': 'building_certification',
        'city': 'city',
        'custom_id_1': 'custom_id_1',
        'district': 'district',
        'energy_alerts': 'energy_alerts',
        'owner': 'owner',
        'owner_address': 'owner_address',
        'owner_city_state': 'owner_city_state',
        'owner_email': 'owner_email',
        'owner_postal_code': 'owner_postal_code',
        'owner_telephone': 'owner_telephone',
        'pm_property_id': 'pm_property_id',
        'property_name': 'property_name',
        'property_notes': 'property_notes',
        'space_alerts': 'space_alerts',
        'state_province': 'state_province',
        'tax_lot_id': 'tax_lot_id',
        'use_description': 'use_description',
        'generation_date': 'generation_date',
        'recent_sale_date': 'recent_sale_date',
        'release_date': 'release_date',
        'year_built': 'year_built',
        'year_ending': 'year_ending',
    }

    # Import the file and run mapping.
    tasks.save_raw_data(self.import_file.id)
    util.make_fake_mappings(fake_mappings, self.org)
    tasks.map_data(self.import_file.id)

    qs = BuildingSnapshot.objects.filter(
        import_file=self.import_file,
        source_type=ASSESSED_BS,
    ).iterator()

    c = Cleansing(self.org)
    c.cleanse(qs)

    # This only checks to make sure the 35 errors have occurred.
    self.assertEqual(len(c.results), 35)
def test_cleanse(self):
    """Import a Portfolio Manager file, map it, and verify the two cleansing findings."""
    # Columns present in the CSV but not mapped below include: Year Ending,
    # Energy Score, Total GHG Emissions (MtCO2e), Weather Normalized Site EUI
    # (kBtu/ft2), National Median Site EUI (kBtu/ft2), Source EUI (kBtu/ft2),
    # Weather Normalized Source EUI (kBtu/ft2), National Median Source EUI
    # (kBtu/ft2), Parking - Gross Floor Area (ft2), Organization, Release Date.
    fake_mappings = {
        'pm_property_id': u'Property Id',
        'property_name': u'Property Name',
        'address_line_1': u'Address 1',
        'address_line_2': u'Address 2',
        'city': u'City',
        'state_province': u'State/Province',
        'postal_code': u'Postal Code',
        'year_built': u'Year Built',
        'gross_floor_area': u'Property Floor Area (Buildings and Parking) (ft2)',
        'site_eui': u'Site EUI (kBtu/ft2)',
        'generation_date': u'Generation Date'
    }

    # Import the file and run mapping.
    tasks.save_raw_data(self.import_file.id)
    util.make_fake_mappings(fake_mappings, self.org)
    tasks.map_data(self.import_file.id)

    qs = BuildingSnapshot.objects.filter(
        import_file=self.import_file,
        source_type=PORTFOLIO_BS,
    ).iterator()

    c = Cleansing(self.org)
    c.cleanse(qs)
    self.assertEqual(len(c.results), 2)

    result = [v for v in c.results.values() if v['address_line_1'] == '120243 E True Lane']
    if len(result) == 1:
        result = result[0]
    else:
        raise RuntimeError('Non unity results')

    res = [{
        'field': u'pm_property_id',
        'formatted_field': u'PM Property ID',
        'value': u'',
        'message': u'PM Property ID is missing',
        'detailed_message': u'PM Property ID is missing',
        'severity': u'error'
    }]
    self.assertEqual(res, result['cleansing_results'])

    result = [v for v in c.results.values() if v['address_line_1'] == '95373 E Peach Avenue']
    if len(result) == 1:
        result = result[0]
    else:
        raise RuntimeError('Non unity results')

    res = [{
        'field': u'site_eui',
        'formatted_field': u'Site EUI',
        'value': 0.1,
        'message': u'Site EUI out of range',
        'detailed_message': u'Site EUI [0.1] < 10.0',
        'severity': u'warning'
    }]
    self.assertEqual(res, result['cleansing_results'])
def test_cleanse(self):
    """Import an assessed-building file, map it, and verify the cleansing findings."""
    # This is silly, the mappings are backwards from what you would expect.
    # The key is the BS field, and the value is the column name in the CSV.
    fake_mappings = {
        'city': 'city',
        'postal_code': 'Zip',
        'gross_floor_area': 'GBA',
        'building_count': 'BLDGS',
        'year_built': 'AYB_YearBuilt',
        'state_province': 'State',
        'address_line_1': 'Address',
        'owner': 'Owner',
        'property_notes': 'Property Type',
        'tax_lot_id': 'UBI',
        'custom_id_1': 'Custom ID',
        'pm_property_id': 'PM Property ID'
    }

    # Import the file and run mapping.
    tasks.save_raw_data(self.import_file.id)
    util.make_fake_mappings(fake_mappings, self.org)
    tasks.map_data(self.import_file.id)

    qs = BuildingSnapshot.objects.filter(
        import_file=self.import_file,
        source_type=ASSESSED_BS,
    ).iterator()

    c = Cleansing(self.org)
    c.cleanse(qs)
    self.assertEqual(len(c.results), 2)

    result = [v for v in c.results.values() if v['address_line_1'] == '95373 E Peach Avenue']
    if len(result) == 1:
        result = result[0]
    else:
        raise RuntimeError('Non unity results')

    # Was assertTrue(x, y): the second argument is a failure *message*, so the
    # assertion passed for any truthy value. assertEqual is what was intended.
    self.assertEqual(result['address_line_1'], '95373 E Peach Avenue')
    self.assertEqual(result['tax_lot_id'], '10107/c6596')

    res = [{
        'field': u'pm_property_id',
        'formatted_field': u'PM Property ID',
        'value': u'',
        'message': u'PM Property ID is missing',
        'detailed_message': u'PM Property ID is missing',
        'severity': u'error'
    }]
    self.assertEqual(res, result['cleansing_results'])

    result = [v for v in c.results.values() if v['address_line_1'] == '120243 E True Lane']
    if len(result) == 1:
        result = result[0]
    else:
        raise RuntimeError('Non unity results')

    res = [{
        'field': u'year_built',
        'formatted_field': u'Year Built',
        'value': 0,
        'message': u'Year Built out of range',
        'detailed_message': u'Year Built [0] < 1700',
        'severity': u'error'
    }, {
        'field': u'gross_floor_area',
        'formatted_field': u'Gross Floor Area',
        'value': 10000000000.0,
        'message': u'Gross Floor Area out of range',
        'detailed_message': u'Gross Floor Area [10000000000.0] > 7000000.0',
        'severity': u'error'
    }, {
        'field': u'custom_id_1',
        'formatted_field': u'Custom ID 1',
        'value': u'',
        'message': u'Custom ID 1 is missing',
        'detailed_message': u'Custom ID 1 is missing',
        'severity': u'error'
    }, {
        'field': u'pm_property_id',
        'formatted_field': u'PM Property ID',
        'value': u'',
        'message': u'PM Property ID is missing',
        'detailed_message': u'PM Property ID is missing',
        'severity': u'error'
    }]
    # Order of findings is not guaranteed, so compare as unordered collections.
    self.assertItemsEqual(res, result['cleansing_results'])

    result = [v for v in c.results.values() if v['address_line_1'] == '1234 Peach Tree Avenue']
    self.assertEqual(len(result), 0)
    self.assertEqual(result, [])
def test_cleanse(self):
    """Map every column of the error-seeded file into PropertyState and verify the error count."""
    # Import the file and run mapping
    # This is silly, the mappings are backwards from what you would expect.
    # The key is the BS field, and the value is the value in the CSV
    fake_mappings = [
        {"from_field": u'block_number', "to_table_name": u'PropertyState', "to_field": u'block_number', },
        {"from_field": u'error_type', "to_table_name": u'PropertyState', "to_field": u'error_type', },
        {"from_field": u'building_count', "to_table_name": u'PropertyState', "to_field": u'building_count', },
        {"from_field": u'conditioned_floor_area', "to_table_name": u'PropertyState', "to_field": u'conditioned_floor_area', },
        {"from_field": u'energy_score', "to_table_name": u'PropertyState', "to_field": u'energy_score', },
        {"from_field": u'gross_floor_area', "to_table_name": u'PropertyState', "to_field": u'gross_floor_area', },
        {"from_field": u'lot_number', "to_table_name": u'PropertyState', "to_field": u'lot_number', },
        {"from_field": u'occupied_floor_area', "to_table_name": u'PropertyState', "to_field": u'occupied_floor_area', },
        # NOTE(review): duplicate of the conditioned_floor_area entry above — likely redundant.
        {"from_field": u'conditioned_floor_area', "to_table_name": u'PropertyState', "to_field": u'conditioned_floor_area', },
        {"from_field": u'postal_code', "to_table_name": u'PropertyState', "to_field": u'postal_code', },
        {"from_field": u'site_eui', "to_table_name": u'PropertyState', "to_field": u'site_eui', },
        {"from_field": u'site_eui_weather_normalized', "to_table_name": u'PropertyState', "to_field": u'site_eui_weather_normalized', },
        {"from_field": u'source_eui', "to_table_name": u'PropertyState', "to_field": u'source_eui', },
        {"from_field": u'source_eui_weather_normalized', "to_table_name": u'PropertyState', "to_field": u'source_eui_weather_normalized', },
        {"from_field": u'address_line_1', "to_table_name": u'PropertyState', "to_field": u'address_line_1', },
        {"from_field": u'address_line_2', "to_table_name": u'PropertyState', "to_field": u'address_line_2', },
        {"from_field": u'building_certification', "to_table_name": u'PropertyState', "to_field": u'building_certification', },
        {"from_field": u'city', "to_table_name": u'PropertyState', "to_field": u'city', },
        {"from_field": u'custom_id_1', "to_table_name": u'PropertyState', "to_field": u'custom_id_1', },
        {"from_field": u'district', "to_table_name": u'PropertyState', "to_field": u'district', },
        {"from_field": u'energy_alerts', "to_table_name": u'PropertyState', "to_field": u'energy_alerts', },
        {"from_field": u'owner_address', "to_table_name": u'PropertyState', "to_field": u'owner_address', },
        {"from_field": u'owner_city_state', "to_table_name": u'PropertyState', "to_field": u'owner_city_state', },
        {"from_field": u'owner_email', "to_table_name": u'PropertyState', "to_field": u'owner_email', },
        {"from_field": u'owner_postal_code', "to_table_name": u'PropertyState', "to_field": u'owner_postal_code', },
        {"from_field": u'owner_telephone', "to_table_name": u'PropertyState', "to_field": u'owner_telephone', },
        {"from_field": u'pm_property_id', "to_table_name": u'PropertyState', "to_field": u'pm_property_id', },
        {"from_field": u'property_name', "to_table_name": u'PropertyState', "to_field": u'property_name', },
        {"from_field": u'property_notes', "to_table_name": u'PropertyState', "to_field": u'property_notes', },
        {"from_field": u'space_alerts', "to_table_name": u'PropertyState', "to_field": u'space_alerts', },
        {"from_field": u'state_province', "to_table_name": u'PropertyState', "to_field": u'state_province', },
        {"from_field": u'tax_lot_id', "to_table_name": u'PropertyState', "to_field": u'tax_lot_id', },
        {"from_field": u'use_description', "to_table_name": u'PropertyState', "to_field": u'use_description', },
        {"from_field": u'generation_date', "to_table_name": u'PropertyState', "to_field": u'generation_date', },
        {"from_field": u'recent_sale_date', "to_table_name": u'PropertyState', "to_field": u'recent_sale_date', },
        # NOTE(review): duplicate of the generation_date entry above — likely redundant.
        {"from_field": u'generation_date', "to_table_name": u'PropertyState', "to_field": u'generation_date', },
        {"from_field": u'release_date', "to_table_name": u'PropertyState', "to_field": u'release_date', },
        {"from_field": u'year_built', "to_table_name": u'PropertyState', "to_field": u'year_built', },
        {"from_field": u'year_ending', "to_table_name": u'PropertyState', "to_field": u'year_ending', }
    ]

    # Import the file and run mapping.
    tasks.save_raw_data(self.import_file.id)
    Column.create_mappings(fake_mappings, self.org, self.user)
    tasks.map_data(self.import_file.id)

    qs = PropertyState.objects.filter(
        import_file=self.import_file,
        source_type=ASSESSED_BS,
    ).iterator()

    c = Cleansing(self.org)
    c.cleanse('property', qs)
    # _log.debug(c.results)

    # This only checks to make sure the 34 errors have occurred.
    self.assertEqual(len(c.results), 34)
def test_cleanse(self):
    """Map every column of the error-seeded file and verify the error count."""
    # Import the file and run mapping
    # This is silly, the mappings are backwards from what you would expect. The key is the BS field, and the
    # value is the value in the CSV
    fake_mappings = {
        'block_number': 'block_number',
        'error_type': 'error type',
        'building_count': 'building_count',
        'conditioned_floor_area': 'conditioned_floor_area',
        'energy_score': 'energy_score',
        'gross_floor_area': 'gross_floor_area',
        'lot_number': 'lot_number',
        'occupied_floor_area': 'occupied_floor_area',
        'postal_code': 'postal_code',
        'site_eui': 'site_eui',
        'site_eui_weather_normalized': 'site_eui_weather_normalized',
        'source_eui': 'source_eui',
        'source_eui_weather_normalized': 'source_eui_weather_normalized',
        'address_line_1': 'address_line_1',
        'address_line_2': 'address_line_2',
        'building_certification': 'building_certification',
        'city': 'city',
        'custom_id_1': 'custom_id_1',
        'district': 'district',
        'energy_alerts': 'energy_alerts',
        'owner': 'owner',
        'owner_address': 'owner_address',
        'owner_city_state': 'owner_city_state',
        'owner_email': 'owner_email',
        'owner_postal_code': 'owner_postal_code',
        'owner_telephone': 'owner_telephone',
        'pm_property_id': 'pm_property_id',
        'property_name': 'property_name',
        'property_notes': 'property_notes',
        'space_alerts': 'space_alerts',
        'state_province': 'state_province',
        'tax_lot_id': 'tax_lot_id',
        'use_description': 'use_description',
        'generation_date': 'generation_date',
        'recent_sale_date': 'recent_sale_date',
        'release_date': 'release_date',
        'year_built': 'year_built',
        'year_ending': 'year_ending',
    }

    tasks.save_raw_data(self.import_file.id)
    util.make_fake_mappings(fake_mappings, self.org)
    tasks.map_data(self.import_file.id)

    qs = BuildingSnapshot.objects.filter(
        import_file=self.import_file,
        source_type=ASSESSED_BS,
    ).iterator()

    c = Cleansing(self.org)
    c.cleanse(qs)

    # This only checks to make sure the 35 errors have occurred.
    self.assertEqual(len(c.results), 35)
def test_cleanse(self):
    """Map a Portfolio Manager file into PropertyState and verify the two cleansing findings."""
    # Import the file and run mapping
    # Year Ending,Energy Score,Total GHG Emissions (MtCO2e),Weather Normalized Site EUI (kBtu/ft2),
    # National Median Site EUI (kBtu/ft2),Source EUI (kBtu/ft2),Weather Normalized Source EUI (kBtu/ft2),
    # National Median Source EUI (kBtu/ft2),Parking - Gross Floor Area (ft2),Organization
    # Release Date
    fake_mappings = [
        {"from_field": u'Property Id', "to_table_name": u'PropertyState', "to_field": u'pm_property_id', },
        {"from_field": u'Property Name', "to_table_name": u'PropertyState', "to_field": u'property_name', },
        {"from_field": u'Address 1', "to_table_name": u'PropertyState', "to_field": u'address_line_1', },
        {"from_field": u'Address 2', "to_table_name": u'PropertyState', "to_field": u'address_line_2', },
        {"from_field": u'City', "to_table_name": u'PropertyState', "to_field": u'city', },
        {"from_field": u'State/Province', "to_table_name": u'PropertyState', "to_field": u'state_province', },
        {"from_field": u'Postal Code', "to_table_name": u'PropertyState', "to_field": u'postal_code', },
        {"from_field": u'Year Built', "to_table_name": u'PropertyState', "to_field": u'year_built', },
        {"from_field": u'Property Floor Area (Buildings and Parking) (ft2)', "to_table_name": u'PropertyState', "to_field": u'gross_floor_area', },
        {"from_field": u'Site EUI (kBtu/ft2)', "to_table_name": u'PropertyState', "to_field": u'site_eui', },
        {"from_field": u'Generation Date', "to_table_name": u'PropertyState', "to_field": u'generation_date', }
    ]

    tasks.save_raw_data(self.import_file.id)
    Column.create_mappings(fake_mappings, self.org, self.user)
    tasks.map_data(self.import_file.id)

    qs = PropertyState.objects.filter(
        import_file=self.import_file,
        source_type=PORTFOLIO_BS,
    ).iterator()

    c = Cleansing(self.org)
    c.cleanse('property', qs)
    _log.debug(c.results)

    self.assertEqual(len(c.results), 2)

    # Expect exactly one cleansing record for this address.
    result = [v for v in c.results.values() if v['address_line_1'] == '120243 E True Lane']
    if len(result) == 1:
        result = result[0]
    else:
        raise RuntimeError('Non unity results')

    res = [{
        'field': u'pm_property_id',
        'formatted_field': u'PM Property ID',
        'value': u'',
        'message': u'PM Property ID is missing',
        'detailed_message': u'PM Property ID is missing',
        'severity': u'error'
    }]
    self.assertEqual(res, result['cleansing_results'])

    # Expect exactly one cleansing record for this address.
    result = [v for v in c.results.values() if v['address_line_1'] == '95373 E Peach Avenue']
    if len(result) == 1:
        result = result[0]
    else:
        raise RuntimeError('Non unity results')

    res = [{
        'field': u'site_eui',
        'formatted_field': u'Site EUI',
        'value': 0.1,
        'message': u'Site EUI out of range',
        'detailed_message': u'Site EUI [0.1] < 10.0',
        'severity': u'warning'
    }]
    self.assertEqual(res, result['cleansing_results'])