class InventoryViewTests(DeleteModelsTestCase):
    """Endpoint tests for the v2.1 property export views (BuildingSync and HPXML).

    Both tests need an identical fixture (one PropertyView in the test cycle),
    which previously was duplicated inline; it now lives in ``_create_property_view``.
    """

    def setUp(self):
        # Superuser + organization, logged in so the endpoints' permission
        # checks pass; factories produce the property/cycle fixtures below.
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.user = User.objects.create_superuser(**user_details)
        self.org, _, _ = create_organization(self.user)
        self.status_label = StatusLabel.objects.create(
            name='test', super_organization=self.org
        )
        self.column_factory = FakeColumnFactory(organization=self.org)
        self.cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(organization=self.org)
        self.cycle = self.cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=timezone.get_current_timezone())
        )
        self.client.login(**user_details)

    def _create_property_view(self):
        """Create one PropertyView in the test cycle; return (state, view)."""
        state = self.property_state_factory.get_property_state()
        prprty = self.property_factory.get_property()
        pv = PropertyView.objects.create(
            property=prprty, cycle=self.cycle, state=state
        )
        return state, pv

    def test_get_building_sync(self):
        """The BuildingSync export should contain the state's gross floor area."""
        state, pv = self._create_property_view()

        # go to buildingsync endpoint
        params = {
            'organization_id': self.org.pk
        }
        url = reverse('api:v2.1:properties-building-sync', args=[pv.id])
        response = self.client.get(url, params)
        self.assertIn('<auc:FloorAreaValue>%s.0</auc:FloorAreaValue>' % state.gross_floor_area,
                      response.content)

    def test_get_hpxml(self):
        """The HPXML export should contain the state's gross floor area."""
        state, pv = self._create_property_view()

        # go to hpxml endpoint
        params = {
            'organization_id': self.org.pk
        }
        url = reverse('api:v2.1:properties-hpxml', args=[pv.id])
        response = self.client.get(url, params)
        self.assertIn('<GrossFloorArea>%s.0</GrossFloorArea>' % state.gross_floor_area,
                      response.content)
def test_taxlots_merge_without_losing_pairings(self): # Create 2 pairings and distribute them to the two -Views. property_factory = FakePropertyFactory(organization=self.org) property_state_factory = FakePropertyStateFactory( organization=self.org) property_1 = property_factory.get_property() state_1 = property_state_factory.get_property_state() property_view_1 = PropertyView.objects.create(property=property_1, cycle=self.cycle, state=state_1) property_2 = property_factory.get_property() state_2 = property_state_factory.get_property_state() property_view_2 = PropertyView.objects.create(property=property_2, cycle=self.cycle, state=state_2) TaxLotProperty(primary=True, cycle_id=self.cycle.id, property_view_id=property_view_1.id, taxlot_view_id=self.view_1.id).save() TaxLotProperty(primary=True, cycle_id=self.cycle.id, property_view_id=property_view_2.id, taxlot_view_id=self.view_2.id).save() # Merge the taxlots url = reverse('api:v2:taxlots-merge') + '?organization_id={}'.format( self.org.pk) post_params = json.dumps({ 'state_ids': [self.state_2.pk, self.state_1.pk] # priority given to state_1 }) self.client.post(url, post_params, content_type='application/json') # There should still be 2 TaxLotProperties self.assertEqual(TaxLotProperty.objects.count(), 2) taxlot_view = TaxLotView.objects.first() paired_propertyview_ids = list( TaxLotProperty.objects.filter( taxlot_view_id=taxlot_view.id).values_list('property_view_id', flat=True)) self.assertCountEqual(paired_propertyview_ids, [property_view_1.id, property_view_2.id])
class MeterUtilTests(TestCase):
    """Unit tests for ``MetersParser``.

    Verifies that raw Portfolio Manager / GreenButton rows are split into
    meter + reading dictionaries, covering unit conversion (US and Canadian
    assumptions), cost meters, DST boundaries, leap years, delivered meters,
    and rows that cannot be linked to a property.
    """

    def setUp(self):
        # Superuser + org, plus a PropertyState whose pm_property_id lets the
        # parser link Portfolio Manager rows back to self.property.
        self.user_details = {
            'username': '******',
            'password': '******',
        }
        self.user = User.objects.create_superuser(email='*****@*****.**', **self.user_details)
        self.org, _, _ = create_organization(self.user)

        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        property_details = self.property_state_factory.get_details()
        self.pm_property_id = '12345'
        property_details['pm_property_id'] = self.pm_property_id
        property_details['organization_id'] = self.org.id
        state = PropertyState(**property_details)
        state.save()
        self.state = PropertyState.objects.get(pk=state.id)

        self.cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        self.cycle = self.cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=get_current_timezone()))

        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property = self.property_factory.get_property()

        self.property_view = PropertyView.objects.create(
            property=self.property, cycle=self.cycle, state=self.state)

        # tz used to build the aware datetimes expected from the parser
        self.tz_obj = timezone(TIME_ZONE)

    def test_parse_meter_details_splits_monthly_info_into_meter_data_and_readings_even_with_DST_changing(
            self):
        """Two meter types in one month (spanning a DST change) yield two meter objs."""
        raw_meters = [{
            'Portfolio Manager ID': self.pm_property_id,
            'Portfolio Manager Meter ID': '123-PMMeterID',
            'Start Date': '2016-03-01 00:00:00',
            'End Date': '2016-04-01 00:00:00',
            'Meter Type': 'Electric - Grid',
            'Usage Units': 'kBtu (thousand Btu)',
            'Usage/Quantity': 100,
        }, {
            'Portfolio Manager ID': self.pm_property_id,
            'Portfolio Manager Meter ID': '123-PMMeterID',
            'Start Date': '2016-03-01 00:00:00',
            'End Date': '2016-04-01 00:00:00',
            'Meter Type': 'Natural Gas',
            'Usage Units': 'kBtu (thousand Btu)',
            'Usage/Quantity': 200,
        }]

        expected = [{
            'property_id': self.property.id,
            'source': Meter.PORTFOLIO_MANAGER,
            'source_id': '123-PMMeterID',
            'type': Meter.ELECTRICITY_GRID,
            'readings': [{
                'start_time': make_aware(datetime(2016, 3, 1, 0, 0, 0), timezone=self.tz_obj),
                'end_time': make_aware(datetime(2016, 4, 1, 0, 0, 0), timezone=self.tz_obj),
                'reading': 100,
                'source_unit': 'kBtu (thousand Btu)',
                'conversion_factor': 1
            }]
        }, {
            'property_id': self.property.id,
            'source': Meter.PORTFOLIO_MANAGER,
            'source_id': '123-PMMeterID',
            'type': Meter.NATURAL_GAS,
            'readings': [{
                'start_time': make_aware(datetime(2016, 3, 1, 0, 0, 0), timezone=self.tz_obj),
                'end_time': make_aware(datetime(2016, 4, 1, 0, 0, 0), timezone=self.tz_obj),
                'reading': 200,
                'source_unit': 'kBtu (thousand Btu)',
                'conversion_factor': 1
            }]
        }]

        meters_parser = MetersParser(self.org.id, raw_meters)

        self.assertEqual(meters_parser.meter_and_reading_objs, expected)

    def test_parse_meter_details_splits_monthly_info_including_cost_into_meter_data_and_readings(
            self):
        """A 'Cost ($)' column produces an additional COST meter per source meter."""
        raw_meters = [{
            'Portfolio Manager ID': self.pm_property_id,
            'Portfolio Manager Meter ID': '123-PMMeterID-el',
            'Start Date': '2016-03-01 00:00:00',
            'End Date': '2016-04-01 00:00:00',
            'Usage Units': 'kBtu (thousand Btu)',
            'Meter Type': 'Electric - Grid',
            'Usage/Quantity': 100,
            'Cost ($)': 100,
        }, {
            'Portfolio Manager ID': self.pm_property_id,
            'Portfolio Manager Meter ID': '123-PMMeterID-gas',
            'Start Date': '2016-03-01 00:00:00',
            'End Date': '2016-04-01 00:00:00',
            'Meter Type': 'Natural Gas',
            'Usage Units': 'kBtu (thousand Btu)',
            'Usage/Quantity': 200,
            'Cost ($)': 50,
        }]

        expected = [
            {
                'property_id': self.property.id,
                'source': Meter.PORTFOLIO_MANAGER,
                'source_id': '123-PMMeterID-el',
                'type': Meter.ELECTRICITY_GRID,
                'readings': [{
                    'start_time': make_aware(datetime(2016, 3, 1, 0, 0, 0), timezone=self.tz_obj),
                    'end_time': make_aware(datetime(2016, 4, 1, 0, 0, 0), timezone=self.tz_obj),
                    'reading': 100,
                    'source_unit': 'kBtu (thousand Btu)',
                    'conversion_factor': 1
                }]
            },
            {
                'property_id': self.property.id,
                'source': Meter.PORTFOLIO_MANAGER,
                'source_id': '123-PMMeterID-el',
                'type': Meter.COST,
                'readings': [{
                    'start_time': make_aware(datetime(2016, 3, 1, 0, 0, 0), timezone=self.tz_obj),
                    'end_time': make_aware(datetime(2016, 4, 1, 0, 0, 0), timezone=self.tz_obj),
                    'reading': 100,
                    'source_unit': 'US Dollars',
                    'conversion_factor': 1
                }]
            },
            {
                'property_id': self.property.id,
                'source': Meter.PORTFOLIO_MANAGER,
                'source_id': '123-PMMeterID-gas',
                'type': Meter.NATURAL_GAS,
                'readings': [{
                    'start_time': make_aware(datetime(2016, 3, 1, 0, 0, 0), timezone=self.tz_obj),
                    'end_time': make_aware(datetime(2016, 4, 1, 0, 0, 0), timezone=self.tz_obj),
                    'reading': 200,
                    'source_unit': 'kBtu (thousand Btu)',
                    'conversion_factor': 1
                }]
            },
            {
                'property_id': self.property.id,
                'source': Meter.PORTFOLIO_MANAGER,
                'source_id': '123-PMMeterID-gas',
                'type': Meter.COST,
                'readings': [{
                    'start_time': make_aware(datetime(2016, 3, 1, 0, 0, 0), timezone=self.tz_obj),
                    'end_time': make_aware(datetime(2016, 4, 1, 0, 0, 0), timezone=self.tz_obj),
                    'reading': 50,
                    'source_unit': 'US Dollars',
                    'conversion_factor': 1
                }]
            },
        ]

        meters_parser = MetersParser(self.org.id, raw_meters)

        self.assertEqual(meters_parser.meter_and_reading_objs, expected)

    def test_parser_uses_canadian_thermal_conversion_assumptions_if_org_specifies_it(
            self):
        """With Organization.CAN, gas converts via CAN factors and cost is CAN Dollars."""
        self.org.thermal_conversion_assumption = Organization.CAN
        self.org.save()

        raw_meters = [{
            'Portfolio Manager ID': self.pm_property_id,
            'Portfolio Manager Meter ID': '123-PMMeterID-gas',
            'Start Date': '2016-03-01 00:00:00',
            'End Date': '2016-04-01 00:00:00',
            'Meter Type': 'Natural Gas',
            'Usage Units': 'cm (cubic meters)',
            'Usage/Quantity': 1000,
            'Cost ($)': 100,
        }]

        expected = [
            {
                'property_id': self.property.id,
                'source': Meter.PORTFOLIO_MANAGER,
                'source_id': '123-PMMeterID-gas',
                'type': Meter.NATURAL_GAS,
                'readings': [{
                    'start_time': make_aware(datetime(2016, 3, 1, 0, 0, 0), timezone=self.tz_obj),
                    'end_time': make_aware(datetime(2016, 4, 1, 0, 0, 0), timezone=self.tz_obj),
                    # 1000 cubic meters * 36.42 kBtu/cm (Canadian assumption)
                    'reading': 36420.0,
                    'source_unit': 'cm (cubic meters)',
                    'conversion_factor': 36.42,
                }],
            },
            {
                'property_id': self.property.id,
                'source': Meter.PORTFOLIO_MANAGER,
                'source_id': '123-PMMeterID-gas',
                'type': Meter.COST,
                'readings': [{
                    'start_time': make_aware(datetime(2016, 3, 1, 0, 0, 0), timezone=self.tz_obj),
                    'end_time': make_aware(datetime(2016, 4, 1, 0, 0, 0), timezone=self.tz_obj),
                    'reading': 100,
                    'source_unit': 'CAN Dollars',
                    'conversion_factor': 1,
                }],
            },
        ]

        meters_parser = MetersParser(self.org.id, raw_meters)

        self.assertEqual(meters_parser.meter_and_reading_objs, expected)

    def test_parse_meter_details_works_with_multiple_meters_impacted_by_a_leap_year(
            self):
        """Feb 2016 (leap) and Feb 2017 readings group under the same meters."""
        # NOTE: a dead first raw_meters literal (old 'Property Id'/'Month'
        # format) that was immediately overwritten has been removed here.
        raw_meters = [{
            'Portfolio Manager ID': self.pm_property_id,
            'Portfolio Manager Meter ID': '123-PMMeterID',
            'Start Date': '2016-02-01 00:00:00',
            'End Date': '2016-03-01 00:00:00',
            'Meter Type': 'Electric - Grid',
            'Usage Units': 'kBtu (thousand Btu)',
            'Usage/Quantity': 111,
        }, {
            'Portfolio Manager ID': self.pm_property_id,
            'Portfolio Manager Meter ID': '123-PMMeterID',
            'Start Date': '2016-02-01 00:00:00',
            'End Date': '2016-03-01 00:00:00',
            'Meter Type': 'Natural Gas',
            'Usage Units': 'kBtu (thousand Btu)',
            'Usage/Quantity': 333,
        }, {
            'Portfolio Manager ID': self.pm_property_id,
            'Portfolio Manager Meter ID': '123-PMMeterID',
            'Start Date': '2017-02-01 00:00:00',
            'End Date': '2017-03-01 00:00:00',
            'Meter Type': 'Electric - Grid',
            'Usage Units': 'kBtu (thousand Btu)',
            'Usage/Quantity': 222,
        }, {
            'Portfolio Manager ID': self.pm_property_id,
            'Portfolio Manager Meter ID': '123-PMMeterID',
            'Start Date': '2017-02-01 00:00:00',
            'End Date': '2017-03-01 00:00:00',
            'Meter Type': 'Natural Gas',
            'Usage Units': 'kBtu (thousand Btu)',
            'Usage/Quantity': 444,
        }]

        expected = [{
            'property_id': self.property.id,
            'source': Meter.PORTFOLIO_MANAGER,
            'source_id': '123-PMMeterID',
            'type': Meter.ELECTRICITY_GRID,
            'readings': [{
                'start_time': make_aware(datetime(2016, 2, 1, 0, 0, 0), timezone=self.tz_obj),
                'end_time': make_aware(datetime(2016, 3, 1, 0, 0, 0), timezone=self.tz_obj),
                'reading': 111,
                'source_unit': 'kBtu (thousand Btu)',
                'conversion_factor': 1
            }, {
                'start_time': make_aware(datetime(2017, 2, 1, 0, 0, 0), timezone=self.tz_obj),
                'end_time': make_aware(datetime(2017, 3, 1, 0, 0, 0), timezone=self.tz_obj),
                'reading': 222,
                'source_unit': 'kBtu (thousand Btu)',
                'conversion_factor': 1
            }]
        }, {
            'property_id': self.property.id,
            'source': Meter.PORTFOLIO_MANAGER,
            'source_id': '123-PMMeterID',
            'type': Meter.NATURAL_GAS,
            'readings': [{
                'start_time': make_aware(datetime(2016, 2, 1, 0, 0, 0), timezone=self.tz_obj),
                'end_time': make_aware(datetime(2016, 3, 1, 0, 0, 0), timezone=self.tz_obj),
                'reading': 333,
                'source_unit': 'kBtu (thousand Btu)',
                'conversion_factor': 1
            }, {
                'start_time': make_aware(datetime(2017, 2, 1, 0, 0, 0), timezone=self.tz_obj),
                'end_time': make_aware(datetime(2017, 3, 1, 0, 0, 0), timezone=self.tz_obj),
                'reading': 444,
                'source_unit': 'kBtu (thousand Btu)',
                'conversion_factor': 1
            }]
        }]

        meters_parser = MetersParser(self.org.id, raw_meters)

        self.assertEqual(meters_parser.meter_and_reading_objs, expected)

    def test_parse_meter_details_converts_energy_units_if_necessary(self):
        """ccf and GJ usage values are converted to kBtu via the US factors."""
        raw_meters = [{
            'Portfolio Manager ID': self.pm_property_id,
            'Portfolio Manager Meter ID': '123-PMMeterID',
            'Start Date': '2016-03-01 00:00:00',
            'End Date': '2016-04-01 00:00:00',
            'Meter Type': 'Natural Gas',
            'Usage Units': 'ccf (hundred cubic feet)',
            'Usage/Quantity': 1000,
        }, {
            'Portfolio Manager ID': self.pm_property_id,
            'Portfolio Manager Meter ID': '123-PMMeterID',
            'Start Date': '2016-03-01 00:00:00',
            'End Date': '2016-04-01 00:00:00',
            'Meter Type': 'Fuel Oil (No. 1)',
            'Usage Units': 'GJ',
            'Usage/Quantity': 1000,
        }]

        meters_parser = MetersParser(self.org.id, raw_meters)
        result = meters_parser.meter_and_reading_objs

        # result ordering is not guaranteed; identify each meter by type
        if result[0]["type"] == Meter.FUEL_OIL_NO_1:
            fuel_oil_details = result[0]
            gas_details = result[1]
        else:
            fuel_oil_details = result[1]
            gas_details = result[0]

        self.assertEqual(fuel_oil_details["readings"][0]["reading"], 947820)
        self.assertEqual(fuel_oil_details["readings"][0]["source_unit"], "GJ")
        self.assertEqual(fuel_oil_details["readings"][0]["conversion_factor"], 947.82)

        self.assertEqual(gas_details["readings"][0]["reading"], 102600)
        self.assertEqual(gas_details["readings"][0]["source_unit"], "ccf (hundred cubic feet)")
        self.assertEqual(gas_details["readings"][0]["conversion_factor"], 102.6)

    def test_unlinked_properties_are_identified(self):
        """Rows whose PM IDs match no PropertyState are reported, not parsed."""
        raw_meters = [{
            'Portfolio Manager ID': "11111111",
            'Portfolio Manager Meter ID': '123-PMMeterID',
            'Start Date': '2016-03-01 00:00:00',
            'End Date': '2016-04-01 00:00:00',
            'Meter Type': 'Electric - Grid',
            'Usage Units': 'kBtu (thousand Btu)',
            'Usage/Quantity': 100,
        }, {
            'Portfolio Manager ID': "22222222",
            'Portfolio Manager Meter ID': '123-PMMeterID',
            'Start Date': '2016-03-01 00:00:00',
            'End Date': '2016-04-01 00:00:00',
            'Meter Type': 'Electric - Grid',
            'Usage Units': 'kBtu (thousand Btu)',
            'Usage/Quantity': 100,
        }, {
            'Portfolio Manager ID': "22222222",
            'Portfolio Manager Meter ID': '123-PMMeterID',
            'Start Date': '2016-04-01 00:00:00',
            'End Date': '2016-05-01 00:00:00',
            'Meter Type': 'Electric - Grid',
            'Usage Units': 'kBtu (thousand Btu)',
            'Usage/Quantity': 100,
        }]

        meters_parser = MetersParser(self.org.id, raw_meters)

        # each unlinkable PM ID is reported once, regardless of row count
        expected = [
            {
                'portfolio_manager_id': "11111111"
            },
            {
                'portfolio_manager_id': "22222222"
            },
        ]

        self.assertCountEqual(expected, meters_parser.unlinkable_pm_ids)
        self.assertEqual([], meters_parser.meter_and_reading_objs)

    def test_meters_parser_can_handle_raw_meters_with_start_time_and_duration_involving_DST_change_and_a_leap_year(
            self):
        """GreenButton epoch start_time + duration yields correct aware intervals."""
        raw_meters = [
            {
                'start_time': 1552211999,  # Mar. 10, 2019 01:59:59 (pre-DST change)
                'source_id': 'ABCDEF',
                'duration': 900,
                'Meter Type': 'Natural Gas',
                'Usage Units': 'GJ',
                'Usage/Quantity': 100
            }, {
                'start_time': 1456732799,  # Feb. 28, 2016 23:59:59 (leap year)
                'source_id': 'ABCDEF',
                'duration': 900,
                'Meter Type': 'Natural Gas',
                'Usage Units': 'GJ',
                'Usage/Quantity': 1000
            }
        ]

        expected = [{
            'property_id': self.property.id,
            'source': Meter.GREENBUTTON,
            'source_id': 'ABCDEF',
            'type': Meter.NATURAL_GAS,
            'readings': [
                {
                    'start_time': make_aware(datetime(2019, 3, 10, 1, 59, 59), timezone=self.tz_obj),
                    # 900s later wall-clock jumps over the DST spring-forward hour
                    'end_time': make_aware(datetime(2019, 3, 10, 3, 14, 59), timezone=self.tz_obj),
                    'reading': 94782.0,
                    'source_unit': 'GJ',
                    'conversion_factor': 947.82
                }, {
                    'start_time': make_aware(datetime(2016, 2, 28, 23, 59, 59), timezone=self.tz_obj),
                    # crosses into Feb 29 of the leap year
                    'end_time': make_aware(datetime(2016, 2, 29, 0, 14, 59), timezone=self.tz_obj),
                    'reading': 947820.0,
                    'source_unit': 'GJ',
                    'conversion_factor': 947.82
                },
            ]
        }]

        meters_parser = MetersParser(self.org.id, raw_meters,
                                     source_type=Meter.GREENBUTTON,
                                     property_id=self.property.id)

        self.assertEqual(meters_parser.meter_and_reading_objs, expected)

    def test_meters_parser_can_handle_delivered_PM_meters(self):
        """Delivery Date rows ('Not Available' start/end) map to the delivery month."""
        raw_meters = [{
            'Portfolio Manager ID': self.pm_property_id,
            'Portfolio Manager Meter ID': '123-PMMeterID',
            'Start Date': 'Not Available',
            'End Date': 'Not Available',
            'Delivery Date': '2016-03-05 00:00:00',
            'Meter Type': 'Electric - Grid',
            'Usage Units': 'kBtu (thousand Btu)',
            'Usage/Quantity': 100,
        }, {
            'Portfolio Manager ID': self.pm_property_id,
            'Portfolio Manager Meter ID': '123-PMMeterID',
            'Start Date': 'Not Available',
            'End Date': 'Not Available',
            'Delivery Date': '2016-03-01 00:00:00',
            'Meter Type': 'Natural Gas',
            'Usage Units': 'kBtu (thousand Btu)',
            'Usage/Quantity': 200,
        }]

        expected = [{
            'property_id': self.property.id,
            'source': Meter.PORTFOLIO_MANAGER,
            'source_id': '123-PMMeterID',
            'type': Meter.ELECTRICITY_GRID,
            'readings': [{
                'start_time': make_aware(datetime(2016, 3, 1, 0, 0, 0), timezone=self.tz_obj),
                'end_time': make_aware(datetime(2016, 4, 1, 0, 0, 0), timezone=self.tz_obj),
                'reading': 100,
                'source_unit': 'kBtu (thousand Btu)',
                'conversion_factor': 1
            }]
        }, {
            'property_id': self.property.id,
            'source': Meter.PORTFOLIO_MANAGER,
            'source_id': '123-PMMeterID',
            'type': Meter.NATURAL_GAS,
            'readings': [{
                'start_time': make_aware(datetime(2016, 3, 1, 0, 0, 0), timezone=self.tz_obj),
                'end_time': make_aware(datetime(2016, 4, 1, 0, 0, 0), timezone=self.tz_obj),
                'reading': 200,
                'source_unit': 'kBtu (thousand Btu)',
                'conversion_factor': 1
            }]
        }]

        meters_parser = MetersParser(self.org.id, raw_meters)

        self.assertEqual(meters_parser.meter_and_reading_objs, expected)
class PropertyViewTests(DeleteModelsTestCase):
    """Endpoint tests for the v2/v2.1 property list, detail-edit, and search views.

    Fix: the debug output in ``test_search_identifier`` used the Python-2-only
    ``print results`` statement (a syntax error on Python 3); it now uses the
    ``print(...)`` call form, valid on both.
    """

    def setUp(self):
        # Superuser + org, logged in; factories create the property fixtures.
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.user = User.objects.create_superuser(**user_details)
        self.org, self.org_user, _ = create_organization(self.user)
        self.column_factory = FakeColumnFactory(organization=self.org)
        self.cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(organization=self.org)
        self.property_view_factory = FakePropertyViewFactory(organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(organization=self.org)
        self.cycle = self.cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=timezone.get_current_timezone()))
        self.client.login(**user_details)

    def test_get_and_edit_properties(self):
        """Listing returns the view; a PUT edit updates state and bumps 'updated'."""
        state = self.property_state_factory.get_property_state()
        prprty = self.property_factory.get_property()
        view = PropertyView.objects.create(
            property=prprty, cycle=self.cycle, state=state
        )
        params = {
            'organization_id': self.org.pk,
            'page': 1,
            'per_page': 999999999,
            'columns': COLUMNS_TO_SEND,
        }

        url = reverse('api:v2.1:properties-list') + '?cycle_id={}'.format(self.cycle.pk)
        response = self.client.get(url, params)
        data = json.loads(response.content)
        self.assertEqual(len(data['properties']), 1)
        result = data['properties'][0]
        self.assertEqual(result['state']['address_line_1'], state.address_line_1)

        db_created_time = result['created']
        db_updated_time = result['updated']
        self.assertTrue(db_created_time is not None)
        self.assertTrue(db_updated_time is not None)

        # update the address
        new_data = {
            "state": {
                "address_line_1": "742 Evergreen Terrace"
            }
        }
        url = reverse('api:v2:properties-detail',
                      args=[view.id]) + '?organization_id={}'.format(self.org.pk)
        response = self.client.put(url, json.dumps(new_data), content_type='application/json')
        data = json.loads(response.content)
        self.assertEqual(data['status'], 'success')

        # the above call returns data from the PropertyState, need to get the Property --
        # call the get on the same API to retrieve it
        response = self.client.get(url, content_type='application/json')
        data = json.loads(response.content)

        # make sure the address was updated and that the datetimes were modified
        self.assertEqual(data['status'], 'success')
        self.assertEqual(data['state']['address_line_1'], '742 Evergreen Terrace')
        # created should be unchanged (compared at second precision)
        self.assertEqual(datetime.strptime(db_created_time, "%Y-%m-%dT%H:%M:%S.%fZ").replace(microsecond=0),
                         datetime.strptime(data['property']['created'],
                                           "%Y-%m-%dT%H:%M:%S.%fZ").replace(microsecond=0))
        # updated must have moved forward
        self.assertGreater(datetime.strptime(data['property']['updated'], "%Y-%m-%dT%H:%M:%S.%fZ"),
                           datetime.strptime(db_updated_time, "%Y-%m-%dT%H:%M:%S.%fZ"))

    def test_search_identifier(self):
        """identifier and analysis_state query params filter the property list."""
        self.property_view_factory.get_property_view(cycle=self.cycle, custom_id_1='123456')
        self.property_view_factory.get_property_view(cycle=self.cycle,
                                                     custom_id_1='987654 Long Street')
        self.property_view_factory.get_property_view(cycle=self.cycle,
                                                     address_line_1='123 Main Street')
        self.property_view_factory.get_property_view(
            cycle=self.cycle, address_line_1='Hamilton Road',
            analysis_state=PropertyState.ANALYSIS_STATE_QUEUED)
        self.property_view_factory.get_property_view(
            cycle=self.cycle, custom_id_1='long road',
            analysis_state=PropertyState.ANALYSIS_STATE_QUEUED)

        # Typically looks like this
        # http://localhost:8000/api/v2.1/properties/?organization_id=265&cycle=219&identifier=09-IS

        # check for all items
        query_params = "?cycle={}&organization_id={}".format(self.cycle.pk, self.org.pk)
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        self.assertEqual(len(results), 5)

        # check for 2 items with 123
        query_params = "?cycle={}&organization_id={}&identifier={}".format(self.cycle.pk,
                                                                           self.org.pk, '123')
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        # print out the result of this when there are more than two in an attempt to catch the
        # non-deterministic part of this test
        if len(results) > 2:
            print(results)  # was the py2-only statement form `print results`

        self.assertEqual(len(results), 2)

        # check the analysis states
        query_params = "?cycle={}&organization_id={}&analysis_state={}".format(self.cycle.pk,
                                                                               self.org.pk,
                                                                               'Completed')
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        self.assertEqual(len(results), 0)

        query_params = "?cycle={}&organization_id={}&analysis_state={}".format(
            self.cycle.pk, self.org.pk, 'Not Started'
        )
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        self.assertEqual(len(results), 3)

        query_params = "?cycle={}&organization_id={}&analysis_state={}".format(
            self.cycle.pk, self.org.pk, 'Queued'
        )
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        self.assertEqual(len(results), 2)

        # check the combination of both the identifier and the analysis state
        query_params = "?cycle={}&organization_id={}&identifier={}&analysis_state={}".format(
            self.cycle.pk, self.org.pk, 'Long', 'Queued'
        )
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        self.assertEqual(len(results), 1)
class TestProfileIdMixin(TestCase):
    """Tests for ProfileIdMixin -- provides the get_show_columns method.

    (The previous docstring said "OrgMixin / get_organization_id", which does
    not match what this class exercises.)
    """

    def setUp(self):
        # maxDiff=None so assertListEqual failures show the full column lists
        self.maxDiff = None
        user_details = {
            'username': '******',
            'password': '******',
        }
        self.user = User.objects.create_superuser(
            email='*****@*****.**', **user_details)
        self.org, self.org_user, _ = create_organization(self.user)
        self.cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(organization=self.org)
        self.column_list_factory = FakeColumnListSettingsFactory(organization=self.org)
        self.cycle = self.cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=timezone.get_current_timezone())
        )

        # concrete subclass so the mixin's methods can be called on an instance
        class ProfileIdMixInclass(ProfileIdMixin):
            pass

        self.mixin_class = ProfileIdMixInclass()

    def tearDown(self):
        # explicit cleanup of the objects created in setUp/tests
        PropertyView.objects.all().delete()
        self.user.delete()
        self.org.delete()
        self.org_user.delete()

    def test_get_profile_id(self):
        """test get_show_columns method"""
        state = self.property_state_factory.get_property_state(extra_data={"field_1": "value_1"})
        prprty = self.property_factory.get_property()
        PropertyView.objects.create(
            property=prprty, cycle=self.cycle, state=state
        )

        # save all the columns in the state to the database so we can setup column list settings
        Column.save_column_names(state)

        # None / -1 / nonexistent profile ids all fall back to showing every column
        columns = self.mixin_class.get_show_columns(self.org.id, None)
        self.assertGreater(len(columns['fields']), 10)
        self.assertListEqual(columns['extra_data'], ['field_1'])

        columns = self.mixin_class.get_show_columns(self.org.id, -1)
        self.assertGreater(len(columns['fields']), 10)
        self.assertListEqual(columns['extra_data'], ['field_1'])

        columns = self.mixin_class.get_show_columns(self.org.id, 1000000)
        self.assertGreater(len(columns['fields']), 10)
        self.assertListEqual(columns['extra_data'], ['field_1'])

        # no extra data
        columnlistsetting = self.column_list_factory.get_columnlistsettings(
            columns=['address_line_1', 'site_eui']
        )
        columns = self.mixin_class.get_show_columns(self.org.id, columnlistsetting.id)
        self.assertListEqual(columns['fields'], ['extra_data', 'id', 'address_line_1', 'site_eui'])
        self.assertListEqual(columns['extra_data'], [])

        # with extra data
        columnlistsetting = self.column_list_factory.get_columnlistsettings(
            columns=['address_line_1', 'site_eui', 'field_1']
        )
        columns = self.mixin_class.get_show_columns(self.org.id, columnlistsetting.id)
        self.assertListEqual(columns['fields'], ['extra_data', 'id', 'address_line_1', 'site_eui'])
        self.assertListEqual(columns['extra_data'], ['field_1'])
def test_merged_indicators_provided_on_filter_endpoint(self): _import_record, import_file_1 = self.create_import_file( self.user, self.org, self.cycle) base_details = { 'address_line_1': '123 Match Street', 'import_file_id': import_file_1.id, 'data_state': DATA_STATE_MAPPING, 'no_default_data': False, } self.taxlot_state_factory.get_taxlot_state(**base_details) # set import_file_1 mapping done so that record is "created for users to view". import_file_1.mapping_done = True import_file_1.save() match_buildings(import_file_1.id) _import_record_2, import_file_2 = self.create_import_file( self.user, self.org, self.cycle) url = reverse( 'api:v2:taxlots-filter' ) + '?cycle_id={}&organization_id={}&page=1&per_page=999999999'.format( self.cycle.pk, self.org.pk) response = self.client.post(url) data = json.loads(response.content) self.assertFalse(data['results'][0]['merged_indicator']) # make sure merged_indicator is True when merge occurs base_details['city'] = 'Denver' base_details['import_file_id'] = import_file_2.id self.taxlot_state_factory.get_taxlot_state(**base_details) # set import_file_2 mapping done so that match merging can occur. 
import_file_2.mapping_done = True import_file_2.save() match_buildings(import_file_2.id) url = reverse( 'api:v2:taxlots-filter' ) + '?cycle_id={}&organization_id={}&page=1&per_page=999999999'.format( self.cycle.pk, self.org.pk) response = self.client.post(url) data = json.loads(response.content) self.assertTrue(data['results'][0]['merged_indicator']) # Create pairings and check if paired object has indicator as well property_factory = FakePropertyFactory(organization=self.org) property_state_factory = FakePropertyStateFactory( organization=self.org) property = property_factory.get_property() property_state = property_state_factory.get_property_state() property_view = PropertyView.objects.create(property=property, cycle=self.cycle, state=property_state) # attach pairing to one and only taxlot_view TaxLotProperty(primary=True, cycle_id=self.cycle.id, property_view_id=property_view.id, taxlot_view_id=TaxLotView.objects.get().id).save() url = reverse( 'api:v2:taxlots-filter' ) + '?cycle_id={}&organization_id={}&page=1&per_page=999999999'.format( self.cycle.pk, self.org.pk) response = self.client.post(url) data = json.loads(response.content) related = data['results'][0]['related'][0] self.assertTrue('merged_indicator' in related) self.assertFalse(related['merged_indicator'])
class PropertyUnmergeViewTests(DeleteModelsTestCase):
    """Tests for the properties-unmerge endpoint.

    setUp merges two properties (one of which has GreenButton meters) so each
    test starts from a single merged PropertyView.
    """

    def setUp(self):
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.user = User.objects.create_superuser(**user_details)
        self.org, self.org_user, _ = create_organization(self.user)

        self.cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(organization=self.org)

        self.cycle = self.cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=get_current_timezone()))
        self.client.login(**user_details)

        self.state_1 = self.property_state_factory.get_property_state(
            address_line_1='1 property state',
            pm_property_id='5766973'  # this allows the Property to be targetted for PM meter additions
        )
        self.property_1 = self.property_factory.get_property()
        PropertyView.objects.create(
            property=self.property_1, cycle=self.cycle, state=self.state_1
        )

        self.state_2 = self.property_state_factory.get_property_state(
            address_line_1='2 property state')
        self.property_2 = self.property_factory.get_property()
        PropertyView.objects.create(
            property=self.property_2, cycle=self.cycle, state=self.state_2
        )

        self.import_record = ImportRecord.objects.create(
            owner=self.user,
            last_modified_by=self.user,
            super_organization=self.org)

        # Give 2 meters to one of the properties
        gb_filename = "example-GreenButton-data.xml"
        filepath = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), "data", gb_filename)
        # Use a context manager so the fixture handle is closed
        # (the original left the file object unclosed).
        with open(filepath, 'rb') as fixture:
            gb_content = fixture.read()
        gb_import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="GreenButton",
            uploaded_filename=gb_filename,
            file=SimpleUploadedFile(name=gb_filename, content=gb_content),
            cycle=self.cycle,
            matching_results_data={"property_id": self.property_1.id}  # this is how target property is specified
        )
        gb_import_url = reverse("api:v2:import_files-save-raw-data", args=[gb_import_file.id])
        gb_import_post_params = {
            'cycle_id': self.cycle.pk,
            'organization_id': self.org.pk,
        }
        self.client.post(gb_import_url, gb_import_post_params)

        # Merge the properties (priority given to state_1)
        url = reverse('api:v2:properties-merge') + '?organization_id={}'.format(self.org.pk)
        post_params = json.dumps({
            'state_ids': [self.state_2.pk, self.state_1.pk]  # priority given to state_1
        })
        self.client.post(url, post_params, content_type='application/json')

    def test_properties_unmerge_without_losing_labels(self):
        """Labels on the merged view are copied to both resulting views."""
        # Create 2 Labels and add both to the single merged view
        label_factory = FakeStatusLabelFactory(organization=self.org)
        label_1 = label_factory.get_statuslabel()
        label_2 = label_factory.get_statuslabel()

        view = PropertyView.objects.first()  # There's only one PropertyView
        view.labels.add(label_1, label_2)

        # Unmerge the properties
        url = reverse('api:v2:properties-unmerge', args=[view.id]) + '?organization_id={}'.format(self.org.pk)
        self.client.post(url, content_type='application/json')

        for new_view in PropertyView.objects.all():
            self.assertEqual(new_view.labels.count(), 2)
            label_names = list(new_view.labels.values_list('name', flat=True))
            self.assertCountEqual(label_names, [label_1.name, label_2.name])

    def test_unmerging_assigns_new_canonical_records_to_each_resulting_records(self):
        """Unmerge creates brand-new canonical Property records for both views."""
        # Capture old property_ids
        view = PropertyView.objects.first()  # There's only one PropertyView
        existing_property_ids = [
            view.property_id,
            self.property_1.id,
            self.property_2.id,
        ]

        # Unmerge the properties
        url = reverse('api:v2:properties-unmerge', args=[view.id]) + '?organization_id={}'.format(self.org.pk)
        self.client.post(url, content_type='application/json')

        # None of the resulting views reference any of the old canonical records.
        self.assertFalse(
            PropertyView.objects.filter(property_id__in=existing_property_ids).exists())

    def test_unmerging_two_properties_with_meters_gives_meters_to_both_of_the_resulting_records(self):
        """Both unmerged records receive identical copies of the meters/readings."""
        # Unmerge the properties
        view_id = PropertyView.objects.first().id  # There's only one PropertyView
        url = reverse('api:v2:properties-unmerge', args=[view_id]) + '?organization_id={}'.format(self.org.pk)
        self.client.post(url, content_type='application/json')

        # Verify 2 -Views now exist
        self.assertEqual(PropertyView.objects.count(), 2)

        # Check that meters and readings of each -View exists and verify they are identical.
        reading_sets = []
        for view in PropertyView.objects.all():
            self.assertEqual(view.property.meters.count(), 1)
            self.assertEqual(view.property.meters.first().meter_readings.count(), 2)
            reading_sets.append([
                {
                    'start_time': reading.start_time,
                    'end_time': reading.end_time,
                    'reading': reading.reading,
                    'source_unit': reading.source_unit,
                    'conversion_factor': reading.conversion_factor,
                }
                for reading in view.property.meters.first().meter_readings.all().order_by('start_time')
            ])

        self.assertEqual(reading_sets[0], reading_sets[1])

    def test_unmerge_results_in_the_use_of_new_canonical_records_and_deletion_of_old_canonical_state_if_unrelated_to_any_views(self):
        """The merged canonical Property is deleted when no other view references it."""
        # Capture "old" property_id - there's only one PropertyView
        view = PropertyView.objects.first()
        property_id = view.property_id

        # Unmerge the properties
        url = reverse('api:v2:properties-unmerge', args=[view.id]) + '?organization_id={}'.format(self.org.pk)
        self.client.post(url, content_type='application/json')

        self.assertFalse(Property.objects.filter(pk=property_id).exists())
        self.assertEqual(Property.objects.count(), 2)

    def test_unmerge_results_in_the_persistence_of_old_canonical_state_if_related_to_any_views(self):
        """The merged canonical Property survives when a view in another cycle uses it."""
        # Associate only canonical property with records across Cycle
        view = PropertyView.objects.first()
        property_id = view.property_id

        new_cycle = self.cycle_factory.get_cycle(
            start=datetime(2011, 10, 10, tzinfo=get_current_timezone())
        )
        new_property_state = self.property_state_factory.get_property_state()
        PropertyView.objects.create(
            property_id=property_id,
            cycle=new_cycle,
            state=new_property_state
        )

        # Unmerge the properties
        url = reverse('api:v2:properties-unmerge', args=[view.id]) + '?organization_id={}'.format(self.org.pk)
        self.client.post(url, content_type='application/json')

        self.assertTrue(Property.objects.filter(pk=view.property_id).exists())
        self.assertEqual(Property.objects.count(), 3)
def setUp(self):
    """Build the analysis fixtures: two orgs, two cycles, four analyses
    spanning shared/distinct properties and cycles, plus two output files."""
    # Credentials double as the login payload below.
    credentials = {
        'username': '******',
        'password': '******',
        'email': '*****@*****.**',
        'first_name': 'Test',
        'last_name': 'User',
    }
    self.user = User.objects.create_user(**credentials)
    self.org, self.org_user, _ = create_organization(self.user)
    # NOTE(review): self.org_user is reassigned by this second call —
    # confirm that clobbering the first org's membership is intended.
    self.org_b, self.org_user, _ = create_organization(self.user)
    self.client.login(**credentials)

    cycle_maker = FakeCycleFactory(organization=self.org, user=self.user)
    cycle_a = cycle_maker.get_cycle(name="Cycle A")
    cycle_b = cycle_maker.get_cycle(name="Cycle B")

    prop_maker = FakePropertyFactory(organization=self.org)
    self.property_a = prop_maker.get_property()
    other_property = prop_maker.get_property()

    state_maker = FakePropertyStateFactory(organization=self.org)
    state_a = state_maker.get_property_state()
    state_b = state_maker.get_property_state()
    state_c = state_maker.get_property_state()
    state_d = state_maker.get_property_state()

    # create an analysis with two property views, each with the same property but a different cycle
    self.analysis_a = Analysis.objects.create(
        name='test a',
        service=Analysis.BSYNCR,
        status=Analysis.CREATING,
        user=self.user,
        organization=self.org)
    self.analysis_property_view_a = AnalysisPropertyView.objects.create(
        analysis=self.analysis_a,
        property=self.property_a,
        cycle=cycle_a,
        property_state=state_a)
    self.analysis_property_view_b = AnalysisPropertyView.objects.create(
        analysis=self.analysis_a,
        property=self.property_a,
        cycle=cycle_b,
        property_state=state_b)

    # create an analysis with two property views, each with the same cycle but a different property
    self.analysis_b = Analysis.objects.create(
        name='test b',
        service=Analysis.BSYNCR,
        status=Analysis.READY,
        user=self.user,
        organization=self.org)
    self.analysis_property_view_c = AnalysisPropertyView.objects.create(
        analysis=self.analysis_b,
        property=self.property_a,
        cycle=cycle_a,
        property_state=state_c)
    self.analysis_property_view_d = AnalysisPropertyView.objects.create(
        analysis=self.analysis_b,
        property=other_property,
        cycle=cycle_a,
        property_state=state_d)

    # create an analysis with no property views
    self.analysis_c = Analysis.objects.create(
        name='test c',
        service=Analysis.BSYNCR,
        status=Analysis.QUEUED,
        user=self.user,
        organization=self.org)

    # create an analysis with a different organization
    self.analysis_d = Analysis.objects.create(
        name='test d',
        service=Analysis.BSYNCR,
        status=Analysis.RUNNING,
        user=self.user,
        organization=self.org_b)

    # create an output file and add to 3 analysis property views
    self.analysis_output_file_a = AnalysisOutputFile.objects.create(
        file=SimpleUploadedFile('test file a', b'test file a contents'),
        content_type=AnalysisOutputFile.BUILDINGSYNC)
    self.analysis_output_file_a.analysis_property_views.add(self.analysis_property_view_a)
    self.analysis_output_file_a.analysis_property_views.add(self.analysis_property_view_b)
    self.analysis_output_file_a.analysis_property_views.add(self.analysis_property_view_c)

    # create an output file and add to 1 analysis property view
    self.analysis_output_file_b = AnalysisOutputFile.objects.create(
        file=SimpleUploadedFile('test file b', b'test file b contents'),
        content_type=AnalysisOutputFile.BUILDINGSYNC)
    self.analysis_output_file_b.analysis_property_views.add(self.analysis_property_view_a)
class PropertyMergeViewTests(DeleteModelsTestCase):
    """Tests that merging PropertyStates preserves and merges Meter data."""

    def setUp(self):
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.user = User.objects.create_superuser(**user_details)
        self.org, self.org_user, _ = create_organization(self.user)
        cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)

        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(organization=self.org)

        self.cycle = cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=get_current_timezone()))
        self.client.login(**user_details)

        self.state_1 = self.property_state_factory.get_property_state(
            address_line_1='1 property state',
            pm_property_id='5766973'  # this allows the Property to be targetted for PM meter additions
        )
        self.property_1 = self.property_factory.get_property()
        PropertyView.objects.create(
            property=self.property_1, cycle=self.cycle, state=self.state_1)

        self.state_2 = self.property_state_factory.get_property_state(
            address_line_1='2 property state')
        self.property_2 = self.property_factory.get_property()
        PropertyView.objects.create(
            property=self.property_2, cycle=self.cycle, state=self.state_2)

        self.import_record = ImportRecord.objects.create(
            owner=self.user,
            last_modified_by=self.user,
            super_organization=self.org)

    def _read_fixture(self, filename):
        """Return the bytes of a file in this module's data/ directory.

        Uses a context manager so the handle is closed immediately (the
        original tests left `open(...)` handles dangling).
        """
        filepath = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), "data", filename)
        with open(filepath, 'rb') as fixture:
            return fixture.read()

    def test_properties_merge_without_losing_meters_1st_has_meters(self):
        """Meters on the first (non-priority) property survive the merge."""
        # Assign meters to the first Property
        filename = "example-GreenButton-data.xml"
        import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="GreenButton",
            uploaded_filename=filename,
            file=SimpleUploadedFile(name=filename, content=self._read_fixture(filename)),
            cycle=self.cycle,
            matching_results_data={"property_id": self.property_1.id}  # this is how target property is specified
        )
        gb_import_url = reverse("api:v2:import_files-save-raw-data", args=[import_file.id])
        gb_import_post_params = {
            'cycle_id': self.cycle.pk,
            'organization_id': self.org.pk,
        }
        self.client.post(gb_import_url, gb_import_post_params)

        # Merge PropertyStates
        url = reverse('api:v2:properties-merge') + '?organization_id={}'.format(self.org.pk)
        post_params = json.dumps({'state_ids': [self.state_2.pk, self.state_1.pk]})
        self.client.post(url, post_params, content_type='application/json')

        # There should only be one PropertyView
        self.assertEqual(PropertyView.objects.count(), 1)

        self.assertEqual(PropertyView.objects.first().property.meters.count(), 1)
        self.assertEqual(
            PropertyView.objects.first().property.meters.first().meter_readings.count(), 2)

    def test_properties_merge_without_losing_meters_2nd_has_meters(self):
        """Meters on the second (priority) property survive the merge."""
        # Assign Meters to the second Property
        filename = "example-GreenButton-data.xml"
        import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="GreenButton",
            uploaded_filename=filename,
            file=SimpleUploadedFile(name=filename, content=self._read_fixture(filename)),
            cycle=self.cycle,
            matching_results_data={"property_id": self.property_2.id}  # this is how target property is specified
        )
        gb_import_url = reverse("api:v2:import_files-save-raw-data", args=[import_file.id])
        gb_import_post_params = {
            'cycle_id': self.cycle.pk,
            'organization_id': self.org.pk,
        }
        self.client.post(gb_import_url, gb_import_post_params)

        # Merge PropertyStates
        url = reverse('api:v2:properties-merge') + '?organization_id={}'.format(self.org.pk)
        post_params = json.dumps({'state_ids': [self.state_2.pk, self.state_1.pk]})
        self.client.post(url, post_params, content_type='application/json')

        # There should only be one PropertyView
        self.assertEqual(PropertyView.objects.count(), 1)

        self.assertEqual(PropertyView.objects.first().property.meters.count(), 1)
        self.assertEqual(
            PropertyView.objects.first().property.meters.first().meter_readings.count(), 2)

    def test_properties_merge_without_losing_meters_from_different_sources_nonoverlapping(self):
        """Meters from PM and GreenButton sources are all kept after a merge."""
        # For first Property, PM Meters containing 2 readings for each Electricty and Natural Gas for property_1
        # This file has multiple tabs
        pm_filename = "example-pm-monthly-meter-usage.xlsx"
        pm_import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="PM Meter Usage",
            uploaded_filename=pm_filename,
            file=SimpleUploadedFile(name=pm_filename, content=self._read_fixture(pm_filename)),
            cycle=self.cycle,
        )
        pm_import_url = reverse("api:v2:import_files-save-raw-data", args=[pm_import_file.id])
        pm_import_post_params = {
            'cycle_id': self.cycle.pk,
            'organization_id': self.org.pk,
        }
        self.client.post(pm_import_url, pm_import_post_params)

        # For second Property, add GreenButton Meters containing 2 readings for Electricity only
        gb_filename = "example-GreenButton-data.xml"
        gb_import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="GreenButton",
            uploaded_filename=gb_filename,
            file=SimpleUploadedFile(name=gb_filename, content=self._read_fixture(gb_filename)),
            cycle=self.cycle,
            matching_results_data={"property_id": self.property_2.id}  # this is how target property is specified
        )
        gb_import_url = reverse("api:v2:import_files-save-raw-data", args=[gb_import_file.id])
        gb_import_post_params = {
            'cycle_id': self.cycle.pk,
            'organization_id': self.org.pk,
        }
        self.client.post(gb_import_url, gb_import_post_params)

        # Merge PropertyStates
        url = reverse('api:v2:properties-merge') + '?organization_id={}'.format(self.org.pk)
        post_params = json.dumps({
            'state_ids': [self.state_2.pk, self.state_1.pk]  # priority given to state_1
        })
        self.client.post(url, post_params, content_type='application/json')

        # There should only be one PropertyView
        self.assertEqual(PropertyView.objects.count(), 1)

        # The Property of the (only) -View has all of the Meters now.
        meters = PropertyView.objects.first().property.meters
        self.assertEqual(meters.count(), 3)
        self.assertEqual(
            meters.get(type=Meter.ELECTRICITY_GRID, source=Meter.GREENBUTTON).meter_readings.count(), 2)
        self.assertEqual(
            meters.get(type=Meter.ELECTRICITY_GRID, source=Meter.PORTFOLIO_MANAGER).meter_readings.count(), 2)
        self.assertEqual(
            meters.get(type=Meter.NATURAL_GAS).meter_readings.count(), 2)

        # Old meters deleted, so only merged meters exist
        self.assertEqual(Meter.objects.count(), 3)
        self.assertEqual(MeterReading.objects.count(), 6)

    def test_properties_merge_without_losing_meters_when_some_meters_from_same_source_are_overlapping(self):
        """When readings overlap, the priority property's reading wins."""
        # For first Property, add GreenButton Meters containing 2 readings for Electricity only
        gb_filename = "example-GreenButton-data.xml"
        gb_import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="GreenButton",
            uploaded_filename=gb_filename,
            file=SimpleUploadedFile(name=gb_filename, content=self._read_fixture(gb_filename)),
            cycle=self.cycle,
            matching_results_data={"property_id": self.property_1.id}  # this is how target property is specified
        )
        gb_import_url = reverse("api:v2:import_files-save-raw-data", args=[gb_import_file.id])
        gb_import_post_params = {
            'cycle_id': self.cycle.pk,
            'organization_id': self.org.pk,
        }
        self.client.post(gb_import_url, gb_import_post_params)

        # For second Property, add GreenButton Meters containing 2 Electricitiy readings: 1 overlapping
        gb_overlapping_filename = "example-GreenButton-data-1-overlapping.xml"
        gb_overlapping_import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="GreenButton",
            uploaded_filename=gb_overlapping_filename,
            file=SimpleUploadedFile(
                name=gb_overlapping_filename,
                content=self._read_fixture(gb_overlapping_filename)),
            cycle=self.cycle,
            matching_results_data={"property_id": self.property_2.id}  # this is how target property is specified
        )
        gb_overlapping_import_url = reverse(
            "api:v2:import_files-save-raw-data", args=[gb_overlapping_import_file.id])
        gb_overlapping_import_post_params = {
            'cycle_id': self.cycle.pk,
            'organization_id': self.org.pk,
        }
        self.client.post(gb_overlapping_import_url, gb_overlapping_import_post_params)

        # Check that there are 2 overlapping readings (that are separate for now) out of 4.
        self.assertEqual(MeterReading.objects.count(), 4)
        tz_obj = timezone(TIME_ZONE)
        start_time_match = make_aware(datetime(2011, 3, 5, 21, 15, 0), timezone=tz_obj)
        end_time_match = make_aware(datetime(2011, 3, 5, 21, 30, 0), timezone=tz_obj)
        same_time_windows = MeterReading.objects.filter(
            start_time=start_time_match, end_time=end_time_match)
        self.assertEqual(same_time_windows.count(), 2)

        # Capture the overlapping reading of property_1, and ensure it's different from property_2's
        priority_property_id = self.property_1.meters.first().id
        property_1_reading = same_time_windows.get(meter_id=priority_property_id).reading
        property_2_reading = same_time_windows.exclude(meter_id=priority_property_id).get().reading
        self.assertNotEqual(property_1_reading, property_2_reading)

        # Merge PropertyStates
        url = reverse('api:v2:properties-merge') + '?organization_id={}'.format(self.org.pk)
        post_params = json.dumps({
            'state_ids': [self.state_2.pk, self.state_1.pk]  # priority given to state_1
        })
        self.client.post(url, post_params, content_type='application/json')

        # There should only be one PropertyView
        self.assertEqual(PropertyView.objects.count(), 1)

        # The Property of the (only) -View has all of the Meters now.
        meters = PropertyView.objects.first().property.meters
        self.assertEqual(meters.count(), 1)
        self.assertEqual(meters.first().meter_readings.count(), 3)

        # Old meters deleted, so only merged meters exist
        self.assertEqual(Meter.objects.count(), 1)
        self.assertEqual(MeterReading.objects.count(), 3)

        # Check that the resulting reading used belonged to property_1
        merged_reading = MeterReading.objects.filter(
            start_time=start_time_match, end_time=end_time_match)
        self.assertEqual(merged_reading.count(), 1)
        self.assertEqual(merged_reading.first().reading, property_1_reading)

        # Overlapping reading that wasn't prioritized should not exist
        self.assertFalse(
            MeterReading.objects.filter(reading=property_2_reading).exists())
class MeterUsageImportTest(TestCase):
    # Tests importing PM Meter Usage / GreenButton files and verifying the
    # resulting Meter and MeterReading records.
    # NOTE: the final test method of this class continues beyond this chunk.

    def setUp(self):
        # Two properties whose pm_property_ids match the rows of the
        # example-pm-monthly-meter-usage.xlsx fixture, plus the ImportFile
        # that every test posts to the start-save-data endpoint.
        self.user_details = {
            'username': '******',
            'password': '******',
        }
        self.user = User.objects.create_superuser(email='*****@*****.**', **self.user_details)
        self.org, _, _ = create_organization(self.user)
        self.client.login(**self.user_details)

        self.property_state_factory = FakePropertyStateFactory(organization=self.org)
        property_details = self.property_state_factory.get_details()
        property_details['organization_id'] = self.org.id

        # pm_property_ids must match those within example-monthly-meter-usage.xlsx
        self.pm_property_id_1 = '5766973'
        self.pm_property_id_2 = '5766975'

        property_details['pm_property_id'] = self.pm_property_id_1
        state_1 = PropertyState(**property_details)
        state_1.save()
        # re-fetch so the attribute holds the DB-round-tripped instance
        self.state_1 = PropertyState.objects.get(pk=state_1.id)

        property_details['pm_property_id'] = self.pm_property_id_2
        state_2 = PropertyState(**property_details)
        state_2.save()
        self.state_2 = PropertyState.objects.get(pk=state_2.id)

        self.cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        self.cycle = self.cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=get_current_timezone()))

        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_1 = self.property_factory.get_property()
        self.property_2 = self.property_factory.get_property()

        self.property_view_1 = PropertyView.objects.create(
            property=self.property_1, cycle=self.cycle, state=self.state_1)
        self.property_view_2 = PropertyView.objects.create(
            property=self.property_2, cycle=self.cycle, state=self.state_2)

        self.import_record = ImportRecord.objects.create(
            owner=self.user, last_modified_by=self.user, super_organization=self.org)

        # This file has multiple tabs
        filename = "example-pm-monthly-meter-usage.xlsx"
        filepath = os.path.dirname(os.path.abspath(__file__)) + "/data/" + filename

        self.import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="PM Meter Usage",
            uploaded_filename=filename,
            # NOTE(review): the file handle from open() is never closed — candidate cleanup
            file=SimpleUploadedFile(name=filename, content=open(filepath, 'rb').read()),
            cycle=self.cycle)

        # timezone() here is presumably pytz.timezone — confirm against imports
        self.tz_obj = timezone(TIME_ZONE)

    def test_import_meter_usage_file_base_case(self):
        """
        Expect to have 4 meters - 2 for each property - 1 for gas and 1 for electricity.
        Each meter will have 2 readings, for a total of 8 readings.
        These come from 8 meter usage rows in the .xlsx file - 1 per reading.
        """
        url = reverse("api:v3:import_files-start-save-data", args=[self.import_file.id])
        url += f'?organization_id={self.org.pk}'
        post_params = {
            'cycle_id': self.cycle.pk,
        }
        self.client.post(url, post_params)

        # First property: one electricity meter and one gas meter, 2 readings each.
        refreshed_property_1 = Property.objects.get(pk=self.property_1.id)
        self.assertEqual(refreshed_property_1.meters.all().count(), 2)

        meter_1 = refreshed_property_1.meters.get(type=Meter.ELECTRICITY_GRID)
        self.assertEqual(meter_1.source, Meter.PORTFOLIO_MANAGER)
        self.assertEqual(meter_1.source_id, '5766973-0')
        self.assertEqual(meter_1.is_virtual, False)
        self.assertEqual(meter_1.meter_readings.all().count(), 2)

        meter_reading_10, meter_reading_11 = list(meter_1.meter_readings.order_by('start_time').all())

        self.assertEqual(meter_reading_10.start_time, make_aware(datetime(2016, 1, 1, 0, 0, 0), timezone=self.tz_obj))
        self.assertEqual(meter_reading_10.end_time, make_aware(datetime(2016, 2, 1, 0, 0, 0), timezone=self.tz_obj))
        self.assertEqual(meter_reading_10.reading, 597478.9)
        self.assertEqual(meter_reading_10.source_unit, "kBtu (thousand Btu)")  # spot check
        self.assertEqual(meter_reading_10.conversion_factor, 1)  # spot check

        self.assertEqual(meter_reading_11.start_time, make_aware(datetime(2016, 2, 1, 0, 0, 0), timezone=self.tz_obj))
        self.assertEqual(meter_reading_11.end_time, make_aware(datetime(2016, 3, 1, 0, 0, 0), timezone=self.tz_obj))
        self.assertEqual(meter_reading_11.reading, 548603.7)

        meter_2 = refreshed_property_1.meters.get(type=Meter.NATURAL_GAS)
        self.assertEqual(meter_2.source, Meter.PORTFOLIO_MANAGER)
        self.assertEqual(meter_2.source_id, '5766973-1')
        self.assertEqual(meter_2.meter_readings.all().count(), 2)

        meter_reading_20, meter_reading_21 = list(meter_2.meter_readings.order_by('start_time').all())

        self.assertEqual(meter_reading_20.start_time, make_aware(datetime(2016, 1, 1, 0, 0, 0), timezone=self.tz_obj))
        self.assertEqual(meter_reading_20.end_time, make_aware(datetime(2016, 2, 1, 0, 0, 0), timezone=self.tz_obj))
        self.assertEqual(meter_reading_20.reading, 576000.2)

        self.assertEqual(meter_reading_21.start_time, make_aware(datetime(2016, 2, 1, 0, 0, 0), timezone=self.tz_obj))
        self.assertEqual(meter_reading_21.end_time, make_aware(datetime(2016, 3, 1, 0, 0, 0), timezone=self.tz_obj))
        self.assertEqual(meter_reading_21.reading, 488000.1)

        # Second property: same shape, different source_ids and values.
        refreshed_property_2 = Property.objects.get(pk=self.property_2.id)
        self.assertEqual(refreshed_property_2.meters.all().count(), 2)

        meter_3 = refreshed_property_2.meters.get(type=Meter.ELECTRICITY_GRID)
        self.assertEqual(meter_3.source, Meter.PORTFOLIO_MANAGER)
        self.assertEqual(meter_3.source_id, '5766975-0')
        self.assertEqual(meter_3.meter_readings.all().count(), 2)

        # NOTE(review): the second name here is `meter_reading_40`, which is
        # reused below for meter_4 — naming slip, though harmless as written.
        meter_reading_30, meter_reading_40 = list(meter_3.meter_readings.order_by('start_time').all())

        self.assertEqual(meter_reading_30.start_time, make_aware(datetime(2016, 1, 1, 0, 0, 0), timezone=self.tz_obj))
        self.assertEqual(meter_reading_30.end_time, make_aware(datetime(2016, 2, 1, 0, 0, 0), timezone=self.tz_obj))
        self.assertEqual(meter_reading_30.reading, 154572.2)
        self.assertEqual(meter_reading_30.source_unit, "kBtu (thousand Btu)")  # spot check
        self.assertEqual(meter_reading_30.conversion_factor, 1)  # spot check

        self.assertEqual(meter_reading_40.start_time, make_aware(datetime(2016, 2, 1, 0, 0, 0), timezone=self.tz_obj))
        self.assertEqual(meter_reading_40.end_time, make_aware(datetime(2016, 3, 1, 0, 0, 0), timezone=self.tz_obj))
        self.assertEqual(meter_reading_40.reading, 141437.5)

        meter_4 = refreshed_property_2.meters.get(type=Meter.NATURAL_GAS)
        self.assertEqual(meter_4.source, Meter.PORTFOLIO_MANAGER)
        self.assertEqual(meter_4.source_id, '5766975-1')
        self.assertEqual(meter_4.meter_readings.all().count(), 2)

        meter_reading_40, meter_reading_41 = list(meter_4.meter_readings.order_by('start_time').all())

        self.assertEqual(meter_reading_40.start_time, make_aware(datetime(2016, 1, 1, 0, 0, 0), timezone=self.tz_obj))
        self.assertEqual(meter_reading_40.end_time, make_aware(datetime(2016, 2, 1, 0, 0, 0), timezone=self.tz_obj))
        self.assertEqual(meter_reading_40.reading, 299915)

        self.assertEqual(meter_reading_41.start_time, make_aware(datetime(2016, 2, 1, 0, 0, 0), timezone=self.tz_obj))
        self.assertEqual(meter_reading_41.end_time, make_aware(datetime(2016, 3, 1, 0, 0, 0), timezone=self.tz_obj))
        self.assertEqual(meter_reading_41.reading, 496310.9)

        # file should be disassociated from cycle too
        refreshed_import_file = ImportFile.objects.get(pk=self.import_file.id)
        self.assertEqual(refreshed_import_file.cycle_id, None)

    def test_import_meter_usage_file_ignores_unknown_types_or_units(self):
        """
        Expect to have 3 meters. The first meter belongs to the first property
        and should have 2 readings. The second meter belongs to the second
        property and should have 1 reading. The last meter belongs to the
        second property and should have 1 reading.

        These come from 8 meter usage rows in the .xlsx file (1 per reading)
        where 4 of them have either an invalid type or unit.
        """
        filename = "example-pm-monthly-meter-usage-with-unknown-types-and-units.xlsx"
        filepath = os.path.dirname(os.path.abspath(__file__)) + "/data/" + filename

        import_file_with_invalids = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="PM Meter Usage",
            uploaded_filename=filename,
            file=SimpleUploadedFile(name=filename, content=open(filepath, 'rb').read()),
            cycle=self.cycle)

        url = reverse("api:v3:import_files-start-save-data", args=[import_file_with_invalids.id])
        url += f'?organization_id={self.org.pk}'
        post_params = {
            'cycle_id': self.cycle.pk,
        }
        self.client.post(url, post_params)

        # Rows with unknown type/unit are skipped: 3 meters, 4 readings total.
        self.assertEqual(3, Meter.objects.count())
        self.assertEqual(4, MeterReading.objects.count())

        refreshed_property_1 = Property.objects.get(pk=self.property_1.id)
        self.assertEqual(refreshed_property_1.meters.all().count(), 1)
        meter_1 = refreshed_property_1.meters.first()
        self.assertEqual(meter_1.meter_readings.all().count(), 2)

        refreshed_property_2 = Property.objects.get(pk=self.property_2.id)
        self.assertEqual(refreshed_property_2.meters.all().count(), 2)
        meter_2 = refreshed_property_2.meters.get(type=Meter.ELECTRICITY_GRID)
        self.assertEqual(meter_2.meter_readings.all().count(), 1)
        meter_3 = refreshed_property_2.meters.get(type=Meter.NATURAL_GAS)
        self.assertEqual(meter_3.meter_readings.all().count(), 1)

    def test_import_meter_usage_file_including_2_cost_meters(self):
        # Cost rows in the fixture produce Meter.COST meters with the raw
        # dollar amounts as readings.
        filename = "example-pm-monthly-meter-usage-2-cost-meters.xlsx"
        filepath = os.path.dirname(os.path.abspath(__file__)) + "/data/" + filename

        cost_meter_import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="PM Meter Usage",
            uploaded_filename=filename,
            file=SimpleUploadedFile(name=filename, content=open(filepath, 'rb').read()),
            cycle=self.cycle)

        url = reverse("api:v3:import_files-start-save-data", args=[cost_meter_import_file.id])
        url += f'?organization_id={self.org.pk}'
        post_params = {
            'cycle_id': self.cycle.pk,
        }
        self.client.post(url, post_params)

        cost_meters = Meter.objects.filter(type=Meter.COST)
        self.assertEqual(2, cost_meters.count())

        electric_cost_meter = cost_meters.get(source_id='5766973-0')
        gas_cost_meter = cost_meters.get(source_id='5766973-1')

        self.assertEqual(2, electric_cost_meter.meter_readings.count())
        self.assertEqual(2, gas_cost_meter.meter_readings.count())

        electric_reading_values = electric_cost_meter.meter_readings.values_list('reading', flat=True)
        self.assertCountEqual([100, 200], electric_reading_values)

        gas_reading_values = gas_cost_meter.meter_readings.values_list('reading', flat=True)
        self.assertCountEqual([300, 400], gas_reading_values)

    def test_existing_meter_is_found_and_used_if_import_file_should_reference_it(self):
        # A pre-existing meter matching (source, source_id, type) should
        # receive the imported readings rather than a duplicate being created.
        # NOTE(review): `property` shadows the builtin here.
        property = Property.objects.get(pk=self.property_1.id)

        # Create a meter with the same details of one meter in the import file
        unsaved_meter = Meter(
            property=property,
            source=Meter.PORTFOLIO_MANAGER,
            source_id='5766973-0',
            type=Meter.ELECTRICITY_GRID,
        )
        unsaved_meter.save()
        existing_meter = Meter.objects.get(pk=unsaved_meter.id)

        # Create a reading with a different date from those in the import file
        unsaved_meter_reading = MeterReading(
            meter=existing_meter,
            start_time=make_aware(datetime(2018, 1, 1, 0, 0, 0), timezone=self.tz_obj),
            end_time=make_aware(datetime(2018, 2, 1, 0, 0, 0), timezone=self.tz_obj),
            reading=12345,
            conversion_factor=1.0)
        unsaved_meter_reading.save()
        existing_meter_reading = MeterReading.objects.get(reading=12345)

        url = reverse("api:v3:import_files-start-save-data", args=[self.import_file.id])
        url += f'?organization_id={self.org.pk}'
        post_params = {
            'cycle_id': self.cycle.pk,
        }
        self.client.post(url, post_params)

        refreshed_property_1 = Property.objects.get(pk=self.property_1.id)
        self.assertEqual(refreshed_property_1.meters.all().count(), 2)

        refreshed_meter = refreshed_property_1.meters.get(type=Meter.ELECTRICITY_GRID)

        meter_reading_10, meter_reading_11, meter_reading_12 = list(refreshed_meter.meter_readings.order_by('start_time').all())
        self.assertEqual(meter_reading_10.reading, 597478.9)
        self.assertEqual(meter_reading_11.reading, 548603.7)

        # Sanity check to be sure, nothing was changed with existing meter reading
        self.assertEqual(meter_reading_12, existing_meter_reading)

    def test_existing_meter_reading_has_reading_source_unit_and_conversion_factor_updated_if_import_file_references_previous_entry(self):
        # A reading with the same time window as an imported row is
        # overwritten with the imported reading/unit/factor.
        property = Property.objects.get(pk=self.property_1.id)

        # Create a meter with the same details of one meter in the import file
        unsaved_meter = Meter(
            property=property,
            source=Meter.PORTFOLIO_MANAGER,
            source_id='5766973-0',
            type=Meter.ELECTRICITY_GRID,
        )
        unsaved_meter.save()
        existing_meter = Meter.objects.get(pk=unsaved_meter.id)

        # Create a reading with the same date as one from the import file but different reading
        start_time = make_aware(datetime(2016, 1, 1, 0, 0, 0), timezone=self.tz_obj)
        end_time = make_aware(datetime(2016, 2, 1, 0, 0, 0), timezone=self.tz_obj)

        unsaved_meter_reading = MeterReading(
            meter=existing_meter,
            start_time=start_time,
            end_time=end_time,
            reading=12345,
            source_unit="GJ",
            conversion_factor=947.82)
        unsaved_meter_reading.save()

        url = reverse("api:v3:import_files-start-save-data", args=[self.import_file.id])
        url += f'?organization_id={self.org.pk}'
        post_params = {
            'cycle_id': self.cycle.pk,
        }
        self.client.post(url, post_params)

        # Just as in the first test, 8 meter readings should exist
        self.assertEqual(MeterReading.objects.all().count(), 8)

        refreshed_property = Property.objects.get(pk=self.property_1.id)
        refreshed_meter = refreshed_property.meters.get(type=Meter.ELECTRICITY_GRID)
        meter_reading = refreshed_meter.meter_readings.get(start_time=start_time)

        self.assertEqual(meter_reading.end_time, end_time)
        self.assertEqual(meter_reading.reading, 597478.9)
        self.assertEqual(meter_reading.source_unit, "kBtu (thousand Btu)")
        self.assertEqual(meter_reading.conversion_factor, 1)

    def test_property_existing_in_multiple_cycles_can_have_meters_and_readings_associated_to_it(self):
        # Same canonical Property linked to states in two cycles still gets
        # its meters attached once.
        property_details = FakePropertyStateFactory(organization=self.org).get_details()
        property_details['organization_id'] = self.org.id

        # new state to be associated to new cycle using the same pm_property_id as state in old cycle
        property_details['pm_property_id'] = self.state_1.pm_property_id
        state = PropertyState(**property_details)
        state.save()
        new_property_state = PropertyState.objects.get(pk=state.id)

        new_cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        new_cycle = new_cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=get_current_timezone()))

        # new state and cycle associated to old property
        PropertyView.objects.create(property=self.property_1, cycle=new_cycle, state=new_property_state)

        url = reverse("api:v3:import_files-start-save-data", args=[self.import_file.id])
        url += f'?organization_id={self.org.pk}'
        post_params = {
            'cycle_id': self.cycle.pk,
        }
        self.client.post(url, post_params)

        refreshed_property_1 = Property.objects.get(pk=self.property_1.id)
        self.assertEqual(refreshed_property_1.meters.all().count(), 2)

    def test_meters_and_readings_are_associated_to_every_record_across_all_cycles_with_a_given_pm_property_id(self):
        # Distinct canonical Properties sharing a pm_property_id (in-cycle
        # and out-of-cycle) each receive the imported meters.
        # new, in-cycle state NOT associated to existing record but has same PM Property ID
        property_details_1 = FakePropertyStateFactory(organization=self.org).get_details()
        property_details_1['organization_id'] = self.org.id
        property_details_1['pm_property_id'] = self.state_1.pm_property_id
        property_details_1['custom_id_1'] = "values that forces non-match"
        new_property_1 = PropertyState(**property_details_1)
        new_property_1.save()

        property_3 = self.property_factory.get_property()
        PropertyView.objects.create(property=property_3, cycle=self.cycle, state=new_property_1)

        # new, out-cycle state NOT associated to existing record but has same PM Property ID
        property_details_2 = FakePropertyStateFactory(organization=self.org).get_details()
        property_details_2['organization_id'] = self.org.id
        property_details_2['pm_property_id'] = self.state_1.pm_property_id
        property_details_2['custom_id_1'] = "second value that forces non-match"
        new_property_2 = PropertyState(**property_details_2)
        new_property_2.save()

        new_cycle = self.cycle_factory.get_cycle(
            start=datetime(2011, 10, 10, tzinfo=get_current_timezone()))
        property_4 = self.property_factory.get_property()
        PropertyView.objects.create(property=property_4, cycle=new_cycle, state=new_property_2)

        url = reverse("api:v3:import_files-start-save-data", args=[self.import_file.id])
        url += f'?organization_id={self.org.pk}'
        post_params = {
            'cycle_id': self.cycle.pk,
        }
        self.client.post(url, post_params)

        refreshed_property_1 = Property.objects.get(pk=self.property_1.id)
        self.assertEqual(refreshed_property_1.meters.all().count(), 2)

        refreshed_property_3 = Property.objects.get(pk=property_3.id)
        self.assertEqual(refreshed_property_3.meters.all().count(), 2)

        refreshed_property_4 = Property.objects.get(pk=property_4.id)
        self.assertEqual(refreshed_property_4.meters.all().count(), 2)

    # NOTE: this method is truncated at the end of this chunk; its remainder
    # lives beyond the visible source and is preserved as-is.
    def test_pm_property_id_existing_across_two_different_orgs_wont_lead_to_misassociated_meters(self):
        new_org, _, _ = create_organization(self.user)

        property_details = FakePropertyStateFactory(organization=new_org).get_details()
        property_details['organization_id'] = new_org.id

        # new state to be associated to property of different organization but has the same pm_property_id
        property_details['pm_property_id'] = self.state_1.pm_property_id
        state = PropertyState(**property_details)
        state.save()
        new_property_state = PropertyState.objects.get(pk=state.id)

        new_cycle_factory = FakeCycleFactory(organization=new_org, user=self.user)
        new_cycle = new_cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=get_current_timezone()))

        new_property = self.property_factory.get_property()
        PropertyView.objects.create(property=new_property, cycle=new_cycle, state=new_property_state)

        url = reverse("api:v3:import_files-start-save-data", args=[self.import_file.id])
url += f'?organization_id={self.org.pk}' post_params = { 'cycle_id': self.cycle.pk, } self.client.post(url, post_params) # self.property_1 is associated to self.org, so according to post request, it should have 2 meters refreshed_property_1 = Property.objects.get( pk=self.property_1.id, organization_id__exact=self.org.pk) self.assertEqual(refreshed_property_1.meters.all().count(), 2) refreshed_new_property = Property.objects.get(pk=new_property.id) self.assertEqual(refreshed_new_property.meters.count(), 0) def test_the_response_contains_expected_and_actual_reading_counts_single_cycle( self): url = reverse("api:v3:import_files-start-save-data", args=[self.import_file.id]) url += f'?organization_id={self.org.pk}' post_params = { 'cycle_id': self.cycle.pk, } response = self.client.post(url, post_params) result = json.loads(response.content) expectation = [ { "property_id": self.property_1.id, "cycles": self.cycle.name, "pm_property_id": "5766973", "source_id": "5766973-0", "type": "Electric - Grid", "incoming": 2, "successfully_imported": 2, }, { "property_id": self.property_1.id, "cycles": self.cycle.name, "pm_property_id": "5766973", "source_id": "5766973-1", "type": "Natural Gas", "incoming": 2, "successfully_imported": 2, }, { "property_id": self.property_2.id, "cycles": self.cycle.name, "pm_property_id": "5766975", "source_id": "5766975-0", "type": "Electric - Grid", "incoming": 2, "successfully_imported": 2, }, { "property_id": self.property_2.id, "cycles": self.cycle.name, "pm_property_id": "5766975", "source_id": "5766975-1", "type": "Natural Gas", "incoming": 2, "successfully_imported": 2, }, ] self.assertCountEqual(result['message'], expectation) def test_the_response_contains_expected_and_actual_reading_counts_across_cycles_for_linked_properties( self): property_details = FakePropertyStateFactory( organization=self.org).get_details() property_details['organization_id'] = self.org.id # new state will be linked to existing record and has same PM Property ID 
property_details['pm_property_id'] = self.state_1.pm_property_id state = PropertyState(**property_details) state.save() new_property_state = PropertyState.objects.get(pk=state.id) new_cycle = self.cycle_factory.get_cycle( start=datetime(2011, 10, 10, tzinfo=get_current_timezone())) PropertyView.objects.create(property=self.property_1, cycle=new_cycle, state=new_property_state) url = reverse("api:v3:import_files-start-save-data", args=[self.import_file.id]) url += f'?organization_id={self.org.pk}' post_params = { 'cycle_id': self.cycle.pk, } response = self.client.post(url, post_params) result = json.loads(response.content) expectation = [ { "property_id": self.property_1.id, "cycles": self.cycle.name + ", " + new_cycle.name, "pm_property_id": "5766973", "source_id": "5766973-0", "type": "Electric - Grid", "incoming": 2, "successfully_imported": 2, }, { "property_id": self.property_1.id, "cycles": self.cycle.name + ", " + new_cycle.name, "pm_property_id": "5766973", "source_id": "5766973-1", "type": "Natural Gas", "incoming": 2, "successfully_imported": 2, }, { "property_id": self.property_2.id, "cycles": self.cycle.name, "pm_property_id": "5766975", "source_id": "5766975-0", "type": "Electric - Grid", "incoming": 2, "successfully_imported": 2, }, { "property_id": self.property_2.id, "cycles": self.cycle.name, "pm_property_id": "5766975", "source_id": "5766975-1", "type": "Natural Gas", "incoming": 2, "successfully_imported": 2, }, ] self.assertCountEqual(result['message'], expectation) def test_the_response_contains_expected_and_actual_reading_counts_by_property_id_even_in_the_same_cycle( self): property_details = FakePropertyStateFactory( organization=self.org).get_details() property_details['organization_id'] = self.org.id # Create new state NOT associated to existing record but has same PM Property ID property_details['pm_property_id'] = self.state_1.pm_property_id property_details['custom_id_1'] = "values that forces non-match" state = 
PropertyState(**property_details) state.save() new_property_state = PropertyState.objects.get(pk=state.id) # new state in cycle associated to old property property_3 = self.property_factory.get_property() PropertyView.objects.create(property=property_3, cycle=self.cycle, state=new_property_state) url = reverse("api:v3:import_files-start-save-data", args=[self.import_file.id]) url += f'?organization_id={self.org.pk}' post_params = { 'cycle_id': self.cycle.pk, } response = self.client.post(url, post_params) result = json.loads(response.content) expectation = [ { "property_id": self.property_1.id, "cycles": self.cycle.name, "pm_property_id": "5766973", "source_id": "5766973-0", "type": "Electric - Grid", "incoming": 2, "successfully_imported": 2, }, { "property_id": property_3.id, "cycles": self.cycle.name, "pm_property_id": "5766973", "source_id": "5766973-0", "type": "Electric - Grid", "incoming": 2, "successfully_imported": 2, }, { "property_id": self.property_1.id, "cycles": self.cycle.name, "pm_property_id": "5766973", "source_id": "5766973-1", "type": "Natural Gas", "incoming": 2, "successfully_imported": 2, }, { "property_id": property_3.id, "cycles": self.cycle.name, "pm_property_id": "5766973", "source_id": "5766973-1", "type": "Natural Gas", "incoming": 2, "successfully_imported": 2, }, { "property_id": self.property_2.id, "cycles": self.cycle.name, "pm_property_id": "5766975", "source_id": "5766975-0", "type": "Electric - Grid", "incoming": 2, "successfully_imported": 2, }, { "property_id": self.property_2.id, "cycles": self.cycle.name, "pm_property_id": "5766975", "source_id": "5766975-1", "type": "Natural Gas", "incoming": 2, "successfully_imported": 2, }, ] self.assertCountEqual(result['message'], expectation) def test_the_response_contains_expected_and_actual_reading_counts_for_pm_ids_with_costs( self): filename = "example-pm-monthly-meter-usage-2-cost-meters.xlsx" filepath = os.path.dirname( os.path.abspath(__file__)) + "/data/" + filename 
cost_meter_import_file = ImportFile.objects.create( import_record=self.import_record, source_type="PM Meter Usage", uploaded_filename=filename, file=SimpleUploadedFile(name=filename, content=open(filepath, 'rb').read()), cycle=self.cycle) url = reverse("api:v3:import_files-start-save-data", args=[cost_meter_import_file.id]) url += f'?organization_id={self.org.pk}' post_params = { 'cycle_id': self.cycle.pk, } response = self.client.post(url, post_params) result = json.loads(response.content) expectation = [ { "property_id": self.property_1.id, "cycles": self.cycle.name, "pm_property_id": "5766973", "source_id": "5766973-0", "type": "Electric - Grid", "incoming": 2, "successfully_imported": 2, }, { "property_id": self.property_1.id, "cycles": self.cycle.name, "pm_property_id": "5766973", "source_id": "5766973-1", "type": "Natural Gas", "incoming": 2, "successfully_imported": 2, }, { "property_id": self.property_1.id, "cycles": self.cycle.name, "pm_property_id": "5766973", "source_id": "5766973-0", "type": "Cost", "incoming": 2, "successfully_imported": 2, }, { "property_id": self.property_1.id, "cycles": self.cycle.name, "pm_property_id": "5766973", "source_id": "5766973-1", "type": "Cost", "incoming": 2, "successfully_imported": 2, }, { "property_id": self.property_2.id, "cycles": self.cycle.name, "pm_property_id": "5766975", "source_id": "5766975-0", "type": "Electric - Grid", "incoming": 2, "successfully_imported": 2, }, { "property_id": self.property_2.id, "cycles": self.cycle.name, "pm_property_id": "5766975", "source_id": "5766975-1", "type": "Natural Gas", "incoming": 2, "successfully_imported": 2, }, ] self.assertCountEqual(result['message'], expectation) def test_error_noted_in_response_if_meter_has_overlapping_readings(self): """ If a meter has overlapping readings, the process of upserting a reading will encounter the issue of not knowing which reading should take precedence over the other. 
In this case, neither the meter (if applicable) nor any of its readings are created. """ dup_import_record = ImportRecord.objects.create( owner=self.user, last_modified_by=self.user, super_organization=self.org) dup_filename = "example-pm-monthly-meter-usage-1-dup.xlsx" dup_filepath = os.path.dirname(os.path.abspath( __file__)) + "/../data_importer/tests/data/" + dup_filename dup_file = ImportFile.objects.create( import_record=dup_import_record, source_type="PM Meter Usage", uploaded_filename=dup_filename, file=SimpleUploadedFile(name=dup_filename, content=open(dup_filepath, 'rb').read()), cycle=self.cycle) url = reverse("api:v3:import_files-start-save-data", args=[dup_file.id]) url += f'?organization_id={self.org.pk}' post_params = { 'cycle_id': self.cycle.pk, } response = self.client.post(url, post_params) total_meters_count = Meter.objects.count() result_summary = json.loads(response.content) expected_import_summary = [ { "property_id": self.property_1.id, "cycles": self.cycle.name, "pm_property_id": "5766973", "source_id": "5766973-0", "type": "Electric - Grid", "incoming": 2, "successfully_imported": 2, "errors": "", }, { "property_id": self.property_1.id, "cycles": self.cycle.name, "pm_property_id": "5766973", "source_id": "5766973-1", "type": "Natural Gas", "incoming": 2, "successfully_imported": 2, "errors": "", }, { "property_id": self.property_2.id, "cycles": self.cycle.name, "pm_property_id": "5766975", "source_id": "5766975-0", "type": "Electric - Grid", "incoming": 4, "successfully_imported": 0, "errors": "Overlapping readings.", }, { "property_id": self.property_2.id, "cycles": self.cycle.name, "pm_property_id": "5766975", "source_id": "5766975-1", "type": "Natural Gas", "incoming": 4, "successfully_imported": 0, "errors": "Overlapping readings.", }, ] self.assertCountEqual(result_summary['message'], expected_import_summary) self.assertEqual(total_meters_count, 2)
class TestMeterViewSet(DataMappingBaseTestCase):
    """Tests for the v2 meters API endpoints (parsed-meters confirmation and
    property meter usage), driven by PM meter-usage spreadsheet fixtures."""

    def setUp(self):
        # Superuser + organization; credentials redacted in this fixture.
        self.user_details = {
            'username': '******',
            'password': '******',
        }
        self.user = User.objects.create_superuser(email='*****@*****.**',
                                                  **self.user_details)
        self.org, _, _ = create_organization(self.user)

        # For some reason, defaults weren't established consistently for each test.
        self.org.display_meter_units = Organization._default_display_meter_units.copy(
        )
        self.org.save()
        self.client.login(**self.user_details)

        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        property_details = self.property_state_factory.get_details()
        property_details['organization_id'] = self.org.id

        # pm_property_ids must match those within example-monthly-meter-usage.xlsx
        self.pm_property_id_1 = '5766973'
        self.pm_property_id_2 = '5766975'

        property_details['pm_property_id'] = self.pm_property_id_1
        state_1 = PropertyState(**property_details)
        state_1.save()
        self.state_1 = PropertyState.objects.get(pk=state_1.id)

        property_details['pm_property_id'] = self.pm_property_id_2
        state_2 = PropertyState(**property_details)
        state_2.save()
        self.state_2 = PropertyState.objects.get(pk=state_2.id)

        self.cycle_factory = FakeCycleFactory(organization=self.org,
                                              user=self.user)
        self.cycle = self.cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=get_current_timezone()))

        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_1 = self.property_factory.get_property()
        self.property_2 = self.property_factory.get_property()

        self.property_view_1 = PropertyView.objects.create(
            property=self.property_1, cycle=self.cycle, state=self.state_1)
        self.property_view_2 = PropertyView.objects.create(
            property=self.property_2, cycle=self.cycle, state=self.state_2)

        self.import_record = ImportRecord.objects.create(
            owner=self.user,
            last_modified_by=self.user,
            super_organization=self.org)

        # This file has multiple tabs
        filename = "example-pm-monthly-meter-usage.xlsx"
        filepath = os.path.dirname(
            os.path.abspath(__file__)) + "/data/" + filename

        self.import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="PM Meter Usage",
            uploaded_filename=filename,
            file=SimpleUploadedFile(name=filename,
                                    content=open(filepath, 'rb').read()),
            cycle=self.cycle)

    def test_parsed_meters_confirmation_verifies_energy_type_and_units(self):
        """Confirmation endpoint reports the parsed energy types and units."""
        url = reverse('api:v2:meters-parsed-meters-confirmation')
        post_params = json.dumps({
            'file_id': self.import_file.id,
            'organization_id': self.org.pk,
        })
        result = self.client.post(url,
                                  post_params,
                                  content_type="application/json")
        result_dict = ast.literal_eval(result.content.decode("utf-8"))

        expectation = [
            {
                "parsed_type": "Electric - Grid",
                "parsed_unit": "kBtu (thousand Btu)",
            },
            {
                "parsed_type": "Natural Gas",
                "parsed_unit": "kBtu (thousand Btu)",
            },
        ]

        self.assertCountEqual(result_dict.get("validated_type_units"),
                              expectation)

    def test_parsed_meters_confirmation_verifies_energy_type_and_units_and_ignores_invalid_types_and_units(
            self):
        """Unknown meter types/units in the file are dropped from the
        validated list instead of being reported."""
        filename = "example-pm-monthly-meter-usage-with-unknown-types-and-units.xlsx"
        filepath = os.path.dirname(
            os.path.abspath(__file__)) + "/data/" + filename

        import_file_with_invalids = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="PM Meter Usage",
            uploaded_filename=filename,
            file=SimpleUploadedFile(name=filename,
                                    content=open(filepath, 'rb').read()),
            cycle=self.cycle)

        url = reverse('api:v2:meters-parsed-meters-confirmation')
        post_params = json.dumps({
            'file_id': import_file_with_invalids.id,
            'organization_id': self.org.pk,
        })
        result = self.client.post(url,
                                  post_params,
                                  content_type="application/json")
        result_dict = ast.literal_eval(result.content.decode("utf-8"))

        expectation = [
            {
                "parsed_type": "Electric - Grid",
                "parsed_unit": "kBtu (thousand Btu)",
            },
            {
                "parsed_type": "Natural Gas",
                "parsed_unit": "kBtu (thousand Btu)",
            },
        ]

        self.assertCountEqual(result_dict.get("validated_type_units"),
                              expectation)

    def test_parsed_meters_confirmation_returns_pm_property_ids_and_corresponding_incoming_counts(
            self):
        """Confirmation endpoint lists each proposed meter import with its
        pm_property_id, source_id, type, and incoming reading count."""
        url = reverse('api:v2:meters-parsed-meters-confirmation')
        post_params = json.dumps({
            'file_id': self.import_file.id,
            'organization_id': self.org.pk,
        })
        result = self.client.post(url,
                                  post_params,
                                  content_type="application/json")
        result_dict = ast.literal_eval(result.content.decode("utf-8"))

        expectation = [
            {
                "pm_property_id": "5766973",
                "source_id": "5766973-0",
                "type": 'Electric - Grid',
                "incoming": 2,
            },
            {
                "pm_property_id": "5766973",
                "source_id": "5766973-1",
                "type": 'Natural Gas',
                "incoming": 2,
            },
            {
                "pm_property_id": "5766975",
                "source_id": "5766975-0",
                "type": 'Electric - Grid',
                "incoming": 2,
            },
            {
                "pm_property_id": "5766975",
                "source_id": "5766975-1",
                "type": 'Natural Gas',
                "incoming": 2,
            },
        ]

        self.assertCountEqual(result_dict.get("proposed_imports"),
                              expectation)

    def test_parsed_meters_confirmation_also_verifies_cost_type_and_units_and_counts(
            self):
        """Cost meters are validated with a currency unit that follows the
        org's thermal conversion assumption (US vs CAN dollars)."""
        filename = "example-pm-monthly-meter-usage-2-cost-meters.xlsx"
        filepath = os.path.dirname(
            os.path.abspath(__file__)) + "/data/" + filename

        cost_import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="PM Meter Usage",
            uploaded_filename=filename,
            file=SimpleUploadedFile(name=filename,
                                    content=open(filepath, 'rb').read()),
            cycle=self.cycle)

        url = reverse('api:v2:meters-parsed-meters-confirmation')
        post_params = json.dumps({
            'file_id': cost_import_file.id,
            'organization_id': self.org.pk,
        })
        result = self.client.post(url,
                                  post_params,
                                  content_type="application/json")
        result_dict = ast.literal_eval(result.content.decode("utf-8"))

        validated_type_units = [
            {
                "parsed_type": "Electric - Grid",
                "parsed_unit": "kBtu (thousand Btu)",
            },
            {
                "parsed_type": "Natural Gas",
                "parsed_unit": "kBtu (thousand Btu)",
            },
            {
                "parsed_type": "Cost",
                "parsed_unit": "US Dollars",
            },
        ]
        self.assertCountEqual(result_dict.get("validated_type_units"),
                              validated_type_units)

        proposed_imports = [
            {
                "pm_property_id": "5766973",
                "source_id": "5766973-0",
                "type": 'Electric - Grid',
                "incoming": 2,
            },
            {
                "pm_property_id": "5766973",
                "source_id": "5766973-1",
                "type": 'Natural Gas',
                "incoming": 2,
            },
            {
                "pm_property_id": "5766973",
                "source_id": "5766973-0",
                "type": 'Cost',
                "incoming": 2,
            },
            {
                "pm_property_id": "5766973",
                "source_id": "5766973-1",
                "type": 'Cost',
                "incoming": 2,
            },
            {
                "pm_property_id": "5766975",
                "source_id": "5766975-0",
                "type": 'Electric - Grid',
                "incoming": 2,
            },
            {
                "pm_property_id": "5766975",
                "source_id": "5766975-1",
                "type": 'Natural Gas',
                "incoming": 2,
            },
        ]
        self.assertCountEqual(result_dict.get("proposed_imports"),
                              proposed_imports)

        # Verify this works for Org with CAN thermal conversions
        self.org.thermal_conversion_assumption = Organization.CAN
        self.org.save()

        can_result = self.client.post(url,
                                      post_params,
                                      content_type="application/json")
        can_result_dict = ast.literal_eval(can_result.content.decode("utf-8"))

        validated_type_units[2] = {
            "parsed_type": "Cost",
            "parsed_unit": "CAN Dollars",
        }

        self.assertCountEqual(can_result_dict.get("validated_type_units"),
                              validated_type_units)

    def test_green_button_parsed_meters_confirmation_returns_a_green_button_id_incoming_counts_and_parsed_type_units_and_saves_property_id_to_file_cache(
            self):
        """GreenButton confirmation returns the GB meter id, counts and
        type/units, and caches the target property id on the ImportFile."""
        filename = "example-GreenButton-data.xml"
        filepath = os.path.dirname(
            os.path.abspath(__file__)) + "/data/" + filename

        xml_import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="GreenButton",
            uploaded_filename=filename,
            file=SimpleUploadedFile(name=filename,
                                    content=open(filepath, 'rb').read()),
            cycle=self.cycle)

        url = reverse('api:v2:meters-greenbutton-parsed-meters-confirmation')
        post_params = json.dumps({
            'file_id': xml_import_file.id,
            'organization_id': self.org.pk,
            'view_id': self.property_view_1.id,
        })
        result = self.client.post(url,
                                  post_params,
                                  content_type="application/json")
        result_dict = ast.literal_eval(result.content.decode("utf-8"))

        proposed_imports = [
            {
                "source_id": '409483',
                "type": 'Electric - Grid',
                "incoming": 2,
            },
        ]
        validated_type_units = [
            {
                "parsed_type": "Electric - Grid",
                "parsed_unit": "kWh (thousand Watt-hours)",
            },
        ]

        self.assertEqual(result_dict['proposed_imports'], proposed_imports)
        self.assertEqual(result_dict['validated_type_units'],
                         validated_type_units)

        # The target property id is cached on the import file for later steps.
        refreshed_import_file = ImportFile.objects.get(pk=xml_import_file.id)
        self.assertEqual(refreshed_import_file.matching_results_data,
                         {'property_id': self.property_view_1.property_id})

    def test_parsed_meters_confirmation_returns_unlinkable_pm_property_ids(
            self):
        """With no PropertyStates present, every pm_property_id in the file is
        reported as unlinkable."""
        PropertyState.objects.all().delete()

        url = reverse('api:v2:meters-parsed-meters-confirmation')
        post_params = json.dumps({
            'file_id': self.import_file.id,
            'organization_id': self.org.pk,
        })
        result = self.client.post(url,
                                  post_params,
                                  content_type="application/json")
        result_dict = ast.literal_eval(result.content.decode("utf-8"))

        expectation = [
            {
                "portfolio_manager_id": "5766973",
            },
            {
                "portfolio_manager_id": "5766975",
            },
        ]

        self.assertCountEqual(result_dict.get("unlinkable_pm_ids"),
                              expectation)

    def test_property_meters_endpoint_returns_a_list_of_meters_of_a_view(self):
        """property-meters endpoint lists PM and GreenButton meters of a view,
        with GB source_ids shortened to the usage-point id."""
        # add meters and readings to property associated to property_view_1
        save_raw_data(self.import_file.id)

        # create GB gas meter
        meter_details = {
            'source': Meter.GREENBUTTON,
            'source_id': '/v1/User/000/UsagePoint/123fakeID/MeterReading/000',
            'type': Meter.NATURAL_GAS,
            'property_id': self.property_view_1.property.id,
        }
        gb_gas_meter = Meter.objects.create(**meter_details)

        url = reverse('api:v2:meters-property-meters')
        post_params = json.dumps({
            'property_view_id': self.property_view_1.id,
        })
        result = self.client.post(url,
                                  post_params,
                                  content_type="application/json")
        result_dict = ast.literal_eval(result.content.decode("utf-8"))

        electric_meter = Meter.objects.get(
            property_id=self.property_view_1.property_id,
            type=Meter.ELECTRICITY_GRID)
        gas_meter = Meter.objects.get(
            property_id=self.property_view_1.property_id,
            type=Meter.NATURAL_GAS,
            source=Meter.PORTFOLIO_MANAGER)

        expectation = [
            {
                'id': electric_meter.id,
                'type': 'Electric - Grid',
                'source': 'PM',
                'source_id': '5766973-0',
            },
            {
                'id': gas_meter.id,
                'type': 'Natural Gas',
                'source': 'PM',
                'source_id': '5766973-1',
            },
            {
                'id': gb_gas_meter.id,
                'type': 'Natural Gas',
                'source': 'GB',
                'source_id': '123fakeID',
            },
        ]

        self.assertCountEqual(result_dict, expectation)

    def test_property_meter_usage_returns_meter_readings_and_column_defs_given_property_view_and_nondefault_meter_display_org_settings(
            self):
        """Readings are converted into the org's configured display units
        (kWh for electricity, kcf for gas) in both data and column defs."""
        # Update settings for display meter units to change it from the default values.
        self.org.display_meter_units[
            'Electric - Grid'] = 'kWh (thousand Watt-hours)'
        self.org.display_meter_units[
            'Natural Gas'] = 'kcf (thousand cubic feet)'
        self.org.save()

        # add meters and readings to property associated to property_view_1
        save_raw_data(self.import_file.id)

        meter_details = {
            'source': Meter.GREENBUTTON,
            'source_id': '/v1/User/000/UsagePoint/123fakeID/MeterReading/000',
            'type': Meter.NATURAL_GAS,
            'property_id': self.property_view_1.property.id,
        }
        gb_gas_meter = Meter.objects.create(**meter_details)

        tz_obj = timezone(TIME_ZONE)
        gb_gas_reading_details = {
            'start_time': make_aware(datetime(2016, 1, 1, 0, 0, 0),
                                     timezone=tz_obj),
            'end_time': make_aware(datetime(2016, 2, 1, 0, 0, 0),
                                   timezone=tz_obj),
            'reading': 1000,
            'source_unit': 'kBtu (thousand Btu)',
            'conversion_factor': 1,
            'meter_id': gb_gas_meter.id,
        }
        MeterReading.objects.create(**gb_gas_reading_details)

        url = reverse('api:v2:meters-property-meter-usage')
        post_params = json.dumps({
            'property_view_id': self.property_view_1.id,
            'interval': 'Exact',
            'excluded_meter_ids': [],
        })
        result = self.client.post(url,
                                  post_params,
                                  content_type="application/json")
        result_dict = ast.literal_eval(result.content.decode("utf-8"))

        # Division factors: 3.41 kBtu/kWh, 1026 kBtu/kcf.
        expectation = {
            'readings': [
                {
                    'start_time': '2016-01-01 00:00:00',
                    'end_time': '2016-02-01 00:00:00',
                    'Electric - Grid - PM - 5766973-0': (597478.9 / 3.41),
                    'Natural Gas - PM - 5766973-1': 576000.2 / 1026,
                    'Natural Gas - GB - 123fakeID': 1000 / 1026,
                },
                {
                    'start_time': '2016-02-01 00:00:00',
                    'end_time': '2016-03-01 00:00:00',
                    'Electric - Grid - PM - 5766973-0': (548603.7 / 3.41),
                    'Natural Gas - PM - 5766973-1': 488000.1 / 1026,
                },
            ],
            'column_defs': [
                {
                    'field': 'start_time',
                    '_filter_type': 'datetime',
                },
                {
                    'field': 'end_time',
                    '_filter_type': 'datetime',
                },
                {
                    'field': 'Electric - Grid - PM - 5766973-0',
                    'displayName': 'Electric - Grid - PM - 5766973-0 (kWh (thousand Watt-hours))',
                    '_filter_type': 'reading',
                },
                {
                    'field': 'Natural Gas - PM - 5766973-1',
                    'displayName': 'Natural Gas - PM - 5766973-1 (kcf (thousand cubic feet))',
                    '_filter_type': 'reading',
                },
                {
                    'field': 'Natural Gas - GB - 123fakeID',
                    'displayName': 'Natural Gas - GB - 123fakeID (kcf (thousand cubic feet))',
                    '_filter_type': 'reading',
                },
            ]
        }

        self.assertCountEqual(result_dict['readings'],
                              expectation['readings'])
        self.assertCountEqual(result_dict['column_defs'],
                              expectation['column_defs'])

    def test_property_meter_usage_returns_meter_readings_and_column_defs_when_cost_meter_included(
            self):
        """Cost meters show up as their own columns in US Dollars alongside
        the converted energy readings."""
        filename = "example-pm-monthly-meter-usage-2-cost-meters.xlsx"
        filepath = os.path.dirname(
            os.path.abspath(__file__)) + "/data/" + filename

        cost_import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="PM Meter Usage",
            uploaded_filename=filename,
            file=SimpleUploadedFile(name=filename,
                                    content=open(filepath, 'rb').read()),
            cycle=self.cycle)

        # add meters and readings to property associated to property_view_1
        save_raw_data(cost_import_file.id)

        url = reverse('api:v2:meters-property-meter-usage')
        post_params = json.dumps({
            'property_view_id': self.property_view_1.id,
            'interval': 'Exact',
            'excluded_meter_ids': [],
        })
        result = self.client.post(url,
                                  post_params,
                                  content_type="application/json")
        result_dict = ast.literal_eval(result.content.decode("utf-8"))

        expectation = {
            'readings': [
                {
                    'start_time': '2016-01-01 00:00:00',
                    'end_time': '2016-02-01 00:00:00',
                    'Electric - Grid - PM - 5766973-0': 597478.9 / 3.41,
                    'Cost - PM - 5766973-0': 100,
                    'Natural Gas - PM - 5766973-1': 576000.2,
                    'Cost - PM - 5766973-1': 300,
                },
                {
                    'start_time': '2016-02-01 00:00:00',
                    'end_time': '2016-03-01 00:00:00',
                    'Electric - Grid - PM - 5766973-0': 548603.7 / 3.41,
                    'Cost - PM - 5766973-0': 200,
                    'Natural Gas - PM - 5766973-1': 488000.1,
                    'Cost - PM - 5766973-1': 400,
                },
            ],
            'column_defs': [
                {
                    'field': 'start_time',
                    '_filter_type': 'datetime',
                },
                {
                    'field': 'end_time',
                    '_filter_type': 'datetime',
                },
                {
                    'field': 'Electric - Grid - PM - 5766973-0',
                    'displayName': 'Electric - Grid - PM - 5766973-0 (kWh (thousand Watt-hours))',
                    '_filter_type': 'reading',
                },
                {
                    'field': 'Natural Gas - PM - 5766973-1',
                    'displayName': 'Natural Gas - PM - 5766973-1 (kBtu (thousand Btu))',
                    '_filter_type': 'reading',
                },
                {
                    'field': 'Cost - PM - 5766973-0',
                    'displayName': 'Cost - PM - 5766973-0 (US Dollars)',
                    '_filter_type': 'reading',
                },
                {
                    'field': 'Cost - PM - 5766973-1',
                    'displayName': 'Cost - PM - 5766973-1 (US Dollars)',
                    '_filter_type': 'reading',
                },
            ]
        }

        self.assertCountEqual(result_dict['readings'],
                              expectation['readings'])
        self.assertCountEqual(result_dict['column_defs'],
                              expectation['column_defs'])

    def test_property_meter_usage_returns_meter_readings_according_to_thermal_conversion_preferences_of_an_org_if_applicable_for_display_settings(
            self):
        """CAN thermal-conversion orgs use CAN factors when converting to the
        configured display units (Liters for Diesel, Lbs. for Coke)."""
        # update the org settings thermal preference and display preference
        self.org.thermal_conversion_assumption = Organization.CAN
        self.org.display_meter_units["Diesel"] = "Liters"
        self.org.display_meter_units["Coke"] = "Lbs. (pounds)"
        self.org.save()

        # add meters and readings to property associated to property_view_1
        meter_details = {
            'source': Meter.PORTFOLIO_MANAGER,
            'source_id': '123fakeID',
            'type': Meter.DIESEL,
            'property_id': self.property_view_1.property.id,
        }
        diesel_meter = Meter.objects.create(**meter_details)

        tz_obj = timezone(TIME_ZONE)
        diesel_reading_details = {
            'start_time': make_aware(datetime(2016, 1, 1, 0, 0, 0),
                                     timezone=tz_obj),
            'end_time': make_aware(datetime(2016, 2, 1, 0, 0, 0),
                                   timezone=tz_obj),
            'reading': 10,
            'source_unit': 'kBtu (thousand Btu)',
            'conversion_factor': 1,
            'meter_id': diesel_meter.id,
        }
        MeterReading.objects.create(**diesel_reading_details)

        # Reuse the details dict for a second (Coke) meter.
        meter_details['type'] = Meter.COKE
        meter_details['source_id'] = '456fakeID'
        coke_meter = Meter.objects.create(**meter_details)

        coke_reading_details = {
            'start_time': make_aware(datetime(2016, 1, 1, 0, 0, 0),
                                     timezone=tz_obj),
            'end_time': make_aware(datetime(2016, 2, 1, 0, 0, 0),
                                   timezone=tz_obj),
            'reading': 100,
            'source_unit': 'kBtu (thousand Btu)',
            'conversion_factor': 1,
            'meter_id': coke_meter.id,
        }
        MeterReading.objects.create(**coke_reading_details)

        post_params = json.dumps({
            'property_view_id': self.property_view_1.id,
            'interval': 'Exact',
            'excluded_meter_ids': [],
        })
        url = reverse('api:v2:meters-property-meter-usage')
        result = self.client.post(url,
                                  post_params,
                                  content_type="application/json")
        result_dict = ast.literal_eval(result.content.decode("utf-8"))

        # CAN factors: 36.30 kBtu/L diesel, 12.39 kBtu/lb coke.
        display_readings = [
            {
                'start_time': '2016-01-01 00:00:00',
                'end_time': '2016-02-01 00:00:00',
                'Diesel - PM - 123fakeID': 10 / 36.30,
                'Coke - PM - 456fakeID': 100 / 12.39,
            },
        ]

        self.assertCountEqual(result_dict['readings'], display_readings)

    def test_property_meter_usage_can_return_monthly_meter_readings_and_column_defs_with_nondefault_display_setting(
            self):
        """'Month' interval aggregates readings per calendar month; months
        with no readings (April) are omitted entirely."""
        # Update settings for display meter units to change it from the default values.
        self.org.display_meter_units[
            'Electric - Grid'] = 'kWh (thousand Watt-hours)'
        self.org.save()

        # add initial meters and readings
        save_raw_data(self.import_file.id)

        # add additional entries for each initial meter
        tz_obj = timezone(TIME_ZONE)
        for meter in Meter.objects.all():
            # March 2016 reading
            reading_details = {
                'meter_id': meter.id,
                'start_time': make_aware(datetime(2016, 3, 1, 0, 0, 0),
                                         timezone=tz_obj),
                'end_time': make_aware(datetime(2016, 4, 1, 0, 0, 0),
                                       timezone=tz_obj),
                'reading': 100,
                'source_unit': 'kBtu (thousand Btu)',
                'conversion_factor': 1
            }
            MeterReading.objects.create(**reading_details)

            # May 2016 reading
            reading_details['start_time'] = make_aware(datetime(
                2016, 5, 1, 0, 0, 0), timezone=tz_obj)
            reading_details['end_time'] = make_aware(datetime(
                2016, 6, 1, 0, 0, 0), timezone=tz_obj)
            reading_details['reading'] = 200
            MeterReading.objects.create(**reading_details)

        url = reverse('api:v2:meters-property-meter-usage')
        post_params = json.dumps({
            'property_view_id': self.property_view_1.id,
            'interval': 'Month',
            'excluded_meter_ids': [],
        })
        result = self.client.post(url,
                                  post_params,
                                  content_type="application/json")
        result_dict = ast.literal_eval(result.content.decode("utf-8"))

        expectation = {
            'readings': [
                {
                    'month': 'January 2016',
                    'Electric - Grid - PM - 5766973-0': 597478.9 / 3.41,
                    'Natural Gas - PM - 5766973-1': 576000.2,
                },
                {
                    'month': 'February 2016',
                    'Electric - Grid - PM - 5766973-0': 548603.7 / 3.41,
                    'Natural Gas - PM - 5766973-1': 488000.1,
                },
                {
                    'month': 'March 2016',
                    'Electric - Grid - PM - 5766973-0': 100 / 3.41,
                    'Natural Gas - PM - 5766973-1': 100,
                },
                {
                    'month': 'May 2016',
                    'Electric - Grid - PM - 5766973-0': 200 / 3.41,
                    'Natural Gas - PM - 5766973-1': 200,
                },
            ],
            'column_defs': [
                {
                    'field': 'month',
                    '_filter_type': 'datetime',
                },
                {
                    'field': 'Electric - Grid - PM - 5766973-0',
                    'displayName': 'Electric - Grid - PM - 5766973-0 (kWh (thousand Watt-hours))',
                    '_filter_type': 'reading',
                },
                {
                    'field': 'Natural Gas - PM - 5766973-1',
                    'displayName': 'Natural Gas - PM - 5766973-1 (kBtu (thousand Btu))',
                    '_filter_type': 'reading',
                },
            ]
        }

        self.assertCountEqual(result_dict['readings'],
                              expectation['readings'])
        self.assertCountEqual(result_dict['column_defs'],
                              expectation['column_defs'])

    def test_property_meter_usage_can_return_monthly_meter_readings_and_column_defs_for_submonthly_data_with_DST_transitions_and_specific_meters(
            self):
        """Sub-monthly readings spanning a DST fall-back are aggregated per
        month, and excluded_meter_ids filters a meter out entirely."""
        # add initial meters and readings
        save_raw_data(self.import_file.id)
        property_1_electric_meter = Meter.objects.get(source_id='5766973-0')

        # add additional sub-montly entries for each initial meter
        tz_obj = timezone(TIME_ZONE)
        for meter in Meter.objects.all():
            # November 2019 reading between DST transition
            reading_details = {
                'meter_id': meter.id,
                'start_time': make_aware(datetime(2019, 11, 3, 1, 59, 59),
                                         timezone=tz_obj,
                                         is_dst=True),
                'end_time': make_aware(datetime(2019, 11, 3, 1, 59, 59),
                                       timezone=tz_obj,
                                       is_dst=False),
                'reading': 100,
                'source_unit': 'kBtu (thousand Btu)',
                'conversion_factor': 1
            }
            MeterReading.objects.create(**reading_details)

            # November 2019 reading after DST transition
            reading_details['start_time'] = make_aware(datetime(
                2019, 11, 3, 2, 0, 0), timezone=tz_obj)
            reading_details['end_time'] = make_aware(datetime(
                2019, 11, 3, 3, 0, 0), timezone=tz_obj)
            reading_details['reading'] = 200
            MeterReading.objects.create(**reading_details)

            # Create a reading for only one of the meters that will be filtered out completely
            # NOTE(review): this compares a string source_id ('5766973-0') to an
            # integer primary key, so the condition looks like it is always
            # False and the 2020 reading is never created — confirm whether
            # `meter.id == property_1_electric_meter.id` was intended.
            if meter.source_id == property_1_electric_meter.id:
                reading_details['start_time'] = make_aware(datetime(
                    2020, 11, 3, 2, 0, 0), timezone=tz_obj)
                reading_details['end_time'] = make_aware(datetime(
                    2020, 11, 3, 3, 0, 0), timezone=tz_obj)
                reading_details['reading'] = 10000000
                MeterReading.objects.create(**reading_details)

        url = reverse('api:v2:meters-property-meter-usage')
        post_params = json.dumps({
            'property_view_id': self.property_view_1.id,
            'interval': 'Month',
            'excluded_meter_ids': [property_1_electric_meter.id],
        })
        # NOTE: this method is truncated in the provided chunk; the remainder
        # of the call and its assertions lie beyond the visible source.
        result = self.client.post(url, post_params,
content_type="application/json") result_dict = ast.literal_eval(result.content.decode("utf-8")) expectation = { 'readings': [ { 'month': 'January 2016', 'Natural Gas - PM - 5766973-1': 576000.2, }, { 'month': 'February 2016', 'Natural Gas - PM - 5766973-1': 488000.1, }, { 'month': 'November 2019', 'Natural Gas - PM - 5766973-1': 300, }, ], 'column_defs': [ { 'field': 'month', '_filter_type': 'datetime', }, { 'field': 'Natural Gas - PM - 5766973-1', 'displayName': 'Natural Gas - PM - 5766973-1 (kBtu (thousand Btu))', '_filter_type': 'reading', }, ] } self.assertCountEqual(result_dict['readings'], expectation['readings']) self.assertCountEqual(result_dict['column_defs'], expectation['column_defs']) def test_property_meter_usage_can_return_monthly_meter_readings_and_column_defs_of_overlapping_submonthly_data_aggregating_monthly_data_to_maximize_total( self): # add initial meters and readings save_raw_data(self.import_file.id) # add additional entries for the Electricity meter tz_obj = timezone(TIME_ZONE) meter = Meter.objects.get(property_id=self.property_view_1.property.id, type=Meter.type_lookup['Electric - Grid']) # 2016 January reading that should override the existing reading reading_details = { 'meter_id': meter.id, 'start_time': make_aware(datetime(2016, 1, 1, 0, 0, 0), timezone=tz_obj), 'end_time': make_aware(datetime(2016, 1, 20, 23, 59, 59), timezone=tz_obj), 'reading': 100000000000000, 'source_unit': 'kBtu (thousand Btu)', 'conversion_factor': 1 } MeterReading.objects.create(**reading_details) # 2016 January reading that should be ignored reading_details['start_time'] = make_aware(datetime( 2016, 1, 1, 0, 0, 0), timezone=tz_obj) reading_details['end_time'] = make_aware(datetime( 2016, 3, 31, 23, 59, 59), timezone=tz_obj) reading_details['reading'] = 0.1 MeterReading.objects.create(**reading_details) # Create March 2016 entries having disregarded readings when finding monthly total # 1 week - not included in total reading_details['start_time'] = 
make_aware(datetime( 2016, 3, 1, 0, 0, 0), timezone=tz_obj) reading_details['end_time'] = make_aware(datetime( 2016, 3, 6, 23, 59, 59), timezone=tz_obj) reading_details['reading'] = 1 MeterReading.objects.create(**reading_details) # 1 week - not included in total reading_details['start_time'] = make_aware(datetime( 2016, 3, 7, 0, 0, 0), timezone=tz_obj) reading_details['end_time'] = make_aware(datetime( 2016, 3, 13, 23, 59, 59), timezone=tz_obj) reading_details['reading'] = 10 MeterReading.objects.create(**reading_details) # 10 days - included in total reading_details['start_time'] = make_aware(datetime( 2016, 3, 2, 0, 0, 0), timezone=tz_obj) reading_details['end_time'] = make_aware(datetime( 2016, 3, 11, 23, 59, 59), timezone=tz_obj) reading_details['reading'] = 100 MeterReading.objects.create(**reading_details) # 10 days - included in total reading_details['start_time'] = make_aware(datetime( 2016, 3, 12, 0, 0, 0), timezone=tz_obj) reading_details['end_time'] = make_aware(datetime( 2016, 3, 21, 23, 59, 59), timezone=tz_obj) reading_details['reading'] = 1000 MeterReading.objects.create(**reading_details) # Create April 2016 entries having disregarded readings when finding monthly total # 5 days - not included in total reading_details['start_time'] = make_aware(datetime( 2016, 4, 1, 0, 0, 0), timezone=tz_obj) reading_details['end_time'] = make_aware(datetime( 2016, 4, 4, 23, 59, 59), timezone=tz_obj) reading_details['reading'] = 2 MeterReading.objects.create(**reading_details) # 10 days - not included in total reading_details['start_time'] = make_aware(datetime( 2016, 4, 6, 0, 0, 0), timezone=tz_obj) reading_details['end_time'] = make_aware(datetime( 2016, 4, 15, 23, 59, 59), timezone=tz_obj) reading_details['reading'] = 20 MeterReading.objects.create(**reading_details) # 20 days - included in total reading_details['start_time'] = make_aware(datetime( 2016, 4, 2, 0, 0, 0), timezone=tz_obj) reading_details['end_time'] = make_aware(datetime( 2016, 4, 21, 23, 59, 59), 
# The chunk above builds overlapping March/April 2016 readings; the expectation
# below asserts the endpoint keeps only the non-overlapping combination that
# maximizes the monthly total (March: 100 + 1000 = 1100; April: 200).
timezone=tz_obj) reading_details['reading'] = 200 MeterReading.objects.create(**reading_details) url = reverse('api:v2:meters-property-meter-usage') post_params = json.dumps({ 'property_view_id': self.property_view_1.id, 'interval': 'Month', 'excluded_meter_ids': [], }) result = self.client.post(url, post_params, content_type="application/json") result_dict = ast.literal_eval(result.content.decode("utf-8")) expectation = { 'readings': [ { 'month': 'January 2016', 'Electric - Grid - PM - 5766973-0': 100000000000000 / 3.41, 'Natural Gas - PM - 5766973-1': 576000.2, }, { 'month': 'February 2016', 'Electric - Grid - PM - 5766973-0': 548603.7 / 3.41, 'Natural Gas - PM - 5766973-1': 488000.1, }, { 'month': 'March 2016', 'Electric - Grid - PM - 5766973-0': 1100 / 3.41, }, { 'month': 'April 2016', 'Electric - Grid - PM - 5766973-0': 200 / 3.41, }, ], 'column_defs': [ { 'field': 'month', '_filter_type': 'datetime', }, { 'field': 'Electric - Grid - PM - 5766973-0', 'displayName': 'Electric - Grid - PM - 5766973-0 (kWh (thousand Watt-hours))', '_filter_type': 'reading', }, { 'field': 'Natural Gas - PM - 5766973-1', 'displayName': 'Natural Gas - PM - 5766973-1 (kBtu (thousand Btu))', '_filter_type': 'reading', }, ] } self.assertCountEqual(result_dict['readings'], expectation['readings']) self.assertCountEqual(result_dict['column_defs'], expectation['column_defs']) def test_property_meter_usage_can_return_annual_meter_readings_and_column_defs_while_handling_a_nondefault_display_setting( self): # Update settings for display meter units to change it from the default values. 
# Annual ('Year') interval variant: 2016 and 2018 readings are summed per year,
# with the Electric total converted kBtu -> kWh (divide by 3.41).
self.org.display_meter_units[ 'Electric - Grid'] = 'kWh (thousand Watt-hours)' self.org.save() # add initial meters and readings save_raw_data(self.import_file.id) # add additional 2018 entries for each initial meter tz_obj = timezone(TIME_ZONE) for meter in Meter.objects.all(): # March 2018 reading reading_details = { 'meter_id': meter.id, 'start_time': make_aware(datetime(2018, 3, 1, 0, 0, 0), timezone=tz_obj), 'end_time': make_aware(datetime(2018, 4, 1, 0, 0, 0), timezone=tz_obj), 'reading': 100, 'source_unit': 'kBtu (thousand Btu)', 'conversion_factor': 1 } MeterReading.objects.create(**reading_details) # May 2018 reading reading_details['start_time'] = make_aware(datetime( 2018, 5, 1, 0, 0, 0), timezone=tz_obj) reading_details['end_time'] = make_aware(datetime( 2018, 6, 1, 0, 0, 0), timezone=tz_obj) reading_details['reading'] = 200 MeterReading.objects.create(**reading_details) url = reverse('api:v2:meters-property-meter-usage') post_params = json.dumps({ 'property_view_id': self.property_view_1.id, 'interval': 'Year', 'excluded_meter_ids': [], }) result = self.client.post(url, post_params, content_type="application/json") result_dict = ast.literal_eval(result.content.decode("utf-8")) expectation = { 'readings': [ { 'year': 2016, 'Electric - Grid - PM - 5766973-0': (597478.9 + 548603.7) / 3.41, 'Natural Gas - PM - 5766973-1': 576000.2 + 488000.1, }, { 'year': 2018, 'Electric - Grid - PM - 5766973-0': (100 + 200) / 3.41, 'Natural Gas - PM - 5766973-1': 100 + 200, }, ], 'column_defs': [ { 'field': 'year', '_filter_type': 'datetime', }, { 'field': 'Electric - Grid - PM - 5766973-0', 'displayName': 'Electric - Grid - PM - 5766973-0 (kWh (thousand Watt-hours))', '_filter_type': 'reading', }, { 'field': 'Natural Gas - PM - 5766973-1', 'displayName': 'Natural Gas - PM - 5766973-1 (kBtu (thousand Btu))', '_filter_type': 'reading', }, ] } self.assertCountEqual(result_dict['readings'], expectation['readings']) self.assertCountEqual(result_dict['column_defs'], 
expectation['column_defs'])
# Tests for the meter import preview endpoints (v3): PM meter usage previews,
# GreenButton previews, and unlinkable-ID reporting. setUp builds two
# PropertyStates whose pm_property_ids (5766973 / 5766975) match the rows in
# example-pm-monthly-meter-usage.xlsx, wires them into views for one cycle,
# and registers that spreadsheet as an ImportFile.
class TestMeterViewSet(DataMappingBaseTestCase): def setUp(self): self.user_details = { 'username': '******', 'password': '******', } self.user = User.objects.create_superuser(email='*****@*****.**', **self.user_details) self.org, _, _ = create_organization(self.user) # For some reason, defaults weren't established consistently for each test. self.org.display_meter_units = Organization._default_display_meter_units.copy( ) self.org.save() self.client.login(**self.user_details) self.property_state_factory = FakePropertyStateFactory( organization=self.org) property_details = self.property_state_factory.get_details() property_details['organization_id'] = self.org.id # pm_property_ids must match those within example-monthly-meter-usage.xlsx self.pm_property_id_1 = '5766973' self.pm_property_id_2 = '5766975' property_details['pm_property_id'] = self.pm_property_id_1 state_1 = PropertyState(**property_details) state_1.save() self.state_1 = PropertyState.objects.get(pk=state_1.id) property_details['pm_property_id'] = self.pm_property_id_2 state_2 = PropertyState(**property_details) state_2.save() self.state_2 = PropertyState.objects.get(pk=state_2.id) self.cycle_factory = FakeCycleFactory(organization=self.org, user=self.user) self.cycle = self.cycle_factory.get_cycle( start=datetime(2010, 10, 10, tzinfo=get_current_timezone())) self.property_factory = FakePropertyFactory(organization=self.org) self.property_1 = self.property_factory.get_property() self.property_2 = self.property_factory.get_property() self.property_view_1 = PropertyView.objects.create( property=self.property_1, cycle=self.cycle, state=self.state_1) self.property_view_2 = PropertyView.objects.create( property=self.property_2, cycle=self.cycle, state=self.state_2) self.import_record = ImportRecord.objects.create( owner=self.user, last_modified_by=self.user, super_organization=self.org) # This file has multiple tabs filename = "example-pm-monthly-meter-usage.xlsx" filepath = os.path.dirname( 
# The preview endpoint returns a dict parsed here via ast.literal_eval;
# validated_type_units lists each (type, unit) pair the importer recognized.
# A second fixture with unknown types/units checks that invalid rows are
# silently dropped from that list.
os.path.abspath(__file__)) + "/data/" + filename self.import_file = ImportFile.objects.create( import_record=self.import_record, source_type="PM Meter Usage", uploaded_filename=filename, file=SimpleUploadedFile(name=filename, content=open(filepath, 'rb').read()), cycle=self.cycle) def test_parsed_meters_confirmation_verifies_energy_type_and_units(self): url = reverse('api:v3:import_files-pm-meters-preview', kwargs={'pk': self.import_file.id}) url += f'?organization_id={self.org.pk}' result = self.client.get(url) result_dict = ast.literal_eval(result.content.decode("utf-8")) expectation = [ { "parsed_type": "Electric - Grid", "parsed_unit": "kBtu (thousand Btu)", }, { "parsed_type": "Natural Gas", "parsed_unit": "kBtu (thousand Btu)", }, ] self.assertCountEqual(result_dict.get("validated_type_units"), expectation) def test_parsed_meters_confirmation_verifies_energy_type_and_units_and_ignores_invalid_types_and_units( self): filename = "example-pm-monthly-meter-usage-with-unknown-types-and-units.xlsx" filepath = os.path.dirname( os.path.abspath(__file__)) + "/data/" + filename import_file_with_invalids = ImportFile.objects.create( import_record=self.import_record, source_type="PM Meter Usage", uploaded_filename=filename, file=SimpleUploadedFile(name=filename, content=open(filepath, 'rb').read()), cycle=self.cycle) url = reverse('api:v3:import_files-pm-meters-preview', kwargs={'pk': import_file_with_invalids.id}) url += f'?organization_id={self.org.pk}' result = self.client.get(url) result_dict = ast.literal_eval(result.content.decode("utf-8")) expectation = [ { "parsed_type": "Electric - Grid", "parsed_unit": "kBtu (thousand Btu)", }, { "parsed_type": "Natural Gas", "parsed_unit": "kBtu (thousand Btu)", }, ] self.assertCountEqual(result_dict.get("validated_type_units"), expectation) def test_parsed_meters_confirmation_returns_pm_property_ids_and_corresponding_incoming_counts( self): url = reverse('api:v3:import_files-pm-meters-preview', kwargs={'pk': 
# proposed_imports maps each (pm_property_id, source_id) pair in the fixture to
# the matched property and the count of incoming readings (2 per meter here).
self.import_file.id}) url += f'?organization_id={self.org.pk}' result = self.client.get(url) result_dict = ast.literal_eval(result.content.decode("utf-8")) expectation = [ { "property_id": self.property_1.id, "cycles": self.cycle.name, "pm_property_id": "5766973", "source_id": "5766973-0", "type": 'Electric - Grid', "incoming": 2, }, { "property_id": self.property_1.id, "cycles": self.cycle.name, "pm_property_id": "5766973", "source_id": "5766973-1", "type": 'Natural Gas', "incoming": 2, }, { "property_id": self.property_2.id, "cycles": self.cycle.name, "pm_property_id": "5766975", "source_id": "5766975-0", "type": 'Electric - Grid', "incoming": 2, }, { "property_id": self.property_2.id, "cycles": self.cycle.name, "pm_property_id": "5766975", "source_id": "5766975-1", "type": 'Natural Gas', "incoming": 2, }, ] self.assertCountEqual(result_dict.get("proposed_imports"), expectation) def test_parsed_meters_confirmation_also_verifies_cost_type_and_units_and_counts( self): filename = "example-pm-monthly-meter-usage-2-cost-meters.xlsx" filepath = os.path.dirname( os.path.abspath(__file__)) + "/data/" + filename cost_import_file = ImportFile.objects.create( import_record=self.import_record, source_type="PM Meter Usage", uploaded_filename=filename, file=SimpleUploadedFile(name=filename, content=open(filepath, 'rb').read()), cycle=self.cycle) url = reverse('api:v3:import_files-pm-meters-preview', kwargs={'pk': cost_import_file.id}) url += f'?organization_id={self.org.pk}' result = self.client.get(url) result_dict = ast.literal_eval(result.content.decode("utf-8")) validated_type_units = [ { "parsed_type": "Electric - Grid", "parsed_unit": "kBtu (thousand Btu)", }, { "parsed_type": "Natural Gas", "parsed_unit": "kBtu (thousand Btu)", }, { "parsed_type": "Cost", "parsed_unit": "US Dollars", }, ] self.assertCountEqual(result_dict.get("validated_type_units"), validated_type_units) proposed_imports = [ { "property_id": self.property_1.id, "cycles": self.cycle.name, 
"pm_property_id": "5766973", "source_id": "5766973-0", "type": 'Electric - Grid', "incoming": 2, }, { "property_id": self.property_1.id, "cycles": self.cycle.name, "pm_property_id": "5766973", "source_id": "5766973-1", "type": 'Natural Gas', "incoming": 2, }, { "property_id": self.property_1.id, "cycles": self.cycle.name, "pm_property_id": "5766973", "source_id": "5766973-0", "type": 'Cost', "incoming": 2, }, { "property_id": self.property_1.id, "cycles": self.cycle.name, "pm_property_id": "5766973", "source_id": "5766973-1", "type": 'Cost', "incoming": 2, }, { "property_id": self.property_2.id, "cycles": self.cycle.name, "pm_property_id": "5766975", "source_id": "5766975-0", "type": 'Electric - Grid', "incoming": 2, }, { "property_id": self.property_2.id, "cycles": self.cycle.name, "pm_property_id": "5766975", "source_id": "5766975-1", "type": 'Natural Gas', "incoming": 2, }, ] self.assertCountEqual(result_dict.get("proposed_imports"), proposed_imports) # Verify this works for Org with CAN thermal conversions self.org.thermal_conversion_assumption = Organization.CAN self.org.save() can_result = self.client.get(url) can_result_dict = ast.literal_eval(can_result.content.decode("utf-8")) validated_type_units[2] = { "parsed_type": "Cost", "parsed_unit": "CAN Dollars", } self.assertCountEqual(can_result_dict.get("validated_type_units"), validated_type_units) def test_green_button_parsed_meters_confirmation_returns_a_green_button_id_incoming_counts_and_parsed_type_units_and_saves_property_id_to_file_cache( self): filename = "example-GreenButton-data.xml" filepath = os.path.dirname( os.path.abspath(__file__)) + "/data/" + filename xml_import_file = ImportFile.objects.create( import_record=self.import_record, source_type="GreenButton", uploaded_filename=filename, file=SimpleUploadedFile(name=filename, content=open(filepath, 'rb').read()), cycle=self.cycle) url = reverse('api:v3:import_files-greenbutton-meters-preview', kwargs={'pk': xml_import_file.id}) url += 
f'?organization_id={self.org.pk}&view_id={self.property_view_1.id}' result = self.client.get(url) result_dict = ast.literal_eval(result.content.decode("utf-8")) proposed_imports = [ { "source_id": '409483', "property_id": self.property_1.id, "type": 'Electric - Grid', "incoming": 2, }, ] validated_type_units = [ { "parsed_type": "Electric - Grid", "parsed_unit": "kWh (thousand Watt-hours)", }, ] self.assertEqual(result_dict['proposed_imports'], proposed_imports) self.assertEqual(result_dict['validated_type_units'], validated_type_units) refreshed_import_file = ImportFile.objects.get(pk=xml_import_file.id) self.assertEqual(refreshed_import_file.matching_results_data, {'property_id': self.property_view_1.property_id}) def test_parsed_meters_confirmation_returns_unlinkable_pm_property_ids( self): PropertyState.objects.all().delete() url = reverse('api:v3:import_files-pm-meters-preview', kwargs={'pk': self.import_file.id}) url += f'?organization_id={self.org.pk}' result = self.client.get(url) result_dict = ast.literal_eval(result.content.decode("utf-8")) expectation = [ { "portfolio_manager_id": "5766973", }, { "portfolio_manager_id": "5766975", }, ] self.assertCountEqual(result_dict.get("unlinkable_pm_ids"), expectation)
# Tests for DataQualityCheck / Rule: the default rule set (22 rules), custom
# rule management, per-condition violations (required / not_null / range /
# include / exclude) on both canonical and extra_data fields, label
# association, type casting, and pint unit handling.
# NOTE(review): a second class with this same name is defined later in this
# file; in Python the later definition shadows this one, so any tests unique
# to this class never run under the test discoverer - confirm and rename.
class DataQualityCheckTests(DataMappingBaseTestCase): def setUp(self): selfvars = self.set_up(ASSESSED_RAW) self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars self.property_factory = FakePropertyFactory(organization=self.org) self.property_state_factory = FakePropertyStateFactory(organization=self.org) self.taxlot_state_factory = FakeTaxLotStateFactory(organization=self.org) def test_default_create(self): dq = DataQualityCheck.retrieve(self.org.id) self.assertEqual(dq.rules.count(), 22) # Example rule to check ex_rule = { 'table_name': 'PropertyState', 'field': 'conditioned_floor_area', 'data_type': Rule.TYPE_AREA, 'rule_type': Rule.RULE_TYPE_DEFAULT, 'min': 0, 'max': 7000000, 'severity': Rule.SEVERITY_ERROR, 'units': 'ft**2', } rule = Rule.objects.filter( table_name='PropertyState', field='conditioned_floor_area', severity=Rule.SEVERITY_ERROR ) self.assertDictContainsSubset(ex_rule, model_to_dict(rule.first())) def test_remove_rules(self): dq = DataQualityCheck.retrieve(self.org.id) self.assertEqual(dq.rules.count(), 22) dq.remove_all_rules() self.assertEqual(dq.rules.count(), 0) def test_add_custom_rule(self): dq = DataQualityCheck.retrieve(self.org.id) dq.remove_all_rules() ex_rule = { 'table_name': 'PropertyState', 'field': 'some_floor_area', 'data_type': Rule.TYPE_AREA, 'rule_type': Rule.RULE_TYPE_DEFAULT, 'min': 8760, 'max': 525600, 'severity': Rule.SEVERITY_ERROR, 'units': 'm**2', } dq.add_rule(ex_rule) self.assertEqual(dq.rules.count(), 1) self.assertDictContainsSubset(ex_rule, model_to_dict(dq.rules.first())) def test_add_custom_rule_exception(self): dq = DataQualityCheck.retrieve(self.org.id) dq.remove_all_rules() ex_rule = { 'table_name_does_not_exist': 'PropertyState', } with self.assertRaises(Exception) as exc: dq.add_rule(ex_rule) self.assertEqual( str(exc.exception), "Rule data is not defined correctly: Rule() got an unexpected keyword argument 'table_name_does_not_exist'" ) def 
test_check_property_state_example_data(self): """Trigger 5 rules - 2 default and 3 custom rules - one of each condition type""" ps_data = { 'no_default_data': True, 'custom_id_1': 'abcd', 'pm_property_id': 'PMID', 'site_eui': 525600, } ps = self.property_state_factory.get_property_state(None, **ps_data) # Add 3 additionals rule to default set dq = DataQualityCheck.retrieve(self.org.id) rule_info = { 'field': 'custom_id_1', 'table_name': 'PropertyState', 'enabled': True, 'data_type': Rule.TYPE_STRING, 'rule_type': Rule.RULE_TYPE_DEFAULT, 'condition': Rule.RULE_INCLUDE, 'required': False, 'not_null': False, 'min': None, 'max': None, 'text_match': 'zzzzzzzzz', 'severity': Rule.SEVERITY_ERROR, 'units': "", } dq.add_rule(rule_info) rule_info['field'] = 'pm_property_id' rule_info['condition'] = Rule.RULE_EXCLUDE rule_info['text_match'] = 'PMID' dq.add_rule(rule_info) rule_info['field'] = 'address_line_2' rule_info['condition'] = Rule.RULE_REQUIRED dq.add_rule(rule_info) # Run DQ check and test that each rule was triggered dq.check_data(ps.__class__.__name__, [ps]) # { # 11: { # 'id': 11, # 'custom_id_1': 'abcd', # 'pm_property_id': 'PMID', # 'data_quality_results': [ # { # 'severity': 'error', 'value': '525600', 'field': 'site_eui', 'table_name': 'PropertyState', 'message': 'Site EUI out of range', 'detailed_message': 'Site EUI [525600] > 1000', 'formatted_field': 'Site EUI' # ... 
# The loop below verifies one expected detailed_message per violated field and
# fails on any unexpected field, then checks the exact set of violations.
# } # ] # } record_results = dq.results[ps.id] self.assertEqual(record_results['custom_id_1'], 'abcd') self.assertEqual(record_results['pm_property_id'], 'PMID') violation_fields = [] for violation in record_results['data_quality_results']: field = violation['field'] if field == 'address_line_1': self.assertEqual(violation['detailed_message'], 'Address Line 1 is null') elif field == 'address_line_2': self.assertEqual(violation['detailed_message'], 'Address Line 2 is required but is None') elif field == 'custom_id_1': self.assertEqual(violation['detailed_message'], 'Custom ID 1 [abcd] does not contain "zzzzzzzzz"') elif field == 'pm_property_id': self.assertEqual(violation['detailed_message'], 'PM Property ID [PMID] contains "PMID"') elif field == 'site_eui': self.assertEqual(violation['detailed_message'], 'Site EUI [525600] > 1000') else: # we should have hit one of the cases above self.fail('invalid "field" provided') violation_fields.append(field) expected_fields = [ 'address_line_1', 'address_line_2', 'custom_id_1', 'pm_property_id', 'site_eui', ] self.assertCountEqual(expected_fields, violation_fields) def test_check_example_with_extra_data_fields(self): """Trigger 5 ED rules - 2 default and 3 custom rules - one of each condition type""" ps_data = { 'no_default_data': True, 'custom_id_1': 'abcd', 'extra_data': { 'range_and_out_of_range': 1, 'include_and_doesnt': 'aaaaa', 'exclude_and_does': 'foo', } } ps = self.property_state_factory.get_property_state(None, **ps_data) # Create 5 column objects that correspond to the 3 ED rules since rules don't get # checked for anything other than REQUIRED if they don't have a corresponding col object column_names = [ 'required_and_missing', 'not_null_and_missing', 'range_and_out_of_range', 'include_and_doesnt', 'exclude_and_does' ] for col_name in column_names: Column.objects.create( column_name=col_name, table_name='PropertyState', organization=self.org, is_extra_data=True, ) dq = DataQualityCheck.retrieve(self.org.id) 
# Same five-condition coverage as above but driven entirely through extra_data
# columns; each rule added below targets one of the column_names created above.
dq.remove_all_rules() rule_info = { 'field': 'required_and_missing', 'table_name': 'PropertyState', 'enabled': True, 'data_type': Rule.TYPE_STRING, 'rule_type': Rule.RULE_TYPE_DEFAULT, 'condition': Rule.RULE_REQUIRED, 'required': False, 'not_null': False, 'min': None, 'max': None, 'text_match': None, 'severity': Rule.SEVERITY_ERROR, 'units': "", } dq.add_rule(rule_info) rule_info['field'] = 'not_null_and_missing' rule_info['condition'] = Rule.RULE_NOT_NULL dq.add_rule(rule_info) rule_info['field'] = 'range_and_out_of_range' rule_info['condition'] = Rule.RULE_RANGE rule_info['min'] = 100 dq.add_rule(rule_info) rule_info['field'] = 'include_and_doesnt' rule_info['condition'] = Rule.RULE_INCLUDE rule_info['text_match'] = 'zzzzzzzzz' dq.add_rule(rule_info) rule_info['field'] = 'exclude_and_does' rule_info['condition'] = Rule.RULE_EXCLUDE rule_info['text_match'] = 'foo' dq.add_rule(rule_info) # Run DQ check and test that each rule was triggered dq.check_data(ps.__class__.__name__, [ps]) record_results = dq.results[ps.id] violation_fields = [] for violation in record_results['data_quality_results']: field = violation['field'] if field == 'required_and_missing': self.assertEqual(violation['detailed_message'], 'required_and_missing is required but is None') elif field == 'not_null_and_missing': self.assertEqual(violation['detailed_message'], 'not_null_and_missing is null') elif field == 'range_and_out_of_range': self.assertEqual(violation['detailed_message'], 'range_and_out_of_range [1] < 100') elif field == 'include_and_doesnt': self.assertEqual(violation['detailed_message'], 'include_and_doesnt [aaaaa] does not contain "zzzzzzzzz"') elif field == 'exclude_and_does': self.assertEqual(violation['detailed_message'], 'exclude_and_does [foo] contains "foo"') else: # we should have hit one of the cases above self.fail('invalid "field" provided') violation_fields.append(field) self.assertCountEqual(column_names, violation_fields) def 
test_check_property_state_example_data_with_labels(self): dq = DataQualityCheck.retrieve(self.org.id) # Create labels and apply them to the rules being triggered later site_eui_label = StatusLabel.objects.create(name='Check Site EUI', super_organization=self.org) site_eui_rule = dq.rules.get(table_name='PropertyState', field='site_eui', max='1000') site_eui_rule.status_label = site_eui_label site_eui_rule.save() year_built_label = StatusLabel.objects.create(name='Check Year Built', super_organization=self.org) year_built_rule = dq.rules.get(table_name='PropertyState', field='year_built') year_built_rule.status_label = year_built_label year_built_rule.save() # Create state and associate it to view ps_data = { 'no_default_data': True, 'custom_id_1': 'abcd', 'address_line_1': '742 Evergreen Terrace', 'pm_property_id': 'PMID', 'site_eui': 525600, 'year_built': 1699, } ps = self.property_state_factory.get_property_state(None, **ps_data) property = self.property_factory.get_property() PropertyView.objects.create( property=property, cycle=self.cycle, state=ps ) dq.check_data(ps.__class__.__name__, [ps]) dq_results = dq.results[ps.id]['data_quality_results'] labels = [r['label'] for r in dq_results] self.assertCountEqual(['Check Site EUI', 'Check Year Built'], labels) def test_text_match(self): dq = DataQualityCheck.retrieve(self.org.id) dq.remove_all_rules() new_rule = { 'table_name': 'PropertyState', 'field': 'address_line_1', 'data_type': Rule.TYPE_STRING, 'rule_type': Rule.RULE_TYPE_DEFAULT, 'severity': Rule.SEVERITY_ERROR, 'not_null': True, 'text_match': 742, } dq.add_rule(new_rule) ps_data = { 'no_default_data': True, 'custom_id_1': 'abcd', 'address_line_1': '742 Evergreen Terrace', 'pm_property_id': 'PMID', 'site_eui': 525600, } ps = self.property_state_factory.get_property_state(None, **ps_data) dq.check_data(ps.__class__.__name__, [ps]) self.assertEqual(dq.results, {}) def test_str_to_data_type_string(self): rule = Rule.objects.create(name='str_rule', 
# Remaining tests exercise Rule.str_to_data_type casting per data_type and the
# minimum_valid / maximum_valid comparisons, including pint Quantity inputs
# with unit conversion and the UnitMismatchError path for incompatible units.
data_type=Rule.TYPE_STRING) self.assertEqual(rule.str_to_data_type(' '), '') self.assertEqual(rule.str_to_data_type(None), None) self.assertEqual(rule.str_to_data_type(27.5), 27.5) def test_str_to_data_type_float(self): rule = Rule.objects.create(name='flt_rule', data_type=Rule.TYPE_NUMBER) self.assertEqual(rule.str_to_data_type(' '), None) self.assertEqual(rule.str_to_data_type(None), None) self.assertEqual(rule.str_to_data_type(27.5), 27.5) with self.assertRaises(DataQualityTypeCastError): self.assertEqual(rule.str_to_data_type('not-a-number'), '') def test_str_to_data_type_date(self): rule = Rule.objects.create(name='date_rule', data_type=Rule.TYPE_DATE) d = rule.str_to_data_type('07/04/2000 08:55:30') self.assertEqual(d.strftime("%Y-%m-%d %H %M %S"), '2000-07-04 08 55 30') self.assertEqual(rule.str_to_data_type(None), None) self.assertEqual(rule.str_to_data_type(27.5), 27.5) # floats should return float def test_str_to_data_type_datetime(self): rule = Rule.objects.create(name='year_rule', data_type=Rule.TYPE_YEAR) d = rule.str_to_data_type('07/04/2000') self.assertEqual(d.strftime("%Y-%m-%d"), '2000-07-04') self.assertEqual(rule.str_to_data_type(None), None) self.assertEqual(rule.str_to_data_type(27.5), 27.5) # floats should return float def test_min_value(self): rule = Rule.objects.create(name='min_str_rule', data_type=Rule.TYPE_NUMBER, min=0.5) self.assertTrue(rule.minimum_valid(1000)) self.assertTrue(rule.minimum_valid('1000')) self.assertFalse(rule.minimum_valid(0.1)) self.assertFalse(rule.minimum_valid('0.1')) with self.assertRaises(DataQualityTypeCastError): self.assertEqual(rule.minimum_valid('not-a-number'), '') def test_max_value(self): rule = Rule.objects.create(name='max_str_rule', data_type=Rule.TYPE_NUMBER, max=1000) self.assertTrue(rule.maximum_valid(0.1)) self.assertTrue(rule.maximum_valid('0.1')) self.assertFalse(rule.maximum_valid(9999)) self.assertFalse(rule.maximum_valid('9999')) with self.assertRaises(DataQualityTypeCastError): 
self.assertEqual(rule.maximum_valid('not-a-number'), '') def test_min_value_quantities(self): rule = Rule.objects.create(name='min_str_rule', data_type=Rule.TYPE_EUI, min=10, max=100, units='kBtu/ft**2/year') self.assertTrue(rule.minimum_valid(15)) self.assertTrue(rule.minimum_valid('15')) self.assertTrue(rule.maximum_valid(15)) self.assertTrue(rule.maximum_valid('15')) self.assertFalse(rule.minimum_valid(5)) self.assertFalse(rule.minimum_valid('5')) self.assertFalse(rule.maximum_valid(150)) self.assertFalse(rule.maximum_valid('150')) # All of these should value since they are less than 10 (e.g. 5 kbtu/m2/year =~ 0.5 kbtu/ft2/year) # different units on check data self.assertFalse(rule.minimum_valid(ureg.Quantity(5, "kBtu/ft**2/year"))) self.assertFalse(rule.minimum_valid(ureg.Quantity(5, "kBtu/m**2/year"))) # ~ 0.5 kbtu/ft2/year self.assertFalse(rule.maximum_valid(ureg.Quantity(110, "kBtu/ft**2/year"))) self.assertFalse(rule.maximum_valid(ureg.Quantity(1100, "kBtu/m**2/year"))) # ~ 102.2 kbtu/ft2/year # these should all pass self.assertTrue(rule.minimum_valid(ureg.Quantity(10, "kBtu/ft**2/year"))) self.assertTrue(rule.minimum_valid(ureg.Quantity(110, "kBtu/m**2/year"))) # 10.22 kbtu/ft2/year # test the rule with different units rule = Rule.objects.create(name='min_str_rule', data_type=Rule.TYPE_EUI, min=10, max=100, units='kBtu/m**2/year') self.assertFalse(rule.minimum_valid(ureg.Quantity(0.05, "kBtu/ft**2/year"))) # ~ 0.538 kbtu/m2/year self.assertFalse(rule.maximum_valid(ureg.Quantity(15, "kBtu/ft**2/year"))) # ~ 161 kbtu/m2/year self.assertFalse(rule.minimum_valid(ureg.Quantity(5, "kBtu/m**2/year"))) self.assertFalse(rule.maximum_valid(ureg.Quantity(110, "kBtu/m**2/year"))) def test_incorrect_pint_unit_conversions(self): rule = Rule.objects.create(name='min_str_rule', data_type=Rule.TYPE_EUI, min=10, max=100, units='ft**2') # this should error out nicely with self.assertRaises(UnitMismatchError): self.assertFalse(rule.minimum_valid(ureg.Quantity(5, 
"kBtu/ft**2/year"))) with self.assertRaises(UnitMismatchError): self.assertFalse(rule.maximum_valid(ureg.Quantity(5, "kBtu/ft**2/year")))
class DataQualityCheckTests(DataMappingBaseTestCase):
    """Tests for DataQualityCheck rule management (default rules, add/remove,
    custom rules) and for Rule value validation / type casting, including
    pint-quantity (unit-aware) min/max checks."""

    def setUp(self):
        # set_up() returns the standard fixture tuple for an ASSESSED_RAW import.
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars
        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(organization=self.org)

    def test_default_create(self):
        """Retrieving a DataQualityCheck for a new org creates the 22 default rules."""
        dq = DataQualityCheck.retrieve(self.org.id)
        self.assertEqual(dq.rules.count(), 22)
        # Example rule to check
        ex_rule = {
            'table_name': 'PropertyState',
            'field': 'conditioned_floor_area',
            'data_type': Rule.TYPE_AREA,
            'rule_type': Rule.RULE_TYPE_DEFAULT,
            'min': 0,
            'max': 7000000,
            'severity': Rule.SEVERITY_ERROR,
            'units': 'ft**2',
        }
        rule = Rule.objects.filter(
            table_name='PropertyState',
            field='conditioned_floor_area',
            severity=Rule.SEVERITY_ERROR
        )
        self.assertDictContainsSubset(ex_rule, model_to_dict(rule.first()))

    def test_remove_rules(self):
        """remove_all_rules() deletes every rule attached to the check."""
        dq = DataQualityCheck.retrieve(self.org.id)
        self.assertEqual(dq.rules.count(), 22)
        dq.remove_all_rules()
        self.assertEqual(dq.rules.count(), 0)

    def test_add_custom_rule(self):
        """add_rule() persists a user-defined rule with the given fields."""
        dq = DataQualityCheck.retrieve(self.org.id)
        dq.remove_all_rules()
        ex_rule = {
            'table_name': 'PropertyState',
            'field': 'some_floor_area',
            'data_type': Rule.TYPE_AREA,
            'rule_type': Rule.RULE_TYPE_DEFAULT,
            'min': 8760,
            'max': 525600,
            'severity': Rule.SEVERITY_ERROR,
            'units': 'm**2',
        }
        dq.add_rule(ex_rule)
        self.assertEqual(dq.rules.count(), 1)
        self.assertDictContainsSubset(ex_rule, model_to_dict(dq.rules.first()))

    def test_add_custom_rule_exception(self):
        """add_rule() with an unknown field name raises with a descriptive message."""
        dq = DataQualityCheck.retrieve(self.org.id)
        dq.remove_all_rules()
        ex_rule = {
            'table_name_does_not_exist': 'PropertyState',
        }
        with self.assertRaises(Exception) as exc:
            dq.add_rule(ex_rule)
        self.assertEqual(
            str(exc.exception),
            "Rule data is not defined correctly: Rule() got an unexpected keyword argument 'table_name_does_not_exist'"
        )

    def test_check_property_state_example_data(self):
        """check_data() flags a PropertyState whose site_eui exceeds the default max."""
        dq = DataQualityCheck.retrieve(self.org.id)
        ps_data = {
            'no_default_data': True,
            'custom_id_1': 'abcd',
            'address_line_1': '742 Evergreen Terrace',
            'pm_property_id': 'PMID',
            'site_eui': 525600,
        }
        ps = self.property_state_factory.get_property_state(None, **ps_data)
        dq.check_data(ps.__class__.__name__, [ps])
        # dq.results has roughly this shape:
        # {
        #     11: {
        #         'id': 11,
        #         'custom_id_1': 'abcd',
        #         'pm_property_id': 'PMID',
        #         'address_line_1': '742 Evergreen Terrace',
        #         'data_quality_results': [
        #             {
        #                 'severity': 'error', 'value': '525600', 'field': 'site_eui',
        #                 'table_name': 'PropertyState',
        #                 'message': 'Site EUI out of range',
        #                 'detailed_message': 'Site EUI [525600] > 1000',
        #                 'formatted_field': 'Site EUI'
        #             }
        #         ]
        #     }
        # }
        error_found = False
        for index, row in dq.results.items():
            self.assertEqual(row['custom_id_1'], 'abcd')
            self.assertEqual(row['pm_property_id'], 'PMID')
            self.assertEqual(row['address_line_1'], '742 Evergreen Terrace')
            for violation in row['data_quality_results']:
                if violation['message'] == 'Site EUI out of range':
                    error_found = True
                    self.assertEqual(violation['detailed_message'], 'Site EUI [525600] > 1000')
        self.assertEqual(error_found, True)

    def test_check_property_state_example_data_with_labels(self):
        """Rules with an attached StatusLabel surface that label in the results."""
        dq = DataQualityCheck.retrieve(self.org.id)
        # Create labels and apply them to the rules being triggered later
        site_eui_label = StatusLabel.objects.create(name='Check Site EUI', super_organization=self.org)
        site_eui_rule = dq.rules.get(table_name='PropertyState', field='site_eui', max='1000')
        site_eui_rule.status_label = site_eui_label
        site_eui_rule.save()
        year_built_label = StatusLabel.objects.create(name='Check Year Built', super_organization=self.org)
        year_built_rule = dq.rules.get(table_name='PropertyState', field='year_built')
        year_built_rule.status_label = year_built_label
        year_built_rule.save()
        # Create state and associate it to view
        ps_data = {
            'no_default_data': True,
            'custom_id_1': 'abcd',
            'address_line_1': '742 Evergreen Terrace',
            'pm_property_id': 'PMID',
            'site_eui': 525600,
            'year_built': 1699,
        }
        ps = self.property_state_factory.get_property_state(None, **ps_data)
        property = self.property_factory.get_property()
        PropertyView.objects.create(
            property=property, cycle=self.cycle, state=ps
        )
        dq.check_data(ps.__class__.__name__, [ps])
        dq_results = dq.results[ps.id]['data_quality_results']
        labels = [r['label'] for r in dq_results]
        self.assertCountEqual(['Check Site EUI', 'Check Year Built'], labels)

    def test_text_match(self):
        """A text_match rule that matches produces no results for the state."""
        dq = DataQualityCheck.retrieve(self.org.id)
        dq.remove_all_rules()
        new_rule = {
            'table_name': 'PropertyState',
            'field': 'address_line_1',
            'data_type': Rule.TYPE_STRING,
            'rule_type': Rule.RULE_TYPE_DEFAULT,
            'severity': Rule.SEVERITY_ERROR,
            'not_null': True,
            'text_match': 742,
        }
        dq.add_rule(new_rule)
        ps_data = {
            'no_default_data': True,
            'custom_id_1': 'abcd',
            'address_line_1': '742 Evergreen Terrace',
            'pm_property_id': 'PMID',
            'site_eui': 525600,
        }
        ps = self.property_state_factory.get_property_state(None, **ps_data)
        dq.check_data(ps.__class__.__name__, [ps])
        self.assertEqual(dq.results, {})

    def test_str_to_data_type_string(self):
        """TYPE_STRING: whitespace collapses to '', None passes through, floats pass through."""
        rule = Rule.objects.create(name='str_rule', data_type=Rule.TYPE_STRING)
        self.assertEqual(rule.str_to_data_type(' '), '')
        self.assertEqual(rule.str_to_data_type(None), None)
        self.assertEqual(rule.str_to_data_type(27.5), 27.5)

    def test_str_to_data_type_float(self):
        """TYPE_NUMBER: blank becomes None; non-numeric strings raise DataQualityTypeCastError."""
        rule = Rule.objects.create(name='flt_rule', data_type=Rule.TYPE_NUMBER)
        self.assertEqual(rule.str_to_data_type(' '), None)
        self.assertEqual(rule.str_to_data_type(None), None)
        self.assertEqual(rule.str_to_data_type(27.5), 27.5)
        with self.assertRaises(DataQualityTypeCastError):
            self.assertEqual(rule.str_to_data_type('not-a-number'), '')

    def test_str_to_data_type_date(self):
        """TYPE_DATE: datetime strings parse; None and floats pass through unchanged."""
        rule = Rule.objects.create(name='date_rule', data_type=Rule.TYPE_DATE)
        d = rule.str_to_data_type('07/04/2000 08:55:30')
        self.assertEqual(d.strftime("%Y-%m-%d %H %M %S"), '2000-07-04 08 55 30')
        self.assertEqual(rule.str_to_data_type(None), None)
        self.assertEqual(rule.str_to_data_type(27.5), 27.5)  # floats should return float

    def test_str_to_data_type_datetime(self):
        """TYPE_YEAR: date strings parse; None and floats pass through unchanged."""
        rule = Rule.objects.create(name='year_rule', data_type=Rule.TYPE_YEAR)
        d = rule.str_to_data_type('07/04/2000')
        self.assertEqual(d.strftime("%Y-%m-%d"), '2000-07-04')
        self.assertEqual(rule.str_to_data_type(None), None)
        self.assertEqual(rule.str_to_data_type(27.5), 27.5)  # floats should return float

    def test_min_value(self):
        """minimum_valid() accepts numbers/strings >= min and raises on non-numeric strings."""
        rule = Rule.objects.create(name='min_str_rule', data_type=Rule.TYPE_NUMBER, min=0.5)
        self.assertTrue(rule.minimum_valid(1000))
        self.assertTrue(rule.minimum_valid('1000'))
        self.assertFalse(rule.minimum_valid(0.1))
        self.assertFalse(rule.minimum_valid('0.1'))
        with self.assertRaises(DataQualityTypeCastError):
            self.assertEqual(rule.minimum_valid('not-a-number'), '')

    def test_max_value(self):
        """maximum_valid() accepts numbers/strings <= max and raises on non-numeric strings."""
        rule = Rule.objects.create(name='max_str_rule', data_type=Rule.TYPE_NUMBER, max=1000)
        self.assertTrue(rule.maximum_valid(0.1))
        self.assertTrue(rule.maximum_valid('0.1'))
        self.assertFalse(rule.maximum_valid(9999))
        self.assertFalse(rule.maximum_valid('9999'))
        with self.assertRaises(DataQualityTypeCastError):
            self.assertEqual(rule.maximum_valid('not-a-number'), '')

    def test_min_value_quantities(self):
        """Min/max checks convert pint Quantities into the rule's units before comparing."""
        rule = Rule.objects.create(name='min_str_rule', data_type=Rule.TYPE_EUI,
                                   min=10, max=100, units='kBtu/ft**2/year')
        self.assertTrue(rule.minimum_valid(15))
        self.assertTrue(rule.minimum_valid('15'))
        self.assertTrue(rule.maximum_valid(15))
        self.assertTrue(rule.maximum_valid('15'))
        self.assertFalse(rule.minimum_valid(5))
        self.assertFalse(rule.minimum_valid('5'))
        self.assertFalse(rule.maximum_valid(150))
        self.assertFalse(rule.maximum_valid('150'))
        # All of these should fail since they are less than 10 (e.g. 5 kbtu/m2/year =~ 0.5 kbtu/ft2/year)
        # different units on check data
        self.assertFalse(rule.minimum_valid(ureg.Quantity(5, "kBtu/ft**2/year")))
        self.assertFalse(rule.minimum_valid(ureg.Quantity(5, "kBtu/m**2/year")))  # ~ 0.5 kbtu/ft2/year
        self.assertFalse(rule.maximum_valid(ureg.Quantity(110, "kBtu/ft**2/year")))
        self.assertFalse(rule.maximum_valid(ureg.Quantity(1100, "kBtu/m**2/year")))  # ~ 102.2 kbtu/ft2/year
        # these should all pass
        self.assertTrue(rule.minimum_valid(ureg.Quantity(10, "kBtu/ft**2/year")))
        self.assertTrue(rule.minimum_valid(ureg.Quantity(110, "kBtu/m**2/year")))  # 10.22 kbtu/ft2/year
        # test the rule with different units
        rule = Rule.objects.create(name='min_str_rule', data_type=Rule.TYPE_EUI,
                                   min=10, max=100, units='kBtu/m**2/year')
        self.assertFalse(rule.minimum_valid(ureg.Quantity(0.05, "kBtu/ft**2/year")))  # ~ 0.538 kbtu/m2/year
        self.assertFalse(rule.maximum_valid(ureg.Quantity(15, "kBtu/ft**2/year")))  # ~ 161 kbtu/m2/year
        self.assertFalse(rule.minimum_valid(ureg.Quantity(5, "kBtu/m**2/year")))
        self.assertFalse(rule.maximum_valid(ureg.Quantity(110, "kBtu/m**2/year")))

    def test_incorrect_pint_unit_conversions(self):
        """Incompatible rule/data dimensionality raises UnitMismatchError, not a crash."""
        rule = Rule.objects.create(name='min_str_rule', data_type=Rule.TYPE_EUI,
                                   min=10, max=100, units='ft**2')
        # this should error out nicely
        with self.assertRaises(UnitMismatchError):
            self.assertFalse(rule.minimum_valid(ureg.Quantity(5, "kBtu/ft**2/year")))
        with self.assertRaises(UnitMismatchError):
            self.assertFalse(rule.maximum_valid(ureg.Quantity(5, "kBtu/ft**2/year")))
class TestPropertySerializers(DeleteModelsTestCase):
    """Tests for the property serializers: audit-log read-only serializer,
    property-view list serializer (including certifications), and the minimal
    property list serializer."""

    def setUp(self):
        self.maxDiff = None
        user_details = {
            'username': '******',
            'password': '******',
        }
        self.user = User.objects.create_superuser(email='*****@*****.**', **user_details)
        self.org, _, _ = create_organization(self.user)
        self.audit_log_factory = FakePropertyAuditLogFactory(
            organization=self.org, user=self.user)
        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.property_view_factory = FakePropertyViewFactory(
            organization=self.org, user=self.user)
        self.ga_factory = FakeGreenAssessmentFactory(organization=self.org)
        self.gap_factory = FakeGreenAssessmentPropertyFactory(
            organization=self.org, user=self.user)
        self.label_factory = FakeStatusLabelFactory(organization=self.org)
        self.assessment = self.ga_factory.get_green_assessment()
        self.property_view = self.property_view_factory.get_property_view()
        # Base kwargs for creating a GreenAssessmentProperty; individual tests
        # copy and override 'view'/'metric' as needed.
        # NOTE: month/day were previously written as the octal literals 0o1
        # (a Python-2 `01` leftover); plain 1 is the same value and clearer.
        self.gap_data = {
            'source': 'test',
            'status': 'complete',
            'status_date': datetime.date(2017, 1, 1),
            'metric': 5,
            'version': '0.1',
            'date': datetime.date(2016, 1, 1),
            'eligibility': True,
            'assessment': self.assessment,
            'view': self.property_view,
        }
        self.urls = ['http://example.com', 'http://example.org']

    def test_audit_log_serializer(self):
        """Test to_representation method."""
        # test with AUDIT_USER_CREATE
        audit_log = self.audit_log_factory.get_property_audit_log()
        result = PropertyAuditLogReadOnlySerializer(audit_log).data
        self.assertEqual(result['description'], 'test audit log')
        self.assertEqual(result['date_edited'], audit_log.created.ctime())
        self.assertEqual(result['source'], 'UserCreate')
        self.assertIsNone(result['changed_fields'])
        self.assertEqual(result['state']['city'], 'Boring')
        # test with AUDIT_USER_EDIT: description holds the JSON-encoded
        # list of changed fields, which the serializer decodes.
        changed_fields = ['a', 'b', 'c']
        audit_log = self.audit_log_factory.get_property_audit_log(
            record_type=AUDIT_USER_EDIT, description=json.dumps(changed_fields))
        result = PropertyAuditLogReadOnlySerializer(audit_log).data
        self.assertEqual(result['description'], 'User edit')
        self.assertEqual(result['source'], 'UserEdit')
        self.assertEqual(result['changed_fields'], changed_fields)

    def test_property_view_list_serializer(self):
        """Test to_representation method."""
        property_view_1 = self.property_view_factory.get_property_view()
        property_view_2 = self.property_view_factory.get_property_view()
        gap1_data = self.gap_data.copy()
        gap2_data = self.gap_data.copy()
        gap1_data['view'] = property_view_1
        gap2_data['view'] = property_view_2
        gap2_data['metric'] = 4
        self.gap_factory.get_green_assessment_property(**gap1_data)
        self.gap_factory.get_green_assessment_property(**gap2_data)

        # serialize from a plain list of views
        serializer = PropertyViewListSerializer(child=PropertyViewSerializer())
        result = serializer.to_representation(
            [property_view_1, property_view_2])
        self.assertEqual(result[0]['cycle']['id'], property_view_1.cycle_id)
        self.assertEqual(result[1]['cycle']['id'], property_view_2.cycle_id)
        self.assertEqual(result[0]['state']['id'], property_view_1.state_id)
        self.assertEqual(result[1]['state']['id'], property_view_2.state_id)
        self.assertEqual(result[0]['certifications'][0]['score'], 5)
        self.assertEqual(result[1]['certifications'][0]['score'], 4)
        self.assertEqual(result[0]['certifications'][0]['assessment']['name'],
                         self.assessment.name)
        self.assertEqual(result[1]['certifications'][0]['assessment']['name'],
                         self.assessment.name)

        # with queryset: same expectations should hold
        serializer = PropertyViewListSerializer(child=PropertyViewSerializer())
        queryset = PropertyView.objects.filter(
            id__in=[property_view_1.id, property_view_2.id]).order_by('id')
        result = serializer.to_representation(queryset)
        self.assertEqual(result[0]['cycle']['id'], property_view_1.cycle_id)
        self.assertEqual(result[1]['cycle']['id'], property_view_2.cycle_id)
        self.assertEqual(result[0]['state']['id'], property_view_1.state_id)
        self.assertEqual(result[1]['state']['id'], property_view_2.state_id)
        self.assertEqual(result[0]['certifications'][0]['score'], 5)
        self.assertEqual(result[1]['certifications'][0]['score'], 4)
        self.assertEqual(result[0]['certifications'][0]['assessment']['name'],
                         self.assessment.name)
        self.assertEqual(result[1]['certifications'][0]['assessment']['name'],
                         self.assessment.name)

    def test_property_list_serializer(self):
        """Test PropertyListSerializer.to_representation"""
        # TODO test to representation
        property1 = self.property_factory.get_property()
        property2 = self.property_factory.get_property()
        expected = [
            OrderedDict([
                ('id', property1.id),
                ('campus', False),
                ('parent_property', None),
            ]),
            OrderedDict([
                ('id', property2.id),
                ('campus', False),
                ('parent_property', None),
            ]),
        ]
        serializer = PropertyListSerializer(child=PropertyMinimalSerializer())
        result = serializer.to_representation([property1, property2])
        self.assertEqual(expected, result)
class GreenButtonImportTest(DataMappingBaseTestCase):
    """Integration tests for importing GreenButton XML meter data through the
    save-raw-data endpoint: meter/reading creation, de-duplication against
    existing meters, overwrite of overlapping readings, and response payloads."""

    def setUp(self):
        self.user_details = {
            'username': '******',
            'password': '******',
        }
        self.user = User.objects.create_superuser(email='*****@*****.**', **self.user_details)
        self.org, _, _ = create_organization(self.user)
        self.client.login(**self.user_details)
        # Build one property (state + view) that the import file will target.
        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        property_details = self.property_state_factory.get_details()
        property_details['organization_id'] = self.org.id
        state_1 = PropertyState(**property_details)
        state_1.save()
        self.state_1 = PropertyState.objects.get(pk=state_1.id)
        self.cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        self.cycle = self.cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=get_current_timezone()))
        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_1 = self.property_factory.get_property()
        self.property_view_1 = PropertyView.objects.create(
            property=self.property_1, cycle=self.cycle, state=self.state_1)
        self.import_record = ImportRecord.objects.create(
            owner=self.user,
            last_modified_by=self.user,
            super_organization=self.org)
        # Load the GreenButton fixture from the tests' data directory.
        filename = "example-GreenButton-data.xml"
        filepath = os.path.dirname(
            os.path.abspath(__file__)) + "/data/" + filename
        self.import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="GreenButton",
            uploaded_filename=filename,
            file=SimpleUploadedFile(name=filename,
                                    content=open(filepath, 'rb').read()),
            cycle=self.cycle,
            # matching_results_data is (ab)used here to tell the importer
            # which property the meters belong to.
            matching_results_data={"property_id": self.property_1.id})
        self.tz_obj = timezone(TIME_ZONE)

    def test_green_button_import_base_case(self):
        """Importing the fixture creates one electric meter with two kWh readings."""
        url = reverse("api:v2:import_files-save-raw-data",
                      args=[self.import_file.id])
        post_params = {
            'cycle_id': self.cycle.pk,
            'organization_id': self.org.pk,
        }
        self.client.post(url, post_params)

        refreshed_property_1 = Property.objects.get(pk=self.property_1.id)
        self.assertEqual(refreshed_property_1.meters.all().count(), 1)

        meter_1 = refreshed_property_1.meters.get(type=Meter.ELECTRICITY_GRID)
        self.assertEqual(meter_1.source, Meter.GREENBUTTON)
        self.assertEqual(
            meter_1.source_id,
            'User/6150855/UsagePoint/409483/MeterReading/1/IntervalBlock/1')
        self.assertEqual(meter_1.is_virtual, False)
        self.assertEqual(meter_1.meter_readings.all().count(), 2)

        meter_reading_10, meter_reading_11 = list(
            meter_1.meter_readings.order_by('start_time').all())

        self.assertEqual(
            meter_reading_10.start_time,
            make_aware(datetime(2011, 3, 5, 21, 0, 0), timezone=self.tz_obj))
        self.assertEqual(
            meter_reading_10.end_time,
            make_aware(datetime(2011, 3, 5, 21, 15, 0), timezone=self.tz_obj))
        # Raw Wh values are converted to kBtu via the 3.41 kWh factor.
        self.assertEqual(meter_reading_10.reading, 1790 * 3.41 / 1000)
        self.assertEqual(meter_reading_10.source_unit, 'kWh (thousand Watt-hours)')
        self.assertEqual(meter_reading_10.conversion_factor, 3.41)

        self.assertEqual(
            meter_reading_11.start_time,
            make_aware(datetime(2011, 3, 5, 21, 15, 0), timezone=self.tz_obj))
        self.assertEqual(
            meter_reading_11.end_time,
            make_aware(datetime(2011, 3, 5, 21, 30, 0), timezone=self.tz_obj))
        self.assertEqual(meter_reading_11.reading, 1791 * 3.41 / 1000)
        self.assertEqual(meter_reading_11.source_unit, 'kWh (thousand Watt-hours)')
        self.assertEqual(meter_reading_11.conversion_factor, 3.41)

        # matching_results_data gets cleared out since the field wasn't meant for this
        refreshed_import_file = ImportFile.objects.get(pk=self.import_file.id)
        self.assertEqual(refreshed_import_file.matching_results_data, {})
        # file should be disassociated from cycle too
        self.assertEqual(refreshed_import_file.cycle_id, None)

    def test_existing_meter_is_found_and_used_if_import_file_should_reference_it(
            self):
        """An existing meter with matching source/source_id/type is reused, not duplicated."""
        property = Property.objects.get(pk=self.property_1.id)

        # Create a meter with the same details of the meter in the import file
        unsaved_meter = Meter(
            property=property,
            source=Meter.GREENBUTTON,
            source_id=
            'User/6150855/UsagePoint/409483/MeterReading/1/IntervalBlock/1',
            type=Meter.ELECTRICITY_GRID,
        )
        unsaved_meter.save()
        existing_meter = Meter.objects.get(pk=unsaved_meter.id)

        # Create a reading with a different date from those in the import file
        unsaved_meter_reading = MeterReading(
            meter=existing_meter,
            start_time=make_aware(datetime(2018, 1, 1, 0, 0, 0),
                                  timezone=self.tz_obj),
            end_time=make_aware(datetime(2018, 2, 1, 0, 0, 0),
                                timezone=self.tz_obj),
            reading=12345,
        )
        unsaved_meter_reading.save()
        existing_meter_reading = MeterReading.objects.get(reading=12345)

        url = reverse("api:v2:import_files-save-raw-data",
                      args=[self.import_file.id])
        post_params = {
            'cycle_id': self.cycle.pk,
            'organization_id': self.org.pk,
        }
        self.client.post(url, post_params)

        refreshed_property_1 = Property.objects.get(pk=self.property_1.id)
        self.assertEqual(refreshed_property_1.meters.all().count(), 1)

        refreshed_meter = refreshed_property_1.meters.get(
            type=Meter.ELECTRICITY_GRID)

        meter_reading_10, meter_reading_11, meter_reading_12 = list(
            refreshed_meter.meter_readings.order_by('start_time').all())
        self.assertEqual(meter_reading_10.reading, 1790 * 3.41 / 1000)
        self.assertEqual(meter_reading_11.reading, 1791 * 3.41 / 1000)

        # Sanity check to be sure, nothing was changed with existing meter reading
        self.assertEqual(meter_reading_12, existing_meter_reading)

    def test_existing_meter_reading_has_reading_source_unit_and_conversion_factor_updated_if_import_file_references_previous_entry(
            self):
        """A reading with the same start_time as an imported one is overwritten in place."""
        property = Property.objects.get(pk=self.property_1.id)

        # Create a meter with the same details of one meter in the import file
        unsaved_meter = Meter(
            property=property,
            source=Meter.GREENBUTTON,
            source_id=
            'User/6150855/UsagePoint/409483/MeterReading/1/IntervalBlock/1',
            type=Meter.ELECTRICITY_GRID,
        )
        unsaved_meter.save()
        existing_meter = Meter.objects.get(pk=unsaved_meter.id)

        # Create a reading with the same date as one from the import file but different reading
        start_time = make_aware(datetime(2011, 3, 5, 21, 0, 0),
                                timezone=self.tz_obj)
        end_time = make_aware(datetime(2011, 3, 5, 21, 15, 0),
                              timezone=self.tz_obj)

        unsaved_meter_reading = MeterReading(meter=existing_meter,
                                             start_time=start_time,
                                             end_time=end_time,
                                             reading=1000,
                                             source_unit="GJ",
                                             conversion_factor=947.82)
        unsaved_meter_reading.save()

        url = reverse("api:v2:import_files-save-raw-data",
                      args=[self.import_file.id])
        post_params = {
            'cycle_id': self.cycle.pk,
            'organization_id': self.org.pk,
        }
        self.client.post(url, post_params)

        # Just as in the first test, 2 meter readings should exist
        self.assertEqual(MeterReading.objects.all().count(), 2)

        refreshed_property = Property.objects.get(pk=self.property_1.id)
        refreshed_meter = refreshed_property.meters.get(
            type=Meter.ELECTRICITY_GRID)
        meter_reading = refreshed_meter.meter_readings.get(
            start_time=start_time)

        # The pre-existing GJ entry was replaced by the imported kWh values.
        self.assertEqual(meter_reading.end_time, end_time)
        self.assertEqual(meter_reading.reading, 1790 * 3.41 / 1000)
        self.assertEqual(meter_reading.source_unit, 'kWh (thousand Watt-hours)')
        self.assertEqual(meter_reading.conversion_factor, 3.41)

    def test_the_response_contains_expected_and_actual_reading_counts_for_pm_ids(
            self):
        """The endpoint's response summarizes incoming vs successfully imported counts."""
        url = reverse("api:v2:import_files-save-raw-data",
                      args=[self.import_file.id])
        post_params = {
            'cycle_id': self.cycle.pk,
            'organization_id': self.org.pk,
        }
        response = self.client.post(url, post_params)

        result = json.loads(response.content)

        expectation = [
            {
                "source_id": "409483",
                "incoming": 2,
                "type": "Electric - Grid",
                "successfully_imported": 2,
            },
        ]
        self.assertEqual(result['message'], expectation)

    def test_error_noted_in_response_if_meter_has_overlapping_readings_in_the_same_batch(
            self):
        """Overlapping readings within one file are reported as an error in the summary."""
        filename = 'example-GreenButton-data-1002-1-dup.xml'
        filepath = os.path.dirname(
            os.path.abspath(__file__)) + "/data/" + filename

        one_dup_import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="GreenButton",
            uploaded_filename=filename,
            file=SimpleUploadedFile(name=filename,
                                    content=open(filepath, 'rb').read()),
            cycle=self.cycle,
            matching_results_data={"property_id": self.property_1.id})

        url = reverse("api:v2:import_files-save-raw-data",
                      args=[one_dup_import_file.id])
        post_params = {
            'cycle_id': self.cycle.pk,
            'organization_id': self.org.pk,
        }
        response = self.client.post(url, post_params)

        result = json.loads(response.content)

        expectation = [
            {
                "source_id": "409483",
                "type": "Electric - Grid",
                "incoming": 1002,
                "successfully_imported": 1000,
                "errors": 'Overlapping readings.',
            },
        ]
        self.assertEqual(result['message'], expectation)
class PropertyViewTests(DeleteModelsTestCase):
    """API tests for the property list/detail endpoints: list + edit round-trip,
    column-profile filtering, identifier/analysis-state search, and the
    meters-exist check."""

    def setUp(self):
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.user = User.objects.create_superuser(**user_details)
        self.org, self.org_user, _ = create_organization(self.user)
        self.column_factory = FakeColumnFactory(organization=self.org)
        self.cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.property_view_factory = FakePropertyViewFactory(
            organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)
        self.cycle = self.cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=get_current_timezone()))
        self.column_list_factory = FakeColumnListSettingsFactory(
            organization=self.org)
        self.client.login(**user_details)

    def test_get_and_edit_properties(self):
        """GET lists the property; PUT updates the state and bumps `updated`, not `created`."""
        state = self.property_state_factory.get_property_state()
        prprty = self.property_factory.get_property()
        view = PropertyView.objects.create(property=prprty,
                                           cycle=self.cycle,
                                           state=state)
        params = {
            'organization_id': self.org.pk,
            'page': 1,
            'per_page': 999999999,
            'columns': COLUMNS_TO_SEND,
        }
        url = reverse('api:v2.1:properties-list') + '?cycle_id={}'.format(
            self.cycle.pk)
        response = self.client.get(url, params)
        data = json.loads(response.content)
        self.assertEqual(len(data['properties']), 1)
        result = data['properties'][0]
        self.assertEqual(result['state']['address_line_1'],
                         state.address_line_1)

        db_created_time = result['created']
        db_updated_time = result['updated']
        self.assertTrue(db_created_time is not None)
        self.assertTrue(db_updated_time is not None)

        # update the address
        new_data = {"state": {"address_line_1": "742 Evergreen Terrace"}}
        url = reverse('api:v2:properties-detail', args=[
            view.id
        ]) + '?organization_id={}'.format(self.org.pk)
        response = self.client.put(url, json.dumps(new_data),
                                   content_type='application/json')
        data = json.loads(response.content)
        self.assertEqual(data['status'], 'success')

        # the above call returns data from the PropertyState, need to get the Property --
        # call the get on the same API to retrieve it
        response = self.client.get(url, content_type='application/json')
        data = json.loads(response.content)
        # make sure the address was updated and that the datetimes were modified
        self.assertEqual(data['status'], 'success')
        self.assertEqual(data['state']['address_line_1'],
                         '742 Evergreen Terrace')
        # `created` is unchanged (compared at second precision)...
        self.assertEqual(
            datetime.strptime(db_created_time,
                              "%Y-%m-%dT%H:%M:%S.%fZ").replace(microsecond=0),
            datetime.strptime(data['property']['created'],
                              "%Y-%m-%dT%H:%M:%S.%fZ").replace(microsecond=0))
        # ...while `updated` moved forward.
        self.assertGreater(
            datetime.strptime(data['property']['updated'],
                              "%Y-%m-%dT%H:%M:%S.%fZ"),
            datetime.strptime(db_updated_time, "%Y-%m-%dT%H:%M:%S.%fZ"))

    def test_list_properties_with_profile_id(self):
        """A column-list profile restricts the returned state fields."""
        state = self.property_state_factory.get_property_state(
            extra_data={"field_1": "value_1"})
        prprty = self.property_factory.get_property()
        PropertyView.objects.create(property=prprty,
                                    cycle=self.cycle,
                                    state=state)

        # save all the columns in the state to the database so we can setup column list settings
        Column.save_column_names(state)
        # get the columnlistsetting (default) for all columns
        columnlistsetting = self.column_list_factory.get_columnlistsettings(
            columns=['address_line_1', 'field_1'])

        params = {
            'organization_id': self.org.pk,
            'profile_id': columnlistsetting.id,
        }
        url = reverse('api:v2.1:properties-list') + '?cycle_id={}'.format(
            self.cycle.pk)
        response = self.client.get(url, params)
        data = response.json()
        self.assertEqual(len(data['properties']), 1)
        result = data['properties'][0]
        self.assertEqual(result['state']['address_line_1'],
                         state.address_line_1)
        self.assertEqual(result['state']['extra_data']['field_1'], 'value_1')
        # 'city' is not in the profile, so it must not be returned
        self.assertFalse(result['state'].get('city', None))

    def test_search_identifier(self):
        """Filter the list endpoint by identifier and/or analysis_state."""
        # Five views: two matching identifier '123', two in state Queued.
        self.property_view_factory.get_property_view(cycle=self.cycle,
                                                     custom_id_1='123456')
        self.property_view_factory.get_property_view(
            cycle=self.cycle, custom_id_1='987654 Long Street')
        self.property_view_factory.get_property_view(
            cycle=self.cycle, address_line_1='123 Main Street')
        self.property_view_factory.get_property_view(
            cycle=self.cycle,
            address_line_1='Hamilton Road',
            analysis_state=PropertyState.ANALYSIS_STATE_QUEUED)
        self.property_view_factory.get_property_view(
            cycle=self.cycle,
            custom_id_1='long road',
            analysis_state=PropertyState.ANALYSIS_STATE_QUEUED)

        # Typically looks like this
        # http://localhost:8000/api/v2.1/properties/?organization_id=265&cycle=219&identifier=09-IS

        # check for all items
        query_params = "?cycle={}&organization_id={}".format(
            self.cycle.pk, self.org.pk)
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        self.assertEqual(len(results), 5)

        # check for 2 items with 123
        query_params = "?cycle={}&organization_id={}&identifier={}".format(
            self.cycle.pk, self.org.pk, '123')
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        # print out the result of this when there are more than two in an attempt to catch the
        # non-deterministic part of this test
        if len(results) > 2:
            print(results)
        self.assertEqual(len(results), 2)

        # check the analysis states
        query_params = "?cycle={}&organization_id={}&analysis_state={}".format(
            self.cycle.pk, self.org.pk, 'Completed')
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        self.assertEqual(len(results), 0)

        query_params = "?cycle={}&organization_id={}&analysis_state={}".format(
            self.cycle.pk, self.org.pk, 'Not Started')
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        self.assertEqual(len(results), 3)

        query_params = "?cycle={}&organization_id={}&analysis_state={}".format(
            self.cycle.pk, self.org.pk, 'Queued')
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        self.assertEqual(len(results), 2)

        # check the combination of both the identifier and the analysis state
        query_params = "?cycle={}&organization_id={}&identifier={}&analysis_state={}".format(
            self.cycle.pk, self.org.pk, 'Long', 'Queued')
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        self.assertEqual(len(results), 1)

    def test_meters_exist(self):
        """meters-exist returns true iff any of the given properties has meters."""
        # Create a property set with meters
        state_1 = self.property_state_factory.get_property_state()
        property_1 = self.property_factory.get_property()
        PropertyView.objects.create(property=property_1,
                                    cycle=self.cycle,
                                    state=state_1)

        import_record = ImportRecord.objects.create(
            owner=self.user,
            last_modified_by=self.user,
            super_organization=self.org)
        filename = "example-GreenButton-data.xml"
        filepath = os.path.dirname(
            os.path.abspath(__file__)) + "/data/" + filename
        import_file = ImportFile.objects.create(
            import_record=import_record,
            source_type="GreenButton",
            uploaded_filename=filename,
            file=SimpleUploadedFile(name=filename,
                                    content=open(filepath, 'rb').read()),
            cycle=self.cycle,
            matching_results_data={
                "property_id": property_1.id
            }  # this is how target property is specified
        )
        gb_import_url = reverse("api:v2:import_files-save-raw-data",
                                args=[import_file.id])
        gb_import_post_params = {
            'cycle_id': self.cycle.pk,
            'organization_id': self.org.pk,
        }
        self.client.post(gb_import_url, gb_import_post_params)

        # Create a property set without meters
        state_2 = self.property_state_factory.get_property_state()
        property_2 = self.property_factory.get_property()
        PropertyView.objects.create(property=property_2,
                                    cycle=self.cycle,
                                    state=state_2)

        url = reverse('api:v2:properties-meters-exist')

        true_post_params = json.dumps(
            {'inventory_ids': [property_2.pk, property_1.pk]})
        true_result = self.client.post(url,
                                       true_post_params,
                                       content_type='application/json')
        self.assertEqual(b'true', true_result.content)

        false_post_params = json.dumps({'inventory_ids': [property_2.pk]})
        false_result = self.client.post(url,
                                        false_post_params,
                                        content_type='application/json')
        self.assertEqual(b'false', false_result.content)
class InventoryViewTests(DeleteModelsTestCase):
    """Tests for BuildingSync/HPXML export and BuildingSync file upload
    endpoints on properties.

    NOTE(review): a class with this exact name is defined earlier in this
    module; this later definition shadows it, so the earlier class's tests
    never run. One of the two should be renamed -- confirm intent before
    changing, since unittest discovers tests by class name.
    """

    def setUp(self):
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.user = User.objects.create_superuser(**user_details)
        self.org, _, _ = create_organization(self.user)
        self.status_label = StatusLabel.objects.create(
            name='test', super_organization=self.org)
        self.column_factory = FakeColumnFactory(organization=self.org)
        self.cycle_factory = FakeCycleFactory(organization=self.org,
                                              user=self.user)
        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)
        self.cycle = self.cycle_factory.get_cycle(start=datetime(
            2010, 10, 10, tzinfo=timezone.get_current_timezone()))
        self.client.login(**user_details)

    def _upload_building_file(self, filename):
        """POST ``filename`` to the building_file endpoint as a BuildingSync
        file and return the response.

        Uses a context manager so the file handle is always closed; the
        original inline ``open(filename, 'rb')`` leaked the handle.
        """
        url = reverse('api:v2:building_file-list')
        with open(filename, 'rb') as f:
            fsysparams = {
                'file': f,
                'file_type': 'BuildingSync',
                'organization_id': self.org.id,
                'cycle_id': self.cycle.id
            }
            return self.client.post(url, fsysparams)

    def test_get_building_sync(self):
        """Exported BuildingSync XML contains the state's gross floor area."""
        state = self.property_state_factory.get_property_state()
        prprty = self.property_factory.get_property()
        pv = PropertyView.objects.create(property=prprty, cycle=self.cycle,
                                         state=state)

        # go to buildingsync endpoint
        params = {'organization_id': self.org.pk}
        url = reverse('api:v2.1:properties-building-sync', args=[pv.id])
        response = self.client.get(url, params)
        self.assertIn(
            '<auc:FloorAreaValue>%s.0</auc:FloorAreaValue>' %
            state.gross_floor_area, response.content.decode("utf-8"))

    def test_upload_and_get_building_sync(self):
        """Round-trip: import a BuildingSync file, then export it again."""
        filename = path.join(BASE_DIR, 'seed', 'building_sync', 'tests',
                             'data', 'ex_1.xml')
        response = self._upload_building_file(filename)
        self.assertEqual(response.status_code, 200)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        self.assertEqual(result['message'], 'successfully imported file')
        self.assertEqual(
            result['data']['property_view']['state']['year_built'], 1967)
        self.assertEqual(
            result['data']['property_view']['state']['postal_code'], '94111')

        # now get the building sync that was just uploaded
        property_id = result['data']['property_view']['id']
        url = reverse('api:v2.1:properties-building-sync', args=[property_id])
        response = self.client.get(url)
        self.assertIn('<auc:YearOfConstruction>1967</auc:YearOfConstruction>',
                      response.content.decode("utf-8"))

    def test_upload_with_measure_duplicates(self):
        """Importing the same BuildingSync file twice must not duplicate
        measures or scenarios on the resulting property state."""
        filename = path.join(BASE_DIR, 'seed', 'building_sync', 'tests',
                             'data', 'buildingsync_ex01_measures.xml')
        response = self._upload_building_file(filename)
        self.assertEqual(response.status_code, 200)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        expected_message = "successfully imported file with warnings ['Measure category and name is not valid other_electric_motors_and_drives:replace_with_higher_efficiency', 'Measure category and name is not valid other_hvac:install_demand_control_ventilation', 'Measure associated with scenario not found. Scenario: Replace with higher efficiency Only, Measure name: Measure22', 'Measure associated with scenario not found. Scenario: Install demand control ventilation Only, Measure name: Measure24']"
        self.assertEqual(result['message'], expected_message)
        self.assertEqual(
            len(result['data']['property_view']['state']['measures']), 28)
        self.assertEqual(
            len(result['data']['property_view']['state']['scenarios']), 31)
        self.assertEqual(
            result['data']['property_view']['state']['year_built'], 1967)
        self.assertEqual(
            result['data']['property_view']['state']['postal_code'], '94111')

        # upload the same file again -- counts must stay the same
        response = self._upload_building_file(filename)
        self.assertEqual(response.status_code, 200)
        result = json.loads(response.content)
        self.assertEqual(
            len(result['data']['property_view']['state']['measures']), 28)
        self.assertEqual(
            len(result['data']['property_view']['state']['scenarios']), 31)

    def test_upload_and_get_building_sync_diff_ns(self):
        """Import works when the BuildingSync file uses a different XML
        namespace prefix, and the export still round-trips."""
        filename = path.join(BASE_DIR, 'seed', 'building_sync', 'tests',
                             'data', 'ex_1_different_namespace.xml')
        response = self._upload_building_file(filename)
        self.assertEqual(response.status_code, 200)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        self.assertEqual(result['message'], 'successfully imported file')
        self.assertEqual(
            result['data']['property_view']['state']['year_built'], 1889)

        # now get the building sync that was just uploaded
        property_id = result['data']['property_view']['id']
        url = reverse('api:v2.1:properties-building-sync', args=[property_id])
        response = self.client.get(url)
        self.assertIn('<auc:YearOfConstruction>1889</auc:YearOfConstruction>',
                      response.content.decode('utf-8'))

    def test_get_hpxml(self):
        """Exported HPXML contains the state's gross floor area."""
        state = self.property_state_factory.get_property_state()
        prprty = self.property_factory.get_property()
        pv = PropertyView.objects.create(property=prprty, cycle=self.cycle,
                                         state=state)

        # go to hpxml endpoint
        params = {'organization_id': self.org.pk}
        url = reverse('api:v2.1:properties-hpxml', args=[pv.id])
        response = self.client.get(url, params)
        self.assertIn(
            '<GrossFloorArea>%s.0</GrossFloorArea>' % state.gross_floor_area,
            response.content.decode('utf-8'))
class PropertyUnmergeViewTests(DeleteModelsTestCase):
    """Tests unmerging previously merged properties that carry meters.

    setUp creates two properties, imports two GreenButton meter readings onto
    the first, then merges the two properties via the merge endpoint.
    """

    def setUp(self):
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.user = User.objects.create_superuser(**user_details)
        self.org, self.org_user, _ = create_organization(self.user)
        cycle_factory = FakeCycleFactory(organization=self.org,
                                         user=self.user)
        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.cycle = cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=get_current_timezone()))
        self.client.login(**user_details)

        self.state_1 = self.property_state_factory.get_property_state(
            address_line_1='1 property state',
            # this allows the Property to be targetted for PM meter additions
            pm_property_id='5766973')
        self.property_1 = self.property_factory.get_property()
        PropertyView.objects.create(property=self.property_1,
                                    cycle=self.cycle, state=self.state_1)

        self.state_2 = self.property_state_factory.get_property_state(
            address_line_1='2 property state')
        self.property_2 = self.property_factory.get_property()
        PropertyView.objects.create(property=self.property_2,
                                    cycle=self.cycle, state=self.state_2)

        self.import_record = ImportRecord.objects.create(
            owner=self.user,
            last_modified_by=self.user,
            super_organization=self.org)

        # Give 2 meters to one of the properties
        gb_filename = "example-GreenButton-data.xml"
        filepath = os.path.dirname(
            os.path.abspath(__file__)) + "/data/" + gb_filename
        # Read via a context manager so the handle is closed; the original
        # open(...).read() leaked the file handle.
        with open(filepath, 'rb') as f:
            gb_file_content = f.read()
        gb_import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="GreenButton",
            uploaded_filename=gb_filename,
            file=SimpleUploadedFile(name=gb_filename,
                                    content=gb_file_content),
            cycle=self.cycle,
            matching_results_data={
                "property_id": self.property_1.id
            }  # this is how target property is specified
        )
        gb_import_url = reverse("api:v2:import_files-save-raw-data",
                                args=[gb_import_file.id])
        gb_import_post_params = {
            'cycle_id': self.cycle.pk,
            'organization_id': self.org.pk,
        }
        self.client.post(gb_import_url, gb_import_post_params)

        # Merge the properties
        url = reverse('api:v2:properties-merge') + \
            '?organization_id={}'.format(self.org.pk)
        post_params = json.dumps({
            # priority given to state_1
            'state_ids': [self.state_2.pk, self.state_1.pk]
        })
        self.client.post(url, post_params, content_type='application/json')

    def test_unmerging_two_properties_with_meters_gives_meters_to_both_of_the_resulting_records(
            self):
        # Unmerge the properties; after the merge in setUp there's only one
        # PropertyView, so first() is the merged view.
        view_id = PropertyView.objects.first().id
        url = reverse('api:v2:properties-unmerge', args=[view_id]) + \
            '?organization_id={}'.format(self.org.pk)
        self.client.post(url, content_type='application/json')

        # Verify 2 -Views now exist
        self.assertEqual(PropertyView.objects.count(), 2)

        # Check that meters and readings of each -View exist and verify they
        # are identical.
        reading_sets = []
        for view in PropertyView.objects.all():
            self.assertEqual(view.property.meters.count(), 1)
            # hoist the meter lookup: the original re-queried .first() twice
            meter = view.property.meters.first()
            self.assertEqual(meter.meter_readings.count(), 2)
            reading_sets.append([{
                'start_time': reading.start_time,
                'end_time': reading.end_time,
                'reading': reading.reading,
                'source_unit': reading.source_unit,
                'conversion_factor': reading.conversion_factor,
            } for reading in meter.meter_readings.all().order_by('start_time')])

        self.assertEqual(reading_sets[0], reading_sets[1])