Example #1
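These snippets are excerpts from larger test modules, so imports are omitted. As a rough, unverified sketch, this first example would need approximately the following (module paths are assumptions based on the SEED project layout):

# Assumed imports for this snippet (best guess, not part of the original source):
# from datetime import datetime
# from django.urls import reverse  # older SEED releases used django.core.urlresolvers
# from django.utils import timezone
# from seed.landing.models import SEEDUser as User
# from seed.models import PropertyView, StatusLabel
# from seed.test_helpers.fake import (
#     FakeColumnFactory, FakeCycleFactory, FakePropertyFactory,
#     FakePropertyStateFactory, FakeTaxLotStateFactory,
# )
# from seed.tests.util import DeleteModelsTestCase
# from seed.utils.organizations import create_organization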
class InventoryViewTests(DeleteModelsTestCase):
    def setUp(self):
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.user = User.objects.create_superuser(**user_details)
        self.org, _, _ = create_organization(self.user)
        self.status_label = StatusLabel.objects.create(
            name='test', super_organization=self.org
        )

        self.column_factory = FakeColumnFactory(organization=self.org)
        self.cycle_factory = FakeCycleFactory(organization=self.org,
                                              user=self.user)
        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(organization=self.org)

        self.cycle = self.cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=timezone.get_current_timezone())
        )

        self.client.login(**user_details)

    def test_get_building_sync(self):
        state = self.property_state_factory.get_property_state()
        prprty = self.property_factory.get_property()
        pv = PropertyView.objects.create(
            property=prprty, cycle=self.cycle, state=state
        )

        # go to buildingsync endpoint
        params = {
            'organization_id': self.org.pk
        }
        url = reverse('api:v2.1:properties-building-sync', args=[pv.id])
        response = self.client.get(url, params)
        self.assertIn('<auc:FloorAreaValue>%s.0</auc:FloorAreaValue>' % state.gross_floor_area, response.content)

    def test_get_hpxml(self):
        state = self.property_state_factory.get_property_state()
        prprty = self.property_factory.get_property()
        pv = PropertyView.objects.create(
            property=prprty, cycle=self.cycle, state=state
        )

        # go to hpxml endpoint
        params = {
            'organization_id': self.org.pk
        }
        url = reverse('api:v2.1:properties-hpxml', args=[pv.id])
        response = self.client.get(url, params)
        self.assertIn('<GrossFloorArea>%s.0</GrossFloorArea>' % state.gross_floor_area, response.content)
Example #2
    def test_taxlots_merge_without_losing_pairings(self):
        # Create 2 pairings and distribute them to the two -Views.
        property_factory = FakePropertyFactory(organization=self.org)
        property_state_factory = FakePropertyStateFactory(
            organization=self.org)

        property_1 = property_factory.get_property()
        state_1 = property_state_factory.get_property_state()
        property_view_1 = PropertyView.objects.create(property=property_1,
                                                      cycle=self.cycle,
                                                      state=state_1)

        property_2 = property_factory.get_property()
        state_2 = property_state_factory.get_property_state()
        property_view_2 = PropertyView.objects.create(property=property_2,
                                                      cycle=self.cycle,
                                                      state=state_2)

        TaxLotProperty(primary=True,
                       cycle_id=self.cycle.id,
                       property_view_id=property_view_1.id,
                       taxlot_view_id=self.view_1.id).save()

        TaxLotProperty(primary=True,
                       cycle_id=self.cycle.id,
                       property_view_id=property_view_2.id,
                       taxlot_view_id=self.view_2.id).save()

        # Merge the taxlots
        url = reverse('api:v2:taxlots-merge') + '?organization_id={}'.format(
            self.org.pk)
        post_params = json.dumps({
            'state_ids': [self.state_2.pk,
                          self.state_1.pk]  # priority given to state_1
        })
        self.client.post(url, post_params, content_type='application/json')

        # There should still be 2 TaxLotProperties
        self.assertEqual(TaxLotProperty.objects.count(), 2)

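        # After the merge only one TaxLotView should remain, and both pairings should have carried over to it.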
        taxlot_view = TaxLotView.objects.first()
        paired_propertyview_ids = list(
            TaxLotProperty.objects.filter(
                taxlot_view_id=taxlot_view.id).values_list('property_view_id',
                                                           flat=True))
        self.assertCountEqual(paired_propertyview_ids,
                              [property_view_1.id, property_view_2.id])
Example #3
    def setUp(self):
        self.api_view = UpdateInventoryLabelsAPIView()

        # These models can't be imported directly, so they are accessed through the API view (hence self)
        self.PropertyViewLabels = self.api_view.models['property']
        self.TaxlotViewLabels = self.api_view.models['taxlot']

        self.user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.user = User.objects.create_superuser(**self.user_details)
        self.org, _, _ = create_organization(self.user)
        self.status_label = Label.objects.create(
            name='test', super_organization=self.org
        )
        self.status_label_2 = Label.objects.create(
            name='test_2', super_organization=self.org
        )
        self.client.login(**self.user_details)

        self.label_1 = Label.objects.all()[0]
        self.label_2 = Label.objects.all()[1]
        self.label_3 = Label.objects.all()[2]
        self.label_4 = Label.objects.all()[3]

        # Create some real PropertyViews, Properties, PropertyStates, and StatusLabels since validations run against real records
        cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        cycle = cycle_factory.get_cycle(start=datetime(2010, 10, 10, tzinfo=timezone.get_current_timezone()))
        property_state_factory = FakePropertyStateFactory(organization=self.org)
        for i in range(1, 11):
            ps = property_state_factory.get_property_state()
            p = Property.objects.create(organization=self.org)
            PropertyView.objects.create(
                cycle=cycle,
                state=ps,
                property=p
            )

        self.propertyview_ids = PropertyView.objects.all().order_by('id').values_list('id', flat=True)

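        # Build a flat mock queryset of PropertyView-label links: the 10 PropertyView ids paired with
        # label_1 (x3), label_2 (x3), label_3 (x2), and label_4 (x2).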
        self.mock_propertyview_label_qs = mock_queryset_factory(
            self.PropertyViewLabels,
            flatten=True,
            propertyview_id=self.propertyview_ids,
            statuslabel_id=[self.label_1.id] * 3 + [self.label_2.id] * 3 + [self.label_3.id] * 2 + [self.label_4.id] * 2
        )
Example #4
class TestMatchingHelperMethods(DataMappingBaseTestCase):
    def setUp(self):
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars

        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)

    def test_save_state_match(self):
        # create a couple states to merge together
        ps_1 = self.property_state_factory.get_property_state(
            property_name="this should persist")
        ps_2 = self.property_state_factory.get_property_state(
            extra_data={"extra_1": "this should exist too"})

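        # Retrieve per-column merge priorities; these appear to decide which state's value takes
        # precedence when both states define the same field (SEED's merge-protection settings).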
        priorities = Column.retrieve_priorities(self.org.pk)
        merged_state = save_state_match(ps_1, ps_2, priorities)

        self.assertEqual(merged_state.merge_state, MERGE_STATE_MERGED)
        self.assertEqual(merged_state.property_name, ps_1.property_name)
        self.assertEqual(merged_state.extra_data['extra_1'],
                         "this should exist too")

        # verify that the audit log is correct.
        pal = PropertyAuditLog.objects.get(organization=self.org,
                                           state=merged_state)
        self.assertEqual(pal.name, 'System Match')
        self.assertEqual(pal.parent_state1, ps_1)
        self.assertEqual(pal.parent_state2, ps_2)
        self.assertEqual(pal.description, 'Automatic Merge')

    def test_filter_duplicate_states(self):
        for i in range(10):
            self.property_state_factory.get_property_state(
                no_default_data=True,
                address_line_1='123 The Same Address',
                # extra_data={"extra_1": "value_%s" % i},
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )
        for i in range(5):
            self.property_state_factory.get_property_state(
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )

        props = self.import_file.find_unmatched_property_states()
        uniq_state_ids, dup_state_count = filter_duplicate_states(props)

        # There should be 6 uniq states: 5 from the second loop, and one from 'The Same Address'
        self.assertEqual(len(uniq_state_ids), 6)
        self.assertEqual(dup_state_count, 9)
Example #5
File: test_labels.py, Project: zolton1/seed
    def test_error_occurs_when_trying_to_apply_a_label_to_propertyview_from_a_different_org(
            self):
        org_1_property = Property.objects.create(organization=self.org)
        property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        org_1_propertystate = property_state_factory.get_property_state()
        org_1_propertyview = PropertyView.objects.create(
            property=org_1_property,
            state=org_1_propertystate,
            cycle=self.cycle)

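        # Each failing attempt below is wrapped in transaction.atomic() so the expected
        # IntegrityError does not leave the surrounding test transaction unusable.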
        # Via Label API View
        with transaction.atomic():
            with self.assertRaises(IntegrityError):
                self.api_view.add_labels(
                    self.api_view.models['property'].objects.none(),
                    'property', [org_1_propertyview.id],
                    [self.org_2_status_label.id])

        # Via PropertyView Model
        with transaction.atomic():
            with self.assertRaises(IntegrityError):
                org_1_propertyview.labels.add(self.org_2_status_label)

        # Via PropertyState Rule with Label
        org_1_dq = DataQualityCheck.objects.get(organization=self.org)
        org_1_ps_rule = org_1_dq.rules.filter(
            table_name='PropertyState').first()
        # Purposely give an Org 1 Rule an Org 2 Label
        org_1_ps_rule.status_label = self.org_2_status_label
        org_1_ps_rule.save()

        with transaction.atomic():
            with self.assertRaises(IntegrityError):
                org_1_dq.update_status_label(
                    self.PropertyViewLabels,
                    Rule.objects.get(pk=org_1_ps_rule.id),
                    org_1_propertyview.id,
                )

        self.assertFalse(
            PropertyView.objects.get(
                pk=org_1_propertyview.id).labels.all().exists())
Example #6
File: test_merge.py, Project: nW-fr/seed
class PropertyViewTests(DeleteModelsTestCase):
    def setUp(self):
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.user = User.objects.create_superuser(**user_details)
        self.org, self.org_user, _ = create_organization(self.user)
        self.cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        self.cycle = self.cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=timezone.get_current_timezone())
        )
        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(organization=self.org)
        self.property_view_factory = FakePropertyViewFactory(organization=self.org, cycle=self.cycle)
        self.taxlot_state_factory = FakeTaxLotStateFactory(organization=self.org)
        self.taxlot_view_factory = FakeTaxLotViewFactory(organization=self.org, cycle=self.cycle)

        # create an ImportRecord and ImportFile to attach the test states to
        import_record = ImportRecord.objects.create(super_organization=self.org)
        self.import_file = ImportFile.objects.create(
            import_record=import_record,
            cycle=self.cycle,
        )

    def test_match_properties_and_taxlots_with_address(self):
        # create an ImportFile for testing purposes. Seems like we would want to run this matching just on a
        # list of properties and taxlots.
        #
        # This emulates importing the following
        #   Address,                Jurisdiction Tax Lot
        #   742 Evergreen Terrace,  100;101;110;111

        lot_numbers = '100;101;110;111'
        for i in range(10):
            self.property_state_factory.get_property_state(
                address_line_1='742 Evergreen Terrace',
                lot_number=lot_numbers,
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )

        for lot_number in lot_numbers.split(';'):
            self.taxlot_state_factory.get_taxlot_state(
                address_line_1=None,
                jurisdiction_tax_lot_id=lot_number,
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )

        for ps in PropertyState.objects.filter(organization=self.org):
            print "%s -- %s -- %s" % (ps.lot_number, ps.import_file_id, ps.address_line_1)
            # pv = PropertyView.objects.get(state=ps, cycle=self.cycle)
            # TaxLotProperty.objects.filter()

        for tl in TaxLotState.objects.filter(organization=self.org):
            print "%s -- %s" % (tl.import_file_id, tl.jurisdiction_tax_lot_id)

        # for tlm in TaxLotProperty.objects.filter()
        _match_properties_and_taxlots(self.import_file.id)

        for pv in PropertyView.objects.filter(state__organization=self.org):
            print "%s -- %s" % (pv.state, pv.cycle)

        # should only have 1 PropertyView and 4 taxlot views
        self.assertEqual(PropertyView.objects.filter(state__organization=self.org).count(), 1)
        self.assertEqual(TaxLotView.objects.filter(state__organization=self.org).count(), 4)
        pv = PropertyView.objects.filter(state__organization=self.org).first()

        # there should be 4 relationships in the TaxLotProperty associated with view, one each for the taxlots defined
        self.assertEqual(TaxLotProperty.objects.filter(property_view_id=pv).count(), 4)

    def test_match_properties_and_taxlots_with_address_no_lot_number(self):
        # create an ImportFile for testing purposes. Seems like we would want to run this matching just on a
        # list of properties and taxlots.
        #
        # This emulates importing the following
        #   Address,                Jurisdiction Tax Lot
        #   742 Evergreen Terrace,  100
        #   742 Evergreen Terrace,  101
        #   742 Evergreen Terrace,  110
        #   742 Evergreen Terrace,  111

        lot_numbers = '100;101;110;111'
        for lot_number in lot_numbers.split(';'):
            self.property_state_factory.get_property_state(
                address_line_1='742 Evergreen Terrace',
                lot_number=lot_number,
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )

            self.taxlot_state_factory.get_taxlot_state(
                address_line_1=None,
                jurisdiction_tax_lot_id=lot_number,
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )

        for ps in PropertyState.objects.filter(organization=self.org):
            print "%s -- %s -- %s" % (ps.lot_number, ps.import_file_id, ps.address_line_1)

        for tl in TaxLotState.objects.filter(organization=self.org):
            print "%s -- %s" % (tl.import_file_id, tl.jurisdiction_tax_lot_id)

        _match_properties_and_taxlots(self.import_file.id)

        for pv in PropertyView.objects.filter(state__organization=self.org):
            print "%s -- %s" % (pv.state, pv.cycle)

        # should only have 1 PropertyView and 4 taxlot views
        self.assertEqual(PropertyView.objects.filter(state__organization=self.org).count(), 1)
        self.assertEqual(TaxLotView.objects.filter(state__organization=self.org).count(), 4)
        pv = PropertyView.objects.filter(state__organization=self.org).first()

        # there should be 4 relationships in the TaxLotProperty associated with view, one each for the taxlots defined
        self.assertEqual(TaxLotProperty.objects.filter(property_view_id=pv).count(), 4)

    def test_match_properties_and_taxlots_with_ubid(self):
        # create an ImportFile for testing purposes. Seems like we would want to run this matching just on a
        # list of properties and taxlots.
        #
        # This emulates importing the following
        #   UBID,    Jurisdiction Tax Lot
        #   ubid_100,     lot_1
        #   ubid_101,     lot_1
        #   ubid_110,     lot_1
        #   ubid_111,     lot_1

        ids = [('ubid_100', 'lot_1'), ('ubid_101', 'lot_1'), ('ubid_110', 'lot_1'), ('ubid_111', 'lot_1')]
        for id in ids:
            self.property_state_factory.get_property_state(
                no_default_data=True,
                ubid=id[0],
                lot_number=id[1],
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )

        self.taxlot_state_factory.get_taxlot_state(
            no_default_data=True,
            jurisdiction_tax_lot_id=ids[0][1],
            import_file_id=self.import_file.id,
            data_state=DATA_STATE_MAPPING,
        )

        for ps in PropertyState.objects.filter(organization=self.org):
            print "%s -- %s -- %s" % (ps.lot_number, ps.import_file_id, ps.ubid)
            # pv = PropertyView.objects.get(state=ps, cycle=self.cycle)
            # TaxLotProperty.objects.filter()

        for tl in TaxLotState.objects.filter(organization=self.org):
            print "%s -- %s" % (tl.import_file_id, tl.jurisdiction_tax_lot_id)

        # for tlm in TaxLotProperty.objects.filter()
        _match_properties_and_taxlots(self.import_file.id)

        for pv in PropertyView.objects.filter(state__organization=self.org):
            print "%s -- %s" % (pv.state.ubid, pv.cycle)

        # should have 4 PropertyViews and only 1 TaxLotView
        self.assertEqual(PropertyView.objects.filter(state__organization=self.org).count(), 4)
        self.assertEqual(TaxLotView.objects.filter(state__organization=self.org).count(), 1)
        tlv = TaxLotView.objects.filter(state__organization=self.org).first()

        # there should be 4 TaxLotProperty relationships associated with the view, one for each of the properties defined
        self.assertEqual(TaxLotProperty.objects.filter(taxlot_view_id=tlv).count(), 4)

    def test_match_properties_and_taxlots_with_custom_id(self):
        # create an ImportFile for testing purposes. Seems like we would want to run this matching just on a
        # list of properties and taxlots.
        #
        # This emulates importing the following
        #   Custom ID 1,    Jurisdiction Tax Lot
        #   custom_100,     lot_1
        #   custom_101,     lot_1
        #   custom_110,     lot_1
        #   custom_111,     lot_1
        ids = [('custom_100', 'lot_1'), ('custom_101', 'lot_1'), ('custom_110', 'lot_1'), ('custom_111', 'lot_1')]
        for id in ids:
            self.property_state_factory.get_property_state(
                no_default_data=True,
                custom_id_1=id[0],
                lot_number=id[1],
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )

        self.taxlot_state_factory.get_taxlot_state(
            no_default_data=True,
            jurisdiction_tax_lot_id=ids[0][1],
            import_file_id=self.import_file.id,
            data_state=DATA_STATE_MAPPING,
        )

        for ps in PropertyState.objects.filter(organization=self.org):
            print "%s -- %s -- %s" % (ps.lot_number, ps.import_file_id, ps.custom_id_1)
            # pv = PropertyView.objects.get(state=ps, cycle=self.cycle)
            # TaxLotProperty.objects.filter()

        for tl in TaxLotState.objects.filter(organization=self.org):
            print "%s -- %s" % (tl.import_file_id, tl.jurisdiction_tax_lot_id)

        # for tlm in TaxLotProperty.objects.filter()
        _match_properties_and_taxlots(self.import_file.id)

        for pv in PropertyView.objects.filter(state__organization=self.org):
            print "%s -- %s" % (pv.state, pv.cycle)

        # should have 4 PropertyViews and only 1 TaxLotView
        self.assertEqual(PropertyView.objects.filter(state__organization=self.org).count(), 4)
        self.assertEqual(TaxLotView.objects.filter(state__organization=self.org).count(), 1)
        tlv = TaxLotView.objects.filter(state__organization=self.org).first()

        # there should be 4 TaxLotProperty relationships associated with the view, one for each of the properties defined
        self.assertEqual(TaxLotProperty.objects.filter(taxlot_view_id=tlv).count(), 4)

    def test_save_state_match(self):
        # create a couple states to merge together
        ps_1 = self.property_state_factory.get_property_state(property_name="this should persist")
        ps_2 = self.property_state_factory.get_property_state(extra_data={"extra_1": "this should exist too"})

        merged_state = save_state_match(ps_1, ps_2)

        self.assertEqual(merged_state.merge_state, MERGE_STATE_MERGED)
        self.assertEqual(merged_state.property_name, ps_1.property_name)
        self.assertEqual(merged_state.extra_data['extra_1'], "this should exist too")

        # verify that the audit log is correct.
        pal = PropertyAuditLog.objects.get(organization=self.org, state=merged_state)
        self.assertEqual(pal.name, 'System Match')
        self.assertEqual(pal.parent_state1, ps_1)
        self.assertEqual(pal.parent_state2, ps_2)
        self.assertEqual(pal.description, 'Automatic Merge')
Example #7
class PropertyViewTests(DeleteModelsTestCase):
    def setUp(self):
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.user = User.objects.create_superuser(**user_details)
        self.org, self.org_user, _ = create_organization(self.user)
        self.column_factory = FakeColumnFactory(organization=self.org)
        self.cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(organization=self.org)
        self.property_view_factory = FakePropertyViewFactory(organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(organization=self.org)
        self.cycle = self.cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=timezone.get_current_timezone()))
        self.client.login(**user_details)

    def test_get_and_edit_properties(self):
        state = self.property_state_factory.get_property_state()
        prprty = self.property_factory.get_property()
        view = PropertyView.objects.create(
            property=prprty, cycle=self.cycle, state=state
        )
        params = {
            'organization_id': self.org.pk,
            'page': 1,
            'per_page': 999999999,
            'columns': COLUMNS_TO_SEND,
        }

        url = reverse('api:v2.1:properties-list') + '?cycle_id={}'.format(self.cycle.pk)
        response = self.client.get(url, params)
        data = json.loads(response.content)
        self.assertEqual(len(data['properties']), 1)
        result = data['properties'][0]
        self.assertEqual(result['state']['address_line_1'], state.address_line_1)

        db_created_time = result['created']
        db_updated_time = result['updated']
        self.assertTrue(db_created_time is not None)
        self.assertTrue(db_updated_time is not None)

        # update the address
        new_data = {
            "state": {
                "address_line_1": "742 Evergreen Terrace"
            }
        }
        url = reverse('api:v2:properties-detail', args=[view.id]) + '?organization_id={}'.format(self.org.pk)
        response = self.client.put(url, json.dumps(new_data), content_type='application/json')
        data = json.loads(response.content)
        self.assertEqual(data['status'], 'success')

        # the above call returns data from the PropertyState, need to get the Property --
        # call the get on the same API to retrieve it
        response = self.client.get(url, content_type='application/json')
        data = json.loads(response.content)
        # make sure the address was updated and that the datetimes were modified
        self.assertEqual(data['status'], 'success')
        self.assertEqual(data['state']['address_line_1'], '742 Evergreen Terrace')
        self.assertEqual(datetime.strptime(db_created_time, "%Y-%m-%dT%H:%M:%S.%fZ").replace(microsecond=0),
                         datetime.strptime(data['property']['created'], "%Y-%m-%dT%H:%M:%S.%fZ").replace(
                             microsecond=0))
        self.assertGreater(datetime.strptime(data['property']['updated'], "%Y-%m-%dT%H:%M:%S.%fZ"),
                           datetime.strptime(db_updated_time, "%Y-%m-%dT%H:%M:%S.%fZ"))

    def test_search_identifier(self):
        self.property_view_factory.get_property_view(cycle=self.cycle, custom_id_1='123456')
        self.property_view_factory.get_property_view(cycle=self.cycle, custom_id_1='987654 Long Street')
        self.property_view_factory.get_property_view(cycle=self.cycle, address_line_1='123 Main Street')
        self.property_view_factory.get_property_view(cycle=self.cycle, address_line_1='Hamilton Road',
                                                     analysis_state=PropertyState.ANALYSIS_STATE_QUEUED)
        self.property_view_factory.get_property_view(cycle=self.cycle, custom_id_1='long road',
                                                     analysis_state=PropertyState.ANALYSIS_STATE_QUEUED)

        # Typically looks like this
        # http://localhost:8000/api/v2.1/properties/?organization_id=265&cycle=219&identifier=09-IS

        # check for all items
        query_params = "?cycle={}&organization_id={}".format(self.cycle.pk, self.org.pk)
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        self.assertEqual(len(results), 5)

        # check for 2 items with 123
        query_params = "?cycle={}&organization_id={}&identifier={}".format(self.cycle.pk, self.org.pk, '123')
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        # print out the result of this when there are more than two in an attempt to catch the
        # non-deterministic part of this test
        if len(results) > 2:
            print(results)

        self.assertEqual(len(results), 2)

        # check the analysis states
        query_params = "?cycle={}&organization_id={}&analysis_state={}".format(self.cycle.pk, self.org.pk, 'Completed')
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        self.assertEqual(len(results), 0)

        query_params = "?cycle={}&organization_id={}&analysis_state={}".format(
            self.cycle.pk, self.org.pk, 'Not Started'
        )
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        self.assertEqual(len(results), 3)

        query_params = "?cycle={}&organization_id={}&analysis_state={}".format(
            self.cycle.pk, self.org.pk, 'Queued'
        )
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        self.assertEqual(len(results), 2)

        # check the combination of both the identifier and the analysis state
        query_params = "?cycle={}&organization_id={}&identifier={}&analysis_state={}".format(
            self.cycle.pk, self.org.pk, 'Long', 'Queued'
        )
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        self.assertEqual(len(results), 1)
Example #8
class TestMatching(DataMappingBaseTestCase):
    def setUp(self):
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars

        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.property_view_factory = FakePropertyViewFactory(
            organization=self.org, cycle=self.cycle)
        self.taxlot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)
        self.taxlot_view_factory = FakeTaxLotViewFactory(organization=self.org,
                                                         cycle=self.cycle)

    def test_match_properties_and_taxlots_with_address(self):
        # create an ImportFile for testing purposes. Seems like we would want to run this matching just on a
        # list of properties and taxlots.
        #
        # This emulates importing the following
        #   Address,                Jurisdiction Tax Lot
        #   742 Evergreen Terrace,  100;101;110;111

        lot_numbers = '100;101;110;111'
        for i in range(10):
            self.property_state_factory.get_property_state(
                address_line_1='742 Evergreen Terrace',
                lot_number=lot_numbers,
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )

        for lot_number in lot_numbers.split(';'):
            self.taxlot_state_factory.get_taxlot_state(
                address_line_1=None,
                jurisdiction_tax_lot_id=lot_number,
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )

        # for ps in PropertyState.objects.filter(organization=self.org):
        #     print("%s -- %s -- %s" % (ps.lot_number, ps.import_file_id, ps.address_line_1))

        # for tl in TaxLotState.objects.filter(organization=self.org):
        #     print("%s -- %s" % (tl.import_file_id, tl.jurisdiction_tax_lot_id))

        # set import_file mapping done so that matching can occur.
        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)

        # for pv in PropertyView.objects.filter(state__organization=self.org):
        #     print("%s -- %s" % (pv.state, pv.cycle))

        # should only have 1 PropertyView and 4 taxlot views
        self.assertEqual(
            PropertyView.objects.filter(state__organization=self.org).count(),
            1)
        self.assertEqual(
            TaxLotView.objects.filter(state__organization=self.org).count(), 4)
        pv = PropertyView.objects.filter(state__organization=self.org).first()

        # there should be 4 relationships in the TaxLotProperty associated with view, one each for the taxlots defined
        self.assertEqual(
            TaxLotProperty.objects.filter(property_view_id=pv).count(), 4)

    def test_match_properties_and_taxlots_with_address_no_lot_number(self):
        # create an ImportFile for testing purposes. Seems like we would want to run this matching just on a
        # list of properties and taxlots.
        #
        # This emulates importing the following
        #   Address,                Jurisdiction Tax Lot
        #   742 Evergreen Terrace,  100
        #   742 Evergreen Terrace,  101
        #   742 Evergreen Terrace,  110
        #   742 Evergreen Terrace,  111

        lot_numbers = '100;101;110;111'
        for lot_number in lot_numbers.split(';'):
            self.property_state_factory.get_property_state(
                address_line_1='742 Evergreen Terrace',
                lot_number=lot_number,
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )

            self.taxlot_state_factory.get_taxlot_state(
                address_line_1=None,
                jurisdiction_tax_lot_id=lot_number,
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )

        # for ps in PropertyState.objects.filter(organization=self.org):
        #     print("%s -- %s -- %s" % (ps.lot_number, ps.import_file_id, ps.address_line_1))

        # for tl in TaxLotState.objects.filter(organization=self.org):
        #     print("%s -- %s" % (tl.import_file_id, tl.jurisdiction_tax_lot_id))

        # set import_file mapping done so that matching can occur.
        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)

        # for pv in PropertyView.objects.filter(state__organization=self.org):
        #     print("%s -- %s" % (pv.state, pv.cycle))

        # should only have 1 PropertyView and 4 taxlot views
        self.assertEqual(
            PropertyView.objects.filter(state__organization=self.org).count(),
            1)
        self.assertEqual(
            TaxLotView.objects.filter(state__organization=self.org).count(), 4)
        pv = PropertyView.objects.filter(state__organization=self.org).first()

        # there should be 4 relationships in the TaxLotProperty associated with view, one each for the taxlots defined
        self.assertEqual(
            TaxLotProperty.objects.filter(property_view_id=pv).count(), 4)

    def test_match_properties_and_taxlots_with_ubid(self):
        # create an ImportFile for testing purposes. Seems like we would want to run this matching just on a
        # list of properties and taxlots.
        #
        # This emulates importing the following
        #   UBID,    Jurisdiction Tax Lot
        #   ubid_100,     lot_1
        #   ubid_101,     lot_1
        #   ubid_110,     lot_1
        #   ubid_111,     lot_1

        ids = [('ubid_100', 'lot_1'), ('ubid_101', 'lot_1'),
               ('ubid_110', 'lot_1'), ('ubid_111', 'lot_1')]
        for id in ids:
            self.property_state_factory.get_property_state(
                no_default_data=True,
                ubid=id[0],
                lot_number=id[1],
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )

        self.taxlot_state_factory.get_taxlot_state(
            no_default_data=True,
            jurisdiction_tax_lot_id=ids[0][1],
            import_file_id=self.import_file.id,
            data_state=DATA_STATE_MAPPING,
        )

        # for ps in PropertyState.objects.filter(organization=self.org):
        #     print("%s -- %s -- %s" % (ps.lot_number, ps.import_file_id, ps.ubid))
        # pv = PropertyView.objects.get(state=ps, cycle=self.cycle)
        # TaxLotProperty.objects.filter()

        # for tl in TaxLotState.objects.filter(organization=self.org):
        #     print("%s -- %s" % (tl.import_file_id, tl.jurisdiction_tax_lot_id))

        # set import_file mapping done so that matching can occur.
        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)

        # for pv in PropertyView.objects.filter(state__organization=self.org):
        #     print("%s -- %s" % (pv.state.ubid, pv.cycle))

        # should have 4 PropertyViews and only 1 TaxLotView
        self.assertEqual(
            PropertyView.objects.filter(state__organization=self.org).count(),
            4)
        self.assertEqual(
            TaxLotView.objects.filter(state__organization=self.org).count(), 1)
        tlv = TaxLotView.objects.filter(state__organization=self.org).first()

        # there should be 4 TaxLotProperty relationships associated with the view, one for each of the properties defined
        self.assertEqual(
            TaxLotProperty.objects.filter(taxlot_view_id=tlv).count(), 4)

    def test_match_properties_and_taxlots_with_custom_id(self):
        # create an ImportFile for testing purposes. Seems like we would want to run this matching just on a
        # list of properties and taxlots.
        #
        # This emulates importing the following
        #   Custom ID 1,    Jurisdiction Tax Lot
        #   custom_100,     lot_1
        #   custom_101,     lot_1
        #   custom_110,     lot_1
        #   custom_111,     lot_1
        ids = [('custom_100', 'lot_1'), ('custom_101', 'lot_1'),
               ('custom_110', 'lot_1'), ('custom_111', 'lot_1')]
        for id in ids:
            self.property_state_factory.get_property_state(
                no_default_data=True,
                custom_id_1=id[0],
                lot_number=id[1],
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )

        self.taxlot_state_factory.get_taxlot_state(
            no_default_data=True,
            jurisdiction_tax_lot_id=ids[0][1],
            import_file_id=self.import_file.id,
            data_state=DATA_STATE_MAPPING,
        )

        # for ps in PropertyState.objects.filter(organization=self.org):
        #     print("%s -- %s -- %s" % (ps.lot_number, ps.import_file_id, ps.custom_id_1))
        # pv = PropertyView.objects.get(state=ps, cycle=self.cycle)
        # TaxLotProperty.objects.filter()

        # for tl in TaxLotState.objects.filter(organization=self.org):
        #     print("%s -- %s" % (tl.import_file_id, tl.jurisdiction_tax_lot_id))

        # set import_file mapping done so that matching can occur.
        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)

        # for pv in PropertyView.objects.filter(state__organization=self.org):
        #     print("%s -- %s" % (pv.state, pv.cycle))

        # should have 4 PropertyViews and only 1 TaxLotView
        self.assertEqual(
            PropertyView.objects.filter(state__organization=self.org).count(),
            4)
        self.assertEqual(
            TaxLotView.objects.filter(state__organization=self.org).count(), 1)
        tlv = TaxLotView.objects.filter(state__organization=self.org).first()

        # there should be 4 TaxLotProperty relationships associated with the view, one for each of the properties defined
        self.assertEqual(
            TaxLotProperty.objects.filter(taxlot_view_id=tlv).count(), 4)

    def test_save_state_match(self):
        # create a couple states to merge together
        ps_1 = self.property_state_factory.get_property_state(
            property_name="this should persist")
        ps_2 = self.property_state_factory.get_property_state(
            extra_data={"extra_1": "this should exist too"})

        priorities = Column.retrieve_priorities(self.org.pk)
        merged_state = save_state_match(ps_1, ps_2, priorities)

        self.assertEqual(merged_state.merge_state, MERGE_STATE_MERGED)
        self.assertEqual(merged_state.property_name, ps_1.property_name)
        self.assertEqual(merged_state.extra_data['extra_1'],
                         "this should exist too")

        # verify that the audit log is correct.
        pal = PropertyAuditLog.objects.get(organization=self.org,
                                           state=merged_state)
        self.assertEqual(pal.name, 'System Match')
        self.assertEqual(pal.parent_state1, ps_1)
        self.assertEqual(pal.parent_state2, ps_2)
        self.assertEqual(pal.description, 'Automatic Merge')

    def test_filter_duplicated_states(self):
        for i in range(10):
            self.property_state_factory.get_property_state(
                no_default_data=True,
                address_line_1='123 The Same Address',
                # extra_data={"extra_1": "value_%s" % i},
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )
        for i in range(5):
            self.property_state_factory.get_property_state(
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )

        props = self.import_file.find_unmatched_property_states()
        uniq_states, dup_states = filter_duplicated_states(props)

        # There should be 6 uniq states: 5 from the second loop, and one from 'The Same Address'
        self.assertEqual(len(uniq_states), 6)
        self.assertEqual(len(dup_states), 9)

    def test_match_and_merge_unmatched_objects_all_unique(self):
        # create some objects to match and merge
        partitioner = EquivalencePartitioner.make_default_state_equivalence(
            PropertyState)

        for i in range(10):
            self.property_state_factory.get_property_state(
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )

        props = self.import_file.find_unmatched_property_states()
        uniq_states, dup_states = filter_duplicated_states(props)
        merged, keys = match_and_merge_unmatched_objects(
            uniq_states, partitioner)

        self.assertEqual(len(merged), 10)

    def test_match_and_merge_unmatched_objects_with_duplicates(self):
        # create some objects to match and merge
        partitioner = EquivalencePartitioner.make_default_state_equivalence(
            PropertyState)

        for i in range(8):
            self.property_state_factory.get_property_state(
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )

        self.property_state_factory.get_property_state(
            no_default_data=True,
            extra_data={'moniker': '12345'},
            address_line_1='123 same address',
            site_eui=25,
            import_file_id=self.import_file.id,
            data_state=DATA_STATE_MAPPING,
        )

        self.property_state_factory.get_property_state(
            no_default_data=True,
            extra_data={'moniker': '12345'},
            address_line_1='123 same address',
            site_eui=150,
            import_file_id=self.import_file.id,
            data_state=DATA_STATE_MAPPING,
        )

        props = self.import_file.find_unmatched_property_states()
        uniq_states, dup_states = filter_duplicated_states(props)
        merged, keys = match_and_merge_unmatched_objects(
            uniq_states, partitioner)

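        # 10 states were created; the two '123 same address' states match on address and are merged
        # into one, leaving 9 merged results (and 9 equivalence keys).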
        self.assertEqual(len(merged), 9)
        self.assertEqual(len(keys), 9)

        # find the ps_cp_1 in the list of merged
        found = False
        for ps in merged:
            if ps.extra_data.get('moniker', None) == '12345':
                found = True
                self.assertEqual(ps.site_eui.magnitude,
                                 150)  # from the second record
        self.assertEqual(found, True)

    def test_match_and_merge_unmatched_objects_with_dates(self):
        # Make sure that the dates sort correctly! (only testing release_date, but it also sorts
        # on generation_date, then pk)

        partitioner = EquivalencePartitioner.make_default_state_equivalence(
            PropertyState)

        self.property_state_factory.get_property_state(
            no_default_data=True,
            address_line_1='123 same address',
            release_date=datetime.datetime(2010,
                                           1,
                                           1,
                                           1,
                                           1,
                                           tzinfo=tz.get_current_timezone()),
            site_eui=25,
            import_file_id=self.import_file.id,
            data_state=DATA_STATE_MAPPING,
        )

        self.property_state_factory.get_property_state(
            no_default_data=True,
            address_line_1='123 same address',
            release_date=datetime.datetime(2015,
                                           1,
                                           1,
                                           1,
                                           1,
                                           tzinfo=tz.get_current_timezone()),
            site_eui=150,
            import_file_id=self.import_file.id,
            data_state=DATA_STATE_MAPPING,
        )

        self.property_state_factory.get_property_state(
            no_default_data=True,
            address_line_1='123 same address',
            release_date=datetime.datetime(2005,
                                           1,
                                           1,
                                           1,
                                           1,
                                           tzinfo=tz.get_current_timezone()),
            site_eui=300,
            import_file_id=self.import_file.id,
            data_state=DATA_STATE_MAPPING,
        )

        props = self.import_file.find_unmatched_property_states()
        uniq_states, dup_states = filter_duplicated_states(props)
        merged, keys = match_and_merge_unmatched_objects(
            uniq_states, partitioner)

        found = False
        for ps in merged:
            found = True
            self.assertEqual(ps.site_eui.magnitude,
                             150)  # from the second record
        self.assertEqual(found, True)

    def test_merge_unmatched_into_views_no_matches(self):
        """It is very unlikely that any of these states will match since it is using faker."""
        for i in range(10):
            self.property_state_factory.get_property_state(
                import_file_id=self.import_file.id,
                data_state=DATA_STATE_MAPPING,
            )
Example #9
class TestProfileIdMixin(TestCase):
    """Test OrgMixin -- provides get_organization_id method"""

    def setUp(self):
        self.maxDiff = None
        user_details = {
            'username': '******',
            'password': '******',
        }
        self.user = User.objects.create_superuser(
            email='*****@*****.**', **user_details)
        self.org, self.org_user, _ = create_organization(self.user)
        self.cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(organization=self.org)
        self.column_list_factory = FakeColumnListSettingsFactory(organization=self.org)
        self.cycle = self.cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=timezone.get_current_timezone())
        )

        class ProfileIdMixInclass(ProfileIdMixin):
            pass

        self.mixin_class = ProfileIdMixInclass()

    def tearDown(self):
        PropertyView.objects.all().delete()
        self.user.delete()
        self.org.delete()
        self.org_user.delete()

    def test_get_profile_id(self):
        """test get_organization method"""
        state = self.property_state_factory.get_property_state(extra_data={"field_1": "value_1"})
        prprty = self.property_factory.get_property()
        PropertyView.objects.create(
            property=prprty, cycle=self.cycle, state=state
        )

        # save all the columns in the state to the database so we can setup column list settings
        Column.save_column_names(state)

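        # Passing None, a negative id, or a nonexistent profile id appears to fall back to showing
        # all columns, including the extra_data field.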
        columns = self.mixin_class.get_show_columns(self.org.id, None)
        self.assertGreater(len(columns['fields']), 10)
        self.assertListEqual(columns['extra_data'], ['field_1'])

        columns = self.mixin_class.get_show_columns(self.org.id, -1)
        self.assertGreater(len(columns['fields']), 10)
        self.assertListEqual(columns['extra_data'], ['field_1'])

        columns = self.mixin_class.get_show_columns(self.org.id, 1000000)
        self.assertGreater(len(columns['fields']), 10)
        self.assertListEqual(columns['extra_data'], ['field_1'])

        # no extra data
        columnlistsetting = self.column_list_factory.get_columnlistsettings(
            columns=['address_line_1', 'site_eui']
        )
        columns = self.mixin_class.get_show_columns(self.org.id, columnlistsetting.id)
        self.assertListEqual(columns['fields'], ['extra_data', 'id', 'address_line_1', 'site_eui'])
        self.assertListEqual(columns['extra_data'], [])

        # with extra data
        columnlistsetting = self.column_list_factory.get_columnlistsettings(
            columns=['address_line_1', 'site_eui', 'field_1']
        )
        columns = self.mixin_class.get_show_columns(self.org.id, columnlistsetting.id)
        self.assertListEqual(columns['fields'], ['extra_data', 'id', 'address_line_1', 'site_eui'])
        self.assertListEqual(columns['extra_data'], ['field_1'])
Example #10
class TestMatchingImportIntegration(DataMappingBaseTestCase):
    def setUp(self):
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file_1, self.import_record_1, self.cycle = selfvars

        self.import_record_2, self.import_file_2 = self.create_import_file(
            self.user, self.org, self.cycle)

        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)

    def test_properties(self):
        # Define matching values
        matching_pm_property_id = '11111'
        matching_address_line_1 = '123 Match Street'
        matching_ubid = '86HJPCWQ+2VV-1-3-2-3'
        matching_custom_id_1 = 'MatchingID12345'

        # For first file, create properties with no duplicates or matches
        base_details_file_1 = {
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }

        # No matching_criteria values
        self.property_state_factory.get_property_state(**base_details_file_1)

        # Build out properties with increasingly more matching_criteria values
        base_details_file_1['pm_property_id'] = matching_pm_property_id
        self.property_state_factory.get_property_state(**base_details_file_1)
        base_details_file_1['address_line_1'] = matching_address_line_1
        self.property_state_factory.get_property_state(**base_details_file_1)
        base_details_file_1['ubid'] = matching_ubid
        self.property_state_factory.get_property_state(**base_details_file_1)
        base_details_file_1['custom_id_1'] = matching_custom_id_1
        self.property_state_factory.get_property_state(**base_details_file_1)

        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)

        # Verify no duplicates/matched-merges yet
        counts = [
            Property.objects.count(),
            PropertyState.objects.count(),
            PropertyView.objects.count(),
        ]
        self.assertEqual([5, 5, 5], counts)
        """
        For second file, create several properties that are one or many of the following:
            - duplicates amongst file_1
            - duplicates amongst file_2
            - matching amongst file_1
            - matching amongst file_2
            - completely new
        """
        base_details_file_2 = {
            'import_file_id': self.import_file_2.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }

        # Create 1 duplicate of the 'No matching_criteria values' properties
        # (outcome: 1 additional -States, NO new Property/-View)
        ps_1 = self.property_state_factory.get_property_state(
            **base_details_file_2)

        # Create a non-duplicate property also having no matching criteria values
        # (outcome: 1 additional -States, 1 new Property/-View)
        base_details_file_2['postal_code'] = '01234'
        ps_2 = self.property_state_factory.get_property_state(
            **base_details_file_2)

        # Create 2 completely new properties with misaligned combinations of matching values
        # (outcome: 2 additional -States, 2 new Property/-View)
        base_details_file_2['custom_id_1'] = matching_custom_id_1
        ps_3 = self.property_state_factory.get_property_state(
            **base_details_file_2)
        base_details_file_2['ubid'] = matching_ubid
        ps_4 = self.property_state_factory.get_property_state(
            **base_details_file_2)

        # Create 3 properties - with 1 duplicate and 1 match within its own file that will
        # eventually become 1 completely new property
        # (outcome: 4 additional -States, 1 new Property/-View)
        base_details_file_2['address_line_1'] = matching_address_line_1
        base_details_file_2['city'] = 'Denver'
        ps_5 = self.property_state_factory.get_property_state(
            **base_details_file_2)
        ps_6 = self.property_state_factory.get_property_state(
            **base_details_file_2)
        base_details_file_2['city'] = 'Golden'
        ps_7 = self.property_state_factory.get_property_state(
            **base_details_file_2)

        # Create 3 properties - with 1 duplicate and 1 match within its own file that will
        # eventually match the last property in file_1
        # (outcome: 5 additional -States, NO new Property/-View)
        base_details_file_2['pm_property_id'] = matching_pm_property_id
        base_details_file_2['state'] = 'Colorado'
        ps_8 = self.property_state_factory.get_property_state(
            **base_details_file_2)
        ps_9 = self.property_state_factory.get_property_state(
            **base_details_file_2)
        base_details_file_2['state'] = 'California'
        ps_10 = self.property_state_factory.get_property_state(
            **base_details_file_2)

        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        match_buildings(self.import_file_2.id)

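        # Expected totals: 5 Properties/-Views from file 1 plus 4 new from file 2 (ps_2, ps_3, ps_4,
        # and the merged ps_5/ps_7 result); 15 imported states plus 3 merge-result states = 18.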
        self.assertEqual(9, Property.objects.count())
        self.assertEqual(9, PropertyView.objects.count())
        self.assertEqual(18, PropertyState.objects.count())

        ps_ids_of_deleted = PropertyState.objects.filter(
            data_state=DATA_STATE_DELETE).values_list('id',
                                                      flat=True).order_by('id')
        self.assertEqual([ps_1.id, ps_6.id, ps_9.id], list(ps_ids_of_deleted))

        ps_ids_of_merged_in_file = PropertyState.objects.filter(
            data_state=DATA_STATE_MAPPING,
            merge_state=MERGE_STATE_UNKNOWN).values_list(
                'id', flat=True).order_by('id')
        self.assertEqual([ps_5.id, ps_7.id, ps_8.id, ps_10.id],
                         list(ps_ids_of_merged_in_file))

        ps_ids_of_all_promoted = PropertyView.objects.values_list('state_id',
                                                                  flat=True)
        self.assertIn(ps_2.id, ps_ids_of_all_promoted)
        self.assertIn(ps_3.id, ps_ids_of_all_promoted)
        self.assertIn(ps_4.id, ps_ids_of_all_promoted)

        rimport_file_2 = ImportFile.objects.get(pk=self.import_file_2.id)
        results = rimport_file_2.matching_results_data
        del results['progress_key']

        expected = {
            'import_file_records':
            None,  # This is calculated in a separate process
            'property_all_unmatched': 10,
            'property_duplicates': 2,
            'property_duplicates_of_existing': 1,
            'property_unmatched': 4,
            'tax_lot_all_unmatched': 0,
            'tax_lot_duplicates': 0,
            'tax_lot_duplicates_of_existing': 0,
            'tax_lot_unmatched': 0,
        }
        self.assertEqual(results, expected)

    def test_taxlots(self):
        # Define matching values
        matching_jurisdiction_tax_lot_id = '11111'
        matching_address_line_1 = '123 Match Street'
        matching_ulid = '86HJPCWQ+2VV-1-3-2-3'
        matching_custom_id_1 = 'MatchingID12345'

        # For first file, create taxlots with no duplicates or matches
        base_details_file_1 = {
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }

        # No matching_criteria values
        self.taxlot_state_factory.get_taxlot_state(**base_details_file_1)

        # Build out taxlots with increasingly more matching_criteria values
        base_details_file_1[
            'jurisdiction_tax_lot_id'] = matching_jurisdiction_tax_lot_id
        self.taxlot_state_factory.get_taxlot_state(**base_details_file_1)
        base_details_file_1['address_line_1'] = matching_address_line_1
        self.taxlot_state_factory.get_taxlot_state(**base_details_file_1)
        base_details_file_1['ulid'] = matching_ulid
        self.taxlot_state_factory.get_taxlot_state(**base_details_file_1)
        base_details_file_1['custom_id_1'] = matching_custom_id_1
        self.taxlot_state_factory.get_taxlot_state(**base_details_file_1)

        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)

        # Verify no duplicates/matched-merges yet
        counts = [
            TaxLot.objects.count(),
            TaxLotState.objects.count(),
            TaxLotView.objects.count(),
        ]
        self.assertEqual([5, 5, 5], counts)
        """
        For second file, create several taxlots that are one or many of the following:
            - duplicates amongst file_1
            - duplicates amongst file_2
            - matching amongst file_1
            - matching amongst file_2
            - completely new
        """
        base_details_file_2 = {
            'import_file_id': self.import_file_2.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }

        # Create 2 duplicates of the 'No matching_criteria values' taxlots
        # (outcome: 2 additional -States, NO new TaxLot/-View)
        tls_1 = self.taxlot_state_factory.get_taxlot_state(
            **base_details_file_2)
        tls_2 = self.taxlot_state_factory.get_taxlot_state(
            **base_details_file_2)

        # Create 2 completely new taxlots with misaligned combinations of matching values
        # (outcome: 2 additional -States, 2 new TaxLot/-View)
        base_details_file_2['custom_id_1'] = matching_custom_id_1
        tls_3 = self.taxlot_state_factory.get_taxlot_state(
            **base_details_file_2)
        base_details_file_2['ulid'] = matching_ulid
        tls_4 = self.taxlot_state_factory.get_taxlot_state(
            **base_details_file_2)

        # Create 3 taxlots - with 1 duplicate and 1 match within its own file that will
        # eventually become 1 completely new taxlot
        # (outcome: 4 additional -States, 1 new TaxLot/-View)
        base_details_file_2['address_line_1'] = matching_address_line_1
        base_details_file_2['city'] = 'Denver'
        tls_5 = self.taxlot_state_factory.get_taxlot_state(
            **base_details_file_2)
        tls_6 = self.taxlot_state_factory.get_taxlot_state(
            **base_details_file_2)
        base_details_file_2['city'] = 'Golden'
        tls_7 = self.taxlot_state_factory.get_taxlot_state(
            **base_details_file_2)

        # Create 3 taxlots - with 1 duplicate and 1 match within its own file that will
        # eventually match the last taxlot in file_1
        # (outcome: 5 additional -States, NO new TaxLot/-View)
        base_details_file_2[
            'jurisdiction_tax_lot_id'] = matching_jurisdiction_tax_lot_id
        base_details_file_2['state'] = 'Colorado'
        tls_8 = self.taxlot_state_factory.get_taxlot_state(
            **base_details_file_2)
        tls_9 = self.taxlot_state_factory.get_taxlot_state(
            **base_details_file_2)
        base_details_file_2['state'] = 'California'
        tls_10 = self.taxlot_state_factory.get_taxlot_state(
            **base_details_file_2)

        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        match_buildings(self.import_file_2.id)

        self.assertEqual(8, TaxLot.objects.count())
        self.assertEqual(8, TaxLotView.objects.count())
        self.assertEqual(18, TaxLotState.objects.count())

        tls_ids_of_deleted = TaxLotState.objects.filter(
            data_state=DATA_STATE_DELETE).values_list('id',
                                                      flat=True).order_by('id')
        self.assertEqual([tls_1.id, tls_2.id, tls_6.id, tls_9.id],
                         list(tls_ids_of_deleted))

        tls_ids_of_merged_in_file = TaxLotState.objects.filter(
            data_state=DATA_STATE_MAPPING,
            merge_state=MERGE_STATE_UNKNOWN).values_list(
                'id', flat=True).order_by('id')
        self.assertEqual([tls_5.id, tls_7.id, tls_8.id, tls_10.id],
                         list(tls_ids_of_merged_in_file))

        tls_ids_of_all_promoted = TaxLotView.objects.values_list('state_id',
                                                                 flat=True)
        self.assertIn(tls_3.id, tls_ids_of_all_promoted)
        self.assertIn(tls_4.id, tls_ids_of_all_promoted)

        rimport_file_2 = ImportFile.objects.get(pk=self.import_file_2.id)
        results = rimport_file_2.matching_results_data
        del results['progress_key']

        expected = {
            'import_file_records':
            None,  # This is calculated in a separate process
            'property_all_unmatched': 0,
            'property_duplicates': 0,
            'property_duplicates_of_existing': 0,
            'property_unmatched': 0,
            'tax_lot_all_unmatched': 10,
            'tax_lot_duplicates': 3,
            'tax_lot_duplicates_of_existing': 1,
            'tax_lot_unmatched': 3,
        }
        self.assertEqual(results, expected)
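
A rough illustration of how the tallies asserted in matching_results_data could be derived from a batch of incoming states. This is a standalone sketch, not SEED's matching code; tally_incoming, its criteria tuple, and the sample dicts are assumptions made up for the example.

from collections import Counter

def tally_incoming(existing, incoming,
                   criteria=('jurisdiction_tax_lot_id', 'address_line_1', 'ulid', 'custom_id_1')):
    """Classify incoming state dicts against already-promoted state dicts."""
    existing_full = {tuple(sorted(s.items())) for s in existing}
    existing_keys = {tuple(s.get(c) for c in criteria) for s in existing}
    counts = Counter()
    for state in incoming:
        if tuple(sorted(state.items())) in existing_full:
            counts['duplicates_of_existing'] += 1      # identical record, flagged for deletion
        elif tuple(state.get(c) for c in criteria) in existing_keys:
            counts['matched_existing'] += 1            # same matching criteria, would merge
        else:
            counts['unmatched'] += 1                   # promoted as a completely new record
    return counts

if __name__ == '__main__':
    existing = [{'jurisdiction_tax_lot_id': '11111', 'city': 'Boulder'}]
    incoming = [
        {'jurisdiction_tax_lot_id': '11111', 'city': 'Boulder'},  # exact duplicate
        {'jurisdiction_tax_lot_id': '11111', 'city': 'Golden'},   # matches on criteria only
        {'jurisdiction_tax_lot_id': '22222', 'city': 'Denver'},   # completely new
    ]
    print(tally_incoming(existing, incoming))
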
Example #11
class DataQualityCheckTests(DataMappingBaseTestCase):
    def setUp(self):
        selfvars = self.set_up(ASSESSED_RAW)

        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars

        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.property_view_factory = FakePropertyViewFactory(
            organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)

    def test_default_create(self):
        dq = DataQualityCheck.retrieve(self.org.id)
        self.assertEqual(dq.rules.count(), 22)
        # Example rule to check
        ex_rule = {
            'table_name': 'PropertyState',
            'field': 'conditioned_floor_area',
            'data_type': Rule.TYPE_AREA,
            'rule_type': Rule.RULE_TYPE_DEFAULT,
            'min': 0,
            'max': 7000000,
            'severity': Rule.SEVERITY_ERROR,
            'units': 'ft**2',
        }

        rule = Rule.objects.filter(table_name='PropertyState',
                                   field='conditioned_floor_area',
                                   severity=Rule.SEVERITY_ERROR)
        self.assertDictContainsSubset(ex_rule, model_to_dict(rule.first()))

    def test_remove_rules(self):
        dq = DataQualityCheck.retrieve(self.org.id)
        self.assertEqual(dq.rules.count(), 22)
        dq.remove_all_rules()
        self.assertEqual(dq.rules.count(), 0)

    def test_add_custom_rule(self):
        dq = DataQualityCheck.retrieve(self.org.id)
        dq.remove_all_rules()

        ex_rule = {
            'table_name': 'PropertyState',
            'field': 'some_floor_area',
            'data_type': Rule.TYPE_AREA,
            'rule_type': Rule.RULE_TYPE_DEFAULT,
            'min': 8760,
            'max': 525600,
            'severity': Rule.SEVERITY_ERROR,
            'units': 'm**2',
        }

        dq.add_rule(ex_rule)
        self.assertEqual(dq.rules.count(), 1)
        self.assertDictContainsSubset(ex_rule, model_to_dict(dq.rules.first()))

    def test_add_custom_rule_exception(self):
        dq = DataQualityCheck.retrieve(self.org.id)
        dq.remove_all_rules()

        ex_rule = {
            'table_name_does_not_exist': 'PropertyState',
        }

        with self.assertRaises(Exception) as exc:
            dq.add_rule(ex_rule)
        self.assertEqual(
            str(exc.exception),
            "Rule data is not defined correctly: 'table_name_does_not_exist' is an invalid keyword argument for this function"
        )

    def test_check_property_state_example_data(self):
        dq = DataQualityCheck.retrieve(self.org.id)
        ps_data = {
            'no_default_data': True,
            'custom_id_1': 'abcd',
            'address_line_1': '742 Evergreen Terrace',
            'pm_property_id': 'PMID',
            'site_eui': 525600,
        }
        ps = self.property_state_factory.get_property_state(None, **ps_data)

        dq.check_data(ps.__class__.__name__, [ps])

        # {
        #   11: {
        #           'id': 11,
        #           'custom_id_1': 'abcd',
        #           'pm_property_id': 'PMID',
        #           'address_line_1': '742 Evergreen Terrace',
        #           'data_quality_results': [
        #               {
        #                  'severity': 'error', 'value': '525600', 'field': 'site_eui', 'table_name': 'PropertyState', 'message': 'Site EUI out of range', 'detailed_message': 'Site EUI [525600] > 1000', 'formatted_field': 'Site EUI'
        #               }
        #           ]
        #       }
        # }
        error_found = False
        for index, row in dq.results.items():
            self.assertEqual(row['custom_id_1'], 'abcd')
            self.assertEqual(row['pm_property_id'], 'PMID')
            self.assertEqual(row['address_line_1'], '742 Evergreen Terrace')
            for violation in row['data_quality_results']:
                if violation['message'] == 'Site EUI out of range':
                    error_found = True
                    self.assertEqual(violation['detailed_message'],
                                     'Site EUI [525600] > 1000')

        self.assertEqual(error_found, True)

    def test_text_match(self):
        dq = DataQualityCheck.retrieve(self.org.id)
        dq.remove_all_rules()
        new_rule = {
            'table_name': 'PropertyState',
            'field': 'address_line_1',
            'data_type': Rule.TYPE_STRING,
            'rule_type': Rule.RULE_TYPE_DEFAULT,
            'severity': Rule.SEVERITY_ERROR,
            'not_null': True,
            'text_match': 742,
        }
        dq.add_rule(new_rule)
        ps_data = {
            'no_default_data': True,
            'custom_id_1': 'abcd',
            'address_line_1': '742 Evergreen Terrace',
            'pm_property_id': 'PMID',
            'site_eui': 525600,
        }
        ps = self.property_state_factory.get_property_state(None, **ps_data)
        dq.check_data(ps.__class__.__name__, [ps])
        self.assertEqual(dq.results, {})

    def test_str_to_data_type_string(self):
        rule = Rule.objects.create(name='str_rule', data_type=Rule.TYPE_STRING)
        self.assertEqual(rule.str_to_data_type(' '), '')
        self.assertEqual(rule.str_to_data_type(None), None)
        self.assertEqual(rule.str_to_data_type(27.5), 27.5)

    def test_str_to_data_type_float(self):
        rule = Rule.objects.create(name='flt_rule', data_type=Rule.TYPE_NUMBER)
        self.assertEqual(rule.str_to_data_type('   '), None)
        self.assertEqual(rule.str_to_data_type(None), None)
        self.assertEqual(rule.str_to_data_type(27.5), 27.5)
        with self.assertRaises(DataQualityTypeCastError):
            self.assertEqual(rule.str_to_data_type('not-a-number'), '')

    def test_str_to_data_type_date(self):
        rule = Rule.objects.create(name='date_rule', data_type=Rule.TYPE_DATE)
        d = rule.str_to_data_type('07/04/2000 08:55:30')
        self.assertEqual(d.strftime("%Y-%m-%d %H  %M  %S"),
                         '2000-07-04 08  55  30')
        self.assertEqual(rule.str_to_data_type(None), None)
        self.assertEqual(rule.str_to_data_type(27.5),
                         27.5)  # floats should return float

    def test_str_to_data_type_datetime(self):
        rule = Rule.objects.create(name='year_rule', data_type=Rule.TYPE_YEAR)
        d = rule.str_to_data_type('07/04/2000')
        self.assertEqual(d.strftime("%Y-%m-%d"), '2000-07-04')
        self.assertEqual(rule.str_to_data_type(None), None)
        self.assertEqual(rule.str_to_data_type(27.5),
                         27.5)  # floats should return float
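
The str_to_data_type tests above pin down how raw values are coerced per rule data type. Below is a minimal standalone sketch of that kind of coercion, assuming only the behavior implied by the assertions (blank strings collapse, non-strings pass through unchanged, unparseable numbers raise); coerce is a hypothetical helper, not the Rule model's actual method.

from datetime import datetime

def coerce(value, data_type):
    """Coerce a raw string toward data_type; non-strings pass through unchanged."""
    if value is None or not isinstance(value, str):
        return value
    stripped = value.strip()
    if data_type == 'string':
        return stripped
    if data_type == 'number':
        if stripped == '':
            return None
        return float(stripped)  # raises ValueError for inputs like 'not-a-number'
    if data_type in ('date', 'year'):
        if stripped == '':
            return None
        for fmt in ('%m/%d/%Y %H:%M:%S', '%m/%d/%Y'):
            try:
                return datetime.strptime(stripped, fmt)
            except ValueError:
                continue
        raise ValueError('unparseable date: %r' % value)
    return value

print(coerce('07/04/2000 08:55:30', 'date'))  # 2000-07-04 08:55:30
print(coerce(27.5, 'number'))                 # 27.5 -- floats pass through
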
Example #12
class PropertyMergeViewTests(DeleteModelsTestCase):
    def setUp(self):
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.user = User.objects.create_superuser(**user_details)
        self.org, self.org_user, _ = create_organization(self.user)

        cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)

        self.cycle = cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=get_current_timezone()))
        self.client.login(**user_details)

        self.state_1 = self.property_state_factory.get_property_state(
            address_line_1='1 property state',
            pm_property_id=
            '5766973'  # this allows the Property to be targeted for PM meter additions
        )
        self.property_1 = self.property_factory.get_property()
        PropertyView.objects.create(property=self.property_1,
                                    cycle=self.cycle,
                                    state=self.state_1)

        self.state_2 = self.property_state_factory.get_property_state(
            address_line_1='2 property state')
        self.property_2 = self.property_factory.get_property()
        PropertyView.objects.create(property=self.property_2,
                                    cycle=self.cycle,
                                    state=self.state_2)

        self.import_record = ImportRecord.objects.create(
            owner=self.user,
            last_modified_by=self.user,
            super_organization=self.org)

    def test_properties_merge_without_losing_meters_1st_has_meters(self):
        # Assign meters to the first Property
        filename = "example-GreenButton-data.xml"
        filepath = os.path.dirname(
            os.path.abspath(__file__)) + "/data/" + filename
        import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="GreenButton",
            uploaded_filename=filename,
            file=SimpleUploadedFile(name=filename,
                                    content=open(filepath, 'rb').read()),
            cycle=self.cycle,
            matching_results_data={
                "property_id": self.property_1.id
            }  # this is how target property is specified
        )
        gb_import_url = reverse("api:v2:import_files-save-raw-data",
                                args=[import_file.id])
        gb_import_post_params = {
            'cycle_id': self.cycle.pk,
            'organization_id': self.org.pk,
        }
        self.client.post(gb_import_url, gb_import_post_params)

        # Merge PropertyStates
        url = reverse('api:v2:properties-merge'
                      ) + '?organization_id={}'.format(self.org.pk)
        post_params = json.dumps(
            {'state_ids': [self.state_2.pk, self.state_1.pk]})
        self.client.post(url, post_params, content_type='application/json')

        # There should only be one PropertyView
        self.assertEqual(PropertyView.objects.count(), 1)

        self.assertEqual(PropertyView.objects.first().property.meters.count(),
                         1)
        self.assertEqual(
            PropertyView.objects.first().property.meters.first().
            meter_readings.count(), 2)

    def test_properties_merge_without_losing_meters_2nd_has_meters(self):
        # Assign Meters to the second Property
        filename = "example-GreenButton-data.xml"
        filepath = os.path.dirname(
            os.path.abspath(__file__)) + "/data/" + filename
        import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="GreenButton",
            uploaded_filename=filename,
            file=SimpleUploadedFile(name=filename,
                                    content=open(filepath, 'rb').read()),
            cycle=self.cycle,
            matching_results_data={
                "property_id": self.property_2.id
            }  # this is how target property is specified
        )
        gb_import_url = reverse("api:v2:import_files-save-raw-data",
                                args=[import_file.id])
        gb_import_post_params = {
            'cycle_id': self.cycle.pk,
            'organization_id': self.org.pk,
        }
        self.client.post(gb_import_url, gb_import_post_params)

        # Merge PropertyStates
        url = reverse('api:v2:properties-merge'
                      ) + '?organization_id={}'.format(self.org.pk)
        post_params = json.dumps(
            {'state_ids': [self.state_2.pk, self.state_1.pk]})
        self.client.post(url, post_params, content_type='application/json')

        # There should only be one PropertyView
        self.assertEqual(PropertyView.objects.count(), 1)

        self.assertEqual(PropertyView.objects.first().property.meters.count(),
                         1)
        self.assertEqual(
            PropertyView.objects.first().property.meters.first().
            meter_readings.count(), 2)

    def test_properties_merge_without_losing_meters_from_different_sources_nonoverlapping(
            self):
        # For the first Property, add PM Meters containing 2 readings each for Electricity and Natural Gas
        # This file has multiple tabs
        pm_filename = "example-pm-monthly-meter-usage.xlsx"
        filepath = os.path.dirname(
            os.path.abspath(__file__)) + "/data/" + pm_filename
        pm_import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="PM Meter Usage",
            uploaded_filename=pm_filename,
            file=SimpleUploadedFile(name=pm_filename,
                                    content=open(filepath, 'rb').read()),
            cycle=self.cycle,
        )
        pm_import_url = reverse("api:v2:import_files-save-raw-data",
                                args=[pm_import_file.id])
        pm_import_post_params = {
            'cycle_id': self.cycle.pk,
            'organization_id': self.org.pk,
        }
        self.client.post(pm_import_url, pm_import_post_params)

        # For the second Property, add GreenButton Meters containing 2 readings for Electricity only
        gb_filename = "example-GreenButton-data.xml"
        filepath = os.path.dirname(
            os.path.abspath(__file__)) + "/data/" + gb_filename
        gb_import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="GreenButton",
            uploaded_filename=gb_filename,
            file=SimpleUploadedFile(name=gb_filename,
                                    content=open(filepath, 'rb').read()),
            cycle=self.cycle,
            matching_results_data={
                "property_id": self.property_2.id
            }  # this is how target property is specified
        )
        gb_import_url = reverse("api:v2:import_files-save-raw-data",
                                args=[gb_import_file.id])
        gb_import_post_params = {
            'cycle_id': self.cycle.pk,
            'organization_id': self.org.pk,
        }
        self.client.post(gb_import_url, gb_import_post_params)

        # Merge PropertyStates
        url = reverse('api:v2:properties-merge'
                      ) + '?organization_id={}'.format(self.org.pk)
        post_params = json.dumps({
            'state_ids': [self.state_2.pk,
                          self.state_1.pk]  # priority given to state_1
        })
        self.client.post(url, post_params, content_type='application/json')

        # There should only be one PropertyView
        self.assertEqual(PropertyView.objects.count(), 1)

        # The Property of the (only) -View has all of the Meters now.
        meters = PropertyView.objects.first().property.meters

        self.assertEqual(meters.count(), 3)
        self.assertEqual(
            meters.get(type=Meter.ELECTRICITY_GRID,
                       source=Meter.GREENBUTTON).meter_readings.count(), 2)
        self.assertEqual(
            meters.get(type=Meter.ELECTRICITY_GRID,
                       source=Meter.PORTFOLIO_MANAGER).meter_readings.count(),
            2)
        self.assertEqual(
            meters.get(type=Meter.NATURAL_GAS).meter_readings.count(), 2)

        # Old meters deleted, so only merged meters exist
        self.assertEqual(Meter.objects.count(), 3)
        self.assertEqual(MeterReading.objects.count(), 6)

    def test_properties_merge_without_losing_meters_when_some_meters_from_same_source_are_overlapping(
            self):
        # For the first Property, add GreenButton Meters containing 2 readings for Electricity only
        gb_filename = "example-GreenButton-data.xml"
        filepath = os.path.dirname(
            os.path.abspath(__file__)) + "/data/" + gb_filename
        gb_import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="GreenButton",
            uploaded_filename=gb_filename,
            file=SimpleUploadedFile(name=gb_filename,
                                    content=open(filepath, 'rb').read()),
            cycle=self.cycle,
            matching_results_data={
                "property_id": self.property_1.id
            }  # this is how target property is specified
        )
        gb_import_url = reverse("api:v2:import_files-save-raw-data",
                                args=[gb_import_file.id])
        gb_import_post_params = {
            'cycle_id': self.cycle.pk,
            'organization_id': self.org.pk,
        }
        self.client.post(gb_import_url, gb_import_post_params)

        # For the second Property, add GreenButton Meters containing 2 Electricity readings: 1 overlapping
        gb_overlapping_filename = "example-GreenButton-data-1-overlapping.xml"
        filepath = os.path.dirname(
            os.path.abspath(__file__)) + "/data/" + gb_overlapping_filename
        gb_overlapping_import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="GreenButton",
            uploaded_filename=gb_overlapping_filename,
            file=SimpleUploadedFile(name=gb_overlapping_filename,
                                    content=open(filepath, 'rb').read()),
            cycle=self.cycle,
            matching_results_data={
                "property_id": self.property_2.id
            }  # this is how target property is specified
        )
        gb_overlapping_import_url = reverse(
            "api:v2:import_files-save-raw-data",
            args=[gb_overlapping_import_file.id])
        gb_overlapping_import_post_params = {
            'cycle_id': self.cycle.pk,
            'organization_id': self.org.pk,
        }
        self.client.post(gb_overlapping_import_url,
                         gb_overlapping_import_post_params)

        # Check that there are 2 overlapping readings (that are separate for now) out of 4.
        self.assertEqual(MeterReading.objects.count(), 4)
        tz_obj = timezone(TIME_ZONE)
        start_time_match = make_aware(datetime(2011, 3, 5, 21, 15, 0),
                                      timezone=tz_obj)
        end_time_match = make_aware(datetime(2011, 3, 5, 21, 30, 0),
                                    timezone=tz_obj)
        same_time_windows = MeterReading.objects.filter(
            start_time=start_time_match, end_time=end_time_match)
        self.assertEqual(same_time_windows.count(), 2)

        # Capture the overlapping reading of property_1, and ensure it's different from property_2's
        priority_property_id = self.property_1.meters.first().id
        property_1_reading = same_time_windows.get(
            meter_id=priority_property_id).reading
        property_2_reading = same_time_windows.exclude(
            meter_id=priority_property_id).get().reading
        self.assertNotEqual(property_1_reading, property_2_reading)

        # Merge PropertyStates
        url = reverse('api:v2:properties-merge'
                      ) + '?organization_id={}'.format(self.org.pk)
        post_params = json.dumps({
            'state_ids': [self.state_2.pk,
                          self.state_1.pk]  # priority given to state_1
        })
        self.client.post(url, post_params, content_type='application/json')

        # There should only be one PropertyView
        self.assertEqual(PropertyView.objects.count(), 1)

        # The Property of the (only) -View has all of the Meters now.
        meters = PropertyView.objects.first().property.meters
        self.assertEqual(meters.count(), 1)
        self.assertEqual(meters.first().meter_readings.count(), 3)

        # Old meters deleted, so only merged meters exist
        self.assertEqual(Meter.objects.count(), 1)
        self.assertEqual(MeterReading.objects.count(), 3)

        # Check that the resulting reading used belonged to property_1
        merged_reading = MeterReading.objects.filter(
            start_time=start_time_match, end_time=end_time_match)
        self.assertEqual(merged_reading.count(), 1)
        self.assertEqual(merged_reading.first().reading, property_1_reading)

        # Overlapping reading that wasn't prioritized should not exist
        self.assertFalse(
            MeterReading.objects.filter(reading=property_2_reading).exists())
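
The overlapping-readings test above expects the reading from the priority property (property_1) to survive the merge while the other property's overlapping reading is discarded. A simplified, self-contained sketch of that resolution, keying readings by time window; merge_readings and the literal timestamps/values are assumptions for illustration, not SEED's meter-merge code.

def merge_readings(priority_readings, other_readings):
    """Combine readings keyed by (start_time, end_time); the priority meter wins overlaps."""
    merged = dict(other_readings)     # start with the lower-priority readings
    merged.update(priority_readings)  # overlapping windows take the priority value
    return merged

readings_1 = {('2011-03-05 21:15', '2011-03-05 21:30'): 100.0,
              ('2011-03-05 21:30', '2011-03-05 21:45'): 105.0}
readings_2 = {('2011-03-05 21:15', '2011-03-05 21:30'): 999.0,  # overlaps the first window
              ('2011-03-05 21:45', '2011-03-05 22:00'): 110.0}

merged = merge_readings(readings_1, readings_2)
assert len(merged) == 3
assert merged[('2011-03-05 21:15', '2011-03-05 21:30')] == 100.0  # priority reading kept
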
Example #13
class TestMatchingInImportFile(DataMappingBaseTestCase):
    def setUp(self):
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars

        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)

    def test_duplicate_properties_identified(self):
        base_details = {
            'address_line_1': '123 Match Street',
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create pair of properties that are exact duplicates
        self.property_state_factory.get_property_state(**base_details)
        self.property_state_factory.get_property_state(**base_details)

        # Create a non-matching, non-duplicate property
        base_details['address_line_1'] = '123 Different Ave'
        base_details['city'] = 'Denver'
        self.property_state_factory.get_property_state(**base_details)

        # set import_file mapping done so that matching can occur.
        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)

        # 2 Property, 2 PropertyViews, 3 PropertyState (1 flagged to be ignored)
        self.assertEqual(Property.objects.count(), 2)
        self.assertEqual(PropertyView.objects.count(), 2)
        self.assertEqual(PropertyState.objects.count(), 3)
        self.assertEqual(
            PropertyState.objects.filter(data_state=DATA_STATE_DELETE).count(),
            1)

        # Make sure "deleted" -States are not found in the -Views
        deleted = PropertyState.objects.get(data_state=DATA_STATE_DELETE)
        self.assertNotIn(
            deleted.id, PropertyView.objects.values_list('state_id',
                                                         flat=True))

    def test_duplicate_taxlots_identified(self):
        base_details = {
            'address_line_1': '123 Match Street',
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create pair of taxlots that are exact duplicates
        self.taxlot_state_factory.get_taxlot_state(**base_details)
        self.taxlot_state_factory.get_taxlot_state(**base_details)

        # Create a non-matching, non-duplicate taxlot
        base_details['address_line_1'] = '123 Different Ave'
        base_details['city'] = 'Denver'
        self.taxlot_state_factory.get_taxlot_state(**base_details)

        # set import_file mapping done so that matching can occur.
        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)

        # 2 TaxLot, 2 TaxLotViews, 3 TaxLotState (1 flagged to be ignored)
        self.assertEqual(TaxLot.objects.count(), 2)
        self.assertEqual(TaxLotView.objects.count(), 2)
        self.assertEqual(TaxLotState.objects.count(), 3)
        self.assertEqual(
            TaxLotState.objects.filter(data_state=DATA_STATE_DELETE).count(),
            1)

        # Make sure "deleted" -States are not found in the -Views
        deleted = TaxLotState.objects.get(data_state=DATA_STATE_DELETE)
        self.assertNotIn(deleted.id,
                         TaxLotView.objects.values_list('state_id', flat=True))

    def test_match_properties_if_all_default_fields_match(self):
        base_details = {
            'address_line_1': '123 Match Street',
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create first set of properties that match each other
        ps_1 = self.property_state_factory.get_property_state(**base_details)
        base_details['city'] = 'Denver'
        ps_2 = self.property_state_factory.get_property_state(**base_details)

        # Create second set of properties that match each other
        base_details['pm_property_id'] = '11111'
        ps_3 = self.property_state_factory.get_property_state(**base_details)
        base_details['city'] = 'Philadelphia'
        ps_4 = self.property_state_factory.get_property_state(**base_details)

        # Create unmatched property
        base_details['pm_property_id'] = '000'
        ps_5 = self.property_state_factory.get_property_state(**base_details)

        # set import_file mapping done so that matching can occur.
        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)

        # 3 Property, 3 PropertyViews, 7 PropertyStates (5 imported, 2 merge results)
        self.assertEqual(Property.objects.count(), 3)
        self.assertEqual(PropertyView.objects.count(), 3)
        self.assertEqual(PropertyState.objects.count(), 7)

        # Refresh -States and check data_state and merge_state values
        rps_1 = PropertyState.objects.get(pk=ps_1.id)
        self.assertEqual(rps_1.data_state, DATA_STATE_MAPPING)
        self.assertEqual(rps_1.merge_state, MERGE_STATE_UNKNOWN)

        rps_2 = PropertyState.objects.get(pk=ps_2.id)
        self.assertEqual(rps_2.data_state, DATA_STATE_MAPPING)
        self.assertEqual(rps_2.merge_state, MERGE_STATE_UNKNOWN)

        ps_1_plus_2 = PropertyState.objects.filter(
            pm_property_id__isnull=True,
            city='Denver',
            address_line_1='123 Match Street').exclude(
                data_state=DATA_STATE_MAPPING,
                merge_state=MERGE_STATE_UNKNOWN).get()

        self.assertEqual(ps_1_plus_2.data_state, DATA_STATE_MATCHING)
        self.assertEqual(ps_1_plus_2.merge_state, MERGE_STATE_MERGED)

        rps_3 = PropertyState.objects.get(pk=ps_3.id)
        self.assertEqual(rps_3.data_state, DATA_STATE_MAPPING)
        self.assertEqual(rps_3.merge_state, MERGE_STATE_UNKNOWN)

        rps_4 = PropertyState.objects.get(pk=ps_4.id)
        self.assertEqual(rps_4.data_state, DATA_STATE_MAPPING)
        self.assertEqual(rps_4.merge_state, MERGE_STATE_UNKNOWN)

        ps_3_plus_4 = PropertyState.objects.filter(
            pm_property_id='11111',
            city='Philadelphia',
            address_line_1='123 Match Street').exclude(
                data_state=DATA_STATE_MAPPING,
                merge_state=MERGE_STATE_UNKNOWN).get()
        self.assertEqual(ps_3_plus_4.data_state, DATA_STATE_MATCHING)
        self.assertEqual(ps_3_plus_4.merge_state, MERGE_STATE_MERGED)

        rps_5 = PropertyState.objects.get(pk=ps_5.id)
        self.assertEqual(rps_5.data_state, DATA_STATE_MATCHING)
        self.assertEqual(rps_5.merge_state, MERGE_STATE_NEW)

    def test_match_taxlots_if_all_default_fields_match(self):
        base_details = {
            'address_line_1': '123 Match Street',
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create first set of taxlots that match each other
        tls_1 = self.taxlot_state_factory.get_taxlot_state(**base_details)
        base_details['city'] = 'Denver'
        tls_2 = self.taxlot_state_factory.get_taxlot_state(**base_details)

        # Create second set of taxlots that match each other
        base_details['jurisdiction_tax_lot_id'] = '11111'
        tls_3 = self.taxlot_state_factory.get_taxlot_state(**base_details)
        base_details['city'] = 'Philadelphia'
        tls_4 = self.taxlot_state_factory.get_taxlot_state(**base_details)

        # Create unmatched taxlot
        base_details['jurisdiction_tax_lot_id'] = '000'
        tls_5 = self.taxlot_state_factory.get_taxlot_state(**base_details)

        # set import_file mapping done so that matching can occur.
        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)

        # 3 TaxLot, 3 TaxLotViews, 7 TaxLotStates (5 imported, 2 merge results)
        self.assertEqual(TaxLot.objects.count(), 3)
        self.assertEqual(TaxLotView.objects.count(), 3)
        self.assertEqual(TaxLotState.objects.count(), 7)

        # Refresh -States and check data_state and merge_state values
        rtls_1 = TaxLotState.objects.get(pk=tls_1.id)
        self.assertEqual(rtls_1.data_state, DATA_STATE_MAPPING)
        self.assertEqual(rtls_1.merge_state, MERGE_STATE_UNKNOWN)

        rtls_2 = TaxLotState.objects.get(pk=tls_2.id)
        self.assertEqual(rtls_2.data_state, DATA_STATE_MAPPING)
        self.assertEqual(rtls_2.merge_state, MERGE_STATE_UNKNOWN)

        tls_1_plus_2 = TaxLotState.objects.filter(
            jurisdiction_tax_lot_id__isnull=True,
            city='Denver',
            address_line_1='123 Match Street').exclude(
                data_state=DATA_STATE_MAPPING,
                merge_state=MERGE_STATE_UNKNOWN).get()

        self.assertEqual(tls_1_plus_2.data_state, DATA_STATE_MATCHING)
        self.assertEqual(tls_1_plus_2.merge_state, MERGE_STATE_MERGED)

        rtls_3 = TaxLotState.objects.get(pk=tls_3.id)
        self.assertEqual(rtls_3.data_state, DATA_STATE_MAPPING)
        self.assertEqual(rtls_3.merge_state, MERGE_STATE_UNKNOWN)

        rtls_4 = TaxLotState.objects.get(pk=tls_4.id)
        self.assertEqual(rtls_4.data_state, DATA_STATE_MAPPING)
        self.assertEqual(rtls_4.merge_state, MERGE_STATE_UNKNOWN)

        tls_3_plus_4 = TaxLotState.objects.filter(
            jurisdiction_tax_lot_id='11111',
            city='Philadelphia',
            address_line_1='123 Match Street').exclude(
                data_state=DATA_STATE_MAPPING,
                merge_state=MERGE_STATE_UNKNOWN).get()
        self.assertEqual(tls_3_plus_4.data_state, DATA_STATE_MATCHING)
        self.assertEqual(tls_3_plus_4.merge_state, MERGE_STATE_MERGED)

        rtls_5 = TaxLotState.objects.get(pk=tls_5.id)
        self.assertEqual(rtls_5.data_state, DATA_STATE_MATCHING)
        self.assertEqual(rtls_5.merge_state, MERGE_STATE_NEW)

    def test_match_properties_on_ubid(self):
        base_details = {
            'ubid': '86HJPCWQ+2VV-1-3-2-3',
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create set of properties that match each other
        self.property_state_factory.get_property_state(**base_details)
        base_details['city'] = 'Denver'
        self.property_state_factory.get_property_state(**base_details)

        # set import_file mapping done so that matching can occur.
        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)

        # 1 Property, 1 PropertyView, 3 PropertyStates (2 imported, 1 merge result)
        self.assertEqual(Property.objects.count(), 1)
        self.assertEqual(PropertyView.objects.count(), 1)
        self.assertEqual(PropertyState.objects.count(), 3)

    def test_match_properties_normalized_address_used_instead_of_address_line_1(
            self):
        base_details = {
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create set of properties that have the same address_line_1 in slightly different format
        base_details['address_line_1'] = '123 Match Street'
        self.property_state_factory.get_property_state(**base_details)
        base_details['address_line_1'] = '123 match St.'
        base_details['city'] = 'Denver'
        self.property_state_factory.get_property_state(**base_details)

        # set import_file mapping done so that matching can occur.
        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)

        # 1 Property, 1 PropertyView, 3 PropertyStates (2 imported, 1 merge result)
        self.assertEqual(Property.objects.count(), 1)
        self.assertEqual(PropertyView.objects.count(), 1)
        self.assertEqual(PropertyState.objects.count(), 3)

    def test_match_taxlots_normalized_address_used_instead_of_address_line_1(
            self):
        base_details = {
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create set of taxlots that have the same address_line_1 in slightly different format
        base_details['address_line_1'] = '123 Match Street'
        self.taxlot_state_factory.get_taxlot_state(**base_details)
        base_details['address_line_1'] = '123 match St.'
        base_details['city'] = 'Denver'
        self.taxlot_state_factory.get_taxlot_state(**base_details)

        # set import_file mapping done so that matching can occur.
        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)

        # 1 TaxLot, 1 TaxLotView, 3 TaxLotStates (2 imported, 1 merge result)
        self.assertEqual(TaxLot.objects.count(), 1)
        self.assertEqual(TaxLotView.objects.count(), 1)
        self.assertEqual(TaxLotState.objects.count(), 3)

    def test_no_matches_if_all_matching_criteria_is_None(self):
        """
        Default matching criteria for PropertyStates are:
            - address_line_1 (substituted by normalized_address)
            - ubid
            - pm_property_id
            - custom_id_1
        and all are set to None.
        """
        base_details = {
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }

        # Create set of properties that won't match
        self.property_state_factory.get_property_state(**base_details)
        base_details['city'] = 'Denver'
        self.property_state_factory.get_property_state(**base_details)

        # set import_file mapping done so that matching can occur.
        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)

        # 2 Property, 2 PropertyView, 2 PropertyStates - No merges
        self.assertEqual(Property.objects.count(), 2)
        self.assertEqual(PropertyView.objects.count(), 2)
        self.assertEqual(PropertyState.objects.count(), 2)

    def test_match_properties_get_rolled_up_into_one_in_the_order_their_uploaded(
            self):
        """
        The most recently uploaded should take precedence when merging states.
        If more than 2 states match each other, they are merged two at a time
        until one is remaining.

        Reminder, this is only for -States within an ImportFile.
        """
        base_details = {
            'address_line_1': '123 Match Street',
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create first set of properties that match each other
        base_details['city'] = 'Philadelphia'
        self.property_state_factory.get_property_state(**base_details)
        base_details['city'] = 'Arvada'
        self.property_state_factory.get_property_state(**base_details)
        base_details['city'] = 'Golden'
        self.property_state_factory.get_property_state(**base_details)
        base_details['city'] = 'Denver'
        self.property_state_factory.get_property_state(**base_details)

        # set import_file mapping done so that matching can occur.
        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)

        # 1 Property, 1 PropertyViews, 7 PropertyStates (4 imported, 3 merge results)
        self.assertEqual(Property.objects.count(), 1)
        self.assertEqual(PropertyView.objects.count(), 1)
        self.assertEqual(PropertyState.objects.count(), 7)

        self.assertEqual(PropertyView.objects.first().state.city, 'Denver')
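
The roll-up test above asserts that when several in-file states match, they are merged two at a time and the most recently uploaded value wins. A minimal sketch of that behavior as a left-to-right reduce, assuming later non-empty values overwrite earlier ones; merge_pair is a made-up helper, not match_buildings itself.

from functools import reduce

def merge_pair(older, newer):
    """Merge two matched states; newer non-empty values overwrite older ones."""
    result = dict(older)
    result.update({k: v for k, v in newer.items() if v not in (None, '')})
    return result

states_in_upload_order = [
    {'address_line_1': '123 Match Street', 'city': 'Philadelphia'},
    {'address_line_1': '123 Match Street', 'city': 'Arvada'},
    {'address_line_1': '123 Match Street', 'city': 'Golden'},
    {'address_line_1': '123 Match Street', 'city': 'Denver'},
]

rolled_up = reduce(merge_pair, states_in_upload_order)
assert rolled_up['city'] == 'Denver'  # mirrors the final assertion in the test above
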
Example #14
class TestMatchMergeLink(DataMappingBaseTestCase):
    def setUp(self):
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file_1, self.import_record_1, self.cycle_1 = selfvars

        cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        self.cycle_2 = cycle_factory.get_cycle(name="Cycle 2")
        self.import_record_2, self.import_file_2 = self.create_import_file(
            self.user, self.org, self.cycle_2
        )

        self.cycle_3 = cycle_factory.get_cycle(name="Cycle 3")
        self.import_record_3, self.import_file_3 = self.create_import_file(
            self.user, self.org, self.cycle_3
        )

        self.property_state_factory = FakePropertyStateFactory(organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(organization=self.org)

    def test_match_merge_link_for_properties(self):
        """
        In this context, a "set" includes a -State, -View, and canonical record.

        Set up consists of 3 imports across 3 cycles respectively:
        Cycle 1 - 3 sets will be imported.
            - 2 sets match each other and are merged
            - 1 set doesn't match any others
        Cycle 2 - 4 sets will be imported.
            - 3 sets match. All will merge then link to match set in Cycle 1
            - 1 set doesn't match any others
        Cycle 3 - 2 sets will be imported.
            - 1 set will match sets from Cycles 1 and 2 and link to them
            - 1 set doesn't match any others
        """
        # Cycle 1 / ImportFile 1
        base_state_details = {
            'pm_property_id': '1st Match Set',
            'city': '1st Match - Cycle 1 - City 1',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        self.property_state_factory.get_property_state(**base_state_details)

        base_state_details['pm_property_id'] = '1st Match Set'
        base_state_details['city'] = '1st Match - Cycle 1 - City 2'
        self.property_state_factory.get_property_state(**base_state_details)

        base_state_details['pm_property_id'] = 'Single Unmatched - 1'
        base_state_details['city'] = 'Unmatched City - Cycle 1'
        self.property_state_factory.get_property_state(**base_state_details)

        # Import file and create -Views and canonical records.
        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        geocode_and_match_buildings_task(self.import_file_1.id)

        # Cycle 2 / ImportFile 2
        base_state_details['import_file_id'] = self.import_file_2.id
        base_state_details['pm_property_id'] = '1st Match Set'
        base_state_details['city'] = '1st Match - Cycle 2 - City 1'
        self.property_state_factory.get_property_state(**base_state_details)

        base_state_details['pm_property_id'] = '1st Match Set'
        base_state_details['city'] = '1st Match - Cycle 2 - City 2'
        self.property_state_factory.get_property_state(**base_state_details)

        base_state_details['pm_property_id'] = '1st Match Set'
        base_state_details['city'] = '1st Match - Cycle 2 - City 3'
        self.property_state_factory.get_property_state(**base_state_details)

        base_state_details['pm_property_id'] = 'Single Unmatched - 2'
        base_state_details['city'] = 'Unmatched City - Cycle 2'
        self.property_state_factory.get_property_state(**base_state_details)

        # Import file and create -Views and canonical records.
        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        geocode_and_match_buildings_task(self.import_file_2.id)

        # Cycle 3 / ImportFile 3
        base_state_details['import_file_id'] = self.import_file_3.id
        base_state_details['pm_property_id'] = '1st Match Set'
        base_state_details['city'] = '1st Match - Cycle 3 - City 1'
        self.property_state_factory.get_property_state(**base_state_details)

        base_state_details['pm_property_id'] = 'Single Unmatched - 3'
        base_state_details['city'] = 'Unmatched City - Cycle 3'
        self.property_state_factory.get_property_state(**base_state_details)

        # Import file and create -Views and canonical records.
        self.import_file_3.mapping_done = True
        self.import_file_3.save()
        geocode_and_match_buildings_task(self.import_file_3.id)

        # Verify merges and links happened
        self.assertEqual(6, PropertyView.objects.count())
        self.assertEqual(4 + 6 + 2, PropertyState.objects.count())
        # 4 unique canonical records used in -Views
        # For now, Properties are not deleted when they aren't used in -Views so a count test wouldn't be appropriate
        self.assertEqual(
            4,
            len(set(PropertyView.objects.values_list('property_id', flat=True)))
        )

        # At the moment, there should be 3 -Views with the same canonical record across 3 cycles
        views_with_same_canonical_record = PropertyView.objects.\
            values('property_id').\
            annotate(times_used=Count('id'), cycle_ids=ArrayAgg('cycle_id')).\
            filter(times_used__gt=1).\
            get()
        self.assertEqual(3, views_with_same_canonical_record['times_used'])
        self.assertCountEqual(
            [self.cycle_1.id, self.cycle_2.id, self.cycle_3.id],
            views_with_same_canonical_record['cycle_ids']
        )

    def test_match_merge_link_for_taxlots(self):
        """
        In this context, a "set" includes a -State, -View, and canonical record.

        Set up consists of 3 imports across 3 cycles respectively:
        Cycle 1 - 3 sets will be imported.
            - 2 sets match each other and are merged
            - 1 set doesn't match any others
        Cycle 2 - 4 sets will be imported.
            - 3 sets match. All will merge then link to match set in Cycle 1
            - 1 set doesn't match any others
        Cycle 3 - 2 sets will be imported.
            - 1 set will match sets from Cycles 1 and 2 and link to them
            - 1 set doesn't match any others
        """
        # Cycle 1 / ImportFile 1
        base_state_details = {
            'jurisdiction_tax_lot_id': '1st Match Set',
            'city': '1st Match - Cycle 1 - City 1',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        self.taxlot_state_factory.get_taxlot_state(**base_state_details)

        base_state_details['jurisdiction_tax_lot_id'] = '1st Match Set'
        base_state_details['city'] = '1st Match - Cycle 1 - City 2'
        self.taxlot_state_factory.get_taxlot_state(**base_state_details)

        base_state_details['jurisdiction_tax_lot_id'] = 'Single Unmatched - 1'
        base_state_details['city'] = 'Unmatched City - Cycle 1'
        self.taxlot_state_factory.get_taxlot_state(**base_state_details)

        # Import file and create -Views and canonical records.
        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        geocode_and_match_buildings_task(self.import_file_1.id)

        # Cycle 2 / ImportFile 2
        base_state_details['import_file_id'] = self.import_file_2.id
        base_state_details['jurisdiction_tax_lot_id'] = '1st Match Set'
        base_state_details['city'] = '1st Match - Cycle 2 - City 1'
        self.taxlot_state_factory.get_taxlot_state(**base_state_details)

        base_state_details['jurisdiction_tax_lot_id'] = '1st Match Set'
        base_state_details['city'] = '1st Match - Cycle 2 - City 2'
        self.taxlot_state_factory.get_taxlot_state(**base_state_details)

        base_state_details['jurisdiction_tax_lot_id'] = '1st Match Set'
        base_state_details['city'] = '1st Match - Cycle 2 - City 3'
        self.taxlot_state_factory.get_taxlot_state(**base_state_details)

        base_state_details['jurisdiction_tax_lot_id'] = 'Single Unmatched - 2'
        base_state_details['city'] = 'Unmatched City - Cycle 2'
        self.taxlot_state_factory.get_taxlot_state(**base_state_details)

        # Import file and create -Views and canonical records.
        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        geocode_and_match_buildings_task(self.import_file_2.id)

        # Cycle 3 / ImportFile 3
        base_state_details['import_file_id'] = self.import_file_3.id
        base_state_details['jurisdiction_tax_lot_id'] = '1st Match Set'
        base_state_details['city'] = '1st Match - Cycle 3 - City 1'
        self.taxlot_state_factory.get_taxlot_state(**base_state_details)

        base_state_details['jurisdiction_tax_lot_id'] = 'Single Unmatched - 3'
        base_state_details['city'] = 'Unmatched City - Cycle 3'
        self.taxlot_state_factory.get_taxlot_state(**base_state_details)

        # Import file and create -Views and canonical records.
        self.import_file_3.mapping_done = True
        self.import_file_3.save()
        geocode_and_match_buildings_task(self.import_file_3.id)

        # Verify merges and links happened
        self.assertEqual(6, TaxLotView.objects.count())
        self.assertEqual(4 + 6 + 2, TaxLotState.objects.count())
        # 4 unique canonical records used in -Views
        # For now, TaxLots are not deleted when they aren't used in -Views so a count test wouldn't be appropriate
        self.assertEqual(
            4,
            len(set(TaxLotView.objects.values_list('taxlot_id', flat=True)))
        )

        # At the moment, there should be 3 -Views with the same canonical record across 3 cycles
        views_with_same_canonical_record = TaxLotView.objects.\
            values('taxlot_id').\
            annotate(times_used=Count('id'), cycle_ids=ArrayAgg('cycle_id')).\
            filter(times_used__gt=1).\
            get()
        self.assertEqual(3, views_with_same_canonical_record['times_used'])
        self.assertCountEqual(
            [self.cycle_1.id, self.cycle_2.id, self.cycle_3.id],
            views_with_same_canonical_record['cycle_ids']
        )
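
The Count/ArrayAgg query above finds the one canonical record whose -Views span all three cycles. The same grouping can be sketched in plain Python over (taxlot_id, cycle_id) pairs; the ids below are invented for illustration and do not come from the test data.

from collections import defaultdict

view_rows = [  # (taxlot_id, cycle_id) stand-ins for TaxLotView rows
    (1, 10), (1, 11), (1, 12),  # one canonical record linked across 3 cycles
    (2, 10), (3, 11), (4, 12),  # records used in a single cycle each
]

cycles_by_canonical = defaultdict(list)
for taxlot_id, cycle_id in view_rows:
    cycles_by_canonical[taxlot_id].append(cycle_id)

linked = {tl_id: cycles for tl_id, cycles in cycles_by_canonical.items() if len(cycles) > 1}
assert list(linked) == [1]
assert sorted(linked[1]) == [10, 11, 12]
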
Example #15
class TestRenameColumns(TestCase):
    def setUp(self):
        user_details = {
            'username': '******',
            'password': '******',
        }
        self.user = User.objects.create_superuser(email='*****@*****.**',
                                                  **user_details)
        self.org, _, _ = create_organization(self.user)
        self.client.login(**user_details)

        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.tax_lot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)

        self.extra_data_column = Column.objects.create(
            table_name='PropertyState',
            column_name='test_column',
            organization=self.org,
            is_extra_data=True,
        )

    def test_rename_column_no_data(self):
        address_column = Column.objects.filter(
            column_name='address_line_1').first()

        # verify that the column has to be new
        self.assertFalse(address_column.rename_column('custom_id_1')[0])

    def test_rename_column_no_data_and_force(self):
        orig_address_column = Column.objects.filter(
            column_name='address_line_1').first()

        # with force=True, renaming onto an existing column name is allowed
        self.assertTrue(
            orig_address_column.rename_column('custom_id_1', True)[0])

        # get the address column and check the fields
        address_column = Column.objects.filter(
            column_name='address_line_1').first()
        self.assertEqual(address_column.is_extra_data, False)
        self.assertEqual(address_column.display_name,
                         orig_address_column.display_name)

    def test_rename_column_field_to_field(self):
        address_column = Column.objects.filter(
            column_name='address_line_1').first()

        # create the test data and assemble the expected data result
        expected_data = []
        for i in range(0, 20):
            state = self.property_state_factory.get_property_state(
                data_state=DATA_STATE_MATCHING)
            expected_data.append(state.address_line_1)

        result = address_column.rename_column('property_type', force=True)
        self.assertTrue(result)

        results = list(
            PropertyState.objects.filter(
                organization=self.org).order_by('id').values_list(
                    'property_type', flat=True))
        self.assertListEqual(results, expected_data)

        # verify that the original field is now empty
        results = list(
            PropertyState.objects.filter(
                organization=self.org).order_by('id').values_list(
                    'address_line_1', flat=True))
        self.assertListEqual(results, [None for _x in range(20)])

    def test_rename_column_field_to_extra_data(self):
        address_column = Column.objects.filter(
            column_name='address_line_1').first()

        # create the test data and assemble the expected data result
        expected_data = []
        for i in range(0, 20):
            state = self.property_state_factory.get_property_state(
                data_state=DATA_STATE_MATCHING,
                extra_data={'string': 'abc %s' % i})
            expected_data.append({
                'string': state.extra_data['string'],
                'new_address_line_1': state.address_line_1
            })

        result = address_column.rename_column('new_address_line_1')
        self.assertTrue(result)

        results = list(
            PropertyState.objects.filter(
                organization=self.org).order_by('id').values_list('extra_data',
                                                                  flat=True))
        self.assertListEqual(results, expected_data)

        # verify that the original field is now empty
        results = list(
            PropertyState.objects.filter(
                organization=self.org).order_by('id').values_list(
                    'address_line_1', flat=True))
        self.assertListEqual(results, [None for _x in range(20)])

    def test_rename_column_extra_data_to_field(self):
        # create the test data and assemble the expected data result
        expected_data = []
        for i in range(0, 20):
            state = self.property_state_factory.get_property_state(
                data_state=DATA_STATE_MATCHING,
                extra_data={
                    self.extra_data_column.column_name: 'abc %s' % i,
                    'skip': 'value'
                })
            expected_data.append(
                state.extra_data[self.extra_data_column.column_name])

        result = self.extra_data_column.rename_column('address_line_1',
                                                      force=True)
        self.assertTrue(result[0])

        results = list(
            PropertyState.objects.filter(
                organization=self.org).order_by('id').values_list(
                    'address_line_1', flat=True))
        self.assertListEqual(results, expected_data)

        # verify that the original field is now empty
        results = list(
            PropertyState.objects.filter(
                organization=self.org).order_by('id').values_list('extra_data',
                                                                  flat=True))
        self.assertListEqual(results, [{'skip': 'value'} for _x in range(20)])

    def test_rename_column_extra_data_to_extra_data(self):
        # create the test data and assemble the expected data result
        expected_data = []
        for i in range(0, 20):
            state = self.property_state_factory.get_property_state(
                data_state=DATA_STATE_MATCHING,
                extra_data={
                    self.extra_data_column.column_name: 'abc %s' % i,
                    'skip': 'value'
                })
            expected_data.append(
                state.extra_data[self.extra_data_column.column_name])

        result = self.extra_data_column.rename_column('new_extra', force=True)
        self.assertTrue(result[0])

        results = list(
            PropertyState.objects.filter(
                organization=self.org).order_by('id').values_list('extra_data',
                                                                  flat=True))
        results = [x['new_extra'] for x in results]
        self.assertListEqual(results, expected_data)

        # verify that the original field is now empty
        results = list(
            PropertyState.objects.filter(
                organization=self.org).order_by('id').values_list('extra_data',
                                                                  flat=True))
        results = [
            x.get(self.extra_data_column.column_name, None) for x in results
        ]
        self.assertListEqual(results, [None for _x in range(20)])

    def test_rename_column_extra_data_to_field_int_to_int(self):
        # create the test data and assemble the expected data result
        expected_data = []
        for i in range(0, 20):
            state = self.property_state_factory.get_property_state(
                data_state=DATA_STATE_MATCHING,
                extra_data={self.extra_data_column.column_name: i})
            expected_data.append(
                state.extra_data[self.extra_data_column.column_name])

        result = self.extra_data_column.rename_column('building_count',
                                                      force=True)
        self.assertTrue(result[0])

        results = list(
            PropertyState.objects.filter(
                organization=self.org).order_by('id').values_list(
                    'building_count', flat=True))
        self.assertListEqual(results, expected_data)

    def test_rename_datetime_field_to_extra_data(self):
        expected_data = []

        new_col_name = 'recent_sale_date_renamed'

        for i in range(0, 5):
            date = "2018-04-02T19:53:0{}+00:00".format(i)
            state = self.property_state_factory.get_property_state(
                data_state=DATA_STATE_MATCHING, recent_sale_date=date)
            expected_data.append({new_col_name: state.recent_sale_date})

        old_column = Column.objects.filter(
            column_name='recent_sale_date').first()
        result = old_column.rename_column(new_col_name)
        self.assertTrue(result[0])

        results = list(
            PropertyState.objects.filter(
                organization=self.org).order_by('id').values_list('extra_data',
                                                                  flat=True))

        self.assertListEqual(results, expected_data)

    def test_rename_datetime_field_to_another_datetime_field(self):
        expected_data = []

        new_col_name = 'recent_sale_date'

        for i in range(0, 5):
            date = "2018-04-02T19:53:0{}+00:00".format(i)
            self.property_state_factory.get_property_state(
                data_state=DATA_STATE_MATCHING, generation_date=date)
            expected_data.append(date)

        old_column = Column.objects.filter(
            column_name='generation_date').first()
        result = old_column.rename_column(new_col_name, force=True)
        self.assertTrue(result[0])

        new_col_results_raw = list(
            PropertyState.objects.filter(
                organization=self.org).order_by('id').values_list(new_col_name,
                                                                  flat=True))
        new_col_results = [dt.isoformat() for dt in new_col_results_raw]
        self.assertListEqual(new_col_results, expected_data)

        # Check that generation_dates were cleared
        for p in PropertyState.objects.all():
            self.assertIsNone(p.generation_date)

    def test_rename_extra_data_field_to_datetime_field_success(self):
        expected_data = []

        new_col_name = 'recent_sale_date'

        for i in range(0, 5):
            date = "2018-04-02T19:53:0{}+00:00".format(i)
            self.property_state_factory.get_property_state(
                data_state=DATA_STATE_MATCHING,
                extra_data={self.extra_data_column.column_name: date})
            expected_data.append(date)

        result = self.extra_data_column.rename_column(new_col_name, force=True)
        self.assertTrue(result[0])

        raw_results = list(
            PropertyState.objects.filter(
                organization=self.org).order_by('id').values_list(new_col_name,
                                                                  flat=True))

        results = [dt.isoformat() for dt in raw_results]

        self.assertListEqual(results, expected_data)

    def test_rename_extra_data_field_to_datetime_field_unsuccessful(self):
        expected_data = []
        original_column_count = Column.objects.count()

        new_col_name = 'recent_sale_date'

        # range is purposely set to cause errors in the date format but not immediately
        for i in range(9, 11):
            date = "2018-04-02T19:53:0{}+00:00".format(i)
            self.property_state_factory.get_property_state(
                data_state=DATA_STATE_MATCHING,
                extra_data={self.extra_data_column.column_name: date})
            expected_data.append(date)

        result = self.extra_data_column.rename_column(new_col_name, force=True)
        self.assertEqual(result, [
            False,
            "The column data aren't formatted properly for the new column due to type constraints (e.g., Datatime, Quantities, etc.)."
        ])

        new_column_count = Column.objects.count()
        self.assertEqual(original_column_count, new_column_count)

        # Check that none of the PropertyStates were updated.
        for p in PropertyState.objects.all():
            self.assertIsNone(p.recent_sale_date)

    def test_rename_date_field_to_extra_data(self):
        expected_data = []

        new_col_name = 'year_ending_renamed'

        for i in range(1, 5):
            date = "2018-04-0{}".format(i)
            state = self.property_state_factory.get_property_state(
                data_state=DATA_STATE_MATCHING, year_ending=date)
            expected_data.append({new_col_name: state.year_ending})

        old_column = Column.objects.filter(column_name='year_ending').first()
        result = old_column.rename_column(new_col_name)
        self.assertTrue(result[0])

        results = list(
            PropertyState.objects.filter(
                organization=self.org).order_by('id').values_list('extra_data',
                                                                  flat=True))

        self.assertListEqual(results, expected_data)

    def test_rename_extra_data_field_to_date_field_success(self):
        expected_data = []

        new_col_name = 'year_ending'

        for i in range(1, 5):
            date = "2018-04-0{}".format(i)
            self.property_state_factory.get_property_state(
                data_state=DATA_STATE_MATCHING,
                extra_data={self.extra_data_column.column_name: date})
            expected_data.append(date)

        result = self.extra_data_column.rename_column(new_col_name, force=True)
        self.assertTrue(result[0])

        raw_results = list(
            PropertyState.objects.filter(
                organization=self.org).order_by('id').values_list(new_col_name,
                                                                  flat=True))

        results = [dt.isoformat() for dt in raw_results]

        self.assertListEqual(results, expected_data)

    def test_rename_extra_data_field_to_date_field_unsuccessful(self):
        expected_data = []
        original_column_count = Column.objects.count()

        new_col_name = 'year_ending'

        # range is purposely set to cause errors in the date format but not immediately
        for i in range(9, 11):
            date = "2018-04-0{}".format(i)
            self.property_state_factory.get_property_state(
                data_state=DATA_STATE_MATCHING,
                extra_data={self.extra_data_column.column_name: date})
            expected_data.append(date)

        result = self.extra_data_column.rename_column(new_col_name, force=True)
        self.assertEqual(result, [
            False,
            "The column data aren't formatted properly for the new column due to type constraints (e.g., Datatime, Quantities, etc.)."
        ])

        new_column_count = Column.objects.count()
        self.assertEqual(original_column_count, new_column_count)

        # Check that none of the PropertyStates were updated.
        for p in PropertyState.objects.all():
            self.assertIsNone(p.year_ending)

    def test_rename_quantity_field_to_extra_data(self):
        expected_data = []

        new_col_name = 'gross_floor_area_renamed'

        for i in range(1, 5):
            area = i * 100.5
            self.property_state_factory.get_property_state(
                data_state=DATA_STATE_MATCHING, gross_floor_area=area)
            expected_data.append({new_col_name: area})

        old_column = Column.objects.filter(
            column_name='gross_floor_area').first()
        result = old_column.rename_column(new_col_name)
        self.assertTrue(result[0])

        results = list(
            PropertyState.objects.filter(
                organization=self.org).order_by('id').values_list('extra_data',
                                                                  flat=True))

        self.assertListEqual(results, expected_data)

    def test_rename_extra_data_field_to_quantity_field_success(self):
        expected_data = []

        new_col_name = 'gross_floor_area'

        for i in range(1, 5):
            area = i * 100.5
            self.property_state_factory.get_property_state(
                data_state=DATA_STATE_MATCHING,
                extra_data={self.extra_data_column.column_name: area})
            expected_data.append(ureg.Quantity(area, "foot ** 2"))

        result = self.extra_data_column.rename_column(new_col_name, force=True)
        self.assertTrue(result[0])

        results = list(
            PropertyState.objects.filter(
                organization=self.org).order_by('id').values_list(
                    'gross_floor_area', flat=True))

        self.assertListEqual(results, expected_data)

    def test_rename_extra_data_field_to_quantity_field_unsuccessful(self):
        expected_data = []
        original_column_count = Column.objects.count()

        new_col_name = 'gross_floor_area'

        for i in range(0, 2):
            # add a valid and invalid area
            area = (100 if i == 0 else "not a number")
            state = self.property_state_factory.get_property_state(
                data_state=DATA_STATE_MATCHING,
                extra_data={self.extra_data_column.column_name: area})
            # Capture default gross_floor_areas
            expected_data.append(
                ureg.Quantity(state.gross_floor_area, "foot ** 2"))

        result = self.extra_data_column.rename_column(new_col_name, force=True)
        self.assertEqual(result, [
            False,
            "The column data aren't formatted properly for the new column due to type constraints (e.g., Datatime, Quantities, etc.)."
        ])

        new_column_count = Column.objects.count()
        self.assertEqual(original_column_count, new_column_count)

        # check that the states' gross_floor_area values were unchanged
        results = list(
            PropertyState.objects.filter(
                organization=self.org).order_by('id').values_list(
                    'gross_floor_area', flat=True))

        self.assertListEqual(results, expected_data)

    def test_rename_quantity_field_to_another_quantity_field_success(self):
        expected_data = []

        new_col_name = 'occupied_floor_area'

        for i in range(1, 5):
            area = i * 100.5
            self.property_state_factory.get_property_state(
                data_state=DATA_STATE_MATCHING, gross_floor_area=area)
            # Capture the magnitude with default occupied_floor_area units
            expected_data.append(ureg.Quantity(area, "foot ** 2"))

        old_column = Column.objects.filter(
            column_name='gross_floor_area').first()
        result = old_column.rename_column(new_col_name, force=True)
        self.assertTrue(result[0])

        results = list(
            PropertyState.objects.filter(
                organization=self.org).order_by('id').values_list(new_col_name,
                                                                  flat=True))

        self.assertListEqual(results, expected_data)

        # Check that gross_floor_areas were cleared
        for p in PropertyState.objects.all():
            self.assertIsNone(p.gross_floor_area)

    def test_rename_quantity_field_to_another_quantity_field_unsuccessful(self):
        # This should be unsuccessful because conversions don't exist between certain column units
        expected_data = []
        original_column_count = Column.objects.count()

        new_col_name = 'site_eui'

        for i in range(1, 5):
            area = i * 100.5
            self.property_state_factory.get_property_state(
                data_state=DATA_STATE_MATCHING, gross_floor_area=area)
            # Capture these pre-rename-attempt values
            expected_data.append(ureg.Quantity(area, "foot ** 2"))

        old_column = Column.objects.filter(
            column_name='gross_floor_area').first()
        result = old_column.rename_column(new_col_name, force=True)
        self.assertEqual(result, [
            False,
            "The column data can't be converted to the new column due to conversion contraints (e.g., converting square feet to kBtu etc.)."
        ])

        new_column_count = Column.objects.count()
        self.assertEqual(original_column_count, new_column_count)

        # check that the states' gross_floor_area values were unchanged
        results = list(
            PropertyState.objects.filter(
                organization=self.org).order_by('id').values_list(
                    'gross_floor_area', flat=True))

        self.assertListEqual(results, expected_data)
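The failure above hinges on unit compatibility: gross_floor_area values carry area units, while site_eui expects an energy-use-intensity unit, and no conversion path exists between the two. A minimal standalone sketch of that distinction, assuming the ureg used in these tests is a pint UnitRegistry (which the "foot ** 2" Quantity calls suggest):

import pint

ureg = pint.UnitRegistry()

area = ureg.Quantity(100.5, "foot ** 2")
print(area.to("meter ** 2"))  # fine: both sides are areas

try:
    # an EUI-style unit (energy per area per time) has a different dimensionality,
    # so pint refuses the conversion, mirroring the rename failure above
    area.to("Btu / foot ** 2 / year")
except pint.DimensionalityError as exc:
    print("cannot convert:", exc)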

    def test_rename_property_campus_field_unsuccessful(self):
        old_column = Column.objects.filter(column_name='campus').first()
        result = old_column.rename_column("new_col_name", force=True)
        self.assertEqual(
            result, [False, "Can't move data out of reserved column 'campus'"])
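Taken together, these tests suggest Column.rename_column returns a [success, message] pair rather than a bare boolean, which is why the assertions above inspect result[0] or compare against [False, "..."]. A small self-contained sketch of how such a pair is safely checked (interpret_rename_result is a hypothetical helper for illustration, not part of SEED):

def interpret_rename_result(result):
    """Return True only when the rename actually succeeded.

    Checking the raw value with bool(result) would be misleading: a failed
    rename such as [False, "type constraints"] is a non-empty list and
    therefore still truthy.
    """
    success = result[0]
    return bool(success)


if __name__ == "__main__":
    assert interpret_rename_result([True, None]) is True
    assert interpret_rename_result([False, "conversion constraints"]) is False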
Example #16
class DefaultColumnsViewTests(DeleteModelsTestCase):
    """
    Tests of the SEED default custom saved columns
    """

    def setUp(self):
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        user_details_2 = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.user = User.objects.create_superuser(**user_details)
        self.user_2 = User.objects.create_superuser(**user_details_2)
        self.org, _, _ = create_organization(self.user, "test-organization-a")
        self.org_2, _, _ = create_organization(self.user_2, "test-organization-b")

        self.property_state_factory = FakePropertyStateFactory(organization=self.org)
        self.tax_lot_state_factory = FakeTaxLotStateFactory(organization=self.org)

        Column.objects.create(column_name='test', organization=self.org)
        Column.objects.create(column_name='extra_data_test',
                              table_name='PropertyState',
                              organization=self.org,
                              is_extra_data=True)
        self.cross_org_column = Column.objects.create(column_name='extra_data_test',
                                                      table_name='PropertyState',
                                                      organization=self.org_2,
                                                      is_extra_data=True)

        self.client.login(**user_details)

    def test_set_default_columns(self):
        url = reverse_lazy('api:v1:set_default_columns')
        columns = ['s', 'c1', 'c2']
        post_data = {
            'columns': columns,
            'show_shared_buildings': True
        }
        # set the columns
        response = self.client.post(
            url,
            content_type='application/json',
            data=json.dumps(post_data)
        )
        json_string = response.content
        data = json.loads(json_string)
        self.assertEqual(200, response.status_code)

        # get the columns
        # url = reverse_lazy('api:v1:columns-get-default-columns')
        # response = self.client.get(url)
        # json_string = response.content
        # data = json.loads(json_string)
        # self.assertEqual(data['columns'], columns)

        # get show_shared_buildings
        url = reverse_lazy('api:v2:users-shared-buildings', args=[self.user.pk])
        response = self.client.get(url)
        data = response.json()
        self.assertEqual(data['show_shared_buildings'], True)

        # set show_shared_buildings to False
        # post_data['show_shared_buildings'] = False
        # url = reverse_lazy('api:v1:set_default_columns')
        # response = self.client.post(
        #     url,
        #     content_type='application/json',
        #     data=json.dumps(post_data)
        # )
        # json_string = response.content
        # data = json.loads(json_string)
        # self.assertEqual(200, response.status_code)

        # get show_shared_buildings
        # url = reverse_lazy('api:v2:users-shared-buildings', args=[self.user.pk])
        # response = self.client.get(url)
        # json_string = response.content
        # data = json.loads(json_string)
        # self.assertEqual(data['show_shared_buildings'], False)

    def test_get_all_columns(self):
        # test building list columns
        response = self.client.get(reverse('api:v2:columns-list'), {
            'organization_id': self.org.id
        })
        data = json.loads(response.content)['columns']

        # remove the id columns to make checking existence easier
        for result in data:
            del result['id']
            del result['name']  # name is hard to compare because it is name_{ID}
            del result['organization_id']  # org changes based on test

        expected = {
            'table_name': 'PropertyState',
            'column_name': 'pm_property_id',
            'display_name': 'PM Property ID',
            'is_extra_data': False,
            'merge_protection': 'Favor New',
            'data_type': 'string',
            'related': False,
            'sharedFieldType': 'None',
            'pinnedLeft': True,
            'unit_name': None,
            'unit_type': None,
            'is_matching_criteria': True,
        }

        # randomly check a column
        self.assertIn(expected, data)

    def test_rename_column_property(self):
        column = Column.objects.filter(
            organization=self.org, table_name='PropertyState', column_name='address_line_1'
        ).first()

        for i in range(1, 10):
            self.property_state_factory.get_property_state(data_state=DATA_STATE_MATCHING)
            self.tax_lot_state_factory.get_taxlot_state(data_state=DATA_STATE_MATCHING)

        for ps in PropertyState.objects.filter(organization=self.org).order_by("pk"):
            # orig_data = [{"al1": ps.address_line_1,
            #               "ed": ps.extra_data,
            #               "na": ps.normalized_address}]
            expected_data = [{"al1": None,
                              "ed": {"address_line_1_extra_data": ps.address_line_1},
                              "na": None}]

        # rename address_line_1 to an extra data column via the API
        response = self.client.post(
            reverse('api:v2:columns-rename', args=[column.pk]),
            content_type='application/json',
            data=json.dumps({
                'new_column_name': 'address_line_1_extra_data',
                'overwrite': False
            })
        )
        result = response.json()
        self.assertEqual(response.status_code, 200)
        self.assertTrue(result['success'])

        for ps in PropertyState.objects.filter(organization=self.org).order_by("pk"):
            new_data = [{"al1": ps.address_line_1,
                         "ed": ps.extra_data,
                         "na": ps.normalized_address}]

        self.assertListEqual(expected_data, new_data)

    def test_rename_column_property_existing(self):
        column = Column.objects.filter(
            organization=self.org, table_name='PropertyState', column_name='address_line_1'
        ).first()

        for i in range(1, 10):
            self.property_state_factory.get_property_state(data_state=DATA_STATE_MATCHING)

        for ps in PropertyState.objects.filter(organization=self.org).order_by("pk"):
            expected_data = [{"al1": None,
                              "pn": ps.address_line_1,
                              "na": None}]

        response = self.client.post(
            reverse('api:v2:columns-rename', args=[column.pk]),
            content_type='application/json',
            data=json.dumps({
                'new_column_name': 'property_name',
                'overwrite': False
            })
        )
        result = response.json()
        self.assertEqual(response.status_code, 400)
        self.assertFalse(result['success'])

        response = self.client.post(
            reverse('api:v2:columns-rename', args=[column.pk]),
            content_type='application/json',
            data=json.dumps({
                'new_column_name': 'property_name',
                'overwrite': True
            })
        )
        result = response.json()
        self.assertEqual(response.status_code, 200)
        self.assertTrue(result['success'])

        for ps in PropertyState.objects.filter(organization=self.org).order_by("pk"):
            new_data = [{"al1": ps.address_line_1,
                         "pn": ps.property_name,
                         "na": ps.normalized_address}]

        self.assertListEqual(expected_data, new_data)

    def test_rename_column_taxlot(self):
        column = Column.objects.filter(
            organization=self.org, table_name='TaxLotState', column_name='address_line_1'
        ).first()

        for i in range(1, 10):
            self.property_state_factory.get_property_state(data_state=DATA_STATE_MATCHING)
            self.tax_lot_state_factory.get_taxlot_state(data_state=DATA_STATE_MATCHING)

        for ps in TaxLotState.objects.filter(organization=self.org).order_by("pk"):
            # orig_data = [{"al1": ps.address_line_1,
            #               "ed": ps.extra_data,
            #               "na": ps.normalized_address}]
            expected_data = [{"al1": None,
                              "ed": {"address_line_1_extra_data": ps.address_line_1},
                              "na": None}]

        # rename the tax lot address_line_1 column to an extra data column via the API
        response = self.client.post(
            reverse('api:v2:columns-rename', args=[column.pk]),
            content_type='application/json',
            data=json.dumps({
                'new_column_name': 'address_line_1_extra_data',
                'overwrite': False
            })
        )
        result = response.json()
        self.assertEqual(response.status_code, 200)
        self.assertTrue(result['success'])

        for ps in TaxLotState.objects.filter(organization=self.org).order_by("pk"):
            new_data = [{"al1": ps.address_line_1,
                         "ed": ps.extra_data,
                         "na": ps.normalized_address}]

        self.assertListEqual(expected_data, new_data)

    def test_rename_column_wrong_org(self):
        response = self.client.post(
            reverse('api:v2:columns-rename', args=[self.cross_org_column.pk]),
            content_type='application/json',
        )
        result = response.json()
        # self.assertFalse(result['success'])
        self.assertEqual(
            'Cannot find column in org=%s with pk=%s' % (self.org.id, self.cross_org_column.pk),
            result['message'],
        )

    def test_rename_column_dne(self):
        # attempt to rename a column that does not exist
        response = self.client.post(
            reverse('api:v2:columns-rename', args=[-999]),
            content_type='application/json',
        )
        self.assertEqual(response.status_code, 404)
        result = response.json()
        self.assertFalse(result['success'])
        self.assertEqual(result['message'], 'Cannot find column in org=%s with pk=-999' % self.org.id)
Example #17
class MeterUsageImportAdjustedScenarioTest(DataMappingBaseTestCase):
    def setUp(self):
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file_1, self.import_record, self.cycle = selfvars

        user_details = {
            'username': '******',
            'password': '******',
        }
        self.client.login(**user_details)

        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)

    def test_property_states_not_associated_to_properties_are_not_targetted_on_meter_import(
            self):
        # Create three pm_property_id = 5766973 properties that are exact duplicates
        base_details = {
            'address_line_1': '123 Match Street',
            'pm_property_id': '5766973',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }

        # Create 1 property with a duplicate in the first ImportFile
        self.property_state_factory.get_property_state(**base_details)
        self.property_state_factory.get_property_state(**base_details)

        # set import_file mapping done so that matching can occur.
        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        geocode_and_match_buildings_task(self.import_file_1.id)

        import_record_2, import_file_2 = self.create_import_file(
            self.user, self.org, self.cycle)

        # Create another duplicate property coming from second ImportFile
        base_details['import_file_id'] = import_file_2.id
        self.property_state_factory.get_property_state(**base_details)

        # set import_file mapping done so that matching can occur.
        import_file_2.mapping_done = True
        import_file_2.save()
        geocode_and_match_buildings_task(import_file_2.id)

        # Import the PM Meters
        filename = "example-pm-monthly-meter-usage.xlsx"
        filepath = os.path.dirname(
            os.path.abspath(__file__)) + "/data/" + filename

        pm_meter_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="PM Meter Usage",
            uploaded_filename=filename,
            file=SimpleUploadedFile(name=filename,
                                    content=open(filepath, 'rb').read()),
            cycle=self.cycle)

        # Check that meters pre-upload confirmation runs without problems
        confirmation_url = reverse('api:v3:import_files-pm-meters-preview',
                                   kwargs={'pk': pm_meter_file.id})
        confirmation_url += f'?organization_id={self.org.pk}'
        self.client.get(confirmation_url)

        url = reverse("api:v3:import_files-start-save-data",
                      args=[pm_meter_file.id])
        url += f'?organization_id={self.org.pk}'
        post_params = {
            'cycle_id': self.cycle.pk,
        }
        self.client.post(url, post_params)

        # Check that Meters have been uploaded successfully (there are only 2 since only pm_property_id 5766973 exists)
        self.assertEqual(Meter.objects.count(), 2)

        # Ensure that no meters were associated to the duplicate PropertyStates via PropertyViews
        delete_flagged_ids = PropertyState.objects.filter(
            data_state=DATA_STATE_DELETE).values_list('id', flat=True)
        for meter in Meter.objects.all():
            self.assertEqual(
                meter.property.views.filter(
                    state_id__in=delete_flagged_ids).count(), 0)
Example #18
class TestMeasures(DeleteModelsTestCase):
    def setUp(self):
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.user = User.objects.create_user(**user_details)
        self.org, _, _ = create_organization(self.user)
        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)

    def test_scenario_meters(self):
        ps = FakePropertyMeasureFactory(self.org).get_property_state()

        self.assertEqual(ps.measures.count(), 5)
        self.assertEqual(ps.propertymeasure_set.count(), 5)

        # for m in ps.propertymeasure_set.all():
        #     print(m.measure)
        #     print(m.cost_mv)

        # s = Scenario.objects.create(
        #     name='Test'
        # )
        # s.property_state = ps
        # s.save()

        # create a new meter
        # s.meters.add()

    def test_copy_initial_meters_regression_1933(self):
        """This test tracks the bug from GH issue 1933
        When updating a property with a BuildingSync file, cloned meter readings were
        not being linked to cloned meters.
        """
        # -- Setup
        property_state = self.property_state_factory.get_property_state()
        source_scenario = Scenario.objects.create(
            property_state=property_state)

        # create new property, state, and view
        new_property_state = self.property_state_factory.get_property_state()
        new_property = Property.objects.create(organization_id=1)
        _ = PropertyView.objects.create(cycle_id=1,
                                        state_id=new_property_state.id,
                                        property_id=new_property.id)
        new_scenario = Scenario.objects.create(
            property_state=new_property_state)

        # create a meter and meter readings for the source
        meter = Meter.objects.create(scenario_id=source_scenario.id)
        MeterReading.objects.create(
            meter=meter,
            start_time=parse_datetime('2016-10-03T19:00:00+0200'),
            end_time=parse_datetime('2016-10-04T19:00:00+0200'),
            conversion_factor=1.0)
        self.assertEqual(
            MeterReading.objects.filter(meter_id=meter.id).count(), 1)

        # -- Act
        # call copy_initial_meters
        new_scenario.copy_initial_meters(source_scenario.id)

        # -- Assert
        new_meter = Meter.objects.filter(scenario=new_scenario,
                                         property=new_property)
        self.assertEqual(new_meter.count(), 1)
        self.assertEqual(new_meter.first().meter_readings.count(), 1)
Example #19
class TestPropertyViewAsStateSerializers(DeleteModelsTestCase):
    def setUp(self):
        self.maxDiff = None
        user_details = {
            'username': '******',
            'password': '******',
        }
        self.user = User.objects.create_superuser(email='*****@*****.**',
                                                  **user_details)
        self.org, _, _ = create_organization(self.user)
        self.audit_log_factory = FakePropertyAuditLogFactory(
            organization=self.org, user=self.user)
        self.cycle_factory = FakeCycleFactory(organization=self.org,
                                              user=self.user)
        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.property_view_factory = FakePropertyViewFactory(
            organization=self.org, user=self.user)
        self.ga_factory = FakeGreenAssessmentFactory(organization=self.org)
        self.gap_factory = FakeGreenAssessmentPropertyFactory(
            organization=self.org, user=self.user)
        self.taxlot_property_factory = FakeTaxLotPropertyFactory(
            organization=self.org, user=self.user)
        self.taxlot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)
        self.taxlot_view_factory = FakeTaxLotViewFactory(organization=self.org,
                                                         user=self.user)
        self.assessment = self.ga_factory.get_green_assessment()
        self.cycle = self.cycle_factory.get_cycle()
        self.property_state = self.property_state_factory.get_property_state()
        self.property_view = self.property_view_factory.get_property_view(
            state=self.property_state, cycle=self.cycle)
        self.taxlot_state = self.taxlot_state_factory.get_taxlot_state()
        self.taxlot_view = self.taxlot_view_factory.get_taxlot_view(
            state=self.taxlot_state, cycle=self.cycle)
        self.audit_log = self.audit_log_factory.get_property_audit_log(
            state=self.property_state,
            view=self.property_view,
            record_type=AUDIT_USER_EDIT,
            description=json.dumps(['a', 'b']))
        self.audit_log2 = self.audit_log_factory.get_property_audit_log(
            view=self.property_view)
        self.gap_data = {
            'source': 'test',
            'status': 'complete',
            'status_date': datetime.date(2017, 0o1, 0o1),
            'metric': 5,
            'version': '0.1',
            'date': datetime.date(2016, 0o1, 0o1),
            'eligibility': True,
            'assessment': self.assessment,
            'view': self.property_view,
        }
        self.urls = ['http://example.com', 'http://example.org']
        self.gap = self.gap_factory.get_green_assessment_property(
            **self.gap_data)
        self.serializer = PropertyViewAsStateSerializer(
            instance=self.property_view)

    def test_init(self):
        """Test __init__."""
        expected = PropertyAuditLogReadOnlySerializer(self.audit_log).data

        # for now convert the site_eui to a magnitude to get the test to pass
        # this really needs to be at another level
        data = self.serializer.current
        # data['state']['site_eui'] = data['state']['site_eui'].magnitude
        self.assertEqual(data, expected)

    def test_get_certifications(self):
        """Test get_certifications"""
        expected = [GreenAssessmentPropertyReadOnlySerializer(self.gap).data]
        self.assertEqual(
            self.serializer.get_certifications(self.property_view), expected)

    def test_get_changed_fields(self):
        """Test get_changed_fields"""
        expected = ['a', 'b']
        self.assertEqual(self.serializer.get_changed_fields(None), expected)

    def test_get_date_edited(self):
        """Test get_date_edited"""
        expected = self.audit_log.created.ctime()
        self.assertEqual(self.serializer.get_date_edited(None), expected)

    def test_get_filename(self):
        """Test get_filename"""
        expected = self.audit_log.import_filename
        self.assertEqual(self.serializer.get_filename(None), expected)

    def test_get_history(self):
        """Test get_history"""
        obj = mock.MagicMock()
        obj.state = self.property_state

        data = self.serializer.get_history(obj)
        # Really need to figure out how to get the serializer to save the magnitude correctly.
        # data[0]['state']['site_eui'] = data[0]['state']['site_eui'].magnitude

        expected = [PropertyAuditLogReadOnlySerializer(self.audit_log2).data]
        self.assertEqual(data, expected)

    def test_get_state(self):
        obj = mock.MagicMock()
        obj.state = self.property_state

    def test_get_source(self):
        """Test get_source"""
        expected = self.audit_log.get_record_type_display()
        self.assertEqual(self.serializer.get_source(None), expected)

    def test_get_taxlots(self):
        """Test get_taxlots"""
        self.taxlot_property_factory.get_taxlot_property(
            cycle=self.cycle,
            property_view=self.property_view,
            taxlot_view=self.taxlot_view)
        result = self.serializer.get_taxlots(self.property_view)
        self.assertEqual(result[0]['state']['id'], self.taxlot_state.id)

    @mock.patch('seed.serializers.properties.PropertyView')
    @mock.patch('seed.serializers.properties.PropertyStateWritableSerializer')
    def test_create(self, mock_serializer, mock_pview):
        """Test create"""
        mock_serializer.return_value.is_valid.return_value = True
        mock_serializer.return_value.save.return_value = self.property_state
        mock_pview.objects.create.return_value = self.property_view
        data = {'org_id': 1, 'cycle': 2, 'state': {'test': 3}, 'property': 4}

        serializer = PropertyViewAsStateSerializer()
        serializer.create(data)
        mock_serializer.assert_called_with(data={'test': 3})
        self.assertTrue(mock_serializer.return_value.save.called)
        mock_pview.objects.create.assert_called_with(state=self.property_state,
                                                     cycle_id=2,
                                                     property_id=4,
                                                     org_id=1)

    @mock.patch('seed.serializers.properties.PropertyStateWritableSerializer')
    def test_update_put(self, mock_serializer):
        """Test update with PUT"""
        mock_serializer.return_value.is_valid.return_value = True
        mock_serializer.return_value.save.return_value = self.property_state
        mock_request = mock.MagicMock()
        data = {'org_id': 1, 'cycle': 2, 'state': {'test': 3}, 'property': 4}

        serializer = PropertyViewAsStateSerializer(
            context={'request': mock_request})
        mock_request.method = 'PUT'
        serializer.update(self.property_view, data)
        mock_serializer.assert_called_with(data={'test': 3})
        self.assertTrue(mock_serializer.return_value.save.called)

    @mock.patch('seed.serializers.properties.PropertyStateWritableSerializer')
    def test_update_patch(self, mock_serializer):
        """Test update with PATCH"""
        mock_serializer.return_value.is_valid.return_value = True
        mock_serializer.return_value.save.return_value = self.property_state
        mock_request = mock.MagicMock()
        mock_request.method = 'PATCH'
        data = {'org_id': 1, 'cycle': 2, 'state': {'test': 3}, 'property': 4}
        serializer = PropertyViewAsStateSerializer(
            context={'request': mock_request})
        serializer.update(self.property_view, data)
        mock_serializer.assert_called_with(self.property_state,
                                           data={'test': 3})
        self.assertTrue(mock_serializer.return_value.save.called)
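The serializer tests above rely on unittest.mock's return_value chaining: patching the serializer class and configuring mock_serializer.return_value controls the instance that the code under test constructs. A standalone sketch of the same pattern (save_if_valid is a hypothetical stand-in for the serializer-driven code path, not SEED code):

from unittest import mock


def save_if_valid(serializer_cls, payload):
    """Construct a serializer, validate, and save -- the shape of the code under test."""
    serializer = serializer_cls(data=payload)
    if serializer.is_valid():
        return serializer.save()
    return None


if __name__ == "__main__":
    mock_serializer = mock.MagicMock()
    # calling the mocked class returns mock_serializer.return_value,
    # so configuring that object controls the "instance" behaviour
    mock_serializer.return_value.is_valid.return_value = True
    mock_serializer.return_value.save.return_value = "saved-state"

    assert save_if_valid(mock_serializer, {"test": 3}) == "saved-state"
    mock_serializer.assert_called_with(data={"test": 3})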
Example #20
class PropertyViewTests(DeleteModelsTestCase):
    def setUp(self):
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.user = User.objects.create_superuser(**user_details)
        self.org, self.org_user, _ = create_organization(self.user)
        self.column_factory = FakeColumnFactory(organization=self.org)
        self.cycle_factory = FakeCycleFactory(organization=self.org,
                                              user=self.user)
        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.property_view_factory = FakePropertyViewFactory(
            organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)
        self.cycle = self.cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=get_current_timezone()))
        self.column_list_factory = FakeColumnListSettingsFactory(
            organization=self.org)
        self.client.login(**user_details)

    def test_get_and_edit_properties(self):
        state = self.property_state_factory.get_property_state()
        prprty = self.property_factory.get_property()
        view = PropertyView.objects.create(property=prprty,
                                           cycle=self.cycle,
                                           state=state)
        params = {
            'organization_id': self.org.pk,
            'page': 1,
            'per_page': 999999999,
            'columns': COLUMNS_TO_SEND,
        }

        url = reverse('api:v2.1:properties-list') + '?cycle_id={}'.format(
            self.cycle.pk)
        response = self.client.get(url, params)
        data = json.loads(response.content)
        self.assertEqual(len(data['properties']), 1)
        result = data['properties'][0]
        self.assertEqual(result['state']['address_line_1'],
                         state.address_line_1)

        db_created_time = result['created']
        db_updated_time = result['updated']
        self.assertTrue(db_created_time is not None)
        self.assertTrue(db_updated_time is not None)

        # update the address
        new_data = {"state": {"address_line_1": "742 Evergreen Terrace"}}
        url = reverse('api:v2:properties-detail', args=[
            view.id
        ]) + '?organization_id={}'.format(self.org.pk)
        response = self.client.put(url,
                                   json.dumps(new_data),
                                   content_type='application/json')
        data = json.loads(response.content)
        self.assertEqual(data['status'], 'success')

        # the above call returns data from the PropertyState, need to get the Property --
        # call the get on the same API to retrieve it
        response = self.client.get(url, content_type='application/json')
        data = json.loads(response.content)
        # make sure the address was updated and that the datetimes were modified
        self.assertEqual(data['status'], 'success')
        self.assertEqual(data['state']['address_line_1'],
                         '742 Evergreen Terrace')
        self.assertEqual(
            datetime.strptime(db_created_time,
                              "%Y-%m-%dT%H:%M:%S.%fZ").replace(microsecond=0),
            datetime.strptime(data['property']['created'],
                              "%Y-%m-%dT%H:%M:%S.%fZ").replace(microsecond=0))
        self.assertGreater(
            datetime.strptime(data['property']['updated'],
                              "%Y-%m-%dT%H:%M:%S.%fZ"),
            datetime.strptime(db_updated_time, "%Y-%m-%dT%H:%M:%S.%fZ"))

    def test_list_properties_with_profile_id(self):
        state = self.property_state_factory.get_property_state(
            extra_data={"field_1": "value_1"})
        prprty = self.property_factory.get_property()
        PropertyView.objects.create(property=prprty,
                                    cycle=self.cycle,
                                    state=state)

        # save all the columns in the state to the database so we can setup column list settings
        Column.save_column_names(state)
        # get the columnlistsetting (default) for all columns
        columnlistsetting = self.column_list_factory.get_columnlistsettings(
            columns=['address_line_1', 'field_1'])

        params = {
            'organization_id': self.org.pk,
            'profile_id': columnlistsetting.id,
        }
        url = reverse('api:v2.1:properties-list') + '?cycle_id={}'.format(
            self.cycle.pk)
        response = self.client.get(url, params)
        data = response.json()
        self.assertEqual(len(data['properties']), 1)
        result = data['properties'][0]
        self.assertEqual(result['state']['address_line_1'],
                         state.address_line_1)
        self.assertEqual(result['state']['extra_data']['field_1'], 'value_1')
        self.assertFalse(result['state'].get('city', None))

    def test_search_identifier(self):
        self.property_view_factory.get_property_view(cycle=self.cycle,
                                                     custom_id_1='123456')
        self.property_view_factory.get_property_view(
            cycle=self.cycle, custom_id_1='987654 Long Street')
        self.property_view_factory.get_property_view(
            cycle=self.cycle, address_line_1='123 Main Street')
        self.property_view_factory.get_property_view(
            cycle=self.cycle,
            address_line_1='Hamilton Road',
            analysis_state=PropertyState.ANALYSIS_STATE_QUEUED)
        self.property_view_factory.get_property_view(
            cycle=self.cycle,
            custom_id_1='long road',
            analysis_state=PropertyState.ANALYSIS_STATE_QUEUED)

        # Typically looks like this
        # http://localhost:8000/api/v2.1/properties/?organization_id=265&cycle=219&identifier=09-IS

        # check for all items
        query_params = "?cycle={}&organization_id={}".format(
            self.cycle.pk, self.org.pk)
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        self.assertEqual(len(results), 5)

        # check for 2 items with 123
        query_params = "?cycle={}&organization_id={}&identifier={}".format(
            self.cycle.pk, self.org.pk, '123')
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        # print out the result of this when there are more than two in an attempt to catch the
        # non-deterministic part of this test
        if len(results) > 2:
            print(results)

        self.assertEqual(len(results), 2)

        # check the analysis states
        query_params = "?cycle={}&organization_id={}&analysis_state={}".format(
            self.cycle.pk, self.org.pk, 'Completed')
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        self.assertEqual(len(results), 0)

        query_params = "?cycle={}&organization_id={}&analysis_state={}".format(
            self.cycle.pk, self.org.pk, 'Not Started')
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        self.assertEqual(len(results), 3)

        query_params = "?cycle={}&organization_id={}&analysis_state={}".format(
            self.cycle.pk, self.org.pk, 'Queued')
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        self.assertEqual(len(results), 2)

        # check the combination of both the identifier and the analysis state
        query_params = "?cycle={}&organization_id={}&identifier={}&analysis_state={}".format(
            self.cycle.pk, self.org.pk, 'Long', 'Queued')
        url = reverse('api:v2.1:properties-list') + query_params
        response = self.client.get(url)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        results = result['properties']
        self.assertEqual(len(results), 1)

    def test_meters_exist(self):
        # Create a property set with meters
        state_1 = self.property_state_factory.get_property_state()
        property_1 = self.property_factory.get_property()
        PropertyView.objects.create(property=property_1,
                                    cycle=self.cycle,
                                    state=state_1)

        import_record = ImportRecord.objects.create(
            owner=self.user,
            last_modified_by=self.user,
            super_organization=self.org)
        filename = "example-GreenButton-data.xml"
        filepath = os.path.dirname(
            os.path.abspath(__file__)) + "/data/" + filename
        import_file = ImportFile.objects.create(
            import_record=import_record,
            source_type="GreenButton",
            uploaded_filename=filename,
            file=SimpleUploadedFile(name=filename,
                                    content=open(filepath, 'rb').read()),
            cycle=self.cycle,
            matching_results_data={
                "property_id": property_1.id
            }  # this is how target property is specified
        )
        gb_import_url = reverse("api:v2:import_files-save-raw-data",
                                args=[import_file.id])
        gb_import_post_params = {
            'cycle_id': self.cycle.pk,
            'organization_id': self.org.pk,
        }
        self.client.post(gb_import_url, gb_import_post_params)

        # Create a property set without meters
        state_2 = self.property_state_factory.get_property_state()
        property_2 = self.property_factory.get_property()
        PropertyView.objects.create(property=property_2,
                                    cycle=self.cycle,
                                    state=state_2)

        url = reverse('api:v2:properties-meters-exist')

        true_post_params = json.dumps(
            {'inventory_ids': [property_2.pk, property_1.pk]})
        true_result = self.client.post(url,
                                       true_post_params,
                                       content_type='application/json')
        self.assertEqual(b'true', true_result.content)

        false_post_params = json.dumps({'inventory_ids': [property_2.pk]})
        false_result = self.client.post(url,
                                        false_post_params,
                                        content_type='application/json')
        self.assertEqual(b'false', false_result.content)
Example #21
class TestOrganizationPreviewViews(DataMappingBaseTestCase):
    def setUp(self):
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file_1, self.import_record_1, self.cycle_1 = selfvars

        cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        self.cycle_2 = cycle_factory.get_cycle(name="Cycle 2")
        self.import_record_2, self.import_file_2 = self.create_import_file(
            self.user, self.org, self.cycle_2
        )

        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }

        self.client.login(**user_details)

        self.property_state_factory = FakePropertyStateFactory(organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(organization=self.org)

    def test_whole_org_match_merge_link_preview_endpoint_invalid_columns(self):
        url = reverse('api:v3:organizations-match-merge-link-preview', args=[self.org.id])
        post_params = json.dumps({
            "inventory_type": "properties",
            "add": ['DNE col 1'],
            "remove": ['DNE col 2']
        })
        raw_result = self.client.post(url, post_params, content_type='application/json')
        self.assertEqual(404, raw_result.status_code)

    def test_whole_org_match_merge_link_preview_endpoint_properties(self):
        # Cycle 1 / ImportFile 1 - Create 1 property
        base_property_details = {
            'pm_property_id': '1st Non-Match Set',
            'city': 'City 1',
            'property_name': 'Match Set',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }

        ps_1 = self.property_state_factory.get_property_state(**base_property_details)

        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)

        # Cycle 2 / ImportFile 2 - Create 1 unlinked property
        base_property_details['pm_property_id'] = '2nd Non-Match Set'
        base_property_details['property_name'] = 'Match Set'
        base_property_details['import_file_id'] = self.import_file_2.id
        ps_2 = self.property_state_factory.get_property_state(**base_property_details)

        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        match_buildings(self.import_file_2.id)

        # Check that no links exist yet
        self.assertNotEqual(ps_1.propertyview_set.first().property_id, ps_2.propertyview_set.first().property_id)

        url = reverse('api:v3:organizations-match-merge-link-preview', args=[self.org.id])
        post_params = json.dumps({
            "inventory_type": "properties",
            "add": ['property_name'],
            "remove": ['pm_property_id']
        })
        raw_result = self.client.post(url, post_params, content_type='application/json')

        # Check that links *still* don't exist
        self.assertNotEqual(ps_1.propertyview_set.first().property_id, ps_2.propertyview_set.first().property_id)

        self.assertEqual(200, raw_result.status_code)

        raw_content = json.loads(raw_result.content)

        identifier = ProgressData.from_key(raw_content['progress_key']).data['unique_id']
        result_key = "org_match_merge_link_result__%s" % identifier
        raw_summary = get_cache_raw(result_key)
        summary = {str(k): v for k, v in raw_summary.items() if v}  # ignore empty cycles

        # Check format of summary
        self.assertCountEqual([str(self.cycle_1.id), str(self.cycle_2.id)], summary.keys())

        # Check that preview shows links would be created
        self.assertEqual(summary[str(self.cycle_1.id)][0]['id'], summary[str(self.cycle_2.id)][0]['id'])

        # try to get result using results endpoint
        get_result_url = reverse('api:v3:organizations-match-merge-link-result', args=[self.org.id]) + '?match_merge_link_id=' + str(identifier)

        get_result_raw_response = self.client.get(get_result_url)
        raw_summary = json.loads(get_result_raw_response.content)

        summary = {str(k): v for k, v in raw_summary.items() if v}  # ignore empty cycles

        # Check format of summary
        self.assertCountEqual([str(self.cycle_1.id), str(self.cycle_2.id)], summary.keys())

        # Check that preview shows links would be created
        self.assertEqual(summary[str(self.cycle_1.id)][0]['id'], summary[str(self.cycle_2.id)][0]['id'])

    def test_whole_org_match_merge_link_preview_endpoint_taxlots(self):
        # Cycle 1 / ImportFile 1 - Create 1 taxlot
        base_taxlot_details = {
            'jurisdiction_tax_lot_id': '1st Non-Match Set',
            'city': 'City 1',
            'district': 'Match Set',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }

        tls_1 = self.taxlot_state_factory.get_taxlot_state(**base_taxlot_details)

        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)

        # Cycle 2 / ImportFile 2 - Create 1 unlinked taxlot
        base_taxlot_details['jurisdiction_tax_lot_id'] = '2nd Non-Match Set'
        base_taxlot_details['district'] = 'Match Set'
        base_taxlot_details['import_file_id'] = self.import_file_2.id
        tls_2 = self.taxlot_state_factory.get_taxlot_state(**base_taxlot_details)

        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        match_buildings(self.import_file_2.id)

        # Check that no links exist yet
        self.assertNotEqual(tls_1.taxlotview_set.first().taxlot_id, tls_2.taxlotview_set.first().taxlot_id)

        url = reverse('api:v3:organizations-match-merge-link-preview', args=[self.org.id])
        post_params = json.dumps({
            "inventory_type": "taxlots",
            "add": ['district'],
            "remove": ['jurisdiction_tax_lot_id']
        })
        raw_result = self.client.post(url, post_params, content_type='application/json')

        # Check that links *still* don't exist (the preview should not modify records)
        self.assertNotEqual(tls_1.taxlotview_set.first().taxlot_id, tls_2.taxlotview_set.first().taxlot_id)

        self.assertEqual(200, raw_result.status_code)

        raw_content = json.loads(raw_result.content)

        identifier = ProgressData.from_key(raw_content['progress_key']).data['unique_id']
        result_key = "org_match_merge_link_result__%s" % identifier
        raw_summary = get_cache_raw(result_key)

        summary = {str(k): v for k, v in raw_summary.items() if v}  # ignore empty cycles

        # Check format of summary
        self.assertCountEqual([str(self.cycle_1.id), str(self.cycle_2.id)], summary.keys())

        # Check that preview shows links would be created
        self.assertEqual(summary[str(self.cycle_1.id)][0]['id'], summary[str(self.cycle_2.id)][0]['id'])

        # Retrieve the same summary via the results endpoint
        get_result_url = reverse('api:v3:organizations-match-merge-link-result', args=[self.org.id]) + '?match_merge_link_id=' + str(identifier)

        get_result_raw_response = self.client.get(get_result_url)
        raw_summary = json.loads(get_result_raw_response.content)

        summary = {str(k): v for k, v in raw_summary.items() if v}  # ignore empty cycles

        # Check format of summary
        self.assertCountEqual([str(self.cycle_1.id), str(self.cycle_2.id)], summary.keys())

        # Check that preview shows links would be created
        self.assertEqual(summary[str(self.cycle_1.id)][0]['id'], summary[str(self.cycle_2.id)][0]['id'])
Example #22
    def setUp(self):
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**',
            'first_name': 'Test',
            'last_name': 'User',
        }
        self.user = User.objects.create_user(**user_details)
        self.org, self.org_user, _ = create_organization(self.user)
        self.org_b, self.org_user, _ = create_organization(self.user)
        self.client.login(**user_details)

        cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        cycle_a = cycle_factory.get_cycle(name="Cycle A")
        cycle_b = cycle_factory.get_cycle(name="Cycle B")

        property_factory = FakePropertyFactory(organization=self.org)
        self.property_a = property_factory.get_property()
        property_b = property_factory.get_property()

        property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        property_state_a = property_state_factory.get_property_state()
        property_state_b = property_state_factory.get_property_state()
        property_state_c = property_state_factory.get_property_state()
        property_state_d = property_state_factory.get_property_state()

        # create an analysis with two property views, each with the same property but a different cycle
        self.analysis_a = Analysis.objects.create(name='test a',
                                                  service=Analysis.BSYNCR,
                                                  status=Analysis.CREATING,
                                                  user=self.user,
                                                  organization=self.org)
        self.analysis_property_view_a = AnalysisPropertyView.objects.create(
            analysis=self.analysis_a,
            property=self.property_a,
            cycle=cycle_a,
            property_state=property_state_a)
        self.analysis_property_view_b = AnalysisPropertyView.objects.create(
            analysis=self.analysis_a,
            property=self.property_a,
            cycle=cycle_b,
            property_state=property_state_b)

        # create an analysis with two property views, each with the same cycle but a different property
        self.analysis_b = Analysis.objects.create(name='test b',
                                                  service=Analysis.BSYNCR,
                                                  status=Analysis.READY,
                                                  user=self.user,
                                                  organization=self.org)
        self.analysis_property_view_c = AnalysisPropertyView.objects.create(
            analysis=self.analysis_b,
            property=self.property_a,
            cycle=cycle_a,
            property_state=property_state_c)
        self.analysis_property_view_d = AnalysisPropertyView.objects.create(
            analysis=self.analysis_b,
            property=property_b,
            cycle=cycle_a,
            property_state=property_state_d)

        # create an analysis with no property views
        self.analysis_c = Analysis.objects.create(name='test c',
                                                  service=Analysis.BSYNCR,
                                                  status=Analysis.QUEUED,
                                                  user=self.user,
                                                  organization=self.org)

        # create an analysis with a different organization
        self.analysis_d = Analysis.objects.create(name='test d',
                                                  service=Analysis.BSYNCR,
                                                  status=Analysis.RUNNING,
                                                  user=self.user,
                                                  organization=self.org_b)

        # create an output file and add to 3 analysis property views
        self.analysis_output_file_a = AnalysisOutputFile.objects.create(
            file=SimpleUploadedFile('test file a', b'test file a contents'),
            content_type=AnalysisOutputFile.BUILDINGSYNC)
        self.analysis_output_file_a.analysis_property_views.add(
            self.analysis_property_view_a)
        self.analysis_output_file_a.analysis_property_views.add(
            self.analysis_property_view_b)
        self.analysis_output_file_a.analysis_property_views.add(
            self.analysis_property_view_c)

        # create an output file and add to 1 analysis property view
        self.analysis_output_file_b = AnalysisOutputFile.objects.create(
            file=SimpleUploadedFile('test file b', b'test file b contents'),
            content_type=AnalysisOutputFile.BUILDINGSYNC)
        self.analysis_output_file_b.analysis_property_views.add(
            self.analysis_property_view_a)
Example #23
class TestMatchingOutsideImportFile(DataMappingBaseTestCase):
    def setUp(self):
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file_1, self.import_record_1, self.cycle = selfvars

        self.import_record_2, self.import_file_2 = self.create_import_file(
            self.user, self.org, self.cycle)

        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)

    def test_duplicate_properties_identified(self):
        base_details = {
            'address_line_1': '123 Match Street',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create property in first ImportFile
        ps_1 = self.property_state_factory.get_property_state(**base_details)

        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)

        # Create duplicate property coming from second ImportFile
        base_details['import_file_id'] = self.import_file_2.id
        ps_2 = self.property_state_factory.get_property_state(**base_details)

        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        match_buildings(self.import_file_2.id)

        # 1 Property, 1 PropertyView, 2 PropertyStates
        self.assertEqual(Property.objects.count(), 1)
        self.assertEqual(PropertyView.objects.count(), 1)
        self.assertEqual(PropertyState.objects.count(), 2)

        # Be sure the first property is used in the -View and the second is marked for "deletion"
        self.assertEqual(PropertyView.objects.first().state_id, ps_1.id)
        self.assertEqual(
            PropertyState.objects.get(data_state=DATA_STATE_DELETE).id,
            ps_2.id)

    def test_match_properties_if_all_default_fields_match(self):
        base_details = {
            'address_line_1': '123 Match Street',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create property in first ImportFile
        ps_1 = self.property_state_factory.get_property_state(**base_details)

        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)

        # Create properties from second ImportFile, one matching existing PropertyState
        base_details['import_file_id'] = self.import_file_2.id

        base_details['city'] = 'Denver'
        ps_2 = self.property_state_factory.get_property_state(**base_details)

        base_details['pm_property_id'] = '11111'
        base_details['city'] = 'Philadelphia'
        ps_3 = self.property_state_factory.get_property_state(**base_details)

        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        match_buildings(self.import_file_2.id)

        # 2 Properties, 2 PropertyViews, 4 PropertyStates (3 imported, 1 merge result)
        self.assertEqual(Property.objects.count(), 2)
        self.assertEqual(PropertyView.objects.count(), 2)
        self.assertEqual(PropertyState.objects.count(), 4)

        cities_from_views = []
        ps_ids_from_views = []
        for pv in PropertyView.objects.all():
            cities_from_views.append(pv.state.city)
            ps_ids_from_views.append(pv.state_id)

        self.assertIn('Denver', cities_from_views)
        self.assertIn('Philadelphia', cities_from_views)

        self.assertIn(ps_3.id, ps_ids_from_views)
        self.assertNotIn(ps_1.id, ps_ids_from_views)
        self.assertNotIn(ps_2.id, ps_ids_from_views)

        # Refresh -States and check data_state and merge_state values
        rps_1 = PropertyState.objects.get(pk=ps_1.id)
        self.assertEqual(rps_1.data_state, DATA_STATE_MATCHING)
        self.assertEqual(rps_1.merge_state, MERGE_STATE_NEW)

        rps_2 = PropertyState.objects.get(pk=ps_2.id)
        self.assertEqual(rps_2.data_state, DATA_STATE_MATCHING)
        self.assertEqual(rps_2.merge_state, MERGE_STATE_UNKNOWN)

        ps_1_plus_2 = PropertyState.objects.filter(
            pm_property_id__isnull=True,
            city='Denver',
            address_line_1='123 Match Street').exclude(
                data_state=DATA_STATE_MATCHING,
                merge_state=MERGE_STATE_UNKNOWN).get()
        self.assertEqual(ps_1_plus_2.data_state, DATA_STATE_MATCHING)
        self.assertEqual(ps_1_plus_2.merge_state, MERGE_STATE_MERGED)

        rps_3 = PropertyState.objects.get(pk=ps_3.id)
        self.assertEqual(rps_3.data_state, DATA_STATE_MATCHING)
        self.assertEqual(rps_3.merge_state, MERGE_STATE_NEW)

    def test_match_properties_rolls_up_multiple_existing_matches_in_id_order_if_they_exist(
            self):
        base_details = {
            'pm_property_id': '123MatchID',
            'city': 'Golden',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create 3 non-matching properties in first ImportFile
        ps_1 = self.property_state_factory.get_property_state(**base_details)

        base_details['pm_property_id'] = '789DifferentID'
        base_details['city'] = 'Denver'
        ps_2 = self.property_state_factory.get_property_state(**base_details)

        base_details['pm_property_id'] = '1337AnotherDifferentID'
        base_details['city'] = 'Philadelphia'
        ps_3 = self.property_state_factory.get_property_state(**base_details)

        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)

        # Make all those states match
        PropertyState.objects.filter(pk__in=[ps_2.id, ps_3.id]).update(
            pm_property_id='123MatchID')

        # Verify that none of the 3 have been merged
        self.assertEqual(Property.objects.count(), 3)
        self.assertEqual(PropertyState.objects.count(), 3)
        self.assertEqual(PropertyView.objects.count(), 3)

        # Import a property that will identify the first 3 as matches.
        base_details['import_file_id'] = self.import_file_2.id
        base_details['pm_property_id'] = '123MatchID'
        del base_details['city']
        ps_4 = self.property_state_factory.get_property_state(**base_details)

        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        match_buildings(self.import_file_2.id)

        # There should only be one PropertyView which is associated to new, merged -State
        self.assertEqual(PropertyView.objects.count(), 1)
        view = PropertyView.objects.first()
        self.assertNotIn(view.state_id, [ps_1.id, ps_2.id, ps_3.id, ps_4.id])

        # The resulting -State should have city 'Philadelphia'
        self.assertEqual(view.state.city, 'Philadelphia')

        # The corresponding log should be a System Match
        audit_log = PropertyAuditLog.objects.get(state_id=view.state_id)
        self.assertEqual(audit_log.name, 'System Match')

    def test_match_taxlots_if_all_default_fields_match(self):
        base_details = {
            'address_line_1': '123 Match Street',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create taxlot in first ImportFile
        tls_1 = self.taxlot_state_factory.get_taxlot_state(**base_details)

        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)

        # Create taxlots from second ImportFile, one matching the existing TaxLotState
        base_details['import_file_id'] = self.import_file_2.id

        base_details['city'] = 'Denver'
        tls_2 = self.taxlot_state_factory.get_taxlot_state(**base_details)

        base_details['jurisdiction_tax_lot_id'] = '11111'
        base_details['city'] = 'Philadelphia'
        tls_3 = self.taxlot_state_factory.get_taxlot_state(**base_details)

        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        match_buildings(self.import_file_2.id)

        # 2 TaxLots, 2 TaxLotViews, 4 TaxLotStates (3 imported, 1 merge result)
        self.assertEqual(TaxLot.objects.count(), 2)
        self.assertEqual(TaxLotView.objects.count(), 2)
        self.assertEqual(TaxLotState.objects.count(), 4)

        cities_from_views = []
        tls_ids_from_views = []
        for tlv in TaxLotView.objects.all():
            cities_from_views.append(tlv.state.city)
            tls_ids_from_views.append(tlv.state_id)

        self.assertIn('Denver', cities_from_views)
        self.assertIn('Philadelphia', cities_from_views)

        self.assertIn(tls_3.id, tls_ids_from_views)
        self.assertNotIn(tls_1.id, tls_ids_from_views)
        self.assertNotIn(tls_2.id, tls_ids_from_views)

        # Refresh -States and check data_state and merge_state values
        rtls_1 = TaxLotState.objects.get(pk=tls_1.id)
        self.assertEqual(rtls_1.data_state, DATA_STATE_MATCHING)
        self.assertEqual(rtls_1.merge_state, MERGE_STATE_NEW)

        rtls_2 = TaxLotState.objects.get(pk=tls_2.id)
        self.assertEqual(rtls_2.data_state, DATA_STATE_MATCHING)
        self.assertEqual(rtls_2.merge_state, MERGE_STATE_UNKNOWN)

        tls_1_plus_2 = TaxLotState.objects.filter(
            jurisdiction_tax_lot_id__isnull=True,
            city='Denver',
            address_line_1='123 Match Street').exclude(
                data_state=DATA_STATE_MATCHING,
                merge_state=MERGE_STATE_UNKNOWN).get()
        self.assertEqual(tls_1_plus_2.data_state, DATA_STATE_MATCHING)
        self.assertEqual(tls_1_plus_2.merge_state, MERGE_STATE_MERGED)

        rtls_3 = TaxLotState.objects.get(pk=tls_3.id)
        self.assertEqual(rtls_3.data_state, DATA_STATE_MATCHING)
        self.assertEqual(rtls_3.merge_state, MERGE_STATE_NEW)

    def test_match_taxlots_rolls_up_multiple_existing_matches_in_id_order_if_they_exist(
            self):
        base_details = {
            'jurisdiction_tax_lot_id': '123MatchID',
            'city': 'Golden',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create 3 non-matching taxlots in first ImportFile
        tls_1 = self.taxlot_state_factory.get_taxlot_state(**base_details)

        base_details['jurisdiction_tax_lot_id'] = '789DifferentID'
        base_details['city'] = 'Denver'
        tls_2 = self.taxlot_state_factory.get_taxlot_state(**base_details)

        base_details['jurisdiction_tax_lot_id'] = '1337AnotherDifferentID'
        base_details['city'] = 'Philadelphia'
        tls_3 = self.taxlot_state_factory.get_taxlot_state(**base_details)

        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)

        # Make all those states match
        TaxLotState.objects.filter(pk__in=[tls_2.id, tls_3.id]).update(
            jurisdiction_tax_lot_id='123MatchID')

        # Verify that none of the 3 have been merged
        self.assertEqual(TaxLot.objects.count(), 3)
        self.assertEqual(TaxLotState.objects.count(), 3)
        self.assertEqual(TaxLotView.objects.count(), 3)

        # Import a taxlot that will identify the first 3 as matches.
        base_details['import_file_id'] = self.import_file_2.id
        base_details['jurisdiction_tax_lot_id'] = '123MatchID'
        del base_details['city']
        tls_4 = self.taxlot_state_factory.get_taxlot_state(**base_details)

        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        match_buildings(self.import_file_2.id)

        # There should only be one TaxLotView which is associated to new, merged -State
        self.assertEqual(TaxLotView.objects.count(), 1)
        view = TaxLotView.objects.first()
        self.assertNotIn(view.state_id,
                         [tls_1.id, tls_2.id, tls_3.id, tls_4.id])

        # The resulting -State should have city 'Philadelphia'
        self.assertEqual(view.state.city, 'Philadelphia')

        # The corresponding log should be a System Match
        audit_log = TaxLotAuditLog.objects.get(state_id=view.state_id)
        self.assertEqual(audit_log.name, 'System Match')
Example #24
class TestMatchingPostEdit(DataMappingBaseTestCase):
    def setUp(self):
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars

        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.client.login(**user_details)

        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)

    def test_match_merge_happens_after_property_edit(self):
        base_details = {
            'pm_property_id': '789DifferentID',
            'city': 'Golden',
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create 3 non-matching properties
        ps_1 = self.property_state_factory.get_property_state(**base_details)

        base_details['pm_property_id'] = '123MatchID'
        base_details['city'] = 'Denver'
        self.property_state_factory.get_property_state(**base_details)

        base_details['pm_property_id'] = '1337AnotherDifferentID'
        base_details['city'] = 'Philadelphia'
        ps_3 = self.property_state_factory.get_property_state(**base_details)

        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)

        # Edit the first property to match the second
        new_data = {"state": {"pm_property_id": "123MatchID"}}
        target_view_id = ps_1.propertyview_set.first().id
        url = reverse('api:v2:properties-detail', args=[
            target_view_id
        ]) + '?organization_id={}'.format(self.org.pk)
        raw_response = self.client.put(url,
                                       json.dumps(new_data),
                                       content_type='application/json')
        response = json.loads(raw_response.content)
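        # The edit is expected to trigger an in-cycle match-merge; the response should report
        # how many -States were merged and the id of the surviving -View.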

        self.assertEqual(response['match_merged_count'], 2)

        changed_view = PropertyView.objects.exclude(state_id=ps_3.id).get()
        self.assertEqual(response['view_id'], changed_view.id)

        # Verify that properties 1 and 2 have been merged
        self.assertEqual(Property.objects.count(), 2)
        self.assertEqual(PropertyState.objects.count(),
                         5)  # Original 3 + 1 edit + 1 merge result
        self.assertEqual(PropertyView.objects.count(), 2)

        # The resulting -State should have city 'Golden'
        self.assertEqual(changed_view.state.city, 'Golden')

        # The corresponding log should be a System Match
        audit_log = PropertyAuditLog.objects.get(
            state_id=changed_view.state_id)
        self.assertEqual(audit_log.name, 'System Match')

        # Update the edit and match-merge result -State
        new_data = {"state": {"pm_property_id": "1337AnotherDifferentID"}}
        url = reverse('api:v2:properties-detail', args=[
            changed_view.id
        ]) + '?organization_id={}'.format(self.org.pk)
        raw_response = self.client.put(url,
                                       json.dumps(new_data),
                                       content_type='application/json')
        response = json.loads(raw_response.content)

        # Verify that there's only 1 canonical Property and View left
        self.assertEqual(Property.objects.count(), 1)
        # 6 -States: 5 from the 1st round + 1 from the merge.
        # None are created during the edit since the audit log isn't named 'Import Creation'.
        self.assertEqual(PropertyState.objects.count(), 6)
        self.assertEqual(PropertyView.objects.count(), 1)
        view = PropertyView.objects.first()

        self.assertEqual(response['view_id'], view.id)

        # Check that city is still Golden, since the edited -State takes precedence
        self.assertEqual(view.state.city, 'Golden')

    def test_match_merge_happens_after_taxlot_edit(self):
        base_details = {
            'jurisdiction_tax_lot_id': '789DifferentID',
            'city': 'Golden',
            'import_file_id': self.import_file.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create 3 non-matching taxlots
        tls_1 = self.taxlot_state_factory.get_taxlot_state(**base_details)

        base_details['jurisdiction_tax_lot_id'] = '123MatchID'
        base_details['city'] = 'Denver'
        self.taxlot_state_factory.get_taxlot_state(**base_details)

        base_details['jurisdiction_tax_lot_id'] = '1337AnotherDifferentID'
        base_details['city'] = 'Philadelphia'
        tls_3 = self.taxlot_state_factory.get_taxlot_state(**base_details)

        self.import_file.mapping_done = True
        self.import_file.save()
        match_buildings(self.import_file.id)

        # Edit the first taxlot to match the second
        new_data = {"state": {"jurisdiction_tax_lot_id": "123MatchID"}}
        target_view_id = tls_1.taxlotview_set.first().id
        url = reverse('api:v2:taxlots-detail', args=[
            target_view_id
        ]) + '?organization_id={}'.format(self.org.pk)
        raw_response = self.client.put(url,
                                       json.dumps(new_data),
                                       content_type='application/json')
        response = json.loads(raw_response.content)

        changed_view = TaxLotView.objects.exclude(state_id=tls_3.id).get()
        self.assertEqual(response['view_id'], changed_view.id)

        # Verify that taxlots 1 and 2 have been merged
        self.assertEqual(TaxLot.objects.count(), 2)
        self.assertEqual(TaxLotState.objects.count(),
                         5)  # Original 3 + 1 edit + 1 merge result
        self.assertEqual(TaxLotView.objects.count(), 2)

        # The resulting -State should have city 'Golden'
        self.assertEqual(changed_view.state.city, 'Golden')

        # The corresponding log should be a System Match
        audit_log = TaxLotAuditLog.objects.get(state_id=changed_view.state_id)
        self.assertEqual(audit_log.name, 'System Match')

        # Update the edit and match-merge result -State
        new_data = {
            "state": {
                "jurisdiction_tax_lot_id": "1337AnotherDifferentID"
            }
        }
        url = reverse('api:v2:taxlots-detail', args=[
            changed_view.id
        ]) + '?organization_id={}'.format(self.org.pk)
        raw_response = self.client.put(url,
                                       json.dumps(new_data),
                                       content_type='application/json')
        response = json.loads(raw_response.content)

        self.assertEqual(response['match_merged_count'], 2)

        # Verify that there's only 1 canonical TaxLot and View left
        self.assertEqual(TaxLot.objects.count(), 1)
        # 6 -States: 5 from the 1st round + 1 from the merge.
        # None are created during the edit since the audit log isn't named 'Import Creation'.
        self.assertEqual(TaxLotState.objects.count(), 6)
        self.assertEqual(TaxLotView.objects.count(), 1)
        view = TaxLotView.objects.first()

        self.assertEqual(response['view_id'], view.id)

        # Check that city is still Golden, since the edited -State takes precedence
        self.assertEqual(view.state.city, 'Golden')
Example #25
class TestMatchingExistingViewFullOrgMatching(DataMappingBaseTestCase):
    def setUp(self):
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file_1, self.import_record_1, self.cycle_1 = selfvars

        cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        self.cycle_2 = cycle_factory.get_cycle(name="Cycle 2")
        self.import_record_2, self.import_file_2 = self.create_import_file(
            self.user, self.org, self.cycle_2)

        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)

    def test_whole_org_match_merge(self):
        """
        The set up for this test is lengthy and includes multiple Property sets
        and TaxLot sets across multiple Cycles. In this context, a "set"
        includes a -State, -View, and canonical record.

        Cycle 1 - 5 property & 5 taxlot sets - two pairs of sets match, 1 set doesn't match
        Cycle 2 - 5 property & 5 taxlot sets - 3 sets match, 2 sets have null matching fields
        """
        # Cycle 1 / ImportFile 1
        base_property_details = {
            'pm_property_id': '1st Match Set',
            'city': 'Golden',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create 5 initially non-matching properties in first Cycle
        ps_11 = self.property_state_factory.get_property_state(
            **base_property_details)

        base_property_details[
            'pm_property_id'] = 'To be updated - 1st Match Set'
        base_property_details['city'] = 'Denver'
        ps_12 = self.property_state_factory.get_property_state(
            **base_property_details)

        base_property_details['pm_property_id'] = '2nd Match Set'
        base_property_details['city'] = 'Philadelphia'
        ps_13 = self.property_state_factory.get_property_state(
            **base_property_details)

        base_property_details[
            'pm_property_id'] = 'To be updated - 2nd Match Set'
        base_property_details['city'] = 'Colorado Springs'
        ps_14 = self.property_state_factory.get_property_state(
            **base_property_details)

        base_property_details['pm_property_id'] = 'Single Unmatched'
        base_property_details['city'] = 'Grand Junction'
        ps_15 = self.property_state_factory.get_property_state(
            **base_property_details)

        base_taxlot_details = {
            'jurisdiction_tax_lot_id': '1st Match Set',
            'city': 'Golden',
            'import_file_id': self.import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create 5 initially non-matching taxlots in first Cycle
        tls_11 = self.taxlot_state_factory.get_taxlot_state(
            **base_taxlot_details)

        base_taxlot_details[
            'jurisdiction_tax_lot_id'] = 'To be updated - 1st Match Set'
        base_taxlot_details['city'] = 'Denver'
        tls_12 = self.taxlot_state_factory.get_taxlot_state(
            **base_taxlot_details)

        base_taxlot_details['jurisdiction_tax_lot_id'] = '2nd Match Set'
        base_taxlot_details['city'] = 'Philadelphia'
        tls_13 = self.taxlot_state_factory.get_taxlot_state(
            **base_taxlot_details)

        base_taxlot_details[
            'jurisdiction_tax_lot_id'] = 'To be updated - 2nd Match Set'
        base_taxlot_details['city'] = 'Colorado Springs'
        tls_14 = self.taxlot_state_factory.get_taxlot_state(
            **base_taxlot_details)

        base_taxlot_details['jurisdiction_tax_lot_id'] = 'Single Unmatched'
        base_taxlot_details['city'] = 'Grand Junction'
        tls_15 = self.taxlot_state_factory.get_taxlot_state(
            **base_taxlot_details)

        # Import file and create -Views and canonical records.
        self.import_file_1.mapping_done = True
        self.import_file_1.save()
        match_buildings(self.import_file_1.id)

        # Make some match but don't trigger matching round
        PropertyState.objects.filter(pk=ps_12.id).update(
            pm_property_id='1st Match Set')
        PropertyState.objects.filter(pk=ps_14.id).update(
            pm_property_id='2nd Match Set')
        TaxLotState.objects.filter(pk=tls_12.id).update(
            jurisdiction_tax_lot_id='1st Match Set')
        TaxLotState.objects.filter(pk=tls_14.id).update(
            jurisdiction_tax_lot_id='2nd Match Set')

        # Check all property and taxlot sets were created without match merges
        self.assertEqual(5, Property.objects.count())
        self.assertEqual(5, PropertyState.objects.count())
        self.assertEqual(5, PropertyView.objects.count())
        self.assertEqual(5, TaxLot.objects.count())
        self.assertEqual(5, TaxLotState.objects.count())
        self.assertEqual(5, TaxLotView.objects.count())

        # Cycle 2 / ImportFile 2
        base_property_details = {
            'pm_property_id': '1st Match Set',
            'city': 'Golden',
            'import_file_id': self.import_file_2.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create 5 initially non-matching properties in second Cycle
        ps_21 = self.property_state_factory.get_property_state(
            **base_property_details)

        base_property_details[
            'pm_property_id'] = 'To be updated 1 - 1st Match Set'
        base_property_details['city'] = 'Denver'
        ps_22 = self.property_state_factory.get_property_state(
            **base_property_details)

        base_property_details[
            'pm_property_id'] = 'To be updated 2 - 1st Match Set'
        base_property_details['city'] = 'Philadelphia'
        ps_23 = self.property_state_factory.get_property_state(
            **base_property_details)

        del base_property_details['pm_property_id']
        base_property_details['city'] = 'Null Fields 1'
        ps_24 = self.property_state_factory.get_property_state(
            **base_property_details)

        base_property_details['city'] = 'Null Fields 2'
        ps_25 = self.property_state_factory.get_property_state(
            **base_property_details)

        base_taxlot_details = {
            'jurisdiction_tax_lot_id': '1st Match Set',
            'city': 'Golden',
            'import_file_id': self.import_file_2.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        # Create 5 initially non-matching taxlots in second Cycle
        tls_21 = self.taxlot_state_factory.get_taxlot_state(
            **base_taxlot_details)

        base_taxlot_details[
            'jurisdiction_tax_lot_id'] = 'To be updated 1 - 1st Match Set'
        base_taxlot_details['city'] = 'Denver'
        tls_22 = self.taxlot_state_factory.get_taxlot_state(
            **base_taxlot_details)

        base_taxlot_details[
            'jurisdiction_tax_lot_id'] = 'To be updated 2 - 1st Match Set'
        base_taxlot_details['city'] = 'Philadelphia'
        tls_23 = self.taxlot_state_factory.get_taxlot_state(
            **base_taxlot_details)

        del base_taxlot_details['jurisdiction_tax_lot_id']
        base_taxlot_details['city'] = 'Null Fields 1'
        tls_24 = self.taxlot_state_factory.get_taxlot_state(
            **base_taxlot_details)

        base_taxlot_details['city'] = 'Null Fields 2'
        tls_25 = self.taxlot_state_factory.get_taxlot_state(
            **base_taxlot_details)

        # Import file and create -Views and canonical records.
        self.import_file_2.mapping_done = True
        self.import_file_2.save()
        match_buildings(self.import_file_2.id)

        # Make some match but don't trigger matching round
        PropertyState.objects.filter(pk__in=[ps_22.id, ps_23.id]).update(
            pm_property_id='1st Match Set')
        TaxLotState.objects.filter(pk__in=[tls_22.id, tls_23.id]).update(
            jurisdiction_tax_lot_id='1st Match Set')

        # Check all property and taxlot sets were created without match merges
        self.assertEqual(10, Property.objects.count())
        self.assertEqual(10, PropertyState.objects.count())
        self.assertEqual(10, PropertyView.objects.count())
        self.assertEqual(10, TaxLot.objects.count())
        self.assertEqual(10, TaxLotState.objects.count())
        self.assertEqual(10, TaxLotView.objects.count())

        # Set up complete - run method
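        # whole_org_match_merge is assumed to re-run matching across every Cycle in the
        # organization, merging matched -States within each Cycle and returning a per-model summary.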
        summary = whole_org_match_merge(self.org.id)

        # Check -View and canonical counts
        self.assertEqual(6, PropertyView.objects.count())
        self.assertEqual(6, TaxLotView.objects.count())
        self.assertEqual(6, Property.objects.count())
        self.assertEqual(6, TaxLot.objects.count())

        # For each -State model, there should be 14
        # 14 = 10 + 2 from Cycle-1 merges + 2 from Cycle-2 merges
        self.assertEqual(14, TaxLotState.objects.count())
        self.assertEqual(14, PropertyState.objects.count())

        # Check -States part of merges are no longer associated to -Views
        merged_ps_ids = [
            ps_11.id,
            ps_12.id,  # Cycle 1
            ps_13.id,
            ps_14.id,  # Cycle 1
            ps_21.id,
            ps_22.id,
            ps_23.id  # Cycle 2
        ]
        self.assertFalse(
            PropertyView.objects.filter(state_id__in=merged_ps_ids).exists())

        merged_tls_ids = [
            tls_11.id,
            tls_12.id,  # Cycle 1
            tls_13.id,
            tls_14.id,  # Cycle 1
            tls_21.id,
            tls_22.id,
            tls_23.id  # Cycle 2
        ]
        self.assertFalse(
            TaxLotView.objects.filter(state_id__in=merged_tls_ids).exists())

        # Check -States NOT part of merges are still associated to -Views
        self.assertTrue(PropertyView.objects.filter(state_id=ps_15.id).exists())
        self.assertTrue(PropertyView.objects.filter(state_id=ps_24.id).exists())
        self.assertTrue(PropertyView.objects.filter(state_id=ps_25.id).exists())
        self.assertTrue(TaxLotView.objects.filter(state_id=tls_15.id).exists())
        self.assertTrue(TaxLotView.objects.filter(state_id=tls_24.id).exists())
        self.assertTrue(TaxLotView.objects.filter(state_id=tls_25.id).exists())

        # Check Merges occurred correctly, with priority given to newer -States as evidenced by 'city' values
        cycle_1_pviews = PropertyView.objects.filter(cycle_id=self.cycle_1.id)
        cycle_1_pstates = PropertyState.objects.filter(
            pk__in=Subquery(cycle_1_pviews.values('state_id')))

        self.assertEqual(3, cycle_1_pstates.count())
        self.assertEqual(1, cycle_1_pstates.filter(city='Denver').count())
        self.assertEqual(
            1,
            cycle_1_pstates.filter(city='Colorado Springs').count())
        self.assertEqual(1,
                         cycle_1_pstates.filter(city='Grand Junction').count())

        cycle_2_pviews = PropertyView.objects.filter(cycle_id=self.cycle_2.id)
        cycle_2_pstates = PropertyState.objects.filter(
            pk__in=Subquery(cycle_2_pviews.values('state_id')))

        self.assertEqual(3, cycle_2_pstates.count())
        self.assertEqual(1,
                         cycle_2_pstates.filter(city='Philadelphia').count())
        self.assertEqual(1,
                         cycle_2_pstates.filter(city='Null Fields 1').count())
        self.assertEqual(1,
                         cycle_2_pstates.filter(city='Null Fields 2').count())

        cycle_1_tlviews = TaxLotView.objects.filter(cycle_id=self.cycle_1.id)
        cycle_1_tlstates = TaxLotState.objects.filter(
            pk__in=Subquery(cycle_1_tlviews.values('state_id')))

        self.assertEqual(3, cycle_1_tlstates.count())
        self.assertEqual(1, cycle_1_tlstates.filter(city='Denver').count())
        self.assertEqual(
            1,
            cycle_1_tlstates.filter(city='Colorado Springs').count())
        self.assertEqual(
            1,
            cycle_1_tlstates.filter(city='Grand Junction').count())

        cycle_2_tlviews = TaxLotView.objects.filter(cycle_id=self.cycle_2.id)
        cycle_2_tlstates = TaxLotState.objects.filter(
            pk__in=Subquery(cycle_2_tlviews.values('state_id')))

        self.assertEqual(3, cycle_2_tlstates.count())
        self.assertEqual(1,
                         cycle_2_tlstates.filter(city='Philadelphia').count())
        self.assertEqual(1,
                         cycle_2_tlstates.filter(city='Null Fields 1').count())
        self.assertEqual(1,
                         cycle_2_tlstates.filter(city='Null Fields 2').count())

        # Finally, check method returned expected summary
        expected_summary = {
            'PropertyState': {
                'merged_count':
                7,
                'new_merged_state_ids': [
                    cycle_1_pstates.filter(city='Denver').get().id,
                    cycle_1_pstates.filter(city='Colorado Springs').get().id,
                    cycle_2_pstates.filter(city='Philadelphia').get().id,
                ]
            },
            'TaxLotState': {
                'merged_count':
                7,
                'new_merged_state_ids': [
                    cycle_1_tlstates.filter(city='Denver').get().id,
                    cycle_1_tlstates.filter(city='Colorado Springs').get().id,
                    cycle_2_tlstates.filter(city='Philadelphia').get().id,
                ]
            },
        }

        self.assertEqual(summary['PropertyState']['merged_count'],
                         expected_summary['PropertyState']['merged_count'])
        self.assertEqual(summary['TaxLotState']['merged_count'],
                         expected_summary['TaxLotState']['merged_count'])

        self.assertCountEqual(
            summary['PropertyState']['new_merged_state_ids'],
            expected_summary['PropertyState']['new_merged_state_ids'])
        self.assertCountEqual(
            summary['TaxLotState']['new_merged_state_ids'],
            expected_summary['TaxLotState']['new_merged_state_ids'])
Example #26
class DataQualityCheckTests(DataMappingBaseTestCase):
    def setUp(self):
        selfvars = self.set_up(ASSESSED_RAW)

        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars

        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(organization=self.org)

    def test_default_create(self):
        dq = DataQualityCheck.retrieve(self.org.id)
        self.assertEqual(dq.rules.count(), 22)
        # Example rule to check
        ex_rule = {
            'table_name': 'PropertyState',
            'field': 'conditioned_floor_area',
            'data_type': Rule.TYPE_AREA,
            'rule_type': Rule.RULE_TYPE_DEFAULT,
            'min': 0,
            'max': 7000000,
            'severity': Rule.SEVERITY_ERROR,
            'units': 'ft**2',
        }

        rule = Rule.objects.filter(
            table_name='PropertyState', field='conditioned_floor_area', severity=Rule.SEVERITY_ERROR
        )
        self.assertDictContainsSubset(ex_rule, model_to_dict(rule.first()))

    def test_remove_rules(self):
        dq = DataQualityCheck.retrieve(self.org.id)
        self.assertEqual(dq.rules.count(), 22)
        dq.remove_all_rules()
        self.assertEqual(dq.rules.count(), 0)

    def test_add_custom_rule(self):
        dq = DataQualityCheck.retrieve(self.org.id)
        dq.remove_all_rules()

        ex_rule = {
            'table_name': 'PropertyState',
            'field': 'some_floor_area',
            'data_type': Rule.TYPE_AREA,
            'rule_type': Rule.RULE_TYPE_DEFAULT,
            'min': 8760,
            'max': 525600,
            'severity': Rule.SEVERITY_ERROR,
            'units': 'm**2',
        }

        dq.add_rule(ex_rule)
        self.assertEqual(dq.rules.count(), 1)
        self.assertDictContainsSubset(ex_rule, model_to_dict(dq.rules.first()))

    def test_add_custom_rule_exception(self):
        dq = DataQualityCheck.retrieve(self.org.id)
        dq.remove_all_rules()

        ex_rule = {
            'table_name_does_not_exist': 'PropertyState',
        }

        with self.assertRaises(Exception) as exc:
            dq.add_rule(ex_rule)
        self.assertEqual(
            str(exc.exception),
            "Rule data is not defined correctly: Rule() got an unexpected keyword argument 'table_name_does_not_exist'"
        )

    def test_check_property_state_example_data(self):
        dq = DataQualityCheck.retrieve(self.org.id)
        ps_data = {
            'no_default_data': True,
            'custom_id_1': 'abcd',
            'address_line_1': '742 Evergreen Terrace',
            'pm_property_id': 'PMID',
            'site_eui': 525600,
        }
        ps = self.property_state_factory.get_property_state(None, **ps_data)

        dq.check_data(ps.__class__.__name__, [ps])

        # dq.results has the following shape (keyed by the -State id):
        # {
        #     11: {
        #         'id': 11,
        #         'custom_id_1': 'abcd',
        #         'pm_property_id': 'PMID',
        #         'address_line_1': '742 Evergreen Terrace',
        #         'data_quality_results': [
        #             {
        #                 'severity': 'error',
        #                 'value': '525600',
        #                 'field': 'site_eui',
        #                 'table_name': 'PropertyState',
        #                 'message': 'Site EUI out of range',
        #                 'detailed_message': 'Site EUI [525600] > 1000',
        #                 'formatted_field': 'Site EUI',
        #             }
        #         ]
        #     }
        # }
        error_found = False
        for index, row in dq.results.items():
            self.assertEqual(row['custom_id_1'], 'abcd')
            self.assertEqual(row['pm_property_id'], 'PMID')
            self.assertEqual(row['address_line_1'], '742 Evergreen Terrace')
            for violation in row['data_quality_results']:
                if violation['message'] == 'Site EUI out of range':
                    error_found = True
                    self.assertEqual(violation['detailed_message'], 'Site EUI [525600] > 1000')

        self.assertEqual(error_found, True)

    def test_check_property_state_example_data_with_labels(self):
        dq = DataQualityCheck.retrieve(self.org.id)

        # Create labels and apply them to the rules being triggered later
        site_eui_label = StatusLabel.objects.create(name='Check Site EUI', super_organization=self.org)
        site_eui_rule = dq.rules.get(table_name='PropertyState', field='site_eui', max='1000')
        site_eui_rule.status_label = site_eui_label
        site_eui_rule.save()

        year_built_label = StatusLabel.objects.create(name='Check Year Built', super_organization=self.org)
        year_built_rule = dq.rules.get(table_name='PropertyState', field='year_built')
        year_built_rule.status_label = year_built_label
        year_built_rule.save()

        # Create state and associate it to view
        ps_data = {
            'no_default_data': True,
            'custom_id_1': 'abcd',
            'address_line_1': '742 Evergreen Terrace',
            'pm_property_id': 'PMID',
            'site_eui': 525600,
            'year_built': 1699,
        }
        ps = self.property_state_factory.get_property_state(None, **ps_data)
        property = self.property_factory.get_property()
        PropertyView.objects.create(
            property=property, cycle=self.cycle, state=ps
        )

        dq.check_data(ps.__class__.__name__, [ps])
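        # Each triggered rule is expected to attach its status_label's name to the corresponding
        # violation entry, so both labels should show up in the results below.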

        dq_results = dq.results[ps.id]['data_quality_results']
        labels = [r['label'] for r in dq_results]

        self.assertCountEqual(['Check Site EUI', 'Check Year Built'], labels)

    def test_text_match(self):
        dq = DataQualityCheck.retrieve(self.org.id)
        dq.remove_all_rules()
        new_rule = {
            'table_name': 'PropertyState',
            'field': 'address_line_1',
            'data_type': Rule.TYPE_STRING,
            'rule_type': Rule.RULE_TYPE_DEFAULT,
            'severity': Rule.SEVERITY_ERROR,
            'not_null': True,
            'text_match': 742,
        }
        dq.add_rule(new_rule)
        ps_data = {
            'no_default_data': True,
            'custom_id_1': 'abcd',
            'address_line_1': '742 Evergreen Terrace',
            'pm_property_id': 'PMID',
            'site_eui': 525600,
        }
        ps = self.property_state_factory.get_property_state(None, **ps_data)
        dq.check_data(ps.__class__.__name__, [ps])
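        # '742 Evergreen Terrace' contains the text_match value ('742'), so no violations are expected.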
        self.assertEqual(dq.results, {})

    def test_str_to_data_type_string(self):
        rule = Rule.objects.create(name='str_rule', data_type=Rule.TYPE_STRING)
        self.assertEqual(rule.str_to_data_type(' '), '')
        self.assertEqual(rule.str_to_data_type(None), None)
        self.assertEqual(rule.str_to_data_type(27.5), 27.5)

    def test_str_to_data_type_float(self):
        rule = Rule.objects.create(name='flt_rule', data_type=Rule.TYPE_NUMBER)
        self.assertEqual(rule.str_to_data_type('   '), None)
        self.assertEqual(rule.str_to_data_type(None), None)
        self.assertEqual(rule.str_to_data_type(27.5), 27.5)
        with self.assertRaises(DataQualityTypeCastError):
            self.assertEqual(rule.str_to_data_type('not-a-number'), '')

    def test_str_to_data_type_date(self):
        rule = Rule.objects.create(name='date_rule', data_type=Rule.TYPE_DATE)
        d = rule.str_to_data_type('07/04/2000 08:55:30')
        self.assertEqual(d.strftime("%Y-%m-%d %H  %M  %S"), '2000-07-04 08  55  30')
        self.assertEqual(rule.str_to_data_type(None), None)
        self.assertEqual(rule.str_to_data_type(27.5), 27.5)  # floats should return float

    def test_str_to_data_type_year(self):
        rule = Rule.objects.create(name='year_rule', data_type=Rule.TYPE_YEAR)
        d = rule.str_to_data_type('07/04/2000')
        self.assertEqual(d.strftime("%Y-%m-%d"), '2000-07-04')
        self.assertEqual(rule.str_to_data_type(None), None)
        self.assertEqual(rule.str_to_data_type(27.5), 27.5)  # floats should return float

    def test_min_value(self):
        rule = Rule.objects.create(name='min_str_rule', data_type=Rule.TYPE_NUMBER, min=0.5)
        self.assertTrue(rule.minimum_valid(1000))
        self.assertTrue(rule.minimum_valid('1000'))
        self.assertFalse(rule.minimum_valid(0.1))
        self.assertFalse(rule.minimum_valid('0.1'))
        with self.assertRaises(DataQualityTypeCastError):
            self.assertEqual(rule.minimum_valid('not-a-number'), '')

    def test_max_value(self):
        rule = Rule.objects.create(name='max_str_rule', data_type=Rule.TYPE_NUMBER, max=1000)
        self.assertTrue(rule.maximum_valid(0.1))
        self.assertTrue(rule.maximum_valid('0.1'))
        self.assertFalse(rule.maximum_valid(9999))
        self.assertFalse(rule.maximum_valid('9999'))
        with self.assertRaises(DataQualityTypeCastError):
            self.assertEqual(rule.maximum_valid('not-a-number'), '')

    def test_min_value_quantities(self):
        rule = Rule.objects.create(name='min_str_rule', data_type=Rule.TYPE_EUI, min=10, max=100, units='kBtu/ft**2/year')
        self.assertTrue(rule.minimum_valid(15))
        self.assertTrue(rule.minimum_valid('15'))
        self.assertTrue(rule.maximum_valid(15))
        self.assertTrue(rule.maximum_valid('15'))
        self.assertFalse(rule.minimum_valid(5))
        self.assertFalse(rule.minimum_valid('5'))
        self.assertFalse(rule.maximum_valid(150))
        self.assertFalse(rule.maximum_valid('150'))

        # All of these should fail: after unit conversion the first two fall below the
        # minimum of 10 kBtu/ft**2/year (e.g. 5 kBtu/m**2/year =~ 0.5 kBtu/ft**2/year)
        # and the last two exceed the maximum of 100 kBtu/ft**2/year.
        self.assertFalse(rule.minimum_valid(ureg.Quantity(5, "kBtu/ft**2/year")))
        self.assertFalse(rule.minimum_valid(ureg.Quantity(5, "kBtu/m**2/year")))  # ~ 0.5 kbtu/ft2/year
        self.assertFalse(rule.maximum_valid(ureg.Quantity(110, "kBtu/ft**2/year")))
        self.assertFalse(rule.maximum_valid(ureg.Quantity(1100, "kBtu/m**2/year")))  # ~ 102.2 kbtu/ft2/year

        # these should all pass
        self.assertTrue(rule.minimum_valid(ureg.Quantity(10, "kBtu/ft**2/year")))
        self.assertTrue(rule.minimum_valid(ureg.Quantity(110, "kBtu/m**2/year")))  # 10.22 kbtu/ft2/year

        # test the rule with different units
        rule = Rule.objects.create(name='min_str_rule', data_type=Rule.TYPE_EUI, min=10, max=100, units='kBtu/m**2/year')
        self.assertFalse(rule.minimum_valid(ureg.Quantity(0.05, "kBtu/ft**2/year")))  # ~ 0.538 kbtu/m2/year
        self.assertFalse(rule.maximum_valid(ureg.Quantity(15, "kBtu/ft**2/year")))  # ~ 161 kbtu/m2/year
        self.assertFalse(rule.minimum_valid(ureg.Quantity(5, "kBtu/m**2/year")))
        self.assertFalse(rule.maximum_valid(ureg.Quantity(110, "kBtu/m**2/year")))

    def test_incorrect_pint_unit_conversions(self):
        rule = Rule.objects.create(name='min_str_rule', data_type=Rule.TYPE_EUI, min=10, max=100, units='ft**2')
        # this should error out nicely
        with self.assertRaises(UnitMismatchError):
            self.assertFalse(rule.minimum_valid(ureg.Quantity(5, "kBtu/ft**2/year")))

        with self.assertRaises(UnitMismatchError):
            self.assertFalse(rule.maximum_valid(ureg.Quantity(5, "kBtu/ft**2/year")))
Example #27
class PropertyUnmergeViewTests(DeleteModelsTestCase):
    def setUp(self):
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.user = User.objects.create_superuser(**user_details)
        self.org, self.org_user, _ = create_organization(self.user)

        cycle_factory = FakeCycleFactory(organization=self.org, user=self.user)
        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)

        self.cycle = cycle_factory.get_cycle(
            start=datetime(2010, 10, 10, tzinfo=get_current_timezone()))
        self.client.login(**user_details)

        self.state_1 = self.property_state_factory.get_property_state(
            address_line_1='1 property state',
            pm_property_id='5766973'  # this allows the Property to be targeted for PM meter additions
        )
        self.property_1 = self.property_factory.get_property()
        PropertyView.objects.create(property=self.property_1,
                                    cycle=self.cycle,
                                    state=self.state_1)

        self.state_2 = self.property_state_factory.get_property_state(
            address_line_1='2 property state')
        self.property_2 = self.property_factory.get_property()
        PropertyView.objects.create(property=self.property_2,
                                    cycle=self.cycle,
                                    state=self.state_2)

        self.import_record = ImportRecord.objects.create(
            owner=self.user,
            last_modified_by=self.user,
            super_organization=self.org)

        # Import GreenButton data to give one of the properties a meter with 2 readings
        gb_filename = "example-GreenButton-data.xml"
        filepath = os.path.dirname(
            os.path.abspath(__file__)) + "/data/" + gb_filename
        gb_import_file = ImportFile.objects.create(
            import_record=self.import_record,
            source_type="GreenButton",
            uploaded_filename=gb_filename,
            file=SimpleUploadedFile(name=gb_filename,
                                    content=open(filepath, 'rb').read()),
            cycle=self.cycle,
            matching_results_data={
                "property_id": self.property_1.id
            }  # this is how target property is specified
        )
        gb_import_url = reverse("api:v2:import_files-save-raw-data",
                                args=[gb_import_file.id])
        gb_import_post_params = {
            'cycle_id': self.cycle.pk,
            'organization_id': self.org.pk,
        }
        self.client.post(gb_import_url, gb_import_post_params)
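        # The save-raw-data call is assumed to parse the GreenButton XML and attach the resulting
        # meter and its readings to property_1 (targeted via matching_results_data above).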

        # Merge the properties
        url = reverse('api:v2:properties-merge') + '?organization_id={}'.format(self.org.pk)
        post_params = json.dumps({
            'state_ids': [self.state_2.pk,
                          self.state_1.pk]  # priority given to state_1
        })
        self.client.post(url, post_params, content_type='application/json')
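        # Merging collapses the two -Views into a single -View on one canonical Property,
        # which (presumably) also carries over the meter imported above.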

    def test_unmerging_two_properties_with_meters_gives_meters_to_both_of_the_resulting_records(
            self):
        # Unmerge the properties
        view_id = PropertyView.objects.first().id  # There's only one PropertyView
        url = reverse('api:v2:properties-unmerge', args=[
            view_id
        ]) + '?organization_id={}'.format(self.org.pk)
        self.client.post(url, content_type='application/json')

        # Verify 2 -Views now exist
        self.assertEqual(PropertyView.objects.count(), 2)

        # Check that the meters and readings of each -View exist and verify they are identical.
        reading_sets = []
        for view in PropertyView.objects.all():
            self.assertEqual(view.property.meters.count(), 1)
            self.assertEqual(
                view.property.meters.first().meter_readings.count(), 2)
            reading_sets.append([{
                'start_time': reading.start_time,
                'end_time': reading.end_time,
                'reading': reading.reading,
                'source_unit': reading.source_unit,
                'conversion_factor': reading.conversion_factor,
            } for reading in view.property.meters.first().meter_readings.all().order_by('start_time')])

        self.assertEqual(reading_sets[0], reading_sets[1])
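As a side note, the per-view reading comparison above can also be expressed with the Django ORM's values() queryset method, which returns dictionaries directly instead of building them by hand. A minimal sketch, assuming the same models and test context as in the test above (PropertyView records with attached meters and readings), not part of the original test:

reading_fields = ['start_time', 'end_time', 'reading', 'source_unit', 'conversion_factor']

# Collect each view's readings (ordered by start_time) as a list of dicts and compare them.
reading_sets = [
    list(view.property.meters.first().meter_readings
             .order_by('start_time')
             .values(*reading_fields))
    for view in PropertyView.objects.all()
]
assert reading_sets[0] == reading_sets[1]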
Example #28
class DataQualityCheckTests(DataMappingBaseTestCase):
    def setUp(self):
        selfvars = self.set_up(ASSESSED_RAW)

        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars

        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(organization=self.org)

    def test_default_create(self):
        dq = DataQualityCheck.retrieve(self.org.id)
        self.assertEqual(dq.rules.count(), 22)
        # Example rule to check
        ex_rule = {
            'table_name': 'PropertyState',
            'field': 'conditioned_floor_area',
            'data_type': Rule.TYPE_AREA,
            'rule_type': Rule.RULE_TYPE_DEFAULT,
            'min': 0,
            'max': 7000000,
            'severity': Rule.SEVERITY_ERROR,
            'units': 'ft**2',
        }

        rule = Rule.objects.filter(
            table_name='PropertyState', field='conditioned_floor_area', severity=Rule.SEVERITY_ERROR
        )
        self.assertDictContainsSubset(ex_rule, model_to_dict(rule.first()))

    def test_remove_rules(self):
        dq = DataQualityCheck.retrieve(self.org.id)
        self.assertEqual(dq.rules.count(), 22)
        dq.remove_all_rules()
        self.assertEqual(dq.rules.count(), 0)

    def test_add_custom_rule(self):
        dq = DataQualityCheck.retrieve(self.org.id)
        dq.remove_all_rules()

        ex_rule = {
            'table_name': 'PropertyState',
            'field': 'some_floor_area',
            'data_type': Rule.TYPE_AREA,
            'rule_type': Rule.RULE_TYPE_DEFAULT,
            'min': 8760,
            'max': 525600,
            'severity': Rule.SEVERITY_ERROR,
            'units': 'm**2',
        }

        dq.add_rule(ex_rule)
        self.assertEqual(dq.rules.count(), 1)
        self.assertDictContainsSubset(ex_rule, model_to_dict(dq.rules.first()))

    def test_add_custom_rule_exception(self):
        dq = DataQualityCheck.retrieve(self.org.id)
        dq.remove_all_rules()

        ex_rule = {
            'table_name_does_not_exist': 'PropertyState',
        }

        with self.assertRaises(Exception) as exc:
            dq.add_rule(ex_rule)
        self.assertEqual(
            str(exc.exception),
            "Rule data is not defined correctly: Rule() got an unexpected keyword argument 'table_name_does_not_exist'"
        )

    def test_check_property_state_example_data(self):
        """Trigger 5 rules - 2 default and 3 custom rules - one of each condition type"""
        ps_data = {
            'no_default_data': True,
            'custom_id_1': 'abcd',
            'pm_property_id': 'PMID',
            'site_eui': 525600,
        }
        ps = self.property_state_factory.get_property_state(None, **ps_data)

        # Add 3 additional rules to the default set
        dq = DataQualityCheck.retrieve(self.org.id)
        rule_info = {
            'field': 'custom_id_1',
            'table_name': 'PropertyState',
            'enabled': True,
            'data_type': Rule.TYPE_STRING,
            'rule_type': Rule.RULE_TYPE_DEFAULT,
            'condition': Rule.RULE_INCLUDE,
            'required': False,
            'not_null': False,
            'min': None,
            'max': None,
            'text_match': 'zzzzzzzzz',
            'severity': Rule.SEVERITY_ERROR,
            'units': "",
        }
        dq.add_rule(rule_info)

        rule_info['field'] = 'pm_property_id'
        rule_info['condition'] = Rule.RULE_EXCLUDE
        rule_info['text_match'] = 'PMID'
        dq.add_rule(rule_info)

        rule_info['field'] = 'address_line_2'
        rule_info['condition'] = Rule.RULE_REQUIRED
        dq.add_rule(rule_info)

        # Run DQ check and test that each rule was triggered
        dq.check_data(ps.__class__.__name__, [ps])

        # dq.results is keyed by record id and looks roughly like:
        # {
        #     11: {
        #         'id': 11,
        #         'custom_id_1': 'abcd',
        #         'pm_property_id': 'PMID',
        #         'data_quality_results': [
        #             {
        #                 'severity': 'error', 'value': '525600', 'field': 'site_eui',
        #                 'table_name': 'PropertyState', 'message': 'Site EUI out of range',
        #                 'detailed_message': 'Site EUI [525600] > 1000', 'formatted_field': 'Site EUI',
        #                 ...
        #             }
        #         ]
        #     }
        # }
        record_results = dq.results[ps.id]
        self.assertEqual(record_results['custom_id_1'], 'abcd')
        self.assertEqual(record_results['pm_property_id'], 'PMID')

        violation_fields = []
        for violation in record_results['data_quality_results']:
            field = violation['field']

            if field == 'address_line_1':
                self.assertEqual(violation['detailed_message'], 'Address Line 1 is null')
            elif field == 'address_line_2':
                self.assertEqual(violation['detailed_message'], 'Address Line 2 is required but is None')
            elif field == 'custom_id_1':
                self.assertEqual(violation['detailed_message'], 'Custom ID 1 [abcd] does not contain "zzzzzzzzz"')
            elif field == 'pm_property_id':
                self.assertEqual(violation['detailed_message'], 'PM Property ID [PMID] contains "PMID"')
            elif field == 'site_eui':
                self.assertEqual(violation['detailed_message'], 'Site EUI [525600] > 1000')
            else:  # we should have hit one of the cases above
                self.fail('invalid "field" provided')

            violation_fields.append(field)

        expected_fields = [
            'address_line_1',
            'address_line_2',
            'custom_id_1',
            'pm_property_id',
            'site_eui',
        ]
        self.assertCountEqual(expected_fields, violation_fields)

    def test_check_example_with_extra_data_fields(self):
        """Trigger 5 ED rules - 2 default and 3 custom rules - one of each condition type"""
        ps_data = {
            'no_default_data': True,
            'custom_id_1': 'abcd',
            'extra_data': {
                'range_and_out_of_range': 1,
                'include_and_doesnt': 'aaaaa',
                'exclude_and_does': 'foo',
            }
        }
        ps = self.property_state_factory.get_property_state(None, **ps_data)

        # Create 5 Column objects that correspond to the 5 ED rules below, since rules don't get
        # checked for anything other than REQUIRED if they don't have a corresponding Column object
        column_names = [
            'required_and_missing',
            'not_null_and_missing',
            'range_and_out_of_range',
            'include_and_doesnt',
            'exclude_and_does'
        ]
        for col_name in column_names:
            Column.objects.create(
                column_name=col_name,
                table_name='PropertyState',
                organization=self.org,
                is_extra_data=True,
            )

        dq = DataQualityCheck.retrieve(self.org.id)
        dq.remove_all_rules()
        rule_info = {
            'field': 'required_and_missing',
            'table_name': 'PropertyState',
            'enabled': True,
            'data_type': Rule.TYPE_STRING,
            'rule_type': Rule.RULE_TYPE_DEFAULT,
            'condition': Rule.RULE_REQUIRED,
            'required': False,
            'not_null': False,
            'min': None,
            'max': None,
            'text_match': None,
            'severity': Rule.SEVERITY_ERROR,
            'units': "",
        }
        dq.add_rule(rule_info)

        rule_info['field'] = 'not_null_and_missing'
        rule_info['condition'] = Rule.RULE_NOT_NULL
        dq.add_rule(rule_info)

        rule_info['field'] = 'range_and_out_of_range'
        rule_info['condition'] = Rule.RULE_RANGE
        rule_info['min'] = 100
        dq.add_rule(rule_info)

        rule_info['field'] = 'include_and_doesnt'
        rule_info['condition'] = Rule.RULE_INCLUDE
        rule_info['text_match'] = 'zzzzzzzzz'
        dq.add_rule(rule_info)

        rule_info['field'] = 'exclude_and_does'
        rule_info['condition'] = Rule.RULE_EXCLUDE
        rule_info['text_match'] = 'foo'
        dq.add_rule(rule_info)

        # Run DQ check and test that each rule was triggered
        dq.check_data(ps.__class__.__name__, [ps])
        record_results = dq.results[ps.id]

        violation_fields = []
        for violation in record_results['data_quality_results']:
            field = violation['field']

            if field == 'required_and_missing':
                self.assertEqual(violation['detailed_message'], 'required_and_missing is required but is None')
            elif field == 'not_null_and_missing':
                self.assertEqual(violation['detailed_message'], 'not_null_and_missing is null')
            elif field == 'range_and_out_of_range':
                self.assertEqual(violation['detailed_message'], 'range_and_out_of_range [1] < 100')
            elif field == 'include_and_doesnt':
                self.assertEqual(violation['detailed_message'], 'include_and_doesnt [aaaaa] does not contain "zzzzzzzzz"')
            elif field == 'exclude_and_does':
                self.assertEqual(violation['detailed_message'], 'exclude_and_does [foo] contains "foo"')
            else:  # we should have hit one of the cases above
                self.fail('invalid "field" provided')

            violation_fields.append(field)

        self.assertCountEqual(column_names, violation_fields)

    def test_check_property_state_example_data_with_labels(self):
        dq = DataQualityCheck.retrieve(self.org.id)

        # Create labels and apply them to the rules being triggered later
        site_eui_label = StatusLabel.objects.create(name='Check Site EUI', super_organization=self.org)
        site_eui_rule = dq.rules.get(table_name='PropertyState', field='site_eui', max='1000')
        site_eui_rule.status_label = site_eui_label
        site_eui_rule.save()

        year_built_label = StatusLabel.objects.create(name='Check Year Built', super_organization=self.org)
        year_built_rule = dq.rules.get(table_name='PropertyState', field='year_built')
        year_built_rule.status_label = year_built_label
        year_built_rule.save()

        # Create state and associate it to view
        ps_data = {
            'no_default_data': True,
            'custom_id_1': 'abcd',
            'address_line_1': '742 Evergreen Terrace',
            'pm_property_id': 'PMID',
            'site_eui': 525600,
            'year_built': 1699,
        }
        ps = self.property_state_factory.get_property_state(None, **ps_data)
        property = self.property_factory.get_property()
        PropertyView.objects.create(
            property=property, cycle=self.cycle, state=ps
        )

        dq.check_data(ps.__class__.__name__, [ps])

        dq_results = dq.results[ps.id]['data_quality_results']
        labels = [r['label'] for r in dq_results]
        self.assertCountEqual(['Check Site EUI', 'Check Year Built'], labels)

    def test_text_match(self):
        dq = DataQualityCheck.retrieve(self.org.id)
        dq.remove_all_rules()
        new_rule = {
            'table_name': 'PropertyState',
            'field': 'address_line_1',
            'data_type': Rule.TYPE_STRING,
            'rule_type': Rule.RULE_TYPE_DEFAULT,
            'severity': Rule.SEVERITY_ERROR,
            'not_null': True,
            'text_match': 742,
        }
        dq.add_rule(new_rule)
        ps_data = {
            'no_default_data': True,
            'custom_id_1': 'abcd',
            'address_line_1': '742 Evergreen Terrace',
            'pm_property_id': 'PMID',
            'site_eui': 525600,
        }
        ps = self.property_state_factory.get_property_state(None, **ps_data)
        dq.check_data(ps.__class__.__name__, [ps])
        self.assertEqual(dq.results, {})

    def test_str_to_data_type_string(self):
        rule = Rule.objects.create(name='str_rule', data_type=Rule.TYPE_STRING)
        self.assertEqual(rule.str_to_data_type(' '), '')
        self.assertEqual(rule.str_to_data_type(None), None)
        self.assertEqual(rule.str_to_data_type(27.5), 27.5)

    def test_str_to_data_type_float(self):
        rule = Rule.objects.create(name='flt_rule', data_type=Rule.TYPE_NUMBER)
        self.assertEqual(rule.str_to_data_type('   '), None)
        self.assertEqual(rule.str_to_data_type(None), None)
        self.assertEqual(rule.str_to_data_type(27.5), 27.5)
        with self.assertRaises(DataQualityTypeCastError):
            self.assertEqual(rule.str_to_data_type('not-a-number'), '')

    def test_str_to_data_type_date(self):
        rule = Rule.objects.create(name='date_rule', data_type=Rule.TYPE_DATE)
        d = rule.str_to_data_type('07/04/2000 08:55:30')
        self.assertEqual(d.strftime("%Y-%m-%d %H  %M  %S"), '2000-07-04 08  55  30')
        self.assertEqual(rule.str_to_data_type(None), None)
        self.assertEqual(rule.str_to_data_type(27.5), 27.5)  # floats should return float

    def test_str_to_data_type_year(self):
        rule = Rule.objects.create(name='year_rule', data_type=Rule.TYPE_YEAR)
        d = rule.str_to_data_type('07/04/2000')
        self.assertEqual(d.strftime("%Y-%m-%d"), '2000-07-04')
        self.assertEqual(rule.str_to_data_type(None), None)
        self.assertEqual(rule.str_to_data_type(27.5), 27.5)  # floats should return float

    def test_min_value(self):
        rule = Rule.objects.create(name='min_str_rule', data_type=Rule.TYPE_NUMBER, min=0.5)
        self.assertTrue(rule.minimum_valid(1000))
        self.assertTrue(rule.minimum_valid('1000'))
        self.assertFalse(rule.minimum_valid(0.1))
        self.assertFalse(rule.minimum_valid('0.1'))
        with self.assertRaises(DataQualityTypeCastError):
            self.assertEqual(rule.minimum_valid('not-a-number'), '')

    def test_max_value(self):
        rule = Rule.objects.create(name='max_str_rule', data_type=Rule.TYPE_NUMBER, max=1000)
        self.assertTrue(rule.maximum_valid(0.1))
        self.assertTrue(rule.maximum_valid('0.1'))
        self.assertFalse(rule.maximum_valid(9999))
        self.assertFalse(rule.maximum_valid('9999'))
        with self.assertRaises(DataQualityTypeCastError):
            self.assertEqual(rule.maximum_valid('not-a-number'), '')

    def test_min_value_quantities(self):
        rule = Rule.objects.create(name='min_str_rule', data_type=Rule.TYPE_EUI, min=10, max=100, units='kBtu/ft**2/year')
        self.assertTrue(rule.minimum_valid(15))
        self.assertTrue(rule.minimum_valid('15'))
        self.assertTrue(rule.maximum_valid(15))
        self.assertTrue(rule.maximum_valid('15'))
        self.assertFalse(rule.minimum_valid(5))
        self.assertFalse(rule.minimum_valid('5'))
        self.assertFalse(rule.maximum_valid(150))
        self.assertFalse(rule.maximum_valid('150'))

        # All of these should fail: the first two fall below the minimum of 10
        # (e.g. 5 kBtu/m**2/year =~ 0.5 kBtu/ft**2/year) and the last two exceed the maximum
        # of 100, even when the checked value uses different units than the rule
        # (see the standalone pint sketch after this class).
        self.assertFalse(rule.minimum_valid(ureg.Quantity(5, "kBtu/ft**2/year")))
        self.assertFalse(rule.minimum_valid(ureg.Quantity(5, "kBtu/m**2/year")))  # ~ 0.5 kbtu/ft2/year
        self.assertFalse(rule.maximum_valid(ureg.Quantity(110, "kBtu/ft**2/year")))
        self.assertFalse(rule.maximum_valid(ureg.Quantity(1100, "kBtu/m**2/year")))  # ~ 102.2 kbtu/ft2/year

        # these should all pass
        self.assertTrue(rule.minimum_valid(ureg.Quantity(10, "kBtu/ft**2/year")))
        self.assertTrue(rule.minimum_valid(ureg.Quantity(110, "kBtu/m**2/year")))  # 10.22 kbtu/ft2/year

        # test the rule with different units
        rule = Rule.objects.create(name='min_str_rule', data_type=Rule.TYPE_EUI, min=10, max=100, units='kBtu/m**2/year')
        self.assertFalse(rule.minimum_valid(ureg.Quantity(0.05, "kBtu/ft**2/year")))  # ~ 0.538 kbtu/m2/year
        self.assertFalse(rule.maximum_valid(ureg.Quantity(15, "kBtu/ft**2/year")))  # ~ 161 kbtu/m2/year
        self.assertFalse(rule.minimum_valid(ureg.Quantity(5, "kBtu/m**2/year")))
        self.assertFalse(rule.maximum_valid(ureg.Quantity(110, "kBtu/m**2/year")))

    def test_incorrect_pint_unit_conversions(self):
        rule = Rule.objects.create(name='min_str_rule', data_type=Rule.TYPE_EUI, min=10, max=100, units='ft**2')
        # this should error out nicely
        with self.assertRaises(UnitMismatchError):
            self.assertFalse(rule.minimum_valid(ureg.Quantity(5, "kBtu/ft**2/year")))

        with self.assertRaises(UnitMismatchError):
            self.assertFalse(rule.maximum_valid(ureg.Quantity(5, "kBtu/ft**2/year")))
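The quantity-based checks above compare rule bounds defined in one unit against values supplied in another. As a rough cross-check of the conversion figures cited in the comments, here is a minimal standalone pint sketch. It uses a stock pint.UnitRegistry rather than SEED's configured ureg, so treat it as an illustration of the same conversions, not the project's own setup:

import pint

ureg = pint.UnitRegistry()

# 5 kBtu/m**2/year is roughly 0.46 kBtu/ft**2/year, well below the rule minimum of 10
print(ureg.Quantity(5, "kBtu/m**2/year").to("kBtu/ft**2/year"))

# 1100 kBtu/m**2/year is roughly 102.2 kBtu/ft**2/year, just above the rule maximum of 100
print(ureg.Quantity(1100, "kBtu/m**2/year").to("kBtu/ft**2/year"))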
Example #29
class InventoryViewTests(DeleteModelsTestCase):
    def setUp(self):
        user_details = {
            'username': '******',
            'password': '******',
            'email': '*****@*****.**'
        }
        self.user = User.objects.create_superuser(**user_details)
        self.org, _, _ = create_organization(self.user)
        self.status_label = StatusLabel.objects.create(
            name='test', super_organization=self.org)

        self.column_factory = FakeColumnFactory(organization=self.org)
        self.cycle_factory = FakeCycleFactory(organization=self.org,
                                              user=self.user)
        self.property_factory = FakePropertyFactory(organization=self.org)
        self.property_state_factory = FakePropertyStateFactory(
            organization=self.org)
        self.taxlot_state_factory = FakeTaxLotStateFactory(
            organization=self.org)

        self.cycle = self.cycle_factory.get_cycle(start=datetime(
            2010, 10, 10, tzinfo=timezone.get_current_timezone()))

        self.client.login(**user_details)

    def test_get_building_sync(self):
        state = self.property_state_factory.get_property_state()
        prprty = self.property_factory.get_property()
        pv = PropertyView.objects.create(property=prprty,
                                         cycle=self.cycle,
                                         state=state)

        # go to buildingsync endpoint
        params = {'organization_id': self.org.pk}
        url = reverse('api:v2.1:properties-building-sync', args=[pv.id])
        response = self.client.get(url, params)
        self.assertIn(
            '<auc:FloorAreaValue>%s.0</auc:FloorAreaValue>' %
            state.gross_floor_area, response.content.decode("utf-8"))

    def test_upload_and_get_building_sync(self):
        # import_record =
        filename = path.join(BASE_DIR, 'seed', 'building_sync', 'tests',
                             'data', 'ex_1.xml')

        url = reverse('api:v2:building_file-list')
        fsysparams = {
            'file': open(filename, 'rb'),
            'file_type': 'BuildingSync',
            'organization_id': self.org.id,
            'cycle_id': self.cycle.id
        }

        response = self.client.post(url, fsysparams)
        self.assertEqual(response.status_code, 200)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        self.assertEqual(result['message'], 'successfully imported file')
        self.assertEqual(
            result['data']['property_view']['state']['year_built'], 1967)
        self.assertEqual(
            result['data']['property_view']['state']['postal_code'], '94111')

        # now get the building sync that was just uploaded
        property_id = result['data']['property_view']['id']
        url = reverse('api:v2.1:properties-building-sync', args=[property_id])
        response = self.client.get(url)
        self.assertIn('<auc:YearOfConstruction>1967</auc:YearOfConstruction>',
                      response.content.decode("utf-8"))

    def test_upload_with_measure_duplicates(self):
        # import_record =
        filename = path.join(BASE_DIR, 'seed', 'building_sync', 'tests',
                             'data', 'buildingsync_ex01_measures.xml')

        url = reverse('api:v2:building_file-list')
        fsysparams = {
            'file': open(filename, 'rb'),
            'file_type': 'BuildingSync',
            'organization_id': self.org.id,
            'cycle_id': self.cycle.id
        }
        response = self.client.post(url, fsysparams)
        self.assertEqual(response.status_code, 200)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        expected_message = "successfully imported file with warnings ['Measure category and name is not valid other_electric_motors_and_drives:replace_with_higher_efficiency', 'Measure category and name is not valid other_hvac:install_demand_control_ventilation', 'Measure associated with scenario not found. Scenario: Replace with higher efficiency Only, Measure name: Measure22', 'Measure associated with scenario not found. Scenario: Install demand control ventilation Only, Measure name: Measure24']"
        self.assertEqual(result['message'], expected_message)
        self.assertEqual(
            len(result['data']['property_view']['state']['measures']), 28)
        self.assertEqual(
            len(result['data']['property_view']['state']['scenarios']), 31)
        self.assertEqual(
            result['data']['property_view']['state']['year_built'], 1967)
        self.assertEqual(
            result['data']['property_view']['state']['postal_code'], '94111')

        # upload the same file again
        url = reverse('api:v2:building_file-list')
        fsysparams = {
            'file': open(filename, 'rb'),
            'file_type': 'BuildingSync',
            'organization_id': self.org.id,
            'cycle_id': self.cycle.id
        }
        response = self.client.post(url, fsysparams)
        self.assertEqual(response.status_code, 200)
        result = json.loads(response.content)

        self.assertEqual(
            len(result['data']['property_view']['state']['measures']), 28)
        self.assertEqual(
            len(result['data']['property_view']['state']['scenarios']), 31)

    def test_upload_and_get_building_sync_diff_ns(self):
        # import_record =
        filename = path.join(BASE_DIR, 'seed', 'building_sync', 'tests',
                             'data', 'ex_1_different_namespace.xml')

        url = reverse('api:v2:building_file-list')

        fsysparams = {
            'file': open(filename, 'rb'),
            'file_type': 'BuildingSync',
            'organization_id': self.org.id,
            'cycle_id': self.cycle.id
        }

        response = self.client.post(url, fsysparams)
        self.assertEqual(response.status_code, 200)
        result = json.loads(response.content)
        self.assertEqual(result['status'], 'success')
        self.assertEqual(result['message'], 'successfully imported file')
        self.assertEqual(
            result['data']['property_view']['state']['year_built'], 1889)

        # now get the building sync that was just uploaded
        property_id = result['data']['property_view']['id']
        url = reverse('api:v2.1:properties-building-sync', args=[property_id])
        response = self.client.get(url)
        self.assertIn('<auc:YearOfConstruction>1889</auc:YearOfConstruction>',
                      response.content.decode('utf-8'))

    def test_get_hpxml(self):
        state = self.property_state_factory.get_property_state()
        prprty = self.property_factory.get_property()
        pv = PropertyView.objects.create(property=prprty,
                                         cycle=self.cycle,
                                         state=state)

        # go to buildingsync endpoint
        params = {'organization_id': self.org.pk}
        url = reverse('api:v2.1:properties-hpxml', args=[pv.id])
        response = self.client.get(url, params)
        self.assertIn(
            '<GrossFloorArea>%s.0</GrossFloorArea>' % state.gross_floor_area,
            response.content.decode('utf-8'))
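The three BuildingSync uploads in the tests above repeat the same multipart POST. A minimal sketch of a hypothetical helper method (the name _post_building_sync and its placement on the test case are assumptions, not part of SEED) that could factor that repetition out, assuming the same imports as the test module (reverse, json):

    def _post_building_sync(self, filename):
        # POST a BuildingSync file to the building_file endpoint and return the parsed JSON body.
        url = reverse('api:v2:building_file-list')
        with open(filename, 'rb') as f:
            response = self.client.post(url, {
                'file': f,
                'file_type': 'BuildingSync',
                'organization_id': self.org.id,
                'cycle_id': self.cycle.id,
            })
        self.assertEqual(response.status_code, 200)
        return json.loads(response.content)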
Example #30
    def test_merged_indicators_provided_on_filter_endpoint(self):
        _import_record, import_file_1 = self.create_import_file(
            self.user, self.org, self.cycle)

        base_details = {
            'address_line_1': '123 Match Street',
            'import_file_id': import_file_1.id,
            'data_state': DATA_STATE_MAPPING,
            'no_default_data': False,
        }
        self.taxlot_state_factory.get_taxlot_state(**base_details)

        # set import_file_1 mapping done so that record is "created for users to view".
        import_file_1.mapping_done = True
        import_file_1.save()
        match_buildings(import_file_1.id)

        _import_record_2, import_file_2 = self.create_import_file(
            self.user, self.org, self.cycle)

        url = reverse('api:v2:taxlots-filter') + '?cycle_id={}&organization_id={}&page=1&per_page=999999999'.format(
            self.cycle.pk, self.org.pk)
        response = self.client.post(url)
        data = json.loads(response.content)

        self.assertFalse(data['results'][0]['merged_indicator'])

        # make sure merged_indicator is True when merge occurs
        base_details['city'] = 'Denver'
        base_details['import_file_id'] = import_file_2.id
        self.taxlot_state_factory.get_taxlot_state(**base_details)

        # set import_file_2 mapping done so that match merging can occur.
        import_file_2.mapping_done = True
        import_file_2.save()
        match_buildings(import_file_2.id)

        url = reverse('api:v2:taxlots-filter') + '?cycle_id={}&organization_id={}&page=1&per_page=999999999'.format(
            self.cycle.pk, self.org.pk)
        response = self.client.post(url)
        data = json.loads(response.content)

        self.assertTrue(data['results'][0]['merged_indicator'])

        # Create pairings and check if paired object has indicator as well
        property_factory = FakePropertyFactory(organization=self.org)
        property_state_factory = FakePropertyStateFactory(
            organization=self.org)

        property = property_factory.get_property()
        property_state = property_state_factory.get_property_state()
        property_view = PropertyView.objects.create(property=property,
                                                    cycle=self.cycle,
                                                    state=property_state)

        # attach pairing to one and only taxlot_view
        TaxLotProperty(primary=True,
                       cycle_id=self.cycle.id,
                       property_view_id=property_view.id,
                       taxlot_view_id=TaxLotView.objects.get().id).save()

        url = reverse('api:v2:taxlots-filter') + '?cycle_id={}&organization_id={}&page=1&per_page=999999999'.format(
            self.cycle.pk, self.org.pk)
        response = self.client.post(url)
        data = json.loads(response.content)

        related = data['results'][0]['related'][0]

        self.assertTrue('merged_indicator' in related)
        self.assertFalse(related['merged_indicator'])
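The taxlots-filter URL in the test above is rebuilt three times by string concatenation. A minimal sketch of a small helper that keeps the query string in one place; the function name is hypothetical and not part of SEED, and it assumes Django's reverse (imported here from django.urls; older projects import it from django.core.urlresolvers) plus Python 3's urllib.parse:

from urllib.parse import urlencode

from django.urls import reverse


def taxlot_filter_url(cycle_id, organization_id, page=1, per_page=999999999):
    # Build the taxlots-filter endpoint URL with its pagination/organization query string.
    query = urlencode({
        'cycle_id': cycle_id,
        'organization_id': organization_id,
        'page': page,
        'per_page': per_page,
    })
    return reverse('api:v2:taxlots-filter') + '?' + query

Usage inside the test would then reduce to a single line per request, e.g. self.client.post(taxlot_filter_url(self.cycle.pk, self.org.pk)).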