Example #1
    def setUp(self):
        # for now just import some test data. I'd rather create fake data... next time.
        filename = getattr(self, 'filename', 'example-data-properties.xlsx')
        self.fake_mappings = copy.copy(FAKE_MAPPINGS['portfolio'])
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars
        filepath = osp.join(osp.dirname(__file__), '..', 'data', filename)
        self.import_file.file = SimpleUploadedFile(name=filename,
                                                   content=open(
                                                       filepath, 'rb').read())
        self.import_file.save()

        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.id)
        tasks.map_data(self.import_file.pk)
        tasks.match_buildings(self.import_file.id)

        # import second file with tax lot information
        filename_2 = getattr(self, 'filename', 'example-data-taxlots.xlsx')
        self.fake_mappings = copy.copy(FAKE_MAPPINGS['taxlot'])
        _, self.import_file_2 = self.create_import_file(
            self.user, self.org, self.cycle)
        filepath = osp.join(osp.dirname(__file__), '..', 'data', filename_2)
        self.import_file_2.file = SimpleUploadedFile(name=filename_2,
                                                     content=open(
                                                         filepath,
                                                         'rb').read())
        self.import_file_2.save()

        tasks.save_raw_data(self.import_file_2.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.id)
        tasks.map_data(self.import_file_2.pk)
        tasks.match_buildings(self.import_file_2.id)
Example #2
    def test_promote_properties(self):
        """Test if the promoting of a property works as expected"""
        tasks._save_raw_data(self.import_file.pk, 'fake_cache_key', 1)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.pk)
        tasks.map_data(self.import_file.pk)

        cycle2, _ = Cycle.objects.get_or_create(
            name=u'Hack Cycle 2016',
            organization=self.org,
            start=datetime.datetime(2016,
                                    1,
                                    1,
                                    tzinfo=timezone.get_current_timezone()),
            end=datetime.datetime(2016,
                                  12,
                                  31,
                                  tzinfo=timezone.get_current_timezone()),
        )

        # make sure that the new data was loaded correctly
        ps = PropertyState.objects.filter(address_line_1='50 Willow Ave SE')[0]
        self.assertEqual(ps.site_eui.magnitude, 125)

        # Promote the PropertyState to a PropertyView
        pv1 = ps.promote(self.cycle)
        pv2 = ps.promote(self.cycle)  # should just return the same object
        self.assertEqual(pv1, pv2)

        # promote the same state for a new cycle, same data
        pv3 = ps.promote(cycle2)
        self.assertNotEqual(pv3, pv1)

        props = PropertyView.objects.all()
        self.assertEqual(len(props), 2)
Example #3
    def test_duplicate_headers_throws_400(self):
        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.pk)

        with self.assertRaises(Exception):
            tasks.map_data(self.import_file.pk)
Example #4
    def setUp(self):
        super().setUp()

        # for now just import some test data. I'd rather create fake data... next time.
        filename = getattr(self, 'filename', 'example-data-properties.xlsx')
        self.fake_mappings = copy.copy(FAKE_MAPPINGS['portfolio'])
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars
        filepath = osp.join(osp.dirname(__file__), '..', 'data', filename)
        self.import_file.file = SimpleUploadedFile(name=filename,
                                                   content=open(
                                                       filepath, 'rb').read())
        self.import_file.save()

        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.id)
        tasks.map_data(self.import_file.pk)
        tasks.geocode_and_match_buildings_task(self.import_file.id)

        # import a second file that is nearly the same but contains small changes
        filename_2 = getattr(self, 'filename',
                             'example-data-properties-small-changes.xlsx')
        _, self.import_file_2 = self.create_import_file(
            self.user, self.org, self.cycle)
        filepath = osp.join(osp.dirname(__file__), '..', 'data', filename_2)
        self.import_file_2.file = SimpleUploadedFile(name=filename_2,
                                                     content=open(
                                                         filepath,
                                                         'rb').read())
        self.import_file_2.save()

        tasks.save_raw_data(self.import_file_2.pk)
        tasks.map_data(self.import_file_2.pk)
        tasks.geocode_and_match_buildings_task(self.import_file_2.id)
Example #5
    def setUp(self):
        filename = getattr(self, 'filename', 'example-data-properties.xlsx')
        self.fake_mappings = copy.copy(FAKE_MAPPINGS['portfolio'])
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars
        self.import_file.load_import_file(
            osp.join(osp.dirname(__file__), 'data', filename))
        tasks._save_raw_data(self.import_file.pk, 'fake_cache_key', 1)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.id)
        tasks.map_data(self.import_file.pk)
        tasks.match_buildings(self.import_file.id)

        # import a second file that is nearly the same but contains small changes
        filename_2 = getattr(self, 'filename',
                             'example-data-properties-small-changes.xlsx')
        _, self.import_file_2 = self.create_import_file(
            self.user, self.org, self.cycle)
        self.import_file_2.load_import_file(
            osp.join(osp.dirname(__file__), 'data', filename_2))
        tasks._save_raw_data(self.import_file_2.pk, 'fake_cache_key_2', 1)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file_2.id)
        tasks.map_data(self.import_file_2.pk)
        tasks.match_buildings(self.import_file_2.id)

        # for api tests
        user_details = {
            'username': '******',
            'password': '******',
        }
        self.client.login(**user_details)
Example #6
    def setUp(self):
        self.user_details = {
            'username': '******',
            'email': '*****@*****.**',
            'password': '******'
        }
        self.user = User.objects.create_user(**self.user_details)
        self.login_url = reverse('landing:login')

        self.org = Organization.objects.create()
        OrganizationUser.objects.create(user=self.user, organization=self.org)

        self.import_record = ImportRecord.objects.create(owner=self.user)
        self.import_record.super_organization = self.org
        self.import_record.save()
        self.import_file = ImportFile.objects.create(
            import_record=self.import_record)

        self.import_file.source_type = 'ASSESSED_RAW'
        self.import_file.file = File(
            open(
                path.join(path.dirname(__file__),
                          '../data/covered-buildings-sample-with-errors.csv')))
        self.import_file.save()
        self.import_file_mapping = path.join(
            path.dirname(__file__),
            "../data/covered-buildings-sample-with-errors-mappings.csv")

        tasks.save_raw_data(self.import_file.id)
        Column.create_mappings_from_file(self.import_file_mapping, self.org,
                                         self.user, self.import_file.id)
        tasks.map_data(self.import_file.id)
Example #7
    def test_promote_properties(self):
        """Good case for testing our matching system."""
        tasks.save_raw_data(self.import_file.id)
        Column.create_mappings(self.fake_mappings, self.org, self.user)
        tasks.map_data(self.import_file.pk)

        cycle2, _ = Cycle.objects.get_or_create(
            name=u'Hack Cycle 2016',
            organization=self.org,
            start=datetime.datetime(2016, 1, 1),
            end=datetime.datetime(2016, 12, 31),
        )

        # make sure that the new data was loaded correctly
        ps = PropertyState.objects.filter(address_line_1='1181 Douglas Street')[0]
        self.assertEqual(ps.site_eui, 439.9)
        self.assertEqual(ps.extra_data['CoStar Property ID'], '1575599')

        # Promote the PropertyState to a PropertyView
        pv1 = ps.promote(self.cycle)
        pv2 = ps.promote(self.cycle)  # should just return the same object
        self.assertEqual(pv1, pv2)

        # promote the same state for a new cycle, same data
        pv3 = ps.promote(cycle2)
        self.assertNotEqual(pv3, pv1)

        props = PropertyView.objects.all()
        self.assertEqual(len(props), 2)
Example #8
    def setUp(self):
        # for now just import some test data. I'd rather create fake data... next time.
        filename = getattr(self, 'filename', 'example-data-properties.xlsx')
        self.fake_mappings = copy.copy(FAKE_MAPPINGS['portfolio'])
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars
        self.import_file.load_import_file(
            osp.join(osp.dirname(__file__), '../data_importer/tests/data',
                     filename))
        tasks._save_raw_data(self.import_file.pk, 'fake_cache_key', 1)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.id)
        tasks.map_data(self.import_file.pk)
        tasks.match_buildings(self.import_file.id)

        # import a second file that is nearly the same but contains small changes
        filename_2 = getattr(self, 'filename',
                             'example-data-properties-small-changes.xlsx')
        _, self.import_file_2 = self.create_import_file(
            self.user, self.org, self.cycle)
        self.import_file_2.load_import_file(
            osp.join(osp.dirname(__file__), '../data_importer/tests/data',
                     filename_2))
        tasks._save_raw_data(self.import_file_2.pk, 'fake_cache_key_2', 1)
        tasks.map_data(self.import_file_2.pk)
        tasks.match_buildings(self.import_file_2.id)
Example #9
    def test_multiple_id_matches(self):
        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.pk)
        tasks.map_data(self.import_file.pk)

        # verify that there are no properties listed as canonical
        property_states = tasks.list_canonical_property_states(self.org)
        self.assertEqual(len(property_states), 0)

        # promote two properties
        ps = PropertyState.objects.filter(custom_id_1='13').order_by('id')
        ps_test = ps.first()
        ps_test_2 = ps.last()
        for p in ps:
            p.promote(self.cycle)
            # from seed.utils.generic import pp
            # pp(p)

        property_states = tasks.list_canonical_property_states(self.org)
        self.assertEqual(len(property_states), 2)

        # no arguments passed should return no results
        matches = self.query_property_matches(property_states, None, None,
                                              None, None)
        self.assertEqual(len(matches), 0)
        # should return 2 properties
        matches = self.query_property_matches(property_states, None, '13',
                                              None, None)
        self.assertEqual(len(matches), 2)
        self.assertEqual(matches[0], ps_test)
        self.assertEqual(matches[1], ps_test_2)
        # should return only the second property
        matches = self.query_property_matches(property_states, '2342', None,
                                              None, None)
        self.assertEqual(len(matches), 1)
        self.assertEqual(matches[0], ps_test_2)
        # should return both properties, the first one should be the pm match, i.e. the first prop
        matches = self.query_property_matches(property_states, '481516', '13',
                                              None, None)
        self.assertEqual(len(matches), 2)
        self.assertEqual(matches[0], ps_test)
        self.assertEqual(matches[1], ps_test_2)
        # if passing in the second pm then it will not be the first
        matches = self.query_property_matches(property_states, '2342', '13',
                                              None, None)
        self.assertEqual(len(matches), 2)
        self.assertEqual(matches[1], ps_test_2)
        # pass the pm id into the custom id. it should still return the correct buildings.
        # not sure that this is the right behavior, but this is what it does, so just testing.
        matches = self.query_property_matches(property_states, None, '2342',
                                              None, None)
        self.assertEqual(len(matches), 1)
        self.assertEqual(matches[0], ps_test_2)
        matches = self.query_property_matches(property_states, '13', None,
                                              None, None)
        self.assertEqual(len(matches), 2)
        self.assertEqual(matches[0], ps_test)
        self.assertEqual(matches[1], ps_test_2)
Example #10
    def test_map_data(self):
        """Save mappings based on user specifications."""
        # Create new import file to test
        import_record = ImportRecord.objects.create(
            owner=self.user, last_modified_by=self.user, super_organization=self.org
        )
        import_file = ImportFile.objects.create(
            import_record=import_record,
            source_type=ASSESSED_RAW,
        )
        import_file.raw_save_done = True
        import_file.save()

        fake_raw_bs = PropertyState.objects.create(
            organization=self.org,
            import_file=import_file,
            extra_data=self.fake_row,
            source_type=ASSESSED_RAW,
            data_state=DATA_STATE_IMPORT,
        )

        # tasks._save_raw_data(import_file.pk, 'fake_cache_key', 1)

        self.fake_mappings = FAKE_MAPPINGS['fake_row']
        Column.create_mappings(self.fake_mappings, self.org, self.user)
        tasks.map_data(import_file.pk)

        mapped_bs = list(PropertyState.objects.filter(
            import_file=import_file,
            source_type=ASSESSED_BS,
        ))

        self.assertEqual(len(mapped_bs), 1)

        test_bs = mapped_bs[0]

        self.assertNotEqual(test_bs.pk, fake_raw_bs.pk)
        self.assertEqual(test_bs.property_name, self.fake_row['Name'])
        self.assertEqual(test_bs.address_line_1, self.fake_row['Address Line 1'])
        self.assertEqual(
            test_bs.year_built,
            parser.parse(self.fake_row['Year Built']).year
        )

        # Make sure that we saved the extra_data column mappings
        data_columns = Column.objects.filter(
            organization=self.org,
            is_extra_data=True
        )

        # There's only one piece of data that didn't cleanly map.
        # Note that as of 09/15/2016 - extra data still needs to be defined in the mappings, it
        # will no longer magically appear in the extra_data field if the user did not specify to
        # map it!
        self.assertListEqual(
            sorted([d.column_name for d in data_columns]), ['Double Tester']
        )
Example #11
    def test_import_file(self):
        tasks._save_raw_data(self.import_file.pk, 'fake_cache_key', 1)
        Column.create_mappings(self.fake_mappings, self.org, self.user)
        tasks.map_data(self.import_file.pk)

        ps = PropertyState.objects.filter(pm_property_id='2264').first()
        ps.promote(self.cycle)

        # one property was promoted above, so it should no longer appear as unmatched
        ps = self.import_file.find_unmatched_property_states()
        self.assertEqual(len(ps), 13)
Example #12
    def test_map_data(self):
        """Save mappings based on user specifications."""
        # Create new import file to test
        import_record = ImportRecord.objects.create(
            owner=self.user, last_modified_by=self.user, super_organization=self.org
        )
        import_file = ImportFile.objects.create(
            import_record=import_record,
            source_type=ASSESSED_RAW,
        )
        import_file.raw_save_done = True
        import_file.save()

        fake_raw_bs = PropertyState.objects.create(
            organization=self.org,
            import_file=import_file,
            extra_data=self.fake_row,
            source_type=ASSESSED_RAW,
            data_state=DATA_STATE_IMPORT,
        )

        self.fake_mappings = copy.deepcopy(FAKE_MAPPINGS['fake_row'])
        Column.create_mappings(self.fake_mappings, self.org, self.user, import_file.pk)
        tasks.map_data(import_file.pk)

        mapped_bs = list(PropertyState.objects.filter(
            import_file=import_file,
            source_type=ASSESSED_BS,
        ))

        self.assertEqual(len(mapped_bs), 1)

        test_bs = mapped_bs[0]
        self.assertNotEqual(test_bs.pk, fake_raw_bs.pk)
        self.assertEqual(test_bs.property_name, self.fake_row['Name'])
        self.assertEqual(test_bs.address_line_1, self.fake_row['Address Line 1'])
        self.assertEqual(
            test_bs.year_built,
            parser.parse(self.fake_row['Year Built']).year
        )

        # Make sure that we saved the extra_data column mappings
        data_columns = Column.objects.filter(
            organization=self.org,
            is_extra_data=True
        ).exclude(table_name='')

        # There's only one piece of data that did not cleanly map.
        # Note that as of 09/15/2016 - extra data still needs to be defined in the mappings, it
        # will no longer magically appear in the extra_data field if the user did not specify to
        # map it!
        self.assertListEqual(
            sorted([d.column_name for d in data_columns]), ['Double Tester']
        )
Example #13
    def test_import_file(self):
        tasks._save_raw_data(self.import_file.pk, 'fake_cache_key', 1)
        Column.create_mappings(self.fake_mappings, self.org, self.user)
        tasks.map_data(self.import_file.pk)

        ps = PropertyState.objects.filter(pm_property_id='2264').first()
        ps.promote(self.cycle)

        # one property was promoted above, so it should no longer appear as unmatched
        ps = self.import_file.find_unmatched_property_states()
        self.assertEqual(len(ps), 13)
Example #14
    def test_match_buildings(self):
        """ case A (one property <-> one tax lot) """
        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.pk)
        tasks.map_data(self.import_file.pk)

        # Check to make sure all the properties imported
        ps = PropertyState.objects.filter(
            data_state=DATA_STATE_MAPPING,
            organization=self.org,
            import_file=self.import_file,
        )
        self.assertEqual(len(ps), 14)

        # Check to make sure the taxlots were imported
        ts = TaxLotState.objects.filter(
            data_state=DATA_STATE_MAPPING,
            organization=self.org,
            import_file=self.import_file,
        )
        self.assertEqual(len(ts), 18)

        # Check a single case of the taxlotstate
        ts = TaxLotState.objects.filter(
            jurisdiction_tax_lot_id='1552813').first()
        self.assertEqual(ts.jurisdiction_tax_lot_id, '1552813')
        self.assertEqual(ts.address_line_1, None)
        self.assertEqual(ts.extra_data["data_008"], 1)

        # Check a single case of the propertystate
        ps = PropertyState.objects.filter(pm_property_id='2264')
        self.assertEqual(len(ps), 1)
        ps = ps.first()
        self.assertEqual(ps.pm_property_id, '2264')
        self.assertEqual(ps.address_line_1, '50 Willow Ave SE')
        self.assertEqual('data_007' in ps.extra_data, True)
        self.assertEqual('data_008' in ps.extra_data, False)
        self.assertEqual(ps.extra_data["data_007"], 'a')

        # verify that the lot_number has the tax_lot information. For this case it is one-to-one
        self.assertEqual(ps.lot_number, ts.jurisdiction_tax_lot_id)

        tasks.match_buildings(self.import_file.id)

        self.assertEqual(TaxLot.objects.count(), 10)

        qry = PropertyView.objects.filter(state__custom_id_1='7')
        self.assertEqual(qry.count(), 1)
        state = qry.first().state

        self.assertEqual(state.address_line_1, "12 Ninth Street")
        self.assertEqual(state.property_name, "Grange Hall")
Example #15
 def setUp(self):
     filename = getattr(self, 'filename', 'example-data-properties.xlsx')
     import_file_source_type = ASSESSED_RAW
     self.fake_mappings = FAKE_MAPPINGS['portfolio']
     self.fake_extra_data = FAKE_EXTRA_DATA
     self.fake_row = FAKE_ROW
     selfvars = self.set_up(import_file_source_type)
     self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars
     self.import_file = self.load_import_file_file(filename,
                                                   self.import_file)
     tasks._save_raw_data(self.import_file.pk, 'fake_cache_key', 1)
     Column.create_mappings(self.fake_mappings, self.org, self.user)
     tasks.map_data(self.import_file.pk)
Example #16
    def test_match_buildings(self):
        """ case A (one property <-> one tax lot) """
        tasks._save_raw_data(self.import_file.pk, 'fake_cache_key', 1)
        Column.create_mappings(self.fake_mappings, self.org, self.user)
        tasks.map_data(self.import_file.pk)

        # Check to make sure all the properties imported
        ps = PropertyState.objects.filter(
            data_state=DATA_STATE_MAPPING,
            organization=self.org,
            import_file=self.import_file,
        )
        self.assertEqual(len(ps), 14)

        # Check to make sure the taxlots were imported
        ts = TaxLotState.objects.filter(
            data_state=DATA_STATE_MAPPING,
            organization=self.org,
            import_file=self.import_file,
        )
        # self.assertEqual(len(ts), 10)  # 10 unique taxlots after duplicates and delimiters

        # Check a single case of the taxlotstate
        ts = TaxLotState.objects.filter(jurisdiction_tax_lot_id='1552813').first()
        self.assertEqual(ts.jurisdiction_tax_lot_id, '1552813')
        self.assertEqual(ts.address_line_1, None)
        self.assertEqual(ts.extra_data["data_008"], 1)

        # Check a single case of the propertystate
        ps = PropertyState.objects.filter(pm_property_id='2264')
        self.assertEqual(len(ps), 1)
        ps = ps.first()
        self.assertEqual(ps.pm_property_id, '2264')
        self.assertEqual(ps.address_line_1, '50 Willow Ave SE')
        self.assertEqual('data_007' in ps.extra_data.keys(), True)
        self.assertEqual('data_008' in ps.extra_data.keys(), False)
        self.assertEqual(ps.extra_data["data_007"], 'a')

        # verify that the lot_number has the tax_lot information. For this case it is one-to-one
        self.assertEqual(ps.lot_number, ts.jurisdiction_tax_lot_id)

        tasks.match_buildings(self.import_file.id, self.user.id)

        self.assertEqual(TaxLot.objects.count(), 10)

        qry = PropertyView.objects.filter(state__custom_id_1='7')
        self.assertEqual(qry.count(), 1)
        state = qry.first().state

        self.assertEqual(state.address_line_1, "12 Ninth Street")
        self.assertEqual(state.property_name, "Grange Hall")
Example #17
    def test_multiple_id_matches(self):
        tasks._save_raw_data(self.import_file.pk, 'fake_cache_key', 1)
        Column.create_mappings(self.fake_mappings, self.org, self.user)
        tasks.map_data(self.import_file.pk)

        # verify that there are no properties listed as canonical
        property_states = tasks.list_canonical_property_states(self.org)
        self.assertEqual(len(property_states), 0)

        # promote two properties
        ps = PropertyState.objects.filter(custom_id_1='13')
        ps_test = ps.first()
        ps_test_2 = ps.last()
        for p in ps:
            p.promote(self.cycle)
            # from seed.utils.generic import pp
            # pp(p)

        property_states = tasks.list_canonical_property_states(self.org)
        self.assertEqual(len(property_states), 2)

        # no arguments passed should return no results
        matches = tasks.query_property_matches(property_states, None, None)
        self.assertEqual(len(matches), 0)
        # should return 2 properties
        matches = tasks.query_property_matches(property_states, None, '13')
        self.assertEqual(len(matches), 2)
        self.assertEqual(matches[0], ps_test)
        self.assertEqual(matches[1], ps_test_2)
        # should return only the second property
        matches = tasks.query_property_matches(property_states, '2342', None)
        self.assertEqual(len(matches), 1)
        self.assertEqual(matches[0], ps_test_2)
        # should return both properties, the first one should be the pm match, i.e. the first prop
        matches = tasks.query_property_matches(property_states, '481516', '13')
        self.assertEqual(len(matches), 2)
        self.assertEqual(matches[0], ps_test)
        self.assertEqual(matches[1], ps_test_2)
        # if passing in the second pm then it will not be the first
        matches = tasks.query_property_matches(property_states, '2342', '13')
        self.assertEqual(len(matches), 2)
        self.assertEqual(matches[1], ps_test_2)
        # pass the pm id into the custom id. it should still return the correct buildings.
        # not sure that this is the right behavior, but this is what it does, so just testing.
        matches = tasks.query_property_matches(property_states, None, '2342')
        self.assertEqual(len(matches), 1)
        self.assertEqual(matches[0], ps_test_2)
        matches = tasks.query_property_matches(property_states, '13', None)
        self.assertEqual(len(matches), 2)
        self.assertEqual(matches[0], ps_test)
        self.assertEqual(matches[1], ps_test_2)
Example #18
    def test_mapping(self):
        """Test objects in database can be converted to mapped fields"""
        # for mapping, you have to create an import file, even if it is just one record. This is
        # more of an ID to track imports

        state = self.property_state_factory.get_property_state_as_extra_data(
            import_file_id=self.import_file.id,
            source_type=ASSESSED_RAW,
            data_state=DATA_STATE_IMPORT,
            random_extra=42)
        # set import_file save done to true
        self.import_file.raw_save_done = True
        self.import_file.save()

        # Create mappings from the new states
        # TODO #239: Convert this to a single helper method to suggest and save
        suggested_mappings = mapper.build_column_mapping(
            list(state.extra_data.keys()),
            Column.retrieve_all_by_tuple(self.org),
            previous_mapping=get_column_mapping,
            map_args=[self.org],
            thresh=80)

        # Convert mapping suggestions to the format needed for saving
        mappings = []
        for raw_column, suggestion in suggested_mappings.items():
            # A single suggestion looks like: 'lot_number': ['PropertyState', 'lot_number', 100]
            mapping = {
                "from_field": raw_column,
                "from_units": None,
                "to_table_name": suggestion[0],
                "to_field": suggestion[1],
                "to_field_display_name": suggestion[1],
            }
            mappings.append(mapping)

        # Now save the mappings
        # print(mappings)
        Column.create_mappings(mappings, self.org, self.user,
                               self.import_file.id)
        # END TODO

        tasks.map_data(self.import_file.id)

        props = self.import_file.find_unmatched_property_states()
        self.assertEqual(len(props), 1)
        self.assertEqual(state.extra_data['year_built'],
                         props.first().year_built)
        self.assertEqual(state.extra_data['random_extra'],
                         props.first().extra_data['random_extra'])
Example #19
def _create_propertyview(request, org, user, dataset_name):
    cycle = Cycle.objects.filter(
        organization=org).last()  # might need to hardcode this
    dataset = ImportRecord.objects.get(name=dataset_name,
                                       super_organization=org)
    result = [{'City': request.GET['city'], 'State': request.GET['state']}]
    if 'street' in request.GET:
        result[0]['Address Line 1'] = request.GET['street']
    else:
        result[0]['Address Line 1'] = request.GET['address_line_1']
    if 'zipcode' in request.GET:
        result[0]['Postal Code'] = request.GET['zipcode']
    else:
        result[0]['Postal Code'] = request.GET['postal_code']
    if 'property_uid' in request.GET:
        result[0]['Custom ID 1'] = request.GET['property_uid']
    file_pk = utils.save_and_load(user, dataset, cycle, result,
                                  "profile_data.csv")
    # save data
    resp = save_raw_data(file_pk)
    save_prog_key = resp['progress_key']
    utils.wait_for_task(save_prog_key)
    # map data
    #        save_column_mappings(file_id, col_mappings) #perform column mapping
    resp = map_data(file_pk)
    map_prog_key = resp['progress_key']
    utils.wait_for_task(map_prog_key)
    resp = match_buildings(file_pk)
    #        resp = geocode_buildings_task(file_pk)
    if resp['status'] == 'error':
        return resp
    match_prog_key = resp['progress_key']
    utils.wait_for_task(match_prog_key)
    propertyview = utils.propertyview_find(request)
    return propertyview
Example #20
    def test_postal_code_property(self):

        new_mappings = copy.deepcopy(self.fake_mappings['portfolio'])

        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(new_mappings, self.org, self.user, self.import_file.pk)
        tasks.map_data(self.import_file.pk)

        # get mapped property postal_code
        ps = PropertyState.objects.filter(address_line_1='11 Ninth Street')[0]
        self.assertEqual(ps.postal_code, '00340')

        ps = PropertyState.objects.filter(address_line_1='20 Tenth Street')[0]
        self.assertEqual(ps.postal_code, '00000')

        ps = PropertyState.objects.filter(address_line_1='93029 Wellington Blvd')[0]
        self.assertEqual(ps.postal_code, '00001-0002')
Example #21
 def setUp(self):
     filename = getattr(self, 'filename', 'example-data-properties.xlsx')
     import_file_source_type = ASSESSED_RAW
     self.fake_mappings = FAKE_MAPPINGS['portfolio']
     self.fake_extra_data = FAKE_EXTRA_DATA
     self.fake_row = FAKE_ROW
     selfvars = self.set_up(import_file_source_type)
     self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars
     filepath = osp.join(osp.dirname(__file__), 'data', filename)
     self.import_file.file = SimpleUploadedFile(name=filename,
                                                content=open(
                                                    filepath, 'rb').read())
     self.import_file.save()
     tasks._save_raw_data(self.import_file.pk, 'fake_cache_key', 1)
     Column.create_mappings(self.fake_mappings, self.org, self.user,
                            self.import_file.pk)
     tasks.map_data(self.import_file.pk)
Example #22
    def test_mapping_no_properties(self):
        # update the mappings so they no longer include any property tables
        for m in self.fake_mappings:
            if m["to_table_name"] == 'PropertyState':
                m["to_table_name"] = 'TaxLotState'

        tasks._save_raw_data(self.import_file.pk, 'fake_cache_key', 1)
        Column.create_mappings(self.fake_mappings, self.org, self.user)
        tasks.map_data(self.import_file.pk)

        # make sure that no mapped property objects were created; the 14 records are the raw import rows
        ps = PropertyState.objects.all()
        self.assertEqual(len(ps), 14)

        # make sure that the new data was loaded correctly
        ts = TaxLotState.objects.filter(address_line_1='50 Willow Ave SE').first()
        self.assertEqual(ts.extra_data['site_eui'], 125)
Example #23
    def test_mapping_tax_lots_only(self):
        # update the mappings so they no longer include any property tables
        new_mappings = copy.deepcopy(self.fake_mappings)
        for m in new_mappings:
            if m["to_table_name"] == 'PropertyState':
                m["to_table_name"] = 'TaxLotState'

        tasks._save_raw_data(self.import_file.pk, 'fake_cache_key', 1)
        Column.create_mappings(new_mappings, self.org, self.user, self.import_file.pk)
        tasks.map_data(self.import_file.pk)

        # make sure that no mapped property objects were created; the 14 records are the raw import rows
        ps = PropertyState.objects.all()
        self.assertEqual(len(ps), 14)

        # make sure that the new data was loaded correctly
        ts = TaxLotState.objects.filter(address_line_1='50 Willow Ave SE').first()
        self.assertEqual(ts.extra_data['site_eui'], 125)
Example #24
    def setUp(self):
        super(TestEquivalenceWithFile, self).setUp()

        filename = getattr(self, 'filename', 'covered-buildings-sample.csv')
        import_file_source_type = ASSESSED_RAW
        self.fake_mappings = FAKE_MAPPINGS['covered_building']
        selfvars = self.set_up(import_file_source_type)
        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars
        filepath = osp.join(osp.dirname(__file__), '..', '..', '..', 'tests',
                            'data', filename)
        self.import_file.file = SimpleUploadedFile(name=filename,
                                                   content=open(
                                                       filepath, 'rb').read())
        self.import_file.save()

        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.id)
        tasks.map_data(self.import_file.pk)
Example #25
    def test_match_buildings(self):
        """ case B (many property <-> one tax lot) """
        tasks._save_raw_data(self.import_file.pk, 'fake_cache_key', 1)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.pk)
        # Set remap to True because for some reason this file id has been imported before.
        tasks.map_data(self.import_file.pk, True)

        # Check to make sure all the properties imported
        ps = PropertyState.objects.filter(
            data_state=DATA_STATE_MAPPING,
            organization=self.org,
            import_file=self.import_file,
        )
        self.assertEqual(len(ps), 14)

        # Check to make sure the tax lots were imported
        ts = TaxLotState.objects.filter(
            data_state=DATA_STATE_MAPPING,
            organization=self.org,
            import_file=self.import_file,
        )
        self.assertEqual(len(ts), 18)

        # verify that the lot_number has the tax_lot information. For this case it is one-to-many
        p_test = PropertyState.objects.filter(
            pm_property_id='5233255',
            organization=self.org,
            data_state=DATA_STATE_MAPPING,
            import_file=self.import_file,
        ).first()
        self.assertEqual(p_test.lot_number, "333/66555;333/66125;333/66148")

        tasks.match_buildings(self.import_file.id)

        # make sure the property only has one tax lot and vice versa
        tlv = TaxLotView.objects.filter(
            state__jurisdiction_tax_lot_id='11160509', cycle=self.cycle)
        self.assertEqual(len(tlv), 1)
        tlv = tlv[0]
        properties = tlv.property_states()
        self.assertEqual(len(properties), 3)
Example #26
    def setUp(self):
        data_importer_data_dir = os.path.join(os.path.dirname(__file__), '..',
                                              'data_importer', 'tests', 'data')
        filename = getattr(self, 'filename', 'example-data-properties.xlsx')
        self.fake_mappings = copy.copy(FAKE_MAPPINGS['portfolio'])
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars
        filepath = os.path.join(data_importer_data_dir, filename)
        self.import_file.file = SimpleUploadedFile(name=filename,
                                                   content=open(
                                                       filepath, 'rb').read())
        self.import_file.save()
        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.id)
        tasks.map_data(self.import_file.pk)
        tasks.match_buildings(self.import_file.id)

        # import a second file that is nearly the same but contains small changes
        filename_2 = getattr(self, 'filename',
                             'example-data-properties-small-changes.xlsx')
        _, self.import_file_2 = self.create_import_file(
            self.user, self.org, self.cycle)
        filepath = os.path.join(data_importer_data_dir, filename_2)
        self.import_file_2.file = SimpleUploadedFile(name=filename_2,
                                                     content=open(
                                                         filepath,
                                                         'rb').read())
        self.import_file_2.save()

        tasks.save_raw_data(self.import_file_2.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file_2.id)
        tasks.map_data(self.import_file_2.pk)
        tasks.match_buildings(self.import_file_2.id)

        # for api tests
        user_details = {
            'username': '******',
            'password': '******',
        }
        self.client.login(**user_details)
Example #27
    def test_mapping(self):
        tasks._save_raw_data(self.import_file.pk, 'fake_cache_key', 1)
        Column.create_mappings(self.fake_mappings, self.org, self.user)
        tasks.map_data(self.import_file.pk)

        # There are a total of 18 tax lot ids in the import file
        ts = TaxLotState.objects.all()

        self.assertEqual(len(ts), 18)

        # make sure that the new data was loaded correctly and that the lot_number was set
        # appropriately
        ps = PropertyState.objects.filter(address_line_1='2700 Welstone Ave NE')[0]
        self.assertEqual(ps.site_eui, 1202)
        self.assertEqual(ps.lot_number, '11160509')

        ps = PropertyState.objects.filter(address_line_1='521 Elm Street')[0]
        self.assertEqual(ps.site_eui, 1358)
        # The lot_number should also have been normalized and then re-delimited
        self.assertEqual(ps.lot_number, '33366555;33366125;33366148')
Example #28
    def test_postal_code_taxlot(self):

        new_mappings = copy.deepcopy(self.fake_mappings['taxlot'])

        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(new_mappings, self.org, self.user, self.import_file.pk)
        tasks.map_data(self.import_file.pk)

        # get mapped taxlot postal_code
        ts = TaxLotState.objects.filter(address_line_1='35 Tenth Street').first()

        if ts is None:
            raise TypeError("Invalid Taxlot Address!")
        self.assertEqual(ts.postal_code, '00333')

        ts = TaxLotState.objects.filter(address_line_1='93030 Wellington Blvd').first()

        if ts is None:
            raise TypeError("Invalid Taxlot Address!")
        self.assertEqual(ts.postal_code, '00000-0000')
Example #29
    def test_mapping_no_taxlot(self):
        # update the mappings to not include any taxlot tables in the data
        # note that save_data reads in from the propertystate table, so that will always
        # have entries in the db (for now).
        for m in self.fake_mappings:
            if m["to_table_name"] == 'TaxLotState':
                m["to_table_name"] = 'PropertyState'

        tasks._save_raw_data(self.import_file.pk, 'fake_cache_key', 1)
        Column.create_mappings(self.fake_mappings, self.org, self.user)
        tasks.map_data(self.import_file.pk)

        # make sure that no taxlot objects were created
        ts = TaxLotState.objects.all()
        self.assertEqual(len(ts), 0)

        # make sure that the new data was loaded correctly
        ps = PropertyState.objects.filter(address_line_1='2700 Welstone Ave NE')[0]
        self.assertEqual(ps.site_eui, 1202)
        self.assertEqual(ps.extra_data['jurisdiction_tax_lot_id'], '11160509')
Example #30
    def test_mapping(self):
        tasks._save_raw_data(self.import_file.pk, 'fake_cache_key', 1)
        Column.create_mappings(self.fake_mappings, self.org, self.user, self.import_file.pk)
        tasks.map_data(self.import_file.pk)

        # There are a total of 18 tax lot ids in the import file
        ts = TaxLotState.objects.all()

        self.assertEqual(len(ts), 18)

        # make sure that the new data was loaded correctly and that the lot_number was set
        # appropriately
        ps = PropertyState.objects.filter(address_line_1='2700 Welstone Ave NE')[0]
        self.assertEqual(ps.site_eui, 1202)
        self.assertEqual(ps.lot_number, '11160509')

        ps = PropertyState.objects.filter(address_line_1='521 Elm Street')[0]
        self.assertEqual(ps.site_eui, 1358)
        # The lot_number should also have been normalized and then re-delimited
        self.assertEqual(ps.lot_number, '333/66555;333/66125;333/66148')
Example #31
    def test_single_id_matches(self):
        tasks._save_raw_data(self.import_file.pk, 'fake_cache_key', 1)
        Column.create_mappings(self.fake_mappings, self.org, self.user)
        tasks.map_data(self.import_file.pk)

        # verify that there are no properties listed as canonical
        property_states = tasks.list_canonical_property_states(self.org)
        self.assertEqual(len(property_states), 0)

        # promote a property
        ps = PropertyState.objects.filter(pm_property_id='2264').first()
        ps.promote(self.cycle)

        property_states = tasks.list_canonical_property_states(self.org)
        self.assertEqual(len(property_states), 1)

        matches = tasks.query_property_matches(property_states, None, None)
        self.assertEqual(len(matches), 0)
        matches = tasks.query_property_matches(property_states, '2264', None)
        self.assertEqual(len(matches), 1)
        self.assertEqual(matches[0], ps)
Example #32
    def test_single_id_matches(self):
        tasks._save_raw_data(self.import_file.pk, 'fake_cache_key', 1)
        Column.create_mappings(self.fake_mappings, self.org, self.user)
        tasks.map_data(self.import_file.pk)

        # verify that there are no properties listed as canonical
        property_states = tasks.list_canonical_property_states(self.org)
        self.assertEqual(len(property_states), 0)

        # promote a property
        ps = PropertyState.objects.filter(pm_property_id='2264').first()
        ps.promote(self.cycle)

        property_states = tasks.list_canonical_property_states(self.org)
        self.assertEqual(len(property_states), 1)

        matches = tasks.query_property_matches(property_states, None, None)
        self.assertEqual(len(matches), 0)
        matches = tasks.query_property_matches(property_states, '2264', None)
        self.assertEqual(len(matches), 1)
        self.assertEqual(matches[0], ps)
Example #33
    def test_mapping_properties_only(self):
        # update the mappings to not include any taxlot tables in the data
        # note that save_data reads in from the propertystate table, so that will always
        # have entries in the db (for now).
        new_mappings = copy.deepcopy(self.fake_mappings)
        for m in new_mappings:
            if m["to_table_name"] == 'TaxLotState':
                m["to_table_name"] = 'PropertyState'

        tasks._save_raw_data(self.import_file.pk, 'fake_cache_key', 1)
        Column.create_mappings(new_mappings, self.org, self.user, self.import_file.pk)
        tasks.map_data(self.import_file.pk)

        # make sure that no taxlot objects were created
        ts = TaxLotState.objects.all()
        self.assertEqual(len(ts), 0)

        # make sure that the new data was loaded correctly
        ps = PropertyState.objects.filter(address_line_1='2700 Welstone Ave NE')[0]
        self.assertEqual(ps.site_eui, 1202)
        self.assertEqual(ps.extra_data['jurisdiction_tax_lot_id'], '11160509')
Example #34
    def test_map_all_models_xml(self):
        # -- Setup
        with patch.object(ImportFile, 'cache_first_rows', return_value=None):
            progress_info = tasks.save_raw_data(self.import_file.pk)
        self.assertEqual('success', progress_info['status'],
                         json.dumps(progress_info))
        self.assertEqual(
            PropertyState.objects.filter(import_file=self.import_file).count(),
            1)

        # make the column mappings
        self.fake_mappings = default_buildingsync_profile_mappings()
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.pk)

        # map the data
        progress_info = tasks.map_data(self.import_file.pk)
        self.assertEqual('success', progress_info['status'])
        # verify there were no errors with the files
        self.assertEqual({}, progress_info.get('file_info', {}))
        ps = PropertyState.objects.filter(address_line_1='123 MAIN BLVD',
                                          import_file=self.import_file)
        self.assertEqual(len(ps), 1)

        # -- Act
        tasks.map_additional_models(self.import_file.pk)

        # -- Assert
        ps = PropertyState.objects.filter(address_line_1='123 MAIN BLVD',
                                          import_file=self.import_file)

        self.assertEqual(ps.count(), 1)

        # verify the property view, scenario and meter data were created
        pv = PropertyView.objects.filter(state=ps[0])
        self.assertEqual(pv.count(), 1)

        scenario = Scenario.objects.filter(property_state=ps[0])
        self.assertEqual(scenario.count(), 3)

        # for bsync, meters are linked to scenarios only (not properties)
        meters = Meter.objects.filter(scenario__in=scenario)
        self.assertEqual(meters.count(), 6)
Example #35
    def test_map_all_models_xml(self):
        # -- Setup
        with patch.object(ImportFile, 'cache_first_rows', return_value=None):
            progress_info = tasks.save_raw_data(self.import_file.pk)
        self.assertEqual('success', progress_info['status'],
                         json.dumps(progress_info))
        self.assertEqual(
            PropertyState.objects.filter(import_file=self.import_file).count(),
            1)

        # make the column mappings
        self.fake_mappings = default_buildingsync_profile_mappings()
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.pk)

        # map the data
        progress_info = tasks.map_data(self.import_file.pk)
        self.assertEqual('success', progress_info['status'])
        # verify there were no errors with the files
        self.assertEqual({}, progress_info.get('file_info', {}))
        ps = PropertyState.objects.filter(address_line_1='123 Main St',
                                          import_file=self.import_file)
        self.assertEqual(ps.count(), 1)

        # -- Act
        progress_info = tasks.geocode_and_match_buildings_task(
            self.import_file.pk)

        # -- Assert
        ps = PropertyState.objects.filter(address_line_1='123 Main St',
                                          import_file=self.import_file)
        self.assertEqual(ps.count(), 1)

        # !! we should have warnings for our file because of the bad measure names !!
        self.assertNotEqual({}, progress_info.get('file_info', {}))
        self.assertIn(self.import_file.uploaded_filename,
                      list(progress_info['file_info'].keys()))
        self.assertNotEqual(
            [],
            progress_info['file_info'][self.import_file.uploaded_filename].get(
                'warnings', []))
Example #36
 def perform_mapping(self, request, pk=None):
     """
     Starts a background task to convert imported raw data into
     BuildingSnapshots, using user's column mappings.
     ---
     type:
         status:
             required: true
             type: string
             description: either success or error
         progress_key:
             type: integer
             description: ID of background job, for retrieving job progress
     parameter_strategy: replace
     parameters:
         - name: pk
           description: Import file ID
           required: true
           paramType: path
     """
     return JsonResponse(map_data(pk))
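
The docstring above notes that the JSON response carries a progress_key for the background job. Below is a minimal sketch of how a caller might block on that job, reusing the map_data/wait_for_task pattern already shown in Example #19; both helpers are assumed to be the same ones imported there, and this sketch is an illustration rather than part of the original example.

def map_and_wait(file_pk):
    # Kick off the background mapping task; map_data returns a dict with
    # 'status' and, on success, a 'progress_key' (see Example #19).
    resp = map_data(file_pk)
    if resp.get('status') == 'error':
        return resp  # surface the error instead of polling
    # Block until the background job reports completion.
    utils.wait_for_task(resp['progress_key'])
    return resp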
Example #37
    def map(self, request, pk=None):
        """
        Starts a background task to convert imported raw data into
        PropertyState and TaxLotState, using user's column mappings.
        """

        body = request.data

        remap = body.get('remap', False)
        mark_as_done = body.get('mark_as_done', True)

        org_id = request.query_params.get('organization_id', None)
        import_file = ImportFile.objects.filter(
            pk=pk, import_record__super_organization_id=org_id)
        if not import_file.exists():
            return {
                'status': 'error',
                'message': 'ImportFile {} does not exist'.format(pk)
            }

        # return remap_data(import_file_id)
        return JsonResponse(map_data(pk, remap, mark_as_done))
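
For reference, a hedged sketch of how an HTTP client might invoke this map endpoint. The URL path, host, and auth header are assumptions made for illustration; only the organization_id query parameter and the remap/mark_as_done body keys come from the view above.

import requests

# Hypothetical route; substitute the actual path the map view is mounted at.
url = 'https://seed.example.org/api/import_files/42/map/'
resp = requests.post(
    url,
    params={'organization_id': 1},                # scopes the ImportFile lookup
    json={'remap': True, 'mark_as_done': False},  # optional flags read from request.data
    headers={'Authorization': 'Token <token>'},   # placeholder credentials
)
print(resp.json())  # e.g. {'status': ..., 'progress_key': ...} on success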
Example #38
    def test_map_data_zip(self):
        # -- Setup
        with patch.object(ImportFile, 'cache_first_rows', return_value=None):
            progress_info = tasks.save_raw_data(self.import_file.pk)
        self.assertEqual('success', progress_info['status'],
                         json.dumps(progress_info))
        self.assertEqual(
            PropertyState.objects.filter(import_file=self.import_file).count(),
            2)

        # make the column mappings
        self.fake_mappings = default_buildingsync_profile_mappings()
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.pk)

        # -- Act
        progress_info = tasks.map_data(self.import_file.pk)

        # -- Assert
        self.assertEqual('success', progress_info['status'])
        ps = PropertyState.objects.filter(address_line_1='123 Main St',
                                          import_file=self.import_file)
        self.assertEqual(len(ps), 2)
Example #39
    def test_map_all_models_zip(self):
        # -- Setup
        with patch.object(ImportFile, 'cache_first_rows', return_value=None):
            progress_info = tasks.save_raw_data(self.import_file.pk)
        self.assertEqual('success', progress_info['status'],
                         json.dumps(progress_info))
        self.assertEqual(
            PropertyState.objects.filter(import_file=self.import_file).count(),
            2)

        # make the column mappings
        self.fake_mappings = default_buildingsync_profile_mappings()
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.pk)

        # map the data
        progress_info = tasks.map_data(self.import_file.pk)
        self.assertEqual('success', progress_info['status'])
        ps = PropertyState.objects.filter(address_line_1='123 Main St',
                                          import_file=self.import_file)
        self.assertEqual(ps.count(), 2)

        # -- Act
        tasks.map_additional_models(self.import_file.pk)

        # -- Assert
        ps = PropertyState.objects.filter(address_line_1='123 Main St',
                                          import_file=self.import_file)
        self.assertEqual(ps.count(), 2)

        # verify there are 2 building files
        bfs = BuildingFile.objects.all()
        self.assertEqual(bfs.count(), 2)

        # check that scenarios were created
        scenarios = Scenario.objects.all()
        self.assertEqual(scenarios.count(), 31)
Example #40
    def test_cleanse(self):
        # Import the file and run mapping

        # Year Ending,Energy Score,Total GHG Emissions (MtCO2e),Weather Normalized Site EUI (kBtu/ft2),
        # National Median Site EUI (kBtu/ft2),Source EUI (kBtu/ft2),Weather Normalized Source EUI (kBtu/ft2),
        # National Median Source EUI (kBtu/ft2),Parking - Gross Floor Area (ft2),Organization
        # Release Date
        fake_mappings = [
            {
                "from_field": u'Property Id',
                "to_table_name": u'PropertyState',
                "to_field": u'pm_property_id',
            }, {
                "from_field": u'Property Name',
                "to_table_name": u'PropertyState',
                "to_field": u'property_name',
            }, {
                "from_field": u'Address 1',
                "to_table_name": u'PropertyState',
                "to_field": u'address_line_1',
            }, {
                "from_field": u'Address 2',
                "to_table_name": u'PropertyState',
                "to_field": u'address_line_2',
            }, {
                "from_field": u'City',
                "to_table_name": u'PropertyState',
                "to_field": u'city',
            }, {
                "from_field": u'State/Province',
                "to_table_name": u'PropertyState',
                "to_field": u'state_province',
            }, {
                "from_field": u'Postal Code',
                "to_table_name": u'PropertyState',
                "to_field": u'postal_code',
            }, {
                "from_field": u'Year Built',
                "to_table_name": u'PropertyState',
                "to_field": u'year_built',
            }, {
                "from_field": u'Property Floor Area (Buildings and Parking) (ft2)',
                "to_table_name": u'PropertyState',
                "to_field": u'gross_floor_area',
            }, {
                "from_field": u'Site EUI (kBtu/ft2)',
                "to_table_name": u'PropertyState',
                "to_field": u'site_eui',
            }, {
                "from_field": u'Generation Date',
                "to_table_name": u'PropertyState',
                "to_field": u'generation_date',
            }
        ]

        tasks.save_raw_data(self.import_file.id)
        Column.create_mappings(fake_mappings, self.org, self.user)
        tasks.map_data(self.import_file.id)

        qs = PropertyState.objects.filter(
            import_file=self.import_file,
            source_type=PORTFOLIO_BS,
        ).iterator()

        c = Cleansing(self.org)
        c.cleanse('property', qs)

        _log.debug(c.results)

        self.assertEqual(len(c.results), 2)

        result = [v for v in c.results.values() if v['address_line_1'] == '120243 E True Lane']
        if len(result) == 1:
            result = result[0]
        else:
            raise RuntimeError('Non unity results')

        res = [{
            'field': u'pm_property_id',
            'formatted_field': u'PM Property ID',
            'value': u'',
            'message': u'PM Property ID is missing',
            'detailed_message': u'PM Property ID is missing',
            'severity': u'error'
        }]
        self.assertEqual(res, result['cleansing_results'])

        result = [v for v in c.results.values() if v['address_line_1'] == '95373 E Peach Avenue']
        if len(result) == 1:
            result = result[0]
        else:
            raise RuntimeError('Non unity results')

        res = [{
            'field': u'site_eui',
            'formatted_field': u'Site EUI',
            'value': 0.1,
            'message': u'Site EUI out of range',
            'detailed_message': u'Site EUI [0.1] < 10.0',
            'severity': u'warning'
        }]
        self.assertEqual(res, result['cleansing_results'])
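The fake_mappings list above writes out every column by hand. Since each entry targets PropertyState, the same structure can be built from a compact dict. The sketch below is illustrative only; the helper name and the compact dict are assumptions, and only the from_field / to_table_name / to_field keys come from the tests themselves.

def build_property_mappings(column_map):
    # Hypothetical helper: expand {CSV header: PropertyState field} pairs
    # into the mapping dicts that Column.create_mappings() receives in the
    # tests above.
    return [
        {
            'from_field': from_field,
            'to_table_name': 'PropertyState',
            'to_field': to_field,
        }
        for from_field, to_field in column_map.items()
    ]

# Reproduces a few of the entries from the list above.
fake_mappings = build_property_mappings({
    'Property Id': 'pm_property_id',
    'Address 1': 'address_line_1',
    'Site EUI (kBtu/ft2)': 'site_eui',
})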
Example #41
0
    def test_check_multiple_text_match(self):
        d = DataQualityCheck.retrieve(self.org)
        d.remove_all_rules()

        sl_data = {'name': 'No meters present', 'super_organization': self.org}
        sl_ok_1, _ = StatusLabel.objects.get_or_create(**sl_data)
        new_rule = {
            'table_name': 'PropertyState',
            'field': 'meters_present',
            'data_type': TYPE_STRING,
            'rule_type': RULE_TYPE_CUSTOM,
            'text_match': 'OK',
            'severity': SEVERITY_ERROR,
            'status_label': sl_ok_1,
        }
        d.add_rule(new_rule)

        sl_data = {
            'name': 'No 12 Consectutive Months',
            'super_organization': self.org
        }
        sl_ok_2, _ = StatusLabel.objects.get_or_create(**sl_data)
        new_rule = {
            'table_name': 'PropertyState',
            'field': '12 Consectutive Months',
            'data_type': TYPE_STRING,
            'rule_type': RULE_TYPE_CUSTOM,
            'text_match': 'OK',
            'severity': SEVERITY_ERROR,
            'status_label': sl_ok_2,
        }
        d.add_rule(new_rule)

        sl_data = {'name': 'No Monthly Data', 'super_organization': self.org}
        sl_ok_3, _ = StatusLabel.objects.get_or_create(**sl_data)
        new_rule = {
            'table_name': 'PropertyState',
            'field': 'Monthly Data',
            'data_type': TYPE_STRING,
            'rule_type': RULE_TYPE_CUSTOM,
            'text_match': 'OK',
            'severity': SEVERITY_ERROR,
            'status_label': sl_ok_3,
        }
        d.add_rule(new_rule)

        # import data
        tasks.save_raw_data(self.import_file.id)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.pk)
        tasks.map_data(self.import_file.id)
        tasks.match_buildings(self.import_file.id)

        qs = PropertyState.objects.filter(
            import_file=self.import_file,
            source_type=ASSESSED_BS,
        ).iterator()
        d.reset_results()
        d.check_data('PropertyState', qs)

        # Check multiple strings
        props = PropertyView.objects.filter(
            property__labels=sl_ok_1).select_related('state')
        addresses = sorted([p.state.address_line_1 for p in props])
        expected = [
            u'1 International Road', u'17246 Esch Drive',
            u'2581 Schiller Parkway', u'3 Northport Place',
            u'84807 Buell Trail'
        ]
        self.assertListEqual(expected, addresses)

        props = PropertyView.objects.filter(
            property__labels=sl_ok_2).select_related('state')
        addresses = sorted([p.state.address_line_1 for p in props])
        expected = [
            u'1 International Road', u'2581 Schiller Parkway',
            u'49705 Harper Crossing'
        ]
        self.assertListEqual(expected, addresses)

        props = PropertyView.objects.filter(
            property__labels=sl_ok_3).select_related('state')
        addresses = sorted([p.state.address_line_1 for p in props])
        expected = [
            u'1 International Road', u'17246 Esch Drive', u'84807 Buell Trail',
            u'88263 Scoville Park'
        ]
        self.assertListEqual(expected, addresses)
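As inferred from the assertions above, each RULE_TYPE_CUSTOM text-match rule ends up labeling the records whose value for the rule's field does not equal text_match. The sketch below mimics that selection on plain dicts; it is not the real DataQualityCheck logic, and the sample field values are made up.

def failing_addresses(records, field, text_match):
    # Illustrative only: collect, sorted, the address_line_1 of every record
    # whose value for `field` differs from `text_match`, mirroring how the
    # labeled addresses are asserted above.
    return sorted(
        record['address_line_1']
        for record in records
        if record.get(field) != text_match
    )

# Made-up sample rows; only the addresses appear in the test fixtures.
records = [
    {'address_line_1': '1 International Road', 'Monthly Data': 'Not OK'},
    {'address_line_1': '88263 Scoville Park', 'Monthly Data': 'Not OK'},
    {'address_line_1': '49705 Harper Crossing', 'Monthly Data': 'OK'},
]
print(failing_addresses(records, 'Monthly Data', 'OK'))
# ['1 International Road', '88263 Scoville Park']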
Example #42
0
    def test_cleanse(self):
        # Import the file and run mapping

        # This is silly, the mappings are backwards from what you would expect. The key is the BS field, and the
        # value is the value in the CSV
        # fake_mappings = {
        #     'city': 'city',
        #     'postal_code': 'Zip',
        #     'gross_floor_area': 'GBA',
        #     'building_count': 'BLDGS',
        #     'year_built': 'AYB_YearBuilt',
        #     'state_province': 'State',
        #     'address_line_1': 'Address',
        #     'owner': 'Owner',
        #     'property_notes': 'Property Type',
        #     'tax_lot_id': 'UBI',
        #     'custom_id_1': 'Custom ID',
        #     'pm_property_id': 'PM Property ID'
        # }

        tasks.save_raw_data(self.import_file.id)
        # util.make_fake_mappings(fake_mappings, self.org) -> convert to Column.create_mappings()
        tasks.map_data(self.import_file.id)

        qs = PropertyState.objects.filter(
            import_file=self.import_file,
            source_type=ASSESSED_BS,
        ).iterator()

        c = Cleansing(self.org)
        c.cleanse(qs)
        # print c.results

        self.assertEqual(len(c.results), 2)

        result = [v for v in c.results.values() if
                  v['address_line_1'] == '95373 E Peach Avenue']
        if len(result) == 1:
            result = result[0]
        else:
            raise RuntimeError('Non unity results')

        self.assertEqual(result['address_line_1'], '95373 E Peach Avenue')
        self.assertEqual(result['tax_lot_id'], '10107/c6596')
        res = [{
            'field': u'pm_property_id',
            'formatted_field': u'PM Property ID',
            'value': u'',
            'message': u'PM Property ID is missing',
            'detailed_message': u'PM Property ID is missing',
            'severity': u'error'
        }]
        self.assertEqual(res, result['cleansing_results'])

        result = [v for v in c.results.values() if
                  v['address_line_1'] == '120243 E True Lane']
        if len(result) == 1:
            result = result[0]
        else:
            raise RuntimeError('Non unity results')

        res = [{
            'field': u'year_built',
            'formatted_field': u'Year Built',
            'value': 0,
            'message': u'Year Built out of range',
            'detailed_message': u'Year Built [0] < 1700',
            'severity': u'error'
        }, {
            'field': u'gross_floor_area',
            'formatted_field': u'Gross Floor Area',
            'value': 10000000000.0,
            'message': u'Gross Floor Area out of range',
            'detailed_message': u'Gross Floor Area [10000000000.0] > 7000000.0',
            'severity': u'error'
        }, {
            'field': u'custom_id_1',
            'formatted_field': u'Custom ID 1',
            'value': u'',
            'message': u'Custom ID 1 is missing',
            'detailed_message': u'Custom ID 1 is missing',
            'severity': u'error'
        }, {
            'field': u'pm_property_id',
            'formatted_field': u'PM Property ID',
            'value': u'',
            'message': u'PM Property ID is missing',
            'detailed_message': u'PM Property ID is missing',
            'severity': u'error'
        }]
        self.assertItemsEqual(res, result['cleansing_results'])

        result = [v for v in c.results.values() if
                  v['address_line_1'] == '1234 Peach Tree Avenue']
        self.assertEqual(len(result), 0)
        self.assertEqual(result, [])
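Both out-of-range findings above (Year Built [0] < 1700, Gross Floor Area [10000000000.0] > 7000000.0) follow the same min/max pattern. The sketch below emits result dicts in the same shape; the thresholds passed in the usage lines are taken from the expected results, but the function is only an approximation, not the actual Cleansing rule engine, and the Year Built upper bound is invented.

def range_check(field, formatted_field, value, minimum, maximum):
    # Illustrative min/max check producing dicts shaped like the `res`
    # entries in the test above; not the real Cleansing implementation.
    if value < minimum:
        detail = '{} [{}] < {}'.format(formatted_field, value, minimum)
    elif value > maximum:
        detail = '{} [{}] > {}'.format(formatted_field, value, maximum)
    else:
        return []
    return [{
        'field': field,
        'formatted_field': formatted_field,
        'value': value,
        'message': '{} out of range'.format(formatted_field),
        'detailed_message': detail,
        'severity': 'error',
    }]

print(range_check('year_built', 'Year Built', 0, 1700, 2019))
print(range_check('gross_floor_area', 'Gross Floor Area', 10000000000.0, 100, 7000000.0))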
Example #43
0
    def import_exported_data(self, filename):
        """
        Import test files from Stephen for many-to-many testing. This imports
        and maps the data accordingly. Presently these files are missing a
        couple of attributes to make them valid:
            1) the master campus record to define the pm_property_id
            2) the joins between propertystate and taxlotstate
        """

        # Do a bunch of work to flatten out this temp file that has extra_data
        # as a string representation of a dict
        data = []
        keys = None
        new_keys = set()

        f = os.path.join(os.path.dirname(__file__), 'data', filename)
        with open(f, 'rb') as csvfile:
            reader = csv.DictReader(csvfile)
            keys = reader.fieldnames
            for row in reader:
                ed = json.loads(row.pop('extra_data'))
                for k, v in ed.iteritems():
                    new_keys.add(k)
                    row[k] = v
                data.append(row)

        # remove the extra_data column and add in the new columns
        keys.remove('extra_data')
        for k in new_keys:
            keys.append(k)

        # save the new file
        new_file_name = 'tmp_{}_flat.csv'.format(
            os.path.splitext(os.path.basename(filename))[0]
        )
        f_new = os.path.join(os.path.dirname(__file__), 'data', new_file_name)
        with open(f_new, 'w') as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=keys)
            writer.writeheader()
            for d in data:
                writer.writerow(d)

        # save the keys (this doesn't appear to be used anywhere)
        new_file_name = 'tmp_{}_keys.csv'.format(
            os.path.splitext(os.path.basename(filename))[0]
        )
        f_new = os.path.join(os.path.dirname(__file__), 'data', new_file_name)
        with open(f_new, 'w') as outfile:
            outfile.writelines([str(key) + '\n' for key in keys])

        # Continue saving the raw data
        new_file_name = "tmp_{}_flat.csv".format(
            os.path.splitext(os.path.basename(filename))[0]
        )
        f_new = os.path.join(os.path.dirname(__file__), 'data', new_file_name)
        self.import_file.file = File(open(f_new))
        self.import_file.save()

        save_raw_data(self.import_file.id)

        # the mapping is just the 'keys' repeated since the file
        # was created as a database dump
        mapping = []
        for k in keys:
            if k == 'id':
                continue
            mapping.append(
                {
                    "from_field": k,
                    "to_table_name": "PropertyState",
                    "to_field": k
                }
            )

        Column.create_mappings(mapping, self.org, self.user)

        # call the mapping function from the tasks file
        map_data(self.import_file.id)
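The core of import_exported_data is the flattening step: pop the JSON extra_data column and promote its keys to ordinary CSV columns. The sketch below restates just that step for Python 3 (the helper above is Python 2: iteritems, byte-mode file handling); the function name and file paths are illustrative.

import csv
import json

def flatten_extra_data(src_path, dst_path):
    # Read the export, expanding the JSON `extra_data` column into
    # ordinary columns, then write the flattened rows back out.
    with open(src_path, newline='') as src:
        reader = csv.DictReader(src)
        keys = list(reader.fieldnames)
        rows = []
        for row in reader:
            extra = json.loads(row.pop('extra_data'))
            for key, value in extra.items():
                if key not in keys:
                    keys.append(key)
                row[key] = value
            rows.append(row)
    keys.remove('extra_data')
    with open(dst_path, 'w', newline='') as dst:
        writer = csv.DictWriter(dst, fieldnames=keys)
        writer.writeheader()
        writer.writerows(rows)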
Example #44
0
    def test_demo_v2(self):
        tasks._save_raw_data(self.import_file_tax_lot.pk, 'fake_cache_key', 1)
        Column.create_mappings(self.fake_taxlot_mappings, self.org, self.user)
        Column.create_mappings(self.fake_portfolio_mappings, self.org, self.user)
        tasks.map_data(self.import_file_tax_lot.pk)

        # Check to make sure the taxlots were imported
        ts = TaxLotState.objects.filter(
            data_state=DATA_STATE_MAPPING,
            organization=self.org,
            import_file=self.import_file_tax_lot,
        )

        ps = PropertyState.objects.filter(
            data_state=DATA_STATE_MAPPING,
            organization=self.org,
            import_file=self.import_file_property,
        )

        self.assertEqual(len(ps), 0)
        self.assertEqual(len(ts), 9)

        tasks.match_buildings(self.import_file_tax_lot.id, self.user.id)

        # Check a single case of the taxlotstate
        self.assertEqual(TaxLotState.objects.filter(address_line_1='050 Willow Ave SE').count(), 1)
        self.assertEqual(
            TaxLotView.objects.filter(state__address_line_1='050 Willow Ave SE').count(), 1
        )

        self.assertEqual(TaxLotView.objects.count(), 9)

        # Import the property data
        tasks._save_raw_data(self.import_file_property.pk, 'fake_cache_key', 1)
        tasks.map_data(self.import_file_property.pk)

        ts = TaxLotState.objects.filter(
            # data_state=DATA_STATE_MAPPING,  # Look at all taxlotstates
            organization=self.org,
            import_file=self.import_file_tax_lot,
        )

        ps = PropertyState.objects.filter(
            data_state=DATA_STATE_MAPPING,
            organization=self.org,
            import_file=self.import_file_property,
        )

        self.assertEqual(len(ts), 9)
        self.assertEqual(len(ps), 14)

        tasks.match_buildings(self.import_file_property.id, self.user.id)

        ps = PropertyState.objects.filter(
            data_state=DATA_STATE_MAPPING,
            organization=self.org,
            import_file=self.import_file_property,
        )

        # there shouldn't be any properties left in the mapping state
        self.assertEqual(len(ps), 0)

        # psv = PropertyView.objects.filter(state__organization=self.org)
        # self.assertEqual(len(psv), 12)

        # tlv = TaxLotView.objects.filter(state__organization=self.org)
        # self.assertEqual(len(tlv), 9)

        self.assertEqual(PropertyView.objects.filter(state__organization=self.org,
                                                     state__pm_property_id='2264').count(), 1)
        pv = PropertyView.objects.filter(state__organization=self.org,
                                         state__pm_property_id='2264').first()

        self.assertEqual(pv.state.property_name, 'University Inn')
        self.assertEqual(pv.state.address_line_1, '50 Willow Ave SE')
Example #45
0
    def test_cleanse(self):
        # Import the file and run mapping

        # Explicit identity mappings: every source column maps to the
        # PropertyState field of the same name.

        fake_mappings = [
            {
                "from_field": u'block_number',
                "to_table_name": u'PropertyState',
                "to_field": u'block_number',
            }, {
                "from_field": u'error_type',
                "to_table_name": u'PropertyState',
                "to_field": u'error_type',
            }, {
                "from_field": u'building_count',
                "to_table_name": u'PropertyState',
                "to_field": u'building_count',
            }, {
                "from_field": u'conditioned_floor_area',
                "to_table_name": u'PropertyState',
                "to_field": u'conditioned_floor_area',
            }, {
                "from_field": u'energy_score',
                "to_table_name": u'PropertyState',
                "to_field": u'energy_score',
            }, {
                "from_field": u'gross_floor_area',
                "to_table_name": u'PropertyState',
                "to_field": u'gross_floor_area',
            }, {
                "from_field": u'lot_number',
                "to_table_name": u'PropertyState',
                "to_field": u'lot_number',
            }, {
                "from_field": u'occupied_floor_area',
                "to_table_name": u'PropertyState',
                "to_field": u'occupied_floor_area',
            }, {
                "from_field": u'postal_code',
                "to_table_name": u'PropertyState',
                "to_field": u'postal_code',
            }, {
                "from_field": u'site_eui',
                "to_table_name": u'PropertyState',
                "to_field": u'site_eui',
            }, {
                "from_field": u'site_eui_weather_normalized',
                "to_table_name": u'PropertyState',
                "to_field": u'site_eui_weather_normalized',
            }, {
                "from_field": u'source_eui',
                "to_table_name": u'PropertyState',
                "to_field": u'source_eui',
            }, {
                "from_field": u'source_eui_weather_normalized',
                "to_table_name": u'PropertyState',
                "to_field": u'source_eui_weather_normalized',
            }, {
                "from_field": u'address_line_1',
                "to_table_name": u'PropertyState',
                "to_field": u'address_line_1',
            }, {
                "from_field": u'address_line_2',
                "to_table_name": u'PropertyState',
                "to_field": u'address_line_2',
            }, {
                "from_field": u'building_certification',
                "to_table_name": u'PropertyState',
                "to_field": u'building_certification',
            }, {
                "from_field": u'city',
                "to_table_name": u'PropertyState',
                "to_field": u'city',
            }, {
                "from_field": u'custom_id_1',
                "to_table_name": u'PropertyState',
                "to_field": u'custom_id_1',
            }, {
                "from_field": u'district',
                "to_table_name": u'PropertyState',
                "to_field": u'district',
            }, {
                "from_field": u'energy_alerts',
                "to_table_name": u'PropertyState',
                "to_field": u'energy_alerts',
            }, {
                "from_field": u'owner_address',
                "to_table_name": u'PropertyState',
                "to_field": u'owner_address',
            }, {
                "from_field": u'owner_city_state',
                "to_table_name": u'PropertyState',
                "to_field": u'owner_city_state',
            }, {
                "from_field": u'owner_email',
                "to_table_name": u'PropertyState',
                "to_field": u'owner_email',
            }, {
                "from_field": u'owner_postal_code',
                "to_table_name": u'PropertyState',
                "to_field": u'owner_postal_code',
            }, {
                "from_field": u'owner_telephone',
                "to_table_name": u'PropertyState',
                "to_field": u'owner_telephone',
            }, {
                "from_field": u'pm_property_id',
                "to_table_name": u'PropertyState',
                "to_field": u'pm_property_id',
            }, {
                "from_field": u'property_name',
                "to_table_name": u'PropertyState',
                "to_field": u'property_name',
            }, {
                "from_field": u'property_notes',
                "to_table_name": u'PropertyState',
                "to_field": u'property_notes',
            }, {
                "from_field": u'space_alerts',
                "to_table_name": u'PropertyState',
                "to_field": u'space_alerts',
            }, {
                "from_field": u'state_province',
                "to_table_name": u'PropertyState',
                "to_field": u'state_province',
            }, {
                "from_field": u'tax_lot_id',
                "to_table_name": u'PropertyState',
                "to_field": u'tax_lot_id',
            }, {
                "from_field": u'use_description',
                "to_table_name": u'PropertyState',
                "to_field": u'use_description',
            }, {
                "from_field": u'generation_date',
                "to_table_name": u'PropertyState',
                "to_field": u'generation_date',
            }, {
                "from_field": u'recent_sale_date',
                "to_table_name": u'PropertyState',
                "to_field": u'recent_sale_date',
            }, {
                "from_field": u'release_date',
                "to_table_name": u'PropertyState',
                "to_field": u'release_date',
            }, {
                "from_field": u'year_built',
                "to_table_name": u'PropertyState',
                "to_field": u'year_built',
            }, {
                "from_field": u'year_ending',
                "to_table_name": u'PropertyState',
                "to_field": u'year_ending',
            }
        ]

        tasks.save_raw_data(self.import_file.id)

        Column.create_mappings(fake_mappings, self.org, self.user)
        tasks.map_data(self.import_file.id)

        qs = PropertyState.objects.filter(
            import_file=self.import_file,
            source_type=ASSESSED_BS,
        ).iterator()

        c = Cleansing(self.org)
        c.cleanse('property', qs)

        # _log.debug(c.results)
        # This only checks that the cleansing run flagged 34 records.
        self.assertEqual(len(c.results), 34)