Example #1
    def test_promote_properties(self):
        """Good case for testing our matching system."""
        tasks.save_raw_data(self.import_file.id)
        Column.create_mappings(self.fake_mappings, self.org, self.user)
        tasks.map_data(self.import_file.pk)

        cycle2, _ = Cycle.objects.get_or_create(
            name=u'Hack Cycle 2016',
            organization=self.org,
            start=datetime.datetime(2016, 1, 1),
            end=datetime.datetime(2016, 12, 31),
        )

        # make sure that the new data was loaded correctly
        ps = PropertyState.objects.filter(address_line_1='1181 Douglas Street')[0]
        self.assertEqual(ps.site_eui, 439.9)
        self.assertEqual(ps.extra_data['CoStar Property ID'], '1575599')

        # Promote the PropertyState to a PropertyView
        pv1 = ps.promote(self.cycle)
        pv2 = ps.promote(self.cycle)  # should just return the same object
        self.assertEqual(pv1, pv2)

        # promote the same state for a new cycle, same data
        pv3 = ps.promote(cycle2)
        self.assertNotEqual(pv3, pv1)

        props = PropertyView.objects.all()
        self.assertEqual(len(props), 2)
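
Across these examples the same import pipeline recurs: save_raw_data loads the raw rows, Column.create_mappings registers the column mappings, map_data applies them, and a matching/geocoding task merges the results. Below is a minimal sketch of that sequence, assuming the same fixtures (import_file, fake_mappings, org, user) that the setUp() methods in these examples create, and the import paths used in the SEED codebase these snippets come from:

    # Sketch only: import_file, fake_mappings, org and user are assumed to be
    # set up as in the setUp() methods shown in these examples.
    from seed.data_importer import tasks
    from seed.models import Column

    tasks.save_raw_data(import_file.pk)                     # load raw rows as PropertyState records
    Column.create_mappings(fake_mappings, org, user,
                           import_file.pk)                  # register the column mappings
    tasks.map_data(import_file.pk)                          # apply the mappings to the raw data
    tasks.geocode_and_match_buildings_task(import_file.pk)  # geocode and match/merge the records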
Example #2
    def test_duplicate_headers_throws_400(self):
        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.pk)

        with self.assertRaises(Exception):
            tasks.map_data(self.import_file.pk)
Example #3
    def setUp(self):
        super().setUp()

        # for now just import some test data. I'd rather create fake data... next time.
        filename = getattr(self, 'filename', 'example-data-properties.xlsx')
        self.fake_mappings = copy.copy(FAKE_MAPPINGS['portfolio'])
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars
        filepath = osp.join(osp.dirname(__file__), '..', 'data', filename)
        self.import_file.file = SimpleUploadedFile(name=filename,
                                                   content=open(
                                                       filepath, 'rb').read())
        self.import_file.save()

        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.id)
        tasks.map_data(self.import_file.pk)
        tasks.geocode_and_match_buildings_task(self.import_file.id)

        # import second file that is currently the same, but should be slightly different
        filename_2 = getattr(self, 'filename',
                             'example-data-properties-small-changes.xlsx')
        _, self.import_file_2 = self.create_import_file(
            self.user, self.org, self.cycle)
        filepath = osp.join(osp.dirname(__file__), '..', 'data', filename_2)
        self.import_file_2.file = SimpleUploadedFile(name=filename_2,
                                                     content=open(
                                                         filepath,
                                                         'rb').read())
        self.import_file_2.save()

        tasks.save_raw_data(self.import_file_2.pk)
        tasks.map_data(self.import_file_2.pk)
        tasks.geocode_and_match_buildings_task(self.import_file_2.id)
Example #4
    def setUp(self):
        self.user_details = {
            'username': '******',
            'email': '*****@*****.**',
            'password': '******'
        }
        self.user = User.objects.create_user(**self.user_details)
        self.login_url = reverse('landing:login')

        self.org = Organization.objects.create()
        OrganizationUser.objects.create(user=self.user, organization=self.org)

        self.import_record = ImportRecord.objects.create(owner=self.user)
        self.import_record.super_organization = self.org
        self.import_record.save()
        self.import_file = ImportFile.objects.create(
            import_record=self.import_record)

        self.import_file.source_type = 'ASSESSED_RAW'
        self.import_file.file = File(
            open(
                path.join(path.dirname(__file__),
                          '../data/covered-buildings-sample-with-errors.csv')))
        self.import_file.save()
        self.import_file_mapping = path.join(
            path.dirname(__file__),
            "../data/covered-buildings-sample-with-errors-mappings.csv")

        tasks.save_raw_data(self.import_file.id)
        Column.create_mappings_from_file(self.import_file_mapping, self.org,
                                         self.user, self.import_file.id)
        tasks.map_data(self.import_file.id)
Example #5
    def test_promote_properties(self):
        """Test if the promoting of a property works as expected"""
        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.pk)
        tasks.map_data(self.import_file.pk)

        cycle2, _ = Cycle.objects.get_or_create(
            name='Hack Cycle 2016',
            organization=self.org,
            start=datetime.datetime(2016,
                                    1,
                                    1,
                                    tzinfo=timezone.get_current_timezone()),
            end=datetime.datetime(2016,
                                  12,
                                  31,
                                  tzinfo=timezone.get_current_timezone()),
        )

        # make sure that the new data was loaded correctly
        ps = PropertyState.objects.filter(address_line_1='50 Willow Ave SE')[0]
        self.assertEqual(ps.site_eui.magnitude, 125)

        # Promote the PropertyState to a PropertyView
        pv1 = ps.promote(self.cycle)
        pv2 = ps.promote(self.cycle)  # should just return the same object
        self.assertEqual(pv1, pv2)

        # promote the same state for a new cycle, same data
        pv3 = ps.promote(cycle2)
        self.assertNotEqual(pv3, pv1)

        props = PropertyView.objects.all()
        self.assertEqual(len(props), 2)
Example #6
    def setUp(self):
        # for now just import some test data. I'd rather create fake data... next time.
        filename = getattr(self, 'filename', 'example-data-properties.xlsx')
        self.fake_mappings = copy.copy(FAKE_MAPPINGS['portfolio'])
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars
        filepath = osp.join(osp.dirname(__file__), '..', 'data', filename)
        self.import_file.file = SimpleUploadedFile(name=filename,
                                                   content=open(
                                                       filepath, 'rb').read())
        self.import_file.save()

        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.id)
        tasks.map_data(self.import_file.pk)
        tasks.match_buildings(self.import_file.id)

        # import second file with tax lot information
        filename_2 = getattr(self, 'filename', 'example-data-taxlots.xlsx')
        self.fake_mappings = copy.copy(FAKE_MAPPINGS['taxlot'])
        _, self.import_file_2 = self.create_import_file(
            self.user, self.org, self.cycle)
        filepath = osp.join(osp.dirname(__file__), '..', 'data', filename_2)
        self.import_file_2.file = SimpleUploadedFile(name=filename_2,
                                                     content=open(
                                                         filepath,
                                                         'rb').read())
        self.import_file_2.save()

        tasks.save_raw_data(self.import_file_2.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.id)
        tasks.map_data(self.import_file_2.pk)
        tasks.match_buildings(self.import_file_2.id)
Example #7
    def test_cached_first_row_order(self):
        """Tests to make sure the first row is saved in the correct order.
        It should be the order of the headers in the original file."""

        tasks.save_raw_data(self.import_file.pk)

        # reload the import file
        self.import_file = ImportFile.objects.get(pk=self.import_file.pk)
        first_row = self.import_file.cached_first_row
        expected_first_row = "Property Id|#*#|Property Name|#*#|Year Ending|#*#|Property Notes|#*#|Address 1|#*#|Address 2|#*#|City|#*#|State/Province|#*#|Postal Code|#*#|Year Built|#*#|ENERGY STAR Score|#*#|Site EUI (kBtu/ft2)|#*#|Total GHG Emissions (MtCO2e)|#*#|Weather Normalized Site EUI (kBtu/ft2)|#*#|National Median Site EUI (kBtu/ft2)|#*#|Source EUI (kBtu/ft2)|#*#|Weather Normalized Source EUI (kBtu/ft2)|#*#|Parking - Gross Floor Area (ft2)|#*#|Organization|#*#|Generation Date|#*#|Release Date"
        self.assertEqual(first_row, expected_first_row)

        # setup the API access
        user_details = {
            'username': '******',
            'password': '******',
        }
        self.client.login(**user_details)

        url = reverse_lazy("api:v2:import_files-first-five-rows", args=[self.import_file.pk])
        resp = self.client.get(url, content_type='application/json')
        body = json.loads(resp.content)
        self.assertEqual(body['status'], 'success')
        self.assertEqual(len(body['first_five_rows']), 5)

        expected_property_notes = 'These are property notes:\n- Nice building\n- Large atrium\n- Extra crlf here'
        self.assertEqual(body['first_five_rows'][0]['Property Notes'], expected_property_notes)
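
The cached first row in these tests stores the original header order as a single string, with headers joined by the |#*#| delimiter. A small sketch, with the delimiter copied from the expected_first_row strings above, of recovering the ordered header list:

    # Sketch: split a cached first row back into its ordered header names.
    # The delimiter string is taken from the expected values in these tests.
    ROW_DELIMITER = '|#*#|'

    def headers_from_cached_first_row(cached_first_row):
        return cached_first_row.split(ROW_DELIMITER)

    # e.g. headers_from_cached_first_row(first_row)[0] == 'Property Id'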
Example #8
    def test_property_meters_endpoint_returns_a_list_of_meters_of_a_view(self):
        # add meters and readings to property associated to property_view_1
        save_raw_data(self.import_file.id)

        # create GB gas meter
        meter_details = {
            'source': Meter.GREENBUTTON,
            'source_id': '/v1/User/000/UsagePoint/123fakeID/MeterReading/000',
            'type': Meter.NATURAL_GAS,
            'property_id': self.property_view_1.property.id,
        }
        gb_gas_meter = Meter.objects.create(**meter_details)

        url = reverse('api:v2:meters-property-meters')

        post_params = json.dumps({
            'property_view_id': self.property_view_1.id,
        })

        result = self.client.post(url,
                                  post_params,
                                  content_type="application/json")
        result_dict = json.loads(result.content)

        electric_meter = Meter.objects.get(
            property_id=self.property_view_1.property_id,
            type=Meter.ELECTRICITY_GRID)
        gas_meter = Meter.objects.get(
            property_id=self.property_view_1.property_id,
            type=Meter.NATURAL_GAS,
            source=Meter.PORTFOLIO_MANAGER)
        expectation = [
            {
                'id': electric_meter.id,
                'type': 'Electric - Grid',
                'source': 'PM',
                'source_id': '5766973-0',
                'scenario_id': None,
                'scenario_name': None
            },
            {
                'id': gas_meter.id,
                'type': 'Natural Gas',
                'source': 'PM',
                'source_id': '5766973-1',
                'scenario_id': None,
                'scenario_name': None
            },
            {
                'id': gb_gas_meter.id,
                'type': 'Natural Gas',
                'source': 'GB',
                'source_id': '123fakeID',
                'scenario_id': None,
                'scenario_name': None
            },
        ]

        self.assertCountEqual(result_dict, expectation)
Example #9
    def test_multiple_id_matches(self):
        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.pk)
        tasks.map_data(self.import_file.pk)

        # verify that there are no properties listed as canonical
        property_states = tasks.list_canonical_property_states(self.org)
        self.assertEqual(len(property_states), 0)

        # promote two properties
        ps = PropertyState.objects.filter(custom_id_1='13').order_by('id')
        ps_test = ps.first()
        ps_test_2 = ps.last()
        for p in ps:
            p.promote(self.cycle)
            # from seed.utils.generic import pp
            # pp(p)

        property_states = tasks.list_canonical_property_states(self.org)
        self.assertEqual(len(property_states), 2)

        # no arguments passed should return no results
        matches = self.query_property_matches(property_states, None, None,
                                              None, None)
        self.assertEqual(len(matches), 0)
        # should return 2 properties
        matches = self.query_property_matches(property_states, None, '13',
                                              None, None)
        self.assertEqual(len(matches), 2)
        self.assertEqual(matches[0], ps_test)
        self.assertEqual(matches[1], ps_test_2)
        # should return only the second property
        matches = self.query_property_matches(property_states, '2342', None,
                                              None, None)
        self.assertEqual(len(matches), 1)
        self.assertEqual(matches[0], ps_test_2)
        # should return both properties, the first one should be the pm match, i.e. the first prop
        matches = self.query_property_matches(property_states, '481516', '13',
                                              None, None)
        self.assertEqual(len(matches), 2)
        self.assertEqual(matches[0], ps_test)
        self.assertEqual(matches[1], ps_test_2)
        # if passing in the second pm then it will not be the first
        matches = self.query_property_matches(property_states, '2342', '13',
                                              None, None)
        self.assertEqual(len(matches), 2)
        self.assertEqual(matches[1], ps_test_2)
        # pass the pm id into the custom id. it should still return the correct buildings.
        # not sure that this is the right behavior, but this is what it does, so just testing.
        matches = self.query_property_matches(property_states, None, '2342',
                                              None, None)
        self.assertEqual(len(matches), 1)
        self.assertEqual(matches[0], ps_test_2)
        matches = self.query_property_matches(property_states, '13', None,
                                              None, None)
        self.assertEqual(len(matches), 2)
        self.assertEqual(matches[0], ps_test)
        self.assertEqual(matches[1], ps_test_2)
Example #10
    def test_cached_first_row_order(self):
        """Tests to make sure the first row is saved in the correct order.
        It should be the order of the headers in the original file."""
        with patch.object(ImportFile, 'cache_first_rows', return_value=None):
            tasks.save_raw_data(self.import_file.pk)

        expected_first_row = 'Property Id|#*#|Property Name|#*#|Year Ending|#*#|Property Floor Area (Buildings and Parking) (ft2)|#*#|Address 1|#*#|Address 2|#*#|City|#*#|State/Province|#*#|Postal Code|#*#|Year Built|#*#|ENERGY STAR Score|#*#|Site EUI (kBtu/ft2)|#*#|Total GHG Emissions (MtCO2e)|#*#|Weather Normalized Site EUI (kBtu/ft2)|#*#|National Median Site EUI (kBtu/ft2)|#*#|Source EUI (kBtu/ft2)|#*#|Weather Normalized Source EUI (kBtu/ft2)|#*#|National Median Source EUI (kBtu/ft2)|#*#|Parking - Gross Floor Area (ft2)|#*#|Organization|#*#|Generation Date|#*#|Release Date'  # NOQA

        import_file = ImportFile.objects.get(pk=self.import_file.pk)
        first_row = import_file.cached_first_row
        self.assertEqual(first_row, expected_first_row)
Example #11
    def test_match_buildings(self):
        """ case A (one property <-> one tax lot) """
        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.pk)
        tasks.map_data(self.import_file.pk)

        # Check to make sure all the properties imported
        ps = PropertyState.objects.filter(
            data_state=DATA_STATE_MAPPING,
            organization=self.org,
            import_file=self.import_file,
        )
        self.assertEqual(len(ps), 14)

        # Check to make sure the taxlots were imported
        ts = TaxLotState.objects.filter(
            data_state=DATA_STATE_MAPPING,
            organization=self.org,
            import_file=self.import_file,
        )
        self.assertEqual(len(ts), 18)

        # Check a single case of the taxlotstate
        ts = TaxLotState.objects.filter(
            jurisdiction_tax_lot_id='1552813').first()
        self.assertEqual(ts.jurisdiction_tax_lot_id, '1552813')
        self.assertEqual(ts.address_line_1, None)
        self.assertEqual(ts.extra_data["data_008"], 1)

        # Check a single case of the propertystate
        ps = PropertyState.objects.filter(pm_property_id='2264')
        self.assertEqual(len(ps), 1)
        ps = ps.first()
        self.assertEqual(ps.pm_property_id, '2264')
        self.assertEqual(ps.address_line_1, '50 Willow Ave SE')
        self.assertEqual('data_007' in ps.extra_data, True)
        self.assertEqual('data_008' in ps.extra_data, False)
        self.assertEqual(ps.extra_data["data_007"], 'a')

        # verify that the lot_number has the tax_lot information. For this case it is one-to-one
        self.assertEqual(ps.lot_number, ts.jurisdiction_tax_lot_id)

        tasks.match_buildings(self.import_file.id)

        self.assertEqual(TaxLot.objects.count(), 10)

        qry = PropertyView.objects.filter(state__custom_id_1='7')
        self.assertEqual(qry.count(), 1)
        state = qry.first().state

        self.assertEqual(state.address_line_1, "12 Ninth Street")
        self.assertEqual(state.property_name, "Grange Hall")
Example #12
    def test_save_raw_data(self):
        """Save information in extra_data, set other attrs."""
        with patch.object(ImportFile, 'cache_first_rows', return_value=None):
            tasks.save_raw_data(self.import_file.pk)

        self.assertEqual(PropertyState.objects.filter(import_file=self.import_file).count(), 512)
        raw_saved = PropertyState.objects.filter(
            import_file=self.import_file,
        ).latest('id')

        self.assertDictEqual(raw_saved.extra_data, self.fake_extra_data)
        self.assertEqual(raw_saved.organization, self.org)
Example #13
    def test_import_file(self):
        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.pk)
        tasks.map_data(self.import_file.pk)

        ps = PropertyState.objects.filter(pm_property_id='2264').first()
        ps.promote(self.cycle)

        # should only be 11 unmatched_properties because one was promoted.
        ps = self.import_file.find_unmatched_property_states()
        self.assertEqual(len(ps), 13)
Example #14
    def test_generates_headers_for_those_missing(self):
        """Tests to make sure the first row is saved in the correct order and includes
        generated names for missing headers"""
        with patch.object(ImportFile, 'cache_first_rows', return_value=None):
            tasks.save_raw_data(self.import_file.pk)

        expected_first_row = 'Property Id|#*#|Property Name|#*#|SEED Generated Header 1|#*#|SEED Generated Header 2|#*#|Address 1|#*#|SEED Generated Header 3|#*#|City|#*#|State/Province|#*#|Postal Code|#*#|Year Built|#*#|ENERGY STAR Score|#*#|Site EUI (kBtu/ft2)|#*#|Total GHG Emissions (MtCO2e)|#*#|Weather Normalized Site EUI (kBtu/ft2)|#*#|National Median Site EUI (kBtu/ft2)|#*#|Source EUI (kBtu/ft2)|#*#|Weather Normalized Source EUI (kBtu/ft2)|#*#|National Median Source EUI (kBtu/ft2)|#*#|Parking - Gross Floor Area (ft2)|#*#|Organization|#*#|Generation Date|#*#|Release Date'  # NOQA

        import_file = ImportFile.objects.get(pk=self.import_file.pk)
        first_row = import_file.cached_first_row
        self.assertEqual(first_row, expected_first_row)

        self.assertEqual(import_file.has_generated_headers, True)
Example #15
def _create_propertyview(request, org, user, dataset_name):
    cycle = Cycle.objects.filter(
        organization=org).last()  # might need to hardcode this
    dataset = ImportRecord.objects.get(name=dataset_name,
                                       super_organization=org)
    result = [{'City': request.GET['city'], 'State': request.GET['state']}]
    if 'street' in request.GET:
        result[0]['Address Line 1'] = request.GET['street']
    else:
        result[0]['Address Line 1'] = request.GET['address_line_1']
    if 'zipcode' in request.GET:
        result[0]['Postal Code'] = request.GET['zipcode']
    else:
        result[0]['Postal Code'] = request.GET['postal_code']
    if 'property_uid' in request.GET:
        result[0]['Custom ID 1'] = request.GET['property_uid']
    file_pk = utils.save_and_load(user, dataset, cycle, result,
                                  "profile_data.csv")
    # save data
    resp = save_raw_data(file_pk)
    save_prog_key = resp['progress_key']
    utils.wait_for_task(save_prog_key)
    # map data
    #        save_column_mappings(file_id, col_mappings) #perform column mapping
    resp = map_data(file_pk)
    map_prog_key = resp['progress_key']
    utils.wait_for_task(map_prog_key)
    resp = match_buildings(file_pk)
    #        resp = geocode_buildings_task(file_pk)
    if (resp['status'] == 'error'):
        return resp
    match_prog_key = resp['progress_key']
    utils.wait_for_task(match_prog_key)
    propertyview = utils.propertyview_find(request)
    return propertyview
Example #16
    def test_postal_code_property(self):

        new_mappings = copy.deepcopy(self.fake_mappings['portfolio'])

        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(new_mappings, self.org, self.user, self.import_file.pk)
        tasks.map_data(self.import_file.pk)

        # get mapped property postal_code
        ps = PropertyState.objects.filter(address_line_1='11 Ninth Street')[0]
        self.assertEqual(ps.postal_code, '00340')

        ps = PropertyState.objects.filter(address_line_1='20 Tenth Street')[0]
        self.assertEqual(ps.postal_code, '00000')

        ps = PropertyState.objects.filter(address_line_1='93029 Wellington Blvd')[0]
        self.assertEqual(ps.postal_code, '00001-0002')
Example #17
    def test_mapping_tax_lots_only(self):
        # update the mappings to not include any taxlot tables in the data
        new_mappings = copy.deepcopy(self.fake_mappings)
        for m in new_mappings:
            if m["to_table_name"] == 'PropertyState':
                m["to_table_name"] = 'TaxLotState'

        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(new_mappings, self.org, self.user, self.import_file.pk)
        tasks.map_data(self.import_file.pk)

        # make sure that no taxlot objects were created. the 12 here are the import extra_data.
        ps = PropertyState.objects.all()
        self.assertEqual(len(ps), 14)

        # make sure that the new data was loaded correctly
        ts = TaxLotState.objects.filter(address_line_1='50 Willow Ave SE').first()
        self.assertEqual(ts.extra_data['site_eui'], 125)
Example #18
    def test_save_raw_data_zip(self):
        # -- Act
        with patch.object(ImportFile, 'cache_first_rows', return_value=None):
            progress_info = tasks.save_raw_data(self.import_file.pk)

        # -- Assert
        self.assertEqual('error', progress_info['status'])
        self.assertIn('Invalid or missing schema specification',
                      progress_info['message'])
Example #19
    def setUp(self):
        filename = getattr(self, 'filename', 'example-data-properties-duplicates.xlsx')
        import_file_source_type = ASSESSED_RAW
        self.fake_mappings = FAKE_MAPPINGS['portfolio']
        self.fake_extra_data = FAKE_EXTRA_DATA
        self.fake_row = FAKE_ROW
        selfvars = self.set_up(import_file_source_type)
        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars
        filepath = osp.join(osp.dirname(__file__), '..', 'data', filename)
        self.import_file.file = SimpleUploadedFile(
            name=filename,
            content=open(filepath, 'rb').read()
        )
        self.import_file.save()

        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user, self.import_file.pk)
        tasks.map_data(self.import_file.pk)
Example #20
    def setUp(self):
        super(TestEquivalenceWithFile, self).setUp()

        filename = getattr(self, 'filename', 'covered-buildings-sample.csv')
        import_file_source_type = ASSESSED_RAW
        self.fake_mappings = FAKE_MAPPINGS['covered_building']
        selfvars = self.set_up(import_file_source_type)
        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars
        filepath = osp.join(osp.dirname(__file__), '..', '..', '..', 'tests',
                            'data', filename)
        self.import_file.file = SimpleUploadedFile(name=filename,
                                                   content=open(
                                                       filepath, 'rb').read())
        self.import_file.save()

        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.id)
        tasks.map_data(self.import_file.pk)
Example #21
    def setUp(self):
        data_importer_data_dir = os.path.join(os.path.dirname(__file__), '..',
                                              'data_importer', 'tests', 'data')
        filename = getattr(self, 'filename', 'example-data-properties.xlsx')
        self.fake_mappings = copy.copy(FAKE_MAPPINGS['portfolio'])
        selfvars = self.set_up(ASSESSED_RAW)
        self.user, self.org, self.import_file, self.import_record, self.cycle = selfvars
        filepath = os.path.join(data_importer_data_dir, filename)
        self.import_file.file = SimpleUploadedFile(name=filename,
                                                   content=open(
                                                       filepath, 'rb').read())
        self.import_file.save()
        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.id)
        tasks.map_data(self.import_file.pk)
        tasks.match_buildings(self.import_file.id)

        # import second file that is currently the same, but should be slightly different
        filename_2 = getattr(self, 'filename',
                             'example-data-properties-small-changes.xlsx')
        _, self.import_file_2 = self.create_import_file(
            self.user, self.org, self.cycle)
        filepath = os.path.join(data_importer_data_dir, filename_2)
        self.import_file_2.file = SimpleUploadedFile(name=filename_2,
                                                     content=open(
                                                         filepath,
                                                         'rb').read())
        self.import_file_2.save()

        tasks.save_raw_data(self.import_file_2.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file_2.id)
        tasks.map_data(self.import_file_2.pk)
        tasks.match_buildings(self.import_file_2.id)

        # for api tests
        user_details = {
            'username': '******',
            'password': '******',
        }
        self.client.login(**user_details)
Example #22
    def test_match_buildings(self):
        """ case B (many property <-> one tax lot) """
        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.pk)
        # Set remap to True because for some reason this file id has been imported before.
        tasks.map_data(self.import_file.pk, True)

        # Check to make sure all the properties imported
        ps = PropertyState.objects.filter(
            data_state=DATA_STATE_MAPPING,
            organization=self.org,
            import_file=self.import_file,
        )
        self.assertEqual(len(ps), 14)

        # Check to make sure the tax lots were imported
        ts = TaxLotState.objects.filter(
            data_state=DATA_STATE_MAPPING,
            organization=self.org,
            import_file=self.import_file,
        )
        self.assertEqual(len(ts), 18)

        # verify that the lot_number has the tax_lot information. For this case it is one-to-many
        p_test = PropertyState.objects.filter(
            pm_property_id='5233255',
            organization=self.org,
            data_state=DATA_STATE_MAPPING,
            import_file=self.import_file,
        ).first()
        self.assertEqual(p_test.lot_number, "333/66555;333/66125;333/66148")

        tasks.match_buildings(self.import_file.id)

        # make sure the property only has one tax lot and vice versa
        tlv = TaxLotView.objects.filter(
            state__jurisdiction_tax_lot_id='11160509', cycle=self.cycle)
        self.assertEqual(len(tlv), 1)
        tlv = tlv[0]
        properties = tlv.property_states()
        self.assertEqual(len(properties), 3)
Example #23
    def test_postal_code_taxlot(self):

        new_mappings = copy.deepcopy(self.fake_mappings['taxlot'])

        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(new_mappings, self.org, self.user, self.import_file.pk)
        tasks.map_data(self.import_file.pk)

        # get mapped taxlot postal_code
        ts = TaxLotState.objects.filter(address_line_1='35 Tenth Street').first()

        if ts is None:
            raise TypeError("Invalid Taxlot Address!")
        self.assertEqual(ts.postal_code, '00333')

        ts = TaxLotState.objects.filter(address_line_1='93030 Wellington Blvd').first()

        if ts is None:
            raise TypeError("Invalid Taxlot Address!")
        self.assertEqual(ts.postal_code, '00000-0000')
Example #24
    def test_mapping(self):
        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user, self.import_file.pk)
        tasks.map_data(self.import_file.pk)

        # There are a total of 18 tax lot ids in the import file
        ts = TaxLotState.objects.all()

        self.assertEqual(len(ts), 18)

        # make sure that the new data was loaded correctly and that the lot_number was set
        # appropriately
        ps = PropertyState.objects.filter(address_line_1='2700 Welstone Ave NE')[0]
        self.assertEqual(ps.site_eui.magnitude, 1202)
        self.assertEqual(ps.lot_number, '11160509')

        ps = PropertyState.objects.filter(address_line_1='521 Elm Street')[0]
        self.assertEqual(ps.site_eui.magnitude, 1358)
        # The lot_number should also have the normalized code run, then re-delimited
        self.assertEqual(ps.lot_number, '333/66555;333/66125;333/66148')
Example #25
    def test_single_id_matches(self):
        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(self.fake_mappings, self.org, self.user, self.import_file.pk)
        tasks.map_data(self.import_file.pk)

        # verify that there are no properties listed as canonical
        property_states = tasks.list_canonical_property_states(self.org)
        self.assertEqual(len(property_states), 0)

        # promote a property
        ps = PropertyState.objects.filter(pm_property_id='2264').first()
        ps.promote(self.cycle)

        property_states = tasks.list_canonical_property_states(self.org)
        self.assertEqual(len(property_states), 1)

        matches = self.query_property_matches(property_states, None, None, None, None)
        self.assertEqual(len(matches), 0)
        matches = self.query_property_matches(property_states, '2264', None, None, None)
        self.assertEqual(len(matches), 1)
        self.assertEqual(matches[0], ps)
Example #26
    def test_mapping_properties_only(self):
        # update the mappings to not include any taxlot tables in the data
        # note that save_data reads in from the propertystate table, so that will always
        # have entries in the db (for now).
        new_mappings = copy.deepcopy(self.fake_mappings)
        for m in new_mappings:
            if m["to_table_name"] == 'TaxLotState':
                m["to_table_name"] = 'PropertyState'

        tasks.save_raw_data(self.import_file.pk)
        Column.create_mappings(new_mappings, self.org, self.user, self.import_file.pk)
        tasks.map_data(self.import_file.pk)

        # make sure that no taxlot objects were created
        ts = TaxLotState.objects.all()
        self.assertEqual(len(ts), 0)

        # make sure that the new data was loaded correctly
        ps = PropertyState.objects.filter(address_line_1='2700 Welstone Ave NE')[0]
        self.assertEqual(ps.site_eui.magnitude, 1202)
        self.assertEqual(ps.extra_data['jurisdiction_tax_lot_id'], '11160509')
Example #27
    def test_save_raw_data_zip(self):
        # -- Act
        with patch.object(ImportFile, 'cache_first_rows', return_value=None):
            progress_info = tasks.save_raw_data(self.import_file.pk)

        # -- Assert
        self.assertEqual('success', progress_info['status'],
                         json.dumps(progress_info))
        self.assertEqual(
            PropertyState.objects.filter(import_file=self.import_file).count(),
            2)
        raw_saved = PropertyState.objects.filter(
            import_file=self.import_file, ).latest('id')
        self.assertEqual(raw_saved.organization, self.org)
Example #28
    def test_map_all_models_xml(self):
        # -- Setup
        with patch.object(ImportFile, 'cache_first_rows', return_value=None):
            progress_info = tasks.save_raw_data(self.import_file.pk)
        self.assertEqual('success', progress_info['status'],
                         json.dumps(progress_info))
        self.assertEqual(
            PropertyState.objects.filter(import_file=self.import_file).count(),
            1)

        # make the column mappings
        self.fake_mappings = default_buildingsync_profile_mappings()
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.pk)

        # map the data
        progress_info = tasks.map_data(self.import_file.pk)
        self.assertEqual('success', progress_info['status'])
        # verify there were no errors with the files
        self.assertEqual({}, progress_info.get('file_info', {}))
        ps = PropertyState.objects.filter(address_line_1='123 MAIN BLVD',
                                          import_file=self.import_file)
        self.assertEqual(len(ps), 1)

        # -- Act
        tasks.map_additional_models(self.import_file.pk)

        # -- Assert
        ps = PropertyState.objects.filter(address_line_1='123 MAIN BLVD',
                                          import_file=self.import_file)

        self.assertEqual(ps.count(), 1)

        # verify the property view, scenario and meter data were created
        pv = PropertyView.objects.filter(state=ps[0])
        self.assertEqual(pv.count(), 1)

        scenario = Scenario.objects.filter(property_state=ps[0])
        self.assertEqual(scenario.count(), 3)

        # for bsync, meters are linked to scenarios only (not properties)
        meters = Meter.objects.filter(scenario__in=scenario)
        self.assertEqual(meters.count(), 6)
Example #29
    def test_map_all_models_xml(self):
        # -- Setup
        with patch.object(ImportFile, 'cache_first_rows', return_value=None):
            progress_info = tasks.save_raw_data(self.import_file.pk)
        self.assertEqual('success', progress_info['status'],
                         json.dumps(progress_info))
        self.assertEqual(
            PropertyState.objects.filter(import_file=self.import_file).count(),
            1)

        # make the column mappings
        self.fake_mappings = default_buildingsync_profile_mappings()
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.pk)

        # map the data
        progress_info = tasks.map_data(self.import_file.pk)
        self.assertEqual('success', progress_info['status'])
        # verify there were no errors with the files
        self.assertEqual({}, progress_info.get('file_info', {}))
        ps = PropertyState.objects.filter(address_line_1='123 Main St',
                                          import_file=self.import_file)
        self.assertEqual(ps.count(), 1)

        # -- Act
        progress_info = tasks.geocode_and_match_buildings_task(
            self.import_file.pk)

        # -- Assert
        ps = PropertyState.objects.filter(address_line_1='123 Main St',
                                          import_file=self.import_file)
        self.assertEqual(ps.count(), 1)

        # !! we should have warnings for our file because of the bad measure names !!
        self.assertNotEqual({}, progress_info.get('file_info', {}))
        self.assertIn(self.import_file.uploaded_filename,
                      list(progress_info['file_info'].keys()))
        self.assertNotEqual(
            [],
            progress_info['file_info'][self.import_file.uploaded_filename].get(
                'warnings', []))
Example #30
    def test_map_data_zip(self):
        # -- Setup
        with patch.object(ImportFile, 'cache_first_rows', return_value=None):
            progress_info = tasks.save_raw_data(self.import_file.pk)
        self.assertEqual('success', progress_info['status'],
                         json.dumps(progress_info))
        self.assertEqual(
            PropertyState.objects.filter(import_file=self.import_file).count(),
            2)

        # make the column mappings
        self.fake_mappings = default_buildingsync_profile_mappings()
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.pk)

        # -- Act
        progress_info = tasks.map_data(self.import_file.pk)

        # -- Assert
        self.assertEqual('success', progress_info['status'])
        ps = PropertyState.objects.filter(address_line_1='123 Main St',
                                          import_file=self.import_file)
        self.assertEqual(len(ps), 2)
Example #31
    def test_map_all_models_zip(self):
        # -- Setup
        with patch.object(ImportFile, 'cache_first_rows', return_value=None):
            progress_info = tasks.save_raw_data(self.import_file.pk)
        self.assertEqual('success', progress_info['status'],
                         json.dumps(progress_info))
        self.assertEqual(
            PropertyState.objects.filter(import_file=self.import_file).count(),
            2)

        # make the column mappings
        self.fake_mappings = default_buildingsync_profile_mappings()
        Column.create_mappings(self.fake_mappings, self.org, self.user,
                               self.import_file.pk)

        # map the data
        progress_info = tasks.map_data(self.import_file.pk)
        self.assertEqual('success', progress_info['status'])
        ps = PropertyState.objects.filter(address_line_1='123 Main St',
                                          import_file=self.import_file)
        self.assertEqual(ps.count(), 2)

        # -- Act
        tasks.map_additional_models(self.import_file.pk)

        # -- Assert
        ps = PropertyState.objects.filter(address_line_1='123 Main St',
                                          import_file=self.import_file)
        self.assertEqual(ps.count(), 2)

        # verify there are 2 building files
        bfs = BuildingFile.objects.all()
        self.assertEqual(bfs.count(), 2)

        # check that scenarios were created
        scenarios = Scenario.objects.all()
        self.assertEqual(scenarios.count(), 31)
Example #32
    def test_cleanse(self):
        # Import the file and run mapping

        # This is silly, the mappings are backwards from what you would expect.
        # The key is the BS field, and the value is the value in the CSV

        fake_mappings = [
            {
                "from_field": u'block_number',
                "to_table_name": u'PropertyState',
                "to_field": u'block_number',
            }, {
                "from_field": u'error_type',
                "to_table_name": u'PropertyState',
                "to_field": u'error_type',
            }, {
                "from_field": u'building_count',
                "to_table_name": u'PropertyState',
                "to_field": u'building_count',
            }, {
                "from_field": u'conditioned_floor_area',
                "to_table_name": u'PropertyState',
                "to_field": u'conditioned_floor_area',
            }, {
                "from_field": u'energy_score',
                "to_table_name": u'PropertyState',
                "to_field": u'energy_score',
            }, {
                "from_field": u'gross_floor_area',
                "to_table_name": u'PropertyState',
                "to_field": u'gross_floor_area',
            }, {
                "from_field": u'lot_number',
                "to_table_name": u'PropertyState',
                "to_field": u'lot_number',
            }, {
                "from_field": u'occupied_floor_area',
                "to_table_name": u'PropertyState',
                "to_field": u'occupied_floor_area',
            }, {
                "from_field": u'conditioned_floor_area',
                "to_table_name": u'PropertyState',
                "to_field": u'conditioned_floor_area',
            }, {
                "from_field": u'postal_code',
                "to_table_name": u'PropertyState',
                "to_field": u'postal_code',
            }, {
                "from_field": u'site_eui',
                "to_table_name": u'PropertyState',
                "to_field": u'site_eui',
            }, {
                "from_field": u'site_eui_weather_normalized',
                "to_table_name": u'PropertyState',
                "to_field": u'site_eui_weather_normalized',
            }, {
                "from_field": u'source_eui',
                "to_table_name": u'PropertyState',
                "to_field": u'source_eui',
            }, {
                "from_field": u'source_eui_weather_normalized',
                "to_table_name": u'PropertyState',
                "to_field": u'source_eui_weather_normalized',
            }, {
                "from_field": u'address_line_1',
                "to_table_name": u'PropertyState',
                "to_field": u'address_line_1',
            }, {
                "from_field": u'address_line_2',
                "to_table_name": u'PropertyState',
                "to_field": u'address_line_2',
            }, {
                "from_field": u'building_certification',
                "to_table_name": u'PropertyState',
                "to_field": u'building_certification',
            }, {
                "from_field": u'city',
                "to_table_name": u'PropertyState',
                "to_field": u'city',
            }, {
                "from_field": u'custom_id_1',
                "to_table_name": u'PropertyState',
                "to_field": u'custom_id_1',
            }, {
                "from_field": u'district',
                "to_table_name": u'PropertyState',
                "to_field": u'district',
            }, {
                "from_field": u'energy_alerts',
                "to_table_name": u'PropertyState',
                "to_field": u'energy_alerts',
            }, {
                "from_field": u'owner_address',
                "to_table_name": u'PropertyState',
                "to_field": u'owner_address',
            }, {
                "from_field": u'owner_city_state',
                "to_table_name": u'PropertyState',
                "to_field": u'owner_city_state',
            }, {
                "from_field": u'owner_email',
                "to_table_name": u'PropertyState',
                "to_field": u'owner_email',
            }, {
                "from_field": u'owner_postal_code',
                "to_table_name": u'PropertyState',
                "to_field": u'owner_postal_code',
            }, {
                "from_field": u'owner_telephone',
                "to_table_name": u'PropertyState',
                "to_field": u'owner_telephone',
            }, {
                "from_field": u'pm_property_id',
                "to_table_name": u'PropertyState',
                "to_field": u'pm_property_id',
            }, {
                "from_field": u'property_name',
                "to_table_name": u'PropertyState',
                "to_field": u'property_name',
            }, {
                "from_field": u'property_notes',
                "to_table_name": u'PropertyState',
                "to_field": u'property_notes',
            }, {
                "from_field": u'space_alerts',
                "to_table_name": u'PropertyState',
                "to_field": u'space_alerts',
            }, {
                "from_field": u'state_province',
                "to_table_name": u'PropertyState',
                "to_field": u'state_province',
            }, {
                "from_field": u'tax_lot_id',
                "to_table_name": u'PropertyState',
                "to_field": u'tax_lot_id',
            }, {
                "from_field": u'use_description',
                "to_table_name": u'PropertyState',
                "to_field": u'use_description',
            }, {
                "from_field": u'generation_date',
                "to_table_name": u'PropertyState',
                "to_field": u'generation_date',
            }, {
                "from_field": u'recent_sale_date',
                "to_table_name": u'PropertyState',
                "to_field": u'recent_sale_date',
            }, {
                "from_field": u'generation_date',
                "to_table_name": u'PropertyState',
                "to_field": u'generation_date',
            }, {
                "from_field": u'release_date',
                "to_table_name": u'PropertyState',
                "to_field": u'release_date',
            }, {
                "from_field": u'year_built',
                "to_table_name": u'PropertyState',
                "to_field": u'year_built',
            }, {
                "from_field": u'year_ending',
                "to_table_name": u'PropertyState',
                "to_field": u'year_ending',
            }
        ]

        tasks.save_raw_data(self.import_file.id)

        Column.create_mappings(fake_mappings, self.org, self.user)
        tasks.map_data(self.import_file.id)

        qs = PropertyState.objects.filter(
            import_file=self.import_file,
            source_type=ASSESSED_BS,
        ).iterator()

        c = Cleansing(self.org)
        c.cleanse('property', qs)

        # _log.debug(c.results)
        # This only checks to make sure the 34 errors have occurred.
        self.assertEqual(len(c.results), 34)
Example #33
    def test_cleanse(self):
        # Import the file and run mapping

        # This is silly, the mappings are backwards from what you would expect. The key is the BS field, and the
        # value is the value in the CSV
        # fake_mappings = {
        #     'city': 'city',
        #     'postal_code': 'Zip',
        #     'gross_floor_area': 'GBA',
        #     'building_count': 'BLDGS',
        #     'year_built': 'AYB_YearBuilt',
        #     'state_province': 'State',
        #     'address_line_1': 'Address',
        #     'owner': 'Owner',
        #     'property_notes': 'Property Type',
        #     'tax_lot_id': 'UBI',
        #     'custom_id_1': 'Custom ID',
        #     'pm_property_id': 'PM Property ID'
        # }

        tasks.save_raw_data(self.import_file.id)
        # util.make_fake_mappings(fake_mappings, self.org) -> convert to Column.create_mappings()
        tasks.map_data(self.import_file.id)

        qs = PropertyState.objects.filter(
            import_file=self.import_file,
            source_type=ASSESSED_BS,
        ).iterator()

        c = Cleansing(self.org)
        c.cleanse(qs)
        # print c.results

        self.assertEqual(len(c.results), 2)

        result = [v for v in c.results.values() if
                  v['address_line_1'] == '95373 E Peach Avenue']
        if len(result) == 1:
            result = result[0]
        else:
            raise RuntimeError('Non unity results')

        self.assertTrue(result['address_line_1'], '95373 E Peach Avenue')
        self.assertTrue(result['tax_lot_id'], '10107/c6596')
        res = [{
            'field': u'pm_property_id',
            'formatted_field': u'PM Property ID',
            'value': u'',
            'message': u'PM Property ID is missing',
            'detailed_message': u'PM Property ID is missing',
            'severity': u'error'
        }]
        self.assertEqual(res, result['cleansing_results'])

        result = [v for v in c.results.values() if
                  v['address_line_1'] == '120243 E True Lane']
        if len(result) == 1:
            result = result[0]
        else:
            raise RuntimeError('Non unity results')

        res = [{
            'field': u'year_built',
            'formatted_field': u'Year Built',
            'value': 0,
            'message': u'Year Built out of range',
            'detailed_message': u'Year Built [0] < 1700',
            'severity': u'error'
        }, {
            'field': u'gross_floor_area',
            'formatted_field': u'Gross Floor Area',
            'value': 10000000000.0,
            'message': u'Gross Floor Area out of range',
            'detailed_message': u'Gross Floor Area [10000000000.0] > 7000000.0',
            'severity': u'error'
        }, {
            'field': u'custom_id_1',
            'formatted_field': u'Custom ID 1',
            'value': u'',
            'message': u'Custom ID 1 is missing',
            'detailed_message': u'Custom ID 1 is missing',
            'severity': u'error'
        }, {
            'field': u'pm_property_id',
            'formatted_field': u'PM Property ID',
            'value': u'',
            'message': u'PM Property ID is missing',
            'detailed_message': u'PM Property ID is missing',
            'severity': u'error'
        }]
        self.assertItemsEqual(res, result['cleansing_results'])

        result = [v for v in c.results.values() if
                  v['address_line_1'] == '1234 Peach Tree Avenue']
        self.assertEqual(len(result), 0)
        self.assertEqual(result, [])
Example #34
    def import_exported_data(self, filename):
        """
        Import test files from Stephen for many-to-many testing. This imports
        and maps the data accordingly. Presently these files are missing a
        couple of attributes to make them valid:
            1) the master campus record to define the pm_property_id
            2) the joins between propertystate and taxlotstate
        """

        # Do a bunch of work to flatten out this temp file that has extra_data
        # as a string representation of a dict
        data = []
        keys = None
        new_keys = set()

        f = os.path.join(os.path.dirname(__file__), 'data', filename)
        with open(f, 'rb') as csvfile:
            reader = csv.DictReader(csvfile)
            keys = reader.fieldnames
            for row in reader:
                ed = json.loads(row.pop('extra_data'))
                for k, v in ed.iteritems():
                    new_keys.add(k)
                    row[k] = v
                data.append(row)

        # remove the extra_data column and add in the new columns
        keys.remove('extra_data')
        for k in new_keys:
            keys.append(k)

        # save the new file
        new_file_name = 'tmp_{}_flat.csv'.format(
            os.path.splitext(os.path.basename(filename))[0]
        )
        f_new = os.path.join(os.path.dirname(__file__), 'data', new_file_name)
        with open(f_new, 'w') as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=keys)
            writer.writeheader()
            for d in data:
                writer.writerow(d)

        # save the keys (this doesn't appear to be used anywhere)
        new_file_name = 'tmp_{}_keys.csv'.format(
            os.path.splitext(os.path.basename(filename))[0]
        )
        f_new = os.path.join(os.path.dirname(__file__), 'data', new_file_name)
        with open(f_new, 'w') as outfile:
            outfile.writelines([str(key) + '\n' for key in keys])

        # Continue saving the raw data
        new_file_name = "tmp_{}_flat.csv".format(
            os.path.splitext(os.path.basename(filename))[0]
        )
        f_new = os.path.join(os.path.dirname(__file__), 'data', new_file_name)
        self.import_file.file = File(open(f_new))
        self.import_file.save()

        save_raw_data(self.import_file.id)

        # the mapping is just the 'keys' repeated since the file
        # was created as a database dump
        mapping = []
        for k in keys:
            if k == 'id':
                continue
            mapping.append(
                {
                    "from_field": k,
                    "to_table_name": "PropertyState",
                    "to_field": k
                }
            )

        Column.create_mappings(mapping, self.org, self.user)

        # call the mapping function from the tasks file
        map_data(self.import_file.id)
Example #35
    def test_cleanse(self):
        # Import the file and run mapping

        # Year Ending,Energy Score,Total GHG Emissions (MtCO2e),Weather Normalized Site EUI (kBtu/ft2),
        # National Median Site EUI (kBtu/ft2),Source EUI (kBtu/ft2),Weather Normalized Source EUI (kBtu/ft2),
        # National Median Source EUI (kBtu/ft2),Parking - Gross Floor Area (ft2),Organization
        # Release Date
        fake_mappings = [
            {
                "from_field": u'Property Id',
                "to_table_name": u'PropertyState',
                "to_field": u'pm_property_id',
            }, {
                "from_field": u'Property Name',
                "to_table_name": u'PropertyState',
                "to_field": u'property_name',
            }, {
                "from_field": u'Address 1',
                "to_table_name": u'PropertyState',
                "to_field": u'address_line_1',
            }, {
                "from_field": u'Address 2',
                "to_table_name": u'PropertyState',
                "to_field": u'address_line_2',
            }, {
                "from_field": u'City',
                "to_table_name": u'PropertyState',
                "to_field": u'city',
            }, {
                "from_field": u'State/Province',
                "to_table_name": u'PropertyState',
                "to_field": u'state_province',
            }, {
                "from_field": u'Postal Code',
                "to_table_name": u'PropertyState',
                "to_field": u'postal_code',
            }, {
                "from_field": u'Year Built',
                "to_table_name": u'PropertyState',
                "to_field": u'year_built',
            }, {
                "from_field": u'Property Floor Area (Buildings and Parking) (ft2)',
                "to_table_name": u'PropertyState',
                "to_field": u'gross_floor_area',
            }, {
                "from_field": u'Site EUI (kBtu/ft2)',
                "to_table_name": u'PropertyState',
                "to_field": u'site_eui',
            }, {
                "from_field": u'Generation Date',
                "to_table_name": u'PropertyState',
                "to_field": u'generation_date',
            }
        ]

        tasks.save_raw_data(self.import_file.id)
        Column.create_mappings(fake_mappings, self.org, self.user)
        tasks.map_data(self.import_file.id)

        qs = PropertyState.objects.filter(
            import_file=self.import_file,
            source_type=PORTFOLIO_BS,
        ).iterator()

        c = Cleansing(self.org)
        c.cleanse('property', qs)

        _log.debug(c.results)

        self.assertEqual(len(c.results), 2)

        result = [v for v in c.results.values() if v['address_line_1'] == '120243 E True Lane']
        if len(result) == 1:
            result = result[0]
        else:
            raise RuntimeError('Non unity results')

        res = [{
            'field': u'pm_property_id',
            'formatted_field': u'PM Property ID',
            'value': u'',
            'message': u'PM Property ID is missing',
            'detailed_message': u'PM Property ID is missing',
            'severity': u'error'
        }]
        self.assertEqual(res, result['cleansing_results'])

        result = [v for v in c.results.values() if v['address_line_1'] == '95373 E Peach Avenue']
        if len(result) == 1:
            result = result[0]
        else:
            raise RuntimeError('Non unity results')

        res = [{
            'field': u'site_eui',
            'formatted_field': u'Site EUI',
            'value': 0.1,
            'message': u'Site EUI out of range',
            'detailed_message': u'Site EUI [0.1] < 10.0',
            'severity': u'warning'
        }]
        self.assertEqual(res, result['cleansing_results'])
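
A brief sketch of walking the data-quality findings collected above; the result keys (address_line_1, cleansing_results, severity, message) are taken from the expected values asserted in this example:

    # Sketch only: iterate the Cleansing results and print each finding.
    for res in c.results.values():
        address = res.get('address_line_1')
        for issue in res.get('cleansing_results', []):
            print(address, issue['severity'], issue['message'])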