def make_fake_property(import_file, init_data, bs_type, is_canon=False, org=None):
    """For making fake mapped PropertyState to test matching against.

    :param import_file: ImportFile instance to associate, or None.
    :param init_data: dict of field values used to create the PropertyState.
    :param bs_type: value stored as the state's ``source_type``.
    :param is_canon: if True, promote the state into a test Cycle
        (the state linked via PropertyView is now what "canon" means).
    :param org: Organization the state belongs to; required.
    :returns: the created PropertyState.
    :raises ValueError: if ``org`` is falsy.
    """
    if not org:
        # Was `raise "no org"`: raising a string is a TypeError in
        # modern Python. Use a real exception class, same message.
        raise ValueError("no org")
    ps = PropertyState.objects.create(**init_data)
    ps.import_file = import_file
    ps.organization = org
    if import_file is None:
        ps.import_record = None
    else:
        ps.import_record = import_file.import_record
    ps.source_type = bs_type
    # TODO: can we remove set_initial sources? Seems like this is invalid
    # in the new data model world.
    set_initial_sources(ps)
    ps.save()
    # The idea of canon is no longer applicable. The linked property state
    # in the PropertyView is now canon
    if is_canon:
        # need to create a cycle and add it to the PropertyView table
        cycle, _ = Cycle.objects.get_or_create(
            name=u'Test Cycle',
            organization=org,
            start=datetime.datetime(2015, 1, 1),
            end=datetime.datetime(2015, 12, 31),
        )
        ps.promote(cycle)
    return ps
def make_fake_snapshot(import_file, init_data, bs_type, is_canon=False):
    """For making fake mapped BuildingSnapshots to test matching against."""
    snapshot = BuildingSnapshot.objects.create(**init_data)
    snapshot.import_file = import_file
    # No file means no record to link back to.
    snapshot.import_record = (
        import_file.import_record if import_file is not None else None
    )
    snapshot.source_type = bs_type
    set_initial_sources(snapshot)
    snapshot.save()
    if is_canon:
        # Point a fresh CanonicalBuilding at this snapshot and link back.
        canon = CanonicalBuilding.objects.create(canonical_snapshot=snapshot)
        snapshot.canonical_building = canon
        snapshot.save()
    return snapshot
def make_fake_snapshot(
        import_file, init_data, bs_type, is_canon=False, org=None):
    """For making fake mapped BuildingSnapshots to test matching against."""
    snap = BuildingSnapshot.objects.create(**init_data)
    snap.import_file = import_file
    snap.super_organization = org
    if import_file is None:
        # Nothing to trace this snapshot back to.
        snap.import_record = None
    else:
        snap.import_record = import_file.import_record
    snap.source_type = bs_type
    set_initial_sources(snap)
    snap.save()
    if not is_canon:
        return snap
    # Canonical snapshots get a CanonicalBuilding pointing at them, and a
    # back-reference from the snapshot.
    snap.canonical_building = CanonicalBuilding.objects.create(
        canonical_snapshot=snap
    )
    snap.save()
    return snap
def _save_raw_data_chunk(chunk, file_pk, prog_key, increment, *args, **kwargs):
    """Save the raw data to the database."""
    import_file = ImportFile.objects.get(pk=file_pk)
    # Save our "column headers" and sample rows for F/E.
    source_type = get_source_type(import_file)
    for raw_row in chunk:
        snapshot = BuildingSnapshot()
        snapshot.import_file = import_file
        snapshot.extra_data = raw_row
        snapshot.source_type = source_type
        # We require a save to get our PK
        # We save here to set our initial source PKs.
        snapshot.save()
        snapshot.super_organization = (
            import_file.import_record.super_organization
        )
        set_initial_sources(snapshot)
        snapshot.save()
    # Indicate progress
    increment_cache(prog_key, increment)
def test_update_building(self):
    """Good case for updating a building.

    Creates a canonical snapshot, attaches it to a project, then runs
    ``seed_models.update_building`` and verifies the new snapshot's
    values, sources, parentage, project membership, and extra_data
    Column bookkeeping.
    """
    fake_building_extra = {
        u'Assessor Data 1': u'2342342',
        u'Assessor Data 2': u'245646',
    }
    fake_building_kwargs = {
        u'property_name': u'Place pl.',
        u'address_line_1': u'332 Place pl.',
        u'owner': u'Duke of Earl',
        u'postal_code': u'68674',
    }
    fake_building = util.make_fake_snapshot(
        self.import_file2, fake_building_kwargs, seed_models.COMPOSITE_BS,
        is_canon=True
    )
    fake_building.super_organization = self.fake_org
    fake_building.extra_data = fake_building_extra
    fake_building.save()
    # add building to a project
    project = seed_models.Project.objects.create(
        name='test project',
        owner=self.fake_user,
        super_organization=self.fake_org,
    )
    seed_models.ProjectBuilding.objects.create(
        building_snapshot=fake_building, project=project
    )
    fake_building_pk = fake_building.pk
    # Re-fetch so we work from a clean DB instance.
    fake_building = seed_models.BuildingSnapshot.objects.filter(
        pk=fake_building_pk
    ).first()
    # Point the *_source fields at the snapshot itself, as a hydrated
    # payload from the front end would.
    fake_building_kwargs[u'property_name_source'] = fake_building.pk
    fake_building_kwargs[u'address_line_1_source'] = fake_building.pk
    fake_building_kwargs[u'owner_source'] = fake_building.pk
    seed_models.set_initial_sources(fake_building)
    # Hydrated JS version will have this, we'll query off it.
    fake_building_kwargs[u'pk'] = fake_building.pk
    # "update" one of the field values.
    fake_building_kwargs[u'import_file'] = self.import_file1
    fake_building_kwargs[u'postal_code'] = u'99999'
    fake_building_extra[u'Assessor Data 1'] = u'NUP.'
    # Need to simulate JS hydrated payload here.
    fake_building_kwargs[u'extra_data'] = fake_building_extra
    new_snap = seed_models.update_building(
        fake_building, fake_building_kwargs, self.fake_user
    )
    # Make sure new building is also in project.
    pbs = seed_models.ProjectBuilding.objects.filter(
        building_snapshot=new_snap,
    )
    self.assertEqual(pbs.count(), 1)
    # Make sure our value was updated.
    self.assertEqual(
        new_snap.postal_code, fake_building_kwargs[u'postal_code']
    )
    # Updating must produce a *new* snapshot, not mutate the original.
    self.assertNotEqual(new_snap.pk, fake_building.pk)
    # Make sure that the extra data were saved, with orig sources.
    self.assertDictEqual(
        new_snap.extra_data, fake_building_extra
    )
    # Make sure we have the same orgs.
    self.assertEqual(
        new_snap.super_organization, fake_building.super_organization
    )
    self.assertEqual(new_snap.match_type, fake_building.match_type)
    # Make sure we're set as the source for updated info!!!
    self.assertEqual(new_snap, new_snap.postal_code_source)
    # Make sure our sources from parent get set properly.
    for attr in ['property_name', 'address_line_1', 'owner']:
        self.assertEqual(
            getattr(new_snap, '{0}_source'.format(attr)).pk,
            fake_building.pk
        )
    # Make sure our parent is set.
    self.assertEqual(new_snap.parents.all()[0].pk, fake_building.pk)
    # Make sure we captured all of the extra_data column names after update
    data_columns = seed_models.Column.objects.filter(
        organization=fake_building.super_organization,
        is_extra_data=True
    )
    self.assertEqual(data_columns.count(), len(fake_building_extra))
    self.assertListEqual(
        sorted([d.column_name for d in data_columns]),
        sorted(fake_building_extra.keys())
    )
def test_update_building(self):
    """Good case for updating a building.

    Builds a canonical snapshot in a project, calls
    ``seed_models.update_building`` with a simulated front-end payload,
    and checks the resulting snapshot's fields, sources, parent link,
    project membership, and extra_data Column records.
    """
    fake_building_extra = {
        u'Assessor Data 1': u'2342342',
        u'Assessor Data 2': u'245646',
    }
    fake_building_kwargs = {
        u'property_name': u'Place pl.',
        u'address_line_1': u'332 Place pl.',
        u'owner': u'Duke of Earl',
        u'postal_code': u'68674',
    }
    fake_building = util.make_fake_snapshot(self.import_file2,
                                            fake_building_kwargs,
                                            seed_models.COMPOSITE_BS,
                                            is_canon=True)
    fake_building.super_organization = self.fake_org
    fake_building.extra_data = fake_building_extra
    fake_building.save()
    # add building to a project
    project = seed_models.Project.objects.create(
        name='test project',
        owner=self.fake_user,
        super_organization=self.fake_org,
    )
    seed_models.ProjectBuilding.objects.create(
        building_snapshot=fake_building, project=project)
    fake_building_pk = fake_building.pk
    # Reload from the DB so we start from a persisted instance.
    fake_building = seed_models.BuildingSnapshot.objects.filter(
        pk=fake_building_pk).first()
    # *_source fields reference the snapshot itself, as the hydrated
    # front-end payload would send them.
    fake_building_kwargs[u'property_name_source'] = fake_building.pk
    fake_building_kwargs[u'address_line_1_source'] = fake_building.pk
    fake_building_kwargs[u'owner_source'] = fake_building.pk
    seed_models.set_initial_sources(fake_building)
    # Hydrated JS version will have this, we'll query off it.
    fake_building_kwargs[u'pk'] = fake_building.pk
    # "update" one of the field values.
    fake_building_kwargs[u'import_file'] = self.import_file1
    fake_building_kwargs[u'postal_code'] = u'99999'
    fake_building_extra[u'Assessor Data 1'] = u'NUP.'
    # Need to simulate JS hydrated payload here.
    fake_building_kwargs[u'extra_data'] = fake_building_extra
    new_snap = seed_models.update_building(fake_building,
                                           fake_building_kwargs,
                                           self.fake_user)
    # Make sure new building is also in project.
    pbs = seed_models.ProjectBuilding.objects.filter(
        building_snapshot=new_snap,
    )
    self.assertEqual(pbs.count(), 1)
    # Make sure our value was updated.
    self.assertEqual(new_snap.postal_code,
                     fake_building_kwargs[u'postal_code'])
    # The update creates a fresh snapshot rather than mutating in place.
    self.assertNotEqual(new_snap.pk, fake_building.pk)
    # Make sure that the extra data were saved, with orig sources.
    self.assertDictEqual(new_snap.extra_data, fake_building_extra)
    # Make sure we have the same orgs.
    self.assertEqual(new_snap.super_organization,
                     fake_building.super_organization)
    self.assertEqual(new_snap.match_type, fake_building.match_type)
    # Make sure we're set as the source for updated info!!!
    self.assertEqual(new_snap, new_snap.postal_code_source)
    # Make sure our sources from parent get set properly.
    for attr in ['property_name', 'address_line_1', 'owner']:
        self.assertEqual(
            getattr(new_snap, '{0}_source'.format(attr)).pk,
            fake_building.pk)
    # Make sure our parent is set.
    self.assertEqual(new_snap.parents.all()[0].pk, fake_building.pk)
    # Make sure we captured all of the extra_data column names after update
    data_columns = seed_models.Column.objects.filter(
        organization=fake_building.super_organization,
        is_extra_data=True)
    self.assertEqual(data_columns.count(), len(fake_building_extra))
    self.assertListEqual(sorted([d.column_name for d in data_columns]),
                         sorted(fake_building_extra.keys()))
def create_models(data, import_file):
    """
    Create a BuildingSnapshot, a CanonicalBuilding, and a Meter. Then,
    create TimeSeries models for each meter reading in data.

    :param data: dictionary of building data from a Green Button XML file
        in the form returned by xml_importer.building_data
    :param import_file: ImportFile referencing the original xml file; needed
        for linking to BuildingSnapshot and for determining super_organization
    :returns: the created CanonicalBuilding
    """
    # cache data on import_file; this is a proof of concept and we
    # only have two example files available so we hardcode the only
    # heading present.
    import_file.cached_first_row = ROW_DELIMITER.join(["address"])
    import_file.cached_second_to_fifth_row = ROW_DELIMITER.join(
        [data['address']]
    )
    import_file.save()
    raw_bs = BuildingSnapshot()
    raw_bs.import_file = import_file
    # We require a save to get our PK
    # We save here to set our initial source PKs.
    raw_bs.save()
    super_org = import_file.import_record.super_organization
    raw_bs.super_organization = super_org
    set_initial_sources(raw_bs)
    raw_bs.address_line_1 = data['address']
    raw_bs.source_type = GREEN_BUTTON_BS
    raw_bs.save()
    # create canonical building
    cb = CanonicalBuilding.objects.create(canonical_snapshot=raw_bs)
    raw_bs.canonical_building = cb
    raw_bs.save()
    # log building creation
    AuditLog.objects.create(
        organization=import_file.import_record.super_organization,
        user=import_file.import_record.owner,
        content_object=cb,
        action="create_building",
        action_note="Created building",
    )
    # create meter for this dataset (each dataset is a single energy type)
    e_type = energy_type(data['service_category'])
    # Map the energy-type code back to its display string from ENERGY_TYPES.
    e_type_string = next(
        pair[1] for pair in seed.models.ENERGY_TYPES if pair[0] == e_type
    )
    # Meter name encodes the snapshot id and energy type, e.g. "gb_42[Gas]".
    m_name = "gb_{0}[{1}]".format(str(raw_bs.id), e_type_string)
    m_energy_units = energy_units(data['meter']['uom'])
    meter = Meter.objects.create(
        name=m_name, energy_type=e_type, energy_units=m_energy_units
    )
    meter.building_snapshot.add(raw_bs)
    meter.save()
    # now time series data for the meter
    for reading in data['interval']['readings']:
        # reading start/duration are epoch seconds in the parsed XML —
        # presumably UTC; TODO confirm against xml_importer.
        start_time = int(reading['start_time'])
        duration = int(reading['duration'])
        begin_time = datetime.fromtimestamp(start_time)
        end_time = datetime.fromtimestamp(start_time + duration)
        value = reading['value']
        cost = reading['cost']
        new_ts = TimeSeries.objects.create(
            begin_time=begin_time,
            end_time=end_time,
            reading=value,
            cost=cost
        )
        new_ts.meter = meter
        new_ts.save()
    return cb
def create_models(data, import_file):
    """
    Create a BuildingSnapshot, a CanonicalBuilding, and a Meter. Then,
    create TimeSeries models for each meter reading in data.

    :param data: dictionary of building data from a Green Button XML file
        in the form returned by xml_importer.building_data
    :param import_file: ImportFile referencing the original xml file; needed
        for linking to BuildingSnapshot and for determining super_organization
    :returns: the created CanonicalBuilding
    """
    # cache data on import_file; this is a proof of concept and we
    # only have two example files available so we hardcode the only
    # heading present.
    import_file.cached_first_row = ROW_DELIMITER.join(["address"])
    import_file.cached_second_to_fifth_row = ROW_DELIMITER.join(
        [data['address']]
    )
    import_file.save()
    raw_bs = BuildingSnapshot()
    raw_bs.import_file = import_file
    # We require a save to get our PK
    # We save here to set our initial source PKs.
    raw_bs.save()
    super_org = import_file.import_record.super_organization
    raw_bs.super_organization = super_org
    set_initial_sources(raw_bs)
    raw_bs.address_line_1 = data['address']
    raw_bs.source_type = GREEN_BUTTON_BS
    raw_bs.save()
    # create canonical building
    cb = CanonicalBuilding.objects.create(canonical_snapshot=raw_bs)
    raw_bs.canonical_building = cb
    raw_bs.save()
    # log building creation
    AuditLog.objects.create(
        organization=import_file.import_record.super_organization,
        user=import_file.import_record.owner,
        content_object=cb,
        action="create_building",
        action_note="Created building",
    )
    # create meter for this dataset (each dataset is a single energy type)
    e_type = energy_type(data['service_category'])
    # Look up the human-readable label for this energy-type code.
    e_type_string = next(
        pair[1] for pair in seed.models.ENERGY_TYPES if pair[0] == e_type
    )
    # Meter name encodes snapshot id and energy type, e.g. "gb_42[Gas]".
    m_name = "gb_{0}[{1}]".format(str(raw_bs.id), e_type_string)
    m_energy_units = energy_units(data['meter']['uom'])
    meter = Meter.objects.create(
        name=m_name, energy_type=e_type, energy_units=m_energy_units
    )
    meter.building_snapshot.add(raw_bs)
    meter.save()
    # now timeseries data for the meter
    for reading in data['interval']['readings']:
        # start_time/duration are epoch seconds from the parsed XML —
        # presumably UTC; TODO confirm against xml_importer.
        start_time = int(reading['start_time'])
        duration = int(reading['duration'])
        begin_time = datetime.fromtimestamp(start_time)
        end_time = datetime.fromtimestamp(start_time + duration)
        value = reading['value']
        cost = reading['cost']
        new_ts = TimeSeries.objects.create(
            begin_time=begin_time,
            end_time=end_time,
            reading=value,
            cost=cost
        )
        new_ts.meter = meter
        new_ts.save()
    return cb
def test_update_building(self):
    """Good case for updating a building.

    Creates a canonical snapshot, runs ``seed_models.update_building``
    with a simulated front-end payload, and verifies the new snapshot's
    values, org, sources, and parent link.
    """
    fake_building_extra = {
        u'Assessor Data 1': u'2342342',
        u'Assessor Data 2': u'245646',
    }
    fake_building_kwargs = {
        u'property_name': u'Place pl.',
        u'address_line_1': u'332 Place pl.',
        u'owner': u'Duke of Earl',
        u'postal_code': u'68674',
    }
    fake_building = util.make_fake_snapshot(
        self.import_file2, fake_building_kwargs, seed_models.COMPOSITE_BS,
        is_canon=True
    )
    # Was `fake_building.super_org = ...`: `super_org` is not the model
    # field (sibling tests use `super_organization`), so the org was
    # silently dropped and the org assertion below compared None == None.
    fake_building.super_organization = self.fake_org
    fake_building.extra_data = fake_building_extra
    fake_building.save()
    # *_source fields point at the snapshot itself, as a hydrated
    # front-end payload would send them.
    fake_building_kwargs[u'property_name_source'] = fake_building.pk
    fake_building_kwargs[u'address_line_1_source'] = fake_building.pk
    fake_building_kwargs[u'owner_source'] = fake_building.pk
    seed_models.set_initial_sources(fake_building)
    # Hydrated JS version will have this, we'll query off it.
    fake_building_kwargs[u'pk'] = fake_building.pk
    # "update" one of the field values.
    fake_building_kwargs[u'import_file'] = self.import_file1
    fake_building_kwargs[u'postal_code'] = u'99999'
    fake_building_extra[u'Assessor Data 1'] = u'NUP.'
    # Need to simulate JS hydrated payload here.
    fake_building_kwargs[u'extra_data'] = fake_building_extra
    new_snap = seed_models.update_building(
        fake_building, fake_building_kwargs, self.fake_user
    )
    # Make sure our value was updated.
    self.assertEqual(
        new_snap.postal_code, fake_building_kwargs[u'postal_code']
    )
    # Updating must create a new snapshot, not mutate the original.
    self.assertNotEqual(new_snap.pk, fake_building.pk)
    # Make sure that the extra data were saved, with orig sources.
    self.assertDictEqual(
        new_snap.extra_data, fake_building_extra
    )
    # Make sure we have the same orgs.
    self.assertEqual(
        new_snap.super_organization, fake_building.super_organization
    )
    self.assertEqual(new_snap.match_type, fake_building.match_type)
    # Make sure we're set as the source for updated info!!!
    self.assertEqual(new_snap, new_snap.postal_code_source)
    # Make sure our sources from parent get set properly.
    for attr in ['property_name', 'address_line_1', 'owner']:
        self.assertEqual(
            getattr(new_snap, '{0}_source'.format(attr)).pk,
            fake_building.pk
        )
    # Make sure our parent is set.
    self.assertEqual(new_snap.parents.all()[0].pk, fake_building.pk)