def _save_raw_data_chunk(chunk, file_pk, prog_key, increment, *args, **kwargs):
    """Persist one chunk of raw imported rows as BuildingSnapshot records.

    :param chunk: iterable of per-row dicts stored as each snapshot's
        ``extra_data``
    :param file_pk: primary key of the ImportFile these rows came from
    :param prog_key: progress-cache key bumped once the chunk is saved
    :param increment: amount added to the progress cache for this chunk
    """
    import_file = ImportFile.objects.get(pk=file_pk)
    # Source type drives how the F/E interprets the raw "column headers"
    # and sample rows.
    source_type = get_source_type(import_file)
    for row in chunk:
        snapshot = BuildingSnapshot()
        snapshot.import_file = import_file
        snapshot.extra_data = row
        snapshot.source_type = source_type
        # First save assigns a primary key, which set_initial_sources
        # needs below.
        snapshot.save()
        snapshot.super_organization = import_file.import_record.super_organization
        set_initial_sources(snapshot)
        snapshot.save()
    # Report progress for the whole chunk at once.
    increment_cache(prog_key, increment)
def create_models(data, import_file):
    """
    Create a BuildingSnapshot, a CanonicalBuilding, and a Meter. Then,
    create TimeSeries models for each meter reading in data.

    :param data: dictionary of building data from a Green Button XML file
        in the form returned by xml_importer.building_data
    :param import_file: ImportFile referencing the original xml file; needed
        for linking to BuildingSnapshot and for determining super_organization
    :returns: the created CanonicalBuilding
    """
    # cache data on import_file; this is a proof of concept and we
    # only have two example files available so we hardcode the only
    # heading present.
    import_file.cached_first_row = ROW_DELIMITER.join(["address"])
    import_file.cached_second_to_fifth_row = ROW_DELIMITER.join(
        [data['address']]
    )
    import_file.save()

    raw_bs = BuildingSnapshot()
    raw_bs.import_file = import_file
    # We require a save to get our PK
    # We save here to set our initial source PKs.
    raw_bs.save()
    super_org = import_file.import_record.super_organization
    raw_bs.super_organization = super_org
    set_initial_sources(raw_bs)
    raw_bs.address_line_1 = data['address']
    raw_bs.source_type = GREEN_BUTTON_BS
    raw_bs.save()

    # create canonical building
    cb = CanonicalBuilding.objects.create(canonical_snapshot=raw_bs)
    raw_bs.canonical_building = cb
    raw_bs.save()

    # log building creation
    AuditLog.objects.create(
        organization=import_file.import_record.super_organization,
        user=import_file.import_record.owner,
        content_object=cb,
        action="create_building",
        action_note="Created building",
    )

    # create meter for this dataset (each dataset is a single energy type)
    e_type = energy_type(data['service_category'])
    e_type_string = next(
        pair[1] for pair in seed.models.ENERGY_TYPES if pair[0] == e_type
    )
    m_name = "gb_{0}[{1}]".format(str(raw_bs.id), e_type_string)
    m_energy_units = energy_units(data['meter']['uom'])
    meter = Meter.objects.create(
        name=m_name, energy_type=e_type, energy_units=m_energy_units
    )
    meter.building_snapshot.add(raw_bs)
    meter.save()

    # now time series data for the meter
    for reading in data['interval']['readings']:
        start_time = int(reading['start_time'])
        duration = int(reading['duration'])

        # FIX: previously these used naive datetime.fromtimestamp(ts),
        # whose result depends on the server's local timezone and is
        # rejected/warned on by Django when USE_TZ is enabled. Use the
        # current Django timezone so the stored readings are
        # timezone-aware, consistent with the other create_models variant
        # in this file.
        begin_time = datetime.fromtimestamp(
            start_time, tz=timezone.get_current_timezone()
        )
        end_time = datetime.fromtimestamp(
            start_time + duration, tz=timezone.get_current_timezone()
        )

        value = reading['value']
        cost = reading['cost']
        new_ts = TimeSeries.objects.create(
            begin_time=begin_time,
            end_time=end_time,
            reading=value,
            cost=cost
        )
        new_ts.meter = meter
        new_ts.save()

    return cb
def create_models(data, import_file):
    """Build the model graph for one Green Button import.

    Creates a BuildingSnapshot, its CanonicalBuilding, a Meter for the
    file's single energy type, and one TimeSeries row per interval reading.

    :param data: building data parsed from a Green Button XML file, in the
        shape returned by xml_importer.building_data
    :param import_file: ImportFile for the original xml file; links the
        snapshot and supplies the super_organization
    :returns: the created CanonicalBuilding
    """
    # Proof-of-concept caching: the only heading present in the available
    # example files is "address", so it is hardcoded here.
    import_file.cached_first_row = ROW_DELIMITER.join(["address"])
    import_file.cached_second_to_fifth_row = ROW_DELIMITER.join(
        [data['address']])
    import_file.save()

    snapshot = BuildingSnapshot()
    snapshot.import_file = import_file
    # An initial save is required so the snapshot gets a primary key.
    # NOTE(review): unlike the sibling create_models variant in this file,
    # this one does not call set_initial_sources — confirm that is intended.
    snapshot.save()

    snapshot.super_organization = import_file.import_record.super_organization
    snapshot.address_line_1 = data['address']
    snapshot.source_type = GREEN_BUTTON_BS
    snapshot.save()

    # Promote the snapshot to a canonical building.
    canonical = CanonicalBuilding.objects.create(canonical_snapshot=snapshot)
    snapshot.canonical_building = canonical
    snapshot.save()

    # Audit-log the building creation.
    AuditLog.objects.create(
        organization=import_file.import_record.super_organization,
        user=import_file.import_record.owner,
        content_object=canonical,
        action="create_building",
        action_note="Created building",
    )

    # Each Green Button dataset carries a single energy type; create the
    # meter the time-series rows will hang off of.
    e_type = energy_type(data['service_category'])
    e_type_label = next(
        entry[1] for entry in seed.models.ENERGY_TYPES if entry[0] == e_type
    )
    meter = Meter.objects.create(
        name="gb_{0}[{1}]".format(str(snapshot.id), e_type_label),
        energy_type=e_type,
        energy_units=energy_units(data['meter']['uom']),
    )
    meter.building_snapshot.add(snapshot)
    meter.save()

    # One TimeSeries per interval reading; timestamps are made aware in
    # the current Django timezone.
    current_tz = timezone.get_current_timezone()
    for reading in data['interval']['readings']:
        start = int(reading['start_time'])
        span = int(reading['duration'])
        series = TimeSeries.objects.create(
            begin_time=datetime.fromtimestamp(start, tz=current_tz),
            end_time=datetime.fromtimestamp(start + span, tz=current_tz),
            reading=reading['value'],
            cost=reading['cost'],
        )
        series.meter = meter
        series.save()

    return canonical