def compile_records(self):
    """Recompile component point records for every queued record target.

    Each entry in ``self.record_targets`` is a dict carrying a syrx number
    and the inclusive date span to recompile.
    """
    compiler = EnergyRecordCompiler()
    for target in self.record_targets:
        compiler.compile_component_point_records_by_year_span(
            target["syrx_num"],
            target["start_date"],
            target["end_date"],
        )
def import_account_energy_star_file(self, model, sqft, account_type):
    """Create an account from an Energy Star upload, insert its energy
    records in batches, and compile the records for the touched years.

    :param model: dict with at least name, weatherstation_id and timezone
    :param sqft: square footage forwarded to the Excel record reader
    :param account_type: stored on the new account as its ``type``
    """
    account = Account({
        "name": model["name"],
        "group_id": self.group.id,
        "type": account_type,
        "weatherstation_id": model["weatherstation_id"],
        "timezone": model["timezone"],
    })
    account.id = self.uow.accounts.insert(account)

    records = self.retrieve_excel_records(model, sqft, account)
    self.apply_weather_data(model['weatherstation_id'], records)

    # Nothing to insert or compile for an empty upload; the account itself
    # has already been created above.
    if not records:
        return

    # Insert in chunks of 250 to keep each insert_many call bounded.
    batch_size = 250
    for start in range(0, len(records), batch_size):
        self.uow.energy_records.insert_many(records[start:start + batch_size])

    # BUG FIX: the original called sorted(years) and discarded the result
    # (sorted() returns a new list), then indexed years[0]/years[-1] of the
    # unsorted list. Use min/max of the year set directly.
    years = {r["readingdatelocal"].year for r in records}
    compiler = EnergyRecordCompiler()
    compiler.compile_energy_records_by_year_span(min(years), max(years), account.id)
def _compile_equipment_point_records(self):
    """Re-create compiled equipment point records for each stored range.

    For every dict in ``self.point_ranges`` the previously compiled rows in
    the span are deleted first, then recompiled from the raw point records.
    """
    compiler = EnergyRecordCompiler()
    delete_compiled = self.uow.compiled_energy_records.delete_compiled_equipment_point_records
    for point_range in self.point_ranges:
        span = (point_range["syrx_num"], point_range["start_date"], point_range["end_date"])
        delete_compiled(*span)
        compiler.compile_component_point_records_by_year_span(*span)
def compile_equipment_point_records(self, ranges):
    """Re-create compiled equipment point records for the given ranges.

    :param ranges: iterable of objects exposing ``syrx_num``, ``start_date``
        and ``end_date`` attributes; each span is wiped and recompiled.
    """
    compiler = EnergyRecordCompiler()
    for entry in ranges:
        span = (entry.syrx_num, entry.start_date, entry.end_date)
        self.uow.compiled_energy_records.delete_compiled_equipment_point_records(*span)
        compiler.compile_component_point_records_by_year_span(*span)
def _add_data_for_mapping(self, mapping, syrx_num):
    """Promote unmapped vendor point records to equipment point records.

    Fetches all unmapped records for the mapping's vendor, strips the
    vendor-specific fields, tags each record with *syrx_num*, inserts them,
    recompiles the affected date span, and finally deletes the originals.

    :param mapping: dict with a ``source`` key selecting the vendor
        (``johnson`` / ``fieldserver`` / ``invensys``, anything else is
        treated as siemens — same fallback as before)
    :param syrx_num: syrx number stamped onto every inserted record
    """
    source = mapping['source']
    repo = self.uow.unmapped_vendor_point_records

    if source == 'johnson':
        vendor_point_records = repo.get_all_vendor_point_records_for_johnson(mapping)
    elif source == 'fieldserver':
        vendor_point_records = repo.get_all_vendor_point_records_for_fieldserver(mapping)
    elif source == 'invensys':
        vendor_point_records = repo.get_all_vendor_point_records_for_invensys(mapping)
    else:
        vendor_point_records = repo.get_all_vendor_point_records_for_siemens(mapping)

    if not vendor_point_records:
        return

    # Vendor-specific keys to drop before inserting as generic records;
    # replaces the duplicated per-vendor del chains with one table.
    vendor_fields = {
        'johnson': ('johnson_fqr', 'johnson_site_id'),
        'fieldserver': ('fieldserver_offset', 'fieldserver_site_id'),
        'invensys': ('invensys_point_name', 'invensys_equipment_name', 'invensys_site_name'),
    }
    strip_fields = vendor_fields.get(source, ('siemens_meter_name',))

    insert_list = []
    for record in vendor_point_records:
        record['syrx_num'] = syrx_num
        for field in strip_fields:
            del record[field]
        del record['source']
        del record['id']
        insert_list.append(record)

    date_list = [r['date'] for r in insert_list]
    self.uow.energy_records.insert_equipment_point_records(insert_list)

    energy_record_compiler = EnergyRecordCompiler()
    energy_record_compiler.compile_component_point_records_by_year_span(
        syrx_num, min(date_list), max(date_list))

    # Only delete the unmapped originals after the insert/compile succeeded.
    if source == 'johnson':
        repo.delete_all_for_johnson_point(mapping)
    elif source == 'fieldserver':
        repo.delete_all_for_fieldserver_point(mapping)
    elif source == 'invensys':
        repo.delete_all_for_invensys_point(mapping)
    else:
        repo.delete_all_for_siemens_point(mapping)
class UpdateNormalizationThread(Thread):
    """Background thread that applies a size or price normalization and then
    recompiles the affected account's energy records over the updated span."""

    def __init__(self, norm_type, normalization):
        self.compiler = EnergyRecordCompiler()
        self.norm_type = norm_type
        self.normalization = normalization
        self.uow = UoW(None)
        super(UpdateNormalizationThread, self).__init__()

    def run(self):
        """Dispatch on the (case-insensitive) normalization type, apply the
        update, then recompile from the effective date to the returned end date."""
        kind = self.norm_type.lower()
        updaters = {
            "size": self.uow.energy_records.update_size_norms,
            "price": self.uow.energy_records.update_price_norms,
        }
        if kind not in updaters:
            raise Exception("The type specified is invalid.")
        end_date = updaters[kind](self.normalization)
        self.compiler.compile_energy_records_by_date_span(
            self.normalization.effective_date, end_date, self.normalization.account_id)
def __init__(self, norm_type, normalization):
    """Store the normalization job parameters and set up collaborators.

    :param norm_type: normalization kind string ("size" or "price")
    :param normalization: the normalization record to apply
    """
    self.norm_type = norm_type
    self.normalization = normalization
    self.compiler = EnergyRecordCompiler()
    self.uow = UoW(None)
    super(UpdateNormalizationThread, self).__init__()
def _insert_new_records(self, mapping, records):
    """Validate raw vendor records, attach weather data, insert the valid
    ones as equipment point records, and recompile the touched date span.

    Records that fail validation are collected with per-record error
    messages and passed to ``self._handle_bad_records``.

    :param mapping: dict carrying at least ``syrx_num``; copied into each
        error entry together with that record's error messages
    :param records: list of raw record dicts with ``value`` and ``date`` keys
    """
    insert_list = []
    error_list = []
    self._populate_weatherstation_id_on_records(mapping['syrx_num'], records)
    # weather_data is a dict keyed by the weatherstation id; on failure fall
    # back to an empty dict so each record is flagged "Weatherstation not
    # found" rather than aborting the whole batch (original best-effort kept,
    # but narrowed from a bare except).
    try:
        weather_data = self._get_weather_data(records)
    except Exception:
        weather_data = dict()

    for r in records:
        error_messages = []
        value = None
        equipment_point_record = None
        try:
            value = float(r["value"])
        except Exception:
            error_messages.append("Value is not a number")
        try:
            date = r['date']
            syrx_num = mapping["syrx_num"]
            weather = None
            # weather observations are keyed on the top of the hour
            weather_time = date.replace(minute=0)
            if "weatherstation_id" not in r or r["weatherstation_id"] not in weather_data:
                error_messages.append("Weatherstation not found")
            elif weather_time not in weather_data[r["weatherstation_id"]]:
                weather_time_str = weather_time.strftime("%Y-%m-%d %H:%M:%S")
                error_messages.append("Time " + weather_time_str + " not found in weather data")
            else:
                weather = weather_data[r["weatherstation_id"]][weather_time]
                equipment_point_record = self.uow.energy_records.get_equipment_point_record(
                    date, syrx_num, value, weather, self.date_time)
        except Exception:
            error_messages.append("Timestamp could not be parsed")

        if not error_messages:
            insert_list.append(equipment_point_record)
        else:
            # BUG FIX: the original appended the shared `mapping` dict itself,
            # so every entry in error_list aliased one dict that ended up
            # holding only the LAST bad record's messages. Append a copy.
            error_entry = dict(mapping)
            error_entry["errors"] = error_messages
            error_list.append(error_entry)

    self.logger.debug("Found " + str(len(insert_list)) + " good records for "
                      + str(mapping['syrx_num']) + ".")

    # BUG FIX: guard the insert/compile step — the original called
    # min()/max() on an empty date list (ValueError) when every record
    # in the batch was bad.
    if insert_list:
        self.uow.energy_records.insert_equipment_point_records(insert_list)
        date_list = [x['date'] for x in insert_list]
        min_date = min(date_list)
        max_date = max(date_list)
        compiler = EnergyRecordCompiler()
        self.uow.compiled_energy_records.delete_compiled_equipment_point_records(
            mapping['syrx_num'], min_date, max_date)
        compiler.compile_component_point_records_by_year_span(
            mapping['syrx_num'], min_date, max_date)

    if len(error_list) > 0:
        # write bad records to file
        self._handle_bad_records(error_list)
def run(self):
    """Drive the full energy-data import: optional duke pre-processing,
    purge of old records, weather lookup, record ingestion, and compilation.

    Progress is reported to observers via ``self.message`` /
    ``self.current_progress_point``; on any failure the error flags are set
    and the exception is re-raised.
    """
    try:
        # NOTE(review): clock() was removed in Python 3.8 — presumably this
        # targets an older runtime; confirm before upgrading.
        start_time = clock()
        logging.info("Start importer")
        self.uploaded_file_handle.seek(0)
        if self.uploaded_file_type == "duke":
            # duke files need a pre-processing pass; the original handle is
            # replaced by the pre-processor's output file.
            self.message = "Running pre processor..."
            logging.info("Running pre processor")
            new_file_handle = duke_pre_processor.run(self.uploaded_file_handle)
            logging.info("Preprocessor completed after " + str(clock() - start_time))
            self.uploaded_file_handle.close()
            self.uploaded_file_handle = new_file_handle
        # wrap the (possibly replaced) handle in a CSV reader
        self.reader = csv.reader(self.uploaded_file_handle)
        self.message = "Deleting old records..."
        # rewind and skip the header line before scanning for old records
        self.uploaded_file_handle.seek(0)
        self.uploaded_file_handle.readline()
        logging.info("Deleting old records")
        self.delete_old_records()
        logging.info("Deleting old records completed after " + str(clock() - start_time))
        # NOTE(review): timezone is hard-coded to America/New_York here —
        # confirm whether the account's own timezone should be used instead.
        self.local_tz = pytz.timezone("America/New_York")
        self.uploaded_file_handle.seek(0)
        self.get_normalizations()
        # cache account attributes used by the record parsing below
        account_info = self.uow.accounts.get_by_id(self.account_id)
        self.account_type = account_info.type.lower()
        self.weatherstation_id = account_info.weatherstation_id
        self.get_energy_units()
        self.message = "Loading weather data..."
        # rewind + skip header again before the weather pass
        self.uploaded_file_handle.seek(0)
        self.uploaded_file_handle.readline()
        logging.info("Getting weather data")
        self.get_weather_data()
        logging.info("Get weather data completed after " + str(clock() - start_time))
        self.message = "Saving energy data..."
        # rewind + skip header once more for the record-ingestion pass
        self.uploaded_file_handle.seek(0)
        self.uploaded_file_handle.readline()
        self.current_progress_point += 1
        logging.info("Getting records")
        self.get_records()
        logging.info("Get records completed after " + str(clock() - start_time))
        self.message = "Compiling energy reports..."
        # compile the span of years touched by the import
        # (start_year/end_year are presumably set by get_records — verify)
        compiler = EnergyRecordCompiler()
        logging.info("Compiling energy records")
        compiler.compile_energy_records_by_year_span(self.start_year, self.end_year, self.account_id)
        logging.info("Compiler completed after " + str(clock() - start_time))
        self.current_progress_point = self.total_progress_points
        self.complete = True
        self.message = "Complete"
    except:
        # top-level boundary: record the failure for observers, log it,
        # and re-raise (bare except kept — it also catches SystemExit etc.)
        logging.exception("An error occured importing energy data")
        self.error = True
        self.message = "An error has occurred."
        raise