def _query_api(self, api_query):
    """Query the PVWatts API for a PV profile and save it to disk.

    Validates the user-supplied save name first (so no network round trip
    is wasted on an empty name), then fetches the profile and writes it as
    JSON under DATA_HOME/pv/. The save button is re-enabled on every exit
    path.

    Parameters
    ----------
    api_query : str
        Fully-formed PVWatts API request URL.

    Raises
    ------
    requests.ConnectionError
        If the request fails for any reason (HTTP error, proxy failure,
        connection failure, timeout, or an unexpected exception).
    """
    try:
        # Validate the save name BEFORE spending a network round trip.
        if not self.save_name_field.text:
            popup = WarningPopup()
            popup.popup_text.text = 'Please specify a name to save the PV profile as.'
            popup.open()
            return

        request_content = self._fetch_pv_profile(api_query)
        self._save_pv_profile(request_content)
    finally:
        # Re-enable the save button no matter how the attempt ended.
        self.save_button.disabled = False

def _fetch_pv_profile(self, api_query):
    """Perform the HTTP GET against PVWatts and return the decoded JSON.

    All request failures are logged and re-raised as
    requests.ConnectionError with the original exception chained.
    """
    ssl_verify, proxy_settings = check_connection_settings()

    try:
        with requests.Session() as req:
            http_request = req.get(api_query,
                                   proxies=proxy_settings,
                                   timeout=10,
                                   verify=ssl_verify,
                                   stream=True)
            if http_request.status_code != requests.codes.ok:
                http_request.raise_for_status()
    except requests.HTTPError as e:
        logging.error('PVProfileDM: {0}'.format(repr(e)))
        raise requests.ConnectionError from e
    except requests.exceptions.ProxyError as e:
        logging.error('PVProfileDM: Could not connect to proxy.')
        raise requests.ConnectionError from e
    except requests.ConnectionError as e:
        logging.error('PVProfileDM: Failed to establish a connection to the host server.')
        raise requests.ConnectionError from e
    except requests.Timeout as e:
        logging.error('PVProfileDM: The connection timed out.')
        raise requests.ConnectionError from e
    except requests.RequestException as e:
        logging.error('PVProfileDM: {0}'.format(repr(e)))
        raise requests.ConnectionError from e
    except Exception as e:
        # Something else went wrong.
        logging.error('PVProfileDM: An unexpected error has occurred. ({0})'.format(repr(e)))
        raise requests.ConnectionError from e

    return http_request.json()

def _save_pv_profile(self, request_content):
    """Write the retrieved profile JSON to DATA_HOME/pv/<name>.json.

    Warns instead of overwriting when a file with the same name exists.
    """
    # Strip ALL non-alphanumeric characters for a filesystem-safe name.
    # (A Latin-1-only delete table would miss punctuation above U+00FF.)
    outname = ''.join(ch for ch in self.save_name_field.text if ch.isalnum())

    destination_dir = os.path.join(DATA_HOME, 'pv')
    os.makedirs(destination_dir, exist_ok=True)
    destination_file = os.path.join(destination_dir, outname + '.json')

    if os.path.exists(destination_file):
        # File already exists with same name.
        popup = WarningPopup()
        popup.popup_text.text = 'A PV profile with the provided name already exists. Please specify a new name.'
        popup.open()
        return

    with open(destination_file, 'w') as outfile:
        json.dump(request_content, outfile)

    logging.info('PVProfileDM: Profile successfully saved.')

    popup = WarningPopup()
    popup.title = 'Success!'
    popup.popup_text.text = 'PV profile successfully saved.'
    popup.open()
def __init__(self, write_directory=None, write_function=None, chooser_description=None, format_description=None, data_validation_function=None, **kwargs):
    """Initialize the DataImporter with optional customization hooks.

    Parameters
    ----------
    write_directory : str, optional
        Directory where imported data is written. Defaults to "".
    write_function : callable, optional
        ``fn(fname, dataframe) -> str`` that writes the imported data and
        returns the save destination. Defaults to a two-column hourly
        time series csv writer.
    chooser_description : str, optional
        Text shown on the file chooser screen.
    format_description : str, optional
        Text shown on the format analyzer screen.
    data_validation_function : callable, optional
        ``fn(dataframe, data_column_name)`` raising ValueError when the
        data is invalid. Defaults to an 8760-row numeric-float check.
    **kwargs
        Forwarded to the superclass initializer.
    """
    super().__init__(**kwargs)

    self.write_directory = "" if write_directory is None else write_directory

    if write_function is None:
        def _write_time_series_csv(fname, dataframe):
            """Writes a generic time series dataframe to a two-column csv.

            The data is inferred to be at an hourly time resolution for one
            standard year.

            Parameters
            ----------
            fname : str
                Name of the file to be saved without an extension
            dataframe : Pandas DataFrame
                DataFrame containing a single Series of the data

            Returns
            -------
            str
                The save destination of the resulting file.
            """
            save_destination = os.path.join(self.write_directory, fname + ".csv")

            data_column_name = dataframe.columns[0]

            # One standard (non-leap) year of hourly timestamps.
            datetime_start = datetime(2019, 1, 1, 0)
            hour_range = pd.date_range(start=datetime_start, periods=len(dataframe), freq="H")

            dataframe["DateTime"] = hour_range
            dataframe[["DateTime", data_column_name]].to_csv(save_destination, index=False)

            return save_destination

        self.write_function = _write_time_series_csv
    else:
        self.write_function = write_function

    if chooser_description is None:
        self.chooser_description = "Select a .csv file to import data from."
    else:
        self.chooser_description = chooser_description

    file_chooser_screen = self.screen_manager.get_screen("FileChooser")
    file_chooser_screen.file_chooser_body_text.text = self.chooser_description

    if format_description is None:
        self.format_description = "Specify the data column."
    else:
        self.format_description = format_description

    format_analyzer_screen = self.screen_manager.get_screen("FormatAnalyzer")
    format_analyzer_screen.format_analyzer_body_text.text = self.format_description

    if data_validation_function is None:
        def _default_data_validation_function(dataframe, data_column_name):
            """Raise ValueError unless the column is 8760 numeric values."""
            if len(dataframe) != 8760:
                raise ValueError("The length of the time series must be 8760 (got {0}).".format(len(dataframe)))

            data_column = dataframe[data_column_name]

            try:
                data_column.astype("float")
            except ValueError as e:
                # Chain explicitly so the offending value is visible.
                raise ValueError("The selected data column could not be interpreted as numeric float values.") from e

        self.data_validation_function = _default_data_validation_function
    else:
        self.data_validation_function = data_validation_function

    # Bind DataImporter dismissal to successful data import.
    completion_popup = WarningPopup()
    completion_popup.title = "Success!"
    completion_popup.popup_text.text = "Data successfully imported."
    completion_popup.bind(on_dismiss=self.dismiss)
    self.screen_manager.completion_popup = completion_popup
def save_load_data(self):
    """Saves the data for the building type selected.

    Validates the current selections, then downloads the hourly load csv
    with up to MAX_WHILE_ATTEMPTS retries. On success the 'Date/Time' and
    'Electricity:Facility [kW](Hourly)' columns are written under
    DATA_HOME/load/residential/<location>/. Sets
    self.connection_error_occurred when every attempt fails.
    """
    try:
        csv_link = self._validate_selections()
    except Exception as e:
        print(e)
        return

    ssl_verify, proxy_settings = check_connection_settings()

    attempt_download = True
    n_tries = 0
    self.connection_error_occurred = False

    while attempt_download:
        n_tries += 1

        if n_tries >= MAX_WHILE_ATTEMPTS:
            logging.warning('LoadProfileDM: Hit download retry limit.')
            attempt_download = False
            self.connection_error_occurred = True
            break

        # Bail out early if the application is shutting down.
        if App.get_running_app().root.stop.is_set():
            return

        try:
            with requests.Session() as req:
                page = req.get(csv_link,
                               timeout=10,
                               verify=ssl_verify,
                               proxies=proxy_settings)
                if page.status_code != requests.codes.ok:
                    page.raise_for_status()
                else:
                    attempt_download = False
        except requests.HTTPError as e:
            logging.error('LoadProfileDM: {0}'.format(repr(e)))
        except requests.exceptions.ProxyError:
            logging.error('LoadProfileDM: Could not connect to proxy.')
        except requests.ConnectionError:
            logging.error('LoadProfileDM: Failed to establish a connection to the host server.')
        except requests.Timeout:
            logging.error('LoadProfileDM: The connection timed out.')
        except requests.RequestException as e:
            logging.error('LoadProfileDM: {0}'.format(repr(e)))
        except Exception as e:
            # Something else went wrong.
            logging.error('LoadProfileDM: An unexpected error has occurred. ({0})'.format(repr(e)))
        else:
            # page.encoding is None when the server omits a charset and
            # bytes.decode(None) raises TypeError; fall back to utf-8.
            data_down = page.content.decode(page.encoding or 'utf-8')
            csv_data = pd.read_csv(io.StringIO(data_down))

            electricity_data = csv_data[[
                'Date/Time', 'Electricity:Facility [kW](Hourly)'
            ]]

            # Save to persistent object on disk, keyed by the URL's
            # location segment and original file name.
            url_split = csv_link.split('/')
            destination_dir = os.path.join(DATA_HOME, 'load', 'residential', url_split[-2])
            os.makedirs(destination_dir, exist_ok=True)

            destination_file = os.path.join(destination_dir, url_split[-1])
            electricity_data.to_csv(destination_file, sep=',', index=False)

            popup = WarningPopup()
            popup.title = 'Success!'
            popup.popup_text.text = 'Load data successfully saved.'
            popup.open()

            logging.info('LoadProfileDM: Load data successfully saved.')
def save_rate_structure(self):
    """Saves the rate structure details to an object on disk.

    Collects the utility, energy rate, demand rate, and net metering
    selections from their respective screens, assembles them into a
    dictionary, and writes it as JSON under DATA_HOME/rate_structures/.
    Warns and aborts if no save name is given or a file with the derived
    name already exists.
    """
    # Retrieve selections from other screens.
    utility_search_screen = self.manager.get_screen('start')
    utility_selected, rate_structure_selected = utility_search_screen.get_selections()

    energy_rate_screen = self.manager.get_screen('energy_rate_structure')
    energy_weekday_schedule, energy_weekend_schedule, energy_rates_dict = energy_rate_screen.get_selections()

    demand_rate_screen = self.manager.get_screen('demand_rate_structure')
    demand_weekday_schedule, demand_weekend_schedule, demand_tou_rates_dict, demand_flat_rates_dict = demand_rate_screen.get_selections()

    try:
        peak_kw_min, peak_kw_max, net_metering_type, sell_price = self.get_selections()
    except TypeError:
        # get_selections() signalled invalid/incomplete input; nothing to save.
        return

    if not self.save_name_field.text:
        popup = WarningPopup()
        popup.popup_text.text = 'Please specify a name to save the rate structure as.'
        popup.open()
        return

    # Form dictionary object for saving.
    rate_structure_object = {}
    rate_structure_object['name'] = self.save_name_field.text

    rate_structure_object['utility'] = {
        'utility name': utility_selected['utility_name'],
        'rate structure name': rate_structure_selected['name']
    }

    rate_structure_object['energy rate structure'] = {
        'weekday schedule': energy_weekday_schedule,
        'weekend schedule': energy_weekend_schedule,
        'energy rates': energy_rates_dict
    }

    rate_structure_object['demand rate structure'] = {
        'weekday schedule': demand_weekday_schedule,
        'weekend schedule': demand_weekend_schedule,
        'time of use rates': demand_tou_rates_dict,
        'flat rates': demand_flat_rates_dict,
        'minimum peak demand': peak_kw_min,
        'maximum peak demand': peak_kw_max
    }

    rate_structure_object['net metering'] = {
        'type': net_metering_type,
        'energy sell price': sell_price
    }

    # Save to JSON.
    # Strip ALL non-alphanumeric characters for a filesystem-safe name.
    # (A Latin-1-only delete table would let punctuation above U+00FF,
    # e.g. en dashes, leak into the filename.)
    fname = ''.join(ch for ch in rate_structure_object['name'] if ch.isalnum())

    destination_dir = os.path.join(DATA_HOME, 'rate_structures')
    os.makedirs(destination_dir, exist_ok=True)
    destination_file = os.path.join(destination_dir, fname + '.json')

    if not os.path.exists(destination_file):
        with open(destination_file, 'w') as outfile:
            json.dump(rate_structure_object, outfile)

        popup = WarningPopup()
        popup.title = 'Success!'
        popup.popup_text.text = 'Rate structure data successfully saved.'
        popup.open()
    else:
        # File already exists with same name.
        popup = WarningPopup()
        popup.popup_text.text = 'A rate structure with the provided name already exists. Please specify a new name.'
        popup.open()