def on_failure(self, exc, task_id, args, kwargs, einfo):
    """
    Log exception info for debugging, save the error message to the database,
    set outputs:Scenario:status, and stop the rest of the chain.

    :param exc: The exception raised by the task.
    :param task_id: Unique id of the failed task (not the run_uuid).
    :param args: Original arguments for the task that failed.
    :param kwargs: Original keyword arguments for the task that failed.
    :param einfo: ExceptionInfo instance, containing the traceback.
    :return: None, the return value of this handler is ignored.
    """
    if not isinstance(exc, REoptError):
        exc_type, exc_value, exc_traceback = sys.exc_info()
        exc = UnexpectedError(exc_type, exc_value.args[0], exc_traceback, task=self.name,
                              run_uuid=kwargs['run_uuid'],
                              user_uuid=kwargs['data']['inputs']['Scenario'].get('user_uuid'))
    msg = exc.message
    exc.save_to_db()
    self.data["messages"]["error"] = msg
    self.data["outputs"]["Scenario"]["status"] = "An error occurred. See messages for more."
    ModelManager.update_scenario_and_messages(self.data, run_uuid=self.run_uuid)

    self.request.chain = None  # stop the rest of the chain
    self.request.callback = None
    self.request.chord = None  # this seems to stop the infinite chord_unlock call
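# Hedged sketch (not from this codebase): an on_failure handler like the one above is typically
# defined on a custom celery.Task subclass and attached via the task decorator's `base=` argument.
# The class and task names below are hypothetical, for illustration only.
from celery import Task, shared_task

class ScenarioTaskBase(Task):  # hypothetical name
    """Shared failure handling for scenario tasks (illustrative)."""
    def on_failure(self, exc, task_id, args, kwargs, einfo):
        # delegate to the handler shown above
        pass

@shared_task(bind=True, base=ScenarioTaskBase)
def example_task(self, run_uuid, data):  # hypothetical task
    ...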
def validate_run_uuid(run_uuid):
    try:
        uuid.UUID(run_uuid)  # raises ValueError if not a valid uuid
    except ValueError as e:
        if e.args[0] == "badly formed hexadecimal UUID string":
            raise ValidationError("Error: " + str(e.args[0]))
        else:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            err = UnexpectedError(exc_type, exc_value.args[0], exc_traceback,
                                  task='resilience_stats', run_uuid=run_uuid)
            err.save_to_db()
            raise ValidationError("Error: " + str(err.message))
def load_builder(request):
    """
    Convert the SolarResilient Component Load Builder CSV into an 8760 load profile.
    :param request:
    :return: 8760 list for the critical_loads_kw input to REopt
    """
    try:
        if request.method == 'POST':
            post = request.body

            # Try to parse JSON, then fall back to CSV
            try:
                try:
                    loads_table = json.loads(post)
                except:
                    loads_table = unicode(post, "utf-8")
                finally:
                    if not isinstance(loads_table, list):
                        csv_reader = csv.DictReader(io.StringIO(loads_table))
                        loads_table = list(csv_reader)
            except:
                return JsonResponse({"Error": "Invalid JSON or CSV"})

            # Validation
            if not check_load_builder_inputs(loads_table):
                return JsonResponse({
                    "Error": "There are missing required inputs. Must include the following: "
                             "'Power (W)', 'Quantity', '% Run Time', 'Start Mo.', 'Stop Mo.', "
                             "'Start Hr.', 'Stop Hr.'"
                })
            if not validate_load_builder_inputs(loads_table):
                return JsonResponse({"Error": "Some input values are invalid"})

            # Run conversion and respond
            loads_kw = convert_loads(loads_table)
            return JsonResponse({"critical_loads_kw": loads_kw})
        else:
            return JsonResponse({
                "Error": "Must POST a JSON based on the SolarResilient component based load builder downloadable CSV"
            })
    except Exception as e:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        err = UnexpectedError(exc_type, exc_value, exc_traceback, task='load_builder')
        err.save_to_db()
        return JsonResponse({"Error": err.message}, status=500)
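# Hedged sketch: check_load_builder_inputs, validate_load_builder_inputs, and convert_loads are
# referenced above but not shown in this section. The helper below is an assumed implementation of
# convert_loads, only to illustrate how component rows with 'Power (W)', 'Quantity', '% Run Time',
# 'Start Mo.', 'Stop Mo.', 'Start Hr.', and 'Stop Hr.' columns could be summed into an 8760 kW list.
import calendar

def convert_loads(loads_table, year=2017):
    """Illustrative sketch only: build an 8760 hourly critical load (kW) from component rows."""
    loads_kw = [0.0] * 8760
    hour_of_year = 0
    for month in range(1, 13):
        days_in_month = calendar.monthrange(year, month)[1]  # 2017 is not a leap year -> 8760 hours
        for _day in range(days_in_month):
            for hour in range(24):
                for row in loads_table:
                    in_months = int(row['Start Mo.']) <= month <= int(row['Stop Mo.'])
                    in_hours = int(row['Start Hr.']) <= hour <= int(row['Stop Hr.'])
                    if in_months and in_hours:
                        kw = float(row['Power (W)']) / 1000.0 * float(row['Quantity'])
                        loads_kw[hour_of_year] += kw * float(row['% Run Time']) / 100.0
                hour_of_year += 1
    return loads_kw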
def proforma(request, run_uuid):
    try:
        uuid.UUID(run_uuid)  # raises ValueError if not a valid uuid
    except ValueError as e:
        if e.args[0] == "badly formed hexadecimal UUID string":
            resp = {"Error": e.args[0]}
            return JsonResponse(resp, status=400)
        else:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            err = UnexpectedError(exc_type, exc_value, exc_traceback, task='proforma', run_uuid=run_uuid)
            err.save_to_db()
            return JsonResponse({"Error": str(err.message)}, status=400)

    try:
        scenario = ScenarioModel.objects.get(run_uuid=run_uuid)
        if scenario.status.lower() == "optimizing...":
            return HttpResponse("Problem is still solving. Please try again later.",
                                status=425)  # 425 Too Early

        try:  # see if a ProForma was already created
            pf = ProForma.objects.get(scenariomodel=scenario)
        except:
            pf = ProForma.create(scenariomodel=scenario)
        pf.generate_spreadsheet()
        pf.save()

        wrapper = FileWrapper(open(pf.output_file, "rb"))
        response = HttpResponse(wrapper, content_type='application/vnd.ms-excel.sheet.macroEnabled.12')
        response['Content-Length'] = os.path.getsize(pf.output_file)
        response['Content-Disposition'] = 'attachment; filename=%s' % pf.output_file_name
        return response

    except Exception as e:
        if type(e).__name__ == 'DoesNotExist':
            msg = "Scenario {} does not exist.".format(run_uuid)
            return HttpResponse(msg, status=404)
        else:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            err = UnexpectedError(exc_type, exc_value, exc_traceback, task='proforma', run_uuid=run_uuid)
            err.save_to_db()
            return JsonResponse({"Error": str(err.message)}, status=400)
def remove(request, run_uuid):
    try:
        ModelManager.remove(run_uuid)  # ModelManager has some internal exception handling
        return JsonResponse({"Success": True}, status=204)
    except Exception:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        err = UnexpectedError(exc_type, exc_value.args[0], tb.format_tb(exc_traceback),
                              task='reo.views.remove', run_uuid=run_uuid)
        err.save_to_db()
        resp = make_error_resp(err.message)
        return JsonResponse(resp)
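# Hedged sketch: make_error_resp is used above and in results() below but is not defined in this
# section. The shape returned here is an assumption, shown only so the error path reads end to end.
def make_error_resp(msg):
    """Assumed shape: wrap an error message in the standard results envelope."""
    return {
        "messages": {"error": msg},
        "outputs": {"Scenario": {"status": "error"}},
    }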
def unlink(request, user_uuid, run_uuid):
    """
    Unlink a run_uuid from the given user_uuid.
    :param request:
    :param user_uuid:
    :param run_uuid:
    :return: True, bool
    """
    content = {'user_uuid': user_uuid, 'run_uuid': run_uuid}
    for name, check_id in content.items():
        try:
            uuid.UUID(check_id)  # raises ValueError if not a valid uuid
        except ValueError as e:
            if e.args[0] == "badly formed hexadecimal UUID string":
                return JsonResponse({"Error": "{} {}".format(name, e.args[0])}, status=400)
            else:
                exc_type, exc_value, exc_traceback = sys.exc_info()
                if name == 'user_uuid':
                    err = UnexpectedError(exc_type, exc_value, exc_traceback, task='unlink', user_uuid=check_id)
                if name == 'run_uuid':
                    err = UnexpectedError(exc_type, exc_value, exc_traceback, task='unlink', run_uuid=check_id)
                err.save_to_db()
                return JsonResponse({"Error": str(err.message)}, status=400)

    try:
        if not ScenarioModel.objects.filter(user_uuid=user_uuid).exists():
            return JsonResponse({"Error": "User {} does not exist".format(user_uuid)}, status=400)
        if not ScenarioModel.objects.filter(run_uuid=run_uuid).exists():
            return JsonResponse({"Error": "Run {} does not exist".format(run_uuid)}, status=400)

        runs = ScenarioModel.objects.filter(run_uuid=run_uuid)
        if runs.exists():
            if runs[0].user_uuid != user_uuid:
                return JsonResponse(
                    {"Error": "Run {} is not associated with user {}".format(run_uuid, user_uuid)},
                    status=400)

        if not UserUnlinkedRuns.objects.filter(run_uuid=run_uuid).exists():
            UserUnlinkedRuns.create(**content)
        return JsonResponse({"Success": True}, status=204)

    except Exception as e:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        err = UnexpectedError(exc_type, exc_value, exc_traceback, task='unlink', user_uuid=user_uuid)
        err.save_to_db()
        return JsonResponse({"Error": err.message}, status=404)
def add_user_uuid(request):
    """
    Update the user_uuid associated with a Scenario run_uuid.
    :param request: POST with {"user_uuid", "run_uuid"}
    :return: None
    """
    try:
        if request.method == 'POST':
            post = request.body
            # Try to parse JSON
            try:
                data = json.loads(post)
                try:
                    user_uuid = str(data['user_uuid'])
                    run_uuid = str(data['run_uuid'])
                    uuid.UUID(user_uuid)  # raises ValueError if not a valid uuid
                    uuid.UUID(run_uuid)  # raises ValueError if not a valid uuid
                    try:
                        scenario = ScenarioModel.objects.filter(run_uuid=run_uuid).first()
                        if scenario.user_uuid is None:
                            ModelManager.add_user_uuid(user_uuid, run_uuid)
                            return JsonResponse(
                                {"Success": "user_uuid for run_uuid {} has been set to {}".format(run_uuid, user_uuid)})
                        else:
                            return JsonResponse(
                                {"Error": "a user_uuid already exists for run_uuid {}".format(run_uuid)})
                    except:
                        return JsonResponse({"Error": "run_uuid does not exist"})
                except:
                    return JsonResponse(
                        {"Error": "Invalid inputs: must provide user_uuid and run_uuid key value pairs as valid UUIDs"})
            except:
                return JsonResponse({"Error": "Invalid JSON"})
        else:
            return JsonResponse({"Error": "Must POST a JSON with user_uuid and run_uuid key value pairs"})
    except Exception as e:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        err = UnexpectedError(exc_type, exc_value, exc_traceback, task='add_user_uuid')
        err.save_to_db()
        return JsonResponse({"Error": err.message}, status=500)
def results(request, run_uuid):
    try:
        uuid.UUID(run_uuid)  # raises ValueError if not a valid uuid
    except ValueError as e:
        if e.args[0] == "badly formed hexadecimal UUID string":
            resp = make_error_resp(e.args[0])
            return JsonResponse(resp, status=400)
        else:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            err = UnexpectedError(exc_type, exc_value.args[0], tb.format_tb(exc_traceback),
                                  task='results', run_uuid=run_uuid)
            err.save_to_db()
            return JsonResponse({"Error": str(err.args[0])}, status=400)

    try:
        d = ModelManager.make_response(run_uuid)  # ModelManager has some internal exception handling
        return JsonResponse(d)
    except Exception:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        err = UnexpectedError(exc_type, exc_value.args[0], tb.format_tb(exc_traceback),
                              task='reo.views.results', run_uuid=run_uuid)
        err.save_to_db()
        resp = make_error_resp(err.message)
        return JsonResponse(resp)
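# Hedged client-side example of polling the results endpoint. The host, path, and api_key query
# parameter are assumptions about the deployment (adjust them for your environment); only the
# "Optimizing..." status string comes from the views above.
import time
import requests  # third-party HTTP client

def poll_results(run_uuid, base_url="https://developer.nrel.gov/api/reopt/v1", api_key="DEMO_KEY"):
    """Poll the results endpoint until the scenario is no longer optimizing (illustrative only)."""
    url = "{}/job/{}/results/?api_key={}".format(base_url, run_uuid, api_key)
    while True:
        resp = requests.get(url).json()
        if resp["outputs"]["Scenario"]["status"] != "Optimizing...":
            return resp
        time.sleep(5)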
def summary(request, user_uuid):
    """
    Retrieve a summary of scenarios for the given user_uuid
    :param request:
    :param user_uuid:
    :return:
        {
            "user_uuid",
            "scenarios": [{
                "run_uuid",                     # Run ID
                "status",                       # Status
                "created",                      # Date
                "description",                  # Description
                "focus",                        # Focus
                "address",                      # Address
                "urdb_rate_name",               # Utility Tariff
                "doe_reference_name",           # Load Profile
                "npv_us_dollars",               # Net Present Value ($)
                "net_capital_costs",            # DG System Cost ($)
                "year_one_savings_us_dollars",  # Year 1 Savings ($)
                "pv_kw",                        # PV Size (kW)
                "wind_kw",                      # Wind Size (kW)
                "gen_kw",                       # Generator Size (kW)
                "batt_kw",                      # Battery Power (kW)
                "batt_kwh"                      # Battery Capacity (kWh)
            }]
        }
    """
    try:
        uuid.UUID(user_uuid)  # raises ValueError if not a valid uuid
    except ValueError as e:
        if e.args[0] == "badly formed hexadecimal UUID string":
            return JsonResponse({"Error": str(e.args[0])}, status=404)
        else:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            err = UnexpectedError(exc_type, exc_value, exc_traceback, task='summary', user_uuid=user_uuid)
            err.save_to_db()
            return JsonResponse({"Error": str(err.message)}, status=404)

    try:
        scenarios = ScenarioModel.objects.filter(user_uuid=user_uuid).order_by('-created')
        unlinked_run_uuids = [i.run_uuid for i in UserUnlinkedRuns.objects.filter(user_uuid=user_uuid)]
        scenarios = [s for s in scenarios if s.run_uuid not in unlinked_run_uuids]

        json_response = {"user_uuid": user_uuid, "scenarios": []}

        if len(scenarios) == 0:
            return JsonResponse({"Error": "No scenarios found for user '{}'".format(user_uuid)},
                                content_type='application/json', status=404)

        scenario_run_uuids = [s.run_uuid for s in scenarios]
        scenario_run_ids = [s.id for s in scenarios]

        # save time by only calling each table once
        messages = MessageModel.objects.filter(run_uuid__in=scenario_run_uuids).values(
            'run_uuid', 'message_type', 'message')
        sites = SiteModel.objects.filter(run_uuid__in=scenario_run_uuids).values('run_uuid', 'address')
        loads = LoadProfileModel.objects.filter(run_uuid__in=scenario_run_uuids).values(
            'run_uuid', 'outage_start_hour', 'loads_kw', 'doe_reference_name')
        batts = StorageModel.objects.filter(run_uuid__in=scenario_run_uuids).values(
            'run_uuid', 'max_kw', 'size_kw', 'size_kwh')
        pvs = PVModel.objects.filter(run_uuid__in=scenario_run_uuids).values('run_uuid', 'max_kw', 'size_kw')
        winds = WindModel.objects.filter(run_uuid__in=scenario_run_uuids).values('run_uuid', 'max_kw', 'size_kw')
        gens = GeneratorModel.objects.filter(run_uuid__in=scenario_run_uuids).values('run_uuid', 'max_kw', 'size_kw')
        financials = FinancialModel.objects.filter(run_uuid__in=scenario_run_uuids).values(
            'run_uuid', 'npv_us_dollars', 'net_capital_costs', 'lcc_us_dollars', 'lcc_bau_us_dollars',
            'net_capital_costs_plus_om_us_dollars', 'net_om_us_dollars_bau')
        tariffs = ElectricTariffModel.objects.filter(run_uuid__in=scenario_run_uuids).values(
            'run_uuid', 'urdb_rate_name', 'year_one_energy_cost_us_dollars', 'year_one_demand_cost_us_dollars',
            'year_one_fixed_cost_us_dollars', 'year_one_min_charge_adder_us_dollars', 'year_one_bill_us_dollars',
            'year_one_energy_cost_bau_us_dollars', 'year_one_demand_cost_bau_us_dollars',
            'year_one_fixed_cost_bau_us_dollars', 'year_one_min_charge_adder_bau_us_dollars',
            'year_one_bill_bau_us_dollars')
        resiliences = ResilienceModel.objects.filter(scenariomodel_id__in=scenario_run_ids).values(
            'scenariomodel_id', 'resilience_hours_avg', 'resilience_hours_max', 'resilience_hours_min')

        def get_scenario_data(data, run_uuid):
            if type(data) == dict:
                if str(data.get('run_uuid')) == str(run_uuid):
                    return data
                if str(data.get('scenariomodel_id')) == str(run_uuid):
                    return data
            result = [s for s in data if str(s.get('run_uuid')) == str(run_uuid)]
            if len(result) > 0:
                return result
            result = [s for s in data if str(s.get('scenariomodel_id')) == str(run_uuid)]
            if len(result) > 0:
                return result
            return [{}]

        for scenario in scenarios:
            results = {}

            message_set = get_scenario_data(messages, scenario.run_uuid)
            if not type(message_set) == list:
                message_set = [message_set]

            site = get_scenario_data(sites, scenario.run_uuid)[0]
            load = get_scenario_data(loads, scenario.run_uuid)[0]
            batt = get_scenario_data(batts, scenario.run_uuid)[0]
            pv = get_scenario_data(pvs, scenario.run_uuid)[0]
            wind = get_scenario_data(winds, scenario.run_uuid)[0]
            gen = get_scenario_data(gens, scenario.run_uuid)[0]
            financial = get_scenario_data(financials, scenario.run_uuid)[0]
            tariff = get_scenario_data(tariffs, scenario.run_uuid)[0]
            resilience = get_scenario_data(resiliences, scenario.id)[0]

            # Messages
            results['messages'] = {}
            for message in message_set:
                if len(message.keys()) > 0:
                    results['messages'][message.get('message_type') or "type"] = message.get('message') or ""

            # Run ID
            results['run_uuid'] = str(scenario.run_uuid)
            # Status
            results['status'] = scenario.status
            # Date
            results['created'] = scenario.created

            if site:
                # Description
                results['description'] = scenario.description

                # Focus
                if load['outage_start_hour']:
                    results['focus'] = "Resilience"
                else:
                    results['focus'] = "Financial"

                # Address
                results['address'] = site.get('address')

                # Utility Tariff
                if tariff['urdb_rate_name']:
                    results['urdb_rate_name'] = tariff.get('urdb_rate_name')
                else:
                    results['urdb_rate_name'] = "Custom"

                # Load Profile
                if load['loads_kw']:
                    results['doe_reference_name'] = "Custom"
                else:
                    results['doe_reference_name'] = load.get('doe_reference_name')

                # NPV
                results['npv_us_dollars'] = financial.get('npv_us_dollars')
                # DG System Cost
                results['net_capital_costs'] = financial.get('net_capital_costs')
                # Lifecycle Costs
                results['lcc_us_dollars'] = financial.get('lcc_us_dollars')
                # Lifecycle Costs BAU
                results['lcc_bau_us_dollars'] = financial.get('lcc_bau_us_dollars')
                # Other financials
                results['net_capital_costs_plus_om_us_dollars'] = financial.get('net_capital_costs_plus_om_us_dollars')
                results['net_om_us_dollars_bau'] = financial.get('net_om_us_dollars_bau')

                # Year 1 Savings
                year_one_costs = sum(filter(None, [
                    tariff.get('year_one_energy_cost_us_dollars') or 0,
                    tariff.get('year_one_demand_cost_us_dollars') or 0,
                    tariff.get('year_one_fixed_cost_us_dollars') or 0,
                    tariff.get('year_one_min_charge_adder_us_dollars') or 0,
                    tariff.get('year_one_bill_us_dollars') or 0,
                ]))
                year_one_costs_bau = sum(filter(None, [
                    tariff.get('year_one_energy_cost_bau_us_dollars') or 0,
                    tariff.get('year_one_demand_cost_bau_us_dollars') or 0,
                    tariff.get('year_one_fixed_cost_bau_us_dollars') or 0,
                    tariff.get('year_one_min_charge_adder_bau_us_dollars') or 0,
                    tariff.get('year_one_bill_bau_us_dollars') or 0,
                ]))

                # Resilience stats
                results['resilience_hours_min'] = resilience.get('resilience_hours_min')
                results['resilience_hours_max'] = resilience.get('resilience_hours_max')
                results['resilience_hours_avg'] = resilience.get('resilience_hours_avg')
                if results['resilience_hours_max'] is None:
                    results['resilience_hours_max'] = 'not evaluated'
                if results['resilience_hours_min'] is None:
                    results['resilience_hours_min'] = 'not evaluated'
                if results['resilience_hours_avg'] is None:
                    results['resilience_hours_avg'] = 'not evaluated'

                results['year_one_savings_us_dollars'] = year_one_costs_bau - year_one_costs

                # PV Size
                if pv is not None:
                    if pv['max_kw'] > 0:
                        results['pv_kw'] = pv.get('size_kw')
                    else:
                        results['pv_kw'] = 'not evaluated'
                else:
                    results['pv_kw'] = 'not evaluated'

                # Wind Size
                if wind is not None:
                    if (wind.get('max_kw') or -1) > 0:
                        results['wind_kw'] = wind.get('size_kw')
                    else:
                        results['wind_kw'] = 'not evaluated'
                else:
                    results['wind_kw'] = 'not evaluated'

                # Generator Size
                if gen is not None:
                    if (gen.get('max_kw') or -1) > 0:
                        results['gen_kw'] = gen.get('size_kw')
                    else:
                        results['gen_kw'] = 'not evaluated'
                else:
                    results['gen_kw'] = 'not evaluated'

                # Battery Size
                if batt is not None:
                    if (batt.get('max_kw') or -1) > 0:
                        results['batt_kw'] = batt.get('size_kw')
                        results['batt_kwh'] = batt.get('size_kwh')
                    else:
                        results['batt_kw'] = 'not evaluated'
                        results['batt_kwh'] = 'not evaluated'
                else:
                    results['batt_kw'] = 'not evaluated'
                    results['batt_kwh'] = 'not evaluated'

            json_response['scenarios'].append(results)

        return JsonResponse(json_response, status=200)

    except Exception as e:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        err = UnexpectedError(exc_type, exc_value, exc_traceback, task='summary', user_uuid=user_uuid)
        err.save_to_db()
        return JsonResponse({"Error": err.message}, status=404)
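# Hedged client-side example of listing scenario summaries. The host, path, and api_key query
# parameter are assumptions about the deployment; the response keys come from the view above.
import requests  # third-party HTTP client

def print_summary(user_uuid, base_url="https://developer.nrel.gov/api/reopt/v1", api_key="DEMO_KEY"):
    """Fetch the summary for a user and print one line per scenario (illustrative only)."""
    url = "{}/user/{}/summary/?api_key={}".format(base_url, user_uuid, api_key)
    resp = requests.get(url).json()
    for s in resp.get("scenarios", []):
        print(s["run_uuid"], s["status"], s.get("npv_us_dollars"), s.get("year_one_savings_us_dollars"))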
def setup_scenario(self, run_uuid, data, raw_post):
    """
    :param run_uuid:
    :param data: validated POST of input parameters
    :param raw_post:
    """
    profiler = Profiler()
    inputs_path = os.path.join(os.getcwd(), "input_files")
    self.run_uuid = run_uuid
    self.data = data

    try:
        inputs_dict = data['inputs']['Scenario']
        dfm = DataManager(run_id=run_uuid, user_id=inputs_dict.get('user_uuid'),
                          n_timesteps=int(inputs_dict['time_steps_per_hour'] * 8760))

        # storage is always made, even if max size is zero (due to REopt's expected inputs)
        storage = Storage(dfm=dfm, **inputs_dict["Site"]["Storage"])

        site = Site(dfm=dfm, **inputs_dict["Site"])
        pvs = []

        def setup_pv(pv_dict, latitude, longitude, time_steps_per_hour):
            """
            Create PV object
            :param pv_dict: validated input dictionary for ["Site"]["PV"] (user's POST)
            :return: PV object (from reo.src.techs.py)
            """
            pv = None
            if pv_dict["max_kw"] > 0 or pv_dict["existing_kw"] > 0:
                pv = PV(dfm=dfm, latitude=latitude, longitude=longitude,
                        time_steps_per_hour=time_steps_per_hour, **pv_dict)
                station = pv.station_location
                # update data inputs to reflect the PVWatts station data locations
                # must propagate array_type_to_tilt default assignment back to database
                data['inputs']['Scenario']["Site"]["PV"][pv_dict["pv_number"] - 1]["tilt"] = pv.tilt
                tmp = dict()
                tmp['station_latitude'] = station[0]
                tmp['station_longitude'] = station[1]
                tmp['station_distance_km'] = station[2]
                tmp['tilt'] = pv.tilt  # default tilt assigned within techs.py based on array_type
                tmp['azimuth'] = pv.azimuth
                tmp['max_kw'] = pv.max_kw
                tmp['min_kw'] = pv.min_kw
                ModelManager.updateModel('PVModel', tmp, run_uuid, pv_dict["pv_number"])
            return pv

        for pv_dict in inputs_dict["Site"]["PV"]:
            pvs.append(setup_pv(pv_dict,
                                latitude=inputs_dict['Site'].get('latitude'),
                                longitude=inputs_dict['Site'].get('longitude'),
                                time_steps_per_hour=inputs_dict['time_steps_per_hour']))

        if inputs_dict["Site"]["Generator"]["generator_only_runs_during_grid_outage"]:
            if inputs_dict['Site']['LoadProfile'].get('outage_start_hour') is not None and \
                    inputs_dict['Site']['LoadProfile'].get('outage_end_hour') is not None:
                if inputs_dict["Site"]["Generator"]["max_kw"] > 0 or \
                        inputs_dict["Site"]["Generator"]["existing_kw"] > 0:
                    gen = Generator(dfm=dfm, run_uuid=run_uuid,
                                    outage_start_hour=inputs_dict['Site']['LoadProfile'].get("outage_start_hour"),
                                    outage_end_hour=inputs_dict['Site']['LoadProfile'].get("outage_end_hour"),
                                    time_steps_per_hour=inputs_dict.get('time_steps_per_hour'),
                                    **inputs_dict["Site"]["Generator"])
        elif not inputs_dict["Site"]["Generator"]["generator_only_runs_during_grid_outage"]:
            if inputs_dict["Site"]["Generator"]["max_kw"] > 0 or \
                    inputs_dict["Site"]["Generator"]["existing_kw"] > 0:
                gen = Generator(dfm=dfm, run_uuid=run_uuid,
                                outage_start_hour=inputs_dict['Site']['LoadProfile'].get("outage_start_hour"),
                                outage_end_hour=inputs_dict['Site']['LoadProfile'].get("outage_end_hour"),
                                time_steps_per_hour=inputs_dict.get('time_steps_per_hour'),
                                **inputs_dict["Site"]["Generator"])

        if 'gen' in locals():
            lp = LoadProfile(dfm=dfm,
                             user_profile=inputs_dict['Site']['LoadProfile'].get('loads_kw'),
                             latitude=inputs_dict['Site'].get('latitude'),
                             longitude=inputs_dict['Site'].get('longitude'),
                             pvs=pvs,
                             analysis_years=site.financial.analysis_years,
                             time_steps_per_hour=inputs_dict['time_steps_per_hour'],
                             fuel_avail_before_outage=gen.fuel_avail * gen.fuel_avail_before_outage_pct,
                             gen_existing_kw=gen.existing_kw,
                             gen_min_turn_down=gen.min_turn_down,
                             fuel_slope=gen.fuel_slope,
                             fuel_intercept=gen.fuel_intercept,
                             **inputs_dict['Site']['LoadProfile'])
        else:
            lp = LoadProfile(dfm=dfm,
                             user_profile=inputs_dict['Site']['LoadProfile'].get('loads_kw'),
                             latitude=inputs_dict['Site'].get('latitude'),
                             longitude=inputs_dict['Site'].get('longitude'),
                             pvs=pvs,
                             analysis_years=site.financial.analysis_years,
                             time_steps_per_hour=inputs_dict['time_steps_per_hour'],
                             fuel_avail_before_outage=0,
                             gen_existing_kw=0,
                             gen_min_turn_down=0,
                             fuel_slope=0,
                             fuel_intercept=0,
                             **inputs_dict['Site']['LoadProfile'])

        # Check that the load being sent to optimization does not contain negative values. We check the loads
        # against a variable tolerance (contingent on PV size, since this tech has its existing dispatch added
        # to the loads) and correct loads falling between the threshold and zero.

        # Default tolerance
        negative_load_tolerance = -0.1

        # If there is existing PV, update the default tolerance based on capacity
        if pvs is not None:
            existing_pv_kw = 0
            for pv in pvs:
                if getattr(pv, 'existing_kw', 0) > 0:
                    existing_pv_kw += pv.existing_kw
            negative_load_tolerance = min(negative_load_tolerance, existing_pv_kw * -0.005)  # kW

        # If values in the load profile fall below the tolerance, raise an exception
        if min(lp.load_list) < negative_load_tolerance:
            message = ("After adding existing generation to the load profile there were still negative electricity "
                       "loads. Loads (non-net) must be equal to or greater than 0.")
            log.error("Scenario.py raising error: " + message)
            lp_error = LoadProfileError(task=self.name, run_uuid=run_uuid,
                                        user_uuid=inputs_dict.get('user_uuid'), message=message)
            lp_error.save_to_db()
            raise lp_error

        # Correct load profile values that fall between the tolerance and 0
        lp.load_list = [0 if ((x > negative_load_tolerance) and (x < 0)) else x for x in lp.load_list]

        elec_tariff = ElecTariff(dfm=dfm, run_id=run_uuid,
                                 load_year=inputs_dict['Site']['LoadProfile']['year'],
                                 time_steps_per_hour=inputs_dict.get('time_steps_per_hour'),
                                 **inputs_dict['Site']['ElectricTariff'])

        if inputs_dict["Site"]["Wind"]["max_kw"] > 0:
            wind = Wind(dfm=dfm, inputs_path=inputs_path,
                        latitude=inputs_dict['Site'].get('latitude'),
                        longitude=inputs_dict['Site'].get('longitude'),
                        time_steps_per_hour=inputs_dict.get('time_steps_per_hour'),
                        run_uuid=run_uuid,
                        **inputs_dict["Site"]["Wind"])
            # must propagate these changes back to the database for the proforma
            data['inputs']['Scenario']["Site"]["Wind"]["installed_cost_us_dollars_per_kw"] = \
                wind.installed_cost_us_dollars_per_kw
            data['inputs']['Scenario']["Site"]["Wind"]["federal_itc_pct"] = wind.incentives.federal.itc
            tmp = dict()
            tmp['federal_itc_pct'] = wind.incentives.federal.itc
            tmp['installed_cost_us_dollars_per_kw'] = wind.installed_cost_us_dollars_per_kw
            ModelManager.updateModel('WindModel', tmp, run_uuid)
            # TODO: remove the need for this db call by passing these values to process_results.py via reopt.jl

        util = Util(dfm=dfm,
                    outage_start_hour=inputs_dict['Site']['LoadProfile'].get("outage_start_hour"),
                    outage_end_hour=inputs_dict['Site']['LoadProfile'].get("outage_end_hour"))

        dfm.finalize()
        dfm_dict = vars(dfm)  # serialize for celery

        # delete python objects, which are not serializable
        for k in ['storage', 'site', 'elec_tariff', 'pvs', 'pvnms', 'load', 'util'] + dfm.available_techs:
            if dfm_dict.get(k) is not None:
                del dfm_dict[k]

        self.data = data
        profiler.profileEnd()
        tmp = dict()
        tmp['setup_scenario_seconds'] = profiler.getDuration()
        ModelManager.updateModel('ProfileModel', tmp, run_uuid)
        # TODO: remove the need for this db call by passing these values to process_results.py via reopt.jl

        return vars(dfm)  # --> gets passed to the REopt runs (BAU and with tech)

    except Exception as e:
        if isinstance(e, LoadProfileError):
            raise e

        if hasattr(e, 'args'):
            if len(e.args) > 0:
                if e.args[0] == 'Wind Dataset Timed Out':
                    raise WindDownloadError(task=self.name, run_uuid=run_uuid,
                                            user_uuid=self.data['inputs']['Scenario'].get('user_uuid'))

                if isinstance(e.args[0], str):
                    if e.args[0].startswith('PVWatts'):
                        message = 'PV Watts could not locate a dataset station within the search radius'
                        radius = data['inputs']['Scenario']["Site"]["PV"][0].get("radius") or 0
                        if radius > 0:
                            message += (
                                ". A search radius of {} miles was used for the NSRDB dataset (covering the "
                                "continental US, HI and parts of AK). A search radius twice as large ({} miles) "
                                "was also used to query an international dataset. See "
                                "https://maps.nrel.gov/nsrdb-viewer/ for a map of dataset availability or "
                                "https://nsrdb.nrel.gov/ for dataset documentation."
                            ).format(radius, radius * 2)
                        else:
                            message += (
                                " from the NSRDB or international datasets. No search threshold was specified "
                                "when attempting to pull solar resource data from either dataset."
                            )
                        raise PVWattsDownloadError(message=message, task=self.name, run_uuid=run_uuid,
                                                   user_uuid=self.data['inputs']['Scenario'].get('user_uuid'),
                                                   traceback=e.args[0])

        exc_type, exc_value, exc_traceback = sys.exc_info()
        log.error("Scenario.py raising error: " + str(exc_value.args[0]))
        raise UnexpectedError(exc_type, exc_value.args[0], traceback.format_tb(exc_traceback),
                              task=self.name, run_uuid=run_uuid,
                              user_uuid=self.data['inputs']['Scenario'].get('user_uuid'))
def financial_check(request, run_uuid=None):
    """
    Check to see if resilience scenario system sizes are the same as financial scenario sizes
    """
    resilience_uuid = request.GET.get('resilience_uuid')
    if resilience_uuid is None:  # preserving old behavior
        resilience_uuid = run_uuid
    financial_uuid = request.GET.get('financial_uuid')

    def parse_system_sizes(site):
        size_dict = dict()
        if "Generator" in site:
            size_dict["Generator"] = site["Generator"]["size_kw"]
        if "Storage" in site:
            size_dict["Storage_kw"] = site["Storage"]["size_kw"]
            size_dict["Storage_kwh"] = site["Storage"]["size_kwh"]
        if "Wind" in site:
            size_dict["Wind"] = site["Wind"]["size_kw"]
        if "PV" in site:
            size_dict["PV"] = site["PV"]["size_kw"]
        return size_dict

    # validate uuids
    try:
        uuid.UUID(str(resilience_uuid))  # raises ValueError if not a valid uuid
        uuid.UUID(str(financial_uuid))  # raises ValueError if not a valid uuid
    except ValueError as e:
        if e.args[0] == "badly formed hexadecimal UUID string":
            return JsonResponse({"Error": str(e.args[0])}, status=400)
        else:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            err = UnexpectedError(exc_type, exc_value.args[0], exc_traceback,
                                  task='resilience_stats', run_uuid=resilience_uuid)
            err.save_to_db()
            return JsonResponse({"Error": str(err.message)}, status=400)

    try:
        resil_scenario = ScenarioModel.objects.get(run_uuid=resilience_uuid)
    except ScenarioModel.DoesNotExist:
        msg = "Scenario {} does not exist.".format(resilience_uuid)
        return JsonResponse({"Error": msg}, content_type='application/json', status=404)

    if resil_scenario.status == "Optimizing...":
        return JsonResponse(
            {"Error": "The resilience scenario is still optimizing. Please try again later."},
            content_type='application/json', status=500)
    elif "error" in resil_scenario.status.lower():
        return JsonResponse(
            {"Error": "An error occurred in the resilience scenario. Please check the messages from your results."},
            content_type='application/json', status=500)

    try:
        financial_scenario = ScenarioModel.objects.get(run_uuid=financial_uuid)
    except ScenarioModel.DoesNotExist:
        msg = "Scenario {} does not exist.".format(financial_uuid)
        return JsonResponse({"Error": msg}, content_type='application/json', status=404)

    if financial_scenario.status == "Optimizing...":
        return JsonResponse(
            {"Error": "The financial scenario is still optimizing. Please try again later."},
            content_type='application/json', status=500)
    elif "error" in financial_scenario.status.lower():
        return JsonResponse(
            {"Error": "An error occurred in the financial scenario. Please check the messages from your results."},
            content_type='application/json', status=500)

    try:
        # retrieve sizes from the database
        resilience_result = ModelManager.make_response(resilience_uuid)
        financial_result = ModelManager.make_response(financial_uuid)
        resilience_sizes = parse_system_sizes(resilience_result["outputs"]["Scenario"]["Site"])
        financial_sizes = parse_system_sizes(financial_result["outputs"]["Scenario"]["Site"])

        survives = True
        if resilience_sizes.keys() == financial_sizes.keys():
            for tech, resil_size in resilience_sizes.items():
                if float(resil_size - financial_sizes[tech]) / float(max(resil_size, 1)) > 1.0e-3:
                    survives = False
                    break
        else:
            survives = False
        response = JsonResponse({"survives_specified_outage": survives})
    except Exception:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        err = UnexpectedError(exc_type, exc_value.args[0], exc_traceback,
                              task='resilience_stats', run_uuid=resilience_uuid)
        err.save_to_db()
        return JsonResponse({"Error": err.message}, status=500)
    else:
        return response
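# The comparison above treats the resilience sizes as the reference and allows a 0.1% relative
# difference before declaring a mismatch. A small standalone restatement of that check, with
# hypothetical sizes (not from any real run), only to make the tolerance logic explicit:
def sizes_match(resilience_sizes, financial_sizes, rel_tol=1.0e-3):
    """Return True when every financial size is within rel_tol of the resilience size (same signed check as above)."""
    if resilience_sizes.keys() != financial_sizes.keys():
        return False
    for tech, resil_size in resilience_sizes.items():
        if float(resil_size - financial_sizes[tech]) / float(max(resil_size, 1)) > rel_tol:
            return False
    return True

print(sizes_match({"PV": 100.0, "Storage_kw": 20.0}, {"PV": 100.05, "Storage_kw": 20.0}))  # True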
def obj_create(self, bundle, **kwargs):
    run_uuid = str(uuid.uuid4())
    data = dict()
    data["outputs"] = {
        "Scenario": {
            'run_uuid': run_uuid,
            'api_version': api_version,
            'Profile': {
                'pre_setup_scenario_seconds': 0,
                'setup_scenario_seconds': 0,
                'reopt_seconds': 0,
                'reopt_bau_seconds': 0,
                'parse_run_outputs_seconds': 0,
            },
        }
    }

    # Set up and start the profiler
    profiler = Profiler()

    uuidFilter = UUIDFilter(run_uuid)
    log.addFilter(uuidFilter)
    log.info('Beginning run setup')

    try:
        input_validator = ValidateNestedInput(bundle.data)
    except Exception as e:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        err = UnexpectedError(exc_type, exc_value.args[0], traceback.format_tb(exc_traceback),
                              task='ValidateNestedInput', run_uuid=run_uuid)
        err.save_to_db()
        set_status(data, 'Internal Server Error during input validation. No optimization task has been created. '
                         'Please check your POST for bad values.')
        data['inputs'] = bundle.data
        data['messages'] = {}
        data['messages']['error'] = err.message  # "Unexpected Error."
        log.error("Internal Server error: " + err.message)
        raise ImmediateHttpResponse(HttpResponse(json.dumps(data), content_type='application/json',
                                                 status=500))  # internal server error

    data["inputs"] = input_validator.input_dict
    data["messages"] = input_validator.messages

    if not input_validator.isValid:  # 400 Bad Request
        log.debug("input_validator not valid")
        log.debug(json.dumps(data))
        set_status(data, 'Error. No optimization task has been created. See messages for more information. '
                         'Note that inputs have default values filled in.')
        if saveToDb:
            badpost = BadPost(run_uuid=run_uuid, post=json.dumps(bundle.data), errors=str(data['messages']))
            badpost.save()
        raise ImmediateHttpResponse(HttpResponse(json.dumps(data), content_type='application/json', status=400))

    log.info('Entering ModelManager')
    model_manager = ModelManager()
    profiler.profileEnd()

    if saveToDb:
        set_status(data, 'Optimizing...')
        data['outputs']['Scenario']['Profile']['pre_setup_scenario_seconds'] = profiler.getDuration()

        if bundle.request.META.get('HTTP_X_API_USER_ID'):
            if (bundle.request.META.get('HTTP_X_API_USER_ID') or '') == '6f09c972-8414-469b-b3e8-a78398874103':
                data['outputs']['Scenario']['job_type'] = 'REopt Lite Web Tool'
            else:
                data['outputs']['Scenario']['job_type'] = 'developer.nrel.gov'
        else:
            data['outputs']['Scenario']['job_type'] = 'Internal NREL'

        test_case = bundle.request.META.get('HTTP_USER_AGENT') or ''
        if test_case.startswith('check_http/'):
            data['outputs']['Scenario']['job_type'] = 'Monitoring'

        try:
            model_manager.create_and_save(data)
        except Exception as e:
            log.error("Could not create and save run_uuid: {}\n Data: {}".format(run_uuid, data))
            exc_type, exc_value, exc_traceback = sys.exc_info()
            err = UnexpectedError(exc_type, exc_value.args[0], traceback.format_tb(exc_traceback),
                                  task='ModelManager.create_and_save', run_uuid=run_uuid)
            err.save_to_db()
            set_status(data, "Internal Server Error during saving of inputs. Please see messages.")
            data['messages']['error'] = err.message  # "Unexpected Error."
            log.error("Internal Server error: " + err.message)
            raise ImmediateHttpResponse(HttpResponse(json.dumps(data), content_type='application/json',
                                                     status=500))  # internal server error

    setup = setup_scenario.s(run_uuid=run_uuid, data=data, raw_post=bundle.data)
    call_back = process_results.s(data=data, meta={'run_uuid': run_uuid, 'api_version': api_version})
    # (use .si for an immutable signature if no outputs should be passed from the reopt jobs)
    rjm = run_jump_model.s(data=data, run_uuid=run_uuid)
    rjm_bau = run_jump_model.s(data=data, run_uuid=run_uuid, bau=True)

    log.info("Starting celery chain")
    try:
        chain(setup | group(rjm, rjm_bau) | call_back)()
    except Exception as e:
        if isinstance(e, REoptError):
            pass  # handled in each task
        else:  # for every other kind of exception
            exc_type, exc_value, exc_traceback = sys.exc_info()
            err = UnexpectedError(exc_type, exc_value.args[0], traceback.format_tb(exc_traceback),
                                  task='api.py', run_uuid=run_uuid)
            err.save_to_db()
            set_status(data, 'Internal Server Error. See messages for more.')
            if 'messages' not in data.keys():
                data['messages'] = {}
            data['messages']['error'] = err.message
            log.error("Internal Server error: " + err.message)
            raise ImmediateHttpResponse(HttpResponse(json.dumps(data), content_type='application/json',
                                                     status=500))  # internal server error

    log.info("Returning with HTTP 201")
    raise ImmediateHttpResponse(HttpResponse(json.dumps({'run_uuid': run_uuid}),
                                             content_type='application/json', status=201))
def setup_scenario(self, run_uuid, data, raw_post):
    """
    :param run_uuid:
    :param data: validated POST of input parameters
    :param raw_post:
    """
    profiler = Profiler()
    inputs_path = os.path.join(os.getcwd(), "input_files")
    self.run_uuid = run_uuid
    self.data = data

    try:
        inputs_dict = data['inputs']['Scenario']
        dfm = DataManager(run_id=run_uuid, user_id=inputs_dict.get('user_uuid'),
                          n_timesteps=int(inputs_dict['time_steps_per_hour'] * 8760))

        # storage is always made, even if max size is zero (due to REopt's expected inputs)
        storage = Storage(dfm=dfm, **inputs_dict["Site"]["Storage"])

        # Hot TES, always made, same reason as "storage"; do unit conversions as needed here
        hot_tes = HotTES(dfm=dfm, **inputs_dict['Site']['HotTES'])

        # Cold TES, always made, same reason as "storage"; do unit conversions as needed here
        cold_tes = ColdTES(dfm=dfm, **inputs_dict['Site']['ColdTES'])

        site = Site(dfm=dfm, **inputs_dict["Site"])
        pvs = []

        def setup_pv(pv_dict, latitude, longitude, time_steps_per_hour):
            """
            Create PV object
            :param pv_dict: validated input dictionary for ["Site"]["PV"] (user's POST)
            :return: PV object (from reo.src.techs.py)
            """
            pv = None
            if pv_dict["max_kw"] > 0 or pv_dict["existing_kw"] > 0:
                pv = PV(dfm=dfm, latitude=latitude, longitude=longitude,
                        time_steps_per_hour=time_steps_per_hour, **pv_dict)
                station = pv.station_location
                # update data inputs to reflect the PVWatts station data locations
                # must propagate array_type_to_tilt default assignment back to database
                data['inputs']['Scenario']["Site"]["PV"][pv_dict["pv_number"] - 1]["tilt"] = pv.tilt
                tmp = dict()
                tmp['station_latitude'] = station[0]
                tmp['station_longitude'] = station[1]
                tmp['station_distance_km'] = station[2]
                tmp['tilt'] = pv.tilt  # default tilt assigned within techs.py based on array_type
                tmp['azimuth'] = pv.azimuth
                tmp['max_kw'] = pv.max_kw
                tmp['min_kw'] = pv.min_kw
                ModelManager.updateModel('PVModel', tmp, run_uuid, pv_dict["pv_number"])
            return pv

        for pv_dict in inputs_dict["Site"]["PV"]:
            pvs.append(setup_pv(pv_dict,
                                latitude=inputs_dict['Site'].get('latitude'),
                                longitude=inputs_dict['Site'].get('longitude'),
                                time_steps_per_hour=inputs_dict['time_steps_per_hour']))

        if inputs_dict["Site"]["Generator"]["generator_only_runs_during_grid_outage"]:
            if inputs_dict['Site']['LoadProfile'].get('outage_start_time_step') is not None and \
                    inputs_dict['Site']['LoadProfile'].get('outage_end_time_step') is not None:
                if inputs_dict["Site"]["Generator"]["max_kw"] > 0 or \
                        inputs_dict["Site"]["Generator"]["existing_kw"] > 0:
                    gen = Generator(dfm=dfm,
                                    outage_start_time_step=inputs_dict['Site']['LoadProfile'].get("outage_start_time_step"),
                                    outage_end_time_step=inputs_dict['Site']['LoadProfile'].get("outage_end_time_step"),
                                    time_steps_per_hour=inputs_dict.get('time_steps_per_hour'),
                                    **inputs_dict["Site"]["Generator"])
        elif not inputs_dict["Site"]["Generator"]["generator_only_runs_during_grid_outage"]:
            if inputs_dict["Site"]["Generator"]["max_kw"] > 0 or \
                    inputs_dict["Site"]["Generator"]["existing_kw"] > 0:
                gen = Generator(dfm=dfm,
                                outage_start_time_step=inputs_dict['Site']['LoadProfile'].get("outage_start_time_step"),
                                outage_end_time_step=inputs_dict['Site']['LoadProfile'].get("outage_end_time_step"),
                                time_steps_per_hour=inputs_dict.get('time_steps_per_hour'),
                                **inputs_dict["Site"]["Generator"])

        if 'gen' in locals():
            lp = LoadProfile(dfm=dfm,
                             user_profile=inputs_dict['Site']['LoadProfile'].get('loads_kw'),
                             latitude=inputs_dict['Site'].get('latitude'),
                             longitude=inputs_dict['Site'].get('longitude'),
                             pvs=pvs,
                             analysis_years=site.financial.analysis_years,
                             time_steps_per_hour=inputs_dict['time_steps_per_hour'],
                             fuel_avail_before_outage=gen.fuel_avail * gen.fuel_avail_before_outage_pct,
                             gen_existing_kw=gen.existing_kw,
                             gen_min_turn_down=gen.min_turn_down_pct,
                             fuel_slope=gen.fuel_slope,
                             fuel_intercept=gen.fuel_intercept,
                             **inputs_dict['Site']['LoadProfile'])
        else:
            lp = LoadProfile(dfm=dfm,
                             user_profile=inputs_dict['Site']['LoadProfile'].get('loads_kw'),
                             latitude=inputs_dict['Site'].get('latitude'),
                             longitude=inputs_dict['Site'].get('longitude'),
                             pvs=pvs,
                             analysis_years=site.financial.analysis_years,
                             time_steps_per_hour=inputs_dict['time_steps_per_hour'],
                             fuel_avail_before_outage=0,
                             gen_existing_kw=0,
                             gen_min_turn_down=0,
                             fuel_slope=0,
                             fuel_intercept=0,
                             **inputs_dict['Site']['LoadProfile'])

        # Check that the load being sent to optimization does not contain negative values. We check the loads
        # against a variable tolerance (contingent on PV size, since this tech has its existing dispatch added
        # to the loads) and correct loads falling between the threshold and zero.

        # Default tolerance
        negative_load_tolerance = -0.1

        # If there is existing PV, update the default tolerance based on capacity
        if pvs is not None:
            existing_pv_kw = 0
            for pv in pvs:
                if getattr(pv, 'existing_kw', 0) > 0:
                    existing_pv_kw += pv.existing_kw
            negative_load_tolerance = min(negative_load_tolerance, existing_pv_kw * -0.005)  # kW

        # If values in the load profile fall below the tolerance, raise an exception
        if min(lp.load_list) < negative_load_tolerance:
            message = ("After adding existing generation to the load profile there were still negative electricity "
                       "loads. Loads (non-net) must be equal to or greater than 0.")
            log.error("Scenario.py raising error: " + message)
            lp_error = LoadProfileError(task=self.name, run_uuid=run_uuid,
                                        user_uuid=inputs_dict.get('user_uuid'), message=message)
            lp_error.save_to_db()
            raise lp_error

        # Correct load profile values that fall between the tolerance and 0
        lp.load_list = [0 if ((x > negative_load_tolerance) and (x < 0)) else x for x in lp.load_list]

        # Load Profile Boiler Fuel
        lpbf = LoadProfileBoilerFuel(dfm=dfm,
                                     time_steps_per_hour=inputs_dict['time_steps_per_hour'],
                                     latitude=inputs_dict['Site']['latitude'],
                                     longitude=inputs_dict['Site']['longitude'],
                                     nearest_city=lp.nearest_city,
                                     year=lp.year,
                                     **inputs_dict['Site']['LoadProfileBoilerFuel'])

        # Boiler which supplies the BAU boiler fuel load, if there is a boiler fuel load
        if lpbf.annual_mmbtu > 0.0:
            boiler = Boiler(dfm=dfm, boiler_fuel_series_bau=lpbf.load_list, **inputs_dict['Site']['Boiler'])
            tmp = dict()
            tmp['max_mmbtu_per_hr'] = boiler.max_mmbtu_per_hr
            ModelManager.updateModel('BoilerModel', tmp, run_uuid)
        else:
            boiler = None

        # Load Profile Chiller Thermal
        lpct = LoadProfileChillerThermal(dfm=dfm,
                                         total_electric_load_list=lp.unmodified_load_list,
                                         time_steps_per_hour=inputs_dict['time_steps_per_hour'],
                                         latitude=inputs_dict['Site']['latitude'],
                                         longitude=inputs_dict['Site']['longitude'],
                                         nearest_city=lp.nearest_city or lpbf.nearest_city,
                                         year=lp.year,
                                         max_thermal_factor_on_peak_load=inputs_dict['Site']['ElectricChiller']['max_thermal_factor_on_peak_load'],
                                         **inputs_dict['Site']['LoadProfileChillerThermal'])

        chiller_elec_greater_than_total_elec = [
            1 if lpct.load_list[i] / lpct.chiller_cop > lp.load_list[i] else 0
            for i in range(len(lp.load_list))
        ]
        if sum(chiller_elec_greater_than_total_elec) > 0:
            timestep = None
            for idx, v in enumerate(chiller_elec_greater_than_total_elec):
                if v == 1:
                    timestep = idx
                    break
            message = ("LoadProfileChillerThermal electric load in kW cannot be more than "
                       "total LoadProfile load in kW. At timestep {} the LoadProfileChillerThermal load is {} kW and "
                       "the LoadProfile load is {} kW. Note you may consider adjusting your "
                       "LoadProfileChillerThermal chiller_cop or check the chiller load input versus the total "
                       "electric load if you provided inputs in units of cooling tons.").format(
                           timestep, lpct.load_list[timestep] / lpct.chiller_cop, lp.load_list[timestep])
            log.error("Scenario.py raising error: " + message)
            lpct_error = LoadProfileError(task=self.name, run_uuid=run_uuid,
                                          user_uuid=inputs_dict.get('user_uuid'), message=message)
            lpct_error.save_to_db()
            raise lpct_error

        # Option 1: retrieve the annual load from calculations here and add it to the database
        tmp = dict()
        tmp['chiller_cop'] = lpct.chiller_cop
        tmp['annual_calculated_kwh_bau'] = lpct.annual_kwht / lpct.chiller_cop
        tmp['year_one_chiller_electric_load_series_kw_bau'] = [i / lpct.chiller_cop for i in lpct.load_list]
        ModelManager.updateModel('LoadProfileChillerThermalModel', tmp, run_uuid)

        # Electric chiller which supplies the BAU electric chiller load, if there is an electric chiller load
        if lpct.annual_kwht > 0.0:
            elecchl = ElectricChiller(dfm=dfm, lpct=lpct, **inputs_dict['Site']['ElectricChiller'])
            tmp = dict()
            tmp['max_kw'] = elecchl.max_kw
            ModelManager.updateModel('ElectricChillerModel', tmp, run_uuid)
        else:
            elecchl = None

        # Fuel tariff
        fuel_tariff = FuelTariff(dfm=dfm,
                                 time_steps_per_hour=inputs_dict['time_steps_per_hour'],
                                 **inputs_dict['Site']['FuelTariff'])

        elec_tariff = ElecTariff(dfm=dfm, run_id=run_uuid,
                                 load_year=lp.year,
                                 time_steps_per_hour=inputs_dict.get('time_steps_per_hour'),
                                 **inputs_dict['Site']['ElectricTariff'])

        if inputs_dict["Site"]["Wind"]["max_kw"] > 0:
            wind = Wind(dfm=dfm, inputs_path=inputs_path,
                        latitude=inputs_dict['Site'].get('latitude'),
                        longitude=inputs_dict['Site'].get('longitude'),
                        time_steps_per_hour=inputs_dict.get('time_steps_per_hour'),
                        run_uuid=run_uuid,
                        **inputs_dict["Site"]["Wind"])
            # must propagate these changes back to the database for the proforma
            data['inputs']['Scenario']["Site"]["Wind"]["installed_cost_us_dollars_per_kw"] = \
                wind.installed_cost_us_dollars_per_kw
            data['inputs']['Scenario']["Site"]["Wind"]["federal_itc_pct"] = wind.incentives.federal.itc
            tmp = dict()
            tmp['federal_itc_pct'] = wind.incentives.federal.itc
            tmp['installed_cost_us_dollars_per_kw'] = wind.installed_cost_us_dollars_per_kw
            ModelManager.updateModel('WindModel', tmp, run_uuid)
            # TODO: remove the need for this db call by passing these values to process_results.py via reopt.jl

        if inputs_dict["Site"]["CHP"].get("prime_mover") is not None or \
                inputs_dict["Site"]["CHP"].get("max_kw", 0.0) > 0.0:
            if boiler is not None:
                steam_or_hw = boiler.existing_boiler_production_type_steam_or_hw
            else:
                steam_or_hw = 'hot_water'
            chp = CHP(dfm=dfm, run_uuid=run_uuid,
                      existing_boiler_production_type_steam_or_hw=steam_or_hw,
                      oa_temp_degF=inputs_dict['Site']['outdoor_air_temp_degF'],
                      site_elevation_ft=inputs_dict['Site']['elevation_ft'],
                      outage_start_time_step=inputs_dict['Site']['LoadProfile'].get("outage_start_time_step"),
                      outage_end_time_step=inputs_dict['Site']['LoadProfile'].get("outage_end_time_step"),
                      time_steps_per_hour=inputs_dict.get('time_steps_per_hour'),
                      year=lp.year,
                      **inputs_dict['Site']['CHP'])

        # Absorption chiller
        if inputs_dict["Site"]["AbsorptionChiller"]["max_ton"] > 0 and lpct.annual_kwht > 0.0:
            absorpchl = AbsorptionChiller(dfm=dfm,
                                          max_cooling_load_tons=elecchl.max_cooling_load_tons,
                                          hw_or_steam=boiler.existing_boiler_production_type_steam_or_hw,
                                          chp_prime_mover=chp.prime_mover,
                                          **inputs_dict['Site']['AbsorptionChiller'])
            tmp = dict()
            tmp['installed_cost_us_dollars_per_ton'] = absorpchl.installed_cost_us_dollars_per_ton
            tmp['om_cost_us_dollars_per_ton'] = absorpchl.om_cost_us_dollars_per_ton
            ModelManager.updateModel('AbsorptionChillerModel', tmp, run_uuid)

        util = Util(dfm=dfm,
                    outage_start_time_step=inputs_dict['Site']['LoadProfile'].get("outage_start_time_step"),
                    outage_end_time_step=inputs_dict['Site']['LoadProfile'].get("outage_end_time_step"))

        # Assign decomposition subproblem optimization parameters - only used if decomposition is selected
        dfm.optimality_tolerance_decomp_subproblem = inputs_dict['optimality_tolerance_decomp_subproblem']
        dfm.timeout_decomp_subproblem_seconds = inputs_dict['timeout_decomp_subproblem_seconds']
        dfm.add_soc_incentive = inputs_dict['add_soc_incentive']

        dfm.finalize()
        dfm_dict = vars(dfm)  # serialize for celery

        # delete python objects, which are not serializable
        for k in ['storage', 'hot_tes', 'cold_tes', 'site', 'elec_tariff', 'fuel_tariff', 'pvs', 'pvnms',
                  'load', 'util', 'heating_load', 'cooling_load'] + dfm.available_techs:
            if dfm_dict.get(k) is not None:
                del dfm_dict[k]

        self.data = data
        profiler.profileEnd()
        tmp = dict()
        tmp['setup_scenario_seconds'] = profiler.getDuration()
        ModelManager.updateModel('ProfileModel', tmp, run_uuid)
        # TODO: remove the need for this db call by passing these values to process_results.py via reopt.jl

        return vars(dfm)  # --> gets passed to the REopt runs (BAU and with tech)

    except Exception as e:
        if isinstance(e, LoadProfileError):
            raise e

        if isinstance(e, PVWattsDownloadError):
            e.run_uuid = run_uuid
            e.user_uuid = self.data['inputs']['Scenario'].get('user_uuid')
            e.save_to_db()
            raise e

        if hasattr(e, 'args'):
            if len(e.args) > 0:
                if e.args[0] == 'Unable to download wind data':
                    raise WindDownloadError(task=self.name, run_uuid=run_uuid,
                                            user_uuid=self.data['inputs']['Scenario'].get('user_uuid'))

                if isinstance(e.args[0], str):
                    if e.args[0].startswith('PVWatts'):
                        message = 'PV Watts could not locate a dataset station within the search radius'
                        radius = data['inputs']['Scenario']["Site"]["PV"][0].get("radius") or 0
                        if radius > 0:
                            message += (
                                ". A search radius of {} miles was used for the NSRDB dataset (covering the "
                                "continental US, HI and parts of AK). A search radius twice as large ({} miles) "
                                "was also used to query an international dataset. See "
                                "https://maps.nrel.gov/nsrdb-viewer/ for a map of dataset availability or "
                                "https://nsrdb.nrel.gov/ for dataset documentation."
                            ).format(radius, radius * 2)
                        else:
                            message += (
                                " from the NSRDB or international datasets. No search threshold was specified "
                                "when attempting to pull solar resource data from either dataset."
                            )
                        raise PVWattsDownloadError(message=message, task=self.name, run_uuid=run_uuid,
                                                   user_uuid=self.data['inputs']['Scenario'].get('user_uuid'),
                                                   traceback=e.args[0])

        exc_type, exc_value, exc_traceback = sys.exc_info()
        log.error("Scenario.py raising error: " + str(exc_value.args[0]))
        raise UnexpectedError(exc_type, exc_value.args[0], traceback.format_tb(exc_traceback),
                              task=self.name, run_uuid=run_uuid,
                              user_uuid=self.data['inputs']['Scenario'].get('user_uuid'))
def resilience_stats(request: Union[Dict, HttpRequest], run_uuid=None):
    """
    Run the outage simulator for the given run_uuid.
    :param request: optional parameter for 'bau', boolean
    :param run_uuid:
    :return: {"resilience_by_timestep",
              "resilience_hours_min",
              "resilience_hours_max",
              "resilience_hours_avg",
              "outage_durations",
              "probs_of_surviving",
             }
    A GET with 'bau=true' also returns the same values with '_bau' appended, for the site's existing capacities.
    """
    try:
        uuid.UUID(run_uuid)  # raises ValueError if not a valid uuid
    except ValueError as e:
        if e.args[0] == "badly formed hexadecimal UUID string":
            return JsonResponse({"Error": str(e.args[0])}, status=400)
        else:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            err = UnexpectedError(exc_type, exc_value.args[0], exc_traceback,
                                  task='resilience_stats', run_uuid=run_uuid)
            err.save_to_db()
            return JsonResponse({"Error": str(err.message)}, status=400)

    bau = False  # whether or not the user wants the outage simulator run with existing sizes
    if isinstance(request, HttpRequest):
        if request.GET.get('bau') in ["True", "true", "1"]:
            bau = True
    elif isinstance(request, dict):
        bau = request.get("bau")

    # Safety check; no exception is expected if called after POST-ing to the /outagesimjob endpoint
    try:
        scenario = ScenarioModel.objects.get(run_uuid=run_uuid)
    except ScenarioModel.DoesNotExist:
        msg = "Scenario {} does not exist.".format(run_uuid)
        return JsonResponse({"Error": msg}, content_type='application/json', status=404)

    if scenario.status == "Optimizing...":
        return JsonResponse({"Error": "The scenario is still optimizing. Please try again later."},
                            content_type='application/json', status=404)
    elif "error" in scenario.status.lower():
        return JsonResponse(
            {"Error": "An error occurred in the scenario. Please check the messages from your results."},
            content_type='application/json', status=500)

    try:  # catch all exceptions
        try:  # catch specific exception
            not_ready_msg = ('Outage sim results are not ready. '
                             'If you have already submitted an outagesimjob, please try again later. '
                             'If not, please first submit an outagesimjob by sending a POST request to '
                             'v1/outagesimjob/ with run_uuid and bau parameters. This will generate '
                             'outage simulation results that you can access from a GET request to the '
                             'v1/job/<run uuid>/resilience_stats endpoint. ')
            not_ready_msg += 'Sample body data for POST-ing to /outagesimjob/: {"run_uuid": "6ea30f0f-3723-4fd1-8a3f-bebf8a3e4dbf", "bau": false}'

            rm = ResilienceModel.objects.get(scenariomodel=scenario)
            if rm.resilience_by_timestep is None:
                return JsonResponse({"Error": not_ready_msg}, content_type='application/json', status=404)
        except ResilienceModel.DoesNotExist:  # case for no resilience_stats generated yet
            return JsonResponse({"Error": not_ready_msg}, content_type='application/json', status=404)
        else:  # ResilienceModel does exist
            results = model_to_dict(rm)

            # remove items that the user does not need
            del results['scenariomodel']
            del results['id']

            if bau and results["probs_of_surviving_bau"] is None:
                # then need to run outage_sim with existing sizes (BAU)
                bau_results = run_outage_sim(run_uuid, with_tech=False, bau=bau)
                ResilienceModel.objects.filter(id=rm.id).update(**bau_results)
                results.update(bau_results)

            if not bau:  # remove BAU results from the results dict (if they are there)
                results = {k: v for k, v in results.items() if "_bau" not in k}

            results.update({
                "help_text": ("The present_worth_factor and avg_critical_load are provided such that one can "
                              "calculate an avoided outage cost in dollars by multiplying a value of lost load "
                              "($/kWh) by the avg_critical_load, resilience_hours_avg, and present_worth_factor. "
                              "Note that if the outage event is 'major' (i.e. only occurs once), then the "
                              "present_worth_factor is 1.")
            })
            return JsonResponse({"outage_sim_results": results}, content_type='application/json', status=200)

    except Exception:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        err = UnexpectedError(exc_type, exc_value.args[0], exc_traceback,
                              task='resilience_stats', run_uuid=run_uuid)
        err.save_to_db()
        return JsonResponse({"Error": err.message}, status=500)
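# Hedged client-side workflow matching the instructions in not_ready_msg above: POST an outagesimjob,
# then GET the resilience stats. The host and api_key query parameter are assumptions about the
# deployment; the /outagesimjob/ and /job/<run_uuid>/resilience_stats/ paths come from the message text.
import requests  # third-party HTTP client

base = "https://developer.nrel.gov/api/reopt/v1"
run_uuid = "your-run-uuid-here"

# 1) queue the outage simulation
requests.post("{}/outagesimjob/?api_key=DEMO_KEY".format(base),
              json={"run_uuid": run_uuid, "bau": False})

# 2) later, fetch the simulation results
stats = requests.get("{}/job/{}/resilience_stats/?api_key=DEMO_KEY".format(base, run_uuid)).json()
print(stats.get("outage_sim_results", {}).get("resilience_hours_avg"))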
def run_jump_model(self, dfm, data, run_uuid, bau=False):
    profiler = Profiler()
    time_dict = dict()
    name = 'reopt' if not bau else 'reopt_bau'
    reopt_inputs = dfm['reopt_inputs'] if not bau else dfm['reopt_inputs_bau']
    self.data = data
    self.run_uuid = data['outputs']['Scenario']['run_uuid']
    self.user_uuid = data['outputs']['Scenario'].get('user_uuid')

    if platform.system() == "Darwin":
        ext = ".dylib"
    elif platform.system() == "Windows":
        ext = ".dll"
    else:  # Linux
        ext = ".so"
    julia_img_file = os.path.join("julia_envs", "Xpress", "JuliaXpressSysimage" + ext)

    logger.info("Running JuMP model ...")
    try:
        if os.path.isfile(julia_img_file):  # TODO: clean up this try/except block
            logger.info("Found Julia image file {}.".format(julia_img_file))
            t_start = time.time()
            api = LibJulia.load()
            api.sysimage = julia_img_file
            api.init_julia()
            from julia import Main
            time_dict["pyjulia_start_seconds"] = time.time() - t_start
        else:
            t_start = time.time()
            j = julia.Julia()
            from julia import Main
            time_dict["pyjulia_start_seconds"] = time.time() - t_start

        t_start = time.time()
        Main.using("Pkg")
        from julia import Pkg
        time_dict["pyjulia_pkg_seconds"] = time.time() - t_start

        if os.environ.get("SOLVER") == "xpress":
            t_start = time.time()
            Pkg.activate("./julia_envs/Xpress/")
            time_dict["pyjulia_activate_seconds"] = time.time() - t_start

            try:
                t_start = time.time()
                Main.include("reo/src/reopt_xpress_model.jl")
                time_dict["pyjulia_include_model_seconds"] = time.time() - t_start
            except ImportError:
                # should only need to instantiate once
                Pkg.instantiate()
                Main.include("reo/src/reopt_xpress_model.jl")

            t_start = time.time()
            if bau:
                model = Main.reopt_model(float(data["inputs"]["Scenario"]["timeout_seconds"]),
                                         float(data["inputs"]["Scenario"]["optimality_tolerance_bau"]))
            else:
                model = Main.reopt_model(float(data["inputs"]["Scenario"]["timeout_seconds"]),
                                         float(data["inputs"]["Scenario"]["optimality_tolerance_techs"]))
            time_dict["pyjulia_make_model_seconds"] = time.time() - t_start

        elif os.environ.get("SOLVER") == "cbc":
            t_start = time.time()
            Pkg.activate("./julia_envs/Cbc/")
            time_dict["pyjulia_activate_seconds"] = time.time() - t_start

            t_start = time.time()
            Main.include("reo/src/reopt_cbc_model.jl")
            time_dict["pyjulia_include_model_seconds"] = time.time() - t_start

            t_start = time.time()
            model = Main.reopt_model(float(data["inputs"]["Scenario"]["timeout_seconds"]),
                                     float(data["inputs"]["Scenario"]["optimality_tolerance_bau"]))
            time_dict["pyjulia_make_model_seconds"] = time.time() - t_start

        elif os.environ.get("SOLVER") == "scip":
            t_start = time.time()
            Pkg.activate("./julia_envs/SCIP/")
            time_dict["pyjulia_activate_seconds"] = time.time() - t_start

            t_start = time.time()
            Main.include("reo/src/reopt_scip_model.jl")
            time_dict["pyjulia_include_model_seconds"] = time.time() - t_start

            t_start = time.time()
            model = Main.reopt_model(float(data["inputs"]["Scenario"]["timeout_seconds"]),
                                     float(data["inputs"]["Scenario"]["optimality_tolerance_bau"]))
            time_dict["pyjulia_make_model_seconds"] = time.time() - t_start

        else:
            raise REoptFailedToStartError(
                message="The environment variable SOLVER must be set to one of [xpress, cbc, scip].",
                run_uuid=self.run_uuid, user_uuid=self.user_uuid)

        if bau or not data["inputs"]["Scenario"]["use_decomposition_model"]:
            t_start = time.time()
            Main.include("reo/src/reopt.jl")
            time_dict["pyjulia_include_reopt_seconds"] = time.time() - t_start

            t_start = time.time()
            results = Main.reopt(model, reopt_inputs)
            time_dict["pyjulia_run_reopt_seconds"] = time.time() - t_start
        else:
            t_start = time.time()
            Main.include("reo/src/reopt_decomposed.jl")
            time_dict["pyjulia_include_reopt_seconds"] = time.time() - t_start

            t_start = time.time()
            results = run_decomposed_model(data, model, reopt_inputs)
            time_dict["pyjulia_run_reopt_seconds"] = time.time() - t_start

        results = scrub_numpy_arrays_from_dict(results)
        results.update(time_dict)

    except Exception as e:
        if isinstance(e, REoptFailedToStartError):
            raise e
        elif "DimensionMismatch" in e.args[0]:
            # JuMP may mishandle a timeout when no feasible solution is returned
            msg = "Optimization exceeded timeout: {} seconds.".format(data["inputs"]["Scenario"]["timeout_seconds"])
            logger.info(msg)
            raise OptimizationTimeout(task=name, message=msg, run_uuid=self.run_uuid, user_uuid=self.user_uuid)

        exc_type, exc_value, exc_traceback = sys.exc_info()
        logger.error("reopt.py raised an unexpected error. UUID: " + str(self.run_uuid))
        raise UnexpectedError(exc_type, exc_value, traceback.format_tb(exc_traceback),
                              task=name, run_uuid=self.run_uuid, user_uuid=self.user_uuid)
    else:
        status = results["status"]
        logger.info("REopt run successful. Status {}".format(status))
        if bau:
            dfm['results_bau'] = results  # will be a flat dict
        else:
            dfm['results'] = results

        if status.strip().lower() == 'timed-out':
            msg = "Optimization exceeded timeout: {} seconds.".format(data["inputs"]["Scenario"]["timeout_seconds"])
            logger.info(msg)
            raise OptimizationTimeout(task=name, message=msg, run_uuid=self.run_uuid, user_uuid=self.user_uuid)
        elif status.strip().lower() != 'optimal':
            logger.error("REopt status not optimal. Raising NotOptimal Exception.")
            raise NotOptimal(task=name, run_uuid=self.run_uuid, status=status.strip(), user_uuid=self.user_uuid)

    profiler.profileEnd()
    ModelManager.updateModel('ProfileModel', {name + '_seconds': profiler.getDuration()}, run_uuid)

    # reduce the amount of data being transferred between tasks
    if bau:
        del dfm['reopt_inputs_bau']
    else:
        del dfm['reopt_inputs']
    return dfm
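# The SOLVER environment variable drives which Julia environment and model file get loaded above.
# Below is a small restatement of that dispatch as a lookup table, for illustration only; the helper
# name solver_config is hypothetical and not part of this codebase.
import os

SOLVER_CONFIG = {
    "xpress": ("./julia_envs/Xpress/", "reo/src/reopt_xpress_model.jl"),
    "cbc": ("./julia_envs/Cbc/", "reo/src/reopt_cbc_model.jl"),
    "scip": ("./julia_envs/SCIP/", "reo/src/reopt_scip_model.jl"),
}

def solver_config():
    """Return (julia_env_path, model_file) for the configured solver, or raise a ValueError."""
    solver = os.environ.get("SOLVER")
    try:
        return SOLVER_CONFIG[solver]
    except KeyError:
        raise ValueError("The environment variable SOLVER must be set to one of [xpress, cbc, scip].")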
def resilience_stats(request, run_uuid=None, financial_check=None):
    """
    Run the outage simulator for a given run_uuid
    :param request:
    :param run_uuid:
    :return: {"resilience_by_timestep",
              "resilience_hours_min",
              "resilience_hours_max",
              "resilience_hours_avg",
              "outage_durations",
              "probs_of_surviving",
              }
    """
    try:
        uuid.UUID(run_uuid)  # raises ValueError if not valid uuid
    except ValueError as e:
        if e.args[0] == "badly formed hexadecimal UUID string":
            return JsonResponse({"Error": str(e.args[0])}, status=400)
        else:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            err = UnexpectedError(exc_type, exc_value.args[0], exc_traceback, task='resilience_stats',
                                  run_uuid=run_uuid)
            err.save_to_db()
            return JsonResponse({"Error": str(err.message)}, status=400)

    try:  # run the outage simulator
        scenario = ScenarioModel.objects.get(run_uuid=run_uuid)
        if scenario.status == "Optimizing...":
            raise ScenarioOptimizing
        elif "error" in scenario.status.lower():
            raise ScenarioErrored

        if financial_check == "financial_check":
            query = request.GET
            financial_uuid = query['financial_uuid']
            scenario = ScenarioModel.objects.get(run_uuid=financial_uuid)
            if scenario.status == "Optimizing...":
                raise ScenarioOptimizing
            elif "error" in scenario.status.lower():
                raise ScenarioErrored

            # retrieve system sizes from the db
            resilience_result = ModelManager.make_response(run_uuid)
            financial_result = ModelManager.make_response(financial_uuid)
            resilience_size = parse_system_sizes(resilience_result["outputs"]["Scenario"]["Site"])
            financial_size = parse_system_sizes(financial_result["outputs"]["Scenario"]["Site"])

            results = simulate_outage(resilience_run_site_result=resilience_size,
                                      financial_run_site_result=financial_size,
                                      financial_check=financial_check)
            results = {"survives_specified_outage": results}
        else:
            try:
                query = request.GET
                bau = query['bau'] in ["True", "true", "1"]
            except:
                bau = False
            wtch = True

            try:  # see if a ResilienceModel was already created
                rm = ResilienceModel.objects.get(scenariomodel=scenario)
                results = model_to_dict(rm)
                if bau and "probs_of_surviving_bau" not in results:
                    wtch = False
                    raise Exception('no resilience_stat_bau in database')
                if not bau:
                    # iterate over a copy of the keys so entries can be popped safely
                    for k in list(results.keys()):
                        if k.endswith("_bau"):
                            results.pop(k)  # remove bau items that the user did not request
                # remove items that the user does not need
                del results['scenariomodel']
                del results['id']
            except:
                load_profile = LoadProfileModel.objects.filter(run_uuid=scenario.run_uuid).first()
                gen = GeneratorModel.objects.filter(run_uuid=scenario.run_uuid).first()
                batt = StorageModel.objects.filter(run_uuid=scenario.run_uuid).first()
                pv = PVModel.objects.filter(run_uuid=scenario.run_uuid).first()
                financial = FinancialModel.objects.filter(run_uuid=scenario.run_uuid).first()
                wind = WindModel.objects.filter(run_uuid=scenario.run_uuid).first()

                batt_roundtrip_efficiency = batt.internal_efficiency_pct \
                                            * batt.inverter_efficiency_pct \
                                            * batt.rectifier_efficiency_pct

                results = dict()
                kwargs_dict = dict()
                pool = Pool(processes=2 if wtch and bau else 1)

                if wtch:
                    kwargs = {
                        "batt_kwh": batt.size_kwh or 0,
                        "batt_kw": batt.size_kw or 0,
                        "pv_kw_ac_hourly": pv.year_one_power_production_series_kw,
                        "wind_kw_ac_hourly": wind.year_one_power_production_series_kw,
                        "init_soc": batt.year_one_soc_series_pct,
                        "critical_loads_kw": load_profile.critical_load_series_kw,
                        "batt_roundtrip_efficiency": batt_roundtrip_efficiency,
                        "diesel_kw": gen.size_kw or 0,
                        "fuel_available": gen.fuel_avail_gal,
                        "b": gen.fuel_intercept_gal_per_hr,
                        "m": gen.fuel_slope_gal_per_kwh,
                        "diesel_min_turndown": gen.min_turn_down_pct
                    }
                    kwargs_dict["wtch"] = kwargs

                if bau:  # only PV and the diesel generator may have an existing size
                    kwargs = {
                        "batt_kwh": 0,
                        "batt_kw": 0,
                        "pv_kw_ac_hourly": [p * pv.size_kw * pv.existing_kw
                                            for p in pv.year_one_power_production_series_kw],
                        "critical_loads_kw": load_profile.critical_load_series_kw,
                        "diesel_kw": gen.existing_kw or 0,
                        "fuel_available": gen.fuel_avail_gal,
                        "b": gen.fuel_intercept_gal_per_hr,
                        "m": gen.fuel_slope_gal_per_kwh,
                        "diesel_min_turndown": gen.min_turn_down_pct
                    }
                    kwargs_dict["bau"] = kwargs

                p = {k: pool.apply_async(simulate_outage, tuple(), v) for k, v in kwargs_dict.items()}
                pool.close()
                pool.join()

                for k, v in p.items():
                    if k == 'wtch':
                        results.update(v.get())
                    if k == 'bau':
                        results.update({key + '_bau': val for key, val in v.get().items()})

                """
                add avg_crit_ld and pwf to results so that the avoided outage cost can be determined as:
                    avoided_outage_costs_us_dollars = resilience_hours_avg *
                                                      value_of_lost_load_us_dollars_per_kwh *
                                                      avg_crit_ld *
                                                      present_worth_factor
                """
                avg_critical_load = round(sum(load_profile.critical_load_series_kw) /
                                          len(load_profile.critical_load_series_kw), 5)

                if load_profile.outage_is_major_event:
                    # assume that the outage occurs only once in the analysis period
                    present_worth_factor = 1
                else:
                    present_worth_factor = annuity(financial.analysis_years, financial.escalation_pct,
                                                   financial.offtaker_discount_pct)

                results.update({
                    "present_worth_factor": present_worth_factor,
                    "avg_critical_load": avg_critical_load,
                })

                try:  # new model
                    try:
                        rm = ResilienceModel.create(scenariomodel=scenario)
                    except Exception as e:
                        if isinstance(e, REoptError):
                            return JsonResponse({"Error": e.message}, status=500)
                        raise e
                    ResilienceModel.objects.filter(id=rm.id).update(**results)
                except IntegrityError:
                    # resilience_stats was previously run with bau=false; update and return both with-tech and bau values
                    ResilienceModel.objects.filter(id=rm.id).update(**results)
                    rm = ResilienceModel.objects.get(scenariomodel=scenario)
                    results = model_to_dict(rm)
                    # remove items that the user does not need
                    del results['scenariomodel']
                    del results['id']

            results.update({
                "help_text": "The present_worth_factor and avg_critical_load are provided such that one can "
                             "calculate an avoided outage cost in dollars by multiplying a value of lost load "
                             "($/kWh) times the avg_critical_load, resilience_hours_avg, and present_worth_factor. "
                             "Note that if the outage event is 'major', i.e. only occurs once, then the "
                             "present_worth_factor is 1."
            })

        response = JsonResponse(results)
        return response

    except ScenarioOptimizing:
        return JsonResponse({"Error": "The scenario is still optimizing. Please try again later."},
                            content_type='application/json', status=500)
    except ScenarioErrored:
        return JsonResponse({"Error": "An error occurred in the scenario. Please check the messages from your results."},
                            content_type='application/json', status=500)
    except Exception as e:
        if type(e).__name__ == 'DoesNotExist':
            msg = "Scenario {} does not exist.".format(run_uuid)
            return JsonResponse({"Error": msg}, content_type='application/json', status=404)
        else:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            err = UnexpectedError(exc_type, exc_value.args[0], exc_traceback, task='resilience_stats',
                                  run_uuid=run_uuid)
            err.save_to_db()
            return JsonResponse({"Error": err.message}, status=500)
def process_results(self, dfm_list, data, meta, saveToDB=True):
    """
    Processes the two outputs from reopt.jl: the bau and with-Tech scenarios
    :param self: celery.Task
    :param dfm_list: list of serialized dat_file_managers (passed from the group of REopt runs)
    :param data: nested dict mirroring the API response format
    :param meta: {'run_uuid': run_uuid, 'api_version': api_version} from api.py
    :param saveToDB: boolean for saving postgres models
    :return: None
    """

    class Results:
        bau_attributes = [
            "lcc",
            "fuel_used_gal",
            "year_one_energy_cost",
            "year_one_demand_cost",
            "year_one_fixed_cost",
            "year_one_min_charge_adder",
            "year_one_bill",
            "year_one_utility_kwh",
            "total_energy_cost",
            "total_demand_cost",
            "total_fixed_cost",
            "total_min_charge_adder",
            "net_capital_costs_plus_om",
            "pv_net_fixed_om_costs",
            "gen_net_fixed_om_costs",
            "gen_net_variable_om_costs",
            "gen_total_fuel_cost",
            "gen_year_one_fuel_cost",
            "gen_year_one_variable_om_costs",
        ]

        def __init__(self, results_dict, results_dict_bau, dfm):
            """
            Convenience (and legacy) class for handling REopt results
            :param results_dict: flat dict of results from reopt.jl
            :param results_dict_bau: flat dict of results from reopt.jl for the bau case
            """
            self.profiler = Profiler()
            self.dfm = dfm

            # remove invalid sizes due to optimization error margins
            for r in [results_dict, results_dict_bau]:
                for key, value in r.items():
                    if key.endswith('kw') or key.endswith('kwh'):
                        if value < 0:
                            r[key] = 0

            # add bau outputs to results_dict
            for k in Results.bau_attributes:
                results_dict[k + '_bau'] = results_dict_bau[k]

            # b/c of the PV & PVNM techs in REopt, if both are zero then no value is written to REopt_results.json
            if results_dict.get('pv_kw') is None:
                results_dict['pv_kw'] = 0

            # if wind is zero then no value is written to REopt results.json
            if results_dict.get("wind_kw") is None:
                results_dict['wind_kw'] = 0

            # if generator is zero then no value is written to REopt results.json
            if results_dict.get("generator_kw") is None:
                results_dict['generator_kw'] = 0

            results_dict['npv'] = results_dict['lcc_bau'] - results_dict['lcc']

            self.results_dict = results_dict
            self.nested_outputs = self.setup_nested()

        def get_output(self):
            self.get_nested()
            output_dict = self.nested_outputs
            return output_dict

        @staticmethod
        def setup_nested():
            """
            Set up an empty nested dict for outputs.
            :return: nested dict for outputs with values set to None. Results are filled in using the "get_nested" method.
            """
            nested_outputs = dict()
            nested_outputs["Scenario"] = dict()
            nested_outputs["Scenario"]["Profile"] = dict()
            nested_outputs["Scenario"]["Site"] = dict()

            # Loop through all sub-site dicts and initialize their keys
            for name, d in nested_output_definitions["outputs"]["Scenario"]["Site"].items():
                nested_outputs["Scenario"]["Site"][name] = dict()
                for k in d.keys():
                    nested_outputs["Scenario"]["Site"][name].setdefault(k, None)

            return nested_outputs

        def get_nested(self):
            """
            Translates the "flat" results_dict (which is just the JSON output from the REopt mosel code)
            into the nested output dict.
            :return: None (modifies self.nested_outputs)
            """
            # TODO: move the filling in of outputs to reopt.jl
            self.nested_outputs["Scenario"]["status"] = self.results_dict["status"]

            # format assumes that the flat format is still the primary default
            for name, d in nested_output_definitions["outputs"]["Scenario"]["Site"].items():
                # `site` aliases the mutable sub-dict, so the assignments below fill self.nested_outputs in place
                site = self.nested_outputs["Scenario"]["Site"][name]
                if name == "LoadProfile":
                    site["year_one_electric_load_series_kw"] = self.results_dict.get("Load")
                    site["critical_load_series_kw"] = self.dfm["LoadProfile"].get("critical_load_series_kw")
                    site["annual_calculated_kwh"] = self.dfm["LoadProfile"].get("annual_kwh")
                    site["resilience_check_flag"] = self.dfm["LoadProfile"].get("resilience_check_flag")
                    site["sustain_hours"] = self.dfm["LoadProfile"].get("sustain_hours")
                elif name == "Financial":
                    site["lcc_us_dollars"] = self.results_dict.get("lcc")
                    site["lcc_bau_us_dollars"] = self.results_dict.get("lcc_bau")
                    site["npv_us_dollars"] = self.results_dict.get("npv")
                    site["net_capital_costs_plus_om_us_dollars"] = self.results_dict.get("net_capital_costs_plus_om")
                    site["net_capital_costs"] = self.results_dict.get("net_capital_costs")
                    site["microgrid_upgrade_cost_us_dollars"] = \
                        self.results_dict.get("net_capital_costs") \
                        * data['inputs']['Scenario']['Site']['Financial']['microgrid_upgrade_cost_pct']
                elif name == "PV":
                    pv_model = PVModel.objects.get(run_uuid=meta['run_uuid'])
                    site["size_kw"] = self.results_dict.get("pv_kw", 0)
                    site["average_yearly_energy_produced_kwh"] = self.results_dict.get("average_yearly_pv_energy_produced")
                    site["average_yearly_energy_exported_kwh"] = self.results_dict.get("average_annual_energy_exported_pv")
                    site["year_one_energy_produced_kwh"] = self.results_dict.get("year_one_energy_produced")
                    site["year_one_to_battery_series_kw"] = self.results_dict.get("PVtoBatt")
                    site["year_one_to_load_series_kw"] = self.results_dict.get("PVtoLoad")
                    site["year_one_to_grid_series_kw"] = self.results_dict.get("PVtoGrid")
                    site["year_one_power_production_series_kw"] = self.compute_total_power(name)
                    site["existing_pv_om_cost_us_dollars"] = self.results_dict.get("pv_net_fixed_om_costs_bau")
                    site["station_latitude"] = pv_model.station_latitude
                    site["station_longitude"] = pv_model.station_longitude
                    site["station_distance_km"] = pv_model.station_distance_km
                elif name == "Wind":
                    site["size_kw"] = self.results_dict.get("wind_kw", 0)
                    site["average_yearly_energy_produced_kwh"] = self.results_dict.get("average_wind_energy_produced")
                    site["average_yearly_energy_exported_kwh"] = self.results_dict.get("average_annual_energy_exported_wind")
                    site["year_one_energy_produced_kwh"] = self.results_dict.get("year_one_wind_energy_produced")
                    site["year_one_to_battery_series_kw"] = self.results_dict.get("WINDtoBatt")
                    site["year_one_to_load_series_kw"] = self.results_dict.get("WINDtoLoad")
                    site["year_one_to_grid_series_kw"] = self.results_dict.get("WINDtoGrid")
                    site["year_one_power_production_series_kw"] = self.compute_total_power(name)
                elif name == "Storage":
                    site["size_kw"] = self.results_dict.get("batt_kw", 0)
                    site["size_kwh"] = self.results_dict.get("batt_kwh", 0)
                    site["year_one_to_load_series_kw"] = self.results_dict.get("ElecFromStore")
                    site["year_one_to_grid_series_kw"] = None
                    site["year_one_soc_series_pct"] = self.results_dict.get("year_one_soc_series_pct")
                elif name == "ElectricTariff":
                    site["year_one_energy_cost_us_dollars"] = self.results_dict.get("year_one_energy_cost")
                    site["year_one_demand_cost_us_dollars"] = self.results_dict.get("year_one_demand_cost")
                    site["year_one_fixed_cost_us_dollars"] = self.results_dict.get("year_one_fixed_cost")
                    site["year_one_min_charge_adder_us_dollars"] = self.results_dict.get("year_one_min_charge_adder")
                    site["year_one_energy_cost_bau_us_dollars"] = self.results_dict.get("year_one_energy_cost_bau")
                    site["year_one_demand_cost_bau_us_dollars"] = self.results_dict.get("year_one_demand_cost_bau")
                    site["year_one_fixed_cost_bau_us_dollars"] = self.results_dict.get("year_one_fixed_cost_bau")
                    site["year_one_min_charge_adder_bau_us_dollars"] = self.results_dict.get("year_one_min_charge_adder_bau")
                    site["total_energy_cost_us_dollars"] = self.results_dict.get("total_energy_cost")
                    site["total_demand_cost_us_dollars"] = self.results_dict.get("total_demand_cost")
                    site["total_fixed_cost_us_dollars"] = self.results_dict.get("total_fixed_cost")
                    site["total_min_charge_adder_us_dollars"] = self.results_dict.get("total_min_charge_adder")
                    site["total_energy_cost_bau_us_dollars"] = self.results_dict.get("total_energy_cost_bau")
                    site["total_demand_cost_bau_us_dollars"] = self.results_dict.get("total_demand_cost_bau")
                    site["total_fixed_cost_bau_us_dollars"] = self.results_dict.get("total_fixed_cost_bau")
                    site["total_min_charge_adder_bau_us_dollars"] = self.results_dict.get("total_min_charge_adder_bau")
                    site["year_one_bill_us_dollars"] = self.results_dict.get("year_one_bill")
                    site["year_one_bill_bau_us_dollars"] = self.results_dict.get("year_one_bill_bau")
                    site["year_one_export_benefit_us_dollars"] = self.results_dict.get("year_one_export_benefit")
                    site["total_export_benefit_us_dollars"] = self.results_dict.get("total_export_benefit")
                    site["year_one_energy_cost_series_us_dollars_per_kwh"] = \
                        self.dfm.get('year_one_energy_cost_series_us_dollars_per_kwh')
                    site["year_one_demand_cost_series_us_dollars_per_kw"] = \
                        self.dfm.get('year_one_demand_cost_series_us_dollars_per_kw')
                    site["year_one_to_load_series_kw"] = self.results_dict.get('GridToLoad')
                    site["year_one_to_battery_series_kw"] = self.results_dict.get('GridToBatt')
                    site["year_one_energy_supplied_kwh"] = self.results_dict.get("year_one_utility_kwh")
                    site["year_one_energy_supplied_kwh_bau"] = self.results_dict.get("year_one_utility_kwh_bau")
                elif name == "Generator":
                    site["size_kw"] = self.results_dict.get("generator_kw", 0)
                    site["fuel_used_gal"] = self.results_dict.get("fuel_used_gal")
                    site["fuel_used_gal_bau"] = self.results_dict.get("fuel_used_gal_bau")
                    site["year_one_to_load_series_kw"] = self.results_dict.get('GENERATORtoLoad')
                    site["year_one_to_battery_series_kw"] = self.results_dict.get('GENERATORtoBatt')
                    site["year_one_to_grid_series_kw"] = self.results_dict.get('GENERATORtoGrid')
                    site["average_yearly_energy_produced_kwh"] = self.results_dict.get("average_yearly_gen_energy_produced")
                    site["average_yearly_energy_exported_kwh"] = self.results_dict.get("average_annual_energy_exported_gen")
                    site["year_one_energy_produced_kwh"] = self.results_dict.get("year_one_gen_energy_produced")
                    site["year_one_power_production_series_kw"] = self.compute_total_power(name)
                    site["existing_gen_total_fixed_om_cost_us_dollars"] = self.results_dict.get("gen_net_fixed_om_costs_bau")
                    site["existing_gen_total_variable_om_cost_us_dollars"] = \
                        self.results_dict.get("gen_net_variable_om_costs_bau")
                    site["existing_gen_year_one_variable_om_cost_us_dollars"] = \
                        self.results_dict.get("gen_year_one_variable_om_costs_bau")
                    site["total_variable_om_cost_us_dollars"] = self.results_dict.get("gen_net_variable_om_costs")
                    site["year_one_variable_om_cost_us_dollars"] = self.results_dict.get("gen_year_one_variable_om_costs")
                    site["total_fuel_cost_us_dollars"] = self.results_dict.get("gen_total_fuel_cost")
                    site["year_one_fuel_cost_us_dollars"] = self.results_dict.get("gen_year_one_fuel_cost")
                    site["existing_gen_total_fuel_cost_us_dollars"] = self.results_dict.get("gen_total_fuel_cost_bau")
                    site["existing_gen_year_one_fuel_cost_us_dollars"] = self.results_dict.get("gen_year_one_fuel_cost_bau")

            self.profiler.profileEnd()
            self.nested_outputs["Scenario"]["Profile"]["parse_run_outputs_seconds"] = self.profiler.getDuration()

        def compute_total_power(self, tech):
            power_lists = list()
            d = self.nested_outputs["Scenario"]["Site"][tech]
            if d.get("year_one_to_load_series_kw") is not None:
                power_lists.append(d["year_one_to_load_series_kw"])
            if d.get("year_one_to_battery_series_kw") is not None:
                power_lists.append(d["year_one_to_battery_series_kw"])
            if d.get("year_one_to_grid_series_kw") is not None:
                power_lists.append(d["year_one_to_grid_series_kw"])

            power = [sum(x) for x in zip(*power_lists)]
            return power

    self.data = data
    self.run_uuid = data['outputs']['Scenario']['run_uuid']
    self.user_uuid = data['outputs']['Scenario'].get('user_uuid')

    try:
        results_object = Results(results_dict=dfm_list[0]['results'],
                                 results_dict_bau=dfm_list[1]['results_bau'],
                                 dfm=dfm_list[0])
        results = results_object.get_output()
        data['outputs'].update(results)
        data['outputs']['Scenario'].update(meta)  # run_uuid and api_version

        # Calculate avoided outage costs
        calc_avoided_outage_costs(data, present_worth_factor=dfm_list[0]['pwf_e'])

        if saveToDB:
            ModelManager.update(data, run_uuid=self.run_uuid)

    except Exception:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        log.info("Results.py raising the error: {}, detail: {}".format(exc_type, exc_value))
        raise UnexpectedError(exc_type, exc_value.args[0], traceback.format_tb(exc_traceback), task=self.name,
                              run_uuid=self.run_uuid, user_uuid=self.user_uuid)