Example #1
    def process_request(self, arg_set):
        # keep only the whitelisted, non-empty constructor arguments
        inst_args = ["load_data", "temp_data", "timezone", "temp_units", "sq_ft"]
        inst_args = {a_name: arg_set[a_name] for a_name in inst_args if arg_set.get(a_name)}

        # keep only the whitelisted, non-empty event-window arguments
        event_args = ["start_at", "end_at"]
        event_args = {a_name: arg_set[a_name] for a_name in event_args if arg_set.get(a_name)}

        # build the load shape and summarize performance over the event window
        ls = Loadshape(**inst_args)
        event_performance_data = ls.event_performance(**event_args)

        return event_performance_data
Example #2
    def process_request(self, arg_set):
        inst_args = [
            "load_data", "temp_data", "timezone", "temp_units", "sq_ft"
        ]
        inst_args = {
            a_name: arg_set[a_name]
            for a_name in inst_args if arg_set.get(a_name)
        }

        event_args = ["start_at", "end_at"]
        event_args = {
            a_name: arg_set[a_name]
            for a_name in event_args if arg_set.get(a_name)
        }

        ls = Loadshape(**inst_args)
        event_performance_data = ls.event_performance(**event_args)

        return event_performance_data
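
Both handlers above use the same whitelist-and-filter idiom: they pull only the recognized, non-empty keys out of arg_set, pass the first group to the Loadshape constructor, and the second to event_performance. A minimal, self-contained sketch of just that idiom follows; the arg_set values are made up for illustration and are not part of the snippets above.

# illustrative request payload (values assumed, not taken from the examples)
arg_set = {
    "load_data": "examples/data/load_data.csv",
    "temp_data": None,                          # falsy values are dropped by the filter
    "timezone": "America/Los_Angeles",
    "start_at": "2013-09-23 14:00:00",
    "end_at": "2013-09-23 16:00:00",
}

inst_args = ["load_data", "temp_data", "timezone", "temp_units", "sq_ft"]
inst_args = {a_name: arg_set[a_name] for a_name in inst_args if arg_set.get(a_name)}
# -> {'load_data': 'examples/data/load_data.csv', 'timezone': 'America/Los_Angeles'}

event_args = ["start_at", "end_at"]
event_args = {a_name: arg_set[a_name] for a_name in event_args if arg_set.get(a_name)}
# -> {'start_at': '2013-09-23 14:00:00', 'end_at': '2013-09-23 16:00:00'}

Note that the arg_set.get(a_name) guard drops falsy values as well as missing keys, so an explicit sq_ft of 0 would also be filtered out before the Loadshape constructor sees it.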
Example #3
my_load_shape.add_named_exclusion("US_HOLIDAYS")

# ----- add tariff to enable cost calculations ----- #
tariff = Tariff(tariff_file=TARIFF, timezone='America/Los_Angeles')
tariff.add_dr_period("2013-09-23 14:00:00", "2013-09-23 16:00:00")
tariff.add_dr_period("2013-09-27 14:00:00", "2013-09-27 16:15:00")

my_load_shape.set_tariff(tariff)

# ----- build the baseline to use as a reference for performance ----- #
event_baseline = my_load_shape.baseline(weighting_days=14,
                                        modeling_interval=900,
                                        step_size=900)

# ----- calculate the performance summary for the event period ----- #
event_performance = my_load_shape.event_performance(DR_EVENT_START, DR_EVENT_END)

# ----- calculate the performance summary for the whole day ----- #
event_day_performance = my_load_shape.event_performance(DR_EVENT_DAY_START, DR_EVENT_DAY_END)

out = { "power_data": {} }
out["name"]                   = "DR Event - %s" % DR_EVENT_DAY_START
out["building"]               = BUILDING_NAME
out["event_start_at"]         = DR_EVENT_START
out["event_end_at"]           = DR_EVENT_END
out["dr_event_stats"]         = event_performance
out["dr_event_day_stats"]     = event_day_performance
out["power_data"]["actual"]   = my_load_shape.actual_data(DR_EVENT_DAY_START, DR_EVENT_DAY_END, step_size=900)
out["power_data"]["baseline"] = my_load_shape.baseline_data(DR_EVENT_DAY_START, DR_EVENT_DAY_END, step_size=900)

# ----- write output to file ----- #
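# (completion sketch: the original snippet ends here; this mirrors the write
#  step used in the cumulative-sum examples below, and the file name is
#  illustrative rather than taken from the source)
file_name = path.join(EXAMPLES_DIR, "output", "dr-event-example.json")
write_json(data=out, file_name=file_name)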
Example #4
my_load_shape.add_exclusion("2013-09-23 00:00:00", "2013-09-24 00:00:00")
my_load_shape.add_exclusion("2013-09-27 00:00:00", "2013-09-28 00:00:00")
my_load_shape.add_named_exclusion("US_HOLIDAYS")

# ----- generate the appropriate baseline ----- #
baseline = my_load_shape.baseline(weighting_days=CUMULATIVE_SUM_WEIGHTING_DAYS,
                                  modeling_interval=900,
                                  step_size=900)

# ----- set up the cumulative sum dates ----- #
tz = utils.get_timezone('America/Los_Angeles')
start_at = utils.str_to_datetime(CUMULATIVE_SUM_START, tz)
end_at = utils.int_to_datetime(my_load_shape.baseline_series.end_at(), tz)

# ----- calculate long term event performance and cumulative sum series ----- #
event_performance = my_load_shape.event_performance(start_at, end_at)
cumulative_sum_series = my_load_shape.cumulative_sum(start_at, end_at)

# ----- assemble a payload summarizing the cumulative sum ----- #
out = {}
out["building"]             = BUILDING_NAME
out["event_name"]           = CUMULATIVE_SUM_NAME
out["sum_start_at"]         = start_at.strftime("%Y-%m-%d %H:%M:%S")
out["sum_end_at"]           = end_at.strftime("%Y-%m-%d %H:%M:%S")
out["event_performance"]    = event_performance
out["cumulative_sum_data"] 	= cumulative_sum_series.data()

# ----- write output to file ----- #
file_name = path.join(EXAMPLES_DIR, "output", "cumulative-sum-example.json")
write_json(data=out, file_name=file_name)
Example #5
my_load_shape.add_exclusion("2013-09-23 00:00:00", "2013-09-24 00:00:00")
my_load_shape.add_exclusion("2013-09-27 00:00:00", "2013-09-28 00:00:00")
my_load_shape.add_named_exclusion("US_HOLIDAYS")

# ----- generate the appropriate baseline ----- #
baseline = my_load_shape.baseline(weighting_days=CUMULATIVE_SUM_WEIGHTING_DAYS,
                                  modeling_interval=900,
                                  step_size=900)

# ----- set up the cumulative sum dates ----- #
tz = utils.get_timezone('America/Los_Angeles')
start_at = utils.str_to_datetime(CUMULATIVE_SUM_START, tz)
end_at = utils.int_to_datetime(my_load_shape.baseline_series.end_at(), tz)

# ----- calculate long term event performance and cumulative sum series ----- #
event_performance = my_load_shape.event_performance(start_at, end_at)
cumulative_sum_series = my_load_shape.cumulative_sum(start_at, end_at)

# ----- assemble a payload summarizing the cumulative sum ----- #
out = {}
out["building"] = BUILDING_NAME
out["event_name"] = CUMULATIVE_SUM_NAME
out["sum_start_at"] = start_at.strftime("%Y-%m-%d %H:%M:%S")
out["sum_end_at"] = end_at.strftime("%Y-%m-%d %H:%M:%S")
out["event_performance"] = event_performance
out["cumulative_sum_data"] = cumulative_sum_series.data()

# ----- write output to file ----- #
file_name = path.join(EXAMPLES_DIR, "output", "cumulative-sum-example.json")
write_json(data=out, file_name=file_name)
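
Examples #3 through #5 assume an existing my_load_shape instance plus constants such as BUILDING_NAME, DR_EVENT_START, and CUMULATIVE_SUM_START that the snippets never define. Below is a minimal setup sketch of what they imply, using only constructor keywords that already appear in Examples #1 and #2; the import line, file paths, and constant values are assumptions for illustration, not taken from the source.

from loadshape import Loadshape   # assumed import path for the library used above

# illustrative constants; the real example scripts define these elsewhere
BUILDING_NAME = "Example Building"
CUMULATIVE_SUM_NAME = "Example Cumulative Sum"
CUMULATIVE_SUM_START = "2013-09-01 00:00:00"
CUMULATIVE_SUM_WEIGHTING_DAYS = 14

# constructor keywords mirror the whitelist used in Examples #1 and #2
my_load_shape = Loadshape(load_data="examples/data/load_data.csv",   # assumed path
                          temp_data="examples/data/temp_data.csv",   # assumed path
                          timezone="America/Los_Angeles",
                          temp_units="F",
                          sq_ft=50000)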