def test_diff(self):
    """diff() should yield the kW difference and cumulative-kWh series
    for a flat 5 kW load against a flat 4 kW tz-aware baseline."""
    from loadshape.utils import get_timezone

    t0 = 1379487600
    stamps = [t0 + step * 900 for step in range(5)]

    load_data = [(t, 5.0) for t in stamps]
    base_data = [(t, 4.0) for t in stamps]

    # The kW diff series omits the first interval.
    expected_kw_diff = [(t, 1) for t in stamps[1:]]
    # 1 kW sustained over a 900 s step integrates to 0.25 kWh per step.
    expected_cum_kwh_diff = [(t, i * 0.25) for i, t in enumerate(stamps)]
    # 4 kW sustained over a 900 s step integrates to 1.0 kWh per step.
    expected_cum_kwh_base = [(t, i * 1.0) for i, t in enumerate(stamps)]

    ls = Loadshape(load_data, timezone='America/Los_Angeles', log_level=30)
    ls.baseline_series = Series(base_data, get_timezone('America/Los_Angeles'))

    kw_diff, kw_base, cum_kwh_diff, cum_kwh_base = ls.diff()

    assert kw_diff.data() == expected_kw_diff
    assert cum_kwh_diff.data() == expected_cum_kwh_diff
    assert kw_base.data() == base_data[1:]
    assert cum_kwh_base.data() == expected_cum_kwh_base
def test_baseline_with_temp(self):
    """A baseline built from the kW + temperature fixture files is non-empty."""
    ls = Loadshape(
        self.get_kw_data_filepath(),
        self.get_temp_data_filepath(),
        timezone='America/Los_Angeles',
        log_level=30,
    )
    assert len(ls.baseline().data()) > 0
def test_one_step_output_time_series_generator(self):
    """With step_count=1 the generated series is exactly its two endpoints."""
    begin = 1379487600
    finish = 1379488500

    ls = Loadshape([], log_level=40, timezone='America/Los_Angeles')
    series = ls._build_output_time_series(begin, finish, step_count=1)

    assert len(series.data()) == 2
    assert series.start_at() == begin
    assert series.end_at() == finish
def test_many_step_output_time_series_generator(self):
    """With step_size=900 over five steps the series has six points inclusive."""
    begin = 1379487600
    finish = begin + (900 * 5)

    ls = Loadshape([], log_level=40, timezone='America/Los_Angeles')
    series = ls._build_output_time_series(begin, finish, step_size=900)

    assert len(series.data()) == 6
    assert series.start_at() == begin
    assert series.end_at() == finish
def test_cost(self):
    """cost() should price a flat 5 kW load against the test tariff."""
    t0 = 1379487600
    load_data = [(t0 + i * 900, 5.0) for i in range(5)]

    # First interval carries no charge; each later 15-min step costs 0.17.
    expected_cost = [(t0, 0.0)] + [(t0 + i * 900, 0.17) for i in range(1, 5)]
    # Running totals reflect the rounding the library applies per step.
    expected_cumulative = [
        (t0, 0.0),
        (t0 + 900, 0.17),
        (t0 + 1800, 0.34),
        (t0 + 2700, 0.52),
        (t0 + 3600, 0.69),
    ]

    tariff = Tariff(tariff_file=self.get_test_tariff(),
                    timezone='America/Los_Angeles')
    ls = Loadshape(load_data, timezone='America/Los_Angeles',
                   log_level=30, tariff=tariff)

    cost_series, cumulative_series = ls.cost()

    assert cost_series.data() == expected_cost
    assert cumulative_series.data() == expected_cumulative
def process_request(self, arg_set):
    """Build a Loadshape from the request args and return its event performance.

    Only args that are present and truthy in *arg_set* are forwarded.
    """
    def pick(names):
        # Keep only the request args that are present and truthy.
        return {name: arg_set[name] for name in names if arg_set.get(name)}

    ctor_kwargs = pick(["load_data", "temp_data", "timezone",
                        "temp_units", "sq_ft"])
    event_kwargs = pick(["start_at", "end_at"])

    return Loadshape(**ctor_kwargs).event_performance(**event_kwargs)
def process_request(self, arg_set):
    """Run a baseline calculation for the request and package the results.

    Returns a dict with the baseline series data and the model error stats.
    Only args that are present and truthy in *arg_set* are forwarded.
    """
    def pick(names):
        # Drop absent/falsy request args so Loadshape defaults apply.
        return {name: arg_set[name] for name in names if arg_set.get(name)}

    ls = Loadshape(**pick(["load_data", "temp_data", "timezone",
                           "temp_units", "sq_ft"]))
    baseline = ls.baseline(**pick(["start_at", "end_at", "weighting_days",
                                   "modeling_interval", "step_size"]))

    return {"baseline": baseline.data(), "error_stats": ls.error_stats}
def process_request(self, arg_set):
    """Compute the cumulative kWh difference series for the request.

    Only args that are present and truthy in *arg_set* are forwarded.
    """
    def pick(names):
        # Keep only the request args that are present and truthy.
        return {name: arg_set[name] for name in names if arg_set.get(name)}

    ls = Loadshape(**pick(["load_data", "temp_data", "timezone",
                           "temp_units", "sq_ft"]))
    series = ls.cumulative_sum(**pick(["start_at", "end_at", "step_size"]))

    return {"cumulative_kwh_diff": series.data()}
def process_request(self, arg_set):
    """Generate a baseline for the request and report model error statistics.

    Absent or falsy request args are omitted so Loadshape defaults apply.
    """
    ctor_names = ("load_data", "temp_data", "timezone", "temp_units", "sq_ft")
    baseline_names = ("start_at", "end_at", "weighting_days",
                      "modeling_interval", "step_size")

    ctor_kwargs = {n: arg_set[n] for n in ctor_names if arg_set.get(n)}
    baseline_kwargs = {n: arg_set[n] for n in baseline_names if arg_set.get(n)}

    loadshape = Loadshape(**ctor_kwargs)
    baseline = loadshape.baseline(**baseline_kwargs)

    return {
        "baseline": baseline.data(),
        "error_stats": loadshape.error_stats,
    }
def test_diff(self):
    """diff() should yield the kW difference and cumulative-kWh series
    for a flat 5 kW load against a flat 4 kW baseline.

    Fix: the baseline Series is now constructed tz-aware for
    'America/Los_Angeles', consistent with the Loadshape under test and
    with the sibling test_diff that does the same (previously the baseline
    was a naive Series while the load series carried a timezone).
    """
    from loadshape.utils import get_timezone

    t0 = 1379487600
    stamps = [t0 + step * 900 for step in range(5)]

    l_data = [(t, 5.0) for t in stamps]
    b_data = [(t, 4.0) for t in stamps]

    # The kW diff series omits the first interval.
    expected_kw_diff = [(t, 1) for t in stamps[1:]]
    # 1 kW sustained over a 900 s step integrates to 0.25 kWh per step.
    expected_cumulative_kwh_diff = [(t, i * 0.25) for i, t in enumerate(stamps)]
    # 4 kW sustained over a 900 s step integrates to 1.0 kWh per step.
    expected_cumulative_kwh_base = [(t, i * 1.0) for i, t in enumerate(stamps)]

    b = Loadshape(l_data, timezone='America/Los_Angeles', log_level=30)
    b.baseline_series = Series(b_data, get_timezone('America/Los_Angeles'))

    kw_diff, kw_base, cumulative_kwh_diff, cumulative_kwh_base = b.diff()

    assert kw_diff.data() == expected_kw_diff
    assert cumulative_kwh_diff.data() == expected_cumulative_kwh_diff
    assert kw_base.data() == b_data[1:]
    assert cumulative_kwh_base.data() == expected_cumulative_kwh_base
def process_request(self, arg_set):
    """Instantiate a Loadshape from the request args and return event performance.

    Absent or falsy request args are omitted so Loadshape defaults apply.
    """
    ctor_keys = ("load_data", "temp_data", "timezone", "temp_units", "sq_ft")
    event_keys = ("start_at", "end_at")

    ctor_kwargs = {k: arg_set[k] for k in ctor_keys if arg_set.get(k)}
    event_kwargs = {k: arg_set[k] for k in event_keys if arg_set.get(k)}

    return Loadshape(**ctor_kwargs).event_performance(**event_kwargs)
def process_request(self, arg_set):
    """Compute the cumulative kWh difference series for the request.

    Absent or falsy request args are omitted so Loadshape defaults apply.
    """
    ctor_keys = ("load_data", "temp_data", "timezone", "temp_units", "sq_ft")
    sum_keys = ("start_at", "end_at", "step_size")

    ctor_kwargs = {k: arg_set[k] for k in ctor_keys if arg_set.get(k)}
    sum_kwargs = {k: arg_set[k] for k in sum_keys if arg_set.get(k)}

    loadshape = Loadshape(**ctor_kwargs)
    cumulative = loadshape.cumulative_sum(**sum_kwargs)

    return {"cumulative_kwh_diff": cumulative.data()}
WEEK_END = "2013-09-29" # ----- write JSON output file ----- # def write_json(data, file_name='output.json'): print "writing file: %s" % file_name with open(file_name, 'w') as outfile: json.dump(data, outfile) outfile.close() # ----- build loadshape object ----- # my_load_shape = Loadshape( load_data=LOAD_DATA, temp_data=TEMP_DATA, # tariff_schedule=tariff_schedule timezone='America/Los_Angeles', temp_units="F", sq_ft=BUILDING_SQ_FT) # ----- add exclusions as necessary ----- # my_load_shape.add_exclusion("2013-09-23 00:00:00", "2013-09-24 00:00:00") my_load_shape.add_exclusion("2013-09-27 00:00:00", "2013-09-28 00:00:00") my_load_shape.add_named_exclusion("US_HOLIDAYS") # ----- generate a 7 day baseline ----- # seven_day_baseline = my_load_shape.baseline(start_at=WEEK_START, end_at=WEEK_END, weighting_days=14, modeling_interval=900, step_size=900)
WEEK_START = "2013-09-22" WEEK_END = "2013-09-29" # ----- write JSON output file ----- # def write_json(data, file_name="output.json"): print "writing file: %s" % file_name with open(file_name, "w") as outfile: json.dump(data, outfile) outfile.close() # ----- build loadshape object ----- # my_load_shape = Loadshape( load_data=LOAD_DATA, temp_data=TEMP_DATA, # tariff_schedule=tariff_schedule timezone="America/Los_Angeles", temp_units="F", sq_ft=BUILDING_SQ_FT, ) # ----- add exclusions as necessary ----- # my_load_shape.add_exclusion("2013-09-23 00:00:00", "2013-09-24 00:00:00") my_load_shape.add_exclusion("2013-09-27 00:00:00", "2013-09-28 00:00:00") my_load_shape.add_named_exclusion("US_HOLIDAYS") # ----- generate a 7 day baseline ----- # seven_day_baseline = my_load_shape.baseline( start_at=WEEK_START, end_at=WEEK_END, weighting_days=14, modeling_interval=900, step_size=900 ) # ----- assemble a payload summarizng the seven day baseline ----- #
DR_EVENT_NAME = "DR Event 1" DR_EVENT_START = "2013-09-27 14:00:00" DR_EVENT_END = "2013-09-27 16:15:00" DR_EVENT_DAY_START = "2013-09-27 00:00:00" DR_EVENT_DAY_END = "2013-09-28 00:00:00" # ----- write JSON output file ----- # def write_json(data, file_name='output.json'): print "writing file: %s" % file_name with open(file_name, 'w') as outfile: json.dump(data, outfile) outfile.close() # ----- build loadshape object ----- # my_load_shape = Loadshape(load_data=LOAD_DATA, temp_data=TEMP_DATA, timezone='America/Los_Angeles', temp_units="F", sq_ft=BUILDING_SQ_FT) # ----- add exclusions as necessary ----- # my_load_shape.add_exclusion("2013-09-23 00:00:00", "2013-09-24 00:00:00") my_load_shape.add_exclusion("2013-09-27 00:00:00", "2013-09-28 00:00:00") my_load_shape.add_named_exclusion("US_HOLIDAYS") # ----- add tariff to enable cost calculations ----- # tariff = Tariff(tariff_file=TARIFF, timezone='America/Los_Angeles') tariff.add_dr_period("2013-09-23 14:00:00", "2013-09-23 16:00:00") tariff.add_dr_period("2013-09-27 14:00:00", "2013-09-27 16:15:00") my_load_shape.set_tariff(tariff) # ----- build the baseline to use as a reference for performance ----- #
TARIFF = path.join(EXAMPLES_DIR, "data", "tariff.json") CUMULATIVE_SUM_NAME = "Cumulative Sum Test 1" CUMULATIVE_SUM_START = "2013-09-30" CUMULATIVE_SUM_WEIGHTING_DAYS = 14 # ----- write JSON output file ----- # def write_json(data, file_name='output.json'): print "writing file: %s" % file_name with open(file_name, 'w') as outfile: json.dump(data, outfile) outfile.close() # ----- build loadshape object ----- # my_load_shape = Loadshape(load_data=LOAD_DATA, temp_data=TEMP_DATA, # tariff_schedule=tariff_schedule timezone='America/Los_Angeles', temp_units="F", sq_ft=BUILDING_SQ_FT) # ----- add exclusions as necessary ----- # my_load_shape.add_exclusion("2013-09-23 00:00:00", "2013-09-24 00:00:00") my_load_shape.add_exclusion("2013-09-27 00:00:00", "2013-09-28 00:00:00") my_load_shape.add_named_exclusion("US_HOLIDAYS") # ----- generate the appropriate baseline ----- # baseline = my_load_shape.baseline(weighting_days=CUMULATIVE_SUM_WEIGHTING_DAYS, modeling_interval=900, step_size=900) # ----- set up the cumulative sum dates ----- # tz = utils.get_timezone('America/Los_Angeles') start_at = utils.str_to_datetime(CUMULATIVE_SUM_START, tz)
CUMULATIVE_SUM_WEIGHTING_DAYS = 14 # ----- write JSON output file ----- # def write_json(data, file_name='output.json'): print "writing file: %s" % file_name with open(file_name, 'w') as outfile: json.dump(data, outfile) outfile.close() # ----- build loadshape object ----- # my_load_shape = Loadshape( load_data=LOAD_DATA, temp_data=TEMP_DATA, # tariff_schedule=tariff_schedule timezone='America/Los_Angeles', temp_units="F", sq_ft=BUILDING_SQ_FT) # ----- add exclusions as necessary ----- # my_load_shape.add_exclusion("2013-09-23 00:00:00", "2013-09-24 00:00:00") my_load_shape.add_exclusion("2013-09-27 00:00:00", "2013-09-28 00:00:00") my_load_shape.add_named_exclusion("US_HOLIDAYS") # ----- generate the appropriate baseline ----- # baseline = my_load_shape.baseline(weighting_days=CUMULATIVE_SUM_WEIGHTING_DAYS, modeling_interval=900, step_size=900) # ----- set up the cumulative sum dates ----- #