Example #1
    def execute(self):
        """ Output is sorted loads values."""

        self.out.log("Starting application: load duration.", logging.INFO)

        self.out.log("Querying database.", logging.INFO)
        load_query = self.inp.get_query_sets('load',
                                             order_by='value',
                                             exclude={'value': None})

        self.out.log("Getting unit conversions.", logging.INFO)
        base_topic = self.inp.get_topics()
        meta_topics = self.inp.get_topics_meta()
        load_unit = meta_topics['load'][base_topic['load'][0]]['unit']
        self.out.log("Convert loads from [{}] to [kW].".format(load_unit),
                     logging.INFO)
        load_convertfactor = cu.getFactor_powertoKW(load_unit)

        self.out.log("Compiling the report table.", logging.INFO)
        total = len(load_query[0])
        for ctr, x in enumerate(load_query[0], start=1):
            self.out.insert_row(
                "Load_Duration", {
                    "sorted load": x[1] * load_convertfactor,
                    "percent time": (total - ctr) / total
                })
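The percent-time column can be checked in isolation: with the records sorted ascending by value, the row at rank i of n is assigned (n - i) / n, the fraction of records that rank above it. A minimal standalone sketch of the same arithmetic, with made-up load values and no OpenEIS dependencies:

    # Hypothetical hourly loads in kW, standing in for the sorted 'load' query.
    loads_kw = [12.0, 7.5, 30.2, 18.4, 25.0]

    sorted_loads = sorted(loads_kw)
    n = len(sorted_loads)
    for i, load in enumerate(sorted_loads, start=1):
        percent_time = (n - i) / n  # Fraction of records ranking above this one.
        print("{:6.1f} kW exceeded {:.0%} of the time".format(load, percent_time))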
Example #2
    def execute(self):
        """Outputs values for line graph."""
        self.out.log("Starting application: load profile.", logging.INFO)

        self.out.log("Getting unit conversions.", logging.INFO)
        base_topic = self.inp.get_topics()
        meta_topics = self.inp.get_topics_meta()

        load_unit = meta_topics['load'][base_topic['load'][0]]['unit']
        self.out.log("Convert loads from [{}] to [kW].".format(load_unit),
                     logging.INFO)
        load_convertfactor = cu.getFactor_powertoKW(load_unit)

        self.out.log("Querying database.", logging.INFO)
        load_by_hour = self.inp.get_query_sets('load',
                                               exclude={'value': None},
                                               group_by='hour')[0]

        self.out.log("Reducing the records to two weeks.", logging.INFO)
        # Note: Limit the number of data points to two weeks' worth of data.
        # 24 hours x 14 days = 336.
        # if len(load_by_hour) > 336:
        #     start = len(load_by_hour) - 336
        #     end = len(load_by_hour) - 1
        # else:
        #     start = 0
        #     end = len(load_by_hour)

        self.out.log("Compiling the report table.", logging.INFO)
        #for x in load_by_hour[start:end]:
        cal = workalendar.usa.UnitedStates()
        values = []
        prev_local_time = None
        for i, x in enumerate(load_by_hour):
            local_time = self.inp.localize_sensor_time(base_topic['load'][0],
                                                       x[0])
            if (i == 0) or (local_time == prev_local_time):
                values.append(x[1])
                prev_local_time = local_time

            if (i == len(load_by_hour) - 1) or (local_time != prev_local_time):
                daytype = 'W'  # weekdays: [0, 4]
                if prev_local_time.weekday() == 5:
                    daytype = 'Sat'
                if prev_local_time.weekday() == 6:
                    daytype = 'Sun'
                if cal.is_holiday(prev_local_time):
                    daytype = 'H'
                value = sum(values) / len(values)
                #print(prev_local_time.strftime('%m/%d/%Y %H:%M:%S') + "   " + daytype + "      " + str(value))
                self.out.insert_row(
                    "Load_Profiling", {
                        'datetime': prev_local_time,
                        'load': value * load_convertfactor,
                        'daytype': daytype
                    })
                values = [x[1]]
                prev_local_time = local_time
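The daytype labels come from the weekday index plus workalendar's US holiday calendar, with holidays taking precedence over the weekend labels. A self-contained sketch of just that classification (assumes the workalendar package is installed; the dates are arbitrary examples):

    import datetime

    import workalendar.usa

    def classify_day(d):
        # Mirrors the daytype logic above: 'W' weekday, 'Sat', 'Sun', 'H' holiday.
        cal = workalendar.usa.UnitedStates()
        if cal.is_holiday(d):
            return 'H'
        if d.weekday() == 5:
            return 'Sat'
        if d.weekday() == 6:
            return 'Sun'
        return 'W'

    print(classify_day(datetime.date(2015, 7, 4)))  # 'H' (Independence Day)
    print(classify_day(datetime.date(2015, 7, 6)))  # 'W'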
Example #3
    def execute(self):
        """Outputs values for line graph."""
        self.out.log("Starting application: load profile.", logging.INFO)

        self.out.log("Getting unit conversions.", logging.INFO)
        base_topic = self.inp.get_topics()
        meta_topics = self.inp.get_topics_meta()

        load_unit = meta_topics['load'][base_topic['load'][0]]['unit']
        self.out.log(
            "Convert loads from [{}] to [kW].".format(load_unit),
            logging.INFO
            )
        load_convertfactor = cu.getFactor_powertoKW(load_unit)

        self.out.log("Querying database.", logging.INFO)
        load_by_hour = self.inp.get_query_sets('load',
                                                exclude={'value': None},
                                                group_by='hour')[0]

        self.out.log("Reducing the records to two weeks.", logging.INFO)
        # Note: Limit the number of data points to two weeks' worth of data.
        # 24 hours x 14 days = 336.
        # if len(load_by_hour) > 336:
        #     start = len(load_by_hour) - 336
        #     end = len(load_by_hour) - 1
        # else:
        #     start = 0
        #     end = len(load_by_hour)

        self.out.log("Compiling the report table.", logging.INFO)
        #for x in load_by_hour[start:end]:
        cal = workalendar.usa.UnitedStates()
        values = []
        prev_local_time = None
        for i, x in enumerate(load_by_hour):
            local_time = self.inp.localize_sensor_time(base_topic['load'][0], x[0])
            if (i == 0) or (local_time == prev_local_time):
                values.append(x[1])
                prev_local_time = local_time

            if (i == len(load_by_hour) - 1) or (local_time != prev_local_time):
                daytype = 'W'  # weekdays: [0, 4]
                if prev_local_time.weekday() == 5:
                    daytype = 'Sat'
                if prev_local_time.weekday() == 6:
                    daytype = 'Sun'
                if cal.is_holiday(prev_local_time):
                    daytype = 'H'
                value = sum(values) / len(values)
                #print(prev_local_time.strftime('%m/%d/%Y %H:%M:%S') + "   " + daytype + "      " + str(value))
                self.out.insert_row("Load_Profiling", {
                    'datetime': prev_local_time,
                    'load': value * load_convertfactor,
                    'daytype': daytype
                })
                values = [x[1]]
                prev_local_time = local_time
Example #4
    def execute(self):
        #Called after User hits GO
        """
        Will output the following: year, aggregated load amounts,
        and aggregated gas amounts.
        """
        self.out.log("Starting application: longitudinal benchmarking.",
                     logging.INFO)

        self.out.log("Querying database.", logging.INFO)
        # Note: Assumes all of the energy data are on a per-hour basis.
        # TODO: The caveat above must be made stronger.  Aggregating by summing
        #   only converts, e.g., [kW] to [kWh] for hourly observations.
        #   Similar problem for gas data.
        # TODO: The query here presumably groups by calendar year.  Need to check
        #   whether the application actually wants a year's worth of data, looking
        #   backward from the most recent observation.
        # Valid calculation to sum the data by 'year'.
        load_by_year = self.inp.get_query_sets('load',
                                               group_by='year',
                                               group_by_aggregation=Sum,
                                               exclude={'value': None},
                                               wrap_for_merge=True)

        gas_by_year = self.inp.get_query_sets('natgas',
                                              group_by='year',
                                              group_by_aggregation=Sum,
                                              exclude={'value': None},
                                              wrap_for_merge=True)

        merge_load_gas = self.inp.merge(load_by_year, gas_by_year)

        self.out.log("Getting unit conversions.", logging.INFO)
        base_topic = self.inp.get_topics()
        meta_topics = self.inp.get_topics_meta()

        load_unit = meta_topics['load'][base_topic['load'][0]]['unit']
        self.out.log(
            "Convert loads from [{}] to [kW]; integration will take to [kWh].".
            format(load_unit), logging.INFO)
        load_convertfactor = cu.getFactor_powertoKW(load_unit)

        natgas_unit = meta_topics['natgas'][base_topic['natgas'][0]]['unit']
        self.out.log(
            "Convert natgas from [{}] to [kBtu/hr]; integration will take to [kBtu]."
            .format(natgas_unit), logging.INFO)
        natgas_convertfactor = cu.getFactor_powertoKBtu_hr(natgas_unit)

        self.out.log("Compiling the report table.", logging.INFO)
        for x in merge_load_gas:
            self.out.insert_row(
                'Longitudinal_BM', {
                    'year': x['time'].year,
                    'load': x['load'][0] * load_convertfactor,
                    'natgas': x['natgas'][0] * natgas_convertfactor
                })
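As the TODO notes, summing is only a valid annual energy total because each observation is hourly: an average-power reading of k kW held for one hour contributes exactly k kWh. A minimal sketch of the group-by-year-and-sum step over plain (timestamp, value) pairs, with made-up readings in place of the database query:

    import datetime
    from collections import defaultdict

    # Hypothetical hourly load readings: (timestamp, average power in kW).
    readings = [
        (datetime.datetime(2014, 12, 31, 23), 20.0),
        (datetime.datetime(2015, 1, 1, 0), 18.5),
        (datetime.datetime(2015, 1, 1, 1), 17.0),
    ]

    kwh_by_year = defaultdict(float)
    for when, kw in readings:
        kwh_by_year[when.year] += kw  # kW x 1 hour = kWh per observation.

    for year in sorted(kwh_by_year):
        print(year, "{:.1f} kWh".format(kwh_by_year[year]))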
Example #5
    def execute(self):
        #Called after User hits GO
        """
        Will output the following: year, aggregated load amounts,
        and aggregated gas amounts.
        """
        self.out.log("Starting application: longitudinal benchmarking.", logging.INFO)

        self.out.log("Querying database.", logging.INFO)
        # Note: Assumes all of the energy data are on a per-hour basis.
        # TODO: The caveat above must be made stronger.  Aggregating by summing
        #   only converts, e.g., [kW] to [kWh] for hourly observations.
        #   Similar problem for gas data.
        # TODO: The query here presumably groups by calendar year.  Need to check
        #   whether the application actually wants a year's worth of data, looking
        #   backward from the most recent observation.
        # Valid calculation to sum the data by 'year'.
        load_by_year = self.inp.get_query_sets('load', group_by='year',
                                               group_by_aggregation=Sum,
                                               exclude={'value':None},
                                               wrap_for_merge=True)

        gas_by_year = self.inp.get_query_sets('natgas', group_by='year',
                                              group_by_aggregation=Sum,
                                              exclude={'value':None},
                                              wrap_for_merge=True)

        merge_load_gas = self.inp.merge(load_by_year, gas_by_year)

        self.out.log("Getting unit conversions.", logging.INFO)
        base_topic = self.inp.get_topics()
        meta_topics = self.inp.get_topics_meta()

        load_unit = meta_topics['load'][base_topic['load'][0]]['unit']
        self.out.log(
            "Convert loads from [{}] to [kW]; integration will take to [kWh].".format(load_unit),
            logging.INFO
            )
        load_convertfactor = cu.getFactor_powertoKW(load_unit)

        natgas_unit = meta_topics['natgas'][base_topic['natgas'][0]]['unit']
        self.out.log(
            "Convert natgas from [{}] to [kBtu/hr]; integration will take to [kBtu].".format(natgas_unit),
            logging.INFO
            )
        natgas_convertfactor = cu.getFactor_powertoKBtu_hr(natgas_unit)

        self.out.log("Compiling the report table.", logging.INFO)
        for x in merge_load_gas:
            self.out.insert_row('Longitudinal_BM', {
                'year': x['time'].year,
                'load': x['load'][0]*load_convertfactor,
                'natgas': x['natgas'][0]*natgas_convertfactor
                })
Example #6
    def execute(self):
        """Outputs values for line graph."""
        self.out.log("Starting application: load profile.", logging.INFO)

        self.out.log("Getting unit conversions.", logging.INFO)
        base_topic = self.inp.get_topics()
        meta_topics = self.inp.get_topics_meta()

        load_unit = meta_topics['load'][base_topic['load'][0]]['unit']
        self.out.log(
            "Convert loads from [{}] to [kW].".format(load_unit),
            logging.INFO
            )
        load_convertfactor = cu.getFactor_powertoKW(load_unit)

        self.out.log("Querying database.", logging.INFO)
        load_by_hour = self.inp.get_query_sets('load',
                                                exclude={'value': None},
                                                group_by='hour')[0]

        self.out.log("Reducing the records to two weeks.", logging.INFO)
        # Note: Limit the number of data points to two weeks' worth of data.
        # 24 hours x 14 days = 336.
        if len(load_by_hour) > 336:
            start = len(load_by_hour) - 336
            end = len(load_by_hour)  # Slice end is exclusive; this keeps the newest record.
        else:
            start = 0
            end = len(load_by_hour)

        self.out.log("Compiling the report table.", logging.INFO)
        for x in load_by_hour[start:end]:
            local_time = self.inp.localize_sensor_time(base_topic['load'][0], x[0])
            self.out.insert_row("Load_Profiling", {
                'timestamp': local_time,
                'load': x[1]*load_convertfactor
                })
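A Python slice excludes its end index, so keeping the newest record requires end = len(load_by_hour) rather than len(load_by_hour) - 1. A quick standalone check of the window bounds:

    records = list(range(400))  # Stand-in for 400 hourly records.

    WINDOW = 336  # 24 hours x 14 days.
    start = max(len(records) - WINDOW, 0)
    end = len(records)
    window = records[start:end]

    assert len(window) == WINDOW
    assert window[-1] == records[-1]  # The newest record is kept.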
Example #7
    def execute(self):
        """Outputs values for line graph."""
        self.out.log("Starting application: load profile.", logging.INFO)

        self.out.log("Getting unit conversions.", logging.INFO)
        base_topic = self.inp.get_topics()
        meta_topics = self.inp.get_topics_meta()

        load_unit = meta_topics['load'][base_topic['load'][0]]['unit']
        self.out.log("Convert loads from [{}] to [kW].".format(load_unit),
                     logging.INFO)
        load_convertfactor = cu.getFactor_powertoKW(load_unit)

        self.out.log("Querying database.", logging.INFO)
        load_by_hour = self.inp.get_query_sets('load',
                                               exclude={'value': None},
                                               group_by='hour')[0]

        self.out.log("Reducing the records to two weeks.", logging.INFO)
        # Note: Limit the number of data points to two weeks' worth of data.
        # 24 hours x 14 days = 336.
        if len(load_by_hour) > 336:
            start = len(load_by_hour) - 336
            end = len(load_by_hour)  # Slice end is exclusive; this keeps the newest record.
        else:
            start = 0
            end = len(load_by_hour)

        self.out.log("Compiling the report table.", logging.INFO)
        for x in load_by_hour[start:end]:
            local_time = self.inp.localize_sensor_time(base_topic['load'][0],
                                                       x[0])
            self.out.insert_row("Load_Profiling", {
                'timestamp': local_time,
                'load': x[1] * load_convertfactor
            })
Example #8
    def execute(self):
        """ Output is sorted loads values."""

        self.out.log("Starting application: load duration.", logging.INFO)

        self.out.log("Querying database.", logging.INFO)
        load_query = self.inp.get_query_sets('load',
                                             order_by='value',
                                             exclude={'value': None})

        self.out.log("Getting unit conversions.", logging.INFO)
        base_topic = self.inp.get_topics()
        meta_topics = self.inp.get_topics_meta()
        load_unit = meta_topics['load'][base_topic['load'][0]]['unit']
        self.out.log(
            "Convert loads from [{}] to [kW].".format(load_unit),
            logging.INFO
            )
        load_convertfactor = cu.getFactor_powertoKW(load_unit)

        self.out.log("Compiling the report table.", logging.INFO)
        total = len(load_query[0])
        for ctr, x in enumerate(load_query[0], start=1):
            self.out.insert_row(
                "Load_Duration", {
                    "sorted load": x[1] * load_convertfactor,
                    "percent time": (total - ctr) / total
                })
Example #9
    def execute(self):
        """
        Calculates weather sensitivity using Spearman rank.
        Also, outputs data points for energy signature scatter plot.
        """

        self.out.log("Starting application: energy signature.", logging.INFO)

        self.out.log("Querying database.", logging.INFO)
        load_query = self.inp.get_query_sets('load', group_by='hour',
                                             group_by_aggregation=Avg,
                                             exclude={'value':None},
                                             wrap_for_merge=True)
        oat_query = self.inp.get_query_sets('oat', group_by='hour',
                                             group_by_aggregation=Avg,
                                             exclude={'value':None},
                                             wrap_for_merge=True)

        self.out.log("Getting unit conversions.", logging.INFO)
        base_topic = self.inp.get_topics()
        meta_topics = self.inp.get_topics_meta()

        load_unit = meta_topics['load'][base_topic['load'][0]]['unit']
        self.out.log(
            "Convert loads from [{}] to [kW].".format(load_unit),
            logging.INFO
            )
        load_convertfactor = cu.getFactor_powertoKW(load_unit)

        temperature_unit = meta_topics['oat'][base_topic['oat'][0]]['unit']
        self.out.log(
            "Convert temperatures from [{}] to [F].".format(temperature_unit),
            logging.INFO
            )

        #print('merge load', merged_load_oat)
        load_values = []
        oat_values = []

        self.out.log("Pulling data from database.", logging.INFO)
        merged_load_oat = self.inp.merge(load_query, oat_query)
        for x in merged_load_oat:
            if temperature_unit == 'celcius':
                convertedTemp = cu.convertCelciusToFahrenheit(x['oat'][0])
            elif temperature_unit == 'kelvin':
                # Kelvin -> Celsius -> Fahrenheit.
                convertedTemp = cu.convertCelciusToFahrenheit(
                                cu.convertKelvinToCelcius(x['oat'][0]))
            else:
                convertedTemp = x['oat'][0]

            load_values.append(x['load'][0]*load_convertfactor)
            oat_values.append(convertedTemp)
            self.out.insert_row(LOAD_VS_OAT_TABLE_NAME, {
                "oat": x['oat'][0],
                "load": x['load'][0]
                })

        self.out.log("Calculating the Spearman rank.", logging.INFO)
        #print(load_values)
        #print(oat_values)
        weather_sensitivity = findSpearmanRank(load_values, oat_values)

        self.out.log("Adding weather sensitivity to table.", logging.INFO)
        self.out.insert_row(WEATHER_SENSITIVITY_TABLE_NAME, {
            "value": "{:.2f}".format(weather_sensitivity)
            })
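findSpearmanRank is a project helper; the same statistic can be sketched with scipy.stats.spearmanr, which returns the rank correlation between the two series (assumes scipy is installed; the sample values are made up):

    from scipy.stats import spearmanr

    # Hypothetical hourly averages: load [kW] and outdoor air temperature [F].
    load_values = [40.0, 42.0, 47.0, 55.0, 63.0, 71.0]
    oat_values = [55.0, 60.0, 68.0, 75.0, 82.0, 90.0]

    rho, p_value = spearmanr(load_values, oat_values)
    print("weather sensitivity: {:.2f} (p={:.3f})".format(rho, p_value))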
Example #10
    def execute(self):
        # Called after User hits GO
        """
        Calculates and outputs the following metrics:
            - Daily Load 95th Percentile
            - Daily Load 5th Percentile
            - Daily Load Ratio
            - Daily Load Range
            - Load Variability
            - Peak Load Benchmark
        """

        self.out.log("Starting application: daily summary.", logging.INFO)

        self.out.log("Querying database.", logging.INFO)
        peakLoad = self.inp.get_query_sets('load', group_by='all',
                                           group_by_aggregation=Max)[0]
        load_query = self.inp.get_query_sets('load', exclude={'value':None})[0]

        load_startDay = load_query.earliest()[0].date()
        load_endDay = load_query.latest()[0].date()
        current_Day = load_startDay
        load_day_list_95 = []
        load_day_list_5 = []

        self.out.log("Getting unit conversions.", logging.INFO)
        base_topic = self.inp.get_topics()
        meta_topics = self.inp.get_topics_meta()

        load_unit = meta_topics['load'][base_topic['load'][0]]['unit']
        self.out.log(
            "Convert loads from [{}] to [kW].".format(load_unit),
            logging.INFO
            )
        load_convertfactor = cu.getFactor_powertoKW(load_unit)

        self.out.log("Calculating peak benchmark metric.", logging.INFO)
        floorAreaSqft = self.sq_ft
        peakLoadIntensity = peakLoad / floorAreaSqft

        self.out.log("Calculating daily top and bottom percentile.", logging.INFO)
        while current_Day <= load_endDay:
            load_day_query = load_query.filter(time__year=current_Day.year,
                                               time__month=current_Day.month,
                                               time__day=current_Day.day)
            current_Day += relativedelta(days=1)

            load_day_values = [x[1] for x in load_day_query]
            if (len(load_day_values) < 5):
                continue

            load_day_list_95.append(numpy.percentile(load_day_values, 95))
            load_day_list_5.append(numpy.percentile(load_day_values, 5))

        # average them
        load_day_95_mean = numpy.mean(load_day_list_95)
        load_day_5_mean = numpy.mean(load_day_list_5)
        load_day_ratio_mean = numpy.mean(numpy.divide(load_day_list_5,
                                                      load_day_list_95))
        load_day_range_mean = numpy.mean(numpy.subtract(load_day_list_95,
                                                        load_day_list_5))

        self.out.log("Calculating load variability.", logging.INFO)
        # TODO: Generate an error if there are not 24 hours' worth of data for
        # every day, or if there are fewer than two days of data.
        hourly_variability = []

        for h in range(24):
            hourly_mean = self.inp.get_query_sets('load', group_by='all',
                                                  group_by_aggregation=Avg,
                                                  filter_={'time__hour':h})[0]
            hour_load_query = self.inp.get_query_sets('load',
                                                     filter_={'time__hour':h},
                                                     exclude={'value':None})[0]
            counts = hour_load_query.count()
            if (counts < 2):
                raise Exception("Must have more than 1 day of data!")
            rootmeansq = math.sqrt(
                sum((x[1] - hourly_mean) ** 2 for x in hour_load_query)
                / (counts - 1)
                )
            hourly_variability.append(rootmeansq / hourly_mean)

        load_variability = numpy.mean(hourly_variability)

        self.out.log("Compiling the report table.", logging.INFO)
        self.out.insert_row("Daily_Summary_Table", {
            "Metric": "Peak Load Benchmark [W/sf]",
            "value": "{:.2f}".format(peakLoadIntensity * load_convertfactor * 1000.),
            "description": "This is the absolute maximum electric load based on all of your data. "  \
                "The median for commercial buildings under 150,000 sf is 4.4 W/sf. "  \
                "Values much higher than 4.4 therefore indicate an opportunity to improve building performance."
            })
        self.out.insert_row("Daily_Summary_Table", {
            "Metric": "Daily Load 95th Percentile [kW]",
            "value": "{:.2f}".format(load_day_95_mean * load_convertfactor),
            "description": "The daily maximum usage could be dominated by a single large load, or "  \
                "could be the sum of several smaller ones. "  \
                "Long periods of usage near the maximum increase overall energy use."
            })
        self.out.insert_row("Daily_Summary_Table", {
            "Metric": "Daily Load 5th Percentile [kW]",
            "value": "{:.2f}".format(load_day_5_mean * load_convertfactor),
            "description": "Minimum usage is often dominated by loads that run 24 hours a day. "  \
                "In homes, these include refrigerators and vampire loads. "  \
                "In commercial buildings, these include ventilation, hallway lighting, computers, and vampire loads."
            })
        self.out.insert_row("Daily_Summary_Table", {
            "Metric": "Daily Load Range [kW]",
            "value": "{:.2f}".format(load_day_range_mean * load_convertfactor),
            "description": "This is a rough estimate of the total load turned on and off every day. "  \
                "Higher values may indicate good control, but could also indicate excessive peak usage."
            })
        self.out.insert_row("Daily_Summary_Table", {
            "Metric": "Daily Load Ratio",
            "value": "{:.2f}".format(load_day_ratio_mean),
            "description": "Values over 0.33 indicate that significant loads are shut off for parts of the day. "  \
                "To save energy, look to extend and deepen shutoff periods, while also reducing peak energy use."
            })
        self.out.insert_row("Daily_Summary_Table", {
            "Metric": "Load Variability",
            "value": "{:.2f}".format(load_variability),
            "description":"This metric is used to understand regularity of operations, "  \
                "and the likelihood of consistency in the building's demand responsiveness. "  \
                "It gives a coefficient of variation that ranges from 0 to 1. "  \
                "This coefficient can be interpreted based on general guidelines. "  \
                "For example, variability above 0.15 is generally considered high for commercial buildings."
            })
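The load variability metric is a per-hour coefficient of variation: for each hour of the day, the sample standard deviation of that hour's loads divided by that hour's mean, then averaged over all 24 hours. A standalone numpy sketch of the same calculation (made-up data covering only two hours for brevity):

    import numpy

    # Hypothetical loads observed at the same hour of day across several days.
    loads_by_hour = {
        0: [10.0, 11.0, 9.5],
        1: [9.0, 9.5, 10.5],
        # Hours 2-23 would follow with real data.
    }

    hourly_variability = []
    for hour, values in sorted(loads_by_hour.items()):
        mean = numpy.mean(values)
        stddev = numpy.std(values, ddof=1)  # Sample std, matching the (counts - 1) divisor.
        hourly_variability.append(stddev / mean)

    load_variability = numpy.mean(hourly_variability)
    print("{:.2f}".format(load_variability))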
Example #11
    def execute(self):
        #Called after User hits GO
        """Outputs the ENERGY Star Score from Target Finder API"""
        #NOTE: Connection check happens after data is formatted into XML and
        # sent into the web service request.
        self.out.log("Starting application: cross-sectional benchmarking.", logging.INFO)

        self.out.log("Querying the database for model parameters.", logging.INFO)
        bldgMetaData = dict()
        bldgMetaData['floor-area']  = self.sq_ft
        bldgMetaData['year-built']  = self.building_year
        bldgMetaData['bldg-name']   = self.building_name
        bldgMetaData['function']    = self.building_function
        bldgMetaData['zipcode']     = self.building_zipcode

        self.out.log("Querying the database for most recent year of energy load.", logging.INFO)
        # NOTE: Hourly values are preferred to make calculations easier.
        # TODO: The caveat above must be made stronger.  Aggregating by summing
        #   only converts, e.g., [kW] to [kWh] for hourly observations.
        #   Similar problem for gas data.
        # TODO: The query here presumably groups by calendar year.  Need to check
        #   whether the application actually wants a year's worth of data, looking
        #   backward from the most recent observation.
        load_by_year = self.inp.get_query_sets('load', group_by='year',
                                               group_by_aggregation=Sum,
                                               exclude={'value':None},
                                               wrap_for_merge=True)
        gas_by_year = self.inp.get_query_sets('natgas', group_by='year',
                                              group_by_aggregation=Sum,
                                              exclude={'value':None},
                                              wrap_for_merge=True)

        merge_load_gas = self.inp.merge(load_by_year, gas_by_year)

        # Convert the generator to a list that can be indexed.
        merge_data_list = []
        for item in merge_load_gas:
            merge_data_list.append((item['time'], item['load'][0], item['natgas'][0]))

        recent_record = merge_data_list[-1]

        self.out.log("Getting unit conversions.", logging.INFO)
        base_topic = self.inp.get_topics()
        meta_topics = self.inp.get_topics_meta()

        load_unit = meta_topics['load'][base_topic['load'][0]]['unit']
        self.out.log(
            "Convert loads from [{}] to [kW]; integration will take to [kWh].".format(load_unit),
            logging.INFO
            )
        load_convertfactor = cu.getFactor_powertoKW(load_unit)

        natgas_unit = meta_topics['natgas'][base_topic['natgas'][0]]['unit']
        self.out.log(
            "Convert natgas from [{}] to [kBtu/hr]; integration will take to [kBtu].".format(natgas_unit),
            logging.INFO
            )
        natgas_convertfactor = cu.getFactor_powertoKBtu_hr(natgas_unit)

        #TODO: Convert values to units that are PM Manager valid values.
        energyUseList = [
            ['Electric', 'kWh (thousand Watt-hours)',
             int(recent_record[1] * load_convertfactor)],
            ['Natural Gas', 'kBtu (thousand Btu)',
             int(recent_record[2] * natgas_convertfactor)]]

        self.out.log("Generate XML-formatted data to pass data to the webservice.", logging.INFO)
        targetFinder_xml = gen_xml_targetFinder(bldgMetaData,energyUseList,'z_targetFinder_xml')

        self.out.log("Function that sends a URL Request with ENERGY STAR web server.", logging.INFO)
        PMMetrics = retrieveScore(targetFinder_xml)

        self.out.log("Compile report table.", logging.INFO)
        if PMMetrics['status'] == 'success':
            self.out.log('Analysis successful', logging.INFO)
            self.out.insert_row('CrossSectional_BM', {
                'Metric Name': 'Target Finder Score',
                'Value': str(PMMetrics['designScore'][0])
                })
        else:
            self.out.log(str(PMMetrics['status']) + '\nReason:\t' +
                         str(PMMetrics['reason']), logging.WARNING)
            self.out.insert_row('CrossSectional_BM', {
                'Metric Name': 'Target Finder Score',
                'Value': 'Check log for error.'
                })
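gen_xml_targetFinder and retrieveScore are project helpers, so only the shape of their inputs is visible here. A rough standard-library sketch of serializing those inputs to XML; the element names are illustrative stand-ins, not the actual Target Finder schema:

    import xml.etree.ElementTree as ET

    # Same input shapes the helpers receive (all values hypothetical).
    bldg_meta = {'floor-area': 150000, 'year-built': 1998,
                 'bldg-name': 'Example Building', 'function': 'Office',
                 'zipcode': '94720'}
    energy_use = [['Electric', 'kWh (thousand Watt-hours)', 1200000],
                  ['Natural Gas', 'kBtu (thousand Btu)', 350000]]

    root = ET.Element('building')  # Illustrative tag names only.
    for key, value in bldg_meta.items():
        ET.SubElement(root, key.replace('-', '_')).text = str(value)
    for fuel, unit, amount in energy_use:
        entry = ET.SubElement(root, 'energyUse', {'fuel': fuel, 'unit': unit})
        entry.text = str(amount)

    print(ET.tostring(root, encoding='unicode'))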
Example #12
    def execute(self):
        """
        Calculates weather sensitivity using Spearman rank.
        Also, outputs data points for energy signature scatter plot.
        """

        self.out.log("Starting application: energy signature.", logging.INFO)

        self.out.log("Querying database.", logging.INFO)
        load_query = self.inp.get_query_sets('load',
                                             group_by='hour',
                                             group_by_aggregation=Avg,
                                             exclude={'value': None},
                                             wrap_for_merge=True)
        oat_query = self.inp.get_query_sets('oat',
                                            group_by='hour',
                                            group_by_aggregation=Avg,
                                            exclude={'value': None},
                                            wrap_for_merge=True)

        self.out.log("Getting unit conversions.", logging.INFO)
        base_topic = self.inp.get_topics()
        meta_topics = self.inp.get_topics_meta()

        load_unit = meta_topics['load'][base_topic['load'][0]]['unit']
        self.out.log("Convert loads from [{}] to [kW].".format(load_unit),
                     logging.INFO)
        load_convertfactor = cu.getFactor_powertoKW(load_unit)

        temperature_unit = meta_topics['oat'][base_topic['oat'][0]]['unit']
        self.out.log(
            "Convert temperatures from [{}] to [F].".format(temperature_unit),
            logging.INFO)

        #print('merge load', merged_load_oat)
        load_values = []
        oat_values = []

        self.out.log("Pulling data from database.", logging.INFO)
        merged_load_oat = self.inp.merge(load_query, oat_query)
        for x in merged_load_oat:
            if temperature_unit == 'celcius':
                convertedTemp = cu.convertCelciusToFahrenheit(x['oat'][0])
            elif temperature_unit == 'kelvin':
                # Kelvin -> Celsius -> Fahrenheit.
                convertedTemp = cu.convertCelciusToFahrenheit(
                    cu.convertKelvinToCelcius(x['oat'][0]))
            else:
                convertedTemp = x['oat'][0]

            load_values.append(x['load'][0] * load_convertfactor)
            oat_values.append(convertedTemp)
            self.out.insert_row(LOAD_VS_OAT_TABLE_NAME, {
                "oat": x['oat'][0],
                "load": x['load'][0]
            })

        self.out.log("Calculating the Spearman rank.", logging.INFO)
        #print(load_values)
        #print(oat_values)
        weather_sensitivity = findSpearmanRank(load_values, oat_values)

        self.out.log("Adding weather sensitivity to table.", logging.INFO)
        self.out.insert_row(WEATHER_SENSITIVITY_TABLE_NAME,
                            {"value": "{:.2f}".format(weather_sensitivity)})
Example #13
    def execute(self):
        #Called after User hits GO
        """Outputs the ENERGY Star Score from Target Finder API"""
        #NOTE: Connection check happens after data is formatted into XML and
        # sent into the web service request.
        self.out.log("Starting application: cross-sectional benchmarking.",
                     logging.INFO)

        self.out.log("Querying the database for model parameters.",
                     logging.INFO)
        bldgMetaData = dict()
        bldgMetaData['floor-area'] = self.sq_ft
        bldgMetaData['year-built'] = self.building_year
        bldgMetaData['bldg-name'] = self.building_name
        bldgMetaData['function'] = self.building_function
        bldgMetaData['zipcode'] = self.building_zipcode

        self.out.log(
            "Querying the database for most recent year of energy load.",
            logging.INFO)
        # NOTE: Hourly values are preferred to make calculations easier.
        # TODO: The caveat above must be made stronger.  Aggregating by summing
        #   only converts, e.g., [kW] to [kWh] for hourly observations.
        #   Similar problem for gas data.
        # TODO: The query here presumably groups by calendar year.  Need to check
        #   whether the application actually wants a year's worth of data, looking
        #   backward from the most recent observation.
        load_by_year = self.inp.get_query_sets('load',
                                               group_by='year',
                                               group_by_aggregation=Sum,
                                               exclude={'value': None},
                                               wrap_for_merge=True)
        gas_by_year = self.inp.get_query_sets('natgas',
                                              group_by='year',
                                              group_by_aggregation=Sum,
                                              exclude={'value': None},
                                              wrap_for_merge=True)

        merge_load_gas = self.inp.merge(load_by_year, gas_by_year)

        # Convert the generator to a list that can be indexed.
        merge_data_list = []
        for item in merge_load_gas:
            merge_data_list.append(
                (item['time'], item['load'][0], item['natgas'][0]))

        recent_record = merge_data_list[-1]

        self.out.log("Getting unit conversions.", logging.INFO)
        base_topic = self.inp.get_topics()
        meta_topics = self.inp.get_topics_meta()

        load_unit = meta_topics['load'][base_topic['load'][0]]['unit']
        self.out.log(
            "Convert loads from [{}] to [kW]; integration will take to [kWh].".
            format(load_unit), logging.INFO)
        load_convertfactor = cu.getFactor_powertoKW(load_unit)

        natgas_unit = meta_topics['natgas'][base_topic['natgas'][0]]['unit']
        self.out.log(
            "Convert natgas from [{}] to [kBtu/hr]; integration will take to [kBtu]."
            .format(natgas_unit), logging.INFO)
        natgas_convertfactor = cu.getFactor_powertoKBtu_hr(natgas_unit)

        #TODO: Convert values to units that are PM Manager valid values.
        energyUseList = [
            ['Electric', 'kWh (thousand Watt-hours)',
             int(recent_record[1] * load_convertfactor)],
            ['Natural Gas', 'kBtu (thousand Btu)',
             int(recent_record[2] * natgas_convertfactor)]]

        self.out.log(
            "Generating XML-formatted data to pass to the web service.",
            logging.INFO)
        targetFinder_xml = gen_xml_targetFinder(bldgMetaData, energyUseList,
                                                'z_targetFinder_xml')

        self.out.log(
            "Sending a URL request to the ENERGY STAR web server.",
            logging.INFO)
        PMMetrics = retrieveScore(targetFinder_xml)

        self.out.log("Compile report table.", logging.INFO)
        if PMMetrics['status'] == 'success':
            self.out.log('Analysis successful', logging.INFO)
            self.out.insert_row(
                'CrossSectional_BM', {
                    'Metric Name': 'Target Finder Score',
                    'Value': str(PMMetrics['designScore'][0])
                })
        else:
            self.out.log(
                str(PMMetrics['status']) + '\nReason:\t' +
                str(PMMetrics['reason']), logging.WARNING)
            self.out.insert_row(
                'CrossSectional_BM', {
                    'Metric Name': 'Target Finder Score',
                    'Value': 'Check log for error.'
                })
Example #14
    def execute(self):
        # Called after User hits GO
        """
        Calculates weather sensitivity using Spearman rank.
        Also, outputs data points for energy signature scatter plot.
        """
        self.out.log("Starting application: whole building energy savings.", logging.INFO)

        # Gather loads and outside air temperatures, reduced to hourly averages.
        self.out.log("Querying database.", logging.INFO)
        load_query = self.inp.get_query_sets('load', group_by='hour',
                                             group_by_aggregation=Avg,
                                             exclude={'value':None},
                                             wrap_for_merge=True)
        oat_query = self.inp.get_query_sets('oat', group_by='hour',
                                             group_by_aggregation=Avg,
                                             exclude={'value':None},
                                             wrap_for_merge=True)

        self.out.log("Getting unit conversions.", logging.INFO)
        base_topic = self.inp.get_topics()
        meta_topics = self.inp.get_topics_meta()

        load_unit = meta_topics['load'][base_topic['load'][0]]['unit']
        self.out.log(
            "Convert loads from [{}] to [kW].".format(load_unit),
            logging.INFO
            )
        load_convertfactor = cu.getFactor_powertoKW(load_unit)

        temperature_unit = meta_topics['oat'][base_topic['oat'][0]]['unit']
        self.out.log(
            "Convert temperatures from [{}] to [F].".format(temperature_unit),
            logging.INFO
            )

        # Match the values by timestamp
        merged_load_oat = self.inp.merge(load_query, oat_query)

        load_values = []
        oat_values = []
        datetime_values = []

        for x in merged_load_oat:
            if temperature_unit == 'celcius':
                convertedTemp = cu.convertCelciusToFahrenheit(x['oat'][0])
            elif temperature_unit == 'kelvin':
                # Kelvin -> Celsius -> Fahrenheit.
                convertedTemp = cu.convertCelciusToFahrenheit(
                                cu.convertKelvinToCelcius(x['oat'][0]))
            else:
                convertedTemp = x['oat'][0]

            load_values.append(x['load'][0] * load_convertfactor)  # Converted to [kW].
            oat_values.append(convertedTemp)
            datetime_values.append(x['time'])

        indexList = {}
        indexList['trainingStart'] = ttow.findDateIndex(datetime_values, self.baseline_start)
        self.out.log('@trainingStart '+str(indexList['trainingStart']), logging.INFO)
        indexList['trainingStop'] = ttow.findDateIndex(datetime_values, self.baseline_stop)
        self.out.log('@trainingStop '+str(indexList['trainingStop']), logging.INFO)
        indexList['predictStart'] = ttow.findDateIndex(datetime_values, self.savings_start)
        self.out.log('@predictStart '+str(indexList['predictStart']), logging.INFO)
        indexList['predictStop'] = ttow.findDateIndex(datetime_values, self.savings_stop)
        self.out.log('@predictStop '+str(indexList['predictStop']), logging.INFO)

        for indx in indexList.keys():
            if indexList[indx] is None:
                self.out.log("Date not found in the datelist", logging.WARNING)

        # Break up data into training and prediction periods.
        timesTrain = datetime_values[indexList['trainingStart']:indexList['trainingStop']]
        timesPredict = datetime_values[indexList['predictStart']:indexList['predictStop']]

        valsTrain = load_values[indexList['trainingStart']:indexList['trainingStop']]
        valsActual = load_values[indexList['predictStart']:indexList['predictStop']]

        oatsTrain = oat_values[indexList['trainingStart']:indexList['trainingStop']]
        oatsPredict = oat_values[indexList['predictStart']:indexList['predictStop']]

        # Generate other information needed for model.
        timeStepMinutes = (timesTrain[1] - timesTrain[0]).total_seconds() / 60
        # TODO: Should this be calculated in the utility function?
        binCt = 6  # TODO: Allow caller to pass this in as an argument.

        # Form the temperature-time-of-week model.
        self.out.log("Finding baseline model", logging.INFO)
        ttowModel = ttow.formModel(timesTrain,
                                   oatsTrain,
                                   valsTrain,
                                   timeStepMinutes,
                                   binCt)

        # Apply the model.
        self.out.log("Applying baseline model", logging.INFO)
        valsPredict = ttow.applyModel(ttowModel, timesPredict, oatsPredict)

        # Output for scatter plot
        prevSum = 0
        for ctr in range(len(timesPredict)):
            # Calculate cumulative savings.
            prevSum += (valsPredict[ctr] - valsActual[ctr])
            local_time = str(self.inp.localize_sensor_time(base_topic['load'][0], timesPredict[ctr]))
            self.out.insert_row("DayTimeTemperatureModel", {
                                "datetimeValues": local_time,
                                "measured": valsActual[ctr],
                                "predicted": valsPredict[ctr],
                                "cumulativeSum": prevSum
                                })
Example #15
    def execute(self):
        # Called after User hits GO
        """
        Calculates weather sensitivity using Spearman rank.
        Also, outputs data points for energy signature scatter plot.
        """
        self.out.log("Starting application: whole building energy savings.",
                     logging.INFO)

        # Gather loads and outside air temperatures, reduced to hourly averages.
        self.out.log("Querying database.", logging.INFO)
        load_query = self.inp.get_query_sets('load',
                                             group_by='hour',
                                             group_by_aggregation=Avg,
                                             exclude={'value': None},
                                             wrap_for_merge=True)
        oat_query = self.inp.get_query_sets('oat',
                                            group_by='hour',
                                            group_by_aggregation=Avg,
                                            exclude={'value': None},
                                            wrap_for_merge=True)

        self.out.log("Getting unit conversions.", logging.INFO)
        base_topic = self.inp.get_topics()
        meta_topics = self.inp.get_topics_meta()

        load_unit = meta_topics['load'][base_topic['load'][0]]['unit']
        self.out.log("Convert loads from [{}] to [kW].".format(load_unit),
                     logging.INFO)
        load_convertfactor = cu.getFactor_powertoKW(load_unit)

        temperature_unit = meta_topics['oat'][base_topic['oat'][0]]['unit']
        self.out.log(
            "Convert temperatures from [{}] to [F].".format(temperature_unit),
            logging.INFO)

        # Match the values by timestamp
        merged_load_oat = self.inp.merge(load_query, oat_query)

        load_values = []
        oat_values = []
        datetime_values = []

        for x in merged_load_oat:
            if temperature_unit == 'celcius':
                convertedTemp = cu.convertCelciusToFahrenheit(x['oat'][0])
            elif temperature_unit == 'kelvin':
                # Kelvin -> Celsius -> Fahrenheit.
                convertedTemp = cu.convertCelciusToFahrenheit(
                    cu.convertKelvinToCelcius(x['oat'][0]))
            else:
                convertedTemp = x['oat'][0]

            load_values.append(x['load'][0] *
                               load_convertfactor)  # Converted to [kW].
            oat_values.append(convertedTemp)
            datetime_values.append(x['time'])

        indexList = {}
        indexList['trainingStart'] = ttow.findDateIndex(
            datetime_values, self.baseline_start)
        self.out.log('@trainingStart ' + str(indexList['trainingStart']),
                     logging.INFO)
        indexList['trainingStop'] = ttow.findDateIndex(datetime_values,
                                                       self.baseline_stop)
        self.out.log('@trainingStop ' + str(indexList['trainingStop']),
                     logging.INFO)
        indexList['predictStart'] = ttow.findDateIndex(datetime_values,
                                                       self.savings_start)
        self.out.log('@predictStart ' + str(indexList['predictStart']),
                     logging.INFO)
        indexList['predictStop'] = ttow.findDateIndex(datetime_values,
                                                      self.savings_stop)
        self.out.log('@predictStop ' + str(indexList['predictStop']),
                     logging.INFO)

        for indx in indexList.keys():
            if indexList[indx] is None:
                self.out.log("Date not found in the datelist", logging.WARNING)

        # Break up data into training and prediction periods.
        timesTrain = datetime_values[
            indexList['trainingStart']:indexList['trainingStop']]
        timesPredict = datetime_values[
            indexList['predictStart']:indexList['predictStop']]

        valsTrain = load_values[
            indexList['trainingStart']:indexList['trainingStop']]
        valsActual = load_values[
            indexList['predictStart']:indexList['predictStop']]

        oatsTrain = oat_values[
            indexList['trainingStart']:indexList['trainingStop']]
        oatsPredict = oat_values[
            indexList['predictStart']:indexList['predictStop']]

        # Generate other information needed for model.
        timeStepMinutes = (timesTrain[1] - timesTrain[0]).total_seconds() / 60
        # TODO: Should this be calculated in the utility function?
        binCt = 6  # TODO: Allow caller to pass this in as an argument.

        # Form the temperature-time-of-week model.
        self.out.log("Finding baseline model", logging.INFO)
        ttowModel = ttow.formModel(timesTrain, oatsTrain, valsTrain,
                                   timeStepMinutes, binCt)

        # Apply the model.
        self.out.log("Applying baseline model", logging.INFO)
        valsPredict = ttow.applyModel(ttowModel, timesPredict, oatsPredict)

        # Output for scatter plot
        prevSum = 0
        for ctr in range(len(timesPredict)):
            # Calculate cumulative savings.
            prevSum += (valsPredict[ctr] - valsActual[ctr])
            local_time = str(
                self.inp.localize_sensor_time(base_topic['load'][0],
                                              timesPredict[ctr]))
            self.out.insert_row(
                "DayTimeTemperatureModel", {
                    "datetimeValues": local_time,
                    "measured": valsActual[ctr],
                    "predicted": valsPredict[ctr],
                    "cumulativeSum": prevSum
                })
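ttow.formModel and ttow.applyModel are project modules, but the underlying idea can be sketched: bin each training observation by hour-of-week and outdoor-temperature bin, store the mean load per bin, and predict by lookup. A toy version under those assumptions (not the actual ttow implementation; the six bins echo binCt = 6 above):

    from collections import defaultdict

    BIN_EDGES = [50, 60, 70, 80, 90]  # Five edges give six OAT bins.

    def _key(t, oat):
        # (hour-of-week, temperature-bin) lookup key.
        return t.weekday() * 24 + t.hour, sum(oat > edge for edge in BIN_EDGES)

    def form_model(times, oats, loads):
        # Mean training load per (hour-of-week, OAT bin).
        sums = defaultdict(lambda: [0.0, 0])
        for t, oat, load in zip(times, oats, loads):
            acc = sums[_key(t, oat)]
            acc[0] += load
            acc[1] += 1
        return {k: total / count for k, (total, count) in sums.items()}

    def apply_model(model, times, oats):
        # Predicted load for each (time, OAT) pair; None where the bin was never trained.
        return [model.get(_key(t, oat)) for t, oat in zip(times, oats)]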