Example #1
def test_sqlite_data_collections_by_output_names():
    """Test the data_collections_by_output_name method with multiple names."""
    sql_path = './tests/assets/sql/eplusout_hourly.sql'
    sql_obj = SQLiteResult(sql_path)

    data_colls = sql_obj.data_collections_by_output_name(
        ('Zone Lights Electric Energy', 'Zone Mean Radiant Temperature'))
    assert len(data_colls) == 14
    for coll in data_colls:
        assert isinstance(coll, HourlyContinuousCollection)
        assert len(coll) == len(coll.header.analysis_period.hoys)
        assert isinstance(coll.header.data_type, (Energy, Temperature))

    data_colls = sql_obj.data_collections_by_output_name(
        ('Zone Lights Electric Energy', ))
    assert len(data_colls) == 7
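
Each collection returned above carries the zone it describes in its header metadata, which the CLI and comfort examples below rely on. A minimal inspection sketch, assuming the usual honeybee-energy import path:

from honeybee_energy.result.sql import SQLiteResult

sql_obj = SQLiteResult('./tests/assets/sql/eplusout_hourly.sql')
for coll in sql_obj.data_collections_by_output_name('Zone Lights Electric Energy'):
    # the 'Zone' metadata key identifies the room each collection belongs to
    print(coll.header.metadata['Zone'], coll.total)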
Example #2
def data_by_outputs(result_sql, output_names, output_file):
    """Get an array of DataCollection JSONs for a several EnergyPlus outputs.

    \b
    Args:
        result_sql: Full path to an SQLite file that was generated by EnergyPlus.
        output_names: An array of EnergyPlus output names to be retrieved from
            the SQLite result file. This can also be a nested array (an array of
            output name arrays) if each string is formatted as a JSON array
            with [] brackets.
    """
    try:
        sql_obj = SQLiteResult(result_sql)
        data_colls = []
        for output_name in output_names:
            output_name = str(output_name)
            if output_name.startswith('['):
                output_name = tuple(outp.replace('"', '').strip()
                                    for outp in output_name.strip('[]').split(','))
            data_cs = sql_obj.data_collections_by_output_name(output_name)
            data_colls.append([data.to_dict() for data in data_cs])
        output_file.write(json.dumps(data_colls))
    except Exception as e:
        _logger.exception('Failed to retrieve outputs from sql file.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)
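
A hedged sketch of invoking this command from plain Python, mirroring the subprocess pattern that the Grasshopper components further down this page use (the sql path is a placeholder, and honeybee-energy is assumed to be installed for the interpreter):

import json
import subprocess
import sys

cmds = [
    sys.executable, '-m', 'honeybee_energy', 'result', 'data-by-outputs',
    'eplusout.sql',
    '["Zone Lights Electric Energy", "Zone Mean Radiant Temperature"]'
]
process = subprocess.Popen(cmds, stdout=subprocess.PIPE)
stdout = process.communicate()
data_coll_dicts = json.loads(stdout[0])  # one list of dicts per requested group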
Example #3
def data_by_output(result_sql, output_name, output_file):
    """Get an array of DataCollection JSONs for a specific EnergyPlus output.

    \b
    Args:
        result_sql: Full path to an SQLite file that was generated by EnergyPlus.
        output_name: The name of an EnergyPlus output to be retrieved from
            the SQLite result file. This can also be an array of names if the
            string is formatted as a JSON array with [] brackets. Note that only
            a single array of data collection JSONs will be returned from this
            method and, if data collections must be grouped, the data_by_outputs
            method should be used.
    """
    try:
        sql_obj = SQLiteResult(result_sql)
        output_name = str(output_name)
        if output_name.startswith('['):
            output_name = tuple(outp.replace('"', '').strip()
                                for outp in output_name.strip('[]').split(','))
        data_colls = sql_obj.data_collections_by_output_name(output_name)
        output_file.write(json.dumps([data.to_dict() for data in data_colls]))
    except Exception as e:
        _logger.exception('Failed to retrieve outputs from sql file.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)
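
By analogy, a sketch for the singular command; the subcommand name data-by-output is an assumption here, inferred from the function name rather than confirmed by this page:

import json
import subprocess
import sys

cmds = [
    sys.executable, '-m', 'honeybee_energy', 'result', 'data-by-output',
    'eplusout.sql', 'Zone Mean Air Temperature'  # placeholder sql path
]
stdout = subprocess.Popen(cmds, stdout=subprocess.PIPE).communicate()
data_dicts = json.loads(stdout[0])  # a flat list of data collection dicts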
Example #4
def test_sqlite_data_collections_by_output_name():
    """Test the data_collections_by_output_name method."""
    sql_path = './tests/assets/sql/eplusout_hourly.sql'
    sql_obj = SQLiteResult(sql_path)

    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Lights Electric Energy')
    assert len(data_colls) == 7
    for coll in data_colls:
        assert isinstance(coll, HourlyContinuousCollection)
        assert len(coll) == len(coll.header.analysis_period.hoys)
        assert isinstance(coll.header.data_type, Energy)
        assert coll.header.unit == 'kWh'

    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Mean Radiant Temperature')
    for coll in data_colls:
        assert isinstance(coll, HourlyContinuousCollection)
        assert len(coll) == len(coll.header.analysis_period.hoys)
        assert isinstance(coll.header.data_type, Temperature)
        assert coll.header.unit == 'C'

    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Electric Equipment Electric Energy')
    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Mean Air Temperature')
    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Air Relative Humidity')
    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Ideal Loads Supply Air Total Heating Energy')
    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Ideal Loads Supply Air Total Cooling Energy')
Example #5
def output_csv(result_sql, output_names, output_file):
    """Get CSV for specific EnergyPlus outputs.

    \b
    Args:
        result_sql: Full path to an SQLite file that was generated by EnergyPlus.
        output_names: One or more EnergyPlus output names to be retrieved
            from the SQLite result file. All data collections for each of
            the names will be written into the CSV.
    """
    try:
        # get the data collections
        sql_obj = SQLiteResult(result_sql)
        data_colls = []
        for output_name in output_names:
            output_name = str(output_name)
            if output_name.startswith('['):
                output_name = tuple(outp.replace('"', '').strip()
                                    for outp in output_name.strip('[]').split(','))
            data_colls.extend(sql_obj.data_collections_by_output_name(output_name))

        # create the header rows
        type_row = ['DateTime'] + [data.header.metadata['type'] for data in data_colls]
        units_row = [''] + [data.header.unit for data in data_colls]
        obj_row = ['']
        for data in data_colls:
            # use the first of the Zone/Surface/System keys found in the metadata
            for key in ('Zone', 'Surface', 'System'):
                if key in data.header.metadata:
                    obj_row.append(data.header.metadata[key])
                    break
            else:
                obj_row.append('')

        # create the data rows
        try:
            datetimes = [data_colls[0].datetimes]
        except IndexError:  # no data for the requested type
            datetimes = []
        val_columns = datetimes + [data.values for data in data_colls]

        # write everything into the output file
        def write_row(row):
            output_file.write(','.join([str(item) for item in row]) + '\n')
        write_row(type_row)
        write_row(units_row)
        write_row(obj_row)
        for row in zip(*val_columns):
            write_row(row)
    except Exception as e:
        _logger.exception('Failed to retrieve outputs from sql file.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)
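
The CSV written above starts with three header rows (output type, units, and the Zone/Surface/System name) followed by one row per timestep. A small sketch of reading it back with the standard library, assuming it was written to a file named results.csv:

import csv

with open('results.csv') as f:
    reader = csv.reader(f)
    type_row = next(reader)   # e.g. ['DateTime', 'Lights Electric Energy', ...]
    units_row = next(reader)  # e.g. ['', 'kWh', ...]
    obj_row = next(reader)    # e.g. ['', 'ROOM_1', ...]
    data_rows = list(reader)  # datetime plus one value column per collection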
Example #6
def test_sqlite_data_collections_by_output_name_openstudio():
    """Test the data_collections_by_output_name method with openstudio values."""
    sql_path = './tests/assets/sql/eplusout_openstudio.sql'
    sql_obj = SQLiteResult(sql_path)

    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Lights Electric Energy')
    for coll in data_colls:
        assert isinstance(coll, HourlyContinuousCollection)
        assert len(coll) == len(coll.header.analysis_period.hoys)
        assert isinstance(coll.header.data_type, Energy)
        assert coll.header.unit == 'kWh'

    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Electric Equipment Electric Energy')
    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Ideal Loads Supply Air Total Heating Energy')
    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Ideal Loads Supply Air Total Cooling Energy')
Example #7
def test_sqlite_data_collections_by_output_name_design_day():
    """Test the data_collections_by_output_name method with several design day results."""
    sql_path = './tests/assets/sql/eplusout_design_days.sql'
    sql_obj = SQLiteResult(sql_path)

    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Lights Electric Energy')
    assert len(data_colls) == 49
    for coll in data_colls:
        assert isinstance(coll, HourlyContinuousCollection)
        assert len(coll) == 24
Example #8
def test_sqlite_data_collections_by_output_name_timestep():
    """Test the data_collections_by_output_name method with timestep values."""
    sql_path = './tests/assets/sql/eplusout_timestep.sql'
    sql_obj = SQLiteResult(sql_path)

    assert sql_obj.reporting_frequency == 6
    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Lights Electric Energy')
    for coll in data_colls:
        assert isinstance(coll, HourlyContinuousCollection)
        assert len(coll) == 7 * 24 * 6
Example #9
def test_sqlite_data_collections_by_output_name_monthly():
    """Test the data_collections_by_output_name method with monthly values."""
    sql_path = './tests/assets/sql/eplusout_monthly.sql'
    sql_obj = SQLiteResult(sql_path)

    assert sql_obj.reporting_frequency == 'Monthly'
    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Lights Electric Energy')
    for coll in data_colls:
        assert isinstance(coll, MonthlyCollection)
        assert coll.header.analysis_period.is_annual
        assert len(coll) == 12
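
As the last few tests show, reporting_frequency determines which collection class comes back. A dispatch sketch, assuming the usual ladybug import paths and a placeholder file path:

from honeybee_energy.result.sql import SQLiteResult
from ladybug.datacollection import (
    HourlyContinuousCollection, DailyCollection, MonthlyCollection)

sql_obj = SQLiteResult('./eplusout.sql')
colls = sql_obj.data_collections_by_output_name('Zone Lights Electric Energy')
freq = sql_obj.reporting_frequency
if freq == 'Monthly':
    expected = MonthlyCollection
elif freq == 'Daily':
    expected = DailyCollection
else:  # 'Hourly' or an integer number of timesteps per hour
    expected = HourlyContinuousCollection
assert all(isinstance(c, expected) for c in colls)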
Example #10
def test_sqlite_data_collections_by_output_name_single():
    """Test the data_collections_by_output_name method with a single data."""
    sql_path = './tests/assets/sql/eplusout_openstudio_error.sql'
    sql_obj = SQLiteResult(sql_path)

    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Lights Electric Energy')
    assert len(data_colls) == 1
    for coll in data_colls:
        assert isinstance(coll, HourlyContinuousCollection)
        assert len(coll) == len(coll.header.analysis_period.hoys)
        assert isinstance(coll.header.data_type, Energy)
        assert coll.header.unit == 'kWh'
Example #11
def test_sqlite_data_collections_by_output_name_dday_runperiod():
    """Test the data_collections_by_output_name method with several design day results."""
    sql_path = './tests/fixtures/sql/eplusout_dday_runper.sql'
    sql_obj = SQLiteResult(sql_path)

    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Lights Electric Energy')
    assert len(data_colls) == 56
    for coll in data_colls[:49]:
        assert isinstance(coll, HourlyContinuousCollection)
        assert len(coll) == 24
    for coll in data_colls[49:]:
        assert isinstance(coll, HourlyContinuousCollection)
        assert len(coll) == 744
Example #12
def adaptive_by_room(result_sql, epw_file, air_speed, comfort_par, result_type,
                     output_file):
    """Get data collections for Adaptive comfort in each room from an EnergyPlus sql.

    \b
    Args:
        result_sql: Path to an SQLite file that was generated by EnergyPlus. This
            file must contain hourly or sub-hourly results for zone comfort variables.
        epw_file: Path to an .epw file, used to provide prevailing outdoor
            temperature for the adaptive comfort model.
    """
    try:
        # load the energyplus results related to thermal comfort and the EPW object
        epw_obj = EPW(epw_file)
        out_temp = epw_obj.dry_bulb_temperature
        sql_obj = SQLiteResult(result_sql)
        op_temps = sql_obj.data_collections_by_output_name(
            'Zone Operative Temperature')

        # load the air speed data collection if specified
        assert len(op_temps) != 0, \
            'Input result-sql does not contain "Zone Operative Temperature" output.'
        air_speed = _load_data(air_speed, op_temps[0], AirSpeed, 'm/s')

        # run the collections through the Adaptive model and output results
        param = _load_adaptive_par_str(comfort_par)
        ad_colls = []
        for op_temp in op_temps:
            ad_obj = Adaptive(out_temp,
                              op_temp,
                              air_speed,
                              comfort_parameter=param)
            if result_type == 'DegreesFromNeutral':
                ad_colls.append(ad_obj.degrees_from_neutral)
            elif result_type == 'Comfort':
                ad_colls.append(ad_obj.is_comfortable)
            else:
                ad_colls.append(ad_obj.thermal_condition)
        output_file.write(json.dumps([col.to_dict() for col in ad_colls]))
    except Exception as e:
        _logger.exception(
            'Failed to run Adaptive model from sql file.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)
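
Stripped of the CLI plumbing, the core of the function above is a direct call into the Adaptive comfort model. A minimal sketch, under the assumption that Adaptive lives at the usual ladybug-comfort import path and that its air_speed and comfort_parameter arguments have sensible defaults (file paths are placeholders):

from ladybug.epw import EPW
from honeybee_energy.result.sql import SQLiteResult
from ladybug_comfort.collection.adaptive import Adaptive

epw_obj = EPW('./weather.epw')
sql_obj = SQLiteResult('./eplusout.sql')
op_temps = sql_obj.data_collections_by_output_name('Zone Operative Temperature')

# one Adaptive object per room, with EPW dry bulb as the prevailing outdoor temperature
ad_objs = [Adaptive(epw_obj.dry_bulb_temperature, ot) for ot in op_temps]
degrees = [ad.degrees_from_neutral for ad in ad_objs]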
Example #13
def serialize_data(data_dicts):
    """Reserialize a list of collection dictionaries."""
    if len(data_dicts) == 0:
        return []
    elif data_dicts[0]['type'] == 'HourlyContinuous':
        return [
            HourlyContinuousCollection.from_dict(data) for data in data_dicts
        ]
    elif data_dicts[0]['type'] == 'Monthly':
        return [MonthlyCollection.from_dict(data) for data in data_dicts]
    elif data_dicts[0]['type'] == 'Daily':
        return [DailyCollection.from_dict(data) for data in data_dicts]


if all_required_inputs(ghenv.Component):
    if os.name == 'nt':  # we are on windows; use IronPython like usual
        sql_obj = SQLiteResult(_sql)  # create the SQL result parsing object
        results = sql_obj.data_collections_by_output_name(_output_names)

    else:  # we are on Mac; sqlite3 module doesn't work in Mac IronPython
        # Execute the honeybee CLI to obtain the results via CPython
        cmds = [
            folders.python_exe_path, '-m', 'honeybee_energy', 'result',
            'data-by-outputs', _sql
        ] + _output_names
        process = subprocess.Popen(cmds, stdout=subprocess.PIPE)
        stdout = process.communicate()
        data_dicts = json.loads(stdout[0])
        results = serialize_data(data_dicts[0])
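
The stdout parsed above is the JSON written by data-by-outputs, so each element of data_dicts is one group of collection dictionaries; serialize_data turns a group back into ladybug collection objects. A tiny illustration of the round trip, assuming a collection object coll is already at hand:

coll_dict = coll.to_dict()                   # what the CLI writes out
round_trip = serialize_data([coll_dict])[0]  # back to a collection object
assert round_trip.header.unit == coll.header.unit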

Example #14
# List of all the output strings that will be requested
oper_temp_output = 'Zone Operative Temperature'
air_temp_output = 'Zone Mean Air Temperature'
rad_temp_output = 'Zone Mean Radiant Temperature'
rel_humidity_output = 'Zone Air Relative Humidity'
all_output = [
    oper_temp_output, air_temp_output, rad_temp_output, rel_humidity_output
]

if all_required_inputs(ghenv.Component):
    if os.name == 'nt':  # we are on windows; use IronPython like usual
        sql_obj = SQLiteResult(_sql)  # create the SQL result parsing object
        # get all of the results
        oper_temp = sql_obj.data_collections_by_output_name(oper_temp_output)
        air_temp = sql_obj.data_collections_by_output_name(air_temp_output)
        rad_temp = sql_obj.data_collections_by_output_name(rad_temp_output)
        rel_humidity = sql_obj.data_collections_by_output_name(
            rel_humidity_output)

    else:  # we are on Mac; sqlite3 module doesn't work in Mac IronPython
        # Execute the honeybee CLI to obtain the results via CPython
        cmds = [
            folders.python_exe_path, '-m', 'honeybee_energy', 'result',
            'data-by-outputs', _sql
        ] + all_output
        process = subprocess.Popen(cmds, stdout=subprocess.PIPE)
        stdout = process.communicate()
        data_coll_dicts = json.loads(stdout[0])
        # get all of the results
Example #15
def pmv_by_room(result_sql, air_speed, met_rate, clo_value, comfort_par,
                result_type, output_file):
    """Get data collections for PMV in each room from an EnergyPlus sql.

    \b
    Args:
        result_sql: Path to an SQLite file that was generated by EnergyPlus. This
            file must contain hourly or sub-hourly results for zone comfort variables.
    """
    try:
        # load the energyplus results related to thermal comfort
        sql_obj = SQLiteResult(result_sql)
        air_temps = sql_obj.data_collections_by_output_name(
            'Zone Mean Air Temperature')
        rad_temps = sql_obj.data_collections_by_output_name(
            'Zone Mean Radiant Temperature')
        humidities = sql_obj.data_collections_by_output_name(
            'Zone Air Relative Humidity')

        # load any of the other data collections if specified
        assert len(air_temps) != 0, \
            'Input result-sql does not contain thermal comfort outputs.'
        base_data = air_temps[0]
        air_speed = _load_data(air_speed, base_data, AirSpeed, 'm/s')
        met_rate = _load_data(met_rate, base_data, MetabolicRate, 'met')
        clo_value = _load_data(clo_value, base_data, ClothingInsulation, 'clo')

        # get aligned data for each room
        align_dict = {
            a_dat.header.metadata['Zone']: [a_dat]
            for a_dat in air_temps
        }
        for h_dat in humidities:
            align_dict[h_dat.header.metadata['System']].append(h_dat)
        for r_dat in rad_temps:
            align_dict[r_dat.header.metadata['Zone']].append(r_dat)

        # run the collections through the PMV model and output results
        param = _load_pmv_par_str(comfort_par)
        pmv_colls = []
        for res in align_dict.values():
            pmv_obj = PMV(res[0],
                          res[1],
                          res[2],
                          air_speed,
                          met_rate,
                          clo_value,
                          comfort_parameter=param)
            if result_type == 'PMV':
                pmv_colls.append(pmv_obj.predicted_mean_vote)
            elif result_type == 'PPD':
                pmv_colls.append(pmv_obj.percentage_people_dissatisfied)
            elif result_type == 'SET':
                pmv_colls.append(pmv_obj.standard_effective_temperature)
            elif result_type == 'Comfort':
                pmv_colls.append(pmv_obj.is_comfortable)
            else:
                pmv_colls.append(pmv_obj.thermal_condition)
        output_file.write(json.dumps([col.to_dict() for col in pmv_colls]))
    except Exception as e:
        _logger.exception(
            'Failed to run PMV model from sql file.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)
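
The same de-CLI-ed sketch for PMV. Note that the function above aligns collections by their 'Zone' metadata precisely because the SQL ordering is not guaranteed; the zip below simply assumes all three outputs come back in the same room order, which is the optimistic case:

from honeybee_energy.result.sql import SQLiteResult
from ladybug_comfort.collection.pmv import PMV  # assumed import path

sql_obj = SQLiteResult('./eplusout.sql')  # placeholder path
air_temps = sql_obj.data_collections_by_output_name('Zone Mean Air Temperature')
rad_temps = sql_obj.data_collections_by_output_name('Zone Mean Radiant Temperature')
humidities = sql_obj.data_collections_by_output_name('Zone Air Relative Humidity')

# PMV takes air temperature, relative humidity, then radiant temperature,
# matching the positional order used in pmv_by_room above
pmv_objs = [PMV(at, rh, rt)
            for at, rt, rh in zip(air_temps, rad_temps, humidities)]
ppd = [p.percentage_people_dissatisfied for p in pmv_objs]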
Example #16
    def from_sql_file(cls, model, sql_path):
        """Create a LoadBalance object from an EnergyPlus SQLite result file.

        Args:
            model: A honeybee Model, which will have its rooms matched to the input
                data collections and used to determine which heat flow values are
                through outdoor surfaces.
            sql_path: Full path to an SQLite file that was generated by EnergyPlus.
                This file should have the relevant load balance outputs in the
                ReportData table.
        """
        # create the SQL result parsing object
        sql_obj = SQLiteResult(sql_path)

        # get all of the results relevant for gains and losses
        cooling = sql_obj.data_collections_by_output_name(cls.COOLING)
        heating = sql_obj.data_collections_by_output_name(cls.HEATING)
        lighting = sql_obj.data_collections_by_output_name(cls.LIGHTING)
        people_gain = sql_obj.data_collections_by_output_name(cls.PEOPLE_GAIN)
        solar_gain = sql_obj.data_collections_by_output_name(cls.SOLAR_GAIN)
        infil_gain = sql_obj.data_collections_by_output_name(cls.INFIL_GAIN)
        infil_loss = sql_obj.data_collections_by_output_name(cls.INFIL_LOSS)
        vent_loss = sql_obj.data_collections_by_output_name(cls.VENT_LOSS)
        vent_gain = sql_obj.data_collections_by_output_name(cls.VENT_GAIN)
        nat_vent_gain = sql_obj.data_collections_by_output_name(
            cls.NAT_VENT_GAIN)
        nat_vent_loss = sql_obj.data_collections_by_output_name(
            cls.NAT_VENT_LOSS)

        # handle the case that both total elect/gas energy and zone gain are requested
        electric_equip = sql_obj.data_collections_by_output_name(
            cls.ELECTRIC_EQUIP[1])
        if len(electric_equip) == 0:
            electric_equip = sql_obj.data_collections_by_output_name(
                cls.ELECTRIC_EQUIP)
        gas_equip = sql_obj.data_collections_by_output_name(cls.GAS_EQUIP[1])
        if len(gas_equip) == 0:
            gas_equip = sql_obj.data_collections_by_output_name(cls.GAS_EQUIP)
        hot_water = sql_obj.data_collections_by_output_name(cls.HOT_WATER[1])
        if len(hot_water) == 0:
            hot_water = sql_obj.data_collections_by_output_name(cls.HOT_WATER)

        # subtract losses from gains
        infiltration = None
        mech_vent = None
        nat_vent = None
        if len(infil_gain) == len(infil_loss):
            infiltration = cls.subtract_loss_from_gain(infil_gain, infil_loss)
        if len(vent_gain) == len(vent_loss) == len(cooling) == len(heating):
            mech_vent = cls.mech_vent_loss_gain(vent_gain, vent_loss, cooling,
                                                heating)
        if len(nat_vent_gain) == len(nat_vent_loss):
            nat_vent = cls.subtract_loss_from_gain(nat_vent_gain,
                                                   nat_vent_loss)

        # get the surface energy flow
        opaque_flow = sql_obj.data_collections_by_output_name(
            cls.OPAQUE_ENERGY_FLOW)
        window_loss = sql_obj.data_collections_by_output_name(cls.WINDOW_LOSS)
        window_gain = sql_obj.data_collections_by_output_name(cls.WINDOW_GAIN)
        window_flow = []
        if len(window_gain) == len(window_loss):
            window_flow = cls.subtract_loss_from_gain(window_gain, window_loss)
        face_energy_flow = opaque_flow + window_flow

        bal_obj = cls(model.rooms,
                      cooling,
                      heating,
                      lighting,
                      electric_equip,
                      gas_equip,
                      hot_water,
                      people_gain,
                      solar_gain,
                      infiltration,
                      mech_vent,
                      nat_vent,
                      face_energy_flow,
                      model.units,
                      use_all_solar=True)
        bal_obj.floor_area = bal_obj._area_as_meters_feet(model.floor_area)
        return bal_obj
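
A hedged usage sketch for the classmethod above; the module path for LoadBalance and the file paths are assumptions:

from honeybee.model import Model
from honeybee_energy.result.loadbalance import LoadBalance  # assumed path

model = Model.from_hbjson('./model.hbjson')  # placeholder paths
bal = LoadBalance.from_sql_file(model, './eplusout.sql')
print(bal.floor_area)  # set by from_sql_file from the model's floor area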
Example #17
    idf = os.path.join(directory, 'in.idf')
    write_to_file_by_name(directory, 'in.idf', idf_str, True)

    # run the IDF through EnergyPlus
    silent = True if _run == 1 else False
    sql, zsz, rdd, html, err = run_idf(idf, _epw_file, silent=silent)
    if html is None and err is not None:  # something went wrong; parse the errors
        err_obj = Err(err)
        print(err_obj.file_contents)
        for error in err_obj.fatal_errors:
            raise Exception(error)

    # parse the result sql and get the monthly data collections
    if os.name == 'nt':  # we are on windows; use IronPython like usual
        sql_obj = SQLiteResult(sql)
        cool_init = sql_obj.data_collections_by_output_name(cool_out)
        heat_init = sql_obj.data_collections_by_output_name(heat_out)
        light_init = sql_obj.data_collections_by_output_name(light_out)
        elec_equip_init = sql_obj.data_collections_by_output_name(el_equip_out)
        gas_equip_init = sql_obj.data_collections_by_output_name(gas_equip_out)
        process1_init = sql_obj.data_collections_by_output_name(process1_out)
        process2_init = sql_obj.data_collections_by_output_name(process2_out)
        shw_init = sql_obj.data_collections_by_output_name(shw_out)
    else:  # we are on Mac; sqlite3 module doesn't work in Mac IronPython
        # Execute the honeybee CLI to obtain the results via CPython
        cmds = [
            folders.python_exe_path, '-m', 'honeybee_energy', 'result',
            'data-by-outputs', sql
        ]
        for outp in energy_output:
            cmds.append('["{}"]'.format(outp))
        process = subprocess.Popen(cmds, stdout=subprocess.PIPE)
        data_coll_dicts = json.loads(process.communicate()[0])

        # execute the CLI again to obtain the peak loads
        cmds = [
            folders.python_exe_path, '-m', 'honeybee_energy', 'result',
            'zone-sizes', sql
        ]
        process = subprocess.Popen(cmds, stdout=subprocess.PIPE)
        stdout = process.communicate()
        peak_dicts = json.loads(stdout[0])
        peak_cool = [
            zs['calculated_design_load'] for zs in peak_dicts['cooling']
        ]
        peak_heat = [
            zs['calculated_design_load'] for zs in peak_dicts['heating']
        ]

    # construct the load balance if requested
    if run_bal_:
        if os.name == 'nt':  # we are on windows; use IronPython like usual
            light = sql_obj.data_collections_by_output_name(
                LoadBalance.LIGHTING)
            ele_equip = sql_obj.data_collections_by_output_name(
                LoadBalance.ELECTRIC_EQUIP)
            gas_equip = sql_obj.data_collections_by_output_name(
                LoadBalance.GAS_EQUIP)
            hot_water = sql_obj.data_collections_by_output_name(
                LoadBalance.HOT_WATER)
            people = sql_obj.data_collections_by_output_name(
                LoadBalance.PEOPLE_GAIN)
            solar = sql_obj.data_collections_by_output_name(
                LoadBalance.SOLAR_GAIN)
            infil_gain = sql_obj.data_collections_by_output_name(
                LoadBalance.INFIL_GAIN)
            infil_loss = sql_obj.data_collections_by_output_name(
                LoadBalance.INFIL_LOSS)
            opaque_flow = sql_obj.data_collections_by_output_name(
Example #19
def _parse_enclosure_info(enclosure_info,
                          result_sql,
                          epw,
                          analysis_period=None,
                          default_air_speed=0.1,
                          include_humidity=False,
                          use_10m_wind_speed=False):
    """Get lists of comfort-related data collections from an enclosure_info JSON.

    Args:
        enclosure_info: Path to a JSON file containing information about the radiant
            enclosure that sensor points belong to.
        result_sql: Path to an SQLite file that was generated by EnergyPlus.
            This file must contain hourly or sub-hourly results for zone comfort
            variables.
        epw: An EPW object that will be used to specify data for any sensor outside
            of any enclosure.
        analysis_period: An optional AnalysisPeriod to be applied to all results.
            If None, all data collections will be for the entire run period of
            the result_sql.
        default_air_speed: A single value or data collection to be used for all
            indoor air speed.
        include_humidity: Boolean to note whether data collections of humidity should
            be returned or not.
        use_10m_wind_speed: Boolean to note whether the meteorological wind speed
            should be used as-is for any outdoor sensors or whether it should be
            converted to ground-level speed (multiplying by 2/3).

    Returns:
        A tuple of 5 values.

        * pt_air_temps -- Data collections of air temperatures.

        * pt_rad_temps -- Data collections of long wave mean radiant temperature.

        * pt_humids -- Data collections of relative humidity if include_humidity is True.

        * pt_speeds -- Data collections of air speed values.

        * base_a_per - The AnalysisPeriod of the data in the result_sql.
    """
    # load all comfort-related outputs from the result_sql
    sql_obj = SQLiteResult(result_sql)
    air_temps = sql_obj.data_collections_by_output_name(
        'Zone Mean Air Temperature')
    rad_temps = sql_obj.data_collections_by_output_name(
        'Zone Mean Radiant Temperature')
    if include_humidity:
        humids = sql_obj.data_collections_by_output_name(
            'Zone Air Relative Humidity')

    # check that EnergyPlus sql data is correct and note the analysis period
    assert len(air_temps) != 0, \
        'Input result-sql does not contain thermal comfort outputs.'
    assert isinstance(air_temps[0], HourlyContinuousCollection), 'EnergyPlus ' \
        'reporting frequency must be Hourly or Timestep to use thermal mapping. ' \
        'Not {}'.format(air_temps[0])
    base_a_per = air_temps[0].header.analysis_period

    # convert default air speed into a data collection if it's a list
    default_air_speed = _values_to_data(default_air_speed, base_a_per,
                                        AirSpeed, 'm/s')

    # parse the enclosure_info
    with open(enclosure_info) as json_file:
        enclosure_dict = json.load(json_file)

    # order the sql data based on the relevant zones from the enclosure_info
    rel_air_temps, rel_rad_temps, rel_humids, rel_speeds = [], [], [], []
    for zone_id in enclosure_dict['mapper']:
        zone_id = zone_id.upper()  # capitalize to match the output of EnergyPlus
        for data in air_temps:
            if data.header.metadata['Zone'] == zone_id:
                rel_air_temps.append(data)
                break
        for data in rad_temps:
            if data.header.metadata['Zone'] == zone_id:
                rel_rad_temps.append(data)
                break
        if include_humidity:
            for data in humids:
                if data.header.metadata['System'] == zone_id:
                    rel_humids.append(data)
                    break
        rel_speeds.append(default_air_speed)

    # if the enclosure info includes outdoor sensors, ensure epw data is added
    if enclosure_dict['has_outdoor']:
        _add_epw_data(epw, rel_air_temps, rel_rad_temps, rel_humids,
                      rel_speeds, base_a_per, use_10m_wind_speed)

    # apply the analysis periods if it is specified
    if analysis_period is not None and base_a_per != analysis_period:
        a_per = analysis_period
        rel_air_temps = [
            data.filter_by_analysis_period(a_per) for data in rel_air_temps
        ]
        rel_rad_temps = [
            data.filter_by_analysis_period(a_per) for data in rel_rad_temps
        ]
        if include_humidity:
            rel_humids = [
                data.filter_by_analysis_period(a_per) for data in rel_humids
            ]
        new_rel_speeds = []
        for a_spd in rel_speeds:
            new_a_spd = a_spd.filter_by_analysis_period(a_per) \
                if isinstance(a_spd, HourlyContinuousCollection) else a_spd
            new_rel_speeds.append(new_a_spd)
        rel_speeds = new_rel_speeds

    # loop through the sensors and select the relevant data collections
    pt_air_temps, pt_rad_temps, pt_humids, pt_speeds = [], [], [], []
    for pt_i in enclosure_dict['sensor_indices']:
        pt_air_temps.append(rel_air_temps[pt_i])
        pt_rad_temps.append(rel_rad_temps[pt_i])
        if include_humidity:
            pt_humids.append(rel_humids[pt_i])
        pt_speeds.append(rel_speeds[pt_i])
    return pt_air_temps, pt_rad_temps, pt_humids, pt_speeds, base_a_per
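
An illustrative call to the helper above (it is private to its module, so this is only a sketch; file paths are placeholders):

from ladybug.epw import EPW

epw = EPW('./weather.epw')
air_t, rad_t, hums, speeds, a_per = _parse_enclosure_info(
    './enclosure_info.json', './eplusout.sql', epw,
    include_humidity=True, use_10m_wind_speed=False)
# one collection per sensor, ordered by the enclosure's sensor_indices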
Example #20
def main(num):
    """Make facade traintest data from hbjsons."""

    RDD_SRF_DICT = {
        'srf_win_heat_loss': 'Surface Window Heat Loss Energy',
        'srf_win_heat_gain': 'Surface Window Heat Gain Energy',
        'srf_heat_transfer':
        'Surface Average Face Conduction Heat Transfer Energy',
        'srf_win_sol': 'Surface Window Transmitted Solar Radiation Energy',
        'srf_ext_sol':
        'Surface Outside Face Incident Solar Radiation Rate per Area',
        'srf_int_sol': 'Surface Inside Face Solar Radiation Heat Gain Rate',
        'srf_cos':
        'Surface Outside Face Beam Solar Incident Angle Cosine Value'
    }

    rdd_srf_var = RDD_SRF_DICT['srf_int_sol']
    div_by_area = False if 'per Area' in rdd_srf_var else True
    vmin = 0
    vmax = 50 if 'Inside' in rdd_srf_var else 300
    print('Plot {} with div_by_area {}.'.format(rdd_srf_var, div_by_area))
    print('Outside should be 0-300, Inside should be 0-50. '
          'Values clipped between {} - {}'.format(vmin, vmax))
    show_legend = False
    NN2 = True

    # ---------------------------------------------------------------------------------
    # Get all HB models and sim fpaths
    # ---------------------------------------------------------------------------------

    deeprad_hbjsons_dir = \
        os.path.join(os.getcwd(), '../../..', 'master/git', 'deeprad/data/hbjsons/')
    deeprad_hbjsons_dir = os.path.abspath(deeprad_hbjsons_dir)
    traintest_dir = os.path.join(deeprad_hbjsons_dir, '..', 'traintest3')

    _model_fpaths = sorted(os.listdir(deeprad_hbjsons_dir),
                           key=lambda s: int(s.split('_')[0]))
    _model_fpaths = [
        os.path.join(deeprad_hbjsons_dir, mf) for mf in _model_fpaths
    ]

    if num:
        _model_fpaths = _model_fpaths[:num]

    models, model_sims = flattened_bems(_model_fpaths)
    model_num = len(models)

    # ---------------------------------------------------------------------------------
    # Init r.mod
    # ---------------------------------------------------------------------------------

    # make R
    mod_df = GeomDataFrame({
        'model_id': [m.identifier for m in models],
        'model': models,
        'sim_fpath': model_sims
    })

    r = R(mod_df)
    model_num = len(r.mod)
    r.mod['null'] = np.ones(model_num)

    print('# of models', model_num)

    # ---------------------------------------------------------------------------------
    # Init r.srf
    # ---------------------------------------------------------------------------------

    # Add srfs
    faces, face_mod_idxs = [], []
    for i, model in enumerate(mod_df['model']):
        faces.extend(model.faces)
        face_mod_idxs.extend([i] * len(model.faces))

    # TODO: Mtx > mtx_util, Mtx_geom > mtx_geom_util, mtx_geom, move mtx_utli to Mtx?
    r.srf = GeomDataFrame({'mod_idx': face_mod_idxs, 'srf_geom': faces})
    r.srf['type'] = [srf.type.name for srf in r.srf['srf_geom']]
    r.srf['bc'] = [srf.boundary_condition.name for srf in r.srf['srf_geom']]
    srf_num = len(faces)

    # ---------------------------------------------------------------------------------
    # Add sim data to r.srf
    # ---------------------------------------------------------------------------------

    # Extract srf sim data
    r.srf['srf_heat_transfer'] = np.empty(srf_num)
    r.srf['srf_heat_transfer'] = np.NaN

    # Update surfaces for each model
    for i, sim_fpath in enumerate(r.mod['sim_fpath']):

        # Get exterior faces
        srf_mod_idx = r.srf.query('mod_idx == {}'.format(i)).index
        if not os.path.isfile(sim_fpath):
            continue

        sql = SQLiteResult(sim_fpath)
        srf_sim_data = sql.data_collections_by_output_name(rdd_srf_var)

        r_srf_geom_arr = r.srf.loc[srf_mod_idx, 'srf_geom']
        _fil_mod_idx, _fil_data = reorder_to_srf_geom_arr(
            r_srf_geom_arr, srf_mod_idx, srf_sim_data, div_by_area=div_by_area)
        r.srf.loc[_fil_mod_idx, 'srf_heat_transfer'] = _fil_data

    # ---------------------------------------------------------------------------------
    # Plot facade
    # ---------------------------------------------------------------------------------
    # Define constant camera properties
    FOCAL_LEN = 45.0
    PITCH = deg2rad(15)  # Z
    CAM_POSN = np.array([0, -35, 2.5])  # camera placed at 2nd floor
    angle_iter = 45
    CAM_ANGLES = np.arange(0, 180 + angle_iter, angle_iter)
    PLT_NUM, PLT_HT = len(CAM_ANGLES), 4
    ORTHO = True

    def _cam_cmap(poly_sh_arr, cmap_arr, a, **kwargs):
        """Cmap from values."""
        # normalize color
        cmap = 'RdYlBu_r'
        srf_df = GeomDataFrame({'geometry': poly_sh_arr, 'var': cmap_arr})
        a = srf_df.plot(column='var',
                        edgecolor='black',
                        cmap=cmap,
                        ax=a,
                        **kwargs)
        a.axis('equal')
        a.axis('off')
        return a

    img_in_dir = os.path.join(traintest_dir, 'in_data')
    img_out_dir = os.path.join(traintest_dir, 'out_data')

    if NN2:
        img_in_dir = os.path.join(traintest_dir, 'in_data2')
        img_out_dir = os.path.join(traintest_dir, 'out_data2')

    for mod_idx in r.mod.index:
        try:
            sim_fpath = r.mod.loc[mod_idx, 'sim_fpath']
            hb_idx = sim_fpath.split('_hb')[0].split('_')[-1].split('/')[-1]
            img_out_fpath = os.path.join(
                img_out_dir, '{}_{}_hb_solrad_out.jpg'.format(mod_idx, hb_idx))
            img_in_fpath1 = os.path.join(
                img_in_dir, '{}_{}_hb_facetype_in.jpg'.format(mod_idx, hb_idx))
            img_in_fpath2 = os.path.join(
                img_in_dir, '{}_{}_hb_mask_in.jpg'.format(mod_idx, hb_idx))

            # ---------------------------------------------------------------------------------
            # Out
            # ---------------------------------------------------------------------------------

            # # TODO: delete, this is better solved through a monte-carlo simulation
            # # TODO: consider implement based on floor normal
            # # (see_top = plot last else plot first in story). I like monte-carlo better.
            # def _order_floors_last(srf_arr, type_arr, df=True):
            #     """Rearranges floors last in depth list so they are plotted in front of walls."""
            #     # TODO: need to move floors/roofs to appropriate story data.
            #     # TODO: but what about non-facing data..

            #     idx_arr = srf_arr.index \
            #         if isinstance(srf_arr, np.ndarray) else np.arange(len(srf_arr))
            #     is_floor = ['Floor' in st for st in type_arr]
            #     reorder_idx = np.concatenate([idx_arr[~is_floor], idx_arr[~is_floor]])
            #     return srf_arr[reorder_idx]

            if not NN2:
                f, a = plt.subplots(1,
                                    PLT_NUM,
                                    figsize=(2 * PLT_HT * PLT_NUM, PLT_HT),
                                    sharey=True)
                plt.setp(a, yticks=np.arange(10, -10, 2))
                f.tight_layout()

                for i, cam_angle in enumerate(CAM_ANGLES):
                    cam = Pincam(CAM_POSN, deg2rad(cam_angle), PITCH,
                                 FOCAL_LEN)
                    #mask = _ext_geom_df(r.srf.query('mod_idx == {}'.format(mod_idx)))
                    mask = r.srf.query('mod_idx == {}'.format(mod_idx)).index
                    poly_np_arr = [
                        to_poly_np(poly_lb, True)
                        for poly_lb in r.srf.loc[mask, 'srf_geom']
                    ]
                    poly_sh_arr, depths = _project(cam.P,
                                                   poly_np_arr,
                                                   ortho=ORTHO,
                                                   res=1)

                    #_srf_arr = _order_floors_last(*r.srf.loc[mask, ['srf_geom', 'type']].T.values)
                    #mask = _srf_arr.index

                    srf_heat_arr = r.srf.loc[mask, 'srf_heat_transfer'].values
                    srf_heat_arr = np.array([srf_heat_arr[d] for d in depths])
                    if not i:
                        #print(r.mod.loc[mod_idx, 'sim_fpath'])
                        print(srf_heat_arr[~np.isnan(srf_heat_arr)].min(),
                              srf_heat_arr[~np.isnan(srf_heat_arr)].max())

                    srf_heat_arr[np.where(np.isnan(srf_heat_arr))] = 0
                    #color_arr = 'lightblue' #[ for ii in srf_df['srf_heat_transfer']]
                    #_cam_color(poly_sh_arr, color_arr, a[i], linewidth=4)
                    a[i] = _cam_cmap(poly_sh_arr,
                                     srf_heat_arr,
                                     a[i],
                                     linewidth=4,
                                     vmin=vmin,
                                     vmax=vmax,
                                     legend=show_legend)

                f.savefig(img_out_fpath)
                f.clf()
                plt.close('all')
                print('Saved out img: {}'.format(img_out_fpath))

            else:
                f, a = plt.subplots(1,
                                    PLT_NUM,
                                    figsize=(2 * PLT_HT * PLT_NUM, PLT_HT),
                                    sharey=True)
                plt.setp(a, yticks=np.arange(10, -10, 2))
                f.tight_layout()

                for i, cam_angle in enumerate(CAM_ANGLES):
                    cam = Pincam(CAM_POSN, deg2rad(cam_angle), PITCH,
                                 FOCAL_LEN)
                    mask = r.srf.query('mod_idx == {}'.format(mod_idx)).index

                    in_view = set()
                    for mask_idx, srf_geom in zip(mask, r.srf.loc[mask,
                                                                  'srf_geom']):
                        verts = [v.to_array() for v in srf_geom.vertices]
                        view_factor = Pincam.view_factor(cam.P, verts)
                        if view_factor < 0.0 or r.srf.loc[mask_idx,
                                                          'type'] == 'Floor':
                            in_view.add(mask_idx)

                    mask = [
                        mask_idx for mask_idx in mask if mask_idx in in_view
                    ]
                    poly_np_arr = [
                        to_poly_np(poly_lb, True)
                        for poly_lb in r.srf.loc[mask, 'srf_geom']
                    ]
                    poly_sh_arr, depths = _project(cam.P,
                                                   poly_np_arr,
                                                   ortho=ORTHO,
                                                   res=1)
                    srf_heat_arr = r.srf.loc[mask, 'srf_heat_transfer'].values
                    srf_heat_arr = np.array([srf_heat_arr[d] for d in depths])

                    if not i:
                        #print(r.mod.loc[mod_idx, 'sim_fpath'])
                        minv, maxv = srf_heat_arr[~np.isnan(srf_heat_arr)].min(
                        ), srf_heat_arr[~np.isnan(srf_heat_arr)].max()
                        print('min: {}, max: {}'.format(
                            np.round(minv, 2), np.round(maxv, 2)))

                    srf_heat_arr[np.where(np.isnan(srf_heat_arr))] = 0

                    #color_arr = 'lightblue' #[ for ii in srf_df['srf_heat_transfer']]
                    #_cam_color(poly_sh_arr, color_arr, a[i], linewidth=4)
                    a[i] = _cam_cmap(poly_sh_arr,
                                     srf_heat_arr,
                                     a[i],
                                     linewidth=4,
                                     vmin=vmin,
                                     vmax=vmax,
                                     legend=show_legend)

                plt.savefig(img_out_fpath)
                plt.clf()
                plt.close('all')
                print('Saved out img: {}'.format(img_out_fpath))

            # plt.close('all')
            # # ---------------------------------------------------------------------------------
            # # In
            # # ---------------------------------------------------------------------------------
            if not NN2:
                f, a = plt.subplots(1,
                                    PLT_NUM,
                                    figsize=(2 * PLT_HT * PLT_NUM, PLT_HT),
                                    sharey=True)
                plt.setp(a, yticks=np.arange(10, -10, 2))
                f.tight_layout()
                for i, cam_angle in enumerate(CAM_ANGLES):
                    cam = Pincam(CAM_POSN, deg2rad(cam_angle), PITCH,
                                 FOCAL_LEN)
                    mask = r.srf.query('mod_idx == {}'.format(mod_idx)).index
                    poly_np_arr = [
                        to_poly_np(poly_lb, True)
                        for poly_lb in r.srf.loc[mask, 'srf_geom']
                    ]
                    poly_sh_arr, depths = _project(cam.P,
                                                   poly_np_arr,
                                                   ortho=ORTHO,
                                                   res=1)
                    bcs = r.srf.loc[mask, 'bc'].values
                    bcs = np.array([bcs[d] for d in depths])

                    color_arr = [0] * len(bcs)
                    for k, _bc in enumerate(bcs):
                        if _bc == 'Adiabatic':
                            color_arr[k] = 'grey'
                        else:
                            color_arr[k] = 'lightblue'

                    _cam_color(poly_sh_arr, color_arr, a[i], linewidth=4)

                f.savefig(img_in_fpath1)
                f.savefig(img_in_fpath2)
                plt.clf()
                plt.close('all')
                print('Saved in img: {}'.format(img_in_fpath1))
                print('Saved in img: {}'.format(img_in_fpath2))
            else:
                srf_f, srf_a = plt.subplots(1,
                                            PLT_NUM,
                                            figsize=(2 * PLT_HT * PLT_NUM,
                                                     PLT_HT),
                                            sharey=True)
                plt.setp(srf_a, yticks=np.arange(10, -10, 2))
                win_f, win_a = plt.subplots(1,
                                            PLT_NUM,
                                            figsize=(2 * PLT_HT * PLT_NUM,
                                                     PLT_HT),
                                            sharey=True)
                plt.setp(win_a, yticks=np.arange(10, -10, 2))
                srf_f.tight_layout()
                win_f.tight_layout()

                for i, cam_angle in enumerate(CAM_ANGLES):
                    # Get surfaces
                    mask = r.srf.query('mod_idx == {}'.format(mod_idx)).index
                    srf_geoms = r.srf.loc[mask, 'srf_geom']
                    _win_geoms = [srf_geom.apertures for srf_geom in srf_geoms]
                    _win_geoms = [j for i in _win_geoms for j in i]
                    win_geoms = [
                        win_geom.duplicate().geometry.move(
                            win_geom.normal.duplicate() * 0.001)
                        for win_geom in _win_geoms
                    ]

                    srf_np_arr = [
                        to_poly_np(poly_lb, True) for poly_lb in srf_geoms
                    ]
                    win_np_arr = [
                        to_poly_np(poly_lb, True) for poly_lb in win_geoms
                    ]

                    # Camera
                    cam = Pincam(CAM_POSN, deg2rad(cam_angle), PITCH,
                                 FOCAL_LEN)
                    # filter srfs by in view
                    srf_np_arr = [
                        to_poly_np(poly_sh)
                        for poly_sh in r.srf.loc[mask, 'srf_geom']
                    ]
                    mask_bool = poly_in_cam_view(cam.P,
                                                 srf_np_arr,
                                                 in_view=True)
                    view_srf_np_arr = [
                        srf_np for m, srf_np in zip(mask_bool, srf_np_arr) if m
                    ]
                    view_mask = [_m for b, _m in zip(mask_bool, mask) if b]

                    # Project
                    view_srf_sh_arr, view_srf_depths = _project(
                        cam.P, view_srf_np_arr, ortho=ORTHO, res=1)
                    win_sh_arr, win_depths = _project(cam.P,
                                                      win_np_arr,
                                                      ortho=ORTHO,
                                                      res=1)
                    srf_sh_arr, srf_depths = _project(cam.P,
                                                      srf_np_arr,
                                                      ortho=ORTHO,
                                                      res=1)
                    all_sh_arr, all_depths = _project(cam.P,
                                                      srf_np_arr + win_np_arr,
                                                      ortho=ORTHO,
                                                      res=1)

                    # label depths by type
                    win_start_idx = len(srf_np_arr)
                    type_str = [
                        'win' if d >= win_start_idx else 'srf'
                        for d in all_depths
                    ]
                    type_dict = dict(zip(all_depths, type_str))

                    # colors for view srfs
                    vbcs = r.srf.loc[view_mask, 'bc'].values
                    vbcs = np.array([vbcs[d] for d in view_srf_depths])
                    view_srf_color_arr = [0] * len(view_srf_sh_arr)
                    for k, _vbc in enumerate(vbcs):
                        view_srf_color_arr[
                            k] = 'grey' if _vbc == 'Adiabatic' else 'lightblue'

                    # colors for srfs
                    abcs = r.srf.loc[mask, 'bc'].values
                    abcs = np.array([abcs[d] for d in srf_depths])
                    srf_color_arr = [0] * len(srf_sh_arr)
                    for k, _abc in enumerate(abcs):
                        srf_color_arr[
                            k] = 'grey' if _abc == 'Adiabatic' else 'lightblue'

                    # colors for wins
                    bcs = r.srf.loc[mask, 'bc'].values
                    bcs = np.array([bcs[d] for d in srf_depths])
                    srf_i = 0
                    all_color_arr = [0] * len(all_sh_arr)
                    for k, (all_sh, all_depth) in enumerate(
                            zip(all_sh_arr, all_depths)):
                        if type_dict[all_depth] == 'win':
                            all_color_arr[k] = 'red'
                        else:
                            all_color_arr[k] = 'white' if bcs[
                                srf_i] == 'Adiabatic' else 'white'
                            srf_i += 1

                    # Ghost the building
                    srf_a[i] = _cam_color(view_srf_sh_arr,
                                          view_srf_color_arr,
                                          srf_a[i],
                                          linewidth=4)
                    srf_a[i] = _cam_color(srf_sh_arr,
                                          srf_color_arr,
                                          a=srf_a[i],
                                          linewidth=4,
                                          alpha=0.5)

                    # second channel
                    _cam_color(all_sh_arr,
                               all_color_arr,
                               win_a[i],
                               linewidth=0)
                    del _win_geoms
                    del win_geoms

                srf_f.savefig(img_in_fpath1)
                win_f.savefig(img_in_fpath2)
                plt.clf()
                plt.close('all')
                print('Saved in img: {}'.format(img_in_fpath1))

        except Exception as e:
            print('\nFail at {} {}\n'.format(mod_idx, e))
Example #21
opaque_energy_flow_output = 'Surface Inside Face Conduction Heat Transfer Energy'
window_loss_output = 'Surface Window Heat Loss Energy'
window_gain_output = 'Surface Window Heat Gain Energy'
all_output = [
    face_indoor_temp_output, face_outdoor_temp_output,
    opaque_energy_flow_output, window_loss_output, window_gain_output
]

if all_required_inputs(ghenv.Component):
    # check the size of the SQL file to see if we should use the CLI
    assert os.path.isfile(_sql), 'No sql file found at: {}.'.format(_sql)
    if os.name == 'nt' and os.path.getsize(_sql) < 1e8:
        # small file on windows; use IronPython like usual
        sql_obj = SQLiteResult(_sql)  # create the SQL result parsing object
        # get all of the results
        face_indoor_temp = sql_obj.data_collections_by_output_name(
            face_indoor_temp_output)
        face_outdoor_temp = sql_obj.data_collections_by_output_name(
            face_outdoor_temp_output)
        opaque_energy_flow = sql_obj.data_collections_by_output_name(
            opaque_energy_flow_output)
        window_loss = sql_obj.data_collections_by_output_name(
            window_loss_output)
        window_gain = sql_obj.data_collections_by_output_name(
            window_gain_output)

    else:  # use the honeybee_energy CLI
        # sqlite3 module doesn't work in Mac IronPython
        # or the file is big and we know that the Python 3 version scales better
        # Execute the honeybee CLI to obtain the results via CPython
        cmds = [
            folders.python_exe_path, '-m', 'honeybee_energy', 'result',
Example #22
nat_vent_gain_outputs = LoadBalance.NAT_VENT_GAIN
nat_vent_loss_outputs = LoadBalance.NAT_VENT_LOSS
all_output = \
[cooling_outputs, heating_outputs, lighting_outputs, electric_equip_outputs,
 gas_equip_outputs, shw_outputs, fan_electric_outputs, pump_electric_outputs,
 people_gain_outputs, solar_gain_outputs, infil_gain_outputs, infil_loss_outputs,
 vent_loss_outputs, vent_gain_outputs, nat_vent_gain_outputs, nat_vent_loss_outputs]


if all_required_inputs(ghenv.Component):
    if os.name == 'nt':  # we are on windows; use IronPython like usual
        # create the SQL result parsing object
        sql_obj = SQLiteResult(_sql)

        # get all of the results relevant for energy use
        cooling = sql_obj.data_collections_by_output_name(cooling_outputs)
        heating = sql_obj.data_collections_by_output_name(heating_outputs)
        lighting = sql_obj.data_collections_by_output_name(lighting_outputs)
        electric_equip = sql_obj.data_collections_by_output_name(electric_equip_outputs)
        hot_water = sql_obj.data_collections_by_output_name(shw_outputs)
        gas_equip = sql_obj.data_collections_by_output_name(gas_equip_outputs)
        fan_electric = sql_obj.data_collections_by_output_name(fan_electric_outputs)
        pump_electric = sql_obj.data_collections_by_output_name(pump_electric_outputs)

        # get all of the results relevant for gains and losses
        people_gain = sql_obj.data_collections_by_output_name(people_gain_outputs)
        solar_gain = sql_obj.data_collections_by_output_name(solar_gain_outputs)
        infil_gain = sql_obj.data_collections_by_output_name(infil_gain_outputs)
        infil_loss = sql_obj.data_collections_by_output_name(infil_loss_outputs)
        vent_loss = sql_obj.data_collections_by_output_name(vent_loss_outputs)
        vent_gain = sql_obj.data_collections_by_output_name(vent_gain_outputs)