Example #1
def get_results_windows(sql_files):
    # set initial values that will be computed based on results
    total_floor_area, total_energy = 0, 0
    end_uses = OrderedDict()

    # loop through the sql files in the directory and add the energy use
    for result_file in sql_files:
        # parse the SQL file
        sql_obj = SQLiteResult(result_file)
        # get the total floor area of the model
        area_dict = sql_obj.tabular_data_by_name('Building Area')
        areas = tuple(area_dict.values())
        total_floor_area += areas[0][0]
        # get the energy use
        eui_dict = sql_obj.tabular_data_by_name('End Uses By Subcategory')
        for category, vals in eui_dict.items():
            total_use = sum(vals[:12])
            if total_use != 0:
                total_energy += total_use
                cat, sub_cat = category.split(':')
                eu_cat = cat if sub_cat == 'General' or sub_cat == 'Other' else sub_cat
                try:
                    end_uses[eu_cat] += total_use
                except KeyError:
                    end_uses[eu_cat] = total_use

    # assemble all of the results into a final dictionary
    eui = round(total_energy / total_floor_area, 3)
    gross_floor = round(total_floor_area, 3)
    end_use_pairs = OrderedDict([(key, round(val / total_floor_area, 3))
                                 for key, val in end_uses.items()])
    return eui, gross_floor, end_use_pairs
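
A minimal usage sketch for the function above; the SQL paths are placeholders for files produced by EnergyPlus runs:

sql_files = ['./run_1/eplusout.sql', './run_2/eplusout.sql']  # placeholder paths
eui, gross_floor, end_use_pairs = get_results_windows(sql_files)
print(eui)            # total energy use normalized by the total floor area
print(end_use_pairs)  # OrderedDict of end use category -> energy per floor area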
Example #2
def data_by_output(result_sql, output_name, output_file):
    """Get an array of DataCollection JSONs for a specific EnergyPlus output.

    \b
    Args:
        result_sql: Full path to an SQLite file that was generated by EnergyPlus.
        output_name: The name of an EnergyPlus output to be retrieved from
            the SQLite result file. This can also be an array of names if the
            string is formatted as a JSON array with [] brackets. Note that only
            a single array of data collection JSONs will be returned from this
            method and, if data collections must be grouped, the data_by_outputs
            method should be used.
    """
    try:
        sql_obj = SQLiteResult(result_sql)
        output_name = str(output_name)
        if output_name.startswith('['):
            output_name = tuple(outp.replace('"', '').strip()
                                for outp in output_name.strip('[]').split(','))
        data_colls = sql_obj.data_collections_by_output_name(output_name)
        output_file.write(json.dumps([data.to_dict() for data in data_colls]))
    except Exception as e:
        _logger.exception('Failed to retrieve outputs from sql file.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)
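
The bracketed-string handling above can be shown in isolation; the output names here are only examples:

output_name = '["Zone Lights Electric Energy", "Zone Mean Radiant Temperature"]'
names = tuple(outp.replace('"', '').strip()
              for outp in output_name.strip('[]').split(','))
# names == ('Zone Lights Electric Energy', 'Zone Mean Radiant Temperature')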
Example #3
def data_by_outputs(result_sql, output_names, output_file):
    """Get an array of DataCollection JSONs for a several EnergyPlus outputs.

    \b
    Args:
        result_sql: Full path to an SQLite file that was generated by EnergyPlus.
        output_names: An array of EnergyPlus output names to be retrieved from
            the SQLite result file. This can also be a nested array (an array of
            output name arrays) if each string is formatted as a JSON array
            with [] brackets.
    """
    try:
        sql_obj = SQLiteResult(result_sql)
        data_colls = []
        for output_name in output_names:
            output_name = str(output_name)
            if output_name.startswith('['):
                output_name = tuple(outp.replace('"', '').strip()
                                    for outp in output_name.strip('[]').split(','))
            data_cs = sql_obj.data_collections_by_output_name(output_name)
            data_colls.append([data.to_dict() for data in data_cs])
        output_file.write(json.dumps(data_colls))
    except Exception as e:
        _logger.exception('Failed to retrieve outputs from sql file.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)
Example #4
def test_sqlite_component_sizing():
    """Test the properties and methods related to component sizes."""
    sql_path = './tests/assets/sql/eplusout_hourly.sql'
    sql_obj = SQLiteResult(sql_path)

    comp_sizes = sql_obj.component_sizes
    comp_size_type = sql_obj.component_sizes_by_type(
        'ZoneHVAC:IdealLoadsAirSystem')
    comp_types = sql_obj.component_types

    assert len(comp_sizes) == 7
    assert len(comp_size_type) == 7
    assert comp_types == ['ZoneHVAC:IdealLoadsAirSystem']

    for size_obj in comp_sizes:
        assert isinstance(size_obj, ComponentSize)
        assert size_obj.component_type == 'ZoneHVAC:IdealLoadsAirSystem'
        assert isinstance(size_obj.component_name, str)
        assert all(isinstance(desc, str) for desc in size_obj.descriptions)
        assert all(isinstance(prop, str) for prop in size_obj.properties)
        assert all(isinstance(val, float) for val in size_obj.values)
        assert all(isinstance(unit, str) for unit in size_obj.units)
        assert isinstance(size_obj.properties_dict, dict)
        assert len(size_obj.properties_dict) == 4
        size_dict = size_obj.to_dict()
        new_size = ComponentSize.from_dict(size_dict)
        assert new_size.to_dict() == size_dict
Example #5
def output_csv(result_sql, output_names, output_file):
    """Get CSV for specific EnergyPlus outputs.

    \b
    Args:
        result_sql: Full path to an SQLite file that was generated by EnergyPlus.
        output_names: One or more EnergyPlus output names to be retrieved from
            the SQLite result file. When several names are given, all of the
            corresponding data collections will be retrieved.
    """
    try:
        # get the data collections
        sql_obj = SQLiteResult(result_sql)
        data_colls = []
        for output_name in output_names:
            output_name = str(output_name)
            if output_name.startswith('['):
                output_name = tuple(outp.replace('"', '').strip()
                                    for outp in output_name.strip('[]').split(','))
            data_colls.extend(sql_obj.data_collections_by_output_name(output_name))

        # create the header rows
        type_row = ['DateTime'] + [data.header.metadata['type'] for data in data_colls]
        units_row = [''] + [data.header.unit for data in data_colls]
        obj_row = ['']
        for data in data_colls:
            try:
                obj_row.append(data.header.metadata['Zone'])
            except KeyError:
                try:
                    obj_row.append(data.header.metadata['Surface'])
                except KeyError:
                    try:
                        obj_row.append(data.header.metadata['System'])
                    except KeyError:
                        obj_row.append('')

        # create the data rows
        try:
            datetimes = [data_colls[0].datetimes]
        except IndexError:  # no data for the requested type
            datetimes = []
        val_columns = datetimes + [data.values for data in data_colls]

        # write everything into the output file
        def write_row(row):
            output_file.write(','.join([str(item) for item in row]) + '\n')
        write_row(type_row)
        write_row(units_row)
        write_row(obj_row)
        for row in zip(*val_columns):
            write_row(row)
    except Exception as e:
        _logger.exception('Failed to retrieve outputs from sql file.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)
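
For a request like ('Zone Lights Electric Energy', 'Zone Mean Radiant Temperature'), the three header rows built above would look roughly like this (the object names are placeholders):

type_row = ['DateTime', 'Zone Lights Electric Energy', 'Zone Mean Radiant Temperature']
units_row = ['', 'kWh', 'C']
obj_row = ['', 'Room_1', 'Room_2']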
Example #6
def test_sqlite_data_collections_by_output_name():
    """Test the data_collections_by_output_name method."""
    sql_path = './tests/assets/sql/eplusout_hourly.sql'
    sql_obj = SQLiteResult(sql_path)

    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Lights Electric Energy')
    assert len(data_colls) == 7
    for coll in data_colls:
        assert isinstance(coll, HourlyContinuousCollection)
        assert len(coll) == len(coll.header.analysis_period.hoys)
        assert isinstance(coll.header.data_type, Energy)
        assert coll.header.unit == 'kWh'

    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Mean Radiant Temperature')
    for coll in data_colls:
        assert isinstance(coll, HourlyContinuousCollection)
        assert len(coll) == len(coll.header.analysis_period.hoys)
        assert isinstance(coll.header.data_type, Temperature)
        assert coll.header.unit == 'C'

    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Electric Equipment Electric Energy')
    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Mean Air Temperature')
    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Air Relative Humidity')
    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Ideal Loads Supply Air Total Heating Energy')
    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Ideal Loads Supply Air Total Cooling Energy')
Example #7
def test_sqlite_data_collections_by_output_name_timestep():
    """Test the data_collections_by_output_name method with timestep values."""
    sql_path = './tests/assets/sql/eplusout_timestep.sql'
    sql_obj = SQLiteResult(sql_path)

    assert sql_obj.reporting_frequency == 6
    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Lights Electric Energy')
    for coll in data_colls:
        assert isinstance(coll, HourlyContinuousCollection)
        assert len(coll) == 7 * 24 * 6
Example #8
def test_sqlite_data_collections_by_output_name_design_day():
    """Test the data_collections_by_output_name method with several design day results."""
    sql_path = './tests/assets/sql/eplusout_design_days.sql'
    sql_obj = SQLiteResult(sql_path)

    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Lights Electric Energy')
    assert len(data_colls) == 49
    for coll in data_colls:
        assert isinstance(coll, HourlyContinuousCollection)
        assert len(coll) == 24
Example #9
def test_sqlite_data_collections_by_output_name_monthly():
    """Test the data_collections_by_output_name method with monthly values."""
    sql_path = './tests/assets/sql/eplusout_monthly.sql'
    sql_obj = SQLiteResult(sql_path)

    assert sql_obj.reporting_frequency == 'Monthly'
    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Lights Electric Energy')
    for coll in data_colls:
        assert isinstance(coll, MonthlyCollection)
        assert coll.header.analysis_period.is_annual
        assert len(coll) == 12
Example #10
def test_sqlite_data_collections_by_output_name_single():
    """Test the data_collections_by_output_name method with a single data."""
    sql_path = './tests/assets/sql/eplusout_openstudio_error.sql'
    sql_obj = SQLiteResult(sql_path)

    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Lights Electric Energy')
    assert len(data_colls) == 1
    for coll in data_colls:
        assert isinstance(coll, HourlyContinuousCollection)
        assert len(coll) == len(coll.header.analysis_period.hoys)
        assert isinstance(coll.header.data_type, Energy)
        assert coll.header.unit == 'kWh'
Example #11
def test_sqlite_tabular_data():
    """Test the tabular_data_by_name method."""
    sql_path = './tests/assets/sql/eplusout_monthly.sql'
    sql_obj = SQLiteResult(sql_path)

    data = sql_obj.tabular_data_by_name(
        'Utility Use Per Conditioned Floor Area')
    assert len(data) == 4
    assert len(data['Lighting']) == 6
    col_names = sql_obj.tabular_column_names(
        'Utility Use Per Conditioned Floor Area')
    assert len(col_names) == 6
    assert 'Electricity Intensity' in col_names[0]
Example #12
def test_sqlite_data_collections_by_output_name_dday_runperiod():
    """Test the data_collections_by_output_name method with several design day results."""
    sql_path = './tests/fixtures/sql/eplusout_dday_runper.sql'
    sql_obj = SQLiteResult(sql_path)

    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Lights Electric Energy')
    assert len(data_colls) == 56
    for coll in data_colls[:49]:
        assert isinstance(coll, HourlyContinuousCollection)
        assert len(coll) == 24
    for coll in data_colls[49:]:
        assert isinstance(coll, HourlyContinuousCollection)
        assert len(coll) == 744
Example #13
def available_run_period_info(result_sql, output_file):
    """Get an array of run period info within an sql file.

    \b
    Args:
        result_sql: Full path to an SQLite file that was generated by EnergyPlus.
    """
    try:
        sql_obj = SQLiteResult(result_sql)
        time_int = sql_obj.reporting_frequency
        all_info = []
        for runper, per_name in zip(sql_obj.run_periods, sql_obj.run_period_names):
            clean_dict = {
                'name': per_name,
                'time_interval': time_int,
                'start_date': [runper.st_month, runper.st_day],
                'end_date': [runper.end_month, runper.end_day]
            }
            all_info.append(clean_dict)
        output_file.write(json.dumps(all_info))
    except Exception as e:
        _logger.exception('Failed to parse sql file.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)
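
Each entry in the JSON array written above takes the shape below; the values shown match the hourly test file used elsewhere in these examples:

{
    'name': 'CUSTOMRUNPERIOD',
    'time_interval': 'Hourly',
    'start_date': [1, 6],
    'end_date': [1, 12]
}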
Example #14
def available_results_info(result_sql, output_file):
    """Get all timeseries outputs within an sql file and metadata about them.

    \b
    Args:
        result_sql: Full path to an SQLite file that was generated by EnergyPlus.
    """
    try:
        sql_obj = SQLiteResult(result_sql)
        all_info = []
        for outp_dict in sql_obj.available_outputs_info:
            clean_dict = {
                'output_name': outp_dict['output_name'],
                'object_type': outp_dict['object_type'],
                'units': outp_dict['units']
            }
            d_type = outp_dict['data_type']
            clean_dict['units_ip'] = d_type.ip_units[0]
            clean_dict['cumulative'] = d_type.cumulative
            if d_type.normalized_type is not None:
                norm_type = d_type.normalized_type()
                clean_dict['normalized_units'] = norm_type.units[0]
                clean_dict['normalized_units_ip'] = norm_type.ip_units[0]
            else:
                clean_dict['normalized_units'] = None
                clean_dict['normalized_units_ip'] = None
            all_info.append(clean_dict)
        output_file.write(json.dumps(all_info))
    except Exception as e:
        _logger.exception('Failed to parse sql file.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)
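
One entry in the JSON array written above takes roughly the shape below; the values are illustrative, based on the 'Zone Mean Radiant Temperature' output checked in the tests later in these examples:

{
    'output_name': 'Zone Mean Radiant Temperature',
    'object_type': 'Zone',
    'units': 'C',
    'units_ip': 'F',
    'cumulative': False,
    'normalized_units': None,
    'normalized_units_ip': None
}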
Example #15
def test_sqlite_zone_sizing():
    """Test the properties and methods related to zone sizes."""
    sql_path = './tests/assets/sql/eplusout_hourly.sql'
    sql_obj = SQLiteResult(sql_path)

    cool_sizes = sql_obj.zone_cooling_sizes
    heat_sizes = sql_obj.zone_heating_sizes

    assert len(cool_sizes) == 7
    assert len(heat_sizes) == 7

    for size_obj in cool_sizes:
        assert isinstance(size_obj, ZoneSize)
        assert isinstance(size_obj.zone_name, str)
        assert size_obj.load_type == 'Cooling'
        assert isinstance(size_obj.calculated_design_load, float)
        assert isinstance(size_obj.final_design_load, float)
        assert isinstance(size_obj.calculated_design_flow, float)
        assert isinstance(size_obj.final_design_flow, float)
        assert size_obj.design_day_name == 'BOSTON LOGAN INTL ARPT ANN CLG .4% CONDNS DB=>MWB'
        assert isinstance(size_obj.peak_date_time, DateTime)
        assert isinstance(size_obj.peak_temperature, float)
        assert isinstance(size_obj.peak_humidity_ratio, float)
        assert isinstance(size_obj.peak_outdoor_air_flow, float)
        size_dict = size_obj.to_dict()
        new_size = ZoneSize.from_dict(size_dict)
        assert new_size.to_dict() == size_dict

    for size_obj in heat_sizes:
        assert size_obj.load_type == 'Heating'
        assert size_obj.design_day_name == 'BOSTON LOGAN INTL ARPT ANN HTG 99.6% CONDNS DB'
Example #16
def test_sqlite_data_collections_by_output_names():
    """Test the data_collections_by_output_name method with multiple names."""
    sql_path = './tests/assets/sql/eplusout_hourly.sql'
    sql_obj = SQLiteResult(sql_path)

    data_colls = sql_obj.data_collections_by_output_name(
        ('Zone Lights Electric Energy', 'Zone Mean Radiant Temperature'))
    assert len(data_colls) == 14
    for coll in data_colls:
        assert isinstance(coll, HourlyContinuousCollection)
        assert len(coll) == len(coll.header.analysis_period.hoys)
        assert isinstance(coll.header.data_type, (Energy, Temperature))

    data_colls = sql_obj.data_collections_by_output_name(
        ('Zone Lights Electric Energy', ))
    assert len(data_colls) == 7
Example #17
def adaptive_by_room(result_sql, epw_file, air_speed, comfort_par, result_type,
                     output_file):
    """Get data collections for Adaptive comfort in each room from an EnergyPlus sql.

    \b
    Args:
        result_sql: Path to an SQLite file that was generated by EnergyPlus. This
            file must contain hourly or sub-hourly results for zone comfort variables.
        epw_file: Path to an .epw file, used to provide prevailing outdoor
            temperature for the adaptive comfort model.
    """
    try:
        # load the energyplus results related to thermal comfort and the EPW object
        epw_obj = EPW(epw_file)
        out_temp = epw_obj.dry_bulb_temperature
        sql_obj = SQLiteResult(result_sql)
        op_temps = sql_obj.data_collections_by_output_name(
            'Zone Operative Temperature')

        # load the air speed data collection if specified
        assert len(op_temps) != 0, \
            'Input result-sql does not contain "Zone Operative Temperature" output.'
        air_speed = _load_data(air_speed, op_temps[0], AirSpeed, 'm/s')

        # run the collections through the Adaptive model and output results
        param = _load_adaptive_par_str(comfort_par)
        ad_colls = []
        for op_temp in op_temps:
            ad_obj = Adaptive(out_temp,
                              op_temp,
                              air_speed,
                              comfort_parameter=param)
            if result_type == 'DegreesFromNeutral':
                ad_colls.append(ad_obj.degrees_from_neutral)
            elif result_type == 'Comfort':
                ad_colls.append(ad_obj.is_comfortable)
            else:
                ad_colls.append(ad_obj.thermal_condition)
        output_file.write(json.dumps([col.to_dict() for col in ad_colls]))
    except Exception as e:
        _logger.exception(
            'Failed to run Adaptive model from sql file.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)
Example #18
def get_results_windows(sql_files):
    # set initial values that will be computed based on results
    total_floor_area, total_energy = 0, 0
    all_uses = \
        ('heating', 'cooling', 'interior_lighting', 'exterior_lighting',
         'interior_equipment', 'exterior_equipment', 'fans', 'pumps',
         'heat_rejection', 'humidification', 'heat_recovery', 'water_systems',
         'refrigeration', 'generators')
    end_uses = OrderedDict()
    for use in all_uses:
        end_uses[use] = 0

    # loop through the sql files in the directory and add the energy use
    for result_file in sql_files:
        # parse the SQL file
        sql_obj = SQLiteResult(result_file)
        # get the total floor area of the model
        area_dict = sql_obj.tabular_data_by_name('Building Area')
        areas = tuple(area_dict.values())
        total_floor_area += areas[0][0]
        # get the energy use
        eui_dict = sql_obj.tabular_data_by_name('End Uses')
        euis = tuple(eui_dict.values())
        total_energy += sum(euis[-2][:12])
        end_uses['heating'] += sum(euis[0][:12])
        end_uses['cooling'] += sum(euis[1][:12])
        end_uses['interior_lighting'] += sum(euis[2][:12])
        end_uses['exterior_lighting'] += sum(euis[3][:12])
        end_uses['interior_equipment'] += sum(euis[4][:12])
        end_uses['exterior_equipment'] += sum(euis[5][:12])
        end_uses['fans'] += sum(euis[6][:12])
        end_uses['pumps'] += sum(euis[7][:12])
        end_uses['heat_rejection'] += sum(euis[8][:12])
        end_uses['humidification'] += sum(euis[9][:12])
        end_uses['heat_recovery'] += sum(euis[10][:12])
        end_uses['water_systems'] += sum(euis[11][:12])
        end_uses['refrigeration'] += sum(euis[12][:12])
        end_uses['generators'] += sum(euis[13][:12])

    # assemble all of the results into a final dictionary
    eui = round(total_energy / total_floor_area, 3)
    gross_floor = round(total_floor_area, 3)
    end_use_pairs = OrderedDict([(key, round(val / total_floor_area, 3))
                                 for key, val in end_uses.items() if val != 0])
    return eui, gross_floor, end_use_pairs
Example #19
def tabular_data(result_sql, table_name, output_file):
    """Get all the data within a table of a Summary Report using the table name.

    \b
    Args:
        result_sql: Full path to an SQLite file that was generated by EnergyPlus.
        table_name: Text string for the name of a table within a summary
            report (e.g. 'General').
    """
    try:
        sql_obj = SQLiteResult(result_sql)
        table_dict = sql_obj.tabular_data_by_name(str(table_name))
        output_file.write(json.dumps(list(table_dict.values())))
    except Exception as e:
        _logger.exception('Failed to retrieve table data from sql file.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)
Example #20
def test_sqlite_sizing_odd():
    """Test the properties and methods related to zone sizes with an odd SQL file."""
    sql_path = './tests/assets/sql/eplusout_odd_zonesize.sql'
    sql_obj = SQLiteResult(sql_path)

    cool_sizes = sql_obj.zone_cooling_sizes
    heat_sizes = sql_obj.zone_heating_sizes
    assert len(cool_sizes) == 2
    assert len(heat_sizes) == 2

    comp_sizes = sql_obj.component_sizes
    comp_size_type = sql_obj.component_sizes_by_type(
        'ZoneHVAC:IdealLoadsAirSystem')
    comp_types = sql_obj.component_types

    assert len(comp_sizes) == 2
    assert len(comp_size_type) == 2
    assert comp_types == ['ZoneHVAC:IdealLoadsAirSystem']
Example #21
def test_sqlite_run_period():
    """Test the run_period property of SQLiteResult."""
    sql_path = './tests/assets/sql/eplusout_hourly.sql'
    sql_obj = SQLiteResult(sql_path)

    assert len(sql_obj.run_periods) == 1
    assert isinstance(sql_obj.run_periods[0], AnalysisPeriod)
    assert sql_obj.run_periods[0].st_month == 1
    assert sql_obj.run_periods[0].st_day == 6
    assert sql_obj.run_periods[0].end_month == 1
    assert sql_obj.run_periods[0].end_day == 12
    assert len(sql_obj.run_period_names) == 1
    assert sql_obj.run_period_names[0] == 'CUSTOMRUNPERIOD'

    sql_path = './tests/assets/sql/eplusout_design_days.sql'
    sql_obj = SQLiteResult(sql_path)
    assert len(sql_obj.run_periods) == 7
    assert len(sql_obj.run_period_names) == 7
    assert len(sql_obj.run_period_indices) == 7
    assert 'BOSTON LOGAN INTL ARPT ANN' in sql_obj.run_period_names[0]
Example #22
def test_available_results_info():
    """Test the available_results_info property."""
    sql_path = './tests/assets/sql/eplusout_hourly.sql'
    sql_obj = SQLiteResult(sql_path)

    assert len(sql_obj.available_outputs_info) == 8
    assert all(isinstance(obj, dict) for obj in sql_obj.available_outputs_info)
    for outp in sql_obj.available_outputs_info:
        if outp['output_name'] == 'Zone Mean Radiant Temperature':
            assert outp['object_type'] == 'Zone'
            assert outp['units'] == 'C'
            assert str(outp['data_type']) == 'Temperature'
Example #23
def tabular_metadata(result_sql, table_name, output_file):
    """Get a dictionary with the names of a table's rows and columns.

    \b
    Args:
        result_sql: Full path to an SQLite file that was generated by EnergyPlus.
        table_name: Text string for the name of a table within a summary
            report (e.g. 'General').
    """
    try:
        sql_obj = SQLiteResult(result_sql)
        table_dict = sql_obj.tabular_data_by_name(str(table_name))
        row_names = list(table_dict.keys())
        col_names = sql_obj.tabular_column_names(str(table_name))
        output_file.write(json.dumps(
            {'row_names': row_names, 'column_names': col_names}))
    except Exception as e:
        _logger.exception('Failed to retrieve table data from sql file.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)
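
For the 'Utility Use Per Conditioned Floor Area' table exercised in the tabular data test earlier, the written JSON would look roughly like this (names abridged):

{
    'row_names': ['Lighting', ...],
    'column_names': ['Electricity Intensity', ...]
}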
Example #24
def all_available_info(result_sql, output_file):
    """Get a dictionary with metadata of all outputs and run periods within an sql file.

    The dictionary will have two keys: 'run_periods' and 'outputs'.

    \b
    Args:
        result_sql: Full path to an SQLite file that was generated by EnergyPlus.
    """
    try:
        # create the SQLiteResult object
        sql_obj = SQLiteResult(result_sql)
        all_info = {}

        # get all of the info on the outputs within the file
        all_outp = []
        for outp_dict in sql_obj.available_outputs_info:
            clean_dict = {
                'output_name': outp_dict['output_name'],
                'object_type': outp_dict['object_type'],
                'units': outp_dict['units']
            }
            d_type = outp_dict['data_type']
            clean_dict['units_ip'] = d_type.ip_units[0]
            clean_dict['cumulative'] = d_type.cumulative
            if d_type.normalized_type is not None:
                norm_type = d_type.normalized_type()
                clean_dict['normalized_units'] = norm_type.units[0]
                clean_dict['normalized_units_ip'] = norm_type.ip_units[0]
            else:
                clean_dict['normalized_units'] = None
                clean_dict['normalized_units_ip'] = None
            all_outp.append(clean_dict)
        all_info['outputs'] = all_outp

        # get all of the run periods within the file
        time_int = sql_obj.reporting_frequency
        all_run_per = []
        for runper, per_name in zip(sql_obj.run_periods, sql_obj.run_period_names):
            clean_dict = {
                'name': per_name,
                'time_interval': time_int,
                'start_date': [runper.st_month, runper.st_day],
                'end_date': [runper.end_month, runper.end_day]
            }
            all_run_per.append(clean_dict)
        all_info['run_periods'] = all_run_per
        output_file.write(json.dumps(all_info))
    except Exception as e:
        _logger.exception('Failed to parse sql file.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)
Example #25
def component_sizes(result_sql, component_type, output_file):
    """Get a list of ComponentSize JSONs.

    \b
    Args:
        result_sql: Full path to an SQLite file that was generated by EnergyPlus.
    """
    try:
        sql_obj = SQLiteResult(result_sql)
        comp_sizes = []
        if component_type is None or component_type == '' or component_type == 'None':
            for comp_size in sql_obj.component_sizes:
                comp_sizes.append(comp_size.to_dict())
        else:
            for comp_size in sql_obj.component_sizes_by_type(component_type):
                comp_sizes.append(comp_size.to_dict())
        output_file.write(json.dumps(comp_sizes))
    except Exception as e:
        _logger.exception('Failed to retrieve component sizes from sql.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)
Example #26
def available_results(result_sql, output_file):
    """Get an array of all timeseries outputs within an sql file.

    \b
    Args:
        result_sql: Full path to an SQLite file that was generated by EnergyPlus.
    """
    try:
        sql_obj = SQLiteResult(result_sql)
        output_file.write(json.dumps(sql_obj.available_outputs))
    except Exception as e:
        _logger.exception('Failed to parse sql file.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)
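
For the hourly test file used throughout these examples, the array written above would contain the eight output names asserted in test_sqlite_init below, e.g.:

['Zone Operative Temperature', 'Zone Lights Electric Energy',
 'Zone Mean Radiant Temperature', 'Zone Air Relative Humidity', ...]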
Example #27
def zone_sizes(result_sql, output_file):
    """Get a dictionary with two arrays of ZoneSize JSONs under 'cooling' and 'heating'.

    \b
    Args:
        result_sql: Full path to an SQLite file that was generated by EnergyPlus.
    """
    try:
        sql_obj = SQLiteResult(result_sql)
        base = {}
        base['cooling'] = [zs.to_dict() for zs in sql_obj.zone_cooling_sizes]
        base['heating'] = [zs.to_dict() for zs in sql_obj.zone_heating_sizes]
        output_file.write(json.dumps(base))
    except Exception as e:
        _logger.exception('Failed to retrieve zone sizes from sql file.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)
Example #28
def test_sqlite_init():
    """Test the initialization of SQLiteResult and basic properties."""
    sql_path = './tests/assets/sql/eplusout_hourly.sql'
    sql_obj = SQLiteResult(sql_path)
    str(sql_obj)  # test the string representation

    assert sql_obj.reporting_frequency == 'Hourly'
    assert isinstance(sql_obj.file_path, str)
    assert isinstance(sql_obj.location, Location)
    assert sql_obj.location.latitude == 42.37

    all_output = sql_obj.available_outputs
    assert len(all_output) == 8
    assert 'Zone Operative Temperature' in all_output
    assert 'Zone Lights Electric Energy' in all_output
    assert 'Zone Electric Equipment Electric Energy' in all_output
    assert 'Zone Air Relative Humidity' in all_output
    assert 'Zone Ideal Loads Supply Air Total Cooling Energy' in all_output
    assert 'Zone Mean Radiant Temperature' in all_output
    assert 'Zone Ideal Loads Supply Air Total Heating Energy' in all_output
Example #29
def find_max_cooling_des_day(des_days, sim_par, base_strs):
    """Find the cooling design day with the highest coincident peak load."""
    # create sizing parameters with all of the design days
    sim_par_dup = sim_par.duplicate()
    sim_par_dup.output.outputs = None
    for dy in des_days:
        sim_par_dup.sizing_parameter.add_design_day(dy)
    # write the IDF and run the sizing calculation
    idf_str_init = '\n\n'.join([sim_par_dup.to_idf()] + base_strs)
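    # NOTE: 'directory', 'write_to_file_by_name' and 'run_idf' are assumed to be
    # available from the enclosing scope/module of this snippet.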
    idf = os.path.join(directory, 'in.idf')
    write_to_file_by_name(directory, 'in.idf', idf_str_init, True)
    sql, zsz, rdd, html, err = run_idf(idf, silent=True)
    # determine the design day with the highest peak using the sizing results
    sql_obj = SQLiteResult(sql)
    d_day_dict = {d_day.name.upper(): [0, d_day] for d_day in des_days}
    peak_cool_dict = {}
    for zs in sql_obj.zone_cooling_sizes:
        d_day_dict[zs.design_day_name][0] += zs.calculated_design_load
        peak_cool_dict[zs.zone_name] = zs.calculated_design_load
    day_loads = list(d_day_dict.values())
    day_loads.sort(key=lambda y: y[0])

    return [day_loads[-1][1]], peak_cool_dict
Example #30
def test_sqlite_data_collections_by_output_name_openstudio():
    """Test the data_collections_by_output_name method with openstudio values."""
    sql_path = './tests/assets/sql/eplusout_openstudio.sql'
    sql_obj = SQLiteResult(sql_path)

    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Lights Electric Energy')
    for coll in data_colls:
        assert isinstance(coll, HourlyContinuousCollection)
        assert len(coll) == len(coll.header.analysis_period.hoys)
        assert isinstance(coll.header.data_type, Energy)
        assert coll.header.unit == 'kWh'

    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Electric Equipment Electric Energy')
    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Ideal Loads Supply Air Total Heating Energy')
    data_colls = sql_obj.data_collections_by_output_name(
        'Zone Ideal Loads Supply Air Total Cooling Energy')