Beispiel #1
0
    def from_idf(cls, idf_string, type_idf_string=None):
        """Create a ScheduleFixedInterval from an EnergyPlus IDF text strings.

        Args:
            idf_string: A text string fully describing an EnergyPlus
                Schedule:File.
            type_idf_string: An optional text string for the ScheduleTypeLimits.
                If None, the resulting schedule will have no ScheduleTypeLimit.
        """
        # pull apart the fields of the Schedule:File object
        fields = parse_idf_string(idf_string, 'Schedule:File')

        # optional ScheduleTypeLimit from the second IDF string
        sch_type = None
        if type_idf_string is not None:
            sch_type = ScheduleTypeLimit.from_idf(type_idf_string)

        # timestep from minutes-per-item; leap year inferred from hours of data
        t_step = 1 if fields[8] == '' else 60 / int(fields[8])
        st_date = Date(1, 1, fields[5] != '8760')
        interp = fields[7] not in ('No', '')

        # load the data from the CSV file referenced in the string
        assert os.path.isfile(fields[2]), \
            'CSV Schedule:File "{}" was not found on this system.'.format(fields[2])
        file_rows = csv_to_matrix(fields[2])
        columns = tuple(zip(*file_rows))
        col_i, skip_rows = int(fields[3]) - 1, int(fields[4])
        vals = (float(v) for v in columns[col_i][skip_rows:])

        return cls(fields[0], vals, sch_type, t_step, st_date, 0, interp)
Beispiel #2
0
def test_csv_to_matrix():
    """Test the csv_to_matrix functions."""
    epw_path = './tests/fixtures/epw/tokyo.epw'
    matrix = futil.csv_to_matrix(epw_path)
    assert len(matrix) == 8768

    # the EPW file is not fully numeric, so the numeric parser must raise
    with pytest.raises(Exception):
        futil.csv_to_num_matrix(epw_path)
Beispiel #3
0
    def __init__(self,
                 file_path,
                 cooling_date=Date(1, 1),
                 heating_date=Date(1, 1)):
        """Initialize ZSZ"""
        # check that the file exists and is a CSV
        assert os.path.isfile(file_path), 'No file was found at {}'.format(
            file_path)
        assert file_path.endswith('.csv'), \
            '{} is not an CSV file ending in .csv.'.format(file_path)
        self._file_path = file_path

        # parse the data in the file
        data_mtx = csv_to_matrix(file_path)

        # header row and the peak row (3rd from the end), minus the time cell
        self._headers = data_mtx[0][1:]
        self._peak = data_mtx[-3][1:]

        # drop the header row and the 3 trailing summary rows, then transpose
        # so that each entry of body is one column of the original CSV
        body = list(zip(*data_mtx[1:-3]))

        # infer the timestep from the gap between the first two time stamps
        t0 = datetime.strptime(body[0][0], '%H:%M:%S')
        t1 = datetime.strptime(body[0][1], '%H:%M:%S')
        self._timestep = int(3600 / (t1 - t0).seconds)
        self._cooling_date = cooling_date
        self._heating_date = heating_date
        self._cool_a_period = AnalysisPeriod(
            cooling_date.month, cooling_date.day, 0,
            cooling_date.month, cooling_date.day, 23,
            timestep=self._timestep)
        self._heat_a_period = AnalysisPeriod(
            heating_date.month, heating_date.day, 0,
            heating_date.month, heating_date.day, 23,
            timestep=self._timestep)

        # the body of the data without the leading time column
        self._data = body[1:]

        # properties to be computed upon request
        self._cooling_load_data = None
        self._heating_load_data = None
        self._cooling_flow_data = None
        self._heating_flow_data = None
Beispiel #4
0
def test_shcedule_fixedinterval_to_idf_collective_csv():
    """Test the to_idf_collective_csv method."""
    source_idf = './tests/idf/ElectrochromicControlSchedules.idf'
    schedules = ScheduleFixedInterval.extract_all_from_idf_file(source_idf)

    idf_strings = ScheduleFixedInterval.to_idf_collective_csv(
        schedules, './tests/csv/', 'All Electrochromic')

    # one IDF string per schedule plus a single shared CSV on disk
    assert len(idf_strings) == 4
    csv_path = './tests/csv/All_Electrochromic.csv'
    assert os.path.isfile(csv_path)
    csv_rows = csv_to_matrix(csv_path)
    assert len(csv_rows) == 8761
    assert len(csv_rows[0]) >= 4

    os.remove(csv_path)
Beispiel #5
0
                          1: 'Overloaded',
                          0: 'Normal'
                      })
# data type for voltage condition results; unit_descr maps the numeric
# condition codes found in the result data to human-readable labels
volt_cond = GenericType('Voltage Condition',
                        'condition',
                        unit_descr={
                            -1: 'Undervoltage',
                            0: 'Normal',
                            1: 'Overvoltage'
                        })

# NOTE(review): this script fragment appears truncated — the `factors` and
# `condition` lists are initialized here but populated further down, outside
# this view.
if all_required_inputs(ghenv.Component):
    factors, condition = [], []
    for result_file in _dss_csv:
        # parse the data and figure out the timeseries properties
        data = csv_to_matrix(result_file)
        csv_header = data.pop(0)  # first row holds the column names
        a_period = extract_analysis_period(data)

        # figure out the type of object to write into the metadata
        # (file names are presumably prefixed with the DSS object type,
        # eg. "Line." or "Transformer." — buildings carry no prefix)
        obj_name = os.path.basename(result_file).replace('.csv', '')
        if obj_name.startswith('Line.'):
            obj_name = obj_name.replace('Line.', '')
            obj_type = 'Electrical Connector Loading'
        elif obj_name.startswith('Transformer.'):
            obj_name = obj_name.replace('Transformer.', '')
            obj_type = 'Transformer Loading'
        else:
            obj_type = 'Building Voltage'
        metadata = {'type': obj_type, 'name': obj_name}
Beispiel #6
0
                values.append(val)
                parameters.append(key.replace('_', ' ').title())
            elif key == 'wind' and len(val) != 0:
                # wind turbine size in kW from the first sizing result
                wind = val[0]['size_kw']
            elif key == 'solar_pv' and len(val) != 0:
                pv = val[0]['size_kw']
            elif key == 'storage' and len(val) != 0:
                # storage is reported as [power kW, capacity kWh]
                storage = [val[0]['size_kw'], val[0]['size_kwh']]
            elif key == 'generator' and len(val) != 0:
                generator = val[0]['size_kw']

    # parse the CSV results of the simulation if successful
    if os.path.isfile(re_csv):
        data = []  # final list of data to be collected
        # parse the data and figure out the timeseries properties
        csv_data = csv_to_matrix(re_csv)
        csv_header = csv_data.pop(0)  # first row holds the column names
        a_period = extract_analysis_period(csv_data)
        # walk the columns in lockstep with their header names
        for col, col_name in zip(zip(*csv_data), csv_header):
            if col_name.startswith('REopt:'):
                # figure out the type of object to write into the metadata
                # from headers shaped like "REopt:...:Name(unit)"
                base_name = col_name.replace('REopt:', '').split(':')
                end_name, units_init = base_name[-1].split('(')
                units_init = units_init.replace(')', '')
                if units_init == 'kw':
                    units, data_type = 'kW', Power()
                elif units_init == 'pct':
                    units, data_type = 'fraction', Fraction()
                else:
                    continue  # unrecognized unit; skip the column
                metadata = {'type': ':'.join(base_name[:-1] + [end_name])}
Beispiel #7
0
def constructions_2004(model_json, climate_zone, output_file):
    """Convert a Model's constructions to be conformant with ASHRAE 90.1-2004 appendix G.

    This includes assigning a ConstructionSet that is compliant with Table 5.5 to
    all rooms in the model.

    \b
    Args:
        model_json: Full path to a Model JSON file.
        climate_zone: Text indicating the ASHRAE climate zone. This can be a single
            integer (in which case it is interpreted as A) or it can include the
            A, B, or C qualifier (eg. 3C).
        output_file: File object to which the converted Model JSON is written.
    """
    try:
        # re-serialize the Model to Python and get the glazing ratios
        with open(model_json) as json_file:
            data = json.load(json_file)
        model = Model.from_dict(data)
        w_area = model.exterior_wall_area
        r_area = model.exterior_roof_area
        # guard against division by zero for models without walls/roofs
        wr = model.exterior_wall_aperture_area / w_area if w_area != 0 else 0
        sr = model.exterior_skylight_aperture_area / r_area if r_area != 0 else 0

        # get the base ConstructionSet from the standards library;
        # only the leading digit of the climate zone selects the base set
        clean_cz = str(climate_zone)[0]
        constr_set_id = '2004::ClimateZone{}::SteelFramed'.format(clean_cz)
        base_set = construction_set_by_identifier(constr_set_id)

        # parse the CSV file with exceptions to the base construction set
        ex_file = os.path.join(os.path.dirname(__file__), 'data',
                               'ashrae_2004.csv')
        ex_data = csv_to_matrix(ex_file)
        # 3C is the one zone with its own row in the exceptions data
        ex_cz = clean_cz if climate_zone != '3C' else climate_zone
        ex_ratio = '100'
        # pick the smallest ratio bracket that the window ratio falls under
        # (0.001 tolerance avoids float-comparison edge cases at the bounds)
        for ratio in (40, 30, 20, 10):
            if wr < ratio / 100 + 0.001:
                ex_ratio = str(ratio)
        for row in ex_data:
            if row[0] == ex_cz and row[1] == ex_ratio:
                # row holds [U-fixed, U-operable, SHGC, ...] as strings
                # NOTE(review): assumes the CSV always contains a matching
                # row; otherwise vert_except is unbound and the broad except
                # below exits with code 1
                vert_except = [float(val) for val in row[2:]]
                break

        # change the constructions for fixed and operable windows;
        # U-values in the CSV are IP and must be converted to SI
        si_ip_u = 5.678263337
        fixed_id = 'U {} SHGC {} Fixed Glz'.format(vert_except[0],
                                                   vert_except[2])
        fixed_mat = EnergyWindowMaterialSimpleGlazSys(fixed_id,
                                                      vert_except[0] * si_ip_u,
                                                      vert_except[2])
        fixed_constr = WindowConstruction(fixed_id.replace('Glz', 'Window'),
                                          [fixed_mat])
        oper_id = 'U {} SHGC {} Operable Glz'.format(vert_except[1],
                                                     vert_except[2])
        oper_mat = EnergyWindowMaterialSimpleGlazSys(oper_id,
                                                     vert_except[1] * si_ip_u,
                                                     vert_except[2])
        oper_constr = WindowConstruction(oper_id.replace('Glz', 'Window'),
                                         [oper_mat])
        base_set.aperture_set.window_construction = fixed_constr
        base_set.aperture_set.operable_construction = oper_constr

        # change the construction for skylights if the ratio is greater than 2%
        if sr > 0.021:
            for row in ex_data:
                if row[0] == ex_cz and row[1] == 'sky_5':
                    sky_except = [float(row[2]), float(row[4])]
                    break
            sky_id = 'U {} SHGC {} Skylight Glz'.format(
                sky_except[0], sky_except[1])
            sky_mat = EnergyWindowMaterialSimpleGlazSys(
                sky_id, sky_except[0] * si_ip_u, sky_except[1])
            sky_constr = WindowConstruction(sky_id.replace('Glz', 'Window'),
                                            [sky_mat])
            base_set.aperture_set.skylight_construction = sky_constr

        # remove child constructions and assign the construction set to all rooms
        model.properties.energy.remove_child_constructions()
        for room in model.rooms:
            room.properties.energy.construction_set = base_set

        # write the Model JSON string
        output_file.write(json.dumps(model.to_dict()))
    except Exception as e:
        _logger.exception(
            'Model baseline construction creation failed.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)