Example n. 1
0
    def allocate_output_memory(
        self, t_start, t_end, t_step, data_spec, categories_dict
    ):
        """Preallocate the model and FMU output buffers for a full run.

        Sizes every buffer to the number of simulation steps implied by
        [t_start, t_end] at t_step so that per-step writes during the
        simulation are cheap, then resets the simulation clock.
        """
        # the simulation time axis drives all buffer sizing
        sim_time = np.arange(t_start, t_end + t_step, t_step, dtype="int64")
        self.output = {STATES.SIMULATION_TIME: sim_time}
        self.fmu_output = {}
        num_steps = len(sim_time)

        # one preallocated column per model output state
        for state in self.output_states:
            state_dtype = data_spec.full.spec[state]["dtype"]
            if state_dtype == "category":
                # categorical columns are held as pandas Series
                empty_col = pd.Series(index=np.arange(num_steps))
                self.output[state] = pd.Series(
                    pd.Categorical(
                        empty_col, categories=categories_dict[state]
                    )
                )
            else:
                (
                    default_value,
                    np_dtype,
                ) = Conversions.numpy_down_cast_default_value_dtype(state_dtype)
                self.output[state] = np.full(
                    num_steps, default_value, dtype=np_dtype
                )

        # FMU-side buffers: per-step status plus the shared time axis
        self.fmu_output[STATES.STEP_STATUS] = np.full(
            num_steps, False, dtype="bool"
        )
        self.fmu_output[STATES.SIMULATION_TIME] = sim_time

        for name, var_spec in self.idf.output_spec.items():
            (
                default_value,
                np_dtype,
            ) = Conversions.numpy_down_cast_default_value_dtype(var_spec["dtype"])
            self.fmu_output[name] = np.full(num_steps, default_value, dtype=np_dtype)

        # rewind the simulation clock
        self.current_time = t_start
        self.current_t_idx = 0
    def update_output(self, status, step_sensor_input):
        """Write this step's FMU results into the preallocated output buffers."""
        t_idx = self.current_t_idx
        self.fmu_output[STATES.STEP_STATUS][t_idx] = status

        # copy each FMI zone output into its preallocated column
        for name in self.idf.output_spec:
            self.fmu_output[name][t_idx] = self.fmu.get(name)[0]

        # map fmu output to model output
        self.output[STATES.THERMOSTAT_TEMPERATURE][t_idx] = (
            self.get_tstat_temperature()
        )

        # read the temperature back from the buffer so any dtype
        # down-cast on storage is reflected in the humidity calculation
        self.output[STATES.THERMOSTAT_HUMIDITY][t_idx] = (
            Conversions.relative_humidity_from_dewpoint(
                temperature=self.output[STATES.THERMOSTAT_TEMPERATURE][t_idx],
                dewpoint=self.get_tstat_dewpoint(),
            )
        )

        # motion passes straight through from the sensor input
        self.output[STATES.THERMOSTAT_MOTION][t_idx] = step_sensor_input[
            STATES.THERMOSTAT_MOTION
        ]

        # expose the just-written step as the step output
        for state in self.output_states:
            self.step_output[state] = self.output[state][t_idx]
Example n. 3
0
    def allocate_output_memory(self, t_start, t_end, t_step, data_spec,
                               categories_dict):
        """Preallocate per-step output buffers so the simulation loop is cheap."""
        # the simulation time axis drives all buffer sizing
        sim_time = np.arange(t_start, t_end + t_step, t_step, dtype="int64")
        self.output = {STATES.SIMULATION_TIME: sim_time}
        num_steps = len(sim_time)

        # one preallocated column per output state
        for state in self.output_states:
            state_dtype = data_spec.full.spec[state]["dtype"]
            if state_dtype == "category":
                # categorical columns are held as pandas Series
                empty_col = pd.Series(index=np.arange(num_steps))
                self.output[state] = pd.Series(
                    pd.Categorical(empty_col,
                                   categories=categories_dict[state]))
            else:
                (
                    default_value,
                    np_dtype,
                ) = Conversions.numpy_down_cast_default_value_dtype(state_dtype)
                self.output[state] = np.full(num_steps,
                                             default_value,
                                             dtype=np_dtype)

        # per-step status codes from the simulation backend
        self.output[STATES.STEP_STATUS] = np.full(num_steps, 0, dtype="int8")
def spec_unit_conversion(df, src_spec, dest_spec):
    """This method must be able to evaluate multiple sources should
    a channel be composed from multiple sources."""
    # lookup table of supported (source unit, destination unit) conversions
    _converters = {
        (UNITS.FARHENHEIT, UNITS.CELSIUS): Conversions.F2C,
        (UNITS.CELSIUS, UNITS.FARHENHEIT): Conversions.C2F,
        (UNITS.FARHENHEITx10, UNITS.FARHENHEIT): lambda s: s / 10.0,
        (UNITS.FARHENHEIT, UNITS.FARHENHEITx10): lambda s: s * 10.0,
        (UNITS.FARHENHEITx10, UNITS.CELSIUS): lambda s: Conversions.F2C(s / 10.0),
        (UNITS.CELSIUS, UNITS.FARHENHEITx10): lambda s: Conversions.C2F(s) * 10.0,
    }
    for col, col_spec in src_spec.full.spec.items():
        if col not in df.columns:
            continue
        src_unit = col_spec["unit"]
        # resolve destination unit over the permutations of Internal spec usage
        dest_unit = None
        if isinstance(dest_spec, Internal):
            dest_unit = dest_spec.full.spec[col_spec["internal_state"]]["unit"]
        elif isinstance(src_spec, Internal):
            # source columns are already internal state names
            for _d_col, d_spec in dest_spec.full.spec.items():
                if d_spec["internal_state"] == col:
                    dest_unit = d_spec["unit"]
        else:
            # match source and destination on their internal state
            for _d_col, d_spec in dest_spec.full.spec.items():
                if d_spec["internal_state"] == col_spec["internal_state"]:
                    dest_unit = d_spec["unit"]

        if dest_unit and src_unit != dest_unit:
            convert = _converters.get((src_unit, dest_unit))
            if convert is not None:
                df[col] = convert(df[col])
            else:
                logger.error("Unsupported conversion: {} to {}".format(
                    src_unit,
                    dest_unit,
                ))
    return df
    def fill_epw(
        self, input_epw_data, datetime_channel, fill_epw_data, sim_config
    ):
        """Fill fields required by EnergyPlus that are missing in the input
        weather data with defaults from Typical Meteorological Year 3 (TMY3)
        data sets for the nearest city. All data is internally in UTC.

        :param input_epw_data: EnergyPlus Weather data in a dataframe of epw_columns
        :type input_epw_data: pd.DataFrame
        :param datetime_channel: channel supplying the datetime column and timezone
        :param fill_epw_data: TMY3 fill data
        :type fill_epw_data: pd.DataFrame
        :param sim_config: simulation config providing "sim_step_size_seconds"
        :return: filled epw dataframe resampled to the simulation step size
        :rtype: pd.DataFrame
        """
        if input_epw_data.empty:
            input_epw_data.columns = self.epw_columns
            return input_epw_data

        if fill_epw_data.empty:
            # plain string: the former f-string had no placeholders
            raise ValueError("fill_epw_data is empty.")

        # save fill_epw_data that was actually used to fill
        self.fill_epw_data = fill_epw_data

        # edit unique copy of input df
        epw_data = input_epw_data.copy(deep=True)

        # add datetime column for merge with fill data
        epw_data = pd.concat(
            [
                datetime_channel.data[datetime_channel.spec.datetime_column],
                epw_data,
            ],
            axis="columns",
        ).rename(
            columns={datetime_channel.spec.datetime_column: self.datetime_column}
        )

        # most common sampling period of the fill data, in seconds,
        # to check whether resampling is needed
        _cur_fill_epw_data_period = (
            fill_epw_data[self.datetime_column].diff().mode()[0].total_seconds()
        )

        if _cur_fill_epw_data_period < sim_config["sim_step_size_seconds"]:
            # downsample data
            fill_epw_data = (
                fill_epw_data.set_index(fill_epw_data[self.datetime_column])
                .resample(f"{sim_config['sim_step_size_seconds']}S")
                .mean()
                .reset_index()
            )
        elif _cur_fill_epw_data_period > sim_config["sim_step_size_seconds"]:
            # upsample data
            fill_epw_data = fill_epw_data.set_index(self.datetime_column)
            fill_epw_data = fill_epw_data.resample(
                f"{sim_config['sim_step_size_seconds']}S"
            ).asfreq()
            # forward-fill is the only gap-fill that works on all dtypes
            fill_epw_data = fill_epw_data.ffill()
            fill_epw_data = fill_epw_data.reset_index()

        # using annual TMY there may be missing records at the boundaries;
        # wrap the TMY data around the year to fill any gaps

        if min(fill_epw_data[self.datetime_column]) > min(
            epw_data[self.datetime_column]
        ):
            # have data before fill data starts: wrap fill data backward on year
            time_diff = min(fill_epw_data[self.datetime_column]) - min(
                epw_data[self.datetime_column]
            )
            years = math.ceil(time_diff.days / 365.0)
            fill_epw_data_prev_years = []
            # BUGFIX: was `range(1, years)` with a constant 1-year shift,
            # which produced duplicate frames and generated zero wrapped
            # years when the gap was under a year, leaving it unfilled
            for y in range(1, years + 1):
                _fill_epw_data_prev_year = fill_epw_data.copy(deep=True)
                _fill_epw_data_prev_year["year"] = (
                    _fill_epw_data_prev_year["year"] - y
                )
                _fill_epw_data_prev_year[self.datetime_column] = (
                    _fill_epw_data_prev_year[self.datetime_column]
                    - pd.offsets.DateOffset(years=y)
                )
                fill_epw_data_prev_years.append(_fill_epw_data_prev_year)

            fill_epw_data = pd.concat(
                fill_epw_data_prev_years + [fill_epw_data], axis="rows"
            )
            # BUGFIX: sort_values returns a new frame; result was discarded
            fill_epw_data = fill_epw_data.sort_values(self.datetime_column)

        if max(fill_epw_data[self.datetime_column]) < max(
            epw_data[self.datetime_column]
        ):
            # have data after fill data ends: wrap fill data forward on year
            time_diff = max(epw_data[self.datetime_column]) - max(
                fill_epw_data[self.datetime_column]
            )
            years = math.ceil(time_diff.days / 365.0)
            fill_epw_data_next_years = []
            # BUGFIX: same off-by-one / constant-shift issue as above
            for y in range(1, years + 1):
                _fill_epw_data_next_year = fill_epw_data.copy(deep=True)
                _fill_epw_data_next_year["year"] = (
                    _fill_epw_data_next_year["year"] + y
                )
                _fill_epw_data_next_year[self.datetime_column] = (
                    _fill_epw_data_next_year[self.datetime_column]
                    + pd.offsets.DateOffset(years=y)
                )
                fill_epw_data_next_years.append(_fill_epw_data_next_year)

            fill_epw_data = pd.concat(
                [fill_epw_data] + fill_epw_data_next_years, axis="rows"
            )
            fill_epw_data = fill_epw_data.sort_values(self.datetime_column)

        # epw_data left join fill_data will give fill data for every epw_data
        # record
        epw_data_full = epw_data[[self.datetime_column] + self.spec.columns].merge(
            fill_epw_data,
            how="left",
            on=[self.datetime_column],
        )

        # loop over spec columns: fill missing values from the fill data,
        # then move them onto the epw column names
        for _col in self.spec.columns:
            epw_data_full.loc[
                epw_data_full[_col].isnull(), _col
            ] = epw_data_full[EnergyPlusWeather.output_rename_dict[_col]]
            epw_data_full[
                EnergyPlusWeather.output_rename_dict[_col]
            ] = epw_data_full[_col]
            epw_data_full = epw_data_full.drop(columns=[_col])

        # compute dewpoint from dry-bulb and relative humidity
        epw_data_full["temp_dew"] = Conversions.relative_humidity_to_dewpoint(
            epw_data_full["temp_air"], epw_data_full["relative_humidity"]
        )

        # convert to local time INVARIANT to DST changes
        # .epw will have wrong hour columns if DST shift occurs during simulation
        # need a standard UTC offset for entire simulation period
        # no time zone shift occurs on or within 1 week of January 17th
        # use this for tz standard UTC offset
        tz_offset_seconds = datetime_channel.timezone.utcoffset(
            datetime(min(epw_data_full[self.datetime_column]).year, 1, 17)
        ).total_seconds()

        epw_data_full[self.datetime_column] = epw_data_full[
            self.datetime_column
        ] + pd.Timedelta(seconds=tz_offset_seconds)

        # last day of data must exist and be invariant to TZ shift
        # add ffill data for final day and extra day.
        _fill = epw_data_full.tail(1).copy(deep=True)
        _fill_rec = _fill.iloc[0]
        _fill[self.datetime_column] = _fill[self.datetime_column] + pd.Timedelta(
            days=2,
            hours=-_fill_rec[self.datetime_column].hour,
            minutes=-_fill_rec[self.datetime_column].minute,
            seconds=-_fill_rec[self.datetime_column].second,
        )
        # BUGFIX: DataFrame.append was removed in pandas 2.0; use pd.concat
        epw_data_full = pd.concat([epw_data_full, _fill], ignore_index=True)
        epw_data_full = epw_data_full.set_index(self.datetime_column)
        epw_data_full = epw_data_full.resample(
            f"{sim_config['sim_step_size_seconds']}S"
        ).asfreq()
        # first ffill then bfill will fill both sides padding data
        epw_data_full = epw_data_full.ffill()
        epw_data_full = epw_data_full.bfill()
        epw_data_full = epw_data_full.reset_index()

        epw_data_full["year"] = epw_data_full[self.datetime_column].dt.year
        epw_data_full["month"] = epw_data_full[self.datetime_column].dt.month
        epw_data_full["day"] = epw_data_full[self.datetime_column].dt.day
        # energyplus uses non-standard hours [1-24] this is accounted in to_epw()
        epw_data_full["hour"] = epw_data_full[self.datetime_column].dt.hour
        epw_data_full["minute"] = epw_data_full[self.datetime_column].dt.minute

        # date time columns can be smaller dtypes
        epw_data_full = epw_data_full.astype(
            {
                "year": "Int16",
                "month": "Int8",
                "day": "Int8",
                "hour": "Int8",
                "minute": "Int8",
            },
        )

        # reorder return columns
        return epw_data_full
    def generate_dummy_data(
        sim_config,
        spec,
        outdoor_weather=None,
        schedule_chg_pts=None,
        comfort_chg_pts=None,
        hvac_mode_chg_pts=None,
    ):
        """Generate a dummy thermostat data frame for a simulation.

        Builds a DataFrame indexed by the simulation's time range, fills
        channel columns with defaults, then applies HVAC mode, schedule, and
        comfort-setpoint change points in timestamp order.

        NOTE(review): the `return _df` at the bottom is INSIDE the
        `sim_config.iterrows()` loop, so only the first simulation row is
        ever processed — confirm whether that is intended.

        :param sim_config: dataframe of simulations (uses start_utc / end_utc)
        :param spec: an external data spec (must not be Internal)
        :param outdoor_weather: optional dict keyed by internal weather state
        :param schedule_chg_pts: optional {timestamp: [schedule dicts]}
        :param comfort_chg_pts: optional {timestamp: {schedule name: setpoints}}
        :param hvac_mode_chg_pts: optional {timestamp: hvac mode string}
        :raises ValueError: if spec is Internal or any change-point dict is empty
        :return: dataframe of dummy data for the first simulation row
        """
        if isinstance(spec, Internal):
            raise ValueError(
                f"Supplied Spec {spec} is internal spec." +
                " Data of this spec should not be stored in data files")

        for _idx, sim in sim_config.iterrows():
            # time index spanning the simulation at the spec's data period
            _df = pd.DataFrame(index=pd.date_range(
                start=sim.start_utc,
                end=sim.end_utc,
                freq=f"{spec.data_period_seconds}S",
            ))

            # NOTE(review): defaults below rebind the parameter names, so they
            # would persist across loop iterations — only relevant if the
            # early `return` at the bottom of this loop were removed.
            if not schedule_chg_pts:
                # set default ecobee schedule: Home at 06:30, Sleep at 23:30,
                # every day of the week
                schedule_chg_pts = {
                    sim.start_utc: [
                        {
                            "name":
                            "Home",
                            "minute_of_day":
                            390,
                            "on_day_of_week": [
                                True,
                                True,
                                True,
                                True,
                                True,
                                True,
                                True,
                            ],
                        },
                        {
                            "name":
                            "Sleep",
                            "minute_of_day":
                            1410,
                            "on_day_of_week": [
                                True,
                                True,
                                True,
                                True,
                                True,
                                True,
                                True,
                            ],
                        },
                    ]
                }

            if not comfort_chg_pts:
                # set default ecobee comfort setpoints, converted into the
                # unit system implied by the spec type
                if isinstance(spec, FlatFilesSpec):
                    home_stp_cool = Conversions.C2Fx10(23.5)
                    home_stp_heat = Conversions.C2Fx10(21.0)
                    sleep_stp_cool = Conversions.C2Fx10(28.0)
                    sleep_stp_heat = Conversions.C2Fx10(16.5)
                elif isinstance(spec, DonateYourDataSpec):
                    home_stp_cool = Conversions.C2F(23.5)
                    home_stp_heat = Conversions.C2F(21.0)
                    sleep_stp_cool = Conversions.C2F(28.0)
                    sleep_stp_heat = Conversions.C2F(16.5)
                else:
                    # assumed Celsius for any other spec — TODO confirm
                    home_stp_cool = 23.5
                    home_stp_heat = 21.0
                    sleep_stp_cool = 28.0
                    sleep_stp_heat = 16.5

                comfort_chg_pts = {
                    sim.start_utc: {
                        "Home": {
                            STATES.TEMPERATURE_STP_COOL: home_stp_cool,
                            STATES.TEMPERATURE_STP_HEAT: home_stp_heat,
                        },
                        "Sleep": {
                            STATES.TEMPERATURE_STP_COOL: sleep_stp_cool,
                            STATES.TEMPERATURE_STP_HEAT: sleep_stp_heat,
                        },
                    }
                }

            if not hvac_mode_chg_pts:
                # default HVAC mode: heat for the whole simulation
                hvac_mode_chg_pts = {sim.start_utc: "heat"}

            # enforce ascending sorting of dict keys
            # (later change-point application assumes ascending timestamps)
            hvac_mode_chg_pts = dict(sorted(hvac_mode_chg_pts.items()))
            comfort_chg_pts = dict(sorted(comfort_chg_pts.items()))
            schedule_chg_pts = dict(sorted(schedule_chg_pts.items()))

            # check for errors in settings
            if len(hvac_mode_chg_pts) <= 0:
                raise ValueError(
                    f"Invalid hvac_mode_chg_pts={hvac_mode_chg_pts}.")
            if len(comfort_chg_pts) <= 0:
                raise ValueError(f"Invalid comfort_chg_pts={comfort_chg_pts}.")
            if len(schedule_chg_pts) <= 0:
                raise ValueError(
                    f"Invalid schedule_chg_pts={schedule_chg_pts}.")

            # fill each spec column with a down-cast default by channel type
            for k, v in spec.full.spec.items():
                _default_value, _ = Conversions.numpy_down_cast_default_value_dtype(
                    v["dtype"])
                if v["channel"] == CHANNELS.THERMOSTAT_SETTING:
                    # settings channels set with default values first
                    # they are set below after full df columns have been filled
                    _df[k] = _default_value
                elif v["channel"] == CHANNELS.WEATHER:
                    # default: set no values for outdoor_weather=None
                    # will default to using TMY3 data for the provided location
                    if outdoor_weather:
                        # outdoor_weather can be set with internal states as keys
                        if v["internal_state"] in outdoor_weather.keys():
                            _df[k] = outdoor_weather[v["internal_state"]]

                elif v["channel"] == CHANNELS.THERMOSTAT_SENSOR:
                    # sensor data unused for dummy data
                    # set default
                    _df[k] = _default_value
                elif v["channel"] == CHANNELS.EQUIPMENT:
                    # equipment data unused for dummy data
                    # set default
                    _df[k] = _default_value

            # settings is always in spec add in specific order
            # 1. add HVAC_MODE
            k_hvac_mode = [
                k for k, v in spec.full.spec.items()
                if v["internal_state"] == STATES.HVAC_MODE
            ][0]
            # assuming sorted ascending by timestamp
            # each change point sets all future hvac modes
            for _ts, _hvac_mode in hvac_mode_chg_pts.items():
                _df.loc[_df.index >= _ts, k_hvac_mode] = _hvac_mode

            # 2. add SCHEDULE
            k_schedule = [
                k for k, v in spec.full.spec.items()
                if v["internal_state"] == STATES.SCHEDULE
            ][0]
            # assuming sorted ascending by timestamp
            # each change point sets all future schedules
            for _ts, _schedule in schedule_chg_pts.items():
                # apply day-of-week schedules independently for each weekday
                for _dow in range(7):
                    _dow_schedule = [
                        _s for _s in _schedule if _s["on_day_of_week"][_dow]
                    ]
                    _dow_schedule = sorted(_dow_schedule,
                                           key=lambda k: k["minute_of_day"])
                    # previous day's schedules carry over past midnight
                    _prev_dow_schedule = [
                        _s for _s in _schedule
                        if _s["on_day_of_week"][(_dow - 1) % 7]
                    ]
                    _prev_dow_schedule = sorted(
                        _prev_dow_schedule, key=lambda k: k["minute_of_day"])
                    # first period is defined from previous day of week last schedule
                    _prev_s = _prev_dow_schedule[-1]
                    _s = _dow_schedule[0]
                    _df.loc[(_df.index >= _ts)
                            & (_df.index.day_of_week == _dow)
                            & (_df.index.hour * 60 +
                               _df.index.minute < _s["minute_of_day"]),
                            k_schedule, ] = _prev_s["name"]
                    # later periods overwrite earlier ones within the day
                    for _s in _dow_schedule:

                        _df.loc[(_df.index >= _ts)
                                & (_df.index.day_of_week == _dow)
                                & (_df.index.hour * 60 +
                                   _df.index.minute >= _s["minute_of_day"]),
                                k_schedule, ] = _s["name"]

            # 3. add comfort setpoints per schedule name
            k_stp_cool = [
                k for k, v in spec.full.spec.items()
                if v["internal_state"] == STATES.TEMPERATURE_STP_COOL
            ][0]
            k_stp_heat = [
                k for k, v in spec.full.spec.items()
                if v["internal_state"] == STATES.TEMPERATURE_STP_HEAT
            ][0]
            # assuming sorted ascending by timestamp
            # each change point sets all future comfort set points
            for _ts, _comfort in comfort_chg_pts.items():
                for _schedule_name, _setpoints in _comfort.items():
                    _df.loc[(_df.index >= _ts) &
                            (_df[k_schedule] == _schedule_name),
                            k_stp_cool, ] = _setpoints[
                                STATES.TEMPERATURE_STP_COOL]
                    _df.loc[(_df.index >= _ts) &
                            (_df[k_schedule] == _schedule_name),
                            k_stp_heat, ] = _setpoints[
                                STATES.TEMPERATURE_STP_HEAT]

            # move the time index into the spec's datetime column
            _df = _df.reset_index().rename(
                columns={"index": spec.datetime_column})

            # NOTE(review): returns inside the loop — see docstring
            return _df
Example n. 7
0
    def make_epw_file(
        self,
        sim_config,
        datetime_channel,
        epw_step_size_seconds,
    ):
        """Generate an .epw weather file in local time.

        Resamples self.epw_data to epw_step_size_seconds, computes dewpoint,
        shifts timestamps to a DST-invariant local offset, pads the final
        days, and writes the result to the simulation epw directory.

        :param sim_config: simulation config providing "identifier"
        :param datetime_channel: channel providing the simulation timezone
        :param epw_step_size_seconds: output time step of the .epw file
        :raises ValueError: if self.epw_data is empty
        :return: path of the written .epw file
        """
        if self.epw_data.empty:
            raise ValueError(
                f"No input: epw_data={self.epw_data} and epw_path={self.epw_path}"
            )

        self.epw_step_size_seconds = epw_step_size_seconds

        _epw_path = os.path.join(
            self.simulation_epw_dir,
            "NREL_EPLUS" + f"_{sim_config['identifier']}" + f"_{self.epw_fname}",
        )

        # most common sampling period of the data, in seconds
        _cur_epw_data_period = (
            self.epw_data[self.datetime_column].diff().mode()[0].total_seconds()
        )
        if _cur_epw_data_period < self.epw_step_size_seconds:
            # downsample data: mean for numeric columns, max for the rest
            non_numeric_cols = ["data_source_unct"]
            numeric_cols = [
                col for col in self.epw_data.columns if col not in non_numeric_cols
            ]

            self.epw_data = (
                self.epw_data.set_index(self.epw_data[self.datetime_column])
                .resample(f"{self.epw_step_size_seconds}S")
                .agg(
                    {
                        col: "mean" if col in numeric_cols else "max"
                        for col in self.epw_data.columns
                    }
                )
                .reset_index(drop=True)
            )
        elif _cur_epw_data_period > self.epw_step_size_seconds:
            # upsample data
            self.epw_data = self.epw_data.set_index(self.datetime_column)
            self.epw_data = self.epw_data.resample(
                f"{self.epw_step_size_seconds}S"
            ).asfreq()
            # forward-fill is the only gap-fill that works on all dtypes
            # (replaces deprecated interpolate(method="ffill"))
            self.epw_data = self.epw_data.ffill()
            self.epw_data = self.epw_data.reset_index()

        # NOTE:
        # EnergyPlus assumes solar radiance is given in W/m2 instead of Wh/m2
        # if more than one data interval per hour is given
        # see: https://github.com/NREL/EnergyPlus/blob/v9.4.0/src/EnergyPlus/WeatherManager.cc#L3147

        # compute dewpoint from dry-bulb and relative humidity
        self.epw_data["temp_dew"] = Conversions.relative_humidity_to_dewpoint(
            self.epw_data["temp_air"], self.epw_data["relative_humidity"]
        )

        # convert to local time INVARIANT to DST changes
        # .epw will have wrong hour columns if DST shift occurs during simulation
        # need a standard UTC offset for entire simulation period
        # no time zone shift occurs on or within 1 week of January 17th
        # use this for tz standard UTC offset
        tz_offset_seconds = datetime_channel.timezone.utcoffset(
            datetime(min(self.epw_data[self.datetime_column]).year, 1, 17)
        ).total_seconds()

        self.epw_data[self.datetime_column] = self.epw_data[
            self.datetime_column
        ] + pd.Timedelta(seconds=tz_offset_seconds)

        # last day of data must exist and be invariant to TZ shift
        # add ffill data for final day and extra day.
        _fill = self.epw_data.tail(1).copy(deep=True)
        _fill_rec = _fill.iloc[0]
        _fill[self.datetime_column] = _fill[self.datetime_column] + pd.Timedelta(
            days=2,
            hours=-_fill_rec[self.datetime_column].hour,
            minutes=-_fill_rec[self.datetime_column].minute,
            seconds=-_fill_rec[self.datetime_column].second,
        )
        # BUGFIX: DataFrame.append was removed in pandas 2.0; use pd.concat
        self.epw_data = pd.concat([self.epw_data, _fill], ignore_index=True)
        self.epw_data = self.epw_data.set_index(self.datetime_column)

        # resample to building frequency
        self.epw_data = self.epw_data.resample(
            f"{self.epw_step_size_seconds}S"
        ).asfreq()
        # first ffill then bfill will fill both sides padding data
        # (fillna(method=...) is deprecated in pandas >= 2.1)
        self.epw_data = self.epw_data.ffill()
        self.epw_data = self.epw_data.bfill()
        self.epw_data = self.epw_data.reset_index()

        self.epw_data["year"] = self.epw_data[self.datetime_column].dt.year
        self.epw_data["month"] = self.epw_data[self.datetime_column].dt.month
        self.epw_data["day"] = self.epw_data[self.datetime_column].dt.day
        # energyplus uses non-standard hours [1-24] this is accounted in to_epw()
        self.epw_data["hour"] = self.epw_data[self.datetime_column].dt.hour
        self.epw_data["minute"] = self.epw_data[self.datetime_column].dt.minute

        # date time columns can be smaller dtypes
        self.epw_data = self.epw_data.astype(
            {
                "year": "Int16",
                "month": "Int8",
                "day": "Int8",
                "hour": "Int8",
                "minute": "Int8",
            },
        )

        meta_lines = self.add_epw_data_periods(
            epw_data=self.epw_data,
            meta_lines=self.epw_meta_lines,
            sim_config=sim_config,
        )

        # save to file
        self.to_epw(
            epw_data=self.epw_data,
            meta_lines=meta_lines,
            fpath=_epw_path,
        )

        self.epw_path = _epw_path

        return self.epw_path