def main(config):
    """Prepare the estimation inputs for the neural network from the
    scenario's building properties, schedules and weather data."""
    global_vars = cea.globalvar.GlobalVariables()
    locator = cea.inputlocator.InputLocator(scenario=config.scenario)

    # the calculation year is taken from the scenario's weather file
    epw_columns = ['year', 'drybulb_C', 'wetbulb_C', 'relhum_percent',
                   'windspd_ms', 'skytemp_C']
    scenario_weather = epwreader.epw_reader(config.weather)[epw_columns]
    year = scenario_weather['year'][0]

    demand_settings = config.demand
    building_properties, schedules_dict, date = properties_and_schedule(
        global_vars, locator, config.region, year,
        demand_settings.use_daysim_radiation)

    # re-read the default weather file, keeping only the climatic variables
    # requested by the neural-network configuration
    climatic_variables = config.neural_network.climatic_variables
    weather_data = epwreader.epw_reader(
        locator.get_default_weather())[climatic_variables]

    input_prepare_estimate(
        building_properties.list_building_names(),
        locator,
        global_vars,
        climatic_variables=climatic_variables,
        region=config.region,
        year=config.neural_network.year,
        use_daysim_radiation=demand_settings.use_daysim_radiation,
        use_stochastic_occupancy=config.demand.use_stochastic_occupancy,
        weather_array=np.transpose(np.asarray(weather_data)),
        weather_data=epwreader.epw_reader(
            locator.get_default_weather())[climatic_variables])
Esempio n. 2
0
    def setUpClass(cls):
        """One-off test fixture: open the reference case, read its weather
        file and run the archetypes mapper so that building properties are
        available to every test in this class.

        NOTE(review): statement order matters -- the archetypes mapper must
        run before ``BuildingProperties`` is constructed, since it writes
        the input files that ``BuildingProperties`` reads.
        """
        import cea.examples
        cls.locator = cea.inputlocator.ReferenceCaseOpenLocator()
        cls.config = cea.config.Configuration(cea.config.DEFAULT_CONFIG)
        cls.config.scenario = cls.locator.scenario
        # keep only the weather columns needed by the demand calculation
        weather_path = cls.locator.get_weather('Zug_inducity_2009')
        cls.weather_data = epwreader.epw_reader(weather_path)[[
            'year', 'drybulb_C', 'wetbulb_C', 'relhum_percent', 'windspd_ms',
            'skytemp_C'
        ]]
        # hourly date range for the calculation year read from the weather file
        year = cls.weather_data['year'][0]
        cls.date_range = get_date_range_hours_from_year(year)
        # expected values for the assertions live in a .config file next to
        # this test module
        cls.test_config = configparser.ConfigParser()
        cls.test_config.read(
            os.path.join(os.path.dirname(__file__),
                         'test_calc_thermal_loads.config'))

        # run properties script
        import cea.datamanagement.archetypes_mapper
        cea.datamanagement.archetypes_mapper.archetypes_mapper(
            cls.locator, True, True, True, True, True, True,
            cls.locator.get_zone_building_names())

        cls.building_properties = BuildingProperties(
            cls.locator, epwreader.epw_reader(cls.locator.get_weather_file()))

        # demand-simulation settings shared by all tests of this class
        cls.use_dynamic_infiltration_calculation = cls.config.demand.use_dynamic_infiltration_calculation
        cls.resolution_output = cls.config.demand.resolution_output
        cls.loads_output = cls.config.demand.loads_output
        cls.massflows_output = cls.config.demand.massflows_output
        cls.temperatures_output = cls.config.demand.temperatures_output
        cls.debug = cls.config.debug
Esempio n. 3
0
def calc_SC(locator, sensors_data, radiation, latitude, longitude, year, gv, weather_path):
    """Calculate solar-collector generation for the scenario's sensor points
    and write the results to ``locator.solar_collectors_result()``.

    :param radiation: path to the hourly radiation CSV file
    :param sensors_data: DataFrame of sensor points with a 'total' column
    :return: None (results are written to disk)
    """
    # weather data
    weather_data = epwreader.epw_reader(weather_path)

    # solar properties
    g, Sz, Az, ha, trr_mean, worst_sh, worst_Az = solar_equations.calc_sun_properties(latitude, longitude, weather_data,
                                                                                      gv)

    # read radiation file
    hourly_data = pd.read_csv(radiation)

    # get only datapoints with production beyond min_production
    Max_Isol = hourly_data.total.max()
    Min_Isol = Max_Isol * gv.min_production  # 80% of the local average maximum in the area
    sensors_data_clean = sensors_data[sensors_data["total"] > Min_Isol]
    # BUG FIX: `radiation` is the CSV *path* (it is passed to pd.read_csv
    # above), so the original `radiation.loc[...]` would fail on a string.
    # Filter the parsed DataFrame instead.
    radiation_clean = hourly_data.loc[hourly_data['sensor_id'].isin(sensors_data_clean.sensor_id)]

    # Calculate the heights of all buildings for length of vertical pipes
    height = locator.get_total_demand().height.sum()

    # calculate optimal angle and tilt for panels
    # NOTE(review): the return value is discarded -- presumably this mutates
    # `sensors_data` in place; confirm against its definition.
    optimal_angle_and_tilt(sensors_data, latitude, worst_sh, worst_Az, trr_mean, gv.grid_side,
                           gv.module_lenght_SC, gv.angle_north, Min_Isol, Max_Isol)

    Number_groups, hourlydata_groups, number_points, prop_observers = calc_groups(radiation_clean, sensors_data_clean)

    result, Final = SC_generation(gv.type_SCpanel, hourlydata_groups, prop_observers, number_points, g, Sz, Az, ha,
                                  latitude,
                                  gv.Tin, height)

    Final.to_csv(locator.solar_collectors_result(), index=True, float_format='%.2f')
    return
Esempio n. 4
0
def create_data():
    """Create test data to compare against - run this the first time you make changes that affect the results. Note,
    this will overwrite the previous test data."""
    test_config = ConfigParser.SafeConfigParser()
    test_config.read(get_test_config_path())
    if not test_config.has_section('test_mixed_use_archetype_values'):
        test_config.add_section('test_mixed_use_archetype_values')
    locator = ReferenceCaseOpenLocator()
    expected_results = calculate_mixed_use_archetype_values_results(locator)
    test_config.set('test_mixed_use_archetype_values', 'expected_results', expected_results.to_json())

    config = cea.config.Configuration(cea.config.DEFAULT_CONFIG)
    locator = ReferenceCaseOpenLocator()

    # calculate schedules for a 50/50 office/server-room mixed-use building
    building_properties = BuildingProperties(locator, False)
    bpr = building_properties['B1011']
    bpr.occupancy = {'OFFICE': 0.5, 'SERVERROOM': 0.5}
    # FIX: removed dead code -- the original also read the weather file and
    # built an unused hourly date range plus an unused `list_uses` list
    # (which contained a 'SERVERRROOM' typo); none of it was referenced.

    calculated_schedules = schedule_maker_main(locator, config)
    if not test_config.has_section('test_mixed_use_schedules'):
        test_config.add_section('test_mixed_use_schedules')
    # store the schedule values at the reference timestep as the expected
    # results for the mixed-use schedule test
    test_config.set('test_mixed_use_schedules', 'reference_results', json.dumps(
        {schedule: calculated_schedules[schedule][REFERENCE_TIME] for schedule in calculated_schedules.keys()}))

    with open(get_test_config_path(), 'w') as f:
        test_config.write(f)
def calc_PV(locator, sensors_data, radiation, latitude, longitude, year, gv, weather_path):
    """Calculate photovoltaic generation for the scenario's sensor points and
    write the results to ``locator.PV_result()``.

    :param radiation: path to the hourly radiation CSV file
    :param sensors_data: DataFrame of sensor points with a 'total' column
    :return: None (results are written to disk)
    """
    # weather data
    weather_data = epwreader.epw_reader(weather_path)

    # solar properties
    g, Sz, Az, ha, trr_mean, worst_sh, worst_Az = solar_equations.calc_sun_properties(latitude, longitude, weather_data,
                                                                                      gv)

    # read radiation file
    hourly_data = pd.read_csv(radiation)

    # get only datapoints with production beyond min_production
    Max_Isol = hourly_data.total.max()
    Min_Isol = Max_Isol * gv.min_production  # 80% of the local average maximum in the area
    sensors_data_clean = sensors_data[sensors_data["total"] > Min_Isol]
    # BUG FIX: `radiation` is the CSV *path* (it is passed to pd.read_csv
    # above), so the original `radiation.loc[...]` would fail on a string.
    # Filter the parsed DataFrame instead.
    radiation_clean = hourly_data.loc[hourly_data['sensor_id'].isin(sensors_data_clean.sensor_id)]

    # get only datapoints with a minimum 50 W/m2 of radiation for energy production
    # NOTE(review): this compares every column against 50, including any
    # non-numeric ones (e.g. 'sensor_id') -- verify the frame is all-numeric
    # here, otherwise this raises in recent pandas versions.
    radiation_clean[radiation_clean[:] <= 50] = 0

    # calculate optimal angle and tilt for panels
    # NOTE(review): return value discarded -- presumably mutates sensors_data
    # in place; confirm against its definition.
    optimal_angle_and_tilt(sensors_data, latitude, worst_sh, worst_Az, trr_mean, gv.grid_side,
                           gv.module_lenght_PV, gv.angle_north, Min_Isol, Max_Isol)

    Number_groups, hourlydata_groups, number_points, prop_observers = calc_groups(radiation_clean, sensors_data_clean)

    results, Final = Calc_pv_generation(gv.type_PVpanel, hourlydata_groups, Number_groups, number_points,
                                            prop_observers, weather_data,g, Sz, Az, ha, latitude, gv.misc_losses)

    Final.to_csv(locator.PV_result(), index=True, float_format='%.2f')
    return
Esempio n. 6
0
    def setUpClass(cls):
        """One-off test fixture: unzip the bundled reference case into the
        temp directory, then build the properties and schedules used by the
        tests in this class (legacy, pre-config-object CEA API).

        NOTE(review): statement order matters -- the properties script must
        run before ``BuildingProperties`` is constructed.
        """
        import zipfile
        import tempfile
        import cea.examples
        # extract the reference-case-open.zip shipped inside cea.examples
        archive = zipfile.ZipFile(
            os.path.join(os.path.dirname(cea.examples.__file__),
                         'reference-case-open.zip'))
        archive.extractall(tempfile.gettempdir())
        reference_case = os.path.join(tempfile.gettempdir(),
                                      'reference-case-open', 'baseline')
        cls.locator = InputLocator(reference_case)
        cls.gv = GlobalVariables()
        # keep only the weather columns needed by the thermal-loads calculation
        weather_path = cls.locator.get_default_weather()
        cls.weather_data = epwreader.epw_reader(weather_path)[[
            'drybulb_C', 'relhum_percent', 'windspd_ms', 'skytemp_C'
        ]]

        # run properties script
        import cea.demand.preprocessing.properties
        cea.demand.preprocessing.properties.properties(cls.locator, True, True,
                                                       True, True)

        cls.building_properties = BuildingProperties(cls.locator, cls.gv)
        # one timestamp per hour of the (8760 h) calculation year
        cls.date = pd.date_range(cls.gv.date_start, periods=8760, freq='H')
        cls.list_uses = cls.building_properties.list_uses()
        cls.archetype_schedules, cls.archetype_values = schedule_maker(
            cls.date, cls.locator, cls.list_uses)
        cls.occupancy_densities = cls.archetype_values['people']
        # bundle everything the demand calculation expects into one dict
        cls.usage_schedules = {
            'list_uses': cls.list_uses,
            'archetype_schedules': cls.archetype_schedules,
            'occupancy_densities': cls.occupancy_densities,
            'archetype_values': cls.archetype_values
        }
def preproccessing(locator, total_demand, building_names, weather_file, gv):
    """Run the optimisation pre-processing chain: substation models,
    disconnected-building operation, network summary, solar features, and the
    cost/CO2/primary-energy totals for electricity and process heat.

    NOTE: legacy Python 2 code (print statements). The typo in the function
    name ('preproccessing') is kept because callers reference it.

    :return: (extraCosts, extraCO2, extraPrim, solarFeat)
    """

    # read weather and calculate ground temperature
    T_ambient = epwreader.epw_reader(weather_file)['drybulb_C']
    # NOTE(review): mutates the shared gv object; downstream steps read it
    gv.ground_temperature = geothermal.calc_ground_temperature(T_ambient.values, gv)

    print "Run substation model for each building separately"
    subsM.subsMain(locator, total_demand, building_names, gv, Flag = True) # 1 if disconected buildings are calculated

    print "Heating operation pattern for disconnected buildings"
    dbM.discBuildOp(locator, building_names, gv)

    print "Create network file with all buildings connected"
    nM.Network_Summary(locator, total_demand, building_names, gv, "all") #"_all" key for all buildings

    print "Solar features extraction"
    solarFeat = sFn.solarRead(locator, gv)

    print "electricity"
    elecCosts, elecCO2, elecPrim = electricity.calc_pareto_electricity(locator, gv)

    print "Process Heat "
    hpCosts, hpCO2, hpPrim = hpMain.calc_pareto_Qhp(locator, total_demand, gv)

    # aggregate side costs / emissions / primary energy
    extraCosts = elecCosts + hpCosts
    extraCO2 = elecCO2 + hpCO2
    extraPrim = elecPrim + hpPrim

    return extraCosts, extraCO2, extraPrim, solarFeat
Esempio n. 8
0
def main(config):
    """Evaluate neural-network performance for every building of the
    scenario, using the configured neural-network settings."""
    locator = cea.inputlocator.InputLocator(scenario=config.scenario)

    # the calculation year is taken from the scenario's weather file
    epw_columns = ['year', 'drybulb_C', 'wetbulb_C', 'relhum_percent',
                   'windspd_ms', 'skytemp_C']
    scenario_weather = epwreader.epw_reader(config.weather)[epw_columns]
    year = scenario_weather['year'][0]

    demand_settings = config.demand
    building_properties, schedules_dict, date = properties_and_schedule(
        locator, config.region, year, demand_settings.use_daysim_radiation)

    eval_nn_performance(
        locator,
        random_variables,
        target_parameters,
        building_properties.list_building_names(),
        config=config,
        nn_delay=config.neural_network.nn_delay,
        climatic_variables=config.neural_network.climatic_variables,
        region=config.region,
        year=config.neural_network.year,
        use_daysim_radiation=demand_settings.use_daysim_radiation,
        use_stochastic_occupancy=config.demand.use_stochastic_occupancy)
    def setUpClass(cls):
        """One-off test fixture: open the reference case, run the data helper
        and build the properties/schedules shared by the tests in this class.

        NOTE(review): statement order matters -- the data helper must run
        before ``properties_and_schedule``, which reads the files it writes.
        The zipfile/tempfile imports below appear unused in this fragment.
        """
        import zipfile
        import tempfile
        import cea.examples
        cls.locator = cea.inputlocator.ReferenceCaseOpenLocator()
        cls.config = cea.config.Configuration(cea.config.DEFAULT_CONFIG)
        # keep only the weather columns needed by the demand calculation
        weather_path = cls.locator.get_weather('Zug')
        cls.weather_data = epwreader.epw_reader(weather_path)[[
            'year', 'drybulb_C', 'wetbulb_C', 'relhum_percent', 'windspd_ms',
            'skytemp_C'
        ]]
        # the calculation year is read from the weather file
        year = cls.weather_data['year'][0]
        cls.region = cls.config.region
        # expected values for the assertions live next to this test module
        cls.test_config = ConfigParser.SafeConfigParser()
        cls.test_config.read(
            os.path.join(os.path.dirname(__file__),
                         'test_calc_thermal_loads.config'))

        # run properties script
        import cea.datamanagement.data_helper
        cea.datamanagement.data_helper.data_helper(cls.locator, cls.config,
                                                   True, True, True, True,
                                                   True, True)

        use_daysim_radiation = cls.config.demand.use_daysim_radiation
        cls.building_properties, cls.usage_schedules, cls.date = properties_and_schedule(
            cls.locator, cls.region, year, use_daysim_radiation)

        # demand-simulation settings shared by all tests of this class
        cls.use_dynamic_infiltration_calculation = cls.config.demand.use_dynamic_infiltration_calculation
        cls.use_stochastic_occupancy = cls.config.demand.use_stochastic_occupancy
        cls.resolution_output = cls.config.demand.resolution_output
        cls.loads_output = cls.config.demand.loads_output
        cls.massflows_output = cls.config.demand.massflows_output
        cls.temperatures_output = cls.config.demand.temperatures_output
        cls.format_output = cls.config.demand.format_output
Esempio n. 10
0
def main(config):
    """Prepare the main training inputs for the neural network from the
    scenario's building properties and schedules."""
    gv = cea.globalvar.GlobalVariables()
    locator = cea.inputlocator.InputLocator(scenario=config.scenario)
    demand_settings = config.demand
    nn_settings = config.neural_network

    # the calculation year is taken from the scenario's weather file
    epw_columns = ['year', 'drybulb_C', 'wetbulb_C', 'relhum_percent',
                   'windspd_ms', 'skytemp_C']
    scenario_weather = epwreader.epw_reader(config.weather)[epw_columns]
    year = scenario_weather['year'][0]

    building_properties, schedules_dict, date = properties_and_schedule(
        locator, config.region, year, demand_settings.use_daysim_radiation)

    # demand outputs the network is trained to predict
    target_parameters = ['Qhsf_kWh', 'Qcsf_kWh', 'Qwwf_kWh', 'Ef_kWh',
                         'T_int_C']
    input_prepare_main(
        building_properties.list_building_names(),
        locator,
        target_parameters,
        nn_delay=nn_settings.nn_delay,
        climatic_variables=nn_settings.climatic_variables,
        region=config.region,
        year=nn_settings.year,
        use_daysim_radiation=demand_settings.use_daysim_radiation,
        use_stochastic_occupancy=demand_settings.use_stochastic_occupancy)
def calc_SC(locator, sensors_data, radiation, latitude, longitude, year, gv, weather_path):
    """Calculate solar-collector generation for the scenario's sensor points
    and write the results to ``locator.solar_collectors_result()``.

    :param radiation: path to the hourly radiation CSV file
    :param sensors_data: DataFrame of sensor points with a 'total' column
    :return: None (results are written to disk)
    """

    # weather data
    weather_data = epwreader.epw_reader(weather_path)

    # solar properties
    g, Sz, Az, ha, trr_mean, worst_sh, worst_Az = solar_equations.calc_sun_properties(latitude, longitude, weather_data, gv)

    # read radiation file
    hourly_data = pd.read_csv(radiation)

    # get only datapoints with production beyond min_production
    Max_Isol = hourly_data.total.max()
    Min_Isol = Max_Isol * gv.min_production  # 80% of the local average maximum in the area
    sensors_data_clean = sensors_data[sensors_data["total"] > Min_Isol]
    # BUG FIX: `radiation` is the CSV *path* (it is passed to pd.read_csv
    # above), so the original `radiation.loc[...]` would fail on a string.
    # Filter the parsed DataFrame instead.
    radiation_clean = hourly_data.loc[hourly_data['sensor_id'].isin(sensors_data_clean.sensor_id)]

    # Calculate the heights of all buildings for length of vertical pipes
    height = locator.get_total_demand().height.sum()

    # calculate optimal angle and tilt for panels
    # NOTE(review): return value discarded -- presumably mutates sensors_data
    # in place; confirm against its definition.
    optimal_angle_and_tilt(sensors_data, latitude, worst_sh, worst_Az, trr_mean, gv.grid_side,
                           gv.module_lenght_SC, gv.angle_north, Min_Isol, Max_Isol)

    Number_groups, hourlydata_groups, number_points, prop_observers = calc_groups(radiation_clean, sensors_data_clean)

    result, Final = SC_generation(gv.type_SCpanel, hourlydata_groups, prop_observers, number_points, g, Sz, Az, ha, latitude,
                                  gv.Tin, height)

    Final.to_csv(locator.solar_collectors_result(), index=True, float_format='%.2f')
    return
Esempio n. 12
0
def main(config):
    """Run the full neural-network pipeline for the scenario, using the
    pre-fitted min/max scalers stored by the locator."""
    gv = cea.globalvar.GlobalVariables()
    locator = cea.inputlocator.InputLocator(scenario=config.scenario)
    nn_settings = config.neural_network
    demand_settings = config.demand
    weather_path = config.weather

    # the calculation year is taken from the scenario's weather file
    epw_columns = ['year', 'drybulb_C', 'wetbulb_C', 'relhum_percent',
                   'windspd_ms', 'skytemp_C']
    scenario_weather = epwreader.epw_reader(weather_path)[epw_columns]
    year = scenario_weather['year'][0]

    building_properties, schedules_dict, date = properties_and_schedule(
        gv, locator, config.region, year,
        demand_settings.use_daysim_radiation)

    # load the pre-fitted min/max scalers for inputs (X) and targets (T)
    scalerX_file, scalerT_file = locator.get_minmaxscalar_model()
    scalerX = joblib.load(scalerX_file)
    scalerT = joblib.load(scalerT_file)

    run_nn_pipeline(
        locator,
        random_variables,
        target_parameters,
        building_properties.list_building_names(),
        weather_path,
        gv,
        scalerX,
        scalerT,
        multiprocessing=config.multiprocessing,
        config=config,
        nn_delay=nn_settings.nn_delay,
        climatic_variables=nn_settings.climatic_variables,
        region=config.region,
        year=nn_settings.year,
        use_daysim_radiation=demand_settings.use_daysim_radiation)
Esempio n. 13
0
 def weather_data(self):
     """Weather columns ('year', 'drybulb_C', 'wetbulb_C') of the scenario
     weather file, loaded lazily on first access and cached afterwards."""
     if self.__weather_data is None:
         weather_file = self.locator.get_weather_file()
         columns = ['year', 'drybulb_C', 'wetbulb_C']
         self.__weather_data = epwreader.epw_reader(weather_file)[columns]
     return self.__weather_data
Esempio n. 14
0
def run_as_script(scenario_path=None):
    """
    run the whole network summary routine

    NOTE: legacy Python 2 code (print statement at the end).
    :param scenario_path: path to the scenario; defaults to the reference
        scenario configured in the global variables.
    """
    import cea.globalvar
    import cea.inputlocator as inputlocator
    from geopandas import GeoDataFrame as gpdf
    from cea.utilities import epwreader
    from cea.resources import geothermal

    gv = cea.globalvar.GlobalVariables()

    # fall back to the reference scenario when no path is given
    if scenario_path is None:
        scenario_path = gv.scenario_reference

    locator = inputlocator.InputLocator(scenario_path=scenario_path)
    total_demand = pd.read_csv(locator.get_total_demand())
    building_names = pd.read_csv(locator.get_total_demand())['Name']
    weather_file = locator.get_default_weather()
    # add geothermal part of preprocessing
    T_ambient = epwreader.epw_reader(weather_file)['drybulb_C']
    gv.ground_temperature = geothermal.calc_ground_temperature(T_ambient.values, gv)
    #substation_main(locator, total_demand, total_demand['Name'], gv, False)

    # hard-coded test parameters; see the FIXME markers
    t = 1000  # FIXME
    T_DH = 60  # FIXME
    network = 'DH'  # FIXME
    t_flag = True  # FIXME

    substations_HEX_specs, buildings = substation_HEX_design_main(locator, total_demand, building_names, gv)

    substation_return_model_main(locator, gv, building_names, buildings, substations_HEX_specs, T_DH, t, network, t_flag)

    print 'substation_main() succeeded'
Esempio n. 15
0
def run_as_script(config):
    """Fit the min/max sampling scaler for the neural network over all
    buildings of the scenario."""
    gv = cea.globalvar.GlobalVariables()
    locator = cea.inputlocator.InputLocator(scenario=config.scenario)
    nn_settings = config.neural_network
    demand_settings = config.demand

    # the calculation year is taken from the scenario's weather file
    epw_columns = ['year', 'drybulb_C', 'wetbulb_C', 'relhum_percent',
                   'windspd_ms', 'skytemp_C']
    scenario_weather = epwreader.epw_reader(config.weather)[epw_columns]
    year = scenario_weather['year'][0]

    building_properties, schedules_dict, date = properties_and_schedule(
        locator, config.region, year, demand_settings.use_daysim_radiation)

    sampling_scaler(
        locator=locator,
        random_variables=nn_settings.random_variables,
        target_parameters=nn_settings.target_parameters,
        boolean_vars=nn_settings.boolean_vars,
        list_building_names=building_properties.list_building_names(),
        number_samples_scaler=nn_settings.number_samples_scaler,
        nn_delay=nn_settings.nn_delay,
        gv=gv,
        config=config,
        climatic_variables=nn_settings.climatic_variables,
        year=nn_settings.year,
        use_daysim_radiation=demand_settings.use_daysim_radiation,
        use_stochastic_occupancy=demand_settings.use_stochastic_occupancy,
        region=config.region)
    def setUpClass(cls):
        """One-off test fixture: open the reference case, run the data helper
        and construct the building properties shared by the tests.

        NOTE(review): statement order matters -- the data helper must run
        before ``BuildingProperties``, which reads the files it writes.
        """
        import cea.examples
        cls.locator = cea.inputlocator.ReferenceCaseOpenLocator()
        cls.config = cea.config.Configuration(cea.config.DEFAULT_CONFIG)
        # keep only the weather columns needed by the demand calculation
        weather_path = cls.locator.get_weather('Zug-inducity_1990_2010_TMY')
        cls.weather_data = epwreader.epw_reader(weather_path)[
            ['year', 'drybulb_C', 'wetbulb_C', 'relhum_percent', 'windspd_ms', 'skytemp_C']]
        # the calculation year is read from the weather file
        year = cls.weather_data['year'][0]
        cls.date_range = get_dates_from_year(year)
        # expected values for the assertions live next to this test module
        cls.test_config = ConfigParser.SafeConfigParser()
        cls.test_config.read(os.path.join(os.path.dirname(__file__), 'test_calc_thermal_loads.config'))

        # run properties script ('CH' is the hard-coded region here)
        import cea.datamanagement.data_helper
        cea.datamanagement.data_helper.data_helper(cls.locator, 'CH', True, True, True, True, True, True, True)

        cls.building_properties = BuildingProperties(cls.locator, cls.config.demand.override_variables)

        # demand-simulation settings shared by all tests of this class
        cls.use_dynamic_infiltration_calculation = cls.config.demand.use_dynamic_infiltration_calculation
        cls.resolution_output = cls.config.demand.resolution_output
        cls.loads_output = cls.config.demand.loads_output
        cls.massflows_output = cls.config.demand.massflows_output
        cls.temperatures_output = cls.config.demand.temperatures_output
        cls.format_output = cls.config.demand.format_output
        cls.write_detailed_output = cls.config.demand.write_detailed_output
        cls.debug = cls.config.debug
def calc_geothermal_potential(locator, config):
    """Very simplified shallow-geothermal potential estimate based on the
    area available (building footprints plus configured extra area); writes
    source temperature, capacity and area to the geothermal-potential file."""
    # local variables
    weather_file = locator.get_weather_file()
    geo_settings = config.shallow_geothermal
    buildings = geo_settings.buildings_available
    extra_area = geo_settings.extra_area_available
    depth_m = geo_settings.average_probe_depth

    # dataprocessing
    area_below_buildings = calc_area_buildings(locator, buildings)
    T_ambient_C = epwreader.epw_reader(weather_file)[['drybulb_C']].values

    # total area available
    area_geothermal = extra_area + area_below_buildings

    T_ground_K = calc_ground_temperature(locator, T_ambient_C, depth_m)

    # convert back to degrees C (note: the original uses 273, not 273.15)
    t_source_final = [row[0] - 273 for row in T_ground_K]

    # number of probes that fit the area, times capacity per probe  # [kW th]
    Q_max_kwh = np.ceil(
        area_geothermal / GHP_A) * GHP_HMAX_SIZE / 1000

    # export
    results = pd.DataFrame({
        "Ts_C": t_source_final,
        "QGHP_kW": Q_max_kwh,
        "Area_avail_m2": area_geothermal
    })
    results.to_csv(locator.get_geothermal_potential(),
                   index=False, float_format='%.3f')
Esempio n. 18
0
def preproccessing(locator, total_demand, building_names, weather_file, gv):
    """Run the optimisation pre-processing chain: substation models,
    disconnected-building operation, network summary, solar features, and the
    cost/CO2/primary-energy totals for electricity and process heat.

    NOTE: legacy Python 2 code (print statements). The typo in the function
    name ('preproccessing') is kept because callers reference it.

    :return: (extraCosts, extraCO2, extraPrim, solarFeat)
    """

    # read weather and calculate ground temperature
    T_ambient = epwreader.epw_reader(weather_file)['drybulb_C']
    # NOTE(review): mutates the shared gv object; downstream steps read it
    gv.ground_temperature = geothermal.calc_ground_temperature(
        T_ambient.values, gv)

    print "Run substation model for each building separately"
    subsM.subsMain(locator, total_demand, building_names, gv,
                   Flag=True)  # 1 if disconected buildings are calculated

    print "Heating operation pattern for disconnected buildings"
    dbM.discBuildOp(locator, building_names, gv)

    print "Create network file with all buildings connected"
    nM.Network_Summary(locator, total_demand, building_names, gv,
                       "all")  #"_all" key for all buildings

    print "Solar features extraction"
    solarFeat = sFn.solarRead(locator, gv)

    print "electricity"
    elecCosts, elecCO2, elecPrim = electricity.calc_pareto_electricity(
        locator, gv)

    print "Process Heat "
    hpCosts, hpCO2, hpPrim = hpMain.calc_pareto_Qhp(locator, total_demand, gv)

    # aggregate side costs / emissions / primary energy
    extraCosts = elecCosts + hpCosts
    extraCO2 = elecCO2 + hpCO2
    extraPrim = elecPrim + hpPrim

    return extraCosts, extraCO2, extraPrim, solarFeat
Esempio n. 19
0
def main():
    """Driver that runs the thermal-loads calculation for B01 and a set of
    other reference-case buildings, printing the reference values to paste
    into the regression tests.

    NOTE: legacy Python 2 code (print statements mixed with print calls).
    """
    locator = InputLocator(REFERENCE_CASE)
    gv = GlobalVariables()
    weather_path = locator.get_default_weather()
    # keep only the weather columns needed by calc_thermal_loads
    weather_data = epwreader.epw_reader(weather_path)[[
        'drybulb_C', 'relhum_percent', 'windspd_ms', 'skytemp_C'
    ]]

    building_properties = BuildingProperties(locator, gv)
    # one timestamp per hour of the (8760 h) calculation year
    date = pd.date_range(gv.date_start, periods=8760, freq='H')
    list_uses = building_properties.list_uses()
    schedules = schedule_maker(date, locator, list_uses)
    usage_schedules = {'list_uses': list_uses, 'schedules': schedules}

    print("data for test_calc_thermal_loads_new_ventilation:")
    print building_properties.list_building_names()

    bpr = building_properties['B01']
    result = calc_thermal_loads('B01', bpr, weather_data, usage_schedules,
                                date, gv, locator)

    # test the building csv file
    df = pd.read_csv(locator.get_demand_results_file('B01'))

    expected_columns = list(df.columns)
    print("expected_columns = %s" % repr(expected_columns))

    # demand columns whose sums serve as reference values in the tests
    value_columns = [
        u'Ealf_kWh', u'Eauxf_kWh', u'Edataf_kWh', u'Ef_kWh', u'QCf_kWh',
        u'QHf_kWh', u'Qcdataf_kWh', u'Qcref_kWh', u'Qcs_kWh', u'Qcsf_kWh',
        u'Qhs_kWh', u'Qhsf_kWh', u'Qww_kWh', u'Qwwf_kWh', u'Tcsf_re_C',
        u'Thsf_re_C', u'Twwf_re_C', u'Tcsf_sup_C', u'Thsf_sup_C', u'Twwf_sup_C'
    ]

    print("values = %s " % repr([df[column].sum()
                                 for column in value_columns]))

    print("data for test_calc_thermal_loads_other_buildings:")
    # randomly selected except for B302006716, which has `Af == 0`
    buildings = {
        'B01': (81124.39400, 150471.05200),
        'B03': (81255.09200, 150520.01000),
        'B02': (82176.15300, 150604.85100),
        'B05': (84058.72400, 150841.56200),
        'B04': (82356.22600, 150598.43400),
        'B07': (81052.19000, 150490.94800),
        'B06': (83108.45600, 150657.24900),
        'B09': (84491.58100, 150853.54000),
        'B08': (88572.59000, 151020.09300),
    }

    # re-run each building and print its (QCf, QHf) totals as a dict entry
    for building in buildings.keys():
        bpr = building_properties[building]
        b, qcf_kwh, qhf_kwh = run_for_single_building(building, bpr,
                                                      weather_data,
                                                      usage_schedules, date,
                                                      gv, locator)
        print("'%(b)s': (%(qcf_kwh).5f, %(qhf_kwh).5f)," % locals())
Esempio n. 20
0
def schedule_maker_main(locator, config, building=None):
    """Build and save occupancy/internal-load schedules for the configured
    buildings of the scenario.

    :param locator: InputLocator of the current scenario
    :param config: cea Configuration (reads the ``schedule-maker`` section)
    :param building: optional single building name that overrides the
        configured building list (used by the tests)
    :return: None -- ``calc_schedules`` writes its results to disk
    :raises ValueError: for an unknown schedule model or a pre-v3.22 database
    """
    # local variables
    buildings = config.schedule_maker.buildings
    schedule_model = config.schedule_maker.schedule_model

    if schedule_model == 'deterministic':
        stochastic_schedule = False
    elif schedule_model == 'stochastic':
        stochastic_schedule = True
    else:
        raise ValueError("Invalid schedule model: {schedule_model}".format(**locals()))

    # FIX: compare to None by identity (PEP 8), not equality
    if building is not None:
        buildings = [building]  # this is to run the tests

    # CHECK DATABASE
    if is_3_22(config.scenario):
        raise ValueError("""The data format of indoor comfort has been changed after v3.22. 
        Please run Data migrator in Utilities.""")

    # get variables of indoor comfort and internal loads
    internal_loads = dbf_to_dataframe(locator.get_building_internal()).set_index('Name')
    indoor_comfort = dbf_to_dataframe(locator.get_building_comfort()).set_index('Name')
    architecture = dbf_to_dataframe(locator.get_building_architecture()).set_index('Name')

    # get building properties: footprint and gross floor areas above/below ground
    prop_geometry = Gdf.from_file(locator.get_zone_geometry())
    prop_geometry['footprint'] = prop_geometry.area
    prop_geometry['GFA_m2'] = prop_geometry['footprint'] * (prop_geometry['floors_ag'] + prop_geometry['floors_bg'])
    prop_geometry['GFA_ag_m2'] = prop_geometry['footprint'] * prop_geometry['floors_ag']
    prop_geometry['GFA_bg_m2'] = prop_geometry['footprint'] * prop_geometry['floors_bg']
    prop_geometry = prop_geometry.merge(architecture, on='Name').set_index('Name')
    prop_geometry = calc_useful_areas(prop_geometry)

    # get calculation year from weather file
    weather_path = locator.get_weather_file()
    weather_data = epwreader.epw_reader(weather_path)[['year', 'drybulb_C', 'wetbulb_C',
                                                       'relhum_percent', 'windspd_ms', 'skytemp_C']]
    year = weather_data['year'][0]

    # create date range for the calculation year
    date_range = get_date_range_hours_from_year(year)

    # SCHEDULE MAKER: fan out one calc_schedules call per building
    n = len(buildings)
    calc_schedules_multiprocessing = cea.utilities.parallel.vectorize(calc_schedules,
                                                                      config.get_number_of_processes(),
                                                                      on_complete=print_progress)

    calc_schedules_multiprocessing(repeat(locator, n),
                                   buildings,
                                   repeat(date_range, n),
                                   [internal_loads.loc[b] for b in buildings],
                                   [indoor_comfort.loc[b] for b in buildings],
                                   [prop_geometry.loc[b] for b in buildings],
                                   repeat(stochastic_schedule, n))
    return None
Esempio n. 21
0
def isolation_daysim(chunk_n, rad, geometry_3D_zone, locator, weather_path,
                     settings):
    """Run a Daysim radiation simulation for one chunk of buildings and write
    the per-sensor results to JSON, one file per building.

    NOTE: legacy Python 2 code (print statements).
    """

    # folder for data work
    daysim_dir = locator.get_temporary_file("temp" + str(chunk_n))
    rad.initialise_daysim(daysim_dir)

    # calculate sensors
    print " calculating and sending sensor points"
    sensors_coords_zone, sensors_dir_zone, sensors_number_zone, names_zone, \
    sensors_code_zone = calc_sensors_zone(geometry_3D_zone, locator, settings)
    rad.set_sensor_points(sensors_coords_zone, sensors_dir_zone)
    create_sensor_input_file(rad, chunk_n)

    num_sensors = sum(sensors_number_zone)
    print "Daysim simulation starts for building(s)", names_zone
    print "and the next number of total sensors", num_sensors
    # guard against chunks too large for a single Daysim instance
    if num_sensors > 50000:
        raise ValueError(
            'You are sending more than 50000 sensors at the same time, this \
                          will eventually crash a daysim instance. To solve it, reduce the number of buildings \
                          in each chunk in the Settings.py file')

    # add_elevation_weather_file(weather_path)
    rad.execute_epw2wea(weather_path, ground_reflectance=settings.albedo)
    rad.execute_radfiles2daysim()
    rad.write_radiance_parameters(
        settings.rad_ab, settings.rad_ad, settings.rad_as, settings.rad_ar,
        settings.rad_aa, settings.rad_lr, settings.rad_st, settings.rad_sj,
        settings.rad_lw, settings.rad_dj, settings.rad_ds, settings.rad_dr,
        settings.rad_dp)

    rad.execute_gen_dc("w/m2")
    rad.execute_ds_illum()
    solar_res = rad.eval_ill_per_sensor()

    #erase daysim folder to avoid conflicts after every iteration
    shutil.rmtree(daysim_dir)

    # check inconsistencies and replace by max value of weather file
    # check inconsistencies and replace by max value of weather file
    weatherfile = epwreader.epw_reader(weather_path)['glohorrad_Whm2'].values
    max_global = weatherfile.max()
    # any sensor value above the weather file's global-horizontal maximum is
    # treated as inconsistent and zeroed
    for i, value in enumerate(solar_res):
        solar_res[i] = [0 if x > max_global else x for x in value]

    print "Writing results to disk"
    index = 0
    # slice the flat per-sensor result list back into per-building blocks
    for building_name, sensors_number_building, sensor_code_building in zip(
            names_zone, sensors_number_zone, sensors_code_zone):
        selection_of_results = solar_res[index:index + sensors_number_building]
        items_sensor_name_and_result = dict(
            zip(sensor_code_building, selection_of_results))
        with open(locator.get_radiation_building(building_name),
                  'w') as outfile:
            json.dump(items_sensor_name_and_result, outfile)
        index = index + sensors_number_building
Esempio n. 22
0
def radiation_singleprocessing(cea_daysim, zone_building_names, locator,
                               settings, geometry_pickle_dir, num_processes):
    """
    Run the Daysim radiation simulation for the selected buildings.

    Buildings are grouped into chunks of ``settings.n_buildings_in_chunk``;
    a single chunk is processed in this process, several chunks are fanned
    out over ``num_processes`` workers via ``vectorize``.

    :param cea_daysim: Daysim wrapper object handed to each chunk
    :param zone_building_names: names of all buildings present in the zone
    :param locator: input locator for the scenario files
    :param settings: radiation settings (buildings, chunking, radiance params)
    :param geometry_pickle_dir: directory holding the pickled geometry
    :param num_processes: number of worker processes for multi-chunk runs
    """
    epw_path = locator.get_weather_file()
    # maximum global horizontal radiation of the weather file; chunks use it
    # to clip inconsistent sensor values
    weatherfile = epwreader.epw_reader(epw_path)
    max_global = weatherfile['glohorrad_Whm2'].max()

    # keep only the requested buildings that actually exist in the zone
    buildings_to_simulate = [name for name in settings.buildings
                             if name in zone_building_names]

    # split the building list into equally sized chunks
    chunk_size = settings.n_buildings_in_chunk
    chunks = [buildings_to_simulate[start:start + chunk_size]
              for start in range(0, len(buildings_to_simulate), chunk_size)]

    write_sensor_data = settings.write_sensor_data
    # collect the radiance simulation parameters from the settings object
    radiance_parameters = {name: getattr(settings, name)
                           for name in ("rad_ab", "rad_ad", "rad_as", "rad_ar",
                                        "rad_aa", "rad_lr", "rad_st", "rad_sj",
                                        "rad_lw", "rad_dj", "rad_ds", "rad_dr",
                                        "rad_dp")}
    grid_size = {"walls_grid": settings.walls_grid,
                 "roof_grid": settings.roof_grid}

    num_chunks = len(chunks)
    if num_chunks == 1:
        # one chunk only: no need to spin up worker processes
        for chunk_n, building_names in enumerate(chunks):
            daysim_main.isolation_daysim(chunk_n, cea_daysim, building_names,
                                         locator, radiance_parameters,
                                         write_sensor_data, grid_size,
                                         max_global, weatherfile,
                                         geometry_pickle_dir)
    else:
        vectorize(daysim_main.isolation_daysim,
                  num_processes)(range(0, num_chunks),
                                 repeat(cea_daysim, num_chunks), chunks,
                                 repeat(locator, num_chunks),
                                 repeat(radiance_parameters, num_chunks),
                                 repeat(write_sensor_data, num_chunks),
                                 repeat(grid_size, num_chunks),
                                 repeat(max_global, num_chunks),
                                 repeat(weatherfile, num_chunks),
                                 repeat(geometry_pickle_dir, num_chunks))
Esempio n. 23
0
def input_prepare_main(list_building_names, locator, target_parameters, gv,
                       nn_delay, climatic_variables, region, year,
                       use_daysim_radiation, use_stochastic_occupancy):
    '''
    This function prepares the inputs and targets for the neural net by
    splitting the jobs between different processors (one job per building).

    :param list_building_names: a list of building names
    :param locator: points to the variables
    :param target_parameters: (imported from 'nn_settings.py') a list containing the name of desirable outputs
    :param gv: global variables
    :param nn_delay: number of delayed time steps fed to the network
    :param climatic_variables: names of the weather columns used as features
    :param region: region key for the archetype databases
    :param year: year of the weather file
    :param use_daysim_radiation: whether radiation results come from Daysim
    :param use_stochastic_occupancy: whether stochastic occupancy schedules are sampled
    :return: inputs and targets for the whole dataset (urban_input_matrix, urban_taget_matrix)
    :raises ValueError: if every building produced NaN inputs
    '''

    #   collect weather data
    weather_data = epwreader.epw_reader(
        locator.get_default_weather())[climatic_variables]
    #   transpose the weather array
    weather_array = np.transpose(np.asarray(weather_data))
    building_properties, schedules_dict, date = properties_and_schedule(
        gv, locator, region, year, use_daysim_radiation)
    #   multiprocessing pool
    pool = mp.Pool()
    #   count number of CPUs
    gv.log("Using %i CPU's" % mp.cpu_count())
    #   create one job for each data preparation task i.e. each building
    from cea.demand.metamodel.nn_generator.input_matrix import input_prepare_multi_processing
    joblist = [pool.apply_async(input_prepare_multi_processing, [
        building_name, gv, locator, target_parameters, nn_delay,
        climatic_variables, region, year, use_daysim_radiation,
        use_stochastic_occupancy, weather_array, weather_data,
        building_properties, schedules_dict, date
    ]) for building_name in list_building_names]
    #   collect the results of all jobs; skip buildings that have "NaN" in
    #   their input (e.g. if heating/cooling is off, the indoor temperature
    #   is returned as "NaN")
    input_blocks = []
    target_blocks = []
    for job in joblist:
        NN_input_ready, NN_target_ready = job.get(240)
        if not np.isnan(np.sum(NN_input_ready)):
            input_blocks.append(NN_input_ready)
            target_blocks.append(NN_target_ready)

    #   close the multiprocessing
    pool.close()

    # BUGFIX: the original accumulated into urban_input_matrix starting at
    # job index 0; if the first job returned NaN (or all jobs did) the name
    # was never bound and the function crashed with a NameError.
    if not input_blocks:
        raise ValueError("all buildings produced NaN inputs; "
                         "nothing to prepare for the neural network")
    #   stack the inputs/targets of all valid buildings at once
    urban_input_matrix = np.concatenate(input_blocks)
    urban_taget_matrix = np.concatenate(target_blocks)

    print(urban_input_matrix)
    return urban_input_matrix, urban_taget_matrix
def preproccessing(locator, total_demand, buildings_heating_demand, buildings_cooling_demand,
                   weather_file, district_heating_network, district_cooling_network):
    """
    Preprocess all data needed by the optimization: ground temperature at
    network depth and the thermal-network layouts for the configuration in
    which every building is connected.

    :param locator: path to locator function
    :param total_demand: dataframe with total demand and names of all building in the area
    :param buildings_heating_demand: names of buildings with a heating demand
    :param buildings_cooling_demand: names of buildings with a cooling demand
    :param weather_file: path to weather file
    :param district_heating_network: True if a DH network should be preprocessed
    :param district_cooling_network: True if a DC network should be preprocessed
    :type locator: class
    :type total_demand: list
    :type weather_file: string
    :return: the network features object for both network types
    """

    # burial depth of the networks [m]
    network_depth_m = Z0

    print("PRE-PROCESSING 1/2: weather properties")
    T_ambient = epwreader.epw_reader(weather_file)['drybulb_C']
    ground_temp = calc_ground_temperature(locator, T_ambient, depth_m=network_depth_m)

    # at first estimate a distribution with all the buildings connected
    print("PRE-PROCESSING 2/2: thermal networks")
    if district_heating_network:
        num_tot_buildings = len(buildings_heating_demand)
        # barcode with one '1' per building: everyone connected ("_all" key)
        DHN_barcode = '1' * num_tot_buildings
        substation.substation_main_heating(locator, total_demand, buildings_heating_demand,
                                           DHN_barcode=DHN_barcode)
        summarize_network.network_main(locator, buildings_heating_demand, ground_temp,
                                       num_tot_buildings, "DH", DHN_barcode)

    if district_cooling_network:
        num_tot_buildings = len(buildings_cooling_demand)
        DCN_barcode = '1' * num_tot_buildings
        substation.substation_main_cooling(locator, total_demand, buildings_cooling_demand,
                                           DCN_barcode=DCN_barcode)
        summarize_network.network_main(locator, buildings_cooling_demand, ground_temp,
                                       num_tot_buildings, "DC", DCN_barcode)

    network_features = NetworkOptimizationFeatures(district_heating_network, district_cooling_network, locator)

    return network_features
Esempio n. 25
0
def calculate_daily_transmissivity_and_daily_diffusivity(weather_path):
    """Compute daily mean diffusivity ('diff') and transmissivity ('trr')
    from the hourly radiation columns of the weather file."""
    columns = ['dayofyear', 'exthorrad_Whm2', 'glohorrad_Whm2', 'difhorrad_Whm2']
    weather_data = epwreader.epw_reader(weather_path)[columns]
    # hourly diffuse fraction; division by zero global radiation (night
    # hours) yields inf/NaN values
    weather_data['diff'] = weather_data['difhorrad_Whm2'] / weather_data['glohorrad_Whm2']
    # drop the non-finite rows before averaging per day
    weather_data = weather_data[np.isfinite(weather_data['diff'])]
    daily_transmissivity = np.round(weather_data.groupby(['dayofyear']).mean(), 2)
    # a fully diffuse day (ratio == 1) is capped at 0.90
    daily_transmissivity['diff'] = daily_transmissivity['diff'].replace(1, 0.90)
    daily_transmissivity['trr'] = 1 - daily_transmissivity['diff']
    return daily_transmissivity
Esempio n. 26
0
    def _calculate_PVT_hourly_aggregated_kW(self):
        """Sum the hourly PVT result columns of all buildings and attach
        the date column taken from the scenario weather file."""
        # read the date (and weather extras) from the weather file
        weather_data = epwreader.epw_reader(
            self.weather)[["date", "drybulb_C", "wetbulb_C", "skytemp_C"]]

        # accumulate the per-building hourly results element-wise
        aggregated = None
        for building in self.buildings:
            building_results = pd.read_csv(self.locator.PVT_results(building),
                                           usecols=self.pvt_analysis_fields)
            if aggregated is None:
                aggregated = building_results
            else:
                aggregated = aggregated + building_results
        pvt_hourly_aggregated_kW = aggregated
        pvt_hourly_aggregated_kW['DATE'] = weather_data["date"]
        return pvt_hourly_aggregated_kW
Esempio n. 27
0
def main(config):
    """Entry point of the photovoltaic script: print the active solar
    settings, then compute the PV potential of every zone building in
    parallel and aggregate the per-building results."""
    assert os.path.exists(
        config.scenario), 'Scenario not found: %s' % config.scenario
    locator = cea.inputlocator.InputLocator(scenario=config.scenario)

    # echo the relevant solar configuration
    solar = config.solar
    print('Running photovoltaic with scenario = %s' % config.scenario)
    print('Running photovoltaic with annual-radiation-threshold-kWh/m2 = %s' %
          solar.annual_radiation_threshold)
    print('Running photovoltaic with panel-on-roof = %s' % solar.panel_on_roof)
    print('Running photovoltaic with panel-on-wall = %s' % solar.panel_on_wall)
    print('Running photovoltaic with solar-window-solstice = %s' %
          solar.solar_window_solstice)
    print('Running photovoltaic with type-pvpanel = %s' % solar.type_pvpanel)
    if solar.custom_tilt_angle:
        print('Running photovoltaic with custom-tilt-angle = %s and panel-tilt-angle = %s'
              % (solar.custom_tilt_angle, solar.panel_tilt_angle))
    else:
        print('Running photovoltaic with custom-tilt-angle = %s' %
              solar.custom_tilt_angle)
    if solar.custom_roof_coverage:
        print('Running photovoltaic with custom-roof-coverage = %s and max-roof-coverage = %s'
              % (solar.custom_roof_coverage, solar.max_roof_coverage))
    else:
        print('Running photovoltaic with custom-roof-coverage = %s' %
              solar.custom_roof_coverage)

    building_names = locator.get_zone_building_names()
    zone_geometry_df = gdf.from_file(locator.get_zone_geometry())
    latitude, longitude = get_lat_lon_projected_shapefile(zone_geometry_df)

    # local datetime index derived from the weather file and location
    weather_data = epwreader.epw_reader(locator.get_weather_file())
    date_local = solar_equations.calc_datetime_local_from_weather_file(
        weather_data, latitude, longitude)

    num_process = config.get_number_of_processes()
    n_buildings = len(building_names)
    cea.utilities.parallel.vectorize(calc_PV, num_process)(
        repeat(locator, n_buildings),
        repeat(config, n_buildings),
        repeat(latitude, n_buildings),
        repeat(longitude, n_buildings),
        repeat(weather_data, n_buildings),
        repeat(date_local, n_buildings),
        building_names)

    # aggregate results from all buildings
    write_aggregate_results(locator, building_names, num_process)
def disconnected_buildings_heating_main(locator, total_demand, building_names,
                                        config, prices, lca):
    """
    Compute the operation parameters of disconnected (stand-alone) buildings
    and write the results to csv files.

    There is no optimization at this point: each candidate technology is
    simulated and compared one to one — a classical combinatorial problem.

    :param locator: locator class
    :param total_demand: dataframe with the demand of all buildings
    :param building_names: list with names of buildings
    :param config: cea configuration
    :param prices: energy price data
    :param lca: life-cycle (emissions) data
    :type locator: class
    :type building_names: list
    :return: results of operation of buildings located in locator.get_optimization_decentralized_folder
    :rtype: Nonetype
    """
    start_time = time.perf_counter()
    zone_gdf = Gdf.from_file(locator.get_zone_geometry())
    geometry = pd.DataFrame({'Name': zone_gdf.Name, 'Area': zone_gdf.area})
    geothermal_potential_data = dbf.dbf_to_dataframe(locator.get_building_supply())
    # attach the footprint area of each building to its supply record
    geothermal_potential_data = pd.merge(geothermal_potential_data, geometry, on='Name')
    geothermal_potential_data['Area_geo'] = geothermal_potential_data['Area']
    weather_data = epwreader.epw_reader(locator.get_weather_file())[[
        'year', 'drybulb_C', 'wetbulb_C', 'relhum_percent', 'windspd_ms',
        'skytemp_C'
    ]]

    # ground temperature at 10 m depth, one value per hour
    T_ground_K = calc_ground_temperature(locator, weather_data['drybulb_C'],
                                         depth_m=10)
    supply_systems = SupplySystemsDatabase(locator)

    # substation state if all buildings were connected (this is how we study this)
    substation.substation_main_heating(locator, total_demand, building_names)

    n = len(building_names)
    cea.utilities.parallel.vectorize(disconnected_heating_for_building,
                                     config.get_number_of_processes())(
                                         building_names,
                                         repeat(supply_systems, n),
                                         repeat(T_ground_K, n),
                                         repeat(geothermal_potential_data, n),
                                         repeat(lca, n), repeat(locator, n),
                                         repeat(prices, n))

    print(time.perf_counter() - start_time,
          "seconds process time for the Disconnected Building Routine \n")
Esempio n. 29
0
def calculate_ground_temperature(locator):
    """
    Calculate the hourly ground temperature at network depth.

    :param locator: input locator for the scenario files
    :return: list of ground temperatures, one for each hour of the year
    :rtype: list[np.float64]
    """
    drybulb_C = epw_reader(locator.get_weather_file())['drybulb_C']
    # NETWORK_DEPTH is the burial depth of the thermal network [m]
    T_ground_K = geothermal.calc_ground_temperature(locator, drybulb_C.values,
                                                    NETWORK_DEPTH)
    return T_ground_K
Esempio n. 30
0
def get_array_weather_variables(locator, climatic_variables):
    '''
    Collect the climatic features from the default weather file.

    :param locator: points to the variables
    :param climatic_variables: names of the weather columns to extract
    :return: array of climatic features and weather properties (weather_array, weather_data)
    '''
    # read only the requested climate columns from the default weather file
    weather_data = epwreader.epw_reader(
        locator.get_default_weather())[climatic_variables]
    # features-by-hours layout: transpose the hours-by-features frame
    weather_array = np.asarray(weather_data).T

    return weather_array, weather_data
Esempio n. 31
0
def calc_VCC_COP(config, load_types, centralized=True):
    """
    Calculates the VCC COP based on evaporator and condenser temperatures, the
    VCC g-value, and an assumption of auxiliary power demand for centralized
    and decentralized systems.  This approximation only works in tropical
    climates.

    Clark D (CUNDALL). Chiller energy efficiency 2013.

    :param config: cea configuration (used to locate the weather file)
    :param load_types: a list containing the systems (aru, ahu, scu) that the chiller is supplying for
    :param centralized: True for a district (centralized) system, False for a building-scale one
    :return: (cop_system, cop_chiller)
    """
    # idiom fix: truthiness instead of `== True`
    g_value = G_VALUE_CENTRALIZED if centralized else G_VALUE_DECENTRALIZED

    # find the minimum evaporator temperature over all supplied loads
    evap_temperatures_K = {'ahu': T_EVAP_AHU, 'aru': T_EVAP_ARU, 'scu': T_EVAP_SCU}
    T_evap_K = 10000000  # some high enough value
    for load_type in load_types:
        if load_type in evap_temperatures_K:
            T_evap_K = min(T_evap_K, evap_temperatures_K[load_type])
        else:
            # keep the original best-effort behavior: warn, do not raise
            print('Undefined cooling load_type for chiller COP calculation.')

    if centralized:  # Todo: improve this to a better approximation than a static value DT_Network
        # for the centralized case we have to supply somewhat colder, currently based on CEA calculation for MIX_m case
        T_evap_K = T_evap_K - DT_NETWORK_CENTRALIZED

    # read weather data for condenser temperature calculation
    locator = cea.inputlocator.InputLocator(config.scenario)
    weather_path = locator.get_weather_file()
    weather_data = epwreader.epw_reader(weather_path)[[
        'year', 'drybulb_C', 'wetbulb_C'
    ]]
    # calculate condenser temperature with static approach temperature assumptions
    # FIXME: only works for tropical climates
    T_cond_K = (np.mean(weather_data['wetbulb_C'])
                + CHILLER_DELTA_T_APPROACH + CHILLER_DELTA_T_HEX_CT + 273.15)
    # calculate chiller COP
    cop_chiller = g_value * T_evap_K / (T_cond_K - T_evap_K)
    # calculate system COP with pumping power of auxiliaries
    aux_percentage = (CENTRALIZED_AUX_PERCENTAGE if centralized
                      else DECENTRALIZED_AUX_PERCENTAGE)
    cop_system = 1 / (1 / cop_chiller * (1 + aux_percentage / 100))

    return cop_system, cop_chiller
def main():
    """Run the thermal-loads model on the reference case and print the
    values used as expected data by the thermal-loads unit tests."""
    locator = InputLocator(REFERENCE_CASE)
    gv = GlobalVariables()
    weather_data = epwreader.epw_reader(locator.get_default_weather())[
        ['drybulb_C', 'relhum_percent', 'windspd_ms', 'skytemp_C']]

    building_properties = BuildingProperties(locator, gv)
    date = pd.date_range(gv.date_start, periods=8760, freq='H')
    list_uses = building_properties.list_uses()
    schedules = schedule_maker(date, locator, list_uses)
    usage_schedules = {'list_uses': list_uses,
                       'schedules': schedules}

    print("data for test_calc_thermal_loads_new_ventilation:")
    print(building_properties.list_building_names())

    bpr = building_properties['B01']
    result = calc_thermal_loads('B01', bpr, weather_data, usage_schedules, date, gv, locator)

    # inspect the demand csv written for building B01
    df = pd.read_csv(locator.get_demand_results_file('B01'))

    expected_columns = list(df.columns)
    print("expected_columns = %s" % repr(expected_columns))

    value_columns = [u'Ealf_kWh', u'Eauxf_kWh', u'Edataf_kWh', u'Ef_kWh', u'QCf_kWh', u'QHf_kWh',
                     u'Qcdataf_kWh', u'Qcref_kWh', u'Qcs_kWh', u'Qcsf_kWh', u'Qhs_kWh', u'Qhsf_kWh', u'Qww_kWh',
                     u'Qwwf_kWh', u'Tcsf_re_C', u'Thsf_re_C', u'Twwf_re_C', u'Tcsf_sup_C', u'Thsf_sup_C',
                     u'Twwf_sup_C']

    print("values = %s " % repr([df[col].sum() for col in value_columns]))

    print("data for test_calc_thermal_loads_other_buildings:")
    # randomly selected except for B302006716, which has `Af == 0`
    buildings = {'B01': (81124.39400, 150471.05200),
                 'B03': (81255.09200, 150520.01000),
                 'B02': (82176.15300, 150604.85100),
                 'B05': (84058.72400, 150841.56200),
                 'B04': (82356.22600, 150598.43400),
                 'B07': (81052.19000, 150490.94800),
                 'B06': (83108.45600, 150657.24900),
                 'B09': (84491.58100, 150853.54000),
                 'B08': (88572.59000, 151020.09300), }

    for building in buildings.keys():
        bpr = building_properties[building]
        b, qcf_kwh, qhf_kwh = run_for_single_building(building, bpr, weather_data, usage_schedules,
                                                      date, gv, locator)
        print("'%s': (%.5f, %.5f)," % (b, qcf_kwh, qhf_kwh))
Esempio n. 33
0
def main(config):
    """
    Entry point for the sampling-scaler calibrator: read the scenario weather
    file to determine the year, build the building properties and run the
    calibrator over all buildings of the scenario.

    :param config: cea configuration object
    """
    locator = cea.inputlocator.InputLocator(scenario=config.scenario)
    weather_data = epwreader.epw_reader(locator.get_weather_file())[[
        'year', 'drybulb_C', 'wetbulb_C', 'relhum_percent', 'windspd_ms',
        'skytemp_C'
    ]]
    year = weather_data['year'][0]
    building_properties, schedules_dict, date = properties_and_schedule(
        locator, year)
    list_building_names = building_properties.list_building_names()
    # reuse the locator and building list computed above; the original code
    # constructed a second InputLocator and left these locals unused
    ss_calibrator(
        number_samples_scaler=config.neural_network.number_samples_scaler,
        locator=locator,
        list_building_names=list_building_names)
0
def radiation_singleprocessing(rad, geometry_3D_zone, locator, settings):
    """Run the Daysim simulation for the selected buildings, one chunk at a
    time, in the current process."""
    # maximum global horizontal radiation of the weather file; chunks use it
    # to clip inconsistent sensor values
    weatherfile = epwreader.epw_reader(locator.get_weather_file())
    max_global = weatherfile['glohorrad_Whm2'].max()

    # keep only the buildings requested in the settings
    selected_buildings = [bldg for bldg in geometry_3D_zone
                          if bldg['name'] in settings.buildings]
    # split the buildings into equally sized chunks
    size = settings.n_buildings_in_chunk
    chunks = [selected_buildings[start:start + size]
              for start in range(0, len(selected_buildings), size)]

    for chunk_n, building_dict in enumerate(chunks):
        daysim_main.isolation_daysim(chunk_n, rad, building_dict, locator, settings, max_global, weatherfile)
    def setUpClass(cls):
        """Load the reference case, weather data, building properties and
        schedules shared by all tests in this class."""
        # `dict.has_key()` is deprecated in Python 2 and removed in Python 3;
        # the `in` operator works in both
        if 'REFERENCE_CASE' in os.environ:
            cls.locator = InputLocator(os.environ['REFERENCE_CASE'])
        else:
            cls.locator = InputLocator(REFERENCE_CASE)
        cls.gv = GlobalVariables()

        weather_path = cls.locator.get_default_weather()
        cls.weather_data = epwreader.epw_reader(weather_path)[['drybulb_C', 'relhum_percent', 'windspd_ms', 'skytemp_C']]

        cls.building_properties = BuildingProperties(cls.locator, cls.gv)
        cls.date = pd.date_range(cls.gv.date_start, periods=8760, freq='H')
        cls.list_uses = cls.building_properties.list_uses()
        cls.schedules = schedule_maker(cls.date, cls.locator, cls.list_uses)
        cls.usage_schedules = {'list_uses': cls.list_uses,
                               'schedules': cls.schedules}
def solar_radiation_vertical(locator, path_arcgis_db, latitude, longitude, year, gv, weather_path):
    """
    algorithm to calculate the hourly solar isolation in vertical building surfaces.
    The algorithm is based on the Solar Analyst Engine of ArcGIS 10.
    For more info check the integrated demand model of Fonseca et al. 2015. Appl. energy.

    Parameters
    ----------
    path_geometry : string
        path to file buildings_geometry.shp
    path_boundary: string
        path to file zone_of_study.shp
    path_arcgis_db: boolean
        path to default database of Arcgis. Generally of the form c:\users\your_name\Documents\Arcgis\Default.gdb
    latitude: float
        latitude north  at the centre of the location
    longitude: float
        latitude north
    timezone: integer
        timezone (UTC)
    year: integer
        year of calculation
    path_dem_raster: string
        path to terrain file
    weather_daily_data: string
        path to weather_day.csv file
    prop_architecture_flag: boolean
        True, get properties about the construction and architecture, otherwise False.
    prop_HVAC_flag: boolean
        True, get properties about types of HVAC systems, otherwise False.
    gv: GlobalVariables
        an instance of globalvar.GlobalVariables with the constants
        to use (like `list_uses` etc.)
    Returns
    -------
    radiation: .csv
        solar radiation file in vertical surfaces of buildings stored in path_output
    """

    # Set environment settings
    arcpy.env.workspace = path_arcgis_db
    arcpy.env.overwriteOutput = True
    arcpy.CheckOutExtension("spatial")

    # local variables
    aspect_slope = "FROM_DEM"
    heightoffset = 1
    Simple_CQ = path_arcgis_db + "\\" + "Simple_CQ"
    Simple_context = path_arcgis_db + "\\" + "Simple_context"
    dem_rasterfinal = path_arcgis_db + "\\" + "DEM_All2"
    observers = path_arcgis_db + "\\" + "observers"
    DataFactorsBoundaries = locator.get_temporary_file("DataFactorsBoundaries.csv")
    DataFactorsCentroids = locator.get_temporary_file("DataFactorsCentroids.csv")
    DataradiationLocation = locator.get_temporary_file("RadiationYear.csv")

    # calculate sunrise for every day of the year
    sunrise = calc_sunrise(range(1, 366), year, longitude, latitude, gv)

    # calcuate daily transmissivity and daily diffusivity
    weather_data = epwreader.epw_reader(weather_path)[
        ["dayofyear", "exthorrad_Whm2", "glohorrad_Whm2", "difhorrad_Whm2"]
    ]
    weather_data["diff"] = weather_data.difhorrad_Whm2 / weather_data.glohorrad_Whm2
    weather_data = weather_data[np.isfinite(weather_data["diff"])]
    T_G_day = np.round(weather_data.groupby(["dayofyear"]).mean(), 2)
    T_G_day["diff"] = T_G_day["diff"].replace(1, 0.90)
    T_G_day["trr"] = 1 - T_G_day["diff"]

    # Simplify building's geometry
    elevRaster = arcpy.sa.Raster(locator.get_terrain())
    dem_raster_extent = elevRaster.extent
    arcpy.SimplifyBuilding_cartography(
        locator.get_building_geometry(), Simple_CQ, simplification_tolerance=8, minimum_area=None
    )
    arcpy.SimplifyBuilding_cartography(
        locator.get_district(), Simple_context, simplification_tolerance=8, minimum_area=None
    )

    # burn buildings into raster
    Burn(Simple_context, locator.get_terrain(), dem_rasterfinal, locator.get_temporary_folder(), dem_raster_extent, gv)

    # Calculate boundaries of buildings
    CalcBoundaries(
        Simple_CQ, locator.get_temporary_folder(), path_arcgis_db, DataFactorsCentroids, DataFactorsBoundaries, gv
    )

    # calculate observers
    CalcObservers(Simple_CQ, observers, DataFactorsBoundaries, path_arcgis_db, gv)

    # Calculate radiation
    for day in range(1, 366):
        result = None
        while result is None:  # trick to avoid that arcgis stops calculating the days and tries again.
            # NOTE(review): if CalcRadiation ever legitimately returned None
            # this would loop forever; presumably it returns a value on
            # success — confirm against its implementation
            try:
                result = CalcRadiation(
                    day,
                    dem_rasterfinal,
                    observers,
                    T_G_day,
                    latitude,
                    locator.get_temporary_folder(),
                    aspect_slope,
                    heightoffset,
                    gv,
                )
            except arcgisscripting.ExecuteError:
                # redo the calculation
                pass

    gv.log("complete raw radiation files")

    # run the transformation of files appending all and adding non-sunshine hours
    radiations = []
    for day in range(1, 366):
        radiations.append(calc_radiation_day(day, sunrise, locator.get_temporary_folder()))

    radiationyear = radiations[0]
    for r in radiations[1:]:
        radiationyear = radiationyear.merge(r, on="ID", how="outer")
    radiationyear.fillna(value=0, inplace=True)
    # BUGFIX: pandas' keyword is lowercase `index`; the original passed
    # `Index=False`, which raises TypeError (unexpected keyword argument)
    radiationyear.to_csv(DataradiationLocation, index=False)

    radiationyear = radiations = None
    gv.log("complete transformation radiation files")

    # Assign radiation to every surface of the buildings
    Data_radiation_path = CalcRadiationSurfaces(
        observers, DataFactorsCentroids, DataradiationLocation, locator.get_temporary_folder(), path_arcgis_db
    )

    # get solar insolation @ daren: this is a A BOTTLE NECK
    CalcIncidentRadiation(Data_radiation_path, locator.get_radiation(), locator.get_surface_properties(), gv)
    gv.log("done")
def demand_calculation(locator, weather_path, gv):
    """
    Calculate the hourly demand of energy services in buildings using the
    integrated model of Fonseca et al. 2015. Applied energy.
    (http://dx.doi.org/10.1016/j.apenergy.2014.12.068)

    :param locator: An InputLocator to locate input files
    :type locator: inputlocator.InputLocator
    :param weather_path: A path to the EnergyPlus weather data file (.epw)
    :type weather_path: str
    :param gv: A GlobalVariable (context) instance
    :type gv: globalvar.GlobalVariable
    :returns: the value returned by write_totals_csv (the yearly totals)

    Input files (via the locator): radiation.csv, properties_surfaces.csv,
    zone.shp and the building-properties shapefiles (technical_systems,
    thermal_properties, occupancy, architecture, age, indoor_comfort,
    internal_loads).

    Side effects: produces a demand file per building with hourly data
    (e.g. B153767T.csv) and Total_demand.csv with yearly demand per building.
    """
    start_cpu = time.clock()

    # hourly datetime index for one year
    date = pd.date_range(gv.date_start, periods=8760, freq='H')

    # weather model
    weather_data = epwreader.epw_reader(weather_path)[
        ['drybulb_C', 'relhum_percent', 'windspd_ms', 'skytemp_C']]

    # building properties model
    building_properties = BuildingProperties(locator, gv)

    # schedules model: one schedule per occupancy use type
    list_uses = list(building_properties._prop_occupancy.drop('PFloor', axis=1).columns)
    schedules = occupancy_model.schedule_maker(date, locator, list_uses)
    schedules_dict = {'list_uses': list_uses, 'schedules': schedules}

    # demand model: run in parallel when enabled and more than one CPU exists
    num_buildings = len(building_properties)
    run_parallel = gv.multiprocessing and mp.cpu_count() > 1
    if run_parallel:
        thermal_loads_all_buildings_multiprocessing(building_properties, date, gv, locator,
                                                    num_buildings, schedules_dict,
                                                    weather_data)
    else:
        thermal_loads_all_buildings(building_properties, date, gv, locator, num_buildings,
                                    schedules_dict, weather_data)
    totals = write_totals_csv(building_properties, locator, gv)
    gv.log('done - time elapsed: %(time_elapsed).2f seconds', time_elapsed=time.clock() - start_cpu)

    return totals