def test_monthlychart_daily_stack():
    """Test the initialization of MonthlyChart with daily stacked data collections."""
    header = Header(Energy(), 'kWh', AnalysisPeriod())
    values = [i / 31 for i in range(365)]
    date_t = list(range(1, 366))
    data_coll = DailyCollection(header, values, date_t)
    month_chart = MonthlyChart([data_coll])
    meshes = month_chart.data_meshes
    assert len(meshes) == 1
    assert isinstance(meshes[0], Mesh2D)
    assert len(meshes[0].faces) == 365
    assert month_chart.data_polylines is None

    header2 = Header(Energy(), 'kWh', AnalysisPeriod())
    values2 = [i / 31 for i in range(365)]
    data_coll2 = DailyCollection(header2, values2, date_t)
    month_chart = MonthlyChart([data_coll, data_coll2])
    meshes = month_chart.data_meshes
    assert len(meshes) == 2
    assert isinstance(meshes[1], Mesh2D)
    assert len(meshes[1].faces) == 365

    month_chart = MonthlyChart([data_coll, data_coll2], stack=True)
    meshes = month_chart.data_meshes
    assert len(meshes) == 2
    assert isinstance(meshes[1], Mesh2D)
    assert len(meshes[1].faces) == 365
def test_histogram_data_nested():
    # Testing vals
    dir_vals = [0, 0, 0, 10, 10, 10, 85, 90, 90, 90, 95, 170, 285, 288]
    spd_vals = dir_vals

    # Make into fake data collections
    a_per = AnalysisPeriod(6, 21, 12, 6, 21, 13)
    dates = [DateTime(6, 21, i) for i in range(len(dir_vals))]
    spd_header = Header(Speed(), 'm/s', a_per)
    dir_header = Header(GenericType('Direction', 'deg'), 'deg', a_per)
    spd_data = HourlyDiscontinuousCollection(spd_header, spd_vals, dates)
    dir_data = HourlyDiscontinuousCollection(dir_header, dir_vals, dates)

    # Init simple example w segs == bin num
    w = WindRose(dir_data, spd_data, 4)
    # w.legend_parameters = LegendParameters(segment_count=5)
    w.frequency_hours = 1

    # Bin values to divide into colors
    # 315-45:  [10, 10, 10]; 2 intervals, [10, 10, 10]
    # 45-135:  [85, 90, 90, 90, 95]; 3 intervals, [85, 90, 90, 90, 95]
    # 135-225: [170]; 1 interval, [170]
    # 225-315: [285, 288]; 2 intervals, [285, 288]
    # interval_num: [2, 3, 1, 2]
    chk_histstack = [
        [10, 10, 10],
        [85, 90, 90, 90, 95],
        [170.],
        [285, 288]]

    # Testing
    histstack = WindRose._histogram_data_nested(w.histogram_data, 1)
    for chkh, h in zip(chk_histstack, histstack):
        for c, _h in zip(chkh, h):
            assert abs(c - _h) <= 1e-10

    # Init complex dir set divided by 4
    w = WindRose(dir_data, spd_data, 4)
    w.frequency_hours = 2

    # Bin values to divide into colors
    # 315-45:  [10, 10, 10]; 2 intervals, [10, 10]
    # 45-135:  [85, 90, 90, 90, 95]; 3 intervals, [87.5, 90, 95.]
    # 135-225: [170]; 1 interval, [170]
    # 225-315: [285, 288]; 2 intervals, [286.5]
    # interval_num: [2, 3, 1, 2]
    chk_histstack = [
        [10, 10],
        [87.5, 90, 95.],
        [170.],
        [286.5]]

    # Testing
    histstack = WindRose._histogram_data_nested(w.histogram_data, 2)
    for chkh, h in zip(chk_histstack, histstack):
        for c, _h in zip(chkh, h):
            assert abs(c - _h) <= 1e-10
def test_monthlychart_hourly_stack():
    """Test the initialization of MonthlyChart with hourly stacked data collections."""
    header = Header(Energy(), 'kWh', AnalysisPeriod())
    values = [i / 365 for i in range(20, 8780)]
    data_coll = HourlyContinuousCollection(header, values)
    month_chart = MonthlyChart([data_coll])
    meshes = month_chart.data_meshes
    assert len(meshes) == 1
    assert isinstance(meshes[0], Mesh2D)
    assert len(meshes[0].faces) == 24 * 12
    assert month_chart.y_axis_labels1[0] == '0.00'

    header2 = Header(Energy(), 'kWh', AnalysisPeriod())
    values2 = [i / 365 for i in range(8760, 0, -1)]
    data_coll2 = HourlyContinuousCollection(header2, values2)
    month_chart = MonthlyChart([data_coll, data_coll2])
    meshes = month_chart.data_meshes
    assert len(meshes) == 2
    assert isinstance(meshes[1], Mesh2D)
    assert len(meshes[1].faces) == 24 * 12

    month_chart = MonthlyChart([data_coll, data_coll2], stack=True)
    meshes = month_chart.data_meshes
    assert len(meshes) == 2
    assert isinstance(meshes[1], Mesh2D)
    assert len(meshes[1].faces) == 24 * 12
    plines = month_chart.data_polylines
    assert isinstance(plines[0], Polyline2D)
    assert len(plines) == 2 * 12 * 2
def test_adaptive_collection_immutability():
    """Test that the Adaptive collection is immutable."""
    calc_length = 24
    prevail_header = Header(PrevailingOutdoorTemperature(), 'C',
                            AnalysisPeriod(end_month=1, end_day=1))
    prevail_temp = HourlyContinuousCollection(prevail_header, [22] * calc_length)
    op_temp_header = Header(Temperature(), 'C',
                            AnalysisPeriod(end_month=1, end_day=1))
    op_temp = HourlyContinuousCollection(op_temp_header, [26] * calc_length)
    adapt_obj = Adaptive(prevail_temp, op_temp)

    # check that editing the original collection does not mutate the object
    op_temp[0] = 28
    assert adapt_obj.operative_temperature[0] == 26

    # check that editing collection properties does not mutate the object
    with pytest.raises(Exception):
        adapt_obj.operative_temperature[0] = 28
    with pytest.raises(Exception):
        adapt_obj.operative_temperature.values = [28] * calc_length
    with pytest.raises(Exception):
        adapt_obj.degrees_from_neutral[0] = 0.5
    with pytest.raises(Exception):
        adapt_obj.degrees_from_neutral.values = [0.5] * calc_length

    # check that properties cannot be edited directly
    with pytest.raises(Exception):
        adapt_obj.operative_temperature = op_temp
    with pytest.raises(Exception):
        adapt_obj.degrees_from_neutral = op_temp
    with pytest.raises(Exception):
        adapt_obj.comfort_parameter = AdaptiveParameter(False)
def test_interpolate_holes():
    """Test the interpolate_holes method on the discontinuous collection."""
    a_per = AnalysisPeriod(6, 21, 0, 6, 21, 23)
    dt1, dt2 = DateTime(6, 21, 12), DateTime(6, 21, 14)
    v1, v2 = 20, 25
    dc1 = HourlyDiscontinuousCollection(
        Header(Temperature(), 'C', a_per), [v1, v2], [dt1, dt2])

    with pytest.raises(Exception):
        interp_coll1 = dc1.interpolate_holes()
    dc2 = dc1.validate_analysis_period()
    interp_coll1 = dc2.interpolate_holes()
    assert isinstance(interp_coll1, HourlyContinuousCollection)
    assert len(interp_coll1.values) == 24
    assert interp_coll1[0] == 20
    assert interp_coll1[12] == 20
    assert interp_coll1[13] == 22.5
    assert interp_coll1[14] == 25
    assert interp_coll1[23] == 25

    values = list(xrange(24))
    test_header = Header(GenericType('Test Type', 'test'), 'test',
                         AnalysisPeriod(end_month=1, end_day=1))
    dc3 = HourlyContinuousCollection(test_header, values)
    interp_coll2 = dc3.interpolate_holes()
    assert isinstance(interp_coll2, HourlyContinuousCollection)
    assert len(interp_coll2.values) == 24
def test_init_adaptive_collection_mrt():
    """Test the initialization of the Adaptive collection with MRT."""
    calc_length = 24
    prevail_header = Header(PrevailingOutdoorTemperature(), 'C',
                            AnalysisPeriod(end_month=1, end_day=1))
    prevail_temp = HourlyContinuousCollection(prevail_header, [22] * calc_length)
    air_temp_header = Header(Temperature(), 'C',
                             AnalysisPeriod(end_month=1, end_day=1))
    air_temp = HourlyContinuousCollection(air_temp_header, [24] * calc_length)
    adapt_obj = Adaptive.from_air_and_rad_temp(prevail_temp, air_temp, 28)

    assert adapt_obj.comfort_model == 'Adaptive'
    assert adapt_obj.calc_length == calc_length
    str(adapt_obj)  # test that the string representation is ok

    assert isinstance(adapt_obj.prevailing_outdoor_temperature,
                      HourlyContinuousCollection)
    assert len(adapt_obj.prevailing_outdoor_temperature.values) == calc_length
    assert adapt_obj.prevailing_outdoor_temperature[0] == 22
    assert isinstance(adapt_obj.operative_temperature, HourlyContinuousCollection)
    assert len(adapt_obj.operative_temperature.values) == calc_length
    assert adapt_obj.operative_temperature[0] == 26
    assert isinstance(adapt_obj.neutral_temperature, HourlyContinuousCollection)
    assert len(adapt_obj.neutral_temperature.values) == calc_length
    assert adapt_obj.neutral_temperature[0] == pytest.approx(24.62, rel=1e-3)
    assert isinstance(adapt_obj.degrees_from_neutral, HourlyContinuousCollection)
    assert len(adapt_obj.degrees_from_neutral.values) == calc_length
    assert adapt_obj.degrees_from_neutral[0] == pytest.approx(1.3799, rel=1e-3)
def test_monthlychart_monthly_stack():
    """Test the initialization of MonthlyChart with monthly stacked data collections."""
    header = Header(Energy(), 'kWh', AnalysisPeriod())
    values = [i for i in range(12, 24)]
    date_t = list(range(1, 13))
    data_coll = MonthlyCollection(header, values, date_t)
    month_chart = MonthlyChart([data_coll])
    meshes = month_chart.data_meshes
    assert len(meshes) == 1
    assert isinstance(meshes[0], Mesh2D)
    assert len(meshes[0].faces) == 12
    assert month_chart.y_axis_labels1[0] == '0.00'

    header2 = Header(Energy(), 'kWh', AnalysisPeriod())
    values2 = [i for i in range(24, 36)]
    data_coll2 = MonthlyCollection(header2, values2, date_t)
    month_chart = MonthlyChart([data_coll, data_coll2])
    meshes = month_chart.data_meshes
    assert len(meshes) == 2
    assert isinstance(meshes[1], Mesh2D)
    assert len(meshes[1].faces) == 12

    month_chart = MonthlyChart([data_coll, data_coll2], stack=True)
    meshes = month_chart.data_meshes
    assert len(meshes) == 2
    assert isinstance(meshes[1], Mesh2D)
    assert len(meshes[1].faces) == 12
def test_monthlychart_monthly():
    """Test the initialization of MonthlyChart with monthly data collections."""
    header = Header(Temperature(), 'C', AnalysisPeriod())
    values = [i for i in range(12)]
    date_t = list(range(1, 13))
    data_coll = MonthlyCollection(header, values, date_t)
    month_chart = MonthlyChart([data_coll])
    meshes = month_chart.data_meshes
    assert len(meshes) == 1
    assert isinstance(meshes[0], Mesh2D)
    assert len(meshes[0].faces) == 12
    assert month_chart.data_polylines is None

    header2 = Header(RelativeHumidity(), '%', AnalysisPeriod())
    values2 = [i for i in range(10, 70, 5)]
    data_coll2 = MonthlyCollection(header2, values2, date_t)
    month_chart = MonthlyChart([data_coll, data_coll2])
    meshes = month_chart.data_meshes
    assert len(meshes) == 2
    assert isinstance(meshes[1], Mesh2D)
    assert len(meshes[1].faces) == 12

    month_chart = MonthlyChart([data_coll, data_coll2], stack=True)
    meshes = month_chart.data_meshes
    assert len(meshes) == 2
    assert isinstance(meshes[1], Mesh2D)
    assert len(meshes[1].faces) == 12
def test_is_collection_aligned():
    """Test the is_collection_aligned method for discontinuous collections."""
    header = Header(Temperature(), 'C', AnalysisPeriod(end_month=1, end_day=1))
    header3 = Header(Temperature(), 'C', AnalysisPeriod(end_month=1, end_day=2))
    header4 = Header(Temperature(), 'C',
                     AnalysisPeriod(st_day=2, end_month=1, end_day=2))
    header5 = Header(Temperature(), 'C', AnalysisPeriod(end_month=1, end_day=24))
    values1 = list(xrange(24))
    values2 = [12] * 24
    values3 = [12] * 48
    dc1 = HourlyDiscontinuousCollection(
        header, values1, header.analysis_period.datetimes)
    dc2 = HourlyDiscontinuousCollection(
        header, values2, header.analysis_period.datetimes)
    dc3 = HourlyDiscontinuousCollection(
        header3, values3, header3.analysis_period.datetimes)
    dc4 = HourlyDiscontinuousCollection(
        header4, values1, header4.analysis_period.datetimes)
    dc5 = DailyCollection(header5, values1, header5.analysis_period.doys_int)

    assert dc1.is_collection_aligned(dc2)
    assert dc2.is_collection_aligned(dc1)
    assert not dc1.is_collection_aligned(dc3)
    assert not dc3.is_collection_aligned(dc1)
    assert not dc1.is_collection_aligned(dc4)
    assert not dc4.is_collection_aligned(dc1)
    assert not dc1.is_collection_aligned(dc5)
    assert not dc5.is_collection_aligned(dc1)

    assert HourlyDiscontinuousCollection.are_collections_aligned([dc1, dc2])
    assert not HourlyDiscontinuousCollection.are_collections_aligned(
        [dc1, dc2, dc3], False)
def test_monthlychart_monthly_per_hour_stack():
    """Test the initialization of MonthlyChart with monthly-per-hour stacked collections."""
    header = Header(Energy(), 'kWh', AnalysisPeriod())
    values = list(range(20, 12 * 24 + 20))
    date_t = AnalysisPeriod().months_per_hour
    data_coll = MonthlyPerHourCollection(header, values, date_t)
    month_chart = MonthlyChart([data_coll])
    assert month_chart.data_meshes is None
    plines = month_chart.data_polylines
    assert isinstance(plines[0], Polyline2D)
    assert len(plines) == 12
    assert month_chart.y_axis_labels1[0] == '0.00'

    header2 = Header(Energy(), 'kWh', AnalysisPeriod())
    values2 = [x / 10 for x in range(12 * 24)]
    data_coll2 = MonthlyPerHourCollection(header2, values2, date_t)
    month_chart = MonthlyChart([data_coll, data_coll2])
    plines = month_chart.data_polylines
    assert isinstance(plines[0], Polyline2D)
    assert len(plines) == 12 * 2

    month_chart = MonthlyChart([data_coll, data_coll2], stack=True)
    plines = month_chart.data_polylines
    assert isinstance(plines[0], Polyline2D)
    assert len(plines) == 12 * 2
def test_radial_histogram():
    """Test circular histogram."""
    # Testing vals
    dir_vals = [0, 0, 0, 10, 85, 90, 95, 170, 285, 288]
    spd_vals = dir_vals

    # Make into fake data collections
    a_per = AnalysisPeriod(6, 21, 12, 6, 21, 13)
    dates = [DateTime(6, 21, i) for i in range(len(dir_vals))]
    spd_header = Header(Speed(), 'm/s', a_per)
    dir_header = Header(GenericType('Direction', 'deg'), 'deg', a_per)
    spd_data = HourlyDiscontinuousCollection(spd_header, spd_vals, dates)
    dir_data = HourlyDiscontinuousCollection(dir_header, dir_vals, dates)

    # Init simple dir set divided by 4
    w = WindRose(dir_data, spd_data, 4)

    # Testing
    bin_vecs = w.bin_vectors
    vec_cpt = (0, 0)
    radius_arr = (0., 1.)
    ytick_num = 1
    hist = w.histogram_data
    histstack = w._histogram_data_stacked(hist, ytick_num)
    show_stack = False
    vecs = WindRose._histogram_array_radial(
        bin_vecs, vec_cpt, hist, histstack, radius_arr, show_stack)
def test_monthlychart_two_axes():
    """Test the MonthlyChart with two Y-axes."""
    header = Header(Temperature(), 'C', AnalysisPeriod())
    values = [i for i in range(12)]
    date_t = list(range(1, 13))
    data_coll = MonthlyCollection(header, values, date_t)
    header2 = Header(RelativeHumidity(), '%', AnalysisPeriod())
    values2 = [i for i in range(10, 70, 5)]
    data_coll2 = MonthlyCollection(header2, values2, date_t)
    month_chart = MonthlyChart([data_coll, data_coll2])

    y_txt = month_chart.y_axis_labels2
    assert all(isinstance(txt, str) for txt in y_txt)
    y_lines = month_chart.y_axis_lines
    y_pts = month_chart.y_axis_label_points2
    assert len(y_lines) == len(y_txt) == len(y_pts) == 11
    assert all(isinstance(line, LineSegment2D) for line in y_lines)
    assert all(isinstance(pt, Point2D) for pt in y_pts)
    assert isinstance(month_chart.y_axis_title_text2, str)
    assert 'Fraction' in month_chart.y_axis_title_text2
    assert isinstance(month_chart.y_axis_title_location2, Plane)

    # ensure the first axis was not affected
    y_txt = month_chart.y_axis_labels1
    assert all(isinstance(txt, str) for txt in y_txt)
    y_pts = month_chart.y_axis_label_points1
    assert len(y_lines) == len(y_txt) == len(y_pts) == 11
    assert all(isinstance(pt, Point2D) for pt in y_pts)
    assert isinstance(month_chart.y_axis_title_text1, str)
    assert 'Temperature' in month_chart.y_axis_title_text1
    assert isinstance(month_chart.y_axis_title_location1, Plane)
def test_histogram_data_stacked():
    # Testing vals
    dir_vals = [0, 0, 0, 10, 10, 10, 85, 90, 90, 90, 95, 170, 285, 288]
    spd_vals = dir_vals

    # Make into fake data collections
    a_per = AnalysisPeriod(6, 21, 12, 6, 21, 13)
    dates = [DateTime(6, 21, i) for i in range(len(dir_vals))]
    spd_header = Header(Speed(), 'm/s', a_per)
    dir_header = Header(GenericType('Direction', 'deg'), 'deg', a_per)
    spd_data = HourlyDiscontinuousCollection(spd_header, spd_vals, dates)
    dir_data = HourlyDiscontinuousCollection(dir_header, dir_vals, dates)

    # Init simple dir set divided by 4
    w = WindRose(dir_data, spd_data, 4)
    w.legend_parameters.segment_count = 3

    # Bin values to divide into colors
    # 315-45:  [10, 10, 10]; 2 intervals
    # 45-135:  [85, 90, 90, 90, 95]; 3 intervals, [85., 88.3, 91.7, 95.]
    # 135-225: [170]; 1 interval
    # 225-315: [285, 288]; 2 intervals, [285., 286.5, 288.]
    # interval_num: [2, 3, 1, 2]
    chk_histstack = [
        [(10 + 10) / 2., (10 + 10) / 2.],
        [(85 + 88.3) / 2., (88.3 + 91.7) / 2., (91.7 + 95) / 2.],
        [170.],
        [(285 + 286.5) / 2., (286.5 + 288) / 2.]]

    # Testing
    histstack = WindRose._histogram_data_stacked(w.histogram_data, 3)
    for chkh, h in zip(chk_histstack, histstack):
        for c, _h in zip(chkh, h):
            assert abs(c - _h) <= 1e-1
def test_xticks_radial():
    """Test polar coordinate array."""
    # Testing vals ensure all histogram heights are equal.
    dir_vals = [3, 3, 10,        # 315 - 45
                85, 90, 95,      # 45 - 135
                170, 170, 170,   # 135 - 225
                230, 285, 288]   # 225 - 315
    spd_vals = dir_vals

    # Make into fake data collections
    a_per = AnalysisPeriod(6, 21, 12, 6, 21, 13)
    dates = [DateTime(6, 21, i) for i in range(len(dir_vals))]
    spd_header = Header(Speed(), 'm/s', a_per)
    dir_header = Header(GenericType('Direction', 'deg'), 'deg', a_per)
    spd_data = HourlyDiscontinuousCollection(spd_header, spd_vals, dates)
    dir_data = HourlyDiscontinuousCollection(dir_header, dir_vals, dates)

    # Init simple dir set divided by 4
    w = WindRose(dir_data, spd_data, 4)
    f = _deg2rad
    cos, sin = math.cos, math.sin

    # Testing
    xticks = w.orientation_lines
    xticks = [xtick.scale(1 / w.compass_radius) for xtick in xticks]

    # w.angles - 90: [225, -45, 45, 135, 225]
    # Since frequencies are for xticks, no need to scale vectors.
    chk_xticks = [
        [(0, 0), (cos(f(225)), -sin(f(225)))],   # v0
        [(0, 0), (cos(f(-45)), -sin(f(-45)))],   # v1 bin 0
        [(0, 0), (cos(f(-45)), -sin(f(-45)))],   # v2
        [(0, 0), (cos(f(45)), -sin(f(45)))],     # v3 bin 1
        [(0, 0), (cos(f(45)), -sin(f(45)))],     # v4
        [(0, 0), (cos(f(135)), -sin(f(135)))],   # v5 bin 2
        [(0, 0), (cos(f(135)), -sin(f(135)))],   # v6
        [(0, 0), (cos(f(225)), -sin(f(225)))]]   # v7 bin 3

    for i, (chk_xtick, xtick) in enumerate(zip(chk_xticks, xticks)):
        # Check x, y
        # print(chk_xtick[1][0], xtick.to_array()[1][0])
        # print(chk_xtick[1][1], xtick.to_array()[1][1])
        assert abs(chk_xtick[1][0] - xtick.to_array()[1][0]) < 1e-10
        assert abs(chk_xtick[1][1] - xtick.to_array()[1][1]) < 1e-10
def test_radial_histogram_plot():
    """Test circular histogram."""
    # Testing vals ensure all histogram heights are equal.
    dir_vals = [3, 3, 10,        # 315 - 45
                85, 90, 95,      # 45 - 135
                170, 170, 170,   # 135 - 225
                230, 285, 288]   # 225 - 315
    spd_vals = dir_vals

    # Make into fake data collections
    a_per = AnalysisPeriod(6, 21, 12, 6, 21, 13)
    dates = [DateTime(6, 21, i) for i in range(len(dir_vals))]
    spd_header = Header(Speed(), 'm/s', a_per)
    dir_header = Header(GenericType('Direction', 'deg'), 'deg', a_per)
    spd_data = HourlyDiscontinuousCollection(spd_header, spd_vals, dates)
    dir_data = HourlyDiscontinuousCollection(dir_header, dir_vals, dates)

    # Init simple dir set divided by 4
    w = WindRose(dir_data, spd_data, 4)
    f = _deg2rad
    cos, sin = math.cos, math.sin

    # Testing
    bin_vecs = w.bin_vectors
    vec_cpt = (0, 0)
    radius_arr = (0., 1.)
    hist = w.histogram_data
    speeds = [val for bin in w.histogram_data for val in bin]
    min_speed, max_speed = min(speeds), max(speeds)
    speed_interval = (max_speed - min_speed) / w.legend_parameters.segment_count
    histstack, _ = w._histogram_data_nested(
        hist, (min_speed, max_speed), speed_interval)
    show_freq = False
    vecs = WindRose._histogram_array_radial(
        bin_vecs, vec_cpt, hist, histstack, radius_arr, show_freq)

    # Make bins of equal height (unit circle)
    chk_bin_vecs = [[(cos(f(225)), -sin(f(225))),    # 0 west
                     (cos(f(-45)), -sin(f(-45)))],
                    [(cos(f(-45)), -sin(f(-45))),    # 1 north
                     (cos(f(45)), -sin(f(45)))],
                    [(cos(f(45)), -sin(f(45))),      # 2 east
                     (cos(f(135)), -sin(f(135)))],
                    [(cos(f(135)), -sin(f(135))),    # 3 south
                     (cos(f(225)), -sin(f(225)))]]

    for i in range(len(chk_bin_vecs)):
        vec2, vec1 = chk_bin_vecs[i][0], chk_bin_vecs[i][1]
        chk_pts = [vec1, vec2]
        pts = vecs[i][1:]  # Get rid of cpt (0, 0)

        for p, cp in zip(pts, chk_pts):
            assert abs(p[0] - cp[0]) < 1e-10, (p[0], cp[0])
            assert abs(p[1] - cp[1]) < 1e-10, (p[1], cp[1])
def map_result_info(comfort_model, run_period, qualifier, folder, log_file):
    """Get a JSON that specifies the data type and units for comfort map outputs.

    This JSON is needed by interfaces to correctly parse comfort map results.

    \b
    Args:
        comfort_model: Text for the comfort model of the thermal mapping
            simulation. Choose from: pmv, adaptive, utci.
    """
    try:
        # parse the run period
        run_period = _load_analysis_period_str(run_period)
        run_period = run_period if run_period is not None else AnalysisPeriod()

        # get the data type and units from the comfort model
        comfort_model = comfort_model.lower()
        cond, cond_units = ThermalCondition(), 'condition'
        if comfort_model == 'pmv':
            temp, temp_units = OperativeTemperature(), 'C'
            if 'write-set-map' in qualifier:
                temp = StandardEffectiveTemperature()
            cond_i, cond_i_units = PredictedMeanVote(), 'PMV'
        elif comfort_model == 'adaptive':
            temp, temp_units = OperativeTemperature(), 'C'
            cond_i, cond_i_units = OperativeTemperatureDelta(), 'dC'
        elif comfort_model == 'utci':
            temp, temp_units = UniversalThermalClimateIndex(), 'C'
            cond_i, cond_i_units = ThermalConditionElevenPoint(), 'condition'
        else:
            raise ValueError(
                'Comfort model "{}" not recognized. Choose from: {}.'.format(
                    comfort_model, ('pmv', 'adaptive', 'utci')))

        # build up the dictionary of data headers
        temp_header = Header(temp, temp_units, run_period)
        cond_header = Header(cond, cond_units, run_period)
        cond_i_header = Header(cond_i, cond_i_units, run_period)
        result_info_dict = {
            'temperature': temp_header.to_dict(),
            'condition': cond_header.to_dict(),
            'condition_intensity': cond_i_header.to_dict()
        }

        # write the JSON into result sub-folders
        if folder is not None:
            if not os.path.isdir(folder):
                os.makedirs(folder)
            for metric in ('temperature', 'condition', 'condition_intensity'):
                file_path = os.path.join(folder, '{}.json'.format(metric))
                with open(file_path, 'w') as fp:
                    json.dump(result_info_dict[metric], fp, indent=4)
        log_file.write(json.dumps(result_info_dict))
    except Exception as e:
        _logger.exception('Failed to write thermal map info.\n{}'.format(e))
        sys.exit(1)
    else:
        sys.exit(0)
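# A minimal, hedged usage sketch (not part of the CLI above): one way an interface
# could read a JSON written by map_result_info back into a ladybug Header. The
# 'temperature.json' file name matches what the command writes; the folder argument
# is whatever was passed to the command.
def _example_load_map_result_info(folder):
    """Illustrative helper only; assumes map_result_info was run with --folder."""
    import json
    import os
    from ladybug.header import Header

    file_path = os.path.join(folder, 'temperature.json')
    with open(file_path) as fp:
        temp_header = Header.from_dict(json.load(fp))
    # the recovered header carries the data type, unit and run period
    return temp_header.data_type, temp_header.unit, temp_header.analysis_period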
def test_prevailing_direction():
    """Test prevailing direction getter."""
    # Test with single prevailing dir
    dir_vals = [0, 3, 10,             # 315 - 45
                85, 90, 95,           # 45 - 135
                140, 170, 170, 170,   # 135 - 225
                230, 285, 288]        # 225 - 315
    spd_vals = dir_vals

    # Make into fake data collections
    a_per = AnalysisPeriod(6, 21, 12, 6, 21, 13)
    dates = [DateTime(6, 21, i) for i in range(len(dir_vals))]
    spd_header = Header(Speed(), 'm/s', a_per)
    dir_header = Header(GenericType('Direction', 'deg'), 'deg', a_per)
    spd_data = HourlyDiscontinuousCollection(spd_header, spd_vals, dates)
    dir_data = HourlyDiscontinuousCollection(dir_header, dir_vals, dates)

    # Init simple dir set divided by 4
    w = WindRose(dir_data, spd_data, 4)
    test_prev_dir = 180
    assert w.prevailing_direction[0] == test_prev_dir

    # Testing with two max prevailing values
    dir_vals = [3, 3, 10,             # 315 - 45
                85, 90, 90, 100,      # 45 - 135
                170, 170, 170, 180,   # 135 - 225
                230, 285, 288]        # 225 - 315
    spd_vals = dir_vals

    # Make into fake data collections
    a_per = AnalysisPeriod(6, 21, 12, 6, 21, 13)
    dates = [DateTime(6, 21, i) for i in range(len(dir_vals))]
    spd_header = Header(Speed(), 'm/s', a_per)
    dir_header = Header(GenericType('Direction', 'deg'), 'deg', a_per)
    spd_data = HourlyDiscontinuousCollection(spd_header, spd_vals, dates)
    dir_data = HourlyDiscontinuousCollection(dir_header, dir_vals, dates)

    # Init simple dir set divided by 4
    w = WindRose(dir_data, spd_data, 4)
    test_prev_dir = set((90, 180))
    assert set(w.prevailing_direction) == test_prev_dir

    # Test with epw
    epw_path = os.path.join(os.getcwd(), 'tests/fixtures/epw/chicago.epw')
    epw = EPW(epw_path)

    # Test 5 directions
    w = WindRose(epw.wind_direction, epw.wind_speed, 5)
    assert w.prevailing_direction[0] == 216.0
def test_to_unit():
    """Test the conversion of DataCollection units."""
    header1 = Header(Temperature(), 'C', AnalysisPeriod())
    header2 = Header(Temperature(), 'F', AnalysisPeriod())
    values = [20] * 8760
    dc1 = HourlyContinuousCollection(header1, values)
    dc2 = HourlyContinuousCollection(header2, values)

    dc3 = dc1.to_unit('K')
    dc4 = dc2.to_unit('K')
    assert dc1.values[0] == 20
    assert dc3.values[0] == 293.15
    assert dc2.values[0] == 20
    assert dc4.values[0] == pytest.approx(266.483, rel=1e-1)
def test_validate_a_period_daily():
    """Test the validate_a_period methods for daily collections."""
    a_per = AnalysisPeriod(6, 21, 0, 6, 22, 23)
    v1, v2 = 20, 25
    dt1, dt2 = 172, 173

    # Test that the validate method correctly sorts reversed datetimes.
    dc1 = DailyCollection(Header(Temperature(), 'C', a_per), [v1, v2], [dt2, dt1])
    dc1_new = dc1.validate_analysis_period()
    assert dc1.validated_a_period is False
    assert dc1_new.validated_a_period is True
    assert dc1.datetimes == (dt2, dt1)
    assert dc1_new.datetimes == (dt1, dt2)

    # Test that the validate method correctly updates the analysis_period range.
    a_per_2 = AnalysisPeriod(6, 20, 0, 6, 20, 23)
    dc1 = DailyCollection(Header(Temperature(), 'C', a_per_2), [v1, v2], [dt1, dt2])
    dc1_new = dc1.validate_analysis_period()
    assert dc1.validated_a_period is False
    assert dc1_new.validated_a_period is True
    assert dc1.header.analysis_period == a_per_2
    assert dc1_new.header.analysis_period == AnalysisPeriod(6, 20, 0, 6, 22, 23)

    # Test the validate method with reversed analysis_periods.
    a_per_3 = AnalysisPeriod(6, 20, 0, 2, 20, 23)
    dt5 = 21
    dc1 = DailyCollection(Header(Temperature(), 'C', a_per_3),
                          [v1, v2, v2], [dt1, dt2, dt5])
    dc1_new = dc1.validate_analysis_period()
    assert dc1_new.header.analysis_period == a_per_3
    dc1 = DailyCollection(Header(Temperature(), 'C', a_per_3), [v1, v2], [dt1, dt2])
    dc1_new = dc1.validate_analysis_period()
    assert dc1_new.header.analysis_period == a_per_3
    dc1 = DailyCollection(Header(Temperature(), 'C', a_per_3), [v1, v2], [dt5, 22])
    dc1_new = dc1.validate_analysis_period()
    assert dc1_new.header.analysis_period == a_per_3
    dc1 = DailyCollection(Header(Temperature(), 'C', a_per_3), [v1, v2], [dt5, 60])
    dc1_new = dc1.validate_analysis_period()
    assert dc1_new.header.analysis_period == AnalysisPeriod()

    # Test that the validate method correctly identifies leap years.
    dc1 = DailyCollection(Header(Temperature(), 'C', a_per),
                          [v1, v2, v2], [dt1, dt2, 366])
    dc1_new = dc1.validate_analysis_period()
    assert dc1.validated_a_period is False
    assert dc1_new.validated_a_period is True
    assert dc1.header.analysis_period.is_leap_year is False
    assert dc1_new.header.analysis_period.is_leap_year is True

    # Test that duplicated datetimes are caught
    dc1 = DailyCollection(Header(Temperature(), 'C', a_per), [v1, v2], [dt1, dt1])
    with pytest.raises(Exception):
        dc1_new = dc1.validate_analysis_period()
def test_bin_vectors():
    """Bin vectors."""
    # Testing vals
    dir_vals = [3, 3, 3, 10, 85, 90, 95, 170, 230, 285, 288]
    spd_vals = dir_vals

    # Make into fake data collections
    a_per = AnalysisPeriod(6, 21, 12, 6, 21, 13)
    dates = [DateTime(6, 21, i) for i in range(len(dir_vals))]
    spd_header = Header(Speed(), 'm/s', a_per)
    dir_header = Header(GenericType('Direction', 'deg'), 'deg', a_per)
    spd_data = HourlyDiscontinuousCollection(spd_header, spd_vals, dates)
    dir_data = HourlyDiscontinuousCollection(dir_header, dir_vals, dates)

    # Init simple dir set divided by 4
    w = WindRose(dir_data, spd_data, 4)
    f = _deg2rad
    cos, sin = math.cos, math.sin

    # Testing
    # Check angles
    a = w.angles
    chk_a = [315, 45, 135, 225, 315]
    for _a, _chk_a in zip(a, chk_a):
        assert abs(_a - _chk_a) < 1e-10, (_a, _chk_a)

    # Check vectors
    bin_vecs = w.bin_vectors
    a = [_deg2rad(_a) for _a in a]
    chk_bin_vecs = [[(cos(f(225)), -sin(f(225))),    # 0
                     (cos(f(-45)), -sin(f(-45)))],
                    [(cos(f(-45)), -sin(f(-45))),    # 1
                     (cos(f(45)), -sin(f(45)))],
                    [(cos(f(45)), -sin(f(45))),      # 2
                     (cos(f(135)), -sin(f(135)))],
                    [(cos(f(135)), -sin(f(135))),    # 3
                     (cos(f(225)), -sin(f(225)))]]

    # Check len
    assert len(bin_vecs) == len(chk_bin_vecs)

    # Check coords
    for i, (chk_vec, vec) in enumerate(zip(chk_bin_vecs, bin_vecs)):
        # left vec
        assert abs(chk_vec[0][0] - vec[0][0]) < 1e-5, (i, chk_vec[0][0], vec[0][0])
        assert abs(chk_vec[0][1] - vec[0][1]) < 1e-5, (i, chk_vec[0][1], vec[0][1])
        # right vec
        assert abs(chk_vec[1][0] - vec[1][0]) < 1e-5, (i, chk_vec[1][0], vec[1][0])
        assert abs(chk_vec[1][1] - vec[1][1]) < 1e-5, (i, chk_vec[1][1], vec[1][1])
def test_xticks_radial():
    """Test polar coordinate array."""
    # Testing vals
    dir_vals = [0, 0, 0, 10, 85, 90, 95, 170, 285, 288]
    spd_vals = dir_vals

    # Make into fake data collections
    a_per = AnalysisPeriod(6, 21, 12, 6, 21, 13)
    dates = [DateTime(6, 21, i) for i in range(len(dir_vals))]
    spd_header = Header(Speed(), 'm/s', a_per)
    dir_header = Header(GenericType('Direction', 'deg'), 'deg', a_per)
    spd_data = HourlyDiscontinuousCollection(spd_header, spd_vals, dates)
    dir_data = HourlyDiscontinuousCollection(dir_header, dir_vals, dates)
def test_adaptive_collection_cooling_effect_output():
    """Test the cooling effect output of the Adaptive collection."""
    calc_length = 24
    prevail_header = Header(PrevailingOutdoorTemperature(), 'C',
                            AnalysisPeriod(end_month=1, end_day=1))
    prevail_temp = HourlyContinuousCollection(prevail_header, [22] * calc_length)
    op_temp_header = Header(Temperature(), 'C',
                            AnalysisPeriod(end_month=1, end_day=1))
    op_temp = HourlyContinuousCollection(op_temp_header, [26] * calc_length)
    adapt_obj = Adaptive(prevail_temp, op_temp, air_speed=0.7)

    assert isinstance(adapt_obj.cooling_effect, HourlyContinuousCollection)
    assert len(adapt_obj.cooling_effect.values) == calc_length
    assert adapt_obj.cooling_effect[0] == 1.2
def test_init_continuous():
    """Test the init methods for continuous collections."""
    # Setup temperature data collection
    header = Header(Temperature(), 'C', AnalysisPeriod())
    values = list(xrange(8760))
    dc1 = HourlyContinuousCollectionImmutable(header, values)

    assert len(dc1.datetimes) == 8760
    assert list(dc1.values) == list(xrange(8760))
    assert dc1.is_mutable is False
    with pytest.raises(AttributeError):
        dc1[0] = 18
    with pytest.raises(AttributeError):
        dc1.values = [24] * 8760
    with pytest.raises(Exception):
        dc1.values.append(10)
    with pytest.raises(AttributeError):
        dc2 = dc1.convert_to_culled_timestep(1)

    dc2 = dc1.to_mutable()
    assert isinstance(dc2, HourlyContinuousCollection)
    assert dc2.is_mutable is True
    dc2[0] = 18
    assert dc2[0] == 18
    dc2.values = [24] * 8760
    assert dc2.values == tuple([24] * 8760)
    with pytest.raises(Exception):
        dc2.values.append(10)  # make sure that we can still not append

    dc3 = dc2.to_immutable()
    assert isinstance(dc3, HourlyContinuousCollectionImmutable)
    assert dc3.is_mutable is False
    dc4 = dc3.to_immutable()
    assert isinstance(dc4, HourlyContinuousCollectionImmutable)
    assert dc4.is_mutable is False
def data_collection_at_timestep(self, timestep=1, start_date=Date(1, 1),
                                end_date=Date(12, 31)):
    """Get a ladybug DataCollection representing this schedule at a given timestep.

    Note that ladybug DataCollections always follow the "Ladybug Tools
    Interpretation" of date time values as noted in the
    ScheduleDay.values_at_timestep documentation.

    Args:
        timestep: An integer for the number of steps per hour at which to make
            the resulting DataCollection.
        start_date: An optional ladybug Date object for when to start the
            DataCollection. Default: 1 Jan on a non-leap year.
        end_date: An optional ladybug Date object for when to end the
            DataCollection. Default: 31 Dec on a non-leap year.
    """
    a_period = AnalysisPeriod(start_date.month, start_date.day, 0,
                              end_date.month, end_date.day, 23,
                              timestep, self.is_leap_year)
    data_type, unit = self._get_lb_data_type_and_unit()
    header = Header(data_type, unit, a_period,
                    metadata={'schedule': self.identifier})
    values = self.values_at_timestep(timestep, start_date, end_date)
    return HourlyContinuousCollection(header, values)
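# A minimal, hedged usage sketch for the method above. The schedule argument is
# assumed to be any schedule object exposing data_collection_at_timestep (the
# owning class is not shown in this section), and the January date range is just
# an illustrative choice.
def _example_schedule_data_collection(schedule):
    """Illustrative helper only; requests 15-minute values for January."""
    from ladybug.dt import Date

    jan_data = schedule.data_collection_at_timestep(
        timestep=4, start_date=Date(1, 1), end_date=Date(1, 31))
    # the header records the schedule identifier and the requested run period
    return jan_data.header.metadata['schedule'], jan_data.header.analysis_period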
def data_collection(self, date, schedule_type_limit, timestep=1):
    """Get a ladybug DataCollection representing this schedule at a given timestep.

    Note that ladybug DataCollections always follow the "Ladybug Tools
    Interpretation" of date time values as noted in the values_at_timestep
    documentation.

    Args:
        date: A ladybug Date object for the day of the year the DataCollection
            is representing.
        schedule_type_limit: A ScheduleTypeLimit object that describes the
            schedule, which will be used to make the header for the DataCollection.
        timestep: An integer for the number of steps per hour at which to make
            the resulting DataCollection.
    """
    assert isinstance(date, Date), \
        'Expected ladybug Date. Got {}.'.format(type(date))
    assert isinstance(schedule_type_limit, ScheduleTypeLimit), \
        'Expected Honeybee ScheduleTypeLimit. Got {}.'.format(
            type(schedule_type_limit))
    a_period = AnalysisPeriod(date.month, date.day, 0, date.month, date.day, 23,
                              timestep, date.leap_year)
    header = Header(schedule_type_limit.data_type, schedule_type_limit.unit,
                    a_period, metadata={'schedule': self.name})
    return HourlyContinuousCollection(header, self.values_at_timestep(timestep))
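# A minimal, hedged usage sketch for the single-day method above. Both arguments
# are assumed to be built elsewhere: day_schedule is an object exposing
# data_collection and type_limit is the ScheduleTypeLimit that describes it.
def _example_day_data_collection(day_schedule, type_limit):
    """Illustrative helper only; gets hourly values for June 21."""
    from ladybug.dt import Date

    data = day_schedule.data_collection(Date(6, 21), type_limit, timestep=1)
    # a single day at an hourly timestep yields 24 values
    return data.header.metadata['schedule'], data.values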
def test_utci_collection_immutability():
    """Test that the UTCI collection is immutable."""
    calc_length = 24
    air_temp_header = Header(Temperature(), 'C',
                             AnalysisPeriod(end_month=1, end_day=1))
    air_temp = HourlyContinuousCollection(air_temp_header, [24] * calc_length)
    utci_obj = UTCI(air_temp, 50)

    # check that editing the original collection does not mutate the object
    air_temp[0] = 26
    assert utci_obj.air_temperature[0] == 24

    # check that editing collection properties does not mutate the object
    with pytest.raises(Exception):
        utci_obj.air_temperature[0] = 26
    with pytest.raises(Exception):
        utci_obj.air_temperature.values = [26] * calc_length
    with pytest.raises(Exception):
        utci_obj.air_temperature = air_temp
    with pytest.raises(Exception):
        utci_obj.universal_thermal_climate_index[0] = 20
    with pytest.raises(Exception):
        utci_obj.universal_thermal_climate_index.values = [20] * calc_length

    # check that properties cannot be edited directly
    with pytest.raises(Exception):
        utci_obj.universal_thermal_climate_index = air_temp
    with pytest.raises(Exception):
        utci_obj.comfort_parameter = UTCIParameter()
def test_utci_collection_defaults():
    """Test the default inputs assigned to the UTCI collection."""
    calc_length = 24
    air_temp_header = Header(Temperature(), 'C',
                             AnalysisPeriod(end_month=1, end_day=1))
    air_temp = HourlyContinuousCollection(air_temp_header, [24] * calc_length)
    utci_obj = UTCI(air_temp, 50)

    assert isinstance(utci_obj.rad_temperature, HourlyContinuousCollection)
    assert len(utci_obj.rad_temperature.values) == calc_length
    assert utci_obj.rad_temperature[0] == utci_obj.air_temperature[0]

    assert isinstance(utci_obj.wind_speed, HourlyContinuousCollection)
    assert len(utci_obj.wind_speed.values) == calc_length
    assert utci_obj.wind_speed[0] == 0.1

    assert isinstance(utci_obj.comfort_parameter, UTCIParameter)
    default_par = UTCIParameter()
    assert utci_obj.comfort_parameter.cold_thresh == default_par.cold_thresh
    assert utci_obj.comfort_parameter.heat_thresh == default_par.heat_thresh
    assert utci_obj.comfort_parameter.extreme_cold_thresh == \
        default_par.extreme_cold_thresh
    assert utci_obj.comfort_parameter.very_strong_cold_thresh == \
        default_par.very_strong_cold_thresh
    assert utci_obj.comfort_parameter.strong_cold_thresh == \
        default_par.strong_cold_thresh
    assert utci_obj.comfort_parameter.moderate_cold_thresh == \
        default_par.moderate_cold_thresh
    assert utci_obj.comfort_parameter.moderate_heat_thresh == \
        default_par.moderate_heat_thresh
    assert utci_obj.comfort_parameter.strong_heat_thresh == \
        default_par.strong_heat_thresh
    assert utci_obj.comfort_parameter.very_strong_heat_thresh == \
        default_par.very_strong_heat_thresh
    assert utci_obj.comfort_parameter.extreme_heat_thresh == \
        default_par.extreme_heat_thresh
def test_monthly_per_hour():
    """Test the monthly per hour collections."""
    a_per = AnalysisPeriod(6, 1, 0, 7, 31, 23)
    vals = [20] * 24 + [25] * 24
    dc1 = MonthlyPerHourCollectionImmutable(
        Header(Temperature(), 'C', a_per), vals, a_per.months_per_hour)

    assert dc1.datetimes == tuple(a_per.months_per_hour)
    assert dc1.values == tuple(vals)
    assert dc1.is_mutable is False
    with pytest.raises(AttributeError):
        dc1[0] = 18
    with pytest.raises(AttributeError):
        dc1.values = range(48)
    with pytest.raises(Exception):
        dc1.values.append(10)

    dc2 = dc1.to_mutable()
    assert isinstance(dc2, MonthlyPerHourCollection)
    assert dc2.is_mutable is True
    dc2[0] = 18
    assert dc2[0] == 18
    dc2.values = range(48)
    assert dc2.values == tuple(range(48))
    with pytest.raises(Exception):
        dc2.values.append(10)  # make sure that we can still not append

    dc3 = dc2.to_immutable()
    assert isinstance(dc3, MonthlyPerHourCollectionImmutable)
    assert dc3.is_mutable is False
    dc4 = dc3.to_immutable()
    assert isinstance(dc4, MonthlyPerHourCollectionImmutable)
    assert dc4.is_mutable is False
def data_to_load_intensity(data_colls, floor_area, data_type, cop=1, mults=None):
    """Convert data collections output by EnergyPlus to a single load intensity collection.

    Args:
        data_colls: A list of monthly data collections for an energy term.
        floor_area: The total floor area of the rooms, used to compute EUI.
        data_type: Text for the data type of the collections (eg. "Cooling").
        cop: Optional number for the COP, which the results will be divided by.
        mults: Optional dictionary of zone multipliers keyed by zone identifier,
            used to scale collections that carry a 'Zone' key in their header
            metadata.
    """
    if len(data_colls) != 0:
        if mults is not None:
            if 'Zone' in data_colls[0].header.metadata:
                rel_mults = [mults[data.header.metadata['Zone']]
                             for data in data_colls]
                data_colls = [dat * mul for dat, mul in zip(data_colls, rel_mults)]
        total_vals = [sum(month_vals) / floor_area
                      for month_vals in zip(*data_colls)]
        if cop != 1:
            total_vals = [val / cop for val in total_vals]
    else:  # just make a "filler" collection of 0 values
        total_vals = [0] * 12
    meta_dat = {'type': data_type}
    total_head = Header(EnergyIntensity(), 'kWh/m2', AnalysisPeriod(), meta_dat)
    return MonthlyCollection(total_head, total_vals, range(12))
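# A minimal, hedged usage sketch for data_to_load_intensity above. The monthly
# collection is fabricated here purely for illustration (the 'Office' zone name,
# floor area and COP are made-up values); in practice the collections would come
# from EnergyPlus results.
def _example_load_intensity():
    """Illustrative helper only; builds one fake monthly cooling collection."""
    from ladybug.analysisperiod import AnalysisPeriod
    from ladybug.datacollection import MonthlyCollection
    from ladybug.datatype.energy import Energy
    from ladybug.header import Header

    head = Header(Energy(), 'kWh', AnalysisPeriod(), {'Zone': 'Office'})
    cooling = MonthlyCollection(head, [100.0] * 12, range(12))
    load = data_to_load_intensity(
        [cooling], floor_area=50.0, data_type='Cooling', cop=3.0)
    return load.values  # monthly kWh/m2, divided by the COP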
def test_daily():
    """Test the daily collections."""
    a_per = AnalysisPeriod(6, 21, 0, 6, 22, 23)
    v1, v2 = 20, 25
    dc1 = DailyCollectionImmutable(
        Header(Temperature(), 'C', a_per), [v1, v2], a_per.doys_int)

    assert dc1.datetimes == tuple(a_per.doys_int)
    assert dc1.values == (v1, v2)
    assert dc1.is_mutable is False
    with pytest.raises(AttributeError):
        dc1[0] = 18
    with pytest.raises(AttributeError):
        dc1.values = [18, 24]
    with pytest.raises(Exception):
        dc1.values.append(10)

    dc2 = dc1.to_mutable()
    assert isinstance(dc2, DailyCollection)
    assert dc2.is_mutable is True
    dc2[0] = 18
    assert dc2[0] == 18
    dc2.values = [18, 24]
    assert dc2.values == (18, 24)
    with pytest.raises(Exception):
        dc2.values.append(10)  # make sure that we can still not append

    dc3 = dc2.to_immutable()
    assert isinstance(dc3, DailyCollectionImmutable)
    assert dc3.is_mutable is False
    dc4 = dc3.to_immutable()
    assert isinstance(dc4, DailyCollectionImmutable)
    assert dc4.is_mutable is False