def test_get_cci(self):
    """CCI with the default (14-day) window matches the explicit cci_14 column.

    FIX: replaced the deprecated ``DataFrame.ix`` accessor (removed in
    pandas 1.0) with the label-based ``.loc``.
    """
    self._supor.get('cci_14')
    self._supor.get('cci')
    assert_that(self._supor.loc[20160817]['cci'], close_to(50, 0.01))
    assert_that(self._supor.loc[20160817]['cci_14'], close_to(50, 0.01))
    assert_that(self._supor.loc[20160816]['cci_14'], close_to(24.8, 0.01))
    assert_that(self._supor.loc[20160815]['cci_14'], close_to(-26.46, 0.01))
def test_cr(self):
    """CR energy index and its three moving averages on the 90-day fixture.

    FIX: ``Series.ix`` (removed in pandas 1.0) replaced with ``.loc``.
    """
    stock = self.get_stock_90day()
    stock.get('cr')
    assert_that(stock['cr'].loc[20110331], close_to(178.2, 0.1))
    assert_that(stock['cr-ma1'].loc[20110331], close_to(120.0, 0.1))
    assert_that(stock['cr-ma2'].loc[20110331], close_to(117.1, 0.1))
    assert_that(stock['cr-ma3'].loc[20110331], close_to(111.5, 0.1))
def test_get_rsi(self):
    """RSI values for 6-, 12- and 24-day windows on the Supor fixture.

    FIX: ``DataFrame.ix`` (removed in pandas 1.0) replaced with ``.loc``.
    """
    self._supor.get('rsi_6')
    self._supor.get('rsi_12')
    self._supor.get('rsi_24')
    assert_that(self._supor.loc[20160817]['rsi_6'], close_to(71.31, 0.01))
    assert_that(self._supor.loc[20160817]['rsi_12'], close_to(63.11, 0.01))
    assert_that(self._supor.loc[20160817]['rsi_24'], close_to(61.31, 0.01))
def test_column_macd(self):
    """MACD line, signal line and histogram for a single day.

    FIX: ``DataFrame.ix`` (removed in pandas 1.0) replaced with ``.loc``.
    """
    stock = self.get_stock_90day()
    stock.get('macd')
    record = stock.loc[20110225]
    assert_that(record['macd'], close_to(-0.0382, 0.0001))
    assert_that(record['macds'], close_to(-0.0101, 0.0001))
    assert_that(record['macdh'], close_to(-0.02805, 0.0001))
def test_get_atr(self):
    """ATR default column equals the explicit 14-day column.

    FIX: ``DataFrame.ix`` (removed in pandas 1.0) replaced with ``.loc``.
    """
    self._supor.get('atr_14')
    self._supor.get('atr')
    assert_that(self._supor.loc[20160817]['atr_14'], close_to(1.33, 0.01))
    assert_that(self._supor.loc[20160817]['atr'], close_to(1.33, 0.01))
    assert_that(self._supor.loc[20160816]['atr'], close_to(1.32, 0.01))
    assert_that(self._supor.loc[20160815]['atr'], close_to(1.28, 0.01))
def test_parse(self):
    """Parse a captured Netease daily-detail JSON body and verify every
    field of the sh600000 row (prices, five buy/sell levels, date/time).

    FIX: ``DataFrame.ix`` (removed in pandas 1.0) replaced with ``.loc``.
    """
    body = read_file_in_same_dir(__file__, 'daily_detail.json')
    result = NeteaseStock._parse(body)
    sh60 = result.loc['sh600000']
    assert_that(sh60.name, equal_to('sh600000'))
    assert_that(sh60['name'], equal_to(u'浦发银行'))
    assert_that(sh60.open, equal_to(18.28))
    assert_that(sh60.close, equal_to(18.53))
    assert_that(sh60.high, equal_to(18.46))
    assert_that(sh60.low, equal_to(18.05))
    assert_that(sh60.volume, equal_to(412760522))
    assert_that(sh60.amount, equal_to(7523663257))
    assert_that(sh60.buy1_price, equal_to(18.07))
    assert_that(sh60.buy1_volume, close_to(300, 50))
    assert_that(sh60.buy2_price, equal_to(18.06))
    assert_that(sh60.buy2_volume, close_to(532800, 50))
    assert_that(sh60.buy3_price, equal_to(18.05))
    assert_that(sh60.buy3_volume, close_to(1053500, 50))
    assert_that(sh60.buy4_price, equal_to(18.04))
    assert_that(sh60.buy4_volume, close_to(153000, 50))
    assert_that(sh60.buy5_price, equal_to(18.03))
    assert_that(sh60.buy5_volume, close_to(312200, 50))
    assert_that(sh60.sell1_price, equal_to(18.08))
    assert_that(sh60.sell1_volume, close_to(205900, 50))
    assert_that(sh60.sell2_price, equal_to(18.09))
    assert_that(sh60.sell2_volume, close_to(466000, 50))
    assert_that(sh60.sell3_price, equal_to(18.10))
    assert_that(sh60.sell3_volume, close_to(333500, 50))
    assert_that(sh60.sell4_price, equal_to(18.11))
    assert_that(sh60.sell4_volume, close_to(253600, 50))
    assert_that(sh60.sell5_price, equal_to(18.12))
    assert_that(sh60.sell5_volume, close_to(155400, 50))
    assert_that(sh60.date, equal_to(20150430))
    assert_that(sh60.time, equal_to('15:03:02'))
def test_bollinger(self):
    """Bollinger middle band and upper/lower bands for one day.

    FIX: ``Series.ix`` (removed in pandas 1.0) replaced with ``.loc``.
    """
    stock = self.get_stock().within(20140930, 20141211)
    boll_ub = stock['boll_ub']
    boll_lb = stock['boll_lb']
    assert_that(stock['boll'].loc[20141103], close_to(9.80, 0.01))
    assert_that(boll_ub.loc[20141103], close_to(10.1310, 0.01))
    assert_that(boll_lb.loc[20141103], close_to(9.48, 0.01))
def test_acceleration(self):
    """Cross-check the s-curve motion planner against closed-form
    constant-jerk / constant-acceleration kinematics over a large grid of
    (start velocity, jerk, max acceleration) combinations.

    Two scenarios per combination:
    1. pure jerk-up then jerk-down (never reaching max acceleration);
    2. jerk-up to max acceleration, a constant-acceleration plateau,
       then jerk-down.
    In both cases the planner's predicted velocity must match the
    analytically integrated one to within 0.1.
    """
    # we use prime numbers in the hope to get more distinguishable numbers
    for v0i in range(0, 700000, 233):
        for jerki in range(3, 30000, 175):
            for max_accelerationi in range(5, 5000, 233):
                v0 = float(v0i)
                jerk = float(jerki)
                max_acceleration = float(max_accelerationi)
                #print "testing v0=%s, j=%s, a=%s" % (v0, jerk, max_acceleration)
                # Time needed to ramp from a=0 to a=max_acceleration at this jerk.
                time_to_max_acceleration = constant_jerk_time_to_acceleration(j=jerk, a0=0, a=max_acceleration)
                # first of all we test an acceleration below the max acceleration
                # (slightly less than twice the ramp time, so the peak stays below max)
                test_time = time_to_max_acceleration * 2.0 - (time_to_max_acceleration / 7.0)
                # for the acceleration_phase
                acceleration_time = test_time / 2.0
                target_acceleration = constant_jerk_acceleration(jerk, acceleration_time)
                target_velocity_1 = constant_jerk_speed(jerk, acceleration_time, v0=v0)
                target_distance_1 = constant_jerk_displacement(jerk, acceleration_time, v0=v0)
                # for the deceleration phase
                target_velocity_2 = constant_jerk_speed(-jerk, acceleration_time, a0=target_acceleration, v0=target_velocity_1)
                target_distance_2 = constant_jerk_displacement(-jerk, acceleration_time, a0=target_acceleration, v0=target_velocity_1, x0=target_distance_1)
                # and now test the s curve acceleration
                calculated_target_velocity = calculate_ideal_s_curve_acceleration(v0=v0, s=target_distance_2, j=jerk)
                assert_that(calculated_target_velocity, close_to(target_acceleration, 0.1))
                calculated_target_velocity = get_target_velocity(start_velocity=v0, length=target_distance_2, max_acceleration=max_acceleration, jerk=jerk)
                assert_that(calculated_target_velocity, close_to(target_velocity_2, 0.1))
                # and now for some constant acceleration phase
                for constant_acceleration_timei in range(1, 100, 3):
                    constant_acceleration_time = float(constant_acceleration_timei)
                    acceleration_time = time_to_max_acceleration
                    test_time = time_to_max_acceleration * 2.0 + constant_acceleration_time
                    target_acceleration = constant_jerk_acceleration(jerk, acceleration_time)
                    # Phase 1: jerk up to max acceleration.
                    target_velocity_1 = constant_jerk_speed(jerk, acceleration_time, v0=v0)
                    target_distance_1 = constant_jerk_displacement(jerk, acceleration_time, v0=v0)
                    # Phase 2: hold acceleration constant for the plateau time.
                    target_velocity_2 = constant_acceleration_speed(target_acceleration, constant_acceleration_time, v0=target_velocity_1)
                    target_distance_2 = constant_acceleration_displacement(target_acceleration, constant_acceleration_time, v0=target_velocity_1, x0=target_distance_1)
                    # Phase 3: jerk back down to a=0.
                    target_velocity_3 = constant_jerk_speed(-jerk, acceleration_time, a0=target_acceleration, v0=target_velocity_2)
                    target_distance_3 = constant_jerk_displacement(-jerk, acceleration_time, a0=target_acceleration, v0=target_velocity_2, x0=target_distance_2)
                    calculated_target_velocity = get_target_velocity(start_velocity=v0, length=target_distance_3, max_acceleration=max_acceleration, jerk=jerk)
                    assert_that(calculated_target_velocity, close_to(target_velocity_3, 0.1))
                    # print"error:%s" % str(calculated_target_velocity - target_velocity_3)
                    pass
def test_cutout_over_two_edges(self):
    """A requested cutout overlapping two edges is shifted to stay inside."""
    calculator = CutoutCalculator(250, 250)
    bounds, _ = calculator.calc_cutout(
        (1997.68, 4618.31), (2112, 4644), inverted=False)
    x_min, x_max, y_min, y_max = bounds
    tolerance = 0.01
    assert_that(x_min, close_to(1862, tolerance))
    assert_that(x_max, close_to(2112, tolerance))
    assert_that(y_min, close_to(4394, tolerance))
    assert_that(y_max, close_to(4644, tolerance))
def test_converter_near_edge(self):
    """The returned converter maps an image point into cutout coordinates."""
    calculator = CutoutCalculator(250, 250)
    _, converter = calculator.calc_cutout(
        (1970.17, 4611.65), (2112, 4644), inverted=False)
    converted_x, converted_y = converter.convert((1970.17, 4611.65))
    tolerance = 0.01
    # note 1-based indexing not 0
    assert_that(converted_x, close_to(126.17, tolerance))
    assert_that(converted_y, close_to(218.65, tolerance))
def test_data_coverage_excludes_non_high_volume_services(self):
    """Only high-volume services count toward department data coverage."""
    service_list = [
        Service(details({
            "2012-Q4 Vol.": "2,000",
            '2012-Q4 Digital vol.': '10',
            u'2012-Q4 CPT (\xa3)': "2.00",
            "2013-Q1 Vol.": "2,000",
            u'2013-Q1 CPT (\xa3)': "2.00",
            '2013-Q1 Digital vol.': '10',
        })),
        Service(details({
            "2012-Q4 Vol.": "1,000",
            u'2012-Q4 CPT (\xa3)': "3.00",
            '2012-Q4 Digital vol.': '10',
            u'High-volume?': 'yes'
        })),
    ]
    department = Department("Agency for Beautiful Code", service_list)
    coverage = department.data_coverage
    assert_that(float(coverage.percentage), close_to(0.25, 0.001))
    assert_that(coverage.requested, is_(12))
    assert_that(coverage.provided, is_(3))
def test_GIVEN_bng_WHEN_bng_to_latlon_service_called_THEN_correct_latlon_returned(self):
    """POSTing BNG easting/northing returns the matching lat/lon as JSON."""
    # BNG test values are from http://www.bgs.ac.uk/data/webservices/convertForm.cfm
    easting = 429157
    northing = 623009
    expected_lat = 55.5
    expected_lon = -1.54
    tolerance = 0.00001
    response = self.app.post(
        url(controller='model_run', action='bng_to_latlon'),
        params={
            'bng_easting': easting,
            'bng_northing': northing,
        })
    body = response.json_body
    assert_that(body['lat'], is_(close_to(expected_lat, tolerance)))
    assert_that(body['lon'], is_(close_to(expected_lon, tolerance)))
def test_GIVEN_divide_by_zeros_WHEN_rel_stddev_THEN_rel_stddev_correct(self):
    """Relative stddev handles a zero in the reference dataset."""
    # Zeros in the first array cause undefined values in the relative calculations.
    reference = self.data1
    reference.data[0][1] = 0
    analyzer = StatsAnalyzer(reference, self.missing2)
    results = analyzer.rel_stddev()
    assert_that(results[0].rel_stddev, close_to(5.8725242578, 1e-5))
def test_GIVEN_divide_by_zeros_WHEN_rel_mean_THEN_rel_mean_correct(self):
    """Relative mean handles a zero in the reference dataset."""
    # Zeros in the first array cause undefined values in the relative calculations.
    reference = self.data1
    reference.data[0][1] = 0
    analyzer = StatsAnalyzer(reference, self.missing2)
    results = analyzer.rel_mean()
    assert_that(results[0].rel_mean, close_to(-2.3518849206, 1e-5))
def test_data_coverage_when_quarter_not_requested(self):
    """'***' entries mark metrics that were not requested for a quarter."""
    service_list = [
        Service(details({
            "2012-Q4 Vol.": "2,000",
            '2012-Q4 Digital vol.': '10',
            u'2012-Q4 CPT (\xa3)': "2.00",
            "2013-Q1 Vol.": "***",
            u'2013-Q1 CPT (\xa3)': "***",
            '2013-Q1 Digital vol.': '***',
            u'High-volume?': 'yes'
        })),
        Service(details({
            "2012-Q4 Vol.": "1,000",
            u'2012-Q4 CPT (\xa3)': "3.00",
            '2012-Q4 Digital vol.': '10',
            u'High-volume?': 'yes'
        })),
    ]
    department = Department("Agency for Beautiful Code", service_list)
    coverage = department.data_coverage
    assert_that(float(coverage.percentage), close_to(0.1333333, 0.001))
    assert_that(coverage.requested, is_(45))
    assert_that(coverage.provided, is_(6))
def test_GIVEN_altitude_not_read_by_iris_WHEN_read_THEN_pressure_coordinate_present(self):
    """The manually-built altitude aux coord exists, in meters, with the
    expected shape and first value."""
    self.read_data()
    # The altitude aux coord is CF compliant but not identified by IRIS,
    # so the reader creates it by hand.
    altitude_coord = self.data.coord("altitude")
    assert_that(altitude_coord.shape, is_((248, 31, 96, 192)))
    # Check it has been converted to meters
    assert_that(str(altitude_coord.units), is_("meter"))
    assert_that(altitude_coord.points[0, 0, 0, 0],
                close_to(275081.0 / 9.80665, 0.1))
def test_read_kline(self):
    """Reading the sz002707 k-line yields the expected row for 2015-06-08.

    FIX: ``DataFrame.ix`` (removed in pandas 1.0) replaced with ``.loc``.
    """
    df = next(self.tdx.read_kline('sz002707'))
    assert_that(df.columns.name, equal_to('sz002707'))
    data = df.loc[20150608]
    assert_that(data.amount, close_to(457520100, 100))
    assert_that(data.volume, equal_to(3750037))
    assert_that(data.open, equal_to(126.87))
    assert_that(data.high, equal_to(130.45))
    assert_that(data.low, equal_to(117.60))
    assert_that(data.close, equal_to(119.64))
def test_increx_returns_counter(counters):
    """
    Incrementing a counter should give information as to the expiration of
    the counter. This can be useful to write rate limiting HTTP headers.
    """
    counters.set_delta(5000)
    first = counters.increx("some-key")
    assert_that(first.value, equal_to(1))
    assert_that(first.pttl, close_to(5000, 5))

    counters.set_delta(1000)
    second = counters.increx("key")
    assert_that(second.value, equal_to(1))
    assert_that(second.pttl, close_to(1000, 5))

    time.sleep(.05)
    third = counters.increx("key")
    assert_that(third.value, equal_to(2))
    assert_that(third.pttl, close_to(950, 5))

    time.sleep(.05)
    fourth = counters.increx("key")
    assert_that(fourth.value, equal_to(3))
    assert_that(fourth.pttl, close_to(900, 5))
def test_phot_mag(self):
    """phot_mag returns the centroid, magnitude and magnitude error for a
    known source in the fixture image."""
    fits_filename = self.get_abs_path("data/1616681p22.fits")
    source_x = 560.06
    source_y = 406.51
    aperture_radius = 4
    inner_sky = 11
    outer_sky = 15
    max_count = 30000.0
    exposure_time = 1.0
    sky_width = outer_sky - inner_sky
    aperture_correction = 0.0
    x, y, mag, magerr = daophot.phot_mag(
        fits_filename, source_x, source_y,
        aperture=aperture_radius,
        sky=inner_sky,
        swidth=sky_width,
        apcor=aperture_correction,
        maxcount=max_count,
        exptime=exposure_time)
    assert_that(x, close_to(560.000, DELTA))
    assert_that(y, close_to(406.600, DELTA))
    assert_that(mag, close_to(24.769, DELTA))
    # NOTE: minor difference in magnitude error: 0.290 vs 0.291
    assert_that(magerr, close_to(0.290, 0.0011))
def test_phot(self):
    """
    Test data to compare with generated by running make testphot
    which runs the equivalent Perl script with the same data.
    """
    fits_filename = self.get_abs_path("data/1616681p22.fits")
    source_x = 560.06
    source_y = 406.51
    aperture_radius = 4
    inner_sky = 11
    outer_sky = 15
    max_count = 30000.0
    exposure_time = 1.0
    sky_width = outer_sky - inner_sky
    aperture_correction = 0.0
    hdu = daophot.phot(
        fits_filename, source_x, source_y,
        aperture=aperture_radius,
        sky=inner_sky,
        swidth=sky_width,
        apcor=aperture_correction,
        maxcount=max_count,
        exptime=exposure_time)

    def single_value(param):
        # Each column should hold exactly one measurement.
        values = hdu["data"][param]
        assert_that(values, has_length(1))
        return values[0]

    assert_that(single_value("X"), close_to(560.000, DELTA))
    assert_that(single_value("Y"), close_to(406.600, DELTA))
    assert_that(single_value("MAG"), close_to(24.769, DELTA))
    # NOTE: minor difference in magnitude error: 0.290 vs 0.291
    assert_that(single_value("MERR"), close_to(0.290, 0.0011))
def test_vr_default(self):
    """VR default column equals the explicit 26-day column.

    FIX: ``Series.ix`` (removed in pandas 1.0) replaced with ``.loc``.
    """
    c = self._supor['vr']
    assert_that(c.loc[20160817], close_to(153.2, 0.01))
    assert_that(c.loc[20160816], close_to(171.69, 0.01))
    assert_that(c.loc[20160815], close_to(178.78, 0.01))
    c = self._supor['vr_26']
    assert_that(c.loc[20160817], close_to(153.2, 0.01))
    assert_that(c.loc[20160816], close_to(171.69, 0.01))
    assert_that(c.loc[20160815], close_to(178.78, 0.01))
def test_coverage_with_non_requested_metrics(self):
    """Metrics marked '***' or left blank don't count as provided."""
    service = Service(details({
        "2012-Q4 Vol.": "2,000",
        '2012-Q4 Digital vol.': '10',
        u'2012-Q4 CPT (\xa3)': "2.00",
        "2013-Q1 Vol.": "2,000",
        '2013-Q1 Digital vol.': '10',
        '2013-Q2 Vol.': '***',
        '2013-Q2 Digital vol.': '',
        u'High-volume?': 'yes'
    }))
    coverage = service.data_coverage
    assert_that(float(coverage.percentage), close_to(0.4545, 0.001))
    assert_that(coverage.requested, is_(11))
    assert_that(coverage.provided, is_(5))
def test_timer_simple():
    """A timer scheduled in microseconds fires exactly once within the
    simulated step budget."""
    avr = Avr(mcu='atmega88', f_cpu=8000000)
    # Callback method mocked out.
    callback = Mock(return_value=0)
    # Schedule callback at 20uSec.
    # cycles = avr->frequency * (avr_cycle_count_t)usec / 1000000;
    timer = avr.timer(callback, uSec=20)
    assert_that(timer.status(), close_to(8000000 * 20 / 1000000, 10),
                "uSec to cycles convertion")
    avr.step(1000)
    eq_(avr.state, cpu_Running, "mcu is not running")
    eq_(callback.call_count, 1, "number of calback invocations")
    avr.terminate()
def test_phot_mag(self):
    """phot_mag returns the expected magnitude for a known fixture source."""
    fits_filename = self.get_abs_path("data/1616681p22.fits")
    source_x = 560.06
    source_y = 406.51
    aperture_radius = 4
    inner_sky = 11
    outer_sky = 15
    max_count = 30000.0
    exposure_time = 1.0
    sky_width = outer_sky - inner_sky
    aperture_correction = 0.0
    mag = daophot.phot_mag(
        fits_filename, source_x, source_y,
        aperture=aperture_radius,
        sky=inner_sky,
        swidth=sky_width,
        apcor=aperture_correction,
        maxcount=max_count,
        exptime=exposure_time)
    assert_that(mag, close_to(24.769, DELTA))
def compare_move_configs(machine_move_config, x_move_config, y_move_config):
    """Check that the machine move list is consistent with the per-axis
    configs: at most one move per motor, missing axes imply a None config,
    and when both axes move their acceleration and startBow scale with the
    same speed ratio.

    FIX: the final check previously passed ``not (equal_to(None))`` — which
    evaluates to the plain boolean ``False``, not a matcher — so hamcrest
    treated it as a reason string rather than a matcher. Replaced with the
    real ``not_none()`` matcher.
    """
    x_move = None
    y_move = None
    for machine_move in machine_move_config:
        if machine_move['motor'] == 0:
            # Motor 0 (x) must appear at most once.
            assert_that(x_move, equal_to(None))
            x_move = machine_move
        if machine_move['motor'] == 1:
            # Motor 1 (y) must appear at most once.
            assert_that(y_move, equal_to(None))
            y_move = machine_move
    if not x_move:
        assert_that(x_move_config, none())
    if not y_move:
        assert_that(y_move_config, none())
    if x_move and y_move:
        ratio = float(x_move['speed']) / float(y_move['speed'])
        assert_that(float(x_move['acceleration']) / float(y_move['acceleration']),
                    close_to(ratio, 0.0001))
        assert_that(float(x_move['startBow']) / float(y_move['startBow']),
                    close_to(ratio, 0.0001))
    # At least one axis must have moved.
    assert_that(y_move or x_move, not_none())
def test_parse_source_reference_point(self):
    """Reference source points are parsed for all three readings."""
    astrom_data = self.parse(TEST_FILE_2)
    source = astrom_data.get_sources()[0]
    tolerance = 0.00000001
    expected_points = [(560.06, 406.51), (562.82, 406.68), (564.44, 406.03)]
    for index, (expected_x, expected_y) in enumerate(expected_points):
        reading = source.get_reading(index)
        assert_that(reading.reference_source_point[0],
                    close_to(expected_x, tolerance))
        assert_that(reading.reference_source_point[1],
                    close_to(expected_y, tolerance))
def test_reading_coordinate_offsets(self):
    """Coordinate offsets from reading 0 to each reading are as recorded."""
    astrom_data = self.parse(TEST_FILE_2)
    source = astrom_data.get_sources()[0]
    base_reading = source.get_reading(0)
    tolerance = 0.00000001
    expected_offsets = [(0, 0), (-2.76, -0.17), (-4.38, 0.48)]
    for index, (expected_dx, expected_dy) in enumerate(expected_offsets):
        other = source.get_reading(index)
        offset = base_reading.get_coordinate_offset(other)
        assert_that(offset[0], close_to(expected_dx, tolerance))
        assert_that(offset[1], close_to(expected_dy, tolerance))
def test_timer_reoccuring():
    """A timer callback can reschedule itself once and then cancel."""
    avr = Avr(mcu='atmega48', f_cpu=8000000)
    # Callback method mocked out. It will register another callback
    # at 200 cycles and then cancel by returning 0.
    callback = Mock(side_effect=[200, 0])
    timer = avr.timer(callback)
    avr.step(10)
    eq_(avr.state, cpu_Running, "mcu is not running")
    callback.assert_not_called()
    # Request first timer callback at 100 cycles
    timer.set_timer_cycles(100)
    # Run long enought to ensure callback is canceled by returning 0 on the
    # second invocation.
    avr.step(1000)
    eq_(avr.state, cpu_Running, "mcu is not running")
    eq_(callback.call_count, 2, "number of calback invocations")
    last_call_cycle = callback.call_args[0][0]
    assert_that(last_call_cycle, close_to(200, 10),
                "The last cycle number received in the callback doesn't match the requested one")
    avr.terminate()
def test_grid_boundaries(self):
    """Grid extents and cell divisions correspond to the base cell length
    when projected back through pyproj."""
    grid = self.grid_factory.get_for(self.base_length)
    cell_width = grid.x_div
    cell_height = grid.y_div
    assert_that(grid.x_min, is_(10))
    assert_that(grid.y_min, is_(52))
    assert_that(grid.x_max, is_(close_to(10.964855970781894, 1e-10)))
    assert_that(grid.y_max, is_(close_to(52.99942586979069, 1e-10)))
    assert_that(cell_width, is_(close_to(0.06891828362727814, 1e-10)))
    assert_that(cell_height, is_(close_to(0.04759170808527102, 1e-10)))
    # One grid cell, projected, should span base_length in both directions.
    origin_x, origin_y = pyproj.transform(
        self.base_proj, self.proj, 10.5, 52.5)
    next_x, next_y = pyproj.transform(
        self.base_proj, self.proj, 10.5 + cell_width, 52.5 + cell_height)
    assert_that(next_x - origin_x, is_(close_to(self.base_length, 1e-4)))
    assert_that(next_y - origin_y, is_(close_to(self.base_length, 1e-4)))
def test_vector_math(self):
    """calculate_relative_vector returns a normalized direction plus length
    for several representative point pairs, including degenerate axes."""
    vector = calculate_relative_vector(1, 1, 0, 1)
    assert_that(vector, not_none())
    assert_that(vector['x'], close_to(1 / sqrt(3), 0.0001))
    assert_that(vector['y'], close_to(1 / sqrt(3), 0.0001))
    assert_that(vector['l'], close_to(1.7, 0.1))

    vector = calculate_relative_vector(23, 23, 0, 0)
    assert_that(vector, not_none())
    assert_that(vector['x'], close_to(1 / sqrt(2), 0.0001))
    assert_that(vector['y'], close_to(1 / sqrt(2), 0.0001))
    assert_that(vector['l'], close_to(32.5, 0.1))

    vector = calculate_relative_vector(0, 0, 0, 12)
    assert_that(vector, not_none())
    assert_that(vector['x'], equal_to(0))
    assert_that(vector['y'], equal_to(0))
    assert_that(vector['l'], equal_to(12))

    vector = calculate_relative_vector(0, 20, 0, 0)
    assert_that(vector, not_none())
    assert_that(vector['x'], equal_to(0))
    assert_that(vector['y'], equal_to(1))
    assert_that(vector['l'], equal_to(20))
def test_column_rsv(self):
    """3-day RSV value for a single date on the 20-day fixture."""
    stock = self.get_stock_20day()
    rsv_series = stock['rsv_3']
    assert_that(rsv_series.loc[20110106], close_to(60.65, 0.01))
def test_sg_write_size_kb(self):
    """Storage-group write size in KB is close to the fixture value."""
    write_size = self.sg.write_size_kb
    assert_that(write_size, close_to(921, 1))
def test_middle(self):
    """'middle' (typical price) column value for a single date."""
    stock = self.get_stock_20day()
    middle_series = stock['middle']
    assert_that(middle_series.loc[20110104], close_to(12.53, 0.01))
def test_GIVEN_no_missing_vals_WHEN_rel_stddev_THEN_rel_stddev_correct(
        self):
    """Relative stddev with complete data matches the precomputed value."""
    analyzer = StatsAnalyzer(self.data1, self.data2)
    results = analyzer.rel_stddev()
    assert_that(results[0].rel_stddev, close_to(0.1097392069, 1e-5))
def test_GIVEN_no_missing_vals_WHEN_stddev_THEN_stddev_correct(self):
    """Standard deviations of both complete datasets are correct."""
    analyzer = StatsAnalyzer(self.data1, self.data2)
    results = analyzer.stddevs()
    assert_that(results[0].stddev, close_to(3.7252889523, 1e-5))
    assert_that(results[1].stddev, close_to(3.7020864988, 1e-5))
def test_column_kdjd(self):
    """3-day KDJ %D values for two dates on the 20-day fixture."""
    stock = self.get_stock_20day()
    kdjd_series = stock['kdjd_3']
    assert_that(kdjd_series.loc[20110104], close_to(53.50, 0.01))
    assert_that(kdjd_series.loc[20110120], close_to(43.13, 0.01))
def test_GIVEN_one_masked_one_nparray_WHEN_rel_stddev_THEN_rel_stddev_correct(
        self):
    """Relative stddev works with one masked and one plain array."""
    analyzer = StatsAnalyzer(self.data1, self.missing2)
    results = analyzer.rel_stddev()
    assert_that(results[0].rel_stddev, close_to(5.4371807462, 1e-5))
def test_disk_utilization(self):
    """Disk utilization metric for a DAE disk matches the fixture."""
    unity = t_unity()
    disk = UnityDisk(_id='dae_0_1_disk_2', cli=unity._cli)
    assert_that(disk.utilization, close_to(2.41, 0.01))
def test_metric_read_iops(self):
    """Write IOPS metric for a DAE disk is zero in the fixture.

    NOTE(review): the test name says read_iops but the assertion checks
    write_iops — confirm which metric this was meant to cover.
    """
    unity = t_unity()
    disk = UnityDisk(_id='dae_0_1_disk_2', cli=unity._cli)
    assert_that(disk.write_iops, close_to(0.0, 0.01))
def test_data_full_1(self):
    """A full 6-slot weighted average updates correctly as values roll in."""
    average = WeightedAverage(6)
    average.add(30, 24, 18, 12, 6, 6, 6)
    assert_that(average.value(), close_to(8.85, 0.01))
    # Pushing another value shifts the window and raises the average.
    average.add(30)
    assert_that(average.value(), equal_to(14))
def test_a_core_service_requests_process_time_on_a_server_r(
        test_count, env, serverLst, svcRqrLst, nSvcReqs, svcReqLog, nServers,
        nSvcRqrs, fi, dump=False):
    """Submit nSvcReqs randomly-chosen service requests, run the simulation,
    then verify (1) each request's process time formula and (2) that each
    server's average time stats equal the averages over its request log.

    All diagnostic output goes to the file-like object ``fi``.
    """
    func_name = sys._getframe().f_code.co_name
    print("\n\n@@@@@@@@ Start", func_name, "-", test_count, "- nServers:",
          nServers, ", nSvcRqrs:", nSvcRqrs, "nSvcReqs:", nSvcReqs, file=fi)
    for i in range(nSvcReqs):
        # svcRqr = choice(svcRqrLst)  # this is expensive, replaced below
        j = random.randint(0, len(svcRqrLst) - 1)
        svcRqr = svcRqrLst[j]
        svcReq = svcRqr.make_svc_request(None)
        # print(">>> submitting", svcRqr.svc_name, file=fi)
        svcReq.submit()
    # Run long enough that all submitted requests complete.
    simTime = 1000000000
    print("\n\n***** Start Simulation (%s, %s, %s)-%s *****"
          % (nServers, nSvcRqrs, dump, test_count), file=fi)
    print("Simulation: simTime = %s" % simTime, file=fi)
    env.run(until=simTime)
    delta = 0.0001
    if dump:
        dump_svc_reqs(svcReqLog)
    # Scenario: A core service request's process time on a server
    # The process time is request.compUnits / (server.speed /
    # server.max_concurrency).
    for svcReq in svcReqLog:
        server = svcReq.server
        assert_that(
            svcReq.process_time,
            close_to(svcReq.compUnits / (server.speed / server.maxConcurrency),
                     delta))
    if dump:
        dump_servers(serverLst)
    # Scenario: Server average time stats
    # Server stats are aggregates of service request stats for all core
    # service requests that have completed processing on the server:
    #
    # The server's average processing time is the average of the
    # processing time over all core service requests processed
    # on the server.
    # The server's average (_hardware thread) queue time is the average
    # of the (_hardware thread) queue time over all core service
    # requests processed on the server.
    # The server's average service time is the average of the service
    # time over all core service requests processed on the server.
    for server in serverLst:
        print(">>>", server.name, "hwThreads=", server.maxConcurrency,
              "_threads=", server.numThreads, "speed=", server.speed, file=fi)
        # Recompute the averages directly from the server's own request log
        # and compare with the server's aggregate properties.
        svcReqLog = server.svc_req_log
        nSvcReqs = len(svcReqLog)
        print("nSvcReqs", nSvcReqs, file=fi)
        if nSvcReqs != 0:
            print("svcReq processTimes",
                  [(svcReq.svcName, svcReq.process_time)
                   for svcReq in svcReqLog], file=fi)
            print("svcReq hwQueueTimes",
                  [(svcReq.svcName, svcReq.hw_queue_time)
                   for svcReq in svcReqLog], file=fi)
            print("svcReq serviceTimes",
                  [(svcReq.svcName, svcReq.service_time)
                   for svcReq in svcReqLog], file=fi)
            avgSvcReqProcessTime = \
                sum([svcReq.process_time for svcReq in svcReqLog]) / nSvcReqs
            avgSvcReqHwQueueTime = \
                sum([svcReq.hw_queue_time for svcReq in svcReqLog]) / nSvcReqs
            avgSvcReqServiceTime = \
                sum([svcReq.service_time for svcReq in svcReqLog]) / nSvcReqs
            print("Before assertions", file=fi)
            print(avgSvcReqProcessTime, server.avg_process_time, file=fi)
            print(avgSvcReqHwQueueTime, server.avg_hw_queue_time, file=fi)
            print(avgSvcReqServiceTime, server.avg_service_time, file=fi)
            assert_that(avgSvcReqProcessTime,
                        close_to(server.avg_process_time, delta))
            assert_that(avgSvcReqHwQueueTime,
                        close_to(server.avg_hw_queue_time, delta))
            assert_that(avgSvcReqServiceTime,
                        close_to(server.avg_service_time, delta))
    print("@@@@@@@@ End test: " + str(test_count) + " ended: " + str(env.now),
          file=fi)
def assert_close(self, expected, actual):
    """Assert that *actual* is within 1e-4 of *expected*.

    FIX: the original called ``assert_that(expected, close_to(actual, ...))``,
    reversing hamcrest's actual-value/matcher convention. Numerically the
    check is symmetric, so outcomes are unchanged, but failure messages
    previously labeled expected and actual backwards.
    """
    assert_that(actual, close_to(expected, 0.0001))
def test_hamcrest(self):
    """Smoke-test the hamcrest matchers used throughout this suite."""
    assert_that(10, equal_to(10), '这是一个提示')
    assert_that(12, close_to(10, 2), '这是一个提示')
    assert_that('contains some string', contains_string("string"))
def check_cost(context, cost):
    """Assert the simulator's accumulated cost matches the expected value."""
    expected_cost = float(cost)
    assert_that(context.simulator.total_cost, close_to(expected_cost, 0.01))
def test_column_kdj_default(self):
    """Default KDJ %K/%D/%J columns for a single date."""
    stock = self.get_stock_20day()
    date = 20110104
    assert_that(stock['kdjk'].loc[date], close_to(60.52, 0.01))
    assert_that(stock['kdjd'].loc[date], close_to(53.50, 0.01))
    assert_that(stock['kdjj'].loc[date], close_to(74.56, 0.01))
def assert_tuples_almost_equal(self, actual, expected, delta=0.0000001):
    """Assert both components of a 2-tuple are within *delta* of expected."""
    actual_first, actual_second = actual[0], actual[1]
    assert_that(actual_first, close_to(expected[0], delta))
    assert_that(actual_second, close_to(expected[1], delta))
def test_column_kdjk(self):
    """3-day KDJ %K values for two dates on the 20-day fixture."""
    stock = self.get_stock_20day()
    kdjk_series = stock['kdjk_3']
    assert_that(kdjk_series.loc[20110104], close_to(60.52, 0.01))
    assert_that(kdjk_series.loc[20110120], close_to(31.21, 0.01))
def test_pool_read_size_kb(self):
    """Pool read size in KB is close to the fixture value."""
    read_size = self.pool.read_size_kb
    assert_that(read_size, close_to(2018, 1))
def test_GIVEN_no_missing_vals_WHEN_spearman_THEN_spearman_correct(self):
    """Spearman rank correlation of complete datasets is exactly 1."""
    analyzer = StatsAnalyzer(self.data1, self.data2)
    results = analyzer.spearmans_rank()
    assert_that(results[0].spearman, close_to(1.0, 1e-5))
def test_GIVEN_no_missing_vals_WHEN_abs_mean_THEN_abs_mean_correct(self):
    """Mean absolute difference with complete data matches expectation."""
    analyzer = StatsAnalyzer(self.data1, self.data2)
    results = analyzer.abs_mean()
    assert_that(results[0].abs_mean, close_to(0.09, 1e-5))
def test_GIVEN_missing_vals_WHEN_rel_stddev_THEN_rel_stddev_correct(self):
    """Relative stddev with masked values in both datasets is correct."""
    analyzer = StatsAnalyzer(self.missing1, self.missing2)
    results = analyzer.rel_stddev()
    assert_that(results[0].rel_stddev, close_to(0.1930820326, 1e-5))
def test_GIVEN_missing_vals_WHEN_count_THEN_mean_correct(self):
    """Means of both datasets are correct when values are masked."""
    analyzer = StatsAnalyzer(self.missing1, self.missing2)
    results = analyzer.means()
    assert_that(results[0].mean, close_to(-13.5, 1e-5))
    assert_that(results[1].mean, close_to(-16.783333333, 1e-5))
def test_GIVEN_missing_vals_WHEN_stddev_THEN_stddev_correct(self):
    """Standard deviations of both masked datasets are correct."""
    analyzer = StatsAnalyzer(self.missing1, self.missing2)
    results = analyzer.stddevs()
    assert_that(results[0].stddev, close_to(42.5099988238, 1e-5))
    assert_that(results[1].stddev, close_to(50.6813344997, 1e-5))
def test_column_rate_plus2(self):
    """Forward 2-day open rate exists for mid-series dates but is NaN for
    the final dates where no future data exists."""
    stock = self.get_stock_20day()
    forward_rate = stock['open_2_r']
    assert_that(forward_rate.loc[20110118], close_to(-1.566, 0.001))
    assert_that(isnan(forward_rate.loc[20110119]), equal_to(True))
    assert_that(isnan(forward_rate.loc[20110120]), equal_to(True))
def check_diversification(context):
    """Capacity should be spread roughly evenly across all markets."""
    expected_share = context.desired_target_capacity / len(_MARKETS)
    for market in _MARKETS:
        market_weight = context.spot_fleet._instance_types[market].weight
        weighted_size = context.spot_fleet.market_size(market) * market_weight
        assert_that(weighted_size, close_to(expected_share, 5.0))
def test_GIVEN_one_masked_one_nparray_WHEN_spearman_THEN_spearman_correct(
        self):
    """Spearman rank works with one masked and one plain array."""
    analyzer = StatsAnalyzer(self.data1, self.missing2)
    results = analyzer.spearmans_rank()
    assert_that(results[0].spearman, close_to(0.2142857143, 1e-5))
def test_GIVEN_missing_vals_WHEN_spearman_THEN_spearman_correct(self):
    """Spearman rank with masked values in both datasets is correct."""
    analyzer = StatsAnalyzer(self.missing1, self.missing2)
    results = analyzer.spearmans_rank()
    assert_that(results[0].spearman, close_to(0.9428571429, 1e-5))
def test_column_rate_prev(self):
    """Day-over-day rate column value for a single date."""
    stock = self.get_stock_20day()
    rate_series = stock['rate']
    assert_that(rate_series.loc[20110107], close_to(4.41, 0.01))
def test_sg_read_size_kb(self):
    """Storage-group read size in KB is close to the fixture value."""
    read_size = self.sg.read_size_kb
    assert_that(read_size, close_to(2018, 1))
def test_GIVEN_no_missing_vals_WHEN_mean_THEN_mean_correct(self):
    """Means of both complete datasets are correct."""
    analyzer = StatsAnalyzer(self.data1, self.data2)
    results = analyzer.means()
    assert_that(results[0].mean, close_to(5.1, 1e-5))
    assert_that(results[1].mean, close_to(5.19, 1e-5))