def test_merge_rts_2d_intersect(self):
    """Test merging of two rts of 2-d data that have an intersection."""
    # spy.math was an alias of the stdlib math module and has been removed
    # from modern numpy/scipy; plain integer powers are exact anyway.
    d1 = [[1.0, 2.0]] * (2 ** 10)
    d2 = [[3.0, 4.0]] * (2 ** 11)
    st1 = datetime.datetime(year=1990, month=2, day=3, hour=11, minute=15)
    st2 = datetime.datetime(year=1990, month=3, day=3, hour=11, minute=15)
    ts1 = rts(d1, st1, time_interval(hours=1))
    ts2 = rts(d2, st2, time_interval(hours=1))
    # Intentionally put some nan into ts1 to see the effect on merging.
    ii = ts1.index_after(st2)
    num_nan = 10
    ts1.data[ii + 1:ii + num_nan + 2, ] = spy.nan
    nt = merge(ts1, ts2)
    self.assertEqual(nt.start, ts1.start)
    self.assertEqual(nt.end, ts2.end)
    total_n = number_intervals(ts1.start, ts2.end, ts1.interval) + 1
    self.assertEqual(len(nt.data), total_n)
    s1 = nt.index_after(ts1.end)
    s3 = ts2.index_after(ts1.end)
    # self.assert_ was removed in Python 3.12; assertTrue is the
    # long-standing replacement with identical behavior.
    # Before the overlap, ts1 data is used untouched.
    self.assertTrue(spy.allclose(nt.data[0:ii + 1, ], ts1.data[0:ii + 1, ]))
    # Inside the overlap, nan holes in ts1 are filled from ts2.
    self.assertTrue(spy.allclose(nt.data[ii + 1:ii + num_nan + 2, ],
                                 ts2.data[0:num_nan + 1, ]))
    # Valid ts1 data keeps priority for the rest of the overlap.
    self.assertTrue(spy.allclose(nt.data[ii + num_nan + 2:s1 + 1, ],
                                 ts1.data[ii + num_nan + 2:s1 + 1, ]))
    # After ts1 ends, only ts2 contributes.
    self.assertTrue(spy.allclose(nt.data[s1 + 1:len(nt.data), ],
                                 ts2.data[s3 + 1:len(ts2.data), ]))
def test_merge_rts_no_intersect(self):
    """Test merging two rts without intersection."""
    # spy.math is a removed alias of stdlib math; integer powers suffice.
    d1 = [1] * (2 ** 10)
    d2 = [2] * (2 ** 11)
    st1 = datetime.datetime(year=1990, month=2, day=3, hour=11, minute=15)
    st2 = datetime.datetime(year=1990, month=5, day=3, hour=11, minute=15)
    ts1 = rts(d1, st1, time_interval(hours=1))
    ts2 = rts(d2, st2, time_interval(hours=1))
    nt = merge(ts1, ts2)
    self.assertEqual(nt.start, ts1.start)
    self.assertEqual(nt.end, ts2.end)
    total_n = number_intervals(ts1.start, ts2.end, ts1.interval) + 1
    self.assertEqual(len(nt.data), total_n)
    s1 = nt.index_after(ts1.end)
    s2 = nt.index_after(ts2.start)
    # The gap between the two series must be filled with nan.
    # spy.alltrue is a removed alias; the ndarray .all() method is the
    # portable equivalent. self.assert_ was removed in Python 3.12.
    self.assertTrue(spy.isnan(nt.data[s1 + 1:s2]).all())
    self.assertTrue(spy.allclose(nt.data[0:s1 + 1], ts1.data))
    self.assertTrue(spy.allclose(nt.data[s2:len(nt.data)], ts2.data))
def test_merge_large_rts(self):
    """Merge four consecutive large regular series and check the seams."""
    largenum = 250000
    step = datetime.timedelta(minutes=15)
    # Build four back-to-back chunks covering 0 .. 4*largenum.
    pieces = []
    chunk_start = datetime.datetime(1920, 1, 2)
    for k in range(4):
        piece = rts(range(k * largenum, (k + 1) * largenum),
                    chunk_start, step, {})
        pieces.append(piece)
        chunk_start = piece.end + step
    merged = merge(pieces[0], pieces[1], pieces[2], pieces[3])
    self.assertEqual(len(merged), largenum * 4)
    # Each seam must carry the first value of the following chunk.
    for k in (1, 2, 3):
        self.assertEqual(merged.data[k * largenum], k * largenum)
def test_multidimension_tsdata(self):
    """ Test interpolation methods on multi-dimensional data set."""
    msgstr = "on test_multidimension_tsdata"
    num = 1000
    data = [sin(2.0 * pi * k / 250.0) for k in range(num)]
    # num / 4 is a float under Python 3 and reshape rejects it;
    # integer division gives the intended (250, 4) shape.
    data = sciarray(data).reshape(num // 4, 4)
    # 'id' shadowed the builtin; use a descriptive local name.
    col_id = 1
    ts = rts(data, datetime(year=1990, month=1, day=2),
             parse_interval("1hour"), {})
    ts_single_dimension = rts(data[:, col_id],
                              datetime(year=1990, month=1, day=2),
                              parse_interval("1hour"), {})
    times = time_sequence(datetime(year=1990, month=1, day=3),
                          parse_interval("1hour"), 50)
    # Column col_id of the multivariate series must equal the 1-d series.
    self.assertEqual(abs(ts_single_dimension.data - ts.data[:, col_id]).max(),
                     0.0)
    ## rhsit won't pass this test
    function_to_test = [linear, spline, monotonic_spline]
    for funcs in function_to_test:
        nts = funcs(ts, times, filter_nan=False)
        nts_single_dimension = funcs(ts_single_dimension, times,
                                     filter_nan=False)
        self.assertEqual(len(nts), len(times),
                         msg="test of %s fail %s." % (funcs.__name__, msgstr))
        self.assertEqual(nts.data.shape[1], ts.data.shape[1],
                         msg="test of %s fail %s." % (funcs.__name__, msgstr))
        # Interpolating the single column must agree with interpolating
        # the full array and slicing out that column.
        self.assertEqual(abs(nts_single_dimension.data -
                             nts.data[:, col_id]).max(), 0.0)
def _retrieve_rts_all_time(book, selection, **args):
    """Return ts when all times are given to the left of each data column.

    Parameters
    ----------
    book : Excel COM workbook object
    selection : str
        Range selector parsed by _parse_range.
    args : dict
        Optional "header_labels" entry forwarded to header retrieval.

    Returns
    -------
    A single time series when one column pair is found, otherwise a list.
    """
    rvar = _parse_range(selection)
    sheet = book.Worksheets(rvar[0])
    # dict.get replaces the "in args.keys()" two-step lookup.
    header_labels = args.get("header_labels", None)
    (header_lst, data_start_row) = _retrieve_headers_by_tryerror(
        sheet, rvar, header_labels)
    data_range = sheet.Range(rvar[1] + str(data_start_row) + ":"
                             + rvar[3] + str(data_start_row))
    ts_time_data = data_range.Value
    # Each series occupies a (time, data) column pair. Integer division
    # keeps num_ts an int under Python 3; a float would break range() below.
    num_ts = len(ts_time_data[0]) // 2
    tsl = []
    t1 = sheet.Range(rvar[1] + str(data_start_row))
    col = t1.Columns[0].Column
    t2 = sheet.Cells(data_start_row + 3, col)
    d1 = sheet.Cells(data_start_row, col + 1)
    d2 = sheet.Cells(rvar[4], col + 1)
    for i in range(0, num_ts):
        ts_time = sheet.Range(t1, t2).Value
        tt0 = ts_time[0][0]
        tt1 = ts_time[1][0]
        # Normalize COM timestamps to naive datetimes at minute precision.
        tt0 = datetime(tt0.year, tt0.month, tt0.day, tt0.hour, tt0.minute)
        tt1 = datetime(tt1.year, tt1.month, tt1.day, tt1.hour, tt1.minute)
        data = sheet.Range(d1, d2).Value
        data = array(data)
        if data.shape[1] == 1:
            data = data.flatten()
        # The first two timestamps give start and interval of the series.
        if header_lst:
            ts = rts(data, tt0, tt1 - tt0, header_lst[i])
        else:
            ts = rts(data, tt0, tt1 - tt0)
        tsl.append(ts)
        # Advance to the next (time, data) column pair.
        col = col + 2
        t1 = sheet.Cells(data_start_row, col)
        t2 = sheet.Cells(data_start_row + 3, col)
        d1 = sheet.Cells(data_start_row, col + 1)
        d2 = sheet.Cells(rvar[4], col + 1)
    if len(tsl) == 1:
        return tsl[0]
    else:
        return tsl
def ts_split(ts, shared=True):
    """ Splits a 2D multivariate series into constituent univariate series.

    Parameters
    ----------
    ts : :class:`~vtools.data.timeseries.TimeSeries`
        Series to split; its data must be a 2D array.
    shared : boolean, optional
        If True the returned series share the data array of the input,
        otherwise each receives its own copy.

    Returns
    -------
    out : tuple of :class:`~vtools.data.timeseries.TimeSeries`
        One univariate component series per column of the input.

    Raises
    ------
    ValueError
        If the series data is not two-dimensional.
    """
    # Reject 1D input explicitly as well: previously it slipped past the
    # "> 2" guard and died with an IndexError at shape[1].
    if ts.data.ndim != 2:
        raise ValueError("Only 2D arrays can be split")
    dsource = ts.data if shared else ts.data.copy()
    out = []
    for jcol in range(dsource.shape[1]):
        if ts.is_regular():
            colts = rts(dsource[:, jcol], ts.start, ts.interval)
        else:
            colts = its(ts.ticks, dsource[:, jcol])
        out.append(colts)
    return tuple(out)
def test_butterworth(self):
    """Butterworth filter on a series of 1-hour interval with four
    frequencies.
    """
    # Test operations on ts of varied values.
    cases = [(pd.Timestamp(1990, 2, 3, 11, 15), self.two_to_ten, hours(1))]
    pi = np.pi
    # Angular steps per hourly sample for components at 0.76, 0.44,
    # 0.95 and 1.23 cycles/day.
    omega = [f * pi / 12. for f in (0.76, 0.44, 0.95, 1.23)]
    for (start, num, delta) in cases:
        data = [np.sin(omega[0] * k)
                + 0.7 * np.cos(omega[1] * k)
                + 2.4 * np.sin(omega[2] * k)
                + 0.1 * np.sin(omega[3] * k)
                for k in np.arange(num)]
        # This is the original, unfiltered series.
        unfiltered = rts(data, start, delta)
        filtered = butterworth(unfiltered, cutoff_period=hours(40))
        # Filtering must preserve the sampling frequency.
        self.assertTrue(filtered.index.freq == unfiltered.index.freq)
def read_csv_from_dss(fpath):
    """ Read CSV file from dss
        Assume LST (no daylight saving)

        Parameters
        ----------
        fpath: str
            file path

        Returns
        -------
        list of vtools.data.timeseries.TimeSeries
            regular time series, one per station column.
    """
    # Mode 'U' was removed in Python 3.11; universal newlines are the
    # default in text mode, and csv wants newline='' for correct quoting.
    with open(fpath, 'r', newline='') as f:
        reader = csv.reader(f)
        data = []
        times = []
        stations = []
        for i, row in enumerate(reader):
            if i == 1:
                # The second row carries the station names.
                stations = row[2:]
            elif i > 6:
                # Data rows start after six header rows; column 1 holds
                # timestamps like '01JAN2000 0000'.
                t = datetime(*strptime(row[1], r'%d%b%Y %H%M')[:5])
                times.append(t)
                row_data = list(map(convert_string_to_float, row[2:]))
                data.append(row_data)
    data = np.array(data)
    # Interval inferred from the first two timestamps (series is regular).
    return [rts(data[:, i], times[0], times[1] - times[0],
                props={'unit': 'ec', 'name': stations[i]})
            for i in range(len(stations))]
def test_period_op3(self):
    """ Test period operation on time series with 2-dimensional data."""
    st = datetime.datetime(year=1990, month=2, day=3, hour=11, minute=15)
    num = 3005
    dimension2 = 3
    delta = time_interval(minutes=5)
    interval = '1 hour'
    op_delta = time_interval(hours=1)
    aligned_start = datetime.datetime(year=1990, month=2, day=3, hour=12,
                                      minute=0)
    # Random 2-d data: num rows by dimension2 columns.
    data = [[random.uniform(self.min_val, self.max_val) for i in range(dimension2)]
            for k in range(num)]
    data = sciarray(data)
    # Reformalize raw data, insert known min_val and max_val
    # and calculate hourly mean to use later.
    i0 = 9  # this is the first index with aligned calendar
    num_interval = (num - i0 + 1) // 12
    # Plant one known minimum and one known maximum inside every
    # 12-sample (one hour) group so MIN/MAX results are predictable.
    for k in range(num_interval):
        index = i0 + k * 12 + 1
        data[index, ] = self.min_val
        index = index + 1
        data[index, ] = self.max_val
    # Expected hourly statistics computed directly on the 12-sample groups.
    nt_data = data[i0:12 * num_interval + i0, ]
    nt_data = sciarray(nt_data)
    nt_data.shape = (num_interval, 12, -1)
    nt_mean = sciadd.reduce(nt_data, 1, ) / 12
    nt_sum = sciadd.reduce(nt_data, 1, )
    nt_min = sciminimum.reduce(nt_data, 1, )
    nt_max = scimaximum.reduce(nt_data, 1, )
    ts = rts(data, st, delta, {})
    # period_op over '1 hour' must reproduce each directly-computed stat.
    for (op, op_data) in [(MIN, nt_min), (MAX, nt_max), (MEAN, nt_mean),
                          (SUM, nt_sum)]:
        nt = period_op(ts, interval, op)
        assert_array_equal(nt.data, op_data,
                           err_msg="two array not equal in average"
                           " by %s" % (op))
def test_period_op_uncompatible_interval(self):
    """ Test behaviour of period operation on TS with interval
        uncompatible with operation time interval
    """
    # (start, length, sample interval, op interval string, op interval)
    cases = [
        (datetime.datetime(year=1990, month=2, day=3, hour=11, minute=15),
         3005, time_interval(minutes=45), "1hour", time_interval(hours=1)),
        (datetime.datetime(year=1990, month=2, day=3, hour=11, minute=15),
         3301, time_interval(days=1), "1hour", time_interval(hours=1)),
        (datetime.datetime(year=1990, month=1, day=1, hour=0, minute=0),
         10957, time_interval(days=2), "1 month", time_interval(months=1)),
        (datetime.datetime(year=1990, month=1, day=1, hour=0, minute=0),
         10957, time_interval(minutes=35), "3 days", time_interval(days=3)),
        (datetime.datetime(year=1990, month=1, day=1, hour=0, minute=0),
         59, time_interval(months=5), "3 years", time_interval(years=3)),
    ]
    for (start, count, delta, interval, _op_delta) in cases:
        values = [random.uniform(self.min_val, self.max_val)
                  for _ in range(count)]
        series = rts(values, start, delta, {})
        # Every operation must reject the incompatible interval.
        for op in (MIN, MAX, MEAN, SUM):
            self.assertRaises(ValueError, period_op, series, interval, op)
def test_save_data(self):
    """Save an rts and an its (with header props) into a dss file and
    read them back through data references.
    """
    ## save rts first.
    data = range(1000)
    start = "12/21/2000 2:00"
    interval = "1hour"
    prop = {}
    prop[TIMESTAMP] = PERIOD_START
    prop[AGGREGATION] = MEAN
    prop["datum"] = "NGVD88"
    prop["manager"] = "John Doe"
    prop["model"] = "hydro 7.5"
    rt1 = rts(data, start, interval, prop)
    id = "vtools.datastore.dss.DssService"
    path = "/TEST/DOWNSTREAM/EC//1HOUR/STAGE/"
    source = self.test_file_path
    data_ref = DataReference(id, source=source, selector=path)
    self.dss_service.add_data(data_ref, rt1)
    dssc = self.dss_service.get_catalog(source)
    path = "/TEST/DOWNSTREAM/EC//1HOUR/STAGE/"
    # .next() is Python-2-only; the next() builtin works on both.
    data_ref = next(dssc.data_references(path))
    rtt = self.dss_service.get_data(data_ref)
    self.assertTrue(len(rtt) == len(data))
    self.assertTrue(rtt.props[TIMESTAMP] == PERIOD_START)
    self.assertTrue(rtt.props[AGGREGATION] == MEAN)
    # Was assertTrue(a, b), which treats b as the failure *message* and
    # never compares; assertEqual performs the intended check.
    self.assertEqual(rtt.times[0], dtm.datetime(2000, 12, 21, 2))
    extent = "time_window=(12/21/2000 02:00,01/31/2001 18:00)"
    data_ref = DataReference(id, source, None, path, extent)
    rtt2 = self.dss_service.get_data(data_ref)
    self.assertTrue(rtt.start == rtt2.start)
    self.assertTrue(rtt.end == rtt2.end)
    ## then its.
    path = "/HERE/IS/ITS//IR-YEAR/TEST/"
    data = range(20)
    data_ref = DataReference(id, source=source, selector=path)
    prop[AGGREGATION] = INDIVIDUAL
    times = ["01/15/1997", "02/17/1997", "03/5/1997",
             "04/25/1997", "05/1/1997", "06/15/1997",
             "07/25/1997", "08/14/1997", "09/17/1997",
             "10/15/1997", "11/21/1997", "12/3/1997",
             "01/9/1998", "02/15/1998", "03/19/1998",
             "04/15/1998", "05/19/1998", "06/30/1998",
             "07/15/1998", "08/24/1998"]
    # Materialize the map: Python 3 map() returns a lazy iterator.
    times = list(map(parse_time, times))
    itt = its(times, data, prop)
    self.dss_service.add_data(data_ref, itt)
    extent = "time_window=(1/10/1997 02:00,09/30/1998 18:00)"
    data_ref = DataReference(id, source, None, path, extent)
    rtt3 = self.dss_service.get_data(data_ref)
    # The window clips to the first/last sample inside it.
    self.assertTrue(parse_time("01/15/1997") == rtt3.start)
    self.assertTrue(parse_time("08/24/1998") == rtt3.end)
def test_butterworth(self):
    """ Butterworth filter on a series of 1hour interval with four
    frequencies.
    """
    # Test operations on ts of varied values.
    # numpy.math was a removed alias of the stdlib math module (gone in
    # NumPy 2.0); integer powers and numpy ufuncs give the same values.
    test_ts = [(datetime.datetime(year=1990, month=2, day=3, hour=11,
                                  minute=15),
                2 ** 10, time_interval(hours=1))]
    f1 = 0.76
    f2 = 0.44
    f3 = 0.95
    f4 = 1.23
    av1 = f1 * pi / 12
    av2 = f2 * pi / 12
    av3 = f3 * pi / 12
    av4 = f4 * pi / 12
    # (removed an unused "import pylab" that dragged in a GUI backend)
    for (st, num, delta) in test_ts:
        ## this data contains components with frequencies of 0.76/day,
        ## 0.44/day, 0.95/day, 1.23/day
        data = [numpy.sin(av1 * k) + 0.7 * numpy.cos(av2 * k)
                + 2.4 * numpy.sin(av3 * k) + 0.1 * numpy.sin(av4 * k)
                for k in range(num)]
        # This ts is the original one.
        ts0 = rts(data, st, delta, {})
        ts = butterworth(ts0)
        # self.assert_ was removed in Python 3.12; assertTrue is the
        # equivalent replacement.
        self.assertTrue(ts.is_regular())
def test_save2newf(self):
    """ try to save ts to a non exist file."""
    ## save rts first.
    prop = {
        TIMESTAMP: PERIOD_START,
        AGGREGATION: MEAN,
        "datum": "NGVD88",
        "manager": "John Doe",
        "model": "hydro 7.5",
    }
    series = rts(range(1000), "12/21/2000 2:00", "1hour", prop)
    service_id = "vtools.datastore.dss.DssService"
    selector = "/TEST/DOWNSTREAM/EC//1HOUR/STAGE/"
    source = 'newdss.dss'
    reference = DataReference(service_id, source=source, selector=selector)
    self.dss_service.add_data(reference, series)
    # Writing to a missing file must create it.
    self.assertTrue(os.path.exists(source))
def _bind(ts1, ts2):
    """ bind data from timeseries ts1 and ts2.

    Combines two univariate series into one two-column series on the
    union of their time points, padding missing samples with nan.

    Parameters
    ----------
    ts1,ts2 : :class:`~vtools.data.timeseries.TimeSeries`
        Two univariate timeseries.

    Returns
    -------
    merged : :class:`~vtools.data.timeseries.TimeSeries`
        A new bound time series if successful; column 0 holds ts1 data,
        column 1 holds ts2 data.

    Raises
    ------
    ValueError
        If either input is not univariate.
    """
    if (not ((ts1.data.ndim == 1) and (ts2.data.ndim == 1))):
        raise ValueError("bind only support time series of univariate")
    ts = None
    ts_is_regular = False
    new_ts_time_sequence = []
    new_start = None
    new_interval = None
    if ((ts1.is_regular()) and (ts2.is_regular())):
        ts1_start = ts1.times[0]
        ts1_end = ts1.times[-1]
        ts2_start = ts2.times[0]
        ts2_end = ts2.times[-1]
        # Combined span: earliest start to latest end of the two series.
        new_start = ts1_start
        if new_start > ts2_start:
            new_start = ts2_start
        new_end = ts1_end
        if new_end < ts2_end:
            new_end = ts2_end
        # Use the finer (smaller) of the two intervals for the result.
        new_interval = ts1.interval
        ts2_interval = ts2.interval
        if new_interval > ts2_interval:
            new_interval = ts2_interval
        num_data = number_intervals(new_start, new_end, new_interval) + 1
        new_ts_time_sequence = time_sequence(new_start, new_interval,
                                             num_data)
        ts_is_regular = True
    else:
        # Irregular case: use the sorted union of both tick sets.
        new_ts_time_sequence = np.union1d(ts1.ticks, ts2.ticks)
    new_ts_len = len(new_ts_time_sequence)
    # One row per input series, nan where that series has no sample.
    new_data = np.array([[np.nan] * new_ts_len, [np.nan] * new_ts_len])
    # Map each input's ticks onto positions in the combined sequence.
    ts1_data_id = np.searchsorted(new_ts_time_sequence, ts1.ticks)
    ts2_data_id = np.searchsorted(new_ts_time_sequence, ts2.ticks)
    new_data[0, ts1_data_id] = ts1.data
    new_data[1, ts2_data_id] = ts2.data
    # Transpose to (time, 2) layout expected by the series constructors.
    new_data = new_data.transpose()
    if ts_is_regular:
        ts = rts(new_data, new_start, new_interval)
    else:
        ts = its(new_ts_time_sequence, new_data)
    return ts
def test_ts_accumulate(self):
    """Accumulating with add keeps the length and ends at the total sum."""
    start = datetime(year=1990, month=2, day=3, hour=11, minute=45)
    n = 100
    series = rts(range(n), start, days(1))
    accumulated = ts_accumulate(series, numpyadd)
    self.assertEqual(len(accumulated), n)
    self.assertEqual(accumulated.data[-1], sum(range(n)))
def test_butterworth_noevenorder(self):
    """ test a butterworth with non even order input """
    start = datetime.datetime(year=2000, month=2, day=3)
    step = time_interval(hours=1)
    series = rts(arange(100), start, step, {})
    odd_order = 7
    # An odd filter order must be rejected.
    self.assertRaises(ValueError, butterworth, series, odd_order)
def test_butterworth_noevenorder(self):
    """ test a butterworth with non even order input """
    series = rts(np.arange(100), pd.Timestamp(2000, 2, 3), hours(1))
    odd_order = 7
    # An odd filter order must be rejected.
    self.assertRaises(ValueError, butterworth, series, odd_order)
def test_ts_minimum(self):
    """Elementwise minimum of a ramp series against a scalar ceiling.

    The original body called ts_maximum, so ts_minimum was never
    exercised; fixed to call ts_minimum with matching expectations.
    """
    ts_start = datetime(year=1990, month=2, day=3, hour=11, minute=45)
    ts_len = 100
    ts_intvl = days(1)
    data = range(ts_len)
    ts0 = rts(data, ts_start, ts_intvl)
    tsmin = ts_minimum(ts0, 50)
    # Below the ceiling the value is untouched: min(49, 50) == 49 ...
    self.assertEqual(tsmin[49].value, 49.)
    # ... above it the ceiling applies: min(51, 50) == 50.
    self.assertEqual(tsmin[51].value, 50.)
def test_merge_rts_intersect2(self):
    """ Test merging two rts with intersection."""
    first = rts([1.0, 1.0, spy.nan, 1.0],
                datetime.datetime(year=1990, month=2, day=1),
                time_interval(months=1))
    second = rts([2.0, 2.0, 2.0, 2.0],
                 datetime.datetime(year=1990, month=3, day=1),
                 time_interval(months=1))
    merged = merge(first, second)
    ## valid data from the first series takes priority ...
    self.assertEqual(merged.data[1], first.data[1])
    ## ... but the second series fills invalid (nan) data in the first
    self.assertEqual(merged.data[2], second.data[2])
def create_rts(self, delta, num):
    """ Only create a regular time series for usage of testing.

    The result is approximately ten years long (depending on delta/num).
    """
    start = datetime(year=1990, month=1, day=1, hour=0, minute=0)
    # Superposition of three slow sinusoids with distinct phases.
    values = [0.3 * sin(k * pi / 1200 + pi / 15)
              + 0.4 * sin(k * pi / 1100 + pi / 6)
              + 1.1 * sin(k * pi / 990 + pi / 18)
              for k in range(num)]
    return rts(values, start, delta, {})
def test_resample_rts_aligned(self):
    """Aligned resampling snaps the start forward to the next whole hour."""
    source = rts(range(100),
                 datetime.datetime(year=2000, month=2, day=1, hour=2,
                                   minute=15),
                 time_interval(minutes=15), {})
    resampled = resample(source, time_interval(hours=1), aligned=True)
    # 02:15 start with hourly resampling must begin at 03:00.
    expected_start = datetime.datetime(year=2000, month=2, day=1, hour=3)
    self.assertEqual(expected_start, resampled.start)
def ts_accumulate(ts, ufunc):
    """Apply ufunc.accumulate to the data and produce a neatened time series

    The function will be applied cumulatively to the data. So...
    ts_accumulate(ts, add) will produce a time series where each entry
    is the cumulative sum up to that index.
    """
    accumulated = ufunc.accumulate(ts.data)
    # Preserve the input's regular/irregular character and props.
    if ts.is_regular():
        return rts(accumulated, ts.start, ts.interval, ts.props)
    return its(ts.ticks, accumulated, ts.props)
def ts_gaussian_filter(ts, sigma, order=0, mode="reflect", cval=0.0):
    """ Wrapper of scipy gaussian_filter.

    Parameters
    -----------
    ts: :class:`~vtools.data.timeseries.TimeSeries`
        Must have data of one dimension, and be regular.
    sigma: int or :ref:`time_interval<time_intervals>`
        Standard deviation for the Gaussian kernel given as a number of
        samples, or as a time interval.
    order: int, optional
        Order of the gaussian filter. Must be one of 0,1,2,3. Default 0.
    mode : {'reflect', 'constant', 'nearest', 'mirror', 'wrap'}, optional
        Determines how the array borders are handled, where cval is the
        value used when mode is 'constant'. Default is 'reflect'.
    cval : scalar, optional
        Value to fill past edges of input if mode is 'constant'.
        Default is 0.0.

    Returns
    -------
    result : :class:`~vtools.data.timeseries.TimeSeries`
        A new regular time series with the same interval as ts.

    Raise
    --------
    ValueError
        If the input time series is not regular, or sigma is neither an
        int nor a time interval.
    """
    if not ts.is_regular():
        raise ValueError("Only regular time series are supported by"
                         " gaussian filter")
    if is_interval(sigma):
        # Convert the interval to an equivalent number of samples.
        # Fixed: this previously read ts.inteval (typo), which raised
        # AttributeError whenever sigma was given as a time interval.
        ticks_sigma = ticks(sigma)
        ticks_interval = ticks(ts.interval)
        sigma_int = int(ticks_sigma / ticks_interval)
    elif isinstance(sigma, int):
        # isinstance replaces the brittle "type(sigma) == type(1)" check.
        sigma_int = sigma
    else:
        raise ValueError("sigma must be int or timeinterval")
    filtered_data = gaussian_filter(ts.data, sigma_int, order=order,
                                    mode=mode, cval=cval)
    return rts(filtered_data, ts.start, ts.interval)
def test_add_data(self):
    """Batch-add several randomly generated series through the excel
    service.
    """
    def rand_gen():
        while True:
            yield random()

    # (start, interval, sample count, header props) for each series.
    specs = [
        ("1/2/1987 10:30", "1hour", 13470,
         {"agency": "dwr", "interval": "1hour", "station": "rsac045",
          "datum": "NGVD88", "var": "flow"}),
        ("3/20/1997 10:30", "1day", 40960,
         {"bearu": "usgs", "interval": "1day", "lat": 70.90,
          "long": 34.45, "datum": "NGVD88", "var": "stage"}),
        ("1/2/1967 4:30", "15min", 20000,
         {"place": "uml", "interval": "15min", "station": "rsac045",
          "datum": "NGVD88", "var": "bod"}),
    ]
    tss = []
    for time_str, interval_str, count, prop in specs:
        start = parse_time(time_str)
        step = parse_interval(interval_str)
        data = list(islice(rand_gen(), count))
        tss.append(rts(data, start, step, prop))
    ref = DataReferenceFactory(EXCEL_DATA_SOURCE, "store.xls",
                               selector="dss2excel$B5")
    self.excel_service.batch_add(ref, tss)
def test_daily_average(self):
    """ Test godin filter on 2-dimensional data set."""
    # Two identical columns of step data at 15-minute spacing.
    column = [1.0] * 800 + [2.0] * 400 + [1.0] * 400
    data = numpy.transpose(numpy.array([column, column]))
    # Poison one row with nan to exercise missing-data handling.
    data[336, :] = numpy.nan
    start = datetime.datetime(year=1990, month=2, day=3, hour=11, minute=15)
    series = rts(data, start, time_interval(minutes=15), {})
    # Smoke test: averaging a 2-d series containing nan must not raise.
    daily_average(series)
def test_norm_Linf(self):
    """L-infinity norm of the difference between two series."""
    # Test operations on ts of varied values.
    start = datetime(year=1990, month=2, day=3, hour=11, minute=0)
    intvl = hours(1)
    values = range(10)
    # Identical series: the difference norm must vanish.
    base = rts(values, start, intvl)
    same = rts(values, start, intvl)
    self.assertEqual(norm_diff_linf(base, same), 0.0)
    # Largest pointwise gap between [1,2,0] and [0,0,1] is |2-0| = 2.
    left = rts([1., 2., 0.], start, intvl)
    right = rts([0., 0., 1.], start, intvl)
    self.assertEqual(norm_diff_linf(left, right), 2.0)
def test_merge_rts_intersect(self):
    """ Test merging two rts with intersection."""
    # spy.math was a removed alias of stdlib math; integer powers suffice.
    d1 = [1.0] * (2 ** 10)
    d2 = [2.0] * (2 ** 11)
    st1 = datetime.datetime(year=1990, month=2, day=3, hour=11, minute=15)
    st2 = datetime.datetime(year=1990, month=3, day=3, hour=11, minute=15)
    ts1 = rts(d1, st1, time_interval(hours=1))
    ts2 = rts(d2, st2, time_interval(hours=1))
    ## Intentionally put some nan into ts1 to see effect.
    ii = ts1.index_after(st2)
    num_nan = 10
    ts1.data[ii + 1:ii + num_nan + 2] = spy.nan
    nt = merge(ts1, ts2)
    self.assertEqual(nt.start, ts1.start)
    self.assertEqual(nt.end, ts2.end)
    total_n = number_intervals(ts1.start, ts2.end, ts1.interval) + 1
    self.assertEqual(len(nt.data), total_n)
    s1 = nt.index_after(ts1.end)
    s2 = nt.index_after(ts2.start)
    s3 = ts2.index_after(ts1.end)
    # self.assert_ was removed in Python 3.12; assertTrue replaces it.
    self.assertTrue(spy.allclose(nt.data[0:ii + 1], ts1.data[0:ii + 1]))
    # nan holes in ts1 get filled from ts2 ...
    self.assertTrue(spy.allclose(nt.data[ii + 1:ii + num_nan + 2],
                                 ts2.data[0:num_nan + 1]))
    # ... valid ts1 data keeps priority through the overlap ...
    self.assertTrue(spy.allclose(nt.data[ii + num_nan + 2:s1 + 1],
                                 ts1.data[ii + num_nan + 2:s1 + 1]))
    # ... and after ts1 ends only ts2 contributes.
    self.assertTrue(spy.allclose(nt.data[s1 + 1:len(nt.data)],
                                 ts2.data[s3 + 1:len(ts2.data)]))
    ## a small test: second series fully inside the first.
    d1 = [1.0] * 4
    d2 = [2.0] * 2
    st1 = datetime.datetime(year=1990, month=2, day=1)
    st2 = datetime.datetime(year=1990, month=3, day=1)
    ts1 = rts(d1, st1, time_interval(months=1))
    ts2 = rts(d2, st2, time_interval(months=1))
    nt = merge(ts1, ts2)
    self.assertEqual(len(nt), len(ts1))
    self.assertEqual(nt.data[-1], ts1.data[-1])
    ## trailing nan in ts1 must be replaced by ts2's value.
    d1 = [1.0, 1.0, 1.0, spy.nan]
    d2 = [2.0, 3.0, 4.0]
    st1 = datetime.datetime(year=1990, month=2, day=1)
    st2 = datetime.datetime(year=1990, month=3, day=1)
    ts1 = rts(d1, st1, time_interval(months=1))
    ts2 = rts(d2, st2, time_interval(months=1))
    nt = merge(ts1, ts2)
    self.assertEqual(len(nt), len(ts1))
    self.assertEqual(nt.data[-1], ts2.data[-1])
def test_bind_multivar(self):
    """ test behaviour of bind on multivariate ts"""
    start = parse_time("1996-2-1")
    interval = parse_interval("1hour")
    col0 = sciarray([1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, 3.0, 3.0, 3.0])
    pair = sciarray([[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, 3.0, 3.0, 3.0],
                     [2.0, 2.1, 2.8, 9.1, 3.2, 0.5, 0.1, 8.1, 1.2, 1.1]])
    # Expected result: three columns (univariate + both bivariate columns).
    expected = sciarray([col0[:], pair[0, :], pair[1, :]]).transpose()
    ts1 = rts(col0, start, interval, {})
    ts2 = rts(pair.transpose(), start, interval, {})
    bound = ts_bind(ts1, ts2)
    self.assertEqual(len(bound), len(ts1))
    self.assertEqual(bound.start, ts1.start)
    self.assertEqual(bound.interval, interval)
    # Every element of every row must match the expected layout.
    for got_row, want_row in zip(bound.data, expected):
        for got, want in zip(got_row, want_row):
            self.assertEqual(got, want)
def test_ts_maximum(self):
    # Checks on a simple increasing ramp series.
    ts_start = datetime(year=1990, month=2, day=3, hour=11, minute=45)
    ts_len = 100
    ts_intvl = days(1)
    data = range(ts_len)
    ts0 = rts(data, ts_start, ts_intvl)
    # NOTE(review): the sibling test_ts_minimum indexes the result of
    # ts_maximum like a series (tsmin[49].value), so ts_maximum
    # presumably returns a time series of elementwise max(x, floor);
    # comparing that to the scalar data[-1] below looks suspicious —
    # confirm whether this line was meant to use ts_max instead.
    tsmax = ts_maximum(ts0, 0)
    self.assertEqual(tsmax, data[-1])
    time_window = (ts0.times[5], ts0.times[30])  # unused below — TODO confirm
    truemax = data[30]
    # Maximum over the window [times[5], times[30]] of an increasing
    # ramp is the value at the right edge.
    tsmax = ts_max(ts0.window(ts0.times[5], ts0.times[30]))
    self.assertEqual(tsmax, truemax)
def test_ts_sum(self):
    """Sum over a whole ramp series and over a window of it."""
    start = datetime(year=1990, month=2, day=3, hour=11, minute=45)
    n = 100
    values = range(n)
    series = rts(values, start, days(1))
    # Whole-series sum equals the arithmetic total.
    self.assertEqual(ts_sum(series), sum(values))
    # The expected windowed sum mirrors the original test's data[5:31]
    # slice over the window [times[5], times[30]].
    lo, hi = series.times[5], series.times[30]
    self.assertEqual(ts_sum(series.window(lo, hi)), sum(range(5, 31)))