Example #1
    def test_save_data(self):
        ## Save some ts into the DSS file; the ts may contain a header.

        ## Save a regular time series (rts) first.
        data = range(1000)
        start = "12/21/2000 2:00"
        interval = "1hour"
        prop = {}
        prop[TIMESTAMP] = PERIOD_START
        prop[AGGREGATION] = MEAN

        prop["datum"] = "NGVD88"
        prop["manager"] = "John Doe"
        prop["model"] = "hydro 7.5"

        rt1 = rts(data, start, interval, prop)

        id = "vtools.datastore.dss.DssService"
        path = "/TEST/DOWNSTREAM/EC//1HOUR/STAGE/"
        source = self.test_file_path
        data_ref = DataReference(id, source=source, selector=path)
        self.dss_service.add_data(data_ref, rt1)
        dssc = self.dss_service.get_catalog(source)
        path = "/TEST/DOWNSTREAM/EC//1HOUR/STAGE/"
        data_ref = dssc.data_references(path).next()
        rtt = self.dss_service.get_data(data_ref)
        self.assertTrue(len(rtt) == len(data))
        self.assertTrue(rtt.props[TIMESTAMP] == PERIOD_START)
        self.assertTrue(rtt.props[AGGREGATION] == MEAN)
        self.assertEqual(rtt.times[0], dtm.datetime(2000, 12, 21, 2))
        extent = "time_window=(12/21/2000 02:00,01/31/2001 18:00)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt2 = self.dss_service.get_data(data_ref)
        self.assertTrue(rtt.start == rtt2.start)
        self.assertTrue(rtt.end == rtt2.end)

        ## Then an irregular time series (its).

        path = "/HERE/IS/ITS//IR-YEAR/TEST/"
        data = range(20)
        data_ref = DataReference(id, source=source, selector=path)
        prop[AGGREGATION] = INDIVIDUAL

        times=["01/15/1997","02/17/1997","03/5/1997",\
               "04/25/1997","05/1/1997","06/15/1997",\
               "07/25/1997","08/14/1997","09/17/1997",\
               "10/15/1997","11/21/1997","12/3/1997",\
               "01/9/1998","02/15/1998","03/19/1998",\
               "04/15/1998","05/19/1998","06/30/1998",\
               "07/15/1998","08/24/1998"]

        times = map(parse_time, times)
        itt = its(times, data, prop)
        self.dss_service.add_data(data_ref, itt)
        extent = "time_window=(1/10/1997 02:00,09/30/1998 18:00)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt3 = self.dss_service.get_data(data_ref)
        self.assertTrue(parse_time("01/15/1997") == rtt3.start)
        self.assertTrue(parse_time("08/24/1998") == rtt3.end)
Example #2
    def test_add_data(self):
        def rand_gen():
            while True:
                yield random()

        tss = []

        ## create several ts
        # 1
        st = parse_time("1/2/1987 10:30")
        dt = parse_interval("1hour")
        prop={"agency":"dwr","interval":"1hour","station":"rsac045",\
              "datum":"NGVD88","var":"flow"}
        n = 13470
        data = list(islice(rand_gen(), n))
        ts = rts(data, st, dt, prop)
        tss.append(ts)
        # 2
        st = parse_time("3/20/1997 10:30")
        dt = parse_interval("1day")
        prop={"bearu":"usgs","interval":"1day","lat":70.90,\
              "long":34.45,"datum":"NGVD88","var":"stage"}
        n = 40960
        data = list(islice(rand_gen(), n))
        ts = rts(data, st, dt, prop)
        tss.append(ts)

        # 3
        st = parse_time("1/2/1967 4:30")
        dt = parse_interval("15min")
        prop={"place":"uml","interval":"15min","station":"rsac045",\
              "datum":"NGVD88","var":"bod"}
        n = 20000
        data = list(islice(rand_gen(), n))
        ts = rts(data, st, dt, prop)
        tss.append(ts)

        #
        ref=DataReferenceFactory(EXCEL_DATA_SOURCE,"store.xls",\
                                 selector="dss2excel$B5")

        self.excel_service.batch_add(ref, tss)
Example #3
    def _retrieve_rts_prop(self, dssf, cpath, dparts):
        """ Retrieve all the prop about a rts record.
            returned extent is stamped at the begining of
            period for aggregated data to observe vtools
            tradition, that is different from what user
            will see use HecdssVue
        """

        firstD = dparts[0]
        firstpath = cpath.replace('//', '/' + firstD + '/')
        (header_dic,data,cunits,ctype)=\
        self._retrieve_regular_header(dssf,firstpath)

        ## find out start datetime
        ce = firstpath.split('/')[5]
        interval = parse_interval(ce)
        cd = firstD
        start = parse_time(cd)

        valid_start = discover_valid_rts_start(data, start, interval)

        ## find out start datetime of ending data block.
        lastD = dparts[-1]
        lastpath = cpath.replace('//', '/' + lastD + '/')
        (header_dic,data,cunits,ctype)=\
        self._retrieve_regular_header(dssf,lastpath)
        end = parse_time(lastD)
        valid_end = discover_valid_rts_end(data, end, interval)

        ## A DSS file stamps aggregated data at the end of the
        ## aggregating period, so the valid data period begins one
        ## interval earlier; no such shift is needed for the end,
        ## which is already stamped at the end of its aggregating
        ## period (sketched after this example).
        if ctype in RTS_DSS_PROPTY_TO_VT.keys():
            valid_start = valid_start - interval

        time_window = (valid_start, valid_end)

        return (time_window, header_dic, cunits, ctype)
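The stamping shift above can be checked with plain datetimes: DSS stamps a period-averaged value at the end of its period, while vtools labels it at the period start, so the catalogued start is moved back one interval. A minimal sketch using only the standard library (illustrative only, not the vtools implementation):

    from datetime import datetime, timedelta

    interval = timedelta(days=1)
    dss_stamp = datetime(2000, 3, 14)      # DSS stamps the daily mean at the end of its period
    vtools_stamp = dss_stamp - interval    # vtools labels the same value at the period start
    print(vtools_stamp)                    # 2000-03-13 00:00:00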
Example #4
    def _add_data(self, data_reference, ts):
        """ Save a timesereis to the place referenced by data_reference.
        """

        if type(ts) == TimeSeries:
            ## stuff header here.
            clabels = []
            citems = []

            if ts.is_regular():
                (flags,lflags,cunits,ctype,cdate,ctime,cprops)=\
                validate_rts_for_dss(ts)

                if cprops:
                    for key in cprops.keys():
                        val = cprops[key]
                        clabels.append(key)
                        citems.append(val)

                headu, nheadu = self._stuff_header(clabels, citems)
                ## Use cdate and ctime to parse the new start and end times,
                ## because the start point may have been moved one interval to
                ## conform to the DSS storage format (a round-trip sketch of
                ## this date format follows this example).
                stime = parse_time(cdate + " " + ctime[0:2] + ":" + ctime[2:4])
                etime = stime + (ts.end - ts.start)
                self._check_path_ts(data_reference, ts)
                nvals = len(ts)
                values = ts.data
                self._save_regularTS_extend(data_reference,stime,etime,\
                                            nvals,values,ts.interval,flags,\
                                            lflags,cunits,ctype,headu,\
                                            nheadu)
            else:
                itimes,flags,lflags,jbdate,cunits,ctype,cprops=\
                validate_its_for_dss(ts)
                if cprops:
                    for key in cprops.keys():
                        val = cprops[key]
                        clabels.append(key)
                        citems.append(val)

                headu, nheadu = self._stuff_header(clabels, citems)

                nvals = len(ts)
                values = ts.data
                self._save_irregularTS_extend(data_reference,itimes,\
                                              values,nvals,jbdate,flags,\
                                              lflags,cunits,ctype,headu,\
                                              nheadu)
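The cdate/ctime strings handed to the DSS routines use the '%d%b%Y' and '%H%M' formats that also appear in _gen_rts_datetime_nval below. A small round trip with the standard library shows how a start time is serialized and then reparsed the same way _add_data rebuilds it (plain datetime, no vtools helpers):

    from datetime import datetime

    stime = datetime(2000, 12, 21, 2, 0)
    cdate = stime.strftime('%d%b%Y')   # e.g. '21Dec2000'
    ctime = stime.strftime('%H%M')     # e.g. '0200'
    # rebuild the start time the way _add_data does
    restored = datetime.strptime(cdate + " " + ctime[0:2] + ":" + ctime[2:4],
                                 '%d%b%Y %H:%M')
    assert restored == stime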
Example #5
    def test_split_op_regular(self):
        """ Test behaviour of split operation on regular TS."""
        
        #times=sciadd.accumulate(times)
        start = parse_time("1996-2-1")
        interval = parse_interval("1hour")
        data1=sciarray([1.0,1.0,1.0,1.0,1.0,1.0,2.0,3.0,3.0,3.0])
        data2=sciarray([7.0,1.2,10.5,3.0,1.0,1.0,9.0,3.0,0.0,0.2])
        
        ts = rts(sciarray([data1,data2]).transpose(),start,interval)
        
        ts1,ts2 =ts_split(ts,False)
        
        for d1,d2 in zip(ts.data[:,0],ts1.data):
            self.assertEqual(d1,d2)
        for d1,d2 in zip(ts.data[:,1],ts2.data):
            self.assertEqual(d1,d2)
            
        ts1.data[5] = -9999.0
        ts2.data[2] = -9999.0    
        self.assertNotEqual(ts1.data[5],ts.data[5,0])
        self.assertNotEqual(ts2.data[2],ts.data[2,1])
         
        self.assertEqual(ts1.start,ts.start)
        self.assertEqual(ts1.interval,ts.interval)
        self.assertEqual(len(ts1),len(ts))
        self.assertEqual(ts2.start,ts.start)
        self.assertEqual(ts2.interval,ts.interval)
        self.assertEqual(len(ts2),len(ts))
        
        ts1,ts2 =ts_split(ts,True)

        ts1.data[5] = -9999.0
        ts2.data[2] = -9999.0
    
        
        for d1,d2 in zip(ts.data[:,0],ts1.data):
            self.assertEqual(d1,d2)
        for d1,d2 in zip(ts.data[:,1],ts2.data):
            self.assertEqual(d1,d2)
         
        self.assertEqual(ts1.start,ts.start)
        self.assertEqual(ts1.interval,ts.interval)
        self.assertEqual(len(ts1),len(ts))
        self.assertEqual(ts2.start,ts.start)
        self.assertEqual(ts2.interval,ts.interval)
        self.assertEqual(len(ts2),len(ts))
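Whether the two halves above share storage with the original is exactly the view-vs-copy distinction in numpy: a sliced column is a view (writes propagate back), while .copy() detaches it. A sketch under that reading of the shared flag (not the actual ts_split implementation):

    import numpy as np

    data = np.array([[1.0, 7.0], [1.0, 1.2], [1.0, 10.5]])
    col_view = data[:, 0]          # shares memory, like ts_split(ts, True)
    col_copy = data[:, 1].copy()   # independent storage, like ts_split(ts, False)

    col_view[0] = -9999.0
    col_copy[1] = -9999.0
    assert data[0, 0] == -9999.0   # a write through the view reaches the original
    assert data[1, 1] == 1.2       # a write to the copy does not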
Example #6
    def test_bind_op_irregular(self):
        """ Test behaviour of bind operation on irregular TS."""
        times=[12,15,32,38,43,52,84,138,161,172]
        #times=sciadd.accumulate(times)
        start_datetime = parse_time("1996-2-1")
        start_ticks = ticks(start_datetime)
        times=scimultiply(times,ticks_per_minute)
        times=sciadd(times,start_ticks)
        data=sciarray([1.0,1.0,1.0,1.0,1.0,1.0,2.0,3.0,3.0,3.0])
        ts1=its(times,data,{})
        ts2=its(times,data,{})
 
        new_ts = ts_bind(ts1,ts2)
        self.assertEqual(len(new_ts),len(ts1))
        self.assertEqual(new_ts.start,ts1.start)
       
        for (d1,d2),d  in zip(new_ts.data,data):
            self.assertEqual(d1,d)
            self.assertEqual(d2,d)
Example #7
    def test_split_op_irregular(self):
        """ Test behaviour of split operation on irregular TS."""
        times=[12,15,32,38,43,52,84,138,161,172]
        #times=sciadd.accumulate(times)
        start_datetime = parse_time("1996-2-1")
        start_ticks = ticks(start_datetime)
        times=scimultiply(times,ticks_per_minute)
        times=sciadd(times,start_ticks)
        data1=sciarray([1.0,1.0,1.0,1.0,1.0,1.0,2.0,3.0,3.0,3.0])
        data2=sciarray([7.0,1.2,10.5,3.0,1.0,1.0,9.0,3.0,0.0,0.2])

        ts = its(times,sciarray([data1,data2]).transpose())

        ts1,ts2 = ts_split(ts,False)

        for d1,d2 in zip(ts.data[:,0],ts1.data):
            self.assertEqual(d1,d2)
        for d1,d2 in zip(ts.data[:,1],ts2.data):
            self.assertEqual(d1,d2)
        ts1.data[5] = -9999.0
        ts2.data[2] = -9999.0
        self.assertNotEqual(ts1.data[5],ts.data[5,0])
        self.assertNotEqual(ts2.data[2],ts.data[2,1])

        for t1,t2 in zip(ts1.times,ts.times):
            self.assertEqual(t1,t2)
        for t1,t2 in zip(ts2.times,ts.times):
            self.assertEqual(t1,t2)

        ts1,ts2 = ts_split(ts,True)
        ts1.data[5] = -9999.0
        ts2.data[2] = -9999.0

        for d1,d2 in zip(ts.data[:,0],ts1.data):
            self.assertEqual(d1,d2)
        for d1,d2 in zip(ts.data[:,1],ts2.data):
            self.assertEqual(d1,d2)

        for t1,t2 in zip(ts1.times,ts.times):
            self.assertEqual(t1,t2)
        for t1,t2 in zip(ts2.times,ts.times):
            self.assertEqual(t1,t2)
Example #8
    def test_bind_multivar(self):
        """ Test behaviour of bind on multivariate ts."""
        start = parse_time("1996-2-1")
        interval = parse_interval("1hour")
        data1=sciarray([1.0,1.0,1.0,1.0,1.0,1.0,2.0,3.0,3.0,3.0])
        data2t=sciarray([[1.0,1.0,1.0,1.0,1.0,1.0,2.0,3.0,3.0,3.0],
                         [2.0,2.1,2.8,9.1,3.2,0.5,0.1,8.1,1.2,1.1]])
        data2=data2t.transpose()

        data_temp= sciarray([data1[:],data2t[0,:],data2t[1,:]]).transpose()

        ts1=rts(data1,start,interval,{})
        ts2=rts(data2,start,interval,{})

        new_ts = ts_bind(ts1,ts2)
        self.assertEqual(len(new_ts),len(ts1))
        self.assertEqual(new_ts.start,ts1.start)
        self.assertEqual(new_ts.interval,interval)
        for (d1,d2,d3),(dt1,dt2,dt3) in zip(new_ts.data,data_temp):
            self.assertEqual(d1,dt1)
            self.assertEqual(d2,dt2)
            self.assertEqual(d3,dt3)
Example #9
    def test_bind_op_regular(self):
        """ Test behaviour of bind operation on regular TS."""
    
        #times=sciadd.accumulate(times)
        start = parse_time("1996-2-1")
        interval = parse_interval("1hour")
        data=sciarray([1.0,1.0,1.0,1.0,1.0,1.0,2.0,3.0,3.0,3.0])
        ts1=rts(data,start,interval,{})
        ts2=rts(data,start,interval,{})
 
        new_ts = ts_bind(ts1,ts2)
        self.assertEqual(len(new_ts),len(ts1))
        self.assertEqual(new_ts.start,ts1.start)
        self.assertEqual(new_ts.interval,interval)
        for (d1,d2),d  in zip(new_ts.data,data):
            self.assertEqual(d1,d)
            self.assertEqual(d2,d)
        
        ## partial overlap
        start2 = parse_time("1996-2-1 4:00")
        ts2=rts(data,start2,interval,{})
        new_ts = ts_bind(ts1,ts2)
        self.assertEqual(len(new_ts),14)
        self.assertEqual(new_ts.start,ts1.start)
        self.assertEqual(new_ts.times[-1],ts2.times[-1])
        self.assertEqual(new_ts.interval,interval)
        for i in range(4):
            self.assertTrue(isnan(new_ts.data[i,1]))
            self.assertTrue(isnan(new_ts.data[i+10,0]))
        for i in range(10):
            self.assertEqual(new_ts.data[i,0],data[i])
            self.assertEqual(new_ts.data[i+4,1],data[i])
          
        ## no overlap, immediately after
        start2 = parse_time("1996-2-1 10:00")
        ts2=rts(data,start2,interval,{})
        new_ts = ts_bind(ts1,ts2)
        self.assertEqual(len(new_ts),20)
        self.assertEqual(new_ts.start,ts1.start)
        self.assertEqual(new_ts.times[-1],ts2.times[-1])
        self.assertEqual(new_ts.interval,interval)
        for i in range(10):
            self.assertTrue(isnan(new_ts.data[i,1]))
            self.assertTrue(isnan(new_ts.data[i+10,0]))
        for i in range(10):
            self.assertEqual(new_ts.data[i,0],data[i])
            self.assertEqual(new_ts.data[i+10,1],data[i])
        
        ## smaller interval
        start2 = parse_time("1996-2-1 8:00")
        interval2=parse_interval("15min")
        ts2=rts(data,start2,interval2,{})
        new_ts = ts_bind(ts1,ts2)
        self.assertEqual(len(new_ts),42)
        self.assertEqual(new_ts.start,ts1.start)
        self.assertEqual(new_ts.times[-1],ts2.times[-1])
        self.assertEqual(new_ts.interval,interval2)
        ts1_id = [4*x for x in range(10)]
        nan_id =  range(len(new_ts))
        for i in ts1_id:
            nan_id.remove(i) ## the remaining indices are supposed to hold nan
        ts1_val = new_ts.data[ts1_id,0]
        left_val = new_ts.data[nan_id,0]
        for d1,d2 in zip(ts1_val,data):
            self.assertEqual(d1,d2)
        for d in left_val:
            self.assertTrue(isnan(d))
        ts2_id = range(32,42)
        ts2_val = new_ts.data[ts2_id,1]
        for d1,d2 in zip(ts2_val,data):
            self.assertEqual(d1,d2)
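The partial-overlap case above (ts2 starting 4 hours later) amounts to aligning both series on a common hourly axis and padding with NaN where a series has no value, giving 14 rows. A numpy sketch of that alignment (an illustration of the expected layout, not the ts_bind internals):

    import numpy as np

    n, offset = 10, 4                          # ts1 has 10 hourly values; ts2 starts 4 hours later
    data = np.arange(n, dtype=float)
    bound = np.full((n + offset, 2), np.nan)   # 14 shared time points
    bound[:n, 0] = data                        # ts1 fills the first 10 rows of column 0
    bound[offset:, 1] = data                   # ts2 fills the last 10 rows of column 1
    assert bound.shape == (14, 2)
    assert np.isnan(bound[0, 1]) and np.isnan(bound[-1, 0])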
Example #10
    def test_period_op_irregular(self):
        """ Test behaviour of period operation on irregular TS."""
        times = [12, 15, 32, 38, 43, 52, 84, 138, 161, 172]
        #times=sciadd.accumulate(times)
        start_datetime = parse_time("1996-2-1")
        start_ticks = ticks(start_datetime)
        times = scimultiply(times, ticks_per_minute)
        times = sciadd(times, start_ticks)
        data = sciarray([1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, 3.0, 3.0, 3.0])
        ts = its(times, data, {})
        op = MEAN
        ts_op = period_op(ts, "1 hour", op)
        self.assertEqual(len(ts_op), 3)
        self.assertEqual(ts_op.data[0], 1.0)
        self.assertEqual(ts_op.data[1], 2.0)
        self.assertEqual(ts_op.data[2], 3.0)

        times = [0, 15, 32, 38, 43, 52, 60, 120, 138, 161, 180]
        data = sciarray(
            [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, 3.0, 3.0, 3.0, 4.0])
        times = scimultiply(times, ticks_per_minute)
        times = sciadd(times, start_ticks)
        ts = its(times, data, {})
        op = MEAN
        ts_op = period_op(ts, "1 hour", op)
        self.assertEqual(len(ts_op), 4)
        self.assertEqual(ts_op.data[0], 1.0)
        self.assertEqual(ts_op.data[1], 2.0)
        self.assertEqual(ts_op.data[2], 3.0)
        self.assertEqual(ts_op.data[3], 4.0)

        data = sciarray(
            [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 2.0, 3.0, 4.0, 5.0, 4.0])
        ts = its(times, data, {})
        op = MIN
        ts_op = period_op(ts, "1 hour", op)
        self.assertEqual(len(ts_op), 4)
        self.assertEqual(ts_op.data[0], 1.0)
        self.assertEqual(ts_op.data[1], 2.0)
        self.assertEqual(ts_op.data[2], 3.0)
        self.assertEqual(ts_op.data[3], 4.0)

        op = MAX
        ts_op = period_op(ts, "1 hour", op)
        self.assertEqual(len(ts_op), 4)
        self.assertEqual(ts_op.data[0], 6.0)
        self.assertEqual(ts_op.data[1], 2.0)
        self.assertEqual(ts_op.data[2], 5.0)
        self.assertEqual(ts_op.data[3], 4.0)

        times = [0, 15, 28, 30, 58, 64, 80, 90, 91]
        start_datetime = parse_time("1996-1-1")
        start_ticks = ticks(start_datetime)
        times = scimultiply(times, ticks_per_day)
        times = sciadd(times, start_ticks)
        data = sciarray([1.0, 1.0, 1.0, 1.0, 2.0, 3.0, 3.0, 3.0, 4.0])
        ts = its(times, data, {})
        op = MEAN
        ts_op = period_op(ts, "1 month", op)
        self.assertEqual(len(ts_op), 4)
        self.assertEqual(ts_op.data[0], 1.0)
        self.assertEqual(ts_op.data[1], 2.0)
        self.assertEqual(ts_op.data[2], 3.0)
        self.assertEqual(ts_op.data[3], 4.0)
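The expected means in the first block above follow from grouping the minute-stamped samples into whole-hour buckets and averaging each bucket. A standalone sketch of that grouping (plain Python, not the vtools period_op):

    from datetime import datetime, timedelta

    start = datetime(1996, 2, 1)
    minutes = [12, 15, 32, 38, 43, 52, 84, 138, 161, 172]
    values = [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, 3.0, 3.0, 3.0]

    buckets = {}
    for m, v in zip(minutes, values):
        hour = start + timedelta(hours=m // 60)   # truncate to the containing hour
        buckets.setdefault(hour, []).append(v)

    means = [sum(vs) / len(vs) for _, vs in sorted(buckets.items())]
    print(means)    # [1.0, 2.0, 3.0]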
Example #11
    def _gen_rts_datetime_nval(self, data_ref, dssf):
        """ given a rts data_ref returns the character cdate,ctime
            and number of data contained within time extents.
        """
        ## here we requie data_ref must contain time extent for such a info are needed by
        ##fortran lib dss function to retirieve ts, it is best to generate data_ref from
        ##a dss catalog,not directly ,to save th work of finding ts time extent
        if not data_ref.extent:
            raise ValueError("data reference doesn't contain time extent.")

        # A DSS data_ref is expected to have only one extent setting;
        # an extent has a value like ('time_window',("'01/11/1991 10:30:00'",
        # "'02/11/1995 10:30:00'")).
        (dummy, extent) = data_ref.extents()[0]

        # Start time and end time in string.
        stime = extent[0]
        etime = extent[1]
        # Parse string datetime into datetime instance.
        stime = dateutil.parser.parse(stime)
        etime = dateutil.parser.parse(etime)
        path = data_ref.selector
        time_interval = strip(split(path, "/")[5])
        step = parse_interval(time_interval)

        if (stime > etime):
            raise ValueError(
                "input time window start time is behind end time.")

        lfound = False
        ## find out the valid time extent of the ts
        (juls, istime, jule, ietime, cunits, ctype, lqual, ldouble,
         lfound) = dssf.ztsinfox(data_ref.selector)

        firstfound = False
        lastfound = False

        if juls:
            firstfound = True
            lfound = True

        if jule:
            lastfound = True
            lfound = True

        if (not lfound):
            dssf.close()
            raise DssAccessError("input path is invalid %s" %
                                 data_ref.selector)

        ts_start = dss_julian2python(juls, istime)
        ts_end = dss_julian2python(jule, ietime)

        if (not firstfound) or (not lastfound):

            dssf_catalog = self._dss_catalogs[data_ref.source]
            ## filter out other paths
            filtered_catalog = dssf_catalog.filter_catalog(data_ref.selector)
            firstDpart = filtered_catalog.uncondensed_D_parts(0)[0]
            lastDpart = filtered_catalog.uncondensed_D_parts(0)[-1]
            if (not firstfound):
                ts_start = parse_time(firstDpart)
            if (not lastfound):
                ts_end = parse_time(lastDpart)

        ## for aggregated ts, move the time stamp to the beginning of the interval
        if (ctype in RTS_DSS_PROPTY_TO_VT.keys()):
            ts_start = ts_start - step
            ts_end = ts_end - step

        if (etime < ts_start) or (stime > ts_end):
            raise ValueError(
                "input time window is out of valid data extent %s." %
                data_ref.selector)

        if (stime < ts_start):
            stime = ts_start

        ## for aggregated ts, move the end one interval forward to make sure the last data value is included
        if (etime >
            (ts_end + step)) and (ctype in RTS_DSS_PROPTY_TO_VT.keys()):
            etime = ts_end + step
        elif (etime >
              (ts_end)) and (not (ctype in RTS_DSS_PROPTY_TO_VT.keys())):
            etime = ts_end

        cdate = stime.date()
        cdate = cdate.strftime('%d%b%Y')
        ctime = stime.time()
        ctime = ctime.strftime('%H%M')

        right = 1
        left = -1
        if ((align(stime, step, right) != stime)
                and (align(ts_start, step, right) == ts_start)):
            stime = align(stime, step, right)
        if ((align(etime, step, left) != etime)
                and (align(ts_end, step, left) == ts_end)):
            etime = align(etime, step, left)

        if (etime < stime):  ## no data should be returned in such a case
            return iter([(cdate, ctime, 0)])

        ## Here is a fix (a numeric sketch follows this example).
        ## Example: reading an aggregated ts with time window (3/14/2000, 3/15/2000)
        ## and a time interval of 1 day. number_interval returns only one interval for
        ## this time window, which means the dss reading function retrieves only one
        ## value, stamped on 3/14/2000 (3/13/2000 24:00, since a dss file stamps time
        ## at the end of the period), which is actually the data of 3/13/2000 for daily
        ## aggregated data. So we need to read one more value to get the data stamped
        ## at 3/15/2000 (3/14/2000 24:00), which is exactly the aggregated value for
        ## the period (3/14/2000 - 3/15/2000). The extra value is discarded when
        ## converting the data into a vtools ts object.
        ## For instantaneous data this fix also works, because we want the data stamped
        ## on the later side of the time window as well. For instance, with time window
        ## (3/14/2000, 3/15/2000) and an interval of 1 day we want the data on 3/14 and
        ## 3/15, but number_interval(3/14/2000, 3/15/2000) returns only 1 interval, so
        ## we use number_interval(3/14/2000, 3/16/2000) = 2 as the number of values to read.
        etime = increment(etime, step)

        return self._multiple_window(stime, etime, step)
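The fix above boils down to an off-by-one in the interval count: a daily aggregated window (3/14/2000, 3/15/2000) spans one interval, but reading only one value returns the one stamped 3/14 (the 3/13 average under end-of-period stamping), so one extra interval is requested and the surplus value is dropped later. A sketch of that arithmetic with plain datetimes (number_of_intervals below is a local stand-in, not the vtools number_interval):

    from datetime import datetime, timedelta

    def number_of_intervals(start, end, step):
        # whole steps between start and end
        return int((end - start).total_seconds() // step.total_seconds())

    step = timedelta(days=1)
    stime, etime = datetime(2000, 3, 14), datetime(2000, 3, 15)
    naive = number_of_intervals(stime, etime, step)           # 1 -> only the 3/13 average
    padded = number_of_intervals(stime, etime + step, step)   # 2 -> also the 3/14 average
    print(naive, padded)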
Example #12
    def test_read_instant_rts_timewindow(self):
        ## save some ts into the dss file; the ts is hourly-spaced instantaneous data

        ## save rts first.
        data = range(1000)
        start = "12/21/2000 2:00"
        interval = "1hour"
        prop = {}

        rt1 = rts(data, start, interval, prop)

        id = "vtools.datastore.dss.DssService"
        path = "/TEST/DOWNSTREAM/EC//1HOUR/STAGE/"
        source = self.test_file_path
        data_ref = DataReference(id, source=source, selector=path)
        self.dss_service.add_data(data_ref, rt1)

        ## test returning part of the stored data up to the end;
        ## it should get 992 numbers with values (8, 9, ..., 999)
        ## and its start datetime should be 12/21/2000 10:00
        extent = "time_window=(12/21/2000 10:00,01/31/2001 18:00)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt2 = self.dss_service.get_data(data_ref)
        self.assertTrue(rtt2.start == parse_time("12/21/2000 10:00"))
        self.assertTrue(len(rtt2) == 992)
        correct_data = range(8, len(rtt2) + 8)
        for i in range(len(rtt2)):
            self.assertTrue(rtt2.data[i] == float(correct_data[i]))

        ## test returning the middle part of the stored data;
        ## it should get 13 numbers with values (8, 9, ..., 19, 20),
        ## its start datetime should be 12/21/2000 10:00 and end
        ## at 12/21/2000 22:00 (the later side is included; see the
        ## sketch after this example)
        extent = "time_window=(12/21/2000 10:00,12/21/2000 22:00)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt2 = self.dss_service.get_data(data_ref)
        self.assertTrue(rtt2.start == parse_time("12/21/2000 10:00"))
        self.assertTrue(rtt2.end == parse_time("12/21/2000 22:00"))
        self.assertTrue(len(rtt2) == 13)
        correct_data = range(8, len(rtt2) + 8)
        for i in range(len(rtt2)):
            self.assertTrue(rtt2.data[i] == float(correct_data[i]))

        ## test a valid timewindow that overlaps exactly the last data value of
        ## the record
        extent = "time_window=(1/31/2001 17:00,1/31/2001 17:00)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt2 = self.dss_service.get_data(data_ref)
        self.assertTrue(len(rtt2) == 1)
        self.assertTrue(rtt2.data[0] == float(999))

        ## test a valid time window with the same start and end,
        ## exactly at the beginning of the time sequence
        extent = "time_window=(12/21/2000 02:00,12/21/2000 02:00)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt2 = self.dss_service.get_data(data_ref)
        self.assertTrue(len(rtt2) == 1)
        self.assertTrue(rtt2.data[0] == float(0))

        ## test a valid time window that overlaps exactly a data value in the middle
        extent = "time_window=(12/21/2000 05:00,12/21/2000 05:00)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt2 = self.dss_service.get_data(data_ref)
        self.assertTrue(len(rtt2) == 1)
        self.assertTrue(rtt2.data[0] == float(3))

        ## test an invalid time window with the same start and end, not aligned
        ## with the interval
        extent = "time_window=(12/21/2000 05:15,12/21/2000 05:15)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt2 = self.dss_service.get_data(data_ref)
        self.assertTrue(len(rtt2) == 0)

        ## test an invalid time window in the middle whose identical earlier and
        ## later sides do not align with the time sequence
        extent = "time_window=(12/21/2000 05:15,12/21/2000 05:15)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt2 = self.dss_service.get_data(data_ref)
        self.assertTrue(len(rtt2) == 0)

        ## test an invalid timewindow before the data starts
        ## but still overlapping the data block window
        extent = "time_window=(12/21/2000 00:00,12/21/2000 1:00)"
        data_ref = DataReference(id, source, None, path, extent)
        self.assertRaises(ValueError, self.dss_service.get_data, data_ref)

        ## test an invalid timewindow overlapping the data block window
        extent = "time_window=(1/31/2001 18:00,1/31/2001 22:00)"
        data_ref = DataReference(id, source, None, path, extent)
        self.assertRaises(ValueError, self.dss_service.get_data, data_ref)

        ## test an invalid timewindow not overlapping the data block window
        extent = "time_window=(11/21/2000 00:00,11/22/2000 1:00)"
        data_ref = DataReference(id, source, None, path, extent)
        self.assertRaises(ValueError, self.dss_service.get_data, data_ref)
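For instantaneous data both ends of the window are kept, so an hourly series read over (10:00, 22:00) yields 13 points, the count the test above expects. A quick check of that arithmetic:

    from datetime import datetime, timedelta

    start = datetime(2000, 12, 21, 10)
    end = datetime(2000, 12, 21, 22)
    step = timedelta(hours=1)
    # both endpoints are included for instantaneous data
    n = int((end - start).total_seconds() // step.total_seconds()) + 1
    assert n == 13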
Example #13
    def test_read_aggregated_rts_timewindow(self):
        ## save some ts into the dss file; the ts is hourly averaged

        ## save rts first.
        data = range(1000)
        start = "12/21/2000 2:00"
        interval = "1hour"
        prop = {}
        prop[TIMESTAMP] = PERIOD_START
        prop[AGGREGATION] = MEAN
        rt1 = rts(data, start, interval, prop)

        id = "vtools.datastore.dss.DssService"
        path = "/TEST/DOWNSTREAM/EC//1HOUR/DWR/"
        source = self.test_file_path
        data_ref = DataReference(id, source=source, selector=path)
        self.dss_service.add_data(data_ref, rt1)

        ## test returning part of the stored data up to the end;
        ## it should get 992 numbers with values (8, 9, ..., 999)
        ## and its start datetime should be 12/21/2000 10:00
        extent = "time_window=(12/21/2000 10:00,01/31/2001 18:00)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt2 = self.dss_service.get_data(data_ref)
        self.assertTrue(rtt2.start == parse_time("12/21/2000 10:00"))
        self.assertTrue(len(rtt2) == 992)
        correct_data = range(8, len(rtt2) + 8)
        for i in range(len(rtt2)):
            self.assertTrue(rtt2.data[i] == float(correct_data[i]))

        ## test returning the middle part of the stored data;
        ## it should get 12 numbers with values (8, 9, ..., 19),
        ## its start datetime should be 12/21/2000 10:00 and end
        ## at 12/21/2000 21:00 (the later side is not included; see
        ## the sketch after this example)
        extent = "time_window=(12/21/2000 10:00,12/21/2000 22:00)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt2 = self.dss_service.get_data(data_ref)
        self.assertTrue(rtt2.start == parse_time("12/21/2000 10:00"))
        self.assertTrue(rtt2.end == parse_time("12/21/2000 21:00"))
        self.assertTrue(len(rtt2) == 12)
        correct_data = range(8, len(rtt2) + 8)
        for i in range(len(rtt2)):
            self.assertTrue(rtt2.data[i] == float(correct_data[i]))

        ## test returning the middle part of the stored data;
        ## it should get 12 numbers with values (8, 9, ..., 19),
        ## its start datetime should be 12/21/2000 10:00 and end
        ## at 12/21/2000 21:00 (the later side is not included);
        ## the time window is not given at exact hourly time points.
        extent = "time_window=(12/21/2000 09:45,12/21/2000 22:15)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt2 = self.dss_service.get_data(data_ref)
        self.assertTrue(rtt2.start == parse_time("12/21/2000 10:00"))
        self.assertTrue(rtt2.end == parse_time("12/21/2000 21:00"))
        self.assertTrue(len(rtt2) == 12)
        correct_data = range(8, len(rtt2) + 8)
        for i in range(len(rtt2)):
            self.assertTrue(rtt2.data[i] == float(correct_data[i]))

        ## test a valid timewindow that overlaps exactly the last data value of
        ## the record
        extent = "time_window=(1/31/2001 17:00,1/31/2001 18:00)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt2 = self.dss_service.get_data(data_ref)
        self.assertTrue(len(rtt2) == 1)
        self.assertTrue(rtt2.data[0] == float(999))

        ## test an invalid time window with the same start and end,
        ## exactly at the beginning of the time sequence
        extent = "time_window=(12/21/2000 02:00,12/21/2000 02:00)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt2 = self.dss_service.get_data(data_ref)
        self.assertTrue(len(rtt2) == 0)

        ## test an invalid time window with the same start and end in the
        ## middle of the time sequence
        extent = "time_window=(12/21/2000 05:00,12/21/2000 05:00)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt2 = self.dss_service.get_data(data_ref)
        self.assertTrue(len(rtt2) == 0)

        ## test an invalid time window with the same start and end at the
        ## end of the time sequence
        extent = "time_window=(12/21/2000 17:00,12/21/2000 17:00)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt2 = self.dss_service.get_data(data_ref)
        self.assertTrue(len(rtt2) == 0)

        ## test an invalid time window with the same start and end, not aligned with the interval
        extent = "time_window=(12/21/2000 05:15,12/21/2000 05:15)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt2 = self.dss_service.get_data(data_ref)
        self.assertTrue(len(rtt2) == 0)

        ## test an invalid time window with different start and end within one hour interval
        extent = "time_window=(12/21/2000 05:15,12/21/2000 05:55)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt2 = self.dss_service.get_data(data_ref)
        self.assertTrue(len(rtt2) == 0)

        ## test an invalid time window with different start and end across two hour intervals,
        ## but the intervals are incomplete, so it should return no value
        extent = "time_window=(12/21/2000 05:15,12/21/2000 06:55)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt2 = self.dss_service.get_data(data_ref)
        self.assertTrue(len(rtt2) == 0)

        ## test an invalid time window with the same start and end,
        ## exactly in the middle of the time sequence
        extent = "time_window=(12/21/2000 17:15,12/21/2000 17:15)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt2 = self.dss_service.get_data(data_ref)
        self.assertTrue(len(rtt2) == 0)

        ## test a valid time window overlapping exactly the first data value
        ## at the beginning
        extent = "time_window=(12/21/2000 02:00,12/21/2000 03:00)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt2 = self.dss_service.get_data(data_ref)
        self.assertTrue(len(rtt2) == 1)
        self.assertTrue(rtt2.data[0] == float(0))

        ## test a valid time window overlapping exactly a data value in the middle
        extent = "time_window=(12/21/2000 05:00,12/21/2000 06:00)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt2 = self.dss_service.get_data(data_ref)
        self.assertTrue(len(rtt2) == 1)
        self.assertTrue(rtt2.data[0] == float(3))

        ## test a valid time window overlapping exactly a data value at the end
        extent = "time_window=(1/31/2001 17:00,1/31/2001 18:00)"
        data_ref = DataReference(id, source, None, path, extent)
        rtt2 = self.dss_service.get_data(data_ref)
        self.assertTrue(len(rtt2) == 1)
        self.assertTrue(rtt2.data[0] == float(999))

        ## test an invalid timewindow before the data starts
        ## but still overlapping the data block window
        extent = "time_window=(12/21/2000 00:00,12/21/2000 1:00)"
        data_ref = DataReference(id, source, None, path, extent)
        self.assertRaises(ValueError, self.dss_service.get_data, data_ref)

        ## test an invalid timewindow overlapping the data block window
        extent = "time_window=(1/31/2001 18:00,1/31/2001 22:00)"
        data_ref = DataReference(id, source, None, path, extent)
        self.assertRaises(ValueError, self.dss_service.get_data, data_ref)

        ## test an invalid timewindow not overlapping the data block window
        extent = "time_window=(11/21/2000 00:00,11/22/2000 1:00)"
        data_ref = DataReference(id, source, None, path, extent)
        self.assertRaises(ValueError, self.dss_service.get_data, data_ref)
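For period-averaged data the later side of the window is not included, so the same (10:00, 22:00) hourly window yields 12 period values, matching the test above. It differs from the instantaneous count by exactly one:

    from datetime import datetime, timedelta

    start = datetime(2000, 12, 21, 10)
    end = datetime(2000, 12, 21, 22)
    step = timedelta(hours=1)
    # the later side is excluded for period-averaged data
    n_periods = int((end - start).total_seconds() // step.total_seconds())
    assert n_periods == 12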
Example #14
    def test_support_unaligned_ts(self):
        ## create some unaligned ts with aggregated attributes, save them to the test file and read them back

        ## daily averaged ts
        ts_start = parse_time("01/02/2000 23:00")
        ts_data = [1.0, 2.0, 3.0, 4.0, 5.0]
        interval = parse_interval("1day")
        props = {AGGREGATION: MEAN, TIMESTAMP: PERIOD_START}
        ts = rts(ts_data, ts_start, interval, props)
        selector = "/THIS/TS/UNALIGNED//1DAY/TEST/"
        id = "vtools.datastore.dss.DssService"
        source = self.test_file_path
        data_ref = DataReference(id, source=source, selector=selector)
        self.dss_service.add_data(data_ref, ts)

        dssc = self.dss_service.get_catalog(source)
        data_refs = dssc.data_references(selector)
        data_ref = data_refs.next()
        ts_back = self.dss_service.get_data(data_ref)
        self.assertTrue(ts_back.start == ts.start)
        self.assertTrue(len(ts_back) == len(ts))
        self.assertTrue((ts_back.data == ts.data).all())

        ## hourly averaged ts
        ts_start = parse_time("01/02/2000 23:59")
        ts_data = [1.0, 2.0, 3.0, 4.0, 5.0]
        interval = parse_interval("1hour")
        props = {AGGREGATION: MEAN, TIMESTAMP: PERIOD_START}
        ts = rts(ts_data, ts_start, interval, props)
        selector = "/THIS/TS/UNALIGNED//1HOUR/TEST/"
        data_ref = DataReference(id, source=source, selector=selector)
        self.dss_service.add_data(data_ref, ts)

        dssc = self.dss_service.get_catalog(source)
        data_refs = dssc.data_references(selector)
        data_ref = data_refs.next()
        ts_back = self.dss_service.get_data(data_ref)
        self.assertTrue(ts_back.start == ts.start)
        self.assertTrue(len(ts_back) == len(ts))
        self.assertTrue((ts_back.data == ts.data).all())

        ## 15MIN averaged ts
        ts_start = parse_time("01/02/2000 23:59")
        ts_data = [1.0, 2.0, 3.0, 4.0, 5.0]
        interval = parse_interval("15MIN")
        props = {AGGREGATION: MEAN, TIMESTAMP: PERIOD_START}
        ts = rts(ts_data, ts_start, interval, props)
        selector = "/THIS/TS/UNALIGNED//15MIN/TEST/"
        data_ref = DataReference(id, source=source, selector=selector)
        self.dss_service.add_data(data_ref, ts)

        dssc = self.dss_service.get_catalog(source)
        data_refs = dssc.data_references(selector)
        data_ref = data_refs.next()
        ts_back = self.dss_service.get_data(data_ref)
        self.assertTrue(ts_back.start == ts.start)
        self.assertTrue(len(ts_back) == len(ts))
        self.assertTrue((ts_back.data == ts.data).all())

        ## YEAR averaged ts
        ts_start = parse_time("02/02/2000 ")
        ts_data = [1.0, 2.0, 3.0, 4.0, 5.0]
        interval = parse_interval("1year")
        props = {AGGREGATION: MEAN, TIMESTAMP: PERIOD_START}
        ts = rts(ts_data, ts_start, interval, props)
        selector = "/THIS/TS/UNALIGNED//1YEAR/TEST/"
        data_ref = DataReference(id, source=source, selector=selector)
        self.dss_service.add_data(data_ref, ts)

        dssc = self.dss_service.get_catalog(source)
        data_refs = dssc.data_references(selector)
        data_ref = data_refs.next()
        ts_back = self.dss_service.get_data(data_ref)
        self.assertTrue(ts_back.start == ts.start)
        self.assertTrue(len(ts_back) == len(ts))
        self.assertTrue((ts_back.data == ts.data).all())