Пример #1
0
def proc_main(pk, row, arg):
    """
    Return [mean, std] of the row's stored time series.

    including pk and arg because of call in tsdb_server.py

    Parameters
    ----------
    pk : primary key of the row (unused here; required by the caller).
    row : dict whose 'ts' entry is {'times': ..., 'values': ...}.
    arg : trigger argument (unused here; required by the caller).
    """
    # Build the TimeSeries once; the original constructed an identical
    # object twice (once for mean, once for std).
    series = ts.TimeSeries(row['ts']['times'], row['ts']['values'])
    return [series.mean(), series.std()]
Пример #2
0
def hlm_from_psi4lm(psi4lm, w0, cmplx=False, taper=False, cut=False):
  """Obtain the strain h_lm by twice integrating a psi4 multipole via FFI.

  Returns the complex strain series when cmplx is True, otherwise the
  (h_plus, h_cross) pair of real TimeSeries polarizations.
  """
  strain = integrate_FFI(psi4lm, float(w0), order=2, taper=taper, cut=cut)
  if cmplx:
    return strain
  # Split the complex strain into its two real polarizations.
  hp = timeseries.TimeSeries(strain.t, strain.y.real)
  hc = timeseries.TimeSeries(strain.t, -strain.y.imag)
  return hp, hc
Пример #3
0
    def test_delete_ts(self):
        """Delete stored series and verify full cleanup, then re-insert.

        Checks: (1) pk lookup raises KeyError, (2) select by pk returns
        empty results, (3) indexed fields no longer reference the pk,
        (4) the db accepts the same pks, series and metadata again after
        deletion and returns them correctly.
        """
        n_delete = 10
        # delete and check to make sure they're gone
        for i in range(n_delete):
            pk = 'ts-' + str(i)
            tsmeta = self.db._get_meta_dict(pk)

            self.db.delete_ts(pk)  # delete the timeseries

            #Check 1: __get__() get by pk fail
            with self.assertRaises(KeyError):
                self.db[pk]  # check to make sure it's gone

            #Check 2: db_select return empty sets
            self.assertEqual(self.db.select({'pk': pk}), ([], []))

            #Check 3: does not exist in index
            for field, value in tsmeta.items(
            ):  # make sure it's gone from indexes
                if field in self.schema.keys(
                ) and self.schema[field]['index'] is not None:
                    self.assertTrue(
                        pk not in self.db.select({field: value})[0])

        #Check 4: check the db after deletion is clean and can hold the same pk and timeseries again
        # insert the deleted ts and check to make sure everything is working as before
        for i in range(n_delete):
            pk = 'ts-' + str(i)
            values = np.array(range(self.ts_length)) + i
            series = ts.TimeSeries(values, values)
            meta = {}
            meta['mean'] = float(series.mean())
            meta['std'] = float(series.std())
            meta['vp'] = False
            meta['blarg'] = self.schema['blarg']['values'][
                i % 2]  #blarg only has two value
            n_order = len(self.schema['order']['values'])  # 11
            meta['order'] = self.schema['order']['values'][i % n_order]
            self.db.insert_ts(pk, series)
            self.db.upsert_meta(pk, meta)

        # verify the re-inserted rows read back with the expected metadata
        for i in range(n_delete):
            pk = 'ts-' + str(i)
            values = np.array(range(self.ts_length)) + i
            series = ts.TimeSeries(values, values)
            r_meta = self.db._get_meta_list(pk)
            n_order = len(self.schema['order']['values'])  # 11
            self.assertTrue(r_meta[self.db.metaheap.fields.index('order')] ==
                            self.schema['order']['values'][i % n_order])
            n_blarg = 2
            self.assertTrue(r_meta[self.db.metaheap.fields.index('blarg')] ==
                            self.schema['blarg']['values'][i % n_blarg])
            self.assertTrue(
                r_meta[self.db.metaheap.fields.index('mean')] == series.mean())
            self.assertTrue(
                r_meta[self.db.metaheap.fields.index('std')] == series.std())
    def setUp(self):
        """Create a DictDB-backed server/protocol pair and insert four
        reference series through the mocked message path."""
        identity = lambda x: x
        to_int = lambda x: int(x)
        to_float = lambda x: float(x)
        to_bool = lambda x: bool(x)

        # Schema: 'convert' coerces incoming values; 'index' marks fields
        # that get a secondary index (None = unindexed).
        schema = {
            'pk': {
                'convert': identity,
                'index': None
            },  #will be indexed anyways
            'ts': {
                'convert': identity,
                'index': None
            },
            'order': {
                'convert': to_int,
                'index': 1
            },
            'blarg': {
                'convert': to_int,
                'index': 1
            },
            'useless': {
                'convert': identity,
                'index': None
            },
            'mean': {
                'convert': to_float,
                'index': 1
            },
            'std': {
                'convert': to_float,
                'index': 1
            },
            'vp': {
                'convert': to_bool,
                'index': 1
            }
        }
        # NOTE(review): third DictDB argument is presumably a size/limit
        # parameter -- confirm against the DictDB constructor.
        self.db = DictDB(schema, 'pk', 3)
        self.server = TSDBServer(self.db)
        self.prot = TSDBProtocol(self.server)
        self.des = Deserializer()

        msg = TSDBOp_InsertTS('one', ts.TimeSeries([1, 2, 3], [1, 4, 9]))
        status, payload = self._mockSendingMessage(msg)

        msg = TSDBOp_InsertTS('two', ts.TimeSeries([2, 3, 4], [4, 9, 16]))
        status, payload = self._mockSendingMessage(msg)

        msg = TSDBOp_InsertTS('three', ts.TimeSeries([9, 3, 4], [4, 0, 16]))
        status, payload = self._mockSendingMessage(msg)

        msg = TSDBOp_InsertTS('four', ts.TimeSeries([0, 0, 4], [1, 0, 4]))
        status, payload = self._mockSendingMessage(msg)
    async def test_REST_simple_run(self):
        """End-to-end run against the REST client: trigger, insert, select,
        upsert, trigger removal, and delete."""
        # Data
        t = [0, 1, 2, 3, 4]
        v = [1.0, 2.0, 3.0, 2.0, 1.0]
        ats = ts.TimeSeries(t, v)

        # Setup Client
        client = TSDB_REST_Client()

        # Add Trigger: 'stats' computes mean/std on every insert_ts
        await client.add_trigger('stats', 'insert_ts', ['mean', 'std'], None)

        # Insert
        await client.insert_ts(1, ats)

        # Select: the trigger should have populated mean and std
        status, payload = await client.select({'pk': {
            '==': 1
        }}, ['ts', 'mean', 'std'], None)
        assert (status == 0)

        assert (ts.TimeSeries(payload['1']['ts'][0],
                              payload['1']['ts'][1]) == ats)
        assert (payload['1']['std'] == 1.4142135623730951)
        assert (payload['1']['mean'] == 2.0)
        # FINALLY WORKING!!! YAY!!!

        # Upsert
        await client.upsert_meta(1, {'order': 1})
        status, payload = await client.select({'order': {
            '==': 1
        }}, ['pk', 'order'], None)
        assert (status == 0)
        assert (payload['1']['order'] == 1)

        # Remove Trigger
        await client.remove_trigger('stats', 'insert_ts')

        # Insert (No Trigger): mean/std must NOT be computed this time
        await client.insert_ts(2, ats)
        status, payload = await client.select({'pk': {
            '==': 2
        }}, ['ts', 'mean', 'std'], None)
        assert (ts.TimeSeries(payload['2']['ts'][0],
                              payload['2']['ts'][1]) == ats)
        assert ('std' not in payload['2'])
        assert ('mean' not in payload['2'])

        # Delete: a subsequent select for the pk returns an empty payload
        await client.delete_ts(1)
        status, payload = await client.select({'pk': {
            '==': 1
        }}, ['ts', 'mean', 'std'], None)
        assert (status == 0)
        assert (payload == {})
    def test_delete(self):
        """Delete series, verify they are gone from lookups and indexes,
        then re-insert them and verify the stored metadata round-trips."""
        n_delete = 10
        # delete and check to make sure they're gone
        for i in range(n_delete):
            pk = 'ts-' + str(i)
            tsmeta = self.db._get_meta_dict(pk)
            tsinstance = tsmeta['ts']

            self.db.delete_ts(pk)  # delete the timeseries
            with self.assertRaises(KeyError):
                self.db[pk]  # check to make sure it's gone
            self.assertEqual(self.db.select({'pk': pk}), ([], []))
            for field, value in tsmeta.items(
            ):  # make sure it's gone from indexes
                if field in self.schema.keys(
                ) and self.schema[field]['index'] is not None:
                    self.assertTrue(
                        pk not in self.db.select({field: value})[0])

        # reinsert
        for i in range(n_delete):
            pk = 'ts-' + str(i)
            values = np.array(range(self.tsLength)) + i
            series = ts.TimeSeries(values, values)
            meta = {}
            n_order = len(self.schema['order']['values'])  # 11
            meta['order'] = self.schema['order']['values'][i % n_order]
            n_blarg = 2
            meta['blarg'] = self.schema['blarg']['values'][i % n_blarg]
            meta['mean'] = float(series.mean())
            meta['std'] = float(series.std())
            meta['vp'] = False
            self.db.insert_ts(pk, series)
            self.db.upsert_meta(pk, meta)

        # check to make sure everything is working as before
        for i in range(n_delete):
            pk = 'ts-' + str(i)
            values = np.array(range(self.tsLength)) + i
            series = ts.TimeSeries(values, values)
            r_meta = self.db._get_meta_list(pk)
            n_order = len(self.schema['order']['values'])  # 11
            self.assertTrue(r_meta[self.db.metaheap.fields.index('order')] ==
                            self.schema['order']['values'][i % n_order])
            n_blarg = 2
            self.assertTrue(r_meta[self.db.metaheap.fields.index('blarg')] ==
                            self.schema['blarg']['values'][i % n_blarg])
            self.assertTrue(
                r_meta[self.db.metaheap.fields.index('mean')] == series.mean())
            self.assertTrue(
                r_meta[self.db.metaheap.fields.index('std')] == series.std())
Пример #7
0
 def test_insert_exception(self):
     """Inserting malformed series or a duplicate primary key raises ValueError."""
     pk = 'bad'
     existing = 'ts-0'  # a key already present in the populated db
     # A raw numpy array (not a TimeSeries) is rejected.
     with self.assertRaises(ValueError):
         bad_series = np.array(range(self.ts_length + 3))
         self.db.insert_ts(pk, bad_series)
     # A TimeSeries of the wrong length is rejected.
     with self.assertRaises(ValueError):
         values = np.array(range(self.ts_length + 5))
         bad_series = ts.TimeSeries(values, values)
         self.db.insert_ts(pk, bad_series)
     # Re-inserting an existing primary key is rejected.
     # (Fix: use the `existing` local, which was previously assigned but
     # never used while the literal 'ts-0' was duplicated below.)
     with self.assertRaises(ValueError):
         values = np.array(range(self.ts_length))
         series = ts.TimeSeries(values, values)
         self.db.insert_ts(existing, series)
Пример #8
0
    def setUp(self):
        """Create a DictDB with an indexed schema and pre-populate it with
        four series and their order/blarg metadata."""
        identity = lambda x: x
        to_int = lambda x: int(x)
        to_float = lambda x: float(x)
        to_bool = lambda x: bool(x)

        # Schema: 'convert' coerces incoming values; 'index' marks fields
        # that get a secondary index (None = unindexed).
        schema = {
            'pk': {
                'convert': identity,
                'index': None
            },  #will be indexed anyways
            'ts': {
                'convert': identity,
                'index': None
            },
            'order': {
                'convert': to_int,
                'index': 1
            },
            'blarg': {
                'convert': to_int,
                'index': 1
            },
            'useless': {
                'convert': identity,
                'index': None
            },
            'mean': {
                'convert': to_float,
                'index': 1
            },
            'std': {
                'convert': to_float,
                'index': 1
            },
            'vp': {
                'convert': to_bool,
                'index': 1
            }
        }
        self.db = DictDB(schema, 'pk', 3)
        self.db.insert_ts('one', ts.TimeSeries([1, 2, 3], [1, 4, 9]))
        self.db.insert_ts('two', ts.TimeSeries([2, 3, 4], [4, 9, 16]))
        self.db.insert_ts('three', ts.TimeSeries([9, 3, 4], [4, 0, 16]))
        self.db.insert_ts('four', ts.TimeSeries([0, 0, 4], [1, 0, 4]))
        self.db.upsert_meta('one', {'order': 1, 'blarg': 1})
        self.db.upsert_meta('two', {'order': 2})
        self.db.upsert_meta('three', {'order': 1, 'blarg': 2})
        self.db.upsert_meta('four', {'order': 2, 'blarg': 2})
Пример #9
0
 def __getitem__(self, key):
     """Return the series for *key*.

     A precomputed entry in self._src_inf wins; otherwise the result is
     the pointwise maximum of the absolute min/max source series
     (presumably an infinity-norm envelope -- name-based inference).
     """
     if key in self._src_inf:
         return self._src_inf[key]
     lo = self._src_min[key]
     hi = self._src_max[key]
     envelope = numpy.maximum(abs(hi.y), abs(lo.y))
     return timeseries.TimeSeries(lo.t, envelope)
Пример #10
0
def test_serialize():
    """Round-trip a serialized message, feeding the deserializer in halves.

    After only the first half, the deserializer reports not-ready and
    yields None; after a fresh first half plus the second half, the
    original object comes back intact.
    """
    times = [0, 1, 2, 3, 4]
    values = [1.0, 2.0, 3.0, 2.0, 1.0]
    payload = TSDBOp.to_json({'pk': 1, 'ts': ts.TimeSeries(times, values)})

    # Serialize and split the byte stream down the middle.
    encoded = serialize(payload)
    mid = len(encoded) // 2
    head, tail = encoded[:mid], encoded[mid:]

    d = Deserializer()

    # Half the bytes: nothing should be ready.
    d.append(head)
    assert (d.ready() == False)
    assert (d.deserialize() == None)

    # Feed a fresh first half plus the second half: object is recovered.
    d.append(head)
    d.append(tail)
    assert (d.ready() == True)
    assert (d.deserialize() == payload)
Пример #11
0
def proc_main(pk, row, arg):
    """Compute a SAX (Symbolic Aggregate approXimation) word for *arg*.

    The argument series is standardized, piecewise-aggregated into w=32
    segments, and each segment mean is mapped to one of a=4 binary symbols
    using the precomputed Breakpoints table.  Returns the list of symbols.
    """
    #your code here
    argts = ts.TimeSeries(*arg)
    # standardize to zero mean / unit std before discretizing
    series = stand(argts,argts.mean(),argts.std())
    a = 4   # alphabet cardinality
    w = 32  # word length (number of PAA segments)
    # binary symbol labels, zero-padded to log2(a) bits
    symbols = ['{0:b}'.format(i).zfill(int(np.log(a-1)/np.log(2))+1) for i in range(a)]
    if a in Breakpoints:
        breakpoints = Breakpoints[a]#norm.ppf(np.array([i/a for i in range(1,a)]))
    else:
        raise ValueError('Breakpoints do not exist for cardinality {}'.format(a))
    # append +inf so the last bucket is open-ended
    breakpoints = np.array([*breakpoints,np.inf])
    T = np.zeros((w))
    n = len(series)
    SAX = []
    for i in range(w):
        # piecewise aggregate: mean of the i-th of w equal-length slices
        T[i] = w/n*sum([series.data[j] for j in range(int(n/w*i),int(n/w*(i+1)))])
        for j in range(a):
            if j == a-1:
                # fell through every bucket: assign the last symbol
                SAX.append(symbols[j])
                break
            if T[i]<breakpoints[0]:
                SAX.append(symbols[0])
                break
            # NOTE(review): strict inequalities mean a value exactly equal to
            # a breakpoint only matches via the j == a-1 fallback -- confirm
            # this boundary behavior is intended.
            if T[i]>breakpoints[j] and T[i]<breakpoints[j+1]:
                SAX.append(symbols[j+1])
                break
    return SAX
Пример #12
0
    async def tsdb_augmented_select(self, request):
        """Serve the augmented-select endpoint.

        Without a 'query' URL parameter, returns the HTML form view.
        With one, parses the JSON query, runs the augmented select via the
        TSDB client, and returns the JSON-encoded result (or an error
        description when the request cannot be parsed).
        """
        if 'query' not in request.GET:
            view = AUGMENTED_SELECT_VIEW
            return web.Response(body=view.encode('utf-8'))
        # there is a query parameter in the URL
        try:
            query = json.loads(request.GET['query'])
            proc = query['proc']
            target = query['target']
            metadata_dict = query.get('where', {})
            additional = query.get('additional')
            if 'arg' in query:  # arg should be put as TimeSeries object form. Last time we only put json form of it and that aroused errors.
                arg = ts.TimeSeries(*query['arg'])
            else:
                arg = None
            status, result = await self.client.augmented_select(
                proc, target, arg, metadata_dict, additional)

            if status != TSDBStatus.OK:
                result = "Augmented Selection failed"

        except Exception as error:
            # report parse/processing failures to the caller instead of 500ing
            result = {"msg": "Cannot parse the Request"}
            result["type"] = str(type(error))
            result["args"] = str(error.args)

        # FIX: the original returned from inside a `finally` block, which
        # silently swallows any in-flight exception (including task
        # cancellation).  Returning here keeps the normal paths identical
        # without that hazard.
        return web.Response(body=json.dumps(result).encode('utf-8'))
Пример #13
0
    def setUp(self):
        """Create a PersistentDB under persistent_files/testing and load it
        with 100 series plus per-series metadata."""
        self.dirPath = "persistent_files/testing"
        if not os.path.isdir(self.dirPath):
            os.makedirs(self.dirPath)
            self._createdDirs = True  # remember so tearDown can clean up
        else:
            self._createdDirs = False

        self.schema = schema_type
        self.ts_length = 100

        self.db = PersistentDB(schema_type,
                               pk_field='pk',
                               db_name='testing',
                               ts_length=self.ts_length)

        # Insert 100 deterministic series; metadata cycles through the
        # allowed 'order' and 'blarg' values.
        for i in range(100):
            pk = 'ts-' + str(i)
            values = np.array(range(self.ts_length)) + i
            series = ts.TimeSeries(values, values)
            meta = {}
            n_order = len(schema_type['order']['values'])  # 11
            meta['order'] = schema_type['order']['values'][i % n_order]
            n_blarg = 2
            meta['blarg'] = schema_type['blarg']['values'][i % n_blarg]
            meta['mean'] = float(series.mean(
            ))  # make sure they are python floats, not numpy floats
            meta['std'] = float(series.std())
            meta['vp'] = False
            self.db.insert_ts(pk, series)
            self.db.upsert_meta(pk, meta)
Пример #14
0
def integrate_FFI(ts, w0, order=1, taper=False, cut=False):
  """Integrate a time series via fixed-frequency integration (FFI).

  The series is resampled onto a regular grid, FFT'd, multiplied by
  (-i*sign(w)/max(|w|, w0))**order, and inverse-transformed.  Flooring
  |w| at w0 keeps the 1/w factor from blowing up near DC.

  Note: the parameter `ts` here is the series object itself, shadowing
  the `ts` module name used elsewhere in this project.

  taper : apply a Planck window before the FFT.
  cut   : clip one period p = 2*pi/w0 off each end of the result.
  """
  regts = ts.regular_sample()
  t,z   = regts.t, regts.y
  p     = 2*math.pi/w0
  # fraction of the total signal length occupied by one cutoff period
  eps   = p / (t[-1]-t[0])
  if (eps>0.3):
    raise RuntimeError("FFI: waveform too short")
  #
  if taper:
    pw = planck_window(eps)
    z  *= pw(len(z))
  #
  dt    = t[1]-t[0]
  zt    = np.fft.fft(z)
  w     = np.fft.fftfreq(len(t), d=dt) * (2*math.pi)
  wa    = np.abs(w)
  # frequency-domain integration factor with |w| floored at w0
  fac1  = -1j * np.sign(w) / np.where(wa>w0, wa, w0)
  faco  = fac1**int(order)
  ztf   = zt * faco
  zf    = np.fft.ifft(ztf)
  g     = timeseries.TimeSeries(t, zf)
  if cut:
    g.clip(tmin=g.tmin()+p, tmax=g.tmax()-p)
  #
  return g
Пример #15
0
    def test_find_similar(self):
        """A similarity search through the protocol yields a None payload here."""
        probe = ts.TimeSeries([1, 2, 3], [4, 0, 3])
        status, payload = self._send(TSDBOp_FindSimilar(probe, self.vpkeys))
        self.assertEqual(payload, None)
Пример #16
0
 def test_read_ts(self):
     """Every series written by setUp reads back equal to what was stored."""
     for i in range(100):
         key = 'ts-' + str(i)
         data = np.array(range(self.ts_length)) + i
         expected = ts.TimeSeries(data, data)
         self.assertEqual(expected, self.db._return_ts(key))
Пример #17
0
    def test_del2(self):
        """Delete every row, verify the db is empty, then repopulate it."""
        for key in ('one', 'two', 'three', 'four'):
            self.db.delete_ts(key)

        pks, payload = self.db.select({}, None, None)
        self.assertEqual(set(pks), set())

        # Re-insert the original four series, then their metadata.
        fixtures = [
            ('one', [1, 2, 3], [1, 4, 9], {'order': 1, 'blarg': 1}),
            ('two', [2, 3, 4], [4, 9, 16], {'order': 2}),
            ('three', [9, 3, 4], [4, 0, 16], {'order': 1, 'blarg': 2}),
            ('four', [0, 0, 4], [1, 0, 4], {'order': 2, 'blarg': 2}),
        ]
        for key, times, values, _ in fixtures:
            self.db.insert_ts(key, ts.TimeSeries(times, values))
        for key, _, _, meta in fixtures:
            self.db.upsert_meta(key, meta)
Пример #18
0
    def test_find_similar2(self):
        """Nearest-neighbor search returns one of the four stored keys."""
        probe = ts.TimeSeries([0, 5, 10], [15, 25, 50])
        status, payload = self._mockSendingMessage(TSDBOp_FindSimilar(probe))
        nearest = list(payload.keys())[0]
        self.assertTrue(nearest in ['one', 'two', 'three', 'four'])
Пример #19
0
def tsmaker(m, s, j):
    """Return (metadata, TimeSeries): a jittered normal pdf sampled on [0, 1)."""
    orders = [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5]
    meta = {
        'order': int(np.random.choice(orders)),
        'blarg': int(np.random.choice([1, 2])),
    }
    times = np.arange(0.0, 1.0, 0.01)
    curve = norm.pdf(times, m, s) + j * np.random.randn(100)
    return meta, ts.TimeSeries(times, curve)
Пример #20
0
    def _populate(self):
        """Install a correlation trigger, then insert four reference series."""
        trigger = TSDBOp_AddTrigger('corr', 'insert_ts', ['d_vp-1'],
                                    ts.TimeSeries([1, 2, 3], [1, 4, 9]))
        status, payload = self._send(trigger)

        # All four series share the same time axis; only values differ.
        rows = [('one', [1, 4, 9]), ('two', [4, 9, 16]),
                ('three', [4, 0, 16]), ('four', [1, 0, 4])]
        for key, values in rows:
            msg = TSDBOp_InsertTS(key, ts.TimeSeries([1, 2, 3], values))
            status, payload = self._send(msg)
Пример #21
0
 def read_and_decode_ts(self, offset):
     """Read one fixed-length record at *offset* and decode it.

     The record is 2 * ts_length native doubles: the first half becomes
     the times, the second half the values of the returned TimeSeries.
     """
     self.fd.seek(offset)
     # ts_length = int.from_bytes(self.fd.read(TS_FIELD_LENGTH), byteorder='little')
     # self.fd.seek(offset + TS_FIELD_LENGTH)
     buff = self.fd.read(self.byteArrayLength)
     items = struct.unpack('%sd' % (2 * self.ts_length), buff)
     return timeseries.TimeSeries(items[:self.ts_length],
                                  items[self.ts_length:])
Пример #22
0
 async def get(self):
     """Handle a similarity-search GET request.

     Parses the 'ts' URL argument into a TimeSeries, forwards a simsearch
     request over TCP, and writes the status and payload back as JSON.
     """
     client = rest.TSDBrestapi()
     # literal_eval safely parses the "(times, values)" tuple from the URL
     timeseries = ts.TimeSeries(*ast.literal_eval(self.get_argument('ts')))
     r = client.simsearch(timeseries)
     status, payload = await tcp_echo_client(r,self.loop)
     status = TSDBStatus(status)
     print(status)
     self.write({'status':str(status),'payload':payload})
Пример #23
0
def tsmaker(m, s, j):
    """Build a jittered normal-pdf TimeSeries plus random metadata.

    m, s: mean and std of the underlying normal pdf; j: jitter amplitude.
    """
    choices = [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5]
    meta = {}
    meta['order'] = int(np.random.choice(choices))
    meta['blarg'] = int(np.random.choice([1, 2]))
    grid = np.arange(0.0, 1.0, 0.01)
    samples = norm.pdf(grid, m, s) + j * np.random.randn(100)
    return meta, ts.TimeSeries(grid, samples)
Пример #24
0
def tsmaker(m, s, j):
    """Generate a new TimeSeries from a normal distribution with the given
    mean *m*, standard deviation *s* and jitter amplitude *j*, together
    with randomly-chosen 'order' and 'blarg' metadata."""
    meta = {
        'order': int(np.random.choice(
            [-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5])),
        'blarg': int(np.random.choice([1, 2])),
    }
    xs = np.arange(0.0, 1.0, 0.01)
    ys = norm.pdf(xs, m, s) + j * np.random.randn(100)
    return meta, ts.TimeSeries(xs, ys)
Пример #25
0
def test_corr2():
    """Correlating a series with an identical one yields zero distance."""
    times = [1, 2, 3, 4]
    values = [40, 50, 60, 70]
    record = {'ts': ts.TimeSeries(times, values)}
    # Identical series: the kernelized distance must be exactly 0.
    assert (corr.proc_main(1, record, (times, values))[0] == 0)
Пример #26
0
def proc_main(pk, row, arg):
    """Return the kernelized distance between the row's series and *arg*.

    Both series are standardized, their normalized kernel cross-correlation
    is computed, and the distance sqrt(2*(1 - corr)) is returned as a
    one-element list.
    """
    # The argument is a time series. But due to serialization it does
    # not come out as the "instance", and must be cast
    argts = ts.TimeSeries(**arg)
    # compute a standardized time series
    stand_argts = stand(argts, argts.mean(), argts.std())
    # Build the row's series once (the original reconstructed an identical
    # TimeSeries three times: for mean, for std, and for stand()).
    rowts = ts.TimeSeries(row['ts']['times'], row['ts']['values'])
    stand_rowts = stand(rowts, rowts.mean(), rowts.std())
    # compute the normalized kernelized cross-correlation
    kerncorr = kernel_corr(stand_rowts, stand_argts, 1)
    # compute a distance from it.
    # The distance is given by np.sqrt(K(x,x) + K(y,y) - 2*K(x,y))
    # since we are normalized the autocorrs are 1
    kerndist = np.sqrt(2 * (1 - kerncorr))
    return [kerndist]
Пример #27
0
    def test_protocol_delete(self):
        """Exercise insert and delete through the protocol layer directly.

        Verifies: successful insert, duplicate-insert rejection, successful
        delete (db row count drops to zero), and repeat-delete rejection.
        """
        db = DictDB(schema, 'pk')
        server = TSDBServer(db)
        prot = TSDBProtocol(server)

        t1 = [0,1,2,3,4]
        v1 = [1.0,2.0,3.0,2.0,1.0]
        ats1 = ts.TimeSeries(t1, v1)

        t2 = [10,11,12,13,14]
        v2 = [-1.0,-2.0,-3.0,-2.0,-1.0]
        ats2 = ts.TimeSeries(t2, v2)

        insert_op = {}
        insert_op['pk'] = 1
        insert_op['ts'] = ats1
        insert_op['op'] = 'insert_ts'

        # Test Protocol Insert
        insert_return = prot._insert_ts(insert_op)
        assert(insert_return['op'] == 'insert_ts')
        assert(insert_return['status'] == TSDBStatus.OK)
        assert(insert_return['payload'] == None)
        inserted_row = server.db.rows[1]
        assert(inserted_row['pk'] == 1)
        assert(inserted_row['ts'] == ats1)

        # Re-inserting the same pk must be rejected.
        insert_return2 = prot._insert_ts(insert_op)
        assert(insert_return2['op'] == 'insert_ts')
        assert(insert_return2['status'] == TSDBStatus.INVALID_KEY)

        delete_op = {}
        delete_op['pk'] = 1
        delete_op['op'] = 'delete_ts'

        # Deleting the row empties the db.
        delete_return = prot._delete_ts(delete_op)
        assert(delete_return['op'] == 'delete_ts')
        assert(delete_return['status'] == TSDBStatus.OK)
        assert(delete_return['payload'] == None)
        assert (len(server.db.rows) == 0)

        # Deleting a now-missing pk must be rejected.
        delete_return2 = prot._delete_ts(delete_op)
        assert(delete_return2['op'] == 'delete_ts')
        assert(delete_return2['status'] == TSDBStatus.INVALID_KEY)
Пример #28
0
def test_corr_small():
    """The kernel distance of a small series against itself is ~zero."""
    from procs.corr import proc_main

    data = [10, 22, 26, 4, 18]
    xs = range(len(data))
    row_ts = ts.TimeSeries(xs, data).to_json()
    # Same series on both sides: distance should vanish to within EPS.
    result = proc_main(None, {'ts': row_ts}, {'times': xs, 'values': data})
    assert np.abs(result) < EPS
Пример #29
0
def test_p():
    """Period detection on an irregular signal lands near 6.28 (~2*pi)."""
    stop_time = 20
    num_points = 200
    num_sel_points = 60
    arg_t = [1, 2, 3, 4]
    arg_v = [40, 50, 60, 70]
    data_irr, time_irr = basic_irregular(stop_time, num_points, num_sel_points)
    record = {'ts': ts.TimeSeries(time_irr, data_irr)}
    estimate = period.proc_main(1, record, (arg_t, arg_v))[0]
    assert (np.abs(estimate - 6.28) < 0.5)
Пример #30
0
def test_client():
    """Exercise the client round trip: trigger add/remove, insert, upsert,
    and a final select."""
    client = test_db_init()
    # Trigger management and mutations all return None on success.
    assert client.add_trigger('junk', 'insert_ts', None, 23) is None
    assert client.insert_ts('one', ts.TimeSeries([1, 2, 3], [1, 4, 9])) is None
    assert client.remove_trigger('junk', 'insert_ts') is None
    assert client.upsert_meta('one', {'order': 1, 'blarg': 1}) is None
    # The select comes back with status OK and the expected payload.
    assert client.select({'order': 1, 'blarg': 1}) == (TSDBStatus(0), {})