def test_02_test_count(self):
    """Verify Filter.count() agrees with a manually computed record count."""
    expected = {}
    job_comp = {}
    # Pass 1: for each job, discover its component_id from the first
    # matching record, then hand-count the records matching both keys.
    for job in range(1, 6):
        f = Sos.Filter(self.schema['job_time_cond'])
        f.add_condition(self.schema['job_id'], Sos.COND_EQ, job)
        first = f.begin()
        comp = first['component_id']
        job_comp[job] = comp
        f.add_condition(self.schema['component_id'], Sos.COND_EQ, comp)
        tally = 0
        obj = f.begin()
        while obj:
            tally += 1
            obj = f.next()
        expected[job] = tally
        del f
    # Pass 2: the library's count() must reproduce the manual tally.
    for job in range(1, 6):
        f = Sos.Filter(self.schema['job_time_cond'])
        f.add_condition(self.schema['job_id'], Sos.COND_EQ, job)
        f.add_condition(self.schema['component_id'], Sos.COND_EQ, job_comp[job])
        self.assertEqual(f.count(), expected[job])
def test_iter_inf_exact(self):
    """find_inf() with an exact key must land on the matching object."""
    for attr in self.schema:
        attr_id = attr.attr_id()
        it = sos.AttrIter(attr)
        for row in self.input_data:
            k = sos.Key(attr=attr)
            k.set_value(row[attr_id])
            self.assertTrue(it.find_inf(k))
            found = it.item()
            self.assertEqual(obj2tuple(found), row,
                             msg="bad result attr: %s" % attr.name())
def setUp(self):
    """Build a filter on 'join_key' constraining a_1 to [1000, 10000]."""
    self.min_a_1, self.max_a_1 = 1000, 10000
    join_attr = self.schema.attr_by_name('join_key')
    self.filt = Sos.Filter(join_attr)
    attr_a1 = self.schema.attr_by_name('a_1')
    self.filt.add_condition(attr_a1, Sos.COND_GE, self.min_a_1)
    self.filt.add_condition(attr_a1, Sos.COND_LE, self.max_a_1)
def test_int32_k0_k1_k2_k3_prev(self):
    """Reverse-iterate a 4-key int32 join filter bounded to [-12, -4]."""
    f = Sos.Filter(self.int32_schema.attr_by_name('a_join'))
    # Same condition order as adding them key by key: LE then GE per key.
    for name in ('k0', 'k1', 'k2', 'k3'):
        key_attr = self.int32_schema.attr_by_name(name)
        f.add_condition(key_attr, Sos.COND_LE, -4)
        f.add_condition(key_attr, Sos.COND_GE, -12)
    tally = 0
    obj = f.end()
    while obj:
        tally += 1
        obj = f.prev()
    Dprint("Misses {0}".format(f.miss_count()))
    Dprint("Count {0}".format(tally))
    # Each key spans 9 values; the filter may miss at most once per key.
    self.assertTrue(f.miss_count() <= 4)
    self.assertEqual(tally, 9 * 9 * 9 * 9)
    del f
def test_iter_pos(self):
    """A saved get_pos() restored via set_pos() must yield the same object."""
    for attr in self.schema:
        walker = sos.AttrIter(attr)
        self.assertTrue(walker.begin())
        self.assertTrue(walker.next())
        self.assertTrue(walker.next())
        obj = walker.item()
        saved = walker.get_pos()
        self.assertIsNotNone(obj)
        restored = sos.AttrIter(attr)
        self.assertEqual(restored.set_pos(saved), 0)
        other = restored.item()
        self.assertIsNotNone(other)
        self.assertEqual(obj[:], other[:])
def setUpClass(cls):
    """Create the schema-attribute test container and record the expected
    Sos type for each attribute, in attr_list order."""
    cls.setUpDb("schema_test_cont")
    cls.schema = Sos.Schema()
    cls.schema.from_template('schema_attr_test', attr_list)
    cls.schema.add(cls.db)
    # Expected type of each attribute, positionally matching attr_list.
    # NOTE(review): TYPE_UINT64 appears both first and seventh — presumably
    # attr_list begins with a uint64 attribute ahead of the int16..uint32
    # run; confirm against attr_list, which is defined elsewhere in the file.
    cls.types = [ Sos.TYPE_UINT64,
                  Sos.TYPE_INT16,
                  Sos.TYPE_INT32,
                  Sos.TYPE_INT64,
                  Sos.TYPE_UINT16,
                  Sos.TYPE_UINT32,
                  Sos.TYPE_UINT64,
                  Sos.TYPE_FLOAT,
                  Sos.TYPE_DOUBLE,
                  Sos.TYPE_LONG_DOUBLE,
                  Sos.TYPE_TIMESTAMP,
                  Sos.TYPE_STRUCT,
                  Sos.TYPE_BYTE_ARRAY,
                  Sos.TYPE_CHAR_ARRAY,
                  Sos.TYPE_INT16_ARRAY,
                  Sos.TYPE_INT32_ARRAY,
                  Sos.TYPE_INT64_ARRAY,
                  Sos.TYPE_UINT16_ARRAY,
                  Sos.TYPE_UINT32_ARRAY,
                  Sos.TYPE_UINT64_ARRAY,
                  Sos.TYPE_FLOAT_ARRAY,
                  Sos.TYPE_DOUBLE_ARRAY,
                  Sos.TYPE_LONG_DOUBLE_ARRAY ]
def test_iter_last(self):
    """end() must position the iterator on the final input row."""
    for attr in self.schema:
        it = sos.AttrIter(attr)
        self.assertTrue(it.end())
        last = it.item()
        self.assertEqual(obj2tuple(last), self.input_data[-1])
def __test_prev(self, attr_name):
    """Iterate the filter backwards through all of 'data', append new
    records before the first key, then verify that prev() continues into
    the new records without revisiting any object.

    Mutates the module-level 'key' and (via __add_data) 'data' globals.
    """
    global key
    global data
    attr = self.schema[attr_name]
    f = Sos.Filter(attr)
    # Iterate to the start, checking each object against 'data' in
    # reverse order.
    count = len(data)
    o = f.end()
    while o:
        d = data[count-1]
        v = o[:]
        self.assertEqual(d[0], v[0])
        self.assertEqual(d[1], v[1])
        count -= 1
        o = f.prev()
    # Every record must have been visited exactly once.
    self.assertEqual( count, 0 )
    # Add more data
    key = 500 # Put the key before the 1st key of the last test
    new_data = []
    key, new_data = self.__add_data( key, new_data )
    count = len(new_data)
    # f.prev should return the new data even though the iterator was
    # already exhausted before the insert.
    o = f.prev()
    self.assertIsNotNone( o )
    while o:
        d = new_data[count-1]
        v = o[:]
        self.assertEqual(d[0], v[0])
        self.assertEqual(d[1], v[1])
        count -= 1
        o = f.prev()
    # we should not see any object twice
    self.assertEqual( count, 0 )
def __join_test_next_prev(self, join_attr_name, attr_name, min_v, max_v):
    """Forward and backward iteration over a join filter must visit the
    same number of objects, every one within [min_v, max_v]."""
    f = Sos.Filter(self.schema[join_attr_name])
    cond_attr = self.schema[attr_name]
    f.add_condition(cond_attr, Sos.COND_GE, min_v)
    f.add_condition(cond_attr, Sos.COND_LE, max_v)
    forward = 0
    obj = f.begin()
    while obj:
        Dprint(obj[:])
        forward += 1
        self.assertTrue(obj[attr_name] >= min_v)
        self.assertTrue(obj[attr_name] <= max_v)
        obj = next(f)
    # Walk backwards; the visit count must match the forward pass.
    backward = 0
    obj = f.end()
    while obj:
        Dprint(obj[:])
        backward += 1
        self.assertTrue(obj[attr_name] >= min_v)
        self.assertTrue(obj[attr_name] <= max_v)
        obj = f.prev()
    self.assertEqual(forward, backward)
def test_uint64_k0_k1_k2_k3(self):
    """Forward-iterate a 4-key uint64 join filter bounded to [4, 12]."""
    f = Sos.Filter(self.uint64_schema.attr_by_name('a_join'))
    # Same condition order as adding them key by key: GE then LE per key.
    for name in ('k0', 'k1', 'k2', 'k3'):
        key_attr = self.uint64_schema.attr_by_name(name)
        f.add_condition(key_attr, Sos.COND_GE, 4)
        f.add_condition(key_attr, Sos.COND_LE, 12)
    tally = 0
    obj = f.begin()
    while obj:
        tally += 1
        obj = next(f)
    Dprint("Misses {0}".format(f.miss_count()))
    Dprint("count {0}".format(tally))
    # Each key spans 9 values; the filter may miss at most once per key.
    self.assertTrue(f.miss_count() <= 4)
    self.assertEqual(tally, 9 * 9 * 9 * 9)
    del f
def __test_next_prev(self, attr_name, min_v, max_v):
    """Both iteration directions must visit the same non-zero number of
    objects, each with attr_name's value inside [min_v, max_v]."""
    cond_attr = self.schema[attr_name]
    f = Sos.Filter(cond_attr)
    f.add_condition(cond_attr, Sos.COND_GE, min_v)
    f.add_condition(cond_attr, Sos.COND_LE, max_v)

    def check_bounds(obj):
        # Array values come back as numpy arrays; compare as lists.
        val = obj[attr_name]
        if type(val) == numpy.ndarray:
            val = val.tolist()
        Dprint("{0} >= {1}".format(val, min_v))
        Dprint("{0} <= {1}".format(val, max_v))
        self.assertTrue(val >= min_v)
        self.assertTrue(val <= max_v)

    forward = 0
    obj = f.begin()
    while obj:
        forward += 1
        check_bounds(obj)
        obj = next(f)
    self.assertTrue(forward > 0)
    # Walk backwards; the count must match the forward pass.
    backward = 0
    obj = f.end()
    while obj:
        backward += 1
        check_bounds(obj)
        obj = f.prev()
    self.assertTrue(backward > 0)
    self.assertEqual(forward, backward)
def setUpClass(cls):
    """Create the obj_set_get container with one attribute per Sos type;
    each attribute is named after its type."""
    cls.setUpDb("obj_set_get_cont")
    type_names = [
        "int16", "int32", "int64",
        "uint16", "uint32", "uint64",
        "float", "double", "long_double",
        "timestamp", "struct",
        "byte_array", "char_array",
        "int16_array", "int32_array", "int64_array",
        "uint16_array", "uint32_array", "uint64_array",
        "float_array", "double_array", "long_double_array",
    ]
    template = []
    for tn in type_names:
        spec = {"name": tn, "type": tn}
        if tn == "struct":
            spec["size"] = 24  # struct attrs need an explicit byte size
        template.append(spec)
    cls.schema = Sos.Schema()
    cls.schema.from_template('obj_set_get', template)
    cls.schema.add(cls.db)
def open_test(path):
    """Return True if the container at *path* opens (and closes) cleanly,
    False otherwise; failures are logged."""
    try:
        cont = Sos.Container(path)
        cont.close()
        return True
    except Exception as e:
        log.write('open_test err: ' + repr(e))
        return False
def attr_key_half_prev_STRUCT(a, v):
    """Build a key for struct attr *a*: unpack v's four (identical)
    big-endian uint32 fields and repack each shifted down by HALF."""
    fields = struct.unpack("!LLLL", v)
    assert fields[0] == fields[1] == fields[2] == fields[3]
    shifted = fields[0] - HALF
    key = sos.Key(attr=a)
    key.set_value(struct.pack("!LLLL", shifted, shifted, shifted, shifted))
    return key
def setUpClass(cls):
    """Create the append-data container: an indexed int32 plus a string."""
    cls.setUpDb('append_data_test_cont')
    template = [
        {"name": "int32", "type": "int32", "index": {}},
        {"name": "string", "type": "char_array"},
    ]
    cls.schema = Sos.Schema()
    cls.schema.from_template('append_data_test', template)
    cls.schema.add(cls.db)
def setUpClass(cls):
    """Create the query_test2 container with a single indexed int16."""
    cls.setUpDb('query_test2_cont')
    cls.schema = Sos.Schema()
    cls.schema.from_template(
        'query_test2',
        [{"name": "int16", "type": "int16", "index": {}}])
    cls.schema.add(cls.db)
def test_double_from_str(self):
    """from_str() must parse plain and exponent notation into a double."""
    val = Sos.Value(self.schema['double'])
    for text, expect in (('1234.50000', 1234.5),
                         ('1.2345e3', 1234.5),
                         ('1234.4e-3', 1.2344)):
        val.from_str(text)
        self.assertEqual(val.value, expect)
def test_float_from_str(self):
    """from_str() must parse into a single-precision float — note the
    rounded expectation for the last case."""
    val = Sos.Value(self.schema['float'])
    for text, expect in (('1234.50000', 1234.5),
                         ('1.2345e3', 1234.5),
                         ('1234.4e-3', 1.2344000339508057)):
        val.from_str(text)
        self.assertEqual(val.value, expect)
def set_output(self, path):
    """Open (or create) the output container at *path* and ensure the
    output schema exists in it.

    Parameters:
    path -- filesystem path of the output container

    Raises:
    ValueError -- if the schema is absent from the container and no
                  template (self.out_schema_template) was provided.
    """
    self.out_cont = Sos.Container()
    try:
        self.out_cont.open(path, Sos.PERM_RW)
    except Exception:
        # Container could not be opened (presumably it does not exist);
        # create it with a primary RESULTS partition.
        # Narrowed from a bare 'except:' which also swallowed
        # SystemExit/KeyboardInterrupt.
        self.out_cont.create(path)
        self.out_cont.open(path, Sos.PERM_RW)
        self.out_cont.part_create("RESULTS")
        part = self.out_cont.part_by_name("RESULTS")
        part.state_set("primary")
    self.out_schema = self.out_cont.schema_by_name(self.out_schema_name)
    if not self.out_schema:
        if self.out_schema_template is None:
            # BUG FIX: the original message called .format() without a
            # placeholder, so the schema name was silently dropped.
            raise ValueError(
                "A schema template must be provided for schema "
                "'{0}'.".format(self.out_schema_name))
        self.out_schema = Sos.Schema()
        self.out_schema.from_template(self.out_schema_name,
                                      self.out_schema_template)
        self.out_schema.add(self.out_cont)
def setUpClass(cls):
    """Create the timestamp container: an indexed timestamp plus its
    string rendering."""
    cls.setUpDb("timestamp_test_cont")
    cls.schema = Sos.Schema()
    cls.schema.from_template('timestamp_test', [
        {"name": "timestamp", "type": "timestamp", "index": {}},
        {"name": "timestr", "type": "char_array"},
    ])
    cls.schema.add(cls.db)
def test_iter_pos_cleanup(self):
    """Create many live (iterator, position) pairs and keep them all
    referenced — exercises position bookkeeping/cleanup."""
    saved = []
    for _ in range(1024):
        for attr in self.schema:
            it = sos.AttrIter(attr)
            self.assertTrue(it.begin())
            self.assertTrue(it.next())
            pos = it.get_pos()
            self.assertIsNotNone(pos)
            saved.append([it, pos])
def test_timestamp_to_str(self):
    """to_str() must zero-pad the microseconds field to six digits."""
    val = Sos.Value(self.schema['timestamp'])
    for ts, expect in (((1511885835, 123450), '1511885835.123450'),
                       ((1511885835, 12345), '1511885835.012345')):
        val.value = ts
        self.assertEqual(val.to_str(), expect)
def get_container(cont_name):
    """Open and return the container named *cont_name* under SOS_ROOT,
    or None if the open fails.

    Also (re)binds the module-level 'log' to a GrafanaViews message log.
    """
    global log
    try:
        log = _log.MsgLog('GrafanaViews')
        path = settings.SOS_ROOT + '/' + cont_name
        return Sos.Container(path)
    except Exception:
        # BUG FIX: the original bare 'except:' also swallowed
        # SystemExit/KeyboardInterrupt; narrowed to Exception. The
        # 'cont = None; return cont' indirection is gone too.
        return None
def get_df(self, limit=None, wait=None, reset=True, keep=0, index=None,
           inputer=None):
    """Return a Pandas DataFrame from the DataSource

    The get_df() method returns the data identified by the select()
    method as a Pandas DataFrame

    Keyword Parameters:
    limit   -- The maximum number of records to return. This limits how
               large each series in the resulting DataFrame. If not
               specified, the limit is DataSource.window_size
    index   -- The column name to use as the DataFrame index
    wait    -- A wait-specification that indicates how to wait for
               results if the data available is less than 'limit'. See
               Sos.Query.query() for more information.
    reset   -- Set to True to re-start the query at the beginning of the
               matching data.
    keep    -- Return [0..keep] as the [N-keep, N] values from the
               previous result. This is useful when the data from the
               previous 'window' needs to be combined with the next
               window, for example when doing 'diff' over a large series
               of input data, the last sample from the previous window
               needs to be subtracted from the first sample of the next
               window (see Transform.diff())
    inputer -- An alternate Sos.QueryInputer to drive the query; one is
               constructed when omitted.
    """
    # NOTE(review): the guard tests self.query but the query is executed
    # via self.query_ — confirm both names refer to the same underlying
    # query object.
    if self.query is None:
        return None
    if limit is None:
        limit = self.window
    if keep and self.last_result is None:
        raise ValueError(
            "Cannot keep results from an empty previous result.")
    if inputer is None:
        # start=keep leaves room at the front for the carried-over rows.
        inputer = Sos.QueryInputer(self.query_, limit, start=keep)
    # NOTE(review): 'count' is never used after this call.
    count = self.query_.query(inputer, reset=reset, wait=wait)
    result = self.query_.to_dataframe(index=index)
    if keep:
        # Copy the last 'keep' rows of the previous result into the
        # first 'keep' rows of the new one.
        last_row = self.last_result.get_series_size() - keep
        for row in range(0, keep):
            for col in range(0, result.series_count):
                result[col, row] = self.last_result[col, last_row]
            last_row += 1
    self.last_result = result
    return self.last_result
def setUpClass(cls):
    """Create a container whose schema indexes two attrs with the H2HTBL
    index type (spelled in both upper and lower case — presumably the
    index-type lookup is case-insensitive)."""
    cls.setUpDb("h2htbl_cont")
    cls.schema = Sos.Schema()
    cls.schema.from_template('test_h2htbl', [
        {"name": "tkn_id", "type": "uint64",
         "index": {"type": "H2HTBL"}},
        {"name": "tkn_count", "type": "uint64"},
        {"name": "tkn_text", "type": "char_array",
         "index": {"type": "h2htbl"}},
    ])
    cls.schema.add(cls.db)
def test_iter(self):
    """Forward iteration must yield exactly the input data, in order."""
    for attr in self.schema:
        it = sos.AttrIter(attr)
        seen = []
        more = it.begin()
        while more:
            seen.append(obj2tuple(it.item()))
            more = it.next()
        self.assertEqual(seen, self.input_data)
def test_int64_from_str(self):
    """from_str() must honor C-style octal, decimal and hex prefixes."""
    val = Sos.Value(self.schema['int64'])
    # '-0123456789' parses as octal; consumption stops at the first
    # non-octal digit, so only -0o1234567 (= -342391) is read.
    for text, expect in (('-0123456789', -342391),
                         ('-123456789', -123456789),
                         ('-0x123456789', -4886718345)):
        val.from_str(text)
        self.assertEqual(val.value, expect)
def test_timestamp_from_str(self):
    """from_str() reads the fraction digits as a literal microsecond
    count ('.12345' -> 12345 usecs, not 123450)."""
    val = Sos.Value(self.schema['timestamp'])
    for text, expect in (('1511885835.012345', (1511885835, 12345)),
                         ('1511885835.12345', (1511885835, 12345)),
                         ('1511885835.123450', (1511885835, 123450))):
        val.from_str(text)
        self.assertEqual(val.value, expect)
def setUpClass(cls):
    """Create the string-join container: three string attrs plus an
    indexed join over all three."""
    cls.setUpDb("join_test_str_cont")
    cls.schema = Sos.Schema()
    cls.schema.from_template('test_str', [
        {"name": "a_1", "type": "string"},
        {"name": "a_2", "type": "string"},
        {"name": "a_3", "type": "string"},
        {"name": "a_join", "type": "join",
         "join_attrs": ["a_1", "a_2", "a_3"], "index": {}},
    ])
    cls.schema.add(cls.db)
def test_iter_rev(self):
    """Backward iteration must yield the input data in reverse order."""
    for attr in self.schema:
        it = sos.AttrIter(attr)
        seen = []
        more = it.end()
        while more:
            seen.append(obj2tuple(it.item()))
            more = it.prev()
        seen.reverse()
        self.assertEqual(seen, self.input_data)