def open_test(path):
    try:
        c = Sos.Container(path)
        c.close()
        return True
    except Exception as e:
        log.write('open_test err: ' + repr(e))
        return False

def get_container(cont_name):
    """Open and return the named container under SOS_ROOT, or None on failure."""
    try:
        global log
        log = _log.MsgLog('GrafanaViews')
        path = settings.SOS_ROOT + '/' + cont_name
        cont = Sos.Container(path)
        return cont
    except Exception:
        cont = None
    return cont

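# Hedged usage sketch (not from the source): the container name 'ldms_data' is
# hypothetical and is expected to live under settings.SOS_ROOT; 'meminfo' is a
# schema name used elsewhere in this code.
cont = get_container('ldms_data')
if cont is not None and open_test(settings.SOS_ROOT + '/ldms_data'):
    schema = cont.schema_by_name('meminfo')
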
def setUpDb(cls, db_name):
    """Create a fresh test container with a single PRIMARY partition."""
    cls.db = Sos.Container()
    cls.db_name = db_name
    db_path = os.getenv("TEST_DATA_DIR")
    if db_path:
        cls.path = db_path + "/" + cls.db_name
    else:
        cls.path = cls.db_name
    # Remove any leftover container from a previous run
    shutil.rmtree(cls.path, ignore_errors=True)
    cls.db.create(cls.path)
    cls.db.open(cls.path)
    cls.db.part_create("ROOT")
    root = cls.db.part_by_name("ROOT")
    root.state_set("PRIMARY")

def config(self, **kwargs):
    """Configure the SOS data source

    Keyword Arguments:
    path - The path to the Sos container
    cont - A Sos.Container handle
    """
    self.path = self._get_arg('path', kwargs, required=False)
    self.cont = self._get_arg('cont', kwargs, required=False)
    if self.path is None and self.cont is None:
        raise ValueError("One of 'cont' or 'path' must be specified")
    if self.path:
        if self.cont:
            raise ValueError(
                "The 'path' and 'cont' keywords are mutually exclusive")
        try:
            self.cont = Sos.Container(path=self.path, o_perm=Sos.PERM_RW)
        except:
            self.cont = Sos.Container()
            create = self._get_arg('create', kwargs, required=False, default=False)
            mode = self._get_arg('mode', kwargs, required=False, default=0o664)
            if create:
                # Create the database
                self.cont.create(path=self.path, o_mode=mode)
                self.cont.open(self.path, o_perm=Sos.PERM_RW)
                self.cont.part_create("ROOT")
                part = self.cont.part_by_name("ROOT")
                part.state_set("primary")
            else:
                raise ValueError(
                    "The container {0} does not exist.".format(self.path))

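# Hedged usage sketch: 'MySosDataSource' stands in for whatever class defines
# config() above, and the path '/tmp/my_cont' is illustrative only.
src = MySosDataSource()
src.config(path='/tmp/my_cont', create=True, mode=0o664)
# Alternatively, hand it an already-open container handle instead of a path:
# src.config(cont=Sos.Container('/tmp/my_cont', o_perm=Sos.PERM_RW))
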
def _data(self):
    """Yield every 'meminfo' object in timestamp order."""
    db = Sos.Container()
    db.open(DIR + "/sos/cont")
    sch = db.schema_by_name("meminfo")
    attr = sch.attr_by_name("timestamp")
    itr = attr.attr_iter()
    ret = itr.begin()
    while ret:
        obj = itr.item()
        yield obj
        ret = itr.next()
    del itr
    del attr
    del sch
    del db

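# Hedged example of consuming the generator above; 'ds' is a hypothetical
# instance of the class that defines _data(), and 'timestamp' is the indexed
# attribute the generator iterates over.
for obj in ds._data():
    print(obj['timestamp'])
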
def set_output(self, path):
    """Open (or create) the output container and ensure the output schema exists."""
    self.out_cont = Sos.Container()
    try:
        self.out_cont.open(path, Sos.PERM_RW)
    except:
        # The container does not exist yet; create it with a RESULTS partition
        self.out_cont.create(path)
        self.out_cont.open(path, Sos.PERM_RW)
        self.out_cont.part_create("RESULTS")
        part = self.out_cont.part_by_name("RESULTS")
        part.state_set("primary")
    self.out_schema = self.out_cont.schema_by_name(self.out_schema_name)
    if not self.out_schema:
        if self.out_schema_template is None:
            raise ValueError(
                "A schema template must be provided for "
                "'{0}'.".format(self.out_schema_name))
        self.out_schema = Sos.Schema()
        self.out_schema.from_template(self.out_schema_name,
                                      self.out_schema_template)
        self.out_schema.add(self.out_cont)

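# Illustrative schema template (an assumption, mirroring the template format
# used by the tests below): each entry names an attribute, gives its type, and
# optionally requests an index.
example_template = [
    { "name": "timestamp", "type": "uint64", "index": {} },
    { "name": "value", "type": "double" },
]
# e.g. self.out_schema_template = example_template before calling set_output()
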
def parse_request(self, input_):
    #
    # Open the container or get it from our directory
    #
    self.input_ = input_
    if 'container' in input_:
        container = input_['container']
        log.write(container)
        try:
            self.container_ = Sos.Container(
                str(settings.SOS_ROOT + '/' + container))
        except Exception as e:
            log.write(e)
            return { "Sos Error": "Container " + repr(container)
                     + " could not be opened" }
    else:
        return { "Sos Error": "Container clause is mandatory" }
    #
    # Encoding
    #
    #if 'encoding' in input:
    #    if input.encoding.lower() == 'table':
    #        self.encoding_ = self.TABLE
    #    else:
    #        self.encoding_ = self.JSON
    #
    # Schema
    #
    if 'schema' in input_:
        try:
            schema = input_['schema']
            if self.container():
                self.schema_ = self.container().schema_by_name(schema)
        except Exception as e:
            log.write("Schema Error " + repr(e))
            return { "Error": "Schema does not exist" }
    #
    # iDisplayStart (dataTables), start
    #
    if 'start' in input_:
        self.start = input_['start']
        self.start = int(self.start)
    # overrides start if specified
    if 'iDisplayStart' in input_:
        self.start = input_['iDisplayStart']
        self.start = int(self.start)
    #
    # iDisplayLength (dataTables), count
    #
    if 'count' in input_:
        self.count = input_['count']
        self.count = int(self.count)
    # overrides count if specified
    if 'iDisplayLength' in input_:
        self.count = input_['iDisplayLength']
        self.count = int(self.count)
    # Job Id
    if 'job_id' in input_:
        self.job_id = input_['job_id']
        self.job_id = int(self.job_id)

def print_obj(obj):
    print("key : {0} order : {1}".format(obj['key'], obj['order']))

# Destroy previous test data if present
try:
    chdir(ROOT_DIR)
    rmtree('iterator', ignore_errors=True)
except Exception as e:
    print(e)
    pass

# Open the test container
db = Sos.Container()
db.create('iterator')
db.open('iterator')

# Add a partition
db.part_create("ROOT")
part = db.part_by_name("ROOT")
part.state_set("primary")

# Create the test schema
template = [
    { "name": "key", "type": "uint64", "index": {} },
    { "name": "order",
def setUpClass(cls):
    # purge existing store
    shutil.rmtree(cls.STORE_PATH, ignore_errors=True)
    try:
        # create new store
        cls.cont = sos.Container()
        cls.cont.create(cls.STORE_PATH)
        cls.cont.open(cls.STORE_PATH)
        # new partition
        cls.cont.part_create(cls.PART_NAME)
        part = cls.cont.part_by_name(cls.PART_NAME)
        part.state_set("PRIMARY")
        del part
    except Exception as e:
        raise RuntimeError(
            "The test container {0} could not be created.".format(
                cls.STORE_PATH))
    # new schema
    cls.schema = schema = sos.Schema()
    schema.from_template(
        cls.SCHEMA_NAME,
        [
            {
                "name": "i32",
                "type": "INT32",
                "index": {
                    "type": cls.IDX_TYPE,
                    "key": "INT32",
                    "args": cls.IDX_ARG,
                }
            },
            {   # {uint32, uint32, uint64}
                "name": "struct",
                "type": "STRUCT",
                "size": 16,
                "index": {
                    "type": cls.IDX_TYPE,
                    "key": "MEMCMP",
                    "args": cls.IDX_ARG,
                }
            },
        ])
    cls.attr_key_half_next = [
        attr_key_half_next_INT32,
        attr_key_half_next_STRUCT,
    ]
    cls.attr_key_half_prev = [
        attr_key_half_prev_INT32,
        attr_key_half_prev_STRUCT,
    ]
    schema.add(cls.cont)
    cls.input_data = [
        (i, struct.pack("!LLLL", i, i, i, i)) \
            for i in range(10, 500, 10) \
            for _ in range(REPEAT)
    ]
    # data
    for d in cls.input_data:
        obj = schema.alloc()
        obj[:] = d
        obj.index_add()

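# Hedged sketch (not part of the test suite): '_dump_i32' is a hypothetical
# helper showing how a test method could read the objects back through the
# 'i32' index with the begin()/item()/next() iterator pattern used elsewhere
# in this section.
def _dump_i32(self):
    attr = self.schema.attr_by_name("i32")
    itr = attr.attr_iter()
    ret = itr.begin()
    while ret:
        obj = itr.item()
        print(obj['i32'])
        ret = itr.next()
    del itr
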
def set_input(self, path):
    self.in_cont = Sos.Container(path, Sos.PERM_RO)

for name in fd_data.keys():
    steady_fd = steady_fds[name]
    nfd = last_fds[name]
    if nfd > steady_fd:
        test.assert_test(assert_id, False,
                         "{}: num_fd({}) > steady_fd({})" \
                         .format(name, nfd, steady_fd))
        broken = True
        break
else:
    test.assert_test(assert_id, True, "verified")
# End fd check

#test.add_assertion(11, "SOS verification (L3)")
if L3_STORE_ROOT:
    cont = sos.Container()
    cont.open(L3_STORE_ROOT + "/test")
    schema = cont.schema_by_name("test")
    attr = schema.attr_by_name("comp_time_job")
    itr = attr.attr_iter()

    def each(i):
        b = i.begin()
        while b:
            o = i.item()
            if o:
                yield o
            b = i.next()

    TestObj = namedtuple(
        "TestObj",
import argparse as ap
import numpy
import json
from sosdb import Sos

parser = ap.ArgumentParser(description='sos_query utility')
parser.add_argument('--container', '-C', metavar='PATH',
                    help='The path to the SOS container.')
parser.add_argument('--schema', '-S', metavar='SCHEMA',
                    help='The schema name.')
parser.add_argument('--index', '-X', metavar='INDEX',
                    help='The name of the index to iterate through.')
args = parser.parse_args()

cont = Sos.Container()
cont.open(args.container)
schema = cont.schema_by_name(args.schema)
index = schema[args.index]

def STR(obj):
    """Return a str for obj, decoding bytes and passing None through."""
    if obj is None:
        return None
    if type(obj) == str:
        return obj
    if type(obj) == bytes:
        return obj.decode()
    return str(obj)

itr = index.attr_iter()

def obj_iter(itr):