def test_metadata_fillup(self):
    """JIRA ID: DAOS-1512.

    Test Description:
        Test to verify no IO happens after metadata is full.

    Use Cases:
        ?

    :avocado: tags=all,metadata,large,metadatafill,hw
    :avocado: tags=full_regression
    """
    self.pool.pool.connect(2)
    container = DaosContainer(self.context)

    # Consume all the available metadata space.
    self.log.info("Fillup Metadata....")
    for _cont in range(NO_OF_MAX_CONTAINER):
        container.create(self.pool.pool.handle)

    # Any further container create should fail with a no-metadata-space
    # error (DaosApiError).
    self.log.info("Metadata Overload...")
    try:
        for _cont in range(400):
            container.create(self.pool.pool.handle)
        # All overload creates succeeded -> metadata was not exhausted.
        self.fail("Test expected to fail with a no metadata space error")
    except DaosApiError as exe:
        # Expected path: metadata space is exhausted.
        # (The original code had an unreachable trailing self.fail()
        # after this except block; self.fail() raises, so the only way
        # past the try is through this handler. The dead code was
        # removed.)
        print(exe, traceback.format_exc())
def check_handle(self, buf_len, iov_len, buf, uuidstr, rank):
    """Verify that the global handle can be turned into a local handle.

    This gets run in a child process and verifies the global handle can
    be turned into a local handle in another process.

    Args:
        buf_len (object): buffer length; 1st return value from
            DaosPool.local2global()
        iov_len (object): iov length; 2nd return value from
            DaosPool.local2global()
        buf (object): buffer; 3rd return value from
            DaosPool.local2global()
        uuidstr (str): pool UUID
        rank (int): pool svc rank

    Raises:
        DaosApiError: if there was an error converting the pool handle
            or using the local pool handle to create a container.

    """
    # Rebuild a pool object from its uuid and svc rank so the global
    # handle can be localized in this process.
    pool = DaosPool(self.context)
    pool.set_uuid_str(uuidstr)
    pool.set_svc(rank)
    pool.group = "daos_server"

    # note that the handle is stored inside the pool as well
    dummy_local_handle = pool.global2local(self.context, iov_len,
                                           buf_len, buf)

    # perform some operations that will use the new handle; a
    # DaosApiError here means the converted handle is not usable
    pool.pool_query()
    container = DaosContainer(self.context)
    container.create(pool.handle)
def setUp(self):
    """Set up the test: prepare a pool, then create and open a container.

    Fails the test if container creation raises, or if the uuid reported
    by container query does not match the one returned at create time.
    """
    super(Snapshot, self).setUp()
    self.log.info("==In setUp, self.context= %s", self.context)
    # initialize a python pool object then create the underlying
    # daos storage and connect to it
    self.prepare_pool()

    try:
        # create a container
        self.container = DaosContainer(self.context)
        self.container.create(self.pool.pool.handle)

    except DaosApiError as error:
        self.log.info("Error detected in DAOS pool container setup: %s",
                      str(error))
        self.log.info(traceback.format_exc())
        self.fail("##Test failed on setUp, before snapshot taken")

    # now open it
    self.container.open()

    # do a query and compare the UUID returned from create with
    # that returned by query
    self.container.query()

    if self.container.get_uuid_str() != c_uuid_to_str(
            self.container.info.ci_uuid):
        self.fail("##Container UUID did not match the one in info.")
def setUp(self):
    """Set up a pool, container, object and IORequest for data validation.

    Reads the dkey/akey counts and record geometry from the test yaml.
    """
    super(ObjectDataValidation, self).setUp()
    self.obj = None
    self.ioreq = None
    self.no_of_dkeys = None
    self.no_of_akeys = None
    self.array_size = None
    self.record_length = None

    # yaml-driven test parameters
    self.no_of_dkeys = self.params.get("no_of_dkeys", '/run/dkeys/*')[0]
    self.no_of_akeys = self.params.get("no_of_akeys", '/run/akeys/*')[0]
    self.array_size = self.params.get("size", '/array_size/')
    self.record_length = self.params.get("length", '/run/record/*')

    self.prepare_pool()

    # create/open a container, then an object, and wrap both in an
    # IORequest for single-value I/O
    self.container = DaosContainer(self.context)
    self.container.create(self.pool.pool.handle)
    self.container.open()
    self.obj = DaosObj(self.context, self.container)
    self.obj.create(objcls=1)
    self.obj.open()
    self.ioreq = IORequest(self.context,
                           self.container,
                           self.obj, objtype=4)
def get_cont(self, pool, cont_uuid):
    """Wrap an existing container in a TestContainer object.

    Args:
        pool (TestPool): pool in which the existing container lives.
        cont_uuid (str): uuid of the existing container.

    Returns:
        TestContainer: the container object

    """
    # Build the underlying DaosContainer for the known uuid and attach
    # the pool handle so operations can be issued against it.
    daos_cont = DaosContainer(pool.context)
    daos_cont.uuid = str_to_c_uuid(cont_uuid)
    daos_cont.poh = pool.pool.handle

    # Wrap it in a TestContainer for use by the test harness.
    test_cont = TestContainer(pool, daos_command=self.get_daos_command())
    test_cont.container = daos_cont
    test_cont.uuid = daos_cont.get_uuid_str()

    # Track the container and its uuid for later cleanup/bookkeeping.
    self.container.append(test_cont)
    self.uuids.append(str(test_cont.uuid))

    return test_cont
def test_metadata_addremove(self):
    """JIRA ID: DAOS-1512.

    Test Description:
        Verify metadata release the space after container delete.

    Use Cases:
        ?

    :avocado: tags=metadata,metadata_free_space,nvme,large,hw
    :avocado: tags=full_regression
    """
    self.pool.pool.connect(2)
    for k in range(10):
        container_array = []
        self.log.info("Container Create Iteration %d / 9", k)
        for cont in range(NO_OF_MAX_CONTAINER):
            container = DaosContainer(self.context)
            try:
                container.create(self.pool.pool.handle)
            except DaosApiError as exc:
                self.log.info("Container create %d/%d failed: %s",
                              cont, NO_OF_MAX_CONTAINER, exc)
                self.fail("Container create failed")
            container_array.append(container)
        # Report the count via len() rather than the leaked loop
        # variable (cont+1): avoids a NameError when
        # NO_OF_MAX_CONTAINER == 0 and is correct by construction.
        self.log.info("Created %d containers", len(container_array))
        self.log.info("Container Remove Iteration %d / 9", k)
        for cont in container_array:
            cont.destroy()
def setUp(self):
    """Create and connect a pool, then create and open a container."""
    try:
        super(SameKeyDifferentValue, self).setUp()

        # parameters used in pool create
        createmode = self.params.get("mode", '/run/pool/createmode/')
        createsetid = self.params.get("setname", '/run/pool/createset/')
        createsize = self.params.get("size", '/run/pool/createsize/')
        createuid = os.geteuid()
        creategid = os.getegid()

        # initialize a python pool object then create the underlying
        # daos storage
        self.pool = DaosPool(self.context)
        self.pool.create(createmode, createuid, creategid,
                         createsize, createsetid, None)

        # connect read-write (1 << 1 == DAOS_PC_RW)
        self.pool.connect(1 << 1)

        # create a container
        self.container = DaosContainer(self.context)
        self.container.create(self.pool.handle)

        # now open it
        self.container.open()

    except DaosApiError as excpn:
        print(excpn)
        print(traceback.format_exc())
        self.fail("Test failed during setup.\n")
def write_single_object(self):
    """Write a grid of single values into a new container in the pool.

    Creates a container whose checksum property comes from the yaml
    "enable_checksum" setting, then writes one record for every
    dkey/akey combination, cycling through self.record_length for the
    record sizes.
    """
    self.pool.connect(2)
    csum = self.params.get("enable_checksum", '/run/container/*')
    container = DaosContainer(self.context)
    input_param = container.cont_input_values
    input_param.enable_chksum = csum
    container.create(poh=self.pool.pool.handle, con_prop=input_param)
    container.open()
    obj = DaosObj(self.context, container)
    obj.create(objcls=1)
    obj.open()
    ioreq = IORequest(self.context, container, obj, objtype=4)
    self.log.info("Writing the Single Dataset")
    record_index = 0
    for dkey in range(self.no_of_dkeys):
        for akey in range(self.no_of_akeys):
            # record data: first character of the akey index repeated
            # record_length[record_index] times
            indata = ("{0}".format(str(akey)[0]) *
                      self.record_length[record_index])
            d_key_value = "dkey {0}".format(dkey)
            c_dkey = ctypes.create_string_buffer(d_key_value)
            a_key_value = "akey {0}".format(akey)
            c_akey = ctypes.create_string_buffer(a_key_value)
            c_value = ctypes.create_string_buffer(indata)
            c_size = ctypes.c_size_t(ctypes.sizeof(c_value))

            ioreq.single_insert(c_dkey, c_akey, c_value, c_size)

            # cycle through the configured record lengths
            record_index = record_index + 1
            if record_index == len(self.record_length):
                record_index = 0
def setUp(self):
    """Prepare a pool/container and write+verify one object for tests."""
    super(ObjFetchBadParam, self).setUp()
    time.sleep(5)
    self.prepare_pool()

    try:
        # create a container
        self.container = DaosContainer(self.context)
        self.container.create(self.pool.pool.handle)

        # now open it
        self.container.open()

        # create an object and write some data into it
        thedata = "a string that I want to stuff into an object"
        self.datasize = len(thedata) + 1
        self.dkey = "this is the dkey"
        self.akey = "this is the akey"
        self.obj = self.container.write_an_obj(thedata,
                                               self.datasize,
                                               self.dkey,
                                               self.akey,
                                               None, None, 2)

        # read it back to confirm the setup data is intact
        thedata2 = self.container.read_an_obj(self.datasize, self.dkey,
                                              self.akey, self.obj)
        if thedata not in thedata2.value:
            print(thedata)
            print(thedata2.value)
            self.fail("Error reading back data, test failed during"
                      " the initial setup.\n")

    except DaosApiError as excep:
        print(excep)
        print(traceback.format_exc())
        self.fail("Test failed during the initial setup.\n")
def setUp(self):
    """Set up a pool with a checksum-configured container, object, ioreq."""
    super(ChecksumContainerValidation, self).setUp()
    self.agent_sessions = None
    self.pool = None
    self.container = None
    self.obj = None
    self.ioreq = None
    self.no_of_dkeys = None
    self.no_of_akeys = None
    self.array_size = None
    self.record_length = None

    # yaml-driven test parameters
    self.no_of_dkeys = self.params.get("no_of_dkeys", '/run/dkeys/*')[0]
    self.no_of_akeys = self.params.get("no_of_akeys", '/run/akeys/*')[0]
    self.record_length = self.params.get("length", '/run/record/*')

    self.add_pool(connect=False)
    self.pool.connect(2)

    # create a container whose checksum property comes from the yaml
    self.csum = self.params.get("enable_checksum", '/run/container/*')
    self.container = DaosContainer(self.context)
    input_param = self.container.cont_input_values
    input_param.enable_chksum = self.csum
    self.container.create(poh=self.pool.pool.handle,
                          con_prop=input_param)
    self.container.open()

    # create/open an object and wrap everything for single-value I/O
    self.obj = DaosObj(self.context, self.container)
    self.obj.create(objcls=1)
    self.obj.open()
    self.ioreq = IORequest(self.context,
                           self.container,
                           self.obj, objtype=4)
def test_file_modification(self): """Test ID: DAOS-???. Test Description: Test whether file modification happens as expected under different permission levels. :avocado: tags=all,daily_regression :avocado: tags=pool,permission,file_modification """ # parameter used for pool connect permissions = self.params.get("perm", '/run/createtests/permissions/*') expected_result = self.params.get("exp_result", '/run/createtests/permissions/*') # initialize a python pool object then create the underlying # daos storage self.add_pool(create=False) self.test_log.debug("Pool initialization successful") self.pool.create() self.test_log.debug("Pool Creation successful") try: self.pool.connect(1 << permissions) self.test_log.debug("Pool Connect successful") except TestFail as excep: self.log.error(str(excep)) if expected_result == RESULT_PASS: self.fail( "#Test was expected to pass but it failed at pool.connect.\n") try: self.container = DaosContainer(self.context) self.test_log.debug("Container initialization successful") self.container.create(self.pool.pool.handle) self.test_log.debug("Container create successful") # now open it self.container.open() self.test_log.debug("Container open successful") thedata = b"a string that I want to stuff into an object" size = 45 dkey = b"this is the dkey" akey = b"this is the akey" self.container.write_an_obj(thedata, size, dkey, akey) self.test_log.debug("Container write successful") if expected_result == RESULT_FAIL: self.fail( "Test was expected to fail at container operations " + "but it passed.\n") else: self.test_log.debug("Test Passed.") except DaosApiError as excep: self.log.error(str(excep)) if expected_result == RESULT_PASS: self.fail( "#Test was expected to pass but it failed at container operations.\n") else: self.test_log.debug("Test expected failed in container create, r/w. Test Passed.")
def create(self, uuid=None, con_in=None):
    """Create a container.

    Args:
        uuid (str, optional): container uuid. Defaults to None.
        con_in (list, optional): container creation properties in the
            order [type, enable_chksum, srv_verify, chksum_type,
            chunk_size]; only used with the API control method.
            Defaults to None.
    """
    self.destroy()
    self.log.info(
        "Creating a container with pool handle %s",
        self.pool.pool.handle.value)
    self.container = DaosContainer(self.pool.context)

    if self.control_method.value == self.USE_API:
        # Create a container with the API method
        kwargs = {"poh": self.pool.pool.handle}
        if uuid is not None:
            kwargs["con_uuid"] = uuid

        # Refer daos_api for setting input params for DaosContainer.
        if con_in is not None:
            cop = self.input_params.get_con_create_params()
            cop.type = con_in[0]
            cop.enable_chksum = con_in[1]
            cop.srv_verify = con_in[2]
            cop.chksum_type = con_in[3]
            cop.chunk_size = con_in[4]
            kwargs["con_prop"] = cop

        self._call_method(self.container.create, kwargs)

    elif self.control_method.value == self.USE_DAOS and self.daos:
        # Create a container with the daos command
        kwargs = {
            "pool": self.pool.uuid,
            "sys_name": self.pool.name.value,
            "svc": ",".join(str(rank) for rank in self.pool.svc_ranks),
            "cont": uuid,
            "path": self.path.value,
            "cont_type": self.type.value,
            "oclass": self.oclass.value,
            "chunk_size": self.chunk_size.value,
            "properties": self.properties.value,
        }
        self._log_method("daos.container_create", kwargs)
        uuid = self.daos.get_output("container_create", **kwargs)[0]

        # Populate the empty DaosContainer object with the properties of
        # the container created with daos container create.
        self.container.uuid = str_to_c_uuid(uuid)
        self.container.attached = 1

    elif self.control_method.value == self.USE_DAOS:
        self.log.error("Error: Undefined daos command")

    else:
        self.log.error(
            "Error: Undefined control_method: %s",
            self.control_method.value)

    self.uuid = self.container.get_uuid_str()
    self.log.info(" Container created with uuid %s", self.uuid)
def write_a_bunch_of_values(self, how_many):
    """Write and verify `how_many` single-value key:value pairs.

    Creates and opens a fresh container, writes one record per key with
    a distinct dkey/akey, reads each record back and compares it with
    the written data, then closes and destroys the container.

    Args:
        how_many (int): number of dkey/akey:value records to write.
    """
    self.container = DaosContainer(self.context)
    self.container.create(self.pool.pool.handle)
    self.container.open()
    ioreq = IORequest(self.context, self.container, None)

    print("Started Writing the Dataset-----------\n")
    inc = 50000  # progress is reported roughly every `inc` keys
    last_key = inc
    for key in range(how_many):
        c_dkey = create_string_buffer("dkey {0}".format(key))
        c_akey = create_string_buffer("akey {0}".format(key))
        c_value = create_string_buffer(
            "some data that gets stored with the key {0}".format(key))
        c_size = ctypes.c_size_t(ctypes.sizeof(c_value))
        ioreq.single_insert(c_dkey, c_akey, c_value, c_size)

        if key > last_key:
            print("written: {}".format(key))
            sys.stdout.flush()
            last_key = key + inc

    print("Started Verification of the Dataset-----------\n")
    last_key = inc
    for key in range(how_many):
        c_dkey = create_string_buffer("dkey {0}".format(key))
        c_akey = create_string_buffer("akey {0}".format(key))
        the_data = "some data that gets stored with the key {0}".format(key)
        val = ioreq.single_fetch(c_dkey, c_akey, len(the_data)+1)
        exp_value = val.value.decode("utf-8")
        if the_data != exp_value:
            self.fail("ERROR: Data mismatch for dkey = {0}, akey={1}, "
                      "Expected Value={2} and Received Value={3}\n"
                      .format("dkey {0}".format(key),
                              "akey {0}".format(key),
                              the_data, exp_value))

        if key > last_key:
            # fixed progress-message typo: "veried" -> "verified"
            print("verified: {}".format(key))
            sys.stdout.flush()
            last_key = key + inc

    print("starting destroy")
    self.container.close()
    self.container.destroy()
    print("destroy complete")
def setUp(self):
    """Prepare a pool and create/open a container for attribute tests."""
    super(ContainerAttributeTest, self).setUp()
    # Cache for large attribute data generated during the tests.
    self.large_data_set = {}
    self.prepare_pool()
    # Create a container in the prepared pool and open it.
    self.container = DaosContainer(self.context)
    self.container.create(self.pool.pool.handle)
    self.container.open()
def test_createasync(self):
    """Test container create for asynchronous mode.

    :avocado: tags=all,small,full_regression,container,createasync
    """
    # the async callback cb_func signals GLOB_SIGNAL and stores the
    # return code in GLOB_RC
    global GLOB_SIGNAL
    global GLOB_RC

    # initialize a python pool object then create the underlying
    # daos storage
    self.prepare_pool()
    poh = self.pool.pool.handle

    try:
        # Container initialization and creation
        self.container1 = DaosContainer(self.context)
        self.container2 = DaosContainer(self.context)

        GLOB_SIGNAL = threading.Event()
        self.container1.create(poh=poh, con_uuid=None, cb_func=cb_func)

        GLOB_SIGNAL.wait()
        if GLOB_RC != 0:
            self.fail("RC not as expected in async test")
        print("RC after successful container create: ", GLOB_RC)

        # Try to recreate container after destroying pool,
        # this should fail. Checking rc after failure.
        self.pool.destroy(1)
        GLOB_SIGNAL = threading.Event()
        # sentinel value; the callback must overwrite it
        GLOB_RC = -9900000
        self.container2.create(poh, None, cb_func)

        GLOB_SIGNAL.wait()
        if GLOB_RC == 0:
            self.fail("RC not as expected in async test")
        print("RC after unsuccessful container create: ", GLOB_RC)
    except DaosApiError as excep:
        print(excep)
        print(traceback.format_exc())
def setUp(self):
    """Create a pool/container and write one object to read back in tests."""
    super(ObjOpenBadParam, self).setUp()
    try:
        # parameters used in pool create
        createmode = self.params.get("mode", '/run/pool/createmode/')
        createsetid = self.params.get("setname", '/run/pool/createset/')
        createsize = self.params.get("size", '/run/pool/createsize/')
        createuid = os.geteuid()
        creategid = os.getegid()

        # initialize a python pool object then create the underlying
        # daos storage
        self.pool = DaosPool(self.context)
        self.pool.create(createmode, createuid, creategid,
                         createsize, createsetid, None)

        # need a connection to create container
        self.pool.connect(1 << 1)

        # create a container
        self.container = DaosContainer(self.context)
        self.container.create(self.pool.handle)

        # now open it
        self.container.open()

        # create an object and write some data into it
        thedata = "a string that I want to stuff into an object"
        self.datasize = len(thedata) + 1
        self.dkey = "this is the dkey"
        self.akey = "this is the akey"
        self.obj, self.epoch = self.container.write_an_obj(thedata,
                                                           self.datasize,
                                                           self.dkey,
                                                           self.akey,
                                                           obj_cls=1)

        # read it back to confirm the setup data is intact
        thedata2 = self.container.read_an_obj(self.datasize, self.dkey,
                                              self.akey, self.obj,
                                              self.epoch)
        if thedata not in thedata2.value:
            print(thedata)
            print(thedata2.value)
            err_str = "Error reading back data, test failed during the " \
                      "initial setup."
            self.d_log.error(err_str)
            self.fail(err_str)

        # setup leaves object in open state, so closing to start clean
        self.obj.close()

    except DaosApiError as excep:
        print(excep)
        print(traceback.format_exc())
        self.fail("Test failed during the initial setup.")
def test_container_create(self):
    """Test ID: DAOS-689.

    Test Description:
        valid and invalid container creation and close.

    :avocado: tags=all,container,tiny,smoke,full_regression,containercreate
    """
    contuuid = None
    expected_results = []

    # setup the pool
    self.prepare_pool()

    # maybe use the good handle, maybe not
    handleparam = self.params.get("handle", '/run/poolhandle/*')
    if handleparam == 'VALID':
        poh = self.pool.pool.handle
    else:
        poh = handleparam
        expected_results.append('FAIL')

    # maybe use a good UUID, maybe not; uuidparam is a (value,
    # expected-result) pair from the yaml
    uuidparam = self.params.get("uuid", "/uuids/*")
    expected_results.append(uuidparam[1])
    if uuidparam[0] == 'NULLPTR':
        contuuid = 'NULLPTR'
    else:
        contuuid = uuid.UUID(uuidparam[0])

    # any single expected FAIL means the overall create should fail
    should_fail = False
    for result in expected_results:
        if result == 'FAIL':
            should_fail = True
            break

    try:
        self.container = DaosContainer(self.context)
        self.container.create(poh, contuuid)

        # check UUID is the specified one
        if (uuidparam[0]).upper() != self.container.get_uuid_str().upper():
            print("uuidparam[0] is {}, uuid_str is {}".format(
                uuidparam[0], self.container.get_uuid_str()))
            self.fail("Container UUID differs from specified at create\n")

        if should_fail:
            self.fail("Test was expected to fail but it passed.\n")

    except DaosApiError as excep:
        print(excep)
        print(traceback.format_exc())
        if not should_fail:
            self.fail("Test was expected to pass but it failed.\n")
def test_global_handle(self): """ Test ID: DAO Test Description: Use a pool handle in another process. :avocado: tags=all,container,tiny,daily_regression,conthandle """ # initialize a python pool object then create the underlying # daos storage and connect to it self.prepare_pool() # create a pool global handle iov_len, buf_len, buf = self.pool.pool.local2global() buftype = ctypes.c_byte * buf_len c_buf = buftype.from_buffer(buf) sct_pool_handle = ( sharedctypes.RawValue(IOV, ctypes.cast(c_buf, ctypes.c_void_p), buf_len, iov_len)) try: # create a container self.container = DaosContainer(self.context) self.container.create(self.pool.pool.handle) self.container.open() # create a container global handle iov_len, buf_len, buf = self.container.local2global() buftype = ctypes.c_byte * buf_len c_buf = buftype.from_buffer(buf) sct_cont_handle = ( sharedctypes.RawValue(IOV, ctypes.cast(c_buf, ctypes.c_void_p), buf_len, iov_len)) sct_pool_uuid = sharedctypes.RawArray( ctypes.c_byte, self.pool.pool.uuid) # this should work in the future but need on-line server addition #arg_list = ( #p = Process(target=check_handle, args=arg_list) #p.start() #p.join() # for now verifying global handle in the same process which is not # the intended use case self.check_handle( sct_pool_handle, sct_pool_uuid, sct_cont_handle, 0) except DaosApiError as excep: print(excep) print(traceback.format_exc()) self.fail("Expecting to pass but test has failed.\n")
def setUp(self):
    """Launch agents/servers, then create a pool/container/object/ioreq.

    NOTE(review): this setUp does not call super().setUp(), yet it uses
    self.params and self.workdir — presumably provided by the avocado
    Test base class without the super call; confirm before changing.
    """
    self.agent_sessions = None
    self.pool = None
    self.container = None
    self.obj = None
    self.ioreq = None
    self.hostlist = None
    self.hostfile = None
    self.no_of_dkeys = None
    self.no_of_akeys = None
    self.array_size = None
    self.record_length = None

    # locate the DAOS installation from the build variables file
    with open('../../.build_vars.json') as json_f:
        build_paths = json.load(json_f)
    self.basepath = os.path.normpath(build_paths['PREFIX'] + "/../")
    server_group = self.params.get("name", '/server_config/',
                                   'daos_server')
    self.context = DaosContext(build_paths['PREFIX'] + '/lib64/')
    self.d_log = DaosLog(self.context)
    self.hostlist = self.params.get("test_machines", '/run/hosts/*')
    self.hostfile = write_host_file.write_host_file(
        self.hostlist, self.workdir)
    self.no_of_dkeys = self.params.get("no_of_dkeys", '/run/dkeys/*')[0]
    self.no_of_akeys = self.params.get("no_of_akeys", '/run/akeys/*')[0]
    self.array_size = self.params.get("size", '/array_size/')
    self.record_length = self.params.get("length", '/run/record/*')

    # start the DAOS agents and servers
    self.agent_sessions = agent_utils.run_agent(self.basepath,
                                                self.hostlist)
    server_utils.run_server(self, self.hostfile, server_group)

    # create and connect a pool, then a container/object/ioreq
    self.pool = DaosPool(self.context)
    self.pool.create(self.params.get("mode", '/run/pool/createmode/*'),
                     os.geteuid(),
                     os.getegid(),
                     self.params.get("size", '/run/pool/createsize/*'),
                     self.params.get("setname", '/run/pool/createset/*'),
                     None)
    self.pool.connect(2)
    self.container = DaosContainer(self.context)
    self.container.create(self.pool.handle)
    self.container.open()
    self.obj = DaosObj(self.context, self.container)
    self.obj.create(objcls=1)
    self.obj.open()
    self.ioreq = IORequest(self.context,
                           self.container,
                           self.obj, objtype=4)
def test_global_handle(self):
    """Test ID: DAO.

    Test Description:
        Use a pool handle in another process.

    :avocado: tags=all,pool,pr,tiny,poolglobalhandle
    """
    try:
        # use the uid/gid of the user running the test, these should
        # be perfectly valid
        createuid = os.geteuid()
        creategid = os.getegid()

        # parameters used in pool create that are in yaml
        createmode = self.params.get("mode", '/run/testparams/createmode/')
        createsetid = self.params.get("setname",
                                      '/run/testparams/createset/')
        createsize = self.params.get("size", '/run/testparams/createsize/')

        # initialize a python pool object then create the underlying
        # daos storage
        self.pool = DaosPool(self.context)
        self.pool.create(createmode, createuid, creategid,
                         createsize, createsetid, None)
        self.pool.connect(1 << 1)

        # create a container just to make sure handle is good
        self.container = DaosContainer(self.context)
        self.container.create(self.pool.handle)

        # create a global handle
        iov_len, buf_len, buf = self.pool.local2global()

        # this should work in the future but need on-line server addition
        #arg_list = (buf_len, iov_len, buf, pool.get_uuid_str(), 0)
        #p = Process(target=check_handle, args=arg_list)
        #p.start()
        #p.join()

        # for now verifying global handle in the same process which is not
        # the intended use case
        self.check_handle(buf_len, iov_len, buf,
                          self.pool.get_uuid_str(), 0)

    except DaosApiError as excep:
        print(excep)
        print(traceback.format_exc())
        self.fail("Expecting to pass but test has failed.\n")
def setUp(self):
    """Set up the progress logger, a pool, and an open container."""
    super().setUp()
    self.plog = logging.getLogger("progress")
    try:
        self.prepare_pool()
        # Create a container in the prepared pool, log it, then open it.
        self.container = DaosContainer(self.context)
        self.container.create(self.pool.pool.handle)
        self.plog.info(
            "Container %s created.", self.container.get_uuid_str())
        self.container.open()
    except DaosApiError as error:
        print(error)
        print(traceback.format_exc())
        self.fail("Test failed during setup .\n")
def setUp(self):
    """Prepare a pool and create/open a container for punch tests."""
    super(PunchTest, self).setUp()
    self.prepare_pool()
    try:
        # Create a container in the prepared pool and open it.
        self.container = DaosContainer(self.context)
        self.container.create(self.pool.pool.handle)
        self.container.open()
    except DaosApiError as error:
        print(error)
        print(traceback.format_exc())
        self.fail("Test failed during setup.\n")
def setUp(self):
    """Create/connect a pool, then create, open and verify a container."""
    super(Snapshot, self).setUp()
    # get parameters from yaml file, set default
    createmode = self.params.get("mode", '/run/poolparams/createmode/', 511)
    createuid = os.geteuid()
    creategid = os.getegid()
    createsetid = self.params.get("setname", '/run/poolparams/createset/')
    createsize = self.params.get("size", '/run/poolparams/createsize/')
    self.log.info("==In setUp, self.context= %s", self.context)

    try:
        # initialize a python pool object then create the underlying
        # daos storage
        self.pool = DaosPool(self.context)
        self.pool.create(createmode, createuid, creategid,
                         createsize, createsetid, None)

        # need a connection to the pool with rw permission
        #   DAOS_PC_RO = int(1 << 0)
        #   DAOS_PC_RW = int(1 << 1)
        #   DAOS_PC_EX = int(1 << 2)
        self.pool.connect(1 << 1)

        # create a container
        self.container = DaosContainer(self.context)
        self.container.create(self.pool.handle)

    except DaosApiError as error:
        self.log.info("Error detected in DAOS pool container setup: %s",
                      str(error))
        self.log.info(traceback.format_exc())
        self.fail("##Test failed on setUp, before snapshot taken")

    # now open it
    self.container.open()

    # do a query and compare the UUID returned from create with
    # that returned by query
    self.container.query()

    if self.container.get_uuid_str() != c_uuid_to_str(
            self.container.info.ci_uuid):
        self.fail("##Container UUID did not match the one in info.")
def create(self, uuid=None):
    """Create a container.

    Destroys any existing container first, then creates a new
    DaosContainer in this object's pool and records its uuid.

    Args:
        uuid (str, optional): container uuid. Defaults to None.
    """
    self.destroy()
    self.log.info(
        "Creating a container with pool handle %s",
        self.pool.pool.handle.value)
    self.container = DaosContainer(self.pool.context)
    kwargs = {"poh": self.pool.pool.handle}
    if uuid is not None:
        kwargs["con_uuid"] = uuid
    self._call_method(self.container.create, kwargs)
    self.uuid = self.container.get_uuid_str()
    self.log.info(" Container created with uuid %s", self.uuid)
def setUp(self):
    """Create/connect a pool and create/open a container for attr tests."""
    super(ContainerAttributeTest, self).setUp()
    # cache for large attribute data generated during the tests
    self.large_data_set = {}

    self.pool = DaosPool(self.context)
    self.pool.create(
        self.params.get("mode", '/run/attrtests/createmode/*'),
        os.geteuid(),
        os.getegid(),
        self.params.get("size", '/run/attrtests/createsize/*'),
        self.params.get("setname", '/run/attrtests/createset/*'),
        None)
    # connect read-write (1 << 1 == DAOS_PC_RW)
    self.pool.connect(1 << 1)
    poh = self.pool.handle
    self.container = DaosContainer(self.context)
    self.container.create(poh)
    self.container.open()
def test_bad_handle(self):
    """Test ID: DAOS-1376.

    Test Description:
        Pass a bogus object handle, should return bad handle.

    :avocado: tags=all,object,full_regression,small,objbadhand
    """
    self.prepare_pool()

    try:
        # create a container
        container = DaosContainer(self.context)
        container.create(self.pool.pool.handle)
        self.plog.info("Container %s created.",
                       container.get_uuid_str())

        # now open it
        container.open()

        # create an object and write some data into it
        thedata = "a string that I want to stuff into an object"
        thedatasize = len(thedata) + 1
        dkey = "this is the dkey"
        akey = "this is the akey"
        obj = container.write_an_obj(thedata, thedatasize, dkey, akey,
                                     None, None, 2)

        # clobber the object handle with a bogus value; the next write
        # is expected to fail with DER_NO_HDL (-1002)
        saved_oh = obj.obj_handle
        obj.obj_handle = 99999

        obj = container.write_an_obj(thedata, thedatasize, dkey, akey,
                                     obj, None, 2)

        # NOTE(review): saved_oh came from obj.obj_handle but is
        # restored to container.oh — confirm this is intentional.
        container.oh = saved_oh
        container.close()
        container.destroy()
        self.fail("Test was expected to return a -1002 but it has not.\n")

    except DaosApiError as excep:
        container.oh = saved_oh
        container.close()
        container.destroy()
        self.plog.info("Test Complete")
        if '-1002' not in str(excep):
            print(excep)
            print(traceback.format_exc())
            self.fail("Test was expected to get -1002 but it has not.\n")
def check_handle(self, pool_glob_handle, uuidstr, cont_glob_handle, rank): """Verify that the global handles can be turned into local handles. This gets run in a child process and verifies the global handles can be turned into local handles in another process. Args: pool_glob_handle (sharedctypes.RawValue): pool handle uuidstr (sharedctypes.RawArray): pool uuid cont_glob_handle (sharedctypes.RawValue): container handle rank (int): pool svc rank Raises: DaosApiError: if there was an error converting the pool handle or using the local pool handle to create a container. """ # setup the pool and connect using global handle pool = DaosPool(self.context) pool.uuid = uuidstr pool.set_svc(rank) pool.group = "daos_server" buf = ctypes.cast( pool_glob_handle.iov_buf, ctypes.POINTER(ctypes.c_byte * pool_glob_handle.iov_buf_len)) buf2 = bytearray() buf2.extend(buf.contents) pool_handle = pool.global2local(self.context, pool_glob_handle.iov_len, pool_glob_handle.iov_buf_len, buf2) # perform an operation that will use the new handle, if it # doesn't throw an exception, then all is well. pool.pool_query() # setup the container and then connect using the global handle container = DaosContainer(self.context) container.poh = pool_handle buf = ctypes.cast( cont_glob_handle.iov_buf, ctypes.POINTER(ctypes.c_byte * cont_glob_handle.iov_buf_len)) buf2 = bytearray() buf2.extend(buf.contents) dummy_cont_handle = container.global2local( self.context, cont_glob_handle.iov_len, cont_glob_handle.iov_buf_len, buf2) # just try one thing to make sure handle is good container.query()
def test_create_async(self):
    """Test container create for asynchronous mode.

    Test both positive and negative cases. For negative case, RC is
    -1002, but we just check if it's something other than 0 to make the
    test robust. The negative case is more like a test of the API
    implementation rather than DAOS itself.

    :avocado: tags=all,full_regression,container,cont_create_async
    """
    self.add_pool()
    # save the pool handle before the pool is destroyed below
    ph = self.pool.pool.handle
    cbh1 = CallbackHandler()
    cbh2 = CallbackHandler()
    self.container.append(TestContainer(pool=self.pool, cb_handler=cbh1))
    self.container.append(TestContainer(pool=self.pool))

    # We can't use TestContainer.create after the pool is destroyed, but we
    # can call DaosContainer.create to create the underlying DaosContainer,
    # so manually instantiate it and set it.
    self.container[1].container = DaosContainer(self.pool.context)

    try:
        self.container[0].create()
        self.assertEqual(
            cbh1.ret_code, RC_SUCCESS,
            "Async create failed! RC = {}".format(cbh1.ret_code))

        # Destroy pool and try to create the second container. TestContainer
        # calls wait, but we're using DaosContainer, so we need to manually
        # call it.
        self.pool.destroy(1)
        self.container[1].container.create(
            poh=ph, con_uuid=None, cb_func=cbh2.callback)
        cbh2.wait()
        self.assertTrue(
            cbh2.ret_code is not None and cbh2.ret_code != RC_SUCCESS,
            "Async create of non-existing container succeeded!")
    except DaosApiError as excep:
        print(excep)
        print(traceback.format_exc())
def setUp(self):
    """Launch agents/servers, then create pool/container/object/ioreq."""
    super(ObjectDataValidation, self).setUp()
    self.agent_sessions = None
    self.pool = None
    self.container = None
    self.obj = None
    self.ioreq = None
    self.hostlist = None
    self.hostfile = None
    self.no_of_dkeys = None
    self.no_of_akeys = None
    self.array_size = None
    self.record_length = None

    # yaml-driven test parameters
    server_group = self.params.get("name", '/server_config/',
                                   'daos_server')
    self.hostlist = self.params.get("test_servers", '/run/hosts/*')
    self.hostfile = write_host_file.write_host_file(
        self.hostlist, self.workdir)
    self.no_of_dkeys = self.params.get("no_of_dkeys", '/run/dkeys/*')[0]
    self.no_of_akeys = self.params.get("no_of_akeys", '/run/akeys/*')[0]
    self.array_size = self.params.get("size", '/array_size/')
    self.record_length = self.params.get("length", '/run/record/*')

    # start the DAOS agents and servers
    self.agent_sessions = agent_utils.run_agent(self, self.hostlist)
    server_utils.run_server(self, self.hostfile, server_group)

    # create and connect a pool, then build container/object/ioreq
    self.pool = DaosPool(self.context)
    self.pool.create(self.params.get("mode", '/run/pool/createmode/*'),
                     os.geteuid(),
                     os.getegid(),
                     self.params.get("size", '/run/pool/createsize/*'),
                     self.params.get("setname", '/run/pool/createset/*'),
                     None)
    self.pool.connect(2)
    self.container = DaosContainer(self.context)
    self.container.create(self.pool.handle)
    self.container.open()
    self.obj = DaosObj(self.context, self.container)
    self.obj.create(objcls=1)
    self.obj.open()
    self.ioreq = IORequest(self.context,
                           self.container,
                           self.obj, objtype=4)
def test_close_async(self):
    """Test container close for asynchronous mode.

    Test both positive and negative cases.

    :avocado: tags=all,full_regression,container,cont_close_async
    """
    self.add_pool()
    cbh1 = CallbackHandler()
    cbh2 = CallbackHandler()
    tc1 = TestContainer(pool=self.pool)
    tc2 = TestContainer(pool=self.pool)
    self.container.append(tc1)
    self.container.append(tc2)

    # We need to open to test close.
    tc1.create()
    tc1.open()
    # attach the callback handler so close() runs asynchronously
    tc1.cb_handler = cbh1

    # We'll test to close the non-existing container, so just instantiate
    # the underlying DaosContainer and set it.
    tc2.container = DaosContainer(self.pool.context)
    tc2.cb_handler = cbh2

    try:
        tc1.close()
        self.assertEqual(
            cbh1.ret_code, RC_SUCCESS,
            "Async close failed! RC = {}".format(cbh1.ret_code))

        # If we use TestContainer, it'll call the wait for us, but we're
        # using DaosContainer, so we need to manually call it.
        tc2.container.close(cb_func=cbh2.callback)
        cbh2.wait()
        self.assertTrue(
            cbh2.ret_code is not None and cbh2.ret_code != RC_SUCCESS,
            "Async close of non-existing container succeeded! " +
            "RC = {}".format(cbh2.ret_code))
    except DaosApiError as excep:
        print(excep)
        print(traceback.format_exc())