def test_negative_create_1(self, data):
    """
    @test: Create gpg key with valid name and valid gpg key via file
    import then try to create new one with same name
    @feature: GPG Keys
    @assert: gpg key is not created
    """
    # Setup data to pass to the factory; copy so the shared data
    # provider dict is not mutated across test runs.
    data = data.copy()
    data['organization-id'] = self.org['label']
    # First creation must succeed.
    try:
        make_gpg_key(data)
    except Exception as e:  # py3-compatible syntax (was `except Exception, e`)
        self.fail(e)
    # Second creation with the identical name must fail — the factory
    # raises when the CLI returns a non-zero exit code.
    with self.assertRaises(Exception):
        make_gpg_key(data)
def test_positive_create_2(self, data):
    """
    @test: Create gpg key with valid name and valid gpg key via file
    import using a new organization
    @feature: GPG Keys
    @assert: gpg key is created
    """
    # Setup data to pass to the factory; copy so the shared data
    # provider dict is not mutated across test runs.
    data = data.copy()
    data['key'] = VALID_GPG_KEY_FILE_PATH
    data['organization-id'] = self.org['label']
    try:
        make_gpg_key(data)
    except Exception as e:  # py3-compatible syntax (was `except Exception, e`)
        self.fail(e)
def test_negative_create_2(self, data):
    """
    @test: Create gpg key with valid name and no gpg key
    @feature: GPG Keys
    @assert: gpg key is not created
    """
    # Work on a copy so the shared data-provider dict stays pristine.
    payload = data.copy()
    payload['organization-id'] = self.org['label']
    # Creating without a key must be rejected by the CLI.
    result = GPGKey().create(payload)
    self.assertNotEqual(
        result.return_code, 0, "Object should not be created")
    self.assertGreater(
        len(result.stderr), 0, "Should have raised an exception")
def test_positive_create_1(self, data):
    """
    @test: Create gpg key with valid name and valid gpg key via file
    import using the default created organization
    @feature: GPG Keys
    @assert: gpg key is created
    """
    # Pick the first available organization as the target.
    result = Org.list()
    self.assertGreater(len(result.stdout), 0, 'No organization found')
    org = result.stdout[0]
    # Setup data to pass to the factory; copy so the shared data
    # provider dict is not mutated across test runs.
    data = data.copy()
    data['key'] = VALID_GPG_KEY_FILE_PATH
    data['organization-id'] = org['label']
    try:
        make_gpg_key(data)
    except Exception as e:  # py3-compatible syntax (was `except Exception, e`)
        self.fail(e)
def test_negative_create_3(self, data):
    """
    @test: Create gpg key with invalid name and valid gpg key via
    file import
    @feature: GPG Keys
    @assert: gpg key is not created
    """
    # Work on a copy so the shared data-provider dict stays pristine.
    payload = data.copy()
    payload['key'] = '/tmp/%s' % generate_name()
    payload['organization-id'] = self.org['label']
    # Stage a valid key file on the remote host for the import.
    ssh.upload_file(
        local_file=VALID_GPG_KEY_FILE_PATH,
        remote_file=payload['key'])
    # The invalid name must make creation fail.
    result = GPGKey().create(payload)
    self.assertNotEqual(
        result.return_code, 0, "Object should not be created")
    self.assertGreater(
        len(result.stderr), 0, "Should have raised an exception")
def test(self, data):
    """Compare the native GFPQ quantization library's output against the
    pure-Python ``fake_quantize`` implementation on the same data.

    Runs HI_GFPQ_QuantAndDeQuant on the GPU via ctypes (init pass, then
    apply pass), then quantizes a copy of the original data with
    ``fake_quantize`` and prints the max/relative difference.

    NOTE(review): requires a CUDA device, the nnieqat shared library and
    libcublas on the library path — this is a hardware integration test.
    """
    os.environ['CUDA_VISIBLE_DEVICES'] = '0'
    # Load the quantization library and cuBLAS via ctypes.
    dl = ctypes.cdll.LoadLibrary
    quant_lib = dl("nnieqat/gpu/lib/libgfpq_gpu.so")
    _libcublas = ctypes.cdll.LoadLibrary("libcublas.so")

    # Mirrors struct GFPQ_PARAM_ST in gfpq.hpp.
    class GFPQ_PARAM_ST(ctypes.Structure):
        _fields_ = [("mode", ctypes.c_int), ("buf", ctypes.c_byte * 16)]

    class _types:
        """Some alias types."""
        handle = ctypes.c_void_p
        stream = ctypes.c_void_p

    # Keep an untouched copy for the fake_quantize comparison below.
    data_origin = data.copy()
    print(
        "----------------------------------------------------------------------"
    )
    print("\n\nOriginal data:")
    print(data)
    data = data.astype(np.float32)

    stream = cuda.stream()
    _libcublas.cublasCreate_v2.restype = int
    _libcublas.cublasCreate_v2.argtypes = [ctypes.c_void_p]
    cublas_handle = _types.handle()
    _libcublas.cublasCreate_v2(ctypes.byref(cublas_handle))

    data_gpu = cuda.to_device(data, stream=stream)
    data_p = data_gpu.device_ctypes_pointer
    bit_width = 8
    param = GFPQ_PARAM_ST()

    # Pass 1 (mode 0): init/update the quantization parameters.
    param.mode = 0
    ret = quant_lib.HI_GFPQ_QuantAndDeQuant_GPU_PY(data_p, data.size,
                                                   bit_width,
                                                   ctypes.byref(param),
                                                   stream.handle,
                                                   cublas_handle)
    if ret != 0:
        print("HI_GFPQ_QuantAndDeQuant failed(%d)\n" % (ret))
    # Pass 2 (mode 2): apply the computed parameters.
    param.mode = 2
    ret = quant_lib.HI_GFPQ_QuantAndDeQuant_GPU_PY(data_p, data.size,
                                                   bit_width,
                                                   ctypes.byref(param),
                                                   stream.handle,
                                                   cublas_handle)
    if ret != 0:
        print("HI_GFPQ_QuantAndDeQuant failed(%d)" % (ret))
    data_gpu.copy_to_host(data, stream=stream)
    # The async copy must complete before `data` is read on the host.
    stream.synchronize()
    _libcublas.cublasDestroy_v2(cublas_handle)

    # Reference implementation: quantize the original data in Python.
    import nnieqat  # noqa: F401 — imported for its registration side effects
    from quant_impl import fake_quantize
    import torch
    tensor = torch.Tensor(data_origin).cuda()
    tensor.data = fake_quantize(tensor.data.detach(), 8)

    diff = abs(tensor.cpu().numpy() - data)
    # Debug dump of elements differing by more than 0.1% of the data range.
    # BUG FIX: the "Impl result" print below used `diff_thres`, which is
    # only defined inside this commented-out block, so it raised NameError
    # at runtime; it is kept commented out with the rest of the block.
    # diff_thres = np.max(abs(data)) * 0.001
    # print("\nDIFF > 0.1%: ")
    # print("idx: ", np.where(diff > diff_thres))
    # print("Original data:", data_origin[np.where(diff > diff_thres)])
    # print("GFPQ result:", data[np.where(diff > diff_thres)])
    # print("Impl result:", tensor.cpu().numpy()[np.where(diff > diff_thres)])
    diff_max = np.max(diff)
    print("\nDIFF MAX: " + str(diff_max))
    # Guard the denominator against an all-zero input with a tiny epsilon.
    print("\nDIFF RATIO: " +
          str(diff_max / max(np.max(abs(data)), pow(10, -18))))
def test_do_activity_key_error(self, data):
    """do_activity must raise KeyError when 'result' is absent from its input."""
    # Copy, then strip the required key from the payload.
    payload = data.copy()
    payload.pop('result')
    self.verifypublishresponse.logger = MagicMock()
    with self.assertRaises(KeyError):
        self.verifypublishresponse.do_activity(payload)