# NOTE: assumed imports for this excerpt. The project-local modules used
# below (cl_launcher, cldrive, crypto, data, deepsmith_pb2, driver, env,
# fs, harness_pb2, labtypes, pbutil, testlib) come from the surrounding
# repository; their exact import paths are not shown here.
import pathlib

import numpy as np
import pytest
from absl import app, flags, logging

FLAGS = flags.FLAGS


def test_vector_input():
  inputs = [[0, 1, 2, 3, 0, 1, 2, 3], [2, 4]]
  inputs_orig = [[0, 1, 2, 3, 0, 1, 2, 3], [2, 4]]
  outputs_gs = [[0, 2, 4, 6, 0, 4, 8, 12], [2, 4]]
  src = """
    kernel void A(global int* a, const int2 b) {
      const int x_id = get_global_id(0);
      const int y_id = get_global_id(1);
      if (!y_id) {
        a[x_id] *= b.x;
      } else {
        a[get_global_size(0) + x_id] *= b.y;
      }
    }
  """
  outputs = driver.DriveKernel(
      env.OclgrindOpenCLEnvironment(), src, inputs,
      gsize=(4, 2, 1), lsize=(1, 1, 1))
  testlib.Assert2DArraysAlmostEqual(inputs, inputs_orig)
  testlib.Assert2DArraysAlmostEqual(outputs, outputs_gs)

  # Run the kernel a second time on the previous outputs.
  outputs2 = driver.DriveKernel(
      env.OclgrindOpenCLEnvironment(), src, outputs,
      gsize=(4, 2, 1), lsize=(1, 1, 1))
  outputs2_gs = [[0, 4, 8, 12, 0, 16, 32, 48], [2, 4]]
  testlib.Assert2DArraysAlmostEqual(outputs2, outputs2_gs)

def test_ClLauncherHarness_oclgrind_testbed():
  """Test that harness can be made from project-local oclgrind."""
  config = harness_pb2.ClLauncherHarness()
  config.opencl_env.extend([
    env.OclgrindOpenCLEnvironment().name,
    env.OclgrindOpenCLEnvironment().name
  ])
  config.opencl_opt.extend([True, False])
  harness = cl_launcher.ClLauncherHarness(config)
  assert len(harness.testbeds) == 2
  assert harness.testbeds[0].name == env.OclgrindOpenCLEnvironment().name
  assert harness.testbeds[0].opts['opencl_opt'] == 'enabled'
  assert harness.testbeds[1].name == env.OclgrindOpenCLEnvironment().name
  assert harness.testbeds[1].opts['opencl_opt'] == 'disabled'

def test_zero_size_input():
  src = "kernel void A(global int* a) {}"
  with pytest.raises(ValueError):
    driver.DriveKernel(
        env.OclgrindOpenCLEnvironment(), src, [[]],
        gsize=(1, 1, 1), lsize=(1, 1, 1))

def test_gsize_smaller_than_lsize():
  src = "kernel void A() {}"
  with pytest.raises(ValueError):
    driver.DriveKernel(
        env.OclgrindOpenCLEnvironment(), src, [],
        gsize=(4, 1, 1), lsize=(8, 1, 1))

def test_invalid_sizes():
  src = "kernel void A() {}"

  # Invalid global size.
  with pytest.raises(ValueError):
    driver.DriveKernel(
        env.OclgrindOpenCLEnvironment(), src, [],
        gsize=(0, -4, 1), lsize=(1, 1, 1))

  # Invalid local size.
  with pytest.raises(ValueError):
    driver.DriveKernel(
        env.OclgrindOpenCLEnvironment(), src, [],
        gsize=(1, 1, 1), lsize=(-1, 1, 1))

def test_empty_kernel():
  src = "kernel void A() {}"
  outputs = driver.DriveKernel(
      env.OclgrindOpenCLEnvironment(), src, [],
      gsize=(1, 1, 1), lsize=(1, 1, 1))
  assert len(outputs) == 0

def test_syntax_error():
  # The mangled argument list is intentional: it must not compile.
  src = "kernel void A(gl ob a l i nt* a) {}"
  with testlib.DevNullRedirect():
    with pytest.raises(driver.OpenCLValueError):
      driver.DriveKernel(
          env.OclgrindOpenCLEnvironment(), src, [[]],
          gsize=(1, 1, 1), lsize=(1, 1, 1))

def test_timeout():
  # Non-terminating kernel.
  src = "kernel void A() { while (true) ; }"
  with pytest.raises(driver.Timeout):
    driver.DriveKernel(
        env.OclgrindOpenCLEnvironment(), src, [],
        gsize=(1, 1, 1), lsize=(1, 1, 1), timeout=1)

def test_ExecClsmithSource_syntax_error():
  """Test outcome of kernel with syntax error."""
  env_ = env.OclgrindOpenCLEnvironment()
  proc = cl_launcher.ExecClsmithSource(
      env_, "!@!###syntax error!",
      driver.NDRange(1, 1, 1), driver.NDRange(1, 1, 1), '---debug')
  assert proc.returncode == 1
  assert proc.stdout == ''
  assert 'Error building program: -11' in proc.stderr

def test_data_unchanged():
  # A kernel which does not touch its arguments must leave them unchanged.
  src = "kernel void A(global int* a, global int* b, const int c) {}"
  inputs = data.MakeRand(src, 16)
  outputs = driver.DriveKernel(
      env.OclgrindOpenCLEnvironment(), src, inputs,
      gsize=(16, 1, 1), lsize=(1, 1, 1))
  testlib.Assert2DArraysAlmostEqual(outputs, inputs)

def test_iterative_increment():
  src = "kernel void A(global int* a) { a[get_global_id(0)] += 1; }"

  d_cl, d_host = [np.arange(16)], np.arange(16)
  for _ in range(8):
    d_host += 1  # Perform computation on host.
    d_cl = driver.DriveKernel(
        env.OclgrindOpenCLEnvironment(), src, d_cl,
        gsize=(16, 1, 1), lsize=(16, 1, 1))
    testlib.Assert2DArraysAlmostEqual(d_cl, [d_host])

def test_data_zerod():
  # Zero-ing a randomly initialized array.
  src = "kernel void A(global int* a) { a[get_global_id(0)] = 0; }"
  inputs = data.MakeRand(src, 16)
  outputs = driver.DriveKernel(
      env.OclgrindOpenCLEnvironment(), src, inputs,
      gsize=(16, 1, 1), lsize=(4, 1, 1))
  testlib.Assert2DArraysAlmostEqual(outputs, [np.zeros(16)])

def test_comparison_against_pointer_warning():
  # Comparing an int against a pointer emits a compiler warning, but the
  # kernel must still run.
  src = """
    kernel void A(global int* a) {
      int id = get_global_id(0);
      if (id < a) a += 1;
    }
  """
  driver.DriveKernel(
      env.OclgrindOpenCLEnvironment(), src, [[0]],
      gsize=(1, 1, 1), lsize=(1, 1, 1))

def main(argv):
  if len(argv) > 1:
    unknown_args = ', '.join(argv[1:])
    raise app.UsageError(f"Unknown arguments {unknown_args}")

  logging.info('Preparing OpenCL testbed.')
  config = harness_pb2.CldriveHarness()
  config.opencl_env.extend([env.OclgrindOpenCLEnvironment().name])
  config.opencl_opt.extend([FLAGS.opencl_opt])
  harness = cldrive.CldriveHarness(config)
  assert len(harness.testbeds) >= 1

  input_directories = FLAGS.input_directories
  logging.info('Reading testcases from: %s', ' '.join(input_directories))

  output_directory = pathlib.Path(FLAGS.output_directory)
  logging.info('Writing results to %s', output_directory)
  output_directory.mkdir(parents=True, exist_ok=True)

  # Load testcases.
  testcase_dirs = [
    pathlib.Path(x) for x in input_directories if pathlib.Path(x).is_dir()
  ]
  if not testcase_dirs:
    raise app.UsageError('No --input_directories found.')
  testcase_paths = labtypes.flatten(
      [[pathlib.Path(y) for y in fs.ls(x, abspaths=True)]
       for x in testcase_dirs])
  testcases = [
    pbutil.FromFile(path, deepsmith_pb2.Testcase()) for path in testcase_paths
  ]
  logging.info('Read %d testcases.', len(testcases))
  if not testcases:
    raise app.UsageError(
        f"No testcases found: '{' '.join(input_directories)}'")

  # Execute testcases.
  req = harness_pb2.RunTestcasesRequest()
  req.testbed.CopyFrom(harness.testbeds[0])
  req.testcases.extend(testcases)
  res = harness.RunTestcases(req, None)

  # Write results to file.
  for testcase, result in zip(testcases, res.results):
    result_id = crypto.md5_str(str(testcase))
    pbutil.ToFile(result, output_directory / f'{result_id}.pbtxt')

  logging.info('Executed %d testcases and wrote results to %s',
               len(res.results), output_directory)
  execution_times = [
    result.profiling_events[0].duration_ms for result in res.results
  ]
  logging.info('Average time to evaluate testcase: %.2f ms',
               sum(execution_times) / len(execution_times))

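# A minimal entry-point sketch, assuming this module follows the absl
# convention implied by `app.UsageError` and `FLAGS` above. The flag
# definitions (--opencl_opt, --input_directories, --output_directory)
# live elsewhere in the surrounding file and are not shown here.
if __name__ == '__main__':
  app.run(main)
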
def test_gsize_smaller_than_data():
  # Only the first gsize elements are touched; the rest pass through.
  src = "kernel void A(global int* a) { a[get_global_id(0)] = 0; }"
  inputs = [[5, 5, 5, 5, 5, 5, 5, 5]]
  outputs_gs = [[0, 0, 0, 0, 5, 5, 5, 5]]
  outputs = driver.DriveKernel(
      env.OclgrindOpenCLEnvironment(), src, inputs,
      gsize=(4, 1, 1), lsize=(4, 1, 1))
  testlib.Assert2DArraysAlmostEqual(outputs, outputs_gs)

def test_incorrect_num_of_args():
  src = "kernel void A(const int a) {}"

  # Too many inputs.
  with pytest.raises(ValueError):
    driver.DriveKernel(
        env.OclgrindOpenCLEnvironment(), src, [[1], [2], [3]],
        gsize=(1, 1, 1), lsize=(1, 1, 1))

  # Too few inputs.
  with pytest.raises(ValueError):
    driver.DriveKernel(
        env.OclgrindOpenCLEnvironment(), src, [],
        gsize=(1, 1, 1), lsize=(1, 1, 1))

  # Incorrect input width (three ints instead of one).
  with pytest.raises(ValueError):
    driver.DriveKernel(
        env.OclgrindOpenCLEnvironment(), src, [[1, 2, 3]],
        gsize=(1, 1, 1), lsize=(1, 1, 1))

def test_ExecClsmithSource_pass():
  """An end-to-end test of executing a CLSmith source."""
  env_ = env.OclgrindOpenCLEnvironment()
  proc = cl_launcher.ExecClsmithSource(
      env_, CLSMITH_EXAMPLE_SRC,
      driver.NDRange(1, 1, 1), driver.NDRange(1, 1, 1), '---debug')
  assert not proc.returncode
  assert '3-D global size 1 = [1, 1, 1]' in proc.stderr
  assert '3-D local size 1 = [1, 1, 1]' in proc.stderr
  assert 'OpenCL optimizations: on' in proc.stderr
  assert 'Platform: ' in proc.stderr
  assert 'Device: ' in proc.stderr
  assert 'Compilation terminated successfully...' in proc.stderr
  assert proc.stdout == '0,'

def test_profiling():
  src = """
    kernel void A(global int* a, constant int* b) {
      const int id = get_global_id(0);
      a[id] *= b[id];
    }
  """
  inputs = [np.arange(16), np.arange(16)]
  outputs_gs = [np.arange(16) ** 2, np.arange(16)]

  with testlib.DevNullRedirect():
    outputs = driver.DriveKernel(
        env.OclgrindOpenCLEnvironment(), src, inputs,
        gsize=(16, 1, 1), lsize=(16, 1, 1), profiling=True)
  testlib.Assert2DArraysAlmostEqual(outputs, outputs_gs)

def test_simple():
  inputs = [[0, 1, 2, 3, 4, 5, 6, 7]]
  inputs_orig = [[0, 1, 2, 3, 4, 5, 6, 7]]
  outputs_gs = [[0, 2, 4, 6, 8, 10, 12, 14]]
  src = """
    kernel void A(global float* a) {
      const int x_id = get_global_id(0);
      a[x_id] *= 2.0;
    }
  """
  outputs = driver.DriveKernel(
      env.OclgrindOpenCLEnvironment(), src, inputs,
      gsize=(8, 1, 1), lsize=(1, 1, 1))
  testlib.Assert2DArraysAlmostEqual(inputs, inputs_orig)
  testlib.Assert2DArraysAlmostEqual(outputs, outputs_gs)

def test_OclgrindOpenCLEnvironment_Exec_version():
  """Test that OclgrindOpenCLEnvironment.Exec() works as expected."""
  proc = env.OclgrindOpenCLEnvironment().Exec(['--version'])
  # This test will of course fail if the @oclgrind package is updated.
  # NOTE: the expected banner below is reconstructed for Oclgrind 18.3 (the
  # version asserted in test_OclgrindOpenCLEnvironment_name); verify it
  # against the pinned @oclgrind package.
  assert proc.stdout == """
Oclgrind 18.3

Copyright (c) 2013-2018
James Price and Simon McIntosh-Smith, University of Bristol
https://github.com/jrprice/Oclgrind

"""

def test_OclgrindOpenCLEnvironment_name():
  """Test that the OclgrindOpenCLEnvironment has a correct 'name' property."""
  env_ = env.OclgrindOpenCLEnvironment()
  # This test will of course fail if the @oclgrind package is updated.
  assert 'Emulator|Oclgrind|Oclgrind_Simulator|Oclgrind_18.3|1.2' == env_.name

@pytest.fixture(scope='function')
def cl_launcher_harness_config() -> harness_pb2.ClLauncherHarness:
  """Test fixture to return a cl_launcher test harness config."""
  config = harness_pb2.ClLauncherHarness()
  config.opencl_env.extend([env.OclgrindOpenCLEnvironment().name])
  config.opencl_opt.extend([True])
  return config

@pytest.fixture(scope='function')
def abc_harness_config() -> harness_pb2.ClLauncherHarness:
  """A test fixture which returns an oclgrind harness config."""
  config = harness_pb2.ClLauncherHarness()
  config.opencl_env.extend([env.OclgrindOpenCLEnvironment().name])
  config.opencl_opt.extend([True])
  return config

@pytest.fixture(scope='function')
def cldrive_harness_config() -> harness_pb2.CldriveHarness:
  """Test fixture to return a Cldrive test harness config."""
  config = harness_pb2.CldriveHarness()
  config.opencl_env.extend([env.OclgrindOpenCLEnvironment().name])
  config.opencl_opt.extend([True])
  return config

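# A hypothetical usage sketch for the fixture above: pytest injects the
# config by argument name, and `cldrive.CldriveHarness` is the same
# constructor used in main(). The test name and the 'enabled' opt value
# (borrowed from test_ClLauncherHarness_oclgrind_testbed) are illustrative,
# not part of the original suite.
def test_CldriveHarness_from_config(cldrive_harness_config):
  """A single-environment config should yield a single testbed."""
  harness = cldrive.CldriveHarness(cldrive_harness_config)
  assert len(harness.testbeds) == 1
  assert harness.testbeds[0].opts['opencl_opt'] == 'enabled'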