def test_GenerateTestcases(abc_instance_config, tempdir: pathlib.Path):
  """Run a tiny end-to-end test."""
  generator_config = generator_pb2.ClgenGenerator(
      instance=abc_instance_config,
      testcase_skeleton=[
          deepsmith_pb2.Testcase(
              toolchain="opencl",
              harness=deepsmith_pb2.Harness(
                  name="cldrive", opts={"timeout_seconds": "60"}),
              inputs={
                  "gsize": "1,1,1",
                  "lsize": "1,1,1",
              },
          )
      ],
  )
  generator_path = tempdir / "generator.pbtxt"
  pbutil.ToFile(generator_config, generator_path)
  output_dir = tempdir / "outputs"
  subprocess.check_call([
      str(BIN),
      "--generator",
      generator_path,
      "--output_directory",
      str(output_dir),
      "--num_testcases",
      str(3),
  ])
  assert len(list((output_dir / "generated_testcases").iterdir())) >= 3
  assert len(list((output_dir / "generated_kernels").iterdir())) >= 3
  for f in (output_dir / "generated_testcases").iterdir():
    assert pbutil.ProtoIsReadable(f, deepsmith_pb2.Testcase())

def abc_testcase() -> deepsmith_pb2.Testcase:
  """A test fixture which returns a very simple test case."""
  return deepsmith_pb2.Testcase(
      toolchain="opencl",
      harness=deepsmith_pb2.Harness(name="cl_launcher"),
      inputs={
          "src": CLSMITH_EXAMPLE_SRC,
          "gsize": "1,1,1",
          "lsize": "1,1,1",
      },
  )

def abc_testcase() -> deepsmith_pb2.Testcase:
  """A test fixture which returns a very simple test case."""
  return deepsmith_pb2.Testcase(
      toolchain="opencl",
      harness=deepsmith_pb2.Harness(name="cldrive"),
      inputs={
          "src": "kernel void A(global int* a) {a[get_global_id(0)] = 10;}",
          "gsize": "1,1,1",
          "lsize": "1,1,1",
      },
  )

def test_Generator_GetOrAdd_ToProto_equivalence(session):
  proto_in = deepsmith_pb2.Testcase(
      toolchain="cpp",
      generator=deepsmith_pb2.Generator(name="generator"),
      harness=deepsmith_pb2.Harness(name="harness"),
      inputs={
          "src": "void main() {}",
          "data": "[1,2]",
      },
      invariant_opts={"config": "opt"},
      profiling_events=[
          deepsmith_pb2.ProfilingEvent(
              client="localhost",
              type="generate",
              duration_ms=100,
              event_start_epoch_ms=101231231,
          ),
      ],
  )
  testcase = deeplearning.deepsmith.testcase.Testcase.GetOrAdd(
      session, proto_in)
  # NOTE: We have to flush so that SQLAlchemy resolves all of the object IDs.
  session.flush()
  proto_out = testcase.ToProto()
  assert proto_in == proto_out
  proto_out.ClearField("toolchain")
  assert proto_in != proto_out  # Sanity check.

def test_MakeDriver_CompileDriver_hello_world():
  """An end-to-end test."""
  testcase = deepsmith_pb2.Testcase(
      inputs={
          "lsize": "1,1,1",
          "gsize": "1,1,1",
          "src": "kernel void A(global int* a) {a[get_global_id(0)] += 10;}",
      })
  driver = cldrive.MakeDriver(testcase, True)
  with tempfile.TemporaryDirectory() as d:
    binary = cldrive.CompileDriver(
        driver, pathlib.Path(d) / "exe", 0, 0, timeout_seconds=60)
    proc = oclgrind.Exec([str(binary)])
  assert "[cldrive] Platform:" in proc.stderr
  assert "[cldrive] Device:" in proc.stderr
  assert "[cldrive] OpenCL optimizations: on\n" in proc.stderr
  assert '[cldrive] Kernel: "A"\n' in proc.stderr
  assert "done.\n" in proc.stderr
  assert proc.stdout.split("\n")[-2] == (
      "global int * a: 10 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 "
      "22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 "
      "46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 "
      "70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 "
      "94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 "
      "114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 "
      "132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 "
      "150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 "
      "168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 "
      "186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 "
      "204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 "
      "222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 "
      "240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255")

def _ExportSolidityTestcases(cursor, start_id, proto_dir):
  batch_size = 1000
  testcase_id = start_id
  while True:
    cursor.execute(
        """
        SELECT testcases.id, programs.generator, programs.date,
               programs.generation_time, programs.src, testcases.harness,
               testcases.timeout
        FROM testcases
        LEFT JOIN programs ON testcases.program_id = programs.id
        WHERE testcases.id >= %s
          AND testcases.id NOT IN (SELECT testcase_id FROM results)
        ORDER BY testcases.id
        LIMIT %s
        """, (testcase_id, batch_size))
    i = 0
    for row in cursor:
      i += 1
      (testcase_id, generator_id, program_date, program_generation_time,
       program_src, harness_id, harness_timeout) = row
      proto = deepsmith_pb2.Testcase(
          toolchain='solidity',
          generator=_GetSolidityGenerator(generator_id),
          harness=deepsmith_pb2.Harness(
              name='solc',
              opts={
                  'timeout_seconds': str(int(harness_timeout)),
                  'url': 'https://github.com/ChrisCummins/dsmith/blob/5181c7c95575d428b5144a25549e5a5a55a3da31/dsmith/sol/harnesses.py#L117',
              },
          ),
          inputs={
              "src": program_src,
          },
          invariant_opts={},
          profiling_events=[
              deepsmith_pb2.ProfilingEvent(
                  client="cc1",
                  type="generation",
                  duration_ms=int(program_generation_time * 1000),
                  event_start_epoch_ms=dateutil.MillisecondsTimestamp(
                      program_date),
              ),
          ])
      with open(proto_dir / 'sol' / 'testcases' / str(testcase_id), 'wb') as f:
        f.write(proto.SerializeToString())
    if i < batch_size:
      return

def ToProto(self) -> deepsmith_pb2.Testcase:
  """Create protocol buffer representation.

  Returns:
    A Testcase message.
  """
  proto = deepsmith_pb2.Testcase()
  return self.SetProto(proto)

def dummy_result() -> deepsmith_pb2.Result:
  """A test fixture which returns a dummy result."""
  return deepsmith_pb2.Result(
      testcase=deepsmith_pb2.Testcase(
          harness=deepsmith_pb2.Harness(name="name"),
          inputs={
              "src": "Kernel source.",
              "gsize": "1,1,1",
              "lsize": "2,2,2",
          },
      ),
      outputs={
          "stdout": "Standard output.",
          "stderr": "Standard error.",
      },
  )

def test_MakeDriver_ValueError_no_lsize():
  """Test that ValueError is raised if lsize input not set."""
  testcase = deepsmith_pb2.Testcase(inputs={
      "gsize": "1,1,1",
      "src": "kernel void A() {}",
  })
  with test.Raises(ValueError) as e_ctx:
    cldrive.MakeDriver(testcase, True)
  assert "Field not set: 'Testcase.inputs[\"lsize\"]'" == str(e_ctx.value)

def test_MakeDriver_ValueError_no_src():
  """Test that ValueError is raised if src input not set."""
  testcase = deepsmith_pb2.Testcase(inputs={
      "lsize": "1,1,1",
      "gsize": "1,1,1",
  })
  with test.Raises(ValueError) as e_ctx:
    cldrive.MakeDriver(testcase, True)
  assert "Field not set: 'Testcase.inputs[\"src\"]'" == str(e_ctx.value)

def test_duplicate_testcase_testbed_ignored(session):
  """Test that result is ignored if testbed and testcase are not unique."""
  proto = deepsmith_pb2.Result(
      testcase=deepsmith_pb2.Testcase(
          toolchain='cpp',
          generator=deepsmith_pb2.Generator(name='generator'),
          harness=deepsmith_pb2.Harness(name='harness'),
          inputs={
              'src': 'void main() {}',
              'data': '[1,2]',
          },
          invariant_opts={
              'config': 'opt',
          },
          profiling_events=[
              deepsmith_pb2.ProfilingEvent(
                  client='localhost',
                  type='generate',
                  duration_ms=100,
                  event_start_epoch_ms=1123123123,
              ),
          ]),
      testbed=deepsmith_pb2.Testbed(
          toolchain='cpp',
          name='clang',
          opts={'arch': 'x86_64'},
      ),
      returncode=0,
      outputs={'stdout': 'Hello, world!'},
      profiling_events=[
          deepsmith_pb2.ProfilingEvent(
              client='localhost',
              type='exec',
              duration_ms=100,
              event_start_epoch_ms=1123123123,
          ),
      ],
      outcome=deepsmith_pb2.Result.PASS,
  )
  r1 = deeplearning.deepsmith.result.Result.GetOrAdd(session, proto)
  session.add(r1)
  session.flush()
  # Attempt to add a new result which is identical to the first in all fields
  # except for the outputs.
  proto.outputs['stdout'] = '!'
  r2 = deeplearning.deepsmith.result.Result.GetOrAdd(session, proto)
  session.add(r2)
  session.flush()
  # Check that only one result was added.
  assert session.query(deeplearning.deepsmith.result.Result).count() == 1
  # Check that only the first result was added.
  r3 = session.query(deeplearning.deepsmith.result.Result).first()
  assert r3.outputs['stdout'] == 'Hello, world!'

def test_RunTestcases_cl_launcher_syntax_error(
    cl_launcher_harness_config: harness_pb2.ClLauncherHarness,
    opencl_opt: bool):
  """Test execution of a test case with invalid syntax."""
  cl_launcher_harness_config.opencl_opt[0] = opencl_opt
  harness = cl_launcher.ClLauncherHarness(cl_launcher_harness_config)
  testcases = [
      deepsmith_pb2.Testcase(
          toolchain="opencl",
          harness=deepsmith_pb2.Harness(name="cl_launcher"),
          inputs={
              "src": "__kernel void entry(\n!11@invalid syntax!",
              "gsize": "1,1,1",
              "lsize": "1,1,1",
              "timeout_seconds": "60",
          },
      )
  ]
  results = opencl_fuzz.RunTestcases(harness, testcases)
  assert len(results) == 1
  print(results[0].outputs["stderr"])
  assert testcases[0] == results[0].testcase
  assert results[0].testbed == cldrive.OpenClEnvironmentToTestbed(
      harness.envs[0])
  assert results[0].outcome == deepsmith_pb2.Result.BUILD_FAILURE
  assert results[0].outputs["stdout"] == ""
  opt_str = "on" if opencl_opt else "off"
  assert results[0].outputs["stderr"] == f"""\
3-D global size 1 = [1, 1, 1]
3-D local size 1 = [1, 1, 1]
OpenCL optimizations: {opt_str}
Platform: Oclgrind
Device: Oclgrind Simulator
3 errors generated.
Error found (callback): Oclgrind - OpenCL runtime error detected
\tFunction: clBuildProgram
\tError: CL_BUILD_PROGRAM_FAILURE
Error building program: -11
input.cl:2:1: error: expected parameter declarator
!11@invalid syntax!
^
input.cl:2:1: error: expected ')'
input.cl:1:20: note: to match this '('
__kernel void entry(
                   ^
input.cl:2:20: error: expected function body after function declarator
!11@invalid syntax!
                   ^
"""

def abc_config() -> generator_pb2.ClsmithGenerator:
  return generator_pb2.ClsmithGenerator(testcase_skeleton=[
      deepsmith_pb2.Testcase(
          toolchain="opencl",
          inputs={
              "gsize": "1,1,1",
              "lsize": "1,1,1",
          },
          harness=deepsmith_pb2.Harness(name="cl_launcher"),
      ),
      deepsmith_pb2.Testcase(
          toolchain="opencl",
          inputs={
              "gsize": "128,16,1",
              "lsize": "8,4,1",
          },
          harness=deepsmith_pb2.Harness(name="cl_launcher"),
      ),
  ])

def test_MakeDriver_ValueError_invalid_gsize():
  """Test that ValueError is raised if gsize is not an NDRange."""
  testcase = deepsmith_pb2.Testcase(inputs={
      "lsize": "1,1,1",
      "gsize": "abc",
      "src": "kernel void A() {}",
  })
  with test.Raises(ValueError) as e_ctx:
    cldrive.MakeDriver(testcase, True)
  assert "invalid literal for int() with base 10: 'abc'" == str(e_ctx.value)

def test_MakeDriver_optimizations_on():
  """Test that OpenCL optimizations are enabled when requested."""
  testcase = deepsmith_pb2.Testcase(inputs={
      "lsize": "1,1,1",
      "gsize": "1,1,1",
      "src": "kernel void A() {}",
  })
  src = cldrive.MakeDriver(testcase, True)
  assert "[cldrive] OpenCL optimizations: on" in src
  assert "clBuildProgram(program, 0, NULL, NULL, NULL, NULL);" in src

def ProtoFromFile(cls, path: pathlib.Path) -> deepsmith_pb2.Testcase:
  """Instantiate a protocol buffer testcase from file.

  Args:
    path: Path to the testcase proto file.

  Returns:
    Testcase message instance.
  """
  return pbutil.FromFile(path, deepsmith_pb2.Testcase())

def test_Generator_GetOrAdd_ToProto_equivalence(session):
  proto_in = deepsmith_pb2.Result(
      testcase=deepsmith_pb2.Testcase(
          toolchain="cpp",
          generator=deepsmith_pb2.Generator(name="generator"),
          harness=deepsmith_pb2.Harness(name="harness"),
          inputs={
              "src": "void main() {}",
              "data": "[1,2]",
          },
          invariant_opts={
              "config": "opt",
          },
          profiling_events=[
              deepsmith_pb2.ProfilingEvent(
                  client="localhost",
                  type="generate",
                  duration_ms=100,
                  event_start_epoch_ms=1123123123,
              ),
              deepsmith_pb2.ProfilingEvent(
                  client="localhost",
                  type="foo",
                  duration_ms=100,
                  event_start_epoch_ms=1123123123,
              ),
          ],
      ),
      testbed=deepsmith_pb2.Testbed(
          toolchain="cpp",
          name="clang",
          opts={
              "arch": "x86_64",
              "build": "debug+assert",
          },
      ),
      returncode=0,
      outputs={
          "stdout": "Hello, world!",
          "stderr": "",
      },
      profiling_events=[
          deepsmith_pb2.ProfilingEvent(
              client="localhost",
              type="exec",
              duration_ms=500,
              event_start_epoch_ms=1123123123,
          ),
          deepsmith_pb2.ProfilingEvent(
              client="localhost",
              type="overhead",
              duration_ms=100,
              event_start_epoch_ms=1123123123,
          ),
      ],
      outcome=deepsmith_pb2.Result.PASS,
  )
  result = deeplearning.deepsmith.result.Result.GetOrAdd(session, proto_in)
  # NOTE: We have to flush so that SQLAlchemy resolves all of the object IDs.
  session.flush()
  proto_out = result.ToProto()
  assert proto_in == proto_out
  proto_out.ClearField("outputs")
  assert proto_in != proto_out  # Sanity check.

def test_RunTestcases_cldrive_syntax_error(
    cldrive_harness_config: harness_pb2.CldriveHarness, opencl_opt: bool):
  """Test execution of a test case with invalid syntax."""
  cldrive_harness_config.opencl_opt[0] = opencl_opt
  harness = cldrive.CldriveHarness(cldrive_harness_config)
  testcases = [
      deepsmith_pb2.Testcase(
          toolchain="opencl",
          harness=deepsmith_pb2.Harness(name="cldrive"),
          inputs={
              "src": "kernel void A(global int* a) {\n!11@invalid syntax!",
              "gsize": "1,1,1",
              "lsize": "1,1,1",
              "timeout_seconds": "60",
          },
      )
  ]
  results = opencl_fuzz.RunTestcases(harness, testcases)
  assert len(results) == 1
  # Testcase.invariant_opts.driver_type field is set by cldrive harness.
  testcases[0].invariant_opts["driver_type"] = "compile_only"
  assert testcases[0] == results[0].testcase
  assert results[0].testbed == cldrive.OpenClEnvironmentToTestbed(
      harness.envs[0])
  assert results[0].outcome == deepsmith_pb2.Result.BUILD_FAILURE
  assert results[0].outputs["stdout"] == ""
  print(results[0].outputs["stderr"])
  opt_str = "on" if opencl_opt else "off"
  assert results[0].outputs["stderr"] == f"""\
[cldrive] Platform: Oclgrind
[cldrive] Device: Oclgrind Simulator
[cldrive] OpenCL optimizations: {opt_str}
1 warning and 3 errors generated.
input.cl:1:34: error: expected ';' after expression
kernel void A(global int* a) {{!11@invalid syntax!
                                 ^
                                 ;
input.cl:1:34: error: expected expression
input.cl:1:50: error: expected '}}'
kernel void A(global int* a) {{!11@invalid syntax!
                                                 ^
input.cl:1:30: note: to match this '{{'
kernel void A(global int* a) {{!11@invalid syntax!
                             ^
input.cl:1:31: warning: expression result unused
kernel void A(global int* a) {{!11@invalid syntax!
                              ^~~
clBuildProgram CL_BUILD_PROGRAM_FAILURE
"""

def test_RunTestcases_cldrive_pass(
    cldrive_harness_config: harness_pb2.CldriveHarness, opencl_opt: bool):
  """Test execution of a simple test case."""
  cldrive_harness_config.opencl_opt[0] = opencl_opt
  harness = cldrive.CldriveHarness(cldrive_harness_config)
  testcases = [
      deepsmith_pb2.Testcase(
          toolchain="opencl",
          harness=deepsmith_pb2.Harness(name="cldrive"),
          inputs={
              "src": "kernel void A(global int* a) {a[get_global_id(0)] = 10;}",
              "gsize": "1,1,1",
              "lsize": "1,1,1",
              "timeout_seconds": "60",
          },
      )
  ]
  results = opencl_fuzz.RunTestcases(harness, testcases)
  assert len(results) == 1
  # Testcase.invariant_opts.driver_type field is set by cldrive harness.
  testcases[0].invariant_opts["driver_type"] = "compile_and_run"
  assert testcases[0] == results[0].testcase
  assert results[0].testbed == cldrive.OpenClEnvironmentToTestbed(
      harness.envs[0])
  assert results[0].outcome == deepsmith_pb2.Result.PASS
  assert results[0].outputs["stdout"] == (
      "global int * a: 10 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 "
      "22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 "
      "46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 "
      "70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 "
      "94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 "
      "114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 "
      "132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 "
      "150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 "
      "168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 "
      "186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 "
      "204 205 206 207 208 209 210 211 212 213 214 215 216 217 218 219 220 221 "
      "222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 "
      "240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255\n")
  opt_str = "on" if opencl_opt else "off"
  assert results[0].outputs["stderr"] == f"""\
[cldrive] Platform: Oclgrind
[cldrive] Device: Oclgrind Simulator
[cldrive] OpenCL optimizations: {opt_str}
[cldrive] Kernel: "A"
done.
"""

def test_GenerateTestcases(abc_instance_config):
  """Run a tiny end-to-end test."""
  generator_config = generator_pb2.ClgenGenerator(
      instance=abc_instance_config,
      testcase_skeleton=[
          deepsmith_pb2.Testcase(
              toolchain='opencl',
              harness=deepsmith_pb2.Harness(
                  name='cldrive', opts={'timeout_seconds': '60'}),
              inputs={
                  'gsize': '1,1,1',
                  'lsize': '1,1,1',
              })
      ])
  with tempfile.TemporaryDirectory() as d:
    d = pathlib.Path(d)
    evaluate_generator.GenerateTestcases(generator_config, d, 3)
    assert len(list((d / 'generated_testcases').iterdir())) >= 3
    assert len(list((d / 'generated_kernels').iterdir())) >= 3
    for f in (d / 'generated_testcases').iterdir():
      assert pbutil.ProtoIsReadable(f, deepsmith_pb2.Testcase())

def test_MakeDriver_optimizations_off():
  """Test that OpenCL optimizations are disabled when requested."""
  testcase = deepsmith_pb2.Testcase(inputs={
      "lsize": "1,1,1",
      "gsize": "1,1,1",
      "src": "kernel void A() {}",
  })
  src = cldrive.MakeDriver(testcase, False)
  print(src)
  assert "[cldrive] OpenCL optimizations: off" in src
  assert ('clBuildProgram(program, 0, NULL, "-cl-opt-disable", NULL, NULL);'
          in src)

def main(argv):
  if len(argv) > 1:
    unknown_args = ', '.join(argv[1:])
    raise app.UsageError(f"Unknown arguments {unknown_args}")

  logging.info('Preparing OpenCL testbed.')
  config = harness_pb2.CldriveHarness()
  config.opencl_env.extend([env.OclgrindOpenCLEnvironment().name])
  config.opencl_opt.extend([FLAGS.opencl_opt])
  harness = cldrive.CldriveHarness(config)
  assert len(harness.testbeds) >= 1

  input_directories = FLAGS.input_directories
  logging.info('Reading testcases from: %s', ' '.join(input_directories))

  output_directory = pathlib.Path(FLAGS.output_directory)
  logging.info('Writing results to %s', output_directory)
  output_directory.mkdir(parents=True, exist_ok=True)

  # Load testcases.
  testcase_dirs = [
      pathlib.Path(x) for x in input_directories if pathlib.Path(x).is_dir()
  ]
  if not testcase_dirs:
    raise app.UsageError('No --input_directories found.')
  testcase_paths = labtypes.flatten(
      [[pathlib.Path(y) for y in fs.ls(x, abspaths=True)]
       for x in testcase_dirs])
  testcases = [
      pbutil.FromFile(path, deepsmith_pb2.Testcase())
      for path in testcase_paths
  ]
  logging.info('Read %d testcases.', len(testcases))
  if not len(testcases):
    raise app.UsageError(
        "No testcases found: '{}'".format(' '.join(input_directories)))

  # Execute testcases.
  req = harness_pb2.RunTestcasesRequest()
  req.testbed.CopyFrom(harness.testbeds[0])
  req.testcases.extend(testcases)
  res = harness.RunTestcases(req, None)

  # Write results to file.
  for testcase, result in zip(testcases, res.results):
    result_id = crypto.md5_str(str(testcase))
    pbutil.ToFile(result, output_directory / f'{result_id}.pbtxt')

  logging.info('Executed %d testcases and wrote results to %s',
               len(res.results), output_directory)
  execution_times = [
      result.profiling_events[0].duration_ms for result in res.results
  ]
  logging.info('Average time to evaluate testcase: %.2f ms',
               sum(execution_times) / len(execution_times))