def test_Testbed_GetOrAdd_rollback(session):
  """A rollback discards the testbed and every opt-table row it created."""
  deeplearning.deepsmith.testbed.Testbed.GetOrAdd(
      session,
      deepsmith_pb2.Testbed(
          toolchain='opencl',
          name='nvidia',
          opts={
              'opencl': '1.2',
              'devtype': 'GPU',
          },
      ))
  opt_tables = (
      deeplearning.deepsmith.testbed.TestbedOpt,
      deeplearning.deepsmith.testbed.TestbedOptSet,
      deeplearning.deepsmith.testbed.TestbedOptName,
      deeplearning.deepsmith.testbed.TestbedOptValue,
  )
  # Two opts were supplied, so each opt table gains two rows.
  assert session.query(deeplearning.deepsmith.testbed.Testbed).count() == 1
  for table in opt_tables:
    assert session.query(table).count() == 2
  session.rollback()
  # The rollback leaves every table empty again.
  assert session.query(deeplearning.deepsmith.testbed.Testbed).count() == 0
  for table in opt_tables:
    assert session.query(table).count() == 0
def test_Testbed_GetOrAdd(session):
  """GetOrAdd materializes a testbed and its opt rows from a proto."""
  proto = deepsmith_pb2.Testbed(
      toolchain='cpp',
      name='clang',
      opts={
          'arch': 'x86_64',
          'build': 'debug+assert'
      },
  )
  testbed = deeplearning.deepsmith.testbed.Testbed.GetOrAdd(session, proto)
  # Each of the two opts contributes one row to every opt table.
  for table in (deeplearning.deepsmith.testbed.TestbedOptSet,
                deeplearning.deepsmith.testbed.TestbedOpt,
                deeplearning.deepsmith.testbed.TestbedOptName,
                deeplearning.deepsmith.testbed.TestbedOptValue):
    assert session.query(table).count() == 2
  # The returned object mirrors the proto's fields.
  assert testbed.toolchain.string == 'cpp'
  assert testbed.name == 'clang'
  assert len(testbed.optset) == 2
  assert len(testbed.opts) == 2
  assert testbed.opts['arch'] == 'x86_64'
  assert testbed.opts['build'] == 'debug+assert'
def ToProto(self) -> deepsmith_pb2.Testbed:
  """Create protocol buffer representation.

  Returns:
    A Testbed message.
  """
  # Delegate population of the fresh message to SetProto.
  return self.SetProto(deepsmith_pb2.Testbed())
def test_duplicate_testcase_testbed_ignored(session):
  """Test that result is ignored if testbed and testcase are not unique."""
  testcase = deepsmith_pb2.Testcase(
      toolchain='cpp',
      generator=deepsmith_pb2.Generator(name='generator'),
      harness=deepsmith_pb2.Harness(name='harness'),
      inputs={
          'src': 'void main() {}',
          'data': '[1,2]',
      },
      invariant_opts={
          'config': 'opt',
      },
      profiling_events=[
          deepsmith_pb2.ProfilingEvent(
              client='localhost',
              type='generate',
              duration_ms=100,
              event_start_epoch_ms=1123123123,
          ),
      ])
  testbed = deepsmith_pb2.Testbed(
      toolchain='cpp',
      name='clang',
      opts={'arch': 'x86_64'},
  )
  proto = deepsmith_pb2.Result(
      testcase=testcase,
      testbed=testbed,
      returncode=0,
      outputs={'stdout': 'Hello, world!'},
      profiling_events=[
          deepsmith_pb2.ProfilingEvent(
              client='localhost',
              type='exec',
              duration_ms=100,
              event_start_epoch_ms=1123123123,
          ),
      ],
      outcome=deepsmith_pb2.Result.PASS,
  )
  first = deeplearning.deepsmith.result.Result.GetOrAdd(session, proto)
  session.add(first)
  session.flush()
  # Attempt to add a new result which is identical to the first in all fields
  # except for the outputs.
  proto.outputs['stdout'] = '!'
  second = deeplearning.deepsmith.result.Result.GetOrAdd(session, proto)
  session.add(second)
  session.flush()
  # Only one result was stored, and it kept the first result's outputs.
  assert session.query(deeplearning.deepsmith.result.Result).count() == 1
  stored = session.query(deeplearning.deepsmith.result.Result).first()
  assert stored.outputs['stdout'] == 'Hello, world!'
def test_Generator_GetOrAdd_ToProto_equivalence(session):
  """A Result survives a GetOrAdd / ToProto round trip unchanged."""
  testcase = deepsmith_pb2.Testcase(
      toolchain="cpp",
      generator=deepsmith_pb2.Generator(name="generator"),
      harness=deepsmith_pb2.Harness(name="harness"),
      inputs={"src": "void main() {}", "data": "[1,2]"},
      invariant_opts={"config": "opt"},
      profiling_events=[
          deepsmith_pb2.ProfilingEvent(
              client="localhost",
              type="generate",
              duration_ms=100,
              event_start_epoch_ms=1123123123,
          ),
          deepsmith_pb2.ProfilingEvent(
              client="localhost",
              type="foo",
              duration_ms=100,
              event_start_epoch_ms=1123123123,
          ),
      ],
  )
  proto_in = deepsmith_pb2.Result(
      testcase=testcase,
      testbed=deepsmith_pb2.Testbed(
          toolchain="cpp",
          name="clang",
          opts={"arch": "x86_64", "build": "debug+assert"},
      ),
      returncode=0,
      outputs={"stdout": "Hello, world!", "stderr": ""},
      profiling_events=[
          deepsmith_pb2.ProfilingEvent(
              client="localhost",
              type="exec",
              duration_ms=500,
              event_start_epoch_ms=1123123123,
          ),
          deepsmith_pb2.ProfilingEvent(
              client="localhost",
              type="overhead",
              duration_ms=100,
              event_start_epoch_ms=1123123123,
          ),
      ],
      outcome=deepsmith_pb2.Result.PASS,
  )
  result = deeplearning.deepsmith.result.Result.GetOrAdd(session, proto_in)
  # NOTE: We have to flush so that SQLAlchemy resolves all of the object IDs.
  session.flush()
  proto_out = result.ToProto()
  assert proto_out == proto_in
  # Sanity check: clearing a field must break equality.
  proto_out.ClearField("outputs")
  assert proto_out != proto_in
def test_Testbed_GetOrAdd_only_different_optset(session):
  """Testbeds that differ only in their opts are distinct rows."""

  def _MakeTestbed(opts):
    # All three testbeds share the same toolchain and name.
    return deeplearning.deepsmith.testbed.Testbed.GetOrAdd(
        session,
        deepsmith_pb2.Testbed(toolchain='toolchain', name='name', opts=opts))

  testbed_a = _MakeTestbed({'a': 'A', 'b': 'B', 'c': 'C'})
  testbed_b = _MakeTestbed({'d': 'D'})
  testbed_c = _MakeTestbed({})
  assert session.query(deeplearning.deepsmith.testbed.Testbed).count() == 3
  # Four unique opts exist in total across the three testbeds.
  for table in (deeplearning.deepsmith.testbed.TestbedOpt,
                deeplearning.deepsmith.testbed.TestbedOptSet,
                deeplearning.deepsmith.testbed.TestbedOptName,
                deeplearning.deepsmith.testbed.TestbedOptValue):
    assert session.query(table).count() == 4
  assert len(testbed_a.optset) == 3
  assert len(testbed_b.optset) == 1
  assert len(testbed_c.optset) == 0
def _AddExistingTestbed(session):
  """Add a fixed, known testbed to the session and flush it."""
  proto = deepsmith_pb2.Testbed(
      toolchain='toolchain',
      name='name',
      opts={'a': 'a', 'b': 'b', 'c': 'c'},
  )
  deeplearning.deepsmith.testbed.Testbed.GetOrAdd(session, proto)
  session.flush()
def _AddRandomNewTestbed(session):
  """Add a testbed with random toolchain, name and opts, then flush."""

  def _Rand():
    # Random strings make each generated testbed unique.
    return str(random.random())

  deeplearning.deepsmith.testbed.Testbed.GetOrAdd(
      session,
      deepsmith_pb2.Testbed(
          toolchain=_Rand(),
          name=_Rand(),
          opts={_Rand(): _Rand() for _ in range(3)},
      ))
  session.flush()
def test_duplicate_testcase_testbed_ignored(session):
  """Test that result is ignored if testbed and testcase are not unique."""
  testcase = deepsmith_pb2.Testcase(
      toolchain="cpp",
      generator=deepsmith_pb2.Generator(name="generator"),
      harness=deepsmith_pb2.Harness(name="harness"),
      inputs={"src": "void main() {}", "data": "[1,2]"},
      invariant_opts={"config": "opt"},
      profiling_events=[
          deepsmith_pb2.ProfilingEvent(
              client="localhost",
              type="generate",
              duration_ms=100,
              event_start_epoch_ms=1123123123,
          ),
      ],
  )
  testbed = deepsmith_pb2.Testbed(
      toolchain="cpp",
      name="clang",
      opts={"arch": "x86_64"},
  )
  proto = deepsmith_pb2.Result(
      testcase=testcase,
      testbed=testbed,
      returncode=0,
      outputs={"stdout": "Hello, world!"},
      profiling_events=[
          deepsmith_pb2.ProfilingEvent(
              client="localhost",
              type="exec",
              duration_ms=100,
              event_start_epoch_ms=1123123123,
          ),
      ],
      outcome=deepsmith_pb2.Result.PASS,
  )
  first = deeplearning.deepsmith.result.Result.GetOrAdd(session, proto)
  session.add(first)
  session.flush()
  # Attempt to add a new result which is identical to the first in all fields
  # except for the outputs.
  proto.outputs["stdout"] = "!"
  second = deeplearning.deepsmith.result.Result.GetOrAdd(session, proto)
  session.add(second)
  session.flush()
  # Only one result was stored, and it kept the first result's outputs.
  assert session.query(deeplearning.deepsmith.result.Result).count() == 1
  stored = session.query(deeplearning.deepsmith.result.Result).first()
  assert stored.outputs["stdout"] == "Hello, world!"
def test_Testbed_GetOrAdd_no_opts(session):
  """An opt-less testbed uses the empty-md5 optset ID and adds no opt rows."""
  testbed = deeplearning.deepsmith.testbed.Testbed.GetOrAdd(
      session,
      deepsmith_pb2.Testbed(toolchain='toolchain', name='name', opts={}))
  # The optset ID of an empty opts dict is the digest of an empty md5.
  assert testbed.optset_id == hashlib.md5().digest()
  assert session.query(deeplearning.deepsmith.testbed.Testbed).count() == 1
  for table in (deeplearning.deepsmith.testbed.TestbedOpt,
                deeplearning.deepsmith.testbed.TestbedOptSet,
                deeplearning.deepsmith.testbed.TestbedOptName,
                deeplearning.deepsmith.testbed.TestbedOptValue):
    assert session.query(table).count() == 0
def test_Testbed_GetOrAdd_ToProto_equivalence(session):
  """ToProto reproduces the proto the testbed was created from."""
  proto_in = deepsmith_pb2.Testbed(
      toolchain='cpp',
      name='clang',
      opts={
          'arch': 'x86_64',
          'build': 'debug+assert'
      },
  )
  testbed = deeplearning.deepsmith.testbed.Testbed.GetOrAdd(session, proto_in)
  # NOTE: We have to flush before constructing a proto so that SQLAlchemy
  # resolves all of the object IDs.
  session.flush()
  proto_out = testbed.ToProto()
  assert proto_out == proto_in
  # Sanity check: clearing a field must break equality.
  proto_out.ClearField('toolchain')
  assert proto_out != proto_in
def OpenClEnvironmentToTestbed(
    opencl_environment: env.OpenCLEnvironment) -> deepsmith_pb2.Testbed:
  """Instantiate a DeepSmith testbed from an OpenCL environment.

  Args:
    opencl_environment: A cldrive OpenCLEnvironment instance.

  Returns:
    A Testbed proto instance.
  """
  testbed = deepsmith_pb2.Testbed(toolchain='opencl',
                                  name=opencl_environment.name)
  # Populate the opts map in one batch instead of key-by-key.
  testbed.opts.update({
      'platform': opencl_environment.platform_name,
      'device': opencl_environment.device_name,
      'driver': opencl_environment.driver_version,
      'device_type': opencl_environment.device_type,
      'opencl_version': opencl_environment.opencl_version,
      'opencl_opt': 'enabled' if opencl_environment.opencl_opt else 'disabled',
  })
  return testbed
def OpenClEnvironmentToTestbed(
    opencl_environment: env.OpenCLEnvironment,
) -> deepsmith_pb2.Testbed:
  """Instantiate a DeepSmith testbed from an OpenCL environment.

  Args:
    opencl_environment: A cldrive OpenCLEnvironment instance.

  Returns:
    A Testbed proto instance.
  """
  testbed = deepsmith_pb2.Testbed(toolchain="opencl",
                                  name=opencl_environment.name)
  # Populate the opts map in one batch instead of key-by-key.
  testbed.opts.update({
      "platform": opencl_environment.platform_name,
      "device": opencl_environment.device_name,
      "driver": opencl_environment.driver_version,
      "device_type": opencl_environment.device_type,
      "opencl_version": opencl_environment.opencl_version,
      "opencl_opt": "enabled" if opencl_environment.opencl_opt else "disabled",
  })
  return testbed
def test_Testbed_GetOrAdd_duplicates(session):
  """Adding an identical testbed twice does not create duplicate rows."""

  def _Counts():
    # Row counts of (Testbed, TestbedOpt, TestbedOptSet, Toolchain,
    # TestbedOptName, TestbedOptValue), in that order.
    return (
        session.query(deeplearning.deepsmith.testbed.Testbed).count(),
        session.query(deeplearning.deepsmith.testbed.TestbedOpt).count(),
        session.query(deeplearning.deepsmith.testbed.TestbedOptSet).count(),
        session.query(deeplearning.deepsmith.toolchain.Toolchain).count(),
        session.query(deeplearning.deepsmith.testbed.TestbedOptName).count(),
        session.query(deeplearning.deepsmith.testbed.TestbedOptValue).count(),
    )

  assert _Counts() == (0, 0, 0, 0, 0, 0)
  proto_a1 = deepsmith_pb2.Testbed(
      toolchain='cpp',
      name='clang',
      opts={
          'arch': 'x86_64',
          'build': 'debug+assert',
      },
  )
  proto_a2 = deepsmith_pb2.Testbed(
      toolchain='cpp',
      name='clang',
      opts={
          'arch': 'x86_64',
          'build': 'debug+assert',
      },
  )
  proto_b = deepsmith_pb2.Testbed(
      toolchain='cpp',
      name='gcc',
      opts={
          'arch': 'x86_64',
          'build': 'opt',
      },
  )
  assert proto_a1 == proto_a2  # Sanity check.
  deeplearning.deepsmith.testbed.Testbed.GetOrAdd(session, proto_a1)
  assert _Counts() == (1, 2, 2, 1, 2, 2)
  deeplearning.deepsmith.testbed.Testbed.GetOrAdd(session, proto_a2)
  # proto_a1 == proto_a2, so the counts should remain unchanged.
  assert _Counts() == (1, 2, 2, 1, 2, 2)
  deeplearning.deepsmith.testbed.Testbed.GetOrAdd(session, proto_b)
  # proto_b adds a new testbed, new opt (note the duplicate arch), and
  # two new entries in the TestbedOptSet table.
  assert _Counts() == (2, 3, 4, 1, 2, 3)
def _ExportSolidityResults(cursor, start_id, proto_dir):
  """Export Solidity results from the database as serialized Result protos.

  Rows are read in batches of 1000, starting at start_id, and each row is
  written as a binary-serialized deepsmith_pb2.Result to
  proto_dir / 'sol' / 'results' / <result id>.

  Args:
    cursor: A DB-API cursor using %s parameter style.
    start_id: The result ID to begin exporting from (inclusive).
    proto_dir: Output directory root; must support the '/' operator
      (presumably a pathlib.Path — confirm with caller).
  """
  batch_size = 1000
  result_id = start_id
  while True:
    # NOTE(review): the follow-up query re-selects the previous batch's final
    # row because the comparison is '>=' on its ID. The re-export overwrites
    # the same file, so it is harmless, but confirm it is intended.
    cursor.execute(
        """
        SELECT results.id, platforms.platform, platforms.version,
               platforms.host, testbeds.optimizations, programs.generator,
               programs.date, programs.generation_time, programs.src,
               testcases.harness, testcases.timeout, results.date,
               results.returncode, results.runtime, stdouts.stdout,
               stderrs.stderr
        FROM results
        LEFT JOIN testbeds ON results.testbed_id = testbeds.id
        LEFT JOIN platforms ON testbeds.platform_id = platforms.id
        LEFT JOIN testcases ON results.testcase_id = testcases.id
        LEFT JOIN programs ON testcases.program_id = programs.id
        LEFT JOIN stdouts ON results.stdout_id = stdouts.id
        LEFT JOIN stderrs ON results.stderr_id = stderrs.id
        WHERE results.id >= %s
        ORDER BY results.id
        LIMIT %s
        """, (result_id, batch_size))
    i = 0
    for row in cursor:
      i += 1
      (result_id, platform_name, platform_version, host_os, optimizations,
       generator_id, program_date, program_generation_time, program_src,
       harness_id, harness_timeout, result_date, returncode, runtime, stdout,
       stderr) = row
      # Only harness ID 2 is expected here — the proto below hard-codes the
      # 'solc' harness for every exported row.
      assert harness_id == 2
      proto = deepsmith_pb2.Result(
          testcase=deepsmith_pb2.Testcase(
              toolchain='solidity',
              generator=_GetSolidityGenerator(generator_id),
              harness=deepsmith_pb2.Harness(
                  name='solc',
                  opts={
                      'timeout_seconds': str(int(harness_timeout)),
                      'url':
                      'https://github.com/ChrisCummins/dsmith/blob/5181c7c95575d428b5144a25549e5a5a55a3da31/dsmith/sol/harnesses.py#L117',
                  },
              ),
              inputs={
                  "src": program_src,
              },
              invariant_opts={},
              profiling_events=[
                  deepsmith_pb2.ProfilingEvent(
                      client="cc1",
                      type="generation",
                      # Generation time is stored in seconds; convert to ms.
                      duration_ms=int(program_generation_time * 1000),
                      event_start_epoch_ms=dateutil.MillisecondsTimestamp(
                          program_date),
                  ),
              ]),
          testbed=deepsmith_pb2.Testbed(
              toolchain='solidity',
              name=platform_name,
              opts={
                  'version': platform_version,
                  'optimizations': 'enabled' if optimizations else 'disabled',
              },
          ),
          returncode=returncode,
          outputs={
              "stdout": stdout,
              "stderr": stderr,
          },
          profiling_events=[
              deepsmith_pb2.ProfilingEvent(
                  client='cc1',
                  type="runtime",
                  duration_ms=int(runtime * 1000),
                  event_start_epoch_ms=dateutil.MillisecondsTimestamp(
                      result_date),
              ),
          ],
      )
      # One output file per result, named by its numeric ID.
      with open(proto_dir / 'sol' / 'results' / str(result_id), 'wb') as f:
        f.write(proto.SerializeToString())
    # A short batch means the end of the table has been reached.
    if i < batch_size:
      return
def _ExportOpenCLResults(cursor, start_id, proto_dir):
  """Export OpenCL results from the database as serialized Result protos.

  Rows are read in batches of 1000, starting at start_id, joined against the
  testcase/program/platform/metadata tables, and each row is written as a
  binary-serialized deepsmith_pb2.Result to
  proto_dir / 'opencl' / 'results' / <result id>.

  Args:
    cursor: A DB-API cursor using %s parameter style.
    start_id: The result ID to begin exporting from (inclusive).
    proto_dir: Output directory root; must support the '/' operator
      (presumably a pathlib.Path — confirm with caller).
  """
  batch_size = 1000
  result_id = start_id
  while True:
    # NOTE(review): the follow-up query re-selects the previous batch's final
    # row because the comparison is '>=' on its ID. The re-export overwrites
    # the same file, so it is harmless, but confirm it is intended.
    cursor.execute(
        """
        SELECT results.id, platforms.platform, platforms.device,
               platforms.driver, platforms.opencl, platforms.devtype,
               platforms.host, testbeds.optimizations, programs.generator,
               programs.date, programs.generation_time, programs.src,
               testcases.harness, testcases.timeout, results.date,
               results.returncode, results.runtime, stdouts.stdout,
               stderrs.stderr, stderrs.truncated, threads.gsize_x,
               threads.gsize_y, threads.gsize_z, threads.lsize_x,
               threads.lsize_y, threads.lsize_z,
               clsmith_testcase_metas.oclverified,
               dsmith_testcase_metas.gpuverified,
               dsmith_testcase_metas.oclverified,
               dsmith_program_metas.contains_floats,
               dsmith_program_metas.vector_inputs,
               dsmith_program_metas.compiler_warnings
        FROM results
        LEFT JOIN testbeds ON results.testbed_id = testbeds.id
        LEFT JOIN platforms ON testbeds.platform_id = platforms.id
        LEFT JOIN testcases ON results.testcase_id = testcases.id
        LEFT JOIN programs ON testcases.program_id = programs.id
        LEFT JOIN threads ON testcases.threads_id = threads.id
        LEFT JOIN stdouts ON results.stdout_id = stdouts.id
        LEFT JOIN stderrs ON results.stderr_id = stderrs.id
        LEFT JOIN clsmith_testcase_metas ON testcases.id=clsmith_testcase_metas.id
        LEFT JOIN dsmith_testcase_metas ON testcases.id=dsmith_testcase_metas.id
        LEFT JOIN dsmith_program_metas ON programs.id=dsmith_program_metas.id
        WHERE results.id >= %s
        ORDER BY results.id
        LIMIT %s
        """, (result_id, batch_size))
    i = 0
    for row in cursor:
      i += 1
      (result_id, platform_name, device_name, driver_version, opencl_version,
       devtype, host_os, cl_opt, generator_id, program_date,
       program_generation_time, program_src, harness_id, harness_timeout,
       result_date, returncode, runtime, stdout, stderr, truncated_stderr,
       gsize_x, gsize_y, gsize_z, lsize_x, lsize_y, lsize_z,
       clsmith_oclverified, dsmith_gpuverified, dsmith_oclverified,
       dsmith_program_contains_floats, dsmith_program_vector_inputs,
       dsmith_program_compiler_warnings) = row
      inputs = {
          'src': program_src,
      }
      # harness_id == -1 appears to mark testcases without thread geometry —
      # TODO(review): confirm what the -1 sentinel denotes.
      if harness_id != -1:
        inputs['gsize'] = f'{gsize_x},{gsize_y},{gsize_z}'
        inputs['lsize'] = f'{lsize_x},{lsize_y},{lsize_z}'
      testbed_name = OPENCL_DEVICE_MAP[device_name]
      testbed_opts = {}
      # NOTE(review): _SetIf presumably assigns only when the value is
      # truthy/non-empty — confirm against its definition.
      _SetIf(testbed_opts, 'opencl_device', device_name.strip())
      _SetIf(testbed_opts, 'opencl_version', opencl_version.strip())
      _SetIf(testbed_opts, 'host', HOSTS_MAP.get(host_os, host_os))
      if testbed_name == "clang":
        _SetIf(testbed_opts, 'llvm_version', driver_version.strip())
      else:
        _SetIf(testbed_opts, 'driver_version', driver_version.strip())
      _SetIf(testbed_opts, 'opencl_devtype',
             OPENCL_DEVTYPE_MAP.get(devtype, devtype))
      _SetIf(testbed_opts, 'opencl_platform', platform_name.strip())
      _SetIf(testbed_opts, 'opencl_opt', 'enabled' if cl_opt else 'disabled')
      invariant_opts = {}
      # The elif chain gives the CLSmith verdict precedence over the DSmith
      # verdict; values other than 0/1 (e.g. NULL) leave the key unset.
      if clsmith_oclverified == 0:
        invariant_opts['oclverify'] = 'fail'
      elif clsmith_oclverified == 1:
        invariant_opts['oclverify'] = 'pass'
      elif dsmith_oclverified == 0:
        invariant_opts['oclverify'] = 'fail'
      elif dsmith_oclverified == 1:
        invariant_opts['oclverify'] = 'pass'
      if dsmith_gpuverified == 0:
        invariant_opts['gpuverify'] = 'fail'
      elif dsmith_gpuverified == 1:
        invariant_opts['gpuverify'] = 'pass'
      if dsmith_program_contains_floats == 0:
        invariant_opts['kernel_uses_floats'] = 'false'
      elif dsmith_program_contains_floats == 1:
        invariant_opts['kernel_uses_floats'] = 'true'
      if dsmith_program_vector_inputs == 0:
        invariant_opts['kernel_has_vector_inputs'] = 'false'
      elif dsmith_program_vector_inputs == 1:
        invariant_opts['kernel_has_vector_inputs'] = 'true'
      if dsmith_program_compiler_warnings == 0:
        invariant_opts['kernel_throws_compiler_warning'] = 'false'
      elif dsmith_program_compiler_warnings == 1:
        invariant_opts['kernel_throws_compiler_warning'] = 'true'
      testbed = deepsmith_pb2.Testbed(
          toolchain='opencl',
          name=testbed_name,
          opts=testbed_opts,
      )
      proto = deepsmith_pb2.Result(
          testcase=deepsmith_pb2.Testcase(
              toolchain="opencl",
              generator=_GetOpenCLGenerator(generator_id),
              harness=_GetOpenCLHarness(harness_id, harness_timeout),
              inputs=inputs,
              invariant_opts=invariant_opts,
              profiling_events=[
                  deepsmith_pb2.ProfilingEvent(
                      client="cc1",
                      type="generation",
                      # Times are stored in seconds; convert to milliseconds.
                      duration_ms=int(program_generation_time * 1000),
                      event_start_epoch_ms=dateutil.MillisecondsTimestamp(
                          program_date),
                  ),
              ]),
          testbed=testbed,
          returncode=returncode,
          outputs={
              "stdout": stdout,
              "stderr": stderr,
          },
          profiling_events=[
              deepsmith_pb2.ProfilingEvent(
                  # Map the recorded host OS to the machine's client name.
                  client={
                      'Ubuntu 16.04 64bit': 'cc1',
                      'CentOS Linux 7.1.1503 64bit': 'fuji',
                      'openSUSE 13.1 64bit': 'kobol',
                  }[host_os],
                  type="runtime",
                  duration_ms=int(runtime * 1000),
                  event_start_epoch_ms=dateutil.MillisecondsTimestamp(
                      result_date),
              ),
          ],
      )
      # One output file per result, named by its numeric ID.
      with open(proto_dir / 'opencl' / 'results' / str(result_id), 'wb') as f:
        f.write(proto.SerializeToString())
    # A short batch means the end of the table has been reached.
    if i < batch_size:
      return
def _ExportOpenCLResults(cursor, start_id, proto_dir):
  """Export OpenCL results from the database as serialized Result protos.

  Rows are read in batches of 1000, starting at start_id, and each row is
  written as a binary-serialized deepsmith_pb2.Result to
  proto_dir / 'opencl' / 'results' / <result id>.

  Args:
    cursor: A DB-API cursor using %s parameter style.
    start_id: The result ID to begin exporting from (inclusive).
    proto_dir: Output directory root; must support the '/' operator
      (presumably a pathlib.Path — confirm with caller).
  """
  batch_size = 1000
  result_id = start_id
  while True:
    # NOTE(review): the follow-up query re-selects the previous batch's final
    # row because the comparison is '>=' on its ID. The re-export overwrites
    # the same file, so it is harmless, but confirm it is intended.
    cursor.execute("""
        SELECT results.id, platforms.platform, platforms.device,
               platforms.driver, platforms.opencl, platforms.devtype,
               platforms.host, testbeds.optimizations, programs.generator,
               programs.date, programs.generation_time, programs.src,
               testcases.harness, testcases.timeout, results.date,
               results.returncode, results.runtime, stdouts.stdout,
               stderrs.stderr, stderrs.truncated, threads.gsize_x,
               threads.gsize_y, threads.gsize_z, threads.lsize_x,
               threads.lsize_y, threads.lsize_z
        FROM results
        LEFT JOIN testbeds ON results.testbed_id = testbeds.id
        LEFT JOIN platforms ON testbeds.platform_id = platforms.id
        LEFT JOIN testcases on results.testcase_id = testcases.id
        LEFT JOIN programs on testcases.program_id = programs.id
        LEFT JOIN threads on testcases.threads_id = threads.id
        LEFT JOIN stdouts on results.stdout_id = stdouts.id
        LEFT JOIN stderrs on results.stderr_id = stderrs.id
        WHERE results.id >= %s
        ORDER BY results.id
        LIMIT %s
        """, (result_id, batch_size))
    i = 0
    for row in cursor:
      i += 1
      (
          result_id, platform_name, device_name, driver_version,
          opencl_version, devtype, host_os, cl_opt, generator_id,
          program_date, program_generation_time, program_src, harness_id,
          harness_timeout, result_date, returncode, runtime, stdout, stderr,
          truncated_stderr, gsize_x, gsize_y, gsize_z, lsize_x, lsize_y,
          lsize_z
      ) = row
      inputs = {
          "src": program_src,
      }
      # harness_id == -1 appears to mark testcases without thread geometry —
      # TODO(review): confirm what the -1 sentinel denotes.
      if harness_id != -1:
        inputs["gsize"] = f"{gsize_x},{gsize_y},{gsize_z}"
        inputs["lsize"] = f"{lsize_x},{lsize_y},{lsize_z}"
      testbed_name = OPENCL_DEVICE_MAP[device_name]
      testbed_opts = {}
      # NOTE(review): _SetIf presumably assigns only when the value is
      # truthy/non-empty — confirm against its definition.
      _SetIf(testbed_opts, 'opencl_device', device_name.strip())
      _SetIf(testbed_opts, 'opencl_version', opencl_version.strip())
      _SetIf(testbed_opts, 'host', HOSTS_MAP.get(host_os, host_os))
      if testbed_name == "clang":
        _SetIf(testbed_opts, 'llvm_version', driver_version.strip())
      else:
        _SetIf(testbed_opts, 'driver_version', driver_version.strip())
      _SetIf(testbed_opts, 'opencl_devtype',
             OPENCL_DEVTYPE_MAP.get(devtype, devtype))
      _SetIf(testbed_opts, 'opencl_platform', platform_name.strip())
      _SetIf(testbed_opts, 'opencl_opt', 'enabled' if cl_opt else 'disabled')
      testbed = deepsmith_pb2.Testbed(
          toolchain='opencl',
          name=testbed_name,
          opts=testbed_opts,
      )
      proto = deepsmith_pb2.Result(
          testcase=deepsmith_pb2.Testcase(
              toolchain="opencl",
              generator=_GetOpenCLGenerator(generator_id),
              harness=_GetOpenCLHarness(harness_id, harness_timeout),
              inputs=inputs,
              invariant_opts={},
              profiling_events=[
                  deepsmith_pb2.ProfilingEvent(
                      client="cc1",
                      type="generation",
                      duration_seconds=program_generation_time,
                      # NOTE(review): strftime('%s') is a glibc extension and
                      # is not portable (undefined on e.g. Windows) — confirm
                      # the deployment platform, or use a timestamp helper.
                      date_epoch_seconds=int(program_date.strftime('%s')),
                  ),
              ]
          ),
          testbed=testbed,
          returncode=returncode,
          outputs={
              "stdout": stdout,
              "stderr": stderr,
          },
          profiling_events=[
              deepsmith_pb2.ProfilingEvent(
                  # Map the recorded host OS to the machine's client name.
                  client={
                      'Ubuntu 16.04 64bit': 'cc1',
                      'CentOS Linux 7.1.1503 64bit': 'fuji',
                      'openSUSE 13.1 64bit': 'kobol',
                  }[host_os],
                  type="runtime",
                  duration_seconds=runtime,
                  date_epoch_seconds=int(result_date.strftime('%s')),
              ),
          ],
      )
      # One output file per result, named by its numeric ID.
      with open(proto_dir / 'opencl' / 'results' / str(result_id), 'wb') as f:
        f.write(proto.SerializeToString())
    # A short batch means the end of the table has been reached.
    if i < batch_size:
      return
def test_Generator_GetOrAdd_ToProto_equivalence(session):
  """A Result survives a GetOrAdd / ToProto round trip unchanged."""
  testcase = deepsmith_pb2.Testcase(
      toolchain='cpp',
      generator=deepsmith_pb2.Generator(name='generator'),
      harness=deepsmith_pb2.Harness(name='harness'),
      inputs={'src': 'void main() {}', 'data': '[1,2]'},
      invariant_opts={'config': 'opt'},
      profiling_events=[
          deepsmith_pb2.ProfilingEvent(
              client='localhost',
              type='generate',
              duration_ms=100,
              event_start_epoch_ms=1123123123,
          ),
          deepsmith_pb2.ProfilingEvent(
              client='localhost',
              type='foo',
              duration_ms=100,
              event_start_epoch_ms=1123123123,
          ),
      ])
  proto_in = deepsmith_pb2.Result(
      testcase=testcase,
      testbed=deepsmith_pb2.Testbed(
          toolchain='cpp',
          name='clang',
          opts={'arch': 'x86_64', 'build': 'debug+assert'},
      ),
      returncode=0,
      outputs={'stdout': 'Hello, world!', 'stderr': ''},
      profiling_events=[
          deepsmith_pb2.ProfilingEvent(
              client='localhost',
              type='exec',
              duration_ms=500,
              event_start_epoch_ms=1123123123,
          ),
          deepsmith_pb2.ProfilingEvent(
              client='localhost',
              type='overhead',
              duration_ms=100,
              event_start_epoch_ms=1123123123,
          ),
      ],
      outcome=deepsmith_pb2.Result.PASS,
  )
  result = deeplearning.deepsmith.result.Result.GetOrAdd(session, proto_in)
  # NOTE: We have to flush so that SQLAlchemy resolves all of the object IDs.
  session.flush()
  proto_out = result.ToProto()
  assert proto_out == proto_in
  # Sanity check: clearing a field must break equality.
  proto_out.ClearField('outputs')
  assert proto_out != proto_in