Example #1
def test_duplicate_testcase_testbed_ignored(session):
    """Test that result is ignored if testbed and testcase are not unique."""
    proto = deepsmith_pb2.Result(
        testcase=deepsmith_pb2.Testcase(
            toolchain='cpp',
            generator=deepsmith_pb2.Generator(name='generator'),
            harness=deepsmith_pb2.Harness(name='harness'),
            inputs={
                'src': 'void main() {}',
                'data': '[1,2]',
            },
            invariant_opts={
                'config': 'opt',
            },
            profiling_events=[
                deepsmith_pb2.ProfilingEvent(
                    client='localhost',
                    type='generate',
                    duration_ms=100,
                    event_start_epoch_ms=1123123123,
                ),
            ]),
        testbed=deepsmith_pb2.Testbed(
            toolchain='cpp',
            name='clang',
            opts={'arch': 'x86_64'},
        ),
        returncode=0,
        outputs={'stdout': 'Hello, world!'},
        profiling_events=[
            deepsmith_pb2.ProfilingEvent(
                client='localhost',
                type='exec',
                duration_ms=100,
                event_start_epoch_ms=1123123123,
            ),
        ],
        outcome=deepsmith_pb2.Result.PASS,
    )
    r1 = deeplearning.deepsmith.result.Result.GetOrAdd(session, proto)
    session.add(r1)
    session.flush()

    # Attempt to add a new result which is identical to the first in all fields
    # except for the outputs.
    proto.outputs['stdout'] = '!'
    r2 = deeplearning.deepsmith.result.Result.GetOrAdd(session, proto)
    session.add(r2)
    session.flush()

    # Check that only one result was added.
    assert session.query(deeplearning.deepsmith.result.Result).count() == 1

    # Check that only the first result was added.
    r3 = session.query(deeplearning.deepsmith.result.Result).first()
    assert r3.outputs['stdout'] == 'Hello, world!'
Example #2
def test_Generator_GetOrAdd_ToProto_equivalence(session):
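  """Test that GetOrAdd() and ToProto() round-trip a Result proto."""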
  proto_in = deepsmith_pb2.Result(
    testcase=deepsmith_pb2.Testcase(
      toolchain="cpp",
      generator=deepsmith_pb2.Generator(name="generator"),
      harness=deepsmith_pb2.Harness(name="harness"),
      inputs={"src": "void main() {}", "data": "[1,2]",},
      invariant_opts={"config": "opt",},
      profiling_events=[
        deepsmith_pb2.ProfilingEvent(
          client="localhost",
          type="generate",
          duration_ms=100,
          event_start_epoch_ms=1123123123,
        ),
        deepsmith_pb2.ProfilingEvent(
          client="localhost",
          type="foo",
          duration_ms=100,
          event_start_epoch_ms=1123123123,
        ),
      ],
    ),
    testbed=deepsmith_pb2.Testbed(
      toolchain="cpp",
      name="clang",
      opts={"arch": "x86_64", "build": "debug+assert",},
    ),
    returncode=0,
    outputs={"stdout": "Hello, world!", "stderr": "",},
    profiling_events=[
      deepsmith_pb2.ProfilingEvent(
        client="localhost",
        type="exec",
        duration_ms=500,
        event_start_epoch_ms=1123123123,
      ),
      deepsmith_pb2.ProfilingEvent(
        client="localhost",
        type="overhead",
        duration_ms=100,
        event_start_epoch_ms=1123123123,
      ),
    ],
    outcome=deepsmith_pb2.Result.PASS,
  )
  result = deeplearning.deepsmith.result.Result.GetOrAdd(session, proto_in)

  # NOTE: We have to flush so that SQLAlchemy resolves all of the object IDs.
  session.flush()
  proto_out = result.ToProto()
  assert proto_in == proto_out
  proto_out.ClearField("outputs")
  assert proto_in != proto_out  # Sanity check.
Example #3
def test_Testcase_GetOrAdd(session):
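    """Test that GetOrAdd() maps Testcase proto fields to database fields."""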
    proto = deepsmith_pb2.Testcase(
        toolchain="cpp",
        generator=deepsmith_pb2.Generator(name="generator", ),
        harness=deepsmith_pb2.Harness(name="harness", ),
        inputs={
            "src": "void main() {}",
            "data": "[1,2]"
        },
        invariant_opts={"config": "opt"},
        profiling_events=[
            deepsmith_pb2.ProfilingEvent(
                client="localhost",
                type="generate",
                duration_ms=100,
                event_start_epoch_ms=1021312312,
            ),
            deepsmith_pb2.ProfilingEvent(
                client="localhost",
                type="foo",
                duration_ms=100,
                event_start_epoch_ms=1230812312,
            ),
        ],
    )
    testcase = deeplearning.deepsmith.testcase.Testcase.GetOrAdd(
        session, proto)

    # NOTE: We have to flush so that SQLAlchemy resolves all of the object IDs.
    session.flush()
    assert testcase.toolchain.string == "cpp"
    assert testcase.generator.name == "generator"
    assert testcase.harness.name == "harness"
    assert len(testcase.inputset) == 2
    assert len(testcase.inputs) == 2
    assert testcase.inputs["src"] == "void main() {}"
    assert testcase.inputs["data"] == "[1,2]"
    assert len(testcase.invariant_optset) == 1
    assert len(testcase.invariant_opts) == 1
    assert testcase.invariant_opts["config"] == "opt"
    assert testcase.profiling_events[0].client.string == "localhost"
    assert testcase.profiling_events[0].type.string == "generate"
    assert testcase.profiling_events[0].duration_ms == 100
    assert testcase.profiling_events[
        0].event_start == labdate.DatetimeFromMillisecondsTimestamp(1021312312)
    assert testcase.profiling_events[1].client.string == "localhost"
    assert testcase.profiling_events[1].type.string == "foo"
    assert testcase.profiling_events[1].duration_ms == 100
    assert testcase.profiling_events[
        1].event_start == labdate.DatetimeFromMillisecondsTimestamp(1230812312)
Example #4
def test_duplicate_testcase_testbed_ignored(session):
  """Test that result is ignored if testbed and testcase are not unique."""
  proto = deepsmith_pb2.Result(
    testcase=deepsmith_pb2.Testcase(
      toolchain="cpp",
      generator=deepsmith_pb2.Generator(name="generator"),
      harness=deepsmith_pb2.Harness(name="harness"),
      inputs={"src": "void main() {}", "data": "[1,2]",},
      invariant_opts={"config": "opt",},
      profiling_events=[
        deepsmith_pb2.ProfilingEvent(
          client="localhost",
          type="generate",
          duration_ms=100,
          event_start_epoch_ms=1123123123,
        ),
      ],
    ),
    testbed=deepsmith_pb2.Testbed(
      toolchain="cpp", name="clang", opts={"arch": "x86_64"},
    ),
    returncode=0,
    outputs={"stdout": "Hello, world!"},
    profiling_events=[
      deepsmith_pb2.ProfilingEvent(
        client="localhost",
        type="exec",
        duration_ms=100,
        event_start_epoch_ms=1123123123,
      ),
    ],
    outcome=deepsmith_pb2.Result.PASS,
  )
  r1 = deeplearning.deepsmith.result.Result.GetOrAdd(session, proto)
  session.add(r1)
  session.flush()

  # Attempt to add a new result which is identical to the first in all fields
  # except for the outputs.
  proto.outputs["stdout"] = "!"
  r2 = deeplearning.deepsmith.result.Result.GetOrAdd(session, proto)
  session.add(r2)
  session.flush()

  # Check that only one result was added.
  assert session.query(deeplearning.deepsmith.result.Result).count() == 1

  # Check that only the first result was added.
  r3 = session.query(deeplearning.deepsmith.result.Result).first()
  assert r3.outputs["stdout"] == "Hello, world!"
Example #5
def test_Testcase_GetOrAdd(session):
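    """Test that GetOrAdd() maps Testcase proto fields to database fields."""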
    proto = deepsmith_pb2.Testcase(
        toolchain='cpp',
        generator=deepsmith_pb2.Generator(name='generator', ),
        harness=deepsmith_pb2.Harness(name='harness', ),
        inputs={
            'src': 'void main() {}',
            'data': '[1,2]'
        },
        invariant_opts={'config': 'opt'},
        profiling_events=[
            deepsmith_pb2.ProfilingEvent(
                client='localhost',
                type='generate',
                duration_ms=100,
                event_start_epoch_ms=1021312312,
            ),
            deepsmith_pb2.ProfilingEvent(
                client='localhost',
                type='foo',
                duration_ms=100,
                event_start_epoch_ms=1230812312,
            ),
        ])
    testcase = deeplearning.deepsmith.testcase.Testcase.GetOrAdd(
        session, proto)

    # NOTE: We have to flush so that SQLAlchemy resolves all of the object IDs.
    session.flush()
    assert testcase.toolchain.string == 'cpp'
    assert testcase.generator.name == 'generator'
    assert testcase.harness.name == 'harness'
    assert len(testcase.inputset) == 2
    assert len(testcase.inputs) == 2
    assert testcase.inputs['src'] == 'void main() {}'
    assert testcase.inputs['data'] == '[1,2]'
    assert len(testcase.invariant_optset) == 1
    assert len(testcase.invariant_opts) == 1
    assert testcase.invariant_opts['config'] == 'opt'
    assert testcase.profiling_events[0].client.string == 'localhost'
    assert testcase.profiling_events[0].type.string == 'generate'
    assert testcase.profiling_events[0].duration_ms == 100
    assert (testcase.profiling_events[0].event_start ==
            labdate.DatetimeFromMillisecondsTimestamp(1021312312))
    assert testcase.profiling_events[1].client.string == 'localhost'
    assert testcase.profiling_events[1].type.string == 'foo'
    assert testcase.profiling_events[1].duration_ms == 100
    assert (testcase.profiling_events[1].event_start ==
            labdate.DatetimeFromMillisecondsTimestamp(1230812312))
Example #6
def test_Generator_GetOrAdd_ToProto_equivalence(session):
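    """Test that GetOrAdd() and ToProto() round-trip a Testcase proto."""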
    proto_in = deepsmith_pb2.Testcase(
        toolchain="cpp",
        generator=deepsmith_pb2.Generator(name="generator", ),
        harness=deepsmith_pb2.Harness(name="harness", ),
        inputs={
            "src": "void main() {}",
            "data": "[1,2]"
        },
        invariant_opts={"config": "opt"},
        profiling_events=[
            deepsmith_pb2.ProfilingEvent(
                client="localhost",
                type="generate",
                duration_ms=100,
                event_start_epoch_ms=101231231,
            ),
        ],
    )
    testcase = deeplearning.deepsmith.testcase.Testcase.GetOrAdd(
        session, proto_in)

    # NOTE: We have to flush so that SQLAlchemy resolves all of the object IDs.
    session.flush()
    proto_out = testcase.ToProto()
    assert proto_in == proto_out
    proto_out.ClearField("toolchain")
    assert proto_in != proto_out  # Sanity check.
Example #7
def test_Generator_GetOrAdd_ToProto_equivalence(session):
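    """Test that GetOrAdd() and ToProto() round-trip a Testcase proto."""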
    proto_in = deepsmith_pb2.Testcase(
        toolchain='cpp',
        generator=deepsmith_pb2.Generator(name='generator', ),
        harness=deepsmith_pb2.Harness(name='harness', ),
        inputs={
            'src': 'void main() {}',
            'data': '[1,2]'
        },
        invariant_opts={'config': 'opt'},
        profiling_events=[
            deepsmith_pb2.ProfilingEvent(
                client='localhost',
                type='generate',
                duration_ms=100,
                event_start_epoch_ms=101231231,
            ),
        ])
    testcase = deeplearning.deepsmith.testcase.Testcase.GetOrAdd(
        session, proto_in)

    # NOTE: We have to flush so that SQLAlchemy resolves all of the object IDs.
    session.flush()
    proto_out = testcase.ToProto()
    assert proto_in == proto_out
    proto_out.ClearField('toolchain')
    assert proto_in != proto_out  # Sanity check.
Example #8
def _ExportSolidityTestcases(cursor, start_id, proto_dir):
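    """Export Solidity testcases without results to per-testcase proto files."""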
    batch_size = 1000
    testcase_id = start_id
    while True:
        cursor.execute(
            """
SELECT
  testcases.id,
  programs.generator,
  programs.date,
  programs.generation_time,
  programs.src,
  testcases.harness,
  testcases.timeout
FROM testcases
LEFT JOIN programs ON testcases.program_id = programs.id
WHERE testcases.id >= %s
AND testcases.id NOT IN (
  SELECT testcase_id FROM
    results
)
ORDER BY testcases.id
LIMIT %s
""", (testcase_id, batch_size))
        i = 0
        for row in cursor:
            i += 1
            (testcase_id, generator_id, program_date, program_generation_time,
             program_src, harness_id, harness_timeout) = row
            proto = deepsmith_pb2.Testcase(
                toolchain='solidity',
                generator=_GetSolidityGenerator(generator_id),
                harness=deepsmith_pb2.Harness(
                    name='solc',
                    opts={
                        'timeout_seconds':
                        str(int(harness_timeout)),
                        'url':
                        'https://github.com/ChrisCummins/dsmith/blob/5181c7c95575d428b5144a25549e5a5a55a3da31/dsmith/sol/harnesses.py#L117',
                    },
                ),
                inputs={
                    "src": program_src,
                },
                invariant_opts={},
                profiling_events=[
                    deepsmith_pb2.ProfilingEvent(
                        client="cc1",
                        type="generation",
                        duration_ms=int(program_generation_time * 1000),
                        event_start_epoch_ms=dateutil.MillisecondsTimestamp(
                            program_date),
                    ),
                ])
            with open(proto_dir / 'sol' / 'testcases' / str(testcase_id),
                      'wb') as f:
                f.write(proto.SerializeToString())
        if i < batch_size:
            return
Example #9
    def ToProto(self) -> deepsmith_pb2.ProfilingEvent:
        """Create protocol buffer representation.

        Returns:
          A ProfilingEvent message.
        """
        proto = deepsmith_pb2.ProfilingEvent()
        return self.SetProto(proto)
Example #10
def test_duplicate_testcases_ignored(session):
    """Test that testcases are only added if they are unique."""
    proto = deepsmith_pb2.Testcase(
        toolchain="cpp",
        generator=deepsmith_pb2.Generator(name="generator"),
        harness=deepsmith_pb2.Harness(name="harness"),
        inputs={
            "src": "void main() {}",
            "data": "[1,2]"
        },
        invariant_opts={"config": "opt"},
        profiling_events=[
            deepsmith_pb2.ProfilingEvent(
                client="localhost",
                type="generate",
                duration_ms=100,
                event_start_epoch_ms=1021312312,
            ),
            deepsmith_pb2.ProfilingEvent(
                client="localhost",
                type="foo",
                duration_ms=100,
                event_start_epoch_ms=1230812312,
            ),
        ],
    )
    t1 = deeplearning.deepsmith.testcase.Testcase.GetOrAdd(session, proto)
    session.add(t1)
    session.flush()

    # Attempt to add a new testcase which is identical to the first in all fields
    # except for the profiling events.
    proto.profiling_events[0].duration_ms = -1
    t2 = deeplearning.deepsmith.testcase.Testcase.GetOrAdd(session, proto)
    session.add(t2)
    session.flush()

    # Check that only one testcase was added.
    assert session.query(deeplearning.deepsmith.testcase.Testcase).count() == 1

    # Check that only the first testcase was added.
    t3 = session.query(deeplearning.deepsmith.testcase.Testcase).first()
    assert t3.profiling_events[0].duration_ms == 100
    assert t3.profiling_events[1].duration_ms == 100
Example #11
def test_duplicate_testcases_ignored(session):
    """Test that testcases are only added if they are unique."""
    proto = deepsmith_pb2.Testcase(
        toolchain='cpp',
        generator=deepsmith_pb2.Generator(name='generator'),
        harness=deepsmith_pb2.Harness(name='harness'),
        inputs={
            'src': 'void main() {}',
            'data': '[1,2]'
        },
        invariant_opts={'config': 'opt'},
        profiling_events=[
            deepsmith_pb2.ProfilingEvent(
                client='localhost',
                type='generate',
                duration_ms=100,
                event_start_epoch_ms=1021312312,
            ),
            deepsmith_pb2.ProfilingEvent(
                client='localhost',
                type='foo',
                duration_ms=100,
                event_start_epoch_ms=1230812312,
            ),
        ])
    t1 = deeplearning.deepsmith.testcase.Testcase.GetOrAdd(session, proto)
    session.add(t1)
    session.flush()

    # Attempt to add a new testcase which is identical to the first in all fields
    # except for the profiling events.
    proto.profiling_events[0].duration_ms = -1
    t2 = deeplearning.deepsmith.testcase.Testcase.GetOrAdd(session, proto)
    session.add(t2)
    session.flush()

    # Check that only one testcase was added.
    assert session.query(deeplearning.deepsmith.testcase.Testcase).count() == 1

    # Check that only the first testcase was added.
    t3 = session.query(deeplearning.deepsmith.testcase.Testcase).first()
    assert t3.profiling_events[0].duration_ms == 100
    assert t3.profiling_events[1].duration_ms == 100
Example #12
def _AddRandomNewTestcase(session):
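    """GetOrAdd() a Testcase proto with randomized fields and flush the session."""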
    deeplearning.deepsmith.testcase.Testcase.GetOrAdd(
        session,
        deepsmith_pb2.Testcase(
            toolchain=str(random.random()),
            generator=deepsmith_pb2.Generator(
                name=str(random.random()),
                opts={
                    str(random.random()): str(random.random()),
                    str(random.random()): str(random.random()),
                    str(random.random()): str(random.random()),
                },
            ),
            harness=deepsmith_pb2.Harness(
                name=str(random.random()),
                opts={
                    str(random.random()): str(random.random()),
                    str(random.random()): str(random.random()),
                    str(random.random()): str(random.random()),
                },
            ),
            inputs={
                str(random.random()): str(random.random()),
                str(random.random()): str(random.random()),
                str(random.random()): str(random.random()),
            },
            invariant_opts={
                str(random.random()): str(random.random()),
                str(random.random()): str(random.random()),
                str(random.random()): str(random.random()),
            },
            profiling_events=[
                deepsmith_pb2.ProfilingEvent(
                    client=str(random.random()),
                    type=str(random.random()),
                    duration_ms=int(random.random() * 1000),
                    event_start_epoch_ms=int(random.random() * 1000000),
                ),
            ],
        ),
    )
    session.flush()
Example #13
def _AddExistingTestcase(session):
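    """GetOrAdd() a fixed Testcase proto and flush the session."""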
    deeplearning.deepsmith.testcase.Testcase.GetOrAdd(
        session,
        deepsmith_pb2.Testcase(
            toolchain="cpp",
            generator=deepsmith_pb2.Generator(
                name="name",
                opts={
                    "a": "a",
                    "b": "b",
                    "c": "c",
                },
            ),
            harness=deepsmith_pb2.Harness(
                name="name",
                opts={
                    "a": "a",
                    "b": "b",
                    "c": "c",
                },
            ),
            inputs={
                "src": "void main() {}",
                "data": "[1,2]",
                "copt": "-DNDEBUG",
            },
            invariant_opts={
                "config": "opt",
                "working_dir": "/tmp",
                "units": "nanoseconds",
            },
            profiling_events=[
                deepsmith_pb2.ProfilingEvent(
                    client="localhost",
                    type="generate",
                    duration_ms=100,
                    event_start_epoch_ms=101231231,
                ),
            ],
        ),
    )
    session.flush()
Example #14
def _AddExistingTestcase(session):
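    """GetOrAdd() a fixed Testcase proto and flush the session."""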
    deeplearning.deepsmith.testcase.Testcase.GetOrAdd(
        session,
        deepsmith_pb2.Testcase(toolchain='cpp',
                               generator=deepsmith_pb2.Generator(
                                   name='name',
                                   opts={
                                       'a': 'a',
                                       'b': 'b',
                                       'c': 'c',
                                   },
                               ),
                               harness=deepsmith_pb2.Harness(
                                   name='name',
                                   opts={
                                       'a': 'a',
                                       'b': 'b',
                                       'c': 'c',
                                   },
                               ),
                               inputs={
                                   'src': 'void main() {}',
                                   'data': '[1,2]',
                                   'copt': '-DNDEBUG',
                               },
                               invariant_opts={
                                   'config': 'opt',
                                   'working_dir': '/tmp',
                                   'units': 'nanoseconds',
                               },
                               profiling_events=[
                                   deepsmith_pb2.ProfilingEvent(
                                       client='localhost',
                                       type='generate',
                                       duration_ms=100,
                                       event_start_epoch_ms=101231231,
                                   ),
                               ]))
    session.flush()
Example #15
def _ExportSolidityResults(cursor, start_id, proto_dir):
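    """Export Solidity results to per-result proto files."""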
    batch_size = 1000
    result_id = start_id
    while True:
        cursor.execute(
            """
SELECT
  results.id,
  platforms.platform,
  platforms.version,
  platforms.host,
  testbeds.optimizations,
  programs.generator,
  programs.date,
  programs.generation_time,
  programs.src,
  testcases.harness,
  testcases.timeout,
  results.date,
  results.returncode,
  results.runtime,
  stdouts.stdout,
  stderrs.stderr
FROM results
LEFT JOIN testbeds ON results.testbed_id = testbeds.id
LEFT JOIN platforms ON testbeds.platform_id = platforms.id
LEFT JOIN testcases ON results.testcase_id = testcases.id
LEFT JOIN programs ON testcases.program_id = programs.id
LEFT JOIN stdouts ON results.stdout_id = stdouts.id
LEFT JOIN stderrs ON results.stderr_id = stderrs.id
WHERE results.id >= %s
ORDER BY results.id
LIMIT %s
""", (result_id, batch_size))
        i = 0
        for row in cursor:
            i += 1
            (result_id, platform_name, platform_version, host_os,
             optimizations, generator_id, program_date,
             program_generation_time, program_src, harness_id, harness_timeout,
             result_date, returncode, runtime, stdout, stderr) = row
            assert harness_id == 2
            proto = deepsmith_pb2.Result(
                testcase=deepsmith_pb2.Testcase(
                    toolchain='solidity',
                    generator=_GetSolidityGenerator(generator_id),
                    harness=deepsmith_pb2.Harness(
                        name='solc',
                        opts={
                            'timeout_seconds':
                            str(int(harness_timeout)),
                            'url':
                            'https://github.com/ChrisCummins/dsmith/blob/5181c7c95575d428b5144a25549e5a5a55a3da31/dsmith/sol/harnesses.py#L117',
                        },
                    ),
                    inputs={
                        "src": program_src,
                    },
                    invariant_opts={},
                    profiling_events=[
                        deepsmith_pb2.ProfilingEvent(
                            client="cc1",
                            type="generation",
                            duration_ms=int(program_generation_time * 1000),
                            event_start_epoch_ms=dateutil.
                            MillisecondsTimestamp(program_date),
                        ),
                    ]),
                testbed=deepsmith_pb2.Testbed(
                    toolchain='solidity',
                    name=platform_name,
                    opts={
                        'version':
                        platform_version,
                        'optimizations':
                        'enabled' if optimizations else 'disabled',
                    },
                ),
                returncode=returncode,
                outputs={
                    "stdout": stdout,
                    "stderr": stderr,
                },
                profiling_events=[
                    deepsmith_pb2.ProfilingEvent(
                        client='cc1',
                        type="runtime",
                        duration_ms=int(runtime * 1000),
                        event_start_epoch_ms=dateutil.MillisecondsTimestamp(
                            result_date),
                    ),
                ],
            )
            with open(proto_dir / 'sol' / 'results' / str(result_id),
                      'wb') as f:
                f.write(proto.SerializeToString())
        if i < batch_size:
            return
Example #16
def _ExportOpenCLResults(cursor, start_id, proto_dir):
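    """Export OpenCL results to per-result proto files."""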
    batch_size = 1000
    result_id = start_id
    while True:
        cursor.execute(
            """
SELECT
  results.id,
  platforms.platform,
  platforms.device,
  platforms.driver,
  platforms.opencl,
  platforms.devtype,
  platforms.host,
  testbeds.optimizations,
  programs.generator,
  programs.date,
  programs.generation_time,
  programs.src,
  testcases.harness,
  testcases.timeout,
  results.date,
  results.returncode,
  results.runtime,
  stdouts.stdout,
  stderrs.stderr,
  stderrs.truncated,
  threads.gsize_x,
  threads.gsize_y,
  threads.gsize_z,
  threads.lsize_x,
  threads.lsize_y,
  threads.lsize_z,
  clsmith_testcase_metas.oclverified,
  dsmith_testcase_metas.gpuverified,
  dsmith_testcase_metas.oclverified,
  dsmith_program_metas.contains_floats,
  dsmith_program_metas.vector_inputs,
  dsmith_program_metas.compiler_warnings
FROM results
LEFT JOIN testbeds ON results.testbed_id = testbeds.id
LEFT JOIN platforms ON testbeds.platform_id = platforms.id
LEFT JOIN testcases ON results.testcase_id = testcases.id
LEFT JOIN programs ON testcases.program_id = programs.id
LEFT JOIN threads ON testcases.threads_id = threads.id
LEFT JOIN stdouts ON results.stdout_id = stdouts.id
LEFT JOIN stderrs ON results.stderr_id = stderrs.id
LEFT JOIN clsmith_testcase_metas ON testcases.id=clsmith_testcase_metas.id
LEFT JOIN dsmith_testcase_metas ON testcases.id=dsmith_testcase_metas.id
LEFT JOIN dsmith_program_metas ON programs.id=dsmith_program_metas.id
WHERE results.id >= %s
ORDER BY results.id
LIMIT %s
""", (result_id, batch_size))

        i = 0
        for row in cursor:
            i += 1
            (result_id, platform_name, device_name, driver_version,
             opencl_version, devtype, host_os, cl_opt, generator_id,
             program_date, program_generation_time, program_src, harness_id,
             harness_timeout, result_date, returncode, runtime, stdout, stderr,
             truncated_stderr, gsize_x, gsize_y, gsize_z, lsize_x, lsize_y,
             lsize_z, clsmith_oclverified, dsmith_gpuverified,
             dsmith_oclverified, dsmith_program_contains_floats,
             dsmith_program_vector_inputs,
             dsmith_program_compiler_warnings) = row
            inputs = {
                'src': program_src,
            }
            if harness_id != -1:
                inputs['gsize'] = f'{gsize_x},{gsize_y},{gsize_z}'
                inputs['lsize'] = f'{lsize_x},{lsize_y},{lsize_z}'
            testbed_name = OPENCL_DEVICE_MAP[device_name]
            testbed_opts = {}
            _SetIf(testbed_opts, 'opencl_device', device_name.strip())
            _SetIf(testbed_opts, 'opencl_version', opencl_version.strip())
            _SetIf(testbed_opts, 'host', HOSTS_MAP.get(host_os, host_os))
            if testbed_name == "clang":
                _SetIf(testbed_opts, 'llvm_version', driver_version.strip())
            else:
                _SetIf(testbed_opts, 'driver_version', driver_version.strip())
                _SetIf(testbed_opts, 'opencl_devtype',
                       OPENCL_DEVTYPE_MAP.get(devtype, devtype))
                _SetIf(testbed_opts, 'opencl_platform', platform_name.strip())
                _SetIf(testbed_opts, 'opencl_opt',
                       'enabled' if cl_opt else 'disabled')
            invariant_opts = {}
            if clsmith_oclverified == 0:
                invariant_opts['oclverify'] = 'fail'
            elif clsmith_oclverified == 1:
                invariant_opts['oclverify'] = 'pass'
            elif dsmith_oclverified == 0:
                invariant_opts['oclverify'] = 'fail'
            elif dsmith_oclverified == 1:
                invariant_opts['oclverify'] = 'pass'
            if dsmith_gpuverified == 0:
                invariant_opts['gpuverify'] = 'fail'
            elif dsmith_gpuverified == 1:
                invariant_opts['gpuverify'] = 'pass'
            if dsmith_program_contains_floats == 0:
                invariant_opts['kernel_uses_floats'] = 'false'
            elif dsmith_program_contains_floats == 1:
                invariant_opts['kernel_uses_floats'] = 'true'
            if dsmith_program_vector_inputs == 0:
                invariant_opts['kernel_has_vector_inputs'] = 'false'
            elif dsmith_program_vector_inputs == 1:
                invariant_opts['kernel_has_vector_inputs'] = 'true'
            if dsmith_program_compiler_warnings == 0:
                invariant_opts['kernel_throws_compiler_warning'] = 'false'
            elif dsmith_program_compiler_warnings == 1:
                invariant_opts['kernel_throws_compiler_warning'] = 'true'
            testbed = deepsmith_pb2.Testbed(
                toolchain='opencl',
                name=testbed_name,
                opts=testbed_opts,
            )

            proto = deepsmith_pb2.Result(
                testcase=deepsmith_pb2.Testcase(
                    toolchain="opencl",
                    generator=_GetOpenCLGenerator(generator_id),
                    harness=_GetOpenCLHarness(harness_id, harness_timeout),
                    inputs=inputs,
                    invariant_opts=invariant_opts,
                    profiling_events=[
                        deepsmith_pb2.ProfilingEvent(
                            client="cc1",
                            type="generation",
                            duration_ms=int(program_generation_time * 1000),
                            event_start_epoch_ms=dateutil.
                            MillisecondsTimestamp(program_date),
                        ),
                    ]),
                testbed=testbed,
                returncode=returncode,
                outputs={
                    "stdout": stdout,
                    "stderr": stderr,
                },
                profiling_events=[
                    deepsmith_pb2.ProfilingEvent(
                        client={
                            'Ubuntu 16.04 64bit': 'cc1',
                            'CentOS Linux 7.1.1503 64bit': 'fuji',
                            'openSUSE  13.1 64bit': 'kobol',
                        }[host_os],
                        type="runtime",
                        duration_ms=int(runtime * 1000),
                        event_start_epoch_ms=dateutil.MillisecondsTimestamp(
                            result_date),
                    ),
                ],
            )
            with open(proto_dir / 'opencl' / 'results' / str(result_id),
                      'wb') as f:
                f.write(proto.SerializeToString())
        if i < batch_size:
            return
Example #17
def _ExportOpenCLTestcases(cursor, start_id, proto_dir):
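    """Export OpenCL testcases without results to per-testcase proto files."""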
    batch_size = 1000
    testcase_id = start_id
    while True:
        cursor.execute(
            """
SELECT
  testcases.id,
  programs.generator,
  programs.date,
  programs.generation_time,
  programs.src,
  testcases.harness,
  testcases.timeout,
  threads.gsize_x,
  threads.gsize_y,
  threads.gsize_z,
  threads.lsize_x,
  threads.lsize_y,
  threads.lsize_z,
  clsmith_testcase_metas.oclverified,
  dsmith_testcase_metas.gpuverified,
  dsmith_testcase_metas.oclverified,
  dsmith_program_metas.contains_floats,
  dsmith_program_metas.vector_inputs,
  dsmith_program_metas.compiler_warnings
FROM testcases
LEFT JOIN programs ON testcases.program_id = programs.id
LEFT JOIN threads ON testcases.threads_id = threads.id
LEFT JOIN clsmith_testcase_metas ON testcases.id=clsmith_testcase_metas.id
LEFT JOIN dsmith_testcase_metas ON testcases.id=dsmith_testcase_metas.id
LEFT JOIN dsmith_program_metas ON programs.id=dsmith_program_metas.id
WHERE testcases.id >= %s
AND testcases.id NOT IN (
  SELECT testcase_id FROM
    results
)
ORDER BY testcases.id
LIMIT %s
""", (testcase_id, batch_size))
        i = 0
        for row in cursor:
            i += 1
            (testcase_id, generator_id, program_date, program_generation_time,
             program_src, harness_id, harness_timeout, gsize_x, gsize_y,
             gsize_z, lsize_x, lsize_y, lsize_z, clsmith_oclverified,
             dsmith_gpuverified, dsmith_oclverified,
             dsmith_program_contains_floats, dsmith_program_vector_inputs,
             dsmith_program_compiler_warnings) = row
            inputs = {
                "src": program_src,
            }
            if harness_id != -1:
                inputs["gsize"] = f"{gsize_x},{gsize_y},{gsize_z}"
                inputs["lsize"] = f"{lsize_x},{lsize_y},{lsize_z}"
            invariant_opts = {}
            if clsmith_oclverified == 0:
                invariant_opts['oclverify'] = 'fail'
            elif clsmith_oclverified == 1:
                invariant_opts['oclverify'] = 'pass'
            elif dsmith_oclverified == 0:
                invariant_opts['oclverify'] = 'fail'
            elif dsmith_oclverified == 1:
                invariant_opts['oclverify'] = 'pass'
            if dsmith_gpuverified == 0:
                invariant_opts['gpuverify'] = 'fail'
            elif dsmith_gpuverified == 1:
                invariant_opts['gpuverify'] = 'pass'
            if dsmith_program_contains_floats == 0:
                invariant_opts['kernel_uses_floats'] = 'false'
            elif dsmith_program_contains_floats == 1:
                invariant_opts['kernel_uses_floats'] = 'true'
            if dsmith_program_vector_inputs == 0:
                invariant_opts['kernel_has_vector_inputs'] = 'false'
            elif dsmith_program_vector_inputs == 1:
                invariant_opts['kernel_has_vector_inputs'] = 'true'
            if dsmith_program_compiler_warnings == 0:
                invariant_opts['kernel_throws_compiler_warning'] = 'false'
            elif dsmith_program_compiler_warnings == 1:
                invariant_opts['kernel_throws_compiler_warning'] = 'true'
            proto = deepsmith_pb2.Testcase(
                toolchain="opencl",
                generator=_GetOpenCLGenerator(generator_id),
                harness=_GetOpenCLHarness(harness_id, harness_timeout),
                inputs=inputs,
                invariant_opts=invariant_opts,
                profiling_events=[
                    deepsmith_pb2.ProfilingEvent(
                        client="cc1",
                        type="generation",
                        duration_ms=int(program_generation_time * 1000),
                        event_start_epoch_ms=dateutil.MillisecondsTimestamp(
                            program_date),
                    ),
                ])
            with open(proto_dir / 'opencl' / 'testcases' / str(testcase_id),
                      'wb') as f:
                f.write(proto.SerializeToString())
        if i < batch_size:
            return
Example #18
def test_Generator_GetOrAdd_ToProto_equivalence(session):
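    """Test that GetOrAdd() and ToProto() round-trip a Result proto."""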
    proto_in = deepsmith_pb2.Result(
        testcase=deepsmith_pb2.Testcase(
            toolchain='cpp',
            generator=deepsmith_pb2.Generator(name='generator'),
            harness=deepsmith_pb2.Harness(name='harness'),
            inputs={
                'src': 'void main() {}',
                'data': '[1,2]',
            },
            invariant_opts={
                'config': 'opt',
            },
            profiling_events=[
                deepsmith_pb2.ProfilingEvent(
                    client='localhost',
                    type='generate',
                    duration_ms=100,
                    event_start_epoch_ms=1123123123,
                ),
                deepsmith_pb2.ProfilingEvent(
                    client='localhost',
                    type='foo',
                    duration_ms=100,
                    event_start_epoch_ms=1123123123,
                ),
            ]),
        testbed=deepsmith_pb2.Testbed(
            toolchain='cpp',
            name='clang',
            opts={
                'arch': 'x86_64',
                'build': 'debug+assert',
            },
        ),
        returncode=0,
        outputs={
            'stdout': 'Hello, world!',
            'stderr': '',
        },
        profiling_events=[
            deepsmith_pb2.ProfilingEvent(
                client='localhost',
                type='exec',
                duration_ms=500,
                event_start_epoch_ms=1123123123,
            ),
            deepsmith_pb2.ProfilingEvent(
                client='localhost',
                type='overhead',
                duration_ms=100,
                event_start_epoch_ms=1123123123,
            ),
        ],
        outcome=deepsmith_pb2.Result.PASS,
    )
    result = deeplearning.deepsmith.result.Result.GetOrAdd(session, proto_in)

    # NOTE: We have to flush so that SQLAlchemy resolves all of the object IDs.
    session.flush()
    proto_out = result.ToProto()
    assert proto_in == proto_out
    proto_out.ClearField('outputs')
    assert proto_in != proto_out  # Sanity check.
Example #19
def _ExportOpenCLTestcases(cursor, start_id, proto_dir):
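  """Export OpenCL testcases without results to per-testcase proto files."""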
  batch_size = 1000
  testcase_id = start_id
  while True:
    cursor.execute("""
SELECT
  testcases.id,
  programs.generator,
  programs.date,
  programs.generation_time,
  programs.src,
  testcases.harness,
  testcases.timeout,
  threads.gsize_x,
  threads.gsize_y,
  threads.gsize_z,
  threads.lsize_x,
  threads.lsize_y,
  threads.lsize_z
FROM testcases
LEFT JOIN programs on testcases.program_id = programs.id
LEFT JOIN threads on testcases.threads_id = threads.id
WHERE testcases.id >= %s
AND testcases.id NOT IN (
  SELECT testcase_id FROM
    results
)
ORDER BY testcases.id
LIMIT %s
""", (testcase_id, batch_size))
    i = 0
    for row in cursor:
      i += 1
      (
        testcase_id,
        generator_id,
        program_date,
        program_generation_time,
        program_src,
        harness_id,
        harness_timeout,
        gsize_x,
        gsize_y,
        gsize_z,
        lsize_x,
        lsize_y,
        lsize_z
      ) = row
      inputs = {
        "src": program_src,
      }
      if harness_id != -1:
        inputs["gsize"] = f"{gsize_x},{gsize_y},{gsize_z}"
        inputs["lsize"] = f"{lsize_x},{lsize_y},{lsize_z}"
      proto = deepsmith_pb2.Testcase(
          toolchain="opencl",
          generator=_GetOpenCLGenerator(generator_id),
          harness=_GetOpenCLHarness(harness_id, harness_timeout),
          inputs=inputs,
          invariant_opts={},
          profiling_events=[
            deepsmith_pb2.ProfilingEvent(
                client="cc1",
                type="generation",
                duration_seconds=program_generation_time,
                date_epoch_seconds=int(program_date.strftime('%s')),
            ),
          ]
      )
      with open(proto_dir / 'opencl' / 'testcases' / str(testcase_id), 'wb') as f:
        f.write(proto.SerializeToString())
    if i < batch_size:
      return
Example #20
def _ExportOpenCLResults(cursor, start_id, proto_dir):
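  """Export OpenCL results to per-result proto files."""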
  batch_size = 1000
  result_id = start_id
  while True:
    cursor.execute("""
SELECT
  results.id,
  platforms.platform,
  platforms.device,
  platforms.driver,
  platforms.opencl,
  platforms.devtype,
  platforms.host,
  testbeds.optimizations,
  programs.generator,
  programs.date,
  programs.generation_time,
  programs.src,
  testcases.harness,
  testcases.timeout,
  results.date,
  results.returncode,
  results.runtime,
  stdouts.stdout,
  stderrs.stderr,
  stderrs.truncated,
  threads.gsize_x,
  threads.gsize_y,
  threads.gsize_z,
  threads.lsize_x,
  threads.lsize_y,
  threads.lsize_z
FROM results
LEFT JOIN testbeds ON results.testbed_id = testbeds.id
LEFT JOIN platforms ON testbeds.platform_id = platforms.id
LEFT JOIN testcases on results.testcase_id = testcases.id
LEFT JOIN programs on testcases.program_id = programs.id
LEFT JOIN threads on testcases.threads_id = threads.id
LEFT JOIN stdouts on results.stdout_id = stdouts.id
LEFT JOIN stderrs on results.stderr_id = stderrs.id
WHERE results.id >= %s
ORDER BY results.id
LIMIT %s
""", (result_id, batch_size))

    i = 0
    for row in cursor:
      i += 1
      (
        result_id,
        platform_name,
        device_name,
        driver_version,
        opencl_version,
        devtype,
        host_os,
        cl_opt,
        generator_id,
        program_date,
        program_generation_time,
        program_src,
        harness_id,
        harness_timeout,
        result_date,
        returncode,
        runtime,
        stdout,
        stderr,
        truncated_stderr,
        gsize_x,
        gsize_y,
        gsize_z,
        lsize_x,
        lsize_y,
        lsize_z
      ) = row
      inputs = {
        "src": program_src,
      }
      if harness_id != -1:
        inputs["gsize"] = f"{gsize_x},{gsize_y},{gsize_z}"
        inputs["lsize"] = f"{lsize_x},{lsize_y},{lsize_z}"
      testbed_name = OPENCL_DEVICE_MAP[device_name]
      testbed_opts = {}
      _SetIf(testbed_opts, 'opencl_device', device_name.strip())
      _SetIf(testbed_opts, 'opencl_version', opencl_version.strip())
      _SetIf(testbed_opts, 'host', HOSTS_MAP.get(host_os, host_os))
      if testbed_name == "clang":
        _SetIf(testbed_opts, 'llvm_version', driver_version.strip())
      else:
        _SetIf(testbed_opts, 'driver_version', driver_version.strip())
        _SetIf(testbed_opts, 'opencl_devtype', OPENCL_DEVTYPE_MAP.get(devtype, devtype))
        _SetIf(testbed_opts, 'opencl_platform', platform_name.strip())
        _SetIf(testbed_opts, 'opencl_opt', 'enabled' if cl_opt else 'disabled')
      testbed = deepsmith_pb2.Testbed(
          toolchain='opencl',
          name=testbed_name,
          opts=testbed_opts,
      )

      proto = deepsmith_pb2.Result(
          testcase=deepsmith_pb2.Testcase(
              toolchain="opencl",
              generator=_GetOpenCLGenerator(generator_id),
              harness=_GetOpenCLHarness(harness_id, harness_timeout),
              inputs=inputs,
              invariant_opts={},
              profiling_events=[
                deepsmith_pb2.ProfilingEvent(
                    client="cc1",
                    type="generation",
                    duration_seconds=program_generation_time,
                    date_epoch_seconds=int(program_date.strftime('%s')),
                ),
              ]
          ),
          testbed=testbed,
          returncode=returncode,
          outputs={
            "stdout": stdout,
            "stderr": stderr,
          },
          profiling_events=[
            deepsmith_pb2.ProfilingEvent(
                client={
                  'Ubuntu 16.04 64bit': 'cc1',
                  'CentOS Linux 7.1.1503 64bit': 'fuji',
                  'openSUSE  13.1 64bit': 'kobol',
                }[host_os],
                type="runtime",
                duration_seconds=runtime,
                date_epoch_seconds=int(result_date.strftime('%s')),
            ),
          ],
      )
      with open(proto_dir / 'opencl' / 'results' / str(result_id), 'wb') as f:
        f.write(proto.SerializeToString())
    if i < batch_size:
      return