Esempio n. 1
0
def drive_testcase(s: db.session_t, testcase: CLgenTestCase,
                   env: cldrive.OpenCLEnvironment, platform_id: int,
                   device_id: int, timeout: int = 60) -> return_t:
  """ run CLgen program test harness """
  harness = clgen_mkharness.mkharness(s, env, testcase)

  # Reserve a unique path for the compiled harness binary.
  with NamedTemporaryFile(prefix='cldrive-harness-', delete=False) as tmpfile:
    path = tmpfile.name
  try:
    clgen_mkharness.compile_harness(
        harness.src, path, platform_id=platform_id, device_id=device_id)

    cmd = ['timeout', '-s9', str(timeout), tmpfile.name]

    start_time = time()
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()

    def _decode(raw):
      # Replace undecodable program output with a sentinel, not a crash.
      try:
        return raw.decode('utf-8')
      except UnicodeError:
        return '<-- UTF-ERROR -->'

    stdout = _decode(stdout)
    stderr = _decode(stderr)
    runtime = time() - start_time

    return return_t(
        runtime=runtime, status=status_t(proc.returncode),
        stdout=stdout, stderr=stderr)
  finally:
    # Always remove the harness binary, even when compilation/execution fails.
    fs.rm(path)
Esempio n. 2
0
def import_clgen_sample(session: session_t, path: Path,
                        cl_launchable: bool = False,
                        harnesses: List[cldriveParams] = None,
                        delete: bool = False) -> None:
  """Import a CLgen sample file into the database.

  Reads the source at `path`, skipping duplicates (by sha1 hash) and empty
  files. Otherwise a new CLgenProgram record is added, test harnesses are
  generated for each requested parameter set, and the source file is
  optionally deleted.

  Args:
    session: Database session to import into.
    path: Path of the sample file to import.
    cl_launchable: Value for the program's cl_launchable column.
    harnesses: cldrive parameter sets to build test harnesses for, if any.
    delete: If True, delete the source file after a successful import.
  """
  # BUG FIX: the body previously referenced an undefined name `s`; bind it
  # to the `session` parameter.
  s = session
  # Avoid a mutable default argument; None means "no harnesses".
  harnesses = harnesses or []
  src = fs.read_file(path)
  hash_ = crypto.sha1_str(src)

  dupe = s.query(CLgenProgram).filter(CLgenProgram.hash == hash_).first()

  if dupe:
    print(f"warning: ignoring duplicate file {path}")
  elif not src:
    print(f"warning: ignoring empty file {path}")
  else:
    program = CLgenProgram(
        hash=hash_,
        runtime=len(src) / CLGEN_INFERENCE_CPS,
        src=src,
        linecount=len(src.split('\n')),
        cl_launchable=cl_launchable)
    s.add(program)
    s.commit()

    # Make test harnesses, if required.
    if harnesses:
      env = cldrive.make_env()
      for params in harnesses:
        testcase = get_or_create(
            s, CLgenTestCase, program_id=program.id, params_id=params.id)
        s.flush()
        clgen_mkharness.mkharness(s, env, testcase)

    if delete:
      fs.rm(path)
Esempio n. 3
0
File: cache.py Progetto: BeauJoh/phd
    def clear(self):
        """Empty the filesystem cache by deleting the entire cache directory."""
        fs.rm(self.path)
Esempio n. 4
0
def test_cp():
    # Create a source file with known contents.
    system.echo("Hello, world!", "/tmp/labm8.tmp")
    assert fs.read("/tmp/labm8.tmp") == ["Hello, world!"]
    # Remove any stale copy left over from a previous run.
    fs.rm("/tmp/labm8.tmp.copy")
    assert not fs.exists("/tmp/labm8.tmp.copy")
    fs.cp("/tmp/labm8.tmp", "/tmp/labm8.tmp.copy")
    # The copy must match the source.
    assert fs.read("/tmp/labm8.tmp.copy") == fs.read("/tmp/labm8.tmp")
Esempio n. 5
0
def test_cp_dir():
    # Start from a clean slate.
    for stale in ("/tmp/labm8", "/tmp/labm8.copy"):
        fs.rm(stale)
    fs.mkdir("/tmp/labm8/foo/bar")
    assert not fs.exists("/tmp/labm8.copy")
    fs.cp("/tmp/labm8/", "/tmp/labm8.copy")
    # The full directory tree must have been copied.
    for subpath in ("", "/foo", "/foo/bar"):
        assert fs.isdir("/tmp/labm8.copy" + subpath)
Esempio n. 6
0
def migrate_3_to_4(old):
    """
  SkelCL database migration script.

  Migrates a version 3 database to version 4 by copying every table into a
  freshly-created database file, verifying row counts, then replacing the
  original file. A backup of the version 3 database is kept at '<path>.3'.

  Arguments:

      old (SkelCLDatabase): The database to migrate
  """
    # Create temporary database
    fs.rm("/tmp/omnitune.skelcl.migration.db")
    tmp = _db.Database("/tmp/omnitune.skelcl.migration.db")
    # Attach the old database so its tables are readable as "rhs.<table>".
    tmp.attach(old.path, "rhs")

    io.info("Migrating database to version 4.")

    backup_path = old.path + ".3"
    io.info("Creating backup of old database at '{0}'".format(backup_path))
    fs.cp(old.path, backup_path)

    tables = [
        "kernels", "kernel_lookup", "kernel_names", "devices", "device_lookup",
        "datasets", "dataset_lookup", "scenarios", "params", "runtimes",
        "runtime_stats", "oracle_params"
    ]

    for table in tables:
        io.info("Copying data from '{}' ...".format(table))
        tmp.execute("INSERT INTO {} SELECT * FROM rhs.{}".format(table, table))

    tmp_path = tmp.path
    old_path = old.path

    # Compact the new database file.
    tmp.execute("VACUUM")

    # Sanity checks
    bad = False
    for table in tables:
        # Row counts must agree between the source and the migrated table.
        old_count = tmp.num_rows("rhs." + table)
        tmp_count = tmp.num_rows(table)

        if old_count != tmp_count:
            io.error("Bad rows count:", old_count, tmp_count)
            bad = True

    if bad:
        io.fatal("Failed sanity check, aborting.")
    else:
        io.info("Passed sanity check.")

    # Copy migrated database over the original one.
    fs.cp(tmp_path, old_path)
    fs.rm(tmp_path)

    old.close()
    tmp.close()
    io.info("Migration completed.")
Esempio n. 7
0
def verify_dsmith_testcase(testcase: 'Testcase') -> bool:
  """Verify a dsmith testcase under oclgrind; True if it passes."""
  # Reserve a unique path for the compiled harness binary.
  with NamedTemporaryFile(prefix='dsmith-oclgrind-', delete=False) as f:
    path = f.name
  try:
    _, _, harness = mkharness.mkharness(testcase)
    mkharness.compile_harness(harness, path, platform_id=0, device_id=0)
    return oclgrind_verify([path])
  finally:
    # Remove the binary even if compilation or verification fails.
    fs.rm(path)
Esempio n. 8
0
def verify_clsmith_testcase(testcase: 'Testcase') -> bool:
  """Verify a CLSmith testcase under oclgrind; True if it passes."""
  # Reserve a unique path for the testcase source file.
  with NamedTemporaryFile(prefix='dsmith-oclgrind-', delete=False) as f:
    path = f.name
  try:
    with open(path, "w") as outfile:
      print(testcase.program.src, file=outfile)
    cli = clsmith.cl_launcher_cli(
        path, 0, 0, optimizations=True, timeout=None)
    return oclgrind_verify(cli)
  finally:
    # Remove the scratch source file even on failure.
    fs.rm(path)
Esempio n. 9
0
def test_write_file():
    original = {"a": "1", "b": "2"}
    # Round-trip the dict through two separate files.
    results = []
    for path in ("/tmp/labm8.write_file.json", "/tmp/labm8.write_file2.json"):
        jsonutil.write_file(path, original)
        results.append(jsonutil.read_file(path))
        fs.rm(path)

    assert original == results[0] == results[1]
Esempio n. 10
0
def test_scp():
    # Create a source file with known contents.
    system.echo("Hello, world!", "/tmp/labm8.tmp")
    assert fs.read("/tmp/labm8.tmp") == ["Hello, world!"]
    # Remove any stale destination from a previous run.
    fs.rm("/tmp/labm8.tmp.copy")
    assert not fs.exists("/tmp/labm8.tmp.copy")
    # Perform the copy over scp; `path` presumably points at a test scp
    # shim on disk — TODO confirm against system.scp.
    system.scp("localhost",
               "/tmp/labm8.tmp",
               "/tmp/labm8.tmp.copy",
               path="lib/labm8/data/test/bin")
    assert fs.read("/tmp/labm8.tmp.copy") == fs.read("/tmp/labm8.tmp")
Esempio n. 11
0
def test_GitHubRepo_Index_not_cloned(test_repo: github_repo.GitHubRepo):
    """Indexing a repo which is not cloned does nothing."""
    fs.rm(test_repo.clone_dir)
    assert not test_repo.IsIndexed()
    config = scrape_repos_pb2.ContentFilesImporterConfig(
        source_code_pattern='.*\\.java',
        preprocessor=[
            "datasets.github.scrape_repos.preprocessors."
            "extractors:JavaMethods"
        ])
    test_repo.Index([config], multiprocessing.Pool(1))
    # Still not indexed: Index() is a no-op without a local clone.
    assert not test_repo.IsIndexed()
Esempio n. 12
0
def migrate_1_to_2(old):
    """
  SkelCL database migration script.

  Migrates a version 1 database to version 2: renames table "data" to
  "datasets" and column "scenarios.data" to "scenarios.dataset", working on
  a temporary copy which then replaces the original file. A backup of the
  version 1 database is kept at '<path>.1'.

  Arguments:

      old (SkelCLDatabase): The database to migrate
  """
    # Create temporary database
    fs.cp(old.path, "/tmp/omnitune.skelcl.migration.db")
    tmp = _db.Database("/tmp/omnitune.skelcl.migration.db")

    io.info("Migrating database to version 2.")

    backup_path = old.path + ".1"
    io.info("Creating backup of old database at '{0}'".format(backup_path))
    fs.cp(old.path, backup_path)

    # Update database version
    tmp.drop_table("version")
    tmp.create_table("version", (("version", "integer"), ))
    tmp.execute("INSERT INTO version VALUES (2)")

    # Rename table "data" to "datasets"
    tmp.create_table("datasets", (("id", "text primary key"),
                                  ("width", "integer"), ("height", "integer"),
                                  ("tin", "text"), ("tout", "text")))
    tmp.execute("INSERT INTO datasets SELECT * FROM data")
    tmp.drop_table("data")

    # Rename column "scenarios.data" to "scenarios.dataset"
    # (done by rebuilding the table, since older SQLite cannot rename a
    # column in place).
    tmp.execute("ALTER TABLE scenarios RENAME TO old_scenarios")
    tmp.create_table("scenarios", (("id", "text primary key"),
                                   ("host", "text"), ("device", "text"),
                                   ("kernel", "text"), ("dataset", "text")))
    tmp.execute("INSERT INTO scenarios SELECT * FROM old_scenarios")
    tmp.drop_table("old_scenarios")

    tmp.commit()

    old_path = old.path
    tmp_path = tmp.path

    # Copy migrated database over the original one.
    fs.cp(tmp_path, old_path)
    fs.rm(tmp_path)

    old.close()
    tmp.close()
    io.info("Migration completed.")
Esempio n. 13
0
def test_rm():
    # Removing a single file.
    system.echo("Hello, world!", "/tmp/labm8.tmp")
    assert fs.isfile("/tmp/labm8.tmp")
    fs.rm("/tmp/labm8.tmp")
    assert not fs.isfile("/tmp/labm8.tmp")
    # Removing a path that no longer exists is a harmless no-op.
    fs.rm("/tmp/labm8.tmp")
    fs.rm("/tmp/labm8.tmp")
    # Removing a directory tree.
    fs.rm("/tmp/labm8.dir")
    fs.mkdir("/tmp/labm8.dir/foo/bar")
    system.echo("Hello, world!", "/tmp/labm8.dir/foo/bar/baz")
    assert fs.isfile("/tmp/labm8.dir/foo/bar/baz")
    fs.rm("/tmp/labm8.dir")
    assert not fs.isfile("/tmp/labm8.dir/foo/bar/baz")
    assert not fs.isfile("/tmp/labm8.dir/")
Esempio n. 14
0
def test_rmtrash():
    """rmtrash() removes files and directory trees, and tolerates repeats."""
    with tempfile.NamedTemporaryFile(prefix='labm8_') as f:
        assert fs.isfile(f.name)
        fs.rmtrash(f.name)
        assert not fs.isfile(f.name)
        # rmtrash on an already-removed path is a no-op.
        fs.rmtrash(f.name)
        fs.rm(f.name)
    with tempfile.TemporaryDirectory() as d:
        fs.rm(d)
        fs.mkdir(d, "foo/bar")
        system.echo("Hello, world!", fs.path(d, "foo/bar/baz"))
        # BUG FIX: this assertion previously passed `f` (the closed file
        # object from the first context manager) instead of the directory `d`.
        assert fs.isfile(d, "foo/bar/baz")
        fs.rmtrash(d)
        assert not fs.isfile(d, "foo/bar/baz")
        assert not fs.isdir(d)
Esempio n. 15
0
File: cache.py Progetto: BeauJoh/phd
    def __delitem__(self, key):
        """Delete cached file.

        Arguments:
            key: Key.

        Raises:
            KeyError: If file not in cache.
        """
        path = self.keypath(key)
        # Guard clause: a missing cache entry maps onto dict-like KeyError.
        if not fs.exists(path):
            raise KeyError(key)
        fs.rm(path)
Esempio n. 16
0
def test_cp_over_dir():
    fs.mkdir("/tmp/labm8.tmp.src")
    system.echo("Hello, world!", "/tmp/labm8.tmp.src/foo")
    # Ensure the destination exists as an empty directory.
    fs.rm("/tmp/labm8.tmp.copy")
    fs.mkdir("/tmp/labm8.tmp.copy")
    assert fs.isdir("/tmp/labm8.tmp.src")
    assert fs.isfile("/tmp/labm8.tmp.src/foo")
    assert fs.isdir("/tmp/labm8.tmp.copy")
    assert not fs.isfile("/tmp/labm8.tmp.copy/foo")
    # Copying a directory over an existing directory merges into it.
    fs.cp("/tmp/labm8.tmp.src", "/tmp/labm8.tmp.copy/")
    for d in ("/tmp/labm8.tmp.src", "/tmp/labm8.tmp.copy"):
        assert fs.isdir(d)
        assert fs.isfile(d + "/foo")
    assert (fs.read("/tmp/labm8.tmp.src/foo") ==
            fs.read("/tmp/labm8.tmp.copy/foo"))
Esempio n. 17
0
def RunTestcase(opencl_environment: env.OpenCLEnvironment,
                testbed: deepsmith_pb2.Testbed,
                testcase: deepsmith_pb2.Testcase,
                cflags: typing.List[str]) -> deepsmith_pb2.Result:
    """Run a testcase.

    Compiles a cldrive driver for the testcase, executes it under a
    'timeout' wrapper in the given OpenCL environment, and packages the
    outcome (returncode, stdout/stderr, runtime profiling event) into a
    Result proto.

    Args:
        opencl_environment: The environment providing platform/device ids
            and the Exec() method used to run the driver.
        testbed: Testbed recorded in the result.
        testcase: Testcase to run. Must use the 'opencl' toolchain and the
            'cldrive' harness.
        cflags: Extra compiler flags passed to CompileDriver().

    Returns:
        A deepsmith_pb2.Result message.

    Raises:
        ValueError: If the testcase toolchain or harness is unsupported.
    """
    if testcase.toolchain != 'opencl':
        raise ValueError(
            f"Unsupported testcase toolchain: '{testcase.toolchain}'")
    if testcase.harness.name != 'cldrive':
        raise ValueError(
            f"Unsupported testcase harness: '{testcase.harness.name}'")
    result = deepsmith_pb2.Result()
    result.testbed.CopyFrom(testbed)
    platform_id, device_id = opencl_environment.ids()
    driver = MakeDriver(
        testcase, True if testbed.opts['opencl_opt'] == 'enabled' else False)
    # MakeDriver() annotates the testcase, so we must only set the testcase field
    # of the output result after we have called it.
    result.testcase.CopyFrom(testcase)
    # Get a temporary file to write and run the driver from.
    with tempfile.NamedTemporaryFile(prefix='deepsmith_', delete=False) as f:
        path = pathlib.Path(f.name)
    try:
        CompileDriver(driver, path, platform_id, device_id, cflags=cflags)
        # Timeout is kept as a string: it is passed directly on the command
        # line to the 'timeout' binary.
        timeout = testcase.harness.opts.get('timeout_seconds', '60')
        cmd = ['timeout', '-s9', timeout, f.name]
        start_time = labdate.GetUtcMillisecondsNow()
        proc = opencl_environment.Exec(cmd)
        end_time = labdate.GetUtcMillisecondsNow()
        # Build result message.
        result.returncode = proc.returncode
        result.outputs['stdout'] = proc.stdout
        result.outputs['stderr'] = proc.stderr
        runtime = result.profiling_events.add()
        runtime.client = system.HOSTNAME
        runtime.type = 'runtime'
        runtime.duration_ms = int(
            round((end_time - start_time).total_seconds() * 1000))
        runtime.event_start_epoch_ms = labdate.MillisecondsTimestamp(
            start_time)
        result.outcome = GetResultOutcome(result)
    except DriverCompilationError as e:
        logging.warning('%s', e)
        result.outcome = deepsmith_pb2.Result.UNKNOWN
    finally:
        # Always delete the compiled driver binary.
        fs.rm(path)
    return result
Esempio n. 18
0
def test_set_and_get():
    fs.rm("/tmp/labm8-cache-set-and-get")
    fscache = cache.FSCache("/tmp/labm8-cache-set-and-get")
    # Write a file with known contents and sanity-check it.
    system.echo("Hello, world!", "/tmp/labm8.testfile.txt")
    assert fs.read("/tmp/labm8.testfile.txt") == ["Hello, world!"]
    # Inserting the file moves it into the cache.
    fscache['foobar'] = "/tmp/labm8.testfile.txt"
    assert fs.isfile(fscache.keypath("foobar"))
    assert not fs.isfile("/tmp/labm8.testfile.txt")
    # Contents are preserved, and get() agrees with __getitem__.
    assert fs.read(fscache['foobar']) == ["Hello, world!"]
    assert fs.read(fscache['foobar']) == fs.read(fscache.get('foobar'))
    fscache.clear()
Esempio n. 19
0
File: co.py Progetto: BeauJoh/phd
def drive(command: List[str], src: str) -> return_t:
  """Invoke cldrive on an OpenCL source string.

  Args:
    command: Arguments appended to the co.sh invocation.
    src: OpenCL source fed to the process on stdin.

  Returns:
    A return_t with runtime, exit status, stdout and stderr.
  """
  start_time = time()

  # Reserve a unique scratch path. The default delete=True removes the file
  # at context exit, so only the name is kept for co.sh to use.
  with NamedTemporaryFile() as tmp:
    tmp_path = tmp.name

  cli = ['timeout', '-s9', '60', './libexec/co.sh', tmp_path] + command
  try:
    process = Popen(cli, stdin=PIPE, stdout=PIPE, stderr=PIPE)
    stdout, stderr = process.communicate(src.encode('utf-8'))
  finally:
    # BUG FIX: previously the scratch file leaked if Popen() or
    # communicate() raised; clean it up unconditionally.
    fs.rm(tmp_path)
  stdout, stderr = stdout.decode('utf-8'), stderr.decode('utf-8')

  runtime = time() - start_time

  return return_t(
      runtime=runtime, status=status_t(process.returncode),
      stdout=stdout, stderr=stderr)
Esempio n. 20
0
def build_with_clang(program: Union[CLgenProgram, CLSmithProgram],
                     clang: str) -> Tuple[int, float, str]:
    """Compile a program's source with clang -cc1.

    Returns a (returncode, elapsed_seconds, stderr) tuple.
    """
    # Reserve a unique path for the scratch source file.
    with NamedTemporaryFile(prefix='buildaclang-', delete=False) as f:
        path = f.name
    try:
        with open(path, "w") as outfile:
            print(program.src, file=outfile)

        cmd = ['timeout', '-s9', '60s', clang, '-cc1', '-xcl', path]

        started = time()
        proc = subprocess.Popen(cmd,
                                universal_newlines=True,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        _, err = proc.communicate()

        return proc.returncode, time() - started, err.strip()

    finally:
        # Remove the scratch file even when compilation fails.
        fs.rm(path)
Esempio n. 21
0
def test_ls_abspaths():
    fs.cp("lib/labm8/data/test/testdir", "/tmp/testdir")
    # Non-recursive listing returns only top-level entries.
    assert fs.ls("/tmp/testdir", abspaths=True) == [
        "/tmp/testdir/" + name for name in ("a", "b", "c", "d")
    ]
    # Recursive listing includes every nested entry.
    assert fs.ls("/tmp/testdir", recursive=True, abspaths=True) == [
        "/tmp/testdir/" + name for name in (
            "a", "b", "c", "c/e", "c/f", "c/f/f", "c/f/f/i", "c/f/h",
            "c/g", "d")
    ]
    fs.rm("/tmp/testdir")
Esempio n. 22
0
def test_rm_glob():
    fs.mkdir("/tmp/labm8.glob")
    for name in ("1", "2", "abc"):
        system.echo("Hello, world!", "/tmp/labm8.glob/" + name)

    # With glob=False the pattern is taken as a literal filename,
    # so nothing is removed.
    fs.rm("/tmp/labm8.glob/a*", glob=False)
    assert fs.isfile("/tmp/labm8.glob/1")
    assert fs.isfile("/tmp/labm8.glob/2")
    assert fs.isfile("/tmp/labm8.glob/abc")

    # Glob expansion removes only matching entries.
    fs.rm("/tmp/labm8.glob/a*")
    assert fs.isfile("/tmp/labm8.glob/1")
    assert fs.isfile("/tmp/labm8.glob/2")
    assert not fs.isfile("/tmp/labm8.glob/abc")

    # "*" matches everything that remains.
    fs.rm("/tmp/labm8.glob/*")
    assert not fs.isfile("/tmp/labm8.glob/1")
    assert not fs.isfile("/tmp/labm8.glob/2")
    assert not fs.isfile("/tmp/labm8.glob/abc")
Esempio n. 23
0
def migrate_0_to_1(old):
    """
  SkelCL database migration script.

  Migrates a version 0 database to version 1: the flat "runtimes" rows of
  the old schema are split into normalized kernels/devices/data/params/
  scenarios/runtimes tables, keyed by content hashes. A backup of the
  version 0 database is kept at '<path>.0'.

  Arguments:

      old (SkelCLDatabase): The database to migrate
  """
    # Look up the kernel source stored under the given checksum.
    def get_source(checksum):
        query = old.execute("SELECT source FROM kernels WHERE checksum = ?",
                            (checksum, ))
        return query.fetchone()[0]

    # Build a new-format device row from the old "devices" table.
    def get_device_attr(device_id, name, count):
        query = old.execute("SELECT * FROM devices WHERE name = ?", (name, ))
        attr = query.fetchone()

        # Splice into the new
        newattr = (device_id, attr[0], count) + attr[2:]
        return newattr

    # Convert one flat old "runtimes" row into entries across the new,
    # normalized tables.
    def process_row(tmp, row):
        # Get column values from row.
        host = row[0]
        dev_name = row[1]
        dev_count = row[2]
        kern_checksum = row[3]
        north = row[4]
        south = row[5]
        east = row[6]
        west = row[7]
        data_width = row[8]
        data_height = row[9]
        max_wg_size = row[10]
        wg_c = row[11]
        wg_r = row[12]
        runtime = row[13]
        type_in = "float"
        type_out = "float"

        # Lookup source code.
        source = get_source(kern_checksum)
        user_source = get_user_source(source)

        # Derive content-hash ids for each normalized entity.
        kernel_id = hash_kernel(north, south, east, west, max_wg_size, source)
        device_id = hash_device(dev_name, dev_count)
        data_id = hash_data(data_width, data_height, type_in, type_out)
        scenario_id = hash_scenario(host, device_id, kernel_id, data_id)
        params_id = hash_workgroup_size(wg_c, wg_r)

        device_attr = get_device_attr(device_id, dev_name, dev_count)

        # Add database entries.
        tmp.execute(
            "INSERT OR IGNORE INTO kernels VALUES (?,?,?,?,?,?,?)",
            (kernel_id, north, south, east, west, max_wg_size, user_source))

        placeholders = ",".join(["?"] * len(device_attr))
        tmp.execute(
            "INSERT OR IGNORE INTO devices VALUES (" + placeholders + ")",
            device_attr)

        tmp.execute("INSERT OR IGNORE INTO data VALUES (?,?,?,?,?)",
                    (data_id, data_width, data_height, type_in, type_out))

        tmp.execute("INSERT OR IGNORE INTO params VALUES (?,?,?)",
                    (params_id, wg_c, wg_r))

        tmp.execute("INSERT OR IGNORE INTO scenarios VALUES (?,?,?,?,?)",
                    (scenario_id, host, device_id, kernel_id, data_id))

        tmp.execute("INSERT INTO runtimes VALUES (?,?,?)",
                    (scenario_id, params_id, runtime))

    # Create temporary database
    tmp = _db.Database("/tmp/omnitune.skelcl.migration.db")

    # Clear anything that's already in the database.
    for table in tmp.tables:
        tmp.drop_table(table)

    io.info("Migrating database to version 1.")

    backup_path = old.path + ".0"
    io.info("Creating backup of old database at '{0}'".format(backup_path))
    fs.cp(old.path, backup_path)

    io.debug("Migration: creating tables ...")

    # Create table: version
    tmp.create_table("version", (("version", "integer"), ))

    # Set database version
    tmp.execute("INSERT INTO version VALUES (1)")

    # Create table: kernels
    tmp.create_table("kernels",
                     (("id", "text primary key"), ("north", "integer"),
                      ("south", "integer"), ("east", "integer"),
                      ("west", "integer"), ("max_wg_size", "integer"),
                      ("source", "text")))

    # Create table: devices
    tmp.create_table(
        "devices",
        (("id", "text primary key"), ("name", "text"), ("count", "integer"),
         ("address_bits", "integer"), ("double_fp_config", "integer"),
         ("endian_little", "integer"), ("execution_capabilities", "integer"),
         ("extensions", "text"), ("global_mem_cache_size", "integer"),
         ("global_mem_cache_type", "integer"),
         ("global_mem_cacheline_size", "integer"),
         ("global_mem_size", "integer"), ("host_unified_memory", "integer"),
         ("image2d_max_height", "integer"), ("image2d_max_width", "integer"),
         ("image3d_max_depth", "integer"), ("image3d_max_height", "integer"),
         ("image3d_max_width", "integer"), ("image_support", "integer"),
         ("local_mem_size", "integer"), ("local_mem_type", "integer"),
         ("max_clock_frequency", "integer"), ("max_compute_units", "integer"),
         ("max_constant_args", "integer"),
         ("max_constant_buffer_size", "integer"),
         ("max_mem_alloc_size", "integer"), ("max_parameter_size", "integer"),
         ("max_read_image_args", "integer"), ("max_samplers", "integer"),
         ("max_work_group_size",
          "integer"), ("max_work_item_dimensions",
                       "integer"), ("max_work_item_sizes_0", "integer"),
         ("max_work_item_sizes_1",
          "integer"), ("max_work_item_sizes_2",
                       "integer"), ("max_write_image_args", "integer"),
         ("mem_base_addr_align",
          "integer"), ("min_data_type_align_size",
                       "integer"), ("native_vector_width_char", "integer"),
         ("native_vector_width_double",
          "integer"), ("native_vector_width_float",
                       "integer"), ("native_vector_width_half", "integer"),
         ("native_vector_width_int",
          "integer"), ("native_vector_width_long",
                       "integer"), ("native_vector_width_short", "integer"),
         ("preferred_vector_width_char",
          "integer"), ("preferred_vector_width_double",
                       "integer"), ("preferred_vector_width_float", "integer"),
         ("preferred_vector_width_half",
          "integer"), ("preferred_vector_width_int",
                       "integer"), ("preferred_vector_width_long", "integer"),
         ("preferred_vector_width_short", "integer"),
         ("queue_properties", "integer"), ("single_fp_config",
                                           "integer"), ("type", "integer"),
         ("vendor", "text"), ("vendor_id", "text"), ("version", "text")))

    # Create table: data
    tmp.create_table("data", (("id", "text primary key"), ("width", "integer"),
                              ("height", "integer"), ("tin", "text"),
                              ("tout", "text")))

    # Create table: params
    tmp.create_table("params", (("id", "text primary key"),
                                ("wg_c", "integer"), ("wg_r", "integer")))

    # Create table: scenarios
    tmp.create_table("scenarios", (("id", "text primary key"),
                                   ("host", "text"), ("device", "text"),
                                   ("kernel", "text"), ("data", "text")))

    # Create table: runtimes
    tmp.create_table("runtimes", (("scenario", "text"), ("params", "text"),
                                  ("runtime", "real")))

    # Migrate every flat runtime row, committing periodically.
    i = 0
    for row in old.execute("SELECT * from runtimes"):
        process_row(tmp, row)
        i += 1
        if not i % 2500:
            io.debug("Processed", i, "rows ...")
            if not i % 5000:
                tmp.commit()

    tmp.commit()

    old_path = old.path
    tmp_path = tmp.path

    # Copy migrated database over the original one.
    fs.cp(tmp_path, old_path)
    fs.rm(tmp_path)

    old.close()
    tmp.close()
    io.info("Migration completed.")
Esempio n. 24
0
def migrate_2_to_3(old):
    """
  SkelCL database migration script.

  Migrates a version 2 database to version 3: entities are re-keyed by
  content hashes, with lookup tables mapping old identifiers to new ones,
  working on a temporary database which then replaces the original file.
  A backup of the version 2 database is kept at '<path>.2'.

  Arguments:

      old (SkelCLDatabase): The database to migrate
  """
    # Map an old kernel id onto its new content-hash id (None if not found).
    def _old_kernel2new(old_id):
        kernel = old.execute(
            "SELECT north,south,east,west,max_wg_size,source "
            "FROM kernels WHERE id=?", (old_id, )).fetchone()
        if kernel:
            return tmp.kernel_id(*kernel)

    # Map an old scenario id onto its new content-hash id.
    def _old_scenario2new(old_id):
        device, old_kernel, dataset = old.execute(
            "SELECT device,kernel,dataset "
            "FROM scenarios WHERE id=?", (old_id, )).fetchone()
        kernel = _old_kernel2new(old_kernel)
        return tmp.scenario_id(device, kernel, dataset)

    # TODO: Un-comment out code!

    # Create temporary database
    fs.rm("/tmp/omnitune.skelcl.migration.db")
    tmp = _db.Database("/tmp/omnitune.skelcl.migration.db")
    tmp.attach(old.path, "rhs")

    io.info("Migrating database to version 3.")

    backup_path = old.path + ".2"
    io.info("Creating backup of old database at '{0}'".format(backup_path))
    fs.cp(old.path, backup_path)

    tmp_path = tmp.path
    old_path = old.path

    tmp.run("create_tables")

    # Populate feature and lookup tables.
    for row in old.execute("SELECT * FROM devices"):
        features = row[1:]
        id = hash_device(*features)
        io.debug("Features extracted for device", id)
        row = (id, ) + features
        tmp.execute("INSERT INTO devices VALUES " + placeholders(*row), row)

        row = (features[0], features[1], id)
        tmp.execute("INSERT INTO device_lookup VALUES " + placeholders(*row),
                    row)
        tmp.commit()

    for row in old.execute("SELECT * FROM kernels"):
        args = row[1:]
        tmp.kernel_id(*args)

    for row in old.execute("SELECT * FROM datasets"):
        features = row[1:]
        id = hash_dataset(*features)
        io.debug("Features extracted for dataset", id)
        row = (id, ) + features
        tmp.execute("INSERT INTO datasets VALUES " + placeholders(*row), row)

        row = features + (id, )
        tmp.execute("INSERT INTO dataset_lookup VALUES " + placeholders(*row),
                    row)
        tmp.commit()

    # Populate kernel_names table.
    for row in old.execute("SELECT * FROM kernel_names"):
        old_id = row[0]
        synthetic, name = row[1:]

        kernel = _old_kernel2new(old_id)
        if kernel:
            row = (kernel, synthetic, name)
            tmp.execute(
                "INSERT OR IGNORE INTO kernel_names VALUES " +
                placeholders(*row), row)
    tmp.commit()

    # Populate scenarios table.
    for row in old.execute("SELECT * FROM scenarios"):
        old_id, _, device, old_kernel, dataset = row
        kernel = _old_kernel2new(old_kernel)
        new_id = hash_scenario(device, kernel, dataset)

        row = (new_id, device, kernel, dataset)
        tmp.execute(
            "INSERT OR IGNORE INTO scenarios VALUES " + placeholders(*row),
            row)
    tmp.commit()

    # Populate params table.
    tmp.execute("INSERT INTO params SELECT * from rhs.params")
    tmp.commit()

    scenario_replacements = {
        row[0]: _old_scenario2new(row[0])
        for row in old.execute("SELECT * FROM scenarios")
    }

    tmp.execute("INSERT INTO runtimes SELECT * from rhs.runtimes")
    # BUG FIX: dict.iteritems() is Python 2-only and raises AttributeError
    # on Python 3; .items() behaves identically on both.
    for old_id, new_id in scenario_replacements.items():
        io.info("Runtimes", old_id, "->", new_id)
        tmp.execute("UPDATE runtimes SET scenario=? WHERE scenario=?",
                    (new_id, old_id))
    tmp.commit()

    # Sanity checks
    bad = False
    for row in tmp.execute("SELECT DISTINCT scenario FROM runtimes"):
        # Every runtime's scenario must resolve to exactly one scenario row.
        count = tmp.execute("SELECT Count(*) FROM scenarios WHERE id=?",
                            (row[0], )).fetchone()[0]
        if count != 1:
            io.error("Bad scenario count:", row[0], count)
            bad = True

    if bad:
        io.fatal("Failed sanity check, aborting.")
    else:
        io.info("Passed sanity check.")

    # Copy migrated database over the original one.
    fs.cp(tmp_path, old_path)
    fs.rm(tmp_path)

    old.close()
    tmp.close()
    io.info("Migration completed.")
Esempio n. 25
0
def test_GitHubRepo_IsCloned(test_repo: github_repo.GitHubRepo):
    """Test for IsCloned()."""
    # The fixture repo starts out cloned; deleting the clone directory
    # flips the predicate.
    assert test_repo.IsCloned()
    fs.rm(test_repo.clone_dir)
    assert not test_repo.IsCloned()
Esempio n. 26
0
 def flush():
   """Commit the session and delete files queued in `to_del` (only when
   --commit was requested)."""
   if not args.commit:
     return
   s.commit()
   while to_del:
     fs.rm(to_del.popleft())
Esempio n. 27
0
def test_mkdir():
    # Clean state, then create the directory and verify it exists.
    fs.rm("/tmp/labm8.dir")
    assert not fs.isdir("/tmp/labm8.dir")
    fs.mkdir("/tmp/labm8.dir")
    assert fs.isdir("/tmp/labm8.dir")
Esempio n. 28
0
def test_mkopen():
    fs.rm("/tmp/labm8.dir")
    assert not fs.isdir("/tmp/labm8.dir/")
    # mkopen creates intermediate directories before opening the file.
    handle = fs.mkopen("/tmp/labm8.dir/foo", "w")
    assert fs.isdir("/tmp/labm8.dir/")
    handle.close()
Esempio n. 29
0
def test_ls_empty_dir():
    # Listing a freshly-created directory yields an empty result.
    fs.mkdir("/tmp/labm8.empty")
    assert not fs.ls("/tmp/labm8.empty")
    fs.rm("/tmp/labm8.empty")
Esempio n. 30
0
def test_mv_no_dst():
    # Moving to a destination whose parent does not exist raises IOError.
    system.echo("Hello, world!", "/tmp/labm8.tmp")
    with pytest.raises(IOError):
        fs.mv("/tmp/labm8.tmp", "/not/a/real/path")
    fs.rm("/tmp/labm8.tmp")