Exemplo n.º 1
0
def test_JsonCache():
  """Exercise JsonCache loading, standard cache ops, and copy construction."""
  with tempfile.NamedTemporaryFile(prefix='labm8_') as fp:
    # Populate the temporary file with the on-disk test fixture.
    fs.cp('labm8/data/test/jsoncache.json', fp.name)
    jc = cache.JsonCache(fp.name)

    assert "foo" in jc
    assert jc["foo"] == 1
    _TestCacheOps(jc)

    # Seed entries for the copy-constructor check.
    jc["foo"] = 1
    jc["bar"] = 2
    jc["baz"] = 3

    with tempfile.NamedTemporaryFile(prefix='labm8_') as fp2:
      # Entries must appear in the copy while remaining in the source.
      copied = cache.JsonCache(fp2.name, jc)
      for key, val in (("foo", 1), ("bar", 2), ("baz", 3)):
        assert copied[key] == val
      for key, val in (("foo", 1), ("bar", 2), ("baz", 3)):
        assert jc[key] == val
      jc.clear()
      # Set for next time.
      jc["foo"] = 1
      jc.write()
Exemplo n.º 2
0
    def test_commit(self):
        """Uncommitted writes are private to a connection until commit()."""
        # Work on a throwaway copy of the test database.
        fs.cp(self.db.path, "/tmp/labm8.con.sql")

        # Two independent connections to the same file.
        conn_a = db.Database("/tmp/labm8.con.sql")
        conn_b = db.Database("/tmp/labm8.con.sql")

        query = 'SELECT * FROM names WHERE first="Bob" AND last="Marley"'

        # Neither connection sees a Bob Marley entry yet.
        for conn in (conn_a, conn_b):
            self._test(None, conn.execute(query).fetchone())

        # Insert through the first connection only (not yet committed).
        conn_a.execute("INSERT INTO names VALUES ('Bob', 'Marley')")

        # Open a third connection after the (uncommitted) insert.
        conn_c = db.Database("/tmp/labm8.con.sql")

        # Only the writing connection sees the uncommitted row.
        self._test(("Bob", "Marley"), conn_a.execute(query).fetchone())
        self._test(None, conn_b.execute(query).fetchone())
        self._test(None, conn_c.execute(query).fetchone())

        # After commit, every connection sees Bob Marley.
        conn_a.commit()
        for conn in (conn_a, conn_b, conn_c):
            self._test(("Bob", "Marley"), conn.execute(query).fetchone())

        # Cool, we're jammin'
        fs.rm("/tmp/labm8.con.sql")
Exemplo n.º 3
0
def test_cp():
    """fs.cp() duplicates a file's contents."""
    src, dst = "/tmp/labm8.tmp", "/tmp/labm8.tmp.copy"
    system.echo("Hello, world!", src)
    assert fs.read(src) == ["Hello, world!"]
    # Remove any destination left over from a previous run.
    fs.rm(dst)
    assert not fs.exists(dst)
    fs.cp(src, dst)
    assert fs.read(src) == fs.read(dst)
Exemplo n.º 4
0
 def test_cp(self):
     """fs.cp() duplicates a file's contents."""
     src, dst = "/tmp/labm8.tmp", "/tmp/labm8.tmp.copy"
     system.echo("Hello, world!", src)
     self._test(["Hello, world!"], fs.read(src))
     # Remove any destination left over from a previous run.
     fs.rm(dst)
     self._test(False, fs.exists(dst))
     fs.cp(src, dst)
     self._test(fs.read(src), fs.read(dst))
Exemplo n.º 5
0
def test_cp_dir():
    """fs.cp() recursively reproduces a directory tree."""
    src, dst = "/tmp/labm8", "/tmp/labm8.copy"
    fs.rm(src)
    fs.rm(dst)
    fs.mkdir("/tmp/labm8/foo/bar")
    assert not fs.exists(dst)
    fs.cp("/tmp/labm8/", dst)
    # Every level of the source tree must exist in the copy.
    for sub in (dst, dst + "/foo", dst + "/foo/bar"):
        assert fs.isdir(sub)
Exemplo n.º 6
0
 def test_cp_dir(self):
     """fs.cp() recursively reproduces a directory tree."""
     fs.rm("/tmp/labm8")
     fs.rm("/tmp/labm8.copy")
     fs.mkdir("/tmp/labm8/foo/bar")
     self._test(False, fs.exists("/tmp/labm8.copy"))
     fs.cp("/tmp/labm8/", "/tmp/labm8.copy")
     # Every level of the source tree must exist in the copy.
     for sub in ("/tmp/labm8.copy",
                 "/tmp/labm8.copy/foo",
                 "/tmp/labm8.copy/foo/bar"):
         self._test(True, fs.isdir(sub))
Exemplo n.º 7
0
def cp_loc(path, name):
    """
    Copy database from local filesystem.

    Arguments:

        path (str): Source path on the local filesystem.
        name (str): Destination database name (resolved via dst_path()).
    """
    src = fs.path(path)
    dst = dst_path(name)

    io.info("Copying", src, "->", name)
    fs.cp(src, dst)
Exemplo n.º 8
0
def migrate_3_to_4(old):
    """
    SkelCL database migration script.

    Rebuilds the database in a temporary file by bulk-copying every table,
    sanity-checks row counts, then copies the result back over the
    original path.

    Arguments:

        old (SkelCLDatabase): The database to migrate
    """
    # Create temporary database. Remove any stale file left over from a
    # previous (possibly aborted) migration first.
    fs.rm("/tmp/omnitune.skelcl.migration.db")
    tmp = _db.Database("/tmp/omnitune.skelcl.migration.db")
    # Attach the old database as schema "rhs" so each table can be copied
    # with a single INSERT ... SELECT.
    tmp.attach(old.path, "rhs")

    io.info("Migrating database to version 4.")

    # Keep a backup of the pre-migration database alongside the original.
    backup_path = old.path + ".3"
    io.info("Creating backup of old database at '{0}'".format(backup_path))
    fs.cp(old.path, backup_path)

    tables = [
        "kernels", "kernel_lookup", "kernel_names", "devices", "device_lookup",
        "datasets", "dataset_lookup", "scenarios", "params", "runtimes",
        "runtime_stats", "oracle_params"
    ]

    for table in tables:
        io.info("Copying data from '{}' ...".format(table))
        tmp.execute("INSERT INTO {} SELECT * FROM rhs.{}".format(table, table))

    tmp_path = tmp.path
    old_path = old.path

    # Compact the freshly populated database.
    tmp.execute("VACUUM")

    # Sanity checks: row counts must match between old and new tables.
    bad = False
    for table in tables:
        old_count = tmp.num_rows("rhs." + table)
        tmp_count = tmp.num_rows(table)

        if old_count != tmp_count:
            io.error("Bad rows count:", old_count, tmp_count)
            bad = True

    if bad:
        io.fatal("Failed sanity check, aborting.")
    else:
        io.info("Passed sanity check.")

    # Copy migrated database over the original one.
    fs.cp(tmp_path, old_path)
    fs.rm(tmp_path)

    old.close()
    tmp.close()
    io.info("Migration completed.")
Exemplo n.º 9
0
    def __init__(self, *args, **kwargs):
        """Load working copies of the test databases."""
        super(TestDatabase, self).__init__(*args, **kwargs)

        # Copy the fixtures to /tmp so tests never mutate the originals,
        # then open a connection to each copy.
        fixtures = (("tests/data/db.sql", "/tmp/labm8.db.sql"),
                    ("tests/data/db_empty.sql", "/tmp/labm8.db_empty.sql"))
        for src, dst in fixtures:
            fs.cp(src, dst)

        self.db = db.Database("/tmp/labm8.db.sql")
        self.db_empty = db.Database("/tmp/labm8.db_empty.sql")
Exemplo n.º 10
0
    def __init__(self, *args, **kwargs):
        """Copy the test datasets to /tmp and record their paths."""
        super(TestML, self).__init__(*args, **kwargs)

        # Test data paths; tests work on /tmp copies, never the originals.
        self.arff = "/tmp/labm8.diabetes.arff"
        self.csv = "/tmp/labm8.diabetes.csv"

        fs.cp("tests/data/diabetes.arff", self.arff)
        fs.cp("tests/data/diabetes.csv", self.csv)
Exemplo n.º 11
0
def migrate_5_to_6(db):
    """
    SkelCL database migration script.

    Database version 6 adds an additional "scenario_stats" table.
    The migration is performed in place on *db*.

    Arguments:

        db (SkelCLDatabase): The database to migrate
    """
    io.info("Migrating database to version 6.")

    # Keep a backup of the pre-migration database alongside the original.
    backup_path = db.path + ".5"
    io.info("Creating backup of old database at '{0}'".format(backup_path))
    fs.cp(db.path, backup_path)

    # Bump the recorded schema version.
    db.execute("DELETE FROM version")
    db.execute("INSERT INTO version VALUES (6)")

    db.execute("""
CREATE TABLE IF NOT EXISTS scenario_stats (
    scenario                        CHAR(40),     -- Key for scenarios
    num_params                      INTEGER,      -- The number of parameters in W_legal for scenario
    oracle_param                    VARCHAR(255), -- The best parameter
    oracle_runtime                  REAL,         -- The runtime of the best parameter
    worst_param                     VARCHAR(255), -- The worst parameter
    worst_runtime                   REAL,         -- The runtime of the worst parameter
    mean_runtime                    REAL,         -- The mean runtime of all parameters
    PRIMARY KEY (scenario)
)
""")

    db.populate_scenario_stats_table()

    # Sanity checks: expect exactly one scenario_stats row per scenario.
    bad = False
    if db.num_rows("scenario_stats") != len(db.scenarios):
        io.error("Bad row count in scenario_stats table! Expected",
                 len(db.scenarios), "Observed:", db.num_rows("scenario_stats"))
        bad = True

    if bad:
        io.fatal("Failed sanity check, aborting.")
    else:
        io.info("Passed sanity check.")

    # Migration was performed in place, so there is nothing to copy back;
    # just close the handle.
    db.close()
    io.info("Migration completed.")
Exemplo n.º 12
0
def migrate_5_to_6(db):
  """
  SkelCL database migration script.

  Database version 6 adds an additional "scenario_stats" table.
  The migration is performed in place on *db*.

  Arguments:

      db (SkelCLDatabase): The database to migrate
  """
  io.info("Migrating database to version 6.")

  # Keep a backup of the pre-migration database alongside the original.
  backup_path = db.path + ".5"
  io.info("Creating backup of old database at '{0}'".format(backup_path))
  fs.cp(db.path, backup_path)

  # Bump the recorded schema version.
  db.execute("DELETE FROM version")
  db.execute("INSERT INTO version VALUES (6)")

  db.execute("""
CREATE TABLE IF NOT EXISTS scenario_stats (
    scenario                        CHAR(40),     -- Key for scenarios
    num_params                      INTEGER,      -- The number of parameters in W_legal for scenario
    oracle_param                    VARCHAR(255), -- The best parameter
    oracle_runtime                  REAL,         -- The runtime of the best parameter
    worst_param                     VARCHAR(255), -- The worst parameter
    worst_runtime                   REAL,         -- The runtime of the worst parameter
    mean_runtime                    REAL,         -- The mean runtime of all parameters
    PRIMARY KEY (scenario)
)
""")

  db.populate_scenario_stats_table()

  # Sanity checks: expect exactly one scenario_stats row per scenario.
  bad = False
  if db.num_rows("scenario_stats") != len(db.scenarios):
    io.error("Bad row count in scenario_stats table! Expected",
             len(db.scenarios), "Observed:", db.num_rows("scenario_stats"))
    bad = True

  if bad:
    io.fatal("Failed sanity check, aborting.")
  else:
    io.info("Passed sanity check.")

  # Migration was performed in place, so there is nothing to copy back;
  # just close the handle.
  db.close()
  io.info("Migration completed.")
Exemplo n.º 13
0
def test_cp_over_dir():
    """Copying a directory into an existing directory merges its contents."""
    fs.mkdir("/tmp/labm8.tmp.src")
    system.echo("Hello, world!", "/tmp/labm8.tmp.src/foo")
    fs.rm("/tmp/labm8.tmp.copy")
    fs.mkdir("/tmp/labm8.tmp.copy")

    # Before the copy: source populated, destination empty.
    assert fs.isdir("/tmp/labm8.tmp.src")
    assert fs.isfile("/tmp/labm8.tmp.src/foo")
    assert fs.isdir("/tmp/labm8.tmp.copy")
    assert not fs.isfile("/tmp/labm8.tmp.copy/foo")

    fs.cp("/tmp/labm8.tmp.src", "/tmp/labm8.tmp.copy/")

    # After the copy: source untouched, destination now holds the file.
    assert fs.isdir("/tmp/labm8.tmp.src")
    assert fs.isfile("/tmp/labm8.tmp.src/foo")
    assert fs.isdir("/tmp/labm8.tmp.copy")
    assert fs.isfile("/tmp/labm8.tmp.copy/foo")
    assert (fs.read("/tmp/labm8.tmp.src/foo") ==
            fs.read("/tmp/labm8.tmp.copy/foo"))
Exemplo n.º 14
0
 def test_cp_over_dir(self):
     """Copying a directory into an existing directory merges its contents."""
     fs.mkdir("/tmp/labm8.tmp.src")
     system.echo("Hello, world!", "/tmp/labm8.tmp.src/foo")
     fs.rm("/tmp/labm8.tmp.copy")
     fs.mkdir("/tmp/labm8.tmp.copy")

     def check(foo_copied):
         # Source tree and destination dir always exist; the copied file
         # appears in the destination only after fs.cp().
         self._test(True, fs.isdir("/tmp/labm8.tmp.src"))
         self._test(True, fs.isfile("/tmp/labm8.tmp.src/foo"))
         self._test(True, fs.isdir("/tmp/labm8.tmp.copy"))
         self._test(foo_copied, fs.isfile("/tmp/labm8.tmp.copy/foo"))

     check(False)
     fs.cp("/tmp/labm8.tmp.src", "/tmp/labm8.tmp.copy/")
     check(True)
     self._test(fs.read("/tmp/labm8.tmp.src/foo"),
                fs.read("/tmp/labm8.tmp.copy/foo"))
Exemplo n.º 15
0
def migrate_4_to_5(db):
  """
  SkelCL database migration script.

  Database version 5 adds an additional "param_stats" table.
  The migration is performed in place on *db*.

  Arguments:

      db (SkelCLDatabase): The database to migrate
  """
  io.info("Migrating database to version 5.")

  # Keep a backup of the pre-migration database alongside the original.
  backup_path = db.path + ".4"
  io.info("Creating backup of old database at '{0}'".format(backup_path))
  fs.cp(db.path, backup_path)

  # Bump the recorded schema version.
  db.execute("DELETE FROM version")
  db.execute("INSERT INTO version VALUES (5)")

  db.execute("""
-- Parameter stats table
CREATE TABLE IF NOT EXISTS param_stats (
    params                          VARCHAR(255), -- Key for params
    num_scenarios                   INTEGER,      -- Number of scenarios for which param is legal, 0 < num_scenarios
    coverage                        REAL,         -- num_scenarios / total number of scenarios, 0 < coverage <= 1
    performance                     REAL,         -- Geometric mean of performance relative to the oracle for all scenarios for which param was legal, 0 < performance <= 1
    PRIMARY KEY (params)
)
""")

  db.populate_param_stats_table()

  # Sanity checks: expect exactly one param_stats row per param.
  bad = False
  if db.num_rows("param_stats") != len(db.params):
    io.error("Bad row count in params table! Expected", len(db.params),
             "Observed:", db.num_rows("param_stats"))
    bad = True

  if bad:
    io.fatal("Failed sanity check, aborting.")
  else:
    io.info("Passed sanity check.")

  # Migration was performed in place, so there is nothing to copy back;
  # just close the handle.
  db.close()
  io.info("Migration completed.")
Exemplo n.º 16
0
def merge(old_oracle, dbs, path):
    """
    Merge databases into one.

    Arguments:

        old_oracle (str): Path to the old oracle database, used as the
            base which the other databases are merged into.
        dbs (list of Database): Databases to merge.
        path (str): Path to merged database.

    Returns:

        Database: merged database instance.
    """
    # "+ 1" counts the old oracle database itself.
    print("Merging {n} databases:".format(n=len(dbs) + 1))
    print("   ", old_oracle)
    for db in dbs:
        print("   ", db)
    print()

    # Make a copy of the old oracle database to work from.
    # NOTE: fixed log message typo ("Coping" -> "Copying").
    io.info("Copying", old_oracle, "->", fs.basename(path))
    fs.cp(old_oracle, path)

    # Bring the working copy up to the latest schema version.
    target = migrate(_db.Database(path=path))

    # Fail fast if any input database is unreadable.
    for db in dbs + [target]:
        try:
            db.num_rows("runtimes")
        except sqlite3.DatabaseError as e:
            io.error("Broken db:", db.path)
            io.fatal(e)

    # Expected size of the merged runtimes table.
    num_runtimes = [db.num_rows("runtimes") for db in dbs]
    expected_total = target.num_rows("runtimes") + sum(num_runtimes)

    target.merge(dbs)

    total = target.num_rows("runtimes")

    # Sanity check: no rows may be lost or duplicated by the merge.
    if total != expected_total:
        io.fatal("Expected total", expected_total,
                 "!= actual total", total)

    io.info(("Merged {num_db} databases, {n} rows"
             .format(num_db=len(dbs), n=total)))

    return target
Exemplo n.º 17
0
def migrate_4_to_5(db):
    """
    SkelCL database migration script.

    Database version 5 adds an additional "param_stats" table.
    The migration is performed in place on *db*.

    Arguments:

        db (SkelCLDatabase): The database to migrate
    """
    io.info("Migrating database to version 5.")

    # Keep a backup of the pre-migration database alongside the original.
    backup_path = db.path + ".4"
    io.info("Creating backup of old database at '{0}'".format(backup_path))
    fs.cp(db.path, backup_path)

    # Bump the recorded schema version.
    db.execute("DELETE FROM version")
    db.execute("INSERT INTO version VALUES (5)")

    db.execute("""
-- Parameter stats table
CREATE TABLE IF NOT EXISTS param_stats (
    params                          VARCHAR(255), -- Key for params
    num_scenarios                   INTEGER,      -- Number of scenarios for which param is legal, 0 < num_scenarios
    coverage                        REAL,         -- num_scenarios / total number of scenarios, 0 < coverage <= 1
    performance                     REAL,         -- Geometric mean of performance relative to the oracle for all scenarios for which param was legal, 0 < performance <= 1
    PRIMARY KEY (params)
)
""")

    db.populate_param_stats_table()

    # Sanity checks: expect exactly one param_stats row per param.
    bad = False
    if db.num_rows("param_stats") != len(db.params):
        io.error("Bad row count in params table! Expected", len(db.params),
                 "Observed:", db.num_rows("param_stats"))
        bad = True

    if bad:
        io.fatal("Failed sanity check, aborting.")
    else:
        io.info("Passed sanity check.")

    # Migration was performed in place, so there is nothing to copy back;
    # just close the handle.
    db.close()
    io.info("Migration completed.")
Exemplo n.º 18
0
def merge(old_oracle, dbs, path):
    """
    Merge databases into one.

    Arguments:

        old_oracle (str): Path to the old oracle database, used as the
            base which the other databases are merged into.
        dbs (list of Database): Databases to merge.
        path (str): Path to merged database.

    Returns:

        Database: merged database instance.
    """
    # "+ 1" counts the old oracle database itself.
    print("Merging {n} databases:".format(n=len(dbs) + 1))
    print("   ", old_oracle)
    for db in dbs:
        print("   ", db)
    print()

    # Make a copy of the old oracle database to work from.
    # NOTE: fixed log message typo ("Coping" -> "Copying").
    io.info("Copying", old_oracle, "->", fs.basename(path))
    fs.cp(old_oracle, path)

    # Bring the working copy up to the latest schema version.
    target = migrate(_db.Database(path=path))

    # Fail fast if any input database is unreadable.
    for db in dbs + [target]:
        try:
            db.num_rows("runtimes")
        except sqlite3.DatabaseError as e:
            io.error("Broken db:", db.path)
            io.fatal(e)

    # Expected size of the merged runtimes table.
    num_runtimes = [db.num_rows("runtimes") for db in dbs]
    expected_total = target.num_rows("runtimes") + sum(num_runtimes)

    target.merge(dbs)

    total = target.num_rows("runtimes")

    # Sanity check: no rows may be lost or duplicated by the merge.
    if total != expected_total:
        io.fatal("Expected total", expected_total, "!= actual total", total)

    io.info(("Merged {num_db} databases, {n} rows".format(num_db=len(dbs),
                                                          n=total)))

    return target
Exemplo n.º 19
0
def main():
    """
    Evaluate dataset and omnitune performance.

    Loads the oracle dataset, backs it up, then destructively prunes
    scenarios that lack sufficient data.
    """
    # Get the latest dataset from the oracle, migrated to the current
    # schema version.
    db = migrate(_db.Database(experiment.ORACLE_PATH))

    # Back up before pruning, since pruning is destructive.
    backup_path = db.path + ".unprune"
    io.info("Writing backup to", backup_path)
    fs.cp(db.path, backup_path)

    # Strip scenarios for which there isn't enough unique workgroup
    # sizes.
    db.prune_min_params_per_scenario(25)
    # Strip scenarios so that there are at least a certain number of
    # safe parameters.
    db.prune_safe_params(3)
Exemplo n.º 20
0
    def test_remove_preprocessed(self):
        """remove_preprocessed() drops good kernels and marks the db modified."""
        path = 'test_remove_preprocessed.db'
        fs.cp(tests.db_path('10-kernels-preprocessed'), path)

        def is_modified():
            # Open, query the modified flag, and always close the handle.
            conn = dbutil.connect(path)
            try:
                return dbutil.is_modified(conn)
            finally:
                conn.close()

        # Fixture starts with 8 good kernels and no modified flag.
        self.assertEqual(8, dbutil.num_good_kernels(path))
        self.assertFalse(is_modified())

        dbutil.remove_preprocessed(path)

        # All good kernels removed; database flagged as modified.
        self.assertEqual(0, dbutil.num_good_kernels(path))
        self.assertTrue(is_modified())

        fs.rm(path)
Exemplo n.º 21
0
 def test_ls_abspaths(self):
     """ls(abspaths=True) returns absolute paths, optionally recursively."""
     fs.cp("tests/data/testdir", "/tmp/testdir")
     # Shallow listing: immediate children only.
     shallow = ["/tmp/testdir/" + name for name in ("a", "b", "c", "d")]
     self._test(shallow, fs.ls("/tmp/testdir", abspaths=True))
     # Recursive listing: the whole subtree, depth-first order.
     deep = ["/tmp/testdir/" + name for name in (
         "a", "b", "c", "c/e", "c/f", "c/f/f", "c/f/f/i",
         "c/f/h", "c/g", "d")]
     self._test(deep, fs.ls("/tmp/testdir", recursive=True, abspaths=True))
     fs.rm("/tmp/testdir")
Exemplo n.º 22
0
def test_ls_abspaths():
    """ls(abspaths=True) returns absolute paths, optionally recursively."""
    fs.cp("labm8/data/test/testdir", "/tmp/testdir")
    # Shallow listing: immediate children only.
    shallow = ["/tmp/testdir/" + name for name in ("a", "b", "c", "d")]
    assert fs.ls("/tmp/testdir", abspaths=True) == shallow
    # Recursive listing: the whole subtree, depth-first order.
    deep = ["/tmp/testdir/" + name for name in (
        "a", "b", "c", "c/e", "c/f", "c/f/f", "c/f/f/i",
        "c/f/h", "c/g", "d")]
    assert fs.ls("/tmp/testdir", recursive=True, abspaths=True) == deep
    fs.rm("/tmp/testdir")
Exemplo n.º 23
0
def migrate_1_to_2(old):
  """
  SkelCL database migration script.

  Version 2 renames table "data" to "datasets" and column
  "scenarios.data" to "scenarios.dataset". The rebuild happens in a
  temporary copy which is then copied back over the original.

  Arguments:

      old (SkelCLDatabase): The database to migrate
  """
  # Create temporary database as a working copy of the old one.
  fs.cp(old.path, "/tmp/omnitune.skelcl.migration.db")
  tmp = _db.Database("/tmp/omnitune.skelcl.migration.db")

  io.info("Migrating database to version 2.")

  # Keep a backup of the pre-migration database alongside the original.
  backup_path = old.path + ".1"
  io.info("Creating backup of old database at '{0}'".format(backup_path))
  fs.cp(old.path, backup_path)

  # Update database version
  tmp.drop_table("version")
  tmp.create_table("version",
                   (("version", "integer"),))
  tmp.execute("INSERT INTO version VALUES (2)")

  # Rename table "data" to "datasets" (create-copy-drop, since the
  # Database API exposes no direct table rename).
  tmp.create_table("datasets",
                   (("id", "text primary key"),
                    ("width", "integer"),
                    ("height", "integer"),
                    ("tin", "text"),
                    ("tout", "text")))
  tmp.execute("INSERT INTO datasets SELECT * FROM data")
  tmp.drop_table("data")

  # Rename column "scenarios.data" to "scenarios.dataset"
  tmp.execute("ALTER TABLE scenarios RENAME TO old_scenarios")
  tmp.create_table("scenarios",
                   (("id", "text primary key"),
                    ("host", "text"),
                    ("device", "text"),
                    ("kernel", "text"),
                    ("dataset", "text")))
  tmp.execute("INSERT INTO scenarios SELECT * FROM old_scenarios")
  tmp.drop_table("old_scenarios")

  tmp.commit()

  old_path = old.path
  tmp_path = tmp.path

  # Copy migrated database over the original one.
  fs.cp(tmp_path, old_path)
  fs.rm(tmp_path)

  old.close()
  tmp.close()
  io.info("Migration completed.")
Exemplo n.º 24
0
def migrate_1_to_2(old):
    """
    SkelCL database migration script.

    Version 2 renames table "data" to "datasets" and column
    "scenarios.data" to "scenarios.dataset". The rebuild happens in a
    temporary copy which is then copied back over the original.

    Arguments:

        old (SkelCLDatabase): The database to migrate
    """
    # Create temporary database as a working copy of the old one.
    fs.cp(old.path, "/tmp/omnitune.skelcl.migration.db")
    tmp = _db.Database("/tmp/omnitune.skelcl.migration.db")

    io.info("Migrating database to version 2.")

    # Keep a backup of the pre-migration database alongside the original.
    backup_path = old.path + ".1"
    io.info("Creating backup of old database at '{0}'".format(backup_path))
    fs.cp(old.path, backup_path)

    # Update database version
    tmp.drop_table("version")
    tmp.create_table("version",
                     (("version",                         "integer"),))
    tmp.execute("INSERT INTO version VALUES (2)")

    # Rename table "data" to "datasets" (create-copy-drop, since the
    # Database API exposes no direct table rename).
    tmp.create_table("datasets",
                     (("id",                              "text primary key"),
                      ("width",                           "integer"),
                      ("height",                          "integer"),
                      ("tin",                             "text"),
                      ("tout",                            "text")))
    tmp.execute("INSERT INTO datasets SELECT * FROM data")
    tmp.drop_table("data")

    # Rename column "scenarios.data" to "scenarios.dataset"
    tmp.execute("ALTER TABLE scenarios RENAME TO old_scenarios")
    tmp.create_table("scenarios",
                     (("id",                              "text primary key"),
                      ("host",                            "text"),
                      ("device",                          "text"),
                      ("kernel",                          "text"),
                      ("dataset",                         "text")))
    tmp.execute("INSERT INTO scenarios SELECT * FROM old_scenarios")
    tmp.drop_table("old_scenarios")

    tmp.commit()

    old_path = old.path
    tmp_path = tmp.path

    # Copy migrated database over the original one.
    fs.cp(tmp_path, old_path)
    fs.rm(tmp_path)

    old.close()
    tmp.close()
    io.info("Migration completed.")
Exemplo n.º 25
0
def migrate_3_to_4(old):
    """
    SkelCL database migration script.

    Rebuilds the database in a temporary file by bulk-copying every table,
    sanity-checks row counts, then copies the result back over the
    original path.

    Arguments:

        old (SkelCLDatabase): The database to migrate
    """
    # Create temporary database. Remove any stale file left over from a
    # previous (possibly aborted) migration first.
    fs.rm("/tmp/omnitune.skelcl.migration.db")
    tmp = _db.Database("/tmp/omnitune.skelcl.migration.db")
    # Attach the old database as schema "rhs" so each table can be copied
    # with a single INSERT ... SELECT.
    tmp.attach(old.path, "rhs")

    io.info("Migrating database to version 4.")

    # Keep a backup of the pre-migration database alongside the original.
    backup_path = old.path + ".3"
    io.info("Creating backup of old database at '{0}'".format(backup_path))
    fs.cp(old.path, backup_path)

    tables = [
        "kernels",
        "kernel_lookup",
        "kernel_names",
        "devices",
        "device_lookup",
        "datasets",
        "dataset_lookup",
        "scenarios",
        "params",
        "runtimes",
        "runtime_stats",
        "oracle_params"
    ]

    for table in tables:
        io.info("Copying data from '{}' ...".format(table))
        tmp.execute("INSERT INTO {} SELECT * FROM rhs.{}".format(table, table))

    tmp_path = tmp.path
    old_path = old.path

    # Compact the freshly populated database.
    tmp.execute("VACUUM")

    # Sanity checks: row counts must match between old and new tables.
    bad = False
    for table in tables:
        old_count = tmp.num_rows("rhs." + table)
        tmp_count = tmp.num_rows(table)

        if old_count != tmp_count:
            io.error("Bad rows count:", old_count, tmp_count)
            bad = True

    if bad:
        io.fatal("Failed sanity check, aborting.")
    else:
        io.info("Passed sanity check.")

    # Copy migrated database over the original one.
    fs.cp(tmp_path, old_path)
    fs.rm(tmp_path)

    old.close()
    tmp.close()
    io.info("Migration completed.")
Exemplo n.º 26
0
def migrate_0_to_1(old):
  """
  SkelCL database migration script.

  Rebuilds the flat version 0 "runtimes" table into the normalised
  version 1 schema (kernels, devices, data, params, scenarios, runtimes)
  inside a temporary database, then copies the result back over the
  original path.

  Arguments:

      old (SkelCLDatabase): The database to migrate
  """

  def get_source(checksum):
    """Return the kernel source stored in the old db for *checksum*."""
    query = old.execute("SELECT source FROM kernels WHERE checksum = ?",
                        (checksum,))
    return query.fetchone()[0]

  def get_device_attr(device_id, name, count):
    """Fetch the old device row for *name*, re-keyed with the new id."""
    query = old.execute("SELECT * FROM devices WHERE name = ?",
                        (name,))
    attr = query.fetchone()

    # Splice into the new
    newattr = (device_id, attr[0], count) + attr[2:]
    return newattr

  def process_row(tmp, row):
    """Migrate one flat runtimes row into the normalised tables."""
    # Get column values from row.
    host = row[0]
    dev_name = row[1]
    dev_count = row[2]
    kern_checksum = row[3]
    north = row[4]
    south = row[5]
    east = row[6]
    west = row[7]
    data_width = row[8]
    data_height = row[9]
    max_wg_size = row[10]
    wg_c = row[11]
    wg_r = row[12]
    runtime = row[13]
    # Version 0 did not record element types; default both to "float".
    type_in = "float"
    type_out = "float"

    # Lookup source code.
    source = get_source(kern_checksum)
    user_source = get_user_source(source)

    # Derive content-based ids for the normalised tables.
    kernel_id = hash_kernel(north, south, east, west, max_wg_size, source)
    device_id = hash_device(dev_name, dev_count)
    data_id = hash_data(data_width, data_height, type_in, type_out)
    scenario_id = hash_scenario(host, device_id, kernel_id, data_id)
    params_id = hash_workgroup_size(wg_c, wg_r)

    device_attr = get_device_attr(device_id, dev_name, dev_count)

    # Add database entries. INSERT OR IGNORE de-duplicates rows shared
    # by multiple runtimes.
    tmp.execute("INSERT OR IGNORE INTO kernels VALUES (?,?,?,?,?,?,?)",
                (kernel_id, north, south, east, west, max_wg_size, user_source))

    placeholders = ",".join(["?"] * len(device_attr))
    tmp.execute("INSERT OR IGNORE INTO devices VALUES (" + placeholders + ")",
                device_attr)

    tmp.execute("INSERT OR IGNORE INTO data VALUES (?,?,?,?,?)",
                (data_id, data_width, data_height, type_in, type_out))

    tmp.execute("INSERT OR IGNORE INTO params VALUES (?,?,?)",
                (params_id, wg_c, wg_r))

    tmp.execute("INSERT OR IGNORE INTO scenarios VALUES (?,?,?,?,?)",
                (scenario_id, host, device_id, kernel_id, data_id))

    tmp.execute("INSERT INTO runtimes VALUES (?,?,?)",
                (scenario_id, params_id, runtime))

  # Create temporary database
  tmp = _db.Database("/tmp/omnitune.skelcl.migration.db")

  # Clear anything that's already in the database.
  for table in tmp.tables:
    tmp.drop_table(table)

  io.info("Migrating database to version 1.")

  # Keep a backup of the pre-migration database alongside the original.
  backup_path = old.path + ".0"
  io.info("Creating backup of old database at '{0}'".format(backup_path))
  fs.cp(old.path, backup_path)

  io.debug("Migration: creating tables ...")

  # Create table: version
  tmp.create_table("version",
                   (("version", "integer"),))

  # Set database version
  tmp.execute("INSERT INTO version VALUES (1)")

  # Create table: kernels
  tmp.create_table("kernels",
                   (("id", "text primary key"),
                    ("north", "integer"),
                    ("south", "integer"),
                    ("east", "integer"),
                    ("west", "integer"),
                    ("max_wg_size", "integer"),
                    ("source", "text")))

  # Create table: devices
  tmp.create_table("devices",
                   (("id", "text primary key"),
                    ("name", "text"),
                    ("count", "integer"),
                    ("address_bits", "integer"),
                    ("double_fp_config", "integer"),
                    ("endian_little", "integer"),
                    ("execution_capabilities", "integer"),
                    ("extensions", "text"),
                    ("global_mem_cache_size", "integer"),
                    ("global_mem_cache_type", "integer"),
                    ("global_mem_cacheline_size", "integer"),
                    ("global_mem_size", "integer"),
                    ("host_unified_memory", "integer"),
                    ("image2d_max_height", "integer"),
                    ("image2d_max_width", "integer"),
                    ("image3d_max_depth", "integer"),
                    ("image3d_max_height", "integer"),
                    ("image3d_max_width", "integer"),
                    ("image_support", "integer"),
                    ("local_mem_size", "integer"),
                    ("local_mem_type", "integer"),
                    ("max_clock_frequency", "integer"),
                    ("max_compute_units", "integer"),
                    ("max_constant_args", "integer"),
                    ("max_constant_buffer_size", "integer"),
                    ("max_mem_alloc_size", "integer"),
                    ("max_parameter_size", "integer"),
                    ("max_read_image_args", "integer"),
                    ("max_samplers", "integer"),
                    ("max_work_group_size", "integer"),
                    ("max_work_item_dimensions", "integer"),
                    ("max_work_item_sizes_0", "integer"),
                    ("max_work_item_sizes_1", "integer"),
                    ("max_work_item_sizes_2", "integer"),
                    ("max_write_image_args", "integer"),
                    ("mem_base_addr_align", "integer"),
                    ("min_data_type_align_size", "integer"),
                    ("native_vector_width_char", "integer"),
                    ("native_vector_width_double", "integer"),
                    ("native_vector_width_float", "integer"),
                    ("native_vector_width_half", "integer"),
                    ("native_vector_width_int", "integer"),
                    ("native_vector_width_long", "integer"),
                    ("native_vector_width_short", "integer"),
                    ("preferred_vector_width_char", "integer"),
                    ("preferred_vector_width_double", "integer"),
                    ("preferred_vector_width_float", "integer"),
                    ("preferred_vector_width_half", "integer"),
                    ("preferred_vector_width_int", "integer"),
                    ("preferred_vector_width_long", "integer"),
                    ("preferred_vector_width_short", "integer"),
                    ("queue_properties", "integer"),
                    ("single_fp_config", "integer"),
                    ("type", "integer"),
                    ("vendor", "text"),
                    ("vendor_id", "text"),
                    ("version", "text")))

  # Create table: data
  tmp.create_table("data",
                   (("id", "text primary key"),
                    ("width", "integer"),
                    ("height", "integer"),
                    ("tin", "text"),
                    ("tout", "text")))

  # Create table: params
  tmp.create_table("params",
                   (("id", "text primary key"),
                    ("wg_c", "integer"),
                    ("wg_r", "integer")))

  # Create table: scenarios
  tmp.create_table("scenarios",
                   (("id", "text primary key"),
                    ("host", "text"),
                    ("device", "text"),
                    ("kernel", "text"),
                    ("data", "text")))

  # Create table: runtimes
  tmp.create_table("runtimes",
                   (("scenario", "text"),
                    ("params", "text"),
                    ("runtime", "real")))

  # Migrate every flat runtimes row, committing every 5000 rows to keep
  # the pending transaction bounded.
  i = 0
  for row in old.execute("SELECT * from runtimes"):
    process_row(tmp, row)
    i += 1
    if not i % 2500:
      io.debug("Processed", i, "rows ...")
      if not i % 5000:
        tmp.commit()

  tmp.commit()

  old_path = old.path
  tmp_path = tmp.path

  # Copy migrated database over the original one.
  fs.cp(tmp_path, old_path)
  fs.rm(tmp_path)

  old.close()
  tmp.close()
  io.info("Migration completed.")
Exemplo n.º 27
0
def migrate_2_to_3(old):
  """
  SkelCL database migration script.

  Migrates a version 2 database to version 3: rebuilds the feature and
  lookup tables under content-hash IDs in a temporary database, rewrites
  runtime rows to reference the new scenario IDs, sanity-checks the
  result, then copies the migrated database over the original file.

  A backup of the original database is kept at "<old.path>.2".

  Arguments:

      old (SkelCLDatabase): The database to migrate
  """

  def _old_kernel2new(old_id):
    # Re-hash an old kernel row to obtain its new-style kernel ID.
    # Returns None if no kernel row exists for the old ID.
    kernel = old.execute("SELECT north,south,east,west,max_wg_size,source "
                         "FROM kernels WHERE id=?",
                         (old_id,)).fetchone()
    if kernel:
      return tmp.kernel_id(*kernel)

  def _old_scenario2new(old_id):
    # Translate an old scenario ID into its new-style equivalent by
    # re-hashing with the translated kernel ID.
    device, old_kernel, dataset = old.execute("SELECT device,kernel,dataset "
                                              "FROM scenarios WHERE id=?",
                                              (old_id,)).fetchone()
    kernel = _old_kernel2new(old_kernel)
    return tmp.scenario_id(device, kernel, dataset)

  # Create temporary database to migrate into.
  fs.rm("/tmp/omnitune.skelcl.migration.db")
  tmp = _db.Database("/tmp/omnitune.skelcl.migration.db")
  tmp.attach(old.path, "rhs")

  io.info("Migrating database to version 3.")

  backup_path = old.path + ".2"
  io.info("Creating backup of old database at '{0}'".format(backup_path))
  fs.cp(old.path, backup_path)

  tmp_path = tmp.path
  old_path = old.path

  tmp.run("create_tables")

  # Populate feature and lookup tables.
  for row in old.execute("SELECT * FROM devices"):
    features = row[1:]
    device_id = hash_device(*features)  # renamed from "id": don't shadow builtin
    io.debug("Features extracted for device", device_id)
    row = (device_id,) + features
    tmp.execute("INSERT INTO devices VALUES " +
                placeholders(*row), row)

    row = (features[0], features[1], device_id)
    tmp.execute("INSERT INTO device_lookup VALUES " +
                placeholders(*row), row)
    tmp.commit()

  # kernel_id() inserts the kernel (and its lookup entry) as a side effect.
  for row in old.execute("SELECT * FROM kernels"):
    args = row[1:]
    tmp.kernel_id(*args)

  for row in old.execute("SELECT * FROM datasets"):
    features = row[1:]
    dataset_id = hash_dataset(*features)  # renamed from "id": don't shadow builtin
    io.debug("Features extracted for dataset", dataset_id)
    row = (dataset_id,) + features
    tmp.execute("INSERT INTO datasets VALUES " +
                placeholders(*row), row)

    row = features + (dataset_id,)
    tmp.execute("INSERT INTO dataset_lookup VALUES " +
                placeholders(*row), row)
    tmp.commit()

  # Populate kernel_names table.
  for row in old.execute("SELECT * FROM kernel_names"):
    old_id = row[0]
    synthetic, name = row[1:]

    kernel = _old_kernel2new(old_id)
    # Skip names whose kernel row no longer exists.
    if kernel:
      row = (kernel, synthetic, name)
      tmp.execute("INSERT OR IGNORE INTO kernel_names VALUES " +
                  placeholders(*row), row)
  tmp.commit()

  # Populate scenarios table.
  for row in old.execute("SELECT * FROM scenarios"):
    old_id, _, device, old_kernel, dataset = row
    kernel = _old_kernel2new(old_kernel)
    new_id = hash_scenario(device, kernel, dataset)

    row = (new_id, device, kernel, dataset)
    tmp.execute("INSERT OR IGNORE INTO scenarios VALUES " +
                placeholders(*row), row)
  tmp.commit()

  # Populate params table (schema unchanged; copy straight across).
  tmp.execute("INSERT INTO params SELECT * from rhs.params")
  tmp.commit()

  # Map each old scenario ID to its new-style replacement.
  scenario_replacements = {
    row[0]: _old_scenario2new(row[0])
    for row in old.execute("SELECT * FROM scenarios")
  }

  tmp.execute("INSERT INTO runtimes SELECT * from rhs.runtimes")
  # BUGFIX: dict.iteritems() is Python 2 only and raises AttributeError on
  # Python 3; items() behaves identically on both.
  for old_id, new_id in scenario_replacements.items():
    io.info("Runtimes", old_id, "->", new_id)
    tmp.execute("UPDATE runtimes SET scenario=? WHERE scenario=?",
                (new_id, old_id))
  tmp.commit()

  # Sanity checks: every runtime must reference exactly one scenario row.
  bad = False
  for row in tmp.execute("SELECT DISTINCT scenario FROM runtimes"):
    count = tmp.execute("SELECT Count(*) FROM scenarios WHERE id=?",
                        (row[0],)).fetchone()[0]
    if count != 1:
      io.error("Bad scenario count:", row[0], count)
      bad = True

  if bad:
    io.fatal("Failed sanity check, aborting.")
  else:
    io.info("Passed sanity check.")

  # Copy migrated database over the original one.
  fs.cp(tmp_path, old_path)
  fs.rm(tmp_path)

  old.close()
  tmp.close()
  io.info("Migration completed.")
Exemplo n.º 28
0
def migrate_0_to_1(old):
    """
    SkelCL database migration script.

    Migrates a version 0 database to version 1: re-keys every record
    under content-hash IDs, splits the flat runtimes table into
    normalised kernels/devices/data/params/scenarios tables, and copies
    the migrated database over the original file.

    A backup of the original database is kept at "<old.path>.0".

    Arguments:

        old (SkelCLDatabase): The database to migrate
    """
    def get_source(checksum):
        # Look up the kernel source code stored under the given checksum.
        query = old.execute("SELECT source FROM kernels WHERE checksum = ?",
                            (checksum,))
        return query.fetchone()[0]

    def get_device_attr(device_id, name, count):
        # Fetch the device attribute row by name and re-key it under the
        # new hash-based device_id, inserting the device count.
        query = old.execute("SELECT * FROM devices WHERE name = ?",
                            (name,))
        attr = query.fetchone()

        # Splice into the new
        newattr = (device_id, attr[0], count) + attr[2:]
        return newattr

    def process_row(tmp, row):
        # Translate one flat v0 runtimes row into entries across the
        # normalised v1 tables (kernels, devices, data, params,
        # scenarios, runtimes).
        # Get column values from row.
        host = row[0]
        dev_name = row[1]
        dev_count = row[2]
        kern_checksum = row[3]
        north = row[4]
        south = row[5]
        east = row[6]
        west = row[7]
        data_width = row[8]
        data_height = row[9]
        max_wg_size = row[10]
        wg_c = row[11]
        wg_r = row[12]
        runtime = row[13]
        # NOTE(review): element types are hard-coded; presumably the v0
        # schema only ever stored float data — TODO confirm.
        type_in = "float"
        type_out = "float"

        # Lookup source code.
        source = get_source(kern_checksum)
        user_source = get_user_source(source)

        # Derive content-hash IDs for each normalised table.
        kernel_id = hash_kernel(north, south, east, west, max_wg_size, source)
        device_id = hash_device(dev_name, dev_count)
        data_id = hash_data(data_width, data_height, type_in, type_out)
        scenario_id = hash_scenario(host, device_id, kernel_id, data_id)
        params_id = hash_workgroup_size(wg_c, wg_r)

        device_attr = get_device_attr(device_id, dev_name, dev_count)

        # Add database entries.
        # OR IGNORE: many runtime rows share the same kernel/device/data/
        # params/scenario, so duplicate feature rows are skipped.
        tmp.execute("INSERT OR IGNORE INTO kernels VALUES (?,?,?,?,?,?,?)",
                    (kernel_id,north,south,east,west,max_wg_size,user_source))

        placeholders = ",".join(["?"] * len(device_attr))
        tmp.execute("INSERT OR IGNORE INTO devices VALUES (" + placeholders + ")",
                    device_attr)

        tmp.execute("INSERT OR IGNORE INTO data VALUES (?,?,?,?,?)",
                    (data_id, data_width, data_height, type_in, type_out))

        tmp.execute("INSERT OR IGNORE INTO params VALUES (?,?,?)",
                    (params_id, wg_c, wg_r))

        tmp.execute("INSERT OR IGNORE INTO scenarios VALUES (?,?,?,?,?)",
                    (scenario_id, host, device_id, kernel_id, data_id))

        # Plain INSERT: every runtime observation is kept.
        tmp.execute("INSERT INTO runtimes VALUES (?,?,?)",
                    (scenario_id, params_id, runtime))

    # Create temporary database
    tmp = _db.Database("/tmp/omnitune.skelcl.migration.db")

    # Clear anything that's already in the database.
    for table in tmp.tables:
        tmp.drop_table(table)

    io.info("Migrating database to version 1.")

    backup_path = old.path + ".0"
    io.info("Creating backup of old database at '{0}'".format(backup_path))
    fs.cp(old.path, backup_path)

    io.debug("Migration: creating tables ...")

    # Create table: kernels
    tmp.create_table("version",
                     (("version",                         "integer"),))

    # Set database version
    tmp.execute("INSERT INTO version VALUES (1)")

    # Create table: kernels
    tmp.create_table("kernels",
                     (("id",                              "text primary key"),
                      ("north",                           "integer"),
                      ("south",                           "integer"),
                      ("east",                            "integer"),
                      ("west",                            "integer"),
                      ("max_wg_size",                     "integer"),
                      ("source",                          "text")))

    # Create table: devices
    tmp.create_table("devices",
                     (("id",                              "text primary key"),
                      ("name",                            "text"),
                      ("count",                           "integer"),
                      ("address_bits",                    "integer"),
                      ("double_fp_config",                "integer"),
                      ("endian_little",                   "integer"),
                      ("execution_capabilities",          "integer"),
                      ("extensions",                      "text"),
                      ("global_mem_cache_size",           "integer"),
                      ("global_mem_cache_type",           "integer"),
                      ("global_mem_cacheline_size",       "integer"),
                      ("global_mem_size",                 "integer"),
                      ("host_unified_memory",             "integer"),
                      ("image2d_max_height",              "integer"),
                      ("image2d_max_width",               "integer"),
                      ("image3d_max_depth",               "integer"),
                      ("image3d_max_height",              "integer"),
                      ("image3d_max_width",               "integer"),
                      ("image_support",                   "integer"),
                      ("local_mem_size",                  "integer"),
                      ("local_mem_type",                  "integer"),
                      ("max_clock_frequency",             "integer"),
                      ("max_compute_units",               "integer"),
                      ("max_constant_args",               "integer"),
                      ("max_constant_buffer_size",        "integer"),
                      ("max_mem_alloc_size",              "integer"),
                      ("max_parameter_size",              "integer"),
                      ("max_read_image_args",             "integer"),
                      ("max_samplers",                    "integer"),
                      ("max_work_group_size",             "integer"),
                      ("max_work_item_dimensions",        "integer"),
                      ("max_work_item_sizes_0",           "integer"),
                      ("max_work_item_sizes_1",           "integer"),
                      ("max_work_item_sizes_2",           "integer"),
                      ("max_write_image_args",            "integer"),
                      ("mem_base_addr_align",             "integer"),
                      ("min_data_type_align_size",        "integer"),
                      ("native_vector_width_char",        "integer"),
                      ("native_vector_width_double",      "integer"),
                      ("native_vector_width_float",       "integer"),
                      ("native_vector_width_half",        "integer"),
                      ("native_vector_width_int",         "integer"),
                      ("native_vector_width_long",        "integer"),
                      ("native_vector_width_short",       "integer"),
                      ("preferred_vector_width_char",     "integer"),
                      ("preferred_vector_width_double",   "integer"),
                      ("preferred_vector_width_float",    "integer"),
                      ("preferred_vector_width_half",     "integer"),
                      ("preferred_vector_width_int",      "integer"),
                      ("preferred_vector_width_long",     "integer"),
                      ("preferred_vector_width_short",    "integer"),
                      ("queue_properties",                "integer"),
                      ("single_fp_config",                "integer"),
                      ("type",                            "integer"),
                      ("vendor",                          "text"),
                      ("vendor_id",                       "text"),
                      ("version",                         "text")))

    # Create table: data
    tmp.create_table("data",
                     (("id",                              "text primary key"),
                      ("width",                           "integer"),
                      ("height",                          "integer"),
                      ("tin",                             "text"),
                      ("tout",                            "text")))

    # Create table: params
    tmp.create_table("params",
                     (("id",                              "text primary key"),
                      ("wg_c",                            "integer"),
                      ("wg_r",                            "integer")))

    # Create table: scenarios
    tmp.create_table("scenarios",
                     (("id",                              "text primary key"),
                      ("host",                            "text"),
                      ("device",                          "text"),
                      ("kernel",                          "text"),
                      ("data",                            "text")))

    # Create table: runtimes
    tmp.create_table("runtimes",
                     (("scenario",                        "text"),
                      ("params",                          "text"),
                      ("runtime",                         "real")))

    # Migrate every runtime row, logging progress every 2500 rows and
    # committing in batches of 5000 to bound transaction size.
    i = 0
    for row in old.execute("SELECT * from runtimes"):
        process_row(tmp, row)
        i += 1
        if not i % 2500:
            io.debug("Processed", i, "rows ...")
            if not i % 5000:
                tmp.commit()

    tmp.commit()

    old_path = old.path
    tmp_path = tmp.path

    # Copy migrated database over the original one.
    fs.cp(tmp_path, old_path)
    fs.rm(tmp_path)

    old.close()
    tmp.close()
    io.info("Migration completed.")
Exemplo n.º 29
0
def migrate_2_to_3(old):
    """
    SkelCL database migration script.

    Migrates a version 2 database to version 3: rebuilds the feature and
    lookup tables under content-hash IDs in a temporary database, rewrites
    runtime rows to reference the new scenario IDs, sanity-checks the
    result, then copies the migrated database over the original file.

    A backup of the original database is kept at "<old.path>.2".

    Arguments:

        old (SkelCLDatabase): The database to migrate
    """
    def _old_kernel2new(old_id):
        # Re-hash an old kernel row to obtain its new-style kernel ID.
        # Returns None if no kernel row exists for the old ID.
        kernel = old.execute("SELECT north,south,east,west,max_wg_size,source "
                             "FROM kernels WHERE id=?",
                             (old_id,)).fetchone()
        if kernel:
            return tmp.kernel_id(*kernel)

    def _old_scenario2new(old_id):
        # Translate an old scenario ID into its new-style equivalent by
        # re-hashing with the translated kernel ID.
        device, old_kernel, dataset = old.execute("SELECT device,kernel,dataset "
                                                  "FROM scenarios WHERE id=?",
                                                  (old_id,)).fetchone()
        kernel = _old_kernel2new(old_kernel)
        return tmp.scenario_id(device, kernel, dataset)

    # Create temporary database to migrate into.
    fs.rm("/tmp/omnitune.skelcl.migration.db")
    tmp = _db.Database("/tmp/omnitune.skelcl.migration.db")
    tmp.attach(old.path, "rhs")

    io.info("Migrating database to version 3.")

    backup_path = old.path + ".2"
    io.info("Creating backup of old database at '{0}'".format(backup_path))
    fs.cp(old.path, backup_path)

    tmp_path = tmp.path
    old_path = old.path

    tmp.run("create_tables")

    # Populate feature and lookup tables.
    for row in old.execute("SELECT * FROM devices"):
        features = row[1:]
        device_id = hash_device(*features)  # renamed from "id": don't shadow builtin
        io.debug("Features extracted for device", device_id)
        row = (device_id,) + features
        tmp.execute("INSERT INTO devices VALUES " +
                    placeholders(*row), row)

        row = (features[0], features[1], device_id)
        tmp.execute("INSERT INTO device_lookup VALUES " +
                    placeholders(*row), row)
        tmp.commit()

    # kernel_id() inserts the kernel (and its lookup entry) as a side effect.
    for row in old.execute("SELECT * FROM kernels"):
        args = row[1:]
        tmp.kernel_id(*args)

    for row in old.execute("SELECT * FROM datasets"):
        features = row[1:]
        dataset_id = hash_dataset(*features)  # renamed from "id": don't shadow builtin
        io.debug("Features extracted for dataset", dataset_id)
        row = (dataset_id,) + features
        tmp.execute("INSERT INTO datasets VALUES " +
                    placeholders(*row), row)

        row = features + (dataset_id,)
        tmp.execute("INSERT INTO dataset_lookup VALUES " +
                    placeholders(*row), row)
        tmp.commit()

    # Populate kernel_names table.
    for row in old.execute("SELECT * FROM kernel_names"):
        old_id = row[0]
        synthetic, name = row[1:]

        kernel = _old_kernel2new(old_id)
        # Skip names whose kernel row no longer exists.
        if kernel:
            row = (kernel, synthetic, name)
            tmp.execute("INSERT OR IGNORE INTO kernel_names VALUES " +
                        placeholders(*row), row)
    tmp.commit()

    # Populate scenarios table.
    for row in old.execute("SELECT * FROM scenarios"):
        old_id, _, device, old_kernel, dataset = row
        kernel = _old_kernel2new(old_kernel)
        new_id = hash_scenario(device, kernel, dataset)

        row = (new_id, device, kernel, dataset)
        tmp.execute("INSERT OR IGNORE INTO scenarios VALUES " +
                    placeholders(*row), row)
    tmp.commit()

    # Populate params table (schema unchanged; copy straight across).
    tmp.execute("INSERT INTO params SELECT * from rhs.params")
    tmp.commit()

    # Map each old scenario ID to its new-style replacement.
    scenario_replacements = {
        row[0]: _old_scenario2new(row[0])
        for row in old.execute("SELECT * FROM scenarios")
    }

    tmp.execute("INSERT INTO runtimes SELECT * from rhs.runtimes")
    # BUGFIX: dict.iteritems() is Python 2 only and raises AttributeError on
    # Python 3; items() behaves identically on both.
    for old_id, new_id in scenario_replacements.items():
        io.info("Runtimes", old_id, "->", new_id)
        tmp.execute("UPDATE runtimes SET scenario=? WHERE scenario=?",
                    (new_id, old_id))
    tmp.commit()

    # Sanity checks: every runtime must reference exactly one scenario row.
    bad = False
    for row in tmp.execute("SELECT DISTINCT scenario FROM runtimes"):
        count = tmp.execute("SELECT Count(*) FROM scenarios WHERE id=?",
                            (row[0],)).fetchone()[0]
        if count != 1:
            io.error("Bad scenario count:", row[0], count)
            bad = True

    if bad:
        io.fatal("Failed sanity check, aborting.")
    else:
        io.info("Passed sanity check.")

    # Copy migrated database over the original one.
    fs.cp(tmp_path, old_path)
    fs.rm(tmp_path)

    old.close()
    tmp.close()
    io.info("Migration completed.")