def test_from_archive_path():
    """A Corpus can be built straight from a tarball path."""
    # Remove any unpacked directory left over from a previous run.
    unpacked_dir = tests.data_path("tiny", "corpus", exists=False)
    fs.rm(unpacked_dir)
    archive = tests.data_path("tiny", "corpus.tar.bz2")
    c = clgen.Corpus.from_json({"path": archive})
    assert c.hash == TINY_HASH
def test_commit(self):
    """Writes on one SQLite connection are invisible to others until commit()."""
    # Create a copy database.
    fs.cp(self.db.path, "/tmp/labm8.con.sql")
    # Open two connections to database.
    c1 = db.Database("/tmp/labm8.con.sql")
    c2 = db.Database("/tmp/labm8.con.sql")
    cmd = 'SELECT * FROM names WHERE first="Bob" AND last="Marley"'
    # Check there's no Bob Marley entry.
    self._test(None, c1.execute(cmd).fetchone())
    self._test(None, c2.execute(cmd).fetchone())
    # Add a Bob Marley entry to one connection.
    c1.execute("INSERT INTO names VALUES ('Bob', 'Marley')")
    # Create a third database connection.
    c3 = db.Database("/tmp/labm8.con.sql")
    # Check that the second and third connections can't see this new entry.
    self._test(("Bob", "Marley"), c1.execute(cmd).fetchone())
    self._test(None, c2.execute(cmd).fetchone())
    self._test(None, c3.execute(cmd).fetchone())
    # Commit, and repeat. Check that all connections can now see
    # Bob Marley.
    c1.commit()
    self._test(("Bob", "Marley"), c1.execute(cmd).fetchone())
    self._test(("Bob", "Marley"), c2.execute(cmd).fetchone())
    self._test(("Bob", "Marley"), c3.execute(cmd).fetchone())
    # Cool, we're jammin'
    fs.rm("/tmp/labm8.con.sql")
def clear(self):
    """Empty the filesystem cache by deleting the cache directory itself."""
    fs.rm(self.path)
def test_from_archive_path(self):
    """A Corpus built from an archive path hashes to the expected value."""
    # Remove any previously-unpacked corpus directory first.
    fs.rm(tests.data_path("tiny", "corpus"))
    archive_path = tests.data_path("tiny", "corpus.tar.bz2")
    c = corpus.Corpus.from_json({"path": archive_path})
    self.assertEqual(TINY_HASH, c.hash)
def drive_testcase(s: db.session_t, testcase: CLgenTestCase,
                   env: cldrive.OpenCLEnvironment, platform_id: int,
                   device_id: int, timeout: int = 60) -> return_t:
    """
    Run CLgen program test harness.

    Arguments:
        s: Database session.
        testcase: Testcase to build a harness for and execute.
        env: OpenCL environment to run under.
        platform_id: OpenCL platform index.
        device_id: OpenCL device index.
        timeout: Seconds before the harness process is killed with SIGKILL.

    Returns:
        return_t: (runtime, status, stdout, stderr) tuple.
    """
    harness = clgen_mkharness.mkharness(s, env, testcase)

    # delete=False: the file must outlive the context manager; it is
    # removed explicitly in the finally block below.
    with NamedTemporaryFile(prefix='cldrive-harness-', delete=False) as tmpfile:
        path = tmpfile.name
    try:
        clgen_mkharness.compile_harness(
            harness.src, path, platform_id=platform_id, device_id=device_id)

        # FIX: use `path` consistently instead of mixing it with tmpfile.name.
        cmd = ['timeout', '-s9', str(timeout), path]

        start_time = time()
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        stdout, stderr = proc.communicate()
        # FIX: drop unused `as e` bindings on the UnicodeError handlers.
        try:
            stdout = stdout.decode('utf-8')
        except UnicodeError:
            stdout = '<-- UTF-ERROR -->'
        try:
            stderr = stderr.decode('utf-8')
        except UnicodeError:
            stderr = '<-- UTF-ERROR -->'
        runtime = time() - start_time

        return return_t(
            runtime=runtime, status=status_t(proc.returncode),
            stdout=stdout, stderr=stderr)
    finally:
        fs.rm(path)
def test_cp(self):
    """fs.cp() duplicates a file's contents."""
    src, dst = "/tmp/labm8.tmp", "/tmp/labm8.tmp.copy"
    system.echo("Hello, world!", src)
    self._test(["Hello, world!"], fs.read(src))
    # Remove any copy left over from a previous run.
    fs.rm(dst)
    self._test(False, fs.exists(dst))
    fs.cp(src, dst)
    self._test(fs.read(src), fs.read(dst))
def _init_error(err: Exception, files_to_rm: List[str] = None) -> None:
    """
    Tidy up in case of error, then re-raise.

    Arguments:
        err: The exception that triggered the cleanup; re-raised at the end.
        files_to_rm: Paths to delete if they exist. Defaults to no files.

    Raises:
        Exception: Always re-raises `err`.
    """
    # FIX: a mutable default argument ([]) is shared across calls; use None
    # as the sentinel and substitute a fresh list here instead.
    if files_to_rm is None:
        files_to_rm = []
    log.error("corpus creation failed. Deleting corpus files")
    for path in files_to_rm:
        if fs.exists(path):
            log.info("removing", path)
            fs.rm(path)
    raise err
def test_cp():
    """fs.cp() duplicates a file's contents."""
    src, dst = "/tmp/labm8.tmp", "/tmp/labm8.tmp.copy"
    system.echo("Hello, world!", src)
    assert fs.read(src) == ["Hello, world!"]
    # Remove any copy left over from a previous run.
    fs.rm(dst)
    assert not fs.exists(dst)
    fs.cp(src, dst)
    assert fs.read(dst) == fs.read(src)
def test_cp_dir(self):
    """Copying a directory recreates its whole subtree."""
    fs.rm("/tmp/labm8")
    fs.rm("/tmp/labm8.copy")
    fs.mkdir("/tmp/labm8/foo/bar")
    self._test(False, fs.exists("/tmp/labm8.copy"))
    fs.cp("/tmp/labm8/", "/tmp/labm8.copy")
    # Every level of the tree must exist in the copy.
    for subdir in ("", "/foo", "/foo/bar"):
        self._test(True, fs.isdir("/tmp/labm8.copy" + subdir))
def test_export_csv_file(self):
    """export_csv() writes table rows as CSV with a header line."""
    tmp = "/tmp/labm8.sql.csv"
    self._test(None, self.db.export_csv("names", tmp))
    expected = ("first,last\n"
                "David,Bowie\n"
                "David,Brent\n"
                "Joe,Bloggs\n")
    with open(tmp) as infile:
        self._test(expected, infile.read())
    fs.rm(tmp)
def test_create_db_gh():
    """create_db(github=True) creates once, then refuses to overwrite."""
    db_path = tests.data_path("db", "tmp.db", exists=False)
    fs.rm(db_path)
    dbutil.create_db(db_path, github=True)
    assert fs.exists(db_path)
    # A second creation attempt over an existing database must be rejected.
    with pytest.raises(clgen.UserError):
        dbutil.create_db(db_path, github=True)
def migrate_1_to_2(old): """ SkelCL database migration script. Arguments: old (SkelCLDatabase): The database to migrate """ # Create temporary database fs.cp(old.path, "/tmp/omnitune.skelcl.migration.db") tmp = _db.Database("/tmp/omnitune.skelcl.migration.db") io.info("Migrating database to version 2.") backup_path = old.path + ".1" io.info("Creating backup of old database at '{0}'".format(backup_path)) fs.cp(old.path, backup_path) # Update database version tmp.drop_table("version") tmp.create_table("version", (("version", "integer"),)) tmp.execute("INSERT INTO version VALUES (2)") # Rename table "data" to "datasets" tmp.create_table("datasets", (("id", "text primary key"), ("width", "integer"), ("height", "integer"), ("tin", "text"), ("tout", "text"))) tmp.execute("INSERT INTO datasets SELECT * FROM data") tmp.drop_table("data") # Rename column "scenarios.data" to "scenarios.dataset" tmp.execute("ALTER TABLE scenarios RENAME TO old_scenarios") tmp.create_table("scenarios", (("id", "text primary key"), ("host", "text"), ("device", "text"), ("kernel", "text"), ("dataset", "text"))) tmp.execute("INSERT INTO scenarios SELECT * FROM old_scenarios") tmp.drop_table("old_scenarios") tmp.commit() old_path = old.path tmp_path = tmp.path # Copy migrated database over the original one. fs.cp(tmp_path, old_path) fs.rm(tmp_path) old.close() tmp.close() io.info("Migration completed.")
def test_cp_dir():
    """Recursive directory copy preserves the tree."""
    fs.rm("/tmp/labm8")
    fs.rm("/tmp/labm8.copy")
    fs.mkdir("/tmp/labm8/foo/bar")
    assert not fs.exists("/tmp/labm8.copy")
    fs.cp("/tmp/labm8/", "/tmp/labm8.copy")
    # Every level of the tree must exist in the copy.
    for subdir in ("", "/foo", "/foo/bar"):
        assert fs.isdir("/tmp/labm8.copy" + subdir)
def test_must_exist(self):
    """must_exist() returns the path when present, raises File404 otherwise."""
    path = "/tmp/labm8.must_exist.txt"
    system.echo("Hello, world!", path)
    self.assertEqual(fs.must_exist(path), path)
    # Components are joined the same way as fs.path().
    self.assertEqual(fs.must_exist("/tmp", "labm8.must_exist.txt"), path)
    with self.assertRaises(fs.File404):
        fs.must_exist("/not/a/real/path")
    fs.rm(path)
def migrate_3_to_4(old): """ SkelCL database migration script. Arguments: old (SkelCLDatabase): The database to migrate """ # Create temporary database fs.rm("/tmp/omnitune.skelcl.migration.db") tmp = _db.Database("/tmp/omnitune.skelcl.migration.db") tmp.attach(old.path, "rhs") io.info("Migrating database to version 4.") backup_path = old.path + ".3" io.info("Creating backup of old database at '{0}'".format(backup_path)) fs.cp(old.path, backup_path) tables = [ "kernels", "kernel_lookup", "kernel_names", "devices", "device_lookup", "datasets", "dataset_lookup", "scenarios", "params", "runtimes", "runtime_stats", "oracle_params" ] for table in tables: io.info("Copying data from '{}' ...".format(table)) tmp.execute("INSERT INTO {} SELECT * FROM rhs.{}".format(table, table)) tmp_path = tmp.path old_path = old.path tmp.execute("VACUUM") # Sanity checks bad = False for table in tables: old_count = tmp.num_rows("rhs." + table) tmp_count = tmp.num_rows(table) if old_count != tmp_count: io.error("Bad rows count:", old_count, tmp_count) bad = True if bad: io.fatal("Failed sanity check, aborting.") else: io.info("Passed sanity check.") # Copy migrated database over the original one. fs.cp(tmp_path, old_path) fs.rm(tmp_path) old.close() tmp.close() io.info("Migration completed.")
def test_scp_user(self):
    """scp with an explicit user copies the file contents."""
    src, dst = "/tmp/labm8.tmp", "/tmp/labm8.tmp.copy"
    system.echo("Hello, world!", src)
    self._test(["Hello, world!"], fs.read(src))
    # Remove any copy left over from a previous run.
    fs.rm(dst)
    self._test(False, fs.exists(dst))
    # Perform scp.
    system.scp("localhost", src, dst,
               path="tests/bin", user="******")
    self._test(fs.read(src), fs.read(dst))
def test_scp_user():
    """scp with an explicit user copies the file contents."""
    src, dst = "/tmp/labm8.tmp", "/tmp/labm8.tmp.copy"
    system.echo("Hello, world!", src)
    assert fs.read(src) == ["Hello, world!"]
    # Remove any copy left over from a previous run.
    fs.rm(dst)
    assert not fs.exists(dst)
    # Perform scp.
    system.scp("localhost", src, dst,
               path="labm8/data/test/bin", user="******")
    assert fs.read(dst) == fs.read(src)
def test_create_drop_tables(self):
    """Tables can be created, listed, and dropped again."""
    fs.rm("/tmp/labm8.sql")
    database = db.Database("/tmp/labm8.sql")
    # Dropping a non-existent table is harmless.
    database.drop_table("foo")
    self._test(False, "foo" in database.tables)
    database.create_table("foo", (("id", "integer primary key"),))
    self._test(True, "foo" in database.tables)
    database.drop_table("foo")
    self._test(False, "foo" in database.tables)
def test_empty_table(self):
    """empty_table() removes all rows but keeps the table itself."""
    fs.rm("/tmp/labm8.sql")
    database = db.Database("/tmp/labm8.sql")
    database.create_table("foo", (("id", "integer primary key"),))
    database.execute("INSERT INTO foo VALUES (1)")
    database.execute("INSERT INTO foo VALUES (2)")
    database.execute("INSERT INTO foo VALUES (3)")
    self._test(3, database.num_rows("foo"))
    database.empty_table("foo")
    self._test(0, database.num_rows("foo"))
def test_write_file():
    """write_file()/read_file() round-trip a dictionary."""
    payload = {"a": "1", "b": "2"}
    jsonutil.write_file("/tmp/labm8.write_file.json", payload)
    first = jsonutil.read_file("/tmp/labm8.write_file.json")
    fs.rm("/tmp/labm8.write_file.json")
    jsonutil.write_file("/tmp/labm8.write_file2.json", payload)
    second = jsonutil.read_file("/tmp/labm8.write_file2.json")
    fs.rm("/tmp/labm8.write_file2.json")
    assert payload == first == second
def test_GitHubRepo_Index_not_cloned(test_repo: github_repo.GitHubRepo):
    """Indexing a repo which is not cloned does nothing."""
    fs.rm(test_repo.clone_dir)
    assert not test_repo.IsIndexed()
    importer_config = scrape_repos_pb2.ContentFilesImporterConfig(
        source_code_pattern='.*\\.java',
        preprocessor=[
            "datasets.github.scrape_repos.preprocessors."
            "extractors:JavaMethods"
        ])
    test_repo.Index([importer_config], multiprocessing.Pool(1))
    # Still unindexed: there was nothing on disk to index.
    assert not test_repo.IsIndexed()
def test_rmtrash(self):
    """rmtrash() removes files and directories; missing paths are tolerated."""
    tmpfile = "/tmp/labm8.tmp"
    tmpdir = "/tmp/labm8.dir"
    system.echo("Hello, world!", tmpfile)
    self.assertTrue(fs.isfile(tmpfile))
    fs.rmtrash(tmpfile)
    self.assertFalse(fs.isfile(tmpfile))
    # Trashing an already-removed path is a no-op.
    fs.rmtrash(tmpfile)
    fs.rm(tmpfile)
    fs.rm(tmpdir)
    fs.mkdir(tmpdir + "/foo/bar")
    system.echo("Hello, world!", tmpdir + "/foo/bar/baz")
    self.assertTrue(fs.isfile(tmpdir + "/foo/bar/baz"))
    fs.rmtrash(tmpdir)
    self.assertFalse(fs.isfile(tmpdir + "/foo/bar/baz"))
    self.assertFalse(fs.isfile(tmpdir + "/"))
def test_rm():
    """fs.rm() deletes files and directories; missing paths are a no-op."""
    tmpfile = "/tmp/labm8.tmp"
    tmpdir = "/tmp/labm8.dir"
    system.echo("Hello, world!", tmpfile)
    assert fs.isfile(tmpfile)
    fs.rm(tmpfile)
    assert not fs.isfile(tmpfile)
    # Removing a missing path must not raise.
    fs.rm(tmpfile)
    fs.rm(tmpfile)
    fs.rm(tmpdir)
    fs.mkdir(tmpdir + "/foo/bar")
    system.echo("Hello, world!", tmpdir + "/foo/bar/baz")
    assert fs.isfile(tmpdir + "/foo/bar/baz")
    fs.rm(tmpdir)
    assert not fs.isfile(tmpdir + "/foo/bar/baz")
    assert not fs.isfile(tmpdir + "/")
def __delitem__(self, key: str) -> None:
    """
    Delete cached file.

    Arguments:
        key (str): Key.

    Raises:
        Cache404: If file not in cache.
    """
    assert isinstance(key, string_types)
    # _incache() raises Cache404 for unknown keys before we delete anything.
    fs.rm(self._incache(self.keypath(key)))
def test_rm(self):
    """fs.rm() deletes files and directories; missing paths are a no-op."""
    tmpfile = "/tmp/labm8.tmp"
    tmpdir = "/tmp/labm8.dir"
    system.echo("Hello, world!", tmpfile)
    self._test(True, fs.isfile(tmpfile))
    fs.rm(tmpfile)
    self._test(False, fs.isfile(tmpfile))
    # Removing a missing path must not raise.
    fs.rm(tmpfile)
    fs.rm(tmpfile)
    fs.rm(tmpdir)
    fs.mkdir(tmpdir + "/foo/bar")
    system.echo("Hello, world!", tmpdir + "/foo/bar/baz")
    self._test(True, fs.isfile(tmpdir + "/foo/bar/baz"))
    fs.rm(tmpdir)
    self._test(False, fs.isfile(tmpdir + "/foo/bar/baz"))
    self._test(False, fs.isfile(tmpdir + "/"))
def _init_error(err: Exception) -> None:
    """ tidy up in case of error """
    # NOTE(review): this function reads `self` without taking it as a
    # parameter — presumably it is a closure defined inside a method of the
    # corpus class; confirm in the surrounding context.
    log.error("corpus creation failed. Deleting corpus files")
    # Delete all derived corpus artifacts so a rerun starts clean.
    paths = [
        fs.path(self.contentcache.path, "kernels.db"),
        fs.path(self.cache.path, "corpus.txt"),
        fs.path(self.cache.path, "tensor.npy"),
        fs.path(self.cache.path, "atomizer.pkl")
    ]
    for path in paths:
        if fs.exists(path):
            log.info("removing", path)
            fs.rm(path)
    raise err
def test_remove_bad_preprocessed(self):
    """remove_bad_preprocessed() blanks bad contents but keeps row counts."""
    fs.rm("tmp.db")
    dbutil.create_db("tmp.db")
    db = sqlite3.connect("tmp.db")
    c = db.cursor()
    # Create some data to test with:
    c.execute("DELETE FROM PreprocessedFiles")
    c.execute("INSERT INTO PreprocessedFiles VALUES(?,?,?)",
              ("id1", 0, "good output"))
    c.execute("INSERT INTO PreprocessedFiles VALUES(?,?,?)",
              ("id2", 1, "bad output"))
    c.execute("INSERT INTO PreprocessedFiles VALUES(?,?,?)",
              ("id3", 2, "ugly output"))
    db.commit()
    c.close()
    # Check that data was written properly:
    c = db.cursor()
    c.execute("SELECT Count(*) FROM PreprocessedFiles")
    count = c.fetchone()[0]
    self.assertEqual(3, count)
    db.close()
    preprocess.remove_bad_preprocessed("tmp.db")
    # Check that clean worked:
    db = sqlite3.connect("tmp.db")
    c = db.cursor()
    c.execute("SELECT Count(*) FROM PreprocessedFiles")
    count = c.fetchone()[0]
    self.assertEqual(3, count)
    c.execute("SELECT contents FROM PreprocessedFiles WHERE status=1 "
              "OR status=2")
    rows = c.fetchall()
    print(rows)
    # NOTE(review): fetchall() yields 1-tuples, so `r == "[DELETED]"` is
    # always False and this assertion can never fail — it probably meant to
    # compare r[0]. Confirm intent before changing.
    self.assertTrue(all(not r == "[DELETED]" for r in rows))
    # Clean up:
    c.execute("DELETE FROM PreprocessedFiles")
    db.commit()
    c.close()
    # Check that clean-up worked:
    c = db.cursor()
    c.execute("SELECT Count(*) FROM PreprocessedFiles")
    count = c.fetchone()[0]
    self.assertEqual(0, count)
    fs.rm("tmp.db")
def test_write_file(self):
    """JSON write/read round-trips a dictionary to two different files."""
    data = {"a": "1", "b": "2"}
    jsonutil.write_file("/tmp/labm8.write_file.json", data)
    readback1 = jsonutil.read_file("/tmp/labm8.write_file.json")
    fs.rm("/tmp/labm8.write_file.json")
    jsonutil.write_file("/tmp/labm8.write_file2.json", data)
    readback2 = jsonutil.read_file("/tmp/labm8.write_file2.json")
    fs.rm("/tmp/labm8.write_file2.json")
    self.assertEqual(data, readback1)
    self.assertEqual(data, readback2)
def __delitem__(self, key):
    """
    Delete cached file.

    Arguments:
        key: Key.

    Raises:
        KeyError: If file not in cache.
    """
    path = self.keypath(key)
    # Guard clause: unknown keys raise before any deletion happens.
    if not fs.exists(path):
        raise KeyError(key)
    fs.rm(path)
def test_rmtrash():
    """rmtrash() removes files and directories; missing paths are tolerated."""
    with tempfile.NamedTemporaryFile(prefix='labm8_') as f:
        assert fs.isfile(f.name)
        fs.rmtrash(f.name)
        assert not fs.isfile(f.name)
        # Trashing an already-removed path is a no-op.
        fs.rmtrash(f.name)
        fs.rm(f.name)
    with tempfile.TemporaryDirectory() as d:
        fs.rm(d)
        fs.mkdir(d, "foo/bar")
        system.echo("Hello, world!", fs.path(d, "foo/bar/baz"))
        # BUG FIX: was `fs.isfile(f, "foo/bar/baz")`, referencing the closed
        # tempfile from the previous block instead of the directory `d`.
        assert fs.isfile(d, "foo/bar/baz")
        fs.rmtrash(d)
        assert not fs.isfile(d, "foo/bar/baz")
        assert not fs.isdir(d)
def test_remove_bad_preprocessed(self):
    """remove_bad_preprocessed() blanks bad contents but keeps row counts."""
    fs.rm("tmp.db")
    dbutil.create_db("tmp.db")
    db = sqlite3.connect("tmp.db")
    c = db.cursor()
    # Create some data to test with:
    c.execute("DELETE FROM PreprocessedFiles")
    c.execute("INSERT INTO PreprocessedFiles VALUES(?,?,?)",
              ("id1", 0, "good output"))
    c.execute("INSERT INTO PreprocessedFiles VALUES(?,?,?)",
              ("id2", 1, "bad output"))
    c.execute("INSERT INTO PreprocessedFiles VALUES(?,?,?)",
              ("id3", 2, "ugly output"))
    db.commit()
    c.close()
    # Check that data was written properly:
    c = db.cursor()
    c.execute("SELECT Count(*) FROM PreprocessedFiles")
    count = c.fetchone()[0]
    self.assertEqual(3, count)
    db.close()
    preprocess.remove_bad_preprocessed("tmp.db")
    # Check that clean worked:
    db = sqlite3.connect("tmp.db")
    c = db.cursor()
    c.execute("SELECT Count(*) FROM PreprocessedFiles")
    count = c.fetchone()[0]
    self.assertEqual(3, count)
    c.execute("SELECT contents FROM PreprocessedFiles WHERE status=1 OR status=2")
    rows = c.fetchall()
    print(rows)
    # NOTE(review): fetchall() yields 1-tuples, so `r == "[DELETED]"` is
    # always False and this assertion can never fail — it probably meant to
    # compare r[0]. Confirm intent before changing.
    self.assertTrue(all(not r == "[DELETED]" for r in rows))
    # Clean up:
    c.execute("DELETE FROM PreprocessedFiles")
    db.commit()
    c.close()
    # Check that clean-up worked:
    c = db.cursor()
    c.execute("SELECT Count(*) FROM PreprocessedFiles")
    count = c.fetchone()[0]
    self.assertEqual(0, count)
    fs.rm("tmp.db")
def test_cp_over_dir():
    """Copying a directory onto an existing directory merges the contents."""
    src = "/tmp/labm8.tmp.src"
    dst = "/tmp/labm8.tmp.copy"
    fs.mkdir(src)
    system.echo("Hello, world!", src + "/foo")
    fs.rm(dst)
    fs.mkdir(dst)
    # Preconditions: source populated, destination empty.
    assert fs.isdir(src)
    assert fs.isfile(src + "/foo")
    assert fs.isdir(dst)
    assert not fs.isfile(dst + "/foo")
    fs.cp(src, dst + "/")
    # Source untouched, destination now holds the file.
    assert fs.isdir(src)
    assert fs.isfile(src + "/foo")
    assert fs.isdir(dst)
    assert fs.isfile(dst + "/foo")
    assert fs.read(src + "/foo") == fs.read(dst + "/foo")
def test_cp_over_dir(self):
    """Copying a directory onto an existing directory merges the contents."""
    src = "/tmp/labm8.tmp.src"
    dst = "/tmp/labm8.tmp.copy"
    fs.mkdir(src)
    system.echo("Hello, world!", src + "/foo")
    fs.rm(dst)
    fs.mkdir(dst)
    # Preconditions: source populated, destination empty.
    self._test(True, fs.isdir(src))
    self._test(True, fs.isfile(src + "/foo"))
    self._test(True, fs.isdir(dst))
    self._test(False, fs.isfile(dst + "/foo"))
    fs.cp(src, dst + "/")
    # Source untouched, destination now holds the file.
    self._test(True, fs.isdir(src))
    self._test(True, fs.isfile(src + "/foo"))
    self._test(True, fs.isdir(dst))
    self._test(True, fs.isfile(dst + "/foo"))
    self._test(fs.read(src + "/foo"),
               fs.read(dst + "/foo"))
def RunTestcase(opencl_environment: env.OpenCLEnvironment,
                testbed: deepsmith_pb2.Testbed,
                testcase: deepsmith_pb2.Testcase,
                cflags: typing.List[str]) -> deepsmith_pb2.Result:
    """Run a testcase.

    Compiles a cldrive harness for the testcase, executes it under a
    `timeout` wrapper, and records outputs, a runtime profiling event, and
    the derived outcome in a Result proto.

    Args:
        opencl_environment: Environment used to compile and execute the driver.
        testbed: Testbed proto copied into the result.
        testcase: Must use the 'opencl' toolchain and 'cldrive' harness.
        cflags: Extra compiler flags forwarded to CompileDriver().

    Returns:
        A populated deepsmith_pb2.Result.

    Raises:
        ValueError: If the testcase toolchain or harness is unsupported.
    """
    if testcase.toolchain != 'opencl':
        raise ValueError(
            f"Unsupported testcase toolchain: '{testcase.toolchain}'")
    if testcase.harness.name != 'cldrive':
        raise ValueError(
            f"Unsupported testcase harness: '{testcase.harness.name}'")
    result = deepsmith_pb2.Result()
    result.testbed.CopyFrom(testbed)
    platform_id, device_id = opencl_environment.ids()
    driver = MakeDriver(
        testcase, True if testbed.opts['opencl_opt'] == 'enabled' else False)
    # MakeDriver() annotates the testcase, so we must only set the testcase field
    # of the output result after we have called it.
    result.testcase.CopyFrom(testcase)
    # Get a temporary file to write and run the driver from.
    with tempfile.NamedTemporaryFile(prefix='deepsmith_', delete=False) as f:
        path = pathlib.Path(f.name)
    try:
        CompileDriver(driver, path, platform_id, device_id, cflags=cflags)
        timeout = testcase.harness.opts.get('timeout_seconds', '60')
        cmd = ['timeout', '-s9', timeout, f.name]
        start_time = labdate.GetUtcMillisecondsNow()
        proc = opencl_environment.Exec(cmd)
        end_time = labdate.GetUtcMillisecondsNow()
        # Build result message.
        result.returncode = proc.returncode
        result.outputs['stdout'] = proc.stdout
        result.outputs['stderr'] = proc.stderr
        runtime = result.profiling_events.add()
        runtime.client = system.HOSTNAME
        runtime.type = 'runtime'
        runtime.duration_ms = int(
            round((end_time - start_time).total_seconds() * 1000))
        runtime.event_start_epoch_ms = labdate.MillisecondsTimestamp(
            start_time)
        result.outcome = GetResultOutcome(result)
    except DriverCompilationError as e:
        logging.warning('%s', e)
        result.outcome = deepsmith_pb2.Result.UNKNOWN
    finally:
        # delete=False above, so the driver binary must be removed explicitly.
        fs.rm(path)
    return result
def test_set_and_get():
    """Files inserted into the cache are moved and retrievable by key."""
    cache_dir = "/tmp/labm8-cache-set-and-get"
    fs.rm(cache_dir)
    c = cache.FSCache(cache_dir)
    # Create a file to insert.
    system.echo("Hello, world!", "/tmp/labm8.testfile.txt")
    assert fs.read("/tmp/labm8.testfile.txt") == ["Hello, world!"]
    # Inserting moves the file into the cache.
    c['foobar'] = "/tmp/labm8.testfile.txt"
    assert fs.isfile(c.keypath("foobar"))
    assert not fs.isfile("/tmp/labm8.testfile.txt")
    # The cached contents are unchanged and reachable via [] and get().
    assert fs.read(c['foobar']) == ["Hello, world!"]
    assert fs.read(c['foobar']) == fs.read(c.get('foobar'))
    c.clear()
def test_remove_preprocessed(self):
    """remove_preprocessed() drops kernels and marks the db modified."""
    tmpdb = 'test_remove_preprocessed.db'
    fs.cp(tests.db_path('10-kernels-preprocessed'), tmpdb)
    self.assertEqual(8, dbutil.num_good_kernels(tmpdb))
    conn = dbutil.connect(tmpdb)
    self.assertFalse(dbutil.is_modified(conn))
    conn.close()
    dbutil.remove_preprocessed(tmpdb)
    # All preprocessed kernels gone; database flagged as modified.
    self.assertEqual(0, dbutil.num_good_kernels(tmpdb))
    conn = dbutil.connect(tmpdb)
    self.assertTrue(dbutil.is_modified(conn))
    conn.close()
    fs.rm(tmpdb)
def drive(command: List[str], src: str) -> return_t:
    """ invoke cldrive on source """
    start_time = time()

    with NamedTemporaryFile() as tmp:
        tmp_path = tmp.name
        cli = ['timeout', '-s9', '60', './libexec/co.sh', tmp_path] + command

        # `src` is fed to the wrapper script on stdin.
        process = Popen(cli, stdin=PIPE, stdout=PIPE, stderr=PIPE)
        stdout, stderr = process.communicate(src.encode('utf-8'))

        # NOTE(review): NamedTemporaryFile deletes tmp_path on context exit,
        # so this explicit rm looks redundant — presumably fs.rm() tolerates
        # a missing file; confirm against labm8.fs.rm semantics.
        fs.rm(tmp_path)

        stdout, stderr = stdout.decode('utf-8'), stderr.decode('utf-8')
        runtime = time() - start_time

        return return_t(
            runtime=runtime, status=status_t(process.returncode),
            stdout=stdout, stderr=stderr)
def test_ls_abspaths(self):
    """ls(abspaths=True) yields absolute paths, optionally recursively."""
    fs.cp("tests/data/testdir", "/tmp/testdir")
    expected_flat = ["/tmp/testdir/" + n for n in ("a", "b", "c", "d")]
    self._test(expected_flat, fs.ls("/tmp/testdir", abspaths=True))
    expected_deep = ["/tmp/testdir/" + n for n in
                     ("a", "b", "c", "c/e", "c/f", "c/f/f", "c/f/f/i",
                      "c/f/h", "c/g", "d")]
    self._test(expected_deep,
               fs.ls("/tmp/testdir", recursive=True, abspaths=True))
    fs.rm("/tmp/testdir")
def sample_iteration(self, model: Model, quiet: bool = False) -> None:
    """
    Run one sample iteration.

    Samples a batch of kernels from the model into a temporary file,
    imports them into the kernels database, optionally preprocesses them,
    then removes the temporary file.

    Arguments:
        model (Model): CLgen model.
        quiet (bool): Suppress per-sample output if True.
    """
    assert (isinstance(model, Model))

    cache = self.cache(model)

    # Seed text: either a serialized kernel argspec, or a generic prototype.
    if self.kernel_opts.get("args", None):
        start_text = serialize_argspec(self.kernel_opts["args"])
    else:
        start_text = "__kernel void A("

    # PID in the filename keeps concurrent samplers from clobbering each other.
    tmppath = fs.path(cache.path,
                      "sampler-{pid}.tmp.cl".format(pid=system.PID))

    with open(tmppath, "w") as outfile:
        opts = {
            "output": outfile,
            "num_samples": self.batch_size,
            "temperature": self.kernel_opts.get("temperature", 1),
            "max_length": self.kernel_opts.get("max_length", 10000),
            "seed_text": start_text,
            "quiet": quiet
        }
        model.sample(**opts)

    sys.stdout.flush()
    sys.stderr.flush()
    fetch.process_sample_file(cache["kernels.db"], tmppath,
                              max_kernel_len=opts["max_length"], quiet=True)

    if self.static_checker:
        # TODO: Parse dynamic checker requirement
        preprocess.preprocess_db(cache["kernels.db"])

    fs.rm(tmppath)
def test_insert():
    """sql_insert_dict() and the preprocessed-file cleanup helpers."""
    db_path = tests.data_path("db", "tmp.db", exists=False)
    fs.rm(db_path)
    dbutil.create_db(db_path)
    conn = dbutil.connect(db_path)
    cur = conn.cursor()
    assert dbutil.num_rows_in(db_path, "ContentFiles") == 0
    dbutil.sql_insert_dict(cur, "ContentFiles", {"id": "a", "contents": "foo"})
    dbutil.sql_insert_dict(cur, "PreprocessedFiles",
                           {"id": "a", "status": 0, "contents": "bar"})
    dbutil.sql_insert_dict(cur, "PreprocessedFiles",
                           {"id": "b", "status": 1, "contents": "car"})
    conn.commit()
    cur = conn.cursor()
    assert dbutil.num_rows_in(db_path, "ContentFiles") == 1
    assert dbutil.num_rows_in(db_path, "PreprocessedFiles") == 2
    assert dbutil.cc(db_path, "ContentFiles", "contents") == 3
    assert dbutil.cc(db_path, "ContentFiles", "id") == 1
    assert dbutil.lc(db_path, "ContentFiles", "contents") == 1
    dbutil.remove_bad_preprocessed(db_path)
    assert dbutil.num_rows_in(db_path, "ContentFiles") == 1
    # remove_bad_preprocessed doesn't actually delete any rows, just
    # replaces contents
    assert dbutil.num_rows_in(db_path, "PreprocessedFiles") == 2
    dbutil.remove_preprocessed(db_path)
    assert dbutil.num_rows_in(db_path, "ContentFiles") == 1
    assert dbutil.num_rows_in(db_path, "PreprocessedFiles") == 0
def test_ls_abspaths():
    """ls(abspaths=True) returns absolute paths, optionally recursively."""
    fs.cp("labm8/data/test/testdir", "/tmp/testdir")
    assert fs.ls("/tmp/testdir", abspaths=True) == [
        "/tmp/testdir/" + n for n in ("a", "b", "c", "d")
    ]
    assert fs.ls("/tmp/testdir", recursive=True, abspaths=True) == [
        "/tmp/testdir/" + n
        for n in ("a", "b", "c", "c/e", "c/f", "c/f/f", "c/f/f/i",
                  "c/f/h", "c/g", "d")
    ]
    fs.rm("/tmp/testdir")
def test_set_and_get(self):
    """Inserting a file moves it into the cache and preserves its contents."""
    fs.rm("/tmp/labm8-cache-set-and-get")
    c = cache.FSCache("/tmp/labm8-cache-set-and-get")
    # create file
    system.echo("Hello, world!", "/tmp/labm8.testfile.txt")
    # sanity check
    self.assertEqual(fs.read("/tmp/labm8.testfile.txt"), ["Hello, world!"])
    # insert file into cache
    c['foobar'] = "/tmp/labm8.testfile.txt"
    # file must be in cache
    self.assertTrue(fs.isfile(c.keypath("foobar")))
    # file must have been moved
    self.assertFalse(fs.isfile("/tmp/labm8.testfile.txt"))
    # check file contents
    # BUG FIX: was assertTrue(fs.read(...), ["Hello, world!"]) — the second
    # argument to assertTrue is just the failure message, so the contents
    # were never compared. assertEqual performs the intended comparison.
    self.assertEqual(fs.read(c['foobar']), ["Hello, world!"])
    self.assertEqual(fs.read(c['foobar']), fs.read(c.get('foobar')))
    c.clear()
def build_with_clang(program: Union[CLgenProgram, CLSmithProgram],
                     clang: str) -> Tuple[int, float, str]:
    """Compile an OpenCL program with clang -cc1.

    Returns a (returncode, elapsed seconds, stripped stderr) tuple.
    """
    with NamedTemporaryFile(prefix='buildaclang-', delete=False) as tmpfile:
        src_path = tmpfile.name
    try:
        with open(src_path, "w") as outfile:
            print(program.src, file=outfile)

        argv = ['timeout', '-s9', '60s', clang, '-cc1', '-xcl', src_path]
        start_time = time()
        proc = subprocess.Popen(argv, universal_newlines=True,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        _, stderr = proc.communicate()
        elapsed = time() - start_time
        return proc.returncode, elapsed, stderr.strip()
    finally:
        # delete=False above, so the source file is removed here.
        fs.rm(src_path)
def import_clgen_sample(session: session_t, path: Path,
                        cl_launchable: bool = False,
                        harnesses: List[cldriveParams] = None,
                        delete: bool = False) -> None:
    """
    Import a CLgen sample file into the database.

    Skips duplicates (by SHA1 of the source) and empty files. Optionally
    creates test harnesses for the new program and deletes the source file.

    Arguments:
        session: Database session used for all queries and inserts.
        path: Path of the sample file to import.
        cl_launchable: Whether the kernel is launchable as-is.
        harnesses: cldrive parameter sets to build harnesses for (default none).
        delete: If True, remove the file after a successful import.
    """
    # FIX: mutable default argument ([]) replaced with a None sentinel.
    if harnesses is None:
        harnesses = []
    src = fs.read_file(path)
    hash_ = crypto.sha1_str(src)
    # FIX: body previously referenced a name `s` instead of the `session`
    # parameter, which was otherwise unused.
    dupe = session.query(CLgenProgram) \
        .filter(CLgenProgram.hash == hash_).first()

    if dupe:
        print(f"warning: ignoring duplicate file {path}")
    elif not len(src):
        print(f"warning: ignoring empty file {path}")
    else:
        program = CLgenProgram(
            hash=hash_,
            runtime=len(src) / CLGEN_INFERENCE_CPS,
            src=src,
            linecount=len(src.split('\n')),
            cl_launchable=cl_launchable)
        session.add(program)
        session.commit()

        # Make test harnesses, if required
        if harnesses:
            env = cldrive.make_env()
            for params in harnesses:
                testcase = get_or_create(
                    session, CLgenTestCase,
                    program_id=program.id, params_id=params.id)
                session.flush()
                clgen_mkharness.mkharness(session, env, testcase)

        if delete:
            fs.rm(path)
def to_dist(self, distpath: str, author: str = None) -> str:
    """
    Create a dist file.

    Arguments:
        distpath (str): Path to dist file.
        author (str, optional): Author name.

    Returns:
        str: Path to generated distfile.

    Raises:
        DistError: If the output file already exists.
    """
    outpath = fs.abspath(distpath) + ".tar.bz2"
    if fs.exists(outpath):
        raise DistError("file {} exists".format(outpath))

    meta = self.meta
    if author is not None:
        meta["author"] = author
    log.debug(clgen.format_json(meta))

    # FIX: the original except-handler referenced `tar` and `metapath`
    # unconditionally, which raises NameError (masking the real error) if
    # tarfile.open() or mktemp() itself failed. Track both explicitly.
    tar = None
    metapath = None
    try:
        tar = tarfile.open(outpath, 'w:bz2')

        # write meta
        metapath = mktemp(prefix="clgen-", suffix=".json")
        clgen.write_file(metapath, clgen.format_json(meta))
        log.debug("metafile:", metapath)

        # create tarball
        tar.add(metapath, arcname="meta.json")

        # pack contents:
        for path in meta["contents"]:
            abspath = fs.path(cache.ROOT, path)
            log.verbose("packing", abspath)
            tar.add(abspath, arcname=fs.path("contents", path))

        # tidy up
        fs.rm(metapath)
        tar.close()
    except Exception as e:
        if tar is not None:
            tar.close()
        if metapath is not None:
            fs.rm(metapath)
        fs.rm(outpath)
        raise e

    return outpath
def test_rm_glob(self):
    """fs.rm() expands glob patterns only when glob=True (the default)."""
    fs.mkdir("/tmp/labm8.glob")
    for name in ("1", "2", "abc"):
        system.echo("Hello, world!", "/tmp/labm8.glob/" + name)

    # With glob=False the pattern is treated as a literal filename.
    fs.rm("/tmp/labm8.glob/a*", glob=False)
    self._test(True, fs.isfile("/tmp/labm8.glob/1"))
    self._test(True, fs.isfile("/tmp/labm8.glob/2"))
    self._test(True, fs.isfile("/tmp/labm8.glob/abc"))

    # Default globbing removes only the matching file.
    fs.rm("/tmp/labm8.glob/a*")
    self._test(True, fs.isfile("/tmp/labm8.glob/1"))
    self._test(True, fs.isfile("/tmp/labm8.glob/2"))
    self._test(False, fs.isfile("/tmp/labm8.glob/abc"))

    # A match-all wildcard empties the directory.
    fs.rm("/tmp/labm8.glob/*")
    self._test(False, fs.isfile("/tmp/labm8.glob/1"))
    self._test(False, fs.isfile("/tmp/labm8.glob/2"))
    self._test(False, fs.isfile("/tmp/labm8.glob/abc"))
def test_mv_no_dst(self):
    """Moving to a non-existent destination directory raises IOError."""
    src = "/tmp/labm8.tmp"
    system.echo("Hello, world!", src)
    with self.assertRaises(IOError):
        fs.mv(src, "/not/a/real/path")
    fs.rm(src)
def test_finalise_tight(self):
    """finalise(tight=True) writes the current figure to disk."""
    out = "/tmp/labm8.png"
    self._mkplot()
    viz.finalise(out, tight=True)
    self.assertTrue(fs.exists(out))
    fs.rm(out)
def migrate_0_to_1(old):
    """
    SkelCL database migration script.

    Migrates the original flat "runtimes" schema to version 1: creates
    normalised kernels/devices/data/params/scenarios/runtimes tables keyed
    by content hashes, re-inserts every old runtime row, then replaces the
    original database file. A backup is written first.

    Arguments:
        old (SkelCLDatabase): The database to migrate
    """
    def get_source(checksum):
        # Fetch kernel source text by checksum from the old schema.
        query = old.execute("SELECT source FROM kernels WHERE checksum = ?",
                            (checksum,))
        return query.fetchone()[0]

    def get_device_attr(device_id, name, count):
        # Fetch the old device row and splice in the new id and count.
        query = old.execute("SELECT * FROM devices WHERE name = ?",
                            (name,))
        attr = query.fetchone()
        # Splice into the new
        newattr = (device_id, attr[0], count) + attr[2:]
        return newattr

    def process_row(tmp, row):
        # Convert one old "runtimes" row into normalised table entries.
        # Get column values from row.
        host = row[0]
        dev_name = row[1]
        dev_count = row[2]
        kern_checksum = row[3]
        north = row[4]
        south = row[5]
        east = row[6]
        west = row[7]
        data_width = row[8]
        data_height = row[9]
        max_wg_size = row[10]
        wg_c = row[11]
        wg_r = row[12]
        runtime = row[13]
        type_in = "float"
        type_out = "float"

        # Lookup source code.
        source = get_source(kern_checksum)
        user_source = get_user_source(source)

        kernel_id = hash_kernel(north, south, east, west, max_wg_size, source)
        device_id = hash_device(dev_name, dev_count)
        data_id = hash_data(data_width, data_height, type_in, type_out)
        scenario_id = hash_scenario(host, device_id, kernel_id, data_id)
        params_id = hash_workgroup_size(wg_c, wg_r)

        device_attr = get_device_attr(device_id, dev_name, dev_count)

        # Add database entries.
        tmp.execute("INSERT OR IGNORE INTO kernels VALUES (?,?,?,?,?,?,?)",
                    (kernel_id, north, south, east, west, max_wg_size,
                     user_source))

        placeholders = ",".join(["?"] * len(device_attr))
        tmp.execute("INSERT OR IGNORE INTO devices VALUES (" +
                    placeholders + ")", device_attr)

        tmp.execute("INSERT OR IGNORE INTO data VALUES (?,?,?,?,?)",
                    (data_id, data_width, data_height, type_in, type_out))

        tmp.execute("INSERT OR IGNORE INTO params VALUES (?,?,?)",
                    (params_id, wg_c, wg_r))

        tmp.execute("INSERT OR IGNORE INTO scenarios VALUES (?,?,?,?,?)",
                    (scenario_id, host, device_id, kernel_id, data_id))

        tmp.execute("INSERT INTO runtimes VALUES (?,?,?)",
                    (scenario_id, params_id, runtime))

    # Create temporary database
    tmp = _db.Database("/tmp/omnitune.skelcl.migration.db")

    # Clear anything that's already in the database.
    for table in tmp.tables:
        tmp.drop_table(table)

    io.info("Migrating database to version 1.")

    backup_path = old.path + ".0"
    io.info("Creating backup of old database at '{0}'".format(backup_path))
    fs.cp(old.path, backup_path)

    io.debug("Migration: creating tables ...")

    # Create table: kernels
    tmp.create_table("version",
                     (("version", "integer"),))

    # Set database version
    tmp.execute("INSERT INTO version VALUES (1)")

    # Create table: kernels
    tmp.create_table("kernels",
                     (("id", "text primary key"),
                      ("north", "integer"),
                      ("south", "integer"),
                      ("east", "integer"),
                      ("west", "integer"),
                      ("max_wg_size", "integer"),
                      ("source", "text")))

    # Create table: devices
    tmp.create_table("devices",
                     (("id", "text primary key"),
                      ("name", "text"),
                      ("count", "integer"),
                      ("address_bits", "integer"),
                      ("double_fp_config", "integer"),
                      ("endian_little", "integer"),
                      ("execution_capabilities", "integer"),
                      ("extensions", "text"),
                      ("global_mem_cache_size", "integer"),
                      ("global_mem_cache_type", "integer"),
                      ("global_mem_cacheline_size", "integer"),
                      ("global_mem_size", "integer"),
                      ("host_unified_memory", "integer"),
                      ("image2d_max_height", "integer"),
                      ("image2d_max_width", "integer"),
                      ("image3d_max_depth", "integer"),
                      ("image3d_max_height", "integer"),
                      ("image3d_max_width", "integer"),
                      ("image_support", "integer"),
                      ("local_mem_size", "integer"),
                      ("local_mem_type", "integer"),
                      ("max_clock_frequency", "integer"),
                      ("max_compute_units", "integer"),
                      ("max_constant_args", "integer"),
                      ("max_constant_buffer_size", "integer"),
                      ("max_mem_alloc_size", "integer"),
                      ("max_parameter_size", "integer"),
                      ("max_read_image_args", "integer"),
                      ("max_samplers", "integer"),
                      ("max_work_group_size", "integer"),
                      ("max_work_item_dimensions", "integer"),
                      ("max_work_item_sizes_0", "integer"),
                      ("max_work_item_sizes_1", "integer"),
                      ("max_work_item_sizes_2", "integer"),
                      ("max_write_image_args", "integer"),
                      ("mem_base_addr_align", "integer"),
                      ("min_data_type_align_size", "integer"),
                      ("native_vector_width_char", "integer"),
                      ("native_vector_width_double", "integer"),
                      ("native_vector_width_float", "integer"),
                      ("native_vector_width_half", "integer"),
                      ("native_vector_width_int", "integer"),
                      ("native_vector_width_long", "integer"),
                      ("native_vector_width_short", "integer"),
                      ("preferred_vector_width_char", "integer"),
                      ("preferred_vector_width_double", "integer"),
                      ("preferred_vector_width_float", "integer"),
                      ("preferred_vector_width_half", "integer"),
                      ("preferred_vector_width_int", "integer"),
                      ("preferred_vector_width_long", "integer"),
                      ("preferred_vector_width_short", "integer"),
                      ("queue_properties", "integer"),
                      ("single_fp_config", "integer"),
                      ("type", "integer"),
                      ("vendor", "text"),
                      ("vendor_id", "text"),
                      ("version", "text")))

    # Create table: data
    tmp.create_table("data",
                     (("id", "text primary key"),
                      ("width", "integer"),
                      ("height", "integer"),
                      ("tin", "text"),
                      ("tout", "text")))

    # Create table: params
    tmp.create_table("params",
                     (("id", "text primary key"),
                      ("wg_c", "integer"),
                      ("wg_r", "integer")))

    # Create table: scenarios
    tmp.create_table("scenarios",
                     (("id", "text primary key"),
                      ("host", "text"),
                      ("device", "text"),
                      ("kernel", "text"),
                      ("data", "text")))

    # Create table: runtimes
    tmp.create_table("runtimes",
                     (("scenario", "text"),
                      ("params", "text"),
                      ("runtime", "real")))

    i = 0
    for row in old.execute("SELECT * from runtimes"):
        process_row(tmp, row)
        i += 1
        if not i % 2500:
            io.debug("Processed", i, "rows ...")
            if not i % 5000:
                tmp.commit()

    tmp.commit()

    old_path = old.path
    tmp_path = tmp.path

    # Copy migrated database over the original one.
    fs.cp(tmp_path, old_path)
    fs.rm(tmp_path)

    old.close()
    tmp.close()
    io.info("Migration completed.")
def test_constructor_schema(self):
    """Passing a schema dict to the constructor creates the tables."""
    path = "/tmp/labm8.sql"
    # Start from a clean slate so the constructor really creates the table.
    fs.rm(path)
    schema = {"foo": (("id", "integer"), ("prop", "text"))}
    database = db.Database(path, schema)
    self._test(["foo"], database.tables)
def migrate_2_to_3(old):
    """
    SkelCL database migration script.

    Migrates a version 2 database to version 3: rebuilds the feature and
    lookup tables with content-hashed IDs, rewrites scenario/kernel
    references, copies params and runtimes across, then replaces the old
    database file with the migrated one (a ".2" backup of the original is
    kept alongside it).

    Arguments:
        old (SkelCLDatabase): The database to migrate.
    """
    def _old_kernel2new(old_id):
        # Map an old kernel row ID to the new content-derived kernel ID.
        kernel = old.execute("SELECT north,south,east,west,max_wg_size,source "
                             "FROM kernels WHERE id=?",
                             (old_id,)).fetchone()
        if kernel:
            return tmp.kernel_id(*kernel)

    def _old_scenario2new(old_id):
        # Map an old scenario ID to the new content-derived scenario ID.
        device, old_kernel, dataset = old.execute("SELECT device,kernel,dataset "
                                                  "FROM scenarios WHERE id=?",
                                                  (old_id,)).fetchone()
        kernel = _old_kernel2new(old_kernel)
        return tmp.scenario_id(device, kernel, dataset)

    # Create temporary database to migrate into.
    fs.rm("/tmp/omnitune.skelcl.migration.db")
    tmp = _db.Database("/tmp/omnitune.skelcl.migration.db")
    tmp.attach(old.path, "rhs")

    io.info("Migrating database to version 3.")

    backup_path = old.path + ".2"
    io.info("Creating backup of old database at '{0}'".format(backup_path))
    fs.cp(old.path, backup_path)

    tmp_path = tmp.path
    old_path = old.path

    tmp.run("create_tables")

    # Populate feature and lookup tables.
    for row in old.execute("SELECT * FROM devices"):
        features = row[1:]
        id = hash_device(*features)
        io.debug("Features extracted for device", id)
        row = (id,) + features
        tmp.execute("INSERT INTO devices VALUES " + placeholders(*row), row)
        row = (features[0], features[1], id)
        tmp.execute("INSERT INTO device_lookup VALUES " + placeholders(*row), row)
        tmp.commit()

    for row in old.execute("SELECT * FROM kernels"):
        args = row[1:]
        # kernel_id() inserts into the kernel tables as a side effect.
        tmp.kernel_id(*args)

    for row in old.execute("SELECT * FROM datasets"):
        features = row[1:]
        id = hash_dataset(*features)
        io.debug("Features extracted for dataset", id)
        row = (id,) + features
        tmp.execute("INSERT INTO datasets VALUES " + placeholders(*row), row)
        row = features + (id,)
        tmp.execute("INSERT INTO dataset_lookup VALUES " + placeholders(*row), row)
        tmp.commit()

    # Populate kernel_names table. Kernels with no matching row in the
    # old kernels table are silently dropped.
    for row in old.execute("SELECT * FROM kernel_names"):
        old_id = row[0]
        synthetic, name = row[1:]
        kernel = _old_kernel2new(old_id)
        if kernel:
            row = (kernel, synthetic, name)
            tmp.execute("INSERT OR IGNORE INTO kernel_names VALUES " +
                        placeholders(*row), row)
    tmp.commit()

    # Populate scenarios table with re-hashed IDs.
    for row in old.execute("SELECT * FROM scenarios"):
        old_id, _, device, old_kernel, dataset = row
        kernel = _old_kernel2new(old_kernel)
        new_id = hash_scenario(device, kernel, dataset)
        row = (new_id, device, kernel, dataset)
        tmp.execute("INSERT OR IGNORE INTO scenarios VALUES " +
                    placeholders(*row), row)
    tmp.commit()

    # Populate params table.
    tmp.execute("INSERT INTO params SELECT * from rhs.params")
    tmp.commit()

    scenario_replacements = {
        row[0]: _old_scenario2new(row[0])
        for row in old.execute("SELECT * FROM scenarios")
    }

    tmp.execute("INSERT INTO runtimes SELECT * from rhs.runtimes")
    # .items() rather than the Python-2-only .iteritems(), so the script
    # runs under both Python 2 and 3.
    for old_id, new_id in scenario_replacements.items():
        io.info("Runtimes", old_id, "->", new_id)
        tmp.execute("UPDATE runtimes SET scenario=? WHERE scenario=?",
                    (new_id, old_id))
    tmp.commit()

    # Sanity check: every runtime scenario must resolve to exactly one
    # scenarios row.
    bad = False
    for row in tmp.execute("SELECT DISTINCT scenario FROM runtimes"):
        count = tmp.execute("SELECT Count(*) FROM scenarios WHERE id=?",
                            (row[0],)).fetchone()[0]
        if count != 1:
            io.error("Bad scenario count:", row[0], count)
            bad = True

    if bad:
        io.fatal("Failed sanity check, aborting.")
    else:
        io.info("Passed sanity check.")

    # Copy migrated database over the original one.
    fs.cp(tmp_path, old_path)
    fs.rm(tmp_path)

    old.close()
    tmp.close()
    io.info("Migration completed.")
def test_mkdir(self):
    """mkdir() brings a previously-absent directory into existence."""
    target = "/tmp/labm8.dir"
    fs.rm(target)
    self._test(False, fs.isdir(target))
    fs.mkdir(target)
    self._test(True, fs.isdir(target))
def test_ls_empty_dir(self):
    """Listing an empty directory yields an empty list."""
    empty = "/tmp/labm8.empty"
    fs.mkdir(empty)
    self._test([], fs.ls(empty))
    fs.rm(empty)
def test_mkopen(self):
    """mkopen() creates the intermediate directories before opening."""
    dirpath = "/tmp/labm8.dir"
    fs.rm(dirpath)
    self._test(False, fs.isdir(dirpath + "/"))
    handle = fs.mkopen(dirpath + "/foo", "w")
    self._test(True, fs.isdir(dirpath + "/"))
    handle.close()
def main():
    """
    Generate all experiment visualisations.

    Clears and recreates the image/table output directories, then renders:
    summary pie charts, per-scenario heatmap/bar/trisurf plots, ML
    classification and regression result plots, whole-dataset plots, and
    per-device / per-kernel / per-dataset coverage, safety and oracle
    workgroup-size plots.
    """
    db = _db.Database(experiment.ORACLE_PATH)
    ml.start()

    # Delete any old stuff.
    fs.rm(experiment.IMG_ROOT + "/*")
    fs.rm(experiment.TAB_ROOT + "/*")

    # Make directories
    fs.mkdir(experiment.TAB_ROOT)
    fs.mkdir(fs.path(experiment.IMG_ROOT, "scenarios/bars"))
    fs.mkdir(fs.path(experiment.IMG_ROOT, "scenarios/heatmap"))
    fs.mkdir(fs.path(experiment.IMG_ROOT, "scenarios/trisurf"))
    fs.mkdir(fs.path(experiment.IMG_ROOT, "coverage/devices"))
    fs.mkdir(fs.path(experiment.IMG_ROOT, "coverage/kernels"))
    fs.mkdir(fs.path(experiment.IMG_ROOT, "coverage/datasets"))
    fs.mkdir(fs.path(experiment.IMG_ROOT, "safety/devices"))
    fs.mkdir(fs.path(experiment.IMG_ROOT, "safety/kernels"))
    fs.mkdir(fs.path(experiment.IMG_ROOT, "safety/datasets"))
    fs.mkdir(fs.path(experiment.IMG_ROOT, "oracle/devices"))
    fs.mkdir(fs.path(experiment.IMG_ROOT, "oracle/kernels"))
    fs.mkdir(fs.path(experiment.IMG_ROOT, "oracle/datasets"))

    # NOTE(review): "num_sceanrios" below is a long-standing typo in the
    # output filenames; left unchanged in case downstream tooling expects it.
    visualise.pie(db.num_scenarios_by_device,
                  fs.path(experiment.IMG_ROOT, "num_sceanrios_by_device"))
    visualise.pie(db.num_runtime_stats_by_device,
                  fs.path(experiment.IMG_ROOT, "num_runtime_stats_by_device"))
    visualise.pie(db.num_scenarios_by_dataset,
                  fs.path(experiment.IMG_ROOT, "num_sceanrios_by_dataset"))
    visualise.pie(db.num_runtime_stats_by_dataset,
                  fs.path(experiment.IMG_ROOT, "num_runtime_stats_by_dataset"))
    # Fixed: this call was previously duplicated (the identical pie chart was
    # rendered twice to the same output path).
    visualise.pie(db.num_runtime_stats_by_kernel,
                  fs.path(experiment.IMG_ROOT, "num_runtime_stats_by_kernel"))

    # Per-scenario plots
    for row in db.scenario_properties:
        scenario, device, kernel, north, south, east, west, \
            max_wgsize, width, height, tout = row
        title = ("{device}: {kernel}[{n},{s},{e},{w}]\n"
                 "{width} x {height} {type}s"
                 .format(device=text.truncate(device, 18), kernel=kernel,
                         n=north, s=south, e=east, w=west,
                         width=width, height=height, type=tout))
        output = fs.path(experiment.IMG_ROOT,
                         "scenarios/heatmap/{id}.png".format(id=scenario))
        space = _space.ParamSpace.from_dict(db.perf_scenario(scenario))
        max_c = min(25, len(space.c))
        max_r = min(25, len(space.r))
        space.reshape(max_c=max_c, max_r=max_r)

        # Heatmaps. Mark each zero cell as either illegal (exceeds the
        # device's max workgroup size) or refused by the runtime.
        mask = _space.ParamSpace(space.c, space.r)
        for j in range(len(mask.r)):
            for i in range(len(mask.c)):
                if space.matrix[j][i] == 0:
                    r, c = space.r[j], space.c[i]
                    # TODO: Get values from refused_params table.
                    if r * c >= max_wgsize:
                        # Illegal
                        mask.matrix[j][i] = -1
                    else:
                        # Refused
                        db.execute("INSERT OR IGNORE INTO refused_params VALUES(?,?)",
                                   (scenario, hash_params(c, r)))
                        space.matrix[j][i] = -1
                        mask.matrix[j][i] = 1
        db.commit()

        # Flip row order so the origin is at the bottom of the heatmap.
        new_order = list(reversed(range(space.matrix.shape[0])))
        data = space.matrix[:][new_order]

        figsize = (12, 6)
        _, ax = plt.subplots(1, 2, figsize=figsize, sharey=True)
        sns.heatmap(data, ax=ax[0], vmin=-1, vmax=1,
                    xticklabels=space.c,
                    yticklabels=list(reversed(space.r)), square=True)
        ax[0].set_title(title)

        new_order = list(reversed(range(mask.matrix.shape[0])))
        data = mask.matrix[:][new_order]
        sns.heatmap(data, ax=ax[1], vmin=-1, vmax=1,
                    xticklabels=space.c,
                    yticklabels=list(reversed(space.r)), square=True)

        # Set labels.
        ax[0].set_ylabel("Rows")
        ax[0].set_xlabel("Columns")
        ax[1].set_ylabel("Rows")
        ax[1].set_xlabel("Columns")

        viz.finalise(output)

        # 3D bars.
        output = fs.path(experiment.IMG_ROOT,
                         "scenarios/bars/{id}.png".format(id=scenario))
        space.bar3d(output=output, title=title, zlabel="Performance",
                    rotation=45)

        # Trisurfs.
        output = fs.path(experiment.IMG_ROOT,
                         "scenarios/trisurf/{id}.png".format(id=scenario))
        space.trisurf(output=output, title=title, zlabel="Performance",
                      rotation=45)

    #####################
    # ML Visualisations #
    #####################
    #features_tab(db, experiment.TAB_ROOT)
    visualise_classification_job(db, "xval")
    visualise_classification_job(db, "arch")
    visualise_classification_job(db, "xval_real")
    visualise_classification_job(db, "synthetic_real")

    # Runtime regression accuracy.
    visualise_regression_job(db, "xval")
    visualise_regression_job(db, "arch")
    visualise_regression_job(db, "xval_real")
    visualise_regression_job(db, "synthetic_real")

    # Whole-dataset plots
    visualise.runtimes_variance(db, fs.path(experiment.IMG_ROOT,
                                            "runtime_variance.png"),
                                min_samples=30)
    visualise.num_samples(db, fs.path(experiment.IMG_ROOT,
                                      "num_samples.png"))
    visualise.runtimes_range(db, fs.path(experiment.IMG_ROOT,
                                         "runtimes_range.png"))
    visualise.max_speedups(db, fs.path(experiment.IMG_ROOT,
                                       "max_speedups.png"))
    visualise.kernel_performance(db, fs.path(experiment.IMG_ROOT,
                                             "kernel_performance.png"))
    visualise.device_performance(db, fs.path(experiment.IMG_ROOT,
                                             "device_performance.png"))
    visualise.dataset_performance(db, fs.path(experiment.IMG_ROOT,
                                              "dataset_performance.png"))
    visualise.num_params_vs_accuracy(db, fs.path(experiment.IMG_ROOT,
                                                 "num_params_vs_accuracy.png"))
    visualise.performance_vs_coverage(db,
                                      fs.path(experiment.IMG_ROOT,
                                              "performance_vs_coverage.png"))
    visualise.performance_vs_max_wgsize(
        db, fs.path(experiment.IMG_ROOT, "performance_vs_max_wgsize.png")
    )
    visualise.performance_vs_wgsize(db, fs.path(experiment.IMG_ROOT,
                                                "performance_vs_wgsize.png"))
    visualise.performance_vs_wg_c(db, fs.path(experiment.IMG_ROOT,
                                              "performance_vs_wg_c.png"))
    visualise.performance_vs_wg_r(db, fs.path(experiment.IMG_ROOT,
                                              "performance_vs_wg_r.png"))
    visualise.max_wgsizes(db, fs.path(experiment.IMG_ROOT,
                                      "max_wgsizes.png"))
    visualise.oracle_speedups(db, fs.path(experiment.IMG_ROOT,
                                          "oracle_speedups.png"))
    visualise.coverage(db, fs.path(experiment.IMG_ROOT,
                                   "coverage/coverage.png"))
    visualise.safety(db, fs.path(experiment.IMG_ROOT, "safety/safety.png"))
    visualise.oracle_wgsizes(db, fs.path(experiment.IMG_ROOT,
                                         "oracle/all.png"))

    # Per-device plots
    for i, device in enumerate(db.devices):
        where = ("scenario IN "
                 "(SELECT id from scenarios WHERE device='{0}')"
                 .format(device))
        output = fs.path(experiment.IMG_ROOT,
                         "coverage/devices/{0}.png".format(i))
        visualise.coverage(db, output=output, where=where, title=device)
        output = fs.path(experiment.IMG_ROOT,
                         "safety/devices/{0}.png".format(i))
        visualise.safety(db, output, where=where, title=device)
        output = fs.path(experiment.IMG_ROOT,
                         "oracle/devices/{0}.png".format(i))
        visualise.oracle_wgsizes(db, output, where=where, title=device)

        # Real (non-synthetic) kernels only.
        where = ("scenario IN (\n"
                 "    SELECT id from scenarios WHERE device='{0}'\n"
                 ") AND scenario IN (\n"
                 "    SELECT id FROM scenarios WHERE kernel IN (\n"
                 "        SELECT id FROM kernel_names WHERE synthetic=0\n"
                 "    )\n"
                 ")"
                 .format(device))
        output = fs.path(experiment.IMG_ROOT,
                         "coverage/devices/{0}_real.png".format(i))
        visualise.coverage(db, output=output, where=where,
                           title=device + ", real")
        output = fs.path(experiment.IMG_ROOT,
                         "safety/devices/{0}_real.png".format(i))
        visualise.safety(db, output, where=where, title=device + ", real")
        output = fs.path(experiment.IMG_ROOT,
                         "oracle/devices/{0}_real.png".format(i))
        visualise.oracle_wgsizes(db, output, where=where,
                                 title=device + ", real")

        # Synthetic kernels only.
        where = ("scenario IN (\n"
                 "    SELECT id from scenarios WHERE device='{0}'\n"
                 ") AND scenario IN (\n"
                 "    SELECT id FROM scenarios WHERE kernel IN (\n"
                 "        SELECT id FROM kernel_names WHERE synthetic=1\n"
                 "    )\n"
                 ")"
                 .format(device))
        output = fs.path(experiment.IMG_ROOT,
                         "coverage/devices/{0}_synthetic.png".format(i))
        visualise.coverage(db, output=output, where=where,
                           title=device + ", synthetic")
        output = fs.path(experiment.IMG_ROOT,
                         "safety/devices/{0}_synthetic.png".format(i))
        visualise.safety(db, output, where=where,
                         title=device + ", synthetic")
        output = fs.path(experiment.IMG_ROOT,
                         "oracle/devices/{0}_synthetic.png".format(i))
        visualise.oracle_wgsizes(db, output, where=where,
                                 title=device + ", synthetic")

    # Per-kernel plots
    # .items() rather than the Python-2-only .iteritems().
    for kernel, ids in db.lookup_named_kernels().items():
        id_wrapped = ['"' + id + '"' for id in ids]
        where = ("scenario IN "
                 "(SELECT id from scenarios WHERE kernel IN ({0}))"
                 .format(",".join(id_wrapped)))
        output = fs.path(experiment.IMG_ROOT,
                         "coverage/kernels/{0}.png".format(kernel))
        visualise.coverage(db, output=output, where=where, title=kernel)
        output = fs.path(experiment.IMG_ROOT,
                         "safety/kernels/{0}.png".format(kernel))
        visualise.safety(db, output=output, where=where, title=kernel)
        output = fs.path(experiment.IMG_ROOT,
                         "oracle/kernels/{0}.png".format(kernel))
        # Fixed: previously called visualise.safety() here, a copy-paste bug —
        # the "oracle/" output is rendered by oracle_wgsizes() everywhere else.
        visualise.oracle_wgsizes(db, output=output, where=where, title=kernel)

    # Per-dataset plots
    for i, dataset in enumerate(db.datasets):
        where = ("scenario IN "
                 "(SELECT id from scenarios WHERE dataset='{0}')"
                 .format(dataset))
        output = fs.path(experiment.IMG_ROOT,
                         "coverage/datasets/{0}.png".format(i))
        visualise.coverage(db, output, where=where, title=dataset)
        output = fs.path(experiment.IMG_ROOT,
                         "safety/datasets/{0}.png".format(i))
        visualise.safety(db, output, where=where, title=dataset)
        output = fs.path(experiment.IMG_ROOT,
                         "oracle/datasets/{0}.png".format(i))
        # Fixed: same safety()/oracle_wgsizes() copy-paste bug as above.
        visualise.oracle_wgsizes(db, output, where=where, title=dataset)

    ml.stop()
def test_finalise_figsize(self):
    """finalise() honours an explicit figsize and writes the output file."""
    path = "/tmp/labm8.png"
    self._mkplot()
    viz.finalise(path, figsize=(10, 5))
    self.assertTrue(fs.exists(path))
    fs.rm(path)